Merge pull request #10413 from id/0416-e5.0.3-code-freeze

0416 e5.0.3 code freeze

commit 9712aad7a8
@@ -8,4 +8,7 @@ TDENGINE_TAG=3.0.2.4
 DYNAMO_TAG=1.21.0
 CASSANDRA_TAG=3.11.6
 
+MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server
+SQLSERVER_TAG=2019-CU19-ubuntu-20.04
+
 TARGET=emqx/emqx
@@ -0,0 +1,19 @@
+version: '3.9'
+
+services:
+  sql_server:
+    container_name: sqlserver
+    # See also:
+    # https://mcr.microsoft.com/en-us/product/mssql/server/about
+    # https://hub.docker.com/_/microsoft-mssql-server
+    image: ${MS_IMAGE_ADDR}:${SQLSERVER_TAG}
+    environment:
+      # See also:
+      # https://learn.microsoft.com/en-us/sql/linux/sql-server-linux-configure-environment-variables
+      ACCEPT_EULA: "Y"
+      MSSQL_SA_PASSWORD: "mqtt_public1"
+    restart: always
+    # ports:
+    #   - "1433:1433"
+    networks:
+      - emqx_bridge
@@ -16,6 +16,7 @@ services:
       - 8474:8474
       - 8086:8086
       - 8087:8087
+      - 11433:1433
       - 13306:3306
       - 13307:3307
       - 15432:5432

@@ -24,6 +24,7 @@ services:
       - /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
       - ./kerberos/krb5.conf:/etc/kdc/krb5.conf
       - ./kerberos/krb5.conf:/etc/krb5.conf
+      # - ./odbc/odbcinst.ini:/etc/odbcinst.ini
     working_dir: /emqx
     tty: true
     user: "${DOCKER_USER:-root}"
@@ -0,0 +1,9 @@
+[ms-sql]
+Description=Microsoft ODBC Driver 17 for SQL Server
+Driver=/opt/microsoft/msodbcsql17/lib64/libmsodbcsql-17.10.so.2.1
+UsageCount=1
+
+[ODBC Driver 17 for SQL Server]
+Description=Microsoft ODBC Driver 17 for SQL Server
+Driver=/opt/microsoft/msodbcsql17/lib64/libmsodbcsql-17.10.so.2.1
+UsageCount=1
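Note: the commented-out odbcinst.ini mount above pairs with this driver registration. A connection test against the container could look roughly like the sketch below; the server name, port, sa user and the mqtt_public1 password are taken from the compose files in this change, while the helper function itself is hypothetical and not part of this PR.

%% Illustrative sketch only: reach the SQL Server container through the
%% "ODBC Driver 17 for SQL Server" entry registered above, using OTP's odbc app.
connect_sqlserver() ->
    {ok, _} = application:ensure_all_started(odbc),
    ConnStr =
        "Driver={ODBC Driver 17 for SQL Server};"
        "Server=sqlserver,1433;"
        "UID=sa;PWD=mqtt_public1",
    {ok, Conn} = odbc:connect(ConnStr, [{auto_commit, on}]),
    {selected, _Cols, _Rows} = odbc:sql_query(Conn, "SELECT 1"),
    ok = odbc:disconnect(Conn).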
@@ -95,5 +95,11 @@
     "listen": "0.0.0.0:9142",
     "upstream": "cassandra:9142",
     "enabled": true
+  },
+  {
+    "name": "sqlserver",
+    "listen": "0.0.0.0:1433",
+    "upstream": "sqlserver:1433",
+    "enabled": true
   }
 ]
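With this proxy entry in place, a test can simulate a SQL Server outage by toggling the proxy through Toxiproxy's HTTP admin API. The sketch below is illustrative only: the toxiproxy host name, the 8474 admin port (exposed in the docker-compose change above) and the helper name are assumptions, not code from this PR.

%% Illustrative sketch only: enable/disable the "sqlserver" proxy defined above
%% via Toxiproxy's HTTP API to cut or restore connectivity during a test.
set_sqlserver_proxy(Enabled) when is_boolean(Enabled) ->
    {ok, _} = application:ensure_all_started(inets),
    Url = "http://toxiproxy:8474/proxies/sqlserver",
    Body = "{\"enabled\": " ++ atom_to_list(Enabled) ++ "}",
    {ok, {{_, 200, _}, _Headers, _RespBody}} =
        httpc:request(post, {Url, [], "application/json", Body}, [], []),
    ok.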
@@ -23,7 +23,18 @@ jobs:
           mix local.hex --force
           mix local.rebar --force
           mix deps.get
+        # we check only enterprise because `rebar3 tree`, even if an
+        # enterprise app is excluded from `project_app_dirs` in
+        # `rebar.config.erl`, will still list dependencies from it.
+        # Since the enterprise profile is a superset of the
+        # community one and thus more complete, we use the former.
+        env:
+          MIX_ENV: emqx-enterprise
+          PROFILE: emqx-enterprise
       - name: check elixir deps
         run: ./scripts/check-elixir-deps-discrepancies.exs
+        env:
+          MIX_ENV: emqx-enterprise
+          PROFILE: emqx-enterprise
 
 ...
@@ -24,6 +24,8 @@
     filter ::
         emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address() | undefined | '_',
     enable = true :: boolean() | '_',
+    payload_encode = text :: hex | text | hidden | '_',
+    extra = #{} :: map() | '_',
     start_at :: integer() | undefined | '_',
     end_at :: integer() | undefined | '_'
 }).
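The new payload_encode field controls how traced payloads are rendered. The following is only an illustrative sketch of what the three modes could mean; it is not the actual emqx_trace formatter.

%% Illustrative sketch only: one possible mapping of the payload_encode modes.
%% binary:encode_hex/1 assumes OTP 24 or newer.
format_payload(Payload, text) -> Payload;
format_payload(Payload, hex) -> binary:encode_hex(Payload);
format_payload(_Payload, hidden) -> <<"******">>.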
@@ -6,6 +6,7 @@
 {emqx_bridge,1}.
 {emqx_bridge,2}.
 {emqx_bridge,3}.
+{emqx_bridge,4}.
 {emqx_broker,1}.
 {emqx_cm,1}.
 {emqx_conf,1}.
@@ -18,25 +18,25 @@
 ]}.
 
 %% Deps here may duplicate with emqx.git root level rebar.config
-%% but there not be any descrpancy.
+%% but there may not be any discrepancy.
 %% This rebar.config is necessary because the app may be used as a
 %% `git_subdir` dependency in other projects.
 {deps, [
+    {emqx_utils, {path, "../emqx_utils"}},
     {lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}},
     {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}},
-    {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}},
     {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}},
     {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}},
     {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.6"}}},
     {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
-    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.38.0"}}},
+    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.38.1"}}},
     {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}},
     {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
     {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
     {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.7"}}}
 ]}.
 
-{plugins, [{rebar3_proper, "0.12.1"}]}.
+{plugins, [{rebar3_proper, "0.12.1"}, rebar3_path_deps]}.
 {extra_src_dirs, [{"etc", [recursive]}]}.
 {profiles, [
     {test, [
@@ -32,25 +32,15 @@ remove_handler() ->
     ok = emqx_config_handler:remove_handler(?LOG),
     ok.
 
-%% refresh logger config when booting, the override config may have changed after node start.
+%% refresh logger config when booting, the cluster config may have changed after node start.
 %% Kernel's app env is confirmed before the node starts,
-%% but we only copy cluster-override.conf from other node after this node starts,
+%% but we only copy cluster.conf from other node after this node starts,
 %% so we need to refresh the logger config after this node starts.
-%% It will not affect the logger config when cluster-override.conf is unchanged.
+%% It will not affect the logger config when cluster.conf is unchanged.
 refresh_config() ->
-    Overrides = emqx_config:read_override_confs(),
-    refresh_config(Overrides).
-
-refresh_config(#{<<"log">> := _}) ->
     %% read the checked config
     LogConfig = emqx:get_config(?LOG, undefined),
-    Conf = #{log => LogConfig},
-    ok = do_refresh_config(Conf);
-refresh_config(_) ->
-    %% No config override found for 'log', do nothing
-    %% because the 'kernel' app should already be configured
-    %% from the base configs. i.e. emqx.conf + env vars
-    ok.
+    do_refresh_config(#{log => LogConfig}).
 
 %% this call is shared between initial config refresh at boot
 %% and dynamic config update from HTTP API

@@ -61,10 +51,9 @@ do_refresh_config(Conf) ->
     ok = maybe_update_log_level(Level),
     ok.
 
+%% always refresh config when the override config is changed
 post_config_update(?LOG, _Req, NewConf, _OldConf, _AppEnvs) ->
-    ok = do_refresh_config(#{log => NewConf});
-post_config_update(_ConfPath, _Req, _NewConf, _OldConf, _AppEnvs) ->
-    ok.
+    do_refresh_config(#{log => NewConf}).
 
 maybe_update_log_level(NewLevel) ->
     OldLevel = emqx_logger:get_primary_log_level(),
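For orientation, a dynamic update of the log root ends up in the post_config_update/5 callback above, which re-applies the logger settings. A hedged usage sketch follows; the wrapper name and the exact key path are illustrative, not taken from this PR.

%% Illustrative sketch only: push a new `log' config through the central config
%% update API; emqx_config_logger's post_config_update/5 then refreshes handlers.
update_log_config(NewLogConf) when is_map(NewLogConf) ->
    {ok, _Result} = emqx:update_config([log], NewLogConf),
    ok.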
@@ -3,7 +3,7 @@
     {id, "emqx"},
     {description, "EMQX Core"},
     % strict semver, bump manually!
-    {vsn, "5.0.22"},
+    {vsn, "5.0.23"},
     {modules, []},
     {registered, []},
     {applications, [

@@ -16,7 +16,6 @@
         cowboy,
         sasl,
         os_mon,
-        jiffy,
         lc,
         hocon
     ]},
@@ -164,29 +164,29 @@ run_hook(HookPoint, Args) ->
 run_fold_hook(HookPoint, Args, Acc) ->
     emqx_hooks:run_fold(HookPoint, Args, Acc).
 
--spec get_config(emqx_map_lib:config_key_path()) -> term().
+-spec get_config(emqx_utils_maps:config_key_path()) -> term().
 get_config(KeyPath) ->
     emqx_config:get(KeyPath).
 
--spec get_config(emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_config(emqx_utils_maps:config_key_path(), term()) -> term().
 get_config(KeyPath, Default) ->
     emqx_config:get(KeyPath, Default).
 
--spec get_raw_config(emqx_map_lib:config_key_path()) -> term().
+-spec get_raw_config(emqx_utils_maps:config_key_path()) -> term().
 get_raw_config(KeyPath) ->
     emqx_config:get_raw(KeyPath).
 
--spec get_raw_config(emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_raw_config(emqx_utils_maps:config_key_path(), term()) -> term().
 get_raw_config(KeyPath, Default) ->
     emqx_config:get_raw(KeyPath, Default).
 
--spec update_config(emqx_map_lib:config_key_path(), emqx_config:update_request()) ->
+-spec update_config(emqx_utils_maps:config_key_path(), emqx_config:update_request()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 update_config(KeyPath, UpdateReq) ->
     update_config(KeyPath, UpdateReq, #{}).
 
 -spec update_config(
-    emqx_map_lib:config_key_path(),
+    emqx_utils_maps:config_key_path(),
     emqx_config:update_request(),
     emqx_config:update_opts()
 ) ->

@@ -198,12 +198,12 @@ update_config([RootName | _] = KeyPath, UpdateReq, Opts) ->
         {{update, UpdateReq}, Opts}
     ).
 
--spec remove_config(emqx_map_lib:config_key_path()) ->
+-spec remove_config(emqx_utils_maps:config_key_path()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 remove_config(KeyPath) ->
     remove_config(KeyPath, #{}).
 
--spec remove_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
+-spec remove_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 remove_config([RootName | _] = KeyPath, Opts) ->
     emqx_config_handler:update_config(

@@ -212,7 +212,7 @@ remove_config([RootName | _] = KeyPath, Opts) ->
         {remove, Opts}
     ).
 
--spec reset_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
+-spec reset_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 reset_config([RootName | _] = KeyPath, Opts) ->
     case emqx_config:get_default_value(KeyPath) of
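These are mechanical spec updates from emqx_map_lib to emqx_utils_maps; call sites keep passing the same key-path lists. A hedged usage sketch (the concrete key path and default value are for illustration only, not from this PR):

%% Illustrative sketch only: a config_key_path is a list of keys, as accepted by
%% the emqx:get_config/2 API whose spec now references emqx_utils_maps.
max_packet_size() ->
    emqx:get_config([mqtt, max_packet_size], 1048576).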
@@ -423,7 +423,7 @@ do_actions(deactivate, Alarm = #deactivated_alarm{name = Name}, [log | More]) ->
     do_actions(deactivate, Alarm, More);
 do_actions(Operation, Alarm, [publish | More]) ->
     Topic = topic(Operation),
-    {ok, Payload} = emqx_json:safe_encode(normalize(Alarm)),
+    {ok, Payload} = emqx_utils_json:safe_encode(normalize(Alarm)),
     Message = emqx_message:make(
         ?MODULE,
         0,
@@ -277,9 +277,9 @@ atom(Bin) -> binary_to_existing_atom(Bin, utf8).
 certs_dir(ChainName, ConfigOrID) ->
     DirName = dir(ChainName, ConfigOrID),
     SubDir = iolist_to_binary(filename:join(["authn", DirName])),
-    emqx_misc:safe_filename(SubDir).
+    emqx_utils:safe_filename(SubDir).
 
 dir(ChainName, ID) when is_binary(ID) ->
-    emqx_misc:safe_filename(iolist_to_binary([to_bin(ChainName), "-", ID]));
+    emqx_utils:safe_filename(iolist_to_binary([to_bin(ChainName), "-", ID]));
 dir(ChainName, Config) when is_map(Config) ->
     dir(ChainName, authenticator_id(Config)).
@@ -243,7 +243,7 @@ handle_info(Info, State) ->
     {noreply, State}.
 
 terminate(_Reason, #{expiry_timer := TRef}) ->
-    emqx_misc:cancel_timer(TRef).
+    emqx_utils:cancel_timer(TRef).
 
 code_change(_OldVsn, State, _Extra) ->
     {ok, State}.

@@ -254,10 +254,10 @@ code_change(_OldVsn, State, _Extra) ->
 
 -ifdef(TEST).
 ensure_expiry_timer(State) ->
-    State#{expiry_timer := emqx_misc:start_timer(10, expire)}.
+    State#{expiry_timer := emqx_utils:start_timer(10, expire)}.
 -else.
 ensure_expiry_timer(State) ->
-    State#{expiry_timer := emqx_misc:start_timer(timer:minutes(1), expire)}.
+    State#{expiry_timer := emqx_utils:start_timer(timer:minutes(1), expire)}.
 -endif.
 
 expire_banned_items(Now) ->
@@ -85,7 +85,7 @@ commit(Batch = #batch{batch_q = Q, commit_fun = Commit}) ->
     reset(Batch).
 
 reset(Batch = #batch{linger_timer = TRef}) ->
-    _ = emqx_misc:cancel_timer(TRef),
+    _ = emqx_utils:cancel_timer(TRef),
     Batch#batch{batch_q = [], linger_timer = undefined}.
 
 -spec size(batch()) -> non_neg_integer().
@@ -71,7 +71,7 @@
     code_change/3
 ]).
 
--import(emqx_tables, [lookup_value/2, lookup_value/3]).
+-import(emqx_utils_ets, [lookup_value/2, lookup_value/3]).
 
 -ifdef(TEST).
 -compile(export_all).

@@ -92,7 +92,7 @@
 start_link(Pool, Id) ->
     ok = create_tabs(),
     gen_server:start_link(
-        {local, emqx_misc:proc_name(?BROKER, Id)},
+        {local, emqx_utils:proc_name(?BROKER, Id)},
         ?MODULE,
         [Pool, Id],
         []

@@ -107,15 +107,15 @@ create_tabs() ->
     TabOpts = [public, {read_concurrency, true}, {write_concurrency, true}],
 
     %% SubOption: {Topic, SubPid} -> SubOption
-    ok = emqx_tables:new(?SUBOPTION, [ordered_set | TabOpts]),
+    ok = emqx_utils_ets:new(?SUBOPTION, [ordered_set | TabOpts]),
 
     %% Subscription: SubPid -> Topic1, Topic2, Topic3, ...
     %% duplicate_bag: o(1) insert
-    ok = emqx_tables:new(?SUBSCRIPTION, [duplicate_bag | TabOpts]),
+    ok = emqx_utils_ets:new(?SUBSCRIPTION, [duplicate_bag | TabOpts]),
 
     %% Subscriber: Topic -> SubPid1, SubPid2, SubPid3, ...
     %% bag: o(n) insert:(
-    ok = emqx_tables:new(?SUBSCRIBER, [bag | TabOpts]).
+    ok = emqx_utils_ets:new(?SUBSCRIBER, [bag | TabOpts]).
 
 %%------------------------------------------------------------------------------
 %% Subscribe API
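The old emqx_tables helper is renamed to emqx_utils_ets, keeping the new/2 and lookup_value/2,3 API used above. A rough usage sketch; the table name and exact return-value semantics are assumed for illustration, not taken from this PR.

%% Illustrative sketch only: create a named ETS table via the renamed helper and
%% read a value back with lookup_value/2.
demo_table() ->
    ok = emqx_utils_ets:new(demo_tab, [set, public, {read_concurrency, true}]),
    true = ets:insert(demo_tab, {answer, 42}),
    42 = emqx_utils_ets:lookup_value(demo_tab, answer),
    ok.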
@@ -73,11 +73,11 @@ register_sub(SubPid, SubId) when is_pid(SubPid) ->
 
 -spec lookup_subid(pid()) -> maybe(emqx_types:subid()).
 lookup_subid(SubPid) when is_pid(SubPid) ->
-    emqx_tables:lookup_value(?SUBMON, SubPid).
+    emqx_utils_ets:lookup_value(?SUBMON, SubPid).
 
 -spec lookup_subpid(emqx_types:subid()) -> maybe(pid()).
 lookup_subpid(SubId) ->
-    emqx_tables:lookup_value(?SUBID, SubId).
+    emqx_utils_ets:lookup_value(?SUBID, SubId).
 
 -spec get_sub_shard(pid(), emqx_types:topic()) -> non_neg_integer().
 get_sub_shard(SubPid, Topic) ->

@@ -105,15 +105,15 @@ reclaim_seq(Topic) ->
 
 init([]) ->
     %% Helper table
-    ok = emqx_tables:new(?HELPER, [{read_concurrency, true}]),
+    ok = emqx_utils_ets:new(?HELPER, [{read_concurrency, true}]),
     %% Shards: CPU * 32
     true = ets:insert(?HELPER, {shards, emqx_vm:schedulers() * 32}),
     %% SubSeq: Topic -> SeqId
     ok = emqx_sequence:create(?SUBSEQ),
     %% SubId: SubId -> SubPid
-    ok = emqx_tables:new(?SUBID, [public, {read_concurrency, true}, {write_concurrency, true}]),
+    ok = emqx_utils_ets:new(?SUBID, [public, {read_concurrency, true}, {write_concurrency, true}]),
     %% SubMon: SubPid -> SubId
-    ok = emqx_tables:new(?SUBMON, [public, {read_concurrency, true}, {write_concurrency, true}]),
+    ok = emqx_utils_ets:new(?SUBMON, [public, {read_concurrency, true}, {write_concurrency, true}]),
     %% Stats timer
     ok = emqx_stats:update_interval(broker_stats, fun emqx_broker:stats_fun/0),
     {ok, #{pmon => emqx_pmon:new()}}.

@@ -131,7 +131,7 @@ handle_cast(Msg, State) ->
     {noreply, State}.
 
 handle_info({'DOWN', _MRef, process, SubPid, _Reason}, State = #{pmon := PMon}) ->
-    SubPids = [SubPid | emqx_misc:drain_down(?BATCH_SIZE)],
+    SubPids = [SubPid | emqx_utils:drain_down(?BATCH_SIZE)],
     ok = emqx_pool:async_submit(
         fun lists:foreach/2, [fun clean_down/1, SubPids]
     ),
@@ -61,7 +61,7 @@
 -export([set_field/3]).
 
 -import(
-    emqx_misc,
+    emqx_utils,
     [
         run_fold/3,
         pipeline/3,

@@ -622,7 +622,7 @@ process_connect(
             NChannel = Channel#channel{session = Session},
             handle_out(connack, {?RC_SUCCESS, sp(false), AckProps}, ensure_connected(NChannel));
         {ok, #{session := Session, present := true, pendings := Pendings}} ->
-            Pendings1 = lists:usort(lists:append(Pendings, emqx_misc:drain_deliver())),
+            Pendings1 = lists:usort(lists:append(Pendings, emqx_utils:drain_deliver())),
             NChannel = Channel#channel{
                 session = Session,
                 resuming = true,

@@ -1203,7 +1203,7 @@ handle_call(
 ) ->
     ok = emqx_session:takeover(Session),
     %% TODO: Should not drain deliver here (side effect)
-    Delivers = emqx_misc:drain_deliver(),
+    Delivers = emqx_utils:drain_deliver(),
     AllPendings = lists:append(Delivers, Pendings),
     disconnect_and_shutdown(takenover, AllPendings, Channel);
 handle_call(list_authz_cache, Channel) ->

@@ -1402,7 +1402,7 @@ ensure_timer(Name, Channel = #channel{timers = Timers}) ->
 
 ensure_timer(Name, Time, Channel = #channel{timers = Timers}) ->
     Msg = maps:get(Name, ?TIMER_TABLE),
-    TRef = emqx_misc:start_timer(Time, Msg),
+    TRef = emqx_utils:start_timer(Time, Msg),
     Channel#channel{timers = Timers#{Name => TRef}}.
 
 reset_timer(Name, Channel) ->

@@ -1630,7 +1630,7 @@ check_banned(_ConnPkt, #channel{clientinfo = ClientInfo}) ->
 %% Flapping
 
 count_flapping_event(_ConnPkt, Channel = #channel{clientinfo = ClientInfo = #{zone := Zone}}) ->
-    emqx_config:get_zone_conf(Zone, [flapping_detect, enable]) andalso
+    is_integer(emqx_config:get_zone_conf(Zone, [flapping_detect, window_time])) andalso
         emqx_flapping:detect(ClientInfo),
     {ok, Channel}.
 
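The flapping check above now treats a configured integer window_time as the on switch rather than a separate enable flag. Roughly, in sketch form (the helper name is illustrative, not part of this PR):

%% Illustrative sketch only: flapping detection counts as enabled for a zone
%% when `window_time' resolves to an integer duration.
flapping_enabled(Zone) ->
    is_integer(emqx_config:get_zone_conf(Zone, [flapping_detect, window_time])).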
@@ -2045,7 +2045,7 @@ clear_keepalive(Channel = #channel{timers = Timers}) ->
         undefined ->
             Channel;
         TRef ->
-            emqx_misc:cancel_timer(TRef),
+            emqx_utils:cancel_timer(TRef),
             Channel#channel{timers = maps:without([alive_timer], Timers)}
     end.
 %%--------------------------------------------------------------------

@@ -2241,7 +2241,7 @@ get_mqtt_conf(Zone, Key, Default) ->
 %%--------------------------------------------------------------------
 
 set_field(Name, Value, Channel) ->
-    Pos = emqx_misc:index_of(Name, record_info(fields, channel)),
+    Pos = emqx_utils:index_of(Name, record_info(fields, channel)),
     setelement(Pos + 1, Channel, Value).
 
 get_mqueue(#channel{session = Session}) ->
@@ -651,10 +651,10 @@ cast(Msg) -> gen_server:cast(?CM, Msg).
 
 init([]) ->
     TabOpts = [public, {write_concurrency, true}],
-    ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]),
-    ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]),
-    ok = emqx_tables:new(?CHAN_INFO_TAB, [ordered_set, compressed | TabOpts]),
-    ok = emqx_tables:new(?CHAN_LIVE_TAB, [ordered_set, {write_concurrency, true} | TabOpts]),
+    ok = emqx_utils_ets:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]),
+    ok = emqx_utils_ets:new(?CHAN_CONN_TAB, [bag | TabOpts]),
+    ok = emqx_utils_ets:new(?CHAN_INFO_TAB, [ordered_set, compressed | TabOpts]),
+    ok = emqx_utils_ets:new(?CHAN_LIVE_TAB, [ordered_set, {write_concurrency, true} | TabOpts]),
     ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0),
     State = #{chan_pmon => emqx_pmon:new()},
     {ok, State}.

@@ -672,7 +672,7 @@ handle_cast(Msg, State) ->
 
 handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
     ?tp(emqx_cm_process_down, #{stale_pid => Pid, reason => _Reason}),
-    ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)],
+    ChanPids = [Pid | emqx_utils:drain_down(?BATCH_SIZE)],
     {Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon),
     lists:foreach(fun mark_channel_disconnected/1, ChanPids),
     ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]),
@@ -24,7 +24,7 @@
     init_load/2,
     init_load/3,
     read_override_conf/1,
-    read_override_confs/0,
+    has_deprecated_file/0,
     delete_override_conf_files/0,
     check_config/2,
     fill_defaults/1,

@@ -33,8 +33,10 @@
     save_configs/5,
     save_to_app_env/1,
     save_to_config_map/2,
-    save_to_override_conf/2
+    save_to_override_conf/3
 ]).
+-export([raw_conf_with_default/4]).
+-export([merge_envs/2]).
 
 -export([
     get_root/1,

@@ -142,7 +144,7 @@
 -type app_envs() :: [proplists:property()].
 
 %% @doc For the given path, get root value enclosed in a single-key map.
--spec get_root(emqx_map_lib:config_key_path()) -> map().
+-spec get_root(emqx_utils_maps:config_key_path()) -> map().
 get_root([RootName | _]) ->
     #{RootName => do_get(?CONF, [RootName], #{})}.
 

@@ -153,14 +155,14 @@ get_root_raw([RootName | _]) ->
 
 %% @doc Get a config value for the given path.
 %% The path should at least include root config name.
--spec get(emqx_map_lib:config_key_path()) -> term().
+-spec get(emqx_utils_maps:config_key_path()) -> term().
 get(KeyPath) -> do_get(?CONF, KeyPath).
 
--spec get(emqx_map_lib:config_key_path(), term()) -> term().
+-spec get(emqx_utils_maps:config_key_path(), term()) -> term().
 get(KeyPath, Default) -> do_get(?CONF, KeyPath, Default).
 
--spec find(emqx_map_lib:config_key_path()) ->
-    {ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
+-spec find(emqx_utils_maps:config_key_path()) ->
+    {ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
 find([]) ->
     Ref = make_ref(),
     case do_get(?CONF, [], Ref) of

@@ -170,12 +172,12 @@ find([]) ->
 find(KeyPath) ->
     atom_conf_path(
         KeyPath,
-        fun(AtomKeyPath) -> emqx_map_lib:deep_find(AtomKeyPath, get_root(KeyPath)) end,
+        fun(AtomKeyPath) -> emqx_utils_maps:deep_find(AtomKeyPath, get_root(KeyPath)) end,
         {return, {not_found, KeyPath}}
     ).
 
--spec find_raw(emqx_map_lib:config_key_path()) ->
-    {ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
+-spec find_raw(emqx_utils_maps:config_key_path()) ->
+    {ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
 find_raw([]) ->
     Ref = make_ref(),
     case do_get_raw([], Ref) of

@@ -183,9 +185,9 @@ find_raw([]) ->
         Res -> {ok, Res}
     end;
 find_raw(KeyPath) ->
-    emqx_map_lib:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)).
+    emqx_utils_maps:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)).
 
--spec get_zone_conf(atom(), emqx_map_lib:config_key_path()) -> term().
+-spec get_zone_conf(atom(), emqx_utils_maps:config_key_path()) -> term().
 get_zone_conf(Zone, KeyPath) ->
     case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
         %% not found in zones, try to find the global config

@@ -195,7 +197,7 @@ get_zone_conf(Zone, KeyPath) ->
             Value
     end.
 
--spec get_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_zone_conf(atom(), emqx_utils_maps:config_key_path(), term()) -> term().
 get_zone_conf(Zone, KeyPath, Default) ->
     case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
         %% not found in zones, try to find the global config

@@ -205,24 +207,24 @@ get_zone_conf(Zone, KeyPath, Default) ->
             Value
     end.
 
--spec put_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> ok.
+-spec put_zone_conf(atom(), emqx_utils_maps:config_key_path(), term()) -> ok.
 put_zone_conf(Zone, KeyPath, Conf) ->
     ?MODULE:put(?ZONE_CONF_PATH(Zone, KeyPath), Conf).
 
--spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) -> term().
+-spec get_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path()) -> term().
 get_listener_conf(Type, Listener, KeyPath) ->
     ?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
 
--spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path(), term()) -> term().
 get_listener_conf(Type, Listener, KeyPath, Default) ->
     ?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Default).
 
--spec put_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> ok.
+-spec put_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path(), term()) -> ok.
 put_listener_conf(Type, Listener, KeyPath, Conf) ->
     ?MODULE:put(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Conf).
 
--spec find_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) ->
-    {ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
+-spec find_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path()) ->
+    {ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
 find_listener_conf(Type, Listener, KeyPath) ->
     find(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
 

@@ -241,20 +243,20 @@ erase(RootName) ->
     persistent_term:erase(?PERSIS_KEY(?RAW_CONF, bin(RootName))),
     ok.
 
--spec put(emqx_map_lib:config_key_path(), term()) -> ok.
+-spec put(emqx_utils_maps:config_key_path(), term()) -> ok.
 put(KeyPath, Config) ->
     Putter = fun(Path, Map, Value) ->
-        emqx_map_lib:deep_put(Path, Map, Value)
+        emqx_utils_maps:deep_put(Path, Map, Value)
     end,
     do_put(?CONF, Putter, KeyPath, Config).
 
 %% Puts value into configuration even if path doesn't exist
 %% For paths of non-existing atoms use force_put(KeyPath, Config, unsafe)
--spec force_put(emqx_map_lib:config_key_path(), term()) -> ok.
+-spec force_put(emqx_utils_maps:config_key_path(), term()) -> ok.
 force_put(KeyPath, Config) ->
     force_put(KeyPath, Config, safe).
 
--spec force_put(emqx_map_lib:config_key_path(), term(), safe | unsafe) -> ok.
+-spec force_put(emqx_utils_maps:config_key_path(), term(), safe | unsafe) -> ok.
 force_put(KeyPath0, Config, Safety) ->
     KeyPath =
         case Safety of

@@ -262,19 +264,19 @@ force_put(KeyPath0, Config, Safety) ->
             unsafe -> [unsafe_atom(Key) || Key <- KeyPath0]
         end,
     Putter = fun(Path, Map, Value) ->
-        emqx_map_lib:deep_force_put(Path, Map, Value)
+        emqx_utils_maps:deep_force_put(Path, Map, Value)
     end,
     do_put(?CONF, Putter, KeyPath, Config).
 
--spec get_default_value(emqx_map_lib:config_key_path()) -> {ok, term()} | {error, term()}.
+-spec get_default_value(emqx_utils_maps:config_key_path()) -> {ok, term()} | {error, term()}.
 get_default_value([RootName | _] = KeyPath) ->
     BinKeyPath = [bin(Key) || Key <- KeyPath],
     case find_raw([RootName]) of
         {ok, RawConf} ->
-            RawConf1 = emqx_map_lib:deep_remove(BinKeyPath, #{bin(RootName) => RawConf}),
+            RawConf1 = emqx_utils_maps:deep_remove(BinKeyPath, #{bin(RootName) => RawConf}),
             try fill_defaults(get_schema_mod(RootName), RawConf1, #{}) of
                 FullConf ->
-                    case emqx_map_lib:deep_find(BinKeyPath, FullConf) of
+                    case emqx_utils_maps:deep_find(BinKeyPath, FullConf) of
                         {not_found, _, _} -> {error, no_default_value};
                         {ok, Val} -> {ok, Val}
                     end

@@ -285,10 +287,10 @@ get_default_value([RootName | _] = KeyPath) ->
             {error, {rootname_not_found, RootName}}
     end.
 
--spec get_raw(emqx_map_lib:config_key_path()) -> term().
+-spec get_raw(emqx_utils_maps:config_key_path()) -> term().
 get_raw(KeyPath) -> do_get_raw(KeyPath).
 
--spec get_raw(emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_raw(emqx_utils_maps:config_key_path(), term()) -> term().
 get_raw(KeyPath, Default) -> do_get_raw(KeyPath, Default).
 
 -spec put_raw(map()) -> ok.

@@ -301,10 +303,10 @@ put_raw(Config) ->
         hocon_maps:ensure_plain(Config)
     ).
 
--spec put_raw(emqx_map_lib:config_key_path(), term()) -> ok.
+-spec put_raw(emqx_utils_maps:config_key_path(), term()) -> ok.
 put_raw(KeyPath, Config) ->
     Putter = fun(Path, Map, Value) ->
-        emqx_map_lib:deep_force_put(Path, Map, Value)
+        emqx_utils_maps:deep_force_put(Path, Map, Value)
     end,
     do_put(?RAW_CONF, Putter, KeyPath, Config).
 
@@ -326,9 +328,12 @@ init_load(SchemaMod, ConfFiles) ->
 %% in the rear of the list overrides prior values.
 -spec init_load(module(), [string()] | binary() | hocon:config()) -> ok.
 init_load(SchemaMod, Conf, Opts) when is_list(Conf) orelse is_binary(Conf) ->
-    init_load(SchemaMod, parse_hocon(Conf), Opts);
-init_load(SchemaMod, RawConf, Opts) when is_map(RawConf) ->
-    ok = save_schema_mod_and_names(SchemaMod),
+    HasDeprecatedFile = has_deprecated_file(),
+    RawConf = parse_hocon(HasDeprecatedFile, Conf),
+    init_load(HasDeprecatedFile, SchemaMod, RawConf, Opts).
+
+init_load(true, SchemaMod, RawConf, Opts) when is_map(RawConf) ->
+    %% deprecated conf will be removed in 5.1
     %% Merge environment variable overrides on top
     RawConfWithEnvs = merge_envs(SchemaMod, RawConf),
     Overrides = read_override_confs(),

@@ -338,6 +343,16 @@ init_load(SchemaMod, RawConf, Opts) when is_map(RawConf) ->
     %% check configs against the schema
     {AppEnvs, CheckedConf} = check_config(SchemaMod, RawConfAll, #{}),
     save_to_app_env(AppEnvs),
+    ok = save_to_config_map(CheckedConf, RawConfAll);
+init_load(false, SchemaMod, RawConf, Opts) when is_map(RawConf) ->
+    ok = save_schema_mod_and_names(SchemaMod),
+    RootNames = get_root_names(),
+    %% Merge environment variable overrides on top
+    RawConfWithEnvs = merge_envs(SchemaMod, RawConf),
+    RawConfAll = raw_conf_with_default(SchemaMod, RootNames, RawConfWithEnvs, Opts),
+    %% check configs against the schema
+    {AppEnvs, CheckedConf} = check_config(SchemaMod, RawConfAll, #{}),
+    save_to_app_env(AppEnvs),
     ok = save_to_config_map(CheckedConf, RawConfAll).
 
 %% @doc Read merged cluster + local overrides.

@@ -374,27 +389,37 @@ schema_default(Schema) ->
             #{}
     end.
 
-parse_hocon(Conf) ->
+parse_hocon(HasDeprecatedFile, Conf) ->
     IncDirs = include_dirs(),
-    case do_parse_hocon(Conf, IncDirs) of
+    case do_parse_hocon(HasDeprecatedFile, Conf, IncDirs) of
         {ok, HoconMap} ->
             HoconMap;
         {error, Reason} ->
             ?SLOG(error, #{
-                msg => "failed_to_load_hocon_conf",
+                msg => "failed_to_load_hocon_file",
                 reason => Reason,
                 pwd => file:get_cwd(),
                 include_dirs => IncDirs,
                 config_file => Conf
             }),
-            error(failed_to_load_hocon_conf)
+            error(failed_to_load_hocon_file)
     end.
 
-do_parse_hocon(Conf, IncDirs) ->
+do_parse_hocon(true, Conf, IncDirs) ->
     Opts = #{format => map, include_dirs => IncDirs},
     case is_binary(Conf) of
         true -> hocon:binary(Conf, Opts);
         false -> hocon:files(Conf, Opts)
+    end;
+do_parse_hocon(false, Conf, IncDirs) ->
+    Opts = #{format => map, include_dirs => IncDirs},
+    case is_binary(Conf) of
+        %% only use in test
+        true ->
+            hocon:binary(Conf, Opts);
+        false ->
+            ClusterFile = cluster_hocon_file(),
+            hocon:files([ClusterFile | Conf], Opts)
     end.
 
 include_dirs() ->
@@ -430,7 +455,7 @@ do_check_config(SchemaMod, RawConf, Opts0) ->
     Opts = maps:merge(Opts0, Opts1),
     {AppEnvs, CheckedConf} =
         hocon_tconf:map_translate(SchemaMod, RawConf, Opts),
-    {AppEnvs, emqx_map_lib:unsafe_atom_key_map(CheckedConf)}.
+    {AppEnvs, emqx_utils_maps:unsafe_atom_key_map(CheckedConf)}.
 
 fill_defaults(RawConf) ->
     fill_defaults(RawConf, #{}).

@@ -466,10 +491,12 @@ fill_defaults(SchemaMod, RawConf, Opts0) ->
 %% Delete override config files.
 -spec delete_override_conf_files() -> ok.
 delete_override_conf_files() ->
-    F1 = override_conf_file(#{override_to => local}),
-    F2 = override_conf_file(#{override_to => cluster}),
+    F1 = deprecated_conf_file(#{override_to => local}),
+    F2 = deprecated_conf_file(#{override_to => cluster}),
+    F3 = cluster_hocon_file(),
     ok = ensure_file_deleted(F1),
-    ok = ensure_file_deleted(F2).
+    ok = ensure_file_deleted(F2),
+    ok = ensure_file_deleted(F3).
 
 ensure_file_deleted(F) ->
     case file:delete(F) of

@@ -480,19 +507,33 @@ ensure_file_deleted(F) ->
 
 -spec read_override_conf(map()) -> raw_config().
 read_override_conf(#{} = Opts) ->
-    File = override_conf_file(Opts),
+    File =
+        case has_deprecated_file() of
+            true -> deprecated_conf_file(Opts);
+            false -> cluster_hocon_file()
+        end,
     load_hocon_file(File, map).
 
-override_conf_file(Opts) when is_map(Opts) ->
+%% @doc Return `true' if this node is upgraded from older version which used cluster-override.conf for
+%% cluster-wide config persistence.
+has_deprecated_file() ->
+    DeprecatedFile = deprecated_conf_file(#{override_to => cluster}),
+    filelib:is_regular(DeprecatedFile).
+
+deprecated_conf_file(Opts) when is_map(Opts) ->
     Key =
         case maps:get(override_to, Opts, cluster) of
            local -> local_override_conf_file;
           cluster -> cluster_override_conf_file
        end,
     application:get_env(emqx, Key, undefined);
-override_conf_file(Which) when is_atom(Which) ->
+deprecated_conf_file(Which) when is_atom(Which) ->
     application:get_env(emqx, Which, undefined).
 
+%% The newer version cluster-wide config persistence file.
+cluster_hocon_file() ->
+    application:get_env(emqx, cluster_hocon_file, undefined).
+
 -spec save_schema_mod_and_names(module()) -> ok.
 save_schema_mod_and_names(SchemaMod) ->
     RootNames = hocon_schema:root_names(SchemaMod),
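In short, nodes upgraded from older releases keep reading the deprecated cluster-override.conf, while fresh installations persist cluster-wide changes in the new cluster.hocon file. A hedged illustration of that decision follows; the wrapper name and return atoms are hypothetical, although has_deprecated_file/0 itself is exported by this change.

%% Illustrative sketch only: report which persistence file this node is using.
active_cluster_conf() ->
    case emqx_config:has_deprecated_file() of
        %% upgraded node: keep using cluster-override.conf until 5.1
        true -> deprecated_cluster_override;
        %% fresh node: use the new cluster.hocon file
        false -> cluster_hocon
    end.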
@@ -522,11 +563,15 @@ get_schema_mod(RootName) ->
 get_root_names() ->
     maps:get(names, persistent_term:get(?PERSIS_SCHEMA_MODS, #{names => []})).
 
--spec save_configs(app_envs(), config(), raw_config(), raw_config(), update_opts()) -> ok.
+-spec save_configs(
+    app_envs(), config(), raw_config(), raw_config(), update_opts()
+) -> ok.
+
 save_configs(AppEnvs, Conf, RawConf, OverrideConf, Opts) ->
-    %% We first try to save to override.conf, because saving to files is more error prone
+    %% We first try to save to files, because saving to files is more error prone
     %% than saving into memory.
-    ok = save_to_override_conf(OverrideConf, Opts),
+    HasDeprecatedFile = has_deprecated_file(),
+    ok = save_to_override_conf(HasDeprecatedFile, OverrideConf, Opts),
     save_to_app_env(AppEnvs),
     save_to_config_map(Conf, RawConf).
 

@@ -544,11 +589,12 @@ save_to_config_map(Conf, RawConf) ->
     ?MODULE:put(Conf),
     ?MODULE:put_raw(RawConf).
 
--spec save_to_override_conf(raw_config(), update_opts()) -> ok | {error, term()}.
-save_to_override_conf(undefined, _) ->
+-spec save_to_override_conf(boolean(), raw_config(), update_opts()) -> ok | {error, term()}.
+save_to_override_conf(_, undefined, _) ->
     ok;
-save_to_override_conf(RawConf, Opts) ->
-    case override_conf_file(Opts) of
+%% TODO: Remove deprecated override conf file when 5.1
+save_to_override_conf(true, RawConf, Opts) ->
+    case deprecated_conf_file(Opts) of
         undefined ->
             ok;
         FileName ->

@@ -564,6 +610,24 @@ save_to_override_conf(RawConf, Opts) ->
                     }),
                     {error, Reason}
             end
+    end;
+save_to_override_conf(false, RawConf, _Opts) ->
+    case cluster_hocon_file() of
+        undefined ->
+            ok;
+        FileName ->
+            ok = filelib:ensure_dir(FileName),
+            case file:write_file(FileName, hocon_pp:do(RawConf, #{})) of
+                ok ->
+                    ok;
+                {error, Reason} ->
+                    ?SLOG(error, #{
+                        msg => "failed_to_save_conf_file",
+                        filename => FileName,
+                        reason => Reason
+                    }),
+                    {error, Reason}
+            end
     end.
 
 add_handlers() ->
@@ -645,11 +709,11 @@ do_put(Type, Putter, [RootName | KeyPath], DeepValue) ->
 do_deep_get(?CONF, KeyPath, Map, Default) ->
     atom_conf_path(
         KeyPath,
-        fun(AtomKeyPath) -> emqx_map_lib:deep_get(AtomKeyPath, Map, Default) end,
+        fun(AtomKeyPath) -> emqx_utils_maps:deep_get(AtomKeyPath, Map, Default) end,
         {return, Default}
     );
 do_deep_get(?RAW_CONF, KeyPath, Map, Default) ->
-    emqx_map_lib:deep_get([bin(Key) || Key <- KeyPath], Map, Default).
+    emqx_utils_maps:deep_get([bin(Key) || Key <- KeyPath], Map, Default).
 
 do_deep_put(?CONF, Putter, KeyPath, Map, Value) ->
     atom_conf_path(
@@ -43,7 +43,6 @@
     terminate/2,
     code_change/3
 ]).
--export([is_mutable/3]).
 
 -define(MOD, {mod}).
 -define(WKEY, '?').

@@ -230,26 +229,15 @@ process_update_request([_], _Handlers, {remove, _Opts}) ->
 process_update_request(ConfKeyPath, _Handlers, {remove, Opts}) ->
     OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
     BinKeyPath = bin_path(ConfKeyPath),
-    case check_permissions(remove, BinKeyPath, OldRawConf, Opts) of
-        allow ->
-            NewRawConf = emqx_map_lib:deep_remove(BinKeyPath, OldRawConf),
-            OverrideConf = remove_from_override_config(BinKeyPath, Opts),
-            {ok, NewRawConf, OverrideConf, Opts};
-        {deny, Reason} ->
-            {error, {permission_denied, Reason}}
-    end;
+    NewRawConf = emqx_utils_maps:deep_remove(BinKeyPath, OldRawConf),
+    OverrideConf = remove_from_override_config(BinKeyPath, Opts),
+    {ok, NewRawConf, OverrideConf, Opts};
 process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
     OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
     case do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) of
         {ok, NewRawConf} ->
-            BinKeyPath = bin_path(ConfKeyPath),
-            case check_permissions(update, BinKeyPath, NewRawConf, Opts) of
-                allow ->
-                    OverrideConf = merge_to_override_config(NewRawConf, Opts),
-                    {ok, NewRawConf, OverrideConf, Opts};
-                {deny, Reason} ->
-                    {error, {permission_denied, Reason}}
-            end;
+            OverrideConf = merge_to_override_config(NewRawConf, Opts),
+            {ok, NewRawConf, OverrideConf, Opts};
         Error ->
             Error
     end.
@@ -271,8 +259,10 @@ do_update_config(
     SubOldRawConf = get_sub_config(ConfKeyBin, OldRawConf),
     SubHandlers = get_sub_handlers(ConfKey, Handlers),
     case do_update_config(SubConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq, ConfKeyPath) of
-        {ok, NewUpdateReq} -> merge_to_old_config(#{ConfKeyBin => NewUpdateReq}, OldRawConf);
-        Error -> Error
+        {ok, NewUpdateReq} ->
+            merge_to_old_config(#{ConfKeyBin => NewUpdateReq}, OldRawConf);
+        Error ->
+            Error
     end.
 
 check_and_save_configs(
|
@ -445,7 +435,7 @@ remove_from_override_config(_BinKeyPath, #{persistent := false}) ->
|
||||||
undefined;
|
undefined;
|
||||||
remove_from_override_config(BinKeyPath, Opts) ->
|
remove_from_override_config(BinKeyPath, Opts) ->
|
||||||
OldConf = emqx_config:read_override_conf(Opts),
|
OldConf = emqx_config:read_override_conf(Opts),
|
||||||
emqx_map_lib:deep_remove(BinKeyPath, OldConf).
|
emqx_utils_maps:deep_remove(BinKeyPath, OldConf).
|
||||||
|
|
||||||
%% apply new config on top of override config
|
%% apply new config on top of override config
|
||||||
merge_to_override_config(_RawConf, #{persistent := false}) ->
|
merge_to_override_config(_RawConf, #{persistent := false}) ->
|
||||||
|
@ -467,7 +457,7 @@ return_change_result(_ConfKeyPath, {remove, _Opts}) ->
|
||||||
|
|
||||||
return_rawconf(ConfKeyPath, #{rawconf_with_defaults := true}) ->
|
return_rawconf(ConfKeyPath, #{rawconf_with_defaults := true}) ->
|
||||||
FullRawConf = emqx_config:fill_defaults(emqx_config:get_raw([])),
|
FullRawConf = emqx_config:fill_defaults(emqx_config:get_raw([])),
|
||||||
emqx_map_lib:deep_get(bin_path(ConfKeyPath), FullRawConf);
|
emqx_utils_maps:deep_get(bin_path(ConfKeyPath), FullRawConf);
|
||||||
return_rawconf(ConfKeyPath, _) ->
|
return_rawconf(ConfKeyPath, _) ->
|
||||||
emqx_config:get_raw(ConfKeyPath).
|
emqx_config:get_raw(ConfKeyPath).
|
||||||
|
|
||||||
|
@ -485,16 +475,16 @@ atom(Atom) when is_atom(Atom) ->
|
||||||
|
|
||||||
-dialyzer({nowarn_function, do_remove_handler/2}).
|
-dialyzer({nowarn_function, do_remove_handler/2}).
|
||||||
do_remove_handler(ConfKeyPath, Handlers) ->
|
do_remove_handler(ConfKeyPath, Handlers) ->
|
||||||
NewHandlers = emqx_map_lib:deep_remove(ConfKeyPath ++ [?MOD], Handlers),
|
NewHandlers = emqx_utils_maps:deep_remove(ConfKeyPath ++ [?MOD], Handlers),
|
||||||
remove_empty_leaf(ConfKeyPath, NewHandlers).
|
remove_empty_leaf(ConfKeyPath, NewHandlers).
|
||||||
|
|
||||||
remove_empty_leaf([], Handlers) ->
|
remove_empty_leaf([], Handlers) ->
|
||||||
Handlers;
|
Handlers;
|
||||||
remove_empty_leaf(KeyPath, Handlers) ->
|
remove_empty_leaf(KeyPath, Handlers) ->
|
||||||
case emqx_map_lib:deep_find(KeyPath, Handlers) =:= {ok, #{}} of
|
case emqx_utils_maps:deep_find(KeyPath, Handlers) =:= {ok, #{}} of
|
||||||
%% empty leaf
|
%% empty leaf
|
||||||
true ->
|
true ->
|
||||||
Handlers1 = emqx_map_lib:deep_remove(KeyPath, Handlers),
|
Handlers1 = emqx_utils_maps:deep_remove(KeyPath, Handlers),
|
||||||
SubKeyPath = lists:sublist(KeyPath, length(KeyPath) - 1),
|
SubKeyPath = lists:sublist(KeyPath, length(KeyPath) - 1),
|
||||||
remove_empty_leaf(SubKeyPath, Handlers1);
|
remove_empty_leaf(SubKeyPath, Handlers1);
|
||||||
false ->
|
false ->
|
||||||
|
@ -511,7 +501,7 @@ assert_callback_function(Mod) ->
|
||||||
end,
|
end,
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
-spec schema(module(), emqx_map_lib:config_key_path()) -> hocon_schema:schema().
|
-spec schema(module(), emqx_utils_maps:config_key_path()) -> hocon_schema:schema().
|
||||||
schema(SchemaModule, [RootKey | _]) ->
|
schema(SchemaModule, [RootKey | _]) ->
|
||||||
Roots = hocon_schema:roots(SchemaModule),
|
Roots = hocon_schema:roots(SchemaModule),
|
||||||
{Field, Translations} =
|
{Field, Translations} =
|
||||||
|
@ -546,98 +536,3 @@ load_prev_handlers() ->
|
||||||
|
|
||||||
save_handlers(Handlers) ->
|
save_handlers(Handlers) ->
|
||||||
application:set_env(emqx, ?MODULE, Handlers).
|
application:set_env(emqx, ?MODULE, Handlers).
|
||||||
|
|
||||||
check_permissions(_Action, _ConfKeyPath, _NewRawConf, #{override_to := local}) ->
|
|
||||||
allow;
|
|
||||||
check_permissions(Action, ConfKeyPath, NewRawConf, _Opts) ->
|
|
||||||
case emqx_map_lib:deep_find(ConfKeyPath, NewRawConf) of
|
|
||||||
{ok, NewRaw} ->
|
|
||||||
LocalOverride = emqx_config:read_override_conf(#{override_to => local}),
|
|
||||||
case emqx_map_lib:deep_find(ConfKeyPath, LocalOverride) of
|
|
||||||
{ok, LocalRaw} ->
|
|
||||||
case is_mutable(Action, NewRaw, LocalRaw) of
|
|
||||||
ok ->
|
|
||||||
allow;
|
|
||||||
{error, Error} ->
|
|
||||||
?SLOG(error, #{
|
|
||||||
msg => "prevent_remove_local_override_conf",
|
|
||||||
config_key_path => ConfKeyPath,
|
|
||||||
error => Error
|
|
||||||
}),
|
|
||||||
{deny, "Disable changed from local-override.conf"}
|
|
||||||
end;
|
|
||||||
{not_found, _, _} ->
|
|
||||||
allow
|
|
||||||
end;
|
|
||||||
{not_found, _, _} ->
|
|
||||||
allow
|
|
||||||
end.
|
|
||||||
|
|
||||||
is_mutable(Action, NewRaw, LocalRaw) ->
|
|
||||||
try
|
|
||||||
KeyPath = [],
|
|
||||||
is_mutable(KeyPath, Action, NewRaw, LocalRaw)
|
|
||||||
catch
|
|
||||||
throw:Error -> Error
|
|
||||||
end.
|
|
||||||
|
|
||||||
-define(REMOVE_FAILED, "remove_failed").
|
|
||||||
-define(UPDATE_FAILED, "update_failed").
|
|
||||||
|
|
||||||
is_mutable(KeyPath, Action, New = #{}, Local = #{}) ->
|
|
||||||
maps:foreach(
|
|
||||||
fun(Key, SubLocal) ->
|
|
||||||
case maps:find(Key, New) of
|
|
||||||
error -> ok;
|
|
||||||
{ok, SubNew} -> is_mutable(KeyPath ++ [Key], Action, SubNew, SubLocal)
|
|
||||||
end
|
|
||||||
end,
|
|
||||||
Local
|
|
||||||
);
|
|
||||||
is_mutable(KeyPath, remove, Update, Origin) ->
|
|
||||||
throw({error, {?REMOVE_FAILED, KeyPath, Update, Origin}});
|
|
||||||
is_mutable(_KeyPath, update, Val, Val) ->
|
|
||||||
ok;
|
|
||||||
is_mutable(KeyPath, update, Update, Origin) ->
|
|
||||||
throw({error, {?UPDATE_FAILED, KeyPath, Update, Origin}}).
|
|
||||||
|
|
||||||
-ifdef(TEST).
|
|
||||||
-include_lib("eunit/include/eunit.hrl").
|
|
||||||
|
|
||||||
is_mutable_update_test() ->
|
|
||||||
Action = update,
|
|
||||||
?assertEqual(ok, is_mutable(Action, #{}, #{})),
|
|
||||||
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => #{}}}}, #{a => #{b => #{c => #{}}}})),
|
|
||||||
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c => 1}}})),
|
|
||||||
?assertEqual(
|
|
||||||
{error, {?UPDATE_FAILED, [a, b, c], 1, 2}},
|
|
||||||
is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c => 2}}})
|
|
||||||
),
|
|
||||||
?assertEqual(
|
|
||||||
{error, {?UPDATE_FAILED, [a, b, d], 2, 3}},
|
|
||||||
is_mutable(Action, #{a => #{b => #{c => 1, d => 2}}}, #{a => #{b => #{c => 1, d => 3}}})
|
|
||||||
),
|
|
||||||
ok.
|
|
||||||
|
|
||||||
is_mutable_remove_test() ->
|
|
||||||
Action = remove,
|
|
||||||
?assertEqual(ok, is_mutable(Action, #{}, #{})),
|
|
||||||
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => #{}}}}, #{a1 => #{b => #{c => #{}}}})),
|
|
||||||
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b1 => #{c => 1}}})),
|
|
||||||
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c1 => 1}}})),
|
|
||||||
|
|
||||||
?assertEqual(
|
|
||||||
{error, {?REMOVE_FAILED, [a, b, c], 1, 1}},
|
|
||||||
is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c => 1}}})
|
|
||||||
),
|
|
||||||
?assertEqual(
|
|
||||||
{error, {?REMOVE_FAILED, [a, b, c], 1, 2}},
|
|
||||||
is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c => 2}}})
|
|
||||||
),
|
|
||||||
?assertEqual(
|
|
||||||
{error, {?REMOVE_FAILED, [a, b, c], 1, 1}},
|
|
||||||
is_mutable(Action, #{a => #{b => #{c => 1, d => 2}}}, #{a => #{b => #{c => 1, d => 3}}})
|
|
||||||
),
|
|
||||||
ok.
|
|
||||||
|
|
||||||
-endif.
|
|
||||||
|
|
|
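Note: the hunks above (apparently the config-handler module) do two things: the local-override permission check (check_permissions/4, is_mutable/3 and their eunit cases) is dropped from the update/remove paths, and the map helpers move from emqx_map_lib to emqx_utils_maps. A minimal sketch of the renamed helpers follows; it assumes the emqx_utils_maps functions behave exactly like their old emqx_map_lib counterparts, which is what the mechanical rename suggests.

    %% Minimal sketch, not part of the diff.
    deep_helpers_demo() ->
        Conf = #{<<"log">> => #{<<"level">> => warning}},
        %% get a nested value, with and without a default
        warning = emqx_utils_maps:deep_get([<<"log">>, <<"level">>], Conf),
        default = emqx_utils_maps:deep_get([<<"log">>, <<"file">>], Conf, default),
        %% remove a nested key
        #{<<"log">> := #{}} = emqx_utils_maps:deep_remove([<<"log">>, <<"level">>], Conf),
        ok.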
@@ -77,7 +77,7 @@
 -export([set_field/3]).

 -import(
-    emqx_misc,
+    emqx_utils,
     [start_timer/2]
 ).
@@ -260,7 +260,7 @@ stats(#state{
         {error, _} -> []
     end,
     ChanStats = emqx_channel:stats(Channel),
-    ProcStats = emqx_misc:proc_stats(),
+    ProcStats = emqx_utils:proc_stats(),
     lists:append([SockStats, ChanStats, ProcStats]).

 %% @doc Set TCP keepalive socket options to override system defaults.
@@ -392,7 +392,7 @@ run_loop(
             emqx_channel:info(zone, Channel),
             [force_shutdown]
         ),
-    emqx_misc:tune_heap_size(ShutdownPolicy),
+    emqx_utils:tune_heap_size(ShutdownPolicy),
     case activate_socket(State) of
         {ok, NState} ->
             hibernate(Parent, NState);
@@ -472,7 +472,7 @@ ensure_stats_timer(_Timeout, State) ->
 -compile({inline, [cancel_stats_timer/1]}).
 cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) ->
     ?tp(debug, cancel_stats_timer, #{}),
-    ok = emqx_misc:cancel_timer(TRef),
+    ok = emqx_utils:cancel_timer(TRef),
     State#state{stats_timer = undefined};
 cancel_stats_timer(State) ->
     State.
@@ -558,7 +558,7 @@ handle_msg(
     {incoming, Packet = ?CONNECT_PACKET(ConnPkt)},
     State = #state{idle_timer = IdleTimer}
 ) ->
-    ok = emqx_misc:cancel_timer(IdleTimer),
+    ok = emqx_utils:cancel_timer(IdleTimer),
     Serialize = emqx_frame:serialize_opts(ConnPkt),
     NState = State#state{
         serialize = Serialize,
@@ -593,7 +593,7 @@ handle_msg(
     #state{listener = {Type, Listener}} = State
 ) ->
     ActiveN = get_active_n(Type, Listener),
-    Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)],
+    Delivers = [Deliver | emqx_utils:drain_deliver(ActiveN)],
     with_channel(handle_deliver, [Delivers], State);
 %% Something sent
 handle_msg({inet_reply, _Sock, ok}, State = #state{listener = {Type, Listener}}) ->
@@ -1073,7 +1073,7 @@ check_oom(State = #state{channel = Channel}) ->
         emqx_channel:info(zone, Channel), [force_shutdown]
     ),
     ?tp(debug, check_oom, #{policy => ShutdownPolicy}),
-    case emqx_misc:check_oom(ShutdownPolicy) of
+    case emqx_utils:check_oom(ShutdownPolicy) of
         {shutdown, Reason} ->
             %% triggers terminate/2 callback immediately
             erlang:exit({shutdown, Reason});
@@ -1200,7 +1200,7 @@ inc_counter(Key, Inc) ->
 %%--------------------------------------------------------------------

 set_field(Name, Value, State) ->
-    Pos = emqx_misc:index_of(Name, record_info(fields, state)),
+    Pos = emqx_utils:index_of(Name, record_info(fields, state)),
     setelement(Pos + 1, State, Value).

 get_state(Pid) ->
@@ -117,7 +117,7 @@ handle_call(Call, _From, State) ->
 handle_cast({evict, URL}, State0 = #state{refresh_timers = RefreshTimers0}) ->
     emqx_ssl_crl_cache:delete(URL),
     MTimer = maps:get(URL, RefreshTimers0, undefined),
-    emqx_misc:cancel_timer(MTimer),
+    emqx_utils:cancel_timer(MTimer),
     RefreshTimers = maps:without([URL], RefreshTimers0),
     State = State0#state{refresh_timers = RefreshTimers},
     ?tp(
@@ -223,9 +223,9 @@ ensure_timer(URL, State = #state{refresh_interval = Timeout}) ->
 ensure_timer(URL, State = #state{refresh_timers = RefreshTimers0}, Timeout) ->
     ?tp(crl_cache_ensure_timer, #{url => URL, timeout => Timeout}),
     MTimer = maps:get(URL, RefreshTimers0, undefined),
-    emqx_misc:cancel_timer(MTimer),
+    emqx_utils:cancel_timer(MTimer),
     RefreshTimers = RefreshTimers0#{
-        URL => emqx_misc:start_timer(
+        URL => emqx_utils:start_timer(
             Timeout,
             {refresh, URL}
         )
@@ -297,7 +297,7 @@ handle_cache_overflow(State0) ->
     {_Time, OldestURL, InsertionTimes} = gb_trees:take_smallest(InsertionTimes0),
     emqx_ssl_crl_cache:delete(OldestURL),
     MTimer = maps:get(OldestURL, RefreshTimers0, undefined),
-    emqx_misc:cancel_timer(MTimer),
+    emqx_utils:cancel_timer(MTimer),
     RefreshTimers = maps:remove(OldestURL, RefreshTimers0),
     CachedURLs = sets:del_element(OldestURL, CachedURLs0),
     ?tp(debug, crl_cache_overflow, #{oldest_url => OldestURL}),
@@ -27,6 +27,10 @@
 %% API
 -export([detect/1]).

+-ifdef(TEST).
+-export([get_policy/2]).
+-endif.
+
 %% gen_server callbacks
 -export([
     init/1,
@@ -39,15 +43,6 @@

 %% Tab
 -define(FLAPPING_TAB, ?MODULE).
-%% Default Policy
--define(FLAPPING_THRESHOLD, 30).
--define(FLAPPING_DURATION, 60000).
--define(FLAPPING_BANNED_INTERVAL, 300000).
--define(DEFAULT_DETECT_POLICY, #{
-    max_count => ?FLAPPING_THRESHOLD,
-    window_time => ?FLAPPING_DURATION,
-    ban_time => ?FLAPPING_BANNED_INTERVAL
-}).

 -record(flapping, {
     clientid :: emqx_types:clientid(),
@@ -69,7 +64,7 @@ stop() -> gen_server:stop(?MODULE).
 %% @doc Detect flapping when a MQTT client disconnected.
 -spec detect(emqx_types:clientinfo()) -> boolean().
 detect(#{clientid := ClientId, peerhost := PeerHost, zone := Zone}) ->
-    Policy = #{max_count := Threshold} = get_policy(Zone),
+    Policy = #{max_count := Threshold} = get_policy([max_count, window_time, ban_time], Zone),
     %% The initial flapping record sets the detect_cnt to 0.
     InitVal = #flapping{
         clientid = ClientId,
@@ -89,8 +84,22 @@ detect(#{clientid := ClientId, peerhost := PeerHost, zone := Zone}) ->
         end
     end.

-get_policy(Zone) ->
-    emqx_config:get_zone_conf(Zone, [flapping_detect]).
+get_policy(Keys, Zone) when is_list(Keys) ->
+    RootKey = flapping_detect,
+    Conf = emqx_config:get_zone_conf(Zone, [RootKey]),
+    lists:foldl(
+        fun(Key, Acc) ->
+            case maps:find(Key, Conf) of
+                {ok, V} -> Acc#{Key => V};
+                error -> Acc#{Key => emqx_config:get([RootKey, Key])}
+            end
+        end,
+        #{},
+        Keys
+    );
+get_policy(Key, Zone) ->
+    #{Key := Conf} = get_policy([Key], Zone),
+    Conf.

 now_diff(TS) -> erlang:system_time(millisecond) - TS.
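Note: the new get_policy/2 above collects the requested keys from the zone's flapping_detect settings and falls back to the root flapping_detect config for any key the zone does not override; the single-key form returns the bare value. A usage sketch follows; the zone name `default` and the returned values are illustrative, and the function is only exported under -ifdef(TEST).

    %% In a test context only (get_policy/2 is exported under TEST).
    policy_lookup_example() ->
        Policy = emqx_flapping:get_policy([max_count, window_time, ban_time], default),
        #{max_count := _, window_time := _, ban_time := _} = Policy,
        %% the single-key form unwraps the value
        _WindowTime = emqx_flapping:get_policy(window_time, default),
        ok.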
@@ -99,7 +108,7 @@ now_diff(TS) -> erlang:system_time(millisecond) - TS.
 %%--------------------------------------------------------------------

 init([]) ->
-    ok = emqx_tables:new(?FLAPPING_TAB, [
+    ok = emqx_utils_ets:new(?FLAPPING_TAB, [
         public,
         set,
         {keypos, #flapping.clientid},
@@ -166,8 +175,7 @@ handle_cast(Msg, State) ->

 handle_info({timeout, _TRef, {garbage_collect, Zone}}, State) ->
     Timestamp =
-        erlang:system_time(millisecond) -
-            maps:get(window_time, get_policy(Zone)),
+        erlang:system_time(millisecond) - get_policy(window_time, Zone),
     MatchSpec = [{{'_', '_', '_', '$1', '_'}, [{'<', '$1', Timestamp}], [true]}],
     ets:select_delete(?FLAPPING_TAB, MatchSpec),
     _ = start_timer(Zone),
@@ -183,15 +191,19 @@ code_change(_OldVsn, State, _Extra) ->
     {ok, State}.

 start_timer(Zone) ->
-    WindTime = maps:get(window_time, get_policy(Zone)),
-    emqx_misc:start_timer(WindTime, {garbage_collect, Zone}).
+    case get_policy(window_time, Zone) of
+        WindowTime when is_integer(WindowTime) ->
+            emqx_utils:start_timer(WindowTime, {garbage_collect, Zone});
+        disabled ->
+            ok
+    end.

 start_timers() ->
-    lists:foreach(
-        fun({Zone, _ZoneConf}) ->
+    maps:foreach(
+        fun(Zone, _ZoneConf) ->
             start_timer(Zone)
         end,
-        maps:to_list(emqx:get_config([zones], #{}))
+        emqx:get_config([zones], #{})
     ).

 fmt_host(PeerHost) ->
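Note: two small behaviour changes sit in the hunks above: start_timer/1 no longer arms the garbage-collect timer when window_time resolves to disabled, and start_timers/0 switches to maps:foreach/2 (OTP 24+), which iterates key/value pairs directly instead of going through maps:to_list/1. A self-contained illustration of the iteration change:

    %% Both loops visit the same zones; maps:foreach/2 just skips the
    %% intermediate list.
    foreach_demo() ->
        Zones = #{default => #{}, zone1 => #{}},
        lists:foreach(fun({Zone, _Conf}) -> io:format("~p~n", [Zone]) end, maps:to_list(Zones)),
        maps:foreach(fun(Zone, _Conf) -> io:format("~p~n", [Zone]) end, Zones).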
@@ -145,10 +145,10 @@ npid() ->
     NPid.

 to_hexstr(I) when byte_size(I) =:= 16 ->
-    emqx_misc:bin_to_hexstr(I, upper).
+    emqx_utils:bin_to_hexstr(I, upper).

 from_hexstr(S) when byte_size(S) =:= 32 ->
-    emqx_misc:hexstr_to_bin(S).
+    emqx_utils:hexstr_to_bin(S).

 to_base62(<<I:128>>) ->
     emqx_base62:encode(I).
@@ -229,7 +229,7 @@ lookup(HookPoint) ->
 %%--------------------------------------------------------------------

 init([]) ->
-    ok = emqx_tables:new(?TAB, [{keypos, #hook.name}, {read_concurrency, true}]),
+    ok = emqx_utils_ets:new(?TAB, [{keypos, #hook.name}, {read_concurrency, true}]),
     {ok, #{}}.

 handle_call({add, HookPoint, Callback = #callback{action = {M, F, _}}}, _From, State) ->
@@ -375,7 +375,7 @@ return_pause(infinity, PauseType, Fun, Diff, Limiter) ->
     {PauseType, ?MINIMUM_PAUSE, make_retry_context(Fun, Diff), Limiter};
 return_pause(Rate, PauseType, Fun, Diff, Limiter) ->
     Val = erlang:round(Diff * emqx_limiter_schema:default_period() / Rate),
-    Pause = emqx_misc:clamp(Val, ?MINIMUM_PAUSE, ?MAXIMUM_PAUSE),
+    Pause = emqx_utils:clamp(Val, ?MINIMUM_PAUSE, ?MAXIMUM_PAUSE),
     {PauseType, Pause, make_retry_context(Fun, Diff), Limiter}.

 -spec make_retry_context(undefined | retry_fun(Limiter), non_neg_integer()) ->
@@ -572,7 +572,7 @@ find_limiter_cfg(Type, #{rate := _} = Cfg) ->
 find_limiter_cfg(Type, Cfg) ->
     {
         maps:get(Type, Cfg, undefined),
-        find_client_cfg(Type, emqx_map_lib:deep_get([client, Type], Cfg, undefined))
+        find_client_cfg(Type, emqx_utils_maps:deep_get([client, Type], Cfg, undefined))
     }.

 find_client_cfg(Type, BucketCfg) ->
@@ -427,12 +427,12 @@ pre_config_update([listeners, _Type, _Name], {create, _NewConf}, _RawConf) ->
 pre_config_update([listeners, _Type, _Name], {update, _Request}, undefined) ->
     {error, not_found};
 pre_config_update([listeners, Type, Name], {update, Request}, RawConf) ->
-    NewConfT = emqx_map_lib:deep_merge(RawConf, Request),
+    NewConfT = emqx_utils_maps:deep_merge(RawConf, Request),
     NewConf = ensure_override_limiter_conf(NewConfT, Request),
     CertsDir = certs_dir(Type, Name),
     {ok, convert_certs(CertsDir, NewConf)};
 pre_config_update([listeners, _Type, _Name], {action, _Action, Updated}, RawConf) ->
-    NewConf = emqx_map_lib:deep_merge(RawConf, Updated),
+    NewConf = emqx_utils_maps:deep_merge(RawConf, Updated),
     {ok, NewConf};
 pre_config_update(_Path, _Request, RawConf) ->
     {ok, RawConf}.
@@ -500,7 +500,7 @@ esockd_opts(ListenerId, Type, Opts0) ->

 ws_opts(Type, ListenerName, Opts) ->
     WsPaths = [
-        {emqx_map_lib:deep_get([websocket, mqtt_path], Opts, "/mqtt"), emqx_ws_connection, #{
+        {emqx_utils_maps:deep_get([websocket, mqtt_path], Opts, "/mqtt"), emqx_ws_connection, #{
             zone => zone(Opts),
             listener => {Type, ListenerName},
             limiter => limiter(Opts),
@@ -538,7 +538,7 @@ esockd_access_rules(StrRules) ->
         [A, CIDR] = string:tokens(S, " "),
         %% esockd rules only use words 'allow' and 'deny', both are existing
         %% comparison of strings may be better, but there is a loss of backward compatibility
-        case emqx_misc:safe_to_existing_atom(A) of
+        case emqx_utils:safe_to_existing_atom(A) of
             {ok, Action} ->
                 [
                     {
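Note: the rename above points esockd_access_rules/1 at emqx_utils:safe_to_existing_atom/1. Presumably this is the relocated emqx_misc helper that converts user input to an existing atom without ever creating a new one, returning {ok, Atom} or an error tuple, which is why the case has an error branch. The sketch below is an assumption of the behaviour, not the actual implementation; the real error reason may differ.

    %% Plausible equivalent (assumption only).
    safe_to_existing_atom(Str) when is_list(Str) ->
        try
            {ok, list_to_existing_atom(Str)}
        catch
            error:badarg -> {error, non_existing_atom}
        end.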
@@ -560,7 +560,7 @@ esockd_access_rules(StrRules) ->
 merge_default(Options) ->
     case lists:keytake(tcp_options, 1, Options) of
         {value, {tcp_options, TcpOpts}, Options1} ->
-            [{tcp_options, emqx_misc:merge_opts(?MQTT_SOCKOPTS, TcpOpts)} | Options1];
+            [{tcp_options, emqx_utils:merge_opts(?MQTT_SOCKOPTS, TcpOpts)} | Options1];
         false ->
             [{tcp_options, ?MQTT_SOCKOPTS} | Options]
     end.
@@ -62,11 +62,11 @@
 %% The JSON object is pretty-printed.
 %% NOTE: do not use this function for logging.
 best_effort_json(Input) ->
-    best_effort_json(Input, [space, {indent, 4}]).
+    best_effort_json(Input, [pretty, force_utf8]).
 best_effort_json(Input, Opts) ->
     Config = #{depth => unlimited, single_line => true},
     JsonReady = best_effort_json_obj(Input, Config),
-    jsx:encode(JsonReady, Opts).
+    emqx_utils_json:encode(JsonReady, Opts).

 -spec format(logger:log_event(), config()) -> iodata().
 format(#{level := Level, msg := Msg, meta := Meta} = Event, Config0) when is_map(Config0) ->
@@ -92,7 +92,7 @@ format(Msg, Meta, Config) ->
             }
     end,
     Data = maps:without([report_cb], Data0),
-    jiffy:encode(json_obj(Data, Config)).
+    emqx_utils_json:encode(json_obj(Data, Config)).

 maybe_format_msg({report, Report} = Msg, #{report_cb := Cb} = Meta, Config) ->
     case is_map(Report) andalso Cb =:= ?DEFAULT_FORMATTER of
@@ -378,15 +378,15 @@ p_config() ->

 best_effort_json_test() ->
     ?assertEqual(
-        <<"{}">>,
+        <<"{\n \n}">>,
         emqx_logger_jsonfmt:best_effort_json([])
     ),
     ?assertEqual(
-        <<"{\n \"key\": []\n}">>,
+        <<"{\n \"key\" : [\n \n ]\n}">>,
         emqx_logger_jsonfmt:best_effort_json(#{key => []})
     ),
     ?assertEqual(
-        <<"[\n {\n \"key\": []\n }\n]">>,
+        <<"[\n {\n \"key\" : [\n \n ]\n }\n]">>,
        emqx_logger_jsonfmt:best_effort_json([#{key => []}])
     ),
     ok.
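Note: in the formatter hunks above, both best_effort_json/1 and the JSON log formatter now encode through emqx_utils_json (replacing the direct jsx and jiffy calls), and the pretty-print options become [pretty, force_utf8]; the updated test expectations reflect the new encoder's style (a space before the colon, empty containers spread over lines). A trivial, hedged usage sketch; the exact whitespace is whatever the new encoder produces, see the assertions above.

    pretty_json_demo() ->
        %% binary output, pretty-printed by the new encoder
        Pretty = emqx_logger_jsonfmt:best_effort_json(#{key => []}),
        true = is_binary(Pretty),
        ok.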
@@ -541,7 +541,7 @@ init([]) ->
     CRef = counters:new(?MAX_SIZE, [write_concurrency]),
     ok = persistent_term:put(?MODULE, CRef),
     % Create index mapping table
-    ok = emqx_tables:new(?TAB, [{keypos, 2}, {read_concurrency, true}]),
+    ok = emqx_utils_ets:new(?TAB, [{keypos, 2}, {read_concurrency, true}]),
     Metrics = lists:append([
         ?BYTES_METRICS,
         ?PACKET_METRICS,
@@ -110,7 +110,7 @@ register_listener(ListenerID, Opts) ->
 -spec inject_sni_fun(emqx_listeners:listener_id(), map()) -> map().
 inject_sni_fun(ListenerID, Conf0) ->
     SNIFun = emqx_const_v1:make_sni_fun(ListenerID),
-    Conf = emqx_map_lib:deep_merge(Conf0, #{ssl_options => #{sni_fun => SNIFun}}),
+    Conf = emqx_utils_maps:deep_merge(Conf0, #{ssl_options => #{sni_fun => SNIFun}}),
     ok = ?MODULE:register_listener(ListenerID, Conf),
     Conf.
@@ -120,7 +120,7 @@ inject_sni_fun(ListenerID, Conf0) ->

 init(_Args) ->
     logger:set_process_metadata(#{domain => [emqx, ocsp, cache]}),
-    emqx_tables:new(?CACHE_TAB, [
+    emqx_utils_ets:new(?CACHE_TAB, [
         named_table,
         public,
         {heir, whereis(emqx_kernel_sup), none},
@@ -149,7 +149,7 @@ handle_call({register_listener, ListenerID, Conf}, _From, State0) ->
         msg => "registering_ocsp_cache",
         listener_id => ListenerID
     }),
-    RefreshInterval0 = emqx_map_lib:deep_get([ssl_options, ocsp, refresh_interval], Conf),
+    RefreshInterval0 = emqx_utils_maps:deep_get([ssl_options, ocsp, refresh_interval], Conf),
     RefreshInterval = max(RefreshInterval0, ?MIN_REFRESH_INTERVAL),
     State = State0#{{refresh_interval, ListenerID} => RefreshInterval},
     %% we need to pass the config along because this might be called
@@ -476,9 +476,9 @@ ensure_timer(ListenerID, State, Timeout) ->
     ensure_timer(ListenerID, {refresh, ListenerID}, State, Timeout).

 ensure_timer(ListenerID, Message, State, Timeout) ->
-    emqx_misc:cancel_timer(maps:get(?REFRESH_TIMER(ListenerID), State, undefined)),
+    emqx_utils:cancel_timer(maps:get(?REFRESH_TIMER(ListenerID), State, undefined)),
     State#{
-        ?REFRESH_TIMER(ListenerID) => emqx_misc:start_timer(
+        ?REFRESH_TIMER(ListenerID) => emqx_utils:start_timer(
             Timeout,
             Message
         )
@@ -180,8 +180,8 @@ code_change(_OldVsn, State, _Extra) ->
 %% Internal functions
 %%--------------------------------------------------------------------
 cancel_outdated_timer(#{mem_time_ref := MemRef, cpu_time_ref := CpuRef}) ->
-    emqx_misc:cancel_timer(MemRef),
-    emqx_misc:cancel_timer(CpuRef),
+    emqx_utils:cancel_timer(MemRef),
+    emqx_utils:cancel_timer(CpuRef),
     ok.

 start_cpu_check_timer() ->
@@ -204,7 +204,7 @@ start_mem_check_timer() ->
     end.

 start_timer(Interval, Msg) ->
-    emqx_misc:start_timer(Interval, Msg).
+    emqx_utils:start_timer(Interval, Msg).

 update_mem_alarm_status(HWM) when HWM > 1.0 orelse HWM < 0.0 ->
     ?SLOG(warning, #{msg => "discarded_out_of_range_mem_alarm_threshold", value => HWM}),
@@ -57,7 +57,7 @@
 -spec start_link(atom(), pos_integer()) -> startlink_ret().
 start_link(Pool, Id) ->
     gen_server:start_link(
-        {local, emqx_misc:proc_name(?MODULE, Id)},
+        {local, emqx_utils:proc_name(?MODULE, Id)},
         ?MODULE,
         [Pool, Id],
         [{hibernate_after, 1000}]
@@ -98,7 +98,7 @@ mnesia(boot) ->
 -spec start_link(atom(), pos_integer()) -> startlink_ret().
 start_link(Pool, Id) ->
     gen_server:start_link(
-        {local, emqx_misc:proc_name(?MODULE, Id)},
+        {local, emqx_utils:proc_name(?MODULE, Id)},
         ?MODULE,
         [Pool, Id],
         [{hibernate_after, 1000}]
@@ -145,17 +145,23 @@ roots(high) ->
     {"listeners",
         sc(
             ref("listeners"),
-            #{}
-        )},
-    {"zones",
-        sc(
-            map("name", ref("zone")),
-            #{desc => ?DESC(zones)}
+            #{importance => ?IMPORTANCE_HIGH}
         )},
     {"mqtt",
         sc(
             ref("mqtt"),
-            #{desc => ?DESC(mqtt)}
+            #{
+                desc => ?DESC(mqtt),
+                importance => ?IMPORTANCE_MEDIUM
+            }
+        )},
+    {"zones",
+        sc(
+            map("name", ref("zone")),
+            #{
+                desc => ?DESC(zones),
+                importance => ?IMPORTANCE_LOW
+            }
         )},
     {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, authentication(global)},
     %% NOTE: authorization schema here is only to keep emqx app prue
@@ -163,7 +169,7 @@ roots(high) ->
     {?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME,
         sc(
             ref(?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME),
-            #{}
+            #{importance => ?IMPORTANCE_HIDDEN}
         )}
     ];
 roots(medium) ->
@@ -186,7 +192,7 @@ roots(medium) ->
     {"overload_protection",
         sc(
             ref("overload_protection"),
-            #{}
+            #{importance => ?IMPORTANCE_HIDDEN}
         )}
     ];
 roots(low) ->
@@ -199,12 +205,16 @@ roots(low) ->
     {"conn_congestion",
         sc(
             ref("conn_congestion"),
-            #{}
+            #{
+                importance => ?IMPORTANCE_HIDDEN
+            }
         )},
     {"stats",
         sc(
             ref("stats"),
-            #{}
+            #{
+                importance => ?IMPORTANCE_HIDDEN
+            }
         )},
     {"sysmon",
         sc(
@@ -219,17 +229,17 @@ roots(low) ->
     {"flapping_detect",
         sc(
             ref("flapping_detect"),
-            #{}
+            #{importance => ?IMPORTANCE_HIDDEN}
         )},
     {"persistent_session_store",
         sc(
             ref("persistent_session_store"),
-            #{}
+            #{importance => ?IMPORTANCE_HIDDEN}
         )},
     {"trace",
         sc(
             ref("trace"),
-            #{}
+            #{importance => ?IMPORTANCE_HIDDEN}
         )},
     {"crl_cache",
         sc(
@@ -339,6 +349,7 @@ fields("stats") ->
             boolean(),
             #{
                 default => true,
+                importance => ?IMPORTANCE_HIDDEN,
                 desc => ?DESC(stats_enable)
             }
         )}
@@ -609,8 +620,7 @@ fields("mqtt") ->
         )}
     ];
 fields("zone") ->
-    Fields = emqx_zone_schema:roots(),
-    [{F, ref(emqx_zone_schema, F)} || F <- Fields];
+    emqx_zone_schema:zone();
 fields("flapping_detect") ->
     [
         {"enable",
@@ -618,25 +628,27 @@ fields("flapping_detect") ->
                 boolean(),
                 #{
                     default => false,
+                    deprecated => {since, "5.0.23"},
                     desc => ?DESC(flapping_detect_enable)
                 }
             )},
-        {"max_count",
-            sc(
-                integer(),
-                #{
-                    default => 15,
-                    desc => ?DESC(flapping_detect_max_count)
-                }
-            )},
         {"window_time",
             sc(
-                duration(),
+                hoconsc:union([disabled, duration()]),
                 #{
-                    default => <<"1m">>,
+                    default => disabled,
+                    importance => ?IMPORTANCE_HIGH,
                     desc => ?DESC(flapping_detect_window_time)
                 }
             )},
+        {"max_count",
+            sc(
+                non_neg_integer(),
+                #{
+                    default => 15,
+                    desc => ?DESC(flapping_detect_max_count)
+                }
+            )},
         {"ban_time",
             sc(
                 duration(),
|
||||||
ref("broker_perf"),
|
ref("broker_perf"),
|
||||||
#{importance => ?IMPORTANCE_HIDDEN}
|
#{importance => ?IMPORTANCE_HIDDEN}
|
||||||
)},
|
)},
|
||||||
|
%% FIXME: Need new design for shared subscription group
|
||||||
{"shared_subscription_group",
|
{"shared_subscription_group",
|
||||||
sc(
|
sc(
|
||||||
map(name, ref("shared_subscription_group")),
|
map(name, ref("shared_subscription_group")),
|
||||||
#{
|
#{
|
||||||
example => #{<<"example_group">> => #{<<"strategy">> => <<"random">>}},
|
example => #{<<"example_group">> => #{<<"strategy">> => <<"random">>}},
|
||||||
desc => ?DESC(shared_subscription_group_strategy)
|
desc => ?DESC(shared_subscription_group_strategy),
|
||||||
|
importance => ?IMPORTANCE_HIDDEN
|
||||||
}
|
}
|
||||||
)}
|
)}
|
||||||
];
|
];
|
||||||
|
@ -1853,6 +1867,8 @@ fields("trace") ->
|
||||||
{"payload_encode",
|
{"payload_encode",
|
||||||
sc(hoconsc:enum([hex, text, hidden]), #{
|
sc(hoconsc:enum([hex, text, hidden]), #{
|
||||||
default => text,
|
default => text,
|
||||||
|
deprecated => {since, "5.0.22"},
|
||||||
|
importance => ?IMPORTANCE_HIDDEN,
|
||||||
desc => ?DESC(fields_trace_payload_encode)
|
desc => ?DESC(fields_trace_payload_encode)
|
||||||
})}
|
})}
|
||||||
].
|
].
|
||||||
|
@ -2325,7 +2341,7 @@ mqtt_ssl_listener_ssl_options_validator(Conf) ->
|
||||||
fun ocsp_outer_validator/1,
|
fun ocsp_outer_validator/1,
|
||||||
fun crl_outer_validator/1
|
fun crl_outer_validator/1
|
||||||
],
|
],
|
||||||
case emqx_misc:pipeline(Checks, Conf, not_used) of
|
case emqx_utils:pipeline(Checks, Conf, not_used) of
|
||||||
{ok, _, _} ->
|
{ok, _, _} ->
|
||||||
ok;
|
ok;
|
||||||
{error, Reason, _NotUsed} ->
|
{error, Reason, _NotUsed} ->
|
||||||
|
@ -2346,7 +2362,7 @@ ocsp_outer_validator(_Conf) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
ocsp_inner_validator(#{enable_ocsp_stapling := _} = Conf) ->
|
ocsp_inner_validator(#{enable_ocsp_stapling := _} = Conf) ->
|
||||||
ocsp_inner_validator(emqx_map_lib:binary_key_map(Conf));
|
ocsp_inner_validator(emqx_utils_maps:binary_key_map(Conf));
|
||||||
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := false} = _Conf) ->
|
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := false} = _Conf) ->
|
||||||
ok;
|
ok;
|
||||||
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := true} = Conf) ->
|
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := true} = Conf) ->
|
||||||
|
@ -2581,7 +2597,7 @@ to_url(Str) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
to_json_binary(Str) ->
|
to_json_binary(Str) ->
|
||||||
case emqx_json:safe_decode(Str) of
|
case emqx_utils_json:safe_decode(Str) of
|
||||||
{ok, _} ->
|
{ok, _} ->
|
||||||
{ok, iolist_to_binary(Str)};
|
{ok, iolist_to_binary(Str)};
|
||||||
Error ->
|
Error ->
|
||||||
|
@ -2654,20 +2670,22 @@ to_atom(Str) when is_list(Str) ->
|
||||||
to_atom(Bin) when is_binary(Bin) ->
|
to_atom(Bin) when is_binary(Bin) ->
|
||||||
binary_to_atom(Bin, utf8).
|
binary_to_atom(Bin, utf8).
|
||||||
|
|
||||||
validate_heap_size(Siz) ->
|
validate_heap_size(Siz) when is_integer(Siz) ->
|
||||||
MaxSiz =
|
MaxSiz =
|
||||||
case erlang:system_info(wordsize) of
|
case erlang:system_info(wordsize) of
|
||||||
% arch_64
|
% arch_64
|
||||||
8 ->
|
8 -> (1 bsl 59) - 1;
|
||||||
(1 bsl 59) - 1;
|
|
||||||
% arch_32
|
% arch_32
|
||||||
4 ->
|
4 -> (1 bsl 27) - 1
|
||||||
(1 bsl 27) - 1
|
|
||||||
end,
|
end,
|
||||||
case Siz > MaxSiz of
|
case Siz > MaxSiz of
|
||||||
true -> error(io_lib:format("force_shutdown_policy: heap-size ~ts is too large", [Siz]));
|
true ->
|
||||||
false -> ok
|
{error, #{reason => max_heap_size_too_large, maximum => MaxSiz}};
|
||||||
end.
|
false ->
|
||||||
|
ok
|
||||||
|
end;
|
||||||
|
validate_heap_size(_SizStr) ->
|
||||||
|
{error, invalid_heap_size}.
|
||||||
|
|
||||||
validate_alarm_actions(Actions) ->
|
validate_alarm_actions(Actions) ->
|
||||||
UnSupported = lists:filter(
|
UnSupported = lists:filter(
|
||||||
|
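Note: validate_heap_size/1 above now reports problems as return values instead of raising: oversized integers yield {error, #{reason => max_heap_size_too_large, ...}} and non-integer input yields {error, invalid_heap_size}. Illustrative expectations on a 64-bit VM follow (the function is not exported; the sketch only spells out the new return convention):

    heap_size_validator_expectations() ->
        ok = validate_heap_size(32 * 1024 * 1024),
        {error, #{reason := max_heap_size_too_large}} = validate_heap_size(1 bsl 60),
        {error, invalid_heap_size} = validate_heap_size(<<"1GB">>),
        ok.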
@@ -2760,7 +2778,11 @@ authentication(Which) ->
         Module ->
             Module:root_type()
     end,
-    hoconsc:mk(Type, #{desc => Desc, converter => fun ensure_array/2}).
+    hoconsc:mk(Type, #{
+        desc => Desc,
+        converter => fun ensure_array/2,
+        importance => ?IMPORTANCE_HIDDEN
+    }).

 %% the older version schema allows individual element (instead of a chain) in config
 ensure_array(undefined, _) -> undefined;
@@ -39,7 +39,7 @@
 %% @doc Create a sequence.
 -spec create(name()) -> ok.
 create(Name) ->
-    emqx_tables:new(Name, [public, set, {write_concurrency, true}]).
+    emqx_utils_ets:new(Name, [public, set, {write_concurrency, true}]).

 %% @doc Next value of the sequence.
 -spec nextval(name(), key()) -> seqid().
@@ -941,7 +941,7 @@ age(Now, Ts) -> Now - Ts.
 %%--------------------------------------------------------------------

 set_field(Name, Value, Session) ->
-    Pos = emqx_misc:index_of(Name, record_info(fields, session)),
+    Pos = emqx_utils:index_of(Name, record_info(fields, session)),
     setelement(Pos + 1, Session, Value).

 get_mqueue(#session{mqueue = Q}) ->
@@ -95,7 +95,7 @@ create_table(Tab, Storage) ->
 %%--------------------------------------------------------------------

 create_init_tab() ->
-    emqx_tables:new(?SESSION_INIT_TAB, [
+    emqx_utils_ets:new(?SESSION_INIT_TAB, [
         public,
         {read_concurrency, true},
         {write_concurrency, true}
@@ -104,7 +104,7 @@ create_init_tab() ->
 -spec start_link(atom(), pos_integer()) -> startlink_ret().
 start_link(Pool, Id) ->
     gen_server:start_link(
-        {local, emqx_misc:proc_name(?MODULE, Id)},
+        {local, emqx_utils:proc_name(?MODULE, Id)},
         ?MODULE,
         [Pool, Id],
         [{hibernate_after, 1000}]
@@ -182,7 +182,7 @@ pending(SessionID, MarkerIDs) ->
     call(pick(SessionID), {pending, SessionID, MarkerIDs}).

 buffer(SessionID, STopic, Msg) ->
-    case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of
+    case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
         undefined -> ok;
         Worker -> emqx_session_router_worker:buffer(Worker, STopic, Msg)
     end.
@@ -194,7 +194,7 @@ resume_begin(From, SessionID) when is_pid(From), is_binary(SessionID) ->
 -spec resume_end(pid(), binary()) ->
     {'ok', [emqx_types:message()]} | {'error', term()}.
 resume_end(From, SessionID) when is_pid(From), is_binary(SessionID) ->
-    case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of
+    case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
         undefined ->
             ?tp(ps_session_not_found, #{sid => SessionID}),
             {error, not_found};
@@ -249,7 +249,7 @@ handle_cast({delete_routes, SessionID, Subscriptions}, State) ->
     ok = lists:foreach(Fun, maps:to_list(Subscriptions)),
     {noreply, State};
 handle_cast({resume_end, SessionID, Pid}, State) ->
-    case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of
+    case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
         undefined -> skip;
         P when P =:= Pid -> ets:delete(?SESSION_INIT_TAB, SessionID);
         P when is_pid(P) -> skip
@@ -283,7 +283,7 @@ init_resume_worker(RemotePid, SessionID, #{pmon := Pmon} = State) ->
             error;
         {ok, Pid} ->
             Pmon1 = emqx_pmon:monitor(Pid, Pmon),
-            case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of
+            case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
                 undefined ->
                     {ok, Pid, State#{pmon => Pmon1}};
                 {_, OldPid} ->
@@ -399,9 +399,11 @@ init([]) ->
     ok = mria:wait_for_tables([?TAB]),
     {ok, _} = mnesia:subscribe({table, ?TAB, simple}),
     {atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun ?MODULE:init_monitors/0),
-    ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]),
-    ok = emqx_tables:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]),
-    ok = emqx_tables:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [public, set, {write_concurrency, true}]),
+    ok = emqx_utils_ets:new(?SHARED_SUBS, [protected, bag]),
+    ok = emqx_utils_ets:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]),
+    ok = emqx_utils_ets:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [
+        public, set, {write_concurrency, true}
+    ]),
     {ok, update_stats(#state{pmon = PMon})}.

 init_monitors() ->
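Note: across these hunks emqx_tables:new/2 becomes emqx_utils_ets:new/2. Presumably this is the same idempotent "create the named ETS table unless it already exists" helper, only relocated; the sketch below shows the assumed shape, not the actual implementation.

    %% Assumed behaviour (sketch).
    new(Tab, Opts) ->
        case ets:info(Tab, name) of
            undefined ->
                _ = ets:new(Tab, lists:usort([named_table | Opts])),
                ok;
            Tab ->
                ok
        end.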
@ -201,7 +201,7 @@ cast(Msg) -> gen_server:cast(?SERVER, Msg).
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
init(#{tick_ms := TickMs}) ->
|
init(#{tick_ms := TickMs}) ->
|
||||||
ok = emqx_tables:new(?TAB, [public, set, {write_concurrency, true}]),
|
ok = emqx_utils_ets:new(?TAB, [public, set, {write_concurrency, true}]),
|
||||||
Stats = lists:append([
|
Stats = lists:append([
|
||||||
?CONNECTION_STATS,
|
?CONNECTION_STATS,
|
||||||
?CHANNEL_STATS,
|
?CHANNEL_STATS,
|
||||||
|
@ -213,7 +213,7 @@ init(#{tick_ms := TickMs}) ->
|
||||||
{ok, start_timer(#state{updates = [], tick_ms = TickMs}), hibernate}.
|
{ok, start_timer(#state{updates = [], tick_ms = TickMs}), hibernate}.
|
||||||
|
|
||||||
start_timer(#state{tick_ms = Ms} = State) ->
|
start_timer(#state{tick_ms = Ms} = State) ->
|
||||||
State#state{timer = emqx_misc:start_timer(Ms, tick)}.
|
State#state{timer = emqx_utils:start_timer(Ms, tick)}.
|
||||||
|
|
||||||
handle_call(stop, _From, State) ->
|
handle_call(stop, _From, State) ->
|
||||||
{stop, normal, ok, State};
|
{stop, normal, ok, State};
|
||||||
|
@ -301,7 +301,7 @@ handle_info(Info, State) ->
|
||||||
{noreply, State}.
|
{noreply, State}.
|
||||||
|
|
||||||
terminate(_Reason, #state{timer = TRef}) ->
|
terminate(_Reason, #state{timer = TRef}) ->
|
||||||
emqx_misc:cancel_timer(TRef).
|
emqx_utils:cancel_timer(TRef).
|
||||||
|
|
||||||
code_change(_OldVsn, State, _Extra) ->
|
code_change(_OldVsn, State, _Extra) ->
|
||||||
{ok, State}.
|
{ok, State}.
|
||||||
|
|
|
@ -62,7 +62,7 @@
|
||||||
-endif.
|
-endif.
|
||||||
|
|
||||||
-import(emqx_topic, [systop/1]).
|
-import(emqx_topic, [systop/1]).
|
||||||
-import(emqx_misc, [start_timer/2]).
|
-import(emqx_utils, [start_timer/2]).
|
||||||
|
|
||||||
-record(state, {
|
-record(state, {
|
||||||
heartbeat :: maybe(reference()),
|
heartbeat :: maybe(reference()),
|
||||||
|
@ -222,7 +222,7 @@ handle_info(Info, State) ->
|
||||||
terminate(_Reason, #state{heartbeat = TRef1, ticker = TRef2}) ->
|
terminate(_Reason, #state{heartbeat = TRef1, ticker = TRef2}) ->
|
||||||
_ = emqx_config_handler:remove_handler(?CONF_KEY_PATH),
|
_ = emqx_config_handler:remove_handler(?CONF_KEY_PATH),
|
||||||
unload_event_hooks(sys_event_messages()),
|
unload_event_hooks(sys_event_messages()),
|
||||||
lists:foreach(fun emqx_misc:cancel_timer/1, [TRef1, TRef2]).
|
lists:foreach(fun emqx_utils:cancel_timer/1, [TRef1, TRef2]).
|
||||||
|
|
||||||
unload_event_hooks([]) ->
|
unload_event_hooks([]) ->
|
||||||
ok;
|
ok;
|
||||||
|
@ -348,7 +348,7 @@ publish(Event, Payload) when
|
||||||
Event == unsubscribed
|
Event == unsubscribed
|
||||||
->
|
->
|
||||||
Topic = event_topic(Event, Payload),
|
Topic = event_topic(Event, Payload),
|
||||||
safe_publish(Topic, emqx_json:encode(Payload)).
|
safe_publish(Topic, emqx_utils_json:encode(Payload)).
|
||||||
|
|
||||||
metric_topic(Name) ->
|
metric_topic(Name) ->
|
||||||
translate_topic("metrics/", Name).
|
translate_topic("metrics/", Name).
|
||||||
|
|
|
@ -77,7 +77,7 @@ init([]) ->
|
||||||
{ok, start_timer(#{timer => undefined, events => []})}.
|
{ok, start_timer(#{timer => undefined, events => []})}.
|
||||||
|
|
||||||
start_timer(State) ->
|
start_timer(State) ->
|
||||||
State#{timer := emqx_misc:start_timer(timer:seconds(2), reset)}.
|
State#{timer := emqx_utils:start_timer(timer:seconds(2), reset)}.
|
||||||
|
|
||||||
sysm_opts(VM) ->
|
sysm_opts(VM) ->
|
||||||
sysm_opts(maps:to_list(VM), []).
|
sysm_opts(maps:to_list(VM), []).
|
||||||
|
@ -204,7 +204,7 @@ handle_info(Info, State) ->
|
||||||
{noreply, State}.
|
{noreply, State}.
|
||||||
|
|
||||||
terminate(_Reason, #{timer := TRef}) ->
|
terminate(_Reason, #{timer := TRef}) ->
|
||||||
emqx_misc:cancel_timer(TRef),
|
emqx_utils:cancel_timer(TRef),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
code_change(_OldVsn, State, _Extra) ->
|
code_change(_OldVsn, State, _Extra) ->
|
||||||
|
|
|
@ -317,7 +317,9 @@ ensure_ssl_files(Dir, SSL, Opts) ->
|
||||||
ensure_ssl_files(_Dir, SSL, [], _Opts) ->
|
ensure_ssl_files(_Dir, SSL, [], _Opts) ->
|
||||||
{ok, SSL};
|
{ok, SSL};
|
||||||
ensure_ssl_files(Dir, SSL, [KeyPath | KeyPaths], Opts) ->
|
ensure_ssl_files(Dir, SSL, [KeyPath | KeyPaths], Opts) ->
|
||||||
case ensure_ssl_file(Dir, KeyPath, SSL, emqx_map_lib:deep_get(KeyPath, SSL, undefined), Opts) of
|
case
|
||||||
|
ensure_ssl_file(Dir, KeyPath, SSL, emqx_utils_maps:deep_get(KeyPath, SSL, undefined), Opts)
|
||||||
|
of
|
||||||
{ok, NewSSL} ->
|
{ok, NewSSL} ->
|
||||||
ensure_ssl_files(Dir, NewSSL, KeyPaths, Opts);
|
ensure_ssl_files(Dir, NewSSL, KeyPaths, Opts);
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
|
@ -332,7 +334,7 @@ delete_ssl_files(Dir, NewOpts0, OldOpts0) ->
|
||||||
{ok, OldOpts} = ensure_ssl_files(Dir, OldOpts0, #{dry_run => DryRun}),
|
{ok, OldOpts} = ensure_ssl_files(Dir, OldOpts0, #{dry_run => DryRun}),
|
||||||
Get = fun
|
Get = fun
|
||||||
(_KP, undefined) -> undefined;
|
(_KP, undefined) -> undefined;
|
||||||
(KP, Opts) -> emqx_map_lib:deep_get(KP, Opts, undefined)
|
(KP, Opts) -> emqx_utils_maps:deep_get(KP, Opts, undefined)
|
||||||
end,
|
end,
|
||||||
lists:foreach(
|
lists:foreach(
|
||||||
fun(KeyPath) -> delete_old_file(Get(KeyPath, NewOpts), Get(KeyPath, OldOpts)) end,
|
     fun(KeyPath) -> delete_old_file(Get(KeyPath, NewOpts), Get(KeyPath, OldOpts)) end,
@@ -372,7 +374,7 @@ do_ensure_ssl_file(Dir, KeyPath, SSL, MaybePem, DryRun) ->
         true ->
             case save_pem_file(Dir, KeyPath, MaybePem, DryRun) of
                 {ok, Path} ->
-                    NewSSL = emqx_map_lib:deep_put(KeyPath, SSL, Path),
+                    NewSSL = emqx_utils_maps:deep_put(KeyPath, SSL, Path),
                     {ok, NewSSL};
                 {error, Reason} ->
                     {error, Reason}
@@ -482,9 +484,9 @@ is_valid_pem_file(Path) ->
 %% so they are forced to upload a cert file, or use an existing file path.
 -spec drop_invalid_certs(map()) -> map().
 drop_invalid_certs(#{enable := False} = SSL) when ?IS_FALSE(False) ->
-    lists:foldl(fun emqx_map_lib:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS_A);
+    lists:foldl(fun emqx_utils_maps:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS_A);
 drop_invalid_certs(#{<<"enable">> := False} = SSL) when ?IS_FALSE(False) ->
-    lists:foldl(fun emqx_map_lib:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS);
+    lists:foldl(fun emqx_utils_maps:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS);
 drop_invalid_certs(#{enable := True} = SSL) when ?IS_TRUE(True) ->
     do_drop_invalid_certs(?SSL_FILE_OPT_PATHS_A, SSL);
 drop_invalid_certs(#{<<"enable">> := True} = SSL) when ?IS_TRUE(True) ->
@@ -493,7 +495,7 @@ drop_invalid_certs(#{<<"enable">> := True} = SSL) when ?IS_TRUE(True) ->
 do_drop_invalid_certs([], SSL) ->
     SSL;
 do_drop_invalid_certs([KeyPath | KeyPaths], SSL) ->
-    case emqx_map_lib:deep_get(KeyPath, SSL, undefined) of
+    case emqx_utils_maps:deep_get(KeyPath, SSL, undefined) of
         undefined ->
             do_drop_invalid_certs(KeyPaths, SSL);
         PemOrPath ->
@@ -501,7 +503,7 @@ do_drop_invalid_certs([KeyPath | KeyPaths], SSL) ->
                 true ->
                     do_drop_invalid_certs(KeyPaths, SSL);
                 {error, _} ->
-                    do_drop_invalid_certs(KeyPaths, emqx_map_lib:deep_remove(KeyPath, SSL))
+                    do_drop_invalid_certs(KeyPaths, emqx_utils_maps:deep_remove(KeyPath, SSL))
             end
     end.

@@ -586,7 +588,9 @@ ensure_ssl_file_key(_SSL, []) ->
     ok;
 ensure_ssl_file_key(SSL, RequiredKeyPaths) ->
     NotFoundRef = make_ref(),
-    Filter = fun(KeyPath) -> NotFoundRef =:= emqx_map_lib:deep_get(KeyPath, SSL, NotFoundRef) end,
+    Filter = fun(KeyPath) ->
+        NotFoundRef =:= emqx_utils_maps:deep_get(KeyPath, SSL, NotFoundRef)
+    end,
     case lists:filter(Filter, RequiredKeyPaths) of
         [] -> ok;
         Miss -> {error, #{reason => ssl_file_option_not_found, which_options => Miss}}
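The hunks above are a mechanical rename of emqx_map_lib to emqx_utils_maps. As a rough illustration of the key-path helpers these call sites rely on, here is a minimal sketch (deep_get_demo is a hypothetical name, not the library code; the key-path semantics are an assumption inferred from the calls above):

%% deep_get_demo(KeyPath, Map, Default): walk a list of keys into nested maps.
deep_get_demo([], Value, _Default) ->
    Value;
deep_get_demo([Key | Rest], Map, Default) when is_map(Map) ->
    case maps:find(Key, Map) of
        {ok, Next} -> deep_get_demo(Rest, Next, Default);
        error -> Default
    end;
deep_get_demo(_KeyPath, _NotAMap, Default) ->
    Default.

A call such as deep_get_demo([<<"ocsp">>, <<"issuer_pem">>], SSL, undefined) mirrors the usage in the hunks above.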
@@ -21,6 +21,7 @@
 -include_lib("emqx/include/logger.hrl").
 -include_lib("kernel/include/file.hrl").
 -include_lib("snabbkaffe/include/trace.hrl").
+-include_lib("emqx/include/emqx_trace.hrl").

 -export([
     publish/1,
@@ -54,8 +55,6 @@

 -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

--include("emqx_trace.hrl").
-
 -ifdef(TEST).
 -export([
     log_file/2,
@@ -147,7 +146,11 @@ list(Enable) ->

 -spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) ->
     {ok, #?TRACE{}}
-    | {error, {duplicate_condition, iodata()} | {already_existed, iodata()} | iodata()}.
+    | {error,
+        {duplicate_condition, iodata()}
+        | {already_existed, iodata()}
+        | {bad_type, any()}
+        | iodata()}.
 create(Trace) ->
     case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
         true ->
@@ -222,14 +225,16 @@ format(Traces) ->

 init([]) ->
     erlang:process_flag(trap_exit, true),
+    Fields = record_info(fields, ?TRACE),
     ok = mria:create_table(?TRACE, [
         {type, set},
         {rlog_shard, ?SHARD},
         {storage, disc_copies},
         {record_name, ?TRACE},
-        {attributes, record_info(fields, ?TRACE)}
+        {attributes, Fields}
     ]),
     ok = mria:wait_for_tables([?TRACE]),
+    maybe_migrate_trace(Fields),
     {ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
     ok = filelib:ensure_dir(filename:join([trace_dir(), dummy])),
     ok = filelib:ensure_dir(filename:join([zip_dir(), dummy])),
@@ -267,7 +272,7 @@ handle_info({timeout, TRef, update_trace}, #{timer := TRef} = State) ->
     ?tp(update_trace_done, #{}),
     {noreply, State#{timer => NextTRef}};
 handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) ->
-    emqx_misc:cancel_timer(TRef),
+    emqx_utils:cancel_timer(TRef),
     handle_info({timeout, TRef, update_trace}, State);
 handle_info(Info, State) ->
     ?SLOG(error, #{unexpected_info => Info}),
@@ -275,7 +280,7 @@ handle_info(Info, State) ->

 terminate(_Reason, #{timer := TRef}) ->
     _ = mnesia:unsubscribe({table, ?TRACE, simple}),
-    emqx_misc:cancel_timer(TRef),
+    emqx_utils:cancel_timer(TRef),
     stop_all_trace_handler(),
     update_trace_handler(),
     _ = file:del_dir_r(zip_dir()),
@@ -297,7 +302,7 @@ update_trace(Traces) ->
     ok = stop_trace(NeedStop, Started),
     clean_stale_trace_files(),
     NextTime = find_closest_time(Traces, Now),
-    emqx_misc:start_timer(NextTime, update_trace).
+    emqx_utils:start_timer(NextTime, update_trace).

 stop_all_trace_handler() ->
     lists:foreach(
@@ -358,9 +363,10 @@ start_trace(Trace) ->
         name = Name,
         type = Type,
         filter = Filter,
-        start_at = Start
+        start_at = Start,
+        payload_encode = PayloadEncode
     } = Trace,
-    Who = #{name => Name, type => Type, filter => Filter},
+    Who = #{name => Name, type => Type, filter => Filter, payload_encode => PayloadEncode},
     emqx_trace_handler:install(Who, debug, log_file(Name, Start)).

 stop_trace(Finished, Started) ->
@@ -490,6 +496,8 @@ to_trace(#{type := ip_address, ip_address := Filter} = Trace, Rec) ->
     end;
 to_trace(#{type := Type}, _Rec) ->
     {error, io_lib:format("required ~s field", [Type])};
+to_trace(#{payload_encode := PayloadEncode} = Trace, Rec) ->
+    to_trace(maps:remove(payload_encode, Trace), Rec#?TRACE{payload_encode = PayloadEncode});
 to_trace(#{start_at := StartAt} = Trace, Rec) ->
     {ok, Sec} = to_system_second(StartAt),
     to_trace(maps:remove(start_at, Trace), Rec#?TRACE{start_at = Sec});
@@ -573,3 +581,29 @@ filter_cli_handler(Names) ->

 now_second() ->
     os:system_time(second).
+
+maybe_migrate_trace(Fields) ->
+    case mnesia:table_info(emqx_trace, attributes) =:= Fields of
+        true ->
+            ok;
+        false ->
+            TransFun = fun(Trace) ->
+                case Trace of
+                    {?TRACE, Name, Type, Filter, Enable, StartAt, EndAt} ->
+                        #?TRACE{
+                            name = Name,
+                            type = Type,
+                            filter = Filter,
+                            enable = Enable,
+                            start_at = StartAt,
+                            end_at = EndAt,
+                            payload_encode = text,
+                            extra = #{}
+                        };
+                    #?TRACE{} ->
+                        Trace
+                end
+            end,
+            {atomic, ok} = mnesia:transform_table(?TRACE, TransFun, Fields, ?TRACE),
+            ok
+    end.
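maybe_migrate_trace/1 above upgrades rows written by older releases when the stored attribute list no longer matches the record. Stripped of mnesia, the per-record transform amounts to a pure function like the following sketch (hypothetical helper name; record shapes are taken from the diff, where old rows have 7 tuple elements and new rows add payload_encode and extra):

%% Upgrade an old trace row to the new shape; pass new rows through unchanged.
upgrade_trace_row({emqx_trace, Name, Type, Filter, Enable, StartAt, EndAt}) ->
    {emqx_trace, Name, Type, Filter, Enable, StartAt, EndAt, text, #{}};
upgrade_trace_row(Row) when tuple_size(Row) =:= 9 ->
    %% already in the new shape
    Row.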
@@ -44,7 +44,8 @@
 -type tracer() :: #{
     name := binary(),
     type := clientid | topic | ip_address,
-    filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address()
+    filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address(),
+    payload_encode := text | hidden | hex
 }.

 -define(CONFIG(_LogFile_), #{
@@ -70,7 +71,12 @@
     LogFilePath :: string()
 ) -> ok | {error, term()}.
 install(Name, Type, Filter, Level, LogFile) ->
-    Who = #{type => Type, filter => ensure_bin(Filter), name => ensure_bin(Name)},
+    Who = #{
+        type => Type,
+        filter => ensure_bin(Filter),
+        name => ensure_bin(Name),
+        payload_encode => payload_encode()
+    },
     install(Who, Level, LogFile).

 -spec install(
@@ -160,14 +166,14 @@ filters(#{type := topic, filter := Filter, name := Name}) ->
 filters(#{type := ip_address, filter := Filter, name := Name}) ->
     [{ip_address, {fun ?MODULE:filter_ip_address/2, {ensure_list(Filter), Name}}}].

-formatter(#{type := _Type}) ->
+formatter(#{type := _Type, payload_encode := PayloadEncode}) ->
     {emqx_trace_formatter, #{
         %% template is for ?SLOG message not ?TRACE.
         template => [time, " [", level, "] ", msg, "\n"],
         single_line => true,
         max_size => unlimited,
         depth => unlimited,
-        payload_encode => payload_encode()
+        payload_encode => PayloadEncode
     }}.

 filter_traces(#{id := Id, level := Level, dst := Dst, filters := Filters}, Acc) ->
@@ -190,7 +196,7 @@ handler_id(Name, Type) ->
         do_handler_id(Name, Type)
     catch
         _:_ ->
-            Hash = emqx_misc:bin_to_hexstr(crypto:hash(md5, Name), lower),
+            Hash = emqx_utils:bin_to_hexstr(crypto:hash(md5, Name), lower),
             do_handler_id(Hash, Type)
     end.
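With the tracer map extended by payload_encode, a caller-side sketch of install/3 could look as follows (the values are illustrative only; the Who keys and the ok | {error, _} return come from the spec in the hunk above):

Who = #{
    name => <<"trace-demo">>,
    type => topic,
    filter => <<"t/#">>,
    payload_encode => hex
},
case emqx_trace_handler:install(Who, debug, "/tmp/trace-demo.log") of
    ok -> ok;
    {error, Reason} -> {error, Reason}
end.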
@@ -107,7 +107,7 @@ code_change(_OldVsn, State, _Extra) ->

 start_check_timer() ->
     Interval = emqx:get_config([sysmon, vm, process_check_interval]),
-    emqx_misc:start_timer(Interval, check).
+    emqx_utils:start_timer(Interval, check).

 usage(Percent) ->
     integer_to_list(floor(Percent * 100)) ++ "%".

@@ -52,7 +52,7 @@
 -export([set_field/3]).

 -import(
-    emqx_misc,
+    emqx_utils,
     [
         maybe_apply/2,
         start_timer/2
@@ -172,7 +172,7 @@ stats(WsPid) when is_pid(WsPid) ->
 stats(#state{channel = Channel}) ->
     SockStats = emqx_pd:get_counters(?SOCK_STATS),
     ChanStats = emqx_channel:stats(Channel),
-    ProcStats = emqx_misc:proc_stats(),
+    ProcStats = emqx_utils:proc_stats(),
     lists:append([SockStats, ChanStats, ProcStats]).

 %% kick|discard|takeover
@@ -340,7 +340,7 @@ tune_heap_size(Channel) ->
         )
     of
         #{enable := false} -> ok;
-        ShutdownPolicy -> emqx_misc:tune_heap_size(ShutdownPolicy)
+        ShutdownPolicy -> emqx_utils:tune_heap_size(ShutdownPolicy)
     end.

 get_stats_enable(Zone) ->
@@ -454,7 +454,7 @@ websocket_info(
     State = #state{listener = {Type, Listener}}
 ) ->
     ActiveN = get_active_n(Type, Listener),
-    Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)],
+    Delivers = [Deliver | emqx_utils:drain_deliver(ActiveN)],
     with_channel(handle_deliver, [Delivers], State);
 websocket_info(
     {timeout, _, limit_timeout},
@@ -678,7 +678,7 @@ check_oom(State = #state{channel = Channel}) ->
         #{enable := false} ->
             State;
         #{enable := true} ->
-            case emqx_misc:check_oom(ShutdownPolicy) of
+            case emqx_utils:check_oom(ShutdownPolicy) of
                 Shutdown = {shutdown, _Reason} ->
                     postpone(Shutdown, State);
                 _Other ->
@@ -913,7 +913,7 @@ inc_qos_stats_key(_, _) -> undefined.
 %% Cancel idle timer

 cancel_idle_timer(State = #state{idle_timer = IdleTimer}) ->
-    ok = emqx_misc:cancel_timer(IdleTimer),
+    ok = emqx_utils:cancel_timer(IdleTimer),
     State#state{idle_timer = undefined}.

 %%--------------------------------------------------------------------
@@ -1046,7 +1046,7 @@ check_max_connection(Type, Listener) ->
 %%--------------------------------------------------------------------

 set_field(Name, Value, State) ->
-    Pos = emqx_misc:index_of(Name, record_info(fields, state)),
+    Pos = emqx_utils:index_of(Name, record_info(fields, state)),
     setelement(Pos + 1, State, Value).

 %% ensure lowercase letters in headers
@@ -15,8 +15,10 @@
 %%--------------------------------------------------------------------

 -module(emqx_zone_schema).
+-include_lib("typerefl/include/types.hrl").
+-include_lib("hocon/include/hoconsc.hrl").

--export([namespace/0, roots/0, fields/1, desc/1]).
+-export([namespace/0, roots/0, fields/1, desc/1, zone/0, zone_without_hidden/0]).

 namespace() -> zone.

@@ -33,6 +35,32 @@ roots() ->
     "overload_protection"
 ].

+zone() ->
+    Fields = roots(),
+    Hidden = hidden(),
+    lists:map(
+        fun(F) ->
+            case lists:member(F, Hidden) of
+                true ->
+                    {F, ?HOCON(?R_REF(F), #{importance => ?IMPORTANCE_HIDDEN})};
+                false ->
+                    {F, ?HOCON(?R_REF(F), #{})}
+            end
+        end,
+        Fields
+    ).
+
+zone_without_hidden() ->
+    lists:map(fun(F) -> {F, ?HOCON(?R_REF(F), #{})} end, roots() -- hidden()).
+
+hidden() ->
+    [
+        "stats",
+        "overload_protection",
+        "conn_congestion",
+        "flapping_detect"
+    ].
+
 %% zone schemas are clones from the same name from root level
 %% only not allowed to have default values.
 fields(Name) ->
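A brief usage sketch of the two new schema helpers (the return shapes are inferred from the definitions above: both yield {RootName, SchemaRef} pairs, with zone/0 keeping the hidden roots but flagging them, while zone_without_hidden/0 omits them):

All = emqx_zone_schema:zone(),
Visible = emqx_zone_schema:zone_without_hidden(),
%% the full list can only be at least as long as the visible one
true = length(All) >= length(Visible).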
@@ -95,13 +95,17 @@ all() ->
     emqx_common_test_helpers:all(?MODULE).

 init_per_suite(Config) ->
+    LogLevel = emqx_logger:get_primary_log_level(),
+    ok = emqx_logger:set_log_level(debug),
     application:set_env(ekka, strict_mode, true),
     emqx_common_test_helpers:boot_modules(all),
     emqx_common_test_helpers:start_apps([]),
-    Config.
+    [{log_level, LogLevel} | Config].

-end_per_suite(_) ->
+end_per_suite(Config) ->
     emqx_common_test_helpers:stop_apps([]),
+    LogLevel = ?config(log_level, Config),
+    emqx_logger:set_log_level(LogLevel),
     ok.

 init_per_testcase(Case, Config) ->

@@ -1137,7 +1137,7 @@ t_ws_cookie_init(_) ->
 %%--------------------------------------------------------------------

 t_flapping_detect(_) ->
-    emqx_config:put_zone_conf(default, [flapping_detect, enable], true),
+    emqx_config:put_zone_conf(default, [flapping_detect, window_time], 60000),
     Parent = self(),
     ok = meck:expect(
         emqx_cm,
@@ -314,6 +314,7 @@ stop_apps(Apps) ->
     ok = emqx_config:delete_override_conf_files(),
     application:unset_env(emqx, local_override_conf_file),
     application:unset_env(emqx, cluster_override_conf_file),
+    application:unset_env(emqx, cluster_hocon_file),
     application:unset_env(gen_rpc, port_discovery),
     ok.

@@ -462,6 +463,10 @@ force_set_config_file_paths(emqx_conf, [Path] = Paths) ->
     ok = file:write_file(Path, Bin, [append]),
     application:set_env(emqx, config_files, Paths);
 force_set_config_file_paths(emqx, Paths) ->
+    %% we need init cluster conf, so we can save the cluster conf to the file
+    application:set_env(emqx, local_override_conf_file, "local_override.conf"),
+    application:set_env(emqx, cluster_override_conf_file, "cluster_override.conf"),
+    application:set_env(emqx, cluster_conf_file, "cluster.hocon"),
     application:set_env(emqx, config_files, Paths);
 force_set_config_file_paths(_, _) ->
     ok.
@@ -477,7 +482,7 @@ copy_certs(_, _) ->
 load_config(SchemaModule, Config, Opts) ->
     ConfigBin =
         case is_map(Config) of
-            true -> jsx:encode(Config);
+            true -> emqx_utils_json:encode(Config);
             false -> Config
         end,
     ok = emqx_config:delete_override_conf_files(),
@@ -1036,7 +1041,7 @@ switch_proxy(Switch, Name, ProxyHost, ProxyPort) ->
             off -> #{<<"enabled">> => false};
             on -> #{<<"enabled">> => true}
         end,
-    BodyBin = emqx_json:encode(Body),
+    BodyBin = emqx_utils_json:encode(Body),
     {ok, {{_, 200, _}, _, _}} = httpc:request(
         post,
         {Url, [], "application/json", BodyBin},
@@ -1056,7 +1061,7 @@ timeout_proxy(on, Name, ProxyHost, ProxyPort) ->
         <<"toxicity">> => 1.0,
         <<"attributes">> => #{<<"timeout">> => 0}
     },
-    BodyBin = emqx_json:encode(Body),
+    BodyBin = emqx_utils_json:encode(Body),
     {ok, {{_, 200, _}, _, _}} = httpc:request(
         post,
         {Url, [], "application/json", BodyBin},
@@ -1091,7 +1096,7 @@ latency_up_proxy(on, Name, ProxyHost, ProxyPort) ->
             <<"jitter">> => 3_000
         }
     },
-    BodyBin = emqx_json:encode(Body),
+    BodyBin = emqx_utils_json:encode(Body),
     {ok, {{_, 200, _}, _, _}} = httpc:request(
         post,
         {Url, [], "application/json", BodyBin},
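Most of the remaining helper changes are the emqx_json/jsx to emqx_utils_json rename. A round-trip sketch of the replacement calls (toy payload; this assumes emqx_utils_json keeps the encode/1 and decode/2 API used throughout the hunks):

Body = emqx_utils_json:encode(#{<<"enabled">> => true}),
#{<<"enabled">> := true} = emqx_utils_json:decode(Body, [return_maps]).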
@@ -54,7 +54,7 @@ request_api(Method, Url, QueryParams, Auth, Body, HttpOpts) ->
         [] ->
             {NewUrl, [Auth]};
         _ ->
-            {NewUrl, [Auth], "application/json", emqx_json:encode(Body)}
+            {NewUrl, [Auth], "application/json", emqx_utils_json:encode(Body)}
     end,
     do_request_api(Method, Request, HttpOpts).

@@ -70,7 +70,7 @@ do_request_api(Method, Request, HttpOpts) ->
 end.

 get_http_data(ResponseBody) ->
-    emqx_json:decode(ResponseBody, [return_maps]).
+    emqx_utils_json:decode(ResponseBody, [return_maps]).

 auth_header(User, Pass) ->
     Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),

@@ -57,5 +57,5 @@ t_fill_default_values(_) ->
         WithDefaults
     ),
     %% ensure JSON compatible
-    _ = emqx_json:encode(WithDefaults),
+    _ = emqx_utils_json:encode(WithDefaults),
     ok.
@@ -21,8 +21,7 @@

 -define(MOD, {mod}).
 -define(WKEY, '?').
--define(LOCAL_CONF, "/tmp/local-override.conf").
--define(CLUSTER_CONF, "/tmp/cluster-override.conf").
+-define(CLUSTER_CONF, "/tmp/cluster.conf").

 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
@@ -38,7 +37,6 @@ end_per_suite(_Config) ->
     emqx_common_test_helpers:stop_apps([]).

 init_per_testcase(_Case, Config) ->
-    _ = file:delete(?LOCAL_CONF),
     _ = file:delete(?CLUSTER_CONF),
     Config.

@@ -200,62 +198,6 @@ t_sub_key_update_remove(_Config) ->
     ok = emqx_config_handler:remove_handler(KeyPath2),
     ok.

-t_local_override_update_remove(_Config) ->
-    application:set_env(emqx, local_override_conf_file, ?LOCAL_CONF),
-    application:set_env(emqx, cluster_override_conf_file, ?CLUSTER_CONF),
-    KeyPath = [sysmon, os, cpu_high_watermark],
-    ok = emqx_config_handler:add_handler(KeyPath, ?MODULE),
-    LocalOpts = #{override_to => local},
-    {ok, Res} = emqx:update_config(KeyPath, <<"70%">>, LocalOpts),
-    ?assertMatch(
-        #{
-            config := 0.7,
-            post_config_update := #{},
-            raw_config := <<"70%">>
-        },
-        Res
-    ),
-    ClusterOpts = #{override_to => cluster},
-    ?assertMatch(
-        {error, {permission_denied, _}}, emqx:update_config(KeyPath, <<"71%">>, ClusterOpts)
-    ),
-    ?assertMatch(0.7, emqx:get_config(KeyPath)),
-
-    KeyPath2 = [sysmon, os, cpu_low_watermark],
-    ok = emqx_config_handler:add_handler(KeyPath2, ?MODULE),
-    ?assertMatch(
-        {error, {permission_denied, _}}, emqx:update_config(KeyPath2, <<"40%">>, ClusterOpts)
-    ),
-
-    %% remove
-    ?assertMatch({error, {permission_denied, _}}, emqx:remove_config(KeyPath)),
-    ?assertEqual(
-        {ok, #{post_config_update => #{}}},
-        emqx:remove_config(KeyPath, #{override_to => local})
-    ),
-    ?assertEqual(
-        {ok, #{post_config_update => #{}}},
-        emqx:remove_config(KeyPath)
-    ),
-    ?assertError({config_not_found, KeyPath}, emqx:get_raw_config(KeyPath)),
-    OSKey = maps:keys(emqx:get_raw_config([sysmon, os])),
-    ?assertEqual(false, lists:member(<<"cpu_high_watermark">>, OSKey)),
-    ?assert(length(OSKey) > 0),
-
-    ?assertEqual(
-        {ok, #{config => 0.8, post_config_update => #{}, raw_config => <<"80%">>}},
-        emqx:reset_config(KeyPath, ClusterOpts)
-    ),
-    OSKey1 = maps:keys(emqx:get_raw_config([sysmon, os])),
-    ?assertEqual(true, lists:member(<<"cpu_high_watermark">>, OSKey1)),
-    ?assert(length(OSKey1) > 1),
-
-    ok = emqx_config_handler:remove_handler(KeyPath),
-    ok = emqx_config_handler:remove_handler(KeyPath2),
-    application:unset_env(emqx, local_override_conf_file),
-    application:unset_env(emqx, cluster_override_conf_file),
-    ok.
-
 t_check_failed(_Config) ->
     KeyPath = [sysmon, os, cpu_check_interval],
     Opts = #{rawconf_with_defaults => true},
@@ -426,9 +368,9 @@ wait_for_new_pid() ->
 callback_error(FailedPath, Update, Error) ->
     Opts = #{rawconf_with_defaults => true},
     ok = emqx_config_handler:add_handler(FailedPath, ?MODULE),
-    Old = emqx:get_raw_config(FailedPath),
+    Old = emqx:get_raw_config(FailedPath, undefined),
     ?assertEqual(Error, emqx:update_config(FailedPath, Update, Opts)),
-    New = emqx:get_raw_config(FailedPath),
+    New = emqx:get_raw_config(FailedPath, undefined),
     ?assertEqual(Old, New),
     ok = emqx_config_handler:remove_handler(FailedPath),
     ok.
@@ -496,16 +496,16 @@ t_get_conn_info(_) ->

 t_oom_shutdown(init, Config) ->
     ok = snabbkaffe:start_trace(),
-    ok = meck:new(emqx_misc, [non_strict, passthrough, no_history, no_link]),
+    ok = meck:new(emqx_utils, [non_strict, passthrough, no_history, no_link]),
     meck:expect(
-        emqx_misc,
+        emqx_utils,
         check_oom,
         fun(_) -> {shutdown, "fake_oom"} end
     ),
     Config;
 t_oom_shutdown('end', _Config) ->
     snabbkaffe:stop(),
-    meck:unload(emqx_misc),
+    meck:unload(emqx_utils),
     ok.

 t_oom_shutdown(_) ->
@@ -402,7 +402,7 @@ request(Method, Url, QueryParams, Body) ->
     Opts = #{return_all => true},
     case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of
         {ok, {Reason, Headers, BodyR}} ->
-            {ok, {Reason, Headers, emqx_json:decode(BodyR, [return_maps])}};
+            {ok, {Reason, Headers, emqx_utils_json:decode(BodyR, [return_maps])}};
         Error ->
             Error
     end.
@@ -997,7 +997,7 @@ do_t_update_listener(Config) ->
             <<"enable_crl_check">> => true
         }
     },
-    ListenerData1 = emqx_map_lib:deep_merge(ListenerData0, CRLConfig),
+    ListenerData1 = emqx_utils_maps:deep_merge(ListenerData0, CRLConfig),
     {ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1),
     ?assertMatch(
         #{
@@ -1040,7 +1040,7 @@ do_t_validations(_Config) ->
     {ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),

     ListenerData1 =
-        emqx_map_lib:deep_merge(
+        emqx_utils_maps:deep_merge(
             ListenerData0,
             #{
                 <<"ssl_options">> =>
@@ -1052,7 +1052,7 @@ do_t_validations(_Config) ->
         ),
     {error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1),
     #{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} =
-        emqx_json:decode(ResRaw1, [return_maps]),
+        emqx_utils_json:decode(ResRaw1, [return_maps]),
     ?assertMatch(
         #{
             <<"mismatches">> :=
@@ -1064,7 +1064,7 @@ do_t_validations(_Config) ->
                 }
             }
         },
-        emqx_json:decode(MsgRaw1, [return_maps])
+        emqx_utils_json:decode(MsgRaw1, [return_maps])
     ),

     ok.

@@ -101,3 +101,21 @@ t_expired_detecting(_) ->
             ets:tab2list(emqx_flapping)
         )
     ).
+
+t_conf_without_window_time(_) ->
+    %% enable is deprecated, so we need to make sure it won't be used.
+    Global = emqx_config:get([flapping_detect]),
+    ?assertNot(maps:is_key(enable, Global)),
+    %% zones don't have default value, so we need to make sure fallback to global conf.
+    %% this new_zone will fallback to global conf.
+    emqx_config:put_zone_conf(new_zone, [flapping_detect], #{}),
+    ?assertEqual(Global, get_policy(new_zone)),
+
+    emqx_config:put_zone_conf(new_zone_1, [flapping_detect], #{window_time => 100}),
+    ?assertEqual(100, emqx_flapping:get_policy(window_time, new_zone_1)),
+    ?assertEqual(maps:get(ban_time, Global), emqx_flapping:get_policy(ban_time, new_zone_1)),
+    ?assertEqual(maps:get(max_count, Global), emqx_flapping:get_policy(max_count, new_zone_1)),
+    ok.
+
+get_policy(Zone) ->
+    emqx_flapping:get_policy([window_time, ban_time, max_count], Zone).
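The new test above exercises the zone-to-global fallback for flapping_detect. A condensed sketch of that behaviour (toy zone name; the fallback of unset keys to the global values is an assumption drawn from the test itself):

%% put a partial zone override, then read keys back: set ones resolve locally,
%% unset ones are assumed to fall back to the global flapping_detect config
emqx_config:put_zone_conf(demo_zone, [flapping_detect], #{window_time => 100}),
100 = emqx_flapping:get_policy(window_time, demo_zone),
GlobalBanTime = emqx_config:get([flapping_detect, ban_time]),
GlobalBanTime = emqx_flapping:get_policy(ban_time, demo_zone).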
@@ -143,7 +143,7 @@ init_per_testcase(t_ocsp_responder_error_responses, Config) ->
         }
     },
     Conf = #{listeners => #{Type => #{Name => ListenerOpts}}},
-    ConfBin = emqx_map_lib:binary_key_map(Conf),
+    ConfBin = emqx_utils_maps:binary_key_map(Conf),
     hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}),
     emqx_config:put_listener_conf(Type, Name, [], ListenerOpts),
     snabbkaffe:start_trace(),
@@ -184,7 +184,7 @@ init_per_testcase(_TestCase, Config) ->
         }
     },
     Conf = #{listeners => #{Type => #{Name => ListenerOpts}}},
-    ConfBin = emqx_map_lib:binary_key_map(Conf),
+    ConfBin = emqx_utils_maps:binary_key_map(Conf),
     hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}),
     emqx_config:put_listener_conf(Type, Name, [], ListenerOpts),
     snabbkaffe:start_trace(),
@@ -430,7 +430,7 @@ request(Method, Url, QueryParams, Body) ->
     Opts = #{return_all => true},
     case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of
         {ok, {Reason, Headers, BodyR}} ->
-            {ok, {Reason, Headers, emqx_json:decode(BodyR, [return_maps])}};
+            {ok, {Reason, Headers, emqx_utils_json:decode(BodyR, [return_maps])}};
         Error ->
             Error
     end.
@@ -679,7 +679,7 @@ do_t_update_listener(Config) ->
     {ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
     ?assertEqual(
         undefined,
-        emqx_map_lib:deep_get([<<"ssl_options">>, <<"ocsp">>], ListenerData0, undefined)
+        emqx_utils_maps:deep_get([<<"ssl_options">>, <<"ocsp">>], ListenerData0, undefined)
     ),
     assert_no_http_get(),

@@ -702,7 +702,7 @@ do_t_update_listener(Config) ->
             }
         }
     },
-    ListenerData1 = emqx_map_lib:deep_merge(ListenerData0, OCSPConfig),
+    ListenerData1 = emqx_utils_maps:deep_merge(ListenerData0, OCSPConfig),
     {ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1),
     ?assertMatch(
         #{
@@ -722,14 +722,14 @@ do_t_update_listener(Config) ->
     %% location
     ?assertNotEqual(
         IssuerPemPath,
-        emqx_map_lib:deep_get(
+        emqx_utils_maps:deep_get(
             [<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>],
             ListenerData2
         )
     ),
     ?assertNotEqual(
         IssuerPem,
-        emqx_map_lib:deep_get(
+        emqx_utils_maps:deep_get(
             [<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>],
             ListenerData2
         )
@@ -818,7 +818,7 @@ do_t_validations(_Config) ->
     {ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),

     ListenerData1 =
-        emqx_map_lib:deep_merge(
+        emqx_utils_maps:deep_merge(
             ListenerData0,
             #{
                 <<"ssl_options">> =>
@@ -827,7 +827,7 @@ do_t_validations(_Config) ->
         ),
     {error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1),
     #{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} =
-        emqx_json:decode(ResRaw1, [return_maps]),
+        emqx_utils_json:decode(ResRaw1, [return_maps]),
     ?assertMatch(
         #{
             <<"mismatches">> :=
@@ -839,11 +839,11 @@ do_t_validations(_Config) ->
                 }
             }
         },
-        emqx_json:decode(MsgRaw1, [return_maps])
+        emqx_utils_json:decode(MsgRaw1, [return_maps])
     ),

     ListenerData2 =
-        emqx_map_lib:deep_merge(
+        emqx_utils_maps:deep_merge(
             ListenerData0,
             #{
                 <<"ssl_options">> =>
@@ -857,7 +857,7 @@ do_t_validations(_Config) ->
         ),
     {error, {_, _, ResRaw2}} = update_listener_via_api(ListenerId, ListenerData2),
     #{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw2} =
-        emqx_json:decode(ResRaw2, [return_maps]),
+        emqx_utils_json:decode(ResRaw2, [return_maps]),
     ?assertMatch(
         #{
             <<"mismatches">> :=
@@ -869,11 +869,11 @@ do_t_validations(_Config) ->
                 }
             }
         },
-        emqx_json:decode(MsgRaw2, [return_maps])
+        emqx_utils_json:decode(MsgRaw2, [return_maps])
     ),

     ListenerData3a =
-        emqx_map_lib:deep_merge(
+        emqx_utils_maps:deep_merge(
             ListenerData0,
             #{
                 <<"ssl_options">> =>
@@ -886,10 +886,12 @@ do_t_validations(_Config) ->
                 }
             }
         ),
-    ListenerData3 = emqx_map_lib:deep_remove([<<"ssl_options">>, <<"certfile">>], ListenerData3a),
+    ListenerData3 = emqx_utils_maps:deep_remove(
+        [<<"ssl_options">>, <<"certfile">>], ListenerData3a
+    ),
     {error, {_, _, ResRaw3}} = update_listener_via_api(ListenerId, ListenerData3),
     #{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw3} =
-        emqx_json:decode(ResRaw3, [return_maps]),
+        emqx_utils_json:decode(ResRaw3, [return_maps]),
     ?assertMatch(
         #{
             <<"mismatches">> :=
@@ -901,7 +903,7 @@ do_t_validations(_Config) ->
                 }
             }
         },
-        emqx_json:decode(MsgRaw3, [return_maps])
+        emqx_utils_json:decode(MsgRaw3, [return_maps])
     ),

     ok.
@@ -119,7 +119,7 @@ t_has_routes(_) ->
     ?R:delete_route(<<"devices/+/messages">>).

 t_unexpected(_) ->
-    Router = emqx_misc:proc_name(?R, 1),
+    Router = emqx_utils:proc_name(?R, 1),
     ?assertEqual(ignored, gen_server:call(Router, bad_request)),
     ?assertEqual(ok, gen_server:cast(Router, bad_message)),
     Router ! bad_info.
@@ -191,7 +191,7 @@ ssl_files_save_delete_test() ->
     FileKey = maps:get(<<"keyfile">>, SSL),
     ?assertMatch(<<"/tmp/ssl-test-dir/key-", _:16/binary>>, FileKey),
     ?assertEqual({ok, bin(test_key())}, file:read_file(FileKey)),
-    FileIssuerPem = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL),
+    FileIssuerPem = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL),
     ?assertMatch(<<"/tmp/ssl-test-dir/ocsp_issuer_pem-", _:16/binary>>, FileIssuerPem),
     ?assertEqual({ok, bin(test_key())}, file:read_file(FileIssuerPem)),
     %% no old file to delete
@@ -251,8 +251,8 @@ ssl_file_replace_test() ->
     {ok, SSL3} = emqx_tls_lib:ensure_ssl_files(Dir, SSL1),
     File1 = maps:get(<<"keyfile">>, SSL2),
     File2 = maps:get(<<"keyfile">>, SSL3),
-    IssuerPem1 = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL2),
-    IssuerPem2 = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL3),
+    IssuerPem1 = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL2),
+    IssuerPem2 = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL3),
     ?assert(filelib:is_regular(File1)),
     ?assert(filelib:is_regular(File2)),
     ?assert(filelib:is_regular(IssuerPem1)),
@@ -22,10 +22,9 @@
 -include_lib("common_test/include/ct.hrl").
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("emqx/include/emqx.hrl").
+-include_lib("emqx/include/emqx_trace.hrl").
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").

--record(emqx_trace, {name, type, filter, enable = true, start_at, end_at}).
-
 %%--------------------------------------------------------------------
 %% Setups
 %%--------------------------------------------------------------------
@@ -97,7 +96,9 @@ t_base_create_delete(_Config) ->
             type => clientid,
             name => <<"name1">>,
             start_at => Now,
-            end_at => Now + 30 * 60
+            end_at => Now + 30 * 60,
+            payload_encode => text,
+            extra => #{}
         }
     ],
     ?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])),
@@ -385,6 +386,48 @@ t_find_closed_time(_Config) ->
     ?assertEqual(1000, emqx_trace:find_closest_time(Traces, Now)),
     ok.

+t_migrate_trace(_Config) ->
+    build_new_trace_data(),
+    build_old_trace_data(),
+    reload(),
+    Traces = emqx_trace:format(emqx_trace:list()),
+    ?assertEqual(2, erlang:length(Traces)),
+    lists:foreach(
+        fun(#{name := Name, enable := Enable}) ->
+            ?assertEqual(true, Enable, Name)
+        end,
+        Traces
+    ),
+    LoggerIds = logger:get_handler_ids(),
+    lists:foreach(
+        fun(Id) ->
+            ?assertEqual(true, lists:member(Id, LoggerIds), LoggerIds)
+        end,
+        [
+            trace_topic_test_topic_migrate_new,
+            trace_topic_test_topic_migrate_old
+        ]
+    ),
+    ok.
+
+build_new_trace_data() ->
+    Now = erlang:system_time(second),
+    {ok, _} = emqx_trace:create([
+        {<<"name">>, <<"test_topic_migrate_new">>},
+        {<<"type">>, topic},
+        {<<"topic">>, <<"/test/migrate/new">>},
+        {<<"start_at">>, Now - 10}
+    ]).
+
+build_old_trace_data() ->
+    Now = erlang:system_time(second),
+    OldAttrs = [name, type, filter, enable, start_at, end_at],
+    {atomic, ok} = mnesia:transform_table(emqx_trace, ignore, OldAttrs, emqx_trace),
+    OldTrace =
+        {emqx_trace, <<"test_topic_migrate_old">>, topic, <<"topic">>, true, Now - 10, Now + 100},
+    ok = mnesia:dirty_write(OldTrace),
+    ok.
+
 reload() ->
     catch ok = gen_server:stop(emqx_trace),
     {ok, _Pid} = emqx_trace:start_link().
@@ -2,6 +2,7 @@

 {deps, [
     {emqx, {path, "../emqx"}},
+    {emqx_utils, {path, "../emqx_utils"}},
     {emqx_connector, {path, "../emqx_connector"}}
 ]}.
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_authn, [
     {description, "EMQX Authentication"},
-    {vsn, "0.1.16"},
+    {vsn, "0.1.17"},
     {modules, []},
     {registered, [emqx_authn_sup, emqx_authn_registry]},
     {applications, [kernel, stdlib, emqx_resource, emqx_connector, ehttpc, epgsql, mysql, jose]},
@@ -872,8 +872,8 @@ lookup_from_local_node(ChainName, AuthenticatorID) ->
             case emqx_resource:get_instance(ResourceId) of
                 {error, not_found} ->
                     {error, {NodeId, not_found_resource}};
-                {ok, _, #{status := Status, metrics := ResourceMetrics}} ->
-                    {ok, {NodeId, Status, Metrics, ResourceMetrics}}
+                {ok, _, #{status := Status}} ->
+                    {ok, {NodeId, Status, Metrics, emqx_resource:get_metrics(ResourceId)}}
             end
     end;
        {error, Reason} ->
@@ -929,7 +929,7 @@ aggregate_metrics([]) ->
 aggregate_metrics([HeadMetrics | AllMetrics]) ->
     ErrorLogger = fun(Reason) -> ?SLOG(info, #{msg => "bad_metrics_value", error => Reason}) end,
     Fun = fun(ElemMap, AccMap) ->
-        emqx_map_lib:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
+        emqx_utils_maps:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
     end,
     lists:foldl(Fun, HeadMetrics, AllMetrics).

@@ -1069,7 +1069,7 @@ update_user(ChainName, AuthenticatorID, UserID, UserInfo0) ->
         true ->
             serialize_error({missing_parameter, password});
         false ->
-            UserInfo = emqx_map_lib:safe_atom_key_map(UserInfo0),
+            UserInfo = emqx_utils_maps:safe_atom_key_map(UserInfo0),
             case emqx_authentication:update_user(ChainName, AuthenticatorID, UserID, UserInfo) of
                 {ok, User} ->
                     {200, User};
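aggregate_metrics/1 above folds per-node metrics maps into one. A toy illustration of that fold (hypothetical input maps; emqx_utils_maps:best_effort_recursive_sum/3 is assumed, from its use here, to sum numeric leaves recursively and report anything non-summable through the logger fun):

A = #{nomatch => 1, counters => #{success => 2, failed => 0}},
B = #{nomatch => 0, counters => #{success => 3, failed => 1}},
Log = fun(Reason) -> io:format("bad metrics value: ~p~n", [Reason]) end,
Summed = emqx_utils_maps:best_effort_recursive_sum(A, B, Log),
%% expected result (assumption): #{nomatch => 1, counters => #{success => 5, failed => 1}}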
@@ -357,7 +357,7 @@ qs([{K, V} | More], Acc) ->
     qs(More, [["&", uri_encode(K), "=", uri_encode(V)] | Acc]).

 serialize_body(<<"application/json">>, Body) ->
-    emqx_json:encode(Body);
+    emqx_utils_json:encode(Body);
 serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
     qs(maps:to_list(Body)).

@@ -395,7 +395,7 @@ safely_parse_body(ContentType, Body) ->
 end.

 parse_body(<<"application/json", _/binary>>, Body) ->
-    {ok, emqx_json:decode(Body, [return_maps])};
+    {ok, emqx_utils_json:decode(Body, [return_maps])};
 parse_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) ->
     Flags = [<<"result">>, <<"is_superuser">>],
     RawMap = maps:from_list(cow_qs:parse_qs(Body)),
@@ -99,7 +99,7 @@ handle_info(
             State1;
         {StatusLine, Headers, Body} ->
             try
-                JWKS = jose_jwk:from(emqx_json:decode(Body, [return_maps])),
+                JWKS = jose_jwk:from(emqx_utils_json:decode(Body, [return_maps])),
                 {_, JWKs} = JWKS#jose_jwk.keys,
                 State1#{jwks := JWKs}
             catch
@@ -407,7 +407,7 @@ do_verify(_JWT, [], _VerifyClaims) ->
 do_verify(JWT, [JWK | More], VerifyClaims) ->
     try jose_jws:verify(JWK, JWT) of
         {true, Payload, _JWT} ->
-            Claims0 = emqx_json:decode(Payload, [return_maps]),
+            Claims0 = emqx_utils_json:decode(Payload, [return_maps]),
             Claims = try_convert_to_num(Claims0, [<<"exp">>, <<"iat">>, <<"nbf">>]),
             case verify_claims(Claims, VerifyClaims) of
                 ok ->
@@ -332,7 +332,7 @@ run_fuzzy_filter(

 %% Example: data/user-credentials.json
 import_users_from_json(Bin, #{user_group := UserGroup}) ->
-    case emqx_json:safe_decode(Bin, [return_maps]) of
+    case emqx_utils_json:safe_decode(Bin, [return_maps]) of
        {ok, List} ->
            trans(fun ?MODULE:import/2, [UserGroup, List]);
        {error, Reason} ->
@@ -29,7 +29,7 @@
 -define(assertAuthenticatorsMatch(Guard, Path),
     (fun() ->
         {ok, 200, Response} = request(get, uri(Path)),
-        ?assertMatch(Guard, jiffy:decode(Response, [return_maps]))
+        ?assertMatch(Guard, emqx_utils_json:decode(Response, [return_maps]))
     end)()
 ).

@@ -234,7 +234,7 @@ test_authenticator(PathPrefix) ->
         get,
         uri(PathPrefix ++ [?CONF_NS, "password_based:http", "status"])
     ),
-    {ok, RList} = emqx_json:safe_decode(Res),
+    {ok, RList} = emqx_utils_json:safe_decode(Res),
     Snd = fun({_, Val}) -> Val end,
     LookupVal = fun LookupV(List, RestJson) ->
         case List of
@@ -353,7 +353,7 @@ test_authenticator_users(PathPrefix) ->
                     <<"success">> := 0,
                     <<"nomatch">> := 1
                 }
-            } = jiffy:decode(PageData0, [return_maps]);
+            } = emqx_utils_json:decode(PageData0, [return_maps]);
         ["listeners", 'tcp:default'] ->
             #{
                 <<"metrics">> := #{
@@ -361,7 +361,7 @@ test_authenticator_users(PathPrefix) ->
                    <<"success">> := 0,
                    <<"nomatch">> := 1
                }
-            } = jiffy:decode(PageData0, [return_maps])
+            } = emqx_utils_json:decode(PageData0, [return_maps])
     end,

     InvalidUsers = [
@@ -384,7 +384,7 @@ test_authenticator_users(PathPrefix) ->
     lists:foreach(
         fun(User) ->
             {ok, 201, UserData} = request(post, UsersUri, User),
-            CreatedUser = jiffy:decode(UserData, [return_maps]),
+            CreatedUser = emqx_utils_json:decode(UserData, [return_maps]),
             ?assertMatch(#{<<"user_id">> := _}, CreatedUser)
         end,
         ValidUsers
@@ -411,7 +411,7 @@ test_authenticator_users(PathPrefix) ->
                     <<"success">> := 1,
                     <<"nomatch">> := 1
                 }
-            } = jiffy:decode(PageData01, [return_maps]);
+            } = emqx_utils_json:decode(PageData01, [return_maps]);
         ["listeners", 'tcp:default'] ->
             #{
                 <<"metrics">> := #{
@@ -419,7 +419,7 @@ test_authenticator_users(PathPrefix) ->
                    <<"success">> := 1,
                    <<"nomatch">> := 1
                }
-            } = jiffy:decode(PageData01, [return_maps])
+            } = emqx_utils_json:decode(PageData01, [return_maps])
     end,

     {ok, 200, Page1Data} = request(get, UsersUri ++ "?page=1&limit=2"),
@@ -433,7 +433,7 @@ test_authenticator_users(PathPrefix) ->
             <<"count">> := 3
         }
     } =
-        jiffy:decode(Page1Data, [return_maps]),
+        emqx_utils_json:decode(Page1Data, [return_maps]),

     {ok, 200, Page2Data} = request(get, UsersUri ++ "?page=2&limit=2"),

@@ -445,7 +445,7 @@ test_authenticator_users(PathPrefix) ->
             <<"limit">> := 2,
             <<"count">> := 3
         }
-    } = jiffy:decode(Page2Data, [return_maps]),
+    } = emqx_utils_json:decode(Page2Data, [return_maps]),

     ?assertEqual(2, length(Page1Users)),
     ?assertEqual(1, length(Page2Users)),
@@ -465,7 +465,7 @@ test_authenticator_users(PathPrefix) ->
             <<"limit">> := 3,
             <<"count">> := 1
         }
-    } = jiffy:decode(Super1Data, [return_maps]),
+    } = emqx_utils_json:decode(Super1Data, [return_maps]),

     ?assertEqual(
         [<<"u2">>],
@@ -482,7 +482,7 @@ test_authenticator_users(PathPrefix) ->
             <<"limit">> := 3,
             <<"count">> := 2
         }
-    } = jiffy:decode(Super2Data, [return_maps]),
+    } = emqx_utils_json:decode(Super2Data, [return_maps]),

     ?assertEqual(
         [<<"u1">>, <<"u3">>],
@@ -509,7 +509,7 @@ test_authenticator_user(PathPrefix) ->

     {ok, 200, UserData} = request(get, UsersUri ++ "/u1"),

-    FetchedUser = jiffy:decode(UserData, [return_maps]),
+    FetchedUser = emqx_utils_json:decode(UserData, [return_maps]),
     ?assertMatch(#{<<"user_id">> := <<"u1">>}, FetchedUser),
     ?assertNotMatch(#{<<"password">> := _}, FetchedUser),
@@ -41,7 +41,7 @@

 -define(SERVER_RESPONSE_JSON(Result), ?SERVER_RESPONSE_JSON(Result, false)).
 -define(SERVER_RESPONSE_JSON(Result, IsSuperuser),
-    jiffy:encode(#{
+    emqx_utils_json:encode(#{
         result => Result,
         is_superuser => IsSuperuser
     })
@@ -172,11 +172,11 @@ t_no_value_for_placeholder(_Config) ->
             #{
                 <<"cert_subject">> := <<"">>,
                 <<"cert_common_name">> := <<"">>
-            } = jiffy:decode(RawBody, [return_maps]),
+            } = emqx_utils_json:decode(RawBody, [return_maps]),
             Req = cowboy_req:reply(
                 200,
                 #{<<"content-type">> => <<"application/json">>},
-                jiffy:encode(#{result => allow, is_superuser => false}),
+                emqx_utils_json:encode(#{result => allow, is_superuser => false}),
                 Req1
             ),
             {ok, Req, State}
@@ -444,7 +444,7 @@ samples() ->
             Req = cowboy_req:reply(
                 200,
                 #{<<"content-type">> => <<"application/json">>},
-                jiffy:encode(#{result => allow, is_superuser => false}),
+                emqx_utils_json:encode(#{result => allow, is_superuser => false}),
                 Req0
             ),
             {ok, Req, State}
@@ -459,7 +459,7 @@ samples() ->
             Req = cowboy_req:reply(
                 200,
                 #{<<"content-type">> => <<"application/json">>},
-                jiffy:encode(#{result => allow, is_superuser => true}),
+                emqx_utils_json:encode(#{result => allow, is_superuser => true}),
                 Req0
             ),
             {ok, Req, State}
@@ -512,11 +512,11 @@ samples() ->
             #{
                 <<"username">> := <<"plain">>,
                 <<"password">> := <<"plain">>
-            } = jiffy:decode(RawBody, [return_maps]),
+            } = emqx_utils_json:decode(RawBody, [return_maps]),
             Req = cowboy_req:reply(
                 200,
                 #{<<"content-type">> => <<"application/json">>},
-                jiffy:encode(#{result => allow, is_superuser => false}),
+                emqx_utils_json:encode(#{result => allow, is_superuser => false}),
                 Req1
             ),
             {ok, Req, State}
@@ -539,7 +539,7 @@ samples() ->
             Req = cowboy_req:reply(
                 200,
                 #{<<"content-type">> => <<"application/json">>},
-                jiffy:encode(#{result => allow, is_superuser => false}),
+                emqx_utils_json:encode(#{result => allow, is_superuser => false}),
                 Req1
             ),
             {ok, Req, State}
@@ -565,11 +565,11 @@ samples() ->
                 <<"peerhost">> := <<"127.0.0.1">>,
                 <<"cert_subject">> := <<"cert_subject_data">>,
                 <<"cert_common_name">> := <<"cert_common_name_data">>
-            } = jiffy:decode(RawBody, [return_maps]),
+            } = emqx_utils_json:decode(RawBody, [return_maps]),
             Req = cowboy_req:reply(
                 200,
                 #{<<"content-type">> => <<"application/json">>},
-                jiffy:encode(#{result => allow, is_superuser => false}),
|
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||||
Req1
|
Req1
|
||||||
),
|
),
|
||||||
{ok, Req, State}
|
{ok, Req, State}
|
||||||
|
|
|
@ -168,7 +168,7 @@ cowboy_handler(Req0, State) ->
|
||||||
Req = cowboy_req:reply(
|
Req = cowboy_req:reply(
|
||||||
200,
|
200,
|
||||||
#{<<"content-type">> => <<"application/json">>},
|
#{<<"content-type">> => <<"application/json">>},
|
||||||
jiffy:encode(#{result => allow, is_superuser => false}),
|
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||||
Req0
|
Req0
|
||||||
),
|
),
|
||||||
{ok, Req, State}.
|
{ok, Req, State}.
|
||||||
|
|
|
@ -467,7 +467,7 @@ jwks_handler(Req0, State) ->
|
||||||
Req = cowboy_req:reply(
|
Req = cowboy_req:reply(
|
||||||
200,
|
200,
|
||||||
#{<<"content-type">> => <<"application/json">>},
|
#{<<"content-type">> => <<"application/json">>},
|
||||||
jiffy:encode(JWKS),
|
emqx_utils_json:encode(JWKS),
|
||||||
Req0
|
Req0
|
||||||
),
|
),
|
||||||
{ok, Req, State}.
|
{ok, Req, State}.
|
||||||
|
|
|
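The hunks above mechanically swap jiffy:decode/2, jsx:decode/1 and the old emqx_json calls for the emqx_utils_json wrapper. A minimal sketch of the equivalence these test suites rely on (the sample payload and values are made up; the decode option follows what the diff shows):

%% Both directions are expected to behave like the jiffy calls they replace,
%% which is why the change throughout these suites is a plain rename.
Payload = <<"{\"success\":1,\"nomatch\":1}">>,
#{<<"success">> := 1, <<"nomatch">> := 1} = emqx_utils_json:decode(Payload, [return_maps]),
Encoded = emqx_utils_json:encode(#{result => allow, is_superuser => false}),
#{<<"result">> := <<"allow">>} = emqx_utils_json:decode(Encoded, [return_maps]).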
@ -3,6 +3,7 @@
|
||||||
{erl_opts, [debug_info, nowarn_unused_import]}.
|
{erl_opts, [debug_info, nowarn_unused_import]}.
|
||||||
{deps, [
|
{deps, [
|
||||||
{emqx, {path, "../emqx"}},
|
{emqx, {path, "../emqx"}},
|
||||||
|
{emqx_utils, {path, "../emqx_utils"}},
|
||||||
{emqx_connector, {path, "../emqx_connector"}}
|
{emqx_connector, {path, "../emqx_connector"}}
|
||||||
]}.
|
]}.
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_authz, [
|
{application, emqx_authz, [
|
||||||
{description, "An OTP application"},
|
{description, "An OTP application"},
|
||||||
{vsn, "0.1.16"},
|
{vsn, "0.1.17"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_authz_app, []}},
|
{mod, {emqx_authz_app, []}},
|
||||||
{applications, [
|
{applications, [
|
||||||
|
|
|
@ -344,8 +344,8 @@ lookup_from_local_node(Type) ->
|
||||||
case emqx_resource:get_instance(ResourceId) of
|
case emqx_resource:get_instance(ResourceId) of
|
||||||
{error, not_found} ->
|
{error, not_found} ->
|
||||||
{error, {NodeId, not_found_resource}};
|
{error, {NodeId, not_found_resource}};
|
||||||
{ok, _, #{status := Status, metrics := ResourceMetrics}} ->
|
{ok, _, #{status := Status}} ->
|
||||||
{ok, {NodeId, Status, Metrics, ResourceMetrics}}
|
{ok, {NodeId, Status, Metrics, emqx_resource:get_metrics(ResourceId)}}
|
||||||
end;
|
end;
|
||||||
_ ->
|
_ ->
|
||||||
Metrics = emqx_metrics_worker:get_metrics(authz_metrics, Type),
|
Metrics = emqx_metrics_worker:get_metrics(authz_metrics, Type),
|
||||||
|
@ -403,7 +403,7 @@ aggregate_metrics([]) ->
|
||||||
aggregate_metrics([HeadMetrics | AllMetrics]) ->
|
aggregate_metrics([HeadMetrics | AllMetrics]) ->
|
||||||
ErrorLogger = fun(Reason) -> ?SLOG(info, #{msg => "bad_metrics_value", error => Reason}) end,
|
ErrorLogger = fun(Reason) -> ?SLOG(info, #{msg => "bad_metrics_value", error => Reason}) end,
|
||||||
Fun = fun(ElemMap, AccMap) ->
|
Fun = fun(ElemMap, AccMap) ->
|
||||||
emqx_map_lib:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
|
emqx_utils_maps:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
|
||||||
end,
|
end,
|
||||||
lists:foldl(Fun, HeadMetrics, AllMetrics).
|
lists:foldl(Fun, HeadMetrics, AllMetrics).
|
||||||
|
|
||||||
|
|
|
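The aggregate_metrics/1 change above folds per-node metric maps with a best-effort recursive sum from emqx_utils_maps. The sketch below only illustrates the idea of such a fold; the function name sum_maps/3 is invented for the example and the real helper handles more cases:

%% Recursively add numeric leaves of two metric maps; nested maps are merged,
%% and mismatching values are reported through the supplied error logger
%% while the accumulator value is kept.
sum_maps(AccMap, ElemMap, ErrorLogger) ->
    maps:fold(
        fun(Key, Value, Acc) ->
            case maps:find(Key, Acc) of
                {ok, Old} when is_number(Old), is_number(Value) ->
                    Acc#{Key => Old + Value};
                {ok, Old} when is_map(Old), is_map(Value) ->
                    Acc#{Key => sum_maps(Old, Value, ErrorLogger)};
                {ok, Old} ->
                    ErrorLogger({bad_metrics_value, Key, Old, Value}),
                    Acc;
                error ->
                    Acc#{Key => Value}
            end
        end,
        AccMap,
        ElemMap
    ).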
@ -47,7 +47,7 @@ create(#{path := Path} = Source) ->
|
||||||
?SLOG(alert, #{
|
?SLOG(alert, #{
|
||||||
msg => failed_to_read_acl_file,
|
msg => failed_to_read_acl_file,
|
||||||
path => Path,
|
path => Path,
|
||||||
explain => emqx_misc:explain_posix(Reason)
|
explain => emqx_utils:explain_posix(Reason)
|
||||||
}),
|
}),
|
||||||
throw(failed_to_read_acl_file);
|
throw(failed_to_read_acl_file);
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
|
|
|
@ -227,7 +227,7 @@ encode_path(Path) ->
|
||||||
lists:flatten(["/" ++ Part || Part <- lists:map(fun uri_encode/1, Parts)]).
|
lists:flatten(["/" ++ Part || Part <- lists:map(fun uri_encode/1, Parts)]).
|
||||||
|
|
||||||
serialize_body(<<"application/json">>, Body) ->
|
serialize_body(<<"application/json">>, Body) ->
|
||||||
jsx:encode(Body);
|
emqx_utils_json:encode(Body);
|
||||||
serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
|
serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
|
||||||
query_string(Body).
|
query_string(Body).
|
||||||
|
|
||||||
|
|
|
@ -337,7 +337,7 @@ check_ssl_opts(Conf) ->
|
||||||
(#{<<"url">> := Url} = Source) ->
|
(#{<<"url">> := Url} = Source) ->
|
||||||
case emqx_authz_http:parse_url(Url) of
|
case emqx_authz_http:parse_url(Url) of
|
||||||
{<<"https", _/binary>>, _, _} ->
|
{<<"https", _/binary>>, _, _} ->
|
||||||
case emqx_map_lib:deep_find([<<"ssl">>, <<"enable">>], Source) of
|
case emqx_utils_maps:deep_find([<<"ssl">>, <<"enable">>], Source) of
|
||||||
{ok, true} -> true;
|
{ok, true} -> true;
|
||||||
{ok, false} -> throw({ssl_not_enable, Url});
|
{ok, false} -> throw({ssl_not_enable, Url});
|
||||||
_ -> throw({ssl_enable_not_found, Url})
|
_ -> throw({ssl_enable_not_found, Url})
|
||||||
|
|
|
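check_ssl_opts/1 above now reads the nested ssl.enable flag with emqx_utils_maps:deep_find/2. A small illustration of the return shape the diff relies on (the source map here is made up):

%% deep_find/2 walks the map by key path and returns {ok, Value} when every
%% key on the path exists; anything else falls through to the error clauses
%% shown in the hunk above.
Source = #{<<"url">> => <<"https://example.com">>,
           <<"ssl">> => #{<<"enable">> => true}},
{ok, true} = emqx_utils_maps:deep_find([<<"ssl">>, <<"enable">>], Source).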
@ -144,7 +144,7 @@ parse_http_resp_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) ->
|
||||||
end;
|
end;
|
||||||
parse_http_resp_body(<<"application/json", _/binary>>, Body) ->
|
parse_http_resp_body(<<"application/json", _/binary>>, Body) ->
|
||||||
try
|
try
|
||||||
result(emqx_json:decode(Body, [return_maps]))
|
result(emqx_utils_json:decode(Body, [return_maps]))
|
||||||
catch
|
catch
|
||||||
_:_ -> error
|
_:_ -> error
|
||||||
end.
|
end.
|
||||||
|
|
|
@ -60,19 +60,19 @@ set_special_configs(emqx_authz) ->
|
||||||
set_special_configs(_App) ->
|
set_special_configs(_App) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_clean_cahce(_) ->
|
t_clean_cache(_) ->
|
||||||
{ok, C} = emqtt:start_link([{clientid, <<"emqx0">>}, {username, <<"emqx0">>}]),
|
{ok, C} = emqtt:start_link([{clientid, <<"emqx0">>}, {username, <<"emqx0">>}]),
|
||||||
{ok, _} = emqtt:connect(C),
|
{ok, _} = emqtt:connect(C),
|
||||||
{ok, _, _} = emqtt:subscribe(C, <<"a/b/c">>, 0),
|
{ok, _, _} = emqtt:subscribe(C, <<"a/b/c">>, 0),
|
||||||
ok = emqtt:publish(C, <<"a/b/c">>, <<"{\"x\":1,\"y\":1}">>, 0),
|
ok = emqtt:publish(C, <<"a/b/c">>, <<"{\"x\":1,\"y\":1}">>, 0),
|
||||||
|
|
||||||
{ok, 200, Result3} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
|
{ok, 200, Result3} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
|
||||||
?assertEqual(2, length(emqx_json:decode(Result3))),
|
?assertEqual(2, length(emqx_utils_json:decode(Result3))),
|
||||||
|
|
||||||
request(delete, uri(["authorization", "cache"])),
|
request(delete, uri(["authorization", "cache"])),
|
||||||
|
|
||||||
{ok, 200, Result4} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
|
{ok, 200, Result4} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
|
||||||
?assertEqual(0, length(emqx_json:decode(Result4))),
|
?assertEqual(0, length(emqx_utils_json:decode(Result4))),
|
||||||
|
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
|
|
@ -95,7 +95,7 @@ t_api(_) ->
|
||||||
<<"page">> := 1,
|
<<"page">> := 1,
|
||||||
<<"hasnext">> := false
|
<<"hasnext">> := false
|
||||||
}
|
}
|
||||||
} = jsx:decode(Request1),
|
} = emqx_utils_json:decode(Request1),
|
||||||
?assertEqual(3, length(Rules1)),
|
?assertEqual(3, length(Rules1)),
|
||||||
|
|
||||||
{ok, 200, Request1_1} =
|
{ok, 200, Request1_1} =
|
||||||
|
@ -119,7 +119,7 @@ t_api(_) ->
|
||||||
<<"hasnext">> => false
|
<<"hasnext">> => false
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
jsx:decode(Request1_1)
|
emqx_utils_json:decode(Request1_1)
|
||||||
),
|
),
|
||||||
|
|
||||||
{ok, 200, Request2} =
|
{ok, 200, Request2} =
|
||||||
|
@ -128,7 +128,7 @@ t_api(_) ->
|
||||||
uri(["authorization", "sources", "built_in_database", "rules", "users", "user1"]),
|
uri(["authorization", "sources", "built_in_database", "rules", "users", "user1"]),
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
#{<<"username">> := <<"user1">>, <<"rules">> := Rules1} = jsx:decode(Request2),
|
#{<<"username">> := <<"user1">>, <<"rules">> := Rules1} = emqx_utils_json:decode(Request2),
|
||||||
|
|
||||||
{ok, 204, _} =
|
{ok, 204, _} =
|
||||||
request(
|
request(
|
||||||
|
@ -142,7 +142,7 @@ t_api(_) ->
|
||||||
uri(["authorization", "sources", "built_in_database", "rules", "users", "user1"]),
|
uri(["authorization", "sources", "built_in_database", "rules", "users", "user1"]),
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
#{<<"username">> := <<"user1">>, <<"rules">> := Rules2} = jsx:decode(Request3),
|
#{<<"username">> := <<"user1">>, <<"rules">> := Rules2} = emqx_utils_json:decode(Request3),
|
||||||
?assertEqual(0, length(Rules2)),
|
?assertEqual(0, length(Rules2)),
|
||||||
|
|
||||||
{ok, 204, _} =
|
{ok, 204, _} =
|
||||||
|
@ -202,8 +202,8 @@ t_api(_) ->
|
||||||
<<"data">> := [#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3}],
|
<<"data">> := [#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3}],
|
||||||
<<"meta">> := #{<<"count">> := 1, <<"limit">> := 100, <<"page">> := 1}
|
<<"meta">> := #{<<"count">> := 1, <<"limit">> := 100, <<"page">> := 1}
|
||||||
} =
|
} =
|
||||||
jsx:decode(Request4),
|
emqx_utils_json:decode(Request4),
|
||||||
#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3} = jsx:decode(Request5),
|
#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3} = emqx_utils_json:decode(Request5),
|
||||||
?assertEqual(3, length(Rules3)),
|
?assertEqual(3, length(Rules3)),
|
||||||
|
|
||||||
{ok, 204, _} =
|
{ok, 204, _} =
|
||||||
|
@ -218,7 +218,7 @@ t_api(_) ->
|
||||||
uri(["authorization", "sources", "built_in_database", "rules", "clients", "client1"]),
|
uri(["authorization", "sources", "built_in_database", "rules", "clients", "client1"]),
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules4} = jsx:decode(Request6),
|
#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules4} = emqx_utils_json:decode(Request6),
|
||||||
?assertEqual(0, length(Rules4)),
|
?assertEqual(0, length(Rules4)),
|
||||||
|
|
||||||
{ok, 204, _} =
|
{ok, 204, _} =
|
||||||
|
@ -252,7 +252,7 @@ t_api(_) ->
|
||||||
uri(["authorization", "sources", "built_in_database", "rules", "all"]),
|
uri(["authorization", "sources", "built_in_database", "rules", "all"]),
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
#{<<"rules">> := Rules5} = jsx:decode(Request7),
|
#{<<"rules">> := Rules5} = emqx_utils_json:decode(Request7),
|
||||||
?assertEqual(3, length(Rules5)),
|
?assertEqual(3, length(Rules5)),
|
||||||
|
|
||||||
{ok, 204, _} =
|
{ok, 204, _} =
|
||||||
|
@ -267,7 +267,7 @@ t_api(_) ->
|
||||||
uri(["authorization", "sources", "built_in_database", "rules", "all"]),
|
uri(["authorization", "sources", "built_in_database", "rules", "all"]),
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
#{<<"rules">> := Rules6} = jsx:decode(Request8),
|
#{<<"rules">> := Rules6} = emqx_utils_json:decode(Request8),
|
||||||
?assertEqual(0, length(Rules6)),
|
?assertEqual(0, length(Rules6)),
|
||||||
|
|
||||||
{ok, 204, _} =
|
{ok, 204, _} =
|
||||||
|
@ -285,7 +285,7 @@ t_api(_) ->
|
||||||
uri(["authorization", "sources", "built_in_database", "rules", "users?page=2&limit=5"]),
|
uri(["authorization", "sources", "built_in_database", "rules", "users?page=2&limit=5"]),
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
#{<<"data">> := Data1} = jsx:decode(Request9),
|
#{<<"data">> := Data1} = emqx_utils_json:decode(Request9),
|
||||||
?assertEqual(5, length(Data1)),
|
?assertEqual(5, length(Data1)),
|
||||||
|
|
||||||
{ok, 204, _} =
|
{ok, 204, _} =
|
||||||
|
@ -303,7 +303,7 @@ t_api(_) ->
|
||||||
uri(["authorization", "sources", "built_in_database", "rules", "clients?limit=5"]),
|
uri(["authorization", "sources", "built_in_database", "rules", "clients?limit=5"]),
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
#{<<"data">> := Data2} = jsx:decode(Request10),
|
#{<<"data">> := Data2} = emqx_utils_json:decode(Request10),
|
||||||
?assertEqual(5, length(Data2)),
|
?assertEqual(5, length(Data2)),
|
||||||
|
|
||||||
{ok, 400, Msg1} =
|
{ok, 400, Msg1} =
|
||||||
|
|
|
@ -76,7 +76,7 @@ t_api(_) ->
|
||||||
|
|
||||||
{ok, 200, Result1} = request(put, uri(["authorization", "settings"]), Settings1),
|
{ok, 200, Result1} = request(put, uri(["authorization", "settings"]), Settings1),
|
||||||
{ok, 200, Result1} = request(get, uri(["authorization", "settings"]), []),
|
{ok, 200, Result1} = request(get, uri(["authorization", "settings"]), []),
|
||||||
?assertEqual(Settings1, jsx:decode(Result1)),
|
?assertEqual(Settings1, emqx_utils_json:decode(Result1)),
|
||||||
|
|
||||||
Settings2 = #{
|
Settings2 = #{
|
||||||
<<"no_match">> => <<"allow">>,
|
<<"no_match">> => <<"allow">>,
|
||||||
|
@ -90,7 +90,7 @@ t_api(_) ->
|
||||||
|
|
||||||
{ok, 200, Result2} = request(put, uri(["authorization", "settings"]), Settings2),
|
{ok, 200, Result2} = request(put, uri(["authorization", "settings"]), Settings2),
|
||||||
{ok, 200, Result2} = request(get, uri(["authorization", "settings"]), []),
|
{ok, 200, Result2} = request(get, uri(["authorization", "settings"]), []),
|
||||||
?assertEqual(Settings2, jsx:decode(Result2)),
|
?assertEqual(Settings2, emqx_utils_json:decode(Result2)),
|
||||||
|
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
|
|
@ -148,8 +148,8 @@ set_special_configs(_App) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
init_per_testcase(t_api, Config) ->
|
init_per_testcase(t_api, Config) ->
|
||||||
meck:new(emqx_misc, [non_strict, passthrough, no_history, no_link]),
|
meck:new(emqx_utils, [non_strict, passthrough, no_history, no_link]),
|
||||||
meck:expect(emqx_misc, gen_id, fun() -> "fake" end),
|
meck:expect(emqx_utils, gen_id, fun() -> "fake" end),
|
||||||
|
|
||||||
meck:new(emqx, [non_strict, passthrough, no_history, no_link]),
|
meck:new(emqx, [non_strict, passthrough, no_history, no_link]),
|
||||||
meck:expect(
|
meck:expect(
|
||||||
|
@ -165,7 +165,7 @@ init_per_testcase(_, Config) ->
|
||||||
Config.
|
Config.
|
||||||
|
|
||||||
end_per_testcase(t_api, _Config) ->
|
end_per_testcase(t_api, _Config) ->
|
||||||
meck:unload(emqx_misc),
|
meck:unload(emqx_utils),
|
||||||
meck:unload(emqx),
|
meck:unload(emqx),
|
||||||
ok;
|
ok;
|
||||||
end_per_testcase(_, _Config) ->
|
end_per_testcase(_, _Config) ->
|
||||||
|
@ -182,7 +182,7 @@ t_api(_) ->
|
||||||
{ok, 404, ErrResult} = request(get, uri(["authorization", "sources", "http"]), []),
|
{ok, 404, ErrResult} = request(get, uri(["authorization", "sources", "http"]), []),
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{<<"code">> := <<"NOT_FOUND">>, <<"message">> := <<"Not found: http">>},
|
#{<<"code">> := <<"NOT_FOUND">>, <<"message">> := <<"Not found: http">>},
|
||||||
emqx_json:decode(ErrResult, [return_maps])
|
emqx_utils_json:decode(ErrResult, [return_maps])
|
||||||
),
|
),
|
||||||
|
|
||||||
[
|
[
|
||||||
|
@ -215,7 +215,8 @@ t_api(_) ->
|
||||||
),
|
),
|
||||||
{ok, 200, Result3} = request(get, uri(["authorization", "sources", "http"]), []),
|
{ok, 200, Result3} = request(get, uri(["authorization", "sources", "http"]), []),
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{<<"type">> := <<"http">>, <<"enable">> := false}, emqx_json:decode(Result3, [return_maps])
|
#{<<"type">> := <<"http">>, <<"enable">> := false},
|
||||||
|
emqx_utils_json:decode(Result3, [return_maps])
|
||||||
),
|
),
|
||||||
|
|
||||||
Keyfile = emqx_common_test_helpers:app_path(
|
Keyfile = emqx_common_test_helpers:app_path(
|
||||||
|
@ -253,7 +254,7 @@ t_api(_) ->
|
||||||
<<"total">> := 0,
|
<<"total">> := 0,
|
||||||
<<"nomatch">> := 0
|
<<"nomatch">> := 0
|
||||||
}
|
}
|
||||||
} = emqx_json:decode(Status4, [return_maps]),
|
} = emqx_utils_json:decode(Status4, [return_maps]),
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{
|
#{
|
||||||
<<"type">> := <<"mongodb">>,
|
<<"type">> := <<"mongodb">>,
|
||||||
|
@ -265,7 +266,7 @@ t_api(_) ->
|
||||||
<<"verify">> := <<"verify_none">>
|
<<"verify">> := <<"verify_none">>
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
emqx_json:decode(Result4, [return_maps])
|
emqx_utils_json:decode(Result4, [return_maps])
|
||||||
),
|
),
|
||||||
|
|
||||||
{ok, Cacert} = file:read_file(Cacertfile),
|
{ok, Cacert} = file:read_file(Cacertfile),
|
||||||
|
@ -297,7 +298,7 @@ t_api(_) ->
|
||||||
<<"verify">> := <<"verify_none">>
|
<<"verify">> := <<"verify_none">>
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
emqx_json:decode(Result5, [return_maps])
|
emqx_utils_json:decode(Result5, [return_maps])
|
||||||
),
|
),
|
||||||
|
|
||||||
{ok, 200, Status5_1} = request(get, uri(["authorization", "sources", "mongodb", "status"]), []),
|
{ok, 200, Status5_1} = request(get, uri(["authorization", "sources", "mongodb", "status"]), []),
|
||||||
|
@ -308,7 +309,7 @@ t_api(_) ->
|
||||||
<<"total">> := 0,
|
<<"total">> := 0,
|
||||||
<<"nomatch">> := 0
|
<<"nomatch">> := 0
|
||||||
}
|
}
|
||||||
} = emqx_json:decode(Status5_1, [return_maps]),
|
} = emqx_utils_json:decode(Status5_1, [return_maps]),
|
||||||
|
|
||||||
#{
|
#{
|
||||||
ssl := #{
|
ssl := #{
|
||||||
|
@ -355,7 +356,7 @@ t_api(_) ->
|
||||||
<<"code">> := <<"BAD_REQUEST">>,
|
<<"code">> := <<"BAD_REQUEST">>,
|
||||||
<<"message">> := <<"Type mismatch", _/binary>>
|
<<"message">> := <<"Type mismatch", _/binary>>
|
||||||
},
|
},
|
||||||
emqx_json:decode(TypeMismatch, [return_maps])
|
emqx_utils_json:decode(TypeMismatch, [return_maps])
|
||||||
),
|
),
|
||||||
|
|
||||||
lists:foreach(
|
lists:foreach(
|
||||||
|
@ -443,7 +444,7 @@ t_api(_) ->
|
||||||
<<"total">> := 1,
|
<<"total">> := 1,
|
||||||
<<"nomatch">> := 0
|
<<"nomatch">> := 0
|
||||||
}
|
}
|
||||||
} = emqx_json:decode(Status5, [return_maps])
|
} = emqx_utils_json:decode(Status5, [return_maps])
|
||||||
end
|
end
|
||||||
),
|
),
|
||||||
|
|
||||||
|
@ -469,7 +470,7 @@ t_api(_) ->
|
||||||
<<"total">> := 2,
|
<<"total">> := 2,
|
||||||
<<"nomatch">> := 0
|
<<"nomatch">> := 0
|
||||||
}
|
}
|
||||||
} = emqx_json:decode(Status6, [return_maps])
|
} = emqx_utils_json:decode(Status6, [return_maps])
|
||||||
end
|
end
|
||||||
),
|
),
|
||||||
|
|
||||||
|
@ -495,7 +496,7 @@ t_api(_) ->
|
||||||
<<"total">> := 3,
|
<<"total">> := 3,
|
||||||
<<"nomatch">> := 0
|
<<"nomatch">> := 0
|
||||||
}
|
}
|
||||||
} = emqx_json:decode(Status7, [return_maps])
|
} = emqx_utils_json:decode(Status7, [return_maps])
|
||||||
end
|
end
|
||||||
),
|
),
|
||||||
ok.
|
ok.
|
||||||
|
@ -621,7 +622,7 @@ t_aggregate_metrics(_) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
get_sources(Result) ->
|
get_sources(Result) ->
|
||||||
maps:get(<<"sources">>, emqx_json:decode(Result, [return_maps])).
|
maps:get(<<"sources">>, emqx_utils_json:decode(Result, [return_maps])).
|
||||||
|
|
||||||
data_dir() -> emqx:data_dir().
|
data_dir() -> emqx:data_dir().
|
||||||
|
|
||||||
|
|
|
@ -311,7 +311,7 @@ t_json_body(_Config) ->
|
||||||
<<"topic">> := <<"t">>,
|
<<"topic">> := <<"t">>,
|
||||||
<<"action">> := <<"publish">>
|
<<"action">> := <<"publish">>
|
||||||
},
|
},
|
||||||
jiffy:decode(RawBody, [return_maps])
|
emqx_utils_json:decode(RawBody, [return_maps])
|
||||||
),
|
),
|
||||||
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
||||||
end,
|
end,
|
||||||
|
@ -366,7 +366,7 @@ t_placeholder_and_body(_Config) ->
|
||||||
<<"CN">> := ?PH_CERT_CN_NAME,
|
<<"CN">> := ?PH_CERT_CN_NAME,
|
||||||
<<"CS">> := ?PH_CERT_SUBJECT
|
<<"CS">> := ?PH_CERT_SUBJECT
|
||||||
},
|
},
|
||||||
jiffy:decode(PostVars, [return_maps])
|
emqx_utils_json:decode(PostVars, [return_maps])
|
||||||
),
|
),
|
||||||
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
||||||
end,
|
end,
|
||||||
|
@ -418,7 +418,7 @@ t_no_value_for_placeholder(_Config) ->
|
||||||
#{
|
#{
|
||||||
<<"mountpoint">> := <<"[]">>
|
<<"mountpoint">> := <<"[]">>
|
||||||
},
|
},
|
||||||
jiffy:decode(RawBody, [return_maps])
|
emqx_utils_json:decode(RawBody, [return_maps])
|
||||||
),
|
),
|
||||||
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
||||||
end,
|
end,
|
||||||
|
|
|
@ -1,7 +1,10 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
|
|
||||||
{erl_opts, [debug_info]}.
|
{erl_opts, [debug_info]}.
|
||||||
{deps, [{emqx, {path, "../emqx"}}]}.
|
{deps, [
|
||||||
|
{emqx, {path, "../emqx"}},
|
||||||
|
{emqx_utils, {path, "../emqx_utils"}}
|
||||||
|
]}.
|
||||||
|
|
||||||
{shell, [
|
{shell, [
|
||||||
{apps, [emqx_auto_subscribe]}
|
{apps, [emqx_auto_subscribe]}
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_auto_subscribe, [
|
{application, emqx_auto_subscribe, [
|
||||||
{description, "Auto subscribe Application"},
|
{description, "Auto subscribe Application"},
|
||||||
{vsn, "0.1.3"},
|
{vsn, "0.1.4"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_auto_subscribe_app, []}},
|
{mod, {emqx_auto_subscribe_app, []}},
|
||||||
{applications, [
|
{applications, [
|
||||||
|
|
|
@ -31,14 +31,17 @@
|
||||||
namespace() -> "auto_subscribe".
|
namespace() -> "auto_subscribe".
|
||||||
|
|
||||||
roots() ->
|
roots() ->
|
||||||
["auto_subscribe"].
|
[{"auto_subscribe", ?HOCON(?R_REF("auto_subscribe"), #{importance => ?IMPORTANCE_HIDDEN})}].
|
||||||
|
|
||||||
fields("auto_subscribe") ->
|
fields("auto_subscribe") ->
|
||||||
[
|
[
|
||||||
{topics,
|
{topics,
|
||||||
?HOCON(
|
?HOCON(
|
||||||
?ARRAY(?R_REF("topic")),
|
?ARRAY(?R_REF("topic")),
|
||||||
#{desc => ?DESC(auto_subscribe), default => []}
|
#{
|
||||||
|
desc => ?DESC(auto_subscribe),
|
||||||
|
default => []
|
||||||
|
}
|
||||||
)}
|
)}
|
||||||
];
|
];
|
||||||
fields("topic") ->
|
fields("topic") ->
|
||||||
|
|
|
@ -141,7 +141,7 @@ t_update(_) ->
|
||||||
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
||||||
Body = [#{topic => ?TOPIC_S}],
|
Body = [#{topic => ?TOPIC_S}],
|
||||||
{ok, Response} = emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, Body),
|
{ok, Response} = emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, Body),
|
||||||
ResponseMap = emqx_json:decode(Response, [return_maps]),
|
ResponseMap = emqx_utils_json:decode(Response, [return_maps]),
|
||||||
?assertEqual(1, erlang:length(ResponseMap)),
|
?assertEqual(1, erlang:length(ResponseMap)),
|
||||||
|
|
||||||
BadBody1 = #{topic => ?TOPIC_S},
|
BadBody1 = #{topic => ?TOPIC_S},
|
||||||
|
@ -177,7 +177,7 @@ t_update(_) ->
|
||||||
emqtt:disconnect(Client),
|
emqtt:disconnect(Client),
|
||||||
|
|
||||||
{ok, GETResponse} = emqx_mgmt_api_test_util:request_api(get, Path),
|
{ok, GETResponse} = emqx_mgmt_api_test_util:request_api(get, Path),
|
||||||
GETResponseMap = emqx_json:decode(GETResponse, [return_maps]),
|
GETResponseMap = emqx_utils_json:decode(GETResponse, [return_maps]),
|
||||||
?assertEqual(1, erlang:length(GETResponseMap)),
|
?assertEqual(1, erlang:length(GETResponseMap)),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
{erl_opts, [debug_info]}.
|
{erl_opts, [debug_info]}.
|
||||||
{deps, [ {emqx, {path, "../emqx"}}
|
{deps, [
|
||||||
, {emqx_resource, {path, "../../apps/emqx_resource"}}
|
{emqx, {path, "../emqx"}},
|
||||||
]}.
|
{emqx_utils, {path, "../emqx_utils"}},
|
||||||
|
{emqx_resource, {path, "../../apps/emqx_resource"}}
|
||||||
|
]}.
|
||||||
|
|
||||||
{shell, [
|
{shell, [
|
||||||
% {config, "config/sys.config"},
|
% {config, "config/sys.config"},
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_bridge, [
|
{application, emqx_bridge, [
|
||||||
{description, "EMQX bridges"},
|
{description, "EMQX bridges"},
|
||||||
{vsn, "0.1.15"},
|
{vsn, "0.1.16"},
|
||||||
{registered, [emqx_bridge_sup]},
|
{registered, [emqx_bridge_sup]},
|
||||||
{mod, {emqx_bridge_app, []}},
|
{mod, {emqx_bridge_app, []}},
|
||||||
{applications, [
|
{applications, [
|
||||||
|
|
|
@ -34,7 +34,7 @@
|
||||||
unload/0,
|
unload/0,
|
||||||
lookup/1,
|
lookup/1,
|
||||||
lookup/2,
|
lookup/2,
|
||||||
lookup/3,
|
get_metrics/2,
|
||||||
create/3,
|
create/3,
|
||||||
disable_enable/3,
|
disable_enable/3,
|
||||||
remove/2,
|
remove/2,
|
||||||
|
@ -69,7 +69,8 @@
|
||||||
T == tdengine;
|
T == tdengine;
|
||||||
T == dynamo;
|
T == dynamo;
|
||||||
T == rocketmq;
|
T == rocketmq;
|
||||||
T == cassandra
|
T == cassandra;
|
||||||
|
T == sqlserver
|
||||||
).
|
).
|
||||||
|
|
||||||
load() ->
|
load() ->
|
||||||
|
@ -206,7 +207,7 @@ send_message(BridgeId, Message) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
query_opts(Config) ->
|
query_opts(Config) ->
|
||||||
case emqx_map_lib:deep_get([resource_opts, request_timeout], Config, false) of
|
case emqx_utils_maps:deep_get([resource_opts, request_timeout], Config, false) of
|
||||||
Timeout when is_integer(Timeout) ->
|
Timeout when is_integer(Timeout) ->
|
||||||
%% request_timeout is configured
|
%% request_timeout is configured
|
||||||
#{timeout => Timeout};
|
#{timeout => Timeout};
|
||||||
|
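query_opts/1 above switches to emqx_utils_maps:deep_get/3, which takes a key path and a default. Assumed behaviour, shown with made-up config values:

%% Returns the nested value when the whole path exists, otherwise the default,
%% which is why the caller can match on is_integer(Timeout) versus false.
15000 = emqx_utils_maps:deep_get(
    [resource_opts, request_timeout],
    #{resource_opts => #{request_timeout => 15000}},
    false
),
false = emqx_utils_maps:deep_get([resource_opts, request_timeout], #{}, false).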
@ -271,6 +272,9 @@ lookup(Type, Name, RawConf) ->
|
||||||
}}
|
}}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
get_metrics(Type, Name) ->
|
||||||
|
emqx_resource:get_metrics(emqx_bridge_resource:resource_id(Type, Name)).
|
||||||
|
|
||||||
maybe_upgrade(mqtt, Config) ->
|
maybe_upgrade(mqtt, Config) ->
|
||||||
emqx_bridge_compatible_config:maybe_upgrade(Config);
|
emqx_bridge_compatible_config:maybe_upgrade(Config);
|
||||||
maybe_upgrade(webhook, Config) ->
|
maybe_upgrade(webhook, Config) ->
|
||||||
|
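The hunk above exports a new emqx_bridge:get_metrics/2 helper that resolves the bridge's resource id and reads the metrics from emqx_resource. A hypothetical call site; the bridge type and name are made up, and the matched keys mirror the metrics map consumed by format_metrics later in this diff:

%% Read the raw metrics map for one bridge on the local node.
Metrics = emqx_bridge:get_metrics(webhook, <<"my_webhook">>),
#{counters := Counters, gauges := Gauges, rate := _Rate} = Metrics,
#{'matched' := _, 'success' := _, 'failed' := _} = Counters,
Queued = maps:get('queuing', Gauges, 0).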
@ -292,7 +296,7 @@ create(BridgeType, BridgeName, RawConf) ->
|
||||||
brige_action => create,
|
brige_action => create,
|
||||||
bridge_type => BridgeType,
|
bridge_type => BridgeType,
|
||||||
bridge_name => BridgeName,
|
bridge_name => BridgeName,
|
||||||
bridge_raw_config => emqx_misc:redact(RawConf)
|
bridge_raw_config => emqx_utils:redact(RawConf)
|
||||||
}),
|
}),
|
||||||
emqx_conf:update(
|
emqx_conf:update(
|
||||||
emqx_bridge:config_key_path() ++ [BridgeType, BridgeName],
|
emqx_bridge:config_key_path() ++ [BridgeType, BridgeName],
|
||||||
|
@ -363,7 +367,7 @@ perform_bridge_changes([{Action, MapConfs} | Tasks], Result0) ->
|
||||||
perform_bridge_changes(Tasks, Result).
|
perform_bridge_changes(Tasks, Result).
|
||||||
|
|
||||||
diff_confs(NewConfs, OldConfs) ->
|
diff_confs(NewConfs, OldConfs) ->
|
||||||
emqx_map_lib:diff_maps(
|
emqx_utils_maps:diff_maps(
|
||||||
flatten_confs(NewConfs),
|
flatten_confs(NewConfs),
|
||||||
flatten_confs(OldConfs)
|
flatten_confs(OldConfs)
|
||||||
).
|
).
|
||||||
|
|
|
@ -20,7 +20,7 @@
|
||||||
-include_lib("typerefl/include/types.hrl").
|
-include_lib("typerefl/include/types.hrl").
|
||||||
-include_lib("hocon/include/hoconsc.hrl").
|
-include_lib("hocon/include/hoconsc.hrl").
|
||||||
-include_lib("emqx/include/logger.hrl").
|
-include_lib("emqx/include/logger.hrl").
|
||||||
-include_lib("emqx/include/emqx_api_lib.hrl").
|
-include_lib("emqx_utils/include/emqx_utils_api.hrl").
|
||||||
-include_lib("emqx_bridge/include/emqx_bridge.hrl").
|
-include_lib("emqx_bridge/include/emqx_bridge.hrl").
|
||||||
|
|
||||||
-import(hoconsc, [mk/2, array/1, enum/1]).
|
-import(hoconsc, [mk/2, array/1, enum/1]).
|
||||||
|
@ -46,6 +46,7 @@
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([lookup_from_local_node/2]).
|
-export([lookup_from_local_node/2]).
|
||||||
|
-export([get_metrics_from_local_node/2]).
|
||||||
|
|
||||||
-define(BRIDGE_NOT_ENABLED,
|
-define(BRIDGE_NOT_ENABLED,
|
||||||
?BAD_REQUEST(<<"Forbidden operation, bridge not enabled">>)
|
?BAD_REQUEST(<<"Forbidden operation, bridge not enabled">>)
|
||||||
|
@ -219,7 +220,7 @@ info_example_basic(webhook) ->
|
||||||
auto_restart_interval => 15000,
|
auto_restart_interval => 15000,
|
||||||
query_mode => async,
|
query_mode => async,
|
||||||
inflight_window => 100,
|
inflight_window => 100,
|
||||||
max_queue_bytes => 100 * 1024 * 1024
|
max_buffer_bytes => 100 * 1024 * 1024
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
info_example_basic(mqtt) ->
|
info_example_basic(mqtt) ->
|
||||||
|
@ -244,7 +245,7 @@ mqtt_main_example() ->
|
||||||
health_check_interval => <<"15s">>,
|
health_check_interval => <<"15s">>,
|
||||||
auto_restart_interval => <<"60s">>,
|
auto_restart_interval => <<"60s">>,
|
||||||
query_mode => sync,
|
query_mode => sync,
|
||||||
max_queue_bytes => 100 * 1024 * 1024
|
max_buffer_bytes => 100 * 1024 * 1024
|
||||||
},
|
},
|
||||||
ssl => #{
|
ssl => #{
|
||||||
enable => false
|
enable => false
|
||||||
|
@ -467,7 +468,7 @@ schema("/bridges_probe") ->
|
||||||
end;
|
end;
|
||||||
'/bridges'(get, _Params) ->
|
'/bridges'(get, _Params) ->
|
||||||
Nodes = mria:running_nodes(),
|
Nodes = mria:running_nodes(),
|
||||||
NodeReplies = emqx_bridge_proto_v3:list_bridges_on_nodes(Nodes),
|
NodeReplies = emqx_bridge_proto_v4:list_bridges_on_nodes(Nodes),
|
||||||
case is_ok(NodeReplies) of
|
case is_ok(NodeReplies) of
|
||||||
{ok, NodeBridges} ->
|
{ok, NodeBridges} ->
|
||||||
AllBridges = [
|
AllBridges = [
|
||||||
|
@ -524,7 +525,7 @@ schema("/bridges_probe") ->
|
||||||
).
|
).
|
||||||
|
|
||||||
'/bridges/:id/metrics'(get, #{bindings := #{id := Id}}) ->
|
'/bridges/:id/metrics'(get, #{bindings := #{id := Id}}) ->
|
||||||
?TRY_PARSE_ID(Id, lookup_from_all_nodes_metrics(BridgeType, BridgeName, 200)).
|
?TRY_PARSE_ID(Id, get_metrics_from_all_nodes(BridgeType, BridgeName)).
|
||||||
|
|
||||||
'/bridges/:id/metrics/reset'(put, #{bindings := #{id := Id}}) ->
|
'/bridges/:id/metrics/reset'(put, #{bindings := #{id := Id}}) ->
|
||||||
?TRY_PARSE_ID(
|
?TRY_PARSE_ID(
|
||||||
|
@ -564,19 +565,21 @@ maybe_deobfuscate_bridge_probe(#{<<"type">> := BridgeType, <<"name">> := BridgeN
|
||||||
maybe_deobfuscate_bridge_probe(Params) ->
|
maybe_deobfuscate_bridge_probe(Params) ->
|
||||||
Params.
|
Params.
|
||||||
|
|
||||||
lookup_from_all_nodes(BridgeType, BridgeName, SuccCode) ->
|
get_metrics_from_all_nodes(BridgeType, BridgeName) ->
|
||||||
FormatFun = fun format_bridge_info/1,
|
|
||||||
do_lookup_from_all_nodes(BridgeType, BridgeName, SuccCode, FormatFun).
|
|
||||||
|
|
||||||
lookup_from_all_nodes_metrics(BridgeType, BridgeName, SuccCode) ->
|
|
||||||
FormatFun = fun format_bridge_metrics/1,
|
|
||||||
do_lookup_from_all_nodes(BridgeType, BridgeName, SuccCode, FormatFun).
|
|
||||||
|
|
||||||
do_lookup_from_all_nodes(BridgeType, BridgeName, SuccCode, FormatFun) ->
|
|
||||||
Nodes = mria:running_nodes(),
|
Nodes = mria:running_nodes(),
|
||||||
case is_ok(emqx_bridge_proto_v3:lookup_from_all_nodes(Nodes, BridgeType, BridgeName)) of
|
Result = do_bpapi_call(all, get_metrics_from_all_nodes, [Nodes, BridgeType, BridgeName]),
|
||||||
|
case Result of
|
||||||
|
Metrics when is_list(Metrics) ->
|
||||||
|
{200, format_bridge_metrics(lists:zip(Nodes, Metrics))};
|
||||||
|
{error, Reason} ->
|
||||||
|
?INTERNAL_ERROR(Reason)
|
||||||
|
end.
|
||||||
|
|
||||||
|
lookup_from_all_nodes(BridgeType, BridgeName, SuccCode) ->
|
||||||
|
Nodes = mria:running_nodes(),
|
||||||
|
case is_ok(emqx_bridge_proto_v4:lookup_from_all_nodes(Nodes, BridgeType, BridgeName)) of
|
||||||
{ok, [{ok, _} | _] = Results} ->
|
{ok, [{ok, _} | _] = Results} ->
|
||||||
{SuccCode, FormatFun([R || {ok, R} <- Results])};
|
{SuccCode, format_bridge_info([R || {ok, R} <- Results])};
|
||||||
{ok, [{error, not_found} | _]} ->
|
{ok, [{error, not_found} | _]} ->
|
||||||
?BRIDGE_NOT_FOUND(BridgeType, BridgeName);
|
?BRIDGE_NOT_FOUND(BridgeType, BridgeName);
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
|
@ -603,6 +606,9 @@ create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) ->
|
||||||
?BAD_REQUEST(map_to_json(Reason))
|
?BAD_REQUEST(map_to_json(Reason))
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
get_metrics_from_local_node(BridgeType, BridgeName) ->
|
||||||
|
format_metrics(emqx_bridge:get_metrics(BridgeType, BridgeName)).
|
||||||
|
|
||||||
'/bridges/:id/enable/:enable'(put, #{bindings := #{id := Id, enable := Enable}}) ->
|
'/bridges/:id/enable/:enable'(put, #{bindings := #{id := Id, enable := Enable}}) ->
|
||||||
?TRY_PARSE_ID(
|
?TRY_PARSE_ID(
|
||||||
Id,
|
Id,
|
||||||
|
@ -662,7 +668,7 @@ create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) ->
|
||||||
false ->
|
false ->
|
||||||
?BRIDGE_NOT_ENABLED;
|
?BRIDGE_NOT_ENABLED;
|
||||||
true ->
|
true ->
|
||||||
case emqx_misc:safe_to_existing_atom(Node, utf8) of
|
case emqx_utils:safe_to_existing_atom(Node, utf8) of
|
||||||
{ok, TargetNode} ->
|
{ok, TargetNode} ->
|
||||||
call_operation(TargetNode, OperFunc, [
|
call_operation(TargetNode, OperFunc, [
|
||||||
TargetNode, BridgeType, BridgeName
|
TargetNode, BridgeType, BridgeName
|
||||||
|
@ -739,7 +745,7 @@ pick_bridges_by_id(Type, Name, BridgesAllNodes) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
format_bridge_info([FirstBridge | _] = Bridges) ->
|
format_bridge_info([FirstBridge | _] = Bridges) ->
|
||||||
Res = maps:without([node, metrics], FirstBridge),
|
Res = maps:remove(node, FirstBridge),
|
||||||
NodeStatus = node_status(Bridges),
|
NodeStatus = node_status(Bridges),
|
||||||
redact(Res#{
|
redact(Res#{
|
||||||
status => aggregate_status(NodeStatus),
|
status => aggregate_status(NodeStatus),
|
||||||
|
@ -766,7 +772,7 @@ aggregate_status(AllStatus) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
collect_metrics(Bridges) ->
|
collect_metrics(Bridges) ->
|
||||||
[maps:with([node, metrics], B) || B <- Bridges].
|
[#{node => Node, metrics => Metrics} || {Node, Metrics} <- Bridges].
|
||||||
|
|
||||||
aggregate_metrics(AllMetrics) ->
|
aggregate_metrics(AllMetrics) ->
|
||||||
InitMetrics = ?EMPTY_METRICS,
|
InitMetrics = ?EMPTY_METRICS,
|
||||||
|
@ -800,9 +806,7 @@ aggregate_metrics(
|
||||||
M15 + N15,
|
M15 + N15,
|
||||||
M16 + N16,
|
M16 + N16,
|
||||||
M17 + N17
|
M17 + N17
|
||||||
);
|
).
|
||||||
aggregate_metrics(#{}, Metrics) ->
|
|
||||||
Metrics.
|
|
||||||
|
|
||||||
format_resource(
|
format_resource(
|
||||||
#{
|
#{
|
||||||
|
@ -826,63 +830,57 @@ format_resource(
|
||||||
).
|
).
|
||||||
|
|
||||||
format_resource_data(ResData) ->
|
format_resource_data(ResData) ->
|
||||||
maps:fold(fun format_resource_data/3, #{}, maps:with([status, metrics, error], ResData)).
|
maps:fold(fun format_resource_data/3, #{}, maps:with([status, error], ResData)).
|
||||||
|
|
||||||
format_resource_data(error, undefined, Result) ->
|
format_resource_data(error, undefined, Result) ->
|
||||||
Result;
|
Result;
|
||||||
format_resource_data(error, Error, Result) ->
|
format_resource_data(error, Error, Result) ->
|
||||||
Result#{status_reason => emqx_misc:readable_error_msg(Error)};
|
Result#{status_reason => emqx_utils:readable_error_msg(Error)};
|
||||||
format_resource_data(
|
|
||||||
metrics,
|
|
||||||
#{
|
|
||||||
counters := #{
|
|
||||||
'dropped' := Dropped,
|
|
||||||
'dropped.other' := DroppedOther,
|
|
||||||
'dropped.expired' := DroppedExpired,
|
|
||||||
'dropped.queue_full' := DroppedQueueFull,
|
|
||||||
'dropped.resource_not_found' := DroppedResourceNotFound,
|
|
||||||
'dropped.resource_stopped' := DroppedResourceStopped,
|
|
||||||
'matched' := Matched,
|
|
||||||
'retried' := Retried,
|
|
||||||
'late_reply' := LateReply,
|
|
||||||
'failed' := SentFailed,
|
|
||||||
'success' := SentSucc,
|
|
||||||
'received' := Rcvd
|
|
||||||
},
|
|
||||||
gauges := Gauges,
|
|
||||||
rate := #{
|
|
||||||
matched := #{current := Rate, last5m := Rate5m, max := RateMax}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Result
|
|
||||||
) ->
|
|
||||||
Queued = maps:get('queuing', Gauges, 0),
|
|
||||||
SentInflight = maps:get('inflight', Gauges, 0),
|
|
||||||
Result#{
|
|
||||||
metrics =>
|
|
||||||
?METRICS(
|
|
||||||
Dropped,
|
|
||||||
DroppedOther,
|
|
||||||
DroppedExpired,
|
|
||||||
DroppedQueueFull,
|
|
||||||
DroppedResourceNotFound,
|
|
||||||
DroppedResourceStopped,
|
|
||||||
Matched,
|
|
||||||
Queued,
|
|
||||||
Retried,
|
|
||||||
LateReply,
|
|
||||||
SentFailed,
|
|
||||||
SentInflight,
|
|
||||||
SentSucc,
|
|
||||||
Rate,
|
|
||||||
Rate5m,
|
|
||||||
RateMax,
|
|
||||||
Rcvd
|
|
||||||
)
|
|
||||||
};
|
|
||||||
format_resource_data(K, V, Result) ->
|
format_resource_data(K, V, Result) ->
|
||||||
Result#{K => V}.
|
Result#{K => V}.
|
||||||
|
|
||||||
|
format_metrics(#{
|
||||||
|
counters := #{
|
||||||
|
'dropped' := Dropped,
|
||||||
|
'dropped.other' := DroppedOther,
|
||||||
|
'dropped.expired' := DroppedExpired,
|
||||||
|
'dropped.queue_full' := DroppedQueueFull,
|
||||||
|
'dropped.resource_not_found' := DroppedResourceNotFound,
|
||||||
|
'dropped.resource_stopped' := DroppedResourceStopped,
|
||||||
|
'matched' := Matched,
|
||||||
|
'retried' := Retried,
|
||||||
|
'late_reply' := LateReply,
|
||||||
|
'failed' := SentFailed,
|
||||||
|
'success' := SentSucc,
|
||||||
|
'received' := Rcvd
|
||||||
|
},
|
||||||
|
gauges := Gauges,
|
||||||
|
rate := #{
|
||||||
|
matched := #{current := Rate, last5m := Rate5m, max := RateMax}
|
||||||
|
}
|
||||||
|
}) ->
|
||||||
|
Queued = maps:get('queuing', Gauges, 0),
|
||||||
|
SentInflight = maps:get('inflight', Gauges, 0),
|
||||||
|
?METRICS(
|
||||||
|
Dropped,
|
||||||
|
DroppedOther,
|
||||||
|
DroppedExpired,
|
||||||
|
DroppedQueueFull,
|
||||||
|
DroppedResourceNotFound,
|
||||||
|
DroppedResourceStopped,
|
||||||
|
Matched,
|
||||||
|
Queued,
|
||||||
|
Retried,
|
||||||
|
LateReply,
|
||||||
|
SentFailed,
|
||||||
|
SentInflight,
|
||||||
|
SentSucc,
|
||||||
|
Rate,
|
||||||
|
Rate5m,
|
||||||
|
RateMax,
|
||||||
|
Rcvd
|
||||||
|
).
|
||||||
|
|
||||||
fill_defaults(Type, RawConf) ->
|
fill_defaults(Type, RawConf) ->
|
||||||
PackedConf = pack_bridge_conf(Type, RawConf),
|
PackedConf = pack_bridge_conf(Type, RawConf),
|
||||||
FullConf = emqx_config:fill_defaults(emqx_bridge_schema, PackedConf, #{}),
|
FullConf = emqx_config:fill_defaults(emqx_bridge_schema, PackedConf, #{}),
|
||||||
|
@ -990,7 +988,7 @@ do_bpapi_call(Node, Call, Args) ->
|
||||||
do_bpapi_call_vsn(SupportedVersion, Call, Args) ->
|
do_bpapi_call_vsn(SupportedVersion, Call, Args) ->
|
||||||
case lists:member(SupportedVersion, supported_versions(Call)) of
|
case lists:member(SupportedVersion, supported_versions(Call)) of
|
||||||
true ->
|
true ->
|
||||||
apply(emqx_bridge_proto_v3, Call, Args);
|
apply(emqx_bridge_proto_v4, Call, Args);
|
||||||
false ->
|
false ->
|
||||||
{error, not_implemented}
|
{error, not_implemented}
|
||||||
end.
|
end.
|
||||||
|
@ -1000,12 +998,13 @@ maybe_unwrap({error, not_implemented}) ->
|
||||||
maybe_unwrap(RpcMulticallResult) ->
|
maybe_unwrap(RpcMulticallResult) ->
|
||||||
emqx_rpc:unwrap_erpc(RpcMulticallResult).
|
emqx_rpc:unwrap_erpc(RpcMulticallResult).
|
||||||
|
|
||||||
supported_versions(start_bridge_to_node) -> [2, 3];
|
supported_versions(start_bridge_to_node) -> [2, 3, 4];
|
||||||
supported_versions(start_bridges_to_all_nodes) -> [2, 3];
|
supported_versions(start_bridges_to_all_nodes) -> [2, 3, 4];
|
||||||
supported_versions(_Call) -> [1, 2, 3].
|
supported_versions(get_metrics_from_all_nodes) -> [4];
|
||||||
|
supported_versions(_Call) -> [1, 2, 3, 4].
|
||||||
|
|
||||||
redact(Term) ->
|
redact(Term) ->
|
||||||
emqx_misc:redact(Term).
|
emqx_utils:redact(Term).
|
||||||
|
|
||||||
deobfuscate(NewConf, OldConf) ->
|
deobfuscate(NewConf, OldConf) ->
|
||||||
maps:fold(
|
maps:fold(
|
||||||
|
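supported_versions/1 above gates each RPC on the bpapi version negotiated across the cluster, so the new metrics call is only attempted once every node speaks protocol 4. A rough sketch of the effect during a rolling upgrade (the negotiated version values are only for illustration):

%% For get_metrics_from_all_nodes the only supported version is 4, per the
%% supported_versions/1 clause above, so a cluster still negotiating
%% version 3 gets a local refusal instead of a multicast.
Gate = fun(NegotiatedVsn) ->
    case lists:member(NegotiatedVsn, [4]) of
        true -> ok;
        false -> {error, not_implemented}
    end
end,
{error, not_implemented} = Gate(3),
ok = Gate(4).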
@ -1016,7 +1015,7 @@ deobfuscate(NewConf, OldConf) ->
|
||||||
{ok, OldV} when is_map(V), is_map(OldV) ->
|
{ok, OldV} when is_map(V), is_map(OldV) ->
|
||||||
Acc#{K => deobfuscate(V, OldV)};
|
Acc#{K => deobfuscate(V, OldV)};
|
||||||
{ok, OldV} ->
|
{ok, OldV} ->
|
||||||
case emqx_misc:is_redacted(K, V) of
|
case emqx_utils:is_redacted(K, V) of
|
||||||
true ->
|
true ->
|
||||||
Acc#{K => OldV};
|
Acc#{K => OldV};
|
||||||
_ ->
|
_ ->
|
||||||
|
@ -1029,6 +1028,6 @@ deobfuscate(NewConf, OldConf) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
map_to_json(M) ->
|
map_to_json(M) ->
|
||||||
emqx_json:encode(
|
emqx_utils_json:encode(
|
||||||
emqx_map_lib:jsonable_map(M, fun(K, V) -> {K, emqx_map_lib:binary_string(V)} end)
|
emqx_utils_maps:jsonable_map(M, fun(K, V) -> {K, emqx_utils_maps:binary_string(V)} end)
|
||||||
).
|
).
|
||||||
|
|
|
@ -157,7 +157,7 @@ create(Type, Name, Conf, Opts0) ->
|
||||||
msg => "create bridge",
|
msg => "create bridge",
|
||||||
type => Type,
|
type => Type,
|
||||||
name => Name,
|
name => Name,
|
||||||
config => emqx_misc:redact(Conf)
|
config => emqx_utils:redact(Conf)
|
||||||
}),
|
}),
|
||||||
Opts = override_start_after_created(Conf, Opts0),
|
Opts = override_start_after_created(Conf, Opts0),
|
||||||
{ok, _Data} = emqx_resource:create_local(
|
{ok, _Data} = emqx_resource:create_local(
|
||||||
|
@ -186,13 +186,13 @@ update(Type, Name, {OldConf, Conf}, Opts0) ->
|
||||||
%% without restarting the bridge.
|
%% without restarting the bridge.
|
||||||
%%
|
%%
|
||||||
Opts = override_start_after_created(Conf, Opts0),
|
Opts = override_start_after_created(Conf, Opts0),
|
||||||
case emqx_map_lib:if_only_to_toggle_enable(OldConf, Conf) of
|
case emqx_utils_maps:if_only_to_toggle_enable(OldConf, Conf) of
|
||||||
false ->
|
false ->
|
||||||
?SLOG(info, #{
|
?SLOG(info, #{
|
||||||
msg => "update bridge",
|
msg => "update bridge",
|
||||||
type => Type,
|
type => Type,
|
||||||
name => Name,
|
name => Name,
|
||||||
config => emqx_misc:redact(Conf)
|
config => emqx_utils:redact(Conf)
|
||||||
}),
|
}),
|
||||||
case recreate(Type, Name, Conf, Opts) of
|
case recreate(Type, Name, Conf, Opts) of
|
||||||
{ok, _} ->
|
{ok, _} ->
|
||||||
|
@ -202,7 +202,7 @@ update(Type, Name, {OldConf, Conf}, Opts0) ->
|
||||||
msg => "updating_a_non_existing_bridge",
|
msg => "updating_a_non_existing_bridge",
|
||||||
type => Type,
|
type => Type,
|
||||||
name => Name,
|
name => Name,
|
||||||
config => emqx_misc:redact(Conf)
|
config => emqx_utils:redact(Conf)
|
||||||
}),
|
}),
|
||||||
create(Type, Name, Conf, Opts);
|
create(Type, Name, Conf, Opts);
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
|
@ -236,9 +236,9 @@ recreate(Type, Name, Conf, Opts) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
create_dry_run(Type, Conf0) ->
|
create_dry_run(Type, Conf0) ->
|
||||||
TmpPath0 = iolist_to_binary([?TEST_ID_PREFIX, emqx_misc:gen_id(8)]),
|
TmpPath0 = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]),
|
||||||
TmpPath = emqx_misc:safe_filename(TmpPath0),
|
TmpPath = emqx_utils:safe_filename(TmpPath0),
|
||||||
Conf = emqx_map_lib:safe_atom_key_map(Conf0),
|
Conf = emqx_utils_maps:safe_atom_key_map(Conf0),
|
||||||
case emqx_connector_ssl:convert_certs(TmpPath, Conf) of
|
case emqx_connector_ssl:convert_certs(TmpPath, Conf) of
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
{error, Reason};
|
{error, Reason};
|
||||||
|
|
|
@ -20,6 +20,7 @@
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
introduced_in/0,
|
introduced_in/0,
|
||||||
|
deprecated_since/0,
|
||||||
|
|
||||||
list_bridges/1,
|
list_bridges/1,
|
||||||
list_bridges_on_nodes/1,
|
list_bridges_on_nodes/1,
|
||||||
|
@ -39,6 +40,9 @@
|
||||||
introduced_in() ->
|
introduced_in() ->
|
||||||
"5.0.21".
|
"5.0.21".
|
||||||
|
|
||||||
|
deprecated_since() ->
|
||||||
|
"5.0.22".
|
||||||
|
|
||||||
-spec list_bridges(node()) -> list() | emqx_rpc:badrpc().
|
-spec list_bridges(node()) -> list() | emqx_rpc:badrpc().
|
||||||
list_bridges(Node) ->
|
list_bridges(Node) ->
|
||||||
rpc:call(Node, emqx_bridge, list, [], ?TIMEOUT).
|
rpc:call(Node, emqx_bridge, list, [], ?TIMEOUT).
|
||||||
|
|
|
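The v3 protocol module above is marked deprecated in favour of the v4 module added in the next hunk. A hypothetical caller of the new metrics RPC, mirroring what the bridge API handler earlier in this diff does; the bridge name is made up:

%% Multicall every running node, unwrap the erpc results and pair them with
%% the node names for formatting.
Nodes = mria:running_nodes(),
Replies = emqx_bridge_proto_v4:get_metrics_from_all_nodes(Nodes, webhook, <<"my_webhook">>),
Metrics = emqx_rpc:unwrap_erpc(Replies),
NodeMetrics = lists:zip(Nodes, Metrics).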
@ -0,0 +1,135 @@
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
-module(emqx_bridge_proto_v4).
|
||||||
|
|
||||||
|
-behaviour(emqx_bpapi).
|
||||||
|
|
||||||
|
-export([
|
||||||
|
introduced_in/0,
|
||||||
|
|
||||||
|
list_bridges_on_nodes/1,
|
||||||
|
restart_bridge_to_node/3,
|
||||||
|
start_bridge_to_node/3,
|
||||||
|
stop_bridge_to_node/3,
|
||||||
|
lookup_from_all_nodes/3,
|
||||||
|
get_metrics_from_all_nodes/3,
|
||||||
|
restart_bridges_to_all_nodes/3,
|
||||||
|
start_bridges_to_all_nodes/3,
|
||||||
|
stop_bridges_to_all_nodes/3
|
||||||
|
]).
|
||||||
|
|
||||||
|
-include_lib("emqx/include/bpapi.hrl").
|
||||||
|
|
||||||
|
-define(TIMEOUT, 15000).
|
||||||
|
|
||||||
|
introduced_in() ->
|
||||||
|
"5.0.22".
|
||||||
|
|
||||||
|
-spec list_bridges_on_nodes([node()]) ->
|
||||||
|
emqx_rpc:erpc_multicall([emqx_resource:resource_data()]).
|
||||||
|
list_bridges_on_nodes(Nodes) ->
|
||||||
|
erpc:multicall(Nodes, emqx_bridge, list, [], ?TIMEOUT).
|
||||||
|
|
||||||
|
-type key() :: atom() | binary() | [byte()].
|
||||||
|
|
||||||
|
-spec restart_bridge_to_node(node(), key(), key()) ->
|
||||||
|
term().
|
||||||
|
restart_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||||
|
rpc:call(
|
||||||
|
Node,
|
||||||
|
emqx_bridge_resource,
|
||||||
|
restart,
|
||||||
|
[BridgeType, BridgeName],
|
||||||
|
?TIMEOUT
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec start_bridge_to_node(node(), key(), key()) ->
|
||||||
|
term().
|
||||||
|
start_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||||
|
rpc:call(
|
||||||
|
Node,
|
||||||
|
emqx_bridge_resource,
|
||||||
|
start,
|
||||||
|
[BridgeType, BridgeName],
|
||||||
|
?TIMEOUT
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec stop_bridge_to_node(node(), key(), key()) ->
|
||||||
|
term().
|
||||||
|
stop_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||||
|
rpc:call(
|
||||||
|
Node,
|
||||||
|
emqx_bridge_resource,
|
||||||
|
stop,
|
||||||
|
[BridgeType, BridgeName],
|
||||||
|
?TIMEOUT
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec restart_bridges_to_all_nodes([node()], key(), key()) ->
|
||||||
|
emqx_rpc:erpc_multicall().
|
||||||
|
restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
|
erpc:multicall(
|
||||||
|
Nodes,
|
||||||
|
emqx_bridge_resource,
|
||||||
|
restart,
|
||||||
|
[BridgeType, BridgeName],
|
||||||
|
?TIMEOUT
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec start_bridges_to_all_nodes([node()], key(), key()) ->
|
||||||
|
emqx_rpc:erpc_multicall().
|
||||||
|
start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
|
erpc:multicall(
|
||||||
|
Nodes,
|
||||||
|
emqx_bridge_resource,
|
||||||
|
start,
|
||||||
|
[BridgeType, BridgeName],
|
||||||
|
?TIMEOUT
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec stop_bridges_to_all_nodes([node()], key(), key()) ->
|
||||||
|
emqx_rpc:erpc_multicall().
|
||||||
|
stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
|
erpc:multicall(
|
||||||
|
Nodes,
|
||||||
|
emqx_bridge_resource,
|
||||||
|
stop,
|
||||||
|
[BridgeType, BridgeName],
|
||||||
|
?TIMEOUT
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec lookup_from_all_nodes([node()], key(), key()) ->
|
||||||
|
emqx_rpc:erpc_multicall().
|
||||||
|
lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
|
erpc:multicall(
|
||||||
|
Nodes,
|
||||||
|
emqx_bridge_api,
|
||||||
|
lookup_from_local_node,
|
||||||
|
[BridgeType, BridgeName],
|
||||||
|
?TIMEOUT
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec get_metrics_from_all_nodes([node()], key(), key()) ->
|
||||||
|
emqx_rpc:erpc_multicall(emqx_metrics_worker:metrics()).
|
||||||
|
get_metrics_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
|
erpc:multicall(
|
||||||
|
Nodes,
|
||||||
|
emqx_bridge_api,
|
||||||
|
get_metrics_from_local_node,
|
||||||
|
[BridgeType, BridgeName],
|
||||||
|
?TIMEOUT
|
||||||
|
).
|
|
@ -89,7 +89,7 @@ default_resource_opts() ->
|
||||||
<<"inflight_window">> => 100,
|
<<"inflight_window">> => 100,
|
||||||
<<"auto_restart_interval">> => <<"60s">>,
|
<<"auto_restart_interval">> => <<"60s">>,
|
||||||
<<"health_check_interval">> => <<"15s">>,
|
<<"health_check_interval">> => <<"15s">>,
|
||||||
<<"max_queue_bytes">> => <<"1GB">>,
|
<<"max_buffer_bytes">> => <<"1GB">>,
|
||||||
<<"query_mode">> => <<"sync">>,
|
<<"query_mode">> => <<"sync">>,
|
||||||
%% there is only one underlying MQTT connection
|
%% there is only one underlying MQTT connection
|
||||||
%% doesn't make a lot of sense to have a large pool
|
%% doesn't make a lot of sense to have a large pool
|
||||||
|
|
|
@ -137,7 +137,7 @@ namespace() -> "bridge".
|
||||||
tags() ->
|
tags() ->
|
||||||
[<<"Bridge">>].
|
[<<"Bridge">>].
|
||||||
|
|
||||||
roots() -> [bridges].
|
roots() -> [{bridges, ?HOCON(?R_REF(bridges), #{importance => ?IMPORTANCE_HIDDEN})}].
|
||||||
|
|
||||||
fields(bridges) ->
|
fields(bridges) ->
|
||||||
[
|
[
|
||||||
|
@ -251,7 +251,7 @@ do_convert_webhook_config(
|
||||||
case {MReqTRoot, MReqTResource} of
|
case {MReqTRoot, MReqTResource} of
|
||||||
{{ok, ReqTRoot}, {ok, ReqTResource}} ->
|
{{ok, ReqTRoot}, {ok, ReqTResource}} ->
|
||||||
{_Parsed, ReqTRaw} = max({ReqTRoot, ReqTRootRaw}, {ReqTResource, ReqTResourceRaw}),
|
{_Parsed, ReqTRaw} = max({ReqTRoot, ReqTRootRaw}, {ReqTResource, ReqTResourceRaw}),
|
||||||
Conf1 = emqx_map_lib:deep_merge(
|
Conf1 = emqx_utils_maps:deep_merge(
|
||||||
Conf0,
|
Conf0,
|
||||||
#{
|
#{
|
||||||
<<"request_timeout">> => ReqTRaw,
|
<<"request_timeout">> => ReqTRaw,
|
||||||
|
|
Some files were not shown because too many files have changed in this diff.