Merge branch 'master' into conf-refactor
commit 494ffe86ad
@@ -18,13 +18,13 @@
]}.
|
||||
|
||||
%% Deps here may duplicate with emqx.git root level rebar.config
|
||||
%% but there not be any descrpancy.
|
||||
%% but there may not be any discrepancy.
|
||||
%% This rebar.config is necessary because the app may be used as a
|
||||
%% `git_subdir` dependency in other projects.
|
||||
{deps, [
|
||||
{emqx_utils, {path, "../emqx_utils"}},
|
||||
{lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}},
|
||||
{gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}},
|
||||
{jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}},
|
||||
{cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}},
|
||||
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}},
|
||||
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.6"}}},
|
||||
|
@@ -36,7 +36,7 @@
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.7"}}}
|
||||
]}.
|
||||
|
||||
{plugins, [{rebar3_proper, "0.12.1"}]}.
|
||||
{plugins, [{rebar3_proper, "0.12.1"}, rebar3_path_deps]}.
|
||||
{extra_src_dirs, [{"etc", [recursive]}]}.
|
||||
{profiles, [
|
||||
{test, [
|
||||
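For illustration of the `git_subdir` usage mentioned in the comment above (a hedged sketch, not part of this commit; the consuming project's URL, ref and sub-directory are assumptions), a downstream rebar.config could declare such a dependency roughly like so:

    {deps, [
        %% illustrative only: fetch apps/emqx out of the umbrella repository as a git_subdir dependency
        {emqx, {git_subdir, "https://github.com/emqx/emqx.git", {branch, "master"}, "apps/emqx"}}
    ]}.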
@@ -16,7 +16,6 @@
cowboy,
|
||||
sasl,
|
||||
os_mon,
|
||||
jiffy,
|
||||
lc,
|
||||
hocon
|
||||
]},
|
||||
|
|
|
@@ -164,29 +164,29 @@ run_hook(HookPoint, Args) ->
run_fold_hook(HookPoint, Args, Acc) ->
|
||||
emqx_hooks:run_fold(HookPoint, Args, Acc).
|
||||
|
||||
-spec get_config(emqx_map_lib:config_key_path()) -> term().
|
||||
-spec get_config(emqx_utils_maps:config_key_path()) -> term().
|
||||
get_config(KeyPath) ->
|
||||
emqx_config:get(KeyPath).
|
||||
|
||||
-spec get_config(emqx_map_lib:config_key_path(), term()) -> term().
|
||||
-spec get_config(emqx_utils_maps:config_key_path(), term()) -> term().
|
||||
get_config(KeyPath, Default) ->
|
||||
emqx_config:get(KeyPath, Default).
|
||||
|
||||
-spec get_raw_config(emqx_map_lib:config_key_path()) -> term().
|
||||
-spec get_raw_config(emqx_utils_maps:config_key_path()) -> term().
|
||||
get_raw_config(KeyPath) ->
|
||||
emqx_config:get_raw(KeyPath).
|
||||
|
||||
-spec get_raw_config(emqx_map_lib:config_key_path(), term()) -> term().
|
||||
-spec get_raw_config(emqx_utils_maps:config_key_path(), term()) -> term().
|
||||
get_raw_config(KeyPath, Default) ->
|
||||
emqx_config:get_raw(KeyPath, Default).
|
||||
|
||||
-spec update_config(emqx_map_lib:config_key_path(), emqx_config:update_request()) ->
|
||||
-spec update_config(emqx_utils_maps:config_key_path(), emqx_config:update_request()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
update_config(KeyPath, UpdateReq) ->
|
||||
update_config(KeyPath, UpdateReq, #{}).
|
||||
|
||||
-spec update_config(
|
||||
emqx_map_lib:config_key_path(),
|
||||
emqx_utils_maps:config_key_path(),
|
||||
emqx_config:update_request(),
|
||||
emqx_config:update_opts()
|
||||
) ->
|
||||
|
@@ -198,12 +198,12 @@ update_config([RootName | _] = KeyPath, UpdateReq, Opts) ->
{{update, UpdateReq}, Opts}
|
||||
).
|
||||
|
||||
-spec remove_config(emqx_map_lib:config_key_path()) ->
|
||||
-spec remove_config(emqx_utils_maps:config_key_path()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
remove_config(KeyPath) ->
|
||||
remove_config(KeyPath, #{}).
|
||||
|
||||
-spec remove_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
|
||||
-spec remove_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
remove_config([RootName | _] = KeyPath, Opts) ->
|
||||
emqx_config_handler:update_config(
|
||||
|
@@ -212,7 +212,7 @@ remove_config([RootName | _] = KeyPath, Opts) ->
{remove, Opts}
|
||||
).
|
||||
|
||||
-spec reset_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
|
||||
-spec reset_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
reset_config([RootName | _] = KeyPath, Opts) ->
|
||||
case emqx_config:get_default_value(KeyPath) of
|
||||
|
|
|
@@ -423,7 +423,7 @@ do_actions(deactivate, Alarm = #deactivated_alarm{name = Name}, [log | More]) ->
do_actions(deactivate, Alarm, More);
|
||||
do_actions(Operation, Alarm, [publish | More]) ->
|
||||
Topic = topic(Operation),
|
||||
{ok, Payload} = emqx_json:safe_encode(normalize(Alarm)),
|
||||
{ok, Payload} = emqx_utils_json:safe_encode(normalize(Alarm)),
|
||||
Message = emqx_message:make(
|
||||
?MODULE,
|
||||
0,
|
||||
|
|
|
@@ -277,9 +277,9 @@ atom(Bin) -> binary_to_existing_atom(Bin, utf8).
certs_dir(ChainName, ConfigOrID) ->
|
||||
DirName = dir(ChainName, ConfigOrID),
|
||||
SubDir = iolist_to_binary(filename:join(["authn", DirName])),
|
||||
emqx_misc:safe_filename(SubDir).
|
||||
emqx_utils:safe_filename(SubDir).
|
||||
|
||||
dir(ChainName, ID) when is_binary(ID) ->
|
||||
emqx_misc:safe_filename(iolist_to_binary([to_bin(ChainName), "-", ID]));
|
||||
emqx_utils:safe_filename(iolist_to_binary([to_bin(ChainName), "-", ID]));
|
||||
dir(ChainName, Config) when is_map(Config) ->
|
||||
dir(ChainName, authenticator_id(Config)).
|
||||
|
|
|
@@ -243,7 +243,7 @@ handle_info(Info, State) ->
{noreply, State}.
|
||||
|
||||
terminate(_Reason, #{expiry_timer := TRef}) ->
|
||||
emqx_misc:cancel_timer(TRef).
|
||||
emqx_utils:cancel_timer(TRef).
|
||||
|
||||
code_change(_OldVsn, State, _Extra) ->
|
||||
{ok, State}.
|
||||
|
@@ -254,10 +254,10 @@ code_change(_OldVsn, State, _Extra) ->
|
||||
-ifdef(TEST).
|
||||
ensure_expiry_timer(State) ->
|
||||
State#{expiry_timer := emqx_misc:start_timer(10, expire)}.
|
||||
State#{expiry_timer := emqx_utils:start_timer(10, expire)}.
|
||||
-else.
|
||||
ensure_expiry_timer(State) ->
|
||||
State#{expiry_timer := emqx_misc:start_timer(timer:minutes(1), expire)}.
|
||||
State#{expiry_timer := emqx_utils:start_timer(timer:minutes(1), expire)}.
|
||||
-endif.
|
||||
|
||||
expire_banned_items(Now) ->
|
||||
|
|
|
@@ -85,7 +85,7 @@ commit(Batch = #batch{batch_q = Q, commit_fun = Commit}) ->
reset(Batch).
|
||||
|
||||
reset(Batch = #batch{linger_timer = TRef}) ->
|
||||
_ = emqx_misc:cancel_timer(TRef),
|
||||
_ = emqx_utils:cancel_timer(TRef),
|
||||
Batch#batch{batch_q = [], linger_timer = undefined}.
|
||||
|
||||
-spec size(batch()) -> non_neg_integer().
|
||||
|
|
|
@@ -71,7 +71,7 @@
code_change/3
|
||||
]).
|
||||
|
||||
-import(emqx_tables, [lookup_value/2, lookup_value/3]).
|
||||
-import(emqx_utils_ets, [lookup_value/2, lookup_value/3]).
|
||||
|
||||
-ifdef(TEST).
|
||||
-compile(export_all).
|
||||
|
@@ -92,7 +92,7 @@
start_link(Pool, Id) ->
|
||||
ok = create_tabs(),
|
||||
gen_server:start_link(
|
||||
{local, emqx_misc:proc_name(?BROKER, Id)},
|
||||
{local, emqx_utils:proc_name(?BROKER, Id)},
|
||||
?MODULE,
|
||||
[Pool, Id],
|
||||
[]
|
||||
|
@@ -107,15 +107,15 @@ create_tabs() ->
TabOpts = [public, {read_concurrency, true}, {write_concurrency, true}],
|
||||
|
||||
%% SubOption: {Topic, SubPid} -> SubOption
|
||||
ok = emqx_tables:new(?SUBOPTION, [ordered_set | TabOpts]),
|
||||
ok = emqx_utils_ets:new(?SUBOPTION, [ordered_set | TabOpts]),
|
||||
|
||||
%% Subscription: SubPid -> Topic1, Topic2, Topic3, ...
|
||||
%% duplicate_bag: o(1) insert
|
||||
ok = emqx_tables:new(?SUBSCRIPTION, [duplicate_bag | TabOpts]),
|
||||
ok = emqx_utils_ets:new(?SUBSCRIPTION, [duplicate_bag | TabOpts]),
|
||||
|
||||
%% Subscriber: Topic -> SubPid1, SubPid2, SubPid3, ...
|
||||
%% bag: o(n) insert:(
|
||||
ok = emqx_tables:new(?SUBSCRIBER, [bag | TabOpts]).
|
||||
ok = emqx_utils_ets:new(?SUBSCRIBER, [bag | TabOpts]).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Subscribe API
|
||||
|
|
|
@@ -73,11 +73,11 @@ register_sub(SubPid, SubId) when is_pid(SubPid) ->
|
||||
-spec lookup_subid(pid()) -> maybe(emqx_types:subid()).
|
||||
lookup_subid(SubPid) when is_pid(SubPid) ->
|
||||
emqx_tables:lookup_value(?SUBMON, SubPid).
|
||||
emqx_utils_ets:lookup_value(?SUBMON, SubPid).
|
||||
|
||||
-spec lookup_subpid(emqx_types:subid()) -> maybe(pid()).
|
||||
lookup_subpid(SubId) ->
|
||||
emqx_tables:lookup_value(?SUBID, SubId).
|
||||
emqx_utils_ets:lookup_value(?SUBID, SubId).
|
||||
|
||||
-spec get_sub_shard(pid(), emqx_types:topic()) -> non_neg_integer().
|
||||
get_sub_shard(SubPid, Topic) ->
|
||||
|
@@ -105,15 +105,15 @@ reclaim_seq(Topic) ->
|
||||
init([]) ->
|
||||
%% Helper table
|
||||
ok = emqx_tables:new(?HELPER, [{read_concurrency, true}]),
|
||||
ok = emqx_utils_ets:new(?HELPER, [{read_concurrency, true}]),
|
||||
%% Shards: CPU * 32
|
||||
true = ets:insert(?HELPER, {shards, emqx_vm:schedulers() * 32}),
|
||||
%% SubSeq: Topic -> SeqId
|
||||
ok = emqx_sequence:create(?SUBSEQ),
|
||||
%% SubId: SubId -> SubPid
|
||||
ok = emqx_tables:new(?SUBID, [public, {read_concurrency, true}, {write_concurrency, true}]),
|
||||
ok = emqx_utils_ets:new(?SUBID, [public, {read_concurrency, true}, {write_concurrency, true}]),
|
||||
%% SubMon: SubPid -> SubId
|
||||
ok = emqx_tables:new(?SUBMON, [public, {read_concurrency, true}, {write_concurrency, true}]),
|
||||
ok = emqx_utils_ets:new(?SUBMON, [public, {read_concurrency, true}, {write_concurrency, true}]),
|
||||
%% Stats timer
|
||||
ok = emqx_stats:update_interval(broker_stats, fun emqx_broker:stats_fun/0),
|
||||
{ok, #{pmon => emqx_pmon:new()}}.
|
||||
|
@@ -131,7 +131,7 @@ handle_cast(Msg, State) ->
{noreply, State}.
|
||||
|
||||
handle_info({'DOWN', _MRef, process, SubPid, _Reason}, State = #{pmon := PMon}) ->
|
||||
SubPids = [SubPid | emqx_misc:drain_down(?BATCH_SIZE)],
|
||||
SubPids = [SubPid | emqx_utils:drain_down(?BATCH_SIZE)],
|
||||
ok = emqx_pool:async_submit(
|
||||
fun lists:foreach/2, [fun clean_down/1, SubPids]
|
||||
),
|
||||
|
|
|
@@ -61,7 +61,7 @@
-export([set_field/3]).
|
||||
|
||||
-import(
|
||||
emqx_misc,
|
||||
emqx_utils,
|
||||
[
|
||||
run_fold/3,
|
||||
pipeline/3,
|
||||
|
@@ -622,7 +622,7 @@ process_connect(
NChannel = Channel#channel{session = Session},
|
||||
handle_out(connack, {?RC_SUCCESS, sp(false), AckProps}, ensure_connected(NChannel));
|
||||
{ok, #{session := Session, present := true, pendings := Pendings}} ->
|
||||
Pendings1 = lists:usort(lists:append(Pendings, emqx_misc:drain_deliver())),
|
||||
Pendings1 = lists:usort(lists:append(Pendings, emqx_utils:drain_deliver())),
|
||||
NChannel = Channel#channel{
|
||||
session = Session,
|
||||
resuming = true,
|
||||
|
@@ -1203,7 +1203,7 @@ handle_call(
) ->
|
||||
ok = emqx_session:takeover(Session),
|
||||
%% TODO: Should not drain deliver here (side effect)
|
||||
Delivers = emqx_misc:drain_deliver(),
|
||||
Delivers = emqx_utils:drain_deliver(),
|
||||
AllPendings = lists:append(Delivers, Pendings),
|
||||
disconnect_and_shutdown(takenover, AllPendings, Channel);
|
||||
handle_call(list_authz_cache, Channel) ->
|
||||
|
@@ -1402,7 +1402,7 @@ ensure_timer(Name, Channel = #channel{timers = Timers}) ->
|
||||
ensure_timer(Name, Time, Channel = #channel{timers = Timers}) ->
|
||||
Msg = maps:get(Name, ?TIMER_TABLE),
|
||||
TRef = emqx_misc:start_timer(Time, Msg),
|
||||
TRef = emqx_utils:start_timer(Time, Msg),
|
||||
Channel#channel{timers = Timers#{Name => TRef}}.
|
||||
|
||||
reset_timer(Name, Channel) ->
|
||||
|
@@ -2045,7 +2045,7 @@ clear_keepalive(Channel = #channel{timers = Timers}) ->
undefined ->
|
||||
Channel;
|
||||
TRef ->
|
||||
emqx_misc:cancel_timer(TRef),
|
||||
emqx_utils:cancel_timer(TRef),
|
||||
Channel#channel{timers = maps:without([alive_timer], Timers)}
|
||||
end.
|
||||
%%--------------------------------------------------------------------
|
||||
|
@@ -2241,7 +2241,7 @@ get_mqtt_conf(Zone, Key, Default) ->
%%--------------------------------------------------------------------
|
||||
|
||||
set_field(Name, Value, Channel) ->
|
||||
Pos = emqx_misc:index_of(Name, record_info(fields, channel)),
|
||||
Pos = emqx_utils:index_of(Name, record_info(fields, channel)),
|
||||
setelement(Pos + 1, Channel, Value).
|
||||
|
||||
get_mqueue(#channel{session = Session}) ->
|
||||
|
|
|
@@ -651,10 +651,10 @@ cast(Msg) -> gen_server:cast(?CM, Msg).
|
||||
init([]) ->
|
||||
TabOpts = [public, {write_concurrency, true}],
|
||||
ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]),
|
||||
ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]),
|
||||
ok = emqx_tables:new(?CHAN_INFO_TAB, [ordered_set, compressed | TabOpts]),
|
||||
ok = emqx_tables:new(?CHAN_LIVE_TAB, [ordered_set, {write_concurrency, true} | TabOpts]),
|
||||
ok = emqx_utils_ets:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]),
|
||||
ok = emqx_utils_ets:new(?CHAN_CONN_TAB, [bag | TabOpts]),
|
||||
ok = emqx_utils_ets:new(?CHAN_INFO_TAB, [ordered_set, compressed | TabOpts]),
|
||||
ok = emqx_utils_ets:new(?CHAN_LIVE_TAB, [ordered_set, {write_concurrency, true} | TabOpts]),
|
||||
ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0),
|
||||
State = #{chan_pmon => emqx_pmon:new()},
|
||||
{ok, State}.
|
||||
|
@@ -672,7 +672,7 @@ handle_cast(Msg, State) ->
|
||||
handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
|
||||
?tp(emqx_cm_process_down, #{stale_pid => Pid, reason => _Reason}),
|
||||
ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)],
|
||||
ChanPids = [Pid | emqx_utils:drain_down(?BATCH_SIZE)],
|
||||
{Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon),
|
||||
lists:foreach(fun mark_channel_disconnected/1, ChanPids),
|
||||
ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]),
|
||||
|
|
|
@@ -144,7 +144,7 @@
-type app_envs() :: [proplists:property()].
|
||||
|
||||
%% @doc For the given path, get root value enclosed in a single-key map.
|
||||
-spec get_root(emqx_map_lib:config_key_path()) -> map().
|
||||
-spec get_root(emqx_utils_maps:config_key_path()) -> map().
|
||||
get_root([RootName | _]) ->
|
||||
#{RootName => do_get(?CONF, [RootName], #{})}.
|
||||
|
||||
|
@@ -155,14 +155,14 @@ get_root_raw([RootName | _]) ->
|
||||
%% @doc Get a config value for the given path.
|
||||
%% The path should at least include root config name.
|
||||
-spec get(emqx_map_lib:config_key_path()) -> term().
|
||||
-spec get(emqx_utils_maps:config_key_path()) -> term().
|
||||
get(KeyPath) -> do_get(?CONF, KeyPath).
|
||||
|
||||
-spec get(emqx_map_lib:config_key_path(), term()) -> term().
|
||||
-spec get(emqx_utils_maps:config_key_path(), term()) -> term().
|
||||
get(KeyPath, Default) -> do_get(?CONF, KeyPath, Default).
|
||||
|
||||
-spec find(emqx_map_lib:config_key_path()) ->
|
||||
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
|
||||
-spec find(emqx_utils_maps:config_key_path()) ->
|
||||
{ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
|
||||
find([]) ->
|
||||
Ref = make_ref(),
|
||||
case do_get(?CONF, [], Ref) of
|
||||
|
@@ -172,12 +172,12 @@ find([]) ->
find(KeyPath) ->
|
||||
atom_conf_path(
|
||||
KeyPath,
|
||||
fun(AtomKeyPath) -> emqx_map_lib:deep_find(AtomKeyPath, get_root(KeyPath)) end,
|
||||
fun(AtomKeyPath) -> emqx_utils_maps:deep_find(AtomKeyPath, get_root(KeyPath)) end,
|
||||
{return, {not_found, KeyPath}}
|
||||
).
|
||||
|
||||
-spec find_raw(emqx_map_lib:config_key_path()) ->
|
||||
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
|
||||
-spec find_raw(emqx_utils_maps:config_key_path()) ->
|
||||
{ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
|
||||
find_raw([]) ->
|
||||
Ref = make_ref(),
|
||||
case do_get_raw([], Ref) of
|
||||
|
@@ -185,9 +185,9 @@ find_raw([]) ->
Res -> {ok, Res}
|
||||
end;
|
||||
find_raw(KeyPath) ->
|
||||
emqx_map_lib:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)).
|
||||
emqx_utils_maps:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)).
|
||||
|
||||
-spec get_zone_conf(atom(), emqx_map_lib:config_key_path()) -> term().
|
||||
-spec get_zone_conf(atom(), emqx_utils_maps:config_key_path()) -> term().
|
||||
get_zone_conf(Zone, KeyPath) ->
|
||||
case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
|
||||
%% not found in zones, try to find the global config
|
||||
|
@@ -197,7 +197,7 @@ get_zone_conf(Zone, KeyPath) ->
Value
|
||||
end.
|
||||
|
||||
-spec get_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> term().
|
||||
-spec get_zone_conf(atom(), emqx_utils_maps:config_key_path(), term()) -> term().
|
||||
get_zone_conf(Zone, KeyPath, Default) ->
|
||||
case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
|
||||
%% not found in zones, try to find the global config
|
||||
|
@@ -207,24 +207,24 @@ get_zone_conf(Zone, KeyPath, Default) ->
Value
|
||||
end.
|
||||
|
||||
-spec put_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
-spec put_zone_conf(atom(), emqx_utils_maps:config_key_path(), term()) -> ok.
|
||||
put_zone_conf(Zone, KeyPath, Conf) ->
|
||||
?MODULE:put(?ZONE_CONF_PATH(Zone, KeyPath), Conf).
|
||||
|
||||
-spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) -> term().
|
||||
-spec get_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path()) -> term().
|
||||
get_listener_conf(Type, Listener, KeyPath) ->
|
||||
?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
|
||||
|
||||
-spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> term().
|
||||
-spec get_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path(), term()) -> term().
|
||||
get_listener_conf(Type, Listener, KeyPath, Default) ->
|
||||
?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Default).
|
||||
|
||||
-spec put_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
-spec put_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path(), term()) -> ok.
|
||||
put_listener_conf(Type, Listener, KeyPath, Conf) ->
|
||||
?MODULE:put(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Conf).
|
||||
|
||||
-spec find_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) ->
|
||||
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
|
||||
-spec find_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path()) ->
|
||||
{ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
|
||||
find_listener_conf(Type, Listener, KeyPath) ->
|
||||
find(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
|
||||
|
||||
|
@@ -243,20 +243,20 @@ erase(RootName) ->
persistent_term:erase(?PERSIS_KEY(?RAW_CONF, bin(RootName))),
|
||||
ok.
|
||||
|
||||
-spec put(emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
-spec put(emqx_utils_maps:config_key_path(), term()) -> ok.
|
||||
put(KeyPath, Config) ->
|
||||
Putter = fun(Path, Map, Value) ->
|
||||
emqx_map_lib:deep_put(Path, Map, Value)
|
||||
emqx_utils_maps:deep_put(Path, Map, Value)
|
||||
end,
|
||||
do_put(?CONF, Putter, KeyPath, Config).
|
||||
|
||||
%% Puts value into configuration even if path doesn't exist
|
||||
%% For paths of non-existing atoms use force_put(KeyPath, Config, unsafe)
|
||||
-spec force_put(emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
-spec force_put(emqx_utils_maps:config_key_path(), term()) -> ok.
|
||||
force_put(KeyPath, Config) ->
|
||||
force_put(KeyPath, Config, safe).
|
||||
|
||||
-spec force_put(emqx_map_lib:config_key_path(), term(), safe | unsafe) -> ok.
|
||||
-spec force_put(emqx_utils_maps:config_key_path(), term(), safe | unsafe) -> ok.
|
||||
force_put(KeyPath0, Config, Safety) ->
|
||||
KeyPath =
|
||||
case Safety of
|
||||
|
@@ -264,19 +264,19 @@ force_put(KeyPath0, Config, Safety) ->
unsafe -> [unsafe_atom(Key) || Key <- KeyPath0]
|
||||
end,
|
||||
Putter = fun(Path, Map, Value) ->
|
||||
emqx_map_lib:deep_force_put(Path, Map, Value)
|
||||
emqx_utils_maps:deep_force_put(Path, Map, Value)
|
||||
end,
|
||||
do_put(?CONF, Putter, KeyPath, Config).
|
||||
|
||||
-spec get_default_value(emqx_map_lib:config_key_path()) -> {ok, term()} | {error, term()}.
|
||||
-spec get_default_value(emqx_utils_maps:config_key_path()) -> {ok, term()} | {error, term()}.
|
||||
get_default_value([RootName | _] = KeyPath) ->
|
||||
BinKeyPath = [bin(Key) || Key <- KeyPath],
|
||||
case find_raw([RootName]) of
|
||||
{ok, RawConf} ->
|
||||
RawConf1 = emqx_map_lib:deep_remove(BinKeyPath, #{bin(RootName) => RawConf}),
|
||||
RawConf1 = emqx_utils_maps:deep_remove(BinKeyPath, #{bin(RootName) => RawConf}),
|
||||
try fill_defaults(get_schema_mod(RootName), RawConf1, #{}) of
|
||||
FullConf ->
|
||||
case emqx_map_lib:deep_find(BinKeyPath, FullConf) of
|
||||
case emqx_utils_maps:deep_find(BinKeyPath, FullConf) of
|
||||
{not_found, _, _} -> {error, no_default_value};
|
||||
{ok, Val} -> {ok, Val}
|
||||
end
|
||||
|
@@ -287,10 +287,10 @@ get_default_value([RootName | _] = KeyPath) ->
{error, {rootname_not_found, RootName}}
|
||||
end.
|
||||
|
||||
-spec get_raw(emqx_map_lib:config_key_path()) -> term().
|
||||
-spec get_raw(emqx_utils_maps:config_key_path()) -> term().
|
||||
get_raw(KeyPath) -> do_get_raw(KeyPath).
|
||||
|
||||
-spec get_raw(emqx_map_lib:config_key_path(), term()) -> term().
|
||||
-spec get_raw(emqx_utils_maps:config_key_path(), term()) -> term().
|
||||
get_raw(KeyPath, Default) -> do_get_raw(KeyPath, Default).
|
||||
|
||||
-spec put_raw(map()) -> ok.
|
||||
|
@@ -303,10 +303,10 @@ put_raw(Config) ->
hocon_maps:ensure_plain(Config)
|
||||
).
|
||||
|
||||
-spec put_raw(emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
-spec put_raw(emqx_utils_maps:config_key_path(), term()) -> ok.
|
||||
put_raw(KeyPath, Config) ->
|
||||
Putter = fun(Path, Map, Value) ->
|
||||
emqx_map_lib:deep_force_put(Path, Map, Value)
|
||||
emqx_utils_maps:deep_force_put(Path, Map, Value)
|
||||
end,
|
||||
do_put(?RAW_CONF, Putter, KeyPath, Config).
|
||||
|
||||
|
@@ -455,7 +455,7 @@ do_check_config(SchemaMod, RawConf, Opts0) ->
Opts = maps:merge(Opts0, Opts1),
|
||||
{AppEnvs, CheckedConf} =
|
||||
hocon_tconf:map_translate(SchemaMod, RawConf, Opts),
|
||||
{AppEnvs, emqx_map_lib:unsafe_atom_key_map(CheckedConf)}.
|
||||
{AppEnvs, emqx_utils_maps:unsafe_atom_key_map(CheckedConf)}.
|
||||
|
||||
fill_defaults(RawConf) ->
|
||||
fill_defaults(RawConf, #{}).
|
||||
|
@@ -709,11 +709,11 @@ do_put(Type, Putter, [RootName | KeyPath], DeepValue) ->
do_deep_get(?CONF, KeyPath, Map, Default) ->
|
||||
atom_conf_path(
|
||||
KeyPath,
|
||||
fun(AtomKeyPath) -> emqx_map_lib:deep_get(AtomKeyPath, Map, Default) end,
|
||||
fun(AtomKeyPath) -> emqx_utils_maps:deep_get(AtomKeyPath, Map, Default) end,
|
||||
{return, Default}
|
||||
);
|
||||
do_deep_get(?RAW_CONF, KeyPath, Map, Default) ->
|
||||
emqx_map_lib:deep_get([bin(Key) || Key <- KeyPath], Map, Default).
|
||||
emqx_utils_maps:deep_get([bin(Key) || Key <- KeyPath], Map, Default).
|
||||
|
||||
do_deep_put(?CONF, Putter, KeyPath, Map, Value) ->
|
||||
atom_conf_path(
|
||||
|
|
|
@@ -229,7 +229,7 @@ process_update_request([_], _Handlers, {remove, _Opts}) ->
process_update_request(ConfKeyPath, _Handlers, {remove, Opts}) ->
|
||||
OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
|
||||
BinKeyPath = bin_path(ConfKeyPath),
|
||||
NewRawConf = emqx_map_lib:deep_remove(BinKeyPath, OldRawConf),
|
||||
NewRawConf = emqx_utils_maps:deep_remove(BinKeyPath, OldRawConf),
|
||||
OverrideConf = remove_from_override_config(BinKeyPath, Opts),
|
||||
{ok, NewRawConf, OverrideConf, Opts};
|
||||
process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
|
||||
|
@@ -435,7 +435,7 @@ remove_from_override_config(_BinKeyPath, #{persistent := false}) ->
undefined;
|
||||
remove_from_override_config(BinKeyPath, Opts) ->
|
||||
OldConf = emqx_config:read_override_conf(Opts),
|
||||
emqx_map_lib:deep_remove(BinKeyPath, OldConf).
|
||||
emqx_utils_maps:deep_remove(BinKeyPath, OldConf).
|
||||
|
||||
%% apply new config on top of override config
|
||||
merge_to_override_config(_RawConf, #{persistent := false}) ->
|
||||
|
@@ -457,7 +457,7 @@ return_change_result(_ConfKeyPath, {remove, _Opts}) ->
|
||||
return_rawconf(ConfKeyPath, #{rawconf_with_defaults := true}) ->
|
||||
FullRawConf = emqx_config:fill_defaults(emqx_config:get_raw([])),
|
||||
emqx_map_lib:deep_get(bin_path(ConfKeyPath), FullRawConf);
|
||||
emqx_utils_maps:deep_get(bin_path(ConfKeyPath), FullRawConf);
|
||||
return_rawconf(ConfKeyPath, _) ->
|
||||
emqx_config:get_raw(ConfKeyPath).
|
||||
|
||||
|
@@ -475,16 +475,16 @@ atom(Atom) when is_atom(Atom) ->
|
||||
-dialyzer({nowarn_function, do_remove_handler/2}).
|
||||
do_remove_handler(ConfKeyPath, Handlers) ->
|
||||
NewHandlers = emqx_map_lib:deep_remove(ConfKeyPath ++ [?MOD], Handlers),
|
||||
NewHandlers = emqx_utils_maps:deep_remove(ConfKeyPath ++ [?MOD], Handlers),
|
||||
remove_empty_leaf(ConfKeyPath, NewHandlers).
|
||||
|
||||
remove_empty_leaf([], Handlers) ->
|
||||
Handlers;
|
||||
remove_empty_leaf(KeyPath, Handlers) ->
|
||||
case emqx_map_lib:deep_find(KeyPath, Handlers) =:= {ok, #{}} of
|
||||
case emqx_utils_maps:deep_find(KeyPath, Handlers) =:= {ok, #{}} of
|
||||
%% empty leaf
|
||||
true ->
|
||||
Handlers1 = emqx_map_lib:deep_remove(KeyPath, Handlers),
|
||||
Handlers1 = emqx_utils_maps:deep_remove(KeyPath, Handlers),
|
||||
SubKeyPath = lists:sublist(KeyPath, length(KeyPath) - 1),
|
||||
remove_empty_leaf(SubKeyPath, Handlers1);
|
||||
false ->
|
||||
|
@@ -501,7 +501,7 @@ assert_callback_function(Mod) ->
end,
|
||||
ok.
|
||||
|
||||
-spec schema(module(), emqx_map_lib:config_key_path()) -> hocon_schema:schema().
|
||||
-spec schema(module(), emqx_utils_maps:config_key_path()) -> hocon_schema:schema().
|
||||
schema(SchemaModule, [RootKey | _]) ->
|
||||
Roots = hocon_schema:roots(SchemaModule),
|
||||
{Field, Translations} =
|
||||
|
|
|
@@ -77,7 +77,7 @@
-export([set_field/3]).
|
||||
|
||||
-import(
|
||||
emqx_misc,
|
||||
emqx_utils,
|
||||
[start_timer/2]
|
||||
).
|
||||
|
||||
|
@@ -260,7 +260,7 @@ stats(#state{
{error, _} -> []
|
||||
end,
|
||||
ChanStats = emqx_channel:stats(Channel),
|
||||
ProcStats = emqx_misc:proc_stats(),
|
||||
ProcStats = emqx_utils:proc_stats(),
|
||||
lists:append([SockStats, ChanStats, ProcStats]).
|
||||
|
||||
%% @doc Set TCP keepalive socket options to override system defaults.
|
||||
|
@@ -392,7 +392,7 @@ run_loop(
emqx_channel:info(zone, Channel),
|
||||
[force_shutdown]
|
||||
),
|
||||
emqx_misc:tune_heap_size(ShutdownPolicy),
|
||||
emqx_utils:tune_heap_size(ShutdownPolicy),
|
||||
case activate_socket(State) of
|
||||
{ok, NState} ->
|
||||
hibernate(Parent, NState);
|
||||
|
@@ -472,7 +472,7 @@ ensure_stats_timer(_Timeout, State) ->
-compile({inline, [cancel_stats_timer/1]}).
|
||||
cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) ->
|
||||
?tp(debug, cancel_stats_timer, #{}),
|
||||
ok = emqx_misc:cancel_timer(TRef),
|
||||
ok = emqx_utils:cancel_timer(TRef),
|
||||
State#state{stats_timer = undefined};
|
||||
cancel_stats_timer(State) ->
|
||||
State.
|
||||
|
@@ -558,7 +558,7 @@ handle_msg(
{incoming, Packet = ?CONNECT_PACKET(ConnPkt)},
|
||||
State = #state{idle_timer = IdleTimer}
|
||||
) ->
|
||||
ok = emqx_misc:cancel_timer(IdleTimer),
|
||||
ok = emqx_utils:cancel_timer(IdleTimer),
|
||||
Serialize = emqx_frame:serialize_opts(ConnPkt),
|
||||
NState = State#state{
|
||||
serialize = Serialize,
|
||||
|
@@ -593,7 +593,7 @@ handle_msg(
#state{listener = {Type, Listener}} = State
|
||||
) ->
|
||||
ActiveN = get_active_n(Type, Listener),
|
||||
Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)],
|
||||
Delivers = [Deliver | emqx_utils:drain_deliver(ActiveN)],
|
||||
with_channel(handle_deliver, [Delivers], State);
|
||||
%% Something sent
|
||||
handle_msg({inet_reply, _Sock, ok}, State = #state{listener = {Type, Listener}}) ->
|
||||
|
@@ -1073,7 +1073,7 @@ check_oom(State = #state{channel = Channel}) ->
emqx_channel:info(zone, Channel), [force_shutdown]
|
||||
),
|
||||
?tp(debug, check_oom, #{policy => ShutdownPolicy}),
|
||||
case emqx_misc:check_oom(ShutdownPolicy) of
|
||||
case emqx_utils:check_oom(ShutdownPolicy) of
|
||||
{shutdown, Reason} ->
|
||||
%% triggers terminate/2 callback immediately
|
||||
erlang:exit({shutdown, Reason});
|
||||
|
@@ -1200,7 +1200,7 @@ inc_counter(Key, Inc) ->
%%--------------------------------------------------------------------
|
||||
|
||||
set_field(Name, Value, State) ->
|
||||
Pos = emqx_misc:index_of(Name, record_info(fields, state)),
|
||||
Pos = emqx_utils:index_of(Name, record_info(fields, state)),
|
||||
setelement(Pos + 1, State, Value).
|
||||
|
||||
get_state(Pid) ->
|
||||
|
|
|
@@ -117,7 +117,7 @@ handle_call(Call, _From, State) ->
handle_cast({evict, URL}, State0 = #state{refresh_timers = RefreshTimers0}) ->
|
||||
emqx_ssl_crl_cache:delete(URL),
|
||||
MTimer = maps:get(URL, RefreshTimers0, undefined),
|
||||
emqx_misc:cancel_timer(MTimer),
|
||||
emqx_utils:cancel_timer(MTimer),
|
||||
RefreshTimers = maps:without([URL], RefreshTimers0),
|
||||
State = State0#state{refresh_timers = RefreshTimers},
|
||||
?tp(
|
||||
|
@@ -223,9 +223,9 @@ ensure_timer(URL, State = #state{refresh_interval = Timeout}) ->
ensure_timer(URL, State = #state{refresh_timers = RefreshTimers0}, Timeout) ->
|
||||
?tp(crl_cache_ensure_timer, #{url => URL, timeout => Timeout}),
|
||||
MTimer = maps:get(URL, RefreshTimers0, undefined),
|
||||
emqx_misc:cancel_timer(MTimer),
|
||||
emqx_utils:cancel_timer(MTimer),
|
||||
RefreshTimers = RefreshTimers0#{
|
||||
URL => emqx_misc:start_timer(
|
||||
URL => emqx_utils:start_timer(
|
||||
Timeout,
|
||||
{refresh, URL}
|
||||
)
|
||||
|
@@ -297,7 +297,7 @@ handle_cache_overflow(State0) ->
{_Time, OldestURL, InsertionTimes} = gb_trees:take_smallest(InsertionTimes0),
|
||||
emqx_ssl_crl_cache:delete(OldestURL),
|
||||
MTimer = maps:get(OldestURL, RefreshTimers0, undefined),
|
||||
emqx_misc:cancel_timer(MTimer),
|
||||
emqx_utils:cancel_timer(MTimer),
|
||||
RefreshTimers = maps:remove(OldestURL, RefreshTimers0),
|
||||
CachedURLs = sets:del_element(OldestURL, CachedURLs0),
|
||||
?tp(debug, crl_cache_overflow, #{oldest_url => OldestURL}),
|
||||
|
|
|
@@ -99,7 +99,7 @@ now_diff(TS) -> erlang:system_time(millisecond) - TS.
%%--------------------------------------------------------------------
|
||||
|
||||
init([]) ->
|
||||
ok = emqx_tables:new(?FLAPPING_TAB, [
|
||||
ok = emqx_utils_ets:new(?FLAPPING_TAB, [
|
||||
public,
|
||||
set,
|
||||
{keypos, #flapping.clientid},
|
||||
|
@@ -184,7 +184,7 @@ code_change(_OldVsn, State, _Extra) ->
|
||||
start_timer(Zone) ->
|
||||
WindTime = maps:get(window_time, get_policy(Zone)),
|
||||
emqx_misc:start_timer(WindTime, {garbage_collect, Zone}).
|
||||
emqx_utils:start_timer(WindTime, {garbage_collect, Zone}).
|
||||
|
||||
start_timers() ->
|
||||
lists:foreach(
|
||||
|
|
|
@@ -145,10 +145,10 @@ npid() ->
NPid.
|
||||
|
||||
to_hexstr(I) when byte_size(I) =:= 16 ->
|
||||
emqx_misc:bin_to_hexstr(I, upper).
|
||||
emqx_utils:bin_to_hexstr(I, upper).
|
||||
|
||||
from_hexstr(S) when byte_size(S) =:= 32 ->
|
||||
emqx_misc:hexstr_to_bin(S).
|
||||
emqx_utils:hexstr_to_bin(S).
|
||||
|
||||
to_base62(<<I:128>>) ->
|
||||
emqx_base62:encode(I).
|
||||
|
|
|
@@ -229,7 +229,7 @@ lookup(HookPoint) ->
%%--------------------------------------------------------------------
|
||||
|
||||
init([]) ->
|
||||
ok = emqx_tables:new(?TAB, [{keypos, #hook.name}, {read_concurrency, true}]),
|
||||
ok = emqx_utils_ets:new(?TAB, [{keypos, #hook.name}, {read_concurrency, true}]),
|
||||
{ok, #{}}.
|
||||
|
||||
handle_call({add, HookPoint, Callback = #callback{action = {M, F, _}}}, _From, State) ->
|
||||
|
|
|
@@ -375,7 +375,7 @@ return_pause(infinity, PauseType, Fun, Diff, Limiter) ->
{PauseType, ?MINIMUM_PAUSE, make_retry_context(Fun, Diff), Limiter};
|
||||
return_pause(Rate, PauseType, Fun, Diff, Limiter) ->
|
||||
Val = erlang:round(Diff * emqx_limiter_schema:default_period() / Rate),
|
||||
Pause = emqx_misc:clamp(Val, ?MINIMUM_PAUSE, ?MAXIMUM_PAUSE),
|
||||
Pause = emqx_utils:clamp(Val, ?MINIMUM_PAUSE, ?MAXIMUM_PAUSE),
|
||||
{PauseType, Pause, make_retry_context(Fun, Diff), Limiter}.
|
||||
|
||||
-spec make_retry_context(undefined | retry_fun(Limiter), non_neg_integer()) ->
|
||||
|
|
|
@@ -572,7 +572,7 @@ find_limiter_cfg(Type, #{rate := _} = Cfg) ->
find_limiter_cfg(Type, Cfg) ->
|
||||
{
|
||||
maps:get(Type, Cfg, undefined),
|
||||
find_client_cfg(Type, emqx_map_lib:deep_get([client, Type], Cfg, undefined))
|
||||
find_client_cfg(Type, emqx_utils_maps:deep_get([client, Type], Cfg, undefined))
|
||||
}.
|
||||
|
||||
find_client_cfg(Type, BucketCfg) ->
|
||||
|
|
|
@@ -427,12 +427,12 @@ pre_config_update([listeners, _Type, _Name], {create, _NewConf}, _RawConf) ->
pre_config_update([listeners, _Type, _Name], {update, _Request}, undefined) ->
|
||||
{error, not_found};
|
||||
pre_config_update([listeners, Type, Name], {update, Request}, RawConf) ->
|
||||
NewConfT = emqx_map_lib:deep_merge(RawConf, Request),
|
||||
NewConfT = emqx_utils_maps:deep_merge(RawConf, Request),
|
||||
NewConf = ensure_override_limiter_conf(NewConfT, Request),
|
||||
CertsDir = certs_dir(Type, Name),
|
||||
{ok, convert_certs(CertsDir, NewConf)};
|
||||
pre_config_update([listeners, _Type, _Name], {action, _Action, Updated}, RawConf) ->
|
||||
NewConf = emqx_map_lib:deep_merge(RawConf, Updated),
|
||||
NewConf = emqx_utils_maps:deep_merge(RawConf, Updated),
|
||||
{ok, NewConf};
|
||||
pre_config_update(_Path, _Request, RawConf) ->
|
||||
{ok, RawConf}.
|
||||
|
@@ -500,7 +500,7 @@ esockd_opts(ListenerId, Type, Opts0) ->
|
||||
ws_opts(Type, ListenerName, Opts) ->
|
||||
WsPaths = [
|
||||
{emqx_map_lib:deep_get([websocket, mqtt_path], Opts, "/mqtt"), emqx_ws_connection, #{
|
||||
{emqx_utils_maps:deep_get([websocket, mqtt_path], Opts, "/mqtt"), emqx_ws_connection, #{
|
||||
zone => zone(Opts),
|
||||
listener => {Type, ListenerName},
|
||||
limiter => limiter(Opts),
|
||||
|
@@ -538,7 +538,7 @@ esockd_access_rules(StrRules) ->
[A, CIDR] = string:tokens(S, " "),
|
||||
%% esockd rules only use words 'allow' and 'deny', both are existing
|
||||
%% comparison of strings may be better, but there is a loss of backward compatibility
|
||||
case emqx_misc:safe_to_existing_atom(A) of
|
||||
case emqx_utils:safe_to_existing_atom(A) of
|
||||
{ok, Action} ->
|
||||
[
|
||||
{
|
||||
|
@@ -560,7 +560,7 @@ esockd_access_rules(StrRules) ->
merge_default(Options) ->
|
||||
case lists:keytake(tcp_options, 1, Options) of
|
||||
{value, {tcp_options, TcpOpts}, Options1} ->
|
||||
[{tcp_options, emqx_misc:merge_opts(?MQTT_SOCKOPTS, TcpOpts)} | Options1];
|
||||
[{tcp_options, emqx_utils:merge_opts(?MQTT_SOCKOPTS, TcpOpts)} | Options1];
|
||||
false ->
|
||||
[{tcp_options, ?MQTT_SOCKOPTS} | Options]
|
||||
end.
|
||||
|
|
|
@@ -62,11 +62,11 @@
%% The JSON object is pretty-printed.
|
||||
%% NOTE: do not use this function for logging.
|
||||
best_effort_json(Input) ->
|
||||
best_effort_json(Input, [space, {indent, 4}]).
|
||||
best_effort_json(Input, [pretty, force_utf8]).
|
||||
best_effort_json(Input, Opts) ->
|
||||
Config = #{depth => unlimited, single_line => true},
|
||||
JsonReady = best_effort_json_obj(Input, Config),
|
||||
jsx:encode(JsonReady, Opts).
|
||||
emqx_utils_json:encode(JsonReady, Opts).
|
||||
|
||||
-spec format(logger:log_event(), config()) -> iodata().
|
||||
format(#{level := Level, msg := Msg, meta := Meta} = Event, Config0) when is_map(Config0) ->
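For reference, the practical effect of switching from jsx with [space, {indent, 4}] to the jiffy-backed emqx_utils_json:encode/2 with [pretty, force_utf8] shows up in the best_effort_json_test expectations updated further down in this commit; a minimal sketch (expected binaries copied from those test lines, whitespace as shown there):

    %% old (jsx with [space, {indent, 4}]): best_effort_json(#{key => []}) gave <<"{\n \"key\": []\n}">>
    %% new (jiffy-backed emqx_utils_json):  best_effort_json(#{key => []}) gives <<"{\n \"key\" : [\n \n ]\n}">>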
@@ -92,7 +92,7 @@ format(Msg, Meta, Config) ->
}
|
||||
end,
|
||||
Data = maps:without([report_cb], Data0),
|
||||
jiffy:encode(json_obj(Data, Config)).
|
||||
emqx_utils_json:encode(json_obj(Data, Config)).
|
||||
|
||||
maybe_format_msg({report, Report} = Msg, #{report_cb := Cb} = Meta, Config) ->
|
||||
case is_map(Report) andalso Cb =:= ?DEFAULT_FORMATTER of
|
||||
|
@@ -378,15 +378,15 @@ p_config() ->
|
||||
best_effort_json_test() ->
|
||||
?assertEqual(
|
||||
<<"{}">>,
|
||||
<<"{\n \n}">>,
|
||||
emqx_logger_jsonfmt:best_effort_json([])
|
||||
),
|
||||
?assertEqual(
|
||||
<<"{\n \"key\": []\n}">>,
|
||||
<<"{\n \"key\" : [\n \n ]\n}">>,
|
||||
emqx_logger_jsonfmt:best_effort_json(#{key => []})
|
||||
),
|
||||
?assertEqual(
|
||||
<<"[\n {\n \"key\": []\n }\n]">>,
|
||||
<<"[\n {\n \"key\" : [\n \n ]\n }\n]">>,
|
||||
emqx_logger_jsonfmt:best_effort_json([#{key => []}])
|
||||
),
|
||||
ok.
|
||||
|
|
|
@@ -541,7 +541,7 @@ init([]) ->
CRef = counters:new(?MAX_SIZE, [write_concurrency]),
|
||||
ok = persistent_term:put(?MODULE, CRef),
|
||||
% Create index mapping table
|
||||
ok = emqx_tables:new(?TAB, [{keypos, 2}, {read_concurrency, true}]),
|
||||
ok = emqx_utils_ets:new(?TAB, [{keypos, 2}, {read_concurrency, true}]),
|
||||
Metrics = lists:append([
|
||||
?BYTES_METRICS,
|
||||
?PACKET_METRICS,
|
||||
|
|
|
@@ -110,7 +110,7 @@ register_listener(ListenerID, Opts) ->
-spec inject_sni_fun(emqx_listeners:listener_id(), map()) -> map().
|
||||
inject_sni_fun(ListenerID, Conf0) ->
|
||||
SNIFun = emqx_const_v1:make_sni_fun(ListenerID),
|
||||
Conf = emqx_map_lib:deep_merge(Conf0, #{ssl_options => #{sni_fun => SNIFun}}),
|
||||
Conf = emqx_utils_maps:deep_merge(Conf0, #{ssl_options => #{sni_fun => SNIFun}}),
|
||||
ok = ?MODULE:register_listener(ListenerID, Conf),
|
||||
Conf.
|
||||
|
||||
|
@@ -120,7 +120,7 @@ inject_sni_fun(ListenerID, Conf0) ->
|
||||
init(_Args) ->
|
||||
logger:set_process_metadata(#{domain => [emqx, ocsp, cache]}),
|
||||
emqx_tables:new(?CACHE_TAB, [
|
||||
emqx_utils_ets:new(?CACHE_TAB, [
|
||||
named_table,
|
||||
public,
|
||||
{heir, whereis(emqx_kernel_sup), none},
|
||||
|
@@ -149,7 +149,7 @@ handle_call({register_listener, ListenerID, Conf}, _From, State0) ->
msg => "registering_ocsp_cache",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
RefreshInterval0 = emqx_map_lib:deep_get([ssl_options, ocsp, refresh_interval], Conf),
|
||||
RefreshInterval0 = emqx_utils_maps:deep_get([ssl_options, ocsp, refresh_interval], Conf),
|
||||
RefreshInterval = max(RefreshInterval0, ?MIN_REFRESH_INTERVAL),
|
||||
State = State0#{{refresh_interval, ListenerID} => RefreshInterval},
|
||||
%% we need to pass the config along because this might be called
|
||||
|
@@ -476,9 +476,9 @@ ensure_timer(ListenerID, State, Timeout) ->
ensure_timer(ListenerID, {refresh, ListenerID}, State, Timeout).
|
||||
|
||||
ensure_timer(ListenerID, Message, State, Timeout) ->
|
||||
emqx_misc:cancel_timer(maps:get(?REFRESH_TIMER(ListenerID), State, undefined)),
|
||||
emqx_utils:cancel_timer(maps:get(?REFRESH_TIMER(ListenerID), State, undefined)),
|
||||
State#{
|
||||
?REFRESH_TIMER(ListenerID) => emqx_misc:start_timer(
|
||||
?REFRESH_TIMER(ListenerID) => emqx_utils:start_timer(
|
||||
Timeout,
|
||||
Message
|
||||
)
|
||||
|
|
|
@@ -180,8 +180,8 @@ code_change(_OldVsn, State, _Extra) ->
%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
cancel_outdated_timer(#{mem_time_ref := MemRef, cpu_time_ref := CpuRef}) ->
|
||||
emqx_misc:cancel_timer(MemRef),
|
||||
emqx_misc:cancel_timer(CpuRef),
|
||||
emqx_utils:cancel_timer(MemRef),
|
||||
emqx_utils:cancel_timer(CpuRef),
|
||||
ok.
|
||||
|
||||
start_cpu_check_timer() ->
|
||||
|
@@ -204,7 +204,7 @@ start_mem_check_timer() ->
end.
|
||||
|
||||
start_timer(Interval, Msg) ->
|
||||
emqx_misc:start_timer(Interval, Msg).
|
||||
emqx_utils:start_timer(Interval, Msg).
|
||||
|
||||
update_mem_alarm_status(HWM) when HWM > 1.0 orelse HWM < 0.0 ->
|
||||
?SLOG(warning, #{msg => "discarded_out_of_range_mem_alarm_threshold", value => HWM}),
|
||||
|
|
|
@@ -57,7 +57,7 @@
-spec start_link(atom(), pos_integer()) -> startlink_ret().
|
||||
start_link(Pool, Id) ->
|
||||
gen_server:start_link(
|
||||
{local, emqx_misc:proc_name(?MODULE, Id)},
|
||||
{local, emqx_utils:proc_name(?MODULE, Id)},
|
||||
?MODULE,
|
||||
[Pool, Id],
|
||||
[{hibernate_after, 1000}]
|
||||
|
|
|
@@ -98,7 +98,7 @@ mnesia(boot) ->
-spec start_link(atom(), pos_integer()) -> startlink_ret().
|
||||
start_link(Pool, Id) ->
|
||||
gen_server:start_link(
|
||||
{local, emqx_misc:proc_name(?MODULE, Id)},
|
||||
{local, emqx_utils:proc_name(?MODULE, Id)},
|
||||
?MODULE,
|
||||
[Pool, Id],
|
||||
[{hibernate_after, 1000}]
|
||||
|
|
|
@@ -2329,7 +2329,7 @@ mqtt_ssl_listener_ssl_options_validator(Conf) ->
fun ocsp_outer_validator/1,
|
||||
fun crl_outer_validator/1
|
||||
],
|
||||
case emqx_misc:pipeline(Checks, Conf, not_used) of
|
||||
case emqx_utils:pipeline(Checks, Conf, not_used) of
|
||||
{ok, _, _} ->
|
||||
ok;
|
||||
{error, Reason, _NotUsed} ->
|
||||
|
@@ -2350,7 +2350,7 @@ ocsp_outer_validator(_Conf) ->
ok.
|
||||
|
||||
ocsp_inner_validator(#{enable_ocsp_stapling := _} = Conf) ->
|
||||
ocsp_inner_validator(emqx_map_lib:binary_key_map(Conf));
|
||||
ocsp_inner_validator(emqx_utils_maps:binary_key_map(Conf));
|
||||
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := false} = _Conf) ->
|
||||
ok;
|
||||
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := true} = Conf) ->
|
||||
|
@@ -2585,7 +2585,7 @@ to_url(Str) ->
end.
|
||||
|
||||
to_json_binary(Str) ->
|
||||
case emqx_json:safe_decode(Str) of
|
||||
case emqx_utils_json:safe_decode(Str) of
|
||||
{ok, _} ->
|
||||
{ok, iolist_to_binary(Str)};
|
||||
Error ->
|
||||
|
|
|
@@ -39,7 +39,7 @@
%% @doc Create a sequence.
|
||||
-spec create(name()) -> ok.
|
||||
create(Name) ->
|
||||
emqx_tables:new(Name, [public, set, {write_concurrency, true}]).
|
||||
emqx_utils_ets:new(Name, [public, set, {write_concurrency, true}]).
|
||||
|
||||
%% @doc Next value of the sequence.
|
||||
-spec nextval(name(), key()) -> seqid().
|
||||
|
|
|
@@ -941,7 +941,7 @@ age(Now, Ts) -> Now - Ts.
%%--------------------------------------------------------------------
|
||||
|
||||
set_field(Name, Value, Session) ->
|
||||
Pos = emqx_misc:index_of(Name, record_info(fields, session)),
|
||||
Pos = emqx_utils:index_of(Name, record_info(fields, session)),
|
||||
setelement(Pos + 1, Session, Value).
|
||||
|
||||
get_mqueue(#session{mqueue = Q}) ->
|
||||
|
|
|
@@ -95,7 +95,7 @@ create_table(Tab, Storage) ->
%%--------------------------------------------------------------------
|
||||
|
||||
create_init_tab() ->
|
||||
emqx_tables:new(?SESSION_INIT_TAB, [
|
||||
emqx_utils_ets:new(?SESSION_INIT_TAB, [
|
||||
public,
|
||||
{read_concurrency, true},
|
||||
{write_concurrency, true}
|
||||
|
@@ -104,7 +104,7 @@ create_init_tab() ->
-spec start_link(atom(), pos_integer()) -> startlink_ret().
|
||||
start_link(Pool, Id) ->
|
||||
gen_server:start_link(
|
||||
{local, emqx_misc:proc_name(?MODULE, Id)},
|
||||
{local, emqx_utils:proc_name(?MODULE, Id)},
|
||||
?MODULE,
|
||||
[Pool, Id],
|
||||
[{hibernate_after, 1000}]
|
||||
|
@@ -182,7 +182,7 @@ pending(SessionID, MarkerIDs) ->
call(pick(SessionID), {pending, SessionID, MarkerIDs}).
|
||||
|
||||
buffer(SessionID, STopic, Msg) ->
|
||||
case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of
|
||||
case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
|
||||
undefined -> ok;
|
||||
Worker -> emqx_session_router_worker:buffer(Worker, STopic, Msg)
|
||||
end.
|
||||
|
@@ -194,7 +194,7 @@ resume_begin(From, SessionID) when is_pid(From), is_binary(SessionID) ->
-spec resume_end(pid(), binary()) ->
|
||||
{'ok', [emqx_types:message()]} | {'error', term()}.
|
||||
resume_end(From, SessionID) when is_pid(From), is_binary(SessionID) ->
|
||||
case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of
|
||||
case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
|
||||
undefined ->
|
||||
?tp(ps_session_not_found, #{sid => SessionID}),
|
||||
{error, not_found};
|
||||
|
@@ -249,7 +249,7 @@ handle_cast({delete_routes, SessionID, Subscriptions}, State) ->
ok = lists:foreach(Fun, maps:to_list(Subscriptions)),
|
||||
{noreply, State};
|
||||
handle_cast({resume_end, SessionID, Pid}, State) ->
|
||||
case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of
|
||||
case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
|
||||
undefined -> skip;
|
||||
P when P =:= Pid -> ets:delete(?SESSION_INIT_TAB, SessionID);
|
||||
P when is_pid(P) -> skip
|
||||
|
@@ -283,7 +283,7 @@ init_resume_worker(RemotePid, SessionID, #{pmon := Pmon} = State) ->
error;
|
||||
{ok, Pid} ->
|
||||
Pmon1 = emqx_pmon:monitor(Pid, Pmon),
|
||||
case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of
|
||||
case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
|
||||
undefined ->
|
||||
{ok, Pid, State#{pmon => Pmon1}};
|
||||
{_, OldPid} ->
|
||||
|
|
|
@@ -399,9 +399,11 @@ init([]) ->
ok = mria:wait_for_tables([?TAB]),
|
||||
{ok, _} = mnesia:subscribe({table, ?TAB, simple}),
|
||||
{atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun ?MODULE:init_monitors/0),
|
||||
ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]),
|
||||
ok = emqx_tables:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]),
|
||||
ok = emqx_tables:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [public, set, {write_concurrency, true}]),
|
||||
ok = emqx_utils_ets:new(?SHARED_SUBS, [protected, bag]),
|
||||
ok = emqx_utils_ets:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]),
|
||||
ok = emqx_utils_ets:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [
|
||||
public, set, {write_concurrency, true}
|
||||
]),
|
||||
{ok, update_stats(#state{pmon = PMon})}.
|
||||
|
||||
init_monitors() ->
|
||||
|
|
|
@@ -201,7 +201,7 @@ cast(Msg) -> gen_server:cast(?SERVER, Msg).
%%--------------------------------------------------------------------
|
||||
|
||||
init(#{tick_ms := TickMs}) ->
|
||||
ok = emqx_tables:new(?TAB, [public, set, {write_concurrency, true}]),
|
||||
ok = emqx_utils_ets:new(?TAB, [public, set, {write_concurrency, true}]),
|
||||
Stats = lists:append([
|
||||
?CONNECTION_STATS,
|
||||
?CHANNEL_STATS,
|
||||
|
@@ -213,7 +213,7 @@ init(#{tick_ms := TickMs}) ->
{ok, start_timer(#state{updates = [], tick_ms = TickMs}), hibernate}.
|
||||
|
||||
start_timer(#state{tick_ms = Ms} = State) ->
|
||||
State#state{timer = emqx_misc:start_timer(Ms, tick)}.
|
||||
State#state{timer = emqx_utils:start_timer(Ms, tick)}.
|
||||
|
||||
handle_call(stop, _From, State) ->
|
||||
{stop, normal, ok, State};
|
||||
|
@@ -301,7 +301,7 @@ handle_info(Info, State) ->
{noreply, State}.
|
||||
|
||||
terminate(_Reason, #state{timer = TRef}) ->
|
||||
emqx_misc:cancel_timer(TRef).
|
||||
emqx_utils:cancel_timer(TRef).
|
||||
|
||||
code_change(_OldVsn, State, _Extra) ->
|
||||
{ok, State}.
|
||||
|
|
|
@@ -62,7 +62,7 @@
-endif.
|
||||
|
||||
-import(emqx_topic, [systop/1]).
|
||||
-import(emqx_misc, [start_timer/2]).
|
||||
-import(emqx_utils, [start_timer/2]).
|
||||
|
||||
-record(state, {
|
||||
heartbeat :: maybe(reference()),
|
||||
|
@@ -222,7 +222,7 @@ handle_info(Info, State) ->
terminate(_Reason, #state{heartbeat = TRef1, ticker = TRef2}) ->
|
||||
_ = emqx_config_handler:remove_handler(?CONF_KEY_PATH),
|
||||
unload_event_hooks(sys_event_messages()),
|
||||
lists:foreach(fun emqx_misc:cancel_timer/1, [TRef1, TRef2]).
|
||||
lists:foreach(fun emqx_utils:cancel_timer/1, [TRef1, TRef2]).
|
||||
|
||||
unload_event_hooks([]) ->
|
||||
ok;
|
||||
|
@@ -348,7 +348,7 @@ publish(Event, Payload) when
Event == unsubscribed
|
||||
->
|
||||
Topic = event_topic(Event, Payload),
|
||||
safe_publish(Topic, emqx_json:encode(Payload)).
|
||||
safe_publish(Topic, emqx_utils_json:encode(Payload)).
|
||||
|
||||
metric_topic(Name) ->
|
||||
translate_topic("metrics/", Name).
|
||||
|
|
|
@@ -77,7 +77,7 @@ init([]) ->
{ok, start_timer(#{timer => undefined, events => []})}.
|
||||
|
||||
start_timer(State) ->
|
||||
State#{timer := emqx_misc:start_timer(timer:seconds(2), reset)}.
|
||||
State#{timer := emqx_utils:start_timer(timer:seconds(2), reset)}.
|
||||
|
||||
sysm_opts(VM) ->
|
||||
sysm_opts(maps:to_list(VM), []).
|
||||
|
@@ -204,7 +204,7 @@ handle_info(Info, State) ->
{noreply, State}.
|
||||
|
||||
terminate(_Reason, #{timer := TRef}) ->
|
||||
emqx_misc:cancel_timer(TRef),
|
||||
emqx_utils:cancel_timer(TRef),
|
||||
ok.
|
||||
|
||||
code_change(_OldVsn, State, _Extra) ->
|
||||
|
|
|
@@ -317,7 +317,9 @@ ensure_ssl_files(Dir, SSL, Opts) ->
ensure_ssl_files(_Dir, SSL, [], _Opts) ->
|
||||
{ok, SSL};
|
||||
ensure_ssl_files(Dir, SSL, [KeyPath | KeyPaths], Opts) ->
|
||||
case ensure_ssl_file(Dir, KeyPath, SSL, emqx_map_lib:deep_get(KeyPath, SSL, undefined), Opts) of
|
||||
case
|
||||
ensure_ssl_file(Dir, KeyPath, SSL, emqx_utils_maps:deep_get(KeyPath, SSL, undefined), Opts)
|
||||
of
|
||||
{ok, NewSSL} ->
|
||||
ensure_ssl_files(Dir, NewSSL, KeyPaths, Opts);
|
||||
{error, Reason} ->
|
||||
|
@@ -332,7 +334,7 @@ delete_ssl_files(Dir, NewOpts0, OldOpts0) ->
{ok, OldOpts} = ensure_ssl_files(Dir, OldOpts0, #{dry_run => DryRun}),
|
||||
Get = fun
|
||||
(_KP, undefined) -> undefined;
|
||||
(KP, Opts) -> emqx_map_lib:deep_get(KP, Opts, undefined)
|
||||
(KP, Opts) -> emqx_utils_maps:deep_get(KP, Opts, undefined)
|
||||
end,
|
||||
lists:foreach(
|
||||
fun(KeyPath) -> delete_old_file(Get(KeyPath, NewOpts), Get(KeyPath, OldOpts)) end,
|
||||
|
@@ -372,7 +374,7 @@ do_ensure_ssl_file(Dir, KeyPath, SSL, MaybePem, DryRun) ->
true ->
|
||||
case save_pem_file(Dir, KeyPath, MaybePem, DryRun) of
|
||||
{ok, Path} ->
|
||||
NewSSL = emqx_map_lib:deep_put(KeyPath, SSL, Path),
|
||||
NewSSL = emqx_utils_maps:deep_put(KeyPath, SSL, Path),
|
||||
{ok, NewSSL};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
|
@@ -482,9 +484,9 @@ is_valid_pem_file(Path) ->
%% so they are forced to upload a cert file, or use an existing file path.
|
||||
-spec drop_invalid_certs(map()) -> map().
|
||||
drop_invalid_certs(#{enable := False} = SSL) when ?IS_FALSE(False) ->
|
||||
lists:foldl(fun emqx_map_lib:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS_A);
|
||||
lists:foldl(fun emqx_utils_maps:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS_A);
|
||||
drop_invalid_certs(#{<<"enable">> := False} = SSL) when ?IS_FALSE(False) ->
|
||||
lists:foldl(fun emqx_map_lib:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS);
|
||||
lists:foldl(fun emqx_utils_maps:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS);
|
||||
drop_invalid_certs(#{enable := True} = SSL) when ?IS_TRUE(True) ->
|
||||
do_drop_invalid_certs(?SSL_FILE_OPT_PATHS_A, SSL);
|
||||
drop_invalid_certs(#{<<"enable">> := True} = SSL) when ?IS_TRUE(True) ->
|
||||
|
@@ -493,7 +495,7 @@ drop_invalid_certs(#{<<"enable">> := True} = SSL) when ?IS_TRUE(True) ->
do_drop_invalid_certs([], SSL) ->
|
||||
SSL;
|
||||
do_drop_invalid_certs([KeyPath | KeyPaths], SSL) ->
|
||||
case emqx_map_lib:deep_get(KeyPath, SSL, undefined) of
|
||||
case emqx_utils_maps:deep_get(KeyPath, SSL, undefined) of
|
||||
undefined ->
|
||||
do_drop_invalid_certs(KeyPaths, SSL);
|
||||
PemOrPath ->
|
||||
|
@@ -501,7 +503,7 @@ do_drop_invalid_certs([KeyPath | KeyPaths], SSL) ->
true ->
|
||||
do_drop_invalid_certs(KeyPaths, SSL);
|
||||
{error, _} ->
|
||||
do_drop_invalid_certs(KeyPaths, emqx_map_lib:deep_remove(KeyPath, SSL))
|
||||
do_drop_invalid_certs(KeyPaths, emqx_utils_maps:deep_remove(KeyPath, SSL))
|
||||
end
|
||||
end.
|
||||
|
||||
|
@@ -586,7 +588,9 @@ ensure_ssl_file_key(_SSL, []) ->
ok;
|
||||
ensure_ssl_file_key(SSL, RequiredKeyPaths) ->
|
||||
NotFoundRef = make_ref(),
|
||||
Filter = fun(KeyPath) -> NotFoundRef =:= emqx_map_lib:deep_get(KeyPath, SSL, NotFoundRef) end,
|
||||
Filter = fun(KeyPath) ->
|
||||
NotFoundRef =:= emqx_utils_maps:deep_get(KeyPath, SSL, NotFoundRef)
|
||||
end,
|
||||
case lists:filter(Filter, RequiredKeyPaths) of
|
||||
[] -> ok;
|
||||
Miss -> {error, #{reason => ssl_file_option_not_found, which_options => Miss}}
|
||||
|
|
|
@@ -272,7 +272,7 @@ handle_info({timeout, TRef, update_trace}, #{timer := TRef} = State) ->
?tp(update_trace_done, #{}),
|
||||
{noreply, State#{timer => NextTRef}};
|
||||
handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) ->
|
||||
emqx_misc:cancel_timer(TRef),
|
||||
emqx_utils:cancel_timer(TRef),
|
||||
handle_info({timeout, TRef, update_trace}, State);
|
||||
handle_info(Info, State) ->
|
||||
?SLOG(error, #{unexpected_info => Info}),
|
||||
|
@@ -280,7 +280,7 @@ handle_info(Info, State) ->
|
||||
terminate(_Reason, #{timer := TRef}) ->
|
||||
_ = mnesia:unsubscribe({table, ?TRACE, simple}),
|
||||
emqx_misc:cancel_timer(TRef),
|
||||
emqx_utils:cancel_timer(TRef),
|
||||
stop_all_trace_handler(),
|
||||
update_trace_handler(),
|
||||
_ = file:del_dir_r(zip_dir()),
|
||||
|
@@ -302,7 +302,7 @@ update_trace(Traces) ->
ok = stop_trace(NeedStop, Started),
|
||||
clean_stale_trace_files(),
|
||||
NextTime = find_closest_time(Traces, Now),
|
||||
emqx_misc:start_timer(NextTime, update_trace).
|
||||
emqx_utils:start_timer(NextTime, update_trace).
|
||||
|
||||
stop_all_trace_handler() ->
|
||||
lists:foreach(
|
||||
|
|
|
@@ -196,7 +196,7 @@ handler_id(Name, Type) ->
do_handler_id(Name, Type)
|
||||
catch
|
||||
_:_ ->
|
||||
Hash = emqx_misc:bin_to_hexstr(crypto:hash(md5, Name), lower),
|
||||
Hash = emqx_utils:bin_to_hexstr(crypto:hash(md5, Name), lower),
|
||||
do_handler_id(Hash, Type)
|
||||
end.
|
||||
|
||||
|
|
|
@@ -107,7 +107,7 @@ code_change(_OldVsn, State, _Extra) ->
|
||||
start_check_timer() ->
|
||||
Interval = emqx:get_config([sysmon, vm, process_check_interval]),
|
||||
emqx_misc:start_timer(Interval, check).
|
||||
emqx_utils:start_timer(Interval, check).
|
||||
|
||||
usage(Percent) ->
|
||||
integer_to_list(floor(Percent * 100)) ++ "%".
|
||||
|
|
|
@@ -52,7 +52,7 @@
-export([set_field/3]).
|
||||
|
||||
-import(
|
||||
emqx_misc,
|
||||
emqx_utils,
|
||||
[
|
||||
maybe_apply/2,
|
||||
start_timer/2
|
||||
|
@@ -172,7 +172,7 @@ stats(WsPid) when is_pid(WsPid) ->
stats(#state{channel = Channel}) ->
|
||||
SockStats = emqx_pd:get_counters(?SOCK_STATS),
|
||||
ChanStats = emqx_channel:stats(Channel),
|
||||
ProcStats = emqx_misc:proc_stats(),
|
||||
ProcStats = emqx_utils:proc_stats(),
|
||||
lists:append([SockStats, ChanStats, ProcStats]).
|
||||
|
||||
%% kick|discard|takeover
|
||||
|
@@ -340,7 +340,7 @@ tune_heap_size(Channel) ->
)
|
||||
of
|
||||
#{enable := false} -> ok;
|
||||
ShutdownPolicy -> emqx_misc:tune_heap_size(ShutdownPolicy)
|
||||
ShutdownPolicy -> emqx_utils:tune_heap_size(ShutdownPolicy)
|
||||
end.
|
||||
|
||||
get_stats_enable(Zone) ->
|
||||
|
@@ -454,7 +454,7 @@ websocket_info(
State = #state{listener = {Type, Listener}}
|
||||
) ->
|
||||
ActiveN = get_active_n(Type, Listener),
|
||||
Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)],
|
||||
Delivers = [Deliver | emqx_utils:drain_deliver(ActiveN)],
|
||||
with_channel(handle_deliver, [Delivers], State);
|
||||
websocket_info(
|
||||
{timeout, _, limit_timeout},
|
||||
|
@@ -678,7 +678,7 @@ check_oom(State = #state{channel = Channel}) ->
#{enable := false} ->
|
||||
State;
|
||||
#{enable := true} ->
|
||||
case emqx_misc:check_oom(ShutdownPolicy) of
|
||||
case emqx_utils:check_oom(ShutdownPolicy) of
|
||||
Shutdown = {shutdown, _Reason} ->
|
||||
postpone(Shutdown, State);
|
||||
_Other ->
|
||||
|
@@ -913,7 +913,7 @@ inc_qos_stats_key(_, _) -> undefined.
%% Cancel idle timer
|
||||
|
||||
cancel_idle_timer(State = #state{idle_timer = IdleTimer}) ->
|
||||
ok = emqx_misc:cancel_timer(IdleTimer),
|
||||
ok = emqx_utils:cancel_timer(IdleTimer),
|
||||
State#state{idle_timer = undefined}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
|
@@ -1046,7 +1046,7 @@ check_max_connection(Type, Listener) ->
%%--------------------------------------------------------------------
|
||||
|
||||
set_field(Name, Value, State) ->
|
||||
Pos = emqx_misc:index_of(Name, record_info(fields, state)),
|
||||
Pos = emqx_utils:index_of(Name, record_info(fields, state)),
|
||||
setelement(Pos + 1, State, Value).
|
||||
|
||||
%% ensure lowercase letters in headers
|
||||
|
|
|
@@ -482,7 +482,7 @@ copy_certs(_, _) ->
load_config(SchemaModule, Config, Opts) ->
|
||||
ConfigBin =
|
||||
case is_map(Config) of
|
||||
true -> jsx:encode(Config);
|
||||
true -> emqx_utils_json:encode(Config);
|
||||
false -> Config
|
||||
end,
|
||||
ok = emqx_config:delete_override_conf_files(),
|
||||
|
@@ -1041,7 +1041,7 @@ switch_proxy(Switch, Name, ProxyHost, ProxyPort) ->
off -> #{<<"enabled">> => false};
|
||||
on -> #{<<"enabled">> => true}
|
||||
end,
|
||||
BodyBin = emqx_json:encode(Body),
|
||||
BodyBin = emqx_utils_json:encode(Body),
|
||||
{ok, {{_, 200, _}, _, _}} = httpc:request(
|
||||
post,
|
||||
{Url, [], "application/json", BodyBin},
|
||||
|
@ -1061,7 +1061,7 @@ timeout_proxy(on, Name, ProxyHost, ProxyPort) ->
|
|||
<<"toxicity">> => 1.0,
|
||||
<<"attributes">> => #{<<"timeout">> => 0}
|
||||
},
|
||||
BodyBin = emqx_json:encode(Body),
|
||||
BodyBin = emqx_utils_json:encode(Body),
|
||||
{ok, {{_, 200, _}, _, _}} = httpc:request(
|
||||
post,
|
||||
{Url, [], "application/json", BodyBin},
|
||||
|
@ -1096,7 +1096,7 @@ latency_up_proxy(on, Name, ProxyHost, ProxyPort) ->
|
|||
<<"jitter">> => 3_000
|
||||
}
|
||||
},
|
||||
BodyBin = emqx_json:encode(Body),
|
||||
BodyBin = emqx_utils_json:encode(Body),
|
||||
{ok, {{_, 200, _}, _, _}} = httpc:request(
|
||||
post,
|
||||
{Url, [], "application/json", BodyBin},
|
||||
|
|
|
@ -54,7 +54,7 @@ request_api(Method, Url, QueryParams, Auth, Body, HttpOpts) ->
|
|||
[] ->
|
||||
{NewUrl, [Auth]};
|
||||
_ ->
|
||||
{NewUrl, [Auth], "application/json", emqx_json:encode(Body)}
|
||||
{NewUrl, [Auth], "application/json", emqx_utils_json:encode(Body)}
|
||||
end,
|
||||
do_request_api(Method, Request, HttpOpts).
|
||||
|
||||
|
@ -70,7 +70,7 @@ do_request_api(Method, Request, HttpOpts) ->
|
|||
end.
|
||||
|
||||
get_http_data(ResponseBody) ->
|
||||
emqx_json:decode(ResponseBody, [return_maps]).
|
||||
emqx_utils_json:decode(ResponseBody, [return_maps]).
|
||||
|
||||
auth_header(User, Pass) ->
|
||||
Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),
|
||||
|
|
|
@ -57,5 +57,5 @@ t_fill_default_values(_) ->
|
|||
WithDefaults
|
||||
),
|
||||
%% ensure JSON compatible
|
||||
_ = emqx_json:encode(WithDefaults),
|
||||
_ = emqx_utils_json:encode(WithDefaults),
|
||||
ok.
|
||||
|
|
|
@ -496,16 +496,16 @@ t_get_conn_info(_) ->
|
|||
|
||||
t_oom_shutdown(init, Config) ->
|
||||
ok = snabbkaffe:start_trace(),
|
||||
ok = meck:new(emqx_misc, [non_strict, passthrough, no_history, no_link]),
|
||||
ok = meck:new(emqx_utils, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(
|
||||
emqx_misc,
|
||||
emqx_utils,
|
||||
check_oom,
|
||||
fun(_) -> {shutdown, "fake_oom"} end
|
||||
),
|
||||
Config;
|
||||
t_oom_shutdown('end', _Config) ->
|
||||
snabbkaffe:stop(),
|
||||
meck:unload(emqx_misc),
|
||||
meck:unload(emqx_utils),
|
||||
ok.
|
||||
|
||||
t_oom_shutdown(_) ->
|
||||
|
|
|
@ -402,7 +402,7 @@ request(Method, Url, QueryParams, Body) ->
|
|||
Opts = #{return_all => true},
|
||||
case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of
|
||||
{ok, {Reason, Headers, BodyR}} ->
|
||||
{ok, {Reason, Headers, emqx_json:decode(BodyR, [return_maps])}};
|
||||
{ok, {Reason, Headers, emqx_utils_json:decode(BodyR, [return_maps])}};
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
@ -997,7 +997,7 @@ do_t_update_listener(Config) ->
|
|||
<<"enable_crl_check">> => true
|
||||
}
|
||||
},
|
||||
ListenerData1 = emqx_map_lib:deep_merge(ListenerData0, CRLConfig),
|
||||
ListenerData1 = emqx_utils_maps:deep_merge(ListenerData0, CRLConfig),
|
||||
{ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1),
|
||||
?assertMatch(
|
||||
#{
|
||||
|
@ -1040,7 +1040,7 @@ do_t_validations(_Config) ->
|
|||
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
|
||||
|
||||
ListenerData1 =
|
||||
emqx_map_lib:deep_merge(
|
||||
emqx_utils_maps:deep_merge(
|
||||
ListenerData0,
|
||||
#{
|
||||
<<"ssl_options">> =>
|
||||
|
@ -1052,7 +1052,7 @@ do_t_validations(_Config) ->
|
|||
),
|
||||
{error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1),
|
||||
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} =
|
||||
emqx_json:decode(ResRaw1, [return_maps]),
|
||||
emqx_utils_json:decode(ResRaw1, [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"mismatches">> :=
|
||||
|
@ -1064,7 +1064,7 @@ do_t_validations(_Config) ->
|
|||
}
|
||||
}
|
||||
},
|
||||
emqx_json:decode(MsgRaw1, [return_maps])
|
||||
emqx_utils_json:decode(MsgRaw1, [return_maps])
|
||||
),
|
||||
|
||||
ok.
|
||||
|
|
|
@ -143,7 +143,7 @@ init_per_testcase(t_ocsp_responder_error_responses, Config) ->
|
|||
}
|
||||
},
|
||||
Conf = #{listeners => #{Type => #{Name => ListenerOpts}}},
|
||||
ConfBin = emqx_map_lib:binary_key_map(Conf),
|
||||
ConfBin = emqx_utils_maps:binary_key_map(Conf),
|
||||
hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}),
|
||||
emqx_config:put_listener_conf(Type, Name, [], ListenerOpts),
|
||||
snabbkaffe:start_trace(),
|
||||
|
@ -184,7 +184,7 @@ init_per_testcase(_TestCase, Config) ->
|
|||
}
|
||||
},
|
||||
Conf = #{listeners => #{Type => #{Name => ListenerOpts}}},
|
||||
ConfBin = emqx_map_lib:binary_key_map(Conf),
|
||||
ConfBin = emqx_utils_maps:binary_key_map(Conf),
|
||||
hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}),
|
||||
emqx_config:put_listener_conf(Type, Name, [], ListenerOpts),
|
||||
snabbkaffe:start_trace(),
|
||||
|
@ -430,7 +430,7 @@ request(Method, Url, QueryParams, Body) ->
|
|||
Opts = #{return_all => true},
|
||||
case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of
|
||||
{ok, {Reason, Headers, BodyR}} ->
|
||||
{ok, {Reason, Headers, emqx_json:decode(BodyR, [return_maps])}};
|
||||
{ok, {Reason, Headers, emqx_utils_json:decode(BodyR, [return_maps])}};
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
@ -679,7 +679,7 @@ do_t_update_listener(Config) ->
|
|||
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
|
||||
?assertEqual(
|
||||
undefined,
|
||||
emqx_map_lib:deep_get([<<"ssl_options">>, <<"ocsp">>], ListenerData0, undefined)
|
||||
emqx_utils_maps:deep_get([<<"ssl_options">>, <<"ocsp">>], ListenerData0, undefined)
|
||||
),
|
||||
assert_no_http_get(),
|
||||
|
||||
|
@ -702,7 +702,7 @@ do_t_update_listener(Config) ->
|
|||
}
|
||||
}
|
||||
},
|
||||
ListenerData1 = emqx_map_lib:deep_merge(ListenerData0, OCSPConfig),
|
||||
ListenerData1 = emqx_utils_maps:deep_merge(ListenerData0, OCSPConfig),
|
||||
{ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1),
|
||||
?assertMatch(
|
||||
#{
|
||||
|
@ -722,14 +722,14 @@ do_t_update_listener(Config) ->
|
|||
%% location
|
||||
?assertNotEqual(
|
||||
IssuerPemPath,
|
||||
emqx_map_lib:deep_get(
|
||||
emqx_utils_maps:deep_get(
|
||||
[<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>],
|
||||
ListenerData2
|
||||
)
|
||||
),
|
||||
?assertNotEqual(
|
||||
IssuerPem,
|
||||
emqx_map_lib:deep_get(
|
||||
emqx_utils_maps:deep_get(
|
||||
[<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>],
|
||||
ListenerData2
|
||||
)
|
||||
|
@ -818,7 +818,7 @@ do_t_validations(_Config) ->
|
|||
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
|
||||
|
||||
ListenerData1 =
|
||||
emqx_map_lib:deep_merge(
|
||||
emqx_utils_maps:deep_merge(
|
||||
ListenerData0,
|
||||
#{
|
||||
<<"ssl_options">> =>
|
||||
|
@ -827,7 +827,7 @@ do_t_validations(_Config) ->
|
|||
),
|
||||
{error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1),
|
||||
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} =
|
||||
emqx_json:decode(ResRaw1, [return_maps]),
|
||||
emqx_utils_json:decode(ResRaw1, [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"mismatches">> :=
|
||||
|
@ -839,11 +839,11 @@ do_t_validations(_Config) ->
|
|||
}
|
||||
}
|
||||
},
|
||||
emqx_json:decode(MsgRaw1, [return_maps])
|
||||
emqx_utils_json:decode(MsgRaw1, [return_maps])
|
||||
),
|
||||
|
||||
ListenerData2 =
|
||||
emqx_map_lib:deep_merge(
|
||||
emqx_utils_maps:deep_merge(
|
||||
ListenerData0,
|
||||
#{
|
||||
<<"ssl_options">> =>
|
||||
|
@ -857,7 +857,7 @@ do_t_validations(_Config) ->
|
|||
),
|
||||
{error, {_, _, ResRaw2}} = update_listener_via_api(ListenerId, ListenerData2),
|
||||
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw2} =
|
||||
emqx_json:decode(ResRaw2, [return_maps]),
|
||||
emqx_utils_json:decode(ResRaw2, [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"mismatches">> :=
|
||||
|
@ -869,11 +869,11 @@ do_t_validations(_Config) ->
|
|||
}
|
||||
}
|
||||
},
|
||||
emqx_json:decode(MsgRaw2, [return_maps])
|
||||
emqx_utils_json:decode(MsgRaw2, [return_maps])
|
||||
),
|
||||
|
||||
ListenerData3a =
|
||||
emqx_map_lib:deep_merge(
|
||||
emqx_utils_maps:deep_merge(
|
||||
ListenerData0,
|
||||
#{
|
||||
<<"ssl_options">> =>
|
||||
|
@ -886,10 +886,12 @@ do_t_validations(_Config) ->
|
|||
}
|
||||
}
|
||||
),
|
||||
ListenerData3 = emqx_map_lib:deep_remove([<<"ssl_options">>, <<"certfile">>], ListenerData3a),
|
||||
ListenerData3 = emqx_utils_maps:deep_remove(
|
||||
[<<"ssl_options">>, <<"certfile">>], ListenerData3a
|
||||
),
|
||||
{error, {_, _, ResRaw3}} = update_listener_via_api(ListenerId, ListenerData3),
|
||||
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw3} =
|
||||
emqx_json:decode(ResRaw3, [return_maps]),
|
||||
emqx_utils_json:decode(ResRaw3, [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"mismatches">> :=
|
||||
|
@ -901,7 +903,7 @@ do_t_validations(_Config) ->
|
|||
}
|
||||
}
|
||||
},
|
||||
emqx_json:decode(MsgRaw3, [return_maps])
|
||||
emqx_utils_json:decode(MsgRaw3, [return_maps])
|
||||
),
|
||||
|
||||
ok.
|
||||
|
|
|
@ -119,7 +119,7 @@ t_has_routes(_) ->
|
|||
?R:delete_route(<<"devices/+/messages">>).
|
||||
|
||||
t_unexpected(_) ->
|
||||
Router = emqx_misc:proc_name(?R, 1),
|
||||
Router = emqx_utils:proc_name(?R, 1),
|
||||
?assertEqual(ignored, gen_server:call(Router, bad_request)),
|
||||
?assertEqual(ok, gen_server:cast(Router, bad_message)),
|
||||
Router ! bad_info.
|
||||
|
|
|
@ -191,7 +191,7 @@ ssl_files_save_delete_test() ->
|
|||
FileKey = maps:get(<<"keyfile">>, SSL),
|
||||
?assertMatch(<<"/tmp/ssl-test-dir/key-", _:16/binary>>, FileKey),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(FileKey)),
|
||||
FileIssuerPem = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL),
|
||||
FileIssuerPem = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL),
|
||||
?assertMatch(<<"/tmp/ssl-test-dir/ocsp_issuer_pem-", _:16/binary>>, FileIssuerPem),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(FileIssuerPem)),
|
||||
%% no old file to delete
|
||||
|
@ -251,8 +251,8 @@ ssl_file_replace_test() ->
|
|||
{ok, SSL3} = emqx_tls_lib:ensure_ssl_files(Dir, SSL1),
|
||||
File1 = maps:get(<<"keyfile">>, SSL2),
|
||||
File2 = maps:get(<<"keyfile">>, SSL3),
|
||||
IssuerPem1 = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL2),
|
||||
IssuerPem2 = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL3),
|
||||
IssuerPem1 = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL2),
|
||||
IssuerPem2 = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL3),
|
||||
?assert(filelib:is_regular(File1)),
|
||||
?assert(filelib:is_regular(File2)),
|
||||
?assert(filelib:is_regular(IssuerPem1)),
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
{deps, [
|
||||
{emqx, {path, "../emqx"}},
|
||||
{emqx_utils, {path, "../emqx_utils"}},
|
||||
{emqx_connector, {path, "../emqx_connector"}}
|
||||
]}.
|
||||
|
||||
|
|
|
@ -929,7 +929,7 @@ aggregate_metrics([]) ->
|
|||
aggregate_metrics([HeadMetrics | AllMetrics]) ->
|
||||
ErrorLogger = fun(Reason) -> ?SLOG(info, #{msg => "bad_metrics_value", error => Reason}) end,
|
||||
Fun = fun(ElemMap, AccMap) ->
|
||||
emqx_map_lib:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
|
||||
emqx_utils_maps:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
|
||||
end,
|
||||
lists:foldl(Fun, HeadMetrics, AllMetrics).
|
||||
|
||||
|
@ -1069,7 +1069,7 @@ update_user(ChainName, AuthenticatorID, UserID, UserInfo0) ->
|
|||
true ->
|
||||
serialize_error({missing_parameter, password});
|
||||
false ->
|
||||
UserInfo = emqx_map_lib:safe_atom_key_map(UserInfo0),
|
||||
UserInfo = emqx_utils_maps:safe_atom_key_map(UserInfo0),
|
||||
case emqx_authentication:update_user(ChainName, AuthenticatorID, UserID, UserInfo) of
|
||||
{ok, User} ->
|
||||
{200, User};
|
||||
|
|
|
@ -357,7 +357,7 @@ qs([{K, V} | More], Acc) ->
|
|||
qs(More, [["&", uri_encode(K), "=", uri_encode(V)] | Acc]).
|
||||
|
||||
serialize_body(<<"application/json">>, Body) ->
|
||||
emqx_json:encode(Body);
|
||||
emqx_utils_json:encode(Body);
|
||||
serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
|
||||
qs(maps:to_list(Body)).
|
||||
|
||||
|
@ -395,7 +395,7 @@ safely_parse_body(ContentType, Body) ->
|
|||
end.
|
||||
|
||||
parse_body(<<"application/json", _/binary>>, Body) ->
|
||||
{ok, emqx_json:decode(Body, [return_maps])};
|
||||
{ok, emqx_utils_json:decode(Body, [return_maps])};
|
||||
parse_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) ->
|
||||
Flags = [<<"result">>, <<"is_superuser">>],
|
||||
RawMap = maps:from_list(cow_qs:parse_qs(Body)),
|
||||
|
|
|
@ -99,7 +99,7 @@ handle_info(
|
|||
State1;
|
||||
{StatusLine, Headers, Body} ->
|
||||
try
|
||||
JWKS = jose_jwk:from(emqx_json:decode(Body, [return_maps])),
|
||||
JWKS = jose_jwk:from(emqx_utils_json:decode(Body, [return_maps])),
|
||||
{_, JWKs} = JWKS#jose_jwk.keys,
|
||||
State1#{jwks := JWKs}
|
||||
catch
|
||||
|
|
|
@ -407,7 +407,7 @@ do_verify(_JWT, [], _VerifyClaims) ->
|
|||
do_verify(JWT, [JWK | More], VerifyClaims) ->
|
||||
try jose_jws:verify(JWK, JWT) of
|
||||
{true, Payload, _JWT} ->
|
||||
Claims0 = emqx_json:decode(Payload, [return_maps]),
|
||||
Claims0 = emqx_utils_json:decode(Payload, [return_maps]),
|
||||
Claims = try_convert_to_num(Claims0, [<<"exp">>, <<"iat">>, <<"nbf">>]),
|
||||
case verify_claims(Claims, VerifyClaims) of
|
||||
ok ->
|
||||
|
|
|
@ -332,7 +332,7 @@ run_fuzzy_filter(
|
|||
|
||||
%% Example: data/user-credentials.json
|
||||
import_users_from_json(Bin, #{user_group := UserGroup}) ->
|
||||
case emqx_json:safe_decode(Bin, [return_maps]) of
|
||||
case emqx_utils_json:safe_decode(Bin, [return_maps]) of
|
||||
{ok, List} ->
|
||||
trans(fun ?MODULE:import/2, [UserGroup, List]);
|
||||
{error, Reason} ->
|
||||
|
|
|
@ -29,7 +29,7 @@
|
|||
-define(assertAuthenticatorsMatch(Guard, Path),
|
||||
(fun() ->
|
||||
{ok, 200, Response} = request(get, uri(Path)),
|
||||
?assertMatch(Guard, jiffy:decode(Response, [return_maps]))
|
||||
?assertMatch(Guard, emqx_utils_json:decode(Response, [return_maps]))
|
||||
end)()
|
||||
).
|
||||
|
||||
|
@ -234,7 +234,7 @@ test_authenticator(PathPrefix) ->
|
|||
get,
|
||||
uri(PathPrefix ++ [?CONF_NS, "password_based:http", "status"])
|
||||
),
|
||||
{ok, RList} = emqx_json:safe_decode(Res),
|
||||
{ok, RList} = emqx_utils_json:safe_decode(Res),
|
||||
Snd = fun({_, Val}) -> Val end,
|
||||
LookupVal = fun LookupV(List, RestJson) ->
|
||||
case List of
|
||||
|
@ -353,7 +353,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
<<"success">> := 0,
|
||||
<<"nomatch">> := 1
|
||||
}
|
||||
} = jiffy:decode(PageData0, [return_maps]);
|
||||
} = emqx_utils_json:decode(PageData0, [return_maps]);
|
||||
["listeners", 'tcp:default'] ->
|
||||
#{
|
||||
<<"metrics">> := #{
|
||||
|
@ -361,7 +361,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
<<"success">> := 0,
|
||||
<<"nomatch">> := 1
|
||||
}
|
||||
} = jiffy:decode(PageData0, [return_maps])
|
||||
} = emqx_utils_json:decode(PageData0, [return_maps])
|
||||
end,
|
||||
|
||||
InvalidUsers = [
|
||||
|
@ -384,7 +384,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
lists:foreach(
|
||||
fun(User) ->
|
||||
{ok, 201, UserData} = request(post, UsersUri, User),
|
||||
CreatedUser = jiffy:decode(UserData, [return_maps]),
|
||||
CreatedUser = emqx_utils_json:decode(UserData, [return_maps]),
|
||||
?assertMatch(#{<<"user_id">> := _}, CreatedUser)
|
||||
end,
|
||||
ValidUsers
|
||||
|
@ -411,7 +411,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
<<"success">> := 1,
|
||||
<<"nomatch">> := 1
|
||||
}
|
||||
} = jiffy:decode(PageData01, [return_maps]);
|
||||
} = emqx_utils_json:decode(PageData01, [return_maps]);
|
||||
["listeners", 'tcp:default'] ->
|
||||
#{
|
||||
<<"metrics">> := #{
|
||||
|
@ -419,7 +419,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
<<"success">> := 1,
|
||||
<<"nomatch">> := 1
|
||||
}
|
||||
} = jiffy:decode(PageData01, [return_maps])
|
||||
} = emqx_utils_json:decode(PageData01, [return_maps])
|
||||
end,
|
||||
|
||||
{ok, 200, Page1Data} = request(get, UsersUri ++ "?page=1&limit=2"),
|
||||
|
@ -433,7 +433,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
<<"count">> := 3
|
||||
}
|
||||
} =
|
||||
jiffy:decode(Page1Data, [return_maps]),
|
||||
emqx_utils_json:decode(Page1Data, [return_maps]),
|
||||
|
||||
{ok, 200, Page2Data} = request(get, UsersUri ++ "?page=2&limit=2"),
|
||||
|
||||
|
@ -445,7 +445,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
<<"limit">> := 2,
|
||||
<<"count">> := 3
|
||||
}
|
||||
} = jiffy:decode(Page2Data, [return_maps]),
|
||||
} = emqx_utils_json:decode(Page2Data, [return_maps]),
|
||||
|
||||
?assertEqual(2, length(Page1Users)),
|
||||
?assertEqual(1, length(Page2Users)),
|
||||
|
@ -465,7 +465,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
<<"limit">> := 3,
|
||||
<<"count">> := 1
|
||||
}
|
||||
} = jiffy:decode(Super1Data, [return_maps]),
|
||||
} = emqx_utils_json:decode(Super1Data, [return_maps]),
|
||||
|
||||
?assertEqual(
|
||||
[<<"u2">>],
|
||||
|
@ -482,7 +482,7 @@ test_authenticator_users(PathPrefix) ->
|
|||
<<"limit">> := 3,
|
||||
<<"count">> := 2
|
||||
}
|
||||
} = jiffy:decode(Super2Data, [return_maps]),
|
||||
} = emqx_utils_json:decode(Super2Data, [return_maps]),
|
||||
|
||||
?assertEqual(
|
||||
[<<"u1">>, <<"u3">>],
|
||||
|
@ -509,7 +509,7 @@ test_authenticator_user(PathPrefix) ->
|
|||
|
||||
{ok, 200, UserData} = request(get, UsersUri ++ "/u1"),
|
||||
|
||||
FetchedUser = jiffy:decode(UserData, [return_maps]),
|
||||
FetchedUser = emqx_utils_json:decode(UserData, [return_maps]),
|
||||
?assertMatch(#{<<"user_id">> := <<"u1">>}, FetchedUser),
|
||||
?assertNotMatch(#{<<"password">> := _}, FetchedUser),
|
||||
|
||||
|
|
|
@ -41,7 +41,7 @@
|
|||
|
||||
-define(SERVER_RESPONSE_JSON(Result), ?SERVER_RESPONSE_JSON(Result, false)).
|
||||
-define(SERVER_RESPONSE_JSON(Result, IsSuperuser),
|
||||
jiffy:encode(#{
|
||||
emqx_utils_json:encode(#{
|
||||
result => Result,
|
||||
is_superuser => IsSuperuser
|
||||
})
|
||||
|
@ -172,11 +172,11 @@ t_no_value_for_placeholder(_Config) ->
|
|||
#{
|
||||
<<"cert_subject">> := <<"">>,
|
||||
<<"cert_common_name">> := <<"">>
|
||||
} = jiffy:decode(RawBody, [return_maps]),
|
||||
} = emqx_utils_json:decode(RawBody, [return_maps]),
|
||||
Req = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"application/json">>},
|
||||
jiffy:encode(#{result => allow, is_superuser => false}),
|
||||
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||
Req1
|
||||
),
|
||||
{ok, Req, State}
|
||||
|
@ -444,7 +444,7 @@ samples() ->
|
|||
Req = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"application/json">>},
|
||||
jiffy:encode(#{result => allow, is_superuser => false}),
|
||||
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||
Req0
|
||||
),
|
||||
{ok, Req, State}
|
||||
|
@ -459,7 +459,7 @@ samples() ->
|
|||
Req = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"application/json">>},
|
||||
jiffy:encode(#{result => allow, is_superuser => true}),
|
||||
emqx_utils_json:encode(#{result => allow, is_superuser => true}),
|
||||
Req0
|
||||
),
|
||||
{ok, Req, State}
|
||||
|
@ -512,11 +512,11 @@ samples() ->
|
|||
#{
|
||||
<<"username">> := <<"plain">>,
|
||||
<<"password">> := <<"plain">>
|
||||
} = jiffy:decode(RawBody, [return_maps]),
|
||||
} = emqx_utils_json:decode(RawBody, [return_maps]),
|
||||
Req = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"application/json">>},
|
||||
jiffy:encode(#{result => allow, is_superuser => false}),
|
||||
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||
Req1
|
||||
),
|
||||
{ok, Req, State}
|
||||
|
@ -539,7 +539,7 @@ samples() ->
|
|||
Req = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"application/json">>},
|
||||
jiffy:encode(#{result => allow, is_superuser => false}),
|
||||
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||
Req1
|
||||
),
|
||||
{ok, Req, State}
|
||||
|
@ -565,11 +565,11 @@ samples() ->
|
|||
<<"peerhost">> := <<"127.0.0.1">>,
|
||||
<<"cert_subject">> := <<"cert_subject_data">>,
|
||||
<<"cert_common_name">> := <<"cert_common_name_data">>
|
||||
} = jiffy:decode(RawBody, [return_maps]),
|
||||
} = emqx_utils_json:decode(RawBody, [return_maps]),
|
||||
Req = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"application/json">>},
|
||||
jiffy:encode(#{result => allow, is_superuser => false}),
|
||||
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||
Req1
|
||||
),
|
||||
{ok, Req, State}
|
||||
|
|
|
@ -168,7 +168,7 @@ cowboy_handler(Req0, State) ->
|
|||
Req = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"application/json">>},
|
||||
jiffy:encode(#{result => allow, is_superuser => false}),
|
||||
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||
Req0
|
||||
),
|
||||
{ok, Req, State}.
|
||||
|
|
|
@ -467,7 +467,7 @@ jwks_handler(Req0, State) ->
|
|||
Req = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"application/json">>},
|
||||
jiffy:encode(JWKS),
|
||||
emqx_utils_json:encode(JWKS),
|
||||
Req0
|
||||
),
|
||||
{ok, Req, State}.
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
{erl_opts, [debug_info, nowarn_unused_import]}.
|
||||
{deps, [
|
||||
{emqx, {path, "../emqx"}},
|
||||
{emqx_utils, {path, "../emqx_utils"}},
|
||||
{emqx_connector, {path, "../emqx_connector"}}
|
||||
]}.
|
||||
|
||||
|
|
|
@ -403,7 +403,7 @@ aggregate_metrics([]) ->
|
|||
aggregate_metrics([HeadMetrics | AllMetrics]) ->
|
||||
ErrorLogger = fun(Reason) -> ?SLOG(info, #{msg => "bad_metrics_value", error => Reason}) end,
|
||||
Fun = fun(ElemMap, AccMap) ->
|
||||
emqx_map_lib:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
|
||||
emqx_utils_maps:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
|
||||
end,
|
||||
lists:foldl(Fun, HeadMetrics, AllMetrics).
|
||||
|
||||
|
|
|
@ -47,7 +47,7 @@ create(#{path := Path} = Source) ->
|
|||
?SLOG(alert, #{
|
||||
msg => failed_to_read_acl_file,
|
||||
path => Path,
|
||||
explain => emqx_misc:explain_posix(Reason)
|
||||
explain => emqx_utils:explain_posix(Reason)
|
||||
}),
|
||||
throw(failed_to_read_acl_file);
|
||||
{error, Reason} ->
|
||||
|
|
|
@ -227,7 +227,7 @@ encode_path(Path) ->
|
|||
lists:flatten(["/" ++ Part || Part <- lists:map(fun uri_encode/1, Parts)]).
|
||||
|
||||
serialize_body(<<"application/json">>, Body) ->
|
||||
jsx:encode(Body);
|
||||
emqx_utils_json:encode(Body);
|
||||
serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
|
||||
query_string(Body).
|
||||
|
||||
|
|
|
@ -337,7 +337,7 @@ check_ssl_opts(Conf) ->
|
|||
(#{<<"url">> := Url} = Source) ->
|
||||
case emqx_authz_http:parse_url(Url) of
|
||||
{<<"https", _/binary>>, _, _} ->
|
||||
case emqx_map_lib:deep_find([<<"ssl">>, <<"enable">>], Source) of
|
||||
case emqx_utils_maps:deep_find([<<"ssl">>, <<"enable">>], Source) of
|
||||
{ok, true} -> true;
|
||||
{ok, false} -> throw({ssl_not_enable, Url});
|
||||
_ -> throw({ssl_enable_not_found, Url})
|
||||
|
|
|
@ -144,7 +144,7 @@ parse_http_resp_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) ->
|
|||
end;
|
||||
parse_http_resp_body(<<"application/json", _/binary>>, Body) ->
|
||||
try
|
||||
result(emqx_json:decode(Body, [return_maps]))
|
||||
result(emqx_utils_json:decode(Body, [return_maps]))
|
||||
catch
|
||||
_:_ -> error
|
||||
end.
|
||||
|
|
|
@ -60,19 +60,19 @@ set_special_configs(emqx_authz) ->
|
|||
set_special_configs(_App) ->
|
||||
ok.
|
||||
|
||||
t_clean_cahce(_) ->
|
||||
t_clean_cache(_) ->
|
||||
{ok, C} = emqtt:start_link([{clientid, <<"emqx0">>}, {username, <<"emqx0">>}]),
|
||||
{ok, _} = emqtt:connect(C),
|
||||
{ok, _, _} = emqtt:subscribe(C, <<"a/b/c">>, 0),
|
||||
ok = emqtt:publish(C, <<"a/b/c">>, <<"{\"x\":1,\"y\":1}">>, 0),
|
||||
|
||||
{ok, 200, Result3} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
|
||||
?assertEqual(2, length(emqx_json:decode(Result3))),
|
||||
?assertEqual(2, length(emqx_utils_json:decode(Result3))),
|
||||
|
||||
request(delete, uri(["authorization", "cache"])),
|
||||
|
||||
{ok, 200, Result4} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
|
||||
?assertEqual(0, length(emqx_json:decode(Result4))),
|
||||
?assertEqual(0, length(emqx_utils_json:decode(Result4))),
|
||||
|
||||
ok.
|
||||
|
||||
|
|
|
@ -95,7 +95,7 @@ t_api(_) ->
|
|||
<<"page">> := 1,
|
||||
<<"hasnext">> := false
|
||||
}
|
||||
} = jsx:decode(Request1),
|
||||
} = emqx_utils_json:decode(Request1),
|
||||
?assertEqual(3, length(Rules1)),
|
||||
|
||||
{ok, 200, Request1_1} =
|
||||
|
@ -119,7 +119,7 @@ t_api(_) ->
|
|||
<<"hasnext">> => false
|
||||
}
|
||||
},
|
||||
jsx:decode(Request1_1)
|
||||
emqx_utils_json:decode(Request1_1)
|
||||
),
|
||||
|
||||
{ok, 200, Request2} =
|
||||
|
@ -128,7 +128,7 @@ t_api(_) ->
|
|||
uri(["authorization", "sources", "built_in_database", "rules", "users", "user1"]),
|
||||
[]
|
||||
),
|
||||
#{<<"username">> := <<"user1">>, <<"rules">> := Rules1} = jsx:decode(Request2),
|
||||
#{<<"username">> := <<"user1">>, <<"rules">> := Rules1} = emqx_utils_json:decode(Request2),
|
||||
|
||||
{ok, 204, _} =
|
||||
request(
|
||||
|
@ -142,7 +142,7 @@ t_api(_) ->
|
|||
uri(["authorization", "sources", "built_in_database", "rules", "users", "user1"]),
|
||||
[]
|
||||
),
|
||||
#{<<"username">> := <<"user1">>, <<"rules">> := Rules2} = jsx:decode(Request3),
|
||||
#{<<"username">> := <<"user1">>, <<"rules">> := Rules2} = emqx_utils_json:decode(Request3),
|
||||
?assertEqual(0, length(Rules2)),
|
||||
|
||||
{ok, 204, _} =
|
||||
|
@ -202,8 +202,8 @@ t_api(_) ->
|
|||
<<"data">> := [#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3}],
|
||||
<<"meta">> := #{<<"count">> := 1, <<"limit">> := 100, <<"page">> := 1}
|
||||
} =
|
||||
jsx:decode(Request4),
|
||||
#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3} = jsx:decode(Request5),
|
||||
emqx_utils_json:decode(Request4),
|
||||
#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3} = emqx_utils_json:decode(Request5),
|
||||
?assertEqual(3, length(Rules3)),
|
||||
|
||||
{ok, 204, _} =
|
||||
|
@ -218,7 +218,7 @@ t_api(_) ->
|
|||
uri(["authorization", "sources", "built_in_database", "rules", "clients", "client1"]),
|
||||
[]
|
||||
),
|
||||
#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules4} = jsx:decode(Request6),
|
||||
#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules4} = emqx_utils_json:decode(Request6),
|
||||
?assertEqual(0, length(Rules4)),
|
||||
|
||||
{ok, 204, _} =
|
||||
|
@ -252,7 +252,7 @@ t_api(_) ->
|
|||
uri(["authorization", "sources", "built_in_database", "rules", "all"]),
|
||||
[]
|
||||
),
|
||||
#{<<"rules">> := Rules5} = jsx:decode(Request7),
|
||||
#{<<"rules">> := Rules5} = emqx_utils_json:decode(Request7),
|
||||
?assertEqual(3, length(Rules5)),
|
||||
|
||||
{ok, 204, _} =
|
||||
|
@ -267,7 +267,7 @@ t_api(_) ->
|
|||
uri(["authorization", "sources", "built_in_database", "rules", "all"]),
|
||||
[]
|
||||
),
|
||||
#{<<"rules">> := Rules6} = jsx:decode(Request8),
|
||||
#{<<"rules">> := Rules6} = emqx_utils_json:decode(Request8),
|
||||
?assertEqual(0, length(Rules6)),
|
||||
|
||||
{ok, 204, _} =
|
||||
|
@ -285,7 +285,7 @@ t_api(_) ->
|
|||
uri(["authorization", "sources", "built_in_database", "rules", "users?page=2&limit=5"]),
|
||||
[]
|
||||
),
|
||||
#{<<"data">> := Data1} = jsx:decode(Request9),
|
||||
#{<<"data">> := Data1} = emqx_utils_json:decode(Request9),
|
||||
?assertEqual(5, length(Data1)),
|
||||
|
||||
{ok, 204, _} =
|
||||
|
@ -303,7 +303,7 @@ t_api(_) ->
|
|||
uri(["authorization", "sources", "built_in_database", "rules", "clients?limit=5"]),
|
||||
[]
|
||||
),
|
||||
#{<<"data">> := Data2} = jsx:decode(Request10),
|
||||
#{<<"data">> := Data2} = emqx_utils_json:decode(Request10),
|
||||
?assertEqual(5, length(Data2)),
|
||||
|
||||
{ok, 400, Msg1} =
|
||||
|
|
|
@ -76,7 +76,7 @@ t_api(_) ->
|
|||
|
||||
{ok, 200, Result1} = request(put, uri(["authorization", "settings"]), Settings1),
|
||||
{ok, 200, Result1} = request(get, uri(["authorization", "settings"]), []),
|
||||
?assertEqual(Settings1, jsx:decode(Result1)),
|
||||
?assertEqual(Settings1, emqx_utils_json:decode(Result1)),
|
||||
|
||||
Settings2 = #{
|
||||
<<"no_match">> => <<"allow">>,
|
||||
|
@ -90,7 +90,7 @@ t_api(_) ->
|
|||
|
||||
{ok, 200, Result2} = request(put, uri(["authorization", "settings"]), Settings2),
|
||||
{ok, 200, Result2} = request(get, uri(["authorization", "settings"]), []),
|
||||
?assertEqual(Settings2, jsx:decode(Result2)),
|
||||
?assertEqual(Settings2, emqx_utils_json:decode(Result2)),
|
||||
|
||||
ok.
|
||||
|
||||
|
|
|
@ -148,8 +148,8 @@ set_special_configs(_App) ->
|
|||
ok.
|
||||
|
||||
init_per_testcase(t_api, Config) ->
|
||||
meck:new(emqx_misc, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(emqx_misc, gen_id, fun() -> "fake" end),
|
||||
meck:new(emqx_utils, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(emqx_utils, gen_id, fun() -> "fake" end),
|
||||
|
||||
meck:new(emqx, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(
|
||||
|
@ -165,7 +165,7 @@ init_per_testcase(_, Config) ->
|
|||
Config.
|
||||
|
||||
end_per_testcase(t_api, _Config) ->
|
||||
meck:unload(emqx_misc),
|
||||
meck:unload(emqx_utils),
|
||||
meck:unload(emqx),
|
||||
ok;
|
||||
end_per_testcase(_, _Config) ->
|
||||
|
@ -182,7 +182,7 @@ t_api(_) ->
|
|||
{ok, 404, ErrResult} = request(get, uri(["authorization", "sources", "http"]), []),
|
||||
?assertMatch(
|
||||
#{<<"code">> := <<"NOT_FOUND">>, <<"message">> := <<"Not found: http">>},
|
||||
emqx_json:decode(ErrResult, [return_maps])
|
||||
emqx_utils_json:decode(ErrResult, [return_maps])
|
||||
),
|
||||
|
||||
[
|
||||
|
@ -215,7 +215,8 @@ t_api(_) ->
|
|||
),
|
||||
{ok, 200, Result3} = request(get, uri(["authorization", "sources", "http"]), []),
|
||||
?assertMatch(
|
||||
#{<<"type">> := <<"http">>, <<"enable">> := false}, emqx_json:decode(Result3, [return_maps])
|
||||
#{<<"type">> := <<"http">>, <<"enable">> := false},
|
||||
emqx_utils_json:decode(Result3, [return_maps])
|
||||
),
|
||||
|
||||
Keyfile = emqx_common_test_helpers:app_path(
|
||||
|
@ -253,7 +254,7 @@ t_api(_) ->
|
|||
<<"total">> := 0,
|
||||
<<"nomatch">> := 0
|
||||
}
|
||||
} = emqx_json:decode(Status4, [return_maps]),
|
||||
} = emqx_utils_json:decode(Status4, [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"type">> := <<"mongodb">>,
|
||||
|
@ -265,7 +266,7 @@ t_api(_) ->
|
|||
<<"verify">> := <<"verify_none">>
|
||||
}
|
||||
},
|
||||
emqx_json:decode(Result4, [return_maps])
|
||||
emqx_utils_json:decode(Result4, [return_maps])
|
||||
),
|
||||
|
||||
{ok, Cacert} = file:read_file(Cacertfile),
|
||||
|
@ -297,7 +298,7 @@ t_api(_) ->
|
|||
<<"verify">> := <<"verify_none">>
|
||||
}
|
||||
},
|
||||
emqx_json:decode(Result5, [return_maps])
|
||||
emqx_utils_json:decode(Result5, [return_maps])
|
||||
),
|
||||
|
||||
{ok, 200, Status5_1} = request(get, uri(["authorization", "sources", "mongodb", "status"]), []),
|
||||
|
@ -308,7 +309,7 @@ t_api(_) ->
|
|||
<<"total">> := 0,
|
||||
<<"nomatch">> := 0
|
||||
}
|
||||
} = emqx_json:decode(Status5_1, [return_maps]),
|
||||
} = emqx_utils_json:decode(Status5_1, [return_maps]),
|
||||
|
||||
#{
|
||||
ssl := #{
|
||||
|
@ -355,7 +356,7 @@ t_api(_) ->
|
|||
<<"code">> := <<"BAD_REQUEST">>,
|
||||
<<"message">> := <<"Type mismatch", _/binary>>
|
||||
},
|
||||
emqx_json:decode(TypeMismatch, [return_maps])
|
||||
emqx_utils_json:decode(TypeMismatch, [return_maps])
|
||||
),
|
||||
|
||||
lists:foreach(
|
||||
|
@ -443,7 +444,7 @@ t_api(_) ->
|
|||
<<"total">> := 1,
|
||||
<<"nomatch">> := 0
|
||||
}
|
||||
} = emqx_json:decode(Status5, [return_maps])
|
||||
} = emqx_utils_json:decode(Status5, [return_maps])
|
||||
end
|
||||
),
|
||||
|
||||
|
@ -469,7 +470,7 @@ t_api(_) ->
|
|||
<<"total">> := 2,
|
||||
<<"nomatch">> := 0
|
||||
}
|
||||
} = emqx_json:decode(Status6, [return_maps])
|
||||
} = emqx_utils_json:decode(Status6, [return_maps])
|
||||
end
|
||||
),
|
||||
|
||||
|
@ -495,7 +496,7 @@ t_api(_) ->
|
|||
<<"total">> := 3,
|
||||
<<"nomatch">> := 0
|
||||
}
|
||||
} = emqx_json:decode(Status7, [return_maps])
|
||||
} = emqx_utils_json:decode(Status7, [return_maps])
|
||||
end
|
||||
),
|
||||
ok.
|
||||
|
@ -621,7 +622,7 @@ t_aggregate_metrics(_) ->
|
|||
).
|
||||
|
||||
get_sources(Result) ->
|
||||
maps:get(<<"sources">>, emqx_json:decode(Result, [return_maps])).
|
||||
maps:get(<<"sources">>, emqx_utils_json:decode(Result, [return_maps])).
|
||||
|
||||
data_dir() -> emqx:data_dir().
|
||||
|
||||
|
|
|
@ -311,7 +311,7 @@ t_json_body(_Config) ->
|
|||
<<"topic">> := <<"t">>,
|
||||
<<"action">> := <<"publish">>
|
||||
},
|
||||
jiffy:decode(RawBody, [return_maps])
|
||||
emqx_utils_json:decode(RawBody, [return_maps])
|
||||
),
|
||||
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
||||
end,
|
||||
|
@ -366,7 +366,7 @@ t_placeholder_and_body(_Config) ->
|
|||
<<"CN">> := ?PH_CERT_CN_NAME,
|
||||
<<"CS">> := ?PH_CERT_SUBJECT
|
||||
},
|
||||
jiffy:decode(PostVars, [return_maps])
|
||||
emqx_utils_json:decode(PostVars, [return_maps])
|
||||
),
|
||||
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
||||
end,
|
||||
|
@ -418,7 +418,7 @@ t_no_value_for_placeholder(_Config) ->
|
|||
#{
|
||||
<<"mountpoint">> := <<"[]">>
|
||||
},
|
||||
jiffy:decode(RawBody, [return_maps])
|
||||
emqx_utils_json:decode(RawBody, [return_maps])
|
||||
),
|
||||
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
||||
end,
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
%% -*- mode: erlang -*-
|
||||
|
||||
{erl_opts, [debug_info]}.
|
||||
{deps, [{emqx, {path, "../emqx"}}]}.
|
||||
{deps, [
|
||||
{emqx, {path, "../emqx"}},
|
||||
{emqx_utils, {path, "../emqx_utils"}}
|
||||
]}.
|
||||
|
||||
{shell, [
|
||||
{apps, [emqx_auto_subscribe]}
|
||||
|
|
|
@ -141,7 +141,7 @@ t_update(_) ->
|
|||
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
||||
Body = [#{topic => ?TOPIC_S}],
|
||||
{ok, Response} = emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, Body),
|
||||
ResponseMap = emqx_json:decode(Response, [return_maps]),
|
||||
ResponseMap = emqx_utils_json:decode(Response, [return_maps]),
|
||||
?assertEqual(1, erlang:length(ResponseMap)),
|
||||
|
||||
BadBody1 = #{topic => ?TOPIC_S},
|
||||
|
@ -177,7 +177,7 @@ t_update(_) ->
|
|||
emqtt:disconnect(Client),
|
||||
|
||||
{ok, GETResponse} = emqx_mgmt_api_test_util:request_api(get, Path),
|
||||
GETResponseMap = emqx_json:decode(GETResponse, [return_maps]),
|
||||
GETResponseMap = emqx_utils_json:decode(GETResponse, [return_maps]),
|
||||
?assertEqual(1, erlang:length(GETResponseMap)),
|
||||
ok.
|
||||
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
{erl_opts, [debug_info]}.
|
||||
{deps, [ {emqx, {path, "../emqx"}}
|
||||
, {emqx_resource, {path, "../../apps/emqx_resource"}}
|
||||
]}.
|
||||
{deps, [
|
||||
{emqx, {path, "../emqx"}},
|
||||
{emqx_utils, {path, "../emqx_utils"}},
|
||||
{emqx_resource, {path, "../../apps/emqx_resource"}}
|
||||
]}.
|
||||
|
||||
{shell, [
|
||||
% {config, "config/sys.config"},
|
||||
|
|
|
@ -207,7 +207,7 @@ send_message(BridgeId, Message) ->
|
|||
end.
|
||||
|
||||
query_opts(Config) ->
|
||||
case emqx_map_lib:deep_get([resource_opts, request_timeout], Config, false) of
|
||||
case emqx_utils_maps:deep_get([resource_opts, request_timeout], Config, false) of
|
||||
Timeout when is_integer(Timeout) ->
|
||||
%% request_timeout is configured
|
||||
#{timeout => Timeout};
|
||||
|
@ -296,7 +296,7 @@ create(BridgeType, BridgeName, RawConf) ->
|
|||
brige_action => create,
|
||||
bridge_type => BridgeType,
|
||||
bridge_name => BridgeName,
|
||||
bridge_raw_config => emqx_misc:redact(RawConf)
|
||||
bridge_raw_config => emqx_utils:redact(RawConf)
|
||||
}),
|
||||
emqx_conf:update(
|
||||
emqx_bridge:config_key_path() ++ [BridgeType, BridgeName],
|
||||
|
@ -367,7 +367,7 @@ perform_bridge_changes([{Action, MapConfs} | Tasks], Result0) ->
|
|||
perform_bridge_changes(Tasks, Result).
|
||||
|
||||
diff_confs(NewConfs, OldConfs) ->
|
||||
emqx_map_lib:diff_maps(
|
||||
emqx_utils_maps:diff_maps(
|
||||
flatten_confs(NewConfs),
|
||||
flatten_confs(OldConfs)
|
||||
).
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
-include_lib("hocon/include/hoconsc.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
-include_lib("emqx/include/emqx_api_lib.hrl").
|
||||
-include_lib("emqx_utils/include/emqx_utils_api.hrl").
|
||||
-include_lib("emqx_bridge/include/emqx_bridge.hrl").
|
||||
|
||||
-import(hoconsc, [mk/2, array/1, enum/1]).
|
||||
|
@ -220,7 +220,7 @@ info_example_basic(webhook) ->
|
|||
auto_restart_interval => 15000,
|
||||
query_mode => async,
|
||||
inflight_window => 100,
|
||||
max_queue_bytes => 100 * 1024 * 1024
|
||||
max_buffer_bytes => 100 * 1024 * 1024
|
||||
}
|
||||
};
|
||||
info_example_basic(mqtt) ->
|
||||
|
@ -245,7 +245,7 @@ mqtt_main_example() ->
|
|||
health_check_interval => <<"15s">>,
|
||||
auto_restart_interval => <<"60s">>,
|
||||
query_mode => sync,
|
||||
max_queue_bytes => 100 * 1024 * 1024
|
||||
max_buffer_bytes => 100 * 1024 * 1024
|
||||
},
|
||||
ssl => #{
|
||||
enable => false
|
||||
|
@ -668,7 +668,7 @@ get_metrics_from_local_node(BridgeType, BridgeName) ->
|
|||
false ->
|
||||
?BRIDGE_NOT_ENABLED;
|
||||
true ->
|
||||
case emqx_misc:safe_to_existing_atom(Node, utf8) of
|
||||
case emqx_utils:safe_to_existing_atom(Node, utf8) of
|
||||
{ok, TargetNode} ->
|
||||
call_operation(TargetNode, OperFunc, [
|
||||
TargetNode, BridgeType, BridgeName
|
||||
|
@ -835,7 +835,7 @@ format_resource_data(ResData) ->
|
|||
format_resource_data(error, undefined, Result) ->
|
||||
Result;
|
||||
format_resource_data(error, Error, Result) ->
|
||||
Result#{status_reason => emqx_misc:readable_error_msg(Error)};
|
||||
Result#{status_reason => emqx_utils:readable_error_msg(Error)};
|
||||
format_resource_data(K, V, Result) ->
|
||||
Result#{K => V}.
|
||||
|
||||
|
@ -1004,7 +1004,7 @@ supported_versions(get_metrics_from_all_nodes) -> [4];
|
|||
supported_versions(_Call) -> [1, 2, 3, 4].
|
||||
|
||||
redact(Term) ->
|
||||
emqx_misc:redact(Term).
|
||||
emqx_utils:redact(Term).
|
||||
|
||||
deobfuscate(NewConf, OldConf) ->
|
||||
maps:fold(
|
||||
|
@ -1015,7 +1015,7 @@ deobfuscate(NewConf, OldConf) ->
|
|||
{ok, OldV} when is_map(V), is_map(OldV) ->
|
||||
Acc#{K => deobfuscate(V, OldV)};
|
||||
{ok, OldV} ->
|
||||
case emqx_misc:is_redacted(K, V) of
|
||||
case emqx_utils:is_redacted(K, V) of
|
||||
true ->
|
||||
Acc#{K => OldV};
|
||||
_ ->
|
||||
|
@ -1028,6 +1028,6 @@ deobfuscate(NewConf, OldConf) ->
|
|||
).
|
||||
|
||||
map_to_json(M) ->
|
||||
emqx_json:encode(
|
||||
emqx_map_lib:jsonable_map(M, fun(K, V) -> {K, emqx_map_lib:binary_string(V)} end)
|
||||
emqx_utils_json:encode(
|
||||
emqx_utils_maps:jsonable_map(M, fun(K, V) -> {K, emqx_utils_maps:binary_string(V)} end)
|
||||
).
|
||||
|
|
|
@ -157,7 +157,7 @@ create(Type, Name, Conf, Opts0) ->
|
|||
msg => "create bridge",
|
||||
type => Type,
|
||||
name => Name,
|
||||
config => emqx_misc:redact(Conf)
|
||||
config => emqx_utils:redact(Conf)
|
||||
}),
|
||||
Opts = override_start_after_created(Conf, Opts0),
|
||||
{ok, _Data} = emqx_resource:create_local(
|
||||
|
@ -186,13 +186,13 @@ update(Type, Name, {OldConf, Conf}, Opts0) ->
|
|||
%% without restarting the bridge.
|
||||
%%
|
||||
Opts = override_start_after_created(Conf, Opts0),
|
||||
case emqx_map_lib:if_only_to_toggle_enable(OldConf, Conf) of
|
||||
case emqx_utils_maps:if_only_to_toggle_enable(OldConf, Conf) of
|
||||
false ->
|
||||
?SLOG(info, #{
|
||||
msg => "update bridge",
|
||||
type => Type,
|
||||
name => Name,
|
||||
config => emqx_misc:redact(Conf)
|
||||
config => emqx_utils:redact(Conf)
|
||||
}),
|
||||
case recreate(Type, Name, Conf, Opts) of
|
||||
{ok, _} ->
|
||||
|
@ -202,7 +202,7 @@ update(Type, Name, {OldConf, Conf}, Opts0) ->
|
|||
msg => "updating_a_non_existing_bridge",
|
||||
type => Type,
|
||||
name => Name,
|
||||
config => emqx_misc:redact(Conf)
|
||||
config => emqx_utils:redact(Conf)
|
||||
}),
|
||||
create(Type, Name, Conf, Opts);
|
||||
{error, Reason} ->
|
||||
|
@ -236,9 +236,9 @@ recreate(Type, Name, Conf, Opts) ->
|
|||
).
|
||||
|
||||
create_dry_run(Type, Conf0) ->
|
||||
TmpPath0 = iolist_to_binary([?TEST_ID_PREFIX, emqx_misc:gen_id(8)]),
|
||||
TmpPath = emqx_misc:safe_filename(TmpPath0),
|
||||
Conf = emqx_map_lib:safe_atom_key_map(Conf0),
|
||||
TmpPath0 = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]),
|
||||
TmpPath = emqx_utils:safe_filename(TmpPath0),
|
||||
Conf = emqx_utils_maps:safe_atom_key_map(Conf0),
|
||||
case emqx_connector_ssl:convert_certs(TmpPath, Conf) of
|
||||
{error, Reason} ->
|
||||
{error, Reason};
|
||||
|
|
|
@ -89,7 +89,7 @@ default_resource_opts() ->
|
|||
<<"inflight_window">> => 100,
|
||||
<<"auto_restart_interval">> => <<"60s">>,
|
||||
<<"health_check_interval">> => <<"15s">>,
|
||||
<<"max_queue_bytes">> => <<"1GB">>,
|
||||
<<"max_buffer_bytes">> => <<"1GB">>,
|
||||
<<"query_mode">> => <<"sync">>,
|
||||
%% there is only one underlying MQTT connection
|
||||
%% doesn't make a lot of sense to have a large pool
|
||||
|
|
|
@ -251,7 +251,7 @@ do_convert_webhook_config(
|
|||
case {MReqTRoot, MReqTResource} of
|
||||
{{ok, ReqTRoot}, {ok, ReqTResource}} ->
|
||||
{_Parsed, ReqTRaw} = max({ReqTRoot, ReqTRootRaw}, {ReqTResource, ReqTResourceRaw}),
|
||||
Conf1 = emqx_map_lib:deep_merge(
|
||||
Conf1 = emqx_utils_maps:deep_merge(
|
||||
Conf0,
|
||||
#{
|
||||
<<"request_timeout">> => ReqTRaw,
|
||||
|
|
|
@ -975,7 +975,7 @@ t_with_redact_update(Config) ->
|
|||
),
|
||||
|
||||
%% update with redacted config
|
||||
BridgeConf = emqx_misc:redact(Template),
|
||||
BridgeConf = emqx_utils:redact(Template),
|
||||
BridgeID = emqx_bridge_resource:bridge_id(Type, Name),
|
||||
{ok, 200, _} = request(put, uri(["bridges", BridgeID]), BridgeConf, Config),
|
||||
?assertEqual(
|
||||
|
@ -1221,7 +1221,7 @@ t_inconsistent_webhook_request_timeouts(Config) ->
|
|||
URL1 = ?URL(Port, "path1"),
|
||||
Name = ?BRIDGE_NAME,
|
||||
BadBridgeParams =
|
||||
emqx_map_lib:deep_merge(
|
||||
emqx_utils_maps:deep_merge(
|
||||
?HTTP_BRIDGE(URL1, Name),
|
||||
#{
|
||||
<<"request_timeout">> => <<"1s">>,
|
||||
|
@ -1304,4 +1304,4 @@ str(S) when is_list(S) -> S;
|
|||
str(S) when is_binary(S) -> binary_to_list(S).
|
||||
|
||||
json(B) when is_binary(B) ->
|
||||
emqx_json:decode(B, [return_maps]).
|
||||
emqx_utils_json:decode(B, [return_maps]).
|
||||
|
|
|
@ -201,7 +201,7 @@ t_mqtt_conn_bridge_ingress(_) ->
|
|||
#{
|
||||
<<"type">> := ?TYPE_MQTT,
|
||||
<<"name">> := ?BRIDGE_NAME_INGRESS
|
||||
} = jsx:decode(Bridge),
|
||||
} = emqx_utils_json:decode(Bridge),
|
||||
|
||||
BridgeIDIngress = emqx_bridge_resource:bridge_id(?TYPE_MQTT, ?BRIDGE_NAME_INGRESS),
|
||||
|
||||
|
@ -270,7 +270,7 @@ t_mqtt_conn_bridge_ingress_downgrades_qos_2(_) ->
|
|||
?SERVER_CONF(<<"user1">>)#{
|
||||
<<"type">> => ?TYPE_MQTT,
|
||||
<<"name">> => BridgeName,
|
||||
<<"ingress">> => emqx_map_lib:deep_merge(
|
||||
<<"ingress">> => emqx_utils_maps:deep_merge(
|
||||
?INGRESS_CONF,
|
||||
#{<<"remote">> => #{<<"qos">> => 2}}
|
||||
)
|
||||
|
@ -313,7 +313,7 @@ t_mqtt_conn_bridge_ingress_no_payload_template(_) ->
|
|||
emqx:publish(emqx_message:make(RemoteTopic, Payload)),
|
||||
%% we should receive a message on the local broker, with specified topic
|
||||
Msg = assert_mqtt_msg_received(LocalTopic),
|
||||
?assertMatch(#{<<"payload">> := Payload}, jsx:decode(Msg#message.payload)),
|
||||
?assertMatch(#{<<"payload">> := Payload}, emqx_utils_json:decode(Msg#message.payload)),
|
||||
|
||||
%% verify the metrics of the bridge
|
||||
?assertMetrics(
|
||||
|
@ -402,7 +402,7 @@ t_mqtt_conn_bridge_egress_no_payload_template(_) ->
|
|||
Msg = assert_mqtt_msg_received(RemoteTopic),
|
||||
%% the MapMsg is all fields outputed by Rule-Engine. it's a binary coded json here.
|
||||
?assertMatch(<<ResourceID:(byte_size(ResourceID))/binary, _/binary>>, Msg#message.from),
|
||||
?assertMatch(#{<<"payload">> := Payload}, jsx:decode(Msg#message.payload)),
|
||||
?assertMatch(#{<<"payload">> := Payload}, emqx_utils_json:decode(Msg#message.payload)),
|
||||
|
||||
%% verify the metrics of the bridge
|
||||
?retry(
|
||||
|
@ -545,7 +545,7 @@ t_ingress_mqtt_bridge_with_rules(_) ->
|
|||
<<"sql">> => <<"SELECT * from \"$bridges/", BridgeIDIngress/binary, "\"">>
|
||||
}
|
||||
),
|
||||
#{<<"id">> := RuleId} = jsx:decode(Rule),
|
||||
#{<<"id">> := RuleId} = emqx_utils_json:decode(Rule),
|
||||
|
||||
%% we now test if the bridge works as expected
|
||||
|
||||
|
@ -562,7 +562,7 @@ t_ingress_mqtt_bridge_with_rules(_) ->
|
|||
%% and also the rule should be matched, with matched + 1:
|
||||
{ok, 200, Rule1} = request(get, uri(["rules", RuleId]), []),
|
||||
{ok, 200, Metrics} = request(get, uri(["rules", RuleId, "metrics"]), []),
|
||||
?assertMatch(#{<<"id">> := RuleId}, jsx:decode(Rule1)),
|
||||
?assertMatch(#{<<"id">> := RuleId}, emqx_utils_json:decode(Rule1)),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"metrics">> := #{
|
||||
|
@ -581,7 +581,7 @@ t_ingress_mqtt_bridge_with_rules(_) ->
|
|||
<<"actions.failed.unknown">> := 0
|
||||
}
|
||||
},
|
||||
jsx:decode(Metrics)
|
||||
emqx_utils_json:decode(Metrics)
|
||||
),
|
||||
|
||||
%% we also check if the actions of the rule is triggered
|
||||
|
@ -630,7 +630,7 @@ t_egress_mqtt_bridge_with_rules(_) ->
|
|||
<<"sql">> => <<"SELECT * from \"t/1\"">>
|
||||
}
|
||||
),
|
||||
#{<<"id">> := RuleId} = jsx:decode(Rule),
|
||||
#{<<"id">> := RuleId} = emqx_utils_json:decode(Rule),
|
||||
|
||||
%% we now test if the bridge works as expected
|
||||
LocalTopic = <<?EGRESS_LOCAL_TOPIC, "/1">>,
|
||||
|
@ -653,7 +653,7 @@ t_egress_mqtt_bridge_with_rules(_) ->
|
|||
timer:sleep(100),
|
||||
emqx:publish(emqx_message:make(RuleTopic, Payload2)),
|
||||
{ok, 200, Rule1} = request(get, uri(["rules", RuleId]), []),
|
||||
?assertMatch(#{<<"id">> := RuleId, <<"name">> := _}, jsx:decode(Rule1)),
|
||||
?assertMatch(#{<<"id">> := RuleId, <<"name">> := _}, emqx_utils_json:decode(Rule1)),
|
||||
{ok, 200, Metrics} = request(get, uri(["rules", RuleId, "metrics"]), []),
|
||||
?assertMatch(
|
||||
#{
|
||||
|
@ -673,7 +673,7 @@ t_egress_mqtt_bridge_with_rules(_) ->
|
|||
<<"actions.failed.unknown">> := 0
|
||||
}
|
||||
},
|
||||
jsx:decode(Metrics)
|
||||
emqx_utils_json:decode(Metrics)
|
||||
),
|
||||
|
||||
%% we should receive a message on the "remote" broker, with specified topic
|
||||
|
@ -911,17 +911,17 @@ create_bridge(Config = #{<<"type">> := Type, <<"name">> := Name}) ->
|
|||
<<"type">> := Type,
|
||||
<<"name">> := Name
|
||||
},
|
||||
jsx:decode(Bridge)
|
||||
emqx_utils_json:decode(Bridge)
|
||||
),
|
||||
emqx_bridge_resource:bridge_id(Type, Name).
|
||||
|
||||
request_bridge(BridgeID) ->
|
||||
{ok, 200, Bridge} = request(get, uri(["bridges", BridgeID]), []),
|
||||
jsx:decode(Bridge).
|
||||
emqx_utils_json:decode(Bridge).
|
||||
|
||||
request_bridge_metrics(BridgeID) ->
|
||||
{ok, 200, BridgeMetrics} = request(get, uri(["bridges", BridgeID, "metrics"]), []),
|
||||
jsx:decode(BridgeMetrics).
|
||||
emqx_utils_json:decode(BridgeMetrics).
|
||||
|
||||
request(Method, Url, Body) ->
|
||||
request(<<"connector_admin">>, Method, Url, Body).
|
||||
|
|
|
@ -175,7 +175,7 @@ bridge_async_config(#{port := Port} = Config) ->
|
|||
" inflight_window = 100\n"
|
||||
" auto_restart_interval = \"60s\"\n"
|
||||
" health_check_interval = \"15s\"\n"
|
||||
" max_queue_bytes = \"1GB\"\n"
|
||||
" max_buffer_bytes = \"1GB\"\n"
|
||||
" query_mode = \"~s\"\n"
|
||||
" request_timeout = \"~s\"\n"
|
||||
" start_after_created = \"true\"\n"
|
||||
|
|
|
@ -156,7 +156,7 @@ on_start(InstanceId, Config) ->
|
|||
msg => "failed_to_start_kafka_consumer_client",
|
||||
instance_id => InstanceId,
|
||||
kafka_hosts => BootstrapHosts,
|
||||
reason => emqx_misc:redact(Reason)
|
||||
reason => emqx_utils:redact(Reason)
|
||||
}),
|
||||
throw(?CLIENT_DOWN_MESSAGE)
|
||||
end,
|
||||
|
@ -344,7 +344,7 @@ start_consumer(Config, InstanceId, ClientID) ->
|
|||
msg => "failed_to_start_kafka_consumer",
|
||||
instance_id => InstanceId,
|
||||
kafka_hosts => emqx_bridge_kafka_impl:hosts(BootstrapHosts0),
|
||||
reason => emqx_misc:redact(Reason2)
|
||||
reason => emqx_utils:redact(Reason2)
|
||||
}),
|
||||
stop_client(ClientID),
|
||||
throw(failed_to_start_kafka_consumer)
|
||||
|
|
|
@ -299,7 +299,7 @@ init_per_testcase(TestCase, Config) when
|
|||
common_init_per_testcase(TestCase, Config)
|
||||
end;
|
||||
init_per_testcase(t_cluster_group = TestCase, Config0) ->
|
||||
Config = emqx_misc:merge_opts(Config0, [{num_partitions, 6}]),
|
||||
Config = emqx_utils:merge_opts(Config0, [{num_partitions, 6}]),
|
||||
common_init_per_testcase(TestCase, Config);
|
||||
init_per_testcase(t_multiple_topic_mappings = TestCase, Config0) ->
|
||||
KafkaTopicBase =
|
||||
|
@ -673,7 +673,7 @@ create_bridge(Config, Overrides) ->
|
|||
Type = ?BRIDGE_TYPE_BIN,
|
||||
Name = ?config(kafka_name, Config),
|
||||
KafkaConfig0 = ?config(kafka_config, Config),
|
||||
KafkaConfig = emqx_map_lib:deep_merge(KafkaConfig0, Overrides),
|
||||
KafkaConfig = emqx_utils_maps:deep_merge(KafkaConfig0, Overrides),
|
||||
emqx_bridge:create(Type, Name, KafkaConfig).
|
||||
|
||||
delete_bridge(Config) ->
|
||||
|
@ -696,7 +696,7 @@ create_bridge_api(Config, Overrides) ->
|
|||
TypeBin = ?BRIDGE_TYPE_BIN,
|
||||
Name = ?config(kafka_name, Config),
|
||||
KafkaConfig0 = ?config(kafka_config, Config),
|
||||
KafkaConfig = emqx_map_lib:deep_merge(KafkaConfig0, Overrides),
|
||||
KafkaConfig = emqx_utils_maps:deep_merge(KafkaConfig0, Overrides),
|
||||
Params = KafkaConfig#{<<"type">> => TypeBin, <<"name">> => Name},
|
||||
Path = emqx_mgmt_api_test_util:api_path(["bridges"]),
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
|
@ -705,7 +705,7 @@ create_bridge_api(Config, Overrides) ->
|
|||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {Status, Headers, Body0}} ->
|
||||
{ok, {Status, Headers, emqx_json:decode(Body0, [return_maps])}};
|
||||
{ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
|
@ -719,7 +719,7 @@ update_bridge_api(Config, Overrides) ->
|
|||
TypeBin = ?BRIDGE_TYPE_BIN,
|
||||
Name = ?config(kafka_name, Config),
|
||||
KafkaConfig0 = ?config(kafka_config, Config),
|
||||
KafkaConfig = emqx_map_lib:deep_merge(KafkaConfig0, Overrides),
|
||||
KafkaConfig = emqx_utils_maps:deep_merge(KafkaConfig0, Overrides),
|
||||
BridgeId = emqx_bridge_resource:bridge_id(TypeBin, Name),
|
||||
Params = KafkaConfig#{<<"type">> => TypeBin, <<"name">> => Name},
|
||||
Path = emqx_mgmt_api_test_util:api_path(["bridges", BridgeId]),
|
||||
|
@ -728,7 +728,7 @@ update_bridge_api(Config, Overrides) ->
|
|||
ct:pal("updating bridge (via http): ~p", [Params]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(put, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {_Status, _Headers, Body0}} -> {ok, emqx_json:decode(Body0, [return_maps])};
|
||||
{ok, {_Status, _Headers, Body0}} -> {ok, emqx_utils_json:decode(Body0, [return_maps])};
|
||||
Error -> Error
|
||||
end,
|
||||
ct:pal("bridge update result: ~p", [Res]),
|
||||
|
@ -776,7 +776,7 @@ do_wait_for_expected_published_messages(Messages, Acc, _Timeout) when map_size(M
|
|||
do_wait_for_expected_published_messages(Messages0, Acc0, Timeout) ->
|
||||
receive
|
||||
{publish, Msg0 = #{payload := Payload}} ->
|
||||
case emqx_json:safe_decode(Payload, [return_maps]) of
|
||||
case emqx_utils_json:safe_decode(Payload, [return_maps]) of
|
||||
{error, _} ->
|
||||
ct:pal("unexpected message: ~p; discarding", [Msg0]),
|
||||
do_wait_for_expected_published_messages(Messages0, Acc0, Timeout);
|
||||
|
@ -928,7 +928,7 @@ create_rule_and_action_http(Config) ->
|
|||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
ct:pal("rule action params: ~p", [Params]),
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params) of
|
||||
{ok, Res} -> {ok, emqx_json:decode(Res, [return_maps])};
|
||||
{ok, Res} -> {ok, emqx_utils_json:decode(Res, [return_maps])};
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
|
@ -1188,7 +1188,7 @@ t_start_and_consume_ok(Config) ->
|
|||
<<"offset">> := OffsetReply,
|
||||
<<"headers">> := #{<<"hkey">> := <<"hvalue">>}
|
||||
},
|
||||
emqx_json:decode(PayloadBin, [return_maps]),
|
||||
emqx_utils_json:decode(PayloadBin, [return_maps]),
|
||||
#{
|
||||
offset_reply => OffsetReply,
|
||||
kafka_topic => KafkaTopic,
|
||||
|
@ -1300,7 +1300,7 @@ t_multiple_topic_mappings(Config) ->
|
|||
%% as configured.
|
||||
Payloads =
|
||||
lists:sort([
|
||||
case emqx_json:safe_decode(P, [return_maps]) of
|
||||
case emqx_utils_json:safe_decode(P, [return_maps]) of
|
||||
{ok, Decoded} -> Decoded;
|
||||
{error, _} -> P
|
||||
end
|
||||
|
@ -1441,7 +1441,7 @@ do_t_failed_creation_then_fixed(Config) ->
|
|||
<<"offset">> := _,
|
||||
<<"headers">> := #{<<"hkey">> := <<"hvalue">>}
|
||||
},
|
||||
emqx_json:decode(PayloadBin, [return_maps]),
|
||||
emqx_utils_json:decode(PayloadBin, [return_maps]),
|
||||
#{
|
||||
kafka_topic => KafkaTopic,
|
||||
payload => Payload
|
||||
|
@ -1543,7 +1543,7 @@ do_t_receive_after_recovery(Config) ->
|
|||
%% 2) publish messages while the consumer is down.
|
||||
%% we use `pmap' to avoid wolff sending the whole
|
||||
%% batch to a single partition.
|
||||
emqx_misc:pmap(fun(Msg) -> publish(Config, [Msg]) end, Messages1),
|
||||
emqx_utils:pmap(fun(Msg) -> publish(Config, [Msg]) end, Messages1),
|
||||
ok
|
||||
end),
|
||||
%% 3) restore and consume messages
|
||||
|
@@ -1636,7 +1636,7 @@ t_bridge_rule_action_source(Config) ->
             <<"headers">> := #{<<"hkey">> := <<"hvalue">>},
             <<"topic">> := KafkaTopic
         },
-        emqx_json:decode(RawPayload, [return_maps])
+        emqx_utils_json:decode(RawPayload, [return_maps])
     ),
     ?retry(
         _Interval = 200,

@@ -1667,7 +1667,7 @@ t_cluster_group(Config) ->
         || {Name, Opts} <- Cluster
     ],
     on_exit(fun() ->
-        emqx_misc:pmap(
+        emqx_utils:pmap(
             fun(N) ->
                 ct:pal("stopping ~p", [N]),
                 ok = emqx_common_test_helpers:stop_slave(N)

@@ -1889,7 +1889,7 @@ t_cluster_node_down(Config) ->
         Cluster
     ),
     on_exit(fun() ->
-        emqx_misc:pmap(
+        emqx_utils:pmap(
             fun(N) ->
                 ct:pal("stopping ~p", [N]),
                 ok = emqx_common_test_helpers:stop_slave(N)

@@ -2004,7 +2004,7 @@ t_begin_offset_earliest(Config) ->
     %% the consumers
     Published = receive_published(#{n => NumMessages}),
     Payloads = lists:map(
-        fun(#{payload := P}) -> emqx_json:decode(P, [return_maps]) end,
+        fun(#{payload := P}) -> emqx_utils_json:decode(P, [return_maps]) end,
         Published
     ),
     ?assert(

@@ -320,7 +320,7 @@ kafka_bridge_rest_api_helper(Config) ->
             <<"sql">> => <<"SELECT * from \"kafka_bridge_topic/#\"">>
         }
     ),
-    #{<<"id">> := RuleId} = emqx_json:decode(Rule, [return_maps]),
+    #{<<"id">> := RuleId} = emqx_utils_json:decode(Rule, [return_maps]),
     %% counters should be empty before
     ?assertEqual(0, emqx_resource_metrics:matched_get(ResourceId)),
     ?assertEqual(0, emqx_resource_metrics:success_get(ResourceId)),

@@ -802,7 +802,7 @@ api_path(Parts) ->
     ?HOST ++ filename:join([?BASE_PATH | Parts]).

 json(Data) ->
-    {ok, Jsx} = emqx_json:safe_decode(Data, [return_maps]),
+    {ok, Jsx} = emqx_utils_json:safe_decode(Data, [return_maps]),
     Jsx.

 delete_all_bridges() ->

@@ -92,7 +92,7 @@ kafka_consumer_test() ->
     ),

     %% Bad: can't repeat kafka topics.
-    BadConf1 = emqx_map_lib:deep_put(
+    BadConf1 = emqx_utils_maps:deep_put(
         [<<"bridges">>, <<"kafka_consumer">>, <<"my_consumer">>, <<"topic_mapping">>],
         Conf1,
         [

@@ -121,7 +121,7 @@ kafka_consumer_test() ->
     ),

     %% Bad: there must be at least 1 mapping.
-    BadConf2 = emqx_map_lib:deep_put(
+    BadConf2 = emqx_utils_maps:deep_put(
         [<<"bridges">>, <<"kafka_consumer">>, <<"my_consumer">>, <<"topic_mapping">>],
         Conf1,
         []
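Both fixtures use `emqx_utils_maps:deep_put/3` (formerly `emqx_map_lib:deep_put/3`) with the argument order shown above: key path, map, value. A small sketch with illustrative data:

    Conf = #{<<"bridges">> => #{<<"kafka_consumer">> => #{<<"my_consumer">> => #{}}}},
    %% Write a value several levels deep without rebuilding the nested maps by hand.
    BadConf = emqx_utils_maps:deep_put(
        [<<"bridges">>, <<"kafka_consumer">>, <<"my_consumer">>, <<"topic_mapping">>],
        Conf,
        []
    ).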
@@ -501,15 +501,17 @@ log_and_alarm(IsSuccess, Res, #{kind := ?APPLY_KIND_INITIATE} = Meta) ->
     %% because nothing is committed
     case IsSuccess of
         true ->
-            ?SLOG(debug, Meta#{msg => "cluster_rpc_apply_result", result => emqx_misc:redact(Res)});
+            ?SLOG(debug, Meta#{msg => "cluster_rpc_apply_result", result => emqx_utils:redact(Res)});
         false ->
-            ?SLOG(warning, Meta#{msg => "cluster_rpc_apply_result", result => emqx_misc:redact(Res)})
+            ?SLOG(warning, Meta#{
+                msg => "cluster_rpc_apply_result", result => emqx_utils:redact(Res)
+            })
     end;
 log_and_alarm(true, Res, Meta) ->
-    ?SLOG(debug, Meta#{msg => "cluster_rpc_apply_ok", result => emqx_misc:redact(Res)}),
+    ?SLOG(debug, Meta#{msg => "cluster_rpc_apply_ok", result => emqx_utils:redact(Res)}),
     do_alarm(deactivate, Res, Meta);
 log_and_alarm(false, Res, Meta) ->
-    ?SLOG(error, Meta#{msg => "cluster_rpc_apply_failed", result => emqx_misc:redact(Res)}),
+    ?SLOG(error, Meta#{msg => "cluster_rpc_apply_failed", result => emqx_utils:redact(Res)}),
     do_alarm(activate, Res, Meta).

 do_alarm(Fun, Res, #{tnx_id := Id} = Meta) ->
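`emqx_utils:redact/1` (the renamed `emqx_misc:redact/1`) masks values under sensitive keys before a term is logged. A hedged sketch; the exact set of masked keys and the mask text are assumptions, not shown in this diff:

    %% Mask credentials in a result term before it reaches the log.
    Safe = emqx_utils:redact(#{username => <<"admin">>, password => <<"secret">>}),
    %% `password' is expected to come back masked rather than in clear text.
    logger:debug("cluster_rpc_apply_result: ~p", [Safe]).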
@@ -73,7 +73,7 @@ handle_info(Info, State) ->
     {noreply, State}.

 terminate(_Reason, #{timer := TRef}) ->
-    emqx_misc:cancel_timer(TRef).
+    emqx_utils:cancel_timer(TRef).

 code_change(_OldVsn, State, _Extra) ->
     {ok, State}.

@@ -82,7 +82,7 @@ code_change(_OldVsn, State, _Extra) ->
 %% Internal functions
 %%--------------------------------------------------------------------
 ensure_timer(State = #{cleanup_ms := Ms}) ->
-    State#{timer := emqx_misc:start_timer(Ms, del_stale_mfa)}.
+    State#{timer := emqx_utils:start_timer(Ms, del_stale_mfa)}.

 %% @doc Keep the latest completed 100 records for querying and troubleshooting.
 del_stale_mfa(MaxHistory) ->
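Both timer helpers move to `emqx_utils` with the same shape: `start_timer(Interval, Msg)` arms a timer whose expiry is delivered as `{timeout, TRef, Msg}` (assumed from the `erlang:start_timer/3` convention these helpers wrap), and `cancel_timer/1` tears it down. A sketch of the surrounding gen_server loop under that assumption:

    ensure_timer(State = #{cleanup_ms := Ms}) ->
        State#{timer := emqx_utils:start_timer(Ms, del_stale_mfa)}.

    handle_info({timeout, TRef, del_stale_mfa}, State = #{timer := TRef}) ->
        %% ... run the periodic cleanup here, then re-arm the timer ...
        {noreply, ensure_timer(State)};
    handle_info(_Info, State) ->
        {noreply, State}.

    terminate(_Reason, #{timer := TRef}) ->
        emqx_utils:cancel_timer(TRef).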
@@ -43,50 +43,50 @@ add_handler(ConfKeyPath, HandlerName) ->
 remove_handler(ConfKeyPath) ->
     emqx_config_handler:remove_handler(ConfKeyPath).

--spec get(emqx_map_lib:config_key_path()) -> term().
+-spec get(emqx_utils_maps:config_key_path()) -> term().
 get(KeyPath) ->
     emqx:get_config(KeyPath).

--spec get(emqx_map_lib:config_key_path(), term()) -> term().
+-spec get(emqx_utils_maps:config_key_path(), term()) -> term().
 get(KeyPath, Default) ->
     emqx:get_config(KeyPath, Default).

--spec get_raw(emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_raw(emqx_utils_maps:config_key_path(), term()) -> term().
 get_raw(KeyPath, Default) ->
     emqx_config:get_raw(KeyPath, Default).

--spec get_raw(emqx_map_lib:config_key_path()) -> term().
+-spec get_raw(emqx_utils_maps:config_key_path()) -> term().
 get_raw(KeyPath) ->
     emqx_config:get_raw(KeyPath).

 %% @doc Returns all values in the cluster.
--spec get_all(emqx_map_lib:config_key_path()) -> #{node() => term()}.
+-spec get_all(emqx_utils_maps:config_key_path()) -> #{node() => term()}.
 get_all(KeyPath) ->
     {ResL, []} = emqx_conf_proto_v2:get_all(KeyPath),
     maps:from_list(ResL).

 %% @doc Returns the specified node's KeyPath, or exception if not found
--spec get_by_node(node(), emqx_map_lib:config_key_path()) -> term().
+-spec get_by_node(node(), emqx_utils_maps:config_key_path()) -> term().
 get_by_node(Node, KeyPath) when Node =:= node() ->
     emqx:get_config(KeyPath);
 get_by_node(Node, KeyPath) ->
     emqx_conf_proto_v2:get_config(Node, KeyPath).

 %% @doc Returns the specified node's KeyPath, or the default value if not found
--spec get_by_node(node(), emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_by_node(node(), emqx_utils_maps:config_key_path(), term()) -> term().
 get_by_node(Node, KeyPath, Default) when Node =:= node() ->
     emqx:get_config(KeyPath, Default);
 get_by_node(Node, KeyPath, Default) ->
     emqx_conf_proto_v2:get_config(Node, KeyPath, Default).

 %% @doc Returns the specified node's KeyPath, or config_not_found if key path not found
--spec get_node_and_config(emqx_map_lib:config_key_path()) -> term().
+-spec get_node_and_config(emqx_utils_maps:config_key_path()) -> term().
 get_node_and_config(KeyPath) ->
     {node(), emqx:get_config(KeyPath, config_not_found)}.

 %% @doc Update all value of key path in cluster-override.conf or local-override.conf.
 -spec update(
-    emqx_map_lib:config_key_path(),
+    emqx_utils_maps:config_key_path(),
     emqx_config:update_request(),
     emqx_config:update_opts()
 ) ->

@@ -97,7 +97,7 @@ update(KeyPath, UpdateReq, Opts) ->
 %% @doc Update the specified node's key path in local-override.conf.
 -spec update(
     node(),
-    emqx_map_lib:config_key_path(),
+    emqx_utils_maps:config_key_path(),
     emqx_config:update_request(),
     emqx_config:update_opts()
 ) ->

@@ -108,13 +108,13 @@ update(Node, KeyPath, UpdateReq, Opts) ->
     emqx_conf_proto_v2:update(Node, KeyPath, UpdateReq, Opts).

 %% @doc remove all value of key path in cluster-override.conf or local-override.conf.
--spec remove(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
+-spec remove(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 remove(KeyPath, Opts) ->
     emqx_conf_proto_v2:remove_config(KeyPath, Opts).

 %% @doc remove the specified node's key path in local-override.conf.
--spec remove(node(), emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
+-spec remove(node(), emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 remove(Node, KeyPath, Opts) when Node =:= node() ->
     emqx:remove_config(KeyPath, Opts#{override_to => local});

@@ -122,13 +122,13 @@ remove(Node, KeyPath, Opts) ->
     emqx_conf_proto_v2:remove_config(Node, KeyPath, Opts).

 %% @doc reset all value of key path in cluster-override.conf or local-override.conf.
--spec reset(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
+-spec reset(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 reset(KeyPath, Opts) ->
     emqx_conf_proto_v2:reset(KeyPath, Opts).

 %% @doc reset the specified node's key path in local-override.conf.
--spec reset(node(), emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
+-spec reset(node(), emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 reset(Node, KeyPath, Opts) when Node =:= node() ->
     emqx:reset_config(KeyPath, Opts#{override_to => local});
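Across all of these specs only the type's home module changes: a `config_key_path()` is still a plain list of config keys. A hedged sketch of reading and updating a value through this API, with the key path and the new value purely illustrative:

    %% Read with a default, then push an update through emqx_conf:update/3.
    KeyPath = [mqtt, max_packet_size],
    Current = emqx_conf:get(KeyPath, undefined),
    {ok, _Result} = emqx_conf:update(KeyPath, <<"1MB">>, #{}).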
@@ -166,7 +166,7 @@ gen_schema_json(Dir, I18nFile, SchemaModule, Lang) ->
     io:format(user, "===< Including fields from importance level: ~p~n", [IncludeImportance]),
     Opts = #{desc_file => I18nFile, lang => Lang, include_importance_up_from => IncludeImportance},
     JsonMap = hocon_schema_json:gen(SchemaModule, Opts),
-    IoData = jsx:encode(JsonMap, [space, {indent, 4}]),
+    IoData = emqx_utils_json:encode(JsonMap, [pretty, force_utf8]),
     ok = file:write_file(SchemaJsonFile, IoData).

 gen_api_schema_json(Dir, I18nFile, Lang) ->

@@ -268,13 +268,13 @@ do_gen_api_schema_json(File, SchemaMod, SchemaInfo) ->
         ApiSpec0
     ),
     Components = lists:foldl(fun(M, Acc) -> maps:merge(M, Acc) end, #{}, Components0),
-    IoData = jsx:encode(
+    IoData = emqx_utils_json:encode(
         #{
             info => SchemaInfo,
             paths => ApiSpec,
             components => #{schemas => Components}
         },
-        [space, {indent, 4}]
+        [pretty, force_utf8]
     ),
     file:write_file(File, IoData).

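The schema dump now goes through `emqx_utils_json:encode/2`; the jsx pretty-printing options `[space, {indent, 4}]` are replaced by the `[pretty, force_utf8]` flags shown above. A minimal sketch with a stand-in map:

    %% Encode a map as pretty-printed, UTF-8-safe JSON and write it out.
    JsonMap = #{info => #{title => <<"EMQX API">>}, paths => #{}},
    IoData = emqx_utils_json:encode(JsonMap, [pretty, force_utf8]),
    ok = file:write_file("schema-en.json", IoData).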
@@ -38,22 +38,22 @@

 -include_lib("emqx/include/bpapi.hrl").

--type update_config_key_path() :: [emqx_map_lib:config_key(), ...].
+-type update_config_key_path() :: [emqx_utils_maps:config_key(), ...].

 introduced_in() ->
     "5.0.0".

--spec get_config(node(), emqx_map_lib:config_key_path()) ->
+-spec get_config(node(), emqx_utils_maps:config_key_path()) ->
     term() | emqx_rpc:badrpc().
 get_config(Node, KeyPath) ->
     rpc:call(Node, emqx, get_config, [KeyPath]).

--spec get_config(node(), emqx_map_lib:config_key_path(), _Default) ->
+-spec get_config(node(), emqx_utils_maps:config_key_path(), _Default) ->
     term() | emqx_rpc:badrpc().
 get_config(Node, KeyPath, Default) ->
     rpc:call(Node, emqx, get_config, [KeyPath, Default]).

--spec get_all(emqx_map_lib:config_key_path()) -> emqx_rpc:multicall_result().
+-spec get_all(emqx_utils_maps:config_key_path()) -> emqx_rpc:multicall_result().
 get_all(KeyPath) ->
     rpc:multicall(emqx_conf, get_node_and_config, [KeyPath], 5000).

@@ -44,19 +44,19 @@ introduced_in() ->
 -spec sync_data_from_node(node()) -> {ok, binary()} | emqx_rpc:badrpc().
 sync_data_from_node(Node) ->
     rpc:call(Node, emqx_conf_app, sync_data_from_node, [], 20000).
--type update_config_key_path() :: [emqx_map_lib:config_key(), ...].
+-type update_config_key_path() :: [emqx_utils_maps:config_key(), ...].

--spec get_config(node(), emqx_map_lib:config_key_path()) ->
+-spec get_config(node(), emqx_utils_maps:config_key_path()) ->
     term() | emqx_rpc:badrpc().
 get_config(Node, KeyPath) ->
     rpc:call(Node, emqx, get_config, [KeyPath]).

--spec get_config(node(), emqx_map_lib:config_key_path(), _Default) ->
+-spec get_config(node(), emqx_utils_maps:config_key_path(), _Default) ->
     term() | emqx_rpc:badrpc().
 get_config(Node, KeyPath, Default) ->
     rpc:call(Node, emqx, get_config, [KeyPath, Default]).

--spec get_all(emqx_map_lib:config_key_path()) -> emqx_rpc:multicall_result().
+-spec get_all(emqx_utils_maps:config_key_path()) -> emqx_rpc:multicall_result().
 get_all(KeyPath) ->
     rpc:multicall(emqx_conf, get_node_and_config, [KeyPath], 5000).

@@ -7,6 +7,7 @@

 {deps, [
     {emqx, {path, "../emqx"}},
+    {emqx_utils, {path, "../emqx_utils"}},
     {emqx_resource, {path, "../emqx_resource"}},
     {eldap2, {git, "https://github.com/emqx/eldap2", {tag, "v0.2.2"}}},
     {mysql, {git, "https://github.com/emqx/mysql-otp", {tag, "1.7.2"}}},

@@ -219,7 +219,7 @@ on_start(
                 SSLOpts = emqx_tls_lib:to_client_opts(maps:get(ssl, Config)),
                 {tls, SSLOpts}
         end,
-    NTransportOpts = emqx_misc:ipv6_probe(TransportOpts),
+    NTransportOpts = emqx_utils:ipv6_probe(TransportOpts),
     PoolOpts = [
         {host, Host},
         {port, Port},

@@ -425,7 +425,7 @@ do_get_status(PoolName, Timeout) ->
                     Error
             end
         end,
-    try emqx_misc:pmap(DoPerWorker, Workers, Timeout) of
+    try emqx_utils:pmap(DoPerWorker, Workers, Timeout) of
        % we crash in case of non-empty lists since we don't know what to do in that case
        [_ | _] = Results ->
            case [E || {error, _} = E <- Results] of
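This is the three-argument variant of the same helper: `emqx_utils:pmap/3` runs the per-worker funs in parallel and bounds the whole operation by the given timeout (milliseconds assumed, matching the `Timeout` passed through above). A sketch with a hypothetical list of worker pids:

    %% Probe every worker in parallel, waiting at most 5 seconds overall.
    %% WorkerPids stands in for whatever worker list the caller already holds.
    Statuses = emqx_utils:pmap(
        fun(Worker) -> {Worker, erlang:is_process_alive(Worker)} end,
        WorkerPids,
        5000
    ).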
@@ -516,7 +516,7 @@ process_request(
     }.

 process_request_body(undefined, Msg) ->
-    emqx_json:encode(Msg);
+    emqx_utils_json:encode(Msg);
 process_request_body(BodyTks, Msg) ->
     emqx_plugin_libs_rule:proc_tmpl(BodyTks, Msg).

@@ -603,7 +603,7 @@ is_sensitive_key(_) ->
 %% Function that will do a deep traversal of Data and remove sensitive
 %% information (i.e., passwords)
 redact(Data) ->
-    emqx_misc:redact(Data, fun is_sensitive_key/1).
+    emqx_utils:redact(Data, fun is_sensitive_key/1).

 %% because the body may contain some sensitive data
 %% and at the same time the redact function will not scan the binary data
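`emqx_utils:redact/2` takes a predicate that decides which keys count as sensitive, so a connector can widen the default set, here with the module's own `is_sensitive_key/1`. A hedged sketch of that pairing; the specific key below is illustrative:

    is_sensitive_key(<<"authorization">>) -> true;
    is_sensitive_key(_) -> false.

    redact(Data) ->
        %% Values under keys accepted by the predicate are masked in the result.
        emqx_utils:redact(Data, fun is_sensitive_key/1).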
@@ -65,7 +65,7 @@ on_start(
     ?SLOG(info, #{
         msg => "starting_ldap_connector",
         connector => InstId,
-        config => emqx_misc:redact(Config)
+        config => emqx_utils:redact(Config)
     }),
     Servers = emqx_schema:parse_servers(Servers0, ?LDAP_HOST_OPTIONS),
     SslOpts =

@@ -162,7 +162,7 @@ on_start(
             rs -> "starting_mongodb_replica_set_connector";
             sharded -> "starting_mongodb_sharded_connector"
         end,
-    ?SLOG(info, #{msg => Msg, connector => InstId, config => emqx_misc:redact(Config)}),
+    ?SLOG(info, #{msg => Msg, connector => InstId, config => emqx_utils:redact(Config)}),
     NConfig = #{hosts := Hosts} = maybe_resolve_srv_and_txt_records(Config),
     SslOpts =
         case maps:get(enable, SSL) of

@@ -149,7 +149,7 @@ on_start(InstanceId, Conf) ->
     ?SLOG(info, #{
         msg => "starting_mqtt_connector",
         connector => InstanceId,
-        config => emqx_misc:redact(Conf)
+        config => emqx_utils:redact(Conf)
     }),
     BasicConf = basic_config(Conf),
     BridgeConf = BasicConf#{

@@ -102,7 +102,7 @@ on_start(
     ?SLOG(info, #{
         msg => "starting_mysql_connector",
         connector => InstId,
-        config => emqx_misc:redact(Config)
+        config => emqx_utils:redact(Config)
     }),
     SslOpts =
         case maps:get(enable, SSL) of

@@ -95,7 +95,7 @@ on_start(
     ?SLOG(info, #{
         msg => "starting_postgresql_connector",
         connector => InstId,
-        config => emqx_misc:redact(Config)
+        config => emqx_utils:redact(Config)
     }),
     SslOpts =
         case maps:get(enable, SSL) of

@@ -123,7 +123,7 @@ on_start(
     ?SLOG(info, #{
         msg => "starting_redis_connector",
         connector => InstId,
-        config => emqx_misc:redact(Config)
+        config => emqx_utils:redact(Config)
     }),
     ConfKey =
         case Type of

@@ -74,7 +74,7 @@ new_ssl_config(Config, _NewSSL) ->
 normalize_key_to_bin(undefined) ->
     undefined;
 normalize_key_to_bin(Map) when is_map(Map) ->
-    emqx_map_lib:binary_key_map(Map).
+    emqx_utils_maps:binary_key_map(Map).

 try_map_get(Key, Map, Default) when is_map(Map) ->
     maps:get(Key, Map, Default);
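`emqx_utils_maps:binary_key_map/1` (previously in `emqx_map_lib`) normalises map keys to binaries, which keeps later lookups on raw config maps consistent. A minimal sketch; whether nested maps are converted as well is not shown in this diff, so treat that as unspecified:

    %% Atom keys come back as binary keys; values are left untouched.
    Normalized = emqx_utils_maps:binary_key_map(#{keyfile => <<"key.pem">>, certfile => <<"cert.pem">>}),
    %% Normalized is expected to match #{<<"keyfile">> := _, <<"certfile">> := _}.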
@@ -85,7 +85,7 @@ to_remote_msg(MapMsg, #{
         qos = QoS,
         retain = Retain,
         topic = topic(Mountpoint, Topic),
-        props = emqx_misc:pub_props_to_packet(PubProps),
+        props = emqx_utils:pub_props_to_packet(PubProps),
         payload = Payload
     };
 to_remote_msg(#message{topic = Topic} = Msg, #{mountpoint := Mountpoint}) ->

@@ -112,7 +112,7 @@ to_broker_msg(
     Retain = replace_simple_var(RetainToken, MapMsg),
     PubProps = maps:get(pub_props, MapMsg, #{}),
     set_headers(
-        Props#{properties => emqx_misc:pub_props_to_packet(PubProps)},
+        Props#{properties => emqx_utils:pub_props_to_packet(PubProps)},
         emqx_message:set_flags(
             #{dup => Dup, retain => Retain},
             emqx_message:make(bridge, QoS, topic(Mountpoint, Topic), Payload)

@@ -123,7 +123,7 @@ process_payload(From, MapMsg) ->
     do_process_payload(maps:get(payload, From, undefined), MapMsg).

 do_process_payload(undefined, Msg) ->
-    emqx_json:encode(Msg);
+    emqx_utils_json:encode(Msg);
 do_process_payload(Tks, Msg) ->
     replace_vars_in_str(Tks, Msg).

@@ -124,7 +124,7 @@ start_link(Name, BridgeOpts) ->
         {error, Reason} = Error ->
             ?SLOG(error, #{
                 msg => "client_start_failed",
-                config => emqx_misc:redact(BridgeOpts),
+                config => emqx_utils:redact(BridgeOpts),
                 reason => Reason
             }),
             Error

@@ -410,7 +410,7 @@ handle_disconnect(_Reason) ->
     ok.

 maybe_publish_local(Msg, Vars, Props) ->
-    case emqx_map_lib:deep_get([local, topic], Vars, undefined) of
+    case emqx_utils_maps:deep_get([local, topic], Vars, undefined) of
         %% local topic is not set, discard it
         undefined ->
             ok;
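`emqx_utils_maps:deep_get/3` is the read-side counterpart of `deep_put/3`: it follows the key path and returns the supplied default when any step is missing, which is how the bridge distinguishes "no local topic configured" from a real value. A small sketch with illustrative data:

    Vars = #{local => #{topic => <<"local/ingest">>}},
    case emqx_utils_maps:deep_get([local, topic], Vars, undefined) of
        %% No local topic configured: nothing to publish.
        undefined -> ok;
        %% publish_local/1 stands in for the real local-publish path.
        Topic -> publish_local(Topic)
    end.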
Some files were not shown because too many files have changed in this diff.