Merge pull request #7281 from DDDHuang/backup_json

fix: check backup file type & legal json
DDDHuang 2022-03-16 16:16:06 +08:00 committed by GitHub
commit 0d7ad25804
13 changed files with 275 additions and 81 deletions
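
A minimal sketch of the behaviour this change enforces, mirroring the new t_backup_file test further down (the shell session is illustrative and not part of the diff): backup files now live under data/backup, must carry a .json extension, and must contain valid JSON before they are written or imported.

1> emqx_mgmt_data_backup:upload_backup_file(<<"bad.notjson">>, <<"{}">>).
{error,bad_filename}
2> emqx_mgmt_data_backup:upload_backup_file(<<"test.json">>, <<"[bad json]">>).
{error,bad_json}
3> emqx_mgmt_data_backup:upload_backup_file(<<"test.json">>, emqx_json:encode(#{a => b})).
ok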

View File

@@ -3,7 +3,7 @@
   {vsn, "4.4.2"}, % strict semver, bump manually!
   {modules, []},
   {registered, [emqx_management_sup]},
-  {applications, [kernel,stdlib,minirest]},
+  {applications, [kernel,stdlib,emqx_plugin_libs,minirest]},
   {mod, {emqx_mgmt_app,[]}},
   {env, []},
   {licenses, ["Apache-2.0"]},

View File

@@ -73,39 +73,17 @@
 export(_Bindings, _Params) ->
     case emqx_mgmt_data_backup:export() of
         {ok, File = #{filename := Filename}} ->
-            minirest:return({ok, File#{filename => filename:basename(Filename)}});
+            minirest:return({ok, File#{filename => list_to_binary(filename:basename(Filename))}});
         Return -> minirest:return(Return)
     end.
 
 list_exported(_Bindings, _Params) ->
-    List = [ rpc:call(Node, ?MODULE, get_list_exported, []) || Node <- ekka_mnesia:running_nodes() ],
+    List = [rpc:call(Node, ?MODULE, get_list_exported, []) || Node <- ekka_mnesia:running_nodes()],
     NList = lists:map(fun({_, FileInfo}) -> FileInfo end, lists:keysort(1, lists:append(List))),
     minirest:return({ok, NList}).
 
 get_list_exported() ->
-    Dir = emqx:get_env(data_dir),
-    {ok, Files} = file:list_dir_all(Dir),
-    lists:foldl(
-        fun(File, Acc) ->
-            case filename:extension(File) =:= ".json" of
-                true ->
-                    FullFile = filename:join([Dir, File]),
-                    case file:read_file_info(FullFile) of
-                        {ok, #file_info{size = Size, ctime = CTime = {{Y, M, D}, {H, MM, S}}}} ->
-                            CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y, M, D, H, MM, S]),
-                            Seconds = calendar:datetime_to_gregorian_seconds(CTime),
-                            [{Seconds, [{filename, list_to_binary(File)},
-                                        {size, Size},
-                                        {created_at, list_to_binary(CreatedAt)},
-                                        {node, node()}
-                                       ]} | Acc];
-                        {error, Reason} ->
-                            logger:error("Read file info of ~s failed with: ~p", [File, Reason]),
-                            Acc
-                    end;
-                false -> Acc
-            end
-        end, [], Files).
+    emqx_mgmt_data_backup:list_backup_file().
 
 import(_Bindings, Params) ->
     case proplists:get_value(<<"filename">>, Params) of
@@ -121,22 +99,26 @@ import(_Bindings, Params) ->
             case lists:member(Node,
                               [ erlang:atom_to_binary(N, utf8) || N <- ekka_mnesia:running_nodes() ]
                              ) of
-                true -> minirest:return(rpc:call(erlang:binary_to_atom(Node, utf8), ?MODULE, do_import, [Filename]));
+                true ->
+                    N = erlang:binary_to_atom(Node, utf8),
+                    case rpc:call(N, ?MODULE, do_import, [Filename]) of
+                        {badrpc, Reason} ->
+                            minirest:return({error, Reason});
+                        Res ->
+                            minirest:return(Res)
+                    end;
                 false -> minirest:return({error, no_existent_node})
             end
         end
     end.
 
 do_import(Filename) ->
-    FullFilename = fullname(Filename),
-    emqx_mgmt_data_backup:import(FullFilename, "{}").
+    emqx_mgmt_data_backup:import(Filename, "{}").
 
 download(#{filename := Filename}, _Params) ->
-    FullFilename = fullname(Filename),
-    case file:read_file(FullFilename) of
-        {ok, Bin} ->
-            {ok, #{filename => list_to_binary(Filename),
-                   file => Bin}};
+    case emqx_mgmt_data_backup:read_backup_file(Filename) of
+        {ok, Res} ->
+            {ok, Res};
         {error, Reason} ->
            minirest:return({error, Reason})
     end.
@@ -146,8 +128,7 @@ upload(Bindings, Params) ->
 do_upload(_Bindings, #{<<"filename">> := Filename,
                        <<"file">> := Bin}) ->
-    FullFilename = fullname(Filename),
-    case file:write_file(FullFilename, Bin) of
+    case emqx_mgmt_data_backup:upload_backup_file(Filename, Bin) of
         ok ->
             minirest:return({ok, [{node, node()}]});
         {error, Reason} ->
@@ -159,8 +140,7 @@ do_upload(_Bindings, _Params) ->
     minirest:return({error, missing_required_params}).
 
 delete(#{filename := Filename}, _Params) ->
-    FullFilename = fullname(Filename),
-    case file:delete(FullFilename) of
+    case emqx_mgmt_data_backup:delete_backup_file(Filename) of
         ok ->
             minirest:return();
         {error, Reason} ->
@@ -168,19 +148,16 @@ delete(#{filename := Filename}, _Params) ->
     end.
 
 import_content(Content) ->
-    File = dump_to_tmp_file(Content),
-    do_import(File).
-
-dump_to_tmp_file(Content) ->
     Bin = emqx_json:encode(Content),
     Filename = tmp_filename(),
-    ok = file:write_file(fullname(Filename), Bin),
-    Filename.
-
-fullname(Name) ->
-    filename:join(emqx:get_env(data_dir), Name).
+    case emqx_mgmt_data_backup:upload_backup_file(Filename, Bin) of
+        ok ->
+            do_import(Filename);
+        {error, Reason} ->
+            {error, Reason}
+    end.
 
 tmp_filename() ->
     Seconds = erlang:system_time(second),
     {{Y, M, D}, {H, MM, S}} = emqx_mgmt_util:datetime(Seconds),
-    io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]).
+    list_to_binary(io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S])).

View File

@@ -31,6 +31,8 @@
 ]).
 -endif.
 
+-define(BACKUP_DIR, backup).
+
 -export([ export_rules/0
         , export_resources/0
         , export_blacklist/0
@@ -53,8 +55,18 @@
 -export([ export/0
         , import/2
+        , upload_backup_file/2
+        , list_backup_file/0
+        , read_backup_file/1
+        , delete_backup_file/1
         ]).
 
+-ifdef(TEST).
+-export([ backup_dir/0
+        , delete_all_backup_file/0
+        ]).
+-endif.
+
 %%--------------------------------------------------------------------
 %% Data Export and Import
 %%--------------------------------------------------------------------
@@ -600,19 +612,123 @@ to_version(Version) when is_binary(Version) ->
 to_version(Version) when is_list(Version) ->
     Version.
 
+upload_backup_file(Filename0, Bin) ->
+    case ensure_file_name(Filename0) of
+        {ok, Filename} ->
+            case check_json(Bin) of
+                {ok, _} ->
+                    logger:info("write backup file ~p", [Filename]),
+                    file:write_file(Filename, Bin);
+                {error, Reason} ->
+                    {error, Reason}
+            end;
+        {error, Reason} ->
+            {error, Reason}
+    end.
+
+list_backup_file() ->
+    Filter =
+        fun(File) ->
+            case file:read_file_info(File) of
+                {ok, #file_info{size = Size, ctime = CTime = {{Y, M, D}, {H, MM, S}}}} ->
+                    Seconds = calendar:datetime_to_gregorian_seconds(CTime),
+                    BaseFilename = to_binary(filename:basename(File)),
+                    CreatedAt = to_binary(io_lib:format("~p-~p-~p ~p:~p:~p", [Y, M, D, H, MM, S])),
+                    Info = {
+                        Seconds,
+                        [{filename, BaseFilename},
+                         {size, Size},
+                         {created_at, CreatedAt},
+                         {node, node()}
+                        ]
+                    },
+                    {true, Info};
+                _ ->
+                    false
+            end
+        end,
+    lists:filtermap(Filter, backup_files()).
+
+backup_files() ->
+    backup_files(backup_dir()) ++ backup_files(backup_dir_odl_version()).
+
+backup_files(Dir) ->
+    {ok, FilesAll} = file:list_dir_all(Dir),
+    Files = lists:filtermap(fun legal_filename/1, FilesAll),
+    [filename:join([Dir, File]) || File <- Files].
+
+look_up_file(Filename) when is_binary(Filename) ->
+    look_up_file(binary_to_list(Filename));
+look_up_file(Filename) ->
+    Filter =
+        fun(MaybeFile) ->
+            filename:basename(MaybeFile) == Filename
+        end,
+    case lists:filter(Filter, backup_files()) of
+        [] ->
+            {error, not_found};
+        List ->
+            {ok, hd(List)}
+    end.
+
+read_backup_file(Filename0) ->
+    case look_up_file(Filename0) of
+        {ok, Filename} ->
+            case file:read_file(Filename) of
+                {ok, Bin} ->
+                    {ok, #{filename => to_binary(Filename0),
+                           file => Bin}};
+                {error, Reason} ->
+                    logger:error("read file ~p failed ~p", [Filename, Reason]),
+                    {error, bad_file}
+            end;
+        {error, not_found} ->
+            {error, not_found}
+    end.
+
+delete_backup_file(Filename0) ->
+    case look_up_file(Filename0) of
+        {ok, Filename} ->
+            case file:read_file_info(Filename) of
+                {ok, #file_info{}} ->
+                    case file:delete(Filename) of
+                        ok ->
+                            logger:info("delete backup file ~p", [Filename]),
+                            ok;
+                        {error, Reason} ->
+                            logger:error(
+                                "delete backup file ~p error:~p", [Filename, Reason]),
+                            {error, Reason}
+                    end;
+                _ ->
+                    {error, not_found}
+            end;
+        {error, not_found} ->
+            {error, not_found}
+    end.
+
+-ifdef(TEST).
+%% clean all for test
+delete_all_backup_file() ->
+    [begin
+         Filename = proplists:get_value(filename, Info),
+         _ = delete_backup_file(Filename)
+     end || {_, Info} <- list_backup_file()],
+    ok.
+-endif.
+
 export() ->
     Seconds = erlang:system_time(second),
     Data = do_export_data() ++ [{date, erlang:list_to_binary(emqx_mgmt_util:strftime(Seconds))}],
     {{Y, M, D}, {H, MM, S}} = emqx_mgmt_util:datetime(Seconds),
-    Filename = io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]),
-    NFilename = filename:join([emqx:get_env(data_dir), Filename]),
-    ok = filelib:ensure_dir(NFilename),
-    case file:write_file(NFilename, emqx_json:encode(Data)) of
+    BaseFilename = io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]),
+    {ok, Filename} = ensure_file_name(BaseFilename),
+    case file:write_file(Filename, emqx_json:encode(Data)) of
         ok ->
-            case file:read_file_info(NFilename) of
+            case file:read_file_info(Filename) of
                 {ok, #file_info{size = Size, ctime = {{Y1, M1, D1}, {H1, MM1, S1}}}} ->
                     CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y1, M1, D1, H1, MM1, S1]),
-                    {ok, #{filename => list_to_binary(NFilename),
+                    {ok, #{filename => Filename,
                            size => Size,
                            created_at => list_to_binary(CreatedAt),
                            node => node()
@@ -648,9 +764,8 @@ do_export_extra_data() -> [].
 
 -ifdef(EMQX_ENTERPRISE).
 import(Filename, OverridesJson) ->
-    case file:read_file(Filename) of
-        {ok, Json} ->
-            Imported = emqx_json:decode(Json, [return_maps]),
+    case check_import_json(Filename) of
+        {ok, Imported} ->
             Overrides = emqx_json:decode(OverridesJson, [return_maps]),
             Data = maps:merge(Imported, Overrides),
             Version = to_version(maps:get(<<"version">>, Data)),
@@ -663,13 +778,13 @@ import(Filename, OverridesJson) ->
                     logger:error("The emqx data import failed: ~0p", [{Class, Reason, Stack}]),
                     {error, import_failed}
             end;
-        Error -> Error
+        {error, Reason} ->
+            {error, Reason}
     end.
 -else.
 import(Filename, OverridesJson) ->
-    case file:read_file(Filename) of
-        {ok, Json} ->
-            Imported = emqx_json:decode(Json, [return_maps]),
+    case check_import_json(Filename) of
+        {ok, Imported} ->
             Overrides = emqx_json:decode(OverridesJson, [return_maps]),
             Data = maps:merge(Imported, Overrides),
             Version = to_version(maps:get(<<"version">>, Data)),
@@ -688,10 +803,58 @@ import(Filename, OverridesJson) ->
                     logger:error("Unsupported version: ~p", [Version]),
                     {error, unsupported_version, Version}
             end;
-        Error -> Error
+        {error, Reason} ->
+            {error, Reason}
     end.
 -endif.
 
+-spec(check_import_json(binary() | string()) -> {ok, map()} | {error, term()}).
+check_import_json(Filename) ->
+    FunList = [
+        fun look_up_file/1,
+        fun(F) -> file:read_file(F) end,
+        fun check_json/1
+    ],
+    check_import_json(Filename, FunList).
+
+check_import_json(Res, []) ->
+    {ok, Res};
+check_import_json(Acc, [Fun | FunList]) ->
+    case Fun(Acc) of
+        {ok, Next} ->
+            check_import_json(Next, FunList);
+        {error, Reason} ->
+            {error, Reason}
+    end.
+
+ensure_file_name(Filename) ->
+    case legal_filename(Filename) of
+        true ->
+            {ok, filename:join(backup_dir(), Filename)};
+        false ->
+            {error, bad_filename}
+    end.
+
+backup_dir() ->
+    Dir = filename:join(emqx:get_env(data_dir), ?BACKUP_DIR),
+    ok = filelib:ensure_dir(filename:join([Dir, dummy])),
+    Dir.
+
+backup_dir_odl_version() ->
+    emqx:get_env(data_dir).
+
+legal_filename(Filename) ->
+    MaybeJson = filename:extension(Filename),
+    MaybeJson == ".json" orelse MaybeJson == <<".json">>.
+
+check_json(MaybeJson) ->
+    case emqx_json:safe_decode(MaybeJson, [return_maps]) of
+        {ok, Json} ->
+            {ok, Json};
+        {error, _} ->
+            {error, bad_json}
+    end.
+
 do_import_data(Data, Version) ->
     do_import_extra_data(Data, Version),
     import_resources_and_rules(maps:get(<<"resources">>, Data, []), maps:get(<<"rules">>, Data, []), Version),
@@ -800,3 +963,6 @@ get_old_type() ->
 
 set_old_type(Type) ->
     application:set_env(emqx_auth_mnesia, as, Type).
+
+to_binary(Bin) when is_binary(Bin) -> Bin;
+to_binary(Str) when is_list(Str) -> list_to_binary(Str).
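
The import path is now a small validation pipeline: check_import_json/1 folds the file name through look_up_file/1, file:read_file/1 and check_json/1, stopping at the first {error, _}. list_backup_file/0 also scans the legacy location (the data_dir root) via backup_dir_odl_version/0, so exports made before the upgrade stay visible. A hedged usage sketch of the exported helpers (file names are illustrative; return values follow the code above):

1> emqx_mgmt_data_backup:upload_backup_file(<<"export.json">>, emqx_json:encode(#{version => <<"4.4">>})).
ok
2> emqx_mgmt_data_backup:delete_backup_file(<<"missing.json">>).
{error,not_found}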

View File

@@ -183,7 +183,10 @@ do_import(File, Config, Overrides) ->
     mnesia:clear_table(?ACL_TABLE2),
     mnesia:clear_table(emqx_user),
     emqx_acl_mnesia_migrator:migrate_records(),
-    Filename = filename:join(proplists:get_value(data_dir, Config), File),
+    Filename = filename:basename(File),
+    FilePath = filename:join([proplists:get_value(data_dir, Config), File]),
+    {ok, Bin} = file:read_file(FilePath),
+    ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
     emqx_mgmt_data_backup:import(Filename, Overrides).
 
 test_import(username, {Username, Password}) ->

View File

@@ -34,14 +34,18 @@ init_per_suite(Cfg) ->
     Cfg.
 
 end_per_suite(Cfg) ->
+    emqx_mgmt_data_backup:delete_all_backup_file(),
     emqx_ct_helpers:stop_apps([emqx_management, emqx_rule_engine]),
     Cfg.
 
 get_data_path() ->
     emqx_ct_helpers:deps_path(emqx_management, "test/emqx_bridge_mqtt_data_export_import_SUITE_data/").
 
-import(FilePath, Version) ->
-    ok = emqx_mgmt_data_backup:import(get_data_path() ++ "/" ++ FilePath, <<"{}">>),
+import(FilePath0, Version) ->
+    Filename = filename:basename(FilePath0),
+    FilePath = filename:join([get_data_path(), FilePath0]),
+    {ok, Bin} = file:read_file(FilePath),
+    ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
     timer:sleep(500),
     lists:foreach(fun(#resource{id = Id, config = Config} = _Resource) ->
                       case Id of

View File

@@ -368,6 +368,25 @@ t_cli(_) ->
     [?assertMatch({match, _}, re:run(Value, "mgmt"))
      || Value <- emqx_mgmt_cli:mgmt([""])].
 
+t_backup_file(_)->
+    Filename = <<"test.json">>,
+    BadFilename = <<"bad.notjson">>,
+    Bin = emqx_json:encode(#{a => b}),
+    BadBin = <<"[bad json]">>,
+
+    {error, bad_filename} = emqx_mgmt_data_backup:upload_backup_file(BadFilename, Bin),
+    {error, bad_json} = emqx_mgmt_data_backup:upload_backup_file(Filename, BadBin),
+    ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
+
+    {ok, #{file := <<"{\"a\":\"b\"}">>, filename := <<"test.json">>}} =
+        emqx_mgmt_data_backup:read_backup_file(Filename),
+
+    [{_, FileInfoList}] = emqx_mgmt_data_backup:list_backup_file(),
+    Filename = proplists:get_value(filename, FileInfoList),
+
+    ok = emqx_mgmt_data_backup:delete_backup_file(Filename),
+    {error, not_found} = emqx_mgmt_data_backup:delete_backup_file(BadFilename),
+    ok.
+
 mock_print() ->
     catch meck:unload(emqx_ctl),
     meck:new(emqx_ctl, [non_strict, passthrough]),

View File

@@ -689,6 +689,7 @@ t_data(_) ->
     ok = emqx_dashboard_admin:mnesia(boot),
     application:ensure_all_started(emqx_rule_engine),
     application:ensure_all_started(emqx_dashboard),
+    emqx_mgmt_data_backup:delete_all_backup_file(),
     {ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]),
     #{<<"filename">> := Filename, <<"node">> := Node} = emqx_ct_http:get_http_data(Data),
     {ok, DataList} = request_api(get, api_path(["data","export"]), auth_header_()),
@@ -701,6 +702,8 @@ t_data(_) ->
     ?assertMatch({ok, _},
                  request_api(post, api_path(["data","import"]), [], auth_header_(),
                              #{<<"filename">> => Filename})),
+    _ = emqx_mgmt_data_backup:delete_backup_file(Filename),
+    emqx_mgmt_data_backup:delete_all_backup_file(),
     application:stop(emqx_rule_engine),
     application:stop(emqx_dashboard),
     ok.
@@ -710,13 +713,17 @@ t_data_import_content(_) ->
     ok = emqx_dashboard_admin:mnesia(boot),
     application:ensure_all_started(emqx_rule_engine),
     application:ensure_all_started(emqx_dashboard),
+    emqx_mgmt_data_backup:delete_all_backup_file(),
     {ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]),
     #{<<"filename">> := Filename} = emqx_ct_http:get_http_data(Data),
-    Dir = emqx:get_env(data_dir),
+    Dir = emqx_mgmt_data_backup:backup_dir(),
     {ok, Bin} = file:read_file(filename:join(Dir, Filename)),
     Content = emqx_json:decode(Bin),
+    ct:pal("Content:::: ~p~n", [Content]),
     ?assertMatch({ok, "{\"code\":0}"},
                  request_api(post, api_path(["data","import"]), [], auth_header_(), Content)),
+    emqx_mgmt_data_backup:delete_all_backup_file(),
     application:stop(emqx_rule_engine),
     application:stop(emqx_dashboard).

View File

@@ -46,8 +46,11 @@ remove_resource(Id) ->
     emqx_rule_registry:remove_resource(Id),
     emqx_rule_registry:remove_resource_params(Id).
 
-import(FilePath, Version) ->
-    ok = emqx_mgmt_data_backup:import(get_data_path() ++ "/" ++ FilePath, <<"{}">>),
+import(FilePath0, Version) ->
+    Filename = filename:basename(FilePath0),
+    FilePath = filename:join([get_data_path(), FilePath0]),
+    {ok, Bin} = file:read_file(FilePath),
+    ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
     lists:foreach(fun(#resource{id = Id, config = Config} = _Resource) ->
                       case Id of
                           <<"webhook">> ->

View File

@@ -1,6 +1,6 @@
 {application, emqx_plugin_libs,
  [{description, "EMQ X Plugin utility libs"},
-  {vsn, "4.4.1"},
+  {vsn, "4.4.2"},
   {modules, []},
   {applications, [kernel,stdlib]},
   {env, []}

View File

@@ -1,12 +1,24 @@
 %% -*- mode: erlang -*-
 {VSN,
-  [{"4.4.0",
-    [ {update, emqx_slow_subs, {advanced, ["4.4.0"]}}
+  [{"4.4.1",
+    [ {load_module,emqx_trace,brutal_purge,soft_purge,[]}
+    , {load_module,emqx_trace_api,brutal_purge,soft_purge,[]}
+    ]},
+   {"4.4.0",
+    [ {load_module,emqx_trace,brutal_purge,soft_purge,[]}
+    , {load_module,emqx_trace_api,brutal_purge,soft_purge,[]}
+    , {update, emqx_slow_subs, {advanced, ["4.4.0"]}}
     , {load_module,emqx_slow_subs_api,brutal_purge,soft_purge,[]}
     ]},
    {<<".*">>,[]}],
-  [{"4.4.0",
-    [ {update, emqx_slow_subs, {advanced, ["4.4.0"]}}
+  [{"4.4.1",
+    [ {load_module,emqx_trace,brutal_purge,soft_purge,[]}
+    , {load_module,emqx_trace_api,brutal_purge,soft_purge,[]}
+    ]},
+   {"4.4.0",
+    [ {load_module,emqx_trace,brutal_purge,soft_purge,[]}
+    , {load_module,emqx_trace_api,brutal_purge,soft_purge,[]}
+    , {update, emqx_slow_subs, {advanced, ["4.4.0"]}}
     , {load_module,emqx_slow_subs_api,brutal_purge,soft_purge,[]}
     ]},
    {<<".*">>,[]}]

View File

@@ -189,8 +189,8 @@ init([]) ->
     ok = create_table(),
     erlang:process_flag(trap_exit, true),
     OriginLogLevel = emqx_logger:get_primary_log_level(),
-    ok = filelib:ensure_dir(trace_dir()),
-    ok = filelib:ensure_dir(zip_dir()),
+    ok = filelib:ensure_dir(filename:join([trace_dir(), dummy])),
+    ok = filelib:ensure_dir(filename:join([zip_dir(), dummy])),
     {ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
     Traces = get_enable_trace(),
     ok = update_log_primary_level(Traces, OriginLogLevel),
@@ -462,10 +462,10 @@ to_system_second(At) ->
     end.
 
 zip_dir() ->
-    trace_dir() ++ "zip/".
+    filename:join(trace_dir(), "zip").
 
 trace_dir() ->
-    filename:join(emqx:get_env(data_dir), "trace") ++ "/".
+    filename:join(emqx:get_env(data_dir), "trace").
 
 log_file(Name, Start) ->
     filename:join(trace_dir(), filename(Name, Start)).
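
Why the dummy path segment in init/1 above: filelib:ensure_dir/1 only creates the parent directories of the path it is given, so once trace_dir()/zip_dir() stopped returning a trailing "/", passing them directly would no longer create the directories themselves. An illustrative sketch:

%% filelib:ensure_dir("data/trace") only guarantees that "data/" exists;
%% filelib:ensure_dir(filename:join(["data/trace", dummy])) also creates "data/trace/".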

View File

@@ -95,7 +95,8 @@ download_zip_log(#{name := Name}, _Param) ->
             TraceFiles = collect_trace_file(TraceLog),
             ZipDir = emqx_trace:zip_dir(),
             Zips = group_trace_file(ZipDir, TraceLog, TraceFiles),
-            ZipFileName = ZipDir ++ binary_to_list(Name) ++ ".zip",
+            ZipFileName0 = binary_to_list(Name) ++ ".zip",
+            ZipFileName = filename:join([Zips, ZipFileName0]),
             {ok, ZipFile} = zip:zip(ZipFileName, Zips, [{cwd, ZipDir}]),
             emqx_trace:delete_files_after_send(ZipFileName, Zips),
             {ok, ZipFile};
@@ -107,9 +108,10 @@ group_trace_file(ZipDir, TraceLog, TraceFiles) ->
     lists:foldl(fun(Res, Acc) ->
         case Res of
             {ok, Node, Bin} ->
-                ZipName = ZipDir ++ Node ++ "-" ++ TraceLog,
+                FileName = Node ++ "-" ++ TraceLog,
+                ZipName = filename:join([ZipDir, FileName]),
                 case file:write_file(ZipName, Bin) of
-                    ok -> [Node ++ "-" ++ TraceLog | Acc];
+                    ok -> [FileName | Acc];
                     _ -> Acc
                 end;
             {error, Node, Reason} ->

View File

@@ -333,6 +333,7 @@ relx_overlay(ReleaseType) ->
     , {mkdir, "data/configs"}
     , {mkdir, "data/patches"}
     , {mkdir, "data/scripts"}
+    , {mkdir, "data/backup"}
     , {template, "data/loaded_plugins.tmpl", "data/loaded_plugins"}
     , {template, "data/loaded_modules.tmpl", "data/loaded_modules"}
     , {template, "data/emqx_vars", "releases/emqx_vars"}