fix: check backup file type & legal json

DDDHuang 2022-03-11 16:24:30 +08:00
parent d58bfed751
commit 2da3333879
7 changed files with 207 additions and 66 deletions
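
The changes below move backup-file handling out of the management HTTP handlers and into emqx_mgmt_data_backup, which now checks the file name (it must end in .json) and that the payload decodes as JSON before writing anything, and stores backups under data/backup (?BACKUP_DIR) rather than the top level of data_dir. A rough usage sketch of the new helpers, mirroring the t_backup_file test case further down (illustrative only, not part of the commit):

    %% Illustrative shell session based on the t_backup_file test case; not part of the diff.
    Bin = emqx_json:encode(#{a => b}).
    {error, bad_filename} = emqx_mgmt_data_backup:upload_backup_file(<<"bad.notjson">>, Bin).
    {error, bad_json} = emqx_mgmt_data_backup:upload_backup_file(<<"test.json">>, <<"[bad json]">>).
    ok = emqx_mgmt_data_backup:upload_backup_file(<<"test.json">>, Bin).
    {ok, #{filename := <<"test.json">>, file := <<"{\"a\":\"b\"}">>}} =
        emqx_mgmt_data_backup:read_backup_file(<<"test.json">>).
    [{_CreatedAt, _FileInfo}] = emqx_mgmt_data_backup:list_backup_file().
    ok = emqx_mgmt_data_backup:delete_backup_file(<<"test.json">>).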

@@ -3,7 +3,7 @@
{vsn, "4.4.2"}, % strict semver, bump manually!
{modules, []},
{registered, [emqx_management_sup]},
{applications, [kernel,stdlib,minirest]},
{applications, [kernel,stdlib,emqx_plugin_libs,minirest]},
{mod, {emqx_mgmt_app,[]}},
{env, []},
{licenses, ["Apache-2.0"]},

@@ -73,7 +73,7 @@
export(_Bindings, _Params) ->
case emqx_mgmt_data_backup:export() of
{ok, File = #{filename := Filename}} ->
minirest:return({ok, File#{filename => filename:basename(Filename)}});
minirest:return({ok, File#{filename => list_to_binary(filename:basename(Filename))}});
Return -> minirest:return(Return)
end.
@@ -83,29 +83,7 @@ list_exported(_Bindings, _Params) ->
minirest:return({ok, NList}).
get_list_exported() ->
Dir = emqx:get_env(data_dir),
{ok, Files} = file:list_dir_all(Dir),
lists:foldl(
fun(File, Acc) ->
case filename:extension(File) =:= ".json" of
true ->
FullFile = filename:join([Dir, File]),
case file:read_file_info(FullFile) of
{ok, #file_info{size = Size, ctime = CTime = {{Y, M, D}, {H, MM, S}}}} ->
CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y, M, D, H, MM, S]),
Seconds = calendar:datetime_to_gregorian_seconds(CTime),
[{Seconds, [{filename, list_to_binary(File)},
{size, Size},
{created_at, list_to_binary(CreatedAt)},
{node, node()}
]} | Acc];
{error, Reason} ->
logger:error("Read file info of ~s failed with: ~p", [File, Reason]),
Acc
end;
false -> Acc
end
end, [], Files).
emqx_mgmt_data_backup:list_backup_file().
import(_Bindings, Params) ->
case proplists:get_value(<<"filename">>, Params) of
@@ -121,22 +99,26 @@ import(_Bindings, Params) ->
case lists:member(Node,
[ erlang:atom_to_binary(N, utf8) || N <- ekka_mnesia:running_nodes() ]
) of
true -> minirest:return(rpc:call(erlang:binary_to_atom(Node, utf8), ?MODULE, do_import, [Filename]));
true ->
N = erlang:binary_to_atom(Node, utf8),
case rpc:call(N, ?MODULE, do_import, [Filename]) of
{badrpc, Reason} ->
minirest:return({error, Reason});
Res ->
minirest:return(Res)
end;
false -> minirest:return({error, no_existent_node})
end
end
end.
do_import(Filename) ->
FullFilename = fullname(Filename),
emqx_mgmt_data_backup:import(FullFilename, "{}").
emqx_mgmt_data_backup:import(Filename, "{}").
download(#{filename := Filename}, _Params) ->
FullFilename = fullname(Filename),
case file:read_file(FullFilename) of
{ok, Bin} ->
{ok, #{filename => list_to_binary(Filename),
file => Bin}};
case emqx_mgmt_data_backup:read_backup_file(Filename) of
{ok, Res} ->
{ok, Res};
{error, Reason} ->
minirest:return({error, Reason})
end.
@@ -146,8 +128,7 @@ upload(Bindings, Params) ->
do_upload(_Bindings, #{<<"filename">> := Filename,
<<"file">> := Bin}) ->
FullFilename = fullname(Filename),
case file:write_file(FullFilename, Bin) of
case emqx_mgmt_data_backup:upload_backup_file(Filename, Bin) of
ok ->
minirest:return({ok, [{node, node()}]});
{error, Reason} ->
@@ -159,8 +140,7 @@ do_upload(_Bindings, _Params) ->
minirest:return({error, missing_required_params}).
delete(#{filename := Filename}, _Params) ->
FullFilename = fullname(Filename),
case file:delete(FullFilename) of
case emqx_mgmt_data_backup:delete_backup_file(Filename) of
ok ->
minirest:return();
{error, Reason} ->
@@ -168,17 +148,14 @@ delete(#{filename := Filename}, _Params) ->
end.
import_content(Content) ->
File = dump_to_tmp_file(Content),
do_import(File).
dump_to_tmp_file(Content) ->
Bin = emqx_json:encode(Content),
Filename = tmp_filename(),
ok = file:write_file(fullname(Filename), Bin),
Filename.
fullname(Name) ->
filename:join(emqx:get_env(data_dir), Name).
case emqx_mgmt_data_backup:upload_backup_file(Filename, Bin) of
ok ->
do_import(Filename);
{error, Reason} ->
{error, Reason}
end.
tmp_filename() ->
Seconds = erlang:system_time(second),

@@ -31,6 +31,8 @@
]).
-endif.
-define(BACKUP_DIR, backup).
-export([ export_rules/0
, export_resources/0
, export_blacklist/0
@@ -53,8 +55,18 @@
-export([ export/0
, import/2
, upload_backup_file/2
, list_backup_file/0
, read_backup_file/1
, delete_backup_file/1
]).
-ifdef(TEST).
-export([ backup_dir/0
, delete_all_backup_file/0
]).
-endif.
%%--------------------------------------------------------------------
%% Data Export and Import
%%--------------------------------------------------------------------
@@ -600,19 +612,101 @@ to_version(Version) when is_binary(Version) ->
to_version(Version) when is_list(Version) ->
Version.
upload_backup_file(Filename0, Bin) ->
case ensure_file_name(Filename0) of
{ok, Filename} ->
case check_json(Bin) of
{ok, _} ->
logger:info("write backup file ~p", [Filename]),
file:write_file(Filename, Bin);
{error, Reason} ->
{error, Reason}
end;
{error, Reason} ->
{error, Reason}
end.
list_backup_file() ->
{ok, Files} = file:list_dir_all(backup_dir()),
lists:foldl(
fun(File, Acc) ->
case filename:extension(File) =:= ".json" of
true ->
{ok, FileName} = ensure_file_name(File),
case file:read_file_info(FileName) of
{ok, #file_info{size = Size, ctime = CTime = {{Y, M, D}, {H, MM, S}}}} ->
CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y, M, D, H, MM, S]),
Seconds = calendar:datetime_to_gregorian_seconds(CTime),
[{Seconds, [{filename, list_to_binary(File)},
{size, Size},
{created_at, list_to_binary(CreatedAt)},
{node, node()}
]} | Acc];
{error, Reason} ->
logger:error("Read file info of ~s failed with: ~p", [File, Reason]),
Acc
end;
false -> Acc
end
end, [], Files).
read_backup_file(Filename0) ->
case ensure_file_name(Filename0) of
{ok, Filename} ->
case file:read_file(Filename) of
{ok, Bin} ->
{ok, #{filename => to_binary(Filename0),
file => Bin}};
{error, Reason} ->
{error, Reason}
end;
{error, Reason} ->
{error, Reason}
end.
delete_backup_file(Filename0) ->
case ensure_file_name(Filename0) of
{ok, Filename} ->
case file:read_file_info(Filename) of
{ok, #file_info{}} ->
case file:delete(Filename) of
ok ->
logger:info("delete backup file ~p", [Filename]),
ok;
{error, Reason} ->
logger:error(
"delete backup file ~p error:~p", [Filename, Reason]),
{error, Reason}
end;
_ ->
{error, not_found}
end;
{error, _Reason} ->
{error, not_found}
end.
-ifdef(TEST).
%% clean all for test
delete_all_backup_file() ->
[begin
Filename = proplists:get_value(filename, Info),
_ = delete_backup_file(Filename)
end || {_, Info} <- list_backup_file()],
ok.
-endif.
export() ->
Seconds = erlang:system_time(second),
Data = do_export_data() ++ [{date, erlang:list_to_binary(emqx_mgmt_util:strftime(Seconds))}],
{{Y, M, D}, {H, MM, S}} = emqx_mgmt_util:datetime(Seconds),
Filename = io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]),
NFilename = filename:join([emqx:get_env(data_dir), Filename]),
ok = filelib:ensure_dir(NFilename),
case file:write_file(NFilename, emqx_json:encode(Data)) of
BaseFilename = io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]),
{ok, Filename} = ensure_file_name(BaseFilename),
case file:write_file(Filename, emqx_json:encode(Data)) of
ok ->
case file:read_file_info(NFilename) of
case file:read_file_info(Filename) of
{ok, #file_info{size = Size, ctime = {{Y1, M1, D1}, {H1, MM1, S1}}}} ->
CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y1, M1, D1, H1, MM1, S1]),
{ok, #{filename => list_to_binary(NFilename),
{ok, #{filename => Filename,
size => Size,
created_at => list_to_binary(CreatedAt),
node => node()
@@ -648,9 +742,8 @@ do_export_extra_data() -> [].
-ifdef(EMQX_ENTERPRISE).
import(Filename, OverridesJson) ->
case file:read_file(Filename) of
{ok, Json} ->
Imported = emqx_json:decode(Json, [return_maps]),
case check_import_json(Filename) of
{ok, Imported} ->
Overrides = emqx_json:decode(OverridesJson, [return_maps]),
Data = maps:merge(Imported, Overrides),
Version = to_version(maps:get(<<"version">>, Data)),
@@ -663,13 +756,13 @@ import(Filename, OverridesJson) ->
logger:error("The emqx data import failed: ~0p", [{Class, Reason, Stack}]),
{error, import_failed}
end;
Error -> Error
{error, Reason} ->
{error, Reason}
end.
-else.
import(Filename, OverridesJson) ->
case file:read_file(Filename) of
{ok, Json} ->
Imported = emqx_json:decode(Json, [return_maps]),
case check_import_json(Filename) of
{ok, Imported} ->
Overrides = emqx_json:decode(OverridesJson, [return_maps]),
Data = maps:merge(Imported, Overrides),
Version = to_version(maps:get(<<"version">>, Data)),
@@ -688,10 +781,52 @@ import(Filename, OverridesJson) ->
logger:error("Unsupported version: ~p", [Version]),
{error, unsupported_version, Version}
end;
Error -> Error
{error, Reason} ->
{error, Reason}
end.
-endif.
-spec(check_import_json(binary() | string()) -> {ok, map()} | {error, term()}).
check_import_json(Filename) ->
ReadFile = fun(F) -> file:read_file(F) end,
FunList = [fun ensure_file_name/1, ReadFile, fun check_json/1],
check_import_json(Filename, FunList).
check_import_json(Res, []) ->
{ok, Res};
check_import_json(Acc, [Fun | FunList]) ->
case Fun(Acc) of
{ok, Next} ->
check_import_json(Next, FunList);
{error, Reason} ->
{error, Reason}
end.
ensure_file_name(Filename) ->
case legal_filename(Filename) of
true ->
{ok, filename:join(backup_dir(), Filename)};
false ->
{error, bad_filename}
end.
backup_dir() ->
Dir = filename:join(emqx:get_env(data_dir), ?BACKUP_DIR),
ok = filelib:ensure_dir(filename:join([Dir, dummy])),
Dir.
legal_filename(Filename) ->
MaybeJson = filename:extension(Filename),
MaybeJson == ".json" orelse MaybeJson == <<".json">>.
check_json(MaybeJson) ->
case emqx_json:safe_decode(MaybeJson, [return_maps]) of
{ok, Json} ->
{ok, Json};
{error, _} ->
{error, bad_json}
end.
do_import_data(Data, Version) ->
do_import_extra_data(Data, Version),
import_resources_and_rules(maps:get(<<"resources">>, Data, []), maps:get(<<"rules">>, Data, []), Version),
@@ -800,3 +935,6 @@ get_old_type() ->
set_old_type(Type) ->
application:set_env(emqx_auth_mnesia, as, Type).
to_binary(Bin) when is_binary(Bin) -> Bin;
to_binary(Str) when is_list(Str) -> list_to_binary(Str).
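
Note that the import path now runs the same checks before decoding: check_import_json/1 chains ensure_file_name/1, file:read_file/1 and check_json/1, so an illegal file name or malformed JSON comes back as {error, bad_filename} or {error, bad_json} instead of crashing inside emqx_json:decode/2. A minimal illustration of the safe_decode/2 behaviour that check_json/1 relies on (not part of the commit):

    %% safe_decode/2 returns a tagged result instead of throwing on bad input.
    {ok, #{<<"a">> := <<"b">>}} = emqx_json:safe_decode(<<"{\"a\":\"b\"}">>, [return_maps]).
    {error, _Reason} = emqx_json:safe_decode(<<"[bad json]">>, [return_maps]).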

@@ -368,6 +368,25 @@ t_cli(_) ->
[?assertMatch({match, _}, re:run(Value, "mgmt"))
|| Value <- emqx_mgmt_cli:mgmt([""])].
t_backup_file(_)->
Filename = <<"test.json">>,
BadFilename = <<"bad.notjson">>,
Bin = emqx_json:encode(#{a => b}),
BadBin = <<"[bad json]">>,
{error, bad_filename} = emqx_mgmt_data_backup:upload_backup_file(BadFilename, Bin),
{error, bad_json} = emqx_mgmt_data_backup:upload_backup_file(Filename, BadBin),
ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
{ok, #{file := <<"{\"a\":\"b\"}">>, filename := <<"test.json">>}} =
emqx_mgmt_data_backup:read_backup_file(Filename),
[{_, FileInfoList}] = emqx_mgmt_data_backup:list_backup_file(),
Filename = proplists:get_value(filename, FileInfoList),
ok = emqx_mgmt_data_backup:delete_backup_file(Filename),
{error, not_found} = emqx_mgmt_data_backup:delete_backup_file(BadFilename),
ok.
mock_print() ->
catch meck:unload(emqx_ctl),
meck:new(emqx_ctl, [non_strict, passthrough]),

@@ -689,6 +689,7 @@ t_data(_) ->
ok = emqx_dashboard_admin:mnesia(boot),
application:ensure_all_started(emqx_rule_engine),
application:ensure_all_started(emqx_dashboard),
emqx_mgmt_data_backup:delete_all_backup_file(),
{ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]),
#{<<"filename">> := Filename, <<"node">> := Node} = emqx_ct_http:get_http_data(Data),
{ok, DataList} = request_api(get, api_path(["data","export"]), auth_header_()),
@@ -701,6 +702,8 @@ t_data(_) ->
?assertMatch({ok, _},
request_api(post, api_path(["data","import"]), [], auth_header_(),
#{<<"filename">> => Filename})),
_ = emqx_mgmt_data_backup:delete_backup_file(Filename),
emqx_mgmt_data_backup:delete_all_backup_file(),
application:stop(emqx_rule_engine),
application:stop(emqx_dashboard),
ok.
@@ -710,13 +713,16 @@ t_data_import_content(_) ->
ok = emqx_dashboard_admin:mnesia(boot),
application:ensure_all_started(emqx_rule_engine),
application:ensure_all_started(emqx_dashboard),
emqx_mgmt_data_backup:delete_all_backup_file(),
{ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]),
#{<<"filename">> := Filename} = emqx_ct_http:get_http_data(Data),
Dir = emqx:get_env(data_dir),
Dir = emqx_mgmt_data_backup:backup_dir(),
{ok, Bin} = file:read_file(filename:join(Dir, Filename)),
Content = emqx_json:decode(Bin),
?assertMatch({ok, "{\"code\":0}"},
request_api(post, api_path(["data","import"]), [], auth_header_(), Content)),
emqx_mgmt_data_backup:delete_all_backup_file(),
application:stop(emqx_rule_engine),
application:stop(emqx_dashboard).

@@ -189,8 +189,8 @@ init([]) ->
ok = create_table(),
erlang:process_flag(trap_exit, true),
OriginLogLevel = emqx_logger:get_primary_log_level(),
ok = filelib:ensure_dir(trace_dir()),
ok = filelib:ensure_dir(zip_dir()),
ok = filelib:ensure_dir(filename:join([trace_dir(), dummy])),
ok = filelib:ensure_dir(filename:join([zip_dir(), dummy])),
{ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
Traces = get_enable_trace(),
ok = update_log_primary_level(Traces, OriginLogLevel),
@@ -462,10 +462,10 @@ to_system_second(At) ->
end.
zip_dir() ->
trace_dir() ++ "zip/".
filename:join(trace_dir(), "zip").
trace_dir() ->
filename:join(emqx:get_env(data_dir), "trace") ++ "/".
filename:join(emqx:get_env(data_dir), "trace").
log_file(Name, Start) ->
filename:join(trace_dir(), filename(Name, Start)).
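
Both backup_dir/0 above and the trace/zip directory setup here rely on the same idiom: filelib:ensure_dir/1 only guarantees that the parent directories of its argument exist, so a dummy leaf name is joined on to get the directory itself created. A small stand-alone illustration (the paths are made up):

    %% filelib:ensure_dir/1 creates the missing *parent* directories of the
    %% given path; the final "dummy" component is never created itself.
    ok = filelib:ensure_dir(filename:join(["data", "trace", "dummy"])).
    true = filelib:is_dir(filename:join("data", "trace")).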

@@ -333,6 +333,7 @@ relx_overlay(ReleaseType) ->
, {mkdir, "data/configs"}
, {mkdir, "data/patches"}
, {mkdir, "data/scripts"}
, {mkdir, "data/backup"}
, {template, "data/loaded_plugins.tmpl", "data/loaded_plugins"}
, {template, "data/loaded_modules.tmpl", "data/loaded_modules"}
, {template, "data/emqx_vars", "releases/emqx_vars"}