fix: Authorization header leak in log entries for webhook
There might be another possibility for leakage. If the resource manager for the webhook resource crashes, OTP might log the spec for the resource manager, which contains the Config and thus the Authorization header. This is probably an issue for other resources as well and should be fixed in a separate commit. The following issue has been created for that: https://emqx.atlassian.net/browse/EMQX-8794

Fixes: https://emqx.atlassian.net/browse/EMQX-8791
parent bafc430184
commit 86cfbfb43c
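For illustration, a minimal sketch of the intended effect of the new redact/1 helper introduced below. This snippet is not part of the commit; the shape of Config is a hypothetical example, and the exact traversal behaviour is delegated to emqx_misc:redact/2.

%% Hypothetical connector config; the real Config layout may differ.
Config = #{
    request => #{
        headers => [
            {<<"Authorization">>, <<"Bearer s3cr3t-token">>},
            {<<"Content-Type">>, <<"application/json">>}
        ]
    }
},
%% redact/1 (added in this commit) delegates to emqx_misc:redact/2 with
%% is_sensitive_key/1, so values under Authorization-like keys are replaced
%% by the "******" placeholder while everything else is left untouched.
Redacted = emqx_misc:redact(Config, fun is_sensitive_key/1).
%% Expected result (assuming deep traversal of maps and {Key, Value} tuples):
%% #{request => #{headers =>
%%       [{<<"Authorization">>, <<"******">>},
%%        {<<"Content-Type">>, <<"application/json">>}]}}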
@@ -609,7 +609,11 @@ do_redact(K, V, Checker) ->
 
 -define(REDACT_VAL, "******").
 redact_v(V) when is_binary(V) -> <<?REDACT_VAL>>;
-redact_v(_V) -> ?REDACT_VAL.
+%% The HOCON schema system may generate sensitive values with this format
+redact_v([{str, Bin}]) when is_binary(Bin) ->
+    [{str, <<?REDACT_VAL>>}];
+redact_v(_V) ->
+    ?REDACT_VAL.
 
 is_redacted(K, V) ->
     do_is_redacted(K, V, fun is_sensitive_key/1).

@@ -209,7 +209,7 @@ on_start(
     ?SLOG(info, #{
         msg => "starting_http_connector",
         connector => InstId,
-        config => emqx_misc:redact(Config)
+        config => redact(Config)
     }),
     {Transport, TransportOpts} =
         case Scheme of

@@ -285,7 +285,11 @@ on_query(
     ?TRACE(
         "QUERY",
         "http_connector_received",
-        #{request => Request, connector => InstId, state => State}
+        #{
+            request => redact(Request),
+            connector => InstId,
+            state => redact(State)
+        }
     ),
     NRequest = formalize_request(Method, BasePath, Request),
     case

@@ -310,7 +314,7 @@ on_query(
         {error, Reason} = Result ->
             ?SLOG(error, #{
                 msg => "http_connector_do_request_failed",
-                request => NRequest,
+                request => redact(NRequest),
                 reason => Reason,
                 connector => InstId
             }),

@@ -322,7 +326,7 @@ on_query(
         {ok, StatusCode, Headers} ->
             ?SLOG(error, #{
                 msg => "http connector do request, received error response",
-                request => NRequest,
+                request => redact(NRequest),
                 connector => InstId,
                 status_code => StatusCode
             }),

@@ -330,7 +334,7 @@ on_query(
         {ok, StatusCode, Headers, Body} ->
             ?SLOG(error, #{
                 msg => "http connector do request, received error response",
-                request => NRequest,
+                request => redact(NRequest),
                 connector => InstId,
                 status_code => StatusCode
             }),

@@ -366,7 +370,11 @@ on_query_async(
     ?TRACE(
         "QUERY_ASYNC",
         "http_connector_received",
-        #{request => Request, connector => InstId, state => State}
+        #{
+            request => redact(Request),
+            connector => InstId,
+            state => redact(State)
+        }
     ),
     NRequest = formalize_request(Method, BasePath, Request),
     Worker =

@@ -401,7 +409,7 @@ do_get_status(PoolName, Timeout) ->
            {error, Reason} = Error ->
                ?SLOG(error, #{
                    msg => "http_connector_get_status_failed",
-                    reason => Reason,
+                    reason => redact(Reason),
                    worker => Worker
                }),
                Error

@@ -554,3 +562,33 @@ reply_delegator(ReplyFunAndArgs, Result) ->
         _ ->
             emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result)
     end.
+
+%% The HOCON schema system may generate sensitive keys with this format
+is_sensitive_key([{str, StringKey}]) ->
+    is_sensitive_key(StringKey);
+is_sensitive_key(Atom) when is_atom(Atom) ->
+    is_sensitive_key(erlang:atom_to_binary(Atom));
+is_sensitive_key(Bin) when is_binary(Bin), (size(Bin) =:= 19 orelse size(Bin) =:= 13) ->
+    try
+        %% This is wrapped in a try-catch since we don't know that Bin is a
+        %% valid string so string:lowercase/1 might throw an exception.
+        %%
+        %% We want to convert this to lowercase since the http header fields
+        %% are case insensitive, which means that a user of the Webhook bridge
+        %% can write this field name in many different ways.
+        LowercaseBin = iolist_to_binary(string:lowercase(Bin)),
+        case LowercaseBin of
+            <<"authorization">> -> true;
+            <<"proxy-authorization">> -> true;
+            _ -> false
+        end
+    catch
+        _:_ -> false
+    end;
+is_sensitive_key(_) ->
+    false.
+
+%% Function that will do a deep traversal of Data and remove sensitive
+%% information (i.e., passwords)
+redact(Data) ->
+    emqx_misc:redact(Data, fun is_sensitive_key/1).
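As a quick illustration of what the new is_sensitive_key/1 clauses match, the snippet below only exercises the helpers added in the last hunk; it is not part of the commit and calls the module-internal functions directly.

%% Illustrative only; is_sensitive_key/1 is an internal helper in this commit.
true = is_sensitive_key(<<"authorization">>),          %% 13 bytes, already lowercase
true = is_sensitive_key(<<"Proxy-Authorization">>),    %% 19 bytes, matched case-insensitively
true = is_sensitive_key('Authorization'),              %% atoms are converted to binaries first
true = is_sensitive_key([{str, <<"Authorization">>}]), %% key shape generated by the HOCON schema system
false = is_sensitive_key(<<"Content-Type">>).          %% not a credential-carrying header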