fix(crl): make http timeout global for all listeners
We make the CRL HTTP fetch timeout a single global setting (crl_cache.http_timeout) shared by all listeners, since one value is simpler to understand and to implement than a per-listener option. A before/after sketch of the configuration change is shown below.
parent 6d27e24bb1
commit c9e05acb4c
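
For context, a minimal before/after sketch of the configuration change; the key names and the 15s default are the ones appearing in the diff below, nothing else is assumed:

    ## before: configured per listener
    listener.ssl.external.crl_cache_http_timeout = 15s

    ## after: one global value applied to all listeners
    crl_cache.http_timeout = 15s

As the diff shows, the global key is mapped to the emqx.crl_cache_http_timeout application environment entry and is read once, via emqx:get_env/2, when the CRL cache process is started.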
@@ -1562,11 +1562,12 @@ listener.ssl.external.cacertfile = {{ platform_etc_dir }}/certs/cacert.pem
 ## Value: String
 ## listener.ssl.external.crl_cache_urls = http://my.crl.server/intermediate.crl.pem, http://my.other.crl.server/another.crl.pem
 
-## The timeout for the HTTP request when fetching CRLs.
+## The timeout for the HTTP request when fetching CRLs. This is
+## global for all listeners.
 ##
 ## Value: Duration
 ## Default: 15 s
-## listener.ssl.external.crl_cache_http_timeout = 15s
+## crl_cache.http_timeout = 15s
 
 ## The period to refresh the CRLs from the servers. This is global
 ## for all URLs and listeners.

@@ -1712,7 +1712,7 @@ end}.
   {datatype, string}
 ]}.
 
-{mapping, "listener.ssl.$name.crl_cache_http_timeout", "emqx.listeners", [
+{mapping, "crl_cache.http_timeout", "emqx.crl_cache_http_timeout", [
   {default, "15s"},
   {datatype, {duration, ms}}
 ]}.

@@ -2339,7 +2339,7 @@ end}.
     end,
     CRLCheck = case cuttlefish:conf_get(Prefix ++ ".enable_crl_check", Conf, false) of
                  true ->
-                   HTTPTimeout = cuttlefish:conf_get(Prefix ++ ".crl_cache_http_timeout", Conf, timer:seconds(15)),
+                   HTTPTimeout = cuttlefish:conf_get("crl_cache.http_timeout", Conf, timer:seconds(15)),
                    %% {crl_check, true} doesn't work
                    [ {crl_check, peer}
                    , {crl_cache, {ssl_crl_cache, {internal, [{http, HTTPTimeout}]}}}

@@ -41,12 +41,13 @@
 -define(LOG(Level, Format, Args),
         logger:log(Level, "[~p] " ++ Format, [?MODULE | Args])).
--define(HTTP_TIMEOUT, timer:seconds(10)).
+-define(HTTP_TIMEOUT, timer:seconds(15)).
 -define(RETRY_TIMEOUT, 5_000).
 
 -record(state,
         { refresh_timers = #{} :: #{binary() => timer:tref()}
         , refresh_interval = timer:minutes(15) :: timer:time()
+        , http_timeout = ?HTTP_TIMEOUT :: timer:time()
         }).
 
 %%--------------------------------------------------------------------

@@ -60,9 +61,13 @@ start_link() ->
                                      timer:minutes(15)),
     MinimumRefreshInverval = timer:minutes(1),
     RefreshIntervalMS = max(RefreshIntervalMS0, MinimumRefreshInverval),
-    start_link(#{urls => URLs, refresh_interval => RefreshIntervalMS}).
+    HTTPTimeoutMS = emqx:get_env(crl_cache_http_timeout, ?HTTP_TIMEOUT),
+    start_link(#{ urls => URLs
+                , refresh_interval => RefreshIntervalMS
+                , http_timeout => HTTPTimeoutMS
+                }).
 
-start_link(Opts = #{urls := _, refresh_interval := _}) ->
+start_link(Opts = #{urls := _, refresh_interval := _, http_timeout := _}) ->
     gen_server:start_link({local, ?MODULE}, ?MODULE, Opts, []).
 
 refresh(URL) ->

@@ -75,9 +80,15 @@ evict(URL) ->
 %% gen_server behaviour
 %%--------------------------------------------------------------------
 
-init(#{urls := URLs, refresh_interval := RefreshIntervalMS}) ->
+init(Config) ->
+    #{ urls := URLs
+     , refresh_interval := RefreshIntervalMS
+     , http_timeout := HTTPTimeoutMS
+     } = Config,
     State = lists:foldl(fun(URL, Acc) -> ensure_timer(URL, Acc, 0) end,
-                        #state{refresh_interval = RefreshIntervalMS},
+                        #state{ refresh_interval = RefreshIntervalMS
+                              , http_timeout = HTTPTimeoutMS
+                              },
                         URLs),
     {ok, State}.
 

@@ -95,7 +106,7 @@ handle_cast({evict, URL}, State0 = #state{refresh_timers = RefreshTimers0}) ->
                          }),
     {noreply, State};
 handle_cast({refresh, URL}, State0) ->
-    case do_http_fetch_and_cache(URL) of
+    case do_http_fetch_and_cache(URL, State0#state.http_timeout) of
         {error, Error} ->
             ?tp(crl_refresh_failure, #{error => Error, url => URL}),
             ?LOG(error, "failed to fetch crl response for ~p; error: ~p",

@@ -109,12 +120,14 @@ handle_cast(_Cast, State) ->
     {noreply, State}.
 
 handle_info({timeout, TRef, {refresh, URL}},
-            State = #state{refresh_timers = RefreshTimers}) ->
+            State = #state{ refresh_timers = RefreshTimers
+                          , http_timeout = HTTPTimeoutMS
+                          }) ->
     case maps:get(URL, RefreshTimers, undefined) of
         TRef ->
             ?tp(crl_refresh_timer, #{url => URL}),
             ?LOG(debug, "refreshing crl response for ~p", [URL]),
-            case do_http_fetch_and_cache(URL) of
+            case do_http_fetch_and_cache(URL, HTTPTimeoutMS) of
                 {error, Error} ->
                     ?LOG(error, "failed to fetch crl response for ~p; error: ~p",
                          [URL, Error]),

@@ -142,10 +155,9 @@ http_get(URL, HTTPTimeout) ->
              [{body_format, binary}]
             ).
 
-do_http_fetch_and_cache(URL) ->
+do_http_fetch_and_cache(URL, HTTPTimeoutMS) ->
     ?tp(crl_http_fetch, #{crl_url => URL}),
-    %% FIXME: read from config
-    Resp = ?MODULE:http_get(URL, ?HTTP_TIMEOUT),
+    Resp = ?MODULE:http_get(URL, HTTPTimeoutMS),
     case Resp of
         {ok, {{_, 200, _}, _, Body}} ->
             case parse_crls(Body) of

@@ -257,6 +257,7 @@ t_init_refresh(Config) ->
     URL2 = "http://localhost/crl2.pem",
     Opts = #{ urls => [URL1, URL2]
             , refresh_interval => timer:minutes(15)
+            , http_timeout => timer:seconds(15)
             },
     ok = snabbkaffe:start_trace(),
     {ok, SubRef} = snabbkaffe:subscribe(