Merge pull request #11884 from zhongwencool/prometheus
Support the enable_basic_auth option for the Prometheus scrape API
Commit: ddde927bd6
@@ -1190,37 +1190,44 @@ tr_prometheus_collectors(Conf) ->
         emqx_prometheus,
         emqx_prometheus_mria
         %% builtin vm collectors
-        | tr_vm_dist_collector(Conf) ++
-            tr_mnesia_collector(Conf) ++
-            tr_vm_statistics_collector(Conf) ++
-            tr_vm_system_info_collector(Conf) ++
-            tr_vm_memory_collector(Conf) ++
-            tr_vm_msacc_collector(Conf)
+        | prometheus_collectors(Conf)
     ].

-tr_vm_dist_collector(Conf) ->
-    Enabled = conf_get("prometheus.vm_dist_collector", Conf, disabled),
-    collector_enabled(Enabled, prometheus_vm_dist_collector).
-
-tr_mnesia_collector(Conf) ->
-    Enabled = conf_get("prometheus.mnesia_collector", Conf, disabled),
-    collector_enabled(Enabled, prometheus_mnesia_collector).
-
-tr_vm_statistics_collector(Conf) ->
-    Enabled = conf_get("prometheus.vm_statistics_collector", Conf, disabled),
-    collector_enabled(Enabled, prometheus_vm_statistics_collector).
-
-tr_vm_system_info_collector(Conf) ->
-    Enabled = conf_get("prometheus.vm_system_info_collector", Conf, disabled),
-    collector_enabled(Enabled, prometheus_vm_system_info_collector).
-
-tr_vm_memory_collector(Conf) ->
-    Enabled = conf_get("prometheus.vm_memory_collector", Conf, disabled),
-    collector_enabled(Enabled, prometheus_vm_memory_collector).
-
-tr_vm_msacc_collector(Conf) ->
-    Enabled = conf_get("prometheus.vm_msacc_collector", Conf, disabled),
-    collector_enabled(Enabled, prometheus_vm_msacc_collector).
+prometheus_collectors(Conf) ->
+    case conf_get("prometheus.enable_basic_auth", Conf, undefined) of
+        %% legacy
+        undefined ->
+            tr_collector("prometheus.vm_dist_collector", prometheus_vm_dist_collector, Conf) ++
+                tr_collector("prometheus.mnesia_collector", prometheus_mnesia_collector, Conf) ++
+                tr_collector(
+                    "prometheus.vm_statistics_collector", prometheus_vm_statistics_collector, Conf
+                ) ++
+                tr_collector(
+                    "prometheus.vm_system_info_collector", prometheus_vm_system_info_collector, Conf
+                ) ++
+                tr_collector("prometheus.vm_memory_collector", prometheus_vm_memory_collector, Conf) ++
+                tr_collector("prometheus.vm_msacc_collector", prometheus_vm_msacc_collector, Conf);
+        %% new
+        _ ->
+            tr_collector("prometheus.collectors.vm_dist", prometheus_vm_dist_collector, Conf) ++
+                tr_collector("prometheus.collectors.mnesia", prometheus_mnesia_collector, Conf) ++
+                tr_collector(
+                    "prometheus.collectors.vm_statistics", prometheus_vm_statistics_collector, Conf
+                ) ++
+                tr_collector(
+                    "prometheus.collectors.vm_system_info",
+                    prometheus_vm_system_info_collector,
+                    Conf
+                ) ++
+                tr_collector(
+                    "prometheus.collectors.vm_memory", prometheus_vm_memory_collector, Conf
+                ) ++
+                tr_collector("prometheus.collectors.vm_msacc", prometheus_vm_msacc_collector, Conf)
+    end.
+
+tr_collector(Key, Collect, Conf) ->
+    Enabled = conf_get(Key, Conf, disabled),
+    collector_enabled(Enabled, Collect).

 collector_enabled(enabled, Collector) -> [Collector];
 collector_enabled(disabled, _) -> [].
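As an editorial aside (not part of the diff): after this translation runs, the enabled collectors end up in the `prometheus` application environment and its default registry, which is also how the PR's API test suite verifies them. A minimal sketch from an EMQX node shell:

```erlang
%% Editorial sketch, not part of the change set: check which collectors the
%% translation above actually enabled. These are the same calls the PR's API
%% test suite uses for its assertions.
EnvCollectors = application:get_env(prometheus, collectors, []),
RegCollectors = prometheus_registry:collectors(default),
lists:sort(EnvCollectors) =:= lists:sort(RegCollectors).
```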
@@ -185,7 +185,8 @@ fields(meta) ->
 schema_with_example(Type, Example) ->
     hoconsc:mk(Type, #{examples => #{<<"example">> => Example}}).

--spec schema_with_examples(hocon_schema:type(), map()) -> hocon_schema:field_schema_map().
+-spec schema_with_examples(hocon_schema:type(), map() | list(tuple())) ->
+    hocon_schema:field_schema_map().
 schema_with_examples(Type, Examples) ->
     hoconsc:mk(Type, #{examples => #{<<"examples">> => Examples}}).

@@ -42,6 +42,7 @@
 %% gen_server callbacks
 -export([
     init/1,
+    handle_continue/2,
     handle_call/3,
     handle_cast/2,
     handle_info/2,
@@ -74,8 +75,8 @@
 %% APIs
 %%--------------------------------------------------------------------

-start_link([]) ->
-    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+start_link(Conf) ->
+    gen_server:start_link({local, ?MODULE}, ?MODULE, Conf, []).

 info() ->
     gen_server:call(?MODULE, info).
@@ -84,49 +85,41 @@ info() ->
 %% gen_server callbacks
 %%--------------------------------------------------------------------

-init([]) ->
-    #{interval := Interval} = opts(),
-    {ok, #{timer => ensure_timer(Interval), ok => 0, failed => 0}}.
+init(Conf) ->
+    {ok, #{}, {continue, Conf}}.

-handle_call(info, _From, State = #{timer := Timer}) ->
-    {reply, State#{opts => opts(), next_push_ms => erlang:read_timer(Timer)}, State};
+handle_continue(Conf, State) ->
+    Opts = #{interval := Interval} = opts(Conf),
+    {noreply, State#{
+        timer => ensure_timer(Interval),
+        opts => Opts,
+        ok => 0,
+        failed => 0
+    }}.
+
+handle_call(info, _From, State = #{timer := Timer, opts := Opts}) ->
+    {reply, State#{opts => Opts, next_push_ms => erlang:read_timer(Timer)}, State};
 handle_call(_Msg, _From, State) ->
     {reply, ok, State}.

 handle_cast(_Msg, State) ->
     {noreply, State}.

-handle_info({timeout, Timer, ?TIMER_MSG}, State = #{timer := Timer}) ->
-    #{
-        interval := Interval,
-        headers := Headers,
-        job_name := JobName,
-        push_gateway_server := Server
-    } = opts(),
-    PushRes = push_to_push_gateway(Server, Headers, JobName),
+handle_info({timeout, Timer, ?TIMER_MSG}, State = #{timer := Timer, opts := Opts}) ->
+    #{interval := Interval, headers := Headers, url := Server} = Opts,
+    PushRes = push_to_push_gateway(Server, Headers),
     NewTimer = ensure_timer(Interval),
     NewState = maps:update_with(PushRes, fun(C) -> C + 1 end, 1, State#{timer => NewTimer}),
     %% Data is too big, hibernate for saving memory and stop system monitor warning.
     {noreply, NewState, hibernate};
+handle_info({update, Conf}, State = #{timer := Timer}) ->
+    emqx_utils:cancel_timer(Timer),
+    handle_continue(Conf, State);
 handle_info(_Msg, State) ->
     {noreply, State}.

-push_to_push_gateway(Uri, Headers, JobName) when is_list(Headers) ->
-    [Name, Ip] = string:tokens(atom_to_list(node()), "@"),
-    % NOTE: allowing errors here to keep rough backward compatibility
-    {JobName1, Errors} = emqx_template:render(
-        emqx_template:parse(JobName),
-        #{<<"name">> => Name, <<"host">> => Ip}
-    ),
-    _ =
-        Errors == [] orelse
-            ?SLOG(warning, #{
-                msg => "prometheus_job_name_template_invalid",
-                errors => Errors,
-                template => JobName
-            }),
+push_to_push_gateway(Url, Headers) when is_list(Headers) ->
     Data = prometheus_text_format:format(),
-    Url = lists:concat([Uri, "/metrics/job/", unicode:characters_to_list(JobName1)]),
     case httpc:request(post, {Url, Headers, "text/plain", Data}, ?HTTP_OPTIONS, []) of
         {ok, {{"HTTP/1.1", 200, _}, _RespHeaders, _RespBody}} ->
             ok;
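A quick sketch of the runtime-update path introduced above (not part of the diff): `update_child/2`, added to the supervisor later in this PR, simply sends `{update, Conf}`, which the new `handle_info` clause turns into a timer cancel plus a fresh `handle_continue/2`. This mirrors what the t_push_gateway test in this PR does:

```erlang
%% Editorial sketch: push the current config into a running pusher process.
%% emqx_prometheus_config:conf/0 and emqx_prometheus_sup:update_child/2 are
%% both added elsewhere in this PR.
Conf = emqx_prometheus_config:conf(),
ok = emqx_prometheus_sup:update_child(emqx_prometheus, Conf).
```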
@@ -152,8 +145,26 @@ ensure_timer(Interval) ->
 %%--------------------------------------------------------------------
 %% prometheus callbacks
 %%--------------------------------------------------------------------

-opts() ->
-    emqx_conf:get(?PROMETHEUS).
+opts(#{interval := Interval, headers := Headers, job_name := JobName, push_gateway_server := Url}) ->
+    #{interval => Interval, headers => Headers, url => join_url(Url, JobName)};
+opts(#{push_gateway := #{url := Url, job_name := JobName} = PushGateway}) ->
+    maps:put(url, join_url(Url, JobName), PushGateway).
+
+join_url(Url, JobName0) ->
+    [Name, Ip] = string:tokens(atom_to_list(node()), "@"),
+    % NOTE: allowing errors here to keep rough backward compatibility
+    {JobName1, Errors} = emqx_template:render(
+        emqx_template:parse(JobName0),
+        #{<<"name">> => Name, <<"host">> => Ip}
+    ),
+    _ =
+        Errors == [] orelse
+            ?SLOG(warning, #{
+                msg => "prometheus_job_name_template_invalid",
+                errors => Errors,
+                template => JobName0
+            }),
+    lists:concat([Url, "/metrics/job/", unicode:characters_to_list(JobName1)]).

 deregister_cleanup(_Registry) ->
     ok.
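For illustration only (assumed values, not part of the diff): how the default `job_name` template expands inside `join_url/2` for a node named `emqx@127.0.0.1`, matching the example in the schema documentation further down:

```erlang
%% Editorial sketch: the default template from the schema rendered by hand,
%% using the same emqx_template calls as join_url/2 above.
[Name, Host] = string:tokens("emqx@127.0.0.1", "@"),
{JobName, []} = emqx_template:render(
    emqx_template:parse(<<"${name}/instance/${name}~${host}">>),
    #{<<"name">> => Name, <<"host">> => Host}
),
%% JobName renders to "emqx/instance/emqx~127.0.0.1", so the final push URL is
%% "http://127.0.0.1:9091/metrics/job/emqx/instance/emqx~127.0.0.1".
lists:concat(["http://127.0.0.1:9091", "/metrics/job/", unicode:characters_to_list(JobName)]).
```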
@@ -20,8 +20,6 @@
 -include_lib("hocon/include/hoconsc.hrl").

--import(hoconsc, [ref/2]).
-
 -export([
     api_spec/0,
     paths/0,
@@ -29,11 +27,10 @@
 ]).

 -export([
-    prometheus/2,
+    setting/2,
     stats/2
 ]).

--define(SCHEMA_MODULE, emqx_prometheus_schema).
 -define(TAGS, [<<"Monitor">>]).

 api_spec() ->
@@ -47,21 +44,21 @@ paths() ->

 schema("/prometheus") ->
     #{
-        'operationId' => prometheus,
+        'operationId' => setting,
         get =>
             #{
                 description => ?DESC(get_prom_conf_info),
                 tags => ?TAGS,
                 responses =>
-                    #{200 => prometheus_config_schema()}
+                    #{200 => prometheus_setting_schema()}
             },
         put =>
             #{
                 description => ?DESC(update_prom_conf_info),
                 tags => ?TAGS,
-                'requestBody' => prometheus_config_schema(),
+                'requestBody' => prometheus_setting_schema(),
                 responses =>
-                    #{200 => prometheus_config_schema()}
+                    #{200 => prometheus_setting_schema()}
             }
     };
 schema("/prometheus/stats") ->
@@ -71,19 +68,24 @@ schema("/prometheus/stats") ->
         #{
             description => ?DESC(get_prom_data),
             tags => ?TAGS,
-            security => [],
+            security => security(),
             responses =>
                 #{200 => prometheus_data_schema()}
         }
     }.

+security() ->
+    case emqx_config:get([prometheus, enable_basic_auth], false) of
+        true -> [#{'basicAuth' => []}, #{'bearerAuth' => []}];
+        false -> []
+    end.
 %%--------------------------------------------------------------------
 %% API Handler funcs
 %%--------------------------------------------------------------------

-prometheus(get, _Params) ->
+setting(get, _Params) ->
     {200, emqx:get_raw_config([<<"prometheus">>], #{})};
-prometheus(put, #{body := Body}) ->
+setting(put, #{body := Body}) ->
     case emqx_prometheus_config:update(Body) of
         {ok, NewConfig} ->
             {200, NewConfig};
@@ -110,20 +112,57 @@ stats(get, #{headers := Headers}) ->
 %% Internal funcs
 %%--------------------------------------------------------------------

-prometheus_config_schema() ->
-    emqx_dashboard_swagger:schema_with_example(
-        ref(?SCHEMA_MODULE, "prometheus"),
-        prometheus_config_example()
+prometheus_setting_schema() ->
+    [{prometheus, #{type := Setting}}] = emqx_prometheus_schema:roots(),
+    emqx_dashboard_swagger:schema_with_examples(
+        Setting,
+        [
+            recommend_setting_example(),
+            legacy_setting_example()
+        ]
     ).

-prometheus_config_example() ->
-    #{
-        enable => true,
-        interval => "15s",
-        push_gateway_server => <<"http://127.0.0.1:9091">>,
-        headers => #{'header-name' => 'header-value'},
-        job_name => <<"${name}/instance/${name}~${host}">>
-    }.
+legacy_setting_example() ->
+    Summary = <<"legacy_deprecated_setting">>,
+    {Summary, #{
+        summary => Summary,
+        value => #{
+            enable => true,
+            interval => <<"15s">>,
+            push_gateway_server => <<"http://127.0.0.1:9091">>,
+            headers => #{<<"Authorization">> => <<"Basic YWRtaW46Y2JraG55eWd5QDE=">>},
+            job_name => <<"${name}/instance/${name}~${host}">>,
+            vm_dist_collector => <<"disabled">>,
+            vm_memory_collector => <<"disabled">>,
+            vm_msacc_collector => <<"disabled">>,
+            mnesia_collector => <<"disabled">>,
+            vm_statistics_collector => <<"disabled">>,
+            vm_system_info_collector => <<"disabled">>
+        }
+    }}.
+
+recommend_setting_example() ->
+    Summary = <<"recommend_setting">>,
+    {Summary, #{
+        summary => Summary,
+        value => #{
+            enable_basic_auth => false,
+            push_gateway => #{
+                interval => <<"15s">>,
+                url => <<"http://127.0.0.1:9091">>,
+                headers => #{<<"Authorization">> => <<"Basic YWRtaW46Y2JraG55eWd5QDE=">>},
+                job_name => <<"${name}/instance/${name}~${host}">>
+            },
+            collectors => #{
+                vm_dist => <<"disabled">>,
+                vm_memory => <<"disabled">>,
+                vm_msacc => <<"disabled">>,
+                mnesia => <<"disabled">>,
+                vm_statistics => <<"disabled">>,
+                vm_system_info => <<"disabled">>
+            }
+        }
+    }}.

 prometheus_data_schema() ->
     #{
@@ -20,8 +20,9 @@
 -include("emqx_prometheus.hrl").

 -export([add_handler/0, remove_handler/0]).
--export([post_config_update/5]).
+-export([pre_config_update/3, post_config_update/5]).
 -export([update/1]).
+-export([conf/0, is_push_gateway_server_enabled/1]).

 update(Config) ->
     case
@@ -45,9 +46,55 @@ remove_handler() ->
     ok = emqx_config_handler:remove_handler(?PROMETHEUS),
     ok.

-post_config_update(?PROMETHEUS, _Req, New, _Old, AppEnvs) ->
+%% when we import the config with the old version
+%% we need to respect it, and convert to new schema.
+pre_config_update(?PROMETHEUS, MergeConf, OriginConf) ->
+    OriginType = emqx_prometheus_schema:is_recommend_type(OriginConf),
+    MergeType = emqx_prometheus_schema:is_recommend_type(MergeConf),
+    {ok,
+        case {OriginType, MergeType} of
+            {true, false} -> to_recommend_type(MergeConf);
+            _ -> MergeConf
+        end}.
+
+to_recommend_type(Conf) ->
+    #{
+        <<"push_gateway">> => to_push_gateway(Conf),
+        <<"collectors">> => to_collectors(Conf)
+    }.
+
+to_push_gateway(Conf) ->
+    Init = maps:with([<<"interval">>, <<"headers">>, <<"job_name">>, <<"enable">>], Conf),
+    case maps:get(<<"push_gateway_server">>, Conf, "") of
+        "" ->
+            Init#{<<"enable">> => false};
+        Url ->
+            Init#{<<"url">> => Url}
+    end.
+
+to_collectors(Conf) ->
+    lists:foldl(
+        fun({From, To}, Acc) ->
+            case maps:find(From, Conf) of
+                {ok, Value} -> Acc#{To => Value};
+                error -> Acc
+            end
+        end,
+        #{},
+        [
+            {<<"vm_dist_collector">>, <<"vm_dist">>},
+            {<<"mnesia_collector">>, <<"mnesia">>},
+            {<<"vm_statistics_collector">>, <<"vm_statistics">>},
+            {<<"vm_system_info_collector">>, <<"vm_system_info">>},
+            {<<"vm_memory_collector">>, <<"vm_memory">>},
+            {<<"vm_msacc_collector">>, <<"vm_msacc">>}
+        ]
+    ).
+
+post_config_update(?PROMETHEUS, _Req, New, Old, AppEnvs) ->
     update_prometheus(AppEnvs),
-    update_push_gateway(New);
+    _ = update_push_gateway(New),
+    update_auth(New, Old);
 post_config_update(_ConfPath, _Req, _NewConf, _OldConf, _AppEnvs) ->
     ok.

@@ -64,7 +111,29 @@ update_prometheus(AppEnvs) ->
     ),
     application:set_env(AppEnvs).

-update_push_gateway(#{enable := true}) ->
-    emqx_prometheus_sup:start_child(?APP);
-update_push_gateway(#{enable := false}) ->
-    emqx_prometheus_sup:stop_child(?APP).
+update_push_gateway(Prometheus) ->
+    case is_push_gateway_server_enabled(Prometheus) of
+        true ->
+            case erlang:whereis(?APP) of
+                undefined -> emqx_prometheus_sup:start_child(?APP, Prometheus);
+                Pid -> emqx_prometheus_sup:update_child(Pid, Prometheus)
+            end;
+        false ->
+            emqx_prometheus_sup:stop_child(?APP)
+    end.
+
+update_auth(#{enable_basic_auth := New}, #{enable_basic_auth := Old}) when New =/= Old ->
+    emqx_dashboard_listener:regenerate_minirest_dispatch(),
+    ok;
+update_auth(_, _) ->
+    ok.
+
+conf() ->
+    emqx_config:get(?PROMETHEUS).
+
+is_push_gateway_server_enabled(#{enable := true, push_gateway_server := Url}) ->
+    Url =/= "";
+is_push_gateway_server_enabled(#{push_gateway := #{url := Url, enable := Enable}}) ->
+    Enable andalso Url =/= "";
+is_push_gateway_server_enabled(_) ->
+    false.
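To make the legacy-to-recommended conversion above concrete, here is the shape `pre_config_update/3` is expected to produce when an old-style raw config is imported. The exact maps are illustrative, not taken from the PR:

```erlang
%% Editorial sketch: a minimal legacy raw config and the recommended-layout
%% map that to_push_gateway/1 and to_collectors/1 above would build from it.
LegacyIn = #{
    <<"push_gateway_server">> => <<"http://127.0.0.1:9091">>,
    <<"interval">> => <<"15s">>,
    <<"vm_dist_collector">> => <<"enabled">>
},
RecommendedOut = #{
    <<"push_gateway">> => #{
        <<"interval">> => <<"15s">>,
        <<"url">> => <<"http://127.0.0.1:9091">>
    },
    <<"collectors">> => #{<<"vm_dist">> => <<"enabled">>}
},
{LegacyIn, RecommendedOut}.
```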
@@ -27,23 +27,68 @@
     desc/1,
     translation/1,
     convert_headers/2,
-    validate_push_gateway_server/1
+    validate_url/1,
+    is_recommend_type/1
 ]).

-namespace() -> "prometheus".
+namespace() -> prometheus.

-roots() -> [{"prometheus", ?HOCON(?R_REF("prometheus"), #{translate_to => ["prometheus"]})}].
-
-fields("prometheus") ->
-    [
-        {push_gateway_server,
+roots() ->
+    [
+        {prometheus,
+            ?HOCON(
+                ?UNION(setting_union_schema()),
+                #{translate_to => ["prometheus"], default => #{}}
+            )}
+    ].

|
fields(recommend_setting) ->
|
||||||
|
[
|
||||||
|
{enable_basic_auth,
|
||||||
|
?HOCON(
|
||||||
|
boolean(),
|
||||||
|
#{
|
||||||
|
default => false,
|
||||||
|
required => true,
|
||||||
|
importance => ?IMPORTANCE_HIGH,
|
||||||
|
desc => ?DESC(enable_basic_auth)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
{push_gateway,
|
||||||
|
?HOCON(
|
||||||
|
?R_REF(push_gateway),
|
||||||
|
#{
|
||||||
|
required => false,
|
||||||
|
importance => ?IMPORTANCE_MEDIUM,
|
||||||
|
desc => ?DESC(push_gateway)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
{collectors,
|
||||||
|
?HOCON(?R_REF(collectors), #{
|
||||||
|
required => false,
|
||||||
|
importance => ?IMPORTANCE_LOW,
|
||||||
|
desc => ?DESC(collectors)
|
||||||
|
})}
|
||||||
|
];
|
||||||
|
fields(push_gateway) ->
|
||||||
|
[
|
||||||
|
{enable,
|
||||||
|
?HOCON(
|
||||||
|
boolean(),
|
||||||
|
#{
|
||||||
|
default => false,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(push_gateway_enable)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
{url,
|
||||||
?HOCON(
|
?HOCON(
|
||||||
string(),
|
string(),
|
||||||
#{
|
#{
|
||||||
|
required => false,
|
||||||
default => <<"http://127.0.0.1:9091">>,
|
default => <<"http://127.0.0.1:9091">>,
|
||||||
required => true,
|
validator => fun ?MODULE:validate_url/1,
|
||||||
validator => fun ?MODULE:validate_push_gateway_server/1,
|
desc => ?DESC(push_gateway_url)
|
||||||
desc => ?DESC(push_gateway_server)
|
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
{interval,
|
{interval,
|
||||||
|
@ -51,7 +96,7 @@ fields("prometheus") ->
|
||||||
emqx_schema:timeout_duration_ms(),
|
emqx_schema:timeout_duration_ms(),
|
||||||
#{
|
#{
|
||||||
default => <<"15s">>,
|
default => <<"15s">>,
|
||||||
required => true,
|
required => false,
|
||||||
desc => ?DESC(interval)
|
desc => ?DESC(interval)
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
|
@ -70,18 +115,121 @@ fields("prometheus") ->
|
||||||
binary(),
|
binary(),
|
||||||
#{
|
#{
|
||||||
default => <<"${name}/instance/${name}~${host}">>,
|
default => <<"${name}/instance/${name}~${host}">>,
|
||||||
required => true,
|
required => false,
|
||||||
desc => ?DESC(job_name)
|
desc => ?DESC(job_name)
|
||||||
}
|
}
|
||||||
|
)}
|
||||||
|
];
|
||||||
|
fields(collectors) ->
|
||||||
|
[
|
||||||
|
{vm_dist,
|
||||||
|
?HOCON(
|
||||||
|
hoconsc:enum([disabled, enabled]),
|
||||||
|
#{
|
||||||
|
default => disabled,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(vm_dist_collector)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
%% Mnesia metrics mainly using mnesia:system_info/1
|
||||||
|
{mnesia,
|
||||||
|
?HOCON(
|
||||||
|
hoconsc:enum([enabled, disabled]),
|
||||||
|
#{
|
||||||
|
default => disabled,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(mnesia_collector)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
%% Collects Erlang VM metrics using erlang:statistics/1.
|
||||||
|
{vm_statistics,
|
||||||
|
?HOCON(
|
||||||
|
hoconsc:enum([enabled, disabled]),
|
||||||
|
#{
|
||||||
|
default => disabled,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(vm_statistics_collector)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
%% Collects Erlang VM metrics using erlang:system_info/1.
|
||||||
|
{vm_system_info,
|
||||||
|
?HOCON(
|
||||||
|
hoconsc:enum([enabled, disabled]),
|
||||||
|
#{
|
||||||
|
default => disabled,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(vm_system_info_collector)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
%% Collects information about memory dynamically allocated by the Erlang VM using erlang:memory/0,
|
||||||
|
%% it also provides basic (D)ETS statistics.
|
||||||
|
{vm_memory,
|
||||||
|
?HOCON(
|
||||||
|
hoconsc:enum([enabled, disabled]),
|
||||||
|
#{
|
||||||
|
default => disabled,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(vm_memory_collector)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
%% Collects microstate accounting metrics using erlang:statistics(microstate_accounting).
|
||||||
|
{vm_msacc,
|
||||||
|
?HOCON(
|
||||||
|
hoconsc:enum([enabled, disabled]),
|
||||||
|
#{
|
||||||
|
default => disabled,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(vm_msacc_collector)
|
||||||
|
}
|
||||||
|
)}
|
||||||
|
];
|
||||||
|
fields(legacy_deprecated_setting) ->
|
||||||
|
[
|
||||||
|
{push_gateway_server,
|
||||||
|
?HOCON(
|
||||||
|
string(),
|
||||||
|
#{
|
||||||
|
default => <<"http://127.0.0.1:9091">>,
|
||||||
|
required => true,
|
||||||
|
validator => fun ?MODULE:validate_url/1,
|
||||||
|
desc => ?DESC(legacy_push_gateway_server)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
{interval,
|
||||||
|
?HOCON(
|
||||||
|
emqx_schema:timeout_duration_ms(),
|
||||||
|
#{
|
||||||
|
default => <<"15s">>,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(legacy_interval)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
{headers,
|
||||||
|
?HOCON(
|
||||||
|
typerefl:alias("map", list({string(), string()}), #{}, [string(), string()]),
|
||||||
|
#{
|
||||||
|
default => #{},
|
||||||
|
required => false,
|
||||||
|
converter => fun ?MODULE:convert_headers/2,
|
||||||
|
desc => ?DESC(legacy_headers)
|
||||||
|
}
|
||||||
|
)},
|
||||||
|
{job_name,
|
||||||
|
?HOCON(
|
||||||
|
binary(),
|
||||||
|
#{
|
||||||
|
default => <<"${name}/instance/${name}~${host}">>,
|
||||||
|
required => true,
|
||||||
|
desc => ?DESC(legacy_job_name)
|
||||||
|
}
|
||||||
)},
|
)},
|
||||||
|
|
||||||
{enable,
|
{enable,
|
||||||
?HOCON(
|
?HOCON(
|
||||||
boolean(),
|
boolean(),
|
||||||
#{
|
#{
|
||||||
default => false,
|
default => false,
|
||||||
required => true,
|
required => true,
|
||||||
desc => ?DESC(enable)
|
desc => ?DESC(legacy_enable)
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
{vm_dist_collector,
|
{vm_dist_collector,
|
||||||
|
@ -91,7 +239,7 @@ fields("prometheus") ->
|
||||||
default => disabled,
|
default => disabled,
|
||||||
required => true,
|
required => true,
|
||||||
importance => ?IMPORTANCE_LOW,
|
importance => ?IMPORTANCE_LOW,
|
||||||
desc => ?DESC(vm_dist_collector)
|
desc => ?DESC(legacy_vm_dist_collector)
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
%% Mnesia metrics mainly using mnesia:system_info/1
|
%% Mnesia metrics mainly using mnesia:system_info/1
|
||||||
|
@ -102,7 +250,7 @@ fields("prometheus") ->
|
||||||
default => disabled,
|
default => disabled,
|
||||||
required => true,
|
required => true,
|
||||||
importance => ?IMPORTANCE_LOW,
|
importance => ?IMPORTANCE_LOW,
|
||||||
desc => ?DESC(mnesia_collector)
|
desc => ?DESC(legacy_mnesia_collector)
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
%% Collects Erlang VM metrics using erlang:statistics/1.
|
%% Collects Erlang VM metrics using erlang:statistics/1.
|
||||||
|
@ -113,7 +261,7 @@ fields("prometheus") ->
|
||||||
default => disabled,
|
default => disabled,
|
||||||
required => true,
|
required => true,
|
||||||
importance => ?IMPORTANCE_LOW,
|
importance => ?IMPORTANCE_LOW,
|
||||||
desc => ?DESC(vm_statistics_collector)
|
desc => ?DESC(legacy_vm_statistics_collector)
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
%% Collects Erlang VM metrics using erlang:system_info/1.
|
%% Collects Erlang VM metrics using erlang:system_info/1.
|
||||||
|
@ -124,7 +272,7 @@ fields("prometheus") ->
|
||||||
default => disabled,
|
default => disabled,
|
||||||
required => true,
|
required => true,
|
||||||
importance => ?IMPORTANCE_LOW,
|
importance => ?IMPORTANCE_LOW,
|
||||||
desc => ?DESC(vm_system_info_collector)
|
desc => ?DESC(legacy_vm_system_info_collector)
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
%% Collects information about memory dynamically allocated by the Erlang VM using erlang:memory/0,
|
%% Collects information about memory dynamically allocated by the Erlang VM using erlang:memory/0,
|
||||||
|
@ -136,7 +284,7 @@ fields("prometheus") ->
|
||||||
default => disabled,
|
default => disabled,
|
||||||
required => true,
|
required => true,
|
||||||
importance => ?IMPORTANCE_LOW,
|
importance => ?IMPORTANCE_LOW,
|
||||||
desc => ?DESC(vm_memory_collector)
|
desc => ?DESC(legacy_vm_memory_collector)
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
%% Collects microstate accounting metrics using erlang:statistics(microstate_accounting).
|
%% Collects microstate accounting metrics using erlang:statistics(microstate_accounting).
|
||||||
|
@ -147,14 +295,48 @@ fields("prometheus") ->
|
||||||
default => disabled,
|
default => disabled,
|
||||||
required => true,
|
required => true,
|
||||||
importance => ?IMPORTANCE_LOW,
|
importance => ?IMPORTANCE_LOW,
|
||||||
desc => ?DESC(vm_msacc_collector)
|
desc => ?DESC(legacy_vm_msacc_collector)
|
||||||
}
|
}
|
||||||
)}
|
)}
|
||||||
].
|
].
|
||||||
|
|
||||||
desc("prometheus") -> ?DESC(prometheus);
|
setting_union_schema() ->
|
||||||
|
RecommendSetting = ?R_REF(recommend_setting),
|
||||||
|
LegacySetting = ?R_REF(legacy_deprecated_setting),
|
||||||
|
fun
|
||||||
|
(all_union_members) ->
|
||||||
|
[RecommendSetting, LegacySetting];
|
||||||
|
({value, Setting}) ->
|
||||||
|
case is_recommend_type(Setting) of
|
||||||
|
true -> [RecommendSetting];
|
||||||
|
false -> [LegacySetting]
|
||||||
|
end
|
||||||
|
end.
|
||||||
|
|
||||||
|
%% For it to be considered as new schema,
|
||||||
|
%% all keys must be included in the new configuration.
|
||||||
|
is_recommend_type(Setting) ->
|
||||||
|
case maps:keys(Setting) of
|
||||||
|
[] ->
|
||||||
|
true;
|
||||||
|
Keys ->
|
||||||
|
NewKeys = fields(recommend_setting),
|
||||||
|
Fun = fun(Key0) ->
|
||||||
|
Key = binary_to_existing_atom(Key0),
|
||||||
|
lists:keymember(Key, 1, NewKeys)
|
||||||
|
end,
|
||||||
|
lists:all(Fun, Keys)
|
||||||
|
end.
|
||||||
|
|
||||||
|
desc(prometheus) -> ?DESC(prometheus);
|
||||||
|
desc(collectors) -> ?DESC(collectors);
|
||||||
|
desc(legacy_deprecated_setting) -> ?DESC(legacy_deprecated_setting);
|
||||||
|
desc(recommend_setting) -> ?DESC(recommend_setting);
|
||||||
|
desc(push_gateway) -> ?DESC(push_gateway);
|
||||||
desc(_) -> undefined.
|
desc(_) -> undefined.
|
||||||
|
|
||||||
|
convert_headers(undefined, _) ->
|
||||||
|
undefined;
|
||||||
convert_headers(Headers, #{make_serializable := true}) ->
|
convert_headers(Headers, #{make_serializable := true}) ->
|
||||||
Headers;
|
Headers;
|
||||||
convert_headers(<<>>, _Opts) ->
|
convert_headers(<<>>, _Opts) ->
|
||||||
|
@ -170,10 +352,17 @@ convert_headers(Headers, _Opts) when is_map(Headers) ->
|
||||||
convert_headers(Headers, _Opts) when is_list(Headers) ->
|
convert_headers(Headers, _Opts) when is_list(Headers) ->
|
||||||
Headers.
|
Headers.
|
||||||
|
|
||||||
validate_push_gateway_server(Url) ->
|
validate_url(Url) ->
|
||||||
case uri_string:parse(Url) of
|
case uri_string:parse(Url) of
|
||||||
#{scheme := S} when S =:= "https" orelse S =:= "http" -> ok;
|
#{scheme := S} when
|
||||||
_ -> {error, "Invalid url"}
|
S =:= "https";
|
||||||
|
S =:= "http";
|
||||||
|
S =:= <<"https">>;
|
||||||
|
S =:= <<"http">>
|
||||||
|
->
|
||||||
|
ok;
|
||||||
|
_ ->
|
||||||
|
{error, "Invalid url"}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
%% for CI test, CI don't load the whole emqx_conf_schema.
|
%% for CI test, CI don't load the whole emqx_conf_schema.
|
||||||
|
|
|
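A small sketch (not part of the diff) of how `is_recommend_type/1` classifies raw config maps, which is what drives both the union schema above and `pre_config_update/3` in the config module:

```erlang
%% Editorial sketch: empty maps and maps whose keys all belong to
%% fields(recommend_setting) select the recommended member; anything carrying
%% a legacy key such as push_gateway_server falls back to the legacy member.
true = emqx_prometheus_schema:is_recommend_type(#{}),
true = emqx_prometheus_schema:is_recommend_type(#{<<"enable_basic_auth">> => false}),
false = emqx_prometheus_schema:is_recommend_type(
    #{<<"push_gateway_server">> => <<"http://127.0.0.1:9091">>}
).
```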
@@ -21,6 +21,8 @@
 -export([
     start_link/0,
     start_child/1,
+    start_child/2,
+    update_child/2,
     stop_child/1
 ]).

@@ -39,11 +41,18 @@
 start_link() ->
     supervisor:start_link({local, ?MODULE}, ?MODULE, []).

--spec start_child(supervisor:child_spec() | atom()) -> ok.
-start_child(ChildSpec) when is_map(ChildSpec) ->
-    assert_started(supervisor:start_child(?MODULE, ChildSpec));
+-spec start_child(atom()) -> ok.
 start_child(Mod) when is_atom(Mod) ->
-    assert_started(supervisor:start_child(?MODULE, ?CHILD(Mod, []))).
+    start_child(Mod, emqx_prometheus_config:conf()).
+
+-spec start_child(atom(), map()) -> ok.
+start_child(Mod, Conf) when is_atom(Mod) ->
+    assert_started(supervisor:start_child(?MODULE, ?CHILD(Mod, Conf))).
+
+-spec update_child(pid() | atom(), map()) -> ok.
+update_child(Pid, Conf) ->
+    erlang:send(Pid, {update, Conf}),
+    ok.

 -spec stop_child(any()) -> ok | {error, term()}.
 stop_child(ChildId) ->
@@ -54,10 +63,11 @@ stop_child(ChildId) ->
     end.

 init([]) ->
+    Conf = emqx_prometheus_config:conf(),
     Children =
-        case emqx_conf:get([prometheus, enable], false) of
+        case emqx_prometheus_config:is_push_gateway_server_enabled(Conf) of
             false -> [];
-            true -> [?CHILD(emqx_prometheus, [])]
+            true -> [?CHILD(emqx_prometheus, Conf)]
         end,
     {ok, {{one_for_one, 10, 3600}, Children}}.
@ -22,7 +22,7 @@
|
||||||
-compile(export_all).
|
-compile(export_all).
|
||||||
|
|
||||||
-define(CLUSTER_RPC_SHARD, emqx_cluster_rpc_shard).
|
-define(CLUSTER_RPC_SHARD, emqx_cluster_rpc_shard).
|
||||||
-define(CONF_DEFAULT, <<
|
-define(LEGACY_CONF_DEFAULT, <<
|
||||||
"\n"
|
"\n"
|
||||||
"prometheus {\n"
|
"prometheus {\n"
|
||||||
" push_gateway_server = \"http://127.0.0.1:9091\"\n"
|
" push_gateway_server = \"http://127.0.0.1:9091\"\n"
|
||||||
|
@ -38,45 +38,121 @@
|
||||||
" vm_msacc_collector = disabled\n"
|
" vm_msacc_collector = disabled\n"
|
||||||
"}\n"
|
"}\n"
|
||||||
>>).
|
>>).
|
||||||
|
-define(CONF_DEFAULT, #{
|
||||||
|
<<"prometheus">> =>
|
||||||
|
#{
|
||||||
|
<<"enable_basic_auth">> => false,
|
||||||
|
<<"collectors">> =>
|
||||||
|
#{
|
||||||
|
<<"mnesia">> => <<"disabled">>,
|
||||||
|
<<"vm_dist">> => <<"disabled">>,
|
||||||
|
<<"vm_memory">> => <<"disabled">>,
|
||||||
|
<<"vm_msacc">> => <<"disabled">>,
|
||||||
|
<<"vm_statistics">> => <<"disabled">>,
|
||||||
|
<<"vm_system_info">> => <<"disabled">>
|
||||||
|
},
|
||||||
|
<<"push_gateway">> =>
|
||||||
|
#{
|
||||||
|
<<"enable">> => true,
|
||||||
|
<<"headers">> => #{<<"Authorization">> => <<"some-authz-tokens">>},
|
||||||
|
<<"interval">> => <<"1s">>,
|
||||||
|
<<"job_name">> => <<"${name}~${host}">>,
|
||||||
|
<<"url">> => <<"http://127.0.0.1:9091">>
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Setups
|
%% Setups
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
all() ->
|
||||||
|
[
|
||||||
|
{group, new_config},
|
||||||
|
{group, legacy_config}
|
||||||
|
].
|
||||||
|
|
||||||
all() -> emqx_common_test_helpers:all(?MODULE).
|
groups() ->
|
||||||
|
[
|
||||||
|
{new_config, [sequence], common_tests()},
|
||||||
|
{legacy_config, [sequence], common_tests()}
|
||||||
|
].
|
||||||
|
|
||||||
init_per_suite(Cfg) ->
|
suite() ->
|
||||||
|
[{timetrap, {seconds, 30}}].
|
||||||
|
|
||||||
|
common_tests() ->
|
||||||
|
emqx_common_test_helpers:all(?MODULE).
|
||||||
|
|
||||||
|
init_per_group(new_config, Config) ->
|
||||||
|
init_group(),
|
||||||
|
load_config(),
|
||||||
|
emqx_common_test_helpers:start_apps([emqx_prometheus]),
|
||||||
|
%% coverage olp metrics
|
||||||
|
{ok, _} = emqx:update_config([overload_protection, enable], true),
|
||||||
|
Config;
|
||||||
|
init_per_group(legacy_config, Config) ->
|
||||||
|
init_group(),
|
||||||
|
load_legacy_config(),
|
||||||
|
emqx_common_test_helpers:start_apps([emqx_prometheus]),
|
||||||
|
{ok, _} = emqx:update_config([overload_protection, enable], false),
|
||||||
|
Config.
|
||||||
|
|
||||||
|
init_group() ->
|
||||||
application:load(emqx_conf),
|
application:load(emqx_conf),
|
||||||
ok = ekka:start(),
|
ok = ekka:start(),
|
||||||
ok = mria_rlog:wait_for_shards([?CLUSTER_RPC_SHARD], infinity),
|
ok = mria_rlog:wait_for_shards([?CLUSTER_RPC_SHARD], infinity),
|
||||||
meck:new(emqx_alarm, [non_strict, passthrough, no_link]),
|
meck:new(emqx_alarm, [non_strict, passthrough, no_link]),
|
||||||
meck:expect(emqx_alarm, activate, 3, ok),
|
meck:expect(emqx_alarm, activate, 3, ok),
|
||||||
meck:expect(emqx_alarm, deactivate, 3, ok),
|
meck:expect(emqx_alarm, deactivate, 3, ok).
|
||||||
|
|
||||||
load_config(),
|
end_group() ->
|
||||||
emqx_common_test_helpers:start_apps([emqx_prometheus]),
|
|
||||||
Cfg.
|
|
||||||
|
|
||||||
end_per_suite(_Cfg) ->
|
|
||||||
ekka:stop(),
|
ekka:stop(),
|
||||||
mria:stop(),
|
mria:stop(),
|
||||||
mria_mnesia:delete_schema(),
|
mria_mnesia:delete_schema(),
|
||||||
meck:unload(emqx_alarm),
|
meck:unload(emqx_alarm),
|
||||||
|
|
||||||
emqx_common_test_helpers:stop_apps([emqx_prometheus]).
|
emqx_common_test_helpers:stop_apps([emqx_prometheus]).
|
||||||
|
|
||||||
|
end_per_group(_Group, Config) ->
|
||||||
|
end_group(),
|
||||||
|
Config.
|
||||||
|
|
||||||
|
init_per_testcase(t_assert_push, Config) ->
|
||||||
|
meck:new(httpc, [passthrough]),
|
||||||
|
Config;
|
||||||
|
init_per_testcase(t_push_gateway, Config) ->
|
||||||
|
start_mock_pushgateway(9091),
|
||||||
|
Config;
|
||||||
|
init_per_testcase(_Testcase, Config) ->
|
||||||
|
Config.
|
||||||
|
|
||||||
|
end_per_testcase(t_push_gateway, Config) ->
|
||||||
|
stop_mock_pushgateway(),
|
||||||
|
Config;
|
||||||
|
end_per_testcase(t_assert_push, _Config) ->
|
||||||
|
meck:unload(httpc),
|
||||||
|
ok;
|
||||||
|
end_per_testcase(_Testcase, _Config) ->
|
||||||
|
ok.
|
||||||
|
|
||||||
load_config() ->
|
load_config() ->
|
||||||
ok = emqx_common_test_helpers:load_config(emqx_prometheus_schema, ?CONF_DEFAULT).
|
ok = emqx_common_test_helpers:load_config(emqx_prometheus_schema, ?CONF_DEFAULT).
|
||||||
|
|
||||||
|
load_legacy_config() ->
|
||||||
|
ok = emqx_common_test_helpers:load_config(emqx_prometheus_schema, ?LEGACY_CONF_DEFAULT).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Test cases
|
%% Test cases
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
t_start_stop(_) ->
|
t_start_stop(_) ->
|
||||||
App = emqx_prometheus,
|
App = emqx_prometheus,
|
||||||
?assertMatch(ok, emqx_prometheus_sup:start_child(App)),
|
Conf = emqx_prometheus_config:conf(),
|
||||||
|
?assertMatch(ok, emqx_prometheus_sup:start_child(App, Conf)),
|
||||||
%% start twice return ok.
|
%% start twice return ok.
|
||||||
?assertMatch(ok, emqx_prometheus_sup:start_child(App)),
|
?assertMatch(ok, emqx_prometheus_sup:start_child(App, Conf)),
|
||||||
|
ok = gen_server:call(emqx_prometheus, dump, 1000),
|
||||||
|
ok = gen_server:cast(emqx_prometheus, dump),
|
||||||
|
dump = erlang:send(emqx_prometheus, dump),
|
||||||
?assertMatch(ok, emqx_prometheus_sup:stop_child(App)),
|
?assertMatch(ok, emqx_prometheus_sup:stop_child(App)),
|
||||||
%% stop twice return ok.
|
%% stop twice return ok.
|
||||||
?assertMatch(ok, emqx_prometheus_sup:stop_child(App)),
|
?assertMatch(ok, emqx_prometheus_sup:stop_child(App)),
|
||||||
|
@ -88,7 +164,6 @@ t_collector_no_crash_test(_) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_assert_push(_) ->
|
t_assert_push(_) ->
|
||||||
meck:new(httpc, [passthrough]),
|
|
||||||
Self = self(),
|
Self = self(),
|
||||||
AssertPush = fun(Method, Req = {Url, Headers, ContentType, _Data}, HttpOpts, Opts) ->
|
AssertPush = fun(Method, Req = {Url, Headers, ContentType, _Data}, HttpOpts, Opts) ->
|
||||||
?assertEqual(post, Method),
|
?assertEqual(post, Method),
|
||||||
|
@ -99,13 +174,51 @@ t_assert_push(_) ->
|
||||||
meck:passthrough([Method, Req, HttpOpts, Opts])
|
meck:passthrough([Method, Req, HttpOpts, Opts])
|
||||||
end,
|
end,
|
||||||
meck:expect(httpc, request, AssertPush),
|
meck:expect(httpc, request, AssertPush),
|
||||||
?assertMatch(ok, emqx_prometheus_sup:start_child(emqx_prometheus)),
|
Conf = emqx_prometheus_config:conf(),
|
||||||
|
?assertMatch(ok, emqx_prometheus_sup:start_child(emqx_prometheus, Conf)),
|
||||||
receive
|
receive
|
||||||
pass -> ok
|
pass -> ok
|
||||||
after 2000 ->
|
after 2000 ->
|
||||||
ct:fail(assert_push_request_failed)
|
ct:fail(assert_push_request_failed)
|
||||||
end.
|
end.
|
||||||
|
|
||||||
t_only_for_coverage(_) ->
|
t_push_gateway(_) ->
|
||||||
?assertEqual("5.0.0", emqx_prometheus_proto_v1:introduced_in()),
|
Conf = emqx_prometheus_config:conf(),
|
||||||
|
?assertMatch(ok, emqx_prometheus_sup:stop_child(emqx_prometheus)),
|
||||||
|
?assertMatch(ok, emqx_prometheus_sup:start_child(emqx_prometheus, Conf)),
|
||||||
|
?assertMatch(#{ok := 0, failed := 0}, emqx_prometheus:info()),
|
||||||
|
timer:sleep(1100),
|
||||||
|
?assertMatch(#{ok := 1, failed := 0}, emqx_prometheus:info()),
|
||||||
|
ok = emqx_prometheus_sup:update_child(emqx_prometheus, Conf),
|
||||||
|
?assertMatch(#{ok := 0, failed := 0}, emqx_prometheus:info()),
|
||||||
|
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
start_mock_pushgateway(Port) ->
|
||||||
|
application:ensure_all_started(cowboy),
|
||||||
|
Dispatch = cowboy_router:compile([{'_', [{'_', ?MODULE, []}]}]),
|
||||||
|
{ok, _} = cowboy:start_clear(
|
||||||
|
mock_pushgateway_listener,
|
||||||
|
[{port, Port}],
|
||||||
|
#{env => #{dispatch => Dispatch}}
|
||||||
|
).
|
||||||
|
|
||||||
|
stop_mock_pushgateway() ->
|
||||||
|
cowboy:stop_listener(mock_pushgateway_listener).
|
||||||
|
|
||||||
|
init(Req0, Opts) ->
|
||||||
|
Method = cowboy_req:method(Req0),
|
||||||
|
Headers = cowboy_req:headers(Req0),
|
||||||
|
?assertEqual(<<"POST">>, Method),
|
||||||
|
?assertMatch(
|
||||||
|
#{
|
||||||
|
<<"authorization">> := <<"some-authz-tokens">>,
|
||||||
|
<<"content-length">> := _,
|
||||||
|
<<"content-type">> := <<"text/plain">>,
|
||||||
|
<<"host">> := <<"127.0.0.1:9091">>
|
||||||
|
},
|
||||||
|
Headers
|
||||||
|
),
|
||||||
|
RespHeader = #{<<"content-type">> => <<"text/plain; charset=utf-8">>},
|
||||||
|
Req = cowboy_req:reply(200, RespHeader, <<"OK">>, Req0),
|
||||||
|
{ok, Req, Opts}.
|
||||||
|
|
|
@ -28,40 +28,59 @@
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Setups
|
%% Setups
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
all() ->
|
all() ->
|
||||||
emqx_common_test_helpers:all(?MODULE).
|
[
|
||||||
|
{group, new_config},
|
||||||
|
{group, legacy_config}
|
||||||
|
].
|
||||||
|
|
||||||
|
groups() ->
|
||||||
|
[
|
||||||
|
{new_config, [sequence], [t_stats_auth_api, t_stats_no_auth_api, t_prometheus_api]},
|
||||||
|
{legacy_config, [sequence], [t_stats_no_auth_api, t_legacy_prometheus_api]}
|
||||||
|
].
|
||||||
|
|
||||||
init_per_suite(Config) ->
|
init_per_suite(Config) ->
|
||||||
application:load(emqx_conf),
|
emqx_prometheus_SUITE:init_group(),
|
||||||
ok = ekka:start(),
|
emqx_mgmt_api_test_util:init_suite([emqx_conf]),
|
||||||
ok = mria_rlog:wait_for_shards([?CLUSTER_RPC_SHARD], infinity),
|
|
||||||
|
|
||||||
meck:new(mria_rlog, [non_strict, passthrough, no_link]),
|
|
||||||
|
|
||||||
emqx_prometheus_SUITE:load_config(),
|
|
||||||
emqx_mgmt_api_test_util:init_suite([emqx_prometheus]),
|
|
||||||
|
|
||||||
Config.
|
Config.
|
||||||
|
|
||||||
end_per_suite(Config) ->
|
end_per_suite(Config) ->
|
||||||
ekka:stop(),
|
emqx_prometheus_SUITE:end_group(),
|
||||||
mria:stop(),
|
emqx_mgmt_api_test_util:end_suite([emqx_conf]),
|
||||||
mria_mnesia:delete_schema(),
|
|
||||||
|
|
||||||
meck:unload(mria_rlog),
|
|
||||||
|
|
||||||
emqx_mgmt_api_test_util:end_suite([emqx_prometheus]),
|
|
||||||
Config.
|
Config.
|
||||||
|
|
||||||
init_per_testcase(_, Config) ->
|
init_per_group(new_config, Config) ->
|
||||||
{ok, _} = emqx_cluster_rpc:start_link(),
|
emqx_common_test_helpers:start_apps(
|
||||||
|
[emqx_prometheus],
|
||||||
|
fun(App) -> set_special_configs(App, new_config) end
|
||||||
|
),
|
||||||
|
Config;
|
||||||
|
init_per_group(legacy_config, Config) ->
|
||||||
|
emqx_common_test_helpers:start_apps(
|
||||||
|
[emqx_prometheus],
|
||||||
|
fun(App) -> set_special_configs(App, legacy_config) end
|
||||||
|
),
|
||||||
Config.
|
Config.
|
||||||
|
|
||||||
|
end_per_group(_Group, Config) ->
|
||||||
|
_ = application:stop(emqx_prometheus),
|
||||||
|
Config.
|
||||||
|
|
||||||
|
set_special_configs(emqx_dashboard, _) ->
|
||||||
|
emqx_dashboard_api_test_helpers:set_default_config();
|
||||||
|
set_special_configs(emqx_prometheus, new_config) ->
|
||||||
|
emqx_prometheus_SUITE:load_config(),
|
||||||
|
ok;
|
||||||
|
set_special_configs(emqx_prometheus, legacy_config) ->
|
||||||
|
emqx_prometheus_SUITE:load_legacy_config(),
|
||||||
|
ok;
|
||||||
|
set_special_configs(_App, _) ->
|
||||||
|
ok.
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Cases
|
%% Cases
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
t_prometheus_api(_) ->
|
t_legacy_prometheus_api(_) ->
|
||||||
Path = emqx_mgmt_api_test_util:api_path(["prometheus"]),
|
Path = emqx_mgmt_api_test_util:api_path(["prometheus"]),
|
||||||
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
||||||
{ok, Response} = emqx_mgmt_api_test_util:request_api(get, Path, "", Auth),
|
{ok, Response} = emqx_mgmt_api_test_util:request_api(get, Path, "", Auth),
|
||||||
|
@ -145,21 +164,133 @@ t_prometheus_api(_) ->
|
||||||
),
|
),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_stats_api(_) ->
|
t_prometheus_api(_) ->
|
||||||
Path = emqx_mgmt_api_test_util:api_path(["prometheus", "stats"]),
|
Path = emqx_mgmt_api_test_util:api_path(["prometheus"]),
|
||||||
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
||||||
Headers = [{"accept", "application/json"}, Auth],
|
{ok, Response} = emqx_mgmt_api_test_util:request_api(get, Path, "", Auth),
|
||||||
{ok, Response} = emqx_mgmt_api_test_util:request_api(get, Path, "", Headers),
|
|
||||||
|
|
||||||
|
Conf = emqx_utils_json:decode(Response, [return_maps]),
|
||||||
|
?assertMatch(
|
||||||
|
#{
|
||||||
|
<<"push_gateway">> := #{},
|
||||||
|
<<"collectors">> := _,
|
||||||
|
<<"enable_basic_auth">> := _
|
||||||
|
},
|
||||||
|
Conf
|
||||||
|
),
|
||||||
|
#{
|
||||||
|
<<"push_gateway">> :=
|
||||||
|
#{<<"url">> := Url, <<"enable">> := Enable} = PushGateway,
|
||||||
|
<<"collectors">> := Collector
|
||||||
|
} = Conf,
|
||||||
|
Pid = erlang:whereis(emqx_prometheus),
|
||||||
|
?assertEqual(Enable, undefined =/= Pid, {Url, Pid}),
|
||||||
|
|
||||||
|
NewConf = Conf#{
|
||||||
|
<<"push_gateway">> => PushGateway#{
|
||||||
|
<<"interval">> => <<"2s">>,
|
||||||
|
<<"headers">> => #{
|
||||||
|
<<"test-str1">> => <<"test-value">>,
|
||||||
|
<<"test-str2">> => <<"42">>
|
||||||
|
}
|
||||||
|
},
|
||||||
|
<<"collectors">> => Collector#{
|
||||||
|
<<"vm_dist">> => <<"enabled">>,
|
||||||
|
<<"vm_system_info">> => <<"enabled">>,
|
||||||
|
<<"vm_memory">> => <<"enabled">>,
|
||||||
|
<<"vm_msacc">> => <<"enabled">>,
|
||||||
|
<<"mnesia">> => <<"enabled">>,
|
||||||
|
<<"vm_statistics">> => <<"enabled">>
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{ok, Response2} = emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, NewConf),
|
||||||
|
|
||||||
|
Conf2 = emqx_utils_json:decode(Response2, [return_maps]),
|
||||||
|
?assertMatch(NewConf, Conf2),
|
||||||
|
|
||||||
|
EnvCollectors = application:get_env(prometheus, collectors, []),
|
||||||
|
PromCollectors = prometheus_registry:collectors(default),
|
||||||
|
?assertEqual(lists:sort(EnvCollectors), lists:sort(PromCollectors)),
|
||||||
|
?assert(lists:member(prometheus_vm_statistics_collector, EnvCollectors), EnvCollectors),
|
||||||
|
|
||||||
|
lists:foreach(
|
||||||
|
fun({C, Enabled}) ->
|
||||||
|
?assertEqual(Enabled, lists:member(C, EnvCollectors), EnvCollectors)
|
||||||
|
end,
|
||||||
|
[
|
||||||
|
{prometheus_vm_dist_collector, true},
|
||||||
|
{prometheus_vm_system_info_collector, true},
|
||||||
|
{prometheus_vm_memory_collector, true},
|
||||||
|
{prometheus_mnesia_collector, true},
|
||||||
|
{prometheus_vm_msacc_collector, true},
|
||||||
|
{prometheus_vm_statistics_collector, true}
|
||||||
|
]
|
||||||
|
),
|
||||||
|
|
||||||
|
?assertMatch(
|
||||||
|
#{
|
||||||
|
<<"push_gateway">> := #{
|
||||||
|
<<"headers">> := #{
|
||||||
|
<<"test-str1">> := <<"test-value">>,
|
||||||
|
<<"test-str2">> := <<"42">>
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
emqx_config:get_raw([prometheus])
|
||||||
|
),
|
||||||
|
?assertMatch(
|
||||||
|
#{
|
||||||
|
push_gateway := #{
|
||||||
|
headers := [
|
||||||
|
{"test-str2", "42"},
|
||||||
|
{"test-str1", "test-value"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
emqx_config:get([prometheus])
|
||||||
|
),
|
||||||
|
|
||||||
|
NewConf1 = Conf#{<<"push_gateway">> => PushGateway#{<<"enable">> => false}},
|
||||||
|
{ok, _Response3} = emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, NewConf1),
|
||||||
|
?assertEqual(undefined, erlang:whereis(emqx_prometheus)),
|
||||||
|
|
||||||
|
ConfWithoutScheme = Conf#{
|
||||||
|
<<"push_gateway">> => PushGateway#{<<"url">> => <<"127.0.0.1:8081">>}
|
||||||
|
},
|
||||||
|
?assertMatch(
|
||||||
|
{error, {"HTTP/1.1", 400, _}},
|
||||||
|
emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, ConfWithoutScheme)
|
||||||
|
),
|
||||||
|
ok.
|
||||||
|
|
||||||
|
t_stats_no_auth_api(_) ->
|
||||||
|
%% undefined is legacy prometheus
|
||||||
|
case emqx:get_config([prometheus, enable_basic_auth], undefined) of
|
||||||
|
true ->
|
||||||
|
{ok, _} = emqx:update_config([prometheus, enable_basic_auth], false),
|
||||||
|
emqx_dashboard_listener:regenerate_minirest_dispatch();
|
||||||
|
_ ->
|
||||||
|
ok
|
||||||
|
end,
|
||||||
|
emqx_dashboard_listener:regenerate_minirest_dispatch(),
|
||||||
|
Json = [{"accept", "application/json"}],
|
||||||
|
request_stats(Json, []).
|
||||||
|
|
||||||
|
t_stats_auth_api(_) ->
|
||||||
|
{ok, _} = emqx:update_config([prometheus, enable_basic_auth], true),
|
||||||
|
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
||||||
|
JsonAuth = [{"accept", "application/json"}, Auth],
|
||||||
|
request_stats(JsonAuth, Auth),
|
||||||
|
ok.
|
||||||
|
|
||||||
|
request_stats(JsonAuth, Auth) ->
|
||||||
|
Path = emqx_mgmt_api_test_util:api_path(["prometheus", "stats"]),
|
||||||
|
{ok, Response} = emqx_mgmt_api_test_util:request_api(get, Path, "", JsonAuth),
|
||||||
Data = emqx_utils_json:decode(Response, [return_maps]),
|
Data = emqx_utils_json:decode(Response, [return_maps]),
|
||||||
?assertMatch(#{<<"client">> := _, <<"delivery">> := _}, Data),
|
?assertMatch(#{<<"client">> := _, <<"delivery">> := _}, Data),
|
||||||
|
|
||||||
{ok, _} = emqx_mgmt_api_test_util:request_api(get, Path, "", Auth),
|
{ok, _} = emqx_mgmt_api_test_util:request_api(get, Path, "", Auth),
|
||||||
|
|
||||||
ok = meck:expect(mria_rlog, backend, fun() -> rlog end),
|
ok = meck:expect(mria_rlog, backend, fun() -> rlog end),
|
||||||
{ok, _} = emqx_mgmt_api_test_util:request_api(get, Path, "", Auth),
|
{ok, _} = emqx_mgmt_api_test_util:request_api(get, Path, "", Auth).
|
||||||
|
|
||||||
ok.
|
|
||||||
|
|
||||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
%%% Internal Functions
|
%%% Internal Functions
|
||||||
|
|
|
@@ -0,0 +1,4 @@
+Modified the Prometheus API and configuration to:
+- Restructure configuration sections to group related settings, improving readability and maintainability.
+- Introduce an `enable_basic_auth` option for basic authentication on the scrape API endpoint, enhancing security.
+- Maintain backward compatibility while refactoring the code, avoiding breaking changes.
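A hedged usage sketch to go with the changelog entry (not part of the change set): once `prometheus.enable_basic_auth = true`, the scrape endpoint expects the same kind of credentials as the rest of the management API. The key/secret below are hypothetical placeholders for an API key created in the dashboard, and the default dashboard listener on port 18083 is assumed:

```erlang
%% Editorial sketch: scrape /api/v5/prometheus/stats with basic auth enabled.
%% "my_api_key"/"my_api_secret" are placeholders, not real credentials.
{ok, _} = application:ensure_all_started(inets),
Auth = base64:encode_to_string("my_api_key:my_api_secret"),
Headers = [{"authorization", "Basic " ++ Auth}, {"accept", "application/json"}],
{ok, {{_, 200, _}, _, Body}} =
    httpc:request(get, {"http://127.0.0.1:18083/api/v5/prometheus/stats", Headers}, [], []).
```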
@@ -1,10 +1,24 @@
 ## Prometheus

 ## EMQX's Prometheus scraping endpoint is enabled by default without authentication.
-## And there is no way to turn it off.
+## You can enable basic authentication by setting enable_basic_auth to true.
 ## You can inspect it with a curl command: curl -f "127.0.0.1:18083/api/v5/prometheus/stats"

 prometheus {
-  # turn off this expensive collector
-  vm_dist_collector = disabled
+  enable_basic_auth = false
+  push_gateway {
+    enable = false
+    url = "http://127.0.0.1:9091"
+    headers {Authorization = "Basic YWRtaW46Y2JraG55eWd5QDE="}
+    interval = 15s
+    job_name = "${name}/instance/${name}~${host}"
+  }
+  collectors {
+    mnesia = disabled
+    vm_dist = disabled
+    vm_memory = disabled
+    vm_msacc = disabled
+    vm_statistics = disabled
+    vm_system_info = enabled
+  }
 }
@@ -11,8 +11,8 @@ update_prom_conf_info.label:
 """Update Prometheus config"""

 get_prom_data.desc:
-"""Get Prometheus Data"""
+"""Get Prometheus Metrics"""
 get_prom_data.label:
-"""Get Prometheus Data"""
+"""Prometheus Metrics"""

 }
@@ -1,8 +1,5 @@
 emqx_prometheus_schema {

-enable.desc:
-"""Turn Prometheus data pushing on or off"""
-
 headers.desc:
 """An HTTP Headers when pushing to Push Gateway.<br/>
 For example, <code> { Authorization = "some-authz-tokens"}</code>"""
@@ -14,28 +11,46 @@ job_name.desc:
 """Job Name that is pushed to the Push Gateway. Available variables:<br/>
 - ${name}: Name of EMQX node.<br/>
 - ${host}: Host name of EMQX node.<br/>
-For example, when the EMQX node name is <code>emqx@127.0.0.1</code> then the <code>name</code> variable takes value <code>emqx</code> and the <code>host</code> variable takes value <code>127.0.0.1</code>.<br/>
+For example, when the EMQX node name is <code>emqx@127.0.0.1</code> then the <code>name</code>
+variable takes value <code>emqx</code> and the <code>host</code> variable takes value <code>127.0.0.1</code>.
 Default value is: <code>${name}/instance/${name}~${host}</code>"""

-mnesia_collector.desc:
-"""Enable or disable Mnesia metrics collector"""
-
 prometheus.desc:
 """EMQX's Prometheus scraping endpoint is enabled by default without authentication.
-You can inspect it with a `curl` command like this: `curl -f "127.0.0.1:18083/api/v5/prometheus/stats"`<br/>
-The 'enable' flag is used to turn on and off for the push-gateway integration."""
+You can inspect it with a `curl` command like this: `curl -f "127.0.0.1:18083/api/v5/prometheus/stats"`"""

 prometheus.label:
 """Prometheus"""

-push_gateway_server.desc:
-"""URL of Prometheus server. Pushgateway is optional, should not be configured if prometheus is to scrape EMQX."""
+push_gateway.desc:
+"""Push Gateway is optional, should not be configured if prometheus is to scrape EMQX."""
+
+enable_basic_auth.desc:
+"""Enable or disable basic authentication for prometheus scrape api, not for Push Gateway"""
+
+collectors.desc:
+"""The internal advanced metrics of the virtual machine are initially disabled
+and are usually only enabled during performance testing.
+Enabling them will increase the CPU load."""
+
+recommend_setting.desc:
+"""Recommended setting"""
+
+push_gateway_url.desc:
+"""URL of Pushgateway server. Pushgateway is optional, should not be configured if prometheus is to scrape EMQX."""
+
+push_gateway_enable.desc:
+"""Enable or disable Pushgateway"""
+
+mnesia_collector.desc:
+"""Collects Mnesia metrics mainly using <code> mnesia:system_info/1 </code>"""

 vm_dist_collector.desc:
-"""Enable or disable VM distribution collector, collects information about the sockets and processes involved in the Erlang distribution mechanism."""
+"""Enable or disable VM distribution collector,
+collects information about the sockets and processes involved in the Erlang distribution mechanism."""

 vm_memory_collector.desc:
-"""Enable or disable VM memory metrics collector."""
+"""Collects information about memory dynamically allocated by the Erlang emulator using
+<code> erlang:memory/0 </code>."""

 vm_msacc_collector.desc:
 """Enable or disable VM microstate accounting metrics collector."""
@@ -46,4 +61,43 @@ vm_statistics_collector.desc:
 vm_system_info_collector.desc:
 """Enable or disable VM system info collector."""

+legacy_deprecated_setting.desc:
+"""Deprecated since 5.4.0"""
+
+legacy_enable.desc:
+"""Deprecated since 5.4.0, use `prometheus.push_gateway.url` instead"""
+
+legacy_headers.desc:
+"""Deprecated since 5.4.0, use `prometheus.push_gateway.headers` instead"""
+
+legacy_interval.desc:
+"""Deprecated since 5.4.0, use `prometheus.push_gateway.interval` instead"""
+
+legacy_job_name.desc:
+"""Deprecated since 5.4.0, use `prometheus.push_gateway.job_name` instead"""
+
+legacy_push_gateway_server.desc:
+"""Deprecated since 5.4.0, use `prometheus.push_gateway.url` instead"""
+
+legacy_mnesia_collector.desc:
+"""Deprecated since 5.4.0, use `prometheus.collectors.mnesia` instead"""
+
+legacy_vm_dist_collector.desc:
+"""Deprecated since 5.4.0, use `prometheus.collectors.vm_dist` instead"""
+
+legacy_vm_memory_collector.desc:
+"""Deprecated since 5.4.0, use `prometheus.collectors.vm_memory` instead"""
+
+legacy_vm_msacc_collector.desc:
+"""Deprecated since 5.4.0, use `prometheus.collectors.vm_msacc` instead"""
+
+legacy_vm_statistics_collector.desc:
+"""Deprecated since 5.4.0, use `prometheus.collectors.vm_statistics` instead"""
+
+legacy_vm_system_info_collector.desc:
+"""Deprecated, use `prometheus.collectors.vm_system_info` instead"""
+
 }
@@ -295,3 +295,4 @@ dnstream
 upstream
 priv
 Syskeeper
+msacc