Merge pull request #10211 from zmstone/0321-hide-broker-perf-configs

0321 hide broker perf configs
This commit is contained in:
Zaiming (Stone) Shi 2023-03-28 20:31:51 +02:00 committed by GitHub
commit 7c0c36ff9b
20 changed files with 163 additions and 75 deletions

View File

@ -194,12 +194,15 @@ jobs:
run: |
CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
export EMQX_SMOKE_TEST_CHECK_HIDDEN_FIELDS='yes'
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker stop $CID
- name: test two-node cluster with proto_dist=inet_tls in docker
run: |
./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy)
# versions before 5.0.22 have hidden fields included in the API spec
export EMQX_SMOKE_TEST_CHECK_HIDDEN_FIELDS='no'
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
# cleanup
./scripts/test/start-two-nodes-in-docker.sh -c
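
A minimal sketch of running the smoke test locally outside CI (the image tag is an example value; every other command mirrors the single-node step above):

# Sketch: run the smoke test against a locally started container.
CID=$(docker run -d --rm -P emqx/emqx:latest)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' "$CID")
# 'yes' makes the script fail if hidden fields leak into the API spec
EMQX_SMOKE_TEST_CHECK_HIDDEN_FIELDS='yes' ./scripts/test/emqx-smoke-test.sh localhost "$HTTP_PORT"
docker stop "$CID"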

View File

@ -29,7 +29,7 @@
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.6"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.37.2"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.38.0"}}},
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}},
{pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
{recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},

View File

@ -230,7 +230,7 @@ roots(low) ->
{"crl_cache",
sc(
ref("crl_cache"),
#{hidden => true}
#{importance => ?IMPORTANCE_HIDDEN}
)}
].
@ -1492,7 +1492,7 @@ fields("broker") ->
{"perf",
sc(
ref("broker_perf"),
#{}
#{importance => ?IMPORTANCE_HIDDEN}
)},
{"shared_subscription_group",
sc(
@ -2299,7 +2299,7 @@ server_ssl_opts_schema(Defaults, IsRanchListener) ->
#{
required => false,
%% TODO: remove after e5.0.2
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
validator => fun ocsp_inner_validator/1
}
)},
@ -2997,7 +2997,7 @@ quic_feature_toggle(Desc) ->
typerefl:alias("boolean", typerefl:union([true, false, 0, 1])),
#{
desc => Desc,
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
required => false,
converter => fun
(true) -> 1;
@ -3012,7 +3012,7 @@ quic_lowlevel_settings_uint(Low, High, Desc) ->
range(Low, High),
#{
required => false,
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
desc => Desc
}
).

View File

@ -1,6 +1,6 @@
{application, emqx_conf, [
{description, "EMQX configuration management"},
{vsn, "0.1.14"},
{vsn, "0.1.15"},
{registered, []},
{mod, {emqx_conf_app, []}},
{applications, [kernel, stdlib, emqx_ctl]},

View File

@ -156,7 +156,15 @@ dump_schema(Dir, SchemaModule, I18nFile) ->
gen_schema_json(Dir, I18nFile, SchemaModule, Lang) ->
SchemaJsonFile = filename:join([Dir, "schema-" ++ Lang ++ ".json"]),
io:format(user, "===< Generating: ~s~n", [SchemaJsonFile]),
Opts = #{desc_file => I18nFile, lang => Lang},
%% EMQX_SCHEMA_FULL_DUMP is a deliberately hidden switch;
%% it is used to dump the full schema for EMQX developers and support staff
IncludeImportance =
case os:getenv("EMQX_SCHEMA_FULL_DUMP") =:= "1" of
true -> ?IMPORTANCE_HIDDEN;
false -> ?IMPORTANCE_LOW
end,
io:format(user, "===< Including fields from importance level: ~p~n", [IncludeImportance]),
Opts = #{desc_file => I18nFile, lang => Lang, include_importance_up_from => IncludeImportance},
JsonMap = hocon_schema_json:gen(SchemaModule, Opts),
IoData = jsx:encode(JsonMap, [space, {indent, 4}]),
ok = file:write_file(SchemaJsonFile, IoData).
@ -220,7 +228,8 @@ gen_example(File, SchemaModule, I18nFile, Lang) ->
title => <<"EMQX Configuration Example">>,
body => <<"">>,
desc_file => I18nFile,
lang => Lang
lang => Lang,
include_importance_up_from => ?IMPORTANCE_MEDIUM
},
Example = hocon_schema_example:gen(SchemaModule, Opts),
file:write_file(File, Example).
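
A usage sketch, assuming the dump directory passed as Dir is _build/docgen (an assumption; how dump_schema/3 is invoked is outside this diff): the full dump is selected only when the variable is exactly "1", and the effect can be checked by looking for a field this PR hides.

# Sketch: EMQX_SCHEMA_FULL_DUMP=1 lowers the cut-off to IMPORTANCE_HIDDEN.
export EMQX_SCHEMA_FULL_DUMP=1
# ... run the schema dump here (entry point not shown in this diff) ...
DUMP_DIR=_build/docgen   # assumption: the Dir argument given to dump_schema/3
grep -c trie_compaction "$DUMP_DIR/schema-en.json"   # expect >0 only for a full dump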

View File

@ -28,7 +28,18 @@
-define(DEFAULT_INIT_TXN_ID, -1).
start(_StartType, _StartArgs) ->
init_conf(),
try
ok = init_conf()
catch
C:E:St ->
?SLOG(critical, #{
msg => failed_to_init_config,
exception => C,
reason => E,
stacktrace => St
}),
init:stop()
end,
ok = emqx_config_logger:refresh_config(),
emqx_conf_sup:start_link().
@ -85,9 +96,9 @@ init_load() ->
init_conf() ->
{ok, TnxId} = copy_override_conf_from_core_node(),
emqx_app:set_init_tnx_id(TnxId),
init_load(),
emqx_app:set_init_config_load_done().
_ = emqx_app:set_init_tnx_id(TnxId),
ok = init_load(),
ok = emqx_app:set_init_config_load_done().
cluster_nodes() ->
maps:get(running_nodes, ekka_cluster:info()) -- [node()].

View File

@ -397,6 +397,7 @@ fields("node") ->
#{
default => <<"emqx@127.0.0.1">>,
'readOnly' => true,
importance => ?IMPORTANCE_HIGH,
desc => ?DESC(node_name)
}
)},
@ -409,6 +410,7 @@ fields("node") ->
'readOnly' => true,
sensitive => true,
desc => ?DESC(node_cookie),
importance => ?IMPORTANCE_HIGH,
converter => fun emqx_schema:password_converter/2
}
)},
@ -419,6 +421,7 @@ fields("node") ->
mapping => "vm_args.+P",
desc => ?DESC(process_limit),
default => 2097152,
importance => ?IMPORTANCE_MEDIUM,
'readOnly' => true
}
)},
@ -429,6 +432,7 @@ fields("node") ->
mapping => "vm_args.+Q",
desc => ?DESC(max_ports),
default => 1048576,
importance => ?IMPORTANCE_HIGH,
'readOnly' => true
}
)},
@ -439,6 +443,7 @@ fields("node") ->
mapping => "vm_args.+zdbbl",
desc => ?DESC(dist_buffer_size),
default => 8192,
importance => ?IMPORTANCE_LOW,
'readOnly' => true
}
)},
@ -449,6 +454,7 @@ fields("node") ->
mapping => "vm_args.+e",
desc => ?DESC(max_ets_tables),
default => 262144,
importance => ?IMPORTANCE_LOW,
'readOnly' => true
}
)},
@ -459,6 +465,10 @@ fields("node") ->
required => true,
'readOnly' => true,
mapping => "emqx.data_dir",
%% for now, it's tricky to use a different data_dir
%% otherwise data paths in cluster config may differ
%% TODO: change configurable data file paths to relative
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(node_data_dir)
}
)},
@ -467,7 +477,7 @@ fields("node") ->
hoconsc:array(string()),
#{
mapping => "emqx.config_files",
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
required => false,
'readOnly' => true
}
@ -479,6 +489,7 @@ fields("node") ->
mapping => "emqx_machine.global_gc_interval",
default => <<"15m">>,
desc => ?DESC(node_global_gc_interval),
importance => ?IMPORTANCE_LOW,
'readOnly' => true
}
)},
@ -489,6 +500,7 @@ fields("node") ->
mapping => "vm_args.-env ERL_CRASH_DUMP",
desc => ?DESC(node_crash_dump_file),
default => crash_dump_file_default(),
importance => ?IMPORTANCE_LOW,
'readOnly' => true
}
)},
@ -499,6 +511,7 @@ fields("node") ->
mapping => "vm_args.-env ERL_CRASH_DUMP_SECONDS",
default => <<"30s">>,
desc => ?DESC(node_crash_dump_seconds),
importance => ?IMPORTANCE_LOW,
'readOnly' => true
}
)},
@ -509,6 +522,7 @@ fields("node") ->
mapping => "vm_args.-env ERL_CRASH_DUMP_BYTES",
default => <<"100MB">>,
desc => ?DESC(node_crash_dump_bytes),
importance => ?IMPORTANCE_LOW,
'readOnly' => true
}
)},
@ -519,6 +533,7 @@ fields("node") ->
mapping => "vm_args.-kernel net_ticktime",
default => <<"2m">>,
'readOnly' => true,
importance => ?IMPORTANCE_LOW,
desc => ?DESC(node_dist_net_ticktime)
}
)},
@ -529,6 +544,7 @@ fields("node") ->
mapping => "emqx_machine.backtrace_depth",
default => 23,
'readOnly' => true,
importance => ?IMPORTANCE_LOW,
desc => ?DESC(node_backtrace_depth)
}
)},
@ -539,6 +555,7 @@ fields("node") ->
mapping => "emqx_machine.applications",
default => [],
'readOnly' => true,
importance => ?IMPORTANCE_LOW,
desc => ?DESC(node_applications)
}
)},
@ -548,13 +565,17 @@ fields("node") ->
#{
desc => ?DESC(node_etc_dir),
'readOnly' => true,
importance => ?IMPORTANCE_LOW,
deprecated => {since, "5.0.8"}
}
)},
{"cluster_call",
sc(
?R_REF("cluster_call"),
#{'readOnly' => true}
#{
'readOnly' => true,
importance => ?IMPORTANCE_LOW
}
)},
{"db_backend",
sc(
@ -563,6 +584,7 @@ fields("node") ->
mapping => "mria.db_backend",
default => rlog,
'readOnly' => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(db_backend)
}
)},
@ -573,6 +595,7 @@ fields("node") ->
mapping => "mria.node_role",
default => core,
'readOnly' => true,
importance => ?IMPORTANCE_HIGH,
desc => ?DESC(db_role)
}
)},
@ -583,6 +606,7 @@ fields("node") ->
mapping => "mria.rlog_rpc_module",
default => gen_rpc,
'readOnly' => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(db_rpc_module)
}
)},
@ -593,6 +617,7 @@ fields("node") ->
mapping => "mria.tlog_push_mode",
default => async,
'readOnly' => true,
importance => ?IMPORTANCE_LOW,
desc => ?DESC(db_tlog_push_mode)
}
)},
@ -601,7 +626,7 @@ fields("node") ->
hoconsc:enum([gen_rpc, distr]),
#{
mapping => "mria.shard_transport",
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
default => gen_rpc,
desc => ?DESC(db_default_shard_transport)
}
@ -611,7 +636,7 @@ fields("node") ->
map(shard, hoconsc:enum([gen_rpc, distr])),
#{
desc => ?DESC(db_shard_transports),
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
mapping => "emqx_machine.custom_shard_transports",
default => #{}
}

View File

@ -830,36 +830,8 @@ to_bin(X) ->
X.
parse_object(PropList = [_ | _], Module, Options) when is_list(PropList) ->
{Props, Required, Refs} =
lists:foldl(
fun({Name, Hocon}, {Acc, RequiredAcc, RefsAcc}) ->
NameBin = to_bin(Name),
case hoconsc:is_schema(Hocon) of
true ->
HoconType = hocon_schema:field_schema(Hocon, type),
Init0 = init_prop([default | ?DEFAULT_FIELDS], #{}, Hocon),
SchemaToSpec = schema_converter(Options),
Init = trans_desc(Init0, Hocon, SchemaToSpec, NameBin),
{Prop, Refs1} = SchemaToSpec(HoconType, Module),
NewRequiredAcc =
case is_required(Hocon) of
true -> [NameBin | RequiredAcc];
false -> RequiredAcc
end,
{
[{NameBin, maps:merge(Prop, Init)} | Acc],
NewRequiredAcc,
Refs1 ++ RefsAcc
};
false ->
{SubObject, SubRefs} = parse_object(Hocon, Module, Options),
{[{NameBin, SubObject} | Acc], RequiredAcc, SubRefs ++ RefsAcc}
end
end,
{[], [], []},
PropList
),
Object = #{<<"type">> => object, <<"properties">> => lists:reverse(Props)},
{Props, Required, Refs} = parse_object_loop(PropList, Module, Options),
Object = #{<<"type">> => object, <<"properties">> => Props},
case Required of
[] -> {Object, Refs};
_ -> {maps:put(required, Required, Object), Refs}
@ -874,6 +846,54 @@ parse_object(Other, Module, Options) ->
}}
).
parse_object_loop(PropList0, Module, Options) ->
PropList = lists:filter(
fun({_, Hocon}) ->
case hoconsc:is_schema(Hocon) andalso is_hidden(Hocon) of
true -> false;
false -> true
end
end,
PropList0
),
parse_object_loop(PropList, Module, Options, _Props = [], _Required = [], _Refs = []).
parse_object_loop([], _Module, _Options, Props, Required, Refs) ->
{lists:reverse(Props), lists:usort(Required), Refs};
parse_object_loop([{Name, Hocon} | Rest], Module, Options, Props, Required, Refs) ->
NameBin = to_bin(Name),
case hoconsc:is_schema(Hocon) of
true ->
HoconType = hocon_schema:field_schema(Hocon, type),
Init0 = init_prop([default | ?DEFAULT_FIELDS], #{}, Hocon),
SchemaToSpec = schema_converter(Options),
Init = trans_desc(Init0, Hocon, SchemaToSpec, NameBin),
{Prop, Refs1} = SchemaToSpec(HoconType, Module),
NewRequiredAcc =
case is_required(Hocon) of
true -> [NameBin | Required];
false -> Required
end,
parse_object_loop(
Rest,
Module,
Options,
[{NameBin, maps:merge(Prop, Init)} | Props],
NewRequiredAcc,
Refs1 ++ Refs
);
false ->
%% TODO: there are only a handful of such non-schema fields;
%% refactor the schema to unify the two cases
{SubObject, SubRefs} = parse_object(Hocon, Module, Options),
parse_object_loop(
Rest, Module, Options, [{NameBin, SubObject} | Props], Required, SubRefs ++ Refs
)
end.
%% return true if the field has 'importance' set to 'hidden'
is_hidden(Hocon) ->
hocon_schema:is_hidden(Hocon, #{include_importance_up_from => ?IMPORTANCE_LOW}).
is_required(Hocon) ->
hocon_schema:field_schema(Hocon, required) =:= true.

View File

@ -61,7 +61,7 @@ t_object(_Config) ->
#{
<<"schema">> =>
#{
required => [<<"timeout">>, <<"per_page">>],
required => [<<"per_page">>, <<"timeout">>],
<<"properties">> => [
{<<"per_page">>, #{
description => <<"good per page desc">>,

View File

@ -59,7 +59,7 @@ t_object(_config) ->
<<"application/json">> =>
#{
<<"schema">> => #{
required => [<<"timeout">>, <<"per_page">>],
required => [<<"per_page">>, <<"timeout">>],
<<"properties">> => [
{<<"per_page">>, #{
description => <<"good per page desc">>,

View File

@ -2,7 +2,7 @@
{application, emqx_prometheus, [
{description, "Prometheus for EMQX"},
% strict semver, bump manually!
{vsn, "5.0.7"},
{vsn, "5.0.8"},
{modules, []},
{registered, [emqx_prometheus_sup]},
{applications, [kernel, stdlib, prometheus, emqx, emqx_management]},

View File

@ -90,7 +90,7 @@ fields("prometheus") ->
#{
default => enabled,
required => true,
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_dist_collector)
}
)},
@ -100,7 +100,7 @@ fields("prometheus") ->
#{
default => enabled,
required => true,
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(mnesia_collector)
}
)},
@ -110,7 +110,7 @@ fields("prometheus") ->
#{
default => enabled,
required => true,
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_statistics_collector)
}
)},
@ -120,7 +120,7 @@ fields("prometheus") ->
#{
default => enabled,
required => true,
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_system_info_collector)
}
)},
@ -130,7 +130,7 @@ fields("prometheus") ->
#{
default => enabled,
required => true,
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_memory_collector)
}
)},
@ -140,7 +140,7 @@ fields("prometheus") ->
#{
default => enabled,
required => true,
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_msacc_collector)
}
)}

View File

@ -83,7 +83,7 @@ worker_pool_size(required) -> false;
worker_pool_size(_) -> undefined.
resume_interval(type) -> emqx_schema:duration_ms();
resume_interval(hidden) -> true;
resume_interval(importance) -> hidden;
resume_interval(desc) -> ?DESC("resume_interval");
resume_interval(required) -> false;
resume_interval(_) -> undefined.

View File

@ -28,7 +28,7 @@
init_per_suite(Config) ->
application:load(emqx_conf),
ConfigConf = <<"rule_engine {jq_function_default_timeout {}}">>,
ConfigConf = <<"rule_engine {jq_function_default_timeout=10s}">>,
ok = emqx_common_test_helpers:load_config(emqx_rule_engine_schema, ConfigConf),
Config.
@ -691,20 +691,10 @@ t_jq(_) ->
ConfigRootKey,
jq_function_default_timeout
]),
case DefaultTimeOut =< 15000 of
true ->
got_timeout =
try
apply_func(jq, [TOProgram, <<"-2">>])
catch
throw:{jq_exception, {timeout, _}} ->
%% Got timeout as expected
got_timeout
end;
false ->
%% Skip test as we don't want it to take to long time to run
ok
end.
?assertThrow(
{jq_exception, {timeout, _}},
apply_func(jq, [TOProgram, <<"-2">>])
).
ascii_string() -> list(range(0, 127)).

View File

@ -0,0 +1,3 @@
Hide the `broker.broker_perf` config from the config and API documents.
The two configs `route_lock_type` and `trie_compaction` are rarely used and require a full cluster restart to take effect, so they are not suitable to expose to users.
Detailed changes can be found here: https://gist.github.com/zmstone/01ad5754b9beaeaf3f5b86d14d49a0b7/revisions
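
A quick way to confirm the change on a running node (host and port are examples; the grep mirrors the smoke-test check added at the end of this PR):

# Sketch: the hidden field must no longer appear in the published API spec.
SWAGGER_URL="http://127.0.0.1:18083/api-docs/swagger.json"
curl -s "$SWAGGER_URL" | jq . >/dev/null            # spec is still valid JSON
if curl -s "$SWAGGER_URL" | grep -q trie_compaction; then
    echo "hidden fields leaked into swagger.json"
fi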

View File

@ -0,0 +1,3 @@
Hide the `broker.broker_perf` config; it is no longer shown in the config or API documents.
The two configs `route_lock_type` and `trie_compaction` are rarely used and require a full cluster restart to take effect, so they are not suitable to expose to users.
Detailed comparison: https://gist.github.com/zmstone/01ad5754b9beaeaf3f5b86d14d49a0b7/revisions

View File

@ -364,7 +364,7 @@ fields(consumer_kafka_opts) ->
})},
{max_rejoin_attempts,
mk(non_neg_integer(), #{
hidden => true,
importance => ?IMPORTANCE_HIDDEN,
default => 5,
desc => ?DESC(consumer_max_rejoin_attempts)
})},

View File

@ -72,7 +72,7 @@ defmodule EMQXUmbrella.MixProject do
# in conflict by emqtt and hocon
{:getopt, "1.0.2", override: true},
{:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "1.0.7", override: true},
{:hocon, github: "emqx/hocon", tag: "0.37.2", override: true},
{:hocon, github: "emqx/hocon", tag: "0.38.0", override: true},
{:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.2", override: true},
{:esasl, github: "emqx/esasl", tag: "0.2.0"},
{:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"},

View File

@ -75,7 +75,7 @@
, {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.3"}}}
, {getopt, "1.0.2"}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.7"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.37.2"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.38.0"}}}
, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}}
, {esasl, {git, "https://github.com/emqx/esasl", {tag, "0.2.0"}}}
, {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}}

View File

@ -8,6 +8,7 @@ IP=$1
PORT=$2
URL="http://$IP:$PORT/status"
## Check if EMQX is responding
ATTEMPTS=10
while ! curl "$URL" >/dev/null 2>&1; do
if [ $ATTEMPTS -eq 0 ]; then
@ -17,3 +18,26 @@ while ! curl "$URL" >/dev/null 2>&1; do
sleep 5
ATTEMPTS=$((ATTEMPTS-1))
done
## Check if the API docs are available
API_DOCS_URL="http://$IP:$PORT/api-docs/index.html"
API_DOCS_STATUS="$(curl -s -o /dev/null -w "%{http_code}" "$API_DOCS_URL")"
if [ "$API_DOCS_STATUS" != "200" ]; then
echo "emqx is not responding on $API_DOCS_URL"
exit 1
fi
## Check if the swagger.json contains hidden fields
## fail if it does
SWAGGER_JSON_URL="http://$IP:$PORT/api-docs/swagger.json"
## assert swagger.json is valid json
JSON="$(curl -s "$SWAGGER_JSON_URL")"
echo "$JSON" | jq . >/dev/null
if [ "${EMQX_SMOKE_TEST_CHECK_HIDDEN_FIELDS:-yes}" = 'yes' ]; then
## assert swagger.json does not contain trie_compaction (which is a hidden field)
if echo "$JSON" | grep -q trie_compaction; then
echo "swagger.json contains hidden fields"
exit 1
fi
fi