Merge pull request #10211 from zmstone/0321-hide-broker-perf-configs

0321 hide broker perf configs
This commit is contained in:
Zaiming (Stone) Shi 2023-03-28 20:31:51 +02:00 committed by GitHub
commit 7c0c36ff9b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 163 additions and 75 deletions

View File

@ -194,12 +194,15 @@ jobs:
run: | run: |
CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG) CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
export EMQX_SMOKE_TEST_CHECK_HIDDEN_FIELDS='yes'
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker stop $CID docker stop $CID
- name: test two nodes cluster with proto_dist=inet_tls in docker - name: test two nodes cluster with proto_dist=inet_tls in docker
run: | run: |
./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG ./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy)
# versions before 5.0.22 have hidden fields included in the API spec
export EMQX_SMOKE_TEST_CHECK_HIDDEN_FIELDS='no'
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
# cleanup # cleanup
./scripts/test/start-two-nodes-in-docker.sh -c ./scripts/test/start-two-nodes-in-docker.sh -c

View File

@ -29,7 +29,7 @@
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}}, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.6"}}}, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.6"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.37.2"}}}, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.38.0"}}},
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}},
{pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
{recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},

View File

@ -230,7 +230,7 @@ roots(low) ->
{"crl_cache", {"crl_cache",
sc( sc(
ref("crl_cache"), ref("crl_cache"),
#{hidden => true} #{importance => ?IMPORTANCE_HIDDEN}
)} )}
]. ].
@ -1492,7 +1492,7 @@ fields("broker") ->
{"perf", {"perf",
sc( sc(
ref("broker_perf"), ref("broker_perf"),
#{} #{importance => ?IMPORTANCE_HIDDEN}
)}, )},
{"shared_subscription_group", {"shared_subscription_group",
sc( sc(
@ -2299,7 +2299,7 @@ server_ssl_opts_schema(Defaults, IsRanchListener) ->
#{ #{
required => false, required => false,
%% TODO: remove after e5.0.2 %% TODO: remove after e5.0.2
hidden => true, importance => ?IMPORTANCE_HIDDEN,
validator => fun ocsp_inner_validator/1 validator => fun ocsp_inner_validator/1
} }
)}, )},
@ -2997,7 +2997,7 @@ quic_feature_toggle(Desc) ->
typerefl:alias("boolean", typerefl:union([true, false, 0, 1])), typerefl:alias("boolean", typerefl:union([true, false, 0, 1])),
#{ #{
desc => Desc, desc => Desc,
hidden => true, importance => ?IMPORTANCE_HIDDEN,
required => false, required => false,
converter => fun converter => fun
(true) -> 1; (true) -> 1;
@ -3012,7 +3012,7 @@ quic_lowlevel_settings_uint(Low, High, Desc) ->
range(Low, High), range(Low, High),
#{ #{
required => false, required => false,
hidden => true, importance => ?IMPORTANCE_HIDDEN,
desc => Desc desc => Desc
} }
). ).

View File

@ -1,6 +1,6 @@
{application, emqx_conf, [ {application, emqx_conf, [
{description, "EMQX configuration management"}, {description, "EMQX configuration management"},
{vsn, "0.1.14"}, {vsn, "0.1.15"},
{registered, []}, {registered, []},
{mod, {emqx_conf_app, []}}, {mod, {emqx_conf_app, []}},
{applications, [kernel, stdlib, emqx_ctl]}, {applications, [kernel, stdlib, emqx_ctl]},

View File

@ -156,7 +156,15 @@ dump_schema(Dir, SchemaModule, I18nFile) ->
gen_schema_json(Dir, I18nFile, SchemaModule, Lang) -> gen_schema_json(Dir, I18nFile, SchemaModule, Lang) ->
SchemaJsonFile = filename:join([Dir, "schema-" ++ Lang ++ ".json"]), SchemaJsonFile = filename:join([Dir, "schema-" ++ Lang ++ ".json"]),
io:format(user, "===< Generating: ~s~n", [SchemaJsonFile]), io:format(user, "===< Generating: ~s~n", [SchemaJsonFile]),
Opts = #{desc_file => I18nFile, lang => Lang}, %% EMQX_SCHEMA_FULL_DUMP is quite a hidden API
%% it is used to dump the full schema for EMQX developers and supporters
IncludeImportance =
case os:getenv("EMQX_SCHEMA_FULL_DUMP") =:= "1" of
true -> ?IMPORTANCE_HIDDEN;
false -> ?IMPORTANCE_LOW
end,
io:format(user, "===< Including fields from importance level: ~p~n", [IncludeImportance]),
Opts = #{desc_file => I18nFile, lang => Lang, include_importance_up_from => IncludeImportance},
JsonMap = hocon_schema_json:gen(SchemaModule, Opts), JsonMap = hocon_schema_json:gen(SchemaModule, Opts),
IoData = jsx:encode(JsonMap, [space, {indent, 4}]), IoData = jsx:encode(JsonMap, [space, {indent, 4}]),
ok = file:write_file(SchemaJsonFile, IoData). ok = file:write_file(SchemaJsonFile, IoData).
@ -220,7 +228,8 @@ gen_example(File, SchemaModule, I18nFile, Lang) ->
title => <<"EMQX Configuration Example">>, title => <<"EMQX Configuration Example">>,
body => <<"">>, body => <<"">>,
desc_file => I18nFile, desc_file => I18nFile,
lang => Lang lang => Lang,
include_importance_up_from => ?IMPORTANCE_MEDIUM
}, },
Example = hocon_schema_example:gen(SchemaModule, Opts), Example = hocon_schema_example:gen(SchemaModule, Opts),
file:write_file(File, Example). file:write_file(File, Example).

View File

@ -28,7 +28,18 @@
-define(DEFAULT_INIT_TXN_ID, -1). -define(DEFAULT_INIT_TXN_ID, -1).
start(_StartType, _StartArgs) -> start(_StartType, _StartArgs) ->
init_conf(), try
ok = init_conf()
catch
C:E:St ->
?SLOG(critical, #{
msg => failed_to_init_config,
exception => C,
reason => E,
stacktrace => St
}),
init:stop()
end,
ok = emqx_config_logger:refresh_config(), ok = emqx_config_logger:refresh_config(),
emqx_conf_sup:start_link(). emqx_conf_sup:start_link().
@ -85,9 +96,9 @@ init_load() ->
init_conf() -> init_conf() ->
{ok, TnxId} = copy_override_conf_from_core_node(), {ok, TnxId} = copy_override_conf_from_core_node(),
emqx_app:set_init_tnx_id(TnxId), _ = emqx_app:set_init_tnx_id(TnxId),
init_load(), ok = init_load(),
emqx_app:set_init_config_load_done(). ok = emqx_app:set_init_config_load_done().
cluster_nodes() -> cluster_nodes() ->
maps:get(running_nodes, ekka_cluster:info()) -- [node()]. maps:get(running_nodes, ekka_cluster:info()) -- [node()].

View File

@ -397,6 +397,7 @@ fields("node") ->
#{ #{
default => <<"emqx@127.0.0.1">>, default => <<"emqx@127.0.0.1">>,
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_HIGH,
desc => ?DESC(node_name) desc => ?DESC(node_name)
} }
)}, )},
@ -409,6 +410,7 @@ fields("node") ->
'readOnly' => true, 'readOnly' => true,
sensitive => true, sensitive => true,
desc => ?DESC(node_cookie), desc => ?DESC(node_cookie),
importance => ?IMPORTANCE_HIGH,
converter => fun emqx_schema:password_converter/2 converter => fun emqx_schema:password_converter/2
} }
)}, )},
@ -419,6 +421,7 @@ fields("node") ->
mapping => "vm_args.+P", mapping => "vm_args.+P",
desc => ?DESC(process_limit), desc => ?DESC(process_limit),
default => 2097152, default => 2097152,
importance => ?IMPORTANCE_MEDIUM,
'readOnly' => true 'readOnly' => true
} }
)}, )},
@ -429,6 +432,7 @@ fields("node") ->
mapping => "vm_args.+Q", mapping => "vm_args.+Q",
desc => ?DESC(max_ports), desc => ?DESC(max_ports),
default => 1048576, default => 1048576,
importance => ?IMPORTANCE_HIGH,
'readOnly' => true 'readOnly' => true
} }
)}, )},
@ -439,6 +443,7 @@ fields("node") ->
mapping => "vm_args.+zdbbl", mapping => "vm_args.+zdbbl",
desc => ?DESC(dist_buffer_size), desc => ?DESC(dist_buffer_size),
default => 8192, default => 8192,
importance => ?IMPORTANCE_LOW,
'readOnly' => true 'readOnly' => true
} }
)}, )},
@ -449,6 +454,7 @@ fields("node") ->
mapping => "vm_args.+e", mapping => "vm_args.+e",
desc => ?DESC(max_ets_tables), desc => ?DESC(max_ets_tables),
default => 262144, default => 262144,
importance => ?IMPORTANCE_LOW,
'readOnly' => true 'readOnly' => true
} }
)}, )},
@ -459,6 +465,10 @@ fields("node") ->
required => true, required => true,
'readOnly' => true, 'readOnly' => true,
mapping => "emqx.data_dir", mapping => "emqx.data_dir",
%% for now, it's tricky to use a different data_dir
%% otherwise data paths in cluster config may differ
%% TODO: change configurable data file paths to relative
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(node_data_dir) desc => ?DESC(node_data_dir)
} }
)}, )},
@ -467,7 +477,7 @@ fields("node") ->
hoconsc:array(string()), hoconsc:array(string()),
#{ #{
mapping => "emqx.config_files", mapping => "emqx.config_files",
hidden => true, importance => ?IMPORTANCE_HIDDEN,
required => false, required => false,
'readOnly' => true 'readOnly' => true
} }
@ -479,6 +489,7 @@ fields("node") ->
mapping => "emqx_machine.global_gc_interval", mapping => "emqx_machine.global_gc_interval",
default => <<"15m">>, default => <<"15m">>,
desc => ?DESC(node_global_gc_interval), desc => ?DESC(node_global_gc_interval),
importance => ?IMPORTANCE_LOW,
'readOnly' => true 'readOnly' => true
} }
)}, )},
@ -489,6 +500,7 @@ fields("node") ->
mapping => "vm_args.-env ERL_CRASH_DUMP", mapping => "vm_args.-env ERL_CRASH_DUMP",
desc => ?DESC(node_crash_dump_file), desc => ?DESC(node_crash_dump_file),
default => crash_dump_file_default(), default => crash_dump_file_default(),
importance => ?IMPORTANCE_LOW,
'readOnly' => true 'readOnly' => true
} }
)}, )},
@ -499,6 +511,7 @@ fields("node") ->
mapping => "vm_args.-env ERL_CRASH_DUMP_SECONDS", mapping => "vm_args.-env ERL_CRASH_DUMP_SECONDS",
default => <<"30s">>, default => <<"30s">>,
desc => ?DESC(node_crash_dump_seconds), desc => ?DESC(node_crash_dump_seconds),
importance => ?IMPORTANCE_LOW,
'readOnly' => true 'readOnly' => true
} }
)}, )},
@ -509,6 +522,7 @@ fields("node") ->
mapping => "vm_args.-env ERL_CRASH_DUMP_BYTES", mapping => "vm_args.-env ERL_CRASH_DUMP_BYTES",
default => <<"100MB">>, default => <<"100MB">>,
desc => ?DESC(node_crash_dump_bytes), desc => ?DESC(node_crash_dump_bytes),
importance => ?IMPORTANCE_LOW,
'readOnly' => true 'readOnly' => true
} }
)}, )},
@ -519,6 +533,7 @@ fields("node") ->
mapping => "vm_args.-kernel net_ticktime", mapping => "vm_args.-kernel net_ticktime",
default => <<"2m">>, default => <<"2m">>,
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_LOW,
desc => ?DESC(node_dist_net_ticktime) desc => ?DESC(node_dist_net_ticktime)
} }
)}, )},
@ -529,6 +544,7 @@ fields("node") ->
mapping => "emqx_machine.backtrace_depth", mapping => "emqx_machine.backtrace_depth",
default => 23, default => 23,
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_LOW,
desc => ?DESC(node_backtrace_depth) desc => ?DESC(node_backtrace_depth)
} }
)}, )},
@ -539,6 +555,7 @@ fields("node") ->
mapping => "emqx_machine.applications", mapping => "emqx_machine.applications",
default => [], default => [],
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_LOW,
desc => ?DESC(node_applications) desc => ?DESC(node_applications)
} }
)}, )},
@ -548,13 +565,17 @@ fields("node") ->
#{ #{
desc => ?DESC(node_etc_dir), desc => ?DESC(node_etc_dir),
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_LOW,
deprecated => {since, "5.0.8"} deprecated => {since, "5.0.8"}
} }
)}, )},
{"cluster_call", {"cluster_call",
sc( sc(
?R_REF("cluster_call"), ?R_REF("cluster_call"),
#{'readOnly' => true} #{
'readOnly' => true,
importance => ?IMPORTANCE_LOW
}
)}, )},
{"db_backend", {"db_backend",
sc( sc(
@ -563,6 +584,7 @@ fields("node") ->
mapping => "mria.db_backend", mapping => "mria.db_backend",
default => rlog, default => rlog,
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(db_backend) desc => ?DESC(db_backend)
} }
)}, )},
@ -573,6 +595,7 @@ fields("node") ->
mapping => "mria.node_role", mapping => "mria.node_role",
default => core, default => core,
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_HIGH,
desc => ?DESC(db_role) desc => ?DESC(db_role)
} }
)}, )},
@ -583,6 +606,7 @@ fields("node") ->
mapping => "mria.rlog_rpc_module", mapping => "mria.rlog_rpc_module",
default => gen_rpc, default => gen_rpc,
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(db_rpc_module) desc => ?DESC(db_rpc_module)
} }
)}, )},
@ -593,6 +617,7 @@ fields("node") ->
mapping => "mria.tlog_push_mode", mapping => "mria.tlog_push_mode",
default => async, default => async,
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_LOW,
desc => ?DESC(db_tlog_push_mode) desc => ?DESC(db_tlog_push_mode)
} }
)}, )},
@ -601,7 +626,7 @@ fields("node") ->
hoconsc:enum([gen_rpc, distr]), hoconsc:enum([gen_rpc, distr]),
#{ #{
mapping => "mria.shard_transport", mapping => "mria.shard_transport",
hidden => true, importance => ?IMPORTANCE_HIDDEN,
default => gen_rpc, default => gen_rpc,
desc => ?DESC(db_default_shard_transport) desc => ?DESC(db_default_shard_transport)
} }
@ -611,7 +636,7 @@ fields("node") ->
map(shard, hoconsc:enum([gen_rpc, distr])), map(shard, hoconsc:enum([gen_rpc, distr])),
#{ #{
desc => ?DESC(db_shard_transports), desc => ?DESC(db_shard_transports),
hidden => true, importance => ?IMPORTANCE_HIDDEN,
mapping => "emqx_machine.custom_shard_transports", mapping => "emqx_machine.custom_shard_transports",
default => #{} default => #{}
} }

View File

@ -830,36 +830,8 @@ to_bin(X) ->
X. X.
parse_object(PropList = [_ | _], Module, Options) when is_list(PropList) -> parse_object(PropList = [_ | _], Module, Options) when is_list(PropList) ->
{Props, Required, Refs} = {Props, Required, Refs} = parse_object_loop(PropList, Module, Options),
lists:foldl( Object = #{<<"type">> => object, <<"properties">> => Props},
fun({Name, Hocon}, {Acc, RequiredAcc, RefsAcc}) ->
NameBin = to_bin(Name),
case hoconsc:is_schema(Hocon) of
true ->
HoconType = hocon_schema:field_schema(Hocon, type),
Init0 = init_prop([default | ?DEFAULT_FIELDS], #{}, Hocon),
SchemaToSpec = schema_converter(Options),
Init = trans_desc(Init0, Hocon, SchemaToSpec, NameBin),
{Prop, Refs1} = SchemaToSpec(HoconType, Module),
NewRequiredAcc =
case is_required(Hocon) of
true -> [NameBin | RequiredAcc];
false -> RequiredAcc
end,
{
[{NameBin, maps:merge(Prop, Init)} | Acc],
NewRequiredAcc,
Refs1 ++ RefsAcc
};
false ->
{SubObject, SubRefs} = parse_object(Hocon, Module, Options),
{[{NameBin, SubObject} | Acc], RequiredAcc, SubRefs ++ RefsAcc}
end
end,
{[], [], []},
PropList
),
Object = #{<<"type">> => object, <<"properties">> => lists:reverse(Props)},
case Required of case Required of
[] -> {Object, Refs}; [] -> {Object, Refs};
_ -> {maps:put(required, Required, Object), Refs} _ -> {maps:put(required, Required, Object), Refs}
@ -874,6 +846,54 @@ parse_object(Other, Module, Options) ->
}} }}
). ).
parse_object_loop(PropList0, Module, Options) ->
PropList = lists:filter(
fun({_, Hocon}) ->
case hoconsc:is_schema(Hocon) andalso is_hidden(Hocon) of
true -> false;
false -> true
end
end,
PropList0
),
parse_object_loop(PropList, Module, Options, _Props = [], _Required = [], _Refs = []).
parse_object_loop([], _Modlue, _Options, Props, Required, Refs) ->
{lists:reverse(Props), lists:usort(Required), Refs};
parse_object_loop([{Name, Hocon} | Rest], Module, Options, Props, Required, Refs) ->
NameBin = to_bin(Name),
case hoconsc:is_schema(Hocon) of
true ->
HoconType = hocon_schema:field_schema(Hocon, type),
Init0 = init_prop([default | ?DEFAULT_FIELDS], #{}, Hocon),
SchemaToSpec = schema_converter(Options),
Init = trans_desc(Init0, Hocon, SchemaToSpec, NameBin),
{Prop, Refs1} = SchemaToSpec(HoconType, Module),
NewRequiredAcc =
case is_required(Hocon) of
true -> [NameBin | Required];
false -> Required
end,
parse_object_loop(
Rest,
Module,
Options,
[{NameBin, maps:merge(Prop, Init)} | Props],
NewRequiredAcc,
Refs1 ++ Refs
);
false ->
            %% TODO: there are only a handful of such cases;
            %% refactor the schema to unify the two cases
{SubObject, SubRefs} = parse_object(Hocon, Module, Options),
parse_object_loop(
Rest, Module, Options, [{NameBin, SubObject} | Props], Required, SubRefs ++ Refs
)
end.
%% return true if the field has 'importance' set to 'hidden'
is_hidden(Hocon) ->
hocon_schema:is_hidden(Hocon, #{include_importance_up_from => ?IMPORTANCE_LOW}).
is_required(Hocon) -> is_required(Hocon) ->
hocon_schema:field_schema(Hocon, required) =:= true. hocon_schema:field_schema(Hocon, required) =:= true.

View File

@ -61,7 +61,7 @@ t_object(_Config) ->
#{ #{
<<"schema">> => <<"schema">> =>
#{ #{
required => [<<"timeout">>, <<"per_page">>], required => [<<"per_page">>, <<"timeout">>],
<<"properties">> => [ <<"properties">> => [
{<<"per_page">>, #{ {<<"per_page">>, #{
description => <<"good per page desc">>, description => <<"good per page desc">>,

View File

@ -59,7 +59,7 @@ t_object(_config) ->
<<"application/json">> => <<"application/json">> =>
#{ #{
<<"schema">> => #{ <<"schema">> => #{
required => [<<"timeout">>, <<"per_page">>], required => [<<"per_page">>, <<"timeout">>],
<<"properties">> => [ <<"properties">> => [
{<<"per_page">>, #{ {<<"per_page">>, #{
description => <<"good per page desc">>, description => <<"good per page desc">>,

View File

@ -2,7 +2,7 @@
{application, emqx_prometheus, [ {application, emqx_prometheus, [
{description, "Prometheus for EMQX"}, {description, "Prometheus for EMQX"},
% strict semver, bump manually! % strict semver, bump manually!
{vsn, "5.0.7"}, {vsn, "5.0.8"},
{modules, []}, {modules, []},
{registered, [emqx_prometheus_sup]}, {registered, [emqx_prometheus_sup]},
{applications, [kernel, stdlib, prometheus, emqx, emqx_management]}, {applications, [kernel, stdlib, prometheus, emqx, emqx_management]},

View File

@ -90,7 +90,7 @@ fields("prometheus") ->
#{ #{
default => enabled, default => enabled,
required => true, required => true,
hidden => true, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_dist_collector) desc => ?DESC(vm_dist_collector)
} }
)}, )},
@ -100,7 +100,7 @@ fields("prometheus") ->
#{ #{
default => enabled, default => enabled,
required => true, required => true,
hidden => true, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(mnesia_collector) desc => ?DESC(mnesia_collector)
} }
)}, )},
@ -110,7 +110,7 @@ fields("prometheus") ->
#{ #{
default => enabled, default => enabled,
required => true, required => true,
hidden => true, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_statistics_collector) desc => ?DESC(vm_statistics_collector)
} }
)}, )},
@ -120,7 +120,7 @@ fields("prometheus") ->
#{ #{
default => enabled, default => enabled,
required => true, required => true,
hidden => true, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_system_info_collector) desc => ?DESC(vm_system_info_collector)
} }
)}, )},
@ -130,7 +130,7 @@ fields("prometheus") ->
#{ #{
default => enabled, default => enabled,
required => true, required => true,
hidden => true, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_memory_collector) desc => ?DESC(vm_memory_collector)
} }
)}, )},
@ -140,7 +140,7 @@ fields("prometheus") ->
#{ #{
default => enabled, default => enabled,
required => true, required => true,
hidden => true, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(vm_msacc_collector) desc => ?DESC(vm_msacc_collector)
} }
)} )}

View File

@ -83,7 +83,7 @@ worker_pool_size(required) -> false;
worker_pool_size(_) -> undefined. worker_pool_size(_) -> undefined.
resume_interval(type) -> emqx_schema:duration_ms(); resume_interval(type) -> emqx_schema:duration_ms();
resume_interval(hidden) -> true; resume_interval(importance) -> hidden;
resume_interval(desc) -> ?DESC("resume_interval"); resume_interval(desc) -> ?DESC("resume_interval");
resume_interval(required) -> false; resume_interval(required) -> false;
resume_interval(_) -> undefined. resume_interval(_) -> undefined.

View File

@ -28,7 +28,7 @@
init_per_suite(Config) -> init_per_suite(Config) ->
application:load(emqx_conf), application:load(emqx_conf),
ConfigConf = <<"rule_engine {jq_function_default_timeout {}}">>, ConfigConf = <<"rule_engine {jq_function_default_timeout=10s}">>,
ok = emqx_common_test_helpers:load_config(emqx_rule_engine_schema, ConfigConf), ok = emqx_common_test_helpers:load_config(emqx_rule_engine_schema, ConfigConf),
Config. Config.
@ -691,20 +691,10 @@ t_jq(_) ->
ConfigRootKey, ConfigRootKey,
jq_function_default_timeout jq_function_default_timeout
]), ]),
case DefaultTimeOut =< 15000 of ?assertThrow(
true -> {jq_exception, {timeout, _}},
got_timeout = apply_func(jq, [TOProgram, <<"-2">>])
try ).
apply_func(jq, [TOProgram, <<"-2">>])
catch
throw:{jq_exception, {timeout, _}} ->
%% Got timeout as expected
got_timeout
end;
false ->
%% Skip test as we don't want it to take to long time to run
ok
end.
ascii_string() -> list(range(0, 127)). ascii_string() -> list(range(0, 127)).

View File

@ -0,0 +1,3 @@
Hide the `broker.broker_perf` config from the configuration and API documents.
The two configs `route_lock_type` and `trie_compaction` are rarely used and require a full cluster restart to take effect. They are not suitable to expose to users.
Detailed changes can be found here: https://gist.github.com/zmstone/01ad5754b9beaeaf3f5b86d14d49a0b7/revisions

View File

@ -0,0 +1,3 @@
隐藏 `broker.broker_perf` 配置项,不再在 配置和 API 的文档中展示。
`route_lock_type``trie_compaction` 这两个配置项很少使用,且需要全集群重启才能生效,不适合暴露给用户。
详细对比: https://gist.github.com/zmstone/01ad5754b9beaeaf3f5b86d14d49a0b7/revisions

View File

@ -364,7 +364,7 @@ fields(consumer_kafka_opts) ->
})}, })},
{max_rejoin_attempts, {max_rejoin_attempts,
mk(non_neg_integer(), #{ mk(non_neg_integer(), #{
hidden => true, importance => ?IMPORTANCE_HIDDEN,
default => 5, default => 5,
desc => ?DESC(consumer_max_rejoin_attempts) desc => ?DESC(consumer_max_rejoin_attempts)
})}, })},

View File

@ -72,7 +72,7 @@ defmodule EMQXUmbrella.MixProject do
# in conflict by emqtt and hocon # in conflict by emqtt and hocon
{:getopt, "1.0.2", override: true}, {:getopt, "1.0.2", override: true},
{:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "1.0.7", override: true}, {:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "1.0.7", override: true},
{:hocon, github: "emqx/hocon", tag: "0.37.2", override: true}, {:hocon, github: "emqx/hocon", tag: "0.38.0", override: true},
{:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.2", override: true}, {:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.2", override: true},
{:esasl, github: "emqx/esasl", tag: "0.2.0"}, {:esasl, github: "emqx/esasl", tag: "0.2.0"},
{:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"}, {:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"},

View File

@ -75,7 +75,7 @@
, {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.3"}}} , {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.3"}}}
, {getopt, "1.0.2"} , {getopt, "1.0.2"}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.7"}}} , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.7"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.37.2"}}} , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.38.0"}}}
, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}} , {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}}
, {esasl, {git, "https://github.com/emqx/esasl", {tag, "0.2.0"}}} , {esasl, {git, "https://github.com/emqx/esasl", {tag, "0.2.0"}}}
, {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}} , {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}}

View File

@ -8,6 +8,7 @@ IP=$1
PORT=$2 PORT=$2
URL="http://$IP:$PORT/status" URL="http://$IP:$PORT/status"
## Check if EMQX is responding
ATTEMPTS=10 ATTEMPTS=10
while ! curl "$URL" >/dev/null 2>&1; do while ! curl "$URL" >/dev/null 2>&1; do
if [ $ATTEMPTS -eq 0 ]; then if [ $ATTEMPTS -eq 0 ]; then
@ -17,3 +18,26 @@ while ! curl "$URL" >/dev/null 2>&1; do
sleep 5 sleep 5
ATTEMPTS=$((ATTEMPTS-1)) ATTEMPTS=$((ATTEMPTS-1))
done done
## Check if the API docs are available
API_DOCS_URL="http://$IP:$PORT/api-docs/index.html"
API_DOCS_STATUS="$(curl -s -o /dev/null -w "%{http_code}" "$API_DOCS_URL")"
if [ "$API_DOCS_STATUS" != "200" ]; then
echo "emqx is not responding on $API_DOCS_URL"
exit 1
fi
## Check if the swagger.json contains hidden fields
## fail if it does
SWAGGER_JSON_URL="http://$IP:$PORT/api-docs/swagger.json"
## assert swagger.json is valid json
JSON="$(curl -s "$SWAGGER_JSON_URL")"
echo "$JSON" | jq . >/dev/null
if [ "${EMQX_SMOKE_TEST_CHECK_HIDDEN_FIELDS:-yes}" = 'yes' ]; then
## assert swagger.json does not contain trie_compaction (which is a hidden field)
if echo "$JSON" | grep -q trie_compaction; then
echo "swagger.json contains hidden fields"
exit 1
fi
fi