Merge pull request #7125 from zhongwencool/schema-json
feat: generate hot config's schema.json and remove nullable option.

commit 13a0eff678
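The change is mechanical across all schema modules: every per-field `nullable => Bool` option becomes `required => (not Bool)`, and the hocon dependency is bumped to 0.25.0, presumably the version that understands the new key. A minimal before/after sketch (the field name is illustrative, not from this commit):

    %% before: an optional field expressed via nullable
    {"max_connections", sc(integer(), #{nullable => true})}
    %% after: the same field expressed via required
    {"max_connections", sc(integer(), #{required => false})}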
@@ -19,7 +19,7 @@
 , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.1"}}}
 , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.12.1"}}}
 , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.0"}}}
-, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.24.0"}}}
+, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.25.0"}}}
 , {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
 , {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
 , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.18.0"}}}
@@ -295,7 +295,7 @@ include_dirs() ->
     [filename:join(emqx:data_dir(), "configs")].

 merge_envs(SchemaMod, RawConf) ->
-    Opts = #{nullable => true, %% TODO: evil, remove, nullable should be declared in schema
+    Opts = #{required => false, %% TODO: evil, remove, required should be declared in schema
              format => map,
              apply_override_envs => true
             },

@@ -308,7 +308,7 @@ check_config(SchemaMod, RawConf) ->

 check_config(SchemaMod, RawConf, Opts0) ->
     Opts1 = #{return_plain => true,
-              nullable => true, %% TODO: evil, remove, nullable should be declared in schema
+              required => false, %% TODO: evil, remove, required should be declared in schema
              format => map
             },
     Opts = maps:merge(Opts0, Opts1),

@@ -331,7 +331,7 @@ fill_defaults(RawConf) ->
 -spec fill_defaults(module(), raw_config()) -> map().
 fill_defaults(SchemaMod, RawConf) ->
     hocon_tconf:check_plain(SchemaMod, RawConf,
-                            #{nullable => true, only_fill_defaults => true},
+                            #{required => false, only_fill_defaults => true},
                             root_names_from_conf(RawConf)).
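The options above still force `required => false` globally at check time; the TODOs say the flag should eventually live on each field instead. A sketch of that per-field form (field names are illustrative, not from this commit):

    %% Declared on the field itself, so callers need no blanket override:
    fields("example") ->
        [ {"bind",     hoconsc:mk(integer(), #{required => true})}   %% must be present
        , {"password", hoconsc:mk(binary(),  #{required => false})}  %% may be omitted
        ].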
@@ -34,9 +34,9 @@ format_path([Name | Rest]) ->
 -spec check(module(), hocon:config() | iodata()) ->
     {ok, hocon:config()} | {error, any()}.
 check(SchemaModule, Conf) when is_map(Conf) ->
-    %% TODO: remove nullable
-    %% fields should state nullable or not in their schema
-    Opts = #{atom_key => true, nullable => true},
+    %% TODO: remove required
+    %% fields should state required or not in their schema
+    Opts = #{atom_key => true, required => false},
     try
         {ok, hocon_tconf:check_plain(SchemaModule, Conf, Opts)}
     catch
@@ -586,31 +586,31 @@ fields("listeners") ->
     [ {"tcp",
        sc(map(name, ref("mqtt_tcp_listener")),
           #{ desc => "TCP listeners"
-           , nullable => {true, recursively}
+           , required => {false, recursively}
            })
       }
    , {"ssl",
       sc(map(name, ref("mqtt_ssl_listener")),
          #{ desc => "SSL listeners"
-          , nullable => {true, recursively}
+          , required => {false, recursively}
          })
      }
    , {"ws",
       sc(map(name, ref("mqtt_ws_listener")),
          #{ desc => "HTTP websocket listeners"
-          , nullable => {true, recursively}
+          , required => {false, recursively}
          })
      }
    , {"wss",
       sc(map(name, ref("mqtt_wss_listener")),
          #{ desc => "HTTPS websocket listeners"
-          , nullable => {true, recursively}
+          , required => {false, recursively}
          })
      }
    , {"quic",
       sc(map(name, ref("mqtt_quic_listener")),
          #{ desc => "QUIC listeners"
-          , nullable => {true, recursively}
+          , required => {false, recursively}
          })
      }
    ];
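The tuple form is worth noting: `required => {false, recursively}` appears to mark the listener map and every field nested under it as optional, so a configuration that omits a whole listener class still validates. A hypothetical listener group following the same pattern:

    {"xyz", sc(map(name, ref("mqtt_xyz_listener")),
               #{ desc => "XYZ listeners"            %% illustrative only
                , required => {false, recursively}
                })}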
@@ -1129,7 +1129,7 @@ mqtt_listener() ->
 base_listener() ->
     [ {"bind",
        sc(hoconsc:union([ip_port(), integer()]),
-          #{ nullable => false
+          #{ required => true
           })}
    , {"acceptors",
       sc(integer(),

@@ -1188,7 +1188,7 @@ common_ssl_opts_schema(Defaults) ->
    , {"cacertfile",
       sc(string(),
          #{ default => D("cacertfile")
-          , nullable => true
+          , required => false
          , desc =>
 """Trusted PEM format CA certificates bundle file.<br>
 The certificates in this file are used to verify the TLS peer's certificates.

@@ -1203,7 +1203,7 @@ already established connections.
    , {"certfile",
       sc(string(),
          #{ default => D("certfile")
-          , nullable => true
+          , required => false
          , desc =>
 """PEM format certificates chain file.<br>
 The certificates in this file should be in reversed order of the certificate

@@ -1217,7 +1217,7 @@ the file if it is to be added.
    , {"keyfile",
       sc(string(),
          #{ default => D("keyfile")
-          , nullable => true
+          , required => false
          , desc =>
 """PEM format private key file.<br>
 """

@@ -1241,7 +1241,7 @@ the file if it is to be added.
    , {"password",
       sc(string(),
          #{ sensitive => true
-          , nullable => true
+          , required => false
          , desc =>
 """String containing the user's password. Only used if the private
 key file is password-protected."""

@@ -1289,7 +1289,7 @@ server_ssl_opts_schema(Defaults, IsRanchListener) ->
    [ {"dhfile",
       sc(string(),
          #{ default => D("dhfile")
-          , nullable => true
+          , required => false
          , desc =>
 """Path to a file containing PEM-encoded Diffie Hellman parameters
 to be used by the server if a cipher suite using Diffie Hellman

@@ -1345,7 +1345,7 @@ client_ssl_opts_schema(Defaults) ->
    common_ssl_opts_schema(Defaults) ++
    [ { "server_name_indication",
        sc(hoconsc:union([disable, string()]),
-          #{ nullable => true
+          #{ required => false
          , desc =>
 """Specify the host name to be used in TLS Server Name Indication extension.<br>
 For instance, when connecting to \"server.example.net\", the genuine server
@@ -116,7 +116,7 @@ fields(request_user_create) ->
 fields(request_user_update) ->
     [
         {password, binary()},
-        {is_superuser, mk(boolean(), #{default => false, nullable => true})}
+        {is_superuser, mk(boolean(), #{default => false, required => false})}
     ];

 fields(request_move) ->

@@ -128,7 +128,7 @@ fields(request_import_users) ->
 fields(response_user) ->
     [
         {user_id, binary()},
-        {is_superuser, mk(boolean(), #{default => false, nullable => true})}
+        {is_superuser, mk(boolean(), #{default => false, required => false})}
     ];

 fields(response_users) ->

@@ -379,8 +379,8 @@ schema("/authentication/:id/users") ->
             description => <<"List users in authenticator in global authentication chain">>,
             parameters => [
                 param_auth_id(),
-                {page, mk(integer(), #{in => query, desc => <<"Page Index">>, nullable => true})},
-                {limit, mk(integer(), #{in => query, desc => <<"Page Limit">>, nullable => true})}
+                {page, mk(integer(), #{in => query, desc => <<"Page Index">>, required => false})},
+                {limit, mk(integer(), #{in => query, desc => <<"Page Limit">>, required => false})}
             ],
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_example(

@@ -415,8 +415,8 @@ schema("/listeners/:listener_id/authentication/:id/users") ->
             description => <<"List users in authenticator in listener authentication chain">>,
             parameters => [
                 param_listener_id(), param_auth_id(),
-                {page, mk(integer(), #{in => query, desc => <<"Page Index">>, nullable => true})},
-                {limit, mk(integer(), #{in => query, desc => <<"Page Limit">>, nullable => true})}
+                {page, mk(integer(), #{in => query, desc => <<"Page Index">>, required => false})},
+                {limit, mk(integer(), #{in => query, desc => <<"Page Limit">>, required => false})}
             ],
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_example(
@@ -81,8 +81,7 @@ salt_rounds(default) -> 10;
 salt_rounds(_) -> undefined.

 dk_length(type) -> integer();
-dk_length(nullable) -> true;
-dk_length(default) -> undefined;
+dk_length(required) -> false;
 dk_length(_) -> undefined.

 type_rw(type) ->

@@ -54,8 +54,8 @@ root_type() ->

 mechanism(Name) ->
     hoconsc:mk(hoconsc:enum([Name]),
-               #{nullable => false}).
+               #{required => true}).

 backend(Name) ->
     hoconsc:mk(hoconsc:enum([Name]),
-               #{nullable => false}).
+               #{required => true}).

@@ -77,7 +77,7 @@ validations() ->

 url(type) -> binary();
 url(validator) -> [?NOT_EMPTY("the value of the field 'url' cannot be empty")];
-url(nullable) -> false;
+url(required) -> true;
 url(_) -> undefined.

 headers(type) -> map();
@@ -76,11 +76,11 @@ password_hash_field(type) -> binary();
 password_hash_field(_) -> undefined.

 salt_field(type) -> binary();
-salt_field(nullable) -> true;
+salt_field(required) -> false;
 salt_field(_) -> undefined.

 is_superuser_field(type) -> binary();
-is_superuser_field(nullable) -> true;
+is_superuser_field(required) -> false;
 is_superuser_field(_) -> undefined.

 %%------------------------------------------------------------------------------

@@ -85,7 +85,7 @@ fields(position) ->

 url(type) -> binary();
 url(validator) -> [?NOT_EMPTY("the value of the field 'url' cannot be empty")];
-url(nullable) -> false;
+url(required) -> true;
 url(_) -> undefined.

 headers(type) -> map();

@@ -145,7 +145,7 @@ fields(redis_cluster) ->
 http_common_fields() ->
     [ {url, fun url/1}
     , {request_timeout, mk_duration("Request timeout", #{default => "30s"})}
-    , {body, #{type => map(), nullable => true}}
+    , {body, #{type => map(), required => false}}
     ] ++ maps:to_list(maps:without([ base_url
                                    , pool_type],
                                    maps:from_list(connector_fields(http)))).

@@ -181,7 +181,7 @@ headers_no_content_type(_) -> undefined.

 url(type) -> binary();
 url(validator) -> [?NOT_EMPTY("the value of the field 'url' cannot be empty")];
-url(nullable) -> false;
+url(required) -> true;
 url(_) -> undefined.

 %%--------------------------------------------------------------------
@@ -13,7 +13,7 @@ roots() -> [].
 fields("bridge") ->
     basic_config() ++
     [ {url, mk(binary(),
-        #{ nullable => false
+        #{ required => true
        , desc =>"""
 The URL of the HTTP Bridge.<br>
 Template with variables is allowed in the path, but variables cannot be used in the scheme, host,

@@ -52,7 +52,7 @@ common_bridge_fields() ->
        })}
    , {connector,
       mk(binary(),
-         #{ nullable => false
+         #{ required => true
        , example => <<"mqtt:my_mqtt_connector">>
        , desc =>"""
 The connector ID to be used for this bridge. Connector IDs must be of format:

@@ -75,7 +75,7 @@ metrics_status_fields() ->

 direction_field(Dir, Desc) ->
     {direction, mk(Dir,
-        #{ nullable => false
+        #{ required => true
        , default => egress
        , desc => "The direction of the bridge. Can be one of 'ingress' or 'egress'.<br>"
                  ++ Desc
@@ -17,6 +17,7 @@

 -compile({no_auto_import, [get/1, get/2]}).
 -include_lib("emqx/include/logger.hrl").
+-include_lib("hocon/include/hoconsc.hrl").

 -export([add_handler/2, remove_handler/1]).
 -export([get/1, get/2, get_raw/2, get_all/1]).

@@ -124,14 +125,22 @@ reset(Node, KeyPath, Opts) ->
 %% @doc Called from build script.
 -spec dump_schema(file:name_all()) -> ok.
 dump_schema(Dir) ->
+    SchemaMdFile = filename:join([Dir, "config.md"]),
+    io:format(user, "===< Generating: ~s~n", [SchemaMdFile ]),
+    ok = gen_doc(SchemaMdFile),
+
+    %% for scripts/spellcheck.
     SchemaJsonFile = filename:join([Dir, "schema.json"]),
+    io:format(user, "===< Generating: ~s~n", [SchemaJsonFile]),
     JsonMap = hocon_schema_json:gen(emqx_conf_schema),
     IoData = jsx:encode(JsonMap, [space, {indent, 4}]),
-    io:format(user, "===< Generating: ~s~n", [SchemaJsonFile]),
     ok = file:write_file(SchemaJsonFile, IoData),
-    SchemaMarkdownFile = filename:join([Dir, "config.md"]),
-    io:format(user, "===< Generating: ~s~n", [SchemaMarkdownFile ]),
-    ok = gen_doc(SchemaMarkdownFile).
+
+    %% hot-update configuration schema
+    HotConfigSchemaFile = filename:join([Dir, "hot-config-schema.json"]),
+    io:format(user, "===< Generating: ~s~n", [HotConfigSchemaFile]),
+    ok = gen_hot_conf_schema(HotConfigSchemaFile),
+    ok.

 %%--------------------------------------------------------------------
 %% Internal functions
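With this, one call from the build script produces all three artifacts. A usage sketch (the calling module name and output directory are assumptions; the function itself is the one defined above):

    %% Assuming this module is emqx_conf; "_build/docgen" is an arbitrary output dir.
    %% Writes config.md, schema.json (full schema, kept for scripts/spellcheck) and
    %% hot-config-schema.json (only the hot-reloadable REST subset).
    ok = emqx_conf:dump_schema("_build/docgen").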
@@ -158,3 +167,157 @@ check_cluster_rpc_result(Result) ->
         {error, Error} -> %% all MFA return not ok or {ok, term()}.
             Error
     end.
+
+%% Only gen hot_conf schema, not all configuration fields.
+gen_hot_conf_schema(File) ->
+    {ApiSpec0, Components0} = emqx_dashboard_swagger:spec(emqx_mgmt_api_configs,
+        #{schema_converter => fun hocon_schema_to_spec/2}),
+    ApiSpec = lists:foldl(fun({Path, Spec, _, _}, Acc) ->
+        NewSpec = maps:fold(fun(Method, #{responses := Responses}, SubAcc) ->
+            case Responses of
+                #{<<"200">> :=
+                    #{<<"content">> := #{<<"application/json">> := #{<<"schema">> := Schema}}}} ->
+                    SubAcc#{Method => Schema};
+                _ -> SubAcc
+            end
+        end, #{}, Spec),
+        Acc#{list_to_atom(Path) => NewSpec} end, #{}, ApiSpec0),
+    Components = lists:foldl(fun(M, Acc) -> maps:merge(M, Acc) end, #{}, Components0),
+    IoData = jsx:encode(#{
+        info => #{title => <<"EMQX Hot Conf Schema">>, version => <<"0.1.0">>},
+        paths => ApiSpec,
+        components => #{schemas => Components}
+    }, [space, {indent, 4}]),
+    file:write_file(File, IoData).
+
+-define(INIT_SCHEMA, #{fields => #{}, translations => #{},
+                       validations => [], namespace => undefined}).
+
+-define(TO_REF(_N_, _F_), iolist_to_binary([to_bin(_N_), ".", to_bin(_F_)])).
+-define(TO_COMPONENTS_SCHEMA(_M_, _F_), iolist_to_binary([<<"#/components/schemas/">>,
+    ?TO_REF(emqx_dashboard_swagger:namespace(_M_), _F_)])).
+
+hocon_schema_to_spec(?R_REF(Module, StructName), _LocalModule) ->
+    {#{<<"$ref">> => ?TO_COMPONENTS_SCHEMA(Module, StructName)},
+     [{Module, StructName}]};
+hocon_schema_to_spec(?REF(StructName), LocalModule) ->
+    {#{<<"$ref">> => ?TO_COMPONENTS_SCHEMA(LocalModule, StructName)},
+     [{LocalModule, StructName}]};
+hocon_schema_to_spec(Type, LocalModule) when ?IS_TYPEREFL(Type) ->
+    {typename_to_spec(typerefl:name(Type), LocalModule), []};
+hocon_schema_to_spec(?ARRAY(Item), LocalModule) ->
+    {Schema, Refs} = hocon_schema_to_spec(Item, LocalModule),
+    {#{type => array, items => Schema}, Refs};
+hocon_schema_to_spec(?LAZY(Item), LocalModule) ->
+    hocon_schema_to_spec(Item, LocalModule);
+hocon_schema_to_spec(?ENUM(Items), _LocalModule) ->
+    {#{type => enum, symbols => Items}, []};
+hocon_schema_to_spec(?MAP(Name, Type), LocalModule) ->
+    {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule),
+    {#{<<"type">> => object,
+       <<"properties">> => #{<<"$", (to_bin(Name))/binary>> => Schema}},
+     SubRefs};
+hocon_schema_to_spec(?UNION(Types), LocalModule) ->
+    {OneOf, Refs} = lists:foldl(fun(Type, {Acc, RefsAcc}) ->
+        {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule),
+        {[Schema | Acc], SubRefs ++ RefsAcc}
+    end, {[], []}, Types),
+    {#{<<"oneOf">> => OneOf}, Refs};
+hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
+    {#{type => enum, symbols => [Atom]}, []}.
+
+typename_to_spec("user_id_type()", _Mod) -> #{type => enum, symbols => [clientid, username]};
+typename_to_spec("term()", _Mod) -> #{type => string};
+typename_to_spec("boolean()", _Mod) -> #{type => boolean};
+typename_to_spec("binary()", _Mod) -> #{type => string};
+typename_to_spec("float()", _Mod) -> #{type => number};
+typename_to_spec("integer()", _Mod) -> #{type => number};
+typename_to_spec("non_neg_integer()", _Mod) -> #{type => number, minimum => 1};
+typename_to_spec("number()", _Mod) -> #{type => number};
+typename_to_spec("string()", _Mod) -> #{type => string};
+typename_to_spec("atom()", _Mod) -> #{type => string};
+
+typename_to_spec("duration()", _Mod) -> #{type => duration};
+typename_to_spec("duration_s()", _Mod) -> #{type => duration};
+typename_to_spec("duration_ms()", _Mod) -> #{type => duration};
+typename_to_spec("percent()", _Mod) -> #{type => percent};
+typename_to_spec("file()", _Mod) -> #{type => string};
+typename_to_spec("ip_port()", _Mod) -> #{type => ip_port};
+typename_to_spec("url()", _Mod) -> #{type => url};
+typename_to_spec("bytesize()", _Mod) -> #{type => byteSize};
+typename_to_spec("wordsize()", _Mod) -> #{type => byteSize};
+typename_to_spec("qos()", _Mod) -> #{type => enum, symbols => [0, 1, 2]};
+typename_to_spec("comma_separated_list()", _Mod) -> #{type => comma_separated_string};
+typename_to_spec("comma_separated_atoms()", _Mod) -> #{type => comma_separated_string};
+typename_to_spec("pool_type()", _Mod) -> #{type => enum, symbols => [random, hash]};
+typename_to_spec("log_level()", _Mod) ->
+    #{type => enum, symbols => [debug, info, notice, warning, error, critical, alert, emergency, all]};
+typename_to_spec("rate()", _Mod) -> #{type => string};
+typename_to_spec("capacity()", _Mod) -> #{type => string};
+typename_to_spec("burst_rate()", _Mod) -> #{type => string};
+typename_to_spec("failure_strategy()", _Mod) -> #{type => enum, symbols => [force, drop, throw]};
+typename_to_spec("initial()", _Mod) -> #{type => string};
+typename_to_spec(Name, Mod) ->
+    Spec = range(Name),
+    Spec1 = remote_module_type(Spec, Name, Mod),
+    Spec2 = typerefl_array(Spec1, Name, Mod),
+    Spec3 = integer(Spec2, Name),
+    default_type(Spec3).
+
+default_type(nomatch) -> #{type => string};
+default_type(Type) -> Type.
+
+range(Name) ->
+    case string:split(Name, "..") of
+        [MinStr, MaxStr] -> %% 1..10 1..inf -inf..10
+            Schema = #{type => number},
+            Schema1 = add_integer_prop(Schema, minimum, MinStr),
+            add_integer_prop(Schema1, maximum, MaxStr);
+        _ -> nomatch
+    end.
+
+%% Module:Type
+remote_module_type(nomatch, Name, Mod) ->
+    case string:split(Name, ":") of
+        [_Module, Type] -> typename_to_spec(Type, Mod);
+        _ -> nomatch
+    end;
+remote_module_type(Spec, _Name, _Mod) -> Spec.
+
+%% [string()] or [integer()] or [xxx].
+typerefl_array(nomatch, Name, Mod) ->
+    case string:trim(Name, leading, "[") of
+        Name -> nomatch;
+        Name1 ->
+            case string:trim(Name1, trailing, "]") of
+                Name1 -> notmatch;
+                Name2 ->
+                    Schema = typename_to_spec(Name2, Mod),
+                    #{type => array, items => Schema}
+            end
+    end;
+typerefl_array(Spec, _Name, _Mod) -> Spec.
+
+%% integer(1)
+integer(nomatch, Name) ->
+    case string:to_integer(Name) of
+        {Int, []} -> #{type => enum, symbols => [Int], default => Int};
+        _ -> nomatch
+    end;
+integer(Spec, _Name) -> Spec.
+
+add_integer_prop(Schema, Key, Value) ->
+    case string:to_integer(Value) of
+        {error, no_integer} -> Schema;
+        {Int, []}when Key =:= minimum -> Schema#{Key => Int};
+        {Int, []} -> Schema#{Key => Int}
+    end.
+
+to_bin(List) when is_list(List) ->
+    case io_lib:printable_list(List) of
+        true -> unicode:characters_to_binary(List);
+        false -> List
+    end;
+to_bin(Boolean) when is_boolean(Boolean) -> Boolean;
+to_bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8);
+to_bin(X) -> X.
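A worked example of the helpers added above, following the clauses as written (the values fall straight out of the code, not from separate documentation):

    %% range/1 splits on ".." and attaches whichever bounds parse as integers:
    %%   range("1..10")  -> #{type => number, minimum => 1, maximum => 10}
    %%   range("1..inf") -> #{type => number, minimum => 1}
    %% typerefl_array/3 peels "[" and "]" and wraps the element spec:
    %%   typerefl_array(nomatch, "[string()]", Mod) -> #{type => array, items => #{type => string}}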
@@ -262,7 +262,7 @@ fields("node") ->
        })}
    , {"data_dir",
       sc(string(),
-         #{ nullable => false,
+         #{ required => true,
            mapping => "emqx.data_dir",
            desc => "Path to the persistent data directory. It must be unique per broker instance."
         })}

@@ -61,7 +61,7 @@ roots() ->
 fields(config) ->
     [ {base_url,
        sc(url(),
-          #{ nullable => false
+          #{ required => true
          , validator => fun(#{query := _Query}) ->
                             {error, "There must be no query in the base_url"};
                            (_) -> ok

@@ -106,7 +106,7 @@ For example: http://localhost:9901/
    , {request, hoconsc:mk(
        ref("request"),
        #{ default => undefined
-        , nullable => true
+        , required => false
        , desc => """
 If the request is provided, the caller can send HTTP requests via
 <code>emqx_resource:query(ResourceId, {send_message, BridgeId, Message})</code>

@@ -115,13 +115,13 @@ If the request is provided, the caller can send HTTP requests via
    ] ++ emqx_connector_schema_lib:ssl_fields();

 fields("request") ->
-    [ {method, hoconsc:mk(hoconsc:enum([post, put, get, delete]), #{nullable => true})}
-    , {path, hoconsc:mk(binary(), #{nullable => true})}
-    , {body, hoconsc:mk(binary(), #{nullable => true})}
-    , {headers, hoconsc:mk(map(), #{nullable => true})}
+    [ {method, hoconsc:mk(hoconsc:enum([post, put, get, delete]), #{required => false})}
+    , {path, hoconsc:mk(binary(), #{required => false})}
+    , {body, hoconsc:mk(binary(), #{required => false})}
+    , {headers, hoconsc:mk(map(), #{required => false})}
    , {request_timeout,
       sc(emqx_schema:duration_ms(),
-         #{ nullable => true
+         #{ required => false
         , desc => "The timeout when sending request to the HTTP server"
         })}
    ].
@@ -91,11 +91,9 @@ mongo_fields() ->
    , {pool_size, fun emqx_connector_schema_lib:pool_size/1}
    , {username, fun emqx_connector_schema_lib:username/1}
    , {password, fun emqx_connector_schema_lib:password/1}
-   , {auth_source, #{type => binary(),
-                     nullable => true}}
+   , {auth_source, #{type => binary(), required => false}}
    , {database, fun emqx_connector_schema_lib:database/1}
-   , {topology, #{type => hoconsc:ref(?MODULE, topology),
-                  nullable => true}}
+   , {topology, #{type => hoconsc:ref(?MODULE, topology), required => false}}
    ] ++
    emqx_connector_schema_lib:ssl_fields().

@@ -289,14 +287,14 @@ init_worker_options([], Acc) -> Acc.
 %% Schema funcs

 server(type) -> emqx_schema:ip_port();
-server(nullable) -> false;
+server(required) -> true;
 server(validator) -> [?NOT_EMPTY("the value of the field 'server' cannot be empty")];
 server(converter) -> fun to_server_raw/1;
 server(desc) -> ?SERVER_DESC("MongoDB", integer_to_list(?MONGO_DEFAULT_PORT));
 server(_) -> undefined.

 servers(type) -> binary();
-servers(nullable) -> false;
+servers(required) -> true;
 servers(validator) -> [?NOT_EMPTY("the value of the field 'servers' cannot be empty")];
 servers(converter) -> fun to_servers_raw/1;
 servers(desc) -> ?SERVERS_DESC ++ server(desc);

@@ -311,11 +309,11 @@ r_mode(default) -> master;
 r_mode(_) -> undefined.

 duration(type) -> emqx_schema:duration_ms();
-duration(nullable) -> true;
+duration(required) -> false;
 duration(_) -> undefined.

 replica_set_name(type) -> binary();
-replica_set_name(nullable) -> true;
+replica_set_name(required) -> false;
 replica_set_name(_) -> undefined.

 srv_record(type) -> boolean();
@@ -50,7 +50,7 @@ fields(config) ->
    emqx_connector_schema_lib:ssl_fields().

 server(type) -> emqx_schema:ip_port();
-server(nullable) -> false;
+server(required) -> true;
 server(validator) -> [?NOT_EMPTY("the value of the field 'server' cannot be empty")];
 server(converter) -> fun to_server/1;
 server(desc) -> ?SERVER_DESC("MySQL", integer_to_list(?MYSQL_DEFAULT_PORT));

@@ -56,11 +56,11 @@ fields(config) ->
    emqx_connector_schema_lib:ssl_fields().

 named_queries(type) -> map();
-named_queries(nullable) -> true;
+named_queries(required) -> false;
 named_queries(_) -> undefined.

 server(type) -> emqx_schema:ip_port();
-server(nullable) -> false;
+server(required) -> true;
 server(validator) -> [?NOT_EMPTY("the value of the field 'server' cannot be empty")];
 server(converter) -> fun to_server/1;
 server(desc) -> ?SERVER_DESC("PostgreSQL", integer_to_list(?PGSQL_DEFAULT_PORT));

@@ -76,14 +76,14 @@ fields(sentinel) ->
    emqx_connector_schema_lib:ssl_fields().

 server(type) -> emqx_schema:ip_port();
-server(nullable) -> false;
+server(required) -> true;
 server(validator) -> [?NOT_EMPTY("the value of the field 'server' cannot be empty")];
 server(converter) -> fun to_server_raw/1;
 server(desc) -> ?SERVER_DESC("Redis", integer_to_list(?REDIS_DEFAULT_PORT));
 server(_) -> undefined.

 servers(type) -> list();
-servers(nullable) -> false;
+servers(required) -> true;
 servers(validator) -> [?NOT_EMPTY("the value of the field 'servers' cannot be empty")];
 servers(converter) -> fun to_servers_raw/1;
 servers(desc) -> ?SERVERS_DESC ++ server(desc);
@@ -66,7 +66,7 @@ relational_db_fields() ->
    ].

 database(type) -> binary();
-database(nullable) -> false;
+database(required) -> true;
 database(validator) -> [?NOT_EMPTY("the value of the field 'database' cannot be empty")];
 database(_) -> undefined.

@@ -76,11 +76,11 @@ pool_size(validator) -> [?MIN(1)];
 pool_size(_) -> undefined.

 username(type) -> binary();
-username(nullable) -> true;
+username(required) -> false;
 username(_) -> undefined.

 password(type) -> binary();
-password(nullable) -> true;
+password(required) -> false;
 password(_) -> undefined.

 auto_reconnect(type) -> boolean();
@@ -57,7 +57,7 @@ topic filters for 'remote_topic' of ingress connections.
        })}
    , {name,
       sc(binary(),
-         #{ nullable => true
+         #{ required => false
         , desc => "Connector name, used as a human-readable description of the connector."
         })}
    , {server,

@@ -105,7 +105,7 @@ fields("ingress") ->
    %% the message maybe subscribed by rules, in this case 'local_topic' is not necessary
    [ {remote_topic,
       sc(binary(),
-         #{ nullable => false
+         #{ required => true
        , desc => "Receive messages from which topic of the remote broker"
        })}
    , {remote_qos,
@@ -28,9 +28,12 @@ check_fields({FieldName, FieldValue}) ->
     ?assert(is_atom(FieldName)),
     if
         is_map(FieldValue) ->
+            ct:pal("~p~n", [{FieldName, FieldValue}]),
             ?assert(
-                maps:is_key(type, FieldValue) and maps:is_key(default, FieldValue) or
-                (maps:is_key(nullable, FieldValue) and maps:get(nullable, FieldValue, false) =:= true)
+                (maps:is_key(type, FieldValue)
+                 andalso maps:is_key(default, FieldValue))
+                orelse ((maps:is_key(required, FieldValue)
+                         andalso maps:get(required, FieldValue) =:= false))
             );
         true ->
             ?assert(is_function(FieldValue))
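For reference, field maps that satisfy the tightened assertion above (values are illustrative):

    %% Either carry both a type and a default ...
    %%   #{type => boolean(), default => false}
    %% ... or be explicitly optional:
    %%   #{type => binary(), required => false}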
@@ -48,7 +48,7 @@ fields("http") ->
    [ {"protocol", sc(
        hoconsc:enum([http, https]),
        #{ desc => "HTTP/HTTPS protocol."
-        , nullable => false
+        , required => true
        , default => http})}
    , {"bind", fun bind/1}
    , {"num_acceptors", sc(

@@ -74,18 +74,18 @@ fields("https") ->

 bind(type) -> hoconsc:union([non_neg_integer(), emqx_schema:ip_port()]);
 bind(default) -> 18083;
-bind(nullable) -> false;
+bind(required) -> true;
 bind(desc) -> "Port without IP(18083) or port with specified IP(127.0.0.1:18083).";
 bind(_) -> undefined.

 default_username(type) -> string();
 default_username(default) -> "admin";
-default_username(nullable) -> false;
+default_username(required) -> true;
 default_username(_) -> undefined.

 default_password(type) -> string();
 default_password(default) -> "public";
-default_password(nullable) -> false;
+default_password(required) -> true;
 default_password(sensitive) -> true;
 default_password(desc) -> """
 The initial default password for dashboard 'admin' user.

@@ -94,7 +94,7 @@ default_password(_) -> undefined.

 cors(type) -> boolean();
 cors(default) -> false;
-cors(nullable) -> true;
+cors(required) -> false;
 cors(desc) ->
 """Support Cross-Origin Resource Sharing (CORS).
 Allows a server to indicate any origins (domain, scheme, or port) other than
@@ -21,21 +21,21 @@

 %% API
 -export([spec/1, spec/2]).
--export([namespace/0, fields/1]).
+-export([namespace/0, namespace/1, fields/1]).
 -export([schema_with_example/2, schema_with_examples/2]).
 -export([error_codes/1, error_codes/2]).

 -export([filter_check_request/2, filter_check_request_and_translate_body/2]).

 -ifdef(TEST).
--export([ parse_spec_ref/2
-        , components/1
+-export([ parse_spec_ref/3
+        , components/2
         ]).
 -endif.

 -define(METHODS, [get, post, put, head, delete, patch, options, trace]).

--define(DEFAULT_FIELDS, [example, allowReserved, style, format,
+-define(DEFAULT_FIELDS, [example, allowReserved, style, format, readOnly,
                          explode, maxLength, allowEmptyValue, deprecated, minimum, maximum]).

 -define(INIT_SCHEMA, #{fields => #{}, translations => #{},

@@ -56,7 +56,10 @@
 -type(filter_result() :: {ok, request()} | {400, 'BAD_REQUEST', binary()}).
 -type(filter() :: fun((request(), request_meta()) -> filter_result())).

--type(spec_opts() :: #{check_schema => boolean() | filter(), translate_body => boolean()}).
+-type(spec_opts() :: #{check_schema => boolean() | filter(),
+                       translate_body => boolean(),
+                       schema_converter => fun((hocon_schema:schema(), Module::atom()) -> map())
+                      }).

 -type(route_path() :: string() | binary()).
 -type(route_methods() :: map()).
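With the new option, a caller can swap in its own rendering of hocon types; a minimal sketch (the check_schema call is an illustrative default, the converter call mirrors gen_hot_conf_schema/1 shown earlier):

    %% Default behaviour, unchanged for existing callers:
    emqx_dashboard_swagger:spec(emqx_mgmt_api_configs, #{check_schema => true}),
    %% Opt-in converter, as gen_hot_conf_schema/1 above uses it:
    emqx_dashboard_swagger:spec(emqx_mgmt_api_configs,
                                #{schema_converter => fun hocon_schema_to_spec/2}).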
@@ -79,12 +82,12 @@ spec(Module, Options) ->
     Paths = apply(Module, paths, []),
     {ApiSpec, AllRefs} =
         lists:foldl(fun(Path, {AllAcc, AllRefsAcc}) ->
-            {OperationId, Specs, Refs} = parse_spec_ref(Module, Path),
+            {OperationId, Specs, Refs} = parse_spec_ref(Module, Path, Options),
             CheckSchema = support_check_schema(Options),
             {[{filename:join("/", Path), Specs, OperationId, CheckSchema} | AllAcc],
              Refs ++ AllRefsAcc}
         end, {[], []}, Paths),
-    {ApiSpec, components(lists:usort(AllRefs))}.
+    {ApiSpec, components(lists:usort(AllRefs), Options)}.

 -spec(namespace() -> hocon_schema:name()).
 namespace() -> "public".

@@ -162,7 +165,7 @@ support_check_schema(#{check_schema := Filter}) when is_function(Filter, 2) ->
 support_check_schema(_) ->
     #{filter => undefined}.

-parse_spec_ref(Module, Path) ->
+parse_spec_ref(Module, Path, Options) ->
     Schema =
         try
             erlang:apply(Module, schema, [Path])

@@ -172,7 +175,7 @@ parse_spec_ref(Module, Path) ->
     {Specs, Refs} = maps:fold(fun(Method, Meta, {Acc, RefsAcc}) ->
         (not lists:member(Method, ?METHODS))
             andalso throw({error, #{module => Module, path => Path, method => Method}}),
-        {Spec, SubRefs} = meta_to_spec(Meta, Module),
+        {Spec, SubRefs} = meta_to_spec(Meta, Module, Options),
         {Acc#{Method => Spec}, SubRefs ++ RefsAcc}
     end, {#{}, []},
     maps:without(['operationId'], Schema)),
|
@ -217,7 +220,7 @@ check_request_body(#{body := Body}, Schema, Module, CheckFun, true) ->
|
||||||
_ -> Type0
|
_ -> Type0
|
||||||
end,
|
end,
|
||||||
NewSchema = ?INIT_SCHEMA#{roots => [{root, Type}]},
|
NewSchema = ?INIT_SCHEMA#{roots => [{root, Type}]},
|
||||||
Option = #{nullable => true},
|
Option = #{required => false},
|
||||||
#{<<"root">> := NewBody} = CheckFun(NewSchema, #{<<"root">> => Body}, Option),
|
#{<<"root">> := NewBody} = CheckFun(NewSchema, #{<<"root">> => Body}, Option),
|
||||||
NewBody;
|
NewBody;
|
||||||
%% TODO not support nest object check yet, please use ref!
|
%% TODO not support nest object check yet, please use ref!
|
||||||
|
@ -239,10 +242,10 @@ check_request_body(#{body := Body}, Spec, _Module, _CheckFun, false)when is_map(
|
||||||
Body.
|
Body.
|
||||||
|
|
||||||
%% tags, description, summary, security, deprecated
|
%% tags, description, summary, security, deprecated
|
||||||
meta_to_spec(Meta, Module) ->
|
meta_to_spec(Meta, Module, Options) ->
|
||||||
{Params, Refs1} = parameters(maps:get(parameters, Meta, []), Module),
|
{Params, Refs1} = parameters(maps:get(parameters, Meta, []), Module),
|
||||||
{RequestBody, Refs2} = request_body(maps:get('requestBody', Meta, []), Module),
|
{RequestBody, Refs2} = request_body(maps:get('requestBody', Meta, []), Module),
|
||||||
{Responses, Refs3} = responses(maps:get(responses, Meta, #{}), Module),
|
{Responses, Refs3} = responses(maps:get(responses, Meta, #{}), Module, Options),
|
||||||
{
|
{
|
||||||
to_spec(Meta, Params, RequestBody, Responses),
|
to_spec(Meta, Params, RequestBody, Responses),
|
||||||
lists:usort(Refs1 ++ Refs2 ++ Refs3)
|
lists:usort(Refs1 ++ Refs2 ++ Refs3)
|
||||||
|
@ -265,30 +268,22 @@ parameters(Params, Module) ->
|
||||||
In = hocon_schema:field_schema(Type, in),
|
In = hocon_schema:field_schema(Type, in),
|
||||||
In =:= undefined andalso
|
In =:= undefined andalso
|
||||||
throw({error, <<"missing in:path/query field in parameters">>}),
|
throw({error, <<"missing in:path/query field in parameters">>}),
|
||||||
Nullable = hocon_schema:field_schema(Type, nullable),
|
Required = hocon_schema:field_schema(Type, required),
|
||||||
Default = hocon_schema:field_schema(Type, default),
|
Default = hocon_schema:field_schema(Type, default),
|
||||||
HoconType = hocon_schema:field_schema(Type, type),
|
HoconType = hocon_schema:field_schema(Type, type),
|
||||||
Meta = init_meta(Nullable, Default),
|
Meta = init_meta(Default),
|
||||||
{ParamType, Refs} = hocon_schema_to_spec(HoconType, Module),
|
{ParamType, Refs} = hocon_schema_to_spec(HoconType, Module),
|
||||||
Spec0 = init_prop([required | ?DEFAULT_FIELDS],
|
Spec0 = init_prop([required | ?DEFAULT_FIELDS],
|
||||||
#{schema => maps:merge(ParamType, Meta), name => Name, in => In}, Type),
|
#{schema => maps:merge(ParamType, Meta), name => Name, in => In}, Type),
|
||||||
Spec1 = trans_required(Spec0, Nullable, In),
|
Spec1 = trans_required(Spec0, Required, In),
|
||||||
Spec2 = trans_desc(Spec1, Type),
|
Spec2 = trans_desc(Spec1, Type),
|
||||||
{[Spec2 | Acc], Refs ++ RefsAcc}
|
{[Spec2 | Acc], Refs ++ RefsAcc}
|
||||||
end
|
end
|
||||||
end, {[], []}, Params),
|
end, {[], []}, Params),
|
||||||
{lists:reverse(SpecList), AllRefs}.
|
{lists:reverse(SpecList), AllRefs}.
|
||||||
|
|
||||||
init_meta(Nullable, Default) ->
|
init_meta(undefined) -> #{};
|
||||||
Init =
|
init_meta(Default) -> #{default => Default}.
|
||||||
case Nullable of
|
|
||||||
true -> #{nullable => true};
|
|
||||||
_ -> #{}
|
|
||||||
end,
|
|
||||||
case Default =:= undefined of
|
|
||||||
true -> Init;
|
|
||||||
false -> Init#{default => Default}
|
|
||||||
end.
|
|
||||||
|
|
||||||
init_prop(Keys, Init, Type) ->
|
init_prop(Keys, Init, Type) ->
|
||||||
lists:foldl(fun(Key, Acc) ->
|
lists:foldl(fun(Key, Acc) ->
|
||||||
|
@ -298,7 +293,7 @@ init_prop(Keys, Init, Type) ->
|
||||||
end
|
end
|
||||||
end, Init, Keys).
|
end, Init, Keys).
|
||||||
|
|
||||||
trans_required(Spec, false, _) -> Spec#{required => true};
|
trans_required(Spec, true, _) -> Spec#{required => true};
|
||||||
trans_required(Spec, _, path) -> Spec#{required => true};
|
trans_required(Spec, _, path) -> Spec#{required => true};
|
||||||
trans_required(Spec, _, _) -> Spec.
|
trans_required(Spec, _, _) -> Spec.
|
||||||
|
|
||||||
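Net effect of the clause change above, worked through the clauses as written:

    %% trans_required(Spec, true,  query) -> Spec#{required => true}   %% field says required => true
    %% trans_required(Spec, _,     path)  -> Spec#{required => true}   %% path params stay mandatory
    %% trans_required(Spec, false, query) -> Spec                      %% optional query parameter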
|
@ -317,28 +312,29 @@ request_body(Schema, Module) ->
|
||||||
HoconSchema = hocon_schema:field_schema(Schema, type),
|
HoconSchema = hocon_schema:field_schema(Schema, type),
|
||||||
SchemaExamples = hocon_schema:field_schema(Schema, examples),
|
SchemaExamples = hocon_schema:field_schema(Schema, examples),
|
||||||
{hocon_schema_to_spec(HoconSchema, Module), SchemaExamples};
|
{hocon_schema_to_spec(HoconSchema, Module), SchemaExamples};
|
||||||
false -> {parse_object(Schema, Module), undefined}
|
false -> {parse_object(Schema, Module, #{}), undefined}
|
||||||
end,
|
end,
|
||||||
{#{<<"content">> => content(Props, Examples)},
|
{#{<<"content">> => content(Props, Examples)},
|
||||||
Refs}.
|
Refs}.
|
||||||
|
|
||||||
responses(Responses, Module) ->
|
responses(Responses, Module, Options) ->
|
||||||
{Spec, Refs, _} = maps:fold(fun response/3, {#{}, [], Module}, Responses),
|
{Spec, Refs, _, _} = maps:fold(fun response/3, {#{}, [], Module, Options}, Responses),
|
||||||
{Spec, Refs}.
|
{Spec, Refs}.
|
||||||
|
|
||||||
response(Status, Bin, {Acc, RefsAcc, Module}) when is_binary(Bin) ->
|
response(Status, Bin, {Acc, RefsAcc, Module, Options}) when is_binary(Bin) ->
|
||||||
{Acc#{integer_to_binary(Status) => #{description => Bin}}, RefsAcc, Module};
|
{Acc#{integer_to_binary(Status) => #{description => Bin}}, RefsAcc, Module, Options};
|
||||||
%% Support swagger raw object(file download).
|
%% Support swagger raw object(file download).
|
||||||
%% TODO: multi type response(i.e. Support both 'application/json' and 'plain/text')
|
%% TODO: multi type response(i.e. Support both 'application/json' and 'plain/text')
|
||||||
response(Status, #{content := _} = Content, {Acc, RefsAcc, Module}) ->
|
response(Status, #{content := _} = Content, {Acc, RefsAcc, Module, Options}) ->
|
||||||
{Acc#{integer_to_binary(Status) => Content}, RefsAcc, Module};
|
{Acc#{integer_to_binary(Status) => Content}, RefsAcc, Module, Options};
|
||||||
response(Status, ?REF(StructName), {Acc, RefsAcc, Module}) ->
|
response(Status, ?REF(StructName), {Acc, RefsAcc, Module, Options}) ->
|
||||||
response(Status, ?R_REF(Module, StructName), {Acc, RefsAcc, Module});
|
response(Status, ?R_REF(Module, StructName), {Acc, RefsAcc, Module, Options});
|
||||||
response(Status, ?R_REF(_Mod, _Name) = RRef, {Acc, RefsAcc, Module}) ->
|
response(Status, ?R_REF(_Mod, _Name) = RRef, {Acc, RefsAcc, Module, Options}) ->
|
||||||
{Spec, Refs} = hocon_schema_to_spec(RRef, Module),
|
SchemaToSpec = schema_converter(Options),
|
||||||
|
{Spec, Refs} = SchemaToSpec(RRef, Module),
|
||||||
Content = content(Spec),
|
Content = content(Spec),
|
||||||
{Acc#{integer_to_binary(Status) => #{<<"content">> => Content}}, Refs ++ RefsAcc, Module};
|
{Acc#{integer_to_binary(Status) => #{<<"content">> => Content}}, Refs ++ RefsAcc, Module, Options};
|
||||||
response(Status, Schema, {Acc, RefsAcc, Module}) ->
|
response(Status, Schema, {Acc, RefsAcc, Module, Options}) ->
|
||||||
case hoconsc:is_schema(Schema) of
|
case hoconsc:is_schema(Schema) of
|
||||||
true ->
|
true ->
|
||||||
Hocon = hocon_schema:field_schema(Schema, type),
|
Hocon = hocon_schema:field_schema(Schema, type),
|
||||||
|
@ -348,33 +344,34 @@ response(Status, Schema, {Acc, RefsAcc, Module}) ->
|
||||||
Content = content(Spec, Examples),
|
Content = content(Spec, Examples),
|
||||||
{
|
{
|
||||||
Acc#{integer_to_binary(Status) => Init#{<<"content">> => Content}},
|
Acc#{integer_to_binary(Status) => Init#{<<"content">> => Content}},
|
||||||
Refs ++ RefsAcc, Module
|
Refs ++ RefsAcc, Module, Options
|
||||||
};
|
};
|
||||||
false ->
|
false ->
|
||||||
{Props, Refs} = parse_object(Schema, Module),
|
{Props, Refs} = parse_object(Schema, Module, Options),
|
||||||
Content = #{<<"content">> => content(Props)},
|
Init = trans_desc(#{}, Schema),
|
||||||
{Acc#{integer_to_binary(Status) => Content}, Refs ++ RefsAcc, Module}
|
Content = Init#{<<"content">> => content(Props)},
|
||||||
|
{Acc#{integer_to_binary(Status) => Content}, Refs ++ RefsAcc, Module, Options}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
components(Refs) ->
|
components(Refs, Options) ->
|
||||||
lists:sort(maps:fold(fun(K, V, Acc) -> [#{K => V} | Acc] end, [],
|
lists:sort(maps:fold(fun(K, V, Acc) -> [#{K => V} | Acc] end, [],
|
||||||
components(Refs, #{}, []))).
|
components(Options, Refs, #{}, []))).
|
||||||
|
|
||||||
components([], SpecAcc, []) -> SpecAcc;
|
components(_Options, [], SpecAcc, []) -> SpecAcc;
|
||||||
components([], SpecAcc, SubRefAcc) -> components(SubRefAcc, SpecAcc, []);
|
components(Options, [], SpecAcc, SubRefAcc) -> components(Options, SubRefAcc, SpecAcc, []);
|
||||||
components([{Module, Field} | Refs], SpecAcc, SubRefsAcc) ->
|
components(Options, [{Module, Field} | Refs], SpecAcc, SubRefsAcc) ->
|
||||||
Props = hocon_schema_fields(Module, Field),
|
Props = hocon_schema_fields(Module, Field),
|
||||||
Namespace = namespace(Module),
|
Namespace = namespace(Module),
|
||||||
{Object, SubRefs} = parse_object(Props, Module),
|
{Object, SubRefs} = parse_object(Props, Module, Options),
|
||||||
NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Object},
|
NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Object},
|
||||||
components(Refs, NewSpecAcc, SubRefs ++ SubRefsAcc);
|
components(Options, Refs, NewSpecAcc, SubRefs ++ SubRefsAcc);
|
||||||
%% parameters in ref only have one value, not array
|
%% parameters in ref only have one value, not array
|
||||||
components([{Module, Field, parameter} | Refs], SpecAcc, SubRefsAcc) ->
|
components(Options, [{Module, Field, parameter} | Refs], SpecAcc, SubRefsAcc) ->
|
||||||
Props = hocon_schema_fields(Module, Field),
|
Props = hocon_schema_fields(Module, Field),
|
||||||
{[Param], SubRefs} = parameters(Props, Module),
|
{[Param], SubRefs} = parameters(Props, Module),
|
||||||
Namespace = namespace(Module),
|
Namespace = namespace(Module),
|
||||||
NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Param},
|
NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Param},
|
||||||
components(Refs, NewSpecAcc, SubRefs ++ SubRefsAcc).
|
components(Options, Refs, NewSpecAcc, SubRefs ++ SubRefsAcc).
|
||||||
|
|
||||||
hocon_schema_fields(Module, StructName) ->
|
hocon_schema_fields(Module, StructName) ->
|
||||||
case apply(Module, fields, [StructName]) of
|
case apply(Module, fields, [StructName]) of
|
||||||
|
@ -425,7 +422,7 @@ hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
|
||||||
|
|
||||||
%% todo: Find a way to fetch enum value from user_id_type().
|
%% todo: Find a way to fetch enum value from user_id_type().
|
||||||
typename_to_spec("user_id_type()", _Mod) -> #{type => string, enum => [clientid, username]};
|
typename_to_spec("user_id_type()", _Mod) -> #{type => string, enum => [clientid, username]};
|
||||||
typename_to_spec("term()", _Mod) -> #{type => string, example => "term"};
|
typename_to_spec("term()", _Mod) -> #{type => string, example => "any"};
|
||||||
typename_to_spec("boolean()", _Mod) -> #{type => boolean, example => true};
|
typename_to_spec("boolean()", _Mod) -> #{type => boolean, example => true};
|
||||||
typename_to_spec("binary()", _Mod) -> #{type => string, example => <<"binary-example">>};
|
typename_to_spec("binary()", _Mod) -> #{type => string, example => <<"binary-example">>};
|
||||||
typename_to_spec("float()", _Mod) -> #{type => number, example => 3.14159};
|
typename_to_spec("float()", _Mod) -> #{type => number, example => 3.14159};
|
||||||
|
@ -444,7 +441,6 @@ typename_to_spec("epoch_millisecond()", _Mod) ->
|
||||||
#{type => integer, example => 1640995200000, desc => <<"epoch-millisecond">>},
|
#{type => integer, example => 1640995200000, desc => <<"epoch-millisecond">>},
|
||||||
#{type => string, example => <<"2022-01-01T00:00:00.000Z">>, format => <<"date-time">>}]
|
#{type => string, example => <<"2022-01-01T00:00:00.000Z">>, format => <<"date-time">>}]
|
||||||
};
|
};
|
||||||
typename_to_spec("unicode_binary()", _Mod) -> #{type => string, example => <<"unicode-binary">>};
|
|
||||||
typename_to_spec("duration()", _Mod) -> #{type => string, example => <<"12m">>};
|
typename_to_spec("duration()", _Mod) -> #{type => string, example => <<"12m">>};
|
||||||
typename_to_spec("duration_s()", _Mod) -> #{type => string, example => <<"1h">>};
|
typename_to_spec("duration_s()", _Mod) -> #{type => string, example => <<"1h">>};
|
||||||
typename_to_spec("duration_ms()", _Mod) -> #{type => string, example => <<"32s">>};
|
typename_to_spec("duration_ms()", _Mod) -> #{type => string, example => <<"32s">>};
|
||||||
|
@ -547,7 +543,7 @@ to_bin(Boolean) when is_boolean(Boolean) -> Boolean;
|
||||||
to_bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8);
|
to_bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8);
|
||||||
to_bin(X) -> X.
|
to_bin(X) -> X.
|
||||||
|
|
||||||
parse_object(PropList = [_ | _], Module) when is_list(PropList) ->
|
parse_object(PropList = [_ | _], Module, Options) when is_list(PropList) ->
|
||||||
{Props, Required, Refs} =
|
{Props, Required, Refs} =
|
||||||
lists:foldl(fun({Name, Hocon}, {Acc, RequiredAcc, RefsAcc}) ->
|
lists:foldl(fun({Name, Hocon}, {Acc, RequiredAcc, RefsAcc}) ->
|
||||||
NameBin = to_bin(Name),
|
NameBin = to_bin(Name),
|
||||||
|
@ -556,7 +552,8 @@ parse_object(PropList = [_ | _], Module) when is_list(PropList) ->
|
||||||
HoconType = hocon_schema:field_schema(Hocon, type),
|
HoconType = hocon_schema:field_schema(Hocon, type),
|
||||||
Init0 = init_prop([default | ?DEFAULT_FIELDS], #{}, Hocon),
|
Init0 = init_prop([default | ?DEFAULT_FIELDS], #{}, Hocon),
|
||||||
Init = trans_desc(Init0, Hocon),
|
Init = trans_desc(Init0, Hocon),
|
||||||
{Prop, Refs1} = hocon_schema_to_spec(HoconType, Module),
|
SchemaToSpec = schema_converter(Options),
|
||||||
|
{Prop, Refs1} = SchemaToSpec(HoconType, Module),
|
||||||
NewRequiredAcc =
|
NewRequiredAcc =
|
||||||
case is_required(Hocon) of
|
case is_required(Hocon) of
|
||||||
true -> [NameBin | RequiredAcc];
|
true -> [NameBin | RequiredAcc];
|
||||||
|
@ -564,7 +561,7 @@ parse_object(PropList = [_ | _], Module) when is_list(PropList) ->
|
||||||
end,
|
end,
|
||||||
{[{NameBin, maps:merge(Prop, Init)} | Acc], NewRequiredAcc, Refs1 ++ RefsAcc};
|
{[{NameBin, maps:merge(Prop, Init)} | Acc], NewRequiredAcc, Refs1 ++ RefsAcc};
|
||||||
false ->
|
false ->
|
||||||
{SubObject, SubRefs} = parse_object(Hocon, Module),
|
{SubObject, SubRefs} = parse_object(Hocon, Module, Options),
|
||||||
{[{NameBin, SubObject} | Acc], RequiredAcc, SubRefs ++ RefsAcc}
|
{[{NameBin, SubObject} | Acc], RequiredAcc, SubRefs ++ RefsAcc}
|
||||||
end
|
end
|
||||||
end, {[], [], []}, PropList),
|
end, {[], [], []}, PropList),
|
||||||
|
@ -573,14 +570,13 @@ parse_object(PropList = [_ | _], Module) when is_list(PropList) ->
|
||||||
[] -> {Object, Refs};
|
[] -> {Object, Refs};
|
||||||
_ -> {maps:put(required, Required, Object), Refs}
|
_ -> {maps:put(required, Required, Object), Refs}
|
||||||
end;
|
end;
|
||||||
parse_object(Other, Module) ->
|
parse_object(Other, Module, Options) ->
|
||||||
erlang:throw({error,
|
erlang:throw({error,
|
||||||
#{msg => <<"Object only supports not empty proplists">>,
|
#{msg => <<"Object only supports not empty proplists">>,
|
||||||
args => Other, module => Module}}).
|
args => Other, module => Module, options => Options}}).
|
||||||
|
|
||||||
is_required(Hocon) ->
|
is_required(Hocon) ->
|
||||||
hocon_schema:field_schema(Hocon, required) =:= true orelse
|
hocon_schema:field_schema(Hocon, required) =:= true.
|
||||||
hocon_schema:field_schema(Hocon, nullable) =:= false.
|
|
||||||
|
|
||||||
content(ApiSpec) ->
|
content(ApiSpec) ->
|
||||||
content(ApiSpec, undefined).
|
content(ApiSpec, undefined).
|
||||||
|
@ -593,3 +589,6 @@ content(ApiSpec, Examples) when is_map(Examples) ->
|
||||||
to_ref(Mod, StructName, Acc, RefsAcc) ->
|
to_ref(Mod, StructName, Acc, RefsAcc) ->
|
||||||
Ref = #{<<"$ref">> => ?TO_COMPONENTS_PARAM(Mod, StructName)},
|
Ref = #{<<"$ref">> => ?TO_COMPONENTS_PARAM(Mod, StructName)},
|
||||||
{[Ref | Acc], [{Mod, StructName, parameter} | RefsAcc]}.
|
{[Ref | Acc], [{Mod, StructName, parameter} | RefsAcc]}.
|
||||||
|
|
||||||
|
schema_converter(Options) ->
|
||||||
|
maps:get(schema_converter, Options, fun hocon_schema_to_spec/2).
|
||||||
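The hunks above thread an Options map through response/3, parse_object/3 and components/2, and the new schema_converter/1 helper falls back to hocon_schema_to_spec/2 when no converter is given. A minimal sketch of supplying a custom converter; everything except parse_spec_ref/3, components/2 and the schema_converter key is hypothetical:

    -module(swagger_options_sketch).
    -export([spec_for/2]).

    %% A converter receives the HOCON type and the schema module and returns
    %% {PropertyMap, Refs}, the same contract the default hocon_schema_to_spec/2
    %% satisfies in the hunk above.
    plain_string_converter(_HoconType, _Module) ->
        {#{type => string}, []}.

    spec_for(Module, Path) ->
        Options = #{schema_converter => fun plain_string_converter/2},
        {_OperationId, Spec, Refs} =
            emqx_dashboard_swagger:parse_spec_ref(Module, Path, Options),
        {Spec, emqx_dashboard_swagger:components(Refs, Options)}.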

@@ -50,8 +50,8 @@ t_ref(_Config) ->
 LocalPath = "/test/in/ref/local",
 Path = "/test/in/ref",
 Expect = [#{<<"$ref">> => <<"#/components/parameters/emqx_swagger_parameter_SUITE.page">>}],
-{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
-{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, LocalPath),
+{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}),
+{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, LocalPath, #{}),
 ?assertEqual(test, OperationId),
 Params = maps:get(parameters, maps:get(post, Spec)),
 ?assertEqual(Expect, Params),
@@ -64,7 +64,7 @@ t_public_ref(_Config) ->
 #{<<"$ref">> => <<"#/components/parameters/public.page">>},
 #{<<"$ref">> => <<"#/components/parameters/public.limit">>}
 ],
-{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
+{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}),
 ?assertEqual(test, OperationId),
 Params = maps:get(parameters, maps:get(post, Spec)),
 ?assertEqual(Expect, Params),
@@ -80,7 +80,7 @@ t_public_ref(_Config) ->
 #{<<"public.page">> => #{description => <<"Page number of the results to fetch.">>,
 example => 1,in => query,name => page,
 schema => #{default => 1,example => 100,type => integer}}}],
-?assertEqual(ExpectRefs, emqx_dashboard_swagger:components(Refs)),
+?assertEqual(ExpectRefs, emqx_dashboard_swagger:components(Refs,#{})),
 ok.

 t_in_mix(_Config) ->
@@ -110,7 +110,7 @@ t_in_mix(_Config) ->

 t_without_in(_Config) ->
 ?assertThrow({error, <<"missing in:path/query field in parameters">>},
-emqx_dashboard_swagger:parse_spec_ref(?MODULE, "/test/without/in")),
+emqx_dashboard_swagger:parse_spec_ref(?MODULE, "/test/without/in", #{})),
 ok.

 t_require(_Config) ->
@@ -124,8 +124,7 @@ t_nullable(_Config) ->
 NullableFalse = [#{in => query,name => userid, required => true,
 schema => #{example => <<"binary-example">>, type => string}}],
 NullableTrue = [#{in => query,name => userid,
-schema => #{example => <<"binary-example">>, type => string,
-nullable => true}}],
+schema => #{example => <<"binary-example">>, type => string}, required => false}],
 validate("/nullable/false", NullableFalse),
 validate("/nullable/true", NullableTrue),
 ok.
@@ -133,10 +132,10 @@
 t_method(_Config) ->
 PathOk = "/method/ok",
 PathError = "/method/error",
-{test, Spec, []} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, PathOk),
+{test, Spec, []} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, PathOk, #{}),
 ?assertEqual(lists:sort(?METHODS), lists:sort(maps:keys(Spec))),
 ?assertThrow({error, #{module := ?MODULE, path := PathError, method := bar}},
-emqx_dashboard_swagger:parse_spec_ref(?MODULE, PathError)),
+emqx_dashboard_swagger:parse_spec_ref(?MODULE, PathError, #{})),
 ok.

 t_in_path_trans(_Config) ->
@@ -242,7 +241,7 @@ assert_all_filters_equal(Spec, Filter) ->
 Spec).

 validate(Path, ExpectParams) ->
-{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
+{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}),
 ?assertEqual(test, OperationId),
 Params = maps:get(parameters, maps:get(post, Spec)),
 ?assertEqual(ExpectParams, Params),
@@ -358,9 +357,9 @@ schema("/test/without/in") ->
 schema("/required/false") ->
 to_schema([{'userid', mk(binary(), #{in => query, required => false})}]);
 schema("/nullable/false") ->
-to_schema([{'userid', mk(binary(), #{in => query, nullable => false})}]);
+to_schema([{'userid', mk(binary(), #{in => query, required => true})}]);
 schema("/nullable/true") ->
-to_schema([{'userid', mk(binary(), #{in => query, nullable => true})}]);
+to_schema([{'userid', mk(binary(), #{in => query, required => false})}]);
 schema("/method/ok") ->
 Response = #{responses => #{200 => <<"ok">>}},
 lists:foldl(fun(Method, Acc) -> Acc#{Method => Response} end,
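The t_nullable expectations above spell out the new mapping: a query field marked required => true keeps required => true on the generated parameter, while an optional one gets a top-level required => false instead of a nullable flag inside its schema map. Restated as a stand-alone sketch (module and function names invented, literals copied from the assertions):

    -module(swagger_param_expectation_sketch).
    -export([mandatory/0, optional/0]).

    %% What the spec generator is asserted to produce for
    %% mk(binary(), #{in => query, required => true}).
    mandatory() ->
        [#{in => query, name => userid, required => true,
           schema => #{example => <<"binary-example">>, type => string}}].

    %% ...and for mk(binary(), #{in => query, required => false}):
    %% 'required' is a sibling of 'schema', not a flag inside it.
    optional() ->
        [#{in => query, name => userid,
           schema => #{example => <<"binary-example">>, type => string},
           required => false}].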

@@ -50,10 +50,10 @@ fields("ref3") ->

 default_username(type) -> string();
 default_username(default) -> "admin";
-default_username(nullable) -> false;
+default_username(required) -> true;
 default_username(_) -> undefined.

 default_password(type) -> string();
 default_password(default) -> "public";
-default_password(nullable) -> false;
+default_password(required) -> true;
 default_password(_) -> undefined.

@@ -151,7 +151,7 @@ t_nest_ref(_Config) ->
 t_none_ref(_Config) ->
 Path = "/ref/none",
 ?assertThrow({error, #{mfa := {?MODULE, schema, [Path]}}},
-emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path)),
+emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{})),
 ok.

 t_sub_fields(_Config) ->
@@ -472,11 +472,11 @@ t_object_trans_error(_Config) ->
 ok.

 validate(Path, ExpectSpec, ExpectRefs) ->
-{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
+{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}),
 ?assertEqual(test, OperationId),
 ?assertEqual(ExpectSpec, Spec),
 ?assertEqual(ExpectRefs, Refs),
-{Spec, emqx_dashboard_swagger:components(Refs)}.
+{Spec, emqx_dashboard_swagger:components(Refs, #{})}.


 filter(ApiSpec, Path) ->
@@ -499,16 +499,16 @@ paths() ->

 schema("/object") ->
 to_schema([
-{per_page, mk(range(1, 100), #{nullable => false, desc => <<"good per page desc">>})},
+{per_page, mk(range(1, 100), #{required => true, desc => <<"good per page desc">>})},
 {timeout, mk(hoconsc:union([infinity, emqx_schema:duration_s()]),
-#{default => 5, nullable => false})},
+#{default => 5, required => true})},
 {inner_ref, mk(hoconsc:ref(?MODULE, good_ref), #{})}
 ]);
 schema("/nest/object") ->
 to_schema([
 {per_page, mk(range(1, 100), #{desc => <<"good per page desc">>})},
 {timeout, mk(hoconsc:union([infinity, emqx_schema:duration_s()]),
-#{default => 5, nullable => false})},
+#{default => 5, required => true})},
 {nest_object, [
 {good_nest_1, mk(integer(), #{})},
 {good_nest_2, mk(hoconsc:ref(?MODULE, good_ref), #{})}
@@ -572,5 +572,5 @@ enable(_) -> undefined.

 init_file(type) -> binary();
 init_file(desc) -> <<"test test desc">>;
-init_file(nullable) -> true;
+init_file(required) -> false;
 init_file(_) -> undefined.

@@ -69,7 +69,7 @@ t_error(_Config) ->
 {<<"message">>, #{description => <<"Details description of the error.">>,
 example => <<"Error code to troubleshoot problems.">>, type => string}}]
 }}}},
-{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
+{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}),
 ?assertEqual(test, OperationId),
 Response = maps:get(responses, maps:get(get, Spec)),
 ?assertEqual(Error400, maps:get(<<"400">>, Response)),
@@ -197,7 +197,7 @@ t_complicated_type(_Config) ->
 {<<"fix_integer">>, #{default => 100, enum => [100], example => 100,type => integer}}
 ],
 <<"type">> => object}}}},
-{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
+{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}),
 ?assertEqual(test, OperationId),
 Response = maps:get(responses, maps:get(post, Spec)),
 ?assertEqual(Object, maps:get(<<"200">>, Response)),
@@ -299,9 +299,9 @@ schema("/simple/bin") ->
 to_schema(<<"binary ok">>);
 schema("/object") ->
 Object = [
-{per_page, mk(range(1, 100), #{nullable => false, desc => <<"good per page desc">>})},
+{per_page, mk(range(1, 100), #{required => true, desc => <<"good per page desc">>})},
 {timeout, mk(hoconsc:union([infinity, emqx_schema:duration_s()]),
-#{default => 5, nullable => false})},
+#{default => 5, required => true})},
 {inner_ref, mk(hoconsc:ref(?MODULE, good_ref), #{})}
 ],
 to_schema(Object);
@@ -309,7 +309,7 @@ schema("/nest/object") ->
 Response = [
 {per_page, mk(range(1, 100), #{desc => <<"good per page desc">>})},
 {timeout, mk(hoconsc:union([infinity, emqx_schema:duration_s()]),
-#{default => 5, nullable => false})},
+#{default => 5, required => true})},
 {nest_object, [
 {good_nest_1, mk(integer(), #{})},
 {good_nest_2, mk(hoconsc:ref(?MODULE, good_ref), #{})}
@@ -379,14 +379,14 @@ schema("/fields/sub") ->
 to_schema(hoconsc:ref(sub_fields)).

 validate(Path, ExpectObject, ExpectRefs) ->
-{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
+{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}),
 ?assertEqual(test, OperationId),
 Response = maps:get(responses, maps:get(post, Spec)),
 ?assertEqual(ExpectObject, maps:get(<<"200">>, Response)),
 ?assertEqual(ExpectObject, maps:get(<<"201">>, Response)),
 ?assertEqual(#{}, maps:without([<<"201">>, <<"200">>], Response)),
 ?assertEqual(ExpectRefs, Refs),
-{Spec, emqx_dashboard_swagger:components(Refs)}.
+{Spec, emqx_dashboard_swagger:components(Refs, #{})}.

 to_schema(Object) ->
 #{
@@ -425,5 +425,5 @@ enable(_) -> undefined.

 init_file(type) -> binary();
 init_file(desc) -> <<"test test desc">>;
-init_file(nullable) -> true;
+init_file(required) -> false;
 init_file(_) -> undefined.

@@ -219,7 +219,7 @@ params_gateway_status_in_qs() ->
 [{status,
 mk(binary(),
 #{ in => query
-, nullable => true
+, required => false
 , desc => <<"Gateway Status">>
 , example => <<"">>
 })}
@@ -245,11 +245,11 @@ fields(gateway_overview) ->
 #{desc => <<"The Gateway created datetime">>})}
 , {started_at,
 mk(binary(),
-#{ nullable => true
+#{ required => false
 , desc => <<"The Gateway started datetime">>})}
 , {stopped_at,
 mk(binary(),
-#{ nullable => true
+#{ required => false
 , desc => <<"The Gateway stopped datetime">>})}
 , {max_connections,
 mk(integer(),
@@ -260,7 +260,7 @@ fields(gateway_overview) ->
 })}
 , {listeners,
 mk(hoconsc:array(ref(gateway_listener_overview)),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc => <<"The Gateway listeners overview">>})}
 ];
 fields(gateway_listener_overview) ->
@@ -295,7 +295,7 @@ fields(Listener) when Listener == tcp_listener;
 Listener == dtls_listener ->
 [ {id,
 mk(binary(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener ID">>})}
 , {type,
 mk(hoconsc:union([tcp, ssl, udp, dtls]),
@@ -305,7 +305,7 @@ fields(Listener) when Listener == tcp_listener;
 #{ desc => <<"Listener Name">>})}
 , {running,
 mk(boolean(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener running status">>})}
 ] ++ emqx_gateway_schema:fields(Listener);

@@ -334,7 +334,7 @@ convert_listener_struct(Schema) ->
 {value, {listeners,
 #{type := Type}}, Schema1} = lists:keytake(listeners, 1, Schema),
 ListenerSchema = hoconsc:mk(listeners_schema(Type),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc => <<"The gateway listeners">>
 }),
 lists:keystore(listeners, 1, Schema1, {listeners, ListenerSchema}).
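The listeners field above uses the tuple form required => {false, recursively}, replacing nullable => {true, recursively}; as with the old form, the point appears to be that the whole referenced sub-structure may be omitted, not just the field itself. A sketch contrasting the two forms, with hypothetical field names and the plain hoconsc helpers:

    -module(required_forms_sketch).
    -export([fields/1]).

    -include_lib("typerefl/include/types.hrl").

    fields(example) ->
        [ %% only the field itself is optional
          {note, hoconsc:mk(binary(), #{required => false})}
          %% the field and the whole referenced struct below it are optional
        , {overview, hoconsc:mk(hoconsc:ref(?MODULE, overview),
                                #{required => {false, recursively}})}
        ];
    fields(overview) ->
        [{running, hoconsc:mk(boolean(), #{required => true})}].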

@@ -287,13 +287,13 @@ params_userid_in_path() ->
 params_paging_in_qs() ->
 [{page, mk(integer(),
 #{ in => query
-, nullable => true
+, required => false
 , desc => <<"Page Index">>
 , example => 1
 })},
 {limit, mk(integer(),
 #{ in => query
-, nullable => true
+, required => false
 , desc => <<"Page Limit">>
 , example => 100
 })}

@@ -468,7 +468,7 @@ params_client_insta() ->
 ++ params_gateway_name_in_path().

 params_client_searching_in_qs() ->
-M = #{in => query, nullable => true, example => <<"">>},
+M = #{in => query, required => false, example => <<"">>},
 [ {node,
 mk(binary(),
 M#{desc => <<"Match the client's node name">>})}
@@ -532,7 +532,7 @@ params_paging() ->
 [ {page,
 mk(integer(),
 #{ in => query
-, nullable => true
+, required => false
 , desc => <<"Page Index">>
 , example => 1
 })}
@@ -540,7 +540,7 @@
 mk(integer(),
 #{ in => query
 , desc => <<"Page Limit">>
-, nullable => true
+, required => false
 , example => 100
 })}
 ].

@@ -434,13 +434,13 @@ params_userid_in_path() ->
 params_paging_in_qs() ->
 [{page, mk(integer(),
 #{ in => query
-, nullable => true
+, required => false
 , desc => <<"Page Index">>
 , example => 1
 })},
 {limit, mk(integer(),
 #{ in => query
-, nullable => true
+, required => false
 , desc => <<"Page Limit">>
 , example => 100
 })}
@@ -457,22 +457,22 @@ fields(listener) ->
 common_listener_opts() ++
 [ {tcp,
 mk(ref(tcp_listener_opts),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc => <<"The tcp socket options for tcp or ssl listener">>
 })}
 , {ssl,
 mk(ref(ssl_listener_opts),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc => <<"The ssl socket options for ssl listener">>
 })}
 , {udp,
 mk(ref(udp_listener_opts),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc => <<"The udp socket options for udp or dtls listener">>
 })}
 , {dtls,
 mk(ref(dtls_listener_opts),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc => <<"The dtls socket options for dtls listener">>
 })}
 ];
@@ -529,47 +529,47 @@ lists_key_without([K | Ks], N, L) ->
 common_listener_opts() ->
 [ {enable,
 mk(boolean(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Whether to enable this listener">>})}
 , {id,
 mk(binary(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener Id">>})}
 , {name,
 mk(binary(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener name">>})}
 , {type,
 mk(hoconsc:enum([tcp, ssl, udp, dtls]),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener type. Enum: tcp, udp, ssl, dtls">>})}
 , {running,
 mk(boolean(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener running status">>})}
 , {bind,
 mk(binary(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener bind address or port">>})}
 , {acceptors,
 mk(integer(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener acceptors number">>})}
 , {access_rules,
 mk(hoconsc:array(binary()),
-#{ nullable => true
+#{ required => false
 , desc => <<"Listener Access rules for client">>})}
 , {max_conn_rate,
 mk(integer(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Max connection rate for the listener">>})}
 , {max_connections,
 mk(integer(),
-#{ nullable => true
+#{ required => false
 , desc => <<"Max connections for the listener">>})}
 , {mountpoint,
 mk(binary(),
-#{ nullable => true
+#{ required => false
 , desc =>
 <<"The Mounpoint for clients of the listener. "
 "The gateway-level mountpoint configuration can be overloaded "
@@ -577,7 +577,7 @@ common_listener_opts() ->
 %% FIXME:
 , {authentication,
 mk(emqx_authn_schema:authenticator_type(),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc => <<"The authenticatior for this listener">>
 })}
 ] ++ emqx_gateway_schema:proxy_protocol_opts().
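params_client_searching_in_qs above builds a single base map for all of its optional query parameters and specialises it per field with the M#{...} update syntax, which overrides only the keys it names. A small, self-contained illustration of that idiom (all names invented):

    -module(shared_param_map_sketch).
    -export([params/0]).

    params() ->
        %% one base map shared by every optional query parameter
        Base = #{in => query, required => false, example => <<"">>},
        %% Base#{...} copies the map and overrides only the listed keys
        [ {node,  Base#{desc => <<"node name filter">>}}
        , {topic, Base#{desc => <<"topic filter">>}}
        ].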

@@ -59,21 +59,21 @@ roots() -> [gateway].
 fields(gateway) ->
 [{stomp,
 sc(ref(stomp),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc =>
 "The Stomp Gateway configuration.<br>
 This gateway supports v1.2/1.1/1.0"
 })},
 {mqttsn,
 sc(ref(mqttsn),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc =>
 "The MQTT-SN Gateway configuration.<br>
 This gateway only supports the v1.2 protocol"
 })},
 {coap,
 sc(ref(coap),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc =>
 "The CoAP Gateway configuration.<br>
 This gateway is implemented based on RFC-7252 and
@@ -81,14 +81,14 @@ https://core-wg.github.io/coap-pubsub/draft-ietf-core-pubsub.html"
 })},
 {lwm2m,
 sc(ref(lwm2m),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc =>
 "The LwM2M Gateway configuration.<br>
 This gateway only supports the v1.0.1 protocol"
 })},
 {exproto,
 sc(ref(exproto),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc => "The Extension Protocol configuration"
 })}
 ];
@@ -120,7 +120,7 @@ fields(mqttsn) ->
 [ {gateway_id,
 sc(integer(),
 #{ default => 1
-, nullable => false
+, required => true
 , desc =>
 "MQTT-SN Gateway ID.<br>
 When the <code>broadcast</code> option is enabled,
@@ -145,7 +145,7 @@ The client just sends its 'PUBLISH' messages to a GW"
 , {predefined,
 sc(hoconsc:array(ref(mqttsn_predefined)),
 #{ default => []
-, nullable => {true, recursively}
+, required => {false, recursively}
 , desc =>
 <<"The pre-defined topic IDs and topic names.<br>
 A 'pre-defined' topic ID is a topic ID whose mapping to a topic name
@@ -221,7 +221,7 @@ fields(lwm2m) ->
 [ {xml_dir,
 sc(binary(),
 #{ default =>"etc/lwm2m_xml"
-, nullable => false
+, required => true
 , desc => "The Directory for LwM2M Resource definition"
 })}
 , {lifetime_min,
@@ -261,7 +261,7 @@ beyond this time window are temporarily stored in memory."
 })}
 , {translators,
 sc(ref(lwm2m_translators),
-#{ nullable => false
+#{ required => true
 , desc => "Topic configuration for LwM2M's gateway publishing and subscription"
 })}
 , {listeners, sc(ref(udp_listeners))}
@@ -270,12 +270,12 @@ beyond this time window are temporarily stored in memory."
 fields(exproto) ->
 [ {server,
 sc(ref(exproto_grpc_server),
-#{ nullable => false
+#{ required => true
 , desc => "Configurations for starting the <code>ConnectionAdapter</code> service"
 })}
 , {handler,
 sc(ref(exproto_grpc_handler),
-#{ nullable => false
+#{ required => true
 , desc => "Configurations for request to <code>ConnectionHandler</code> service"
 })}
 , {listeners, sc(ref(udp_tcp_listeners))}
@@ -284,18 +284,18 @@ fields(exproto) ->
 fields(exproto_grpc_server) ->
 [ {bind,
 sc(hoconsc:union([ip_port(), integer()]),
-#{nullable => false})}
+#{required => true})}
 , {ssl,
 sc(ref(ssl_server_opts),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 })}
 ];

 fields(exproto_grpc_handler) ->
-[ {address, sc(binary(), #{nullable => false})}
+[ {address, sc(binary(), #{required => true})}
 , {ssl,
 sc(ref(ssl_client_opts),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 })}
 ];

@@ -324,13 +324,13 @@ fields(lwm2m_translators) ->
 For each new LwM2M client that succeeds in going online, the gateway creates
 a subscription relationship to receive downstream commands and send it to
 the LwM2M client"
-, nullable => false
+, required => true
 })}
 , {response,
 sc(ref(translator),
 #{ desc =>
 "The topic for gateway to publish the acknowledge events from LwM2M client"
-, nullable => false
+, required => true
 })}
 , {notify,
 sc(ref(translator),
@@ -338,24 +338,24 @@ the LwM2M client"
 "The topic for gateway to publish the notify events from LwM2M client.<br>
 After succeed observe a resource of LwM2M client, Gateway will send the
 notify events via this topic, if the client reports any resource changes"
-, nullable => false
+, required => true
 })}
 , {register,
 sc(ref(translator),
 #{ desc =>
 "The topic for gateway to publish the register events from LwM2M client.<br>"
-, nullable => false
+, required => true
 })}
 , {update,
 sc(ref(translator),
 #{ desc =>
 "The topic for gateway to publish the update events from LwM2M client.<br>"
-, nullable => false
+, required => true
 })}
 ];

 fields(translator) ->
-[ {topic, sc(binary(), #{nullable => false})}
+[ {topic, sc(binary(), #{required => true})}
 , {qos, sc(emqx_schema:qos(), #{default => 0})}
 ];

@@ -423,7 +423,7 @@ fields(dtls_opts) ->

 authentication_schema() ->
 sc(emqx_authn_schema:authenticator_type(),
-#{ nullable => {true, recursively}
+#{ required => {false, recursively}
 , desc =>
 """Default authentication configs for all the gateway listeners.<br>
 For per-listener overrides see <code>authentication</code>

@@ -44,7 +44,7 @@ schema("/alarms") ->
 hoconsc:ref(emqx_dashboard_swagger, limit),
 {activated, hoconsc:mk(boolean(), #{in => query,
 desc => <<"All alarms, if not specified">>,
-nullable => true})}
+required => false})}
 ],
 responses => #{
 200 => [

@@ -94,7 +94,7 @@ fields(app) ->
 {expired_at, hoconsc:mk(hoconsc:union([undefined, emqx_datetime:epoch_second()]),
 #{desc => "No longer valid datetime",
 example => <<"2021-12-05T02:01:34.186Z">>,
-nullable => true,
+required => false,
 default => undefined
 })},
 {created_at, hoconsc:mk(emqx_datetime:epoch_second(),
@@ -102,8 +102,8 @@ fields(app) ->
 example => <<"2021-12-01T00:00:00.000Z">>
 })},
 {desc, hoconsc:mk(binary(),
-#{example => <<"Note">>, nullable => true})},
-{enable, hoconsc:mk(boolean(), #{desc => "Enable/Disable", nullable => true})}
+#{example => <<"Note">>, required => false})},
+{enable, hoconsc:mk(boolean(), #{desc => "Enable/Disable", required => false})}
 ];
 fields(name) ->
 [{name, hoconsc:mk(binary(),

@@ -101,27 +101,27 @@ fields(ban) ->
 [
 {as, hoconsc:mk(hoconsc:enum(?BANNED_TYPES), #{
 desc => <<"Banned type clientid, username, peerhost">>,
-nullable => false,
+required => true,
 example => username})},
 {who, hoconsc:mk(binary(), #{
 desc => <<"Client info as banned type">>,
-nullable => false,
+required => true,
 example => <<"Banned name"/utf8>>})},
 {by, hoconsc:mk(binary(), #{
 desc => <<"Commander">>,
-nullable => true,
+required => false,
 example => <<"mgmt_api">>})},
 {reason, hoconsc:mk(binary(), #{
 desc => <<"Banned reason">>,
-nullable => true,
+required => false,
 example => <<"Too many requests">>})},
 {at, hoconsc:mk(emqx_datetime:epoch_second(), #{
 desc => <<"Create banned time, rfc3339, now if not specified">>,
-nullable => true,
+required => false,
 example => <<"2021-10-25T21:48:47+08:00">>})},
 {until, hoconsc:mk(emqx_datetime:epoch_second(), #{
 desc => <<"Cancel banned time, rfc3339, now + 5 minute if not specified">>,
-nullable => true,
+required => false,
 example => <<"2021-10-25T21:53:47+08:00">>})
 }
 ];

@@ -93,65 +93,65 @@ schema("/clients") ->
 hoconsc:ref(emqx_dashboard_swagger, limit),
 {node, hoconsc:mk(binary(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Node name">>,
 example => atom_to_list(node())})},
 {username, hoconsc:mk(binary(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"User name">>})},
 {zone, hoconsc:mk(binary(), #{
 in => query,
-nullable => true})},
+required => false})},
 {ip_address, hoconsc:mk(binary(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Client's IP address">>,
 example => <<"127.0.0.1">>})},
 {conn_state, hoconsc:mk(hoconsc:enum([connected, idle, disconnected]), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"The current connection status of the client, ",
 "the possible values are connected,idle,disconnected">>})},
 {clean_start, hoconsc:mk(boolean(), #{
 in => query,
-nullable => true,
+required => false,
 description => <<"Whether the client uses a new session">>})},
 {proto_name, hoconsc:mk(hoconsc:enum(['MQTT', 'CoAP', 'LwM2M', 'MQTT-SN']), #{
 in => query,
-nullable => true,
+required => false,
 description => <<"Client protocol name, ",
 "the possible values are MQTT,CoAP,LwM2M,MQTT-SN">>})},
 {proto_ver, hoconsc:mk(binary(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Client protocol version">>})},
 {like_clientid, hoconsc:mk(binary(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Fuzzy search of client identifier by substring method">>})},
 {like_username, hoconsc:mk(binary(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Client user name, fuzzy search by substring">>})},
 {gte_created_at, hoconsc:mk(emqx_datetime:epoch_millisecond(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Search client session creation time by greater",
 " than or equal method, rfc3339 or timestamp(millisecond)">>})},
 {lte_created_at, hoconsc:mk(emqx_datetime:epoch_millisecond(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Search client session creation time by less",
 " than or equal method, rfc3339 or timestamp(millisecond)">>})},
 {gte_connected_at, hoconsc:mk(emqx_datetime:epoch_millisecond(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Search client connection creation time by greater"
 " than or equal method, rfc3339 or timestamp(epoch millisecond)">>})},
 {lte_connected_at, hoconsc:mk(emqx_datetime:epoch_millisecond(), #{
 in => query,
-nullable => true,
+required => false,
 desc => <<"Search client connection creation time by less"
 " than or equal method, rfc3339 or timestamp(millisecond)">>})}
 ],

@@ -19,7 +19,7 @@
 -include_lib("hocon/include/hoconsc.hrl").
 -behaviour(minirest_api).

--export([api_spec/0]).
+-export([api_spec/0, namespace/0]).
 -export([paths/0, schema/1, fields/1]).

 -export([config/3, config_reset/3, configs/3, get_full_config/0]).
@@ -29,15 +29,31 @@
 -define(PREFIX, "/configs/").
 -define(PREFIX_RESET, "/configs_reset/").
 -define(ERR_MSG(MSG), list_to_binary(io_lib:format("~p", [MSG]))).
--define(EXCLUDES, [listeners, node, cluster, gateway, rule_engine]).
+-define(EXCLUDES, [
+exhook,
+gateway,
+plugins,
+bridges,
+"rule_engine",
+"authorization",
+"authentication",
+"rpc",
+"db",
+"connectors",
+"slow_subs",
+"psk_authentication"
+]).

 api_spec() ->
 emqx_dashboard_swagger:spec(?MODULE).

+namespace() -> "configuration".

 paths() ->
 ["/configs", "/configs_reset/:rootname"] ++
 lists:map(fun({Name, _Type}) -> ?PREFIX ++ to_list(Name) end, config_list(?EXCLUDES)).


 schema("/configs") ->
 #{
 'operationId' => configs,
@@ -51,12 +67,15 @@ schema("/configs") ->
 desc =>
 <<"Node's name: If you do not fill in the fields, this node will be used by default.">>})}],
 responses => #{
-200 => config_list([])
+200 => config_list(?EXCLUDES)
 }
 }
 };
 schema("/configs_reset/:rootname") ->
-Paths = lists:map(fun({Path, _}) -> Path end, config_list(?EXCLUDES)),
+Paths = lists:map(
+fun({Path, _})when is_atom(Path) -> Path;
+({Path, _}) when is_list(Path) -> list_to_atom(Path)
+end, config_list(?EXCLUDES)),
 #{
 'operationId' => config_reset,
 post => #{
@@ -186,7 +205,7 @@ conf_path_from_querystr(Req) ->

 config_list(Exclude) ->
 Roots = emqx_conf_schema:roots(),
-lists:foldl(fun(Key, Acc) -> lists:delete(Key, Acc) end, Roots, Exclude).
+lists:foldl(fun(Key, Acc) -> lists:keydelete(Key, 1, Acc) end, Roots, Exclude).

 to_list(L) when is_list(L) -> L;
 to_list(Atom) when is_atom(Atom) -> atom_to_list(Atom).
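config_list/1 now deletes by tuple key: the roots are {Name, Schema} pairs (paths/0 above pattern-matches {Name, _Type}), so lists:delete/2 on a bare name would leave those tuples untouched, and ?EXCLUDES mixes atom and string keys. A runnable sketch with made-up root data:

    -module(config_list_sketch).
    -export([run/0]).

    run() ->
        %% shape only: real roots come from emqx_conf_schema:roots()
        Roots = [{gateway, schema_a}, {"rule_engine", schema_b}, {"log", schema_c}],
        Excludes = [gateway, "rule_engine"],
        %% keydelete/3 matches on the first tuple element, so both the atom and
        %% the string keys are removed. Result here: [{"log", schema_c}]
        lists:foldl(fun(Key, Acc) -> lists:keydelete(Key, 1, Acc) end, Roots, Excludes).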
|
|
|
@ -71,7 +71,7 @@ schema("/metrics") ->
|
||||||
[{ aggregate
|
[{ aggregate
|
||||||
, mk( boolean()
|
, mk( boolean()
|
||||||
, #{ in => query
|
, #{ in => query
|
||||||
, nullable => true
|
, required => false
|
||||||
, desc => <<"Whether to aggregate all nodes Metrics">>})
|
, desc => <<"Whether to aggregate all nodes Metrics">>})
|
||||||
}]
|
}]
|
||||||
, responses =>
|
, responses =>
|
||||||
|
|
|
@ -140,7 +140,7 @@ fields(node_info) ->
|
||||||
, mk( emqx_schema:bytesize()
|
, mk( emqx_schema:bytesize()
|
||||||
, #{desc => <<"Used memory">>, example => "256.00M"})}
|
, #{desc => <<"Used memory">>, example => "256.00M"})}
|
||||||
, { node_status
|
, { node_status
|
||||||
, mk( enum(["Running", "Stopped"])
|
, mk( enum(['Running', 'Stopped'])
|
||||||
, #{desc => <<"Node status">>, example => "Running"})}
|
, #{desc => <<"Node status">>, example => "Running"})}
|
||||||
, { otp_release
|
, { otp_release
|
||||||
, mk( string()
|
, mk( string()
|
||||||
|
@ -164,7 +164,7 @@ fields(node_info) ->
|
||||||
, mk( string()
|
, mk( string()
|
||||||
, #{desc => <<"Path to log files">>, example => "path/to/log | not found"})}
|
, #{desc => <<"Path to log files">>, example => "path/to/log | not found"})}
|
||||||
, { role
|
, { role
|
||||||
, mk( enum(["core", "replicant"])
|
, mk( enum([core, replicant])
|
||||||
, #{desc => <<"Node role">>, example => "core"})}
|
, #{desc => <<"Node role">>, example => "core"})}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
|
|
@@ -59,7 +59,7 @@ fields(aggregate) ->
     , mk( boolean()
         , #{ desc => <<"Calculation aggregate for all nodes">>
            , in => query
-           , nullable => true
+           , required => false
            , example => false})}
   ];
 fields(node_stats_data) ->

@@ -75,38 +75,38 @@ parameters() ->
     {
         node, hoconsc:mk(binary(), #{
             in => query,
-            nullable => true,
+            required => false,
             desc => <<"Node name">>,
             example => atom_to_list(node())})
     },
     {
         clientid, hoconsc:mk(binary(), #{
             in => query,
-            nullable => true,
+            required => false,
             desc => <<"Client ID">>})
     },
     {
         qos, hoconsc:mk(emqx_schema:qos(), #{
             in => query,
-            nullable => true,
+            required => false,
             desc => <<"QoS">>})
     },
     {
         topic, hoconsc:mk(binary(), #{
             in => query,
-            nullable => true,
+            required => false,
             desc => <<"Topic, url encoding">>})
     },
     {
         match_topic, hoconsc:mk(binary(), #{
             in => query,
-            nullable => true,
+            required => false,
             desc => <<"Match topic string, url encoding">>})
     },
     {
         share_group, hoconsc:mk(binary(), #{
             in => query,
-            nullable => true,
+            required => false,
             desc => <<"Shared subscription group name">>})
     }
 ].
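Every query-parameter field above follows the same mechanical rename: nullable => true becomes required => false, and nullable => false becomes required => true; only the key name and its polarity change, the field semantics do not. A before/after sketch of one such field, written as a fragment for one of these schema modules (binary() is assumed to be the typerefl type already in scope there):

%% Sketch only: old and new spelling of an optional query parameter.
%% hocon 0.24.x style:
%%   {node, hoconsc:mk(binary(), #{in => query, nullable => true, desc => <<"Node name">>})}
%% hocon 0.25.0 style -- same meaning, key renamed and polarity flipped:
node_param() ->
    {node, hoconsc:mk(binary(), #{ in => query
                                 , required => false
                                 , desc => <<"Node name">>})}.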
@@ -144,45 +144,45 @@ fields(trace) ->
     {name, hoconsc:mk(binary(),
         #{desc => "Unique and format by [a-zA-Z0-9-_]",
           validator => fun ?MODULE:validate_name/1,
-          nullable => false,
+          required => true,
           example => <<"EMQX-TRACE-1">>})},
     {type, hoconsc:mk(hoconsc:enum([clientid, topic, ip_address]),
         #{desc => """Filter type""",
-          nullable => false,
+          required => true,
           example => <<"clientid">>})},
     {topic, hoconsc:mk(binary(),
         #{desc => """support mqtt wildcard topic.""",
-          nullable => true,
+          required => false,
           example => <<"/dev/#">>})},
     {clientid, hoconsc:mk(binary(),
         #{desc => """mqtt clientid.""",
-          nullable => true,
+          required => false,
           example => <<"dev-001">>})},
     %% TODO add ip_address type in emqx_schema.erl
     {ip_address, hoconsc:mk(binary(),
         #{desc => "client ip address",
-          nullable => true,
+          required => false,
           example => <<"127.0.0.1">>
          })},
     {status, hoconsc:mk(hoconsc:enum([running, stopped, waiting]),
         #{desc => "trace status",
-          nullable => true,
+          required => false,
           example => running
          })},
     {start_at, hoconsc:mk(emqx_datetime:epoch_second(),
         #{desc => "rfc3339 timestamp or epoch second",
-          nullable => true,
+          required => false,
           example => <<"2021-11-04T18:17:38+08:00">>
          })},
     {end_at, hoconsc:mk(emqx_datetime:epoch_second(),
         #{desc => "rfc3339 timestamp or epoch second",
-          nullable => true,
+          required => false,
           example => <<"2021-11-05T18:17:38+08:00">>
          })},
     {log_size, hoconsc:mk(hoconsc:array(map()),
         #{desc => "trace log size",
           example => [#{<<"node">> => <<"emqx@127.0.0.1">>, <<"size">> => 1024}],
-          nullable => true})}
+          required => false})}
     ];
 fields(name) ->
     [{name, hoconsc:mk(binary(),

@@ -198,14 +198,14 @@ fields(node) ->
         #{
             desc => "Node name",
             in => query,
-            nullable => true
+            required => false
         })}];
 fields(bytes) ->
     [{bytes, hoconsc:mk(integer(),
         #{
             desc => "Maximum number of bytes to store in request",
             in => query,
-            nullable => true,
+            required => false,
             default => 1000
         })}];
 fields(position) ->

@@ -213,7 +213,7 @@ fields(position) ->
         #{
             desc => "Offset from the current trace position.",
             in => query,
-            nullable => true,
+            required => false,
             default => 0
         })}].

@@ -58,7 +58,7 @@ t_http_test(_Config) ->
     ?assertEqual(
         #{
             <<"code">> => <<"BAD_REQUEST">>,
-            <<"message">> => <<"name : not_nullable">>
+            <<"message">> => <<"name : mandatory_required_field">>
         }, json(Body)),

     Name = <<"test-name">>,
@@ -121,12 +121,12 @@ fields(reset) ->
              <<"Topic Name. If this parameter is not present,"
                " all created topic metrics will be reset">>
            , example => <<"testtopic/1">>
-           , nullable => true})}
+           , required => false})}
   , {action
     , mk( string()
         , #{ desc => <<"Action Name. Only as a \"reset\"">>
            , enum => [reset]
-           , nullable => false
+           , required => true
            , example => <<"reset">>})}
   ];

@@ -135,21 +135,21 @@ fields(topic_metrics) ->
     , mk( binary()
         , #{ desc => <<"Topic Name">>
            , example => <<"testtopic/1">>
-           , nullable => false})},
+           , required => true})},
     { create_time
     , mk( emqx_datetime:epoch_second()
         , #{ desc => <<"Topic Metrics created date time, in rfc3339">>
-           , nullable => false
+           , required => true
            , example => <<"2022-01-14T21:48:47+08:00">>})},
     { reset_time
     , mk( emqx_datetime:epoch_second()
         , #{ desc => <<"Topic Metrics reset date time, in rfc3339. Nullable if never reset">>
-           , nullable => true
+           , required => false
            , example => <<"2022-01-14T21:48:47+08:00">>})},
     { metrics
     , mk( ref(metrics)
         , #{ desc => <<"Topic Metrics fields">>
-           , nullable => false})
+           , required => true})
     }
   ];

@@ -54,12 +54,12 @@ state_fields() ->
          "It should match the plugin application name-version as the "
          "for the plugin release package name<br>"
          "For example: my_plugin-0.1.0."
-        , nullable => false
+        , required => true
        })}
   , {enable,
      hoconsc:mk(boolean(),
        #{ desc => "Set to 'true' to enable this plugin"
-        , nullable => false
+        , required => true
        })}
   ].

@@ -69,14 +69,14 @@ root_fields() ->
     ].

 states(type) -> hoconsc:array(hoconsc:ref(state));
-states(nullable) -> true;
+states(required) -> false;
 states(default) -> [];
 states(desc) -> "An array of plugins in the desired states.<br>"
                 "The plugins are started in the defined order";
 states(_) -> undefined.

 install_dir(type) -> string();
-install_dir(nullable) -> true;
+install_dir(required) -> false;
 install_dir(default) -> "plugins"; %% runner's root dir
 install_dir(T) when T =/= desc -> undefined;
 install_dir(desc) -> """
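The states/1 and install_dir/1 clauses above show the same rename for function-style field schemas, where each metadata key is a separate clause. A minimal sketch with a hypothetical field, written as a fragment for a schema module of this kind (string() is the schema type already in scope there):

%% Sketch only: my_dir/1 is a made-up field following the install_dir/1 pattern.
my_dir(type) -> string();
my_dir(required) -> false;          %% was: my_dir(nullable) -> true;
my_dir(default) -> "plugins";
my_dir(desc) -> "Illustrative directory option";
my_dir(_) -> undefined.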
@@ -4,7 +4,7 @@
 [ {emqx, {path, "../emqx"}},
   %% FIXME: tag this as v3.1.3
   {prometheus, {git, "https://github.com/deadtrickster/prometheus.erl", {tag, "v4.8.1"}}},
-  {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.24.0"}}}
+  {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.25.0"}}}
 ]}.

 {edoc_opts, [{preprocess, true}]}.

@@ -60,7 +60,7 @@ init_file(desc) ->
     "The file has to be structured line-by-line, each line must be in ",
     "the format of <code>PSKIdentity:SharedSecret</code>. For example: ",
     "<code>mydevice1:c2VjcmV0</code>">>;
-init_file(nullable) -> true;
+init_file(required) -> false;
 init_file(_) -> undefined.

 separator(type) -> binary();

@@ -35,11 +35,11 @@ roots() -> [{name, fun name/1},
             {register, fun register/1}].

 name(type) -> atom();
-name(nullable) -> false;
+name(required) -> true;
 name(_) -> undefined.

 register(type) -> boolean();
-register(nullable) -> false;
+register(required) -> true;
 register(default) -> false;
 register(_) -> undefined.
@@ -54,8 +54,8 @@ fields("rule_info") ->

 %% TODO: we can delete this API if the Dashboard not denpends on it
 fields("rule_events") ->
-    ETopics = [emqx_rule_events:event_topic(E) || E <- emqx_rule_events:event_names()],
-    [ {"event", sc(hoconsc:enum(ETopics), #{desc => "The event topics", nullable => false})}
+    ETopics = [binary_to_atom(emqx_rule_events:event_topic(E)) || E <- emqx_rule_events:event_names()],
+    [ {"event", sc(hoconsc:enum(ETopics), #{desc => "The event topics", required => true})}
     , {"title", sc(binary(), #{desc => "The title", example => "some title"})}
     , {"description", sc(binary(), #{desc => "The description", example => "some desc"})}
     , {"columns", sc(map(), #{desc => "The columns"})}
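The binary_to_atom/1 wrapping above feeds hoconsc:enum/1, which, consistent with the other enum edits in this change set, appears to take atom members, while emqx_rule_events:event_topic/1 presumably returns a binary. A minimal sketch of that conversion (the topic shown is made up, not taken from emqx_rule_events):

%% Sketch only: turning binary event topics into atoms usable as enum members.
-module(event_enum_sketch).
-export([to_enum_members/1]).

to_enum_members(Topics) when is_list(Topics) ->
    [binary_to_atom(T) || T <- Topics].

%% e.g. to_enum_members([<<"$events/example">>]) -> ['$events/example']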
@@ -75,7 +75,7 @@ fields("rule_test") ->
         ]),
         #{desc => "The context of the event for testing",
           default => #{}})}
-    , {"sql", sc(binary(), #{desc => "The SQL of the rule for testing", nullable => false})}
+    , {"sql", sc(binary(), #{desc => "The SQL of the rule for testing", required => true})}
     ];

 fields("metrics") ->

@@ -121,7 +121,7 @@ fields("node_metrics") ->
     ] ++ fields("metrics");

 fields("ctx_pub") ->
-    [ {"event_type", sc(message_publish, #{desc => "Event Type", nullable => false})}
+    [ {"event_type", sc(message_publish, #{desc => "Event Type", required => true})}
     , {"id", sc(binary(), #{desc => "Message ID"})}
     , {"clientid", sc(binary(), #{desc => "The Client ID"})}
     , {"username", sc(binary(), #{desc => "The User Name"})}

@@ -133,7 +133,7 @@ fields("ctx_pub") ->
     ] ++ [qos()];

 fields("ctx_sub") ->
-    [ {"event_type", sc(session_subscribed, #{desc => "Event Type", nullable => false})}
+    [ {"event_type", sc(session_subscribed, #{desc => "Event Type", required => true})}
     , {"clientid", sc(binary(), #{desc => "The Client ID"})}
     , {"username", sc(binary(), #{desc => "The User Name"})}
     , {"payload", sc(binary(), #{desc => "The Message Payload"})}

@@ -144,11 +144,11 @@ fields("ctx_sub") ->
     ] ++ [qos()];

 fields("ctx_unsub") ->
-    [{"event_type", sc(session_unsubscribed, #{desc => "Event Type", nullable => false})}] ++
+    [{"event_type", sc(session_unsubscribed, #{desc => "Event Type", required => true})}] ++
     proplists:delete("event_type", fields("ctx_sub"));

 fields("ctx_delivered") ->
-    [ {"event_type", sc(message_delivered, #{desc => "Event Type", nullable => false})}
+    [ {"event_type", sc(message_delivered, #{desc => "Event Type", required => true})}
     , {"id", sc(binary(), #{desc => "Message ID"})}
     , {"from_clientid", sc(binary(), #{desc => "The Client ID"})}
     , {"from_username", sc(binary(), #{desc => "The User Name"})}

@@ -162,11 +162,11 @@ fields("ctx_delivered") ->
     ] ++ [qos()];

 fields("ctx_acked") ->
-    [{"event_type", sc(message_acked, #{desc => "Event Type", nullable => false})}] ++
+    [{"event_type", sc(message_acked, #{desc => "Event Type", required => true})}] ++
     proplists:delete("event_type", fields("ctx_delivered"));

 fields("ctx_dropped") ->
-    [ {"event_type", sc(message_dropped, #{desc => "Event Type", nullable => false})}
+    [ {"event_type", sc(message_dropped, #{desc => "Event Type", required => true})}
     , {"id", sc(binary(), #{desc => "Message ID"})}
     , {"reason", sc(binary(), #{desc => "The Reason for Dropping"})}
     , {"clientid", sc(binary(), #{desc => "The Client ID"})}

@@ -179,7 +179,7 @@ fields("ctx_dropped") ->
     ] ++ [qos()];

 fields("ctx_connected") ->
-    [ {"event_type", sc(client_connected, #{desc => "Event Type", nullable => false})}
+    [ {"event_type", sc(client_connected, #{desc => "Event Type", required => true})}
     , {"clientid", sc(binary(), #{desc => "The Client ID"})}
     , {"username", sc(binary(), #{desc => "The User Name"})}
     , {"mountpoint", sc(binary(), #{desc => "The Mountpoint"})}

@@ -196,7 +196,7 @@ fields("ctx_connected") ->
     ];

 fields("ctx_disconnected") ->
-    [ {"event_type", sc(client_disconnected, #{desc => "Event Type", nullable => false})}
+    [ {"event_type", sc(client_disconnected, #{desc => "Event Type", required => true})}
     , {"clientid", sc(binary(), #{desc => "The Client ID"})}
     , {"username", sc(binary(), #{desc => "The User Name"})}
     , {"reason", sc(binary(), #{desc => "The Reason for Disconnect"})}

@@ -211,7 +211,7 @@ qos() ->

 rule_id() ->
     {"id", sc(binary(),
-        #{ desc => "The ID of the rule", nullable => false
+        #{ desc => "The ID of the rule", required => true
         , example => "293fb66f"
         })}.
|
@ -46,7 +46,7 @@ SQL query to transform the messages.<br>
|
||||||
Example: <code>SELECT * FROM \"test/topic\" WHERE payload.x = 1</code><br>
|
Example: <code>SELECT * FROM \"test/topic\" WHERE payload.x = 1</code><br>
|
||||||
"""
|
"""
|
||||||
, example => "SELECT * FROM \"test/topic\" WHERE payload.x = 1"
|
, example => "SELECT * FROM \"test/topic\" WHERE payload.x = 1"
|
||||||
, nullable => false
|
, required => true
|
||||||
, validator => fun ?MODULE:validate_sql/1
|
, validator => fun ?MODULE:validate_sql/1
|
||||||
})}
|
})}
|
||||||
, {"outputs", sc(hoconsc:array(hoconsc:union(outputs())),
|
, {"outputs", sc(hoconsc:array(hoconsc:union(outputs())),
|
||||||
|
@ -143,7 +143,7 @@ fields("republish_args") ->
|
||||||
The target topic of message to be re-published.<br>
|
The target topic of message to be re-published.<br>
|
||||||
Template with variables is allowed, see description of the 'republish_args'.
|
Template with variables is allowed, see description of the 'republish_args'.
|
||||||
"""
|
"""
|
||||||
, nullable => false
|
, required => true
|
||||||
, example => <<"a/1">>
|
, example => <<"a/1">>
|
||||||
})}
|
})}
|
||||||
, {qos, sc(qos(),
|
, {qos, sc(qos(),
|
||||||
|
@ -182,7 +182,7 @@ rule_name() ->
|
||||||
{"name", sc(binary(),
|
{"name", sc(binary(),
|
||||||
#{ desc => "The name of the rule"
|
#{ desc => "The name of the rule"
|
||||||
, default => ""
|
, default => ""
|
||||||
, nullable => false
|
, required => true
|
||||||
, example => "foo"
|
, example => "foo"
|
||||||
})}.
|
})}.
|
||||||
|
|
||||||
|
|
|
@@ -33,19 +33,19 @@ namespace() -> "statsd".
 roots() -> ["statsd"].

 fields("statsd") ->
-    [ {enable, hoconsc:mk(boolean(), #{default => false, nullable => false})}
+    [ {enable, hoconsc:mk(boolean(), #{default => false, required => true})}
     , {server, fun server/1}
     , {sample_time_interval, fun duration_ms/1}
     , {flush_time_interval, fun duration_ms/1}
     ].

 server(type) -> emqx_schema:ip_port();
-server(nullable) -> false;
+server(required) -> true;
 server(default) -> "127.0.0.1:8125";
 server(_) -> undefined.

 duration_ms(type) -> emqx_schema:duration_ms();
-duration_ms(nullable) -> false;
+duration_ms(required) -> true;
 duration_ms(default) -> "10s";
 duration_ms(_) -> undefined.
mix.exs (2 changed lines)

@@ -68,7 +68,7 @@ defmodule EMQXUmbrella.MixProject do
       # in conflict by emqtt and hocon
       {:getopt, "1.0.2", override: true},
       {:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "0.18.0", override: true},
-      {:hocon, github: "emqx/hocon", tag: "0.24.0", override: true},
+      {:hocon, github: "emqx/hocon", tag: "0.25.0", override: true},
       {:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.4.1", override: true},
       {:esasl, github: "emqx/esasl", tag: "0.2.0"},
       {:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"},

@@ -66,7 +66,7 @@
     , {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.2"}}}
     , {getopt, "1.0.2"}
     , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.18.0"}}}
-    , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.24.0"}}}
+    , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.25.0"}}}
     , {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.4.1"}}}
     , {esasl, {git, "https://github.com/emqx/esasl", {tag, "0.2.0"}}}
     , {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}}