Merge pull request #7478 from ieQu1/doc-schema-7
docs(schema): Add descriptions of fields and records
commit 20f66664d7
@ -218,7 +218,7 @@ jobs:
|
||||||
- emqx
|
- emqx
|
||||||
- emqx-enterprise
|
- emqx-enterprise
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
container: "ghcr.io/iequ1/emqx-schema-validate:0.2.3"
|
container: "ghcr.io/iequ1/emqx-schema-validate:0.3.0"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/download-artifact@v2
|
- uses: actions/download-artifact@v2
|
||||||
name: Download schema dump
|
name: Download schema dump
|
||||||
|
|
|
@ -515,7 +515,7 @@ authorization {
|
||||||
## Default: allow
|
## Default: allow
|
||||||
no_match: allow
|
no_match: allow
|
||||||
|
|
||||||
## The action when authorization check reject current operation
|
## The action when the authorization check rejects an operation
|
||||||
##
|
##
|
||||||
## @doc authorization.deny_action
|
## @doc authorization.deny_action
|
||||||
## ValueType: ignore | disconnect
|
## ValueType: ignore | disconnect
|
||||||
|
@ -1048,16 +1048,16 @@ broker {
|
||||||
## Default: true
|
## Default: true
|
||||||
route_batch_clean = true
|
route_batch_clean = true
|
||||||
|
|
||||||
## Performance toggle for subscribe/unsubscribe wildcard topic.
|
## Performance tuning for subscribe/unsubscribe wildcard topic.
|
||||||
## Change this toggle only when there are many wildcard topics.
|
## Change this parameter only when there are many wildcard topics.
|
||||||
##
|
##
|
||||||
## NOTE: when changing from/to 'global' lock, it requires all
|
## NOTE: when changing from/to 'global' lock, it requires all
|
||||||
## nodes in the cluster to be stopped before the change.
|
## nodes in the cluster to be stopped before the change.
|
||||||
##
|
##
|
||||||
## @doc broker.perf.route_lock_type
|
## @doc broker.perf.route_lock_type
|
||||||
## ValueType: key | tab | global
|
## ValueType: key | tab | global
|
||||||
## - key: mnesia translational updates with per-key locks. recommended for single node setup.
|
## - key: mnesia transactional updates with per-key locks. recommended for single node setup.
|
||||||
## - tab: mnesia translational updates with table lock. recommended for multi-nodes setup.
|
## - tab: mnesia transactional updates with table lock. recommended for multi-nodes setup.
|
||||||
## - global: global lock protected updates. recommended for larger cluster.
|
## - global: global lock protected updates. recommended for larger cluster.
|
||||||
## Default: key
|
## Default: key
|
||||||
perf.route_lock_type = key
|
perf.route_lock_type = key
|
||||||
|
|
|
@ -326,7 +326,10 @@ fields("authorization") ->
|
||||||
{"deny_action",
|
{"deny_action",
|
||||||
sc(
|
sc(
|
||||||
hoconsc:enum([ignore, disconnect]),
|
hoconsc:enum([ignore, disconnect]),
|
||||||
#{default => ignore}
|
#{
|
||||||
|
default => ignore,
|
||||||
|
desc => "The action when the authorization check rejects an operation."
|
||||||
|
}
|
||||||
)},
|
)},
|
||||||
{"cache",
|
{"cache",
|
||||||
sc(
|
sc(
|
||||||
|
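The change above is the pattern applied throughout this PR: a field's bare `#{default => ...}` metadata map is replaced by one that also carries a `desc` entry, built through the module-local `sc/2` wrapper around `hoconsc:mk/2`. A minimal sketch of the pattern, using a hypothetical field name (`deny_example`) and only the `hoconsc` calls already present in this diff:

sc(Type, Meta) -> hoconsc:mk(Type, Meta).

fields("example") ->
    [
        {"deny_example",
            sc(
                hoconsc:enum([ignore, disconnect]),
                #{
                    default => ignore,
                    %% The desc string ends up in the generated config documentation.
                    desc => "The action taken when a check rejects an operation."
                }
            )}
    ].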
@ -920,7 +923,7 @@ fields("mqtt_quic_listener") ->
|
||||||
{"certfile",
|
{"certfile",
|
||||||
sc(
|
sc(
|
||||||
string(),
|
string(),
|
||||||
#{desc => "Path to the certificate."}
|
#{desc => "Path to the certificate file."}
|
||||||
)},
|
)},
|
||||||
{"keyfile",
|
{"keyfile",
|
||||||
sc(
|
sc(
|
||||||
|
@ -949,7 +952,11 @@ fields("ws_opts") ->
|
||||||
{"mqtt_piggyback",
|
{"mqtt_piggyback",
|
||||||
sc(
|
sc(
|
||||||
hoconsc:enum([single, multiple]),
|
hoconsc:enum([single, multiple]),
|
||||||
#{default => multiple}
|
#{
|
||||||
|
default => multiple,
|
||||||
|
desc =>
|
||||||
|
"Whether a WebSocket message is allowed to contain multiple MQTT packets."
|
||||||
|
}
|
||||||
)},
|
)},
|
||||||
{"compress",
|
{"compress",
|
||||||
sc(
|
sc(
|
||||||
|
@ -1280,12 +1287,34 @@ fields("broker_perf") ->
|
||||||
{"route_lock_type",
|
{"route_lock_type",
|
||||||
sc(
|
sc(
|
||||||
hoconsc:enum([key, tab, global]),
|
hoconsc:enum([key, tab, global]),
|
||||||
#{default => key}
|
#{
|
||||||
|
default => key,
|
||||||
|
desc =>
|
||||||
|
"Performance tuning for subscribing/unsubscribing a wildcard topic.<br/>\n"
|
||||||
|
"Change this parameter only when there are many wildcard topics.<br/>\n"
|
||||||
|
"NOTE: when changing from/to `global` lock, it requires all\n"
|
||||||
|
"nodes in the cluster to be stopped before the change.\n\n"
|
||||||
|
" - `key`: mnesia transactional updates with per-key locks. "
|
||||||
|
"Recommended for a single-node setup.\n"
|
||||||
|
" - `tab`: mnesia transactional updates with table lock. Recommended for a cluster setup.\n"
|
||||||
|
" - `global`: updates are protected with a global lock. Recommended for large clusters."
|
||||||
|
}
|
||||||
)},
|
)},
|
||||||
{"trie_compaction",
|
{"trie_compaction",
|
||||||
sc(
|
sc(
|
||||||
boolean(),
|
boolean(),
|
||||||
#{default => true}
|
#{
|
||||||
|
default => true,
|
||||||
|
desc =>
|
||||||
|
"Enable trie path compaction.<br/>\n"
|
||||||
|
"Enabling it significantly improves wildcard topic subscribe\n"
|
||||||
|
"rate, if wildcard topics have unique prefixes like:\n"
|
||||||
|
"'sensor/{{id}}/+/', where ID is unique per subscriber.<br/>\n"
|
||||||
|
"Topic match performance (when publishing) may degrade if messages\n"
|
||||||
|
"are mostly published to topics with large number of levels.<br/>\n"
|
||||||
|
"NOTE: This is a cluster-wide configuration.\n"
|
||||||
|
"It requires all nodes to be stopped before changing it."
|
||||||
|
}
|
||||||
)}
|
)}
|
||||||
];
|
];
|
||||||
fields("sys_topics") ->
|
fields("sys_topics") ->
|
||||||
|
@ -1293,12 +1322,21 @@ fields("sys_topics") ->
|
||||||
{"sys_msg_interval",
|
{"sys_msg_interval",
|
||||||
sc(
|
sc(
|
||||||
hoconsc:union([disabled, duration()]),
|
hoconsc:union([disabled, duration()]),
|
||||||
#{default => "1m"}
|
#{
|
||||||
|
default => "1m",
|
||||||
|
desc => "Time interval of publishing `$SYS` messages."
|
||||||
|
}
|
||||||
)},
|
)},
|
||||||
{"sys_heartbeat_interval",
|
{"sys_heartbeat_interval",
|
||||||
sc(
|
sc(
|
||||||
hoconsc:union([disabled, duration()]),
|
hoconsc:union([disabled, duration()]),
|
||||||
#{default => "30s"}
|
#{
|
||||||
|
default => "30s",
|
||||||
|
desc =>
|
||||||
|
"Time interval for publishing following heartbeat messages:<br/>"
|
||||||
|
" - `$SYS/brokers/<node>/uptime`\n"
|
||||||
|
" - `$SYS/brokers/<node>/datetime`"
|
||||||
|
}
|
||||||
)},
|
)},
|
||||||
{"sys_event_messages",
|
{"sys_event_messages",
|
||||||
sc(
|
sc(
|
||||||
|
@ -2045,8 +2083,8 @@ server_ssl_opts_schema(Defaults, IsRanchListener) ->
|
||||||
default => D("dhfile"),
|
default => D("dhfile"),
|
||||||
required => false,
|
required => false,
|
||||||
desc =>
|
desc =>
|
||||||
"Path to a file containing PEM-encoded Diffie Hellman parameters\n"
|
"Path to a file containing PEM-encoded Diffie-Hellman parameters\n"
|
||||||
"to be used by the server if a cipher suite using Diffie Hellman\n"
|
"to be used by the server if a cipher suite using Diffie-Hellman\n"
|
||||||
"key exchange is negotiated. If not specified, default parameters\n"
|
"key exchange is negotiated. If not specified, default parameters\n"
|
||||||
"are used.<br>\n"
|
"are used.<br>\n"
|
||||||
"NOTE: The <code>dhfile</code> option is not supported by TLS 1.3."
|
"NOTE: The <code>dhfile</code> option is not supported by TLS 1.3."
|
||||||
|
|
|
@ -16,7 +16,7 @@
|
||||||
|
|
||||||
-module(emqx_zone_schema).
|
-module(emqx_zone_schema).
|
||||||
|
|
||||||
-export([namespace/0, roots/0, fields/1]).
|
-export([namespace/0, roots/0, fields/1, desc/1]).
|
||||||
|
|
||||||
namespace() -> zone.
|
namespace() -> zone.
|
||||||
|
|
||||||
|
@ -38,6 +38,9 @@ roots() ->
|
||||||
fields(Name) ->
|
fields(Name) ->
|
||||||
[{N, no_default(Sc)} || {N, Sc} <- emqx_schema:fields(Name)].
|
[{N, no_default(Sc)} || {N, Sc} <- emqx_schema:fields(Name)].
|
||||||
|
|
||||||
|
desc(Name) ->
|
||||||
|
emqx_schema:desc(Name).
|
||||||
|
|
||||||
%% no default values for zone settings
|
%% no default values for zone settings
|
||||||
no_default(Sc) ->
|
no_default(Sc) ->
|
||||||
fun
|
fun
|
||||||
|
|
|
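For the zone schema no documentation is duplicated: the new `desc/1` simply delegates to `emqx_schema`, just as `fields/1` already reuses the parent field definitions (with defaults stripped). A minimal sketch of the delegation pattern, with a hypothetical module name:

-module(example_zone_schema).
-export([namespace/0, roots/0, fields/1, desc/1]).

namespace() -> zone.
roots() -> [].

%% Field definitions come from the parent schema...
fields(Name) -> emqx_schema:fields(Name).

%% ...and so do the struct-level descriptions, keeping the docs in one place.
desc(Name) -> emqx_schema:desc(Name).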
@ -47,6 +47,7 @@
|
||||||
-export([
|
-export([
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1,
|
fields/1,
|
||||||
|
desc/1,
|
||||||
namespace/0
|
namespace/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
@ -69,39 +70,72 @@ fields(bcrypt_rw) ->
|
||||||
fields(bcrypt) ++
|
fields(bcrypt) ++
|
||||||
[{salt_rounds, fun salt_rounds/1}];
|
[{salt_rounds, fun salt_rounds/1}];
|
||||||
fields(bcrypt) ->
|
fields(bcrypt) ->
|
||||||
[{name, {enum, [bcrypt]}}];
|
[{name, sc(bcrypt, #{desc => "BCRYPT password hashing."})}];
|
||||||
fields(pbkdf2) ->
|
fields(pbkdf2) ->
|
||||||
[
|
[
|
||||||
{name, {enum, [pbkdf2]}},
|
{name, sc(pbkdf2, #{desc => "PBKDF2 password hashing."})},
|
||||||
{mac_fun, {enum, [md4, md5, ripemd160, sha, sha224, sha256, sha384, sha512]}},
|
{mac_fun,
|
||||||
{iterations, integer()},
|
sc(
|
||||||
|
hoconsc:enum([md4, md5, ripemd160, sha, sha224, sha256, sha384, sha512]),
|
||||||
|
#{desc => "Specifies mac_fun for PBKDF2 hashing algorithm."}
|
||||||
|
)},
|
||||||
|
{iterations,
|
||||||
|
sc(
|
||||||
|
integer(),
|
||||||
|
#{desc => "Iteration count for PBKDF2 hashing algorithm."}
|
||||||
|
)},
|
||||||
{dk_length, fun dk_length/1}
|
{dk_length, fun dk_length/1}
|
||||||
];
|
];
|
||||||
fields(other_algorithms) ->
|
fields(other_algorithms) ->
|
||||||
[
|
[
|
||||||
{name, {enum, [plain, md5, sha, sha256, sha512]}},
|
{name,
|
||||||
|
sc(
|
||||||
|
hoconsc:enum([plain, md5, sha, sha256, sha512]),
|
||||||
|
#{
|
||||||
|
desc =>
|
||||||
|
"Simple password hashing algorithm."
|
||||||
|
}
|
||||||
|
)},
|
||||||
{salt_position, fun salt_position/1}
|
{salt_position, fun salt_position/1}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
desc(bcrypt_rw) ->
|
||||||
|
"Settings for bcrypt password hashing algorithm (for DB backends with write capability).";
|
||||||
|
desc(bcrypt) ->
|
||||||
|
"Settings for bcrypt password hashing algorithm.";
|
||||||
|
desc(pbkdf2) ->
|
||||||
|
"Settings for PBKDF2 password hashing algorithm.";
|
||||||
|
desc(other_algorithms) ->
|
||||||
|
"Settings for other password hashing algorithms.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
salt_position(type) -> {enum, [prefix, suffix]};
|
salt_position(type) -> {enum, [prefix, suffix]};
|
||||||
salt_position(desc) -> "Specifies whether the password salt is stored as a prefix or the suffix.";
|
|
||||||
salt_position(default) -> prefix;
|
salt_position(default) -> prefix;
|
||||||
|
salt_position(desc) -> "Salt position for PLAIN, MD5, SHA, SHA256 and SHA512 algorithms.";
|
||||||
salt_position(_) -> undefined.
|
salt_position(_) -> undefined.
|
||||||
|
|
||||||
salt_rounds(type) -> integer();
|
salt_rounds(type) -> integer();
|
||||||
salt_rounds(desc) -> "Cost factor for the bcrypt hash.";
|
|
||||||
salt_rounds(default) -> 10;
|
salt_rounds(default) -> 10;
|
||||||
|
salt_rounds(desc) -> "Salt rounds for BCRYPT password generation.";
|
||||||
salt_rounds(_) -> undefined.
|
salt_rounds(_) -> undefined.
|
||||||
|
|
||||||
dk_length(type) -> integer();
|
dk_length(type) ->
|
||||||
dk_length(desc) -> "Length of the derived key.";
|
integer();
|
||||||
dk_length(required) -> false;
|
dk_length(required) ->
|
||||||
dk_length(_) -> undefined.
|
false;
|
||||||
|
dk_length(desc) ->
|
||||||
|
"Derived length for PBKDF2 hashing algorithm. If not specified, "
|
||||||
|
"calculated automatically based on `mac_fun`.";
|
||||||
|
dk_length(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
type_rw(type) ->
|
type_rw(type) ->
|
||||||
hoconsc:union(rw_refs());
|
hoconsc:union(rw_refs());
|
||||||
type_rw(default) ->
|
type_rw(default) ->
|
||||||
#{<<"name">> => sha256, <<"salt_position">> => prefix};
|
#{<<"name">> => sha256, <<"salt_position">> => prefix};
|
||||||
|
type_rw(desc) ->
|
||||||
|
"Options for password hash creation and verification.";
|
||||||
type_rw(_) ->
|
type_rw(_) ->
|
||||||
undefined.
|
undefined.
|
||||||
|
|
||||||
|
@ -109,6 +143,8 @@ type_ro(type) ->
|
||||||
hoconsc:union(ro_refs());
|
hoconsc:union(ro_refs());
|
||||||
type_ro(default) ->
|
type_ro(default) ->
|
||||||
#{<<"name">> => sha256, <<"salt_position">> => prefix};
|
#{<<"name">> => sha256, <<"salt_position">> => prefix};
|
||||||
|
type_ro(desc) ->
|
||||||
|
"Options for password hash verification.";
|
||||||
type_ro(_) ->
|
type_ro(_) ->
|
||||||
undefined.
|
undefined.
|
||||||
|
|
||||||
|
@ -199,3 +235,5 @@ ro_refs() ->
|
||||||
hoconsc:ref(?MODULE, pbkdf2),
|
hoconsc:ref(?MODULE, pbkdf2),
|
||||||
hoconsc:ref(?MODULE, other_algorithms)
|
hoconsc:ref(?MODULE, other_algorithms)
|
||||||
].
|
].
|
||||||
|
|
||||||
|
sc(Type, Meta) -> hoconsc:mk(Type, Meta).
|
||||||
|
|
|
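Most field schemas in this module are single functions that return one attribute per argument (`type`, `default`, `desc`, `required`, ...) and fall through to `undefined`; the commit slots the new `desc` clauses into these functions. A short sketch of the convention with a hypothetical field (the `integer()` type reference comes from the typerefl include these schema modules use):

%% One clause per schema attribute; anything not handled returns 'undefined'.
example_rounds(type) -> integer();
example_rounds(default) -> 10;
example_rounds(desc) -> "Number of rounds used when hashing a password.";
example_rounds(_) -> undefined.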
@ -26,7 +26,8 @@
|
||||||
-export([
|
-export([
|
||||||
namespace/0,
|
namespace/0,
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1
|
fields/1,
|
||||||
|
desc/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -105,11 +106,19 @@ fields(?CONF_NS) ->
|
||||||
{iteration_count, fun iteration_count/1}
|
{iteration_count, fun iteration_count/1}
|
||||||
] ++ emqx_authn_schema:common_fields().
|
] ++ emqx_authn_schema:common_fields().
|
||||||
|
|
||||||
|
desc(?CONF_NS) ->
|
||||||
|
"Settings for Salted Challenge Response Authentication Mechanism\n"
|
||||||
|
"(SCRAM) authentication.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
algorithm(type) -> hoconsc:enum([sha256, sha512]);
|
algorithm(type) -> hoconsc:enum([sha256, sha512]);
|
||||||
|
algorithm(desc) -> "Hashing algorithm.";
|
||||||
algorithm(default) -> sha256;
|
algorithm(default) -> sha256;
|
||||||
algorithm(_) -> undefined.
|
algorithm(_) -> undefined.
|
||||||
|
|
||||||
iteration_count(type) -> non_neg_integer();
|
iteration_count(type) -> non_neg_integer();
|
||||||
|
iteration_count(desc) -> "Iteration count.";
|
||||||
iteration_count(default) -> 4096;
|
iteration_count(default) -> 4096;
|
||||||
iteration_count(_) -> undefined.
|
iteration_count(_) -> undefined.
|
||||||
|
|
||||||
|
|
|
@ -28,6 +28,7 @@
|
||||||
namespace/0,
|
namespace/0,
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1,
|
fields/1,
|
||||||
|
desc/1,
|
||||||
validations/0
|
validations/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
@ -56,21 +57,29 @@ roots() ->
|
||||||
|
|
||||||
fields(get) ->
|
fields(get) ->
|
||||||
[
|
[
|
||||||
{method, #{type => get, default => post}},
|
{method, #{type => get, default => post, desc => "HTTP method."}},
|
||||||
{headers, fun headers_no_content_type/1}
|
{headers, fun headers_no_content_type/1}
|
||||||
] ++ common_fields();
|
] ++ common_fields();
|
||||||
fields(post) ->
|
fields(post) ->
|
||||||
[
|
[
|
||||||
{method, #{type => post, default => post}},
|
{method, #{type => post, default => post, desc => "HTTP method."}},
|
||||||
{headers, fun headers/1}
|
{headers, fun headers/1}
|
||||||
] ++ common_fields().
|
] ++ common_fields().
|
||||||
|
|
||||||
|
desc(get) ->
|
||||||
|
"Settings for HTTP-based authentication (GET).";
|
||||||
|
desc(post) ->
|
||||||
|
"Settings for HTTP-based authentication (POST).";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
common_fields() ->
|
common_fields() ->
|
||||||
[
|
[
|
||||||
{mechanism, emqx_authn_schema:mechanism('password_based')},
|
{mechanism, emqx_authn_schema:mechanism('password_based')},
|
||||||
{backend, emqx_authn_schema:backend(http)},
|
{backend, emqx_authn_schema:backend(http)},
|
||||||
{url, fun url/1},
|
{url, fun url/1},
|
||||||
{body, map([{fuzzy, term(), binary()}])},
|
{body,
|
||||||
|
hoconsc:mk(map([{fuzzy, term(), binary()}]), #{desc => "Body of the HTTP request."})},
|
||||||
{request_timeout, fun request_timeout/1}
|
{request_timeout, fun request_timeout/1}
|
||||||
] ++ emqx_authn_schema:common_fields() ++
|
] ++ emqx_authn_schema:common_fields() ++
|
||||||
maps:to_list(
|
maps:to_list(
|
||||||
|
@ -90,12 +99,15 @@ validations() ->
|
||||||
].
|
].
|
||||||
|
|
||||||
url(type) -> binary();
|
url(type) -> binary();
|
||||||
|
url(desc) -> "URL of the auth server.";
|
||||||
url(validator) -> [?NOT_EMPTY("the value of the field 'url' cannot be empty")];
|
url(validator) -> [?NOT_EMPTY("the value of the field 'url' cannot be empty")];
|
||||||
url(required) -> true;
|
url(required) -> true;
|
||||||
url(_) -> undefined.
|
url(_) -> undefined.
|
||||||
|
|
||||||
headers(type) ->
|
headers(type) ->
|
||||||
map();
|
map();
|
||||||
|
headers(desc) ->
|
||||||
|
"List of HTTP headers.";
|
||||||
headers(converter) ->
|
headers(converter) ->
|
||||||
fun(Headers) ->
|
fun(Headers) ->
|
||||||
maps:merge(default_headers(), transform_header_name(Headers))
|
maps:merge(default_headers(), transform_header_name(Headers))
|
||||||
|
@ -107,6 +119,8 @@ headers(_) ->
|
||||||
|
|
||||||
headers_no_content_type(type) ->
|
headers_no_content_type(type) ->
|
||||||
map();
|
map();
|
||||||
|
headers_no_content_type(desc) ->
|
||||||
|
"List of HTTP headers.";
|
||||||
headers_no_content_type(converter) ->
|
headers_no_content_type(converter) ->
|
||||||
fun(Headers) ->
|
fun(Headers) ->
|
||||||
maps:merge(default_headers_no_content_type(), transform_header_name(Headers))
|
maps:merge(default_headers_no_content_type(), transform_header_name(Headers))
|
||||||
|
@ -117,6 +131,7 @@ headers_no_content_type(_) ->
|
||||||
undefined.
|
undefined.
|
||||||
|
|
||||||
request_timeout(type) -> emqx_schema:duration_ms();
|
request_timeout(type) -> emqx_schema:duration_ms();
|
||||||
|
request_timeout(desc) -> "HTTP request timeout";
|
||||||
request_timeout(default) -> <<"5s">>;
|
request_timeout(default) -> <<"5s">>;
|
||||||
request_timeout(_) -> undefined.
|
request_timeout(_) -> undefined.
|
||||||
|
|
||||||
|
|
|
@ -26,7 +26,8 @@
|
||||||
-export([
|
-export([
|
||||||
namespace/0,
|
namespace/0,
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1
|
fields/1,
|
||||||
|
desc/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -54,20 +55,20 @@ roots() ->
|
||||||
|
|
||||||
fields('hmac-based') ->
|
fields('hmac-based') ->
|
||||||
[
|
[
|
||||||
{use_jwks, {enum, [false]}},
|
{use_jwks, sc(hoconsc:enum([false]), #{desc => ""})},
|
||||||
{algorithm, {enum, ['hmac-based']}},
|
{algorithm, sc(hoconsc:enum(['hmac-based']), #{desc => "Signing algorithm."})},
|
||||||
{secret, fun secret/1},
|
{secret, fun secret/1},
|
||||||
{secret_base64_encoded, fun secret_base64_encoded/1}
|
{secret_base64_encoded, fun secret_base64_encoded/1}
|
||||||
] ++ common_fields();
|
] ++ common_fields();
|
||||||
fields('public-key') ->
|
fields('public-key') ->
|
||||||
[
|
[
|
||||||
{use_jwks, {enum, [false]}},
|
{use_jwks, sc(hoconsc:enum([false]), #{desc => ""})},
|
||||||
{algorithm, {enum, ['public-key']}},
|
{algorithm, sc(hoconsc:enum(['public-key']), #{desc => "Signing algorithm."})},
|
||||||
{certificate, fun certificate/1}
|
{certificate, fun certificate/1}
|
||||||
] ++ common_fields();
|
] ++ common_fields();
|
||||||
fields('jwks') ->
|
fields('jwks') ->
|
||||||
[
|
[
|
||||||
{use_jwks, {enum, [true]}},
|
{use_jwks, sc(hoconsc:enum([true]), #{desc => ""})},
|
||||||
{endpoint, fun endpoint/1},
|
{endpoint, fun endpoint/1},
|
||||||
{refresh_interval, fun refresh_interval/1},
|
{refresh_interval, fun refresh_interval/1},
|
||||||
{ssl, #{
|
{ssl, #{
|
||||||
|
@ -75,12 +76,13 @@ fields('jwks') ->
|
||||||
hoconsc:ref(?MODULE, ssl_enable),
|
hoconsc:ref(?MODULE, ssl_enable),
|
||||||
hoconsc:ref(?MODULE, ssl_disable)
|
hoconsc:ref(?MODULE, ssl_disable)
|
||||||
]),
|
]),
|
||||||
|
desc => "Enable/disable SSL.",
|
||||||
default => #{<<"enable">> => false}
|
default => #{<<"enable">> => false}
|
||||||
}}
|
}}
|
||||||
] ++ common_fields();
|
] ++ common_fields();
|
||||||
fields(ssl_enable) ->
|
fields(ssl_enable) ->
|
||||||
[
|
[
|
||||||
{enable, #{type => true}},
|
{enable, #{type => true, desc => ""}},
|
||||||
{cacertfile, fun cacertfile/1},
|
{cacertfile, fun cacertfile/1},
|
||||||
{certfile, fun certfile/1},
|
{certfile, fun certfile/1},
|
||||||
{keyfile, fun keyfile/1},
|
{keyfile, fun keyfile/1},
|
||||||
|
@ -88,7 +90,20 @@ fields(ssl_enable) ->
|
||||||
{server_name_indication, fun server_name_indication/1}
|
{server_name_indication, fun server_name_indication/1}
|
||||||
];
|
];
|
||||||
fields(ssl_disable) ->
|
fields(ssl_disable) ->
|
||||||
[{enable, #{type => false}}].
|
[{enable, #{type => false, desc => ""}}].
|
||||||
|
|
||||||
|
desc('hmac-based') ->
|
||||||
|
"Settings for HMAC-based token signing algorithm.";
|
||||||
|
desc('public-key') ->
|
||||||
|
"Settings for public key-based token signing algorithm.";
|
||||||
|
desc('jwks') ->
|
||||||
|
"Settings for a signing using JSON Web Key Set (JWKs).";
|
||||||
|
desc(ssl_disable) ->
|
||||||
|
"";
|
||||||
|
desc(ssl_enable) ->
|
||||||
|
"SSL configuration.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
common_fields() ->
|
common_fields() ->
|
||||||
[
|
[
|
||||||
|
@ -97,41 +112,53 @@ common_fields() ->
|
||||||
] ++ emqx_authn_schema:common_fields().
|
] ++ emqx_authn_schema:common_fields().
|
||||||
|
|
||||||
secret(type) -> binary();
|
secret(type) -> binary();
|
||||||
|
secret(desc) -> "The key to verify the JWT Token using HMAC algorithm.";
|
||||||
secret(_) -> undefined.
|
secret(_) -> undefined.
|
||||||
|
|
||||||
secret_base64_encoded(type) -> boolean();
|
secret_base64_encoded(type) -> boolean();
|
||||||
|
secret_base64_encoded(desc) -> "Enable/disable base64 encoding of the secret.";
|
||||||
secret_base64_encoded(default) -> false;
|
secret_base64_encoded(default) -> false;
|
||||||
secret_base64_encoded(_) -> undefined.
|
secret_base64_encoded(_) -> undefined.
|
||||||
|
|
||||||
certificate(type) -> string();
|
certificate(type) -> string();
|
||||||
|
certificate(desc) -> "The certificate used for signing the token.";
|
||||||
certificate(_) -> undefined.
|
certificate(_) -> undefined.
|
||||||
|
|
||||||
endpoint(type) -> string();
|
endpoint(type) -> string();
|
||||||
|
endpoint(desc) -> "JWKs endpoint.";
|
||||||
endpoint(_) -> undefined.
|
endpoint(_) -> undefined.
|
||||||
|
|
||||||
refresh_interval(type) -> integer();
|
refresh_interval(type) -> integer();
|
||||||
|
refresh_interval(desc) -> "JWKs refresh interval.";
|
||||||
refresh_interval(default) -> 300;
|
refresh_interval(default) -> 300;
|
||||||
refresh_interval(validator) -> [fun(I) -> I > 0 end];
|
refresh_interval(validator) -> [fun(I) -> I > 0 end];
|
||||||
refresh_interval(_) -> undefined.
|
refresh_interval(_) -> undefined.
|
||||||
|
|
||||||
cacertfile(type) -> string();
|
cacertfile(type) -> string();
|
||||||
|
cacertfile(desc) -> "Path to the SSL CA certificate file.";
|
||||||
cacertfile(_) -> undefined.
|
cacertfile(_) -> undefined.
|
||||||
|
|
||||||
certfile(type) -> string();
|
certfile(type) -> string();
|
||||||
|
certfile(desc) -> "Path to the SSL certificate file.";
|
||||||
certfile(_) -> undefined.
|
certfile(_) -> undefined.
|
||||||
|
|
||||||
keyfile(type) -> string();
|
keyfile(type) -> string();
|
||||||
|
keyfile(desc) -> "Path to the SSL secret key file.";
|
||||||
keyfile(_) -> undefined.
|
keyfile(_) -> undefined.
|
||||||
|
|
||||||
verify(type) -> hoconsc:enum([verify_peer, verify_none]);
|
verify(type) -> hoconsc:enum([verify_peer, verify_none]);
|
||||||
|
verify(desc) -> "Enable or disable SSL peer verification.";
|
||||||
verify(default) -> verify_none;
|
verify(default) -> verify_none;
|
||||||
verify(_) -> undefined.
|
verify(_) -> undefined.
|
||||||
|
|
||||||
server_name_indication(type) -> string();
|
server_name_indication(type) -> string();
|
||||||
|
server_name_indication(desc) -> "SSL SNI (Server Name Indication).";
|
||||||
server_name_indication(_) -> undefined.
|
server_name_indication(_) -> undefined.
|
||||||
|
|
||||||
verify_claims(type) ->
|
verify_claims(type) ->
|
||||||
list();
|
list();
|
||||||
|
verify_claims(desc) ->
|
||||||
|
"The list of claims to verify.";
|
||||||
verify_claims(default) ->
|
verify_claims(default) ->
|
||||||
#{};
|
#{};
|
||||||
verify_claims(validator) ->
|
verify_claims(validator) ->
|
||||||
|
@ -413,3 +440,5 @@ to_binary(A) when is_atom(A) ->
|
||||||
atom_to_binary(A);
|
atom_to_binary(A);
|
||||||
to_binary(B) when is_binary(B) ->
|
to_binary(B) when is_binary(B) ->
|
||||||
B.
|
B.
|
||||||
|
|
||||||
|
sc(Type, Meta) -> hoconsc:mk(Type, Meta).
|
||||||
|
|
|
@ -26,7 +26,8 @@
|
||||||
-export([
|
-export([
|
||||||
namespace/0,
|
namespace/0,
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1
|
fields/1,
|
||||||
|
desc/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -108,7 +109,13 @@ fields(?CONF_NS) ->
|
||||||
{password_hash_algorithm, fun emqx_authn_password_hashing:type_rw/1}
|
{password_hash_algorithm, fun emqx_authn_password_hashing:type_rw/1}
|
||||||
] ++ emqx_authn_schema:common_fields().
|
] ++ emqx_authn_schema:common_fields().
|
||||||
|
|
||||||
|
desc(?CONF_NS) ->
|
||||||
|
"Configuration for authentication using the built-in database.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
user_id_type(type) -> user_id_type();
|
user_id_type(type) -> user_id_type();
|
||||||
|
user_id_type(desc) -> "Authenticate by client ID or username.";
|
||||||
user_id_type(default) -> <<"username">>;
|
user_id_type(default) -> <<"username">>;
|
||||||
user_id_type(_) -> undefined.
|
user_id_type(_) -> undefined.
|
||||||
|
|
||||||
|
|
|
@ -26,7 +26,8 @@
|
||||||
-export([
|
-export([
|
||||||
namespace/0,
|
namespace/0,
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1
|
fields/1,
|
||||||
|
desc/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -55,10 +56,17 @@ fields(?CONF_NS) ->
|
||||||
] ++ emqx_authn_schema:common_fields() ++
|
] ++ emqx_authn_schema:common_fields() ++
|
||||||
emqx_connector_mysql:fields(config).
|
emqx_connector_mysql:fields(config).
|
||||||
|
|
||||||
|
desc(?CONF_NS) ->
|
||||||
|
"Configuration for authentication using MySQL database.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
query(type) -> string();
|
query(type) -> string();
|
||||||
|
query(desc) -> "SQL query used to lookup client data.";
|
||||||
query(_) -> undefined.
|
query(_) -> undefined.
|
||||||
|
|
||||||
query_timeout(type) -> emqx_schema:duration_ms();
|
query_timeout(type) -> emqx_schema:duration_ms();
|
||||||
|
query_timeout(desc) -> "Timeout for the SQL query.";
|
||||||
query_timeout(default) -> "5s";
|
query_timeout(default) -> "5s";
|
||||||
query_timeout(_) -> undefined.
|
query_timeout(_) -> undefined.
|
||||||
|
|
||||||
|
|
|
@ -27,7 +27,8 @@
|
||||||
-export([
|
-export([
|
||||||
namespace/0,
|
namespace/0,
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1
|
fields/1,
|
||||||
|
desc/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -61,7 +62,13 @@ fields(?CONF_NS) ->
|
||||||
emqx_authn_schema:common_fields() ++
|
emqx_authn_schema:common_fields() ++
|
||||||
proplists:delete(named_queries, emqx_connector_pgsql:fields(config)).
|
proplists:delete(named_queries, emqx_connector_pgsql:fields(config)).
|
||||||
|
|
||||||
|
desc(?CONF_NS) ->
|
||||||
|
"Configuration for PostgreSQL authentication backend.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
query(type) -> string();
|
query(type) -> string();
|
||||||
|
query(desc) -> "`SQL` query for looking up authentication data.";
|
||||||
query(_) -> undefined.
|
query(_) -> undefined.
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
|
@ -26,7 +26,8 @@
|
||||||
-export([
|
-export([
|
||||||
namespace/0,
|
namespace/0,
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1
|
fields/1,
|
||||||
|
desc/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -59,6 +60,15 @@ fields(cluster) ->
|
||||||
fields(sentinel) ->
|
fields(sentinel) ->
|
||||||
common_fields() ++ emqx_connector_redis:fields(sentinel).
|
common_fields() ++ emqx_connector_redis:fields(sentinel).
|
||||||
|
|
||||||
|
desc(standalone) ->
|
||||||
|
"Configuration for a standalone Redis instance.";
|
||||||
|
desc(cluster) ->
|
||||||
|
"Configuration for a Redis cluster.";
|
||||||
|
desc(sentinel) ->
|
||||||
|
"Configuration for a Redis Sentinel.";
|
||||||
|
desc(_) ->
|
||||||
|
"".
|
||||||
|
|
||||||
common_fields() ->
|
common_fields() ->
|
||||||
[
|
[
|
||||||
{mechanism, emqx_authn_schema:mechanism('password_based')},
|
{mechanism, emqx_authn_schema:mechanism('password_based')},
|
||||||
|
@ -68,6 +78,7 @@ common_fields() ->
|
||||||
] ++ emqx_authn_schema:common_fields().
|
] ++ emqx_authn_schema:common_fields().
|
||||||
|
|
||||||
cmd(type) -> string();
|
cmd(type) -> string();
|
||||||
|
cmd(desc) -> "Redis query.";
|
||||||
cmd(_) -> undefined.
|
cmd(_) -> undefined.
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
|
@ -31,7 +31,8 @@
|
||||||
namespace/0,
|
namespace/0,
|
||||||
roots/0,
|
roots/0,
|
||||||
fields/1,
|
fields/1,
|
||||||
validations/0
|
validations/0,
|
||||||
|
desc/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -95,10 +96,11 @@ fields("authorization") ->
|
||||||
];
|
];
|
||||||
fields(file) ->
|
fields(file) ->
|
||||||
[
|
[
|
||||||
{type, #{type => file}},
|
{type, #{type => file, desc => "Backend type."}},
|
||||||
{enable, #{
|
{enable, #{
|
||||||
type => boolean(),
|
type => boolean(),
|
||||||
default => true
|
default => true,
|
||||||
|
desc => "Enable this backend."
|
||||||
}},
|
}},
|
||||||
{path, #{
|
{path, #{
|
||||||
type => string(),
|
type => string(),
|
||||||
|
@ -116,20 +118,21 @@ fields(file) ->
|
||||||
];
|
];
|
||||||
fields(http_get) ->
|
fields(http_get) ->
|
||||||
[
|
[
|
||||||
{method, #{type => get, default => post}},
|
{method, #{type => get, default => get, desc => "HTTP method."}},
|
||||||
{headers, fun headers_no_content_type/1}
|
{headers, fun headers_no_content_type/1}
|
||||||
] ++ http_common_fields();
|
] ++ http_common_fields();
|
||||||
fields(http_post) ->
|
fields(http_post) ->
|
||||||
[
|
[
|
||||||
{method, #{type => post, default => post}},
|
{method, #{type => post, default => post, desc => "HTTP method."}},
|
||||||
{headers, fun headers/1}
|
{headers, fun headers/1}
|
||||||
] ++ http_common_fields();
|
] ++ http_common_fields();
|
||||||
fields(mnesia) ->
|
fields(mnesia) ->
|
||||||
[
|
[
|
||||||
{type, #{type => 'built_in_database'}},
|
{type, #{type => 'built_in_database', desc => "Backend type."}},
|
||||||
{enable, #{
|
{enable, #{
|
||||||
type => boolean(),
|
type => boolean(),
|
||||||
default => true
|
default => true,
|
||||||
|
desc => "Enable this backend."
|
||||||
}}
|
}}
|
||||||
];
|
];
|
||||||
fields(mongo_single) ->
|
fields(mongo_single) ->
|
||||||
|
@ -144,9 +147,10 @@ fields(mysql) ->
|
||||||
fields(postgresql) ->
|
fields(postgresql) ->
|
||||||
[
|
[
|
||||||
{query, query()},
|
{query, query()},
|
||||||
{type, #{type => postgresql}},
|
{type, #{type => postgresql, desc => "Backend type."}},
|
||||||
{enable, #{
|
{enable, #{
|
||||||
type => boolean(),
|
type => boolean(),
|
||||||
|
desc => "Enable this backend.",
|
||||||
default => true
|
default => true
|
||||||
}}
|
}}
|
||||||
] ++ emqx_connector_pgsql:fields(config);
|
] ++ emqx_connector_pgsql:fields(config);
|
||||||
|
@ -160,6 +164,35 @@ fields(redis_cluster) ->
|
||||||
connector_fields(redis, cluster) ++
|
connector_fields(redis, cluster) ++
|
||||||
[{cmd, query()}].
|
[{cmd, query()}].
|
||||||
|
|
||||||
|
desc("authorization") ->
|
||||||
|
"Configuration related to the client authorization.";
|
||||||
|
desc(file) ->
|
||||||
|
"Authorization using a static file.";
|
||||||
|
desc(http_get) ->
|
||||||
|
"Authorization using an external HTTP server (via GET requests).";
|
||||||
|
desc(http_post) ->
|
||||||
|
"Authorization using an external HTTP server (via POST requests).";
|
||||||
|
desc(mnesia) ->
|
||||||
|
"Authorization using a built-in database (mnesia).";
|
||||||
|
desc(mongo_single) ->
|
||||||
|
"Authorization using a single MongoDB instance.";
|
||||||
|
desc(mongo_rs) ->
|
||||||
|
"Authorization using a MongoDB replica set.";
|
||||||
|
desc(mongo_sharded) ->
|
||||||
|
"Authorization using a sharded MongoDB cluster.";
|
||||||
|
desc(mysql) ->
|
||||||
|
"Authorization using a MySQL database.";
|
||||||
|
desc(postgresql) ->
|
||||||
|
"Authorization using a PostgreSQL database.";
|
||||||
|
desc(redis_single) ->
|
||||||
|
"Authorization using a single Redis instance.";
|
||||||
|
desc(redis_sentinel) ->
|
||||||
|
"Authorization using a Redis Sentinel.";
|
||||||
|
desc(redis_cluster) ->
|
||||||
|
"Authorization using a Redis cluster.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
http_common_fields() ->
|
http_common_fields() ->
|
||||||
[
|
[
|
||||||
{url, fun url/1},
|
{url, fun url/1},
|
||||||
|
@ -301,7 +334,7 @@ union_array(Item) when is_list(Item) ->
|
||||||
query() ->
|
query() ->
|
||||||
#{
|
#{
|
||||||
type => binary(),
|
type => binary(),
|
||||||
desc => "",
|
desc => "Database query used to retrieve authorization data.",
|
||||||
validator => fun(S) ->
|
validator => fun(S) ->
|
||||||
case size(S) > 0 of
|
case size(S) > 0 of
|
||||||
true -> ok;
|
true -> ok;
|
||||||
|
|
|
@ -22,7 +22,8 @@
|
||||||
|
|
||||||
-export([ namespace/0
|
-export([ namespace/0
|
||||||
, roots/0
|
, roots/0
|
||||||
, fields/1]).
|
, fields/1
|
||||||
|
, desc/1]).
|
||||||
|
|
||||||
namespace() -> "auto_subscribe".
|
namespace() -> "auto_subscribe".
|
||||||
|
|
||||||
|
@ -30,7 +31,8 @@ roots() ->
|
||||||
["auto_subscribe"].
|
["auto_subscribe"].
|
||||||
|
|
||||||
fields("auto_subscribe") ->
|
fields("auto_subscribe") ->
|
||||||
[ {topics, hoconsc:array(hoconsc:ref(?MODULE, "topic"))}
|
[ {topics, hoconsc:mk(hoconsc:array(hoconsc:ref(?MODULE, "topic")),
|
||||||
|
#{desc => "List of auto-subscribe topics."})}
|
||||||
];
|
];
|
||||||
|
|
||||||
fields("topic") ->
|
fields("topic") ->
|
||||||
|
@ -52,6 +54,13 @@ fields("topic") ->
|
||||||
desc => "Not local. MQTT 5.0 definition."})}
|
desc => "Not local. MQTT 5.0 definition."})}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
desc("auto_subscribe") ->
|
||||||
|
"Configuration for `auto_subscribe` feature.";
|
||||||
|
desc("topic") ->
|
||||||
|
"";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
topic_example() ->
|
topic_example() ->
|
||||||
<<"/clientid/", ?PH_S_CLIENTID,
|
<<"/clientid/", ?PH_S_CLIENTID,
|
||||||
"/username/", ?PH_S_USERNAME,
|
"/username/", ?PH_S_USERNAME,
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
|
|
||||||
-import(hoconsc, [mk/2, enum/1]).
|
-import(hoconsc, [mk/2, enum/1]).
|
||||||
|
|
||||||
-export([roots/0, fields/1, namespace/0]).
|
-export([roots/0, fields/1, namespace/0, desc/1]).
|
||||||
|
|
||||||
%%======================================================================================
|
%%======================================================================================
|
||||||
%% Hocon Schema Definitions
|
%% Hocon Schema Definitions
|
||||||
|
@ -16,30 +16,30 @@ fields("config") ->
|
||||||
basic_config() ++
|
basic_config() ++
|
||||||
[ {url, mk(binary(),
|
[ {url, mk(binary(),
|
||||||
#{ required => true
|
#{ required => true
|
||||||
, desc =>"""
|
, desc =>"
|
||||||
The URL of the HTTP Bridge.<br>
|
The URL of the HTTP Bridge.<br>
|
||||||
Template with variables is allowed in the path, but variables cannot be used in the scheme, host,
|
Template with variables is allowed in the path, but variables cannot be used in the scheme, host,
|
||||||
or port part.<br>
|
or port part.<br>
|
||||||
For example, <code> http://localhost:9901/${topic} </code> is allowed, but
|
For example, <code> http://localhost:9901/${topic} </code> is allowed, but
|
||||||
<code> http://${host}:9901/message </code> or <code> http://localhost:${port}/message </code>
|
<code> http://${host}:9901/message </code> or <code> http://localhost:${port}/message </code>
|
||||||
is not allowed.
|
is not allowed.
|
||||||
"""
|
"
|
||||||
})}
|
})}
|
||||||
, {local_topic, mk(binary(),
|
, {local_topic, mk(binary(),
|
||||||
#{ desc =>"""
|
#{ desc =>"
|
||||||
The MQTT topic filter to be forwarded to the HTTP server. All MQTT 'PUBLISH' messages with the topic
|
The MQTT topic filter to be forwarded to the HTTP server. All MQTT 'PUBLISH' messages with the topic
|
||||||
matching the local_topic will be forwarded.<br/>
|
matching the local_topic will be forwarded.<br/>
|
||||||
NOTE: if this bridge is used as the output of a rule (EMQX rule engine), and also local_topic is
|
NOTE: if this bridge is used as the output of a rule (EMQX rule engine), and also local_topic is
|
||||||
configured, then both the data got from the rule and the MQTT messages that match local_topic
|
configured, then both the data got from the rule and the MQTT messages that match local_topic
|
||||||
will be forwarded.
|
will be forwarded.
|
||||||
"""
|
"
|
||||||
})}
|
})}
|
||||||
, {method, mk(method(),
|
, {method, mk(method(),
|
||||||
#{ default => post
|
#{ default => post
|
||||||
, desc =>"""
|
, desc =>"
|
||||||
The method of the HTTP request. All the available methods are: post, put, get, delete.<br>
|
The method of the HTTP request. All the available methods are: post, put, get, delete.<br>
|
||||||
Template with variables is allowed.<br>
|
Template with variables is allowed.<br>
|
||||||
"""
|
"
|
||||||
})}
|
})}
|
||||||
, {headers, mk(map(),
|
, {headers, mk(map(),
|
||||||
#{ default => #{
|
#{ default => #{
|
||||||
|
@ -48,24 +48,22 @@ Template with variables is allowed.<br>
|
||||||
<<"connection">> => <<"keep-alive">>,
|
<<"connection">> => <<"keep-alive">>,
|
||||||
<<"content-type">> => <<"application/json">>,
|
<<"content-type">> => <<"application/json">>,
|
||||||
<<"keep-alive">> => <<"timeout=5">>}
|
<<"keep-alive">> => <<"timeout=5">>}
|
||||||
, desc =>"""
|
, desc =>"
|
||||||
The headers of the HTTP request.<br>
|
The headers of the HTTP request.<br>
|
||||||
Template with variables is allowed.
|
Template with variables is allowed.
|
||||||
"""
|
"
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
, {body, mk(binary(),
|
, {body, mk(binary(),
|
||||||
#{ default => <<"${payload}">>
|
#{ default => <<"${payload}">>
|
||||||
, desc =>"""
|
, desc =>"
|
||||||
The body of the HTTP request.<br>
|
The body of the HTTP request.<br>
|
||||||
Template with variables is allowed.
|
Template with variables is allowed.
|
||||||
"""
|
"
|
||||||
})}
|
})}
|
||||||
, {request_timeout, mk(emqx_schema:duration_ms(),
|
, {request_timeout, mk(emqx_schema:duration_ms(),
|
||||||
#{ default => <<"15s">>
|
#{ default => <<"15s">>
|
||||||
, desc =>"""
|
, desc => "HTTP request timeout."
|
||||||
How long will the HTTP request timeout.
|
|
||||||
"""
|
|
||||||
})}
|
})}
|
||||||
];
|
];
|
||||||
|
|
||||||
|
@ -80,6 +78,13 @@ fields("put") ->
|
||||||
fields("get") ->
|
fields("get") ->
|
||||||
emqx_bridge_schema:metrics_status_fields() ++ fields("post").
|
emqx_bridge_schema:metrics_status_fields() ++ fields("post").
|
||||||
|
|
||||||
|
desc("config") ->
|
||||||
|
"Configuration for an HTTP bridge.";
|
||||||
|
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
|
||||||
|
["Configuration for HTTP bridge using `", string:to_upper(Method), "` method."];
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
basic_config() ->
|
basic_config() ->
|
||||||
[ {enable,
|
[ {enable,
|
||||||
mk(boolean(),
|
mk(boolean(),
|
||||||
|
|
|
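The `"""` markers above are dropped in favour of a single pair of quotes, so each long description becomes one ordinary Erlang string literal spanning several lines; the shorter descriptions elsewhere in this PR instead rely on adjacent string literals, which the compiler concatenates. A sketch of both forms, with hypothetical function names:

%% One multi-line literal...
url_desc() ->
    "
The URL of the HTTP Bridge.<br>
Template with variables is allowed in the path.
".

%% ...or adjacent literals, joined into a single string at compile time.
headers_desc() ->
    "The headers of the HTTP request. "
    "Template with variables is allowed.".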
@ -4,7 +4,7 @@
|
||||||
|
|
||||||
-import(hoconsc, [mk/2]).
|
-import(hoconsc, [mk/2]).
|
||||||
|
|
||||||
-export([roots/0, fields/1]).
|
-export([roots/0, fields/1, desc/1]).
|
||||||
|
|
||||||
%%======================================================================================
|
%%======================================================================================
|
||||||
%% Hocon Schema Definitions
|
%% Hocon Schema Definitions
|
||||||
|
@ -41,6 +41,11 @@ fields("get_ingress") ->
|
||||||
fields("get_egress") ->
|
fields("get_egress") ->
|
||||||
emqx_bridge_schema:metrics_status_fields() ++ fields("post_egress").
|
emqx_bridge_schema:metrics_status_fields() ++ fields("post_egress").
|
||||||
|
|
||||||
|
desc(Rec) when Rec =:= "ingress"; Rec =:= "egress" ->
|
||||||
|
"Configuration for MQTT bridge.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
%%======================================================================================
|
%%======================================================================================
|
||||||
type_field() ->
|
type_field() ->
|
||||||
{type, mk(mqtt,
|
{type, mk(mqtt,
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
|
|
||||||
-import(hoconsc, [mk/2, ref/2]).
|
-import(hoconsc, [mk/2, ref/2]).
|
||||||
|
|
||||||
-export([roots/0, fields/1, namespace/0]).
|
-export([roots/0, fields/1, desc/1, namespace/0]).
|
||||||
|
|
||||||
-export([ get_response/0
|
-export([ get_response/0
|
||||||
, put_request/0
|
, put_request/0
|
||||||
|
@ -86,11 +86,12 @@ namespace() -> "bridge".
|
||||||
roots() -> [bridges].
|
roots() -> [bridges].
|
||||||
|
|
||||||
fields(bridges) ->
|
fields(bridges) ->
|
||||||
[{http, mk(hoconsc:map(name, ref(emqx_bridge_http_schema, "config")), #{})}]
|
[{http, mk(hoconsc:map(name, ref(emqx_bridge_http_schema, "config")),
|
||||||
++ [{T, mk(hoconsc:map(name, hoconsc:union([
|
#{desc => "HTTP bridges to an HTTP server."})}]
|
||||||
ref(schema_mod(T), "ingress"),
|
++ [{T, mk(hoconsc:map(name, hoconsc:union([ ref(schema_mod(T), "ingress")
|
||||||
ref(schema_mod(T), "egress")
|
, ref(schema_mod(T), "egress")
|
||||||
])), #{})} || T <- ?CONN_TYPES];
|
])),
|
||||||
|
#{desc => "MQTT bridges to/from another MQTT broker"})} || T <- ?CONN_TYPES];
|
||||||
|
|
||||||
fields("metrics") ->
|
fields("metrics") ->
|
||||||
[ {"matched", mk(integer(), #{desc => "Count of this bridge is queried"})}
|
[ {"matched", mk(integer(), #{desc => "Count of this bridge is queried"})}
|
||||||
|
@ -112,11 +113,22 @@ fields("node_status") ->
|
||||||
, {"status", mk(status(), #{})}
|
, {"status", mk(status(), #{})}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
desc(bridges) ->
|
||||||
|
"Configuration for MQTT bridges.";
|
||||||
|
desc("metrics") ->
|
||||||
|
"Bridge metrics.";
|
||||||
|
desc("node_metrics") ->
|
||||||
|
"Node metrics.";
|
||||||
|
desc("node_status") ->
|
||||||
|
"Node status.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
status() ->
|
status() ->
|
||||||
hoconsc:enum([connected, disconnected, connecting]).
|
hoconsc:enum([connected, disconnected, connecting]).
|
||||||
|
|
||||||
node_name() ->
|
node_name() ->
|
||||||
{"node", mk(binary(), #{desc => "The node name", example => "emqx@127.0.0.1"})}.
|
{"node", mk(binary(), #{desc => "The node name.", example => "emqx@127.0.0.1"})}.
|
||||||
|
|
||||||
schema_mod(Type) ->
|
schema_mod(Type) ->
|
||||||
list_to_atom(lists:concat(["emqx_bridge_", Type, "_schema"])).
|
list_to_atom(lists:concat(["emqx_bridge_", Type, "_schema"])).
|
||||||
|
|
|
@ -134,6 +134,7 @@ fields("cluster") ->
|
||||||
#{ mapping => "ekka.proto_dist"
|
#{ mapping => "ekka.proto_dist"
|
||||||
, default => inet_tcp
|
, default => inet_tcp
|
||||||
, 'readOnly' => true
|
, 'readOnly' => true
|
||||||
|
, desc => "The Erlang distribution protocol for the cluster."
|
||||||
})}
|
})}
|
||||||
, {"static",
|
, {"static",
|
||||||
sc(ref(cluster_static),
|
sc(ref(cluster_static),
|
||||||
|
|
|
@ -36,6 +36,7 @@
|
||||||
|
|
||||||
-export([ roots/0
|
-export([ roots/0
|
||||||
, fields/1
|
, fields/1
|
||||||
|
, desc/1
|
||||||
, validations/0
|
, validations/0
|
||||||
, namespace/0
|
, namespace/0
|
||||||
]).
|
]).
|
||||||
|
@ -66,66 +67,73 @@ fields(config) ->
|
||||||
{error, "There must be no query in the base_url"};
|
{error, "There must be no query in the base_url"};
|
||||||
(_) -> ok
|
(_) -> ok
|
||||||
end
|
end
|
||||||
, desc => """
|
, desc => "
|
||||||
The base URL is the URL includes only the scheme, host and port.<br>
|
The base URL only includes the scheme, host and port.<br/>
|
||||||
When send an HTTP request, the real URL to be used is the concatenation of the base URL and the
|
When sending an HTTP request, the real URL used is the concatenation of the base URL and the
|
||||||
path parameter (passed by the emqx_resource:query/2,3 or provided by the request parameter).<br>
|
path parameter (passed by the emqx_resource:query/2,3 or provided by the request parameter).<br/>
|
||||||
For example: http://localhost:9901/
|
For example: `http://localhost:9901/`
|
||||||
"""
|
"
|
||||||
})}
|
})}
|
||||||
, {connect_timeout,
|
, {connect_timeout,
|
||||||
sc(emqx_schema:duration_ms(),
|
sc(emqx_schema:duration_ms(),
|
||||||
#{ default => "15s"
|
#{ default => "15s"
|
||||||
, desc => "The timeout when connecting to the HTTP server"
|
, desc => "The timeout when connecting to the HTTP server."
|
||||||
})}
|
})}
|
||||||
, {max_retries,
|
, {max_retries,
|
||||||
sc(non_neg_integer(),
|
sc(non_neg_integer(),
|
||||||
#{ default => 5
|
#{ default => 5
|
||||||
, desc => "Max retry times if error on sending request"
|
, desc => "Max retry times if error on sending request."
|
||||||
})}
|
})}
|
||||||
, {retry_interval,
|
, {retry_interval,
|
||||||
sc(emqx_schema:duration(),
|
sc(emqx_schema:duration(),
|
||||||
#{ default => "1s"
|
#{ default => "1s"
|
||||||
, desc => "Interval before next retry if error on sending request"
|
, desc => "Interval between retries."
|
||||||
})}
|
})}
|
||||||
, {pool_type,
|
, {pool_type,
|
||||||
sc(pool_type(),
|
sc(pool_type(),
|
||||||
#{ default => random
|
#{ default => random
|
||||||
, desc => "The type of the pool. Can be one of random, hash"
|
, desc => "The type of the pool. Can be one of `random`, `hash`."
|
||||||
})}
|
})}
|
||||||
, {pool_size,
|
, {pool_size,
|
||||||
sc(non_neg_integer(),
|
sc(non_neg_integer(),
|
||||||
#{ default => 8
|
#{ default => 8
|
||||||
, desc => "The pool size"
|
, desc => "The pool size."
|
||||||
})}
|
})}
|
||||||
, {enable_pipelining,
|
, {enable_pipelining,
|
||||||
sc(boolean(),
|
sc(boolean(),
|
||||||
#{ default => true
|
#{ default => true
|
||||||
, desc => "Enable the HTTP pipeline"
|
, desc => "Enable the HTTP pipeline."
|
||||||
})}
|
})}
|
||||||
, {request, hoconsc:mk(
|
, {request, hoconsc:mk(
|
||||||
ref("request"),
|
ref("request"),
|
||||||
#{ default => undefined
|
#{ default => undefined
|
||||||
, required => false
|
, required => false
|
||||||
, desc => """
|
, desc => "
|
||||||
If the request is provided, the caller can send HTTP requests via
|
If the request is provided, the caller can send HTTP requests via
|
||||||
<code>emqx_resource:query(ResourceId, {send_message, BridgeId, Message})</code>
|
<code>emqx_resource:query(ResourceId, {send_message, BridgeId, Message})</code>
|
||||||
"""
|
"
|
||||||
})}
|
})}
|
||||||
] ++ emqx_connector_schema_lib:ssl_fields();
|
] ++ emqx_connector_schema_lib:ssl_fields();
|
||||||
|
|
||||||
fields("request") ->
|
fields("request") ->
|
||||||
[ {method, hoconsc:mk(hoconsc:enum([post, put, get, delete]), #{required => false})}
|
[ {method, hoconsc:mk(hoconsc:enum([post, put, get, delete]), #{required => false, desc => "HTTP method."})}
|
||||||
, {path, hoconsc:mk(binary(), #{required => false})}
|
, {path, hoconsc:mk(binary(), #{required => false, desc => "URL path."})}
|
||||||
, {body, hoconsc:mk(binary(), #{required => false})}
|
, {body, hoconsc:mk(binary(), #{required => false, desc => "HTTP request body."})}
|
||||||
, {headers, hoconsc:mk(map(), #{required => false})}
|
, {headers, hoconsc:mk(map(), #{required => false, desc => "List of HTTP headers."})}
|
||||||
, {request_timeout,
|
, {request_timeout,
|
||||||
sc(emqx_schema:duration_ms(),
|
sc(emqx_schema:duration_ms(),
|
||||||
#{ required => false
|
#{ required => false
|
||||||
, desc => "The timeout when sending request to the HTTP server"
|
, desc => "HTTP request timeout."
|
||||||
})}
|
})}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
desc(config) ->
|
||||||
|
"";
|
||||||
|
desc("request") ->
|
||||||
|
"";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
validations() ->
|
validations() ->
|
||||||
[ {check_ssl_opts, fun check_ssl_opts/1} ].
|
[ {check_ssl_opts, fun check_ssl_opts/1} ].
|
||||||
|
|
||||||
|
|
|
@ -32,7 +32,7 @@
|
||||||
%% ecpool callback
|
%% ecpool callback
|
||||||
-export([connect/1]).
|
-export([connect/1]).
|
||||||
|
|
||||||
-export([roots/0, fields/1]).
|
-export([roots/0, fields/1, desc/1]).
|
||||||
|
|
||||||
-export([mongo_query/5, check_worker_health/1]).
|
-export([mongo_query/5, check_worker_health/1]).
|
||||||
|
|
||||||
|
@ -89,18 +89,33 @@ fields(topology) ->
|
||||||
, {min_heartbeat_frequency_ms, fun duration/1}
|
, {min_heartbeat_frequency_ms, fun duration/1}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
desc(single) ->
|
||||||
|
"Settings for a single MongoDB instance.";
|
||||||
|
desc(rs) ->
|
||||||
|
"Settings for replica set.";
|
||||||
|
desc(sharded) ->
|
||||||
|
"Settings for sharded cluster.";
|
||||||
|
desc(topology) ->
|
||||||
|
"Topology of MongoDB.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
mongo_fields() ->
|
mongo_fields() ->
|
||||||
[ {srv_record, fun srv_record/1}
|
[ {srv_record, fun srv_record/1}
|
||||||
, {pool_size, fun emqx_connector_schema_lib:pool_size/1}
|
, {pool_size, fun emqx_connector_schema_lib:pool_size/1}
|
||||||
, {username, fun emqx_connector_schema_lib:username/1}
|
, {username, fun emqx_connector_schema_lib:username/1}
|
||||||
, {password, fun emqx_connector_schema_lib:password/1}
|
, {password, fun emqx_connector_schema_lib:password/1}
|
||||||
, {auth_source, #{type => binary(), required => false}}
|
, {auth_source, #{ type => binary()
|
||||||
|
, required => false
|
||||||
|
, desc => "Database name associated with the user's credentials."
|
||||||
|
}}
|
||||||
, {database, fun emqx_connector_schema_lib:database/1}
|
, {database, fun emqx_connector_schema_lib:database/1}
|
||||||
, {topology, #{type => hoconsc:ref(?MODULE, topology), required => false}}
|
, {topology, #{type => hoconsc:ref(?MODULE, topology), required => false}}
|
||||||
] ++
|
] ++
|
||||||
emqx_connector_schema_lib:ssl_fields().
|
emqx_connector_schema_lib:ssl_fields().
|
||||||
|
|
||||||
internal_pool_size(type) -> integer();
|
internal_pool_size(type) -> integer();
|
||||||
|
internal_pool_size(desc) -> "Pool size on start.";
|
||||||
internal_pool_size(default) -> 1;
|
internal_pool_size(default) -> 1;
|
||||||
internal_pool_size(validator) -> [?MIN(1)];
|
internal_pool_size(validator) -> [?MIN(1)];
|
||||||
internal_pool_size(_) -> undefined.
|
internal_pool_size(_) -> undefined.
|
||||||
|
|
|
@ -56,6 +56,7 @@ fields(config) ->
|
||||||
emqx_connector_schema_lib:ssl_fields().
|
emqx_connector_schema_lib:ssl_fields().
|
||||||
|
|
||||||
named_queries(type) -> map();
|
named_queries(type) -> map();
|
||||||
|
named_queries(desc) -> "Key-value list of prepared SQL statements.";
|
||||||
named_queries(required) -> false;
|
named_queries(required) -> false;
|
||||||
named_queries(_) -> undefined.
|
named_queries(_) -> undefined.
|
||||||
|
|
||||||
|
|
|
@ -55,22 +55,29 @@ roots() ->
|
||||||
fields(single) ->
|
fields(single) ->
|
||||||
[ {server, fun server/1}
|
[ {server, fun server/1}
|
||||||
, {redis_type, #{type => hoconsc:enum([single]),
|
, {redis_type, #{type => hoconsc:enum([single]),
|
||||||
default => single}}
|
default => single,
|
||||||
|
desc => "Redis type."
|
||||||
|
}}
|
||||||
] ++
|
] ++
|
||||||
redis_fields() ++
|
redis_fields() ++
|
||||||
emqx_connector_schema_lib:ssl_fields();
|
emqx_connector_schema_lib:ssl_fields();
|
||||||
fields(cluster) ->
|
fields(cluster) ->
|
||||||
[ {servers, fun servers/1}
|
[ {servers, fun servers/1}
|
||||||
, {redis_type, #{type => hoconsc:enum([cluster]),
|
, {redis_type, #{type => hoconsc:enum([cluster]),
|
||||||
default => cluster}}
|
default => cluster,
|
||||||
|
desc => "Redis type."
|
||||||
|
}}
|
||||||
] ++
|
] ++
|
||||||
redis_fields() ++
|
redis_fields() ++
|
||||||
emqx_connector_schema_lib:ssl_fields();
|
emqx_connector_schema_lib:ssl_fields();
|
||||||
fields(sentinel) ->
|
fields(sentinel) ->
|
||||||
[ {servers, fun servers/1}
|
[ {servers, fun servers/1}
|
||||||
, {redis_type, #{type => hoconsc:enum([sentinel]),
|
, {redis_type, #{type => hoconsc:enum([sentinel]),
|
||||||
default => sentinel}}
|
default => sentinel,
|
||||||
, {sentinel, #{type => string()}}
|
desc => "Redis type."
|
||||||
|
}}
|
||||||
|
, {sentinel, #{type => string(), desc => "The cluster name in Redis sentinel mode."
|
||||||
|
}}
|
||||||
] ++
|
] ++
|
||||||
redis_fields() ++
|
redis_fields() ++
|
||||||
emqx_connector_schema_lib:ssl_fields().
|
emqx_connector_schema_lib:ssl_fields().
|
||||||
|
@ -203,7 +210,9 @@ redis_fields() ->
|
||||||
[ {pool_size, fun emqx_connector_schema_lib:pool_size/1}
|
[ {pool_size, fun emqx_connector_schema_lib:pool_size/1}
|
||||||
, {password, fun emqx_connector_schema_lib:password/1}
|
, {password, fun emqx_connector_schema_lib:password/1}
|
||||||
, {database, #{type => integer(),
|
, {database, #{type => integer(),
|
||||||
default => 0}}
|
default => 0,
|
||||||
|
desc => "Redis database ID."
|
||||||
|
}}
|
||||||
, {auto_reconnect, fun emqx_connector_schema_lib:auto_reconnect/1}
|
, {auto_reconnect, fun emqx_connector_schema_lib:auto_reconnect/1}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
|
|
@ -21,7 +21,7 @@
|
||||||
|
|
||||||
-import(hoconsc, [mk/2, ref/2]).
|
-import(hoconsc, [mk/2, ref/2]).
|
||||||
|
|
||||||
-export([roots/0, fields/1]).
|
-export([roots/0, fields/1, desc/1]).
|
||||||
|
|
||||||
-export([ get_response/0
|
-export([ get_response/0
|
||||||
, put_request/0
|
, put_request/0
|
||||||
|
@ -62,5 +62,13 @@ fields("connectors") ->
|
||||||
})}
|
})}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
desc(Record) when Record =:= connectors;
|
||||||
|
Record =:= "connectors" ->
|
||||||
|
"Configuration for EMQX connectors.<br/>"
|
||||||
|
"A connector maintains the data related to the external resources,\n"
|
||||||
|
"such as MySQL database.";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
schema_mod(Type) ->
|
schema_mod(Type) ->
|
||||||
list_to_atom(lists:concat(["emqx_connector_", Type])).
|
list_to_atom(lists:concat(["emqx_connector_", Type])).
|
||||||
|
|
|
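The connector schema's `desc/1` accepts the struct name either as the atom `connectors` or as the string `"connectors"`, because callers may pass either form; a guard with two alternatives covers both. The same shape, sketched for a hypothetical struct name:

%% Match the struct name whether it arrives as an atom or a string.
desc(Name) when Name =:= my_struct; Name =:= "my_struct" ->
    "Description of the hypothetical my_struct record.";
desc(_) ->
    undefined.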
@ -190,6 +190,8 @@ the memory cache reaches 'seg_bytes'.
|
||||||
})}
|
})}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
desc("connector") ->
|
||||||
|
"Generic configuration for the connector.";
|
||||||
desc("ingress") ->
|
desc("ingress") ->
|
||||||
ingress_desc();
|
ingress_desc();
|
||||||
desc("egress") ->
|
desc("egress") ->
|
||||||
|
|
|
@ -19,7 +19,9 @@
|
||||||
|
|
||||||
-export([ roots/0
|
-export([ roots/0
|
||||||
, fields/1
|
, fields/1
|
||||||
,namespace/0]).
|
, namespace/0
|
||||||
|
, desc/1
|
||||||
|
]).
|
||||||
|
|
||||||
namespace() -> <<"dashboard">>.
|
namespace() -> <<"dashboard">>.
|
||||||
roots() -> ["dashboard"].
|
roots() -> ["dashboard"].
|
||||||
|
@ -95,6 +97,15 @@ fields("https") ->
|
||||||
proplists:delete("fail_if_no_peer_cert",
|
proplists:delete("fail_if_no_peer_cert",
|
||||||
emqx_schema:server_ssl_opts_schema(#{}, true)).
|
emqx_schema:server_ssl_opts_schema(#{}, true)).
|
||||||
|
|
||||||
|
desc("dashboard") ->
|
||||||
|
"Configuration for EMQX dashboard.";
|
||||||
|
desc("http") ->
|
||||||
|
"Configuration for the dashboard listener (plaintext).";
|
||||||
|
desc("https") ->
|
||||||
|
"Configuration for the dashboard listener (TLS).";
|
||||||
|
desc(_) ->
|
||||||
|
undefined.
|
||||||
|
|
||||||
bind(type) -> hoconsc:union([non_neg_integer(), emqx_schema:ip_port()]);
|
bind(type) -> hoconsc:union([non_neg_integer(), emqx_schema:ip_port()]);
|
||||||
bind(default) -> 18083;
|
bind(default) -> 18083;
|
||||||
bind(required) -> true;
|
bind(required) -> true;
|
||||||
|
|
|
@ -33,16 +33,17 @@ roots() ->
     [
         "delayed",
         "telemetry",
-        array("rewrite"),
-        array("topic_metrics")
+        array("rewrite", #{desc => "List of topic rewrite rules."}),
+        array("topic_metrics", #{desc => "List of topics whose metrics are reported."})
     ].
 
 fields("telemetry") ->
-    [{enable, hoconsc:mk(boolean(), #{default => false})}];
+    [{enable, hoconsc:mk(boolean(), #{default => false, desc => "Enable telemetry."})}];
 fields("delayed") ->
     [
-        {enable, hoconsc:mk(boolean(), #{default => false})},
-        {max_delayed_messages, sc(integer(), #{})}
+        {enable, hoconsc:mk(boolean(), #{default => false, desc => "Enable `delayed` module."})},
+        {max_delayed_messages,
+            sc(integer(), #{desc => "Maximum number of delayed messages (0 is no limit)."})}
     ];
 fields("rewrite") ->
     [
@ -64,16 +65,16 @@ fields("rewrite") ->
         {re, fun regular_expression/1}
     ];
 fields("topic_metrics") ->
-    [{topic, sc(binary(), #{})}].
+    [{topic, sc(binary(), #{desc => "Collect metrics for the topic."})}].
 
 desc("telemetry") ->
     "Settings for the telemetry module.";
 desc("delayed") ->
     "Settings for the delayed module.";
 desc("rewrite") ->
-    "Settings for the rewrite module.";
+    "Rewrite rule.";
 desc("topic_metrics") ->
-    "Settings for the topic metrics module.";
+    "";
 desc(_) ->
     undefined.
@ -89,6 +90,6 @@ is_re(Bin) ->
         {error, Reason} -> {error, {Bin, Reason}}
     end.
 
-array(Name) -> {Name, hoconsc:array(hoconsc:ref(?MODULE, Name))}.
+array(Name, Meta) -> {Name, hoconsc:mk(hoconsc:array(hoconsc:ref(?MODULE, Name)), Meta)}.
 
 sc(Type, Meta) -> hoconsc:mk(Type, Meta).
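A minimal sketch of the config covered by this module schema, using only field names visible in the diff (`delayed.enable`, `delayed.max_delayed_messages`, and `topic_metrics` entries with a `topic`); the values are illustrative assumptions.

## Hypothetical snippet based on the emqx_modules schema above
delayed {
  enable = true
  max_delayed_messages = 0      ## 0 means no limit, per the description above
}

topic_metrics = [
  {topic = "test/topic"}        ## collect metrics for this topic (topic value assumed)
]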
@ -2,7 +2,7 @@
 
 -include_lib("typerefl/include/types.hrl").
 
--export([roots/0, fields/1, namespace/0]).
+-export([roots/0, fields/1, desc/1, namespace/0]).
 
 -define(TYPE(Type), hoconsc:mk(Type)).
@ -26,7 +26,8 @@ fields("retainer") ->
     , {stop_publish_clear_msg, sc(boolean(),
         "When the retained flag of the `PUBLISH` message is set and Payload is empty, "
         "whether to continue to publish the message.<br/>"
-        "See: http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718038",
+        "See: "
+        "http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718038",
         false)}
     , {backend, backend_config()}
     ];
@ -52,13 +53,25 @@ fields(flow_control) ->
         0)}
     , {batch_deliver_limiter, sc(emqx_limiter_schema:bucket_name(),
         "The rate limiter name for retained messages' delivery.<br/>"
-        "Limiter helps to avoid delivering too many messages to the client at once, which may cause the client "
-        "to block or crash, or drop messages due to exceeding the size of the message queue.<br/>"
-        "The names of the available rate limiters are taken from the existing rate limiters under `limiter.batch`.<br/>"
+        "Limiter helps to avoid delivering too many messages to the client at once, "
+        "which may cause the client "
+        "to block or crash, or drop messages due to exceeding the size of the message"
+        " queue.<br/>"
+        "The names of the available rate limiters are taken from the existing rate "
+        "limiters under `limiter.batch`.<br/>"
         "If this field is empty, limiter is not used.",
         undefined)}
     ].
 
+desc("retainer") ->
+    "Configuration related to handling `PUBLISH` packets with a `retain` flag set to 1.";
+desc(mnesia_config) ->
+    "Configuration of the internal database storing retained messages.";
+desc(flow_control) ->
+    "Retainer batching and rate limiting.";
+desc(_) ->
+    undefined.
+
 %%--------------------------------------------------------------------
 %% Internal functions
 %%--------------------------------------------------------------------
@ -74,4 +87,7 @@ is_pos_integer(V) ->
     V >= 0.
 
 backend_config() ->
-    #{type => hoconsc:union([hoconsc:ref(?MODULE, mnesia_config)])}.
+    #{
+        type => hoconsc:union([hoconsc:ref(?MODULE, mnesia_config)]),
+        desc => "Settings for the database storing the retained messages."
+    }.
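For orientation, the retainer options described above might be set as follows. The field names `stop_publish_clear_msg` and `flow_control.batch_deliver_limiter` come from the schema; the values and the `retainer` block nesting are assumptions.

## Hypothetical retainer snippet (values assumed)
retainer {
  stop_publish_clear_msg = false       ## default per the schema above
  flow_control {
    batch_deliver_limiter = ""         ## empty means no limiter, per the description above
  }
}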
@ -43,16 +43,16 @@ fields("rule_engine") ->
 fields("rules") ->
     [ rule_name()
     , {"sql", sc(binary(),
-        #{ desc => """
+        #{ desc => "
 SQL query to transform the messages.<br>
 Example: <code>SELECT * FROM \"test/topic\" WHERE payload.x = 1</code><br>
-"""
+"
         , example => "SELECT * FROM \"test/topic\" WHERE payload.x = 1"
         , required => true
         , validator => fun ?MODULE:validate_sql/1
         })}
     , {"outputs", sc(hoconsc:array(hoconsc:union(outputs())),
-        #{ desc => """
+        #{ desc => "
 A list of outputs of the rule.<br>
 An output can be a string that refers to the channel ID of an EMQX bridge, or an object
 that refers to a function.<br>
@ -65,7 +65,7 @@ If one of the output crashed, all other outputs come after it will still be exec
 original order.<br>
 If there's any error when running an output, there will be an error message, and the 'failure'
 counter of the function output or the bridge channel will increase.
-"""
+"
         , default => []
         , example => [
             <<"http:my_http_bridge">>,
@ -96,62 +96,62 @@ fields("builtin_output_console") ->
 
 fields("user_provided_function") ->
     [ {function, sc(binary(),
-        #{ desc => """
+        #{ desc => "
 The user provided function. Should be in the format: '{module}:{function}'.<br>
 Where {module} is the Erlang callback module and {function} is the Erlang function.
 <br>
 To write your own function, checkout the function <code>console</code> and
 <code>republish</code> in the source file:
 <code>apps/emqx_rule_engine/src/emqx_rule_outputs.erl</code> as an example.
-"""
+"
         , example => "module:function"
         })}
     , {args, sc(map(),
-        #{ desc => """
+        #{ desc => "
 The args will be passed as the 3rd argument to module:function/3,
 checkout the function <code>console</code> and <code>republish</code> in the source file:
 <code>apps/emqx_rule_engine/src/emqx_rule_outputs.erl</code> as an example.
-"""
+"
         , default => #{}
         })}
     ];
 
 fields("republish_args") ->
     [ {topic, sc(binary(),
-        #{ desc =>"""
+        #{ desc =>"
 The target topic of message to be re-published.<br>
 Template with variables is allowed, see description of the 'republish_args'.
-"""
+"
         , required => true
         , example => <<"a/1">>
         })}
     , {qos, sc(qos(),
-        #{ desc => """
+        #{ desc => "
 The qos of the message to be re-published.
 Template with variables is allowed, see description of the 'republish_args'.<br>
 Defaults to ${qos}. If variable ${qos} is not found from the selected result of the rule,
 0 is used.
-"""
+"
         , default => <<"${qos}">>
         , example => <<"${qos}">>
         })}
     , {retain, sc(hoconsc:union([binary(), boolean()]),
-        #{ desc => """
+        #{ desc => "
 The 'retain' flag of the message to be re-published.
 Template with variables is allowed, see description of the 'republish_args'.<br>
 Defaults to ${retain}. If variable ${retain} is not found from the selected result
 of the rule, false is used.
-"""
+"
         , default => <<"${retain}">>
         , example => <<"${retain}">>
         })}
     , {payload, sc(binary(),
-        #{ desc => """
+        #{ desc => "
 The payload of the message to be re-published.
 Template with variables is allowed, see description of the 'republish_args'.<br>.
 Defaults to ${payload}. If variable ${payload} is not found from the selected result
 of the rule, then the string \"undefined\" is used.
-"""
+"
         , default => <<"${payload}">>
         , example => <<"${payload}">>
         })}
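Putting the fields above together, a rule with a republish output might look like the sketch below. The SQL string, the bridge channel ID, and the `${...}` defaults reuse the examples from the schema; the exact nesting under `rule_engine.rules` and the shape of the output objects are assumptions.

## Hypothetical rule sketch (nesting and output object shape assumed)
rule_engine.rules.my_rule {
  sql = "SELECT * FROM \"test/topic\" WHERE payload.x = 1"
  outputs = [
    "http:my_http_bridge",                          ## bridge channel ID, example from the schema
    {function = "republish",                        ## built-in output referenced in the schema
     args = {topic = "a/1",                         ## target topic, example from the schema
             qos = "${qos}",                        ## defaults per the schema above
             retain = "${retain}",
             payload = "${payload}"}}
  ]
}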
@ -2,32 +2,37 @@
 
 -include_lib("typerefl/include/types.hrl").
 
--export([roots/0, fields/1, namespace/0]).
+-export([roots/0, fields/1, desc/1, namespace/0]).
 
 namespace() -> "slow_subs".
 
 roots() -> ["slow_subs"].
 
 fields("slow_subs") ->
-    [ {enable, sc(boolean(), false, "Enable this feature")}
+    [ {enable, sc(boolean(), false, "Enable this feature.")}
     , {threshold,
           sc(emqx_schema:duration_ms(),
              "500ms",
-             "The latency threshold for statistics, the minimum value is 100ms")}
+             "The latency threshold for statistics, the minimum value is 100ms.")}
     , {expire_interval,
           sc(emqx_schema:duration_ms(),
              "300s",
-             "The eviction time of the record, which in the statistics record table")}
+             "The eviction time of the record, which in the statistics record table.")}
     , {top_k_num,
           sc(integer(),
              10,
-             "The maximum number of records in the slow subscription statistics record table")}
+             "The maximum number of records in the slow subscription statistics record table.")}
     , {stats_type,
           sc(hoconsc:union([whole, internal, response]),
             whole,
-            "The method to calculate the latency")}
+            "The method to calculate the latency.")}
     ].
 
+desc("slow_subs") ->
+    "Configuration for `slow_subs` feature.";
+desc(_) ->
+    undefined.
+
 %%--------------------------------------------------------------------
 %% Internal functions
 %%--------------------------------------------------------------------
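A sketch of the corresponding `slow_subs` config, using the field names and defaults shown in the schema above (values are illustrative, and `enable` defaults to false):

## Hypothetical slow_subs snippet based on the schema defaults above
slow_subs {
  enable = true               ## feature switch (schema default is false)
  threshold = "500ms"         ## latency threshold, minimum 100ms
  expire_interval = "300s"    ## eviction time of records in the statistics table
  top_k_num = 10              ## max records kept
  stats_type = whole          ## whole | internal | response
}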
@ -24,7 +24,8 @@
 
 -export([ namespace/0
         , roots/0
-        , fields/1]).
+        , fields/1
+        , desc/1]).
 
 -typerefl_from_string({ip_port/0, emqx_statsd_schema, to_ip_port}).
@ -43,6 +44,11 @@ fields("statsd") ->
     , {flush_time_interval, fun flush_interval/1}
     ].
 
+desc("statsd") ->
+    "Configuration related to reporting metrics to statsd.";
+desc(_) ->
+    undefined.
+
 server(type) -> emqx_schema:ip_port();
 server(required) -> true;
 server(default) -> "127.0.0.1:8125";
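For context, a statsd section per this schema could be as small as the following; the `server` default comes from the schema above, while the `flush_time_interval` value is an assumption (only the field name appears in the diff).

## Hypothetical statsd snippet
statsd {
  server = "127.0.0.1:8125"      ## default from the schema above
  flush_time_interval = "10s"    ## assumed value; field name from the schema
}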
@ -12,7 +12,7 @@
 
 -behaviour(hocon_schema).
 
--export([roots/0, fields/1, validations/0]).
+-export([roots/0, fields/1, validations/0, desc/1]).
 
 roots() ->
     [
@ -40,7 +40,7 @@ fields(key_license) ->
             type => string(),
             %% so it's not logged
             sensitive => true,
-            desc => "Configure the license as a string"
+            desc => "License string"
         }}
         | common_fields()
     ];
@ -53,6 +53,13 @@ fields(file_license) ->
         | common_fields()
     ].
 
+desc(key_license) ->
+    "License provisioned as a string.";
+desc(file_license) ->
+    "License provisioned as a file.";
+desc(_) ->
+    undefined.
+
 common_fields() ->
     [
         {connection_low_watermark, #{