fix(spellcheck): fix several spellcheck issues not caught in CI

Fixes some issues later found by CI that somehow didn't block the originating PRs.

Example failure:

https://github.com/emqx/emqx/actions/runs/5715470851/job/15485284918?pr=11372
This commit is contained in:
Thales Macedo Garitezi 2023-07-31 11:42:02 -03:00
parent b24e7e2559
commit 620b5fc048
6 changed files with 67 additions and 9 deletions

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_azure_event_hub, [ {application, emqx_bridge_azure_event_hub, [
{description, "EMQX Enterprise Azure Event Hub Bridge"}, {description, "EMQX Enterprise Azure Event Hub Bridge"},
{vsn, "0.1.0"}, {vsn, "0.1.1"},
{registered, []}, {registered, []},
{applications, [ {applications, [
kernel, kernel,

View File

@ -68,8 +68,25 @@ fields(Method) ->
Fields = emqx_bridge_kafka:fields(Method), Fields = emqx_bridge_kafka:fields(Method),
override_documentations(Fields). override_documentations(Fields).
desc(_) -> desc("config_producer") ->
undefined. ?DESC("desc_config");
desc("ssl_client_opts") ->
emqx_schema:desc("ssl_client_opts");
desc("get_producer") ->
["Configuration for Azure Event Hub using `GET` method."];
desc("put_producer") ->
["Configuration for Azure Event Hub using `PUT` method."];
desc("post_producer") ->
["Configuration for Azure Event Hub using `POST` method."];
desc(Name) ->
lists:member(Name, struct_names()) orelse throw({missing_desc, Name}),
?DESC(Name).
struct_names() ->
[
auth_username_password,
producer_kafka_opts
].
conn_bridge_examples(Method) -> conn_bridge_examples(Method) ->
[ [
@ -162,7 +179,15 @@ ref(Name) ->
producer_overrides() -> producer_overrides() ->
#{ #{
authentication => mk(ref(auth_username_password), #{default => #{}, required => true}), authentication =>
mk(
ref(auth_username_password),
#{
default => #{},
required => true,
desc => ?DESC("authentication")
}
),
bootstrap_hosts => bootstrap_hosts =>
mk( mk(
binary(), binary(),

View File

@ -105,7 +105,7 @@ partition_count_refresh_interval.label:
"""Partition Count Refresh Interval""" """Partition Count Refresh Interval"""
max_batch_bytes.desc: max_batch_bytes.desc:
"""Maximum bytes to collect in a Azure Event Hub message batch. Most of the Kafka brokers default to a limit of 1 MB batch size. EMQX's default value is less than 1 MB in order to compensate Kafka message encoding overheads (especially when each individual message is very small). When a single message is over the limit, it is still sent (as a single element batch).""" """Maximum bytes to collect in an Azure Event Hub message batch. Most of the Kafka brokers default to a limit of 1 MB batch size. EMQX's default value is less than 1 MB in order to compensate Kafka message encoding overheads (especially when each individual message is very small). When a single message is over the limit, it is still sent (as a single element batch)."""
max_batch_bytes.label: max_batch_bytes.label:
"""Max Batch Bytes""" """Max Batch Bytes"""
@ -284,4 +284,28 @@ sync_query_timeout.desc:
sync_query_timeout.label: sync_query_timeout.label:
"""Synchronous Query Timeout""" """Synchronous Query Timeout"""
auth_username_password.desc:
"""Username/password based authentication."""
auth_username_password.label:
"""Username/password Auth"""
auth_sasl_password.desc:
"""The password for connecting to Azure Event Hub. Should be the "connection string-primary key" of a Namespace shared access policy."""
auth_sasl_password.label:
"""Password"""
producer_kafka_opts.desc:
"""Azure Event Hub producer configs."""
producer_kafka_opts.label:
"""Azure Event Hub Producer"""
desc_config.desc:
"""Configuration for an Azure Event Hub bridge."""
desc_config.label:
"""Azure Event Hub Bridge Configuration"""
} }

View File

@ -36,8 +36,9 @@ local_topic.label:
template.desc: template.desc:
"""Template, the default value is empty. When this value is empty the whole message will be stored in the database.<br> """Template, the default value is empty. When this value is empty the whole message will be stored in the database.<br>
The template can be any valid json with placeholders and make sure all keys for table are here, example:<br> The template can be any valid JSON with placeholders and make sure all keys for table are here, example:<br>
{"id" : "${id}", "clientid" : "${clientid}", "data" : "${payload.data}"}""" <code>{"id" : "${id}", "clientid" : "${clientid}", "data" : "${payload.data}"}</code>
"""
template.label: template.label:
"""Template""" """Template"""

View File

@ -35,7 +35,7 @@ local_topic.label:
"""Local Topic""" """Local Topic"""
write_syntax.desc: write_syntax.desc:
"""Conf of GreptimeDB gRPC protocol to write data points.The write syntax is a text-based format that provides the measurement, tag set, field set, and timestamp of a data point, and placeholder supported, which is the same as InfluxDB line protocol. """Conf of GreptimeDB gRPC protocol to write data points. Write syntax is a text-based format that provides the measurement, tag set, field set, and timestamp of a data point, and placeholder supported, which is the same as InfluxDB line protocol.
See also [InfluxDB 2.3 Line Protocol](https://docs.influxdata.com/influxdb/v2.3/reference/syntax/line-protocol/) and See also [InfluxDB 2.3 Line Protocol](https://docs.influxdata.com/influxdb/v2.3/reference/syntax/line-protocol/) and
[GreptimeDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/) </br> [GreptimeDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/) </br>
TLDR:</br> TLDR:</br>

View File

@ -17,8 +17,16 @@ if ! [ -f "$SCHEMA" ]; then
exit 1 exit 1
fi fi
if [[ -t 1 ]];
then
DOCKER_TERMINAL_OPT="-t"
else
DOCKER_TERMINAL_OPT=""
fi
set +e set +e
docker run --rm -i --name spellcheck \ # shellcheck disable=SC2086
docker run --rm -i ${DOCKER_TERMINAL_OPT} --name spellcheck \
-v "${PROJ_ROOT}"/scripts/spellcheck/dicts:/dicts \ -v "${PROJ_ROOT}"/scripts/spellcheck/dicts:/dicts \
-v "$SCHEMA":/schema.json \ -v "$SCHEMA":/schema.json \
ghcr.io/emqx/emqx-schema-validate:0.4.0 /schema.json ghcr.io/emqx/emqx-schema-validate:0.4.0 /schema.json