Merge pull request #12925 from zmstone/0424-merge-latest-master-to-release-57

0424 merge latest master to release 57
This commit is contained in:
Zaiming (Stone) Shi 2024-04-25 10:47:00 +02:00 committed by GitHub
commit 82790e6ea4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
262 changed files with 12394 additions and 2871 deletions

View File

@ -4,7 +4,7 @@ services:
greptimedb: greptimedb:
container_name: greptimedb container_name: greptimedb
hostname: greptimedb hostname: greptimedb
image: greptime/greptimedb:v0.4.4 image: greptime/greptimedb:v0.7.1
expose: expose:
- "4000" - "4000"
- "4001" - "4001"

View File

@ -1,24 +1,53 @@
version: '3.9' version: '3.9'
services: services:
iotdb: iotdb_1_3_0:
container_name: iotdb container_name: iotdb130
hostname: iotdb hostname: iotdb130
image: apache/iotdb:1.1.0-standalone image: apache/iotdb:1.3.0-standalone
restart: always restart: always
environment: environment:
- enable_rest_service=true - enable_rest_service=true
- cn_internal_address=iotdb - cn_internal_address=iotdb130
- cn_internal_port=10710 - cn_internal_port=10710
- cn_consensus_port=10720 - cn_consensus_port=10720
- cn_target_config_node_list=iotdb:10710 - cn_seed_config_node=iotdb130:10710
- dn_rpc_address=iotdb - dn_rpc_address=iotdb130
- dn_internal_address=iotdb - dn_internal_address=iotdb130
- dn_rpc_port=6667 - dn_rpc_port=6667
- dn_mpp_data_exchange_port=10740 - dn_mpp_data_exchange_port=10740
- dn_schema_region_consensus_port=10750 - dn_schema_region_consensus_port=10750
- dn_data_region_consensus_port=10760 - dn_data_region_consensus_port=10760
- dn_target_config_node_list=iotdb:10710 - dn_seed_config_node=iotdb130:10710
# volumes:
# - ./data:/iotdb/data
# - ./logs:/iotdb/logs
expose:
- "18080"
# IoTDB's REST interface, uncomment for local testing
# ports:
# - "18080:18080"
networks:
- emqx_bridge
iotdb_1_1_0:
container_name: iotdb110
hostname: iotdb110
image: apache/iotdb:1.1.0-standalone
restart: always
environment:
- enable_rest_service=true
- cn_internal_address=iotdb110
- cn_internal_port=10710
- cn_consensus_port=10720
- cn_target_config_node_list=iotdb110:10710
- dn_rpc_address=iotdb110
- dn_internal_address=iotdb110
- dn_rpc_port=6667
- dn_mpp_data_exchange_port=10740
- dn_schema_region_consensus_port=10750
- dn_data_region_consensus_port=10760
- dn_target_config_node_list=iotdb110:10710
# volumes: # volumes:
# - ./data:/iotdb/data # - ./data:/iotdb/data
# - ./logs:/iotdb/logs # - ./logs:/iotdb/logs

View File

@ -9,3 +9,4 @@ accounts:
defaultGroupPerm: PUB|SUB defaultGroupPerm: PUB|SUB
topicPerms: topicPerms:
- TopicTest=PUB|SUB - TopicTest=PUB|SUB
- Topic2=PUB|SUB

View File

@ -139,9 +139,15 @@
"enabled": true "enabled": true
}, },
{ {
"name": "iotdb", "name": "iotdb110",
"listen": "0.0.0.0:18080", "listen": "0.0.0.0:18080",
"upstream": "iotdb:18080", "upstream": "iotdb110:18080",
"enabled": true
},
{
"name": "iotdb130",
"listen": "0.0.0.0:28080",
"upstream": "iotdb130:18080",
"enabled": true "enabled": true
}, },
{ {

23
.github/CODEOWNERS vendored
View File

@ -1,18 +1,29 @@
## Default ## Default
* @emqx/emqx-review-board * @emqx/emqx-review-board
# emqx-review-board members
## HJianBo
## id
## ieQu1
## keynslug
## qzhuyan
## savonarola
## terry-xiaoyu
## thalesmg
## zhongwencool
## zmstone
## apps ## apps
/apps/emqx/ @emqx/emqx-review-board @lafirest /apps/emqx/ @emqx/emqx-review-board @lafirest
/apps/emqx_connector/ @emqx/emqx-review-board /apps/emqx_auth/ @emqx/emqx-review-board @JimMoen
/apps/emqx_auth/ @emqx/emqx-review-board @JimMoen @savonarola
/apps/emqx_connector/ @emqx/emqx-review-board @JimMoen /apps/emqx_connector/ @emqx/emqx-review-board @JimMoen
/apps/emqx_dashboard/ @emqx/emqx-review-board @JimMoen @lafirest /apps/emqx_dashboard/ @emqx/emqx-review-board @JimMoen @lafirest
/apps/emqx_dashboard_rbac/ @emqx/emqx-review-board @lafirest /apps/emqx_dashboard_rbac/ @emqx/emqx-review-board @lafirest
/apps/emqx_dashboard_sso/ @emqx/emqx-review-board @JimMoen @lafirest /apps/emqx_dashboard_sso/ @emqx/emqx-review-board @JimMoen @lafirest
/apps/emqx_exhook/ @emqx/emqx-review-board @JimMoen @HJianBo /apps/emqx_exhook/ @emqx/emqx-review-board @JimMoen
/apps/emqx_ft/ @emqx/emqx-review-board @savonarola @keynslug
/apps/emqx_gateway/ @emqx/emqx-review-board @lafirest /apps/emqx_gateway/ @emqx/emqx-review-board @lafirest
/apps/emqx_management/ @emqx/emqx-review-board @lafirest @sstrigler /apps/emqx_management/ @emqx/emqx-review-board @lafirest
/apps/emqx_opentelemetry @emqx/emqx-review-board @SergeTupchiy
/apps/emqx_plugins/ @emqx/emqx-review-board @JimMoen /apps/emqx_plugins/ @emqx/emqx-review-board @JimMoen
/apps/emqx_prometheus/ @emqx/emqx-review-board @JimMoen /apps/emqx_prometheus/ @emqx/emqx-review-board @JimMoen
/apps/emqx_psk/ @emqx/emqx-review-board @lafirest /apps/emqx_psk/ @emqx/emqx-review-board @lafirest
@ -20,7 +31,7 @@
/apps/emqx_rule_engine/ @emqx/emqx-review-board @kjellwinblad /apps/emqx_rule_engine/ @emqx/emqx-review-board @kjellwinblad
/apps/emqx_slow_subs/ @emqx/emqx-review-board @lafirest /apps/emqx_slow_subs/ @emqx/emqx-review-board @lafirest
/apps/emqx_statsd/ @emqx/emqx-review-board @JimMoen /apps/emqx_statsd/ @emqx/emqx-review-board @JimMoen
/apps/emqx_durable_storage/ @emqx/emqx-review-board @ieQu1 @keynslug /apps/emqx_durable_storage/ @emqx/emqx-review-board @keynslug
## CI ## CI
/deploy/ @emqx/emqx-review-board @Rory-Z /deploy/ @emqx/emqx-review-board @Rory-Z

View File

@ -151,7 +151,23 @@ jobs:
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
- name: build emqx packages - name: build tgz
env:
PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }}
OS: ${{ matrix.os }}
IS_ELIXIR: ${{ matrix.with_elixir }}
BUILDER: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}"
BUILDER_SYSTEM: force_docker
run: |
./scripts/buildx.sh \
--profile $PROFILE \
--arch $ARCH \
--builder $BUILDER \
--elixir $IS_ELIXIR \
--pkgtype tgz
- name: build pkg
if: matrix.with_elixir == 'no'
env: env:
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }} ARCH: ${{ matrix.arch }}

View File

@ -24,8 +24,8 @@ jobs:
matrix: matrix:
branch: branch:
- master - master
- release-55
- release-56 - release-56
- release-57
language: language:
- cpp - cpp
- python - python

View File

@ -31,7 +31,7 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: | run: |
gh api --method GET -f head_branch=master -f status=completed -f exclude_pull_requests=true /repos/emqx/emqx/actions/runs > runs.json gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/emqx/emqx/actions/runs > runs.json
for id in $(jq -r '.workflow_runs[] | select((."conclusion" == "failure") and (."name" != "Keep master green") and .run_attempt < 3) | .id' runs.json); do for id in $(jq -r '.workflow_runs[] | select((."conclusion" == "failure") and (."name" != "Keep master green") and .run_attempt < 3) | .id' runs.json); do
echo "rerun https://github.com/emqx/emqx/actions/runs/$id" echo "rerun https://github.com/emqx/emqx/actions/runs/$id"
gh api --method POST /repos/emqx/emqx/actions/runs/$id/rerun-failed-jobs || true gh api --method POST /repos/emqx/emqx/actions/runs/$id/rerun-failed-jobs || true

View File

@ -67,12 +67,13 @@ jobs:
BUCKET=${{ secrets.AWS_S3_BUCKET }} BUCKET=${{ secrets.AWS_S3_BUCKET }}
OUTPUT_DIR=${{ steps.profile.outputs.s3dir }} OUTPUT_DIR=${{ steps.profile.outputs.s3dir }}
aws s3 cp --recursive s3://$BUCKET/$OUTPUT_DIR/${{ env.ref_name }} packages aws s3 cp --recursive s3://$BUCKET/$OUTPUT_DIR/${{ env.ref_name }} packages
- uses: emqx/upload-assets@8d2083b4dbe3151b0b735572eaa153b6acb647fe # 0.5.0 - uses: emqx/upload-assets@974befcf0e72a1811360a81c798855efb66b0551 # 0.5.2
env: env:
GITHUB_TOKEN: ${{ github.token }} GITHUB_TOKEN: ${{ github.token }}
with: with:
asset_paths: '["packages/*"]' asset_paths: '["packages/*"]'
tag_name: "${{ env.ref_name }}" tag_name: "${{ env.ref_name }}"
skip_existing: true
- name: update to emqx.io - name: update to emqx.io
if: startsWith(env.ref_name, 'v') && ((github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artefacts) if: startsWith(env.ref_name, 'v') && ((github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artefacts)
run: | run: |

View File

@ -47,6 +47,9 @@ jobs:
echo "_EMQX_DOCKER_IMAGE_TAG=$_EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV echo "_EMQX_DOCKER_IMAGE_TAG=$_EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- name: dashboard tests - name: dashboard tests
working-directory: ./scripts/ui-tests working-directory: ./scripts/ui-tests
env:
EMQX_VERSION: ${{ inputs.version-emqx }}
EMQX_ENTERPRISE_VERSION: ${{ inputs.version-emqx-enterprise }}
run: | run: |
set -eu set -eu
docker compose up --abort-on-container-exit --exit-code-from selenium docker compose up --abort-on-container-exit --exit-code-from selenium

View File

@ -35,6 +35,7 @@ jobs:
shell: bash shell: bash
outputs: outputs:
matrix: ${{ steps.matrix.outputs.matrix }} matrix: ${{ steps.matrix.outputs.matrix }}
skip: ${{ steps.matrix.outputs.skip }}
steps: steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with: with:
@ -49,12 +50,16 @@ jobs:
changed_files="$(git diff --name-only ${BEFORE_REF} ${AFTER_REF} apps/emqx)" changed_files="$(git diff --name-only ${BEFORE_REF} ${AFTER_REF} apps/emqx)"
if [ "$changed_files" = '' ]; then if [ "$changed_files" = '' ]; then
echo "nothing changed in apps/emqx, ignored." echo "nothing changed in apps/emqx, ignored."
echo "matrix=[]" | tee -a $GITHUB_OUTPUT echo 'matrix=[]' | tee -a $GITHUB_OUTPUT
echo 'skip=true' | tee -a $GITHUB_OUTPUT
exit 0 exit 0
else
echo 'skip=false' | tee -a $GITHUB_OUTPUT
echo 'matrix=[{"type": "eunit_proper_and_static"},{"type": "1_3"},{"type": "2_3"},{"type": "3_3"}]' | tee -a $GITHUB_OUTPUT
fi fi
echo 'matrix=[{"type": "eunit_proper_and_static"},{"type": "1_3"},{"type": "2_3"},{"type": "3_3"}]' | tee -a $GITHUB_OUTPUT
run_emqx_app_tests: run_emqx_app_tests:
if: needs.prepare_matrix.outputs.skip != 'true'
needs: needs:
- prepare_matrix - prepare_matrix
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}

2
.gitignore vendored
View File

@ -76,3 +76,5 @@ rebar-git-cache.tar
.docker_image_tag .docker_image_tag
.emqx_docker_image_tags .emqx_docker_image_tags
.git/ .git/
apps/emqx_utils/src/emqx_variform_parser.erl
apps/emqx_utils/src/emqx_variform_scan.erl

View File

@ -20,8 +20,8 @@ endif
# Dashboard version # Dashboard version
# from https://github.com/emqx/emqx-dashboard5 # from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.8.0 export EMQX_DASHBOARD_VERSION ?= v1.9.0-beta.1
export EMQX_EE_DASHBOARD_VERSION ?= e1.6.0 export EMQX_EE_DASHBOARD_VERSION ?= e1.7.0-beta.1
PROFILE ?= emqx PROFILE ?= emqx
REL_PROFILES := emqx emqx-enterprise REL_PROFILES := emqx emqx-enterprise

View File

@ -673,7 +673,6 @@ end).
-define(SHARE, "$share"). -define(SHARE, "$share").
-define(QUEUE, "$queue"). -define(QUEUE, "$queue").
-define(SHARE(Group, Topic), emqx_topic:join([<<?SHARE>>, Group, Topic])).
-define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}). -define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}).

View File

@ -20,12 +20,17 @@
-record(?TRACE, { -record(?TRACE, {
name :: binary() | undefined | '_', name :: binary() | undefined | '_',
type :: clientid | topic | ip_address | undefined | '_', type :: clientid | topic | ip_address | ruleid | undefined | '_',
filter :: filter ::
emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address() | undefined | '_', emqx_types:topic()
| emqx_types:clientid()
| emqx_trace:ip_address()
| emqx_trace:ruleid()
| undefined
| '_',
enable = true :: boolean() | '_', enable = true :: boolean() | '_',
payload_encode = text :: hex | text | hidden | '_', payload_encode = text :: hex | text | hidden | '_',
extra = #{} :: map() | '_', extra = #{formatter => text} :: #{formatter => text | json} | '_',
start_at :: integer() | undefined | '_', start_at :: integer() | undefined | '_',
end_at :: integer() | undefined | '_' end_at :: integer() | undefined | '_'
}). }).

View File

@ -44,11 +44,20 @@
). ).
-define(SLOG_THROTTLE(Level, Data, Meta), -define(SLOG_THROTTLE(Level, Data, Meta),
case emqx_log_throttler:allow(maps:get(msg, Data)) of case logger:allow(Level, ?MODULE) of
true -> true ->
?SLOG(Level, Data, Meta); (fun(#{msg := __Msg} = __Data) ->
case emqx_log_throttler:allow(__Msg) of
true ->
logger:log(Level, __Data, Meta);
false ->
?_DO_TRACE(Level, __Msg, maps:merge(__Data, Meta))
end
end)(
Data
);
false -> false ->
?_DO_TRACE(Level, maps:get(msg, Data), maps:merge(Data, Meta)) ok
end end
). ).

View File

@ -184,7 +184,7 @@ list_all_pubranges(Node) ->
session_open(Node, ClientId) -> session_open(Node, ClientId) ->
ClientInfo = #{}, ClientInfo = #{},
ConnInfo = #{peername => {undefined, undefined}}, ConnInfo = #{peername => {undefined, undefined}, proto_name => <<"MQTT">>, proto_ver => 5},
WillMsg = undefined, WillMsg = undefined,
erpc:call( erpc:call(
Node, Node,
@ -252,7 +252,6 @@ t_session_subscription_idempotency(Config) ->
ok ok
end, end,
fun(Trace) -> fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
Session = session_open(Node1, ClientId), Session = session_open(Node1, ClientId),
?assertMatch( ?assertMatch(
#{SubTopicFilter := #{}}, #{SubTopicFilter := #{}},
@ -326,7 +325,6 @@ t_session_unsubscription_idempotency(Config) ->
ok ok
end, end,
fun(Trace) -> fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
Session = session_open(Node1, ClientId), Session = session_open(Node1, ClientId),
?assertEqual( ?assertEqual(
#{}, #{},
@ -415,10 +413,7 @@ do_t_session_discard(Params) ->
ok ok
end, end,
fun(Trace) -> []
ct:pal("trace:\n ~p", [Trace]),
ok
end
), ),
ok. ok.

View File

@ -27,8 +27,8 @@
{lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}}, {lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}},
{gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}}, {gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}},
{cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}}, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}},
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.11.1"}}}, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.11.2"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.1"}}}, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.3"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.42.1"}}}, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.42.1"}}},
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},

View File

@ -16,9 +16,14 @@
-module(emqx_config_backup). -module(emqx_config_backup).
-type ok_result() :: #{
root_key => emqx_utils_maps:config_key(),
changed => [emqx_utils_maps:config_key_path()]
}.
-type error_result() :: #{root_key => emqx_utils_maps:config_key(), reason => term()}.
-callback import_config(RawConf :: map()) -> -callback import_config(RawConf :: map()) ->
{ok, #{ {ok, ok_result()}
root_key => emqx_utils_maps:config_key(), | {error, error_result()}
changed => [emqx_utils_maps:config_key_path()] | {results, {[ok_result()], [error_result()]}}.
}}
| {error, #{root_key => emqx_utils_maps:config_key(), reason => term()}}.

View File

@ -253,8 +253,12 @@ persist_publish(Msg) ->
case emqx_persistent_message:persist(Msg) of case emqx_persistent_message:persist(Msg) of
ok -> ok ->
[persisted]; [persisted];
{_SkipOrError, _Reason} -> {skipped, _} ->
% TODO: log errors? [];
{error, Recoverable, Reason} ->
?SLOG(debug, #{
msg => "failed_to_persist_message", is_recoverable => Recoverable, reason => Reason
}),
[] []
end. end.

View File

@ -251,7 +251,7 @@ init(
MP -> MP MP -> MP
end, end,
ListenerId = emqx_listeners:listener_id(Type, Listener), ListenerId = emqx_listeners:listener_id(Type, Listener),
ClientInfo0 = set_peercert_infos( ClientInfo = set_peercert_infos(
Peercert, Peercert,
#{ #{
zone => Zone, zone => Zone,
@ -269,8 +269,6 @@ init(
}, },
Zone Zone
), ),
AttrExtractionConfig = get_mqtt_conf(Zone, client_attrs_init),
ClientInfo = initialize_client_attrs_from_cert(AttrExtractionConfig, ClientInfo0, Peercert),
{NClientInfo, NConnInfo} = take_ws_cookie(ClientInfo, ConnInfo), {NClientInfo, NConnInfo} = take_ws_cookie(ClientInfo, ConnInfo),
#channel{ #channel{
conninfo = NConnInfo, conninfo = NConnInfo,
@ -1575,7 +1573,7 @@ enrich_client(ConnPkt, Channel = #channel{clientinfo = ClientInfo}) ->
fun maybe_username_as_clientid/2, fun maybe_username_as_clientid/2,
fun maybe_assign_clientid/2, fun maybe_assign_clientid/2,
%% attr init should happen after clientid and username assign %% attr init should happen after clientid and username assign
fun maybe_set_client_initial_attr/2 fun maybe_set_client_initial_attrs/2
], ],
ConnPkt, ConnPkt,
ClientInfo ClientInfo
@ -1587,47 +1585,6 @@ enrich_client(ConnPkt, Channel = #channel{clientinfo = ClientInfo}) ->
{error, ReasonCode, Channel#channel{clientinfo = NClientInfo}} {error, ReasonCode, Channel#channel{clientinfo = NClientInfo}}
end. end.
initialize_client_attrs_from_cert(
#{
extract_from := From,
extract_regexp := Regexp,
extract_as := AttrName
},
ClientInfo,
Peercert
) when From =:= cn orelse From =:= dn ->
case extract_client_attr_from_cert(From, Regexp, Peercert) of
{ok, Value} ->
?SLOG(
debug,
#{
msg => "client_attr_init_from_cert",
extracted_as => AttrName,
extracted_value => Value
}
),
ClientInfo#{client_attrs => #{AttrName => Value}};
_ ->
ClientInfo#{client_attrs => #{}}
end;
initialize_client_attrs_from_cert(_, ClientInfo, _Peercert) ->
ClientInfo.
extract_client_attr_from_cert(cn, Regexp, Peercert) ->
CN = esockd_peercert:common_name(Peercert),
re_extract(CN, Regexp);
extract_client_attr_from_cert(dn, Regexp, Peercert) ->
DN = esockd_peercert:subject(Peercert),
re_extract(DN, Regexp).
re_extract(Str, Regexp) when is_binary(Str) ->
case re:run(Str, Regexp, [{capture, all_but_first, list}]) of
{match, [_ | _] = List} -> {ok, iolist_to_binary(List)};
_ -> nomatch
end;
re_extract(_NotStr, _Regexp) ->
ignored.
set_username( set_username(
#mqtt_packet_connect{username = Username}, #mqtt_packet_connect{username = Username},
ClientInfo = #{username := undefined} ClientInfo = #{username := undefined}
@ -1668,75 +1625,50 @@ maybe_assign_clientid(#mqtt_packet_connect{clientid = <<>>}, ClientInfo) ->
maybe_assign_clientid(#mqtt_packet_connect{clientid = ClientId}, ClientInfo) -> maybe_assign_clientid(#mqtt_packet_connect{clientid = ClientId}, ClientInfo) ->
{ok, ClientInfo#{clientid => ClientId}}. {ok, ClientInfo#{clientid => ClientId}}.
maybe_set_client_initial_attr(ConnPkt, #{zone := Zone} = ClientInfo0) -> get_client_attrs_init_config(Zone) ->
Config = get_mqtt_conf(Zone, client_attrs_init), get_mqtt_conf(Zone, client_attrs_init, []).
ClientInfo = initialize_client_attrs_from_user_property(Config, ConnPkt, ClientInfo0),
Attrs = maps:get(client_attrs, ClientInfo, #{}),
case extract_attr_from_clientinfo(Config, ClientInfo) of
{ok, Value} ->
#{extract_as := Name} = Config,
?SLOG(
debug,
#{
msg => "client_attr_init_from_clientinfo",
extracted_as => Name,
extracted_value => Value
}
),
{ok, ClientInfo#{client_attrs => Attrs#{Name => Value}}};
_ ->
{ok, ClientInfo}
end.
initialize_client_attrs_from_user_property( maybe_set_client_initial_attrs(ConnPkt, #{zone := Zone} = ClientInfo) ->
#{ Inits = get_client_attrs_init_config(Zone),
extract_from := user_property, UserProperty = get_user_property_as_map(ConnPkt),
extract_as := PropertyKey {ok, initialize_client_attrs(Inits, ClientInfo#{user_property => UserProperty})}.
},
ConnPkt,
ClientInfo
) ->
case extract_client_attr_from_user_property(ConnPkt, PropertyKey) of
{ok, Value} ->
?SLOG(
debug,
#{
msg => "client_attr_init_from_user_property",
extracted_as => PropertyKey,
extracted_value => Value
}
),
ClientInfo#{client_attrs => #{PropertyKey => Value}};
_ ->
ClientInfo
end;
initialize_client_attrs_from_user_property(_, _ConnInfo, ClientInfo) ->
ClientInfo.
extract_client_attr_from_user_property( initialize_client_attrs(Inits, ClientInfo) ->
#mqtt_packet_connect{properties = #{'User-Property' := UserProperty}}, PropertyKey lists:foldl(
) -> fun(#{expression := Variform, set_as_attr := Name}, Acc) ->
case lists:keyfind(PropertyKey, 1, UserProperty) of Attrs = maps:get(client_attrs, ClientInfo, #{}),
{_, Value} -> case emqx_variform:render(Variform, ClientInfo) of
{ok, Value}; {ok, Value} ->
_ -> ?SLOG(
not_found debug,
end; #{
extract_client_attr_from_user_property(_ConnPkt, _PropertyKey) -> msg => "client_attr_initialized",
ignored. set_as_attr => Name,
attr_value => Value
}
),
Acc#{client_attrs => Attrs#{Name => Value}};
{error, Reason} ->
?SLOG(
warning,
#{
msg => "client_attr_initialization_failed",
reason => Reason
}
),
Acc
end
end,
ClientInfo,
Inits
).
extract_attr_from_clientinfo(#{extract_from := clientid, extract_regexp := Regexp}, #{ get_user_property_as_map(#mqtt_packet_connect{properties = #{'User-Property' := UserProperty}}) when
clientid := ClientId is_list(UserProperty)
}) ->
re_extract(ClientId, Regexp);
extract_attr_from_clientinfo(#{extract_from := username, extract_regexp := Regexp}, #{
username := Username
}) when
Username =/= undefined
-> ->
re_extract(Username, Regexp); maps:from_list(UserProperty);
extract_attr_from_clientinfo(_Config, _CLientInfo) -> get_user_property_as_map(_) ->
ignored. #{}.
fix_mountpoint(#{mountpoint := undefined} = ClientInfo) -> fix_mountpoint(#{mountpoint := undefined} = ClientInfo) ->
ClientInfo; ClientInfo;

View File

@ -222,7 +222,9 @@
% Messages delivered % Messages delivered
{counter, 'messages.delivered'}, {counter, 'messages.delivered'},
% Messages acked % Messages acked
{counter, 'messages.acked'} {counter, 'messages.acked'},
% Messages persistently stored
{counter, 'messages.persisted'}
] ]
). ).
@ -718,4 +720,5 @@ reserved_idx('overload_protection.gc') -> 403;
reserved_idx('overload_protection.new_conn') -> 404; reserved_idx('overload_protection.new_conn') -> 404;
reserved_idx('messages.validation_succeeded') -> 405; reserved_idx('messages.validation_succeeded') -> 405;
reserved_idx('messages.validation_failed') -> 406; reserved_idx('messages.validation_failed') -> 406;
reserved_idx('messages.persisted') -> 407;
reserved_idx(_) -> undefined. reserved_idx(_) -> undefined.

View File

@ -98,7 +98,7 @@ pre_config_update(_Root, _NewConf, _OldConf) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-spec persist(emqx_types:message()) -> -spec persist(emqx_types:message()) ->
ok | {skipped, _Reason} | {error, _TODO}. emqx_ds:store_batch_result() | {skipped, needs_no_persistence}.
persist(Msg) -> persist(Msg) ->
?WHEN_ENABLED( ?WHEN_ENABLED(
case needs_persistence(Msg) andalso has_subscribers(Msg) of case needs_persistence(Msg) andalso has_subscribers(Msg) of
@ -114,6 +114,7 @@ needs_persistence(Msg) ->
-spec store_message(emqx_types:message()) -> emqx_ds:store_batch_result(). -spec store_message(emqx_types:message()) -> emqx_ds:store_batch_result().
store_message(Msg) -> store_message(Msg) ->
emqx_metrics:inc('messages.persisted'),
emqx_ds:store_batch(?PERSISTENT_MESSAGE_DB, [Msg], #{sync => false}). emqx_ds:store_batch(?PERSISTENT_MESSAGE_DB, [Msg], #{sync => false}).
has_subscribers(#message{topic = Topic}) -> has_subscribers(#message{topic = Topic}) ->

View File

@ -75,7 +75,8 @@
%% Managment APIs: %% Managment APIs:
-export([ -export([
list_client_subscriptions/1 list_client_subscriptions/1,
get_client_subscription/2
]). ]).
%% session table operations %% session table operations
@ -116,15 +117,42 @@
%% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be %% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be
%% an atom, in theory (?). %% an atom, in theory (?).
-type id() :: binary(). -type id() :: binary().
-type topic_filter() :: emqx_types:topic(). -type topic_filter() :: emqx_types:topic() | #share{}.
%% Subscription and subscription states:
%%
%% Persistent sessions cannot simply update or delete subscriptions,
%% since subscription parameters must be exactly the same during
%% replay.
%%
%% To solve this problem, we store subscriptions in a twofold manner:
%%
%% - `subscription' is an object that holds up-to-date information
%% about the client's subscription and a reference to the latest
%% subscription state id
%%
%% - `subscription_state' is an immutable object that holds
%% information about the subcription parameters at a certain point of
%% time
%%
%% New subscription states are created whenever the client subscribes
%% to a topics, or updates an existing subscription.
%%
%% Stream replay states contain references to the subscription states.
%%
%% Outdated subscription states are discarded when they are not
%% referenced by either subscription or stream replay state objects.
-type subscription_id() :: integer(). -type subscription_id() :: integer().
%% This type is a result of merging
%% `emqx_persistent_session_ds_subs:subscription()' with its current
%% state.
-type subscription() :: #{ -type subscription() :: #{
id := subscription_id(), id := subscription_id(),
start_time := emqx_ds:time(), start_time := emqx_ds:time(),
props := map(), current_state := emqx_persistent_session_ds_subs:subscription_state_id(),
deleted := boolean() subopts := map()
}. }.
-define(TIMER_PULL, timer_pull). -define(TIMER_PULL, timer_pull).
@ -184,7 +212,9 @@
seqno_q2_dup, seqno_q2_dup,
seqno_q2_rec, seqno_q2_rec,
seqno_q2_next, seqno_q2_next,
n_streams n_streams,
awaiting_rel_cnt,
awaiting_rel_max
]). ]).
%% %%
@ -206,7 +236,8 @@ open(#{clientid := ClientID} = ClientInfo, ConnInfo, MaybeWillMsg, Conf) ->
ok = emqx_cm:takeover_kick(ClientID), ok = emqx_cm:takeover_kick(ClientID),
case session_open(ClientID, ClientInfo, ConnInfo, MaybeWillMsg) of case session_open(ClientID, ClientInfo, ConnInfo, MaybeWillMsg) of
Session0 = #{} -> Session0 = #{} ->
Session = Session0#{props => Conf}, Session1 = Session0#{props => Conf},
Session = do_expire(ClientInfo, Session1),
{true, ensure_timers(Session), []}; {true, ensure_timers(Session), []};
false -> false ->
false false
@ -249,7 +280,7 @@ info(is_persistent, #{}) ->
info(subscriptions, #{s := S}) -> info(subscriptions, #{s := S}) ->
emqx_persistent_session_ds_subs:to_map(S); emqx_persistent_session_ds_subs:to_map(S);
info(subscriptions_cnt, #{s := S}) -> info(subscriptions_cnt, #{s := S}) ->
emqx_topic_gbt:size(emqx_persistent_session_ds_state:get_subscriptions(S)); emqx_persistent_session_ds_state:n_subscriptions(S);
info(subscriptions_max, #{props := Conf}) -> info(subscriptions_max, #{props := Conf}) ->
maps:get(max_subscriptions, Conf); maps:get(max_subscriptions, Conf);
info(upgrade_qos, #{props := Conf}) -> info(upgrade_qos, #{props := Conf}) ->
@ -262,21 +293,21 @@ info(inflight_max, #{inflight := Inflight}) ->
emqx_persistent_session_ds_inflight:receive_maximum(Inflight); emqx_persistent_session_ds_inflight:receive_maximum(Inflight);
info(retry_interval, #{props := Conf}) -> info(retry_interval, #{props := Conf}) ->
maps:get(retry_interval, Conf); maps:get(retry_interval, Conf);
% info(mqueue, #sessmem{mqueue = MQueue}) ->
% MQueue;
info(mqueue_len, #{inflight := Inflight}) -> info(mqueue_len, #{inflight := Inflight}) ->
emqx_persistent_session_ds_inflight:n_buffered(all, Inflight); emqx_persistent_session_ds_inflight:n_buffered(all, Inflight);
% info(mqueue_max, #sessmem{mqueue = MQueue}) ->
% emqx_mqueue:max_len(MQueue);
info(mqueue_dropped, _Session) -> info(mqueue_dropped, _Session) ->
0; 0;
%% info(next_pkt_id, #{s := S}) -> %% info(next_pkt_id, #{s := S}) ->
%% {PacketId, _} = emqx_persistent_message_ds_replayer:next_packet_id(S), %% {PacketId, _} = emqx_persistent_message_ds_replayer:next_packet_id(S),
%% PacketId; %% PacketId;
% info(awaiting_rel, #sessmem{awaiting_rel = AwaitingRel}) -> info(awaiting_rel, #{s := S}) ->
% AwaitingRel; emqx_persistent_session_ds_state:fold_awaiting_rel(fun maps:put/3, #{}, S);
%% info(awaiting_rel_cnt, #{s := S}) -> info(awaiting_rel_max, #{props := Conf}) ->
%% seqno_diff(?QOS_2, ?rec, ?committed(?QOS_2), S); maps:get(max_awaiting_rel, Conf);
info(awaiting_rel_cnt, #{s := S}) ->
emqx_persistent_session_ds_state:n_awaiting_rel(S);
info(await_rel_timeout, #{props := Conf}) ->
maps:get(await_rel_timeout, Conf);
info(seqno_q1_comm, #{s := S}) -> info(seqno_q1_comm, #{s := S}) ->
emqx_persistent_session_ds_state:get_seqno(?committed(?QOS_1), S); emqx_persistent_session_ds_state:get_seqno(?committed(?QOS_1), S);
info(seqno_q1_dup, #{s := S}) -> info(seqno_q1_dup, #{s := S}) ->
@ -292,17 +323,7 @@ info(seqno_q2_rec, #{s := S}) ->
info(seqno_q2_next, #{s := S}) -> info(seqno_q2_next, #{s := S}) ->
emqx_persistent_session_ds_state:get_seqno(?next(?QOS_2), S); emqx_persistent_session_ds_state:get_seqno(?next(?QOS_2), S);
info(n_streams, #{s := S}) -> info(n_streams, #{s := S}) ->
emqx_persistent_session_ds_state:fold_streams( emqx_persistent_session_ds_state:n_streams(S);
fun(_, _, Acc) -> Acc + 1 end,
0,
S
);
info(awaiting_rel_max, #{props := Conf}) ->
maps:get(max_awaiting_rel, Conf);
info(await_rel_timeout, #{props := _Conf}) ->
%% TODO: currently this setting is ignored:
%% maps:get(await_rel_timeout, Conf).
0;
info({MsgsQ, _PagerParams}, _Session) when MsgsQ =:= mqueue_msgs; MsgsQ =:= inflight_msgs -> info({MsgsQ, _PagerParams}, _Session) when MsgsQ =:= mqueue_msgs; MsgsQ =:= inflight_msgs ->
{error, not_implemented}. {error, not_implemented}.
@ -337,93 +358,49 @@ print_session(ClientId) ->
-spec subscribe(topic_filter(), emqx_types:subopts(), session()) -> -spec subscribe(topic_filter(), emqx_types:subopts(), session()) ->
{ok, session()} | {error, emqx_types:reason_code()}. {ok, session()} | {error, emqx_types:reason_code()}.
subscribe(
#share{},
_SubOpts,
_Session
) ->
%% TODO: Shared subscriptions are not supported yet:
{error, ?RC_SHARED_SUBSCRIPTIONS_NOT_SUPPORTED};
subscribe( subscribe(
TopicFilter, TopicFilter,
SubOpts, SubOpts,
Session = #{id := ID, s := S0} Session
) -> ) ->
case emqx_persistent_session_ds_subs:lookup(TopicFilter, S0) of case emqx_persistent_session_ds_subs:on_subscribe(TopicFilter, SubOpts, Session) of
undefined -> {ok, S1} ->
%% TODO: max subscriptions S = emqx_persistent_session_ds_state:commit(S1),
{ok, Session#{s => S}};
%% N.B.: we chose to update the router before adding the Error = {error, _} ->
%% subscription to the session/iterator table. The Error
%% reasoning for this is as follows: end.
%%
%% Messages matching this topic filter should start to be
%% persisted as soon as possible to avoid missing
%% messages. If this is the first such persistent session
%% subscription, it's important to do so early on.
%%
%% This could, in turn, lead to some inconsistency: if
%% such a route gets created but the session/iterator data
%% fails to be updated accordingly, we have a dangling
%% route. To remove such dangling routes, we may have a
%% periodic GC process that removes routes that do not
%% have a matching persistent subscription. Also, route
%% operations use dirty mnesia operations, which
%% inherently have room for inconsistencies.
%%
%% In practice, we use the iterator reference table as a
%% source of truth, since it is guarded by a transaction
%% context: we consider a subscription operation to be
%% successful if it ended up changing this table. Both
%% router and iterator information can be reconstructed
%% from this table, if needed.
ok = emqx_persistent_session_ds_router:do_add_route(TopicFilter, ID),
{SubId, S1} = emqx_persistent_session_ds_state:new_id(S0),
Subscription = #{
start_time => now_ms(),
props => SubOpts,
id => SubId,
deleted => false
},
IsNew = true;
Subscription0 = #{} ->
Subscription = Subscription0#{props => SubOpts},
IsNew = false,
S1 = S0
end,
S = emqx_persistent_session_ds_subs:on_subscribe(TopicFilter, Subscription, S1),
?tp(persistent_session_ds_subscription_added, #{
topic_filter => TopicFilter, sub => Subscription, is_new => IsNew
}),
{ok, Session#{s => S}}.
-spec unsubscribe(topic_filter(), session()) -> -spec unsubscribe(topic_filter(), session()) ->
{ok, session(), emqx_types:subopts()} | {error, emqx_types:reason_code()}. {ok, session(), emqx_types:subopts()} | {error, emqx_types:reason_code()}.
unsubscribe( unsubscribe(
TopicFilter, TopicFilter,
Session = #{id := ID, s := S0} Session = #{id := SessionId, s := S0}
) -> ) ->
case emqx_persistent_session_ds_subs:lookup(TopicFilter, S0) of case emqx_persistent_session_ds_subs:on_unsubscribe(SessionId, TopicFilter, S0) of
undefined -> {ok, S1, #{id := SubId, subopts := SubOpts}} ->
{error, ?RC_NO_SUBSCRIPTION_EXISTED}; S2 = emqx_persistent_session_ds_stream_scheduler:on_unsubscribe(SubId, S1),
Subscription = #{props := SubOpts} -> S = emqx_persistent_session_ds_state:commit(S2),
S = do_unsubscribe(ID, TopicFilter, Subscription, S0), {ok, Session#{s => S}, SubOpts};
{ok, Session#{s => S}, SubOpts} Error = {error, _} ->
Error
end. end.
-spec do_unsubscribe(id(), topic_filter(), subscription(), emqx_persistent_session_ds_state:t()) ->
emqx_persistent_session_ds_state:t().
do_unsubscribe(SessionId, TopicFilter, Subscription = #{id := SubId}, S0) ->
S1 = emqx_persistent_session_ds_subs:on_unsubscribe(TopicFilter, Subscription, S0),
?tp(persistent_session_ds_subscription_delete, #{
session_id => SessionId, topic_filter => TopicFilter
}),
S = emqx_persistent_session_ds_stream_scheduler:on_unsubscribe(SubId, S1),
?tp_span(
persistent_session_ds_subscription_route_delete,
#{session_id => SessionId, topic_filter => TopicFilter},
ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilter, SessionId)
),
S.
-spec get_subscription(topic_filter(), session()) -> -spec get_subscription(topic_filter(), session()) ->
emqx_types:subopts() | undefined. emqx_types:subopts() | undefined.
get_subscription(#share{}, _) ->
%% TODO: shared subscriptions are not supported yet:
undefined;
get_subscription(TopicFilter, #{s := S}) -> get_subscription(TopicFilter, #{s := S}) ->
case emqx_persistent_session_ds_subs:lookup(TopicFilter, S) of case emqx_persistent_session_ds_subs:lookup(TopicFilter, S) of
_Subscription = #{props := SubOpts} -> #{subopts := SubOpts} ->
SubOpts; SubOpts;
undefined -> undefined ->
undefined undefined
@ -436,11 +413,72 @@ get_subscription(TopicFilter, #{s := S}) ->
-spec publish(emqx_types:packet_id(), emqx_types:message(), session()) -> -spec publish(emqx_types:packet_id(), emqx_types:message(), session()) ->
{ok, emqx_types:publish_result(), session()} {ok, emqx_types:publish_result(), session()}
| {error, emqx_types:reason_code()}. | {error, emqx_types:reason_code()}.
publish(
PacketId,
Msg = #message{qos = ?QOS_2, timestamp = Ts},
Session = #{s := S0}
) ->
case is_awaiting_full(Session) of
false ->
case emqx_persistent_session_ds_state:get_awaiting_rel(PacketId, S0) of
undefined ->
Results = emqx_broker:publish(Msg),
S = emqx_persistent_session_ds_state:put_awaiting_rel(PacketId, Ts, S0),
{ok, Results, Session#{s => S}};
_Ts ->
{error, ?RC_PACKET_IDENTIFIER_IN_USE}
end;
true ->
{error, ?RC_RECEIVE_MAXIMUM_EXCEEDED}
end;
publish(_PacketId, Msg, Session) -> publish(_PacketId, Msg, Session) ->
%% TODO: QoS2
Result = emqx_broker:publish(Msg), Result = emqx_broker:publish(Msg),
{ok, Result, Session}. {ok, Result, Session}.
is_awaiting_full(#{s := S, props := Props}) ->
emqx_persistent_session_ds_state:n_awaiting_rel(S) >=
maps:get(max_awaiting_rel, Props, infinity).
-spec expire(emqx_types:clientinfo(), session()) ->
{ok, [], timeout(), session()} | {ok, [], session()}.
expire(ClientInfo, Session0 = #{props := Props}) ->
Session = #{s := S} = do_expire(ClientInfo, Session0),
case emqx_persistent_session_ds_state:n_awaiting_rel(S) of
0 ->
{ok, [], Session};
_ ->
AwaitRelTimeout = maps:get(await_rel_timeout, Props),
{ok, [], AwaitRelTimeout, Session}
end.
do_expire(ClientInfo, Session = #{s := S0, props := Props}) ->
%% 1. Find expired packet IDs:
Now = erlang:system_time(millisecond),
AwaitRelTimeout = maps:get(await_rel_timeout, Props),
ExpiredPacketIds =
emqx_persistent_session_ds_state:fold_awaiting_rel(
fun(PacketId, Ts, Acc) ->
Age = Now - Ts,
case Age > AwaitRelTimeout of
true ->
[PacketId | Acc];
false ->
Acc
end
end,
[],
S0
),
%% 2. Perform side effects:
_ = emqx_session_events:handle_event(ClientInfo, {expired_rel, length(ExpiredPacketIds)}),
%% 3. Update state:
S = lists:foldl(
fun emqx_persistent_session_ds_state:del_awaiting_rel/2,
S0,
ExpiredPacketIds
),
Session#{s => S}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Client -> Broker: PUBACK %% Client -> Broker: PUBACK
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -477,9 +515,14 @@ pubrec(PacketId, Session0) ->
-spec pubrel(emqx_types:packet_id(), session()) -> -spec pubrel(emqx_types:packet_id(), session()) ->
{ok, session()} | {error, emqx_types:reason_code()}. {ok, session()} | {error, emqx_types:reason_code()}.
pubrel(_PacketId, Session = #{}) -> pubrel(PacketId, Session = #{s := S0}) ->
% TODO: stub case emqx_persistent_session_ds_state:get_awaiting_rel(PacketId, S0) of
{ok, Session}. undefined ->
{error, ?RC_PACKET_IDENTIFIER_NOT_FOUND};
_TS ->
S = emqx_persistent_session_ds_state:del_awaiting_rel(PacketId, S0),
{ok, Session#{s => S}}
end.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Client -> Broker: PUBCOMP %% Client -> Broker: PUBCOMP
@ -552,6 +595,8 @@ handle_timeout(_ClientInfo, #req_sync{from = From, ref = Ref}, Session = #{s :=
S = emqx_persistent_session_ds_state:commit(S0), S = emqx_persistent_session_ds_state:commit(S0),
From ! Ref, From ! Ref,
{ok, [], Session#{s => S}}; {ok, [], Session#{s => S}};
handle_timeout(ClientInfo, expire_awaiting_rel, Session) ->
expire(ClientInfo, Session);
handle_timeout(_ClientInfo, Timeout, Session) -> handle_timeout(_ClientInfo, Timeout, Session) ->
?SLOG(warning, #{msg => "unknown_ds_timeout", timeout => Timeout}), ?SLOG(warning, #{msg => "unknown_ds_timeout", timeout => Timeout}),
{ok, [], Session}. {ok, [], Session}.
@ -571,7 +616,7 @@ replay(ClientInfo, [], Session0 = #{s := S0}) ->
Session = replay_streams(Session0#{replay => Streams}, ClientInfo), Session = replay_streams(Session0#{replay => Streams}, ClientInfo),
{ok, [], Session}. {ok, [], Session}.
replay_streams(Session0 = #{replay := [{_StreamKey, Srs0} | Rest]}, ClientInfo) -> replay_streams(Session0 = #{replay := [{StreamKey, Srs0} | Rest]}, ClientInfo) ->
case replay_batch(Srs0, Session0, ClientInfo) of case replay_batch(Srs0, Session0, ClientInfo) of
Session = #{} -> Session = #{} ->
replay_streams(Session#{replay := Rest}, ClientInfo); replay_streams(Session#{replay := Rest}, ClientInfo);
@ -579,7 +624,7 @@ replay_streams(Session0 = #{replay := [{_StreamKey, Srs0} | Rest]}, ClientInfo)
RetryTimeout = ?TIMEOUT_RETRY_REPLAY, RetryTimeout = ?TIMEOUT_RETRY_REPLAY,
?SLOG(warning, #{ ?SLOG(warning, #{
msg => "failed_to_fetch_replay_batch", msg => "failed_to_fetch_replay_batch",
stream => Srs0, stream => StreamKey,
reason => Reason, reason => Reason,
class => recoverable, class => recoverable,
retry_in_ms => RetryTimeout retry_in_ms => RetryTimeout
@ -645,7 +690,7 @@ list_client_subscriptions(ClientId) ->
%% TODO: this is not the most optimal implementation, since it %% TODO: this is not the most optimal implementation, since it
%% should be possible to avoid reading extra data (streams, etc.) %% should be possible to avoid reading extra data (streams, etc.)
case print_session(ClientId) of case print_session(ClientId) of
Sess = #{s := #{subscriptions := Subs}} -> Sess = #{s := #{subscriptions := Subs, subscription_states := SStates}} ->
Node = Node =
case Sess of case Sess of
#{'_alive' := {true, Pid}} -> #{'_alive' := {true, Pid}} ->
@ -655,8 +700,9 @@ list_client_subscriptions(ClientId) ->
end, end,
SubList = SubList =
maps:fold( maps:fold(
fun(Topic, #{props := SubProps}, Acc) -> fun(Topic, #{current_state := CS}, Acc) ->
Elem = {Topic, SubProps}, #{subopts := SubOpts} = maps:get(CS, SStates),
Elem = {Topic, SubOpts},
[Elem | Acc] [Elem | Acc]
end, end,
[], [],
@ -670,6 +716,11 @@ list_client_subscriptions(ClientId) ->
{error, not_found} {error, not_found}
end. end.
-spec get_client_subscription(emqx_types:clientid(), emqx_types:topic()) ->
subscription() | undefined.
get_client_subscription(ClientId, Topic) ->
emqx_persistent_session_ds_subs:cold_get_subscription(ClientId, Topic).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Session tables operations %% Session tables operations
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -701,7 +752,12 @@ sync(ClientId) ->
%% the broker. %% the broker.
-spec session_open(id(), emqx_types:clientinfo(), emqx_types:conninfo(), emqx_maybe:t(message())) -> -spec session_open(id(), emqx_types:clientinfo(), emqx_types:conninfo(), emqx_maybe:t(message())) ->
session() | false. session() | false.
session_open(SessionId, ClientInfo, NewConnInfo, MaybeWillMsg) -> session_open(
SessionId,
ClientInfo,
NewConnInfo = #{proto_name := ProtoName, proto_ver := ProtoVer},
MaybeWillMsg
) ->
NowMS = now_ms(), NowMS = now_ms(),
case emqx_persistent_session_ds_state:open(SessionId) of case emqx_persistent_session_ds_state:open(SessionId) of
{ok, S0} -> {ok, S0} ->
@ -720,8 +776,9 @@ session_open(SessionId, ClientInfo, NewConnInfo, MaybeWillMsg) ->
maps:get(peername, NewConnInfo), S2 maps:get(peername, NewConnInfo), S2
), ),
S4 = emqx_persistent_session_ds_state:set_will_message(MaybeWillMsg, S3), S4 = emqx_persistent_session_ds_state:set_will_message(MaybeWillMsg, S3),
S5 = emqx_persistent_session_ds_state:set_clientinfo(ClientInfo, S4), S5 = set_clientinfo(ClientInfo, S4),
S = emqx_persistent_session_ds_state:commit(S5), S6 = emqx_persistent_session_ds_state:set_protocol({ProtoName, ProtoVer}, S5),
S = emqx_persistent_session_ds_state:commit(S6),
Inflight = emqx_persistent_session_ds_inflight:new( Inflight = emqx_persistent_session_ds_inflight:new(
receive_maximum(NewConnInfo) receive_maximum(NewConnInfo)
), ),
@ -744,7 +801,9 @@ session_open(SessionId, ClientInfo, NewConnInfo, MaybeWillMsg) ->
emqx_session:conf() emqx_session:conf()
) -> ) ->
session(). session().
session_ensure_new(Id, ClientInfo, ConnInfo, MaybeWillMsg, Conf) -> session_ensure_new(
Id, ClientInfo, ConnInfo = #{proto_name := ProtoName, proto_ver := ProtoVer}, MaybeWillMsg, Conf
) ->
?tp(debug, persistent_session_ds_ensure_new, #{id => Id}), ?tp(debug, persistent_session_ds_ensure_new, #{id => Id}),
Now = now_ms(), Now = now_ms(),
S0 = emqx_persistent_session_ds_state:create_new(Id), S0 = emqx_persistent_session_ds_state:create_new(Id),
@ -767,8 +826,9 @@ session_ensure_new(Id, ClientInfo, ConnInfo, MaybeWillMsg, Conf) ->
] ]
), ),
S5 = emqx_persistent_session_ds_state:set_will_message(MaybeWillMsg, S4), S5 = emqx_persistent_session_ds_state:set_will_message(MaybeWillMsg, S4),
S6 = emqx_persistent_session_ds_state:set_clientinfo(ClientInfo, S5), S6 = set_clientinfo(ClientInfo, S5),
S = emqx_persistent_session_ds_state:commit(S6), S7 = emqx_persistent_session_ds_state:set_protocol({ProtoName, ProtoVer}, S6),
S = emqx_persistent_session_ds_state:commit(S7),
#{ #{
id => Id, id => Id,
props => Conf, props => Conf,
@ -779,18 +839,12 @@ session_ensure_new(Id, ClientInfo, ConnInfo, MaybeWillMsg, Conf) ->
%% @doc Called when a client reconnects with `clean session=true' or %% @doc Called when a client reconnects with `clean session=true' or
%% during session GC %% during session GC
-spec session_drop(id(), _Reason) -> ok. -spec session_drop(id(), _Reason) -> ok.
session_drop(ID, Reason) -> session_drop(SessionId, Reason) ->
case emqx_persistent_session_ds_state:open(ID) of case emqx_persistent_session_ds_state:open(SessionId) of
{ok, S0} -> {ok, S0} ->
?tp(debug, drop_persistent_session, #{client_id => ID, reason => Reason}), ?tp(debug, drop_persistent_session, #{client_id => SessionId, reason => Reason}),
_S = emqx_persistent_session_ds_subs:fold( emqx_persistent_session_ds_subs:on_session_drop(SessionId, S0),
fun(TopicFilter, Subscription, S) -> emqx_persistent_session_ds_state:delete(SessionId);
do_unsubscribe(ID, TopicFilter, Subscription, S)
end,
S0,
S0
),
emqx_persistent_session_ds_state:delete(ID);
undefined -> undefined ->
ok ok
end. end.
@ -798,6 +852,11 @@ session_drop(ID, Reason) ->
now_ms() -> now_ms() ->
erlang:system_time(millisecond). erlang:system_time(millisecond).
set_clientinfo(ClientInfo0, S) ->
%% Remove unnecessary fields from the clientinfo:
ClientInfo = maps:without([cn, dn, auth_result], ClientInfo0),
emqx_persistent_session_ds_state:set_clientinfo(ClientInfo, S).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% RPC targets (v1) %% RPC targets (v1)
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -867,29 +926,38 @@ new_batch({StreamKey, Srs0}, BatchSize, Session0 = #{s := S0}, ClientInfo) ->
%% TODO: Handle unrecoverable error. %% TODO: Handle unrecoverable error.
?SLOG(info, #{ ?SLOG(info, #{
msg => "failed_to_fetch_batch", msg => "failed_to_fetch_batch",
stream => Srs1, stream => StreamKey,
reason => Reason, reason => Reason,
class => Class class => Class
}), }),
Session0 Session0
end. end.
enqueue_batch(IsReplay, BatchSize, Srs0, Session = #{inflight := Inflight0}, ClientInfo) -> enqueue_batch(IsReplay, BatchSize, Srs0, Session = #{inflight := Inflight0, s := S}, ClientInfo) ->
#srs{ #srs{
it_begin = ItBegin0, it_begin = ItBegin0,
it_end = ItEnd0, it_end = ItEnd0,
first_seqno_qos1 = FirstSeqnoQos1, first_seqno_qos1 = FirstSeqnoQos1,
first_seqno_qos2 = FirstSeqnoQos2 first_seqno_qos2 = FirstSeqnoQos2,
sub_state_id = SubStateId
} = Srs0, } = Srs0,
ItBegin = ItBegin =
case IsReplay of case IsReplay of
true -> ItBegin0; true -> ItBegin0;
false -> ItEnd0 false -> ItEnd0
end, end,
SubState = #{} = emqx_persistent_session_ds_state:get_subscription_state(SubStateId, S),
case emqx_ds:next(?PERSISTENT_MESSAGE_DB, ItBegin, BatchSize) of case emqx_ds:next(?PERSISTENT_MESSAGE_DB, ItBegin, BatchSize) of
{ok, ItEnd, Messages} -> {ok, ItEnd, Messages} ->
{Inflight, LastSeqnoQos1, LastSeqnoQos2} = process_batch( {Inflight, LastSeqnoQos1, LastSeqnoQos2} = process_batch(
IsReplay, Session, ClientInfo, FirstSeqnoQos1, FirstSeqnoQos2, Messages, Inflight0 IsReplay,
Session,
SubState,
ClientInfo,
FirstSeqnoQos1,
FirstSeqnoQos2,
Messages,
Inflight0
), ),
Srs = Srs0#srs{ Srs = Srs0#srs{
it_begin = ItBegin, it_begin = ItBegin,
@ -913,27 +981,29 @@ enqueue_batch(IsReplay, BatchSize, Srs0, Session = #{inflight := Inflight0}, Cli
%% key_of_iter(#{3 := #{3 := #{5 := K}}}) -> %% key_of_iter(#{3 := #{3 := #{5 := K}}}) ->
%% K. %% K.
process_batch(_IsReplay, _Session, _ClientInfo, LastSeqNoQos1, LastSeqNoQos2, [], Inflight) -> process_batch(
_IsReplay, _Session, _SubState, _ClientInfo, LastSeqNoQos1, LastSeqNoQos2, [], Inflight
) ->
{Inflight, LastSeqNoQos1, LastSeqNoQos2}; {Inflight, LastSeqNoQos1, LastSeqNoQos2};
process_batch( process_batch(
IsReplay, Session, ClientInfo, FirstSeqNoQos1, FirstSeqNoQos2, [KV | Messages], Inflight0 IsReplay,
Session,
SubState,
ClientInfo,
FirstSeqNoQos1,
FirstSeqNoQos2,
[KV | Messages],
Inflight0
) -> ) ->
#{s := S, props := #{upgrade_qos := UpgradeQoS}} = Session, #{s := S} = Session,
{_DsMsgKey, Msg0 = #message{topic = Topic}} = KV, #{upgrade_qos := UpgradeQoS, subopts := SubOpts} = SubState,
{_DsMsgKey, Msg0} = KV,
Comm1 = emqx_persistent_session_ds_state:get_seqno(?committed(?QOS_1), S), Comm1 = emqx_persistent_session_ds_state:get_seqno(?committed(?QOS_1), S),
Comm2 = emqx_persistent_session_ds_state:get_seqno(?committed(?QOS_2), S), Comm2 = emqx_persistent_session_ds_state:get_seqno(?committed(?QOS_2), S),
Dup1 = emqx_persistent_session_ds_state:get_seqno(?dup(?QOS_1), S), Dup1 = emqx_persistent_session_ds_state:get_seqno(?dup(?QOS_1), S),
Dup2 = emqx_persistent_session_ds_state:get_seqno(?dup(?QOS_2), S), Dup2 = emqx_persistent_session_ds_state:get_seqno(?dup(?QOS_2), S),
Rec = emqx_persistent_session_ds_state:get_seqno(?rec, S), Rec = emqx_persistent_session_ds_state:get_seqno(?rec, S),
Subs = emqx_persistent_session_ds_state:get_subscriptions(S), Msgs = emqx_session:enrich_message(ClientInfo, Msg0, SubOpts, UpgradeQoS),
Msgs = [
Msg
|| SubMatch <- emqx_topic_gbt:matches(Topic, Subs, []),
Msg <- begin
#{props := SubOpts} = emqx_topic_gbt:get_record(SubMatch, Subs),
emqx_session:enrich_message(ClientInfo, Msg0, SubOpts, UpgradeQoS)
end
],
{Inflight, LastSeqNoQos1, LastSeqNoQos2} = lists:foldl( {Inflight, LastSeqNoQos1, LastSeqNoQos2} = lists:foldl(
fun(Msg = #message{qos = Qos}, {Acc, SeqNoQos10, SeqNoQos20}) -> fun(Msg = #message{qos = Qos}, {Acc, SeqNoQos10, SeqNoQos20}) ->
case Qos of case Qos of
@ -989,14 +1059,16 @@ process_batch(
Msgs Msgs
), ),
process_batch( process_batch(
IsReplay, Session, ClientInfo, LastSeqNoQos1, LastSeqNoQos2, Messages, Inflight IsReplay, Session, SubState, ClientInfo, LastSeqNoQos1, LastSeqNoQos2, Messages, Inflight
). ).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Transient messages %% Transient messages
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
enqueue_transient(ClientInfo, Msg0, Session = #{s := S, props := #{upgrade_qos := UpgradeQoS}}) -> enqueue_transient(
_ClientInfo, Msg = #message{qos = Qos}, Session = #{inflight := Inflight0, s := S0}
) ->
%% TODO: Such messages won't be retransmitted, should the session %% TODO: Such messages won't be retransmitted, should the session
%% reconnect before transient messages are acked. %% reconnect before transient messages are acked.
%% %%
@ -1006,18 +1078,6 @@ enqueue_transient(ClientInfo, Msg0, Session = #{s := S, props := #{upgrade_qos :
%% queued messages. Since streams in this DB are exclusive to the %% queued messages. Since streams in this DB are exclusive to the
%% session, messages from the queue can be dropped as soon as they %% session, messages from the queue can be dropped as soon as they
%% are acked. %% are acked.
Subs = emqx_persistent_session_ds_state:get_subscriptions(S),
Msgs = [
Msg
|| SubMatch <- emqx_topic_gbt:matches(Msg0#message.topic, Subs, []),
Msg <- begin
#{props := SubOpts} = emqx_topic_gbt:get_record(SubMatch, Subs),
emqx_session:enrich_message(ClientInfo, Msg0, SubOpts, UpgradeQoS)
end
],
lists:foldl(fun do_enqueue_transient/2, Session, Msgs).
do_enqueue_transient(Msg = #message{qos = Qos}, Session = #{inflight := Inflight0, s := S0}) ->
case Qos of case Qos of
?QOS_0 -> ?QOS_0 ->
S = S0, S = S0,

View File

@ -65,17 +65,21 @@
last_seqno_qos2 = 0 :: emqx_persistent_session_ds:seqno(), last_seqno_qos2 = 0 :: emqx_persistent_session_ds:seqno(),
%% This stream belongs to an unsubscribed topic-filter, and is %% This stream belongs to an unsubscribed topic-filter, and is
%% marked for deletion: %% marked for deletion:
unsubscribed = false :: boolean() unsubscribed = false :: boolean(),
%% Reference to the subscription state:
sub_state_id :: emqx_persistent_session_ds_subs:subscription_state_id()
}). }).
%% Session metadata keys: %% Session metadata keys:
-define(created_at, created_at). -define(created_at, created_at).
-define(last_alive_at, last_alive_at). -define(last_alive_at, last_alive_at).
-define(expiry_interval, expiry_interval). -define(expiry_interval, expiry_interval).
%% Unique integer used to create unique identities %% Unique integer used to create unique identities:
-define(last_id, last_id). -define(last_id, last_id).
%% Connection info (relevent for the dashboard):
-define(peername, peername). -define(peername, peername).
-define(will_message, will_message). -define(will_message, will_message).
-define(clientinfo, clientinfo). -define(clientinfo, clientinfo).
-define(protocol, protocol).
-endif. -endif.

View File

@ -22,6 +22,9 @@
%% It is responsible for saving, caching, and restoring session state. %% It is responsible for saving, caching, and restoring session state.
%% It is completely devoid of business logic. Not even the default %% It is completely devoid of business logic. Not even the default
%% values should be set in this module. %% values should be set in this module.
%%
%% Session process MUST NOT use `cold_*' functions! They are reserved
%% for use in the management APIs.
-module(emqx_persistent_session_ds_state). -module(emqx_persistent_session_ds_state).
-export([create_tables/0]). -export([create_tables/0]).
@ -33,22 +36,44 @@
-export([get_clientinfo/1, set_clientinfo/2]). -export([get_clientinfo/1, set_clientinfo/2]).
-export([get_will_message/1, set_will_message/2, clear_will_message/1, clear_will_message_now/1]). -export([get_will_message/1, set_will_message/2, clear_will_message/1, clear_will_message_now/1]).
-export([get_peername/1, set_peername/2]). -export([get_peername/1, set_peername/2]).
-export([get_protocol/1, set_protocol/2]).
-export([new_id/1]). -export([new_id/1]).
-export([get_stream/2, put_stream/3, del_stream/2, fold_streams/3]). -export([get_stream/2, put_stream/3, del_stream/2, fold_streams/3, n_streams/1]).
-export([get_seqno/2, put_seqno/3]). -export([get_seqno/2, put_seqno/3]).
-export([get_rank/2, put_rank/3, del_rank/2, fold_ranks/3]). -export([get_rank/2, put_rank/3, del_rank/2, fold_ranks/3]).
-export([get_subscriptions/1, put_subscription/4, del_subscription/3]). -export([
get_subscription_state/2,
cold_get_subscription_state/2,
fold_subscription_states/3,
put_subscription_state/3,
del_subscription_state/2
]).
-export([
get_subscription/2,
cold_get_subscription/2,
fold_subscriptions/3,
n_subscriptions/1,
put_subscription/3,
del_subscription/2
]).
-export([
get_awaiting_rel/2,
put_awaiting_rel/3,
del_awaiting_rel/2,
fold_awaiting_rel/3,
n_awaiting_rel/1
]).
-export([make_session_iterator/0, session_iterator_next/2]). -export([make_session_iterator/0, session_iterator_next/2]).
-export_type([ -export_type([
t/0, t/0,
metadata/0, metadata/0,
subscriptions/0,
seqno_type/0, seqno_type/0,
stream_key/0, stream_key/0,
rank_key/0, rank_key/0,
session_iterator/0 session_iterator/0,
protocol/0
]). ]).
-include("emqx_mqtt.hrl"). -include("emqx_mqtt.hrl").
@ -62,8 +87,6 @@
-type message() :: emqx_types:message(). -type message() :: emqx_types:message().
-type subscriptions() :: emqx_topic_gbt:t(_SubId, emqx_persistent_session_ds:subscription()).
-opaque session_iterator() :: emqx_persistent_session_ds:id() | '$end_of_table'. -opaque session_iterator() :: emqx_persistent_session_ds:id() | '$end_of_table'.
%% Generic key-value wrapper that is used for exporting arbitrary %% Generic key-value wrapper that is used for exporting arbitrary
@ -92,13 +115,16 @@
dirty :: #{K => dirty | del} dirty :: #{K => dirty | del}
}. }.
-type protocol() :: {binary(), emqx_types:proto_ver()}.
-type metadata() :: -type metadata() ::
#{ #{
?created_at => emqx_persistent_session_ds:timestamp(), ?created_at => emqx_persistent_session_ds:timestamp(),
?last_alive_at => emqx_persistent_session_ds:timestamp(), ?last_alive_at => emqx_persistent_session_ds:timestamp(),
?expiry_interval => non_neg_integer(), ?expiry_interval => non_neg_integer(),
?last_id => integer(), ?last_id => integer(),
?peername => emqx_types:peername() ?peername => emqx_types:peername(),
?protocol => protocol()
}. }.
-type seqno_type() :: -type seqno_type() ::
@ -110,22 +136,49 @@
| ?rec | ?rec
| ?committed(?QOS_2). | ?committed(?QOS_2).
-define(id, id).
-define(dirty, dirty).
-define(metadata, metadata).
-define(subscriptions, subscriptions).
-define(subscription_states, subscription_states).
-define(seqnos, seqnos).
-define(streams, streams).
-define(ranks, ranks).
-define(awaiting_rel, awaiting_rel).
-opaque t() :: #{ -opaque t() :: #{
id := emqx_persistent_session_ds:id(), ?id := emqx_persistent_session_ds:id(),
dirty := boolean(), ?dirty := boolean(),
metadata := metadata(), ?metadata := metadata(),
subscriptions := subscriptions(), ?subscriptions := pmap(
seqnos := pmap(seqno_type(), emqx_persistent_session_ds:seqno()), emqx_persistent_session_ds:topic_filter(), emqx_persistent_session_ds_subs:subscription()
streams := pmap(emqx_ds:stream(), emqx_persistent_session_ds:stream_state()), ),
ranks := pmap(term(), integer()) ?subscription_states := pmap(
emqx_persistent_session_ds_subs:subscription_state_id(),
emqx_persistent_session_ds_subs:subscription_state()
),
?seqnos := pmap(seqno_type(), emqx_persistent_session_ds:seqno()),
?streams := pmap(emqx_ds:stream(), emqx_persistent_session_ds:stream_state()),
?ranks := pmap(term(), integer()),
?awaiting_rel := pmap(emqx_types:packet_id(), _Timestamp :: integer())
}. }.
-define(session_tab, emqx_ds_session_tab). -define(session_tab, emqx_ds_session_tab).
-define(subscription_tab, emqx_ds_session_subscriptions). -define(subscription_tab, emqx_ds_session_subscriptions).
-define(subscription_states_tab, emqx_ds_session_subscription_states).
-define(stream_tab, emqx_ds_session_streams). -define(stream_tab, emqx_ds_session_streams).
-define(seqno_tab, emqx_ds_session_seqnos). -define(seqno_tab, emqx_ds_session_seqnos).
-define(rank_tab, emqx_ds_session_ranks). -define(rank_tab, emqx_ds_session_ranks).
-define(pmap_tables, [?stream_tab, ?seqno_tab, ?rank_tab, ?subscription_tab]). -define(awaiting_rel_tab, emqx_ds_session_awaiting_rel).
-define(pmaps, [
{?subscriptions, ?subscription_tab},
{?subscription_states, ?subscription_states_tab},
{?streams, ?stream_tab},
{?seqnos, ?seqno_tab},
{?ranks, ?rank_tab},
{?awaiting_rel, ?awaiting_rel_tab}
]).
%% Enable this flag if you suspect some code breaks the sequence: %% Enable this flag if you suspect some code breaks the sequence:
-ifndef(CHECK_SEQNO). -ifndef(CHECK_SEQNO).
@ -152,23 +205,25 @@ create_tables() ->
{attributes, record_info(fields, kv)} {attributes, record_info(fields, kv)}
] ]
), ),
[create_kv_pmap_table(Table) || Table <- ?pmap_tables], {_, PmapTables} = lists:unzip(?pmaps),
mria:wait_for_tables([?session_tab | ?pmap_tables]). [create_kv_pmap_table(Table) || Table <- PmapTables],
mria:wait_for_tables([?session_tab | PmapTables]).
-spec open(emqx_persistent_session_ds:id()) -> {ok, t()} | undefined. -spec open(emqx_persistent_session_ds:id()) -> {ok, t()} | undefined.
open(SessionId) -> open(SessionId) ->
ro_transaction(fun() -> ro_transaction(fun() ->
case kv_restore(?session_tab, SessionId) of case kv_restore(?session_tab, SessionId) of
[Metadata] -> [Metadata] ->
Rec = #{ Rec = update_pmaps(
id => SessionId, fun(_Pmap, Table) ->
metadata => Metadata, pmap_open(Table, SessionId)
subscriptions => read_subscriptions(SessionId), end,
streams => pmap_open(?stream_tab, SessionId), #{
seqnos => pmap_open(?seqno_tab, SessionId), id => SessionId,
ranks => pmap_open(?rank_tab, SessionId), metadata => Metadata,
?unset_dirty ?unset_dirty
}, }
),
{ok, Rec}; {ok, Rec};
[] -> [] ->
undefined undefined
@ -185,27 +240,13 @@ print_session(SessionId) ->
end. end.
-spec format(t()) -> map(). -spec format(t()) -> map().
format(#{ format(Rec) ->
metadata := Metadata, update_pmaps(
subscriptions := SubsGBT, fun(Pmap, _Table) ->
streams := Streams, pmap_format(Pmap)
seqnos := Seqnos,
ranks := Ranks
}) ->
Subs = emqx_topic_gbt:fold(
fun(Key, Sub, Acc) ->
maps:put(emqx_topic_gbt:get_topic(Key), Sub, Acc)
end, end,
#{}, maps:without([id, dirty], Rec)
SubsGBT ).
),
#{
metadata => Metadata,
subscriptions => Subs,
streams => pmap_format(Streams),
seqnos => pmap_format(Seqnos),
ranks => pmap_format(Ranks)
}.
-spec list_sessions() -> [emqx_persistent_session_ds:id()]. -spec list_sessions() -> [emqx_persistent_session_ds:id()].
list_sessions() -> list_sessions() ->
@ -215,7 +256,7 @@ list_sessions() ->
delete(Id) -> delete(Id) ->
transaction( transaction(
fun() -> fun() ->
[kv_pmap_delete(Table, Id) || Table <- ?pmap_tables], [kv_pmap_delete(Table, Id) || {_, Table} <- ?pmaps],
mnesia:delete(?session_tab, Id, write) mnesia:delete(?session_tab, Id, write)
end end
). ).
@ -226,36 +267,34 @@ commit(Rec = #{dirty := false}) ->
commit( commit(
Rec = #{ Rec = #{
id := SessionId, id := SessionId,
metadata := Metadata, metadata := Metadata
streams := Streams,
seqnos := SeqNos,
ranks := Ranks
} }
) -> ) ->
check_sequence(Rec), check_sequence(Rec),
transaction(fun() -> transaction(fun() ->
kv_persist(?session_tab, SessionId, Metadata), kv_persist(?session_tab, SessionId, Metadata),
Rec#{ update_pmaps(
streams => pmap_commit(SessionId, Streams), fun(Pmap, _Table) ->
seqnos => pmap_commit(SessionId, SeqNos), pmap_commit(SessionId, Pmap)
ranks => pmap_commit(SessionId, Ranks), end,
?unset_dirty Rec#{?unset_dirty}
} )
end). end).
-spec create_new(emqx_persistent_session_ds:id()) -> t(). -spec create_new(emqx_persistent_session_ds:id()) -> t().
create_new(SessionId) -> create_new(SessionId) ->
transaction(fun() -> transaction(fun() ->
delete(SessionId), delete(SessionId),
#{ update_pmaps(
id => SessionId, fun(_Pmap, Table) ->
metadata => #{}, pmap_open(Table, SessionId)
subscriptions => emqx_topic_gbt:new(), end,
streams => pmap_open(?stream_tab, SessionId), #{
seqnos => pmap_open(?seqno_tab, SessionId), id => SessionId,
ranks => pmap_open(?rank_tab, SessionId), metadata => #{},
?set_dirty ?set_dirty
} }
)
end). end).
%% %%
@ -292,6 +331,14 @@ get_peername(Rec) ->
set_peername(Val, Rec) -> set_peername(Val, Rec) ->
set_meta(?peername, Val, Rec). set_meta(?peername, Val, Rec).
-spec get_protocol(t()) -> protocol() | undefined.
get_protocol(Rec) ->
get_meta(?protocol, Rec).
-spec set_protocol(protocol(), t()) -> t().
set_protocol(Val, Rec) ->
set_meta(?protocol, Val, Rec).
-spec get_clientinfo(t()) -> emqx_maybe:t(emqx_types:clientinfo()). -spec get_clientinfo(t()) -> emqx_maybe:t(emqx_types:clientinfo()).
get_clientinfo(Rec) -> get_clientinfo(Rec) ->
get_meta(?clientinfo, Rec). get_meta(?clientinfo, Rec).
@ -336,30 +383,65 @@ new_id(Rec) ->
%% %%
-spec get_subscriptions(t()) -> subscriptions(). -spec get_subscription(emqx_persistent_session_ds:topic_filter(), t()) ->
get_subscriptions(#{subscriptions := Subs}) -> emqx_persistent_session_ds_subs:subscription() | undefined.
Subs. get_subscription(TopicFilter, Rec) ->
gen_get(?subscriptions, TopicFilter, Rec).
-spec cold_get_subscription(emqx_persistent_session_ds:id(), emqx_types:topic()) ->
[emqx_persistent_session_ds_subs:subscription()].
cold_get_subscription(SessionId, Topic) ->
kv_pmap_read(?subscription_tab, SessionId, Topic).
-spec fold_subscriptions(fun(), Acc, t()) -> Acc.
fold_subscriptions(Fun, Acc, Rec) ->
gen_fold(?subscriptions, Fun, Acc, Rec).
-spec n_subscriptions(t()) -> non_neg_integer().
n_subscriptions(Rec) ->
gen_size(?subscriptions, Rec).
-spec put_subscription( -spec put_subscription(
emqx_persistent_session_ds:topic_filter(), emqx_persistent_session_ds:topic_filter(),
_SubId, emqx_persistent_session_ds_subs:subscription(),
emqx_persistent_session_ds:subscription(),
t() t()
) -> t(). ) -> t().
put_subscription(TopicFilter, SubId, Subscription, Rec = #{id := Id, subscriptions := Subs0}) -> put_subscription(TopicFilter, Subscription, Rec) ->
%% Note: currently changes to the subscriptions are persisted immediately. gen_put(?subscriptions, TopicFilter, Subscription, Rec).
Key = {TopicFilter, SubId},
transaction(fun() -> kv_pmap_persist(?subscription_tab, Id, Key, Subscription) end),
Subs = emqx_topic_gbt:insert(TopicFilter, SubId, Subscription, Subs0),
Rec#{subscriptions => Subs}.
-spec del_subscription(emqx_persistent_session_ds:topic_filter(), _SubId, t()) -> t(). -spec del_subscription(emqx_persistent_session_ds:topic_filter(), t()) -> t().
del_subscription(TopicFilter, SubId, Rec = #{id := Id, subscriptions := Subs0}) -> del_subscription(TopicFilter, Rec) ->
%% Note: currently the subscriptions are persisted immediately. gen_del(?subscriptions, TopicFilter, Rec).
Key = {TopicFilter, SubId},
transaction(fun() -> kv_pmap_delete(?subscription_tab, Id, Key) end), %%
Subs = emqx_topic_gbt:delete(TopicFilter, SubId, Subs0),
Rec#{subscriptions => Subs}. -spec get_subscription_state(emqx_persistent_session_ds_subs:subscription_state_id(), t()) ->
emqx_persistent_session_ds_subs:subscription_state() | undefined.
get_subscription_state(SStateId, Rec) ->
gen_get(?subscription_states, SStateId, Rec).
-spec cold_get_subscription_state(
emqx_persistent_session_ds:id(), emqx_persistent_session_ds_subs:subscription_state_id()
) ->
[emqx_persistent_session_ds_subs:subscription_state()].
cold_get_subscription_state(SessionId, SStateId) ->
kv_pmap_read(?subscription_states_tab, SessionId, SStateId).
-spec fold_subscription_states(fun(), Acc, t()) -> Acc.
fold_subscription_states(Fun, Acc, Rec) ->
gen_fold(?subscription_states, Fun, Acc, Rec).
-spec put_subscription_state(
emqx_persistent_session_ds_subs:subscription_state_id(),
emqx_persistent_session_ds_subs:subscription_state(),
t()
) -> t().
put_subscription_state(SStateId, SState, Rec) ->
gen_put(?subscription_states, SStateId, SState, Rec).
-spec del_subscription_state(emqx_persistent_session_ds_subs:subscription_state_id(), t()) -> t().
del_subscription_state(SStateId, Rec) ->
gen_del(?subscription_states, SStateId, Rec).
%% %%
@ -368,29 +450,33 @@ del_subscription(TopicFilter, SubId, Rec = #{id := Id, subscriptions := Subs0})
-spec get_stream(stream_key(), t()) -> -spec get_stream(stream_key(), t()) ->
emqx_persistent_session_ds:stream_state() | undefined. emqx_persistent_session_ds:stream_state() | undefined.
get_stream(Key, Rec) -> get_stream(Key, Rec) ->
gen_get(streams, Key, Rec). gen_get(?streams, Key, Rec).
-spec put_stream(stream_key(), emqx_persistent_session_ds:stream_state(), t()) -> t(). -spec put_stream(stream_key(), emqx_persistent_session_ds:stream_state(), t()) -> t().
put_stream(Key, Val, Rec) -> put_stream(Key, Val, Rec) ->
gen_put(streams, Key, Val, Rec). gen_put(?streams, Key, Val, Rec).
-spec del_stream(stream_key(), t()) -> t(). -spec del_stream(stream_key(), t()) -> t().
del_stream(Key, Rec) -> del_stream(Key, Rec) ->
gen_del(streams, Key, Rec). gen_del(?streams, Key, Rec).
-spec fold_streams(fun(), Acc, t()) -> Acc. -spec fold_streams(fun(), Acc, t()) -> Acc.
fold_streams(Fun, Acc, Rec) -> fold_streams(Fun, Acc, Rec) ->
gen_fold(streams, Fun, Acc, Rec). gen_fold(?streams, Fun, Acc, Rec).
-spec n_streams(t()) -> non_neg_integer().
n_streams(Rec) ->
gen_size(?streams, Rec).
%% %%
-spec get_seqno(seqno_type(), t()) -> emqx_persistent_session_ds:seqno() | undefined. -spec get_seqno(seqno_type(), t()) -> emqx_persistent_session_ds:seqno() | undefined.
get_seqno(Key, Rec) -> get_seqno(Key, Rec) ->
gen_get(seqnos, Key, Rec). gen_get(?seqnos, Key, Rec).
-spec put_seqno(seqno_type(), emqx_persistent_session_ds:seqno(), t()) -> t(). -spec put_seqno(seqno_type(), emqx_persistent_session_ds:seqno(), t()) -> t().
put_seqno(Key, Val, Rec) -> put_seqno(Key, Val, Rec) ->
gen_put(seqnos, Key, Val, Rec). gen_put(?seqnos, Key, Val, Rec).
%% %%
@ -398,19 +484,43 @@ put_seqno(Key, Val, Rec) ->
-spec get_rank(rank_key(), t()) -> integer() | undefined. -spec get_rank(rank_key(), t()) -> integer() | undefined.
get_rank(Key, Rec) -> get_rank(Key, Rec) ->
gen_get(ranks, Key, Rec). gen_get(?ranks, Key, Rec).
-spec put_rank(rank_key(), integer(), t()) -> t(). -spec put_rank(rank_key(), integer(), t()) -> t().
put_rank(Key, Val, Rec) -> put_rank(Key, Val, Rec) ->
gen_put(ranks, Key, Val, Rec). gen_put(?ranks, Key, Val, Rec).
-spec del_rank(rank_key(), t()) -> t(). -spec del_rank(rank_key(), t()) -> t().
del_rank(Key, Rec) -> del_rank(Key, Rec) ->
gen_del(ranks, Key, Rec). gen_del(?ranks, Key, Rec).
-spec fold_ranks(fun(), Acc, t()) -> Acc. -spec fold_ranks(fun(), Acc, t()) -> Acc.
fold_ranks(Fun, Acc, Rec) -> fold_ranks(Fun, Acc, Rec) ->
gen_fold(ranks, Fun, Acc, Rec). gen_fold(?ranks, Fun, Acc, Rec).
%%
-spec get_awaiting_rel(emqx_types:packet_id(), t()) -> integer() | undefined.
get_awaiting_rel(Key, Rec) ->
gen_get(?awaiting_rel, Key, Rec).
-spec put_awaiting_rel(emqx_types:packet_id(), _Timestamp :: integer(), t()) -> t().
put_awaiting_rel(Key, Val, Rec) ->
gen_put(?awaiting_rel, Key, Val, Rec).
-spec del_awaiting_rel(emqx_types:packet_id(), t()) -> t().
del_awaiting_rel(Key, Rec) ->
gen_del(?awaiting_rel, Key, Rec).
-spec fold_awaiting_rel(fun(), Acc, t()) -> Acc.
fold_awaiting_rel(Fun, Acc, Rec) ->
gen_fold(?awaiting_rel, Fun, Acc, Rec).
-spec n_awaiting_rel(t()) -> non_neg_integer().
n_awaiting_rel(Rec) ->
gen_size(?awaiting_rel, Rec).
%%
-spec make_session_iterator() -> session_iterator(). -spec make_session_iterator() -> session_iterator().
make_session_iterator() -> make_session_iterator() ->
@ -475,16 +585,20 @@ gen_del(Field, Key, Rec) ->
Rec#{?set_dirty} Rec#{?set_dirty}
). ).
%% gen_size(Field, Rec) ->
check_sequence(Rec),
pmap_size(maps:get(Field, Rec)).
read_subscriptions(SessionId) -> -spec update_pmaps(fun((pmap(_K, _V) | undefined, atom()) -> term()), map()) -> map().
Records = kv_pmap_restore(?subscription_tab, SessionId), update_pmaps(Fun, Map) ->
lists:foldl( lists:foldl(
fun({{TopicFilter, SubId}, Subscription}, Acc) -> fun({MapKey, Table}, Acc) ->
emqx_topic_gbt:insert(TopicFilter, SubId, Subscription, Acc) OldVal = maps:get(MapKey, Map, undefined),
Val = Fun(OldVal, Table),
maps:put(MapKey, Val, Acc)
end, end,
emqx_topic_gbt:new(), Map,
Records ?pmaps
). ).
%% %%
@ -547,6 +661,10 @@ pmap_commit(
pmap_format(#pmap{cache = Cache}) -> pmap_format(#pmap{cache = Cache}) ->
Cache. Cache.
-spec pmap_size(pmap(_K, _V)) -> non_neg_integer().
pmap_size(#pmap{cache = Cache}) ->
maps:size(Cache).
%% Functions dealing with set tables: %% Functions dealing with set tables:
kv_persist(Tab, SessionId, Val0) -> kv_persist(Tab, SessionId, Val0) ->
@ -574,6 +692,14 @@ kv_pmap_persist(Tab, SessionId, Key, Val0) ->
Val = encoder(encode, Tab, Val0), Val = encoder(encode, Tab, Val0),
mnesia:write(Tab, #kv{k = {SessionId, Key}, v = Val}, write). mnesia:write(Tab, #kv{k = {SessionId, Key}, v = Val}, write).
kv_pmap_read(Table, SessionId, Key) ->
lists:map(
fun(#kv{v = Val}) ->
encoder(decode, Table, Val)
end,
mnesia:dirty_read(Table, {SessionId, Key})
).
kv_pmap_restore(Table, SessionId) -> kv_pmap_restore(Table, SessionId) ->
MS = [{#kv{k = {SessionId, '$1'}, v = '$2'}, [], [{{'$1', '$2'}}]}], MS = [{#kv{k = {SessionId, '$1'}, v = '$2'}, [], [{{'$1', '$2'}}]}],
Objs = mnesia:select(Table, MS, read), Objs = mnesia:select(Table, MS, read),

View File

@ -126,9 +126,10 @@ find_new_streams(S) ->
renew_streams(S0) -> renew_streams(S0) ->
S1 = remove_unsubscribed_streams(S0), S1 = remove_unsubscribed_streams(S0),
S2 = remove_fully_replayed_streams(S1), S2 = remove_fully_replayed_streams(S1),
S3 = update_stream_subscription_state_ids(S2),
emqx_persistent_session_ds_subs:fold( emqx_persistent_session_ds_subs:fold(
fun fun
(Key, #{start_time := StartTime, id := SubId, deleted := false}, Acc) -> (Key, #{start_time := StartTime, id := SubId, current_state := SStateId}, Acc) ->
TopicFilter = emqx_topic:words(Key), TopicFilter = emqx_topic:words(Key),
Streams = select_streams( Streams = select_streams(
SubId, SubId,
@ -137,7 +138,7 @@ renew_streams(S0) ->
), ),
lists:foldl( lists:foldl(
fun(I, Acc1) -> fun(I, Acc1) ->
ensure_iterator(TopicFilter, StartTime, SubId, I, Acc1) ensure_iterator(TopicFilter, StartTime, SubId, SStateId, I, Acc1)
end, end,
Acc, Acc,
Streams Streams
@ -145,8 +146,8 @@ renew_streams(S0) ->
(_Key, _DeletedSubscription, Acc) -> (_Key, _DeletedSubscription, Acc) ->
Acc Acc
end, end,
S2, S3,
S2 S3
). ).
-spec on_unsubscribe( -spec on_unsubscribe(
@ -201,23 +202,32 @@ is_fully_acked(Srs, S) ->
%% Internal functions %% Internal functions
%%================================================================================ %%================================================================================
ensure_iterator(TopicFilter, StartTime, SubId, {{RankX, RankY}, Stream}, S) -> ensure_iterator(TopicFilter, StartTime, SubId, SStateId, {{RankX, RankY}, Stream}, S) ->
Key = {SubId, Stream}, Key = {SubId, Stream},
case emqx_persistent_session_ds_state:get_stream(Key, S) of case emqx_persistent_session_ds_state:get_stream(Key, S) of
undefined -> undefined ->
?SLOG(debug, #{ ?SLOG(debug, #{
msg => new_stream, key => Key, stream => Stream msg => new_stream, key => Key, stream => Stream
}), }),
{ok, Iterator} = emqx_ds:make_iterator( case emqx_ds:make_iterator(?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartTime) of
?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartTime {ok, Iterator} ->
), NewStreamState = #srs{
NewStreamState = #srs{ rank_x = RankX,
rank_x = RankX, rank_y = RankY,
rank_y = RankY, it_begin = Iterator,
it_begin = Iterator, it_end = Iterator,
it_end = Iterator sub_state_id = SStateId
}, },
emqx_persistent_session_ds_state:put_stream(Key, NewStreamState, S); emqx_persistent_session_ds_state:put_stream(Key, NewStreamState, S);
{error, recoverable, Reason} ->
?SLOG(warning, #{
msg => "failed_to_initialize_stream_iterator",
stream => Stream,
class => recoverable,
reason => Reason
}),
S
end;
#srs{} -> #srs{} ->
S S
end. end.
@ -342,6 +352,38 @@ remove_fully_replayed_streams(S0) ->
S1 S1
). ).
%% @doc Update subscription state IDs for all streams that don't have unacked messages
-spec update_stream_subscription_state_ids(emqx_persistent_session_ds_state:t()) ->
emqx_persistent_session_ds_state:t().
update_stream_subscription_state_ids(S0) ->
CommQos1 = emqx_persistent_session_ds_state:get_seqno(?committed(?QOS_1), S0),
CommQos2 = emqx_persistent_session_ds_state:get_seqno(?committed(?QOS_2), S0),
%% Find the latest state IDs for each subscription:
LastSStateIds = emqx_persistent_session_ds_state:fold_subscriptions(
fun(_, #{id := SubId, current_state := SStateId}, Acc) ->
Acc#{SubId => SStateId}
end,
#{},
S0
),
%% Update subscription state IDs for fully acked streams:
emqx_persistent_session_ds_state:fold_streams(
fun
(_, #srs{unsubscribed = true}, S) ->
S;
(Key = {SubId, _Stream}, SRS0, S) ->
case is_fully_acked(CommQos1, CommQos2, SRS0) of
true ->
SRS = SRS0#srs{sub_state_id = maps:get(SubId, LastSStateIds)},
emqx_persistent_session_ds_state:put_stream(Key, SRS, S);
false ->
S
end
end,
S0,
S0
).
%% @doc Compare the streams by the order in which they were replayed. %% @doc Compare the streams by the order in which they were replayed.
compare_streams( compare_streams(
{_KeyA, #srs{first_seqno_qos1 = A1, first_seqno_qos2 = A2}}, {_KeyA, #srs{first_seqno_qos1 = A1, first_seqno_qos2 = A2}},

View File

@ -24,14 +24,56 @@
-module(emqx_persistent_session_ds_subs). -module(emqx_persistent_session_ds_subs).
%% API: %% API:
-export([on_subscribe/3, on_unsubscribe/3, gc/1, lookup/2, to_map/1, fold/3, fold_all/3]). -export([
on_subscribe/3,
on_unsubscribe/3,
on_session_drop/2,
gc/1,
lookup/2,
to_map/1,
fold/3
]).
-export_type([]). %% Management API:
-export([
cold_get_subscription/2
]).
-export_type([subscription_state_id/0, subscription/0, subscription_state/0]).
-include("emqx_persistent_session_ds.hrl").
-include("emqx_mqtt.hrl").
-include_lib("snabbkaffe/include/trace.hrl").
%%================================================================================ %%================================================================================
%% Type declarations %% Type declarations
%%================================================================================ %%================================================================================
-type subscription() :: #{
%% Session-unique identifier of the subscription. Other objects
%% can use it as a compact reference:
id := emqx_persistent_session_ds:subscription_id(),
%% Reference to the current subscription state:
current_state := subscription_state_id(),
%% Time when the subscription was added:
start_time := emqx_ds:time()
}.
-type subscription_state_id() :: integer().
-type subscription_state() :: #{
parent_subscription := emqx_persistent_session_ds:subscription_id(),
upgrade_qos := boolean(),
%% SubOpts:
subopts := #{
nl => _,
qos => _,
rap => _,
subid => _,
_ => _
}
}.
%%================================================================================ %%================================================================================
%% API functions %% API functions
%%================================================================================ %%================================================================================
@ -39,41 +81,131 @@
%% @doc Process a new subscription %% @doc Process a new subscription
-spec on_subscribe( -spec on_subscribe(
emqx_persistent_session_ds:topic_filter(), emqx_persistent_session_ds:topic_filter(),
emqx_persistent_session_ds:subscription(), emqx_types:subopts(),
emqx_persistent_session_ds_state:t() emqx_persistent_session_ds:session()
) -> ) ->
emqx_persistent_session_ds_state:t(). {ok, emqx_persistent_session_ds_state:t()} | {error, ?RC_QUOTA_EXCEEDED}.
on_subscribe(TopicFilter, Subscription, S) -> on_subscribe(TopicFilter, SubOpts, #{id := SessionId, s := S0, props := Props}) ->
emqx_persistent_session_ds_state:put_subscription(TopicFilter, [], Subscription, S). #{upgrade_qos := UpgradeQoS, max_subscriptions := MaxSubscriptions} = Props,
case emqx_persistent_session_ds_state:get_subscription(TopicFilter, S0) of
undefined ->
%% This is a new subscription:
case emqx_persistent_session_ds_state:n_subscriptions(S0) < MaxSubscriptions of
true ->
ok = emqx_persistent_session_ds_router:do_add_route(TopicFilter, SessionId),
{SubId, S1} = emqx_persistent_session_ds_state:new_id(S0),
{SStateId, S2} = emqx_persistent_session_ds_state:new_id(S1),
SState = #{
parent_subscription => SubId, upgrade_qos => UpgradeQoS, subopts => SubOpts
},
S3 = emqx_persistent_session_ds_state:put_subscription_state(
SStateId, SState, S2
),
Subscription = #{
id => SubId,
current_state => SStateId,
start_time => now_ms()
},
S = emqx_persistent_session_ds_state:put_subscription(
TopicFilter, Subscription, S3
),
?tp(persistent_session_ds_subscription_added, #{
topic_filter => TopicFilter, session => SessionId
}),
{ok, S};
false ->
{error, ?RC_QUOTA_EXCEEDED}
end;
Sub0 = #{current_state := SStateId0, id := SubId} ->
SState = #{parent_subscription => SubId, upgrade_qos => UpgradeQoS, subopts => SubOpts},
case emqx_persistent_session_ds_state:get_subscription_state(SStateId0, S0) of
SState ->
%% Client resubscribed with the same parameters:
{ok, S0};
_ ->
%% Subsription parameters changed:
{SStateId, S1} = emqx_persistent_session_ds_state:new_id(S0),
S2 = emqx_persistent_session_ds_state:put_subscription_state(
SStateId, SState, S1
),
Sub = Sub0#{current_state => SStateId},
S = emqx_persistent_session_ds_state:put_subscription(TopicFilter, Sub, S2),
{ok, S}
end
end.
%% @doc Process UNSUBSCRIBE %% @doc Process UNSUBSCRIBE
-spec on_unsubscribe( -spec on_unsubscribe(
emqx_persistent_session_ds:id(),
emqx_persistent_session_ds:topic_filter(), emqx_persistent_session_ds:topic_filter(),
emqx_persistent_session_ds:subscription(),
emqx_persistent_session_ds_state:t() emqx_persistent_session_ds_state:t()
) -> ) ->
emqx_persistent_session_ds_state:t(). {ok, emqx_persistent_session_ds_state:t(), emqx_persistent_session_ds:subscription()}
on_unsubscribe(TopicFilter, Subscription0, S0) -> | {error, ?RC_NO_SUBSCRIPTION_EXISTED}.
%% Note: we cannot delete the subscription immediately, since its on_unsubscribe(SessionId, TopicFilter, S0) ->
%% metadata can be used during replay (see `process_batch'). We case lookup(TopicFilter, S0) of
%% instead mark it as deleted, and let `subscription_gc' function undefined ->
%% dispatch it later: {error, ?RC_NO_SUBSCRIPTION_EXISTED};
Subscription = Subscription0#{deleted => true}, Subscription ->
emqx_persistent_session_ds_state:put_subscription(TopicFilter, [], Subscription, S0). ?tp(persistent_session_ds_subscription_delete, #{
session_id => SessionId, topic_filter => TopicFilter
}),
?tp_span(
persistent_session_ds_subscription_route_delete,
#{session_id => SessionId, topic_filter => TopicFilter},
ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilter, SessionId)
),
{ok, emqx_persistent_session_ds_state:del_subscription(TopicFilter, S0), Subscription}
end.
%% @doc Remove subscriptions that have been marked for deletion, and -spec on_session_drop(emqx_persistent_session_ds:id(), emqx_persistent_session_ds_state:t()) -> ok.
%% that don't have any unacked messages: on_session_drop(SessionId, S0) ->
fold(
fun(TopicFilter, _Subscription, S) ->
case on_unsubscribe(SessionId, TopicFilter, S) of
{ok, S1, _} -> S1;
_ -> S
end
end,
S0,
S0
).
%% @doc Remove subscription states that don't have a parent, and that
%% don't have any unacked messages:
-spec gc(emqx_persistent_session_ds_state:t()) -> emqx_persistent_session_ds_state:t(). -spec gc(emqx_persistent_session_ds_state:t()) -> emqx_persistent_session_ds_state:t().
gc(S0) -> gc(S0) ->
fold_all( %% Create a set of subscription states IDs referenced either by a
fun(TopicFilter, #{id := SubId, deleted := Deleted}, Acc) -> %% subscription or a stream replay state:
case Deleted andalso has_no_unacked_streams(SubId, S0) of AliveSet0 = emqx_persistent_session_ds_state:fold_subscriptions(
true -> fun(_TopicFilter, #{current_state := SStateId}, Acc) ->
emqx_persistent_session_ds_state:del_subscription(TopicFilter, [], Acc); Acc#{SStateId => true}
end,
#{},
S0
),
AliveSet = emqx_persistent_session_ds_state:fold_streams(
fun(_StreamId, SRS = #srs{sub_state_id = SStateId}, Acc) ->
case emqx_persistent_session_ds_stream_scheduler:is_fully_acked(SRS, S0) of
false -> false ->
Acc#{SStateId => true};
true ->
Acc Acc
end end
end, end,
AliveSet0,
S0
),
%% Delete dangling subscription states:
emqx_persistent_session_ds_state:fold_subscription_states(
fun(SStateId, _, S) ->
case maps:is_key(SStateId, AliveSet) of
true ->
S;
false ->
emqx_persistent_session_ds_state:del_subscription_state(SStateId, S)
end
end,
S0, S0,
S0 S0
). ).
@ -82,12 +214,16 @@ gc(S0) ->
-spec lookup(emqx_persistent_session_ds:topic_filter(), emqx_persistent_session_ds_state:t()) -> -spec lookup(emqx_persistent_session_ds:topic_filter(), emqx_persistent_session_ds_state:t()) ->
emqx_persistent_session_ds:subscription() | undefined. emqx_persistent_session_ds:subscription() | undefined.
lookup(TopicFilter, S) -> lookup(TopicFilter, S) ->
Subs = emqx_persistent_session_ds_state:get_subscriptions(S), case emqx_persistent_session_ds_state:get_subscription(TopicFilter, S) of
case emqx_topic_gbt:lookup(TopicFilter, [], Subs, undefined) of Sub = #{current_state := SStateId} ->
#{deleted := true} -> case emqx_persistent_session_ds_state:get_subscription_state(SStateId, S) of
undefined; #{subopts := SubOpts} ->
Sub -> Sub#{subopts => SubOpts};
Sub undefined ->
undefined
end;
undefined ->
undefined
end. end.
%% @doc Convert active subscriptions to a map, for information %% @doc Convert active subscriptions to a map, for information
@ -95,7 +231,7 @@ lookup(TopicFilter, S) ->
-spec to_map(emqx_persistent_session_ds_state:t()) -> map(). -spec to_map(emqx_persistent_session_ds_state:t()) -> map().
to_map(S) -> to_map(S) ->
fold( fold(
fun(TopicFilter, #{props := Props}, Acc) -> Acc#{TopicFilter => Props} end, fun(TopicFilter, _, Acc) -> Acc#{TopicFilter => lookup(TopicFilter, S)} end,
#{}, #{},
S S
). ).
@ -107,48 +243,29 @@ to_map(S) ->
emqx_persistent_session_ds_state:t() emqx_persistent_session_ds_state:t()
) -> ) ->
Acc. Acc.
fold(Fun, AccIn, S) -> fold(Fun, Acc, S) ->
fold_all( emqx_persistent_session_ds_state:fold_subscriptions(Fun, Acc, S).
fun(TopicFilter, Sub = #{deleted := Deleted}, Acc) ->
case Deleted of
true -> Acc;
false -> Fun(TopicFilter, Sub, Acc)
end
end,
AccIn,
S
).
%% @doc Fold over all subscriptions, including inactive ones: -spec cold_get_subscription(emqx_persistent_session_ds:id(), emqx_types:topic()) ->
-spec fold_all( emqx_persistent_session_ds:subscription() | undefined.
fun((emqx_types:topic(), emqx_persistent_session_ds:subscription(), Acc) -> Acc), cold_get_subscription(SessionId, Topic) ->
Acc, case emqx_persistent_session_ds_state:cold_get_subscription(SessionId, Topic) of
emqx_persistent_session_ds_state:t() [Sub = #{current_state := SStateId}] ->
) -> case
Acc. emqx_persistent_session_ds_state:cold_get_subscription_state(SessionId, SStateId)
fold_all(Fun, AccIn, S) -> of
Subs = emqx_persistent_session_ds_state:get_subscriptions(S), [#{subopts := Subopts}] ->
emqx_topic_gbt:fold( Sub#{subopts => Subopts};
fun(Key, Sub, Acc) -> Fun(emqx_topic_gbt:get_topic(Key), Sub, Acc) end, _ ->
AccIn, undefined
Subs end;
). _ ->
undefined
end.
%%================================================================================ %%================================================================================
%% Internal functions %% Internal functions
%%================================================================================ %%================================================================================
-spec has_no_unacked_streams( now_ms() ->
emqx_persistent_session_ds:subscription_id(), emqx_persistent_session_ds_state:t() erlang:system_time(millisecond).
) -> boolean().
has_no_unacked_streams(SubId, S) ->
emqx_persistent_session_ds_state:fold_streams(
fun
({SID, _Stream}, Srs, Acc) when SID =:= SubId ->
emqx_persistent_session_ds_stream_scheduler:is_fully_acked(Srs, S) andalso Acc;
(_StreamKey, _Srs, Acc) ->
Acc
end,
true,
S
).

View File

@ -61,6 +61,8 @@
}. }.
-type url() :: binary(). -type url() :: binary().
-type json_binary() :: binary(). -type json_binary() :: binary().
-type template() :: binary().
-type template_str() :: string().
-typerefl_from_string({duration/0, emqx_schema, to_duration}). -typerefl_from_string({duration/0, emqx_schema, to_duration}).
-typerefl_from_string({duration_s/0, emqx_schema, to_duration_s}). -typerefl_from_string({duration_s/0, emqx_schema, to_duration_s}).
@ -78,6 +80,8 @@
-typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}). -typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}).
-typerefl_from_string({url/0, emqx_schema, to_url}). -typerefl_from_string({url/0, emqx_schema, to_url}).
-typerefl_from_string({json_binary/0, emqx_schema, to_json_binary}). -typerefl_from_string({json_binary/0, emqx_schema, to_json_binary}).
-typerefl_from_string({template/0, emqx_schema, to_template}).
-typerefl_from_string({template_str/0, emqx_schema, to_template_str}).
-type parsed_server() :: #{ -type parsed_server() :: #{
hostname := string(), hostname := string(),
@ -120,7 +124,9 @@
to_erl_cipher_suite/1, to_erl_cipher_suite/1,
to_comma_separated_atoms/1, to_comma_separated_atoms/1,
to_url/1, to_url/1,
to_json_binary/1 to_json_binary/1,
to_template/1,
to_template_str/1
]). ]).
-export([ -export([
@ -160,7 +166,9 @@
comma_separated_atoms/0, comma_separated_atoms/0,
url/0, url/0,
json_binary/0, json_binary/0,
port_number/0 port_number/0,
template/0,
template_str/0
]). ]).
-export([namespace/0, roots/0, roots/1, fields/1, desc/1, tags/0]). -export([namespace/0, roots/0, roots/1, fields/1, desc/1, tags/0]).
@ -1734,20 +1742,38 @@ fields(durable_storage) ->
emqx_ds_schema:schema(); emqx_ds_schema:schema();
fields("client_attrs_init") -> fields("client_attrs_init") ->
[ [
{extract_from, {expression,
sc( sc(
hoconsc:enum([clientid, username, cn, dn, user_property]), typerefl:alias("string", any()),
#{desc => ?DESC("client_attrs_init_extract_from")} #{
desc => ?DESC("client_attrs_init_expression"),
converter => fun compile_variform/2
}
)}, )},
{extract_regexp, sc(binary(), #{desc => ?DESC("client_attrs_init_extract_regexp")})}, {set_as_attr,
{extract_as,
sc(binary(), #{ sc(binary(), #{
default => <<"alias">>, desc => ?DESC("client_attrs_init_set_as_attr"),
desc => ?DESC("client_attrs_init_extract_as"),
validator => fun restricted_string/1 validator => fun restricted_string/1
})} })}
]. ].
compile_variform(undefined, _Opts) ->
undefined;
compile_variform(Expression, #{make_serializable := true}) ->
case is_binary(Expression) of
true ->
Expression;
false ->
emqx_variform:decompile(Expression)
end;
compile_variform(Expression, _Opts) ->
case emqx_variform:compile(Expression) of
{ok, Compiled} ->
Compiled;
{error, Reason} ->
throw(#{expression => Expression, reason => Reason})
end.
restricted_string(Str) -> restricted_string(Str) ->
case emqx_utils:is_restricted_str(Str) of case emqx_utils:is_restricted_str(Str) of
true -> ok; true -> ok;
@ -2576,6 +2602,12 @@ to_json_binary(Str) ->
Error Error
end. end.
to_template(Str) ->
{ok, iolist_to_binary(Str)}.
to_template_str(Str) ->
{ok, unicode:characters_to_list(Str, utf8)}.
%% @doc support the following format: %% @doc support the following format:
%% - 127.0.0.1:1883 %% - 127.0.0.1:1883
%% - ::1:1883 %% - ::1:1883
@ -3552,9 +3584,9 @@ mqtt_general() ->
)}, )},
{"client_attrs_init", {"client_attrs_init",
sc( sc(
hoconsc:union([disabled, ref("client_attrs_init")]), hoconsc:array(ref("client_attrs_init")),
#{ #{
default => disabled, default => [],
desc => ?DESC("client_attrs_init") desc => ?DESC("client_attrs_init")
} }
)} )}

View File

@ -429,6 +429,11 @@ enrich_deliver(ClientInfo, {deliver, Topic, Msg}, UpgradeQoS, Session) ->
end, end,
enrich_message(ClientInfo, Msg, SubOpts, UpgradeQoS). enrich_message(ClientInfo, Msg, SubOpts, UpgradeQoS).
%% Caution: updating this function _may_ break consistency of replay
%% for persistent sessions. Persistent sessions expect it to return
%% the same result during replay. If it changes the behavior between
%% releases, sessions restored from the cold storage may end up
%% replaying messages with different QoS, etc.
enrich_message( enrich_message(
ClientInfo = #{clientid := ClientId}, ClientInfo = #{clientid := ClientId},
Msg = #message{from = ClientId}, Msg = #message{from = ClientId},

View File

@ -22,6 +22,7 @@
-include("types.hrl"). -include("types.hrl").
-include("logger.hrl"). -include("logger.hrl").
-include("emqx_hooks.hrl"). -include("emqx_hooks.hrl").
-include("emqx_mqtt.hrl").
-export([ -export([
start_link/0, start_link/0,
@ -279,7 +280,7 @@ on_client_subscribed(
clientid => ClientId, clientid => ClientId,
username => Username, username => Username,
protocol => Protocol, protocol => Protocol,
topic => Topic, topic => emqx_topic:maybe_format_share(Topic),
subopts => SubOpts, subopts => SubOpts,
ts => erlang:system_time(millisecond) ts => erlang:system_time(millisecond)
}, },
@ -298,7 +299,7 @@ on_client_unsubscribed(
clientid => ClientId, clientid => ClientId,
username => Username, username => Username,
protocol => Protocol, protocol => Protocol,
topic => Topic, topic => emqx_topic:maybe_format_share(Topic),
ts => erlang:system_time(millisecond) ts => erlang:system_time(millisecond)
}, },
publish(unsubscribed, Payload). publish(unsubscribed, Payload).

View File

@ -28,7 +28,8 @@
subscribe/3, subscribe/3,
unsubscribe/2, unsubscribe/2,
log/3, log/3,
log/4 log/4,
rendered_action_template/2
]). ]).
-export([ -export([
@ -66,6 +67,9 @@
-export_type([ip_address/0]). -export_type([ip_address/0]).
-type ip_address() :: string(). -type ip_address() :: string().
-export_type([ruleid/0]).
-type ruleid() :: binary().
publish(#message{topic = <<"$SYS/", _/binary>>}) -> publish(#message{topic = <<"$SYS/", _/binary>>}) ->
ignore; ignore;
publish(#message{from = From, topic = Topic, payload = Payload}) when publish(#message{from = From, topic = Topic, payload = Payload}) when
@ -83,6 +87,32 @@ unsubscribe(<<"$SYS/", _/binary>>, _SubOpts) ->
unsubscribe(Topic, SubOpts) -> unsubscribe(Topic, SubOpts) ->
?TRACE("UNSUBSCRIBE", "unsubscribe", #{topic => Topic, sub_opts => SubOpts}). ?TRACE("UNSUBSCRIBE", "unsubscribe", #{topic => Topic, sub_opts => SubOpts}).
rendered_action_template(ActionID, RenderResult) ->
TraceResult = ?TRACE(
"QUERY_RENDER",
"action_template_rendered",
#{
result => RenderResult,
action_id => ActionID
}
),
case logger:get_process_metadata() of
#{stop_action_after_render := true} ->
%% We throw an unrecoverable error to stop action before the
%% resource is called/modified
StopMsg = lists:flatten(
io_lib:format(
"Action ~ts stopped after template rendering due to test setting.",
[ActionID]
)
),
MsgBin = unicode:characters_to_binary(StopMsg),
error({unrecoverable_error, {action_stopped_after_template_rendering, MsgBin}});
_ ->
ok
end,
TraceResult.
log(List, Msg, Meta) -> log(List, Msg, Meta) ->
log(debug, List, Msg, Meta). log(debug, List, Msg, Meta).
@ -159,8 +189,10 @@ create(Trace) ->
case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
true -> true ->
case to_trace(Trace) of case to_trace(Trace) of
{ok, TraceRec} -> insert_new_trace(TraceRec); {ok, TraceRec} ->
{error, Reason} -> {error, Reason} insert_new_trace(TraceRec);
{error, Reason} ->
{error, Reason}
end; end;
false -> false ->
{error, {error,
@ -222,7 +254,11 @@ format(Traces) ->
lists:map( lists:map(
fun(Trace0 = #?TRACE{}) -> fun(Trace0 = #?TRACE{}) ->
[_ | Values] = tuple_to_list(Trace0), [_ | Values] = tuple_to_list(Trace0),
maps:from_list(lists:zip(Fields, Values)) Map0 = maps:from_list(lists:zip(Fields, Values)),
Extra = maps:get(extra, Map0, #{}),
Formatter = maps:get(formatter, Extra, text),
Map1 = Map0#{formatter => Formatter},
maps:remove(extra, Map1)
end, end,
Traces Traces
). ).
@ -368,9 +404,17 @@ start_trace(Trace) ->
type = Type, type = Type,
filter = Filter, filter = Filter,
start_at = Start, start_at = Start,
payload_encode = PayloadEncode payload_encode = PayloadEncode,
extra = Extra
} = Trace, } = Trace,
Who = #{name => Name, type => Type, filter => Filter, payload_encode => PayloadEncode}, Formatter = maps:get(formatter, Extra, text),
Who = #{
name => Name,
type => Type,
filter => Filter,
payload_encode => PayloadEncode,
formatter => Formatter
},
emqx_trace_handler:install(Who, debug, log_file(Name, Start)). emqx_trace_handler:install(Who, debug, log_file(Name, Start)).
stop_trace(Finished, Started) -> stop_trace(Finished, Started) ->
@ -517,6 +561,9 @@ to_trace(#{type := ip_address, ip_address := Filter} = Trace, Rec) ->
Error -> Error ->
Error Error
end; end;
to_trace(#{type := ruleid, ruleid := Filter} = Trace, Rec) ->
Trace0 = maps:without([type, ruleid], Trace),
to_trace(Trace0, Rec#?TRACE{type = ruleid, filter = Filter});
to_trace(#{type := Type}, _Rec) -> to_trace(#{type := Type}, _Rec) ->
{error, io_lib:format("required ~s field", [Type])}; {error, io_lib:format("required ~s field", [Type])};
to_trace(#{payload_encode := PayloadEncode} = Trace, Rec) -> to_trace(#{payload_encode := PayloadEncode} = Trace, Rec) ->
@ -532,6 +579,12 @@ to_trace(#{end_at := EndAt} = Trace, Rec) ->
{ok, _Sec} -> {ok, _Sec} ->
{error, "end_at time has already passed"} {error, "end_at time has already passed"}
end; end;
to_trace(#{formatter := Formatter} = Trace, Rec) ->
Extra = Rec#?TRACE.extra,
to_trace(
maps:remove(formatter, Trace),
Rec#?TRACE{extra = Extra#{formatter => Formatter}}
);
to_trace(_, Rec) -> to_trace(_, Rec) ->
{ok, Rec}. {ok, Rec}.

View File

@ -27,12 +27,14 @@
install/3, install/3,
install/4, install/4,
install/5, install/5,
install/6,
uninstall/1, uninstall/1,
uninstall/2 uninstall/2
]). ]).
%% For logger handler filters callbacks %% For logger handler filters callbacks
-export([ -export([
filter_ruleid/2,
filter_clientid/2, filter_clientid/2,
filter_topic/2, filter_topic/2,
filter_ip_address/2 filter_ip_address/2
@ -45,7 +47,8 @@
name := binary(), name := binary(),
type := clientid | topic | ip_address, type := clientid | topic | ip_address,
filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address(), filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address(),
payload_encode := text | hidden | hex payload_encode := text | hidden | hex,
formatter => json | text
}. }.
-define(CONFIG(_LogFile_), #{ -define(CONFIG(_LogFile_), #{
@ -68,17 +71,29 @@
Type :: clientid | topic | ip_address, Type :: clientid | topic | ip_address,
Filter :: emqx_types:clientid() | emqx_types:topic() | string(), Filter :: emqx_types:clientid() | emqx_types:topic() | string(),
Level :: logger:level() | all, Level :: logger:level() | all,
LogFilePath :: string() LogFilePath :: string(),
Formatter :: text | json
) -> ok | {error, term()}. ) -> ok | {error, term()}.
install(Name, Type, Filter, Level, LogFile) -> install(Name, Type, Filter, Level, LogFile, Formatter) ->
Who = #{ Who = #{
type => Type, type => Type,
filter => ensure_bin(Filter), filter => ensure_bin(Filter),
name => ensure_bin(Name), name => ensure_bin(Name),
payload_encode => payload_encode() payload_encode => payload_encode(),
formatter => Formatter
}, },
install(Who, Level, LogFile). install(Who, Level, LogFile).
-spec install(
Name :: binary() | list(),
Type :: clientid | topic | ip_address,
Filter :: emqx_types:clientid() | emqx_types:topic() | string(),
Level :: logger:level() | all,
LogFilePath :: string()
) -> ok | {error, term()}.
install(Name, Type, Filter, Level, LogFile) ->
install(Name, Type, Filter, Level, LogFile, text).
-spec install( -spec install(
Type :: clientid | topic | ip_address, Type :: clientid | topic | ip_address,
Filter :: emqx_types:clientid() | emqx_types:topic() | string(), Filter :: emqx_types:clientid() | emqx_types:topic() | string(),
@ -133,9 +148,23 @@ uninstall(HandlerId) ->
running() -> running() ->
lists:foldl(fun filter_traces/2, [], emqx_logger:get_log_handlers(started)). lists:foldl(fun filter_traces/2, [], emqx_logger:get_log_handlers(started)).
-spec filter_ruleid(logger:log_event(), {binary(), atom()}) -> logger:log_event() | stop.
filter_ruleid(#{meta := Meta = #{rule_id := RuleId}} = Log, {MatchId, _Name}) ->
RuleIDs = maps:get(rule_ids, Meta, #{}),
IsMatch = (RuleId =:= MatchId) orelse maps:get(MatchId, RuleIDs, false),
filter_ret(IsMatch andalso is_trace(Meta), Log);
filter_ruleid(#{meta := Meta = #{rule_ids := RuleIDs}} = Log, {MatchId, _Name}) ->
filter_ret(maps:get(MatchId, RuleIDs, false) andalso is_trace(Meta), Log);
filter_ruleid(_Log, _ExpectId) ->
stop.
-spec filter_clientid(logger:log_event(), {binary(), atom()}) -> logger:log_event() | stop. -spec filter_clientid(logger:log_event(), {binary(), atom()}) -> logger:log_event() | stop.
filter_clientid(#{meta := Meta = #{clientid := ClientId}} = Log, {MatchId, _Name}) -> filter_clientid(#{meta := Meta = #{clientid := ClientId}} = Log, {MatchId, _Name}) ->
filter_ret(ClientId =:= MatchId andalso is_trace(Meta), Log); ClientIDs = maps:get(client_ids, Meta, #{}),
IsMatch = (ClientId =:= MatchId) orelse maps:get(MatchId, ClientIDs, false),
filter_ret(IsMatch andalso is_trace(Meta), Log);
filter_clientid(#{meta := Meta = #{client_ids := ClientIDs}} = Log, {MatchId, _Name}) ->
filter_ret(maps:get(MatchId, ClientIDs, false) andalso is_trace(Meta), Log);
filter_clientid(_Log, _ExpectId) -> filter_clientid(_Log, _ExpectId) ->
stop. stop.
@ -164,8 +193,14 @@ filters(#{type := clientid, filter := Filter, name := Name}) ->
filters(#{type := topic, filter := Filter, name := Name}) -> filters(#{type := topic, filter := Filter, name := Name}) ->
[{topic, {fun ?MODULE:filter_topic/2, {ensure_bin(Filter), Name}}}]; [{topic, {fun ?MODULE:filter_topic/2, {ensure_bin(Filter), Name}}}];
filters(#{type := ip_address, filter := Filter, name := Name}) -> filters(#{type := ip_address, filter := Filter, name := Name}) ->
[{ip_address, {fun ?MODULE:filter_ip_address/2, {ensure_list(Filter), Name}}}]. [{ip_address, {fun ?MODULE:filter_ip_address/2, {ensure_list(Filter), Name}}}];
filters(#{type := ruleid, filter := Filter, name := Name}) ->
[{ruleid, {fun ?MODULE:filter_ruleid/2, {ensure_bin(Filter), Name}}}].
formatter(#{type := _Type, payload_encode := PayloadEncode, formatter := json}) ->
{emqx_trace_json_formatter, #{
payload_encode => PayloadEncode
}};
formatter(#{type := _Type, payload_encode := PayloadEncode}) -> formatter(#{type := _Type, payload_encode := PayloadEncode}) ->
{emqx_trace_formatter, #{ {emqx_trace_formatter, #{
%% template is for ?SLOG message not ?TRACE. %% template is for ?SLOG message not ?TRACE.
@ -184,7 +219,8 @@ filter_traces(#{id := Id, level := Level, dst := Dst, filters := Filters}, Acc)
[{Type, {FilterFun, {Filter, Name}}}] when [{Type, {FilterFun, {Filter, Name}}}] when
Type =:= topic orelse Type =:= topic orelse
Type =:= clientid orelse Type =:= clientid orelse
Type =:= ip_address Type =:= ip_address orelse
Type =:= ruleid
-> ->
[Init#{type => Type, filter => Filter, name => Name, filter_fun => FilterFun} | Acc]; [Init#{type => Type, filter => Filter, name => Name, filter_fun => FilterFun} | Acc];
_ -> _ ->

View File

@ -0,0 +1,130 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_json_formatter).
-include("emqx_mqtt.hrl").
-export([format/2]).
%% logger_formatter:config/0 is not exported.
-type config() :: map().
%%%-----------------------------------------------------------------
%%% Callback Function
%%%-----------------------------------------------------------------
-spec format(LogEvent, Config) -> unicode:chardata() when
LogEvent :: logger:log_event(),
Config :: config().
format(
LogMap,
#{payload_encode := PEncode}
) ->
%% We just make some basic transformations on the input LogMap and then do
%% an external call to create the JSON text
Time = emqx_utils_calendar:now_to_rfc3339(microsecond),
LogMap1 = LogMap#{time => Time},
LogMap2 = prepare_log_map(LogMap1, PEncode),
[emqx_logger_jsonfmt:best_effort_json(LogMap2, [force_utf8]), "\n"].
%%%-----------------------------------------------------------------
%%% Helper Functions
%%%-----------------------------------------------------------------
prepare_log_map(LogMap, PEncode) ->
NewKeyValuePairs = [prepare_key_value(K, V, PEncode) || {K, V} <- maps:to_list(LogMap)],
maps:from_list(NewKeyValuePairs).
prepare_key_value(payload = K, V, PEncode) ->
NewV =
try
format_payload(V, PEncode)
catch
_:_ ->
V
end,
{K, NewV};
prepare_key_value(packet = K, V, PEncode) ->
NewV =
try
format_packet(V, PEncode)
catch
_:_ ->
V
end,
{K, NewV};
prepare_key_value(rule_ids = K, V, _PEncode) ->
NewV =
try
format_map_set_to_list(V)
catch
_:_ ->
V
end,
{K, NewV};
prepare_key_value(client_ids = K, V, _PEncode) ->
NewV =
try
format_map_set_to_list(V)
catch
_:_ ->
V
end,
{K, NewV};
prepare_key_value(action_id = K, V, _PEncode) ->
try
{action_info, format_action_info(V)}
catch
_:_ ->
{K, V}
end;
prepare_key_value(K, V, PEncode) when is_map(V) ->
{K, prepare_log_map(V, PEncode)};
prepare_key_value(K, V, _PEncode) ->
{K, V}.
format_packet(undefined, _) -> "";
format_packet(Packet, Encode) -> emqx_packet:format(Packet, Encode).
format_payload(undefined, _) ->
"";
format_payload(_, hidden) ->
"******";
format_payload(Payload, text) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) ->
unicode:characters_to_list(Payload);
format_payload(Payload, hex) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) -> binary:encode_hex(Payload);
format_payload(<<Part:?TRUNCATED_PAYLOAD_SIZE/binary, _/binary>> = Payload, Type) ->
emqx_packet:format_truncated_payload(Part, byte_size(Payload), Type).
format_map_set_to_list(Map) ->
Items = [
begin
%% Assert that it is really a map set
true = V,
%% Assert that the keys have the expected type
true = is_binary(K),
K
end
|| {K, V} <- maps:to_list(Map)
],
lists:sort(Items).
format_action_info(V) ->
[<<"action">>, Type, Name | _] = binary:split(V, <<":">>, [global]),
#{
type => Type,
name => Name
}.

View File

@ -395,13 +395,14 @@ t_certdn_as_alias(_) ->
test_cert_extraction_as_alias(Which) -> test_cert_extraction_as_alias(Which) ->
%% extract the first two chars %% extract the first two chars
Re = <<"^(..).*$">>,
ClientId = iolist_to_binary(["ClientIdFor_", atom_to_list(Which)]), ClientId = iolist_to_binary(["ClientIdFor_", atom_to_list(Which)]),
emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], #{ {ok, Compiled} = emqx_variform:compile("substr(" ++ atom_to_list(Which) ++ ",0,2)"),
extract_from => Which, emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], [
extract_regexp => Re, #{
extract_as => <<"alias">> expression => Compiled,
}), set_as_attr => <<"alias">>
}
]),
SslConf = emqx_common_test_helpers:client_mtls('tlsv1.2'), SslConf = emqx_common_test_helpers:client_mtls('tlsv1.2'),
{ok, Client} = emqtt:start_link([ {ok, Client} = emqtt:start_link([
{clientid, ClientId}, {port, 8883}, {ssl, true}, {ssl_opts, SslConf} {clientid, ClientId}, {port, 8883}, {ssl, true}, {ssl_opts, SslConf}
@ -416,10 +417,13 @@ test_cert_extraction_as_alias(Which) ->
t_client_attr_from_user_property(_Config) -> t_client_attr_from_user_property(_Config) ->
ClientId = atom_to_binary(?FUNCTION_NAME), ClientId = atom_to_binary(?FUNCTION_NAME),
emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], #{ {ok, Compiled} = emqx_variform:compile("user_property.group"),
extract_from => user_property, emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], [
extract_as => <<"group">> #{
}), expression => Compiled,
set_as_attr => <<"group">>
}
]),
SslConf = emqx_common_test_helpers:client_mtls('tlsv1.3'), SslConf = emqx_common_test_helpers:client_mtls('tlsv1.3'),
{ok, Client} = emqtt:start_link([ {ok, Client} = emqtt:start_link([
{clientid, ClientId}, {clientid, ClientId},

View File

@ -454,7 +454,7 @@ zone_global_defaults() ->
upgrade_qos => false, upgrade_qos => false,
use_username_as_clientid => false, use_username_as_clientid => false,
wildcard_subscription => true, wildcard_subscription => true,
client_attrs_init => disabled client_attrs_init => []
}, },
overload_protection => overload_protection =>
#{ #{

View File

@ -150,11 +150,13 @@ t_client_attr_as_mountpoint(_Config) ->
<<"limiter">> => #{}, <<"limiter">> => #{},
<<"mountpoint">> => <<"groups/${client_attrs.ns}/">> <<"mountpoint">> => <<"groups/${client_attrs.ns}/">>
}, },
emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], #{ {ok, Compiled} = emqx_variform:compile("nth(1,tokens(clientid,'-'))"),
extract_from => clientid, emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], [
extract_regexp => <<"^(.+)-.+$">>, #{
extract_as => <<"ns">> expression => Compiled,
}), set_as_attr => <<"ns">>
}
]),
emqx_logger:set_log_level(debug), emqx_logger:set_log_level(debug),
with_listener(tcp, attr_as_moutpoint, ListenerConf, fun() -> with_listener(tcp, attr_as_moutpoint, ListenerConf, fun() ->
{ok, Client} = emqtt:start_link(#{ {ok, Client} = emqtt:start_link(#{
@ -170,7 +172,7 @@ t_client_attr_as_mountpoint(_Config) ->
?assertMatch([_], emqx_router:match_routes(MatchTopic)), ?assertMatch([_], emqx_router:match_routes(MatchTopic)),
emqtt:stop(Client) emqtt:stop(Client)
end), end),
emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], disabled), emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], []),
ok. ok.
t_current_conns_tcp(_Config) -> t_current_conns_tcp(_Config) ->

View File

@ -476,7 +476,7 @@ t_replication_options(_Config) ->
resend_window := 60 resend_window := 60
} }
}, },
emqx_ds_replication_layer_meta:get_options(?PERSISTENT_MESSAGE_DB) emqx_ds_replication_layer_meta:db_config(?PERSISTENT_MESSAGE_DB)
), ),
?assertMatch( ?assertMatch(
#{ #{
@ -584,6 +584,8 @@ message(Topic, Payload, PublishedAt) ->
id = emqx_guid:gen() id = emqx_guid:gen()
}. }.
on_message_dropped(#message{flags = #{sys := true}}, _Context, _Res, _TestPid) ->
ok;
on_message_dropped(Msg, Context, Res, TestPid) -> on_message_dropped(Msg, Context, Res, TestPid) ->
ErrCtx = #{msg => Msg, ctx => Context, res => Res}, ErrCtx = #{msg => Msg, ctx => Context, res => Res},
ct:pal("this hook should not be called.\n ~p", [ErrCtx]), ct:pal("this hook should not be called.\n ~p", [ErrCtx]),

View File

@ -74,9 +74,6 @@ session_id() ->
topic() -> topic() ->
oneof([<<"foo">>, <<"bar">>, <<"foo/#">>, <<"//+/#">>]). oneof([<<"foo">>, <<"bar">>, <<"foo/#">>, <<"//+/#">>]).
subid() ->
oneof([[]]).
subscription() -> subscription() ->
oneof([#{}]). oneof([#{}]).
@ -129,18 +126,25 @@ put_req() ->
{Track, Seqno}, {Track, Seqno},
{seqno_track(), seqno()}, {seqno_track(), seqno()},
{#s.seqno, put_seqno, Track, Seqno} {#s.seqno, put_seqno, Track, Seqno}
),
?LET(
{Topic, Subscription},
{topic(), subscription()},
{#s.subs, put_subscription, Topic, Subscription}
) )
]). ]).
get_req() -> get_req() ->
oneof([ oneof([
{#s.streams, get_stream, stream_id()}, {#s.streams, get_stream, stream_id()},
{#s.seqno, get_seqno, seqno_track()} {#s.seqno, get_seqno, seqno_track()},
{#s.subs, get_subscription, topic()}
]). ]).
del_req() -> del_req() ->
oneof([ oneof([
{#s.streams, del_stream, stream_id()} {#s.streams, del_stream, stream_id()},
{#s.subs, del_subscription, topic()}
]). ]).
command(S) -> command(S) ->
@ -153,13 +157,6 @@ command(S) ->
{2, {call, ?MODULE, reopen, [session_id(S)]}}, {2, {call, ?MODULE, reopen, [session_id(S)]}},
{2, {call, ?MODULE, commit, [session_id(S)]}}, {2, {call, ?MODULE, commit, [session_id(S)]}},
%% Subscriptions:
{3,
{call, ?MODULE, put_subscription, [
session_id(S), topic(), subid(), subscription()
]}},
{3, {call, ?MODULE, del_subscription, [session_id(S), topic(), subid()]}},
%% Metadata: %% Metadata:
{3, {call, ?MODULE, put_metadata, [session_id(S), put_metadata()]}}, {3, {call, ?MODULE, put_metadata, [session_id(S), put_metadata()]}},
{3, {call, ?MODULE, get_metadata, [session_id(S), get_metadata()]}}, {3, {call, ?MODULE, get_metadata, [session_id(S), get_metadata()]}},
@ -170,7 +167,6 @@ command(S) ->
{3, {call, ?MODULE, gen_del, [session_id(S), del_req()]}}, {3, {call, ?MODULE, gen_del, [session_id(S), del_req()]}},
%% Getters: %% Getters:
{4, {call, ?MODULE, get_subscriptions, [session_id(S)]}},
{1, {call, ?MODULE, iterate_sessions, [batch_size()]}} {1, {call, ?MODULE, iterate_sessions, [batch_size()]}}
]); ]);
false -> false ->
@ -207,19 +203,6 @@ postcondition(S, {call, ?MODULE, gen_get, [SessionId, {Idx, Fun, Key}]}, Result)
#{session_id => SessionId, key => Key, 'fun' => Fun} #{session_id => SessionId, key => Key, 'fun' => Fun}
), ),
true; true;
postcondition(S, {call, ?MODULE, get_subscriptions, [SessionId]}, Result) ->
#{SessionId := #s{subs = Subs}} = S,
?assertEqual(maps:size(Subs), emqx_topic_gbt:size(Result)),
maps:foreach(
fun({TopicFilter, Id}, Expected) ->
?assertEqual(
Expected,
emqx_topic_gbt:lookup(TopicFilter, Id, Result, default)
)
end,
Subs
),
true;
postcondition(_, _, _) -> postcondition(_, _, _) ->
true. true.
@ -227,22 +210,6 @@ next_state(S, _V, {call, ?MODULE, create_new, [SessionId]}) ->
S#{SessionId => #s{}}; S#{SessionId => #s{}};
next_state(S, _V, {call, ?MODULE, delete, [SessionId]}) -> next_state(S, _V, {call, ?MODULE, delete, [SessionId]}) ->
maps:remove(SessionId, S); maps:remove(SessionId, S);
next_state(S, _V, {call, ?MODULE, put_subscription, [SessionId, TopicFilter, SubId, Subscription]}) ->
Key = {TopicFilter, SubId},
update(
SessionId,
#s.subs,
fun(Subs) -> Subs#{Key => Subscription} end,
S
);
next_state(S, _V, {call, ?MODULE, del_subscription, [SessionId, TopicFilter, SubId]}) ->
Key = {TopicFilter, SubId},
update(
SessionId,
#s.subs,
fun(Subs) -> maps:remove(Key, Subs) end,
S
);
next_state(S, _V, {call, ?MODULE, put_metadata, [SessionId, {Key, _Fun, Val}]}) -> next_state(S, _V, {call, ?MODULE, put_metadata, [SessionId, {Key, _Fun, Val}]}) ->
update( update(
SessionId, SessionId,
@ -296,19 +263,6 @@ reopen(SessionId) ->
{ok, S} = emqx_persistent_session_ds_state:open(SessionId), {ok, S} = emqx_persistent_session_ds_state:open(SessionId),
put_state(SessionId, S). put_state(SessionId, S).
put_subscription(SessionId, TopicFilter, SubId, Subscription) ->
S = emqx_persistent_session_ds_state:put_subscription(
TopicFilter, SubId, Subscription, get_state(SessionId)
),
put_state(SessionId, S).
del_subscription(SessionId, TopicFilter, SubId) ->
S = emqx_persistent_session_ds_state:del_subscription(TopicFilter, SubId, get_state(SessionId)),
put_state(SessionId, S).
get_subscriptions(SessionId) ->
emqx_persistent_session_ds_state:get_subscriptions(get_state(SessionId)).
put_metadata(SessionId, {_MetaKey, Fun, Value}) -> put_metadata(SessionId, {_MetaKey, Fun, Value}) ->
S = apply(emqx_persistent_session_ds_state, Fun, [Value, get_state(SessionId)]), S = apply(emqx_persistent_session_ds_state, Fun, [Value, get_state(SessionId)]),
put_state(SessionId, S). put_state(SessionId, S).

View File

@ -1004,9 +1004,9 @@ t_different_groups_same_topic(Config) when is_list(Config) ->
GroupB = <<"bb">>, GroupB = <<"bb">>,
Topic = <<"t/1">>, Topic = <<"t/1">>,
SharedTopicGroupA = ?SHARE(GroupA, Topic), SharedTopicGroupA = format_share(GroupA, Topic),
?UPDATE_SUB_QOS(C, SharedTopicGroupA, ?QOS_2), ?UPDATE_SUB_QOS(C, SharedTopicGroupA, ?QOS_2),
SharedTopicGroupB = ?SHARE(GroupB, Topic), SharedTopicGroupB = format_share(GroupB, Topic),
?UPDATE_SUB_QOS(C, SharedTopicGroupB, ?QOS_2), ?UPDATE_SUB_QOS(C, SharedTopicGroupB, ?QOS_2),
?retry( ?retry(
@ -1050,11 +1050,11 @@ t_different_groups_update_subopts(Config) when is_list(Config) ->
Topic = <<"t/1">>, Topic = <<"t/1">>,
GroupA = <<"aa">>, GroupA = <<"aa">>,
GroupB = <<"bb">>, GroupB = <<"bb">>,
SharedTopicGroupA = ?SHARE(GroupA, Topic), SharedTopicGroupA = format_share(GroupA, Topic),
SharedTopicGroupB = ?SHARE(GroupB, Topic), SharedTopicGroupB = format_share(GroupB, Topic),
Fun = fun(Group, QoS) -> Fun = fun(Group, QoS) ->
?UPDATE_SUB_QOS(C, ?SHARE(Group, Topic), QoS), ?UPDATE_SUB_QOS(C, format_share(Group, Topic), QoS),
?assertMatch( ?assertMatch(
#{qos := QoS}, #{qos := QoS},
emqx_broker:get_subopts(ClientId, emqx_topic:make_shared_record(Group, Topic)) emqx_broker:get_subopts(ClientId, emqx_topic:make_shared_record(Group, Topic))
@ -1153,6 +1153,9 @@ t_queue_subscription(Config) when is_list(Config) ->
%% help functions %% help functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
format_share(Group, Topic) ->
emqx_topic:maybe_format_share(emqx_topic:make_shared_record(Group, Topic)).
kill_process(Pid) -> kill_process(Pid) ->
kill_process(Pid, fun(_) -> erlang:exit(Pid, kill) end). kill_process(Pid, fun(_) -> erlang:exit(Pid, kill) end).

View File

@ -96,7 +96,7 @@ t_base_create_delete(_Config) ->
start_at => Now, start_at => Now,
end_at => Now + 30 * 60, end_at => Now + 30 * 60,
payload_encode => text, payload_encode => text,
extra => #{} formatter => text
} }
], ],
?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])), ?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])),
@ -511,4 +511,13 @@ build_old_trace_data() ->
reload() -> reload() ->
catch ok = gen_server:stop(emqx_trace), catch ok = gen_server:stop(emqx_trace),
{ok, _Pid} = emqx_trace:start_link(). case emqx_trace:start_link() of
{ok, _Pid} = Res ->
Res;
NotOKRes ->
ct:pal(
"emqx_trace:start_link() gave result: ~p\n"
"(perhaps it is already started)",
[NotOKRes]
)
end.

View File

@ -353,13 +353,13 @@ init(_Opts) ->
ok = emqx_config_handler:add_handler([listeners, '?', '?', ?CONF_ROOT], Module), ok = emqx_config_handler:add_handler([listeners, '?', '?', ?CONF_ROOT], Module),
ok = hook_deny(), ok = hook_deny(),
{ok, #{hooked => false, providers => #{}, init_done => false}, {ok, #{hooked => false, providers => #{}, init_done => false},
{continue, initialize_authentication}}. {continue, {initialize_authentication, init}}}.
handle_call(get_providers, _From, #{providers := Providers} = State) -> handle_call(get_providers, _From, #{providers := Providers} = State) ->
reply(Providers, State); reply(Providers, State);
handle_call( handle_call(
{register_providers, Providers}, {register_providers, Providers},
_From, From,
#{providers := Reg0} = State #{providers := Reg0} = State
) -> ) ->
case lists:filter(fun({T, _}) -> maps:is_key(T, Reg0) end, Providers) of case lists:filter(fun({T, _}) -> maps:is_key(T, Reg0) end, Providers) of
@ -371,7 +371,7 @@ handle_call(
Reg0, Reg0,
Providers Providers
), ),
reply(ok, State#{providers := Reg}, initialize_authentication); reply(ok, State#{providers := Reg}, {initialize_authentication, From});
Clashes -> Clashes ->
reply({error, {authentication_type_clash, Clashes}}, State) reply({error, {authentication_type_clash, Clashes}}, State)
end; end;
@ -447,10 +447,10 @@ handle_call(Req, _From, State) ->
?SLOG(error, #{msg => "unexpected_call", call => Req}), ?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}. {reply, ignored, State}.
handle_continue(initialize_authentication, #{init_done := true} = State) -> handle_continue({initialize_authentication, _From}, #{init_done := true} = State) ->
{noreply, State}; {noreply, State};
handle_continue(initialize_authentication, #{providers := Providers} = State) -> handle_continue({initialize_authentication, From}, #{providers := Providers} = State) ->
InitDone = initialize_authentication(Providers), InitDone = initialize_authentication(Providers, From),
{noreply, maybe_hook(State#{init_done := InitDone})}. {noreply, maybe_hook(State#{init_done := InitDone})}.
handle_cast(Req, State) -> handle_cast(Req, State) ->
@ -484,11 +484,13 @@ code_change(_OldVsn, State, _Extra) ->
%% Private functions %% Private functions
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
initialize_authentication(Providers) -> initialize_authentication(Providers, From) ->
ProviderTypes = maps:keys(Providers), ProviderTypes = maps:keys(Providers),
Chains = chain_configs(), Chains = chain_configs(),
HasProviders = has_providers_for_configs(Chains, ProviderTypes), HasProviders = has_providers_for_configs(Chains, ProviderTypes),
do_initialize_authentication(Providers, Chains, HasProviders). Result = do_initialize_authentication(Providers, Chains, HasProviders),
?tp(info, authn_chains_initialization_done, #{from => From, result => Result}),
Result.
do_initialize_authentication(_Providers, _Chains, _HasProviders = false) -> do_initialize_authentication(_Providers, _Chains, _HasProviders = false) ->
false; false;
@ -500,7 +502,6 @@ do_initialize_authentication(Providers, Chains, _HasProviders = true) ->
Chains Chains
), ),
ok = unhook_deny(), ok = unhook_deny(),
?tp(info, authn_chains_initialization_done, #{}),
true. true.
initialize_chain_authentication(_Providers, _ChainName, []) -> initialize_chain_authentication(_Providers, _ChainName, []) ->

View File

@ -69,9 +69,10 @@ t_initialize(_Config) ->
emqx_access_control:authenticate(?CLIENTINFO) emqx_access_control:authenticate(?CLIENTINFO)
), ),
Self = self(),
?assertWaitEvent( ?assertWaitEvent(
ok = emqx_authn_test_lib:register_fake_providers([{password_based, built_in_database}]), ok = emqx_authn_test_lib:register_fake_providers([{password_based, built_in_database}]),
#{?snk_kind := authn_chains_initialization_done}, #{?snk_kind := authn_chains_initialization_done, from := {Self, _}},
100 100
), ),

View File

@ -557,12 +557,14 @@ t_publish_last_will_testament_denied_topic(_Config) ->
t_alias_prefix(_Config) -> t_alias_prefix(_Config) ->
{ok, _} = emqx_authz:update(?CMD_REPLACE, [?SOURCE_FILE_CLIENT_ATTR]), {ok, _} = emqx_authz:update(?CMD_REPLACE, [?SOURCE_FILE_CLIENT_ATTR]),
ExtractSuffix = <<"^.*-(.*)$">>, %% '^.*-(.*)$': extract the suffix after the last '-'
emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], #{ {ok, Compiled} = emqx_variform:compile("concat(regex_extract(clientid,'^.*-(.*)$'))"),
extract_from => clientid, emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], [
extract_regexp => ExtractSuffix, #{
extract_as => <<"alias">> expression => Compiled,
}), set_as_attr => <<"alias">>
}
]),
ClientId = <<"org1-name2">>, ClientId = <<"org1-name2">>,
SubTopic = <<"name2/#">>, SubTopic = <<"name2/#">>,
SubTopicNotAllowed = <<"name3/#">>, SubTopicNotAllowed = <<"name3/#">>,
@ -572,7 +574,7 @@ t_alias_prefix(_Config) ->
?assertMatch({ok, _, [?RC_NOT_AUTHORIZED]}, emqtt:subscribe(C, SubTopicNotAllowed)), ?assertMatch({ok, _, [?RC_NOT_AUTHORIZED]}, emqtt:subscribe(C, SubTopicNotAllowed)),
unlink(C), unlink(C),
emqtt:stop(C), emqtt:stop(C),
emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], disalbed), emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], []),
ok. ok.
%% client is allowed by ACL to publish to its LWT topic, is connected, %% client is allowed by ACL to publish to its LWT topic, is connected,

View File

@ -41,6 +41,9 @@
]). ]).
-export([clean_cache/0]). -export([clean_cache/0]).
%% For tests
-export([hard_coded_test_action_info_modules/0]).
-callback bridge_v1_type_name() -> -callback bridge_v1_type_name() ->
atom() atom()
| { | {
@ -128,8 +131,13 @@ hard_coded_action_info_modules_common() ->
emqx_bridge_mqtt_pubsub_action_info emqx_bridge_mqtt_pubsub_action_info
]. ].
%% This exists so that it can be mocked for test cases
hard_coded_test_action_info_modules() -> [].
hard_coded_action_info_modules() -> hard_coded_action_info_modules() ->
hard_coded_action_info_modules_common() ++ hard_coded_action_info_modules_ee(). hard_coded_action_info_modules_common() ++
hard_coded_action_info_modules_ee() ++
?MODULE:hard_coded_test_action_info_modules().
%% ==================================================================== %% ====================================================================
%% API %% API

View File

@ -1030,7 +1030,26 @@ bridge_v2_type_to_connector_type(Type) ->
import_config(RawConf) -> import_config(RawConf) ->
%% actions structure %% actions structure
emqx_bridge:import_config(RawConf, <<"actions">>, ?ROOT_KEY_ACTIONS, config_key_path()). ActionRes = emqx_bridge:import_config(
RawConf, <<"actions">>, ?ROOT_KEY_ACTIONS, config_key_path()
),
SourceRes = emqx_bridge:import_config(
RawConf, <<"sources">>, ?ROOT_KEY_SOURCES, config_key_path_sources()
),
group_import_results([ActionRes, SourceRes]).
group_import_results(Results0) ->
Results = lists:foldr(
fun
({ok, OkRes}, {OkAcc, ErrAcc}) ->
{[OkRes | OkAcc], ErrAcc};
({error, ErrRes}, {OkAcc, ErrAcc}) ->
{OkAcc, [ErrRes | ErrAcc]}
end,
{[], []},
Results0
),
{results, Results}.
%%==================================================================== %%====================================================================
%% Config Update Handler API %% Config Update Handler API

View File

@ -1007,7 +1007,13 @@ call_operation(NodeOrAll, OperFunc, Args = [_Nodes, _ConfRootKey, BridgeType, Br
{error, not_implemented} -> {error, not_implemented} ->
?NOT_IMPLEMENTED; ?NOT_IMPLEMENTED;
{error, timeout} -> {error, timeout} ->
?BAD_REQUEST(<<"Request timeout">>); BridgeId = emqx_bridge_resource:bridge_id(BridgeType, BridgeName),
?SLOG(warning, #{
msg => "bridge_bpapi_call_timeout",
bridge => BridgeId,
call => OperFunc
}),
?SERVICE_UNAVAILABLE(<<"Request timeout">>);
{error, {start_pool_failed, Name, Reason}} -> {error, {start_pool_failed, Name, Reason}} ->
Msg = bin( Msg = bin(
io_lib:format("Failed to start ~p pool for reason ~p", [Name, redact(Reason)]) io_lib:format("Failed to start ~p pool for reason ~p", [Name, redact(Reason)])
@ -1018,9 +1024,8 @@ call_operation(NodeOrAll, OperFunc, Args = [_Nodes, _ConfRootKey, BridgeType, Br
?SLOG(warning, #{ ?SLOG(warning, #{
msg => "bridge_inconsistent_in_cluster_for_call_operation", msg => "bridge_inconsistent_in_cluster_for_call_operation",
reason => not_found, reason => not_found,
type => BridgeType, bridge => BridgeId,
name => BridgeName, call => OperFunc
bridge => BridgeId
}), }),
?SERVICE_UNAVAILABLE(<<"Bridge not found on remote node: ", BridgeId/binary>>); ?SERVICE_UNAVAILABLE(<<"Bridge not found on remote node: ", BridgeId/binary>>);
{error, {node_not_found, Node}} -> {error, {node_not_found, Node}} ->

View File

@ -825,22 +825,53 @@ do_start_stop_bridges(Type, Config) ->
%% Connecting to this endpoint should always timeout %% Connecting to this endpoint should always timeout
BadServer = iolist_to_binary(io_lib:format("localhost:~B", [ListenPort])), BadServer = iolist_to_binary(io_lib:format("localhost:~B", [ListenPort])),
BadName = <<"bad_", (atom_to_binary(Type))/binary>>, BadName = <<"bad_", (atom_to_binary(Type))/binary>>,
CreateRes0 = request_json(
post,
uri(["bridges"]),
?MQTT_BRIDGE(BadServer, BadName),
Config
),
?assertMatch( ?assertMatch(
{ok, 201, #{ {ok, 201, #{
<<"type">> := ?BRIDGE_TYPE_MQTT, <<"type">> := ?BRIDGE_TYPE_MQTT,
<<"name">> := BadName, <<"name">> := BadName,
<<"enable">> := true, <<"enable">> := true,
<<"server">> := BadServer, <<"server">> := BadServer
<<"status">> := <<"connecting">>,
<<"node_status">> := [_ | _]
}}, }},
request_json( CreateRes0
post,
uri(["bridges"]),
?MQTT_BRIDGE(BadServer, BadName),
Config
)
), ),
{ok, 201, CreateRes1} = CreateRes0,
case CreateRes1 of
#{
<<"node_status">> := [
#{
<<"status">> := <<"disconnected">>,
<<"status_reason">> := <<"connack_timeout">>
},
#{<<"status">> := <<"connecting">>}
| _
],
%% `inconsistent': one node is `?status_disconnected' (because it has already
%% timed out), the other node is `?status_connecting' (started later and
%% haven't timed out yet)
<<"status">> := <<"inconsistent">>,
<<"status_reason">> := <<"connack_timeout">>
} ->
ok;
#{
<<"node_status">> := [_, _ | _],
<<"status">> := <<"disconnected">>,
<<"status_reason">> := <<"connack_timeout">>
} ->
ok;
#{
<<"node_status">> := [_],
<<"status">> := <<"connecting">>
} ->
ok;
_ ->
error({unexpected_result, CreateRes1})
end,
BadBridgeID = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE_MQTT, BadName), BadBridgeID = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE_MQTT, BadName),
?assertMatch( ?assertMatch(
%% request from product: return 400 on such errors %% request from product: return 400 on such errors

View File

@ -705,7 +705,7 @@ t_async_query(Config, MakeMessageFun, IsSuccessCheck, TracePoint) ->
), ),
receive receive
{result, Result} -> IsSuccessCheck(Result) {result, Result} -> IsSuccessCheck(Result)
after 5_000 -> after 8_000 ->
throw(timeout) throw(timeout)
end, end,
ok. ok.

View File

@ -2,7 +2,7 @@
{erl_opts, [debug_info]}. {erl_opts, [debug_info]}.
{deps, [ {deps, [
{ecql, {git, "https://github.com/emqx/ecql.git", {tag, "v0.6.1"}}}, {ecql, {git, "https://github.com/emqx/ecql.git", {tag, "v0.7.0"}}},
{emqx_connector, {path, "../../apps/emqx_connector"}}, {emqx_connector, {path, "../../apps/emqx_connector"}},
{emqx_resource, {path, "../../apps/emqx_resource"}}, {emqx_resource, {path, "../../apps/emqx_resource"}},
{emqx_bridge, {path, "../../apps/emqx_bridge"}} {emqx_bridge, {path, "../../apps/emqx_bridge"}}

View File

@ -181,7 +181,7 @@ fields("post", Type) ->
cql_field() -> cql_field() ->
{cql, {cql,
mk( mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("cql_template"), default => ?DEFAULT_CQL, format => <<"sql">>} #{desc => ?DESC("cql_template"), default => ?DEFAULT_CQL, format => <<"sql">>}
)}. )}.

View File

@ -581,7 +581,6 @@ t_write_failure(Config) ->
) )
end), end),
fun(Trace0) -> fun(Trace0) ->
ct:pal("trace: ~p", [Trace0]),
Trace = ?of_kind( Trace = ?of_kind(
[buffer_worker_flush_nack, buffer_worker_retry_inflight_failed], Trace0 [buffer_worker_flush_nack, buffer_worker_retry_inflight_failed], Trace0
), ),

View File

@ -184,8 +184,12 @@ fields("post", Type) ->
sql_field() -> sql_field() ->
{sql, {sql,
mk( mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>} #{
desc => ?DESC("sql_template"),
default => ?DEFAULT_SQL,
format => <<"sql">>
}
)}. )}.
batch_value_separator_field() -> batch_value_separator_field() ->

View File

@ -87,6 +87,7 @@ connector_values() ->
<<"url">> => <<"http://127.0.0.1:8000">>, <<"url">> => <<"http://127.0.0.1:8000">>,
<<"aws_access_key_id">> => <<"root">>, <<"aws_access_key_id">> => <<"root">>,
<<"aws_secret_access_key">> => <<"******">>, <<"aws_secret_access_key">> => <<"******">>,
<<"region">> => <<"us-west-2">>,
<<"pool_size">> => 8, <<"pool_size">> => 8,
<<"resource_opts">> => <<"resource_opts">> =>
#{ #{
@ -113,7 +114,8 @@ action_values() ->
<<"parameters">> => <<"parameters">> =>
#{ #{
<<"table">> => <<"mqtt_msg">>, <<"table">> => <<"mqtt_msg">>,
<<"template">> => ?DEFAULT_TEMPLATE <<"template">> => ?DEFAULT_TEMPLATE,
<<"hash_key">> => <<"clientid">>
} }
}. }.
@ -161,10 +163,16 @@ fields(dynamo_action) ->
fields(action_parameters) -> fields(action_parameters) ->
Parameters = Parameters =
[ [
{template, {template, template_field_schema()},
{hash_key,
mk( mk(
binary(), binary(),
#{desc => ?DESC("template"), default => ?DEFAULT_TEMPLATE} #{desc => ?DESC("hash_key"), required => true}
)},
{range_key,
mk(
binary(),
#{desc => ?DESC("range_key"), required => false}
)} )}
] ++ emqx_bridge_dynamo_connector:fields(config), ] ++ emqx_bridge_dynamo_connector:fields(config),
lists:foldl( lists:foldl(
@ -174,6 +182,7 @@ fields(action_parameters) ->
Parameters, Parameters,
[ [
url, url,
region,
aws_access_key_id, aws_access_key_id,
aws_secret_access_key, aws_secret_access_key,
pool_size, pool_size,
@ -199,16 +208,22 @@ fields(connector_resource_opts) ->
fields("config") -> fields("config") ->
[ [
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})}, {enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
{template, {template, template_field_schema()},
mk(
binary(),
#{desc => ?DESC("template"), default => ?DEFAULT_TEMPLATE}
)},
{local_topic, {local_topic,
mk( mk(
binary(), binary(),
#{desc => ?DESC("local_topic"), default => undefined} #{desc => ?DESC("local_topic"), default => undefined}
)}, )},
{hash_key,
mk(
binary(),
#{desc => ?DESC("hash_key"), required => true}
)},
{range_key,
mk(
binary(),
#{desc => ?DESC("range_key"), required => false}
)},
{resource_opts, {resource_opts,
mk( mk(
ref(?MODULE, "creation_opts"), ref(?MODULE, "creation_opts"),
@ -230,6 +245,15 @@ fields("put") ->
fields("get") -> fields("get") ->
emqx_bridge_schema:status_fields() ++ fields("post"). emqx_bridge_schema:status_fields() ++ fields("post").
template_field_schema() ->
mk(
emqx_schema:template(),
#{
desc => ?DESC("template"),
default => ?DEFAULT_TEMPLATE
}
).
desc("config") -> desc("config") ->
?DESC("desc_config"); ?DESC("desc_config");
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" -> desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->

View File

@ -45,6 +45,7 @@ roots() ->
fields(config) -> fields(config) ->
[ [
{url, mk(binary(), #{required => true, desc => ?DESC("url")})}, {url, mk(binary(), #{required => true, desc => ?DESC("url")})},
{region, mk(binary(), #{required => true, desc => ?DESC("region")})},
{table, mk(binary(), #{required => true, desc => ?DESC("table")})}, {table, mk(binary(), #{required => true, desc => ?DESC("table")})},
{aws_access_key_id, {aws_access_key_id,
mk( mk(
@ -102,6 +103,12 @@ on_start(
pool_name => InstanceId, pool_name => InstanceId,
installed_channels => #{} installed_channels => #{}
}, },
case Config of
#{region := Region} ->
application:set_env(erlcloud, aws_region, to_str(Region));
_ ->
ok
end,
case emqx_resource_pool:start(InstanceId, ?MODULE, Options) of case emqx_resource_pool:start(InstanceId, ?MODULE, Options) of
ok -> ok ->
{ok, State}; {ok, State};
@ -126,12 +133,20 @@ on_add_channel(
create_channel_state( create_channel_state(
#{parameters := Conf} = _ChannelConfig #{parameters := Conf} = _ChannelConfig
) -> ) ->
#{ Keys = maps:with([hash_key, range_key], Conf),
table := Table Keys1 = maps:fold(
} = Conf, fun(K, V, Acc) ->
Acc#{K := erlang:binary_to_existing_atom(V)}
end,
Keys,
Keys
),
Base = maps:without([template, hash_key, range_key], Conf),
Base1 = maps:merge(Base, Keys1),
Templates = parse_template_from_conf(Conf), Templates = parse_template_from_conf(Conf),
State = #{ State = Base1#{
table => Table,
templates => Templates templates => Templates
}, },
{ok, State}. {ok, State}.
@ -232,11 +247,16 @@ do_query(
templates := Templates templates := Templates
} = ChannelState, } = ChannelState,
Result = Result =
ecpool:pick_and_do( case ensuare_dynamo_keys(Query, ChannelState) of
PoolName, true ->
{emqx_bridge_dynamo_connector_client, query, [Table, QueryTuple, Templates]}, ecpool:pick_and_do(
no_handover PoolName,
), {emqx_bridge_dynamo_connector_client, query, [Table, QueryTuple, Templates]},
no_handover
);
_ ->
{error, missing_filter_or_range_key}
end,
case Result of case Result of
{error, Reason} -> {error, Reason} ->
@ -288,6 +308,25 @@ get_query_tuple([{_ChannelId, {_QueryType, _Data}} | _]) ->
get_query_tuple([InsertQuery | _]) -> get_query_tuple([InsertQuery | _]) ->
get_query_tuple(InsertQuery). get_query_tuple(InsertQuery).
ensuare_dynamo_keys({_, Data} = Query, State) when is_map(Data) ->
ensuare_dynamo_keys([Query], State);
ensuare_dynamo_keys([{_, Data} | _] = Queries, State) when is_map(Data) ->
Keys = maps:to_list(maps:with([hash_key, range_key], State)),
lists:all(
fun({_, Query}) ->
lists:all(
fun({_, Key}) ->
maps:is_key(Key, Query)
end,
Keys
)
end,
Queries
);
%% this is not a insert query
ensuare_dynamo_keys(_Query, _State) ->
true.
connect(Opts) -> connect(Opts) ->
Config = proplists:get_value(config, Opts), Config = proplists:get_value(config, Opts),
{ok, _Pid} = emqx_bridge_dynamo_connector_client:start_link(Config). {ok, _Pid} = emqx_bridge_dynamo_connector_client:start_link(Config).

View File

@ -16,6 +16,7 @@
-define(TABLE_BIN, to_bin(?TABLE)). -define(TABLE_BIN, to_bin(?TABLE)).
-define(ACCESS_KEY_ID, "root"). -define(ACCESS_KEY_ID, "root").
-define(SECRET_ACCESS_KEY, "public"). -define(SECRET_ACCESS_KEY, "public").
-define(REGION, "us-west-2").
-define(HOST, "dynamo"). -define(HOST, "dynamo").
-define(PORT, 8000). -define(PORT, 8000).
-define(SCHEMA, "http://"). -define(SCHEMA, "http://").
@ -177,7 +178,9 @@ dynamo_config(BridgeType, Config) ->
"bridges.~s.~s {" "bridges.~s.~s {"
"\n enable = true" "\n enable = true"
"\n url = \"http://~s:~p\"" "\n url = \"http://~s:~p\""
"\n region = ~p"
"\n table = ~p" "\n table = ~p"
"\n hash_key =\"clientid\""
"\n aws_access_key_id = ~p" "\n aws_access_key_id = ~p"
"\n aws_secret_access_key = ~p" "\n aws_secret_access_key = ~p"
"\n resource_opts = {" "\n resource_opts = {"
@ -191,6 +194,7 @@ dynamo_config(BridgeType, Config) ->
Name, Name,
Host, Host,
Port, Port,
?REGION,
?TABLE, ?TABLE,
?ACCESS_KEY_ID, ?ACCESS_KEY_ID,
%% NOTE: using file-based secrets with HOCON configs %% NOTE: using file-based secrets with HOCON configs
@ -210,7 +214,8 @@ action_config(Config) ->
<<"enable">> => true, <<"enable">> => true,
<<"parameters">> => <<"parameters">> =>
#{ #{
<<"table">> => ?TABLE <<"table">> => ?TABLE,
<<"hash_key">> => <<"clientid">>
}, },
<<"resource_opts">> => <<"resource_opts">> =>
#{ #{
@ -234,6 +239,7 @@ connector_config(Config) ->
<<"url">> => URL, <<"url">> => URL,
<<"aws_access_key_id">> => ?ACCESS_KEY_ID, <<"aws_access_key_id">> => ?ACCESS_KEY_ID,
<<"aws_secret_access_key">> => AccessKey, <<"aws_secret_access_key">> => AccessKey,
<<"region">> => ?REGION,
<<"enable">> => true, <<"enable">> => true,
<<"pool_size">> => 8, <<"pool_size">> => 8,
<<"resource_opts">> => <<"resource_opts">> =>
@ -355,7 +361,7 @@ t_setup_via_config_and_publish(Config) ->
create_bridge(Config) create_bridge(Config)
), ),
MsgId = emqx_utils:gen_id(), MsgId = emqx_utils:gen_id(),
SentData = #{id => MsgId, payload => ?PAYLOAD}, SentData = #{clientid => <<"clientid">>, id => MsgId, payload => ?PAYLOAD},
?check_trace( ?check_trace(
begin begin
?wait_async_action( ?wait_async_action(
@ -421,7 +427,7 @@ t_setup_via_http_api_and_publish(Config) ->
create_bridge_http(PgsqlConfig) create_bridge_http(PgsqlConfig)
), ),
MsgId = emqx_utils:gen_id(), MsgId = emqx_utils:gen_id(),
SentData = #{id => MsgId, payload => ?PAYLOAD}, SentData = #{clientid => <<"clientid">>, id => MsgId, payload => ?PAYLOAD},
?check_trace( ?check_trace(
begin begin
?wait_async_action( ?wait_async_action(
@ -486,7 +492,7 @@ t_write_failure(Config) ->
#{?snk_kind := resource_connected_enter}, #{?snk_kind := resource_connected_enter},
20_000 20_000
), ),
SentData = #{id => emqx_utils:gen_id(), payload => ?PAYLOAD}, SentData = #{clientid => <<"clientid">>, id => emqx_utils:gen_id(), payload => ?PAYLOAD},
emqx_common_test_helpers:with_failure(down, ProxyName, ProxyHost, ProxyPort, fun() -> emqx_common_test_helpers:with_failure(down, ProxyName, ProxyHost, ProxyPort, fun() ->
?assertMatch( ?assertMatch(
{error, {resource_error, #{reason := timeout}}}, send_message(Config, SentData) {error, {resource_error, #{reason := timeout}}}, send_message(Config, SentData)
@ -513,12 +519,21 @@ t_simple_query(Config) ->
ok. ok.
t_missing_data(Config) -> t_missing_data(Config) ->
?assertMatch(
{ok, _},
create_bridge(Config)
),
Result = send_message(Config, #{clientid => <<"clientid">>}),
?assertMatch({error, {<<"ValidationException">>, <<>>}}, Result),
ok.
t_missing_hash_key(Config) ->
?assertMatch( ?assertMatch(
{ok, _}, {ok, _},
create_bridge(Config) create_bridge(Config)
), ),
Result = send_message(Config, #{}), Result = send_message(Config, #{}),
?assertMatch({error, {unrecoverable_error, {invalid_request, _}}}, Result), ?assertMatch({error, missing_filter_or_range_key}, Result),
ok. ok.
t_bad_parameter(Config) -> t_bad_parameter(Config) ->
@ -543,7 +558,9 @@ t_action_create_via_http(Config) ->
emqx_bridge_v2_testlib:t_create_via_http(Config). emqx_bridge_v2_testlib:t_create_via_http(Config).
t_action_sync_query(Config) -> t_action_sync_query(Config) ->
MakeMessageFun = fun() -> #{id => <<"the_message_id">>, payload => ?PAYLOAD} end, MakeMessageFun = fun() ->
#{clientid => <<"clientid">>, id => <<"the_message_id">>, payload => ?PAYLOAD}
end,
IsSuccessCheck = fun(Result) -> ?assertEqual({ok, []}, Result) end, IsSuccessCheck = fun(Result) -> ?assertEqual({ok, []}, Result) end,
TracePoint = dynamo_connector_query_return, TracePoint = dynamo_connector_query_return,
emqx_bridge_v2_testlib:t_sync_query(Config, MakeMessageFun, IsSuccessCheck, TracePoint). emqx_bridge_v2_testlib:t_sync_query(Config, MakeMessageFun, IsSuccessCheck, TracePoint).

View File

@ -135,7 +135,7 @@ overwrite() ->
index() -> index() ->
{index, {index,
?HOCON( ?HOCON(
binary(), emqx_schema:template(),
#{ #{
required => true, required => true,
example => <<"${payload.index}">>, example => <<"${payload.index}">>,
@ -146,7 +146,7 @@ index() ->
id(Required) -> id(Required) ->
{id, {id,
?HOCON( ?HOCON(
binary(), emqx_schema:template(),
#{ #{
required => Required, required => Required,
example => <<"${payload.id}">>, example => <<"${payload.id}">>,
@ -157,7 +157,7 @@ id(Required) ->
doc() -> doc() ->
{doc, {doc,
?HOCON( ?HOCON(
binary(), emqx_schema:template(),
#{ #{
required => false, required => false,
example => <<"${payload.doc}">>, example => <<"${payload.doc}">>,
@ -187,7 +187,7 @@ doc_as_upsert() ->
routing() -> routing() ->
{routing, {routing,
?HOCON( ?HOCON(
binary(), emqx_schema:template(),
#{ #{
required => false, required => false,
example => <<"${payload.routing}">>, example => <<"${payload.routing}">>,

View File

@ -122,7 +122,7 @@ fields(producer) ->
)}, )},
{ordering_key_template, {ordering_key_template,
sc( sc(
binary(), emqx_schema:template(),
#{ #{
default => <<>>, default => <<>>,
desc => ?DESC("ordering_key_template") desc => ?DESC("ordering_key_template")
@ -130,7 +130,7 @@ fields(producer) ->
)}, )},
{payload_template, {payload_template,
sc( sc(
binary(), emqx_schema:template(),
#{ #{
default => <<>>, default => <<>>,
desc => ?DESC("payload_template") desc => ?DESC("payload_template")
@ -201,8 +201,11 @@ fields(consumer_topic_mapping) ->
{qos, mk(emqx_schema:qos(), #{default => 0, desc => ?DESC(consumer_mqtt_qos)})}, {qos, mk(emqx_schema:qos(), #{default => 0, desc => ?DESC(consumer_mqtt_qos)})},
{payload_template, {payload_template,
mk( mk(
string(), emqx_schema:template(),
#{default => <<"${.}">>, desc => ?DESC(consumer_mqtt_payload)} #{
default => <<"${.}">>,
desc => ?DESC(consumer_mqtt_payload)
}
)} )}
]; ];
fields("consumer_resource_opts") -> fields("consumer_resource_opts") ->
@ -221,14 +224,18 @@ fields("consumer_resource_opts") ->
fields(key_value_pair) -> fields(key_value_pair) ->
[ [
{key, {key,
mk(binary(), #{ mk(emqx_schema:template(), #{
required => true, required => true,
validator => [ validator => [
emqx_resource_validator:not_empty("Key templates must not be empty") emqx_resource_validator:not_empty("Key templates must not be empty")
], ],
desc => ?DESC(kv_pair_key) desc => ?DESC(kv_pair_key)
})}, })},
{value, mk(binary(), #{required => true, desc => ?DESC(kv_pair_value)})} {value,
mk(emqx_schema:template(), #{
required => true,
desc => ?DESC(kv_pair_value)
})}
]; ];
fields("get_producer") -> fields("get_producer") ->
emqx_bridge_schema:status_fields() ++ fields("post_producer"); emqx_bridge_schema:status_fields() ++ fields("post_producer");

View File

@ -1929,7 +1929,6 @@ t_bad_attributes(Config) ->
ok ok
end, end,
fun(Trace) -> fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
?assertMatch( ?assertMatch(
[ [
#{placeholder := [<<"payload">>, <<"ok">>], value := #{}}, #{placeholder := [<<"payload">>, <<"ok">>], value := #{}},

View File

@ -6,7 +6,7 @@
{emqx_connector, {path, "../../apps/emqx_connector"}}, {emqx_connector, {path, "../../apps/emqx_connector"}},
{emqx_resource, {path, "../../apps/emqx_resource"}}, {emqx_resource, {path, "../../apps/emqx_resource"}},
{emqx_bridge, {path, "../../apps/emqx_bridge"}}, {emqx_bridge, {path, "../../apps/emqx_bridge"}},
{greptimedb, {git, "https://github.com/GreptimeTeam/greptimedb-client-erl", {tag, "v0.1.7"}}} {greptimedb, {git, "https://github.com/GreptimeTeam/greptimedb-ingester-erl", {tag, "v0.1.8"}}}
]}. ]}.
{plugins, [rebar3_path_deps]}. {plugins, [rebar3_path_deps]}.
{project_plugins, [erlfmt]}. {project_plugins, [erlfmt]}.

View File

@ -324,7 +324,7 @@ query_by_clientid(Topic, ClientId, Config) ->
{"Content-Type", "application/x-www-form-urlencoded"} {"Content-Type", "application/x-www-form-urlencoded"}
], ],
Body = <<"sql=select * from \"", Topic/binary, "\" where clientid='", ClientId/binary, "'">>, Body = <<"sql=select * from \"", Topic/binary, "\" where clientid='", ClientId/binary, "'">>,
{ok, 200, _Headers, RawBody0} = {ok, StatusCode, _Headers, RawBody0} =
ehttpc:request( ehttpc:request(
EHttpcPoolName, EHttpcPoolName,
post, post,
@ -335,7 +335,6 @@ query_by_clientid(Topic, ClientId, Config) ->
case emqx_utils_json:decode(RawBody0, [return_maps]) of case emqx_utils_json:decode(RawBody0, [return_maps]) of
#{ #{
<<"code">> := 0,
<<"output">> := [ <<"output">> := [
#{ #{
<<"records">> := #{ <<"records">> := #{
@ -344,12 +343,12 @@ query_by_clientid(Topic, ClientId, Config) ->
} }
} }
] ]
} -> } when StatusCode >= 200 andalso StatusCode =< 300 ->
make_row(Schema, Rows); make_row(Schema, Rows);
#{ #{
<<"code">> := Code, <<"code">> := Code,
<<"error">> := Error <<"error">> := Error
} -> } when StatusCode > 300 ->
GreptimedbName = ?config(greptimedb_name, Config), GreptimedbName = ?config(greptimedb_name, Config),
Type = greptimedb_type_bin(?config(greptimedb_type, Config)), Type = greptimedb_type_bin(?config(greptimedb_type, Config)),
BridgeId = emqx_bridge_resource:bridge_id(Type, GreptimedbName), BridgeId = emqx_bridge_resource:bridge_id(Type, GreptimedbName),
@ -367,7 +366,9 @@ query_by_clientid(Topic, ClientId, Config) ->
_ -> _ ->
%% Table not found %% Table not found
#{} #{}
end end;
Error ->
{error, Error}
end. end.
make_row(null, _Rows) -> make_row(null, _Rows) ->
@ -910,69 +911,6 @@ t_start_exception(Config) ->
), ),
ok. ok.
t_write_failure(Config) ->
ProxyName = ?config(proxy_name, Config),
ProxyPort = ?config(proxy_port, Config),
ProxyHost = ?config(proxy_host, Config),
QueryMode = ?config(query_mode, Config),
{ok, _} = create_bridge(Config),
ClientId = emqx_guid:to_hexstr(emqx_guid:gen()),
Payload = #{
int_key => -123,
bool => true,
float_key => 24.5,
uint_key => 123
},
SentData = #{
<<"clientid">> => ClientId,
<<"topic">> => atom_to_binary(?FUNCTION_NAME),
<<"timestamp">> => erlang:system_time(millisecond),
<<"payload">> => Payload
},
?check_trace(
emqx_common_test_helpers:with_failure(down, ProxyName, ProxyHost, ProxyPort, fun() ->
case QueryMode of
sync ->
?wait_async_action(
?assertMatch(
{error, {resource_error, #{reason := timeout}}},
send_message(Config, SentData)
),
#{?snk_kind := handle_async_reply, action := nack},
1_000
);
async ->
?wait_async_action(
?assertEqual(ok, send_message(Config, SentData)),
#{?snk_kind := handle_async_reply},
1_000
)
end
end),
fun(Trace0) ->
case QueryMode of
sync ->
Trace = ?of_kind(handle_async_reply, Trace0),
?assertMatch([_ | _], Trace),
[#{result := Result} | _] = Trace,
?assert(
not emqx_bridge_greptimedb_connector:is_unrecoverable_error(Result),
#{got => Result}
);
async ->
Trace = ?of_kind(handle_async_reply, Trace0),
?assertMatch([_ | _], Trace),
[#{result := Result} | _] = Trace,
?assert(
not emqx_bridge_greptimedb_connector:is_unrecoverable_error(Result),
#{got => Result}
)
end,
ok
end
),
ok.
t_missing_field(Config) -> t_missing_field(Config) ->
BatchSize = ?config(batch_size, Config), BatchSize = ?config(batch_size, Config),
IsBatch = BatchSize > 1, IsBatch = BatchSize > 1,

View File

@ -167,13 +167,13 @@ fields(action_parameters) ->
})}, })},
{partition_key, {partition_key,
mk(binary(), #{ mk(emqx_schema:template(), #{
required => false, desc => ?DESC(emqx_bridge_hstreamdb_connector, "partition_key") required => false,
desc => ?DESC(emqx_bridge_hstreamdb_connector, "partition_key")
})}, })},
{grpc_flush_timeout, fun grpc_flush_timeout/1}, {grpc_flush_timeout, fun grpc_flush_timeout/1},
{record_template, {record_template, record_template_schema()},
mk(binary(), #{default => <<"${payload}">>, desc => ?DESC("record_template")})},
{aggregation_pool_size, {aggregation_pool_size,
mk(pos_integer(), #{ mk(pos_integer(), #{
default => ?DEFAULT_AGG_POOL_SIZE, desc => ?DESC("aggregation_pool_size") default => ?DEFAULT_AGG_POOL_SIZE, desc => ?DESC("aggregation_pool_size")
@ -222,6 +222,12 @@ fields("put") ->
hstream_bridge_common_fields() ++ hstream_bridge_common_fields() ++
connector_fields(). connector_fields().
record_template_schema() ->
mk(emqx_schema:template(), #{
default => <<"${payload}">>,
desc => ?DESC("record_template")
}).
grpc_timeout(type) -> emqx_schema:timeout_duration_ms(); grpc_timeout(type) -> emqx_schema:timeout_duration_ms();
grpc_timeout(desc) -> ?DESC(emqx_bridge_hstreamdb_connector, "grpc_timeout"); grpc_timeout(desc) -> ?DESC(emqx_bridge_hstreamdb_connector, "grpc_timeout");
grpc_timeout(default) -> ?DEFAULT_GRPC_TIMEOUT_RAW; grpc_timeout(default) -> ?DEFAULT_GRPC_TIMEOUT_RAW;
@ -239,8 +245,7 @@ hstream_bridge_common_fields() ->
[ [
{direction, mk(egress, #{desc => ?DESC("config_direction"), default => egress})}, {direction, mk(egress, #{desc => ?DESC("config_direction"), default => egress})},
{local_topic, mk(binary(), #{desc => ?DESC("local_topic")})}, {local_topic, mk(binary(), #{desc => ?DESC("local_topic")})},
{record_template, {record_template, record_template_schema()}
mk(binary(), #{default => <<"${payload}">>, desc => ?DESC("record_template")})}
] ++ ] ++
emqx_resource_schema:fields("resource_opts"). emqx_resource_schema:fields("resource_opts").

View File

@ -128,9 +128,10 @@ fields("request") ->
desc => ?DESC("method"), desc => ?DESC("method"),
validator => fun ?MODULE:validate_method/1 validator => fun ?MODULE:validate_method/1
})}, })},
{path, hoconsc:mk(binary(), #{required => false, desc => ?DESC("path")})}, {path, hoconsc:mk(emqx_schema:template(), #{required => false, desc => ?DESC("path")})},
{body, hoconsc:mk(binary(), #{required => false, desc => ?DESC("body")})}, {body, hoconsc:mk(emqx_schema:template(), #{required => false, desc => ?DESC("body")})},
{headers, hoconsc:mk(map(), #{required => false, desc => ?DESC("headers")})}, {headers,
hoconsc:mk(map(), #{required => false, desc => ?DESC("headers"), is_template => true})},
{max_retries, {max_retries,
sc( sc(
non_neg_integer(), non_neg_integer(),
@ -315,7 +316,7 @@ on_query(InstId, {send_message, Msg}, State) ->
ClientId = maps:get(clientid, Msg, undefined), ClientId = maps:get(clientid, Msg, undefined),
on_query( on_query(
InstId, InstId,
{ClientId, Method, {Path, Headers, Body}, Timeout, Retry}, {undefined, ClientId, Method, {Path, Headers, Body}, Timeout, Retry},
State State
) )
end; end;
@ -345,19 +346,19 @@ on_query(
ClientId = clientid(Msg), ClientId = clientid(Msg),
on_query( on_query(
InstId, InstId,
{ClientId, Method, {Path, Headers, Body}, Timeout, Retry}, {ActionId, ClientId, Method, {Path, Headers, Body}, Timeout, Retry},
State State
) )
end; end;
on_query(InstId, {Method, Request}, State) -> on_query(InstId, {Method, Request}, State) ->
%% TODO: Get retry from State %% TODO: Get retry from State
on_query(InstId, {undefined, Method, Request, 5000, _Retry = 2}, State); on_query(InstId, {undefined, undefined, Method, Request, 5000, _Retry = 2}, State);
on_query(InstId, {Method, Request, Timeout}, State) -> on_query(InstId, {Method, Request, Timeout}, State) ->
%% TODO: Get retry from State %% TODO: Get retry from State
on_query(InstId, {undefined, Method, Request, Timeout, _Retry = 2}, State); on_query(InstId, {undefined, undefined, Method, Request, Timeout, _Retry = 2}, State);
on_query( on_query(
InstId, InstId,
{KeyOrNum, Method, Request, Timeout, Retry}, {ActionId, KeyOrNum, Method, Request, Timeout, Retry},
#{base_path := BasePath} = State #{base_path := BasePath} = State
) -> ) ->
?TRACE( ?TRACE(
@ -367,10 +368,12 @@ on_query(
request => redact_request(Request), request => redact_request(Request),
note => ?READACT_REQUEST_NOTE, note => ?READACT_REQUEST_NOTE,
connector => InstId, connector => InstId,
action_id => ActionId,
state => redact(State) state => redact(State)
} }
), ),
NRequest = formalize_request(Method, BasePath, Request), NRequest = formalize_request(Method, BasePath, Request),
trace_rendered_action_template(ActionId, Method, NRequest, Timeout),
Worker = resolve_pool_worker(State, KeyOrNum), Worker = resolve_pool_worker(State, KeyOrNum),
Result0 = ehttpc:request( Result0 = ehttpc:request(
Worker, Worker,
@ -427,7 +430,7 @@ on_query_async(InstId, {send_message, Msg}, ReplyFunAndArgs, State) ->
ClientId = maps:get(clientid, Msg, undefined), ClientId = maps:get(clientid, Msg, undefined),
on_query_async( on_query_async(
InstId, InstId,
{ClientId, Method, {Path, Headers, Body}, Timeout}, {undefined, ClientId, Method, {Path, Headers, Body}, Timeout},
ReplyFunAndArgs, ReplyFunAndArgs,
State State
) )
@ -457,14 +460,14 @@ on_query_async(
ClientId = clientid(Msg), ClientId = clientid(Msg),
on_query_async( on_query_async(
InstId, InstId,
{ClientId, Method, {Path, Headers, Body}, Timeout}, {ActionId, ClientId, Method, {Path, Headers, Body}, Timeout},
ReplyFunAndArgs, ReplyFunAndArgs,
State State
) )
end; end;
on_query_async( on_query_async(
InstId, InstId,
{KeyOrNum, Method, Request, Timeout}, {ActionId, KeyOrNum, Method, Request, Timeout},
ReplyFunAndArgs, ReplyFunAndArgs,
#{base_path := BasePath} = State #{base_path := BasePath} = State
) -> ) ->
@ -480,6 +483,7 @@ on_query_async(
} }
), ),
NRequest = formalize_request(Method, BasePath, Request), NRequest = formalize_request(Method, BasePath, Request),
trace_rendered_action_template(ActionId, Method, NRequest, Timeout),
MaxAttempts = maps:get(max_attempts, State, 3), MaxAttempts = maps:get(max_attempts, State, 3),
Context = #{ Context = #{
attempt => 1, attempt => 1,
@ -499,6 +503,31 @@ on_query_async(
), ),
{ok, Worker}. {ok, Worker}.
trace_rendered_action_template(ActionId, Method, NRequest, Timeout) ->
case NRequest of
{Path, Headers} ->
emqx_trace:rendered_action_template(
ActionId,
#{
path => Path,
method => Method,
headers => emqx_utils_redact:redact_headers(Headers),
timeout => Timeout
}
);
{Path, Headers, Body} ->
emqx_trace:rendered_action_template(
ActionId,
#{
path => Path,
method => Method,
headers => emqx_utils_redact:redact_headers(Headers),
timeout => Timeout,
body => Body
}
)
end.
resolve_pool_worker(State, undefined) -> resolve_pool_worker(State, undefined) ->
resolve_pool_worker(State, self()); resolve_pool_worker(State, self());
resolve_pool_worker(#{pool_name := PoolName} = State, Key) -> resolve_pool_worker(#{pool_name := PoolName} = State, Key) ->

View File

@ -114,7 +114,7 @@ fields("parameters_opts") ->
[ [
{path, {path,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
desc => ?DESC("config_path"), desc => ?DESC("config_path"),
required => false required => false
@ -270,7 +270,8 @@ headers_field() ->
<<"content-type">> => <<"application/json">>, <<"content-type">> => <<"application/json">>,
<<"keep-alive">> => <<"timeout=5">> <<"keep-alive">> => <<"timeout=5">>
}, },
desc => ?DESC("config_headers") desc => ?DESC("config_headers"),
is_template => true
} }
)}. )}.
@ -287,7 +288,7 @@ method_field() ->
body_field() -> body_field() ->
{body, {body,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
default => undefined, default => undefined,
desc => ?DESC("config_body") desc => ?DESC("config_body")

View File

@ -30,8 +30,8 @@
-include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("emqx/include/asserts.hrl"). -include_lib("emqx/include/asserts.hrl").
-define(BRIDGE_TYPE, <<"webhook">>). -define(BRIDGE_TYPE, emqx_bridge_http_test_lib:bridge_type()).
-define(BRIDGE_NAME, atom_to_binary(?MODULE)). -define(BRIDGE_NAME, emqx_bridge_http_test_lib:bridge_name()).
all() -> all() ->
emqx_common_test_helpers:all(?MODULE). emqx_common_test_helpers:all(?MODULE).
@ -73,21 +73,10 @@ suite() ->
init_per_testcase(t_bad_bridge_config, Config) -> init_per_testcase(t_bad_bridge_config, Config) ->
Config; Config;
init_per_testcase(t_send_async_connection_timeout, Config) -> init_per_testcase(Case, Config) when
HTTPPath = <<"/path">>, Case =:= t_send_async_connection_timeout orelse Case =:= t_send_get_trace_messages
ServerSSLOpts = false, ->
{ok, {HTTPPort, _Pid}} = emqx_bridge_http_connector_test_server:start_link( emqx_bridge_http_test_lib:init_http_success_server(Config);
_Port = random, HTTPPath, ServerSSLOpts
),
ResponseDelayMS = 500,
ok = emqx_bridge_http_connector_test_server:set_handler(
success_http_handler(#{response_delay => ResponseDelayMS})
),
[
{http_server, #{port => HTTPPort, path => HTTPPath}},
{response_delay_ms, ResponseDelayMS}
| Config
];
init_per_testcase(t_path_not_found, Config) -> init_per_testcase(t_path_not_found, Config) ->
HTTPPath = <<"/nonexisting/path">>, HTTPPath = <<"/nonexisting/path">>,
ServerSSLOpts = false, ServerSSLOpts = false,
@ -115,7 +104,9 @@ init_per_testcase(t_bridge_probes_header_atoms, Config) ->
{ok, {HTTPPort, _Pid}} = emqx_bridge_http_connector_test_server:start_link( {ok, {HTTPPort, _Pid}} = emqx_bridge_http_connector_test_server:start_link(
_Port = random, HTTPPath, ServerSSLOpts _Port = random, HTTPPath, ServerSSLOpts
), ),
ok = emqx_bridge_http_connector_test_server:set_handler(success_http_handler()), ok = emqx_bridge_http_connector_test_server:set_handler(
emqx_bridge_http_test_lib:success_http_handler()
),
[{http_server, #{port => HTTPPort, path => HTTPPath}} | Config]; [{http_server, #{port => HTTPPort, path => HTTPPath}} | Config];
init_per_testcase(_TestCase, Config) -> init_per_testcase(_TestCase, Config) ->
Server = start_http_server(#{response_delay_ms => 0}), Server = start_http_server(#{response_delay_ms => 0}),
@ -126,7 +117,8 @@ end_per_testcase(TestCase, _Config) when
TestCase =:= t_too_many_requests; TestCase =:= t_too_many_requests;
TestCase =:= t_rule_action_expired; TestCase =:= t_rule_action_expired;
TestCase =:= t_bridge_probes_header_atoms; TestCase =:= t_bridge_probes_header_atoms;
TestCase =:= t_send_async_connection_timeout TestCase =:= t_send_async_connection_timeout;
TestCase =:= t_send_get_trace_messages
-> ->
ok = emqx_bridge_http_connector_test_server:stop(), ok = emqx_bridge_http_connector_test_server:stop(),
persistent_term:erase({?MODULE, times_called}), persistent_term:erase({?MODULE, times_called}),
@ -250,115 +242,8 @@ get_metrics(Name) ->
Type = <<"http">>, Type = <<"http">>,
emqx_bridge:get_metrics(Type, Name). emqx_bridge:get_metrics(Type, Name).
bridge_async_config(#{port := Port} = Config) ->
Type = maps:get(type, Config, ?BRIDGE_TYPE),
Name = maps:get(name, Config, ?BRIDGE_NAME),
Host = maps:get(host, Config, "localhost"),
Path = maps:get(path, Config, ""),
PoolSize = maps:get(pool_size, Config, 1),
QueryMode = maps:get(query_mode, Config, "async"),
ConnectTimeout = maps:get(connect_timeout, Config, "1s"),
RequestTimeout = maps:get(request_timeout, Config, "10s"),
ResumeInterval = maps:get(resume_interval, Config, "1s"),
HealthCheckInterval = maps:get(health_check_interval, Config, "200ms"),
ResourceRequestTTL = maps:get(resource_request_ttl, Config, "infinity"),
LocalTopic =
case maps:find(local_topic, Config) of
{ok, LT} ->
lists:flatten(["local_topic = \"", LT, "\""]);
error ->
""
end,
ConfigString = io_lib:format(
"bridges.~s.~s {\n"
" url = \"http://~s:~p~s\"\n"
" connect_timeout = \"~p\"\n"
" enable = true\n"
%% local_topic
" ~s\n"
" enable_pipelining = 100\n"
" max_retries = 2\n"
" method = \"post\"\n"
" pool_size = ~p\n"
" pool_type = \"random\"\n"
" request_timeout = \"~s\"\n"
" body = \"${id}\"\n"
" resource_opts {\n"
" inflight_window = 100\n"
" health_check_interval = \"~s\"\n"
" max_buffer_bytes = \"1GB\"\n"
" query_mode = \"~s\"\n"
" request_ttl = \"~p\"\n"
" resume_interval = \"~s\"\n"
" start_after_created = \"true\"\n"
" start_timeout = \"5s\"\n"
" worker_pool_size = \"1\"\n"
" }\n"
" ssl {\n"
" enable = false\n"
" }\n"
"}\n",
[
Type,
Name,
Host,
Port,
Path,
ConnectTimeout,
LocalTopic,
PoolSize,
RequestTimeout,
HealthCheckInterval,
QueryMode,
ResourceRequestTTL,
ResumeInterval
]
),
ct:pal(ConfigString),
parse_and_check(ConfigString, Type, Name).
parse_and_check(ConfigString, BridgeType, Name) ->
{ok, RawConf} = hocon:binary(ConfigString, #{format => map}),
hocon_tconf:check_plain(emqx_bridge_schema, RawConf, #{required => false, atom_key => false}),
#{<<"bridges">> := #{BridgeType := #{Name := RetConfig}}} = RawConf,
RetConfig.
make_bridge(Config) -> make_bridge(Config) ->
Type = ?BRIDGE_TYPE, emqx_bridge_http_test_lib:make_bridge(Config).
Name = ?BRIDGE_NAME,
BridgeConfig = bridge_async_config(Config#{
name => Name,
type => Type
}),
{ok, _} = emqx_bridge:create(
Type,
Name,
BridgeConfig
),
emqx_bridge_resource:bridge_id(Type, Name).
%% Cowboy handler fun that always replies 200 "hello" and forwards each
%% request's headers and body to the test process as `{http, Headers, Body}`.
success_http_handler() ->
    success_http_handler(#{response_delay => 0}).

%% Same as success_http_handler/0, but sleeps `response_delay` milliseconds
%% (when positive) before forwarding the request and replying.
success_http_handler(Opts) ->
    DelayMs = maps:get(response_delay, Opts, 0),
    Parent = self(),
    fun(CowboyReq, HandlerState) ->
        {ok, Body, Req1} = cowboy_req:read_body(CowboyReq),
        Headers = cowboy_req:headers(Req1),
        ct:pal("http request received: ~p", [
            #{body => Body, headers => Headers, response_delay => DelayMs}
        ]),
        case DelayMs > 0 of
            true -> timer:sleep(DelayMs);
            false -> ok
        end,
        Parent ! {http, Headers, Body},
        Reply = cowboy_req:reply(
            200,
            #{<<"content-type">> => <<"text/plain">>},
            <<"hello">>,
            Req1
        ),
        {ok, Reply, HandlerState}
    end.
not_found_http_handler() -> not_found_http_handler() ->
TestPid = self(), TestPid = self(),
@ -452,6 +337,102 @@ t_send_async_connection_timeout(Config) ->
receive_request_notifications(MessageIDs, ResponseDelayMS, []), receive_request_notifications(MessageIDs, ResponseDelayMS, []),
ok. ok.
%% End-to-end test: create an HTTP bridge and a rule feeding it, attach a
%% rule-id trace, publish one matching message, and assert that the trace
%% log records the full rule/action pipeline (activation, SQL result,
%% bridge action, template render, async query).
t_send_get_trace_messages(Config) ->
    ResponseDelayMS = ?config(response_delay_ms, Config),
    #{port := Port, path := Path} = ?config(http_server, Config),
    BridgeID = make_bridge(#{
        port => Port,
        path => Path,
        pool_size => 1,
        query_mode => "async",
        %% connect timeout scaled to the server's artificial response delay
        connect_timeout => integer_to_list(ResponseDelayMS * 2) ++ "ms",
        request_timeout => "10s",
        resume_interval => "200ms",
        health_check_interval => "200ms",
        resource_request_ttl => "infinity"
    }),
    RuleTopic = iolist_to_binary([<<"my_rule_topic/">>, atom_to_binary(?FUNCTION_NAME)]),
    SQL = <<"SELECT payload.id as id FROM \"", RuleTopic/binary, "\"">>,
    {ok, #{<<"id">> := RuleId}} =
        emqx_bridge_testlib:create_rule_and_action_http(
            ?BRIDGE_TYPE,
            RuleTopic,
            Config,
            #{sql => SQL}
        ),
    %% ===================================
    %% Create trace for RuleId
    %% ===================================
    %% Start 10s in the past so the trace window already covers "now".
    Now = erlang:system_time(second) - 10,
    Start = Now,
    End = Now + 60,
    TraceName = atom_to_binary(?FUNCTION_NAME),
    Trace = #{
        name => TraceName,
        type => ruleid,
        ruleid => RuleId,
        start_at => Start,
        end_at => End
    },
    emqx_trace_SUITE:reload(),
    ok = emqx_trace:clear(),
    {ok, _} = emqx_trace:create(Trace),
    %% ===================================
    ResourceId = emqx_bridge_resource:resource_id(BridgeID),
    %% Wait for the bridge resource to come up before publishing.
    ?retry(
        _Interval0 = 200,
        _NAttempts0 = 20,
        ?assertMatch({ok, connected}, emqx_resource_manager:health_check(ResourceId))
    ),
    %% Trace file must start out empty before the message is published.
    ?retry(
        _Interval0 = 200,
        _NAttempts0 = 20,
        ?assertEqual(<<>>, read_rule_trace_file(TraceName, Now))
    ),
    Msg = emqx_message:make(RuleTopic, <<"{\"id\": 1}">>),
    emqx:publish(Msg),
    %% Wait until the rule metrics show exactly one successful action.
    ?retry(
        _Interval = 500,
        _NAttempts = 20,
        ?assertMatch(
            #{
                counters := #{
                    'matched' := 1,
                    'actions.failed' := 0,
                    'actions.failed.unknown' := 0,
                    'actions.success' := 1,
                    'actions.total' := 1
                }
            },
            emqx_metrics_worker:get_metrics(rule_metrics, RuleId)
        )
    ),
    ok = emqx_trace_handler_SUITE:filesync(TraceName, ruleid),
    {ok, Bin} = file:read_file(emqx_trace:log_file(TraceName, Now)),
    %% NOTE(review): `Bin` is already bound above, so the first line of this
    %% retry body is an equality assertion against the earlier read — if the
    %% trace file grows between reads the retry can never succeed; confirm
    %% the re-read/match is intentional.
    ?retry(
        _Interval0 = 200,
        _NAttempts0 = 20,
        begin
            Bin = read_rule_trace_file(TraceName, Now),
            ?assertNotEqual(nomatch, binary:match(Bin, [<<"rule_activated">>])),
            ?assertNotEqual(nomatch, binary:match(Bin, [<<"SQL_yielded_result">>])),
            ?assertNotEqual(nomatch, binary:match(Bin, [<<"bridge_action">>])),
            ?assertNotEqual(nomatch, binary:match(Bin, [<<"action_template_rendered">>])),
            ?assertNotEqual(nomatch, binary:match(Bin, [<<"QUERY_ASYNC">>]))
        end
    ),
    emqx_trace:delete(TraceName),
    ok.
%% Flush the rule-id trace handler for `TraceName` and return the current
%% contents of its trace log file (the file whose window starts at `Since`).
read_rule_trace_file(TraceName, Since) ->
    emqx_trace:check(),
    ok = emqx_trace_handler_SUITE:filesync(TraceName, ruleid),
    LogFile = emqx_trace:log_file(TraceName, Since),
    {ok, Contents} = file:read_file(LogFile),
    Contents.
t_async_free_retries(Config) -> t_async_free_retries(Config) ->
#{port := Port} = ?config(http_server, Config), #{port := Port} = ?config(http_server, Config),
_BridgeID = make_bridge(#{ _BridgeID = make_bridge(#{
@ -518,7 +499,7 @@ t_async_common_retries(Config) ->
ok. ok.
t_bad_bridge_config(_Config) -> t_bad_bridge_config(_Config) ->
BridgeConfig = bridge_async_config(#{port => 12345}), BridgeConfig = emqx_bridge_http_test_lib:bridge_async_config(#{port => 12345}),
?assertMatch( ?assertMatch(
{ok, {ok,
{{_, 201, _}, _Headers, #{ {{_, 201, _}, _Headers, #{
@ -540,7 +521,7 @@ t_bad_bridge_config(_Config) ->
t_start_stop(Config) -> t_start_stop(Config) ->
#{port := Port} = ?config(http_server, Config), #{port := Port} = ?config(http_server, Config),
BridgeConfig = bridge_async_config(#{ BridgeConfig = emqx_bridge_http_test_lib:bridge_async_config(#{
type => ?BRIDGE_TYPE, type => ?BRIDGE_TYPE,
name => ?BRIDGE_NAME, name => ?BRIDGE_NAME,
port => Port port => Port
@ -554,7 +535,7 @@ t_path_not_found(Config) ->
begin begin
#{port := Port, path := Path} = ?config(http_server, Config), #{port := Port, path := Path} = ?config(http_server, Config),
MQTTTopic = <<"t/webhook">>, MQTTTopic = <<"t/webhook">>,
BridgeConfig = bridge_async_config(#{ BridgeConfig = emqx_bridge_http_test_lib:bridge_async_config(#{
type => ?BRIDGE_TYPE, type => ?BRIDGE_TYPE,
name => ?BRIDGE_NAME, name => ?BRIDGE_NAME,
local_topic => MQTTTopic, local_topic => MQTTTopic,
@ -593,7 +574,7 @@ t_too_many_requests(Config) ->
begin begin
#{port := Port, path := Path} = ?config(http_server, Config), #{port := Port, path := Path} = ?config(http_server, Config),
MQTTTopic = <<"t/webhook">>, MQTTTopic = <<"t/webhook">>,
BridgeConfig = bridge_async_config(#{ BridgeConfig = emqx_bridge_http_test_lib:bridge_async_config(#{
type => ?BRIDGE_TYPE, type => ?BRIDGE_TYPE,
name => ?BRIDGE_NAME, name => ?BRIDGE_NAME,
local_topic => MQTTTopic, local_topic => MQTTTopic,
@ -633,7 +614,7 @@ t_rule_action_expired(Config) ->
?check_trace( ?check_trace(
begin begin
RuleTopic = <<"t/webhook/rule">>, RuleTopic = <<"t/webhook/rule">>,
BridgeConfig = bridge_async_config(#{ BridgeConfig = emqx_bridge_http_test_lib:bridge_async_config(#{
type => ?BRIDGE_TYPE, type => ?BRIDGE_TYPE,
name => ?BRIDGE_NAME, name => ?BRIDGE_NAME,
host => "non.existent.host", host => "non.existent.host",
@ -689,7 +670,7 @@ t_bridge_probes_header_atoms(Config) ->
?check_trace( ?check_trace(
begin begin
LocalTopic = <<"t/local/topic">>, LocalTopic = <<"t/local/topic">>,
BridgeConfig0 = bridge_async_config(#{ BridgeConfig0 = emqx_bridge_http_test_lib:bridge_async_config(#{
type => ?BRIDGE_TYPE, type => ?BRIDGE_TYPE,
name => ?BRIDGE_NAME, name => ?BRIDGE_NAME,
port => Port, port => Port,

View File

@ -0,0 +1,161 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_bridge_http_test_lib).
-export([
bridge_type/0,
bridge_name/0,
make_bridge/1,
bridge_async_config/1,
init_http_success_server/1,
success_http_handler/0
]).
-define(BRIDGE_TYPE, bridge_type()).
-define(BRIDGE_NAME, bridge_name()).
%% Bridge type used by all helpers in this test library.
bridge_type() ->
    <<"webhook">>.
%% Default bridge name: this module's name as a binary.
bridge_name() ->
    atom_to_binary(?MODULE).
%% Create (or replace) the default test bridge using `Config` merged with
%% the default type/name, and return the bridge id.  Crashes on failure
%% via the `{ok, _}` match.
make_bridge(Config) ->
    Type = ?BRIDGE_TYPE,
    Name = ?BRIDGE_NAME,
    BridgeConfig = bridge_async_config(Config#{
        name => Name,
        type => Type
    }),
    {ok, _} = emqx_bridge:create(
        Type,
        Name,
        BridgeConfig
    ),
    emqx_bridge_resource:bridge_id(Type, Name).
%% Build a parsed-and-checked HOCON config map for an HTTP bridge pointing
%% at http://Host:Port/Path.  Only `port` is required in `Config`; every
%% other key falls back to a default suitable for async tests.
bridge_async_config(#{port := Port} = Config) ->
    Type = maps:get(type, Config, ?BRIDGE_TYPE),
    Name = maps:get(name, Config, ?BRIDGE_NAME),
    Host = maps:get(host, Config, "localhost"),
    Path = maps:get(path, Config, ""),
    PoolSize = maps:get(pool_size, Config, 1),
    QueryMode = maps:get(query_mode, Config, "async"),
    ConnectTimeout = maps:get(connect_timeout, Config, "1s"),
    RequestTimeout = maps:get(request_timeout, Config, "10s"),
    ResumeInterval = maps:get(resume_interval, Config, "1s"),
    HealthCheckInterval = maps:get(health_check_interval, Config, "200ms"),
    ResourceRequestTTL = maps:get(resource_request_ttl, Config, "infinity"),
    %% Optional local_topic: rendered as a config line only when present.
    LocalTopic =
        case maps:find(local_topic, Config) of
            {ok, LT} ->
                lists:flatten(["local_topic = \"", LT, "\""]);
            error ->
                ""
        end,
    %% NOTE(review): connect_timeout and request_ttl are rendered with ~p,
    %% so string values (e.g. "1s") are emitted with an extra pair of
    %% quotes, while the other duration fields use ~s — confirm this
    %% asymmetry is intended / tolerated by the HOCON parser.
    ConfigString = io_lib:format(
        "bridges.~s.~s {\n"
        "  url = \"http://~s:~p~s\"\n"
        "  connect_timeout = \"~p\"\n"
        "  enable = true\n"
        %% local_topic
        "  ~s\n"
        "  enable_pipelining = 100\n"
        "  max_retries = 2\n"
        "  method = \"post\"\n"
        "  pool_size = ~p\n"
        "  pool_type = \"random\"\n"
        "  request_timeout = \"~s\"\n"
        "  body = \"${id}\"\n"
        "  resource_opts {\n"
        "    inflight_window = 100\n"
        "    health_check_interval = \"~s\"\n"
        "    max_buffer_bytes = \"1GB\"\n"
        "    query_mode = \"~s\"\n"
        "    request_ttl = \"~p\"\n"
        "    resume_interval = \"~s\"\n"
        "    start_after_created = \"true\"\n"
        "    start_timeout = \"5s\"\n"
        "    worker_pool_size = \"1\"\n"
        "  }\n"
        "  ssl {\n"
        "    enable = false\n"
        "  }\n"
        "}\n",
        [
            Type,
            Name,
            Host,
            Port,
            Path,
            ConnectTimeout,
            LocalTopic,
            PoolSize,
            RequestTimeout,
            HealthCheckInterval,
            QueryMode,
            ResourceRequestTTL,
            ResumeInterval
        ]
    ),
    ct:pal(ConfigString),
    parse_and_check(ConfigString, Type, Name).
%% Parse a HOCON bridge config string, validate it against the bridge
%% schema (the checked result is discarded) and return the raw config map
%% for the requested bridge type and name.
parse_and_check(ConfigString, BridgeType, Name) ->
    {ok, Parsed} = hocon:binary(ConfigString, #{format => map}),
    _ = hocon_tconf:check_plain(
        emqx_bridge_schema,
        Parsed,
        #{required => false, atom_key => false}
    ),
    #{<<"bridges">> := #{BridgeType := #{Name := BridgeConf}}} = Parsed,
    BridgeConf.
%% Cowboy handler fun that always replies 200 "hello" and forwards each
%% request's headers and body to the test process as `{http, Headers, Body}`.
success_http_handler() ->
    success_http_handler(#{response_delay => 0}).

%% Same as success_http_handler/0, but sleeps `response_delay` milliseconds
%% (when positive) before forwarding the request and replying.
success_http_handler(Opts) ->
    ResponseDelay = maps:get(response_delay, Opts, 0),
    TestPid = self(),
    fun(Req0, State) ->
        {ok, Body, Req} = cowboy_req:read_body(Req0),
        Headers = cowboy_req:headers(Req),
        ct:pal("http request received: ~p", [
            #{body => Body, headers => Headers, response_delay => ResponseDelay}
        ]),
        %% Simulate a slow server when a positive delay is configured.
        ResponseDelay > 0 andalso timer:sleep(ResponseDelay),
        TestPid ! {http, Headers, Body},
        Rep = cowboy_req:reply(
            200,
            #{<<"content-type">> => <<"text/plain">>},
            <<"hello">>,
            Req
        ),
        {ok, Rep, State}
    end.
%% Start the shared HTTP test server on a random port with the delayed
%% success handler installed, and extend the CT `Config` proplist with the
%% server coordinates, the configured response delay and the bridge name.
init_http_success_server(Config) ->
    HTTPPath = <<"/path">>,
    ServerSSLOpts = false,
    {ok, {HTTPPort, _Pid}} = emqx_bridge_http_connector_test_server:start_link(
        _Port = random, HTTPPath, ServerSSLOpts
    ),
    %% Fixed artificial server-side delay (ms) used by timing-sensitive tests.
    ResponseDelayMS = 500,
    ok = emqx_bridge_http_connector_test_server:set_handler(
        success_http_handler(#{response_delay => ResponseDelayMS})
    ),
    [
        {http_server, #{port => HTTPPort, path => HTTPPath}},
        {response_delay_ms, ResponseDelayMS},
        {bridge_name, ?BRIDGE_NAME}
        | Config
    ].

View File

@ -42,7 +42,7 @@
%% api %% api
write_syntax_type() -> write_syntax_type() ->
typerefl:alias("string", write_syntax()). typerefl:alias("template", write_syntax()).
%% Examples %% Examples
conn_bridge_examples(Method) -> conn_bridge_examples(Method) ->

View File

@ -59,6 +59,9 @@
-define(DEFAULT_TIMESTAMP_TMPL, "${timestamp}"). -define(DEFAULT_TIMESTAMP_TMPL, "${timestamp}").
-define(set_tag, set_tag).
-define(set_field, set_field).
-define(IS_HTTP_ERROR(STATUS_CODE), -define(IS_HTTP_ERROR(STATUS_CODE),
(is_integer(STATUS_CODE) andalso (is_integer(STATUS_CODE) andalso
(STATUS_CODE < 200 orelse STATUS_CODE >= 300)) (STATUS_CODE < 200 orelse STATUS_CODE >= 300))
@ -710,8 +713,8 @@ line_to_point(
precision := Precision precision := Precision
} = Item } = Item
) -> ) ->
{_, EncodedTags} = maps:fold(fun maps_config_to_data/3, {Data, #{}}, Tags), {_, EncodedTags, _} = maps:fold(fun maps_config_to_data/3, {Data, #{}, ?set_tag}, Tags),
{_, EncodedFields} = maps:fold(fun maps_config_to_data/3, {Data, #{}}, Fields), {_, EncodedFields, _} = maps:fold(fun maps_config_to_data/3, {Data, #{}, ?set_field}, Fields),
maps:without([precision], Item#{ maps:without([precision], Item#{
measurement => emqx_placeholder:proc_tmpl(Measurement, Data), measurement => emqx_placeholder:proc_tmpl(Measurement, Data),
tags => EncodedTags, tags => EncodedTags,
@ -727,34 +730,43 @@ time_unit(ms) -> millisecond;
time_unit(us) -> microsecond; time_unit(us) -> microsecond;
time_unit(ns) -> nanosecond. time_unit(ns) -> nanosecond.
maps_config_to_data(K, V, {Data, Res}) -> maps_config_to_data(K, V, {Data, Res, SetType}) ->
KTransOptions = #{return => rawlist, var_trans => fun key_filter/1}, KTransOptions = #{return => rawlist, var_trans => fun key_filter/1},
VTransOptions = #{return => rawlist, var_trans => fun data_filter/1}, VTransOptions = #{return => rawlist, var_trans => fun data_filter/1},
NK = emqx_placeholder:proc_tmpl(K, Data, KTransOptions), NK = emqx_placeholder:proc_tmpl(K, Data, KTransOptions),
NV = proc_quoted(V, Data, VTransOptions), NV = proc_quoted(V, Data, VTransOptions),
case {NK, NV} of case {NK, NV} of
{[undefined], _} -> {[undefined], _} ->
{Data, Res}; {Data, Res, SetType};
%% undefined value in normal format [undefined] or int/uint format [undefined, <<"i">>] %% undefined value in normal format [undefined] or int/uint format [undefined, <<"i">>]
{_, [undefined | _]} -> {_, [undefined | _]} ->
{Data, Res}; {Data, Res, SetType};
{_, {quoted, [undefined | _]}} -> {_, {quoted, [undefined | _]}} ->
{Data, Res}; {Data, Res, SetType};
_ -> _ ->
{Data, Res#{ NRes = Res#{
list_to_binary(NK) => value_type(NV, tmpl_type(V)) list_to_binary(NK) => value_type(NV, #{
}} tmpl_type => tmpl_type(V), set_type => SetType
})
},
{Data, NRes, SetType}
end. end.
value_type([Number], #{set_type := ?set_tag}) when is_number(Number) ->
%% all `tag` values are treated as string
%% See also: https://docs.influxdata.com/influxdb/v2/reference/syntax/line-protocol/#tag-set
emqx_utils_conv:bin(Number);
value_type([Str], #{set_type := ?set_tag}) when is_binary(Str) ->
Str;
value_type({quoted, ValList}, _) -> value_type({quoted, ValList}, _) ->
{string_list, ValList}; {string_list, ValList};
value_type([Int, <<"i">>], mixed) when is_integer(Int) -> value_type([Int, <<"i">>], #{tmpl_type := mixed}) when is_integer(Int) ->
{int, Int}; {int, Int};
value_type([UInt, <<"u">>], mixed) when is_integer(UInt) -> value_type([UInt, <<"u">>], #{tmpl_type := mixed}) when is_integer(UInt) ->
{uint, UInt}; {uint, UInt};
%% write `1`, `1.0`, `-1.0` all as float %% write `1`, `1.0`, `-1.0` all as float
%% see also: https://docs.influxdata.com/influxdb/v2.7/reference/syntax/line-protocol/#float %% see also: https://docs.influxdata.com/influxdb/v2.7/reference/syntax/line-protocol/#float
value_type([Number], _) when is_number(Number) -> value_type([Number], #{set_type := ?set_field}) when is_number(Number) ->
{float, Number}; {float, Number};
value_type([<<"t">>], _) -> value_type([<<"t">>], _) ->
't'; 't';
@ -776,9 +788,9 @@ value_type([<<"FALSE">>], _) ->
'FALSE'; 'FALSE';
value_type([<<"False">>], _) -> value_type([<<"False">>], _) ->
'False'; 'False';
value_type([Str], variable) when is_binary(Str) -> value_type([Str], #{tmpl_type := variable}) when is_binary(Str) ->
Str; Str;
value_type([Str], literal) when is_binary(Str) -> value_type([Str], #{tmpl_type := literal, set_type := ?set_field}) when is_binary(Str) ->
%% if Str is a literal string suffixed with `i` or `u`, we should convert it to int/uint. %% if Str is a literal string suffixed with `i` or `u`, we should convert it to int/uint.
%% otherwise, we should convert it to float. %% otherwise, we should convert it to float.
NumStr = binary:part(Str, 0, byte_size(Str) - 1), NumStr = binary:part(Str, 0, byte_size(Str) - 1),

View File

@ -864,6 +864,53 @@ t_any_num_as_float(Config) ->
TimeReturned = pad_zero(TimeReturned0), TimeReturned = pad_zero(TimeReturned0),
?assertEqual(TsStr, TimeReturned). ?assertEqual(TsStr, TimeReturned).
%% Verify that literal numeric values in the InfluxDB tag set are written
%% as strings while the field set keeps its numeric typing: publish one
%% message through a bridge whose write_syntax mixes literal and
%% placeholder tags/fields, then read back and compare the persisted data
%% and timestamp.
t_tag_set_use_literal_value(Config) ->
    QueryMode = ?config(query_mode, Config),
    %% Use the current time (ns) both as the explicit line-protocol
    %% timestamp and as the expected RFC3339 time on read-back.
    Const = erlang:system_time(nanosecond),
    ConstBin = integer_to_binary(Const),
    TsStr = iolist_to_binary(
        calendar:system_time_to_rfc3339(Const, [{unit, nanosecond}, {offset, "Z"}])
    ),
    ?assertMatch(
        {ok, _},
        create_bridge(
            Config,
            #{
                <<"write_syntax">> =>
                    <<"mqtt,clientid=${clientid},tag_key1=100,tag_key2=123.4,tag_key3=66i,tag_key4=${payload.float_dp}",
                        " ",
                        "field_key1=100.1,field_key2=100i,field_key3=${payload.float_dp},bar=5i",
                        " ", ConstBin/binary>>
            }
        )
    ),
    ClientId = emqx_guid:to_hexstr(emqx_guid:gen()),
    Payload = #{
        %% with decimal point
        float_dp => 123.4
    },
    SentData = #{
        <<"clientid">> => ClientId,
        <<"topic">> => atom_to_binary(?FUNCTION_NAME),
        <<"payload">> => Payload,
        <<"timestamp">> => erlang:system_time(millisecond)
    },
    case QueryMode of
        sync ->
            ?assertMatch({ok, 204, _}, send_message(Config, SentData)),
            ok;
        async ->
            ?assertMatch(ok, send_message(Config, SentData))
    end,
    %% sleep is still needed even in sync mode, or we would get an empty result sometimes
    ct:sleep(1500),
    PersistedData = query_by_clientid(ClientId, Config),
    Expected = #{field_key1 => <<"100.1">>, field_key2 => <<"100">>, field_key3 => <<"123.4">>},
    assert_persisted_data(ClientId, Expected, PersistedData),
    TimeReturned0 = maps:get(<<"_time">>, maps:get(<<"field_key1">>, PersistedData)),
    TimeReturned = pad_zero(TimeReturned0),
    ?assertEqual(TsStr, TimeReturned).
t_bad_timestamp(Config) -> t_bad_timestamp(Config) ->
InfluxDBType = ?config(influxdb_type, Config), InfluxDBType = ?config(influxdb_type, Config),
InfluxDBName = ?config(influxdb_name, Config), InfluxDBName = ?config(influxdb_name, Config),

View File

@ -5,6 +5,8 @@
-ifndef(EMQX_BRIDGE_IOTDB_HRL). -ifndef(EMQX_BRIDGE_IOTDB_HRL).
-define(EMQX_BRIDGE_IOTDB_HRL, true). -define(EMQX_BRIDGE_IOTDB_HRL, true).
-define(VSN_1_3_X, 'v1.3.x').
-define(VSN_1_2_X, 'v1.2.x').
-define(VSN_1_1_X, 'v1.1.x'). -define(VSN_1_1_X, 'v1.1.x').
-define(VSN_1_0_X, 'v1.0.x'). -define(VSN_1_0_X, 'v1.0.x').
-define(VSN_0_13_X, 'v0.13.x'). -define(VSN_0_13_X, 'v0.13.x').

View File

@ -66,12 +66,7 @@ fields(action_config) ->
] ]
); );
fields(action_resource_opts) -> fields(action_resource_opts) ->
lists:filter( emqx_bridge_v2_schema:action_resource_opts_fields();
fun({K, _V}) ->
not lists:member(K, unsupported_opts())
end,
emqx_bridge_v2_schema:action_resource_opts_fields()
);
fields(action_parameters) -> fields(action_parameters) ->
[ [
{is_aligned, {is_aligned,
@ -84,7 +79,7 @@ fields(action_parameters) ->
)}, )},
{device_id, {device_id,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
desc => ?DESC("config_device_id") desc => ?DESC("config_device_id")
} }
@ -114,7 +109,7 @@ fields(action_parameters_data) ->
)}, )},
{measurement, {measurement,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
required => true, required => true,
desc => ?DESC("config_parameters_measurement") desc => ?DESC("config_parameters_measurement")
@ -122,7 +117,9 @@ fields(action_parameters_data) ->
)}, )},
{data_type, {data_type,
mk( mk(
hoconsc:union([enum([text, boolean, int32, int64, float, double]), binary()]), hoconsc:union([
enum([text, boolean, int32, int64, float, double]), emqx_schema:template()
]),
#{ #{
required => true, required => true,
desc => ?DESC("config_parameters_data_type") desc => ?DESC("config_parameters_data_type")
@ -130,7 +127,7 @@ fields(action_parameters_data) ->
)}, )},
{value, {value,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
required => true, required => true,
desc => ?DESC("config_parameters_value") desc => ?DESC("config_parameters_value")
@ -150,7 +147,7 @@ fields("get_bridge_v2") ->
fields("config") -> fields("config") ->
basic_config() ++ request_config(); basic_config() ++ request_config();
fields("creation_opts") -> fields("creation_opts") ->
proplists_without(unsupported_opts(), emqx_resource_schema:fields("creation_opts")); emqx_resource_schema:fields("creation_opts");
fields(auth_basic) -> fields(auth_basic) ->
[ [
{username, mk(binary(), #{required => true, desc => ?DESC("config_auth_basic_username")})}, {username, mk(binary(), #{required => true, desc => ?DESC("config_auth_basic_username")})},
@ -220,10 +217,10 @@ basic_config() ->
)}, )},
{iotdb_version, {iotdb_version,
mk( mk(
hoconsc:enum([?VSN_1_1_X, ?VSN_1_0_X, ?VSN_0_13_X]), hoconsc:enum([?VSN_1_3_X, ?VSN_1_1_X, ?VSN_1_0_X, ?VSN_0_13_X]),
#{ #{
desc => ?DESC("config_iotdb_version"), desc => ?DESC("config_iotdb_version"),
default => ?VSN_1_1_X default => ?VSN_1_3_X
} }
)} )}
] ++ resource_creation_opts() ++ ] ++ resource_creation_opts() ++
@ -268,12 +265,6 @@ resource_creation_opts() ->
)} )}
]. ].
unsupported_opts() ->
[
batch_size,
batch_time
].
%%------------------------------------------------------------------------------------------------- %%-------------------------------------------------------------------------------------------------
%% v2 examples %% v2 examples
%%------------------------------------------------------------------------------------------------- %%-------------------------------------------------------------------------------------------------

View File

@ -21,6 +21,8 @@
on_get_status/2, on_get_status/2,
on_query/3, on_query/3,
on_query_async/4, on_query_async/4,
on_batch_query/3,
on_batch_query_async/4,
on_add_channel/4, on_add_channel/4,
on_remove_channel/3, on_remove_channel/3,
on_get_channels/1, on_get_channels/1,
@ -94,7 +96,7 @@ connector_example_values() ->
name => <<"iotdb_connector">>, name => <<"iotdb_connector">>,
type => iotdb, type => iotdb,
enable => true, enable => true,
iotdb_version => ?VSN_1_1_X, iotdb_version => ?VSN_1_3_X,
authentication => #{ authentication => #{
<<"username">> => <<"root">>, <<"username">> => <<"root">>,
<<"password">> => <<"******">> <<"password">> => <<"******">>
@ -133,10 +135,10 @@ fields("connection_fields") ->
)}, )},
{iotdb_version, {iotdb_version,
mk( mk(
hoconsc:enum([?VSN_1_1_X, ?VSN_1_0_X, ?VSN_0_13_X]), hoconsc:enum([?VSN_1_3_X, ?VSN_1_1_X, ?VSN_1_0_X, ?VSN_0_13_X]),
#{ #{
desc => ?DESC(emqx_bridge_iotdb, "config_iotdb_version"), desc => ?DESC(emqx_bridge_iotdb, "config_iotdb_version"),
default => ?VSN_1_1_X default => ?VSN_1_3_X
} }
)}, )},
{authentication, {authentication,
@ -280,8 +282,8 @@ on_query(
state => emqx_utils:redact(State) state => emqx_utils:redact(State)
}), }),
case try_render_message(Req, IoTDBVsn, Channels) of case try_render_messages([Req], IoTDBVsn, Channels) of
{ok, IoTDBPayload} -> {ok, [IoTDBPayload]} ->
handle_response( handle_response(
emqx_bridge_http_connector:on_query( emqx_bridge_http_connector:on_query(
InstanceId, {ChannelId, IoTDBPayload}, State InstanceId, {ChannelId, IoTDBPayload}, State
@ -306,8 +308,8 @@ on_query_async(
send_message => Req, send_message => Req,
state => emqx_utils:redact(State) state => emqx_utils:redact(State)
}), }),
case try_render_message(Req, IoTDBVsn, Channels) of case try_render_messages([Req], IoTDBVsn, Channels) of
{ok, IoTDBPayload} -> {ok, [IoTDBPayload]} ->
ReplyFunAndArgs = ReplyFunAndArgs =
{ {
fun(Result) -> fun(Result) ->
@ -323,6 +325,71 @@ on_query_async(
Error Error
end. end.
%% Asynchronous batch handler: render all requests of the batch (all for
%% the same channel), then send each resulting IoTDB insert payload via
%% the HTTP connector's async query.  Returns the render error unchanged
%% when rendering fails.
on_batch_query_async(
    InstId,
    Requests,
    Callback,
    #{iotdb_version := IoTDBVsn, channels := Channels} = State
) ->
    ?tp(iotdb_bridge_on_batch_query_async, #{instance_id => InstId}),
    %% All requests in a batch share the channel of the first request.
    [{ChannelId, _Message} | _] = Requests,
    ?SLOG(debug, #{
        msg => "iotdb_bridge_on_query_batch_async_called",
        instance_id => InstId,
        send_message => Requests,
        state => emqx_utils:redact(State)
    }),
    case try_render_messages(Requests, IoTDBVsn, Channels) of
        {ok, IoTDBPayloads} ->
            %% NOTE(review): the same reply fun is installed for every
            %% payload, so the callback may fire once per payload for a
            %% single batch — confirm the resource layer tolerates this.
            ReplyFunAndArgs =
                {
                    fun(Result) ->
                        Response = handle_response(Result),
                        emqx_resource:apply_reply_fun(Callback, Response)
                    end,
                    []
                },
            lists:map(
                fun(IoTDBPayload) ->
                    emqx_bridge_http_connector:on_query_async(
                        InstId, {ChannelId, IoTDBPayload}, ReplyFunAndArgs, State
                    )
                end,
                IoTDBPayloads
            );
        Error ->
            Error
    end.
%% Synchronous batch handler: render all requests of the batch (all for
%% the same channel), send each resulting IoTDB insert payload via the
%% HTTP connector, and return the list of per-payload results.  Returns
%% the render error unchanged when rendering fails.
%%
%% Fix: the head previously matched only single-element batches
%% (`[{ChannelId, _Message}] = Requests`), causing a function_clause
%% crash for any batch of size > 1; it now accepts the same shape as
%% on_batch_query_async/4.
on_batch_query(
    InstId,
    [{ChannelId, _Message} | _] = Requests,
    #{iotdb_version := IoTDBVsn, channels := Channels} = State
) ->
    ?tp(iotdb_bridge_on_batch_query, #{instance_id => InstId}),
    ?SLOG(debug, #{
        msg => "iotdb_bridge_on_batch_query_called",
        instance_id => InstId,
        send_message => Requests,
        state => emqx_utils:redact(State)
    }),
    case try_render_messages(Requests, IoTDBVsn, Channels) of
        {ok, IoTDBPayloads} ->
            lists:map(
                fun(IoTDBPayload) ->
                    handle_response(
                        emqx_bridge_http_connector:on_query(
                            InstId, {ChannelId, IoTDBPayload}, State
                        )
                    )
                end,
                IoTDBPayloads
            );
        Error ->
            Error
    end.
on_add_channel( on_add_channel(
InstanceId, InstanceId,
#{iotdb_version := Version, channels := Channels} = OldState0, #{iotdb_version := Version, channels := Channels} = OldState0,
@ -342,6 +409,7 @@ on_add_channel(
Path = Path =
case Version of case Version of
?VSN_1_1_X -> InsertTabletPathV2; ?VSN_1_1_X -> InsertTabletPathV2;
?VSN_1_3_X -> InsertTabletPathV2;
_ -> InsertTabletPathV1 _ -> InsertTabletPathV1
end, end,
@ -442,14 +510,14 @@ maybe_preproc_tmpl(Value) when is_binary(Value) ->
maybe_preproc_tmpl(Value) -> maybe_preproc_tmpl(Value) ->
Value. Value.
proc_data(PreProcessedData, Msg) -> proc_data(PreProcessedData, Msg, IoTDBVsn) ->
NowNS = erlang:system_time(nanosecond), NowNS = erlang:system_time(nanosecond),
Nows = #{ Nows = #{
now_ms => erlang:convert_time_unit(NowNS, nanosecond, millisecond), now_ms => erlang:convert_time_unit(NowNS, nanosecond, millisecond),
now_us => erlang:convert_time_unit(NowNS, nanosecond, microsecond), now_us => erlang:convert_time_unit(NowNS, nanosecond, microsecond),
now_ns => NowNS now_ns => NowNS
}, },
proc_data(PreProcessedData, Msg, Nows, []). proc_data(PreProcessedData, Msg, Nows, IoTDBVsn, []).
proc_data( proc_data(
[ [
@ -463,15 +531,16 @@ proc_data(
], ],
Msg, Msg,
Nows, Nows,
IotDbVsn,
Acc Acc
) -> ) ->
DataType = list_to_binary( DataType = list_to_binary(
string:uppercase(binary_to_list(emqx_placeholder:proc_tmpl(DataType0, Msg))) string:uppercase(binary_to_list(emqx_placeholder:proc_tmpl(DataType0, Msg)))
), ),
try try
proc_data(T, Msg, Nows, [ proc_data(T, Msg, Nows, IotDbVsn, [
#{ #{
timestamp => iot_timestamp(TimestampTkn, Msg, Nows), timestamp => iot_timestamp(IotDbVsn, TimestampTkn, Msg, Nows),
measurement => emqx_placeholder:proc_tmpl(Measurement, Msg), measurement => emqx_placeholder:proc_tmpl(Measurement, Msg),
data_type => DataType, data_type => DataType,
value => proc_value(DataType, ValueTkn, Msg) value => proc_value(DataType, ValueTkn, Msg)
@ -485,23 +554,28 @@ proc_data(
?SLOG(debug, #{exception => Error, reason => Reason, stacktrace => Stacktrace}), ?SLOG(debug, #{exception => Error, reason => Reason, stacktrace => Stacktrace}),
{error, invalid_data} {error, invalid_data}
end; end;
proc_data([], _Msg, _Nows, Acc) -> proc_data([], _Msg, _Nows, _IotDbVsn, Acc) ->
{ok, lists:reverse(Acc)}. {ok, lists:reverse(Acc)}.
iot_timestamp(Timestamp, _, _) when is_integer(Timestamp) -> iot_timestamp(_IotDbVsn, Timestamp, _, _) when is_integer(Timestamp) ->
Timestamp; Timestamp;
iot_timestamp(TimestampTkn, Msg, Nows) -> iot_timestamp(IotDbVsn, TimestampTkn, Msg, Nows) ->
iot_timestamp(emqx_placeholder:proc_tmpl(TimestampTkn, Msg), Nows). iot_timestamp(IotDbVsn, emqx_placeholder:proc_tmpl(TimestampTkn, Msg), Nows).
iot_timestamp(<<"now_us">>, #{now_us := NowUs}) -> %% > v1.3.0 don't allow write nanoseconds nor microseconds
iot_timestamp(?VSN_1_3_X, <<"now_us">>, #{now_ms := NowMs}) ->
NowMs;
iot_timestamp(?VSN_1_3_X, <<"now_ns">>, #{now_ms := NowMs}) ->
NowMs;
iot_timestamp(_IotDbVsn, <<"now_us">>, #{now_us := NowUs}) ->
NowUs; NowUs;
iot_timestamp(<<"now_ns">>, #{now_ns := NowNs}) -> iot_timestamp(_IotDbVsn, <<"now_ns">>, #{now_ns := NowNs}) ->
NowNs; NowNs;
iot_timestamp(Timestamp, #{now_ms := NowMs}) when iot_timestamp(_IotDbVsn, Timestamp, #{now_ms := NowMs}) when
Timestamp =:= <<"now">>; Timestamp =:= <<"now_ms">>; Timestamp =:= <<>> Timestamp =:= <<"now">>; Timestamp =:= <<"now_ms">>; Timestamp =:= <<>>
-> ->
NowMs; NowMs;
iot_timestamp(Timestamp, _) when is_binary(Timestamp) -> iot_timestamp(_IotDbVsn, Timestamp, _) when is_binary(Timestamp) ->
binary_to_integer(Timestamp). binary_to_integer(Timestamp).
proc_value(<<"TEXT">>, ValueTkn, Msg) -> proc_value(<<"TEXT">>, ValueTkn, Msg) ->
@ -526,6 +600,7 @@ replace_var(Val, _Data) ->
convert_bool(B) when is_boolean(B) -> B; convert_bool(B) when is_boolean(B) -> B;
convert_bool(null) -> null; convert_bool(null) -> null;
convert_bool(undefined) -> null;
convert_bool(1) -> true; convert_bool(1) -> true;
convert_bool(0) -> false; convert_bool(0) -> false;
convert_bool(<<"1">>) -> true; convert_bool(<<"1">>) -> true;
@ -568,11 +643,10 @@ convert_float(undefined) ->
make_iotdb_insert_request(DataList, IsAligned, DeviceId, IoTDBVsn) -> make_iotdb_insert_request(DataList, IsAligned, DeviceId, IoTDBVsn) ->
InitAcc = #{timestamps => [], measurements => [], dtypes => [], values => []}, InitAcc = #{timestamps => [], measurements => [], dtypes => [], values => []},
Rows = replace_dtypes(aggregate_rows(DataList, InitAcc), IoTDBVsn), Rows = replace_dtypes(aggregate_rows(DataList, InitAcc), IoTDBVsn),
{ok, maps:merge(Rows, #{
maps:merge(Rows, #{ iotdb_field_key(is_aligned, IoTDBVsn) => IsAligned,
iotdb_field_key(is_aligned, IoTDBVsn) => IsAligned, iotdb_field_key(device_id, IoTDBVsn) => DeviceId
iotdb_field_key(device_id, IoTDBVsn) => DeviceId }).
})}.
replace_dtypes(Rows0, IoTDBVsn) -> replace_dtypes(Rows0, IoTDBVsn) ->
{Types, Rows} = maps:take(dtypes, Rows0), {Types, Rows} = maps:take(dtypes, Rows0),
@ -632,18 +706,24 @@ insert_value(1, Data, [Value | Values]) ->
insert_value(Index, Data, [Value | Values]) -> insert_value(Index, Data, [Value | Values]) ->
[[null | Value] | insert_value(Index - 1, Data, Values)]. [[null | Value] | insert_value(Index - 1, Data, Values)].
iotdb_field_key(is_aligned, ?VSN_1_3_X) ->
<<"is_aligned">>;
iotdb_field_key(is_aligned, ?VSN_1_1_X) -> iotdb_field_key(is_aligned, ?VSN_1_1_X) ->
<<"is_aligned">>; <<"is_aligned">>;
iotdb_field_key(is_aligned, ?VSN_1_0_X) -> iotdb_field_key(is_aligned, ?VSN_1_0_X) ->
<<"is_aligned">>; <<"is_aligned">>;
iotdb_field_key(is_aligned, ?VSN_0_13_X) -> iotdb_field_key(is_aligned, ?VSN_0_13_X) ->
<<"isAligned">>; <<"isAligned">>;
iotdb_field_key(device_id, ?VSN_1_3_X) ->
<<"device">>;
iotdb_field_key(device_id, ?VSN_1_1_X) -> iotdb_field_key(device_id, ?VSN_1_1_X) ->
<<"device">>; <<"device">>;
iotdb_field_key(device_id, ?VSN_1_0_X) -> iotdb_field_key(device_id, ?VSN_1_0_X) ->
<<"device">>; <<"device">>;
iotdb_field_key(device_id, ?VSN_0_13_X) -> iotdb_field_key(device_id, ?VSN_0_13_X) ->
<<"deviceId">>; <<"deviceId">>;
iotdb_field_key(data_types, ?VSN_1_3_X) ->
<<"data_types">>;
iotdb_field_key(data_types, ?VSN_1_1_X) -> iotdb_field_key(data_types, ?VSN_1_1_X) ->
<<"data_types">>; <<"data_types">>;
iotdb_field_key(data_types, ?VSN_1_0_X) -> iotdb_field_key(data_types, ?VSN_1_0_X) ->
@ -706,14 +786,37 @@ preproc_data_template(DataList) ->
DataList DataList
). ).
try_render_message({ChannelId, Msg}, IoTDBVsn, Channels) -> try_render_messages([{ChannelId, _} | _] = Msgs, IoTDBVsn, Channels) ->
case maps:find(ChannelId, Channels) of case maps:find(ChannelId, Channels) of
{ok, Channel} -> {ok, Channel} ->
render_channel_message(Channel, IoTDBVsn, Msg); case do_render_message(Msgs, Channel, IoTDBVsn, #{}) of
RenderMsgs when is_map(RenderMsgs) ->
{ok,
lists:map(
fun({{DeviceId, IsAligned}, DataList}) ->
make_iotdb_insert_request(DataList, IsAligned, DeviceId, IoTDBVsn)
end,
maps:to_list(RenderMsgs)
)};
Error ->
Error
end;
_ -> _ ->
{error, {unrecoverable_error, {invalid_channel_id, ChannelId}}} {error, {unrecoverable_error, {invalid_channel_id, ChannelId}}}
end. end.
%% Render each `{ChannelId, Msg}` request and group the resulting data
%% rows by `{DeviceId, IsAligned}` into a map.  Stops at the first render
%% error and returns that error unchanged.
do_render_message([], _Channel, _IoTDBVsn, Grouped) ->
    Grouped;
do_render_message([{_ChannelId, Msg} | Rest], Channel, IoTDBVsn, Grouped) ->
    case render_channel_message(Channel, IoTDBVsn, Msg) of
        {ok, Rows, DeviceId, IsAligned} ->
            Key = {DeviceId, IsAligned},
            Merge = fun(Existing) -> Rows ++ Existing end,
            do_render_message(
                Rest, Channel, IoTDBVsn, maps:update_with(Key, Merge, Rows, Grouped)
            );
        Error ->
            Error
    end.
render_channel_message(#{is_aligned := IsAligned} = Channel, IoTDBVsn, Message) -> render_channel_message(#{is_aligned := IsAligned} = Channel, IoTDBVsn, Message) ->
Payloads = to_list(parse_payload(get_payload(Message))), Payloads = to_list(parse_payload(get_payload(Message))),
case device_id(Message, Payloads, Channel) of case device_id(Message, Payloads, Channel) of
@ -724,9 +827,9 @@ render_channel_message(#{is_aligned := IsAligned} = Channel, IoTDBVsn, Message)
[] -> [] ->
{error, invalid_template}; {error, invalid_template};
DataTemplate -> DataTemplate ->
case proc_data(DataTemplate, Message) of case proc_data(DataTemplate, Message, IoTDBVsn) of
{ok, DataList} -> {ok, DataList} ->
make_iotdb_insert_request(DataList, IsAligned, DeviceId, IoTDBVsn); {ok, DataList, DeviceId, IsAligned};
Error -> Error ->
Error Error
end end

View File

@ -20,14 +20,16 @@
all() -> all() ->
[ [
{group, plain}, {group, iotdb110},
{group, iotdb130},
{group, legacy} {group, legacy}
]. ].
groups() -> groups() ->
AllTCs = emqx_common_test_helpers:all(?MODULE), AllTCs = emqx_common_test_helpers:all(?MODULE),
[ [
{plain, AllTCs}, {iotdb110, AllTCs},
{iotdb130, AllTCs},
{legacy, AllTCs} {legacy, AllTCs}
]. ].
@ -37,10 +39,15 @@ init_per_suite(Config) ->
end_per_suite(Config) -> end_per_suite(Config) ->
emqx_bridge_v2_testlib:end_per_suite(Config). emqx_bridge_v2_testlib:end_per_suite(Config).
init_per_group(plain = Type, Config0) -> init_per_group(Type, Config0) when Type =:= iotdb110 orelse Type =:= iotdb130 ->
Host = os:getenv("IOTDB_PLAIN_HOST", "toxiproxy.emqx.net"), Host = os:getenv("IOTDB_PLAIN_HOST", "toxiproxy.emqx.net"),
Port = list_to_integer(os:getenv("IOTDB_PLAIN_PORT", "18080")), ProxyName = atom_to_list(Type),
ProxyName = "iotdb", {IotDbVersion, DefaultPort} =
case Type of
iotdb110 -> {?VSN_1_1_X, "18080"};
iotdb130 -> {?VSN_1_3_X, "28080"}
end,
Port = list_to_integer(os:getenv("IOTDB_PLAIN_PORT", DefaultPort)),
case emqx_common_test_helpers:is_tcp_server_available(Host, Port) of case emqx_common_test_helpers:is_tcp_server_available(Host, Port) of
true -> true ->
Config = emqx_bridge_v2_testlib:init_per_group(Type, ?BRIDGE_TYPE_BIN, Config0), Config = emqx_bridge_v2_testlib:init_per_group(Type, ?BRIDGE_TYPE_BIN, Config0),
@ -48,7 +55,7 @@ init_per_group(plain = Type, Config0) ->
{bridge_host, Host}, {bridge_host, Host},
{bridge_port, Port}, {bridge_port, Port},
{proxy_name, ProxyName}, {proxy_name, ProxyName},
{iotdb_version, ?VSN_1_1_X}, {iotdb_version, IotDbVersion},
{iotdb_rest_prefix, <<"/rest/v2/">>} {iotdb_rest_prefix, <<"/rest/v2/">>}
| Config | Config
]; ];
@ -87,7 +94,8 @@ init_per_group(_Group, Config) ->
Config. Config.
end_per_group(Group, Config) when end_per_group(Group, Config) when
Group =:= plain; Group =:= iotdb110;
Group =:= iotdb130;
Group =:= legacy Group =:= legacy
-> ->
emqx_bridge_v2_testlib:end_per_group(Config), emqx_bridge_v2_testlib:end_per_group(Config),
@ -245,7 +253,9 @@ iotdb_query(Config, Query) ->
iotdb_request(Config, Path, Body, Opts). iotdb_request(Config, Path, Body, Opts).
is_success_check({ok, 200, _, Body}) -> is_success_check({ok, 200, _, Body}) ->
?assert(is_code(200, emqx_utils_json:decode(Body))). ?assert(is_code(200, emqx_utils_json:decode(Body)));
is_success_check(Other) ->
throw(Other).
is_code(Code, #{<<"code">> := Code}) -> true; is_code(Code, #{<<"code">> := Code}) -> true;
is_code(_, _) -> false. is_code(_, _) -> false.
@ -359,89 +369,96 @@ t_async_query(Config) ->
t_sync_query_aggregated(Config) -> t_sync_query_aggregated(Config) ->
DeviceId = iotdb_device(Config), DeviceId = iotdb_device(Config),
MS = erlang:system_time(millisecond) - 5000,
Payload = [ Payload = [
make_iotdb_payload(DeviceId, "temp", "INT32", "36", 1685112026290), make_iotdb_payload(DeviceId, "temp", "INT32", "36", MS - 7000),
make_iotdb_payload(DeviceId, "temp", "INT32", 37, 1685112026291), make_iotdb_payload(DeviceId, "temp", "INT32", 37, MS - 6000),
make_iotdb_payload(DeviceId, "temp", "INT32", 38.7, 1685112026292), make_iotdb_payload(DeviceId, "temp", "INT64", 38.7, MS - 5000),
make_iotdb_payload(DeviceId, "temp", "INT32", "39", <<"1685112026293">>), make_iotdb_payload(DeviceId, "temp", "INT64", "39", integer_to_binary(MS - 4000)),
make_iotdb_payload(DeviceId, "temp", "INT64", "36", 1685112026294), make_iotdb_payload(DeviceId, "temp", "INT64", "34", MS - 3000),
make_iotdb_payload(DeviceId, "temp", "INT64", 36, 1685112026295), make_iotdb_payload(DeviceId, "temp", "INT32", 33.7, MS - 2000),
make_iotdb_payload(DeviceId, "temp", "INT64", 36.7, 1685112026296), make_iotdb_payload(DeviceId, "temp", "INT32", 32, MS - 1000),
%% implicit 'now()' timestamp
make_iotdb_payload(DeviceId, "temp", "INT32", "40"),
%% [FIXME] neither nanoseconds nor microseconds don't seem to be supported by IoTDB %% [FIXME] neither nanoseconds nor microseconds don't seem to be supported by IoTDB
(make_iotdb_payload(DeviceId, "temp", "INT32", "41"))#{timestamp => <<"now_us">>}, (make_iotdb_payload(DeviceId, "temp", "INT32", "41"))#{timestamp => <<"now_us">>},
(make_iotdb_payload(DeviceId, "temp", "INT32", "42"))#{timestamp => <<"now_ns">>},
make_iotdb_payload(DeviceId, "weight", "FLOAT", "87.3", 1685112026290), make_iotdb_payload(DeviceId, "weight", "FLOAT", "87.3", MS - 6000),
make_iotdb_payload(DeviceId, "weight", "FLOAT", 87.3, 1685112026291), make_iotdb_payload(DeviceId, "weight", "FLOAT", 87.3, MS - 5000),
make_iotdb_payload(DeviceId, "weight", "FLOAT", 87, 1685112026292), make_iotdb_payload(DeviceId, "weight", "FLOAT", 87, MS - 4000),
make_iotdb_payload(DeviceId, "weight", "DOUBLE", "87.3", 1685112026293), make_iotdb_payload(DeviceId, "weight", "DOUBLE", "87.3", MS - 3000),
make_iotdb_payload(DeviceId, "weight", "DOUBLE", 87.3, 1685112026294), make_iotdb_payload(DeviceId, "weight", "DOUBLE", 87.3, MS - 2000),
make_iotdb_payload(DeviceId, "weight", "DOUBLE", 87, 1685112026295), make_iotdb_payload(DeviceId, "weight", "DOUBLE", 87, MS - 1000),
make_iotdb_payload(DeviceId, "charged", "BOOLEAN", "1", 1685112026300), make_iotdb_payload(DeviceId, "charged", "BOOLEAN", "1", MS + 1000),
make_iotdb_payload(DeviceId, "floated", "BOOLEAN", 1, 1685112026300), make_iotdb_payload(DeviceId, "floated", "BOOLEAN", 1, MS + 1000),
make_iotdb_payload(DeviceId, "started", "BOOLEAN", true, 1685112026300), make_iotdb_payload(DeviceId, "started", "BOOLEAN", true, MS + 1000),
make_iotdb_payload(DeviceId, "stoked", "BOOLEAN", "true", 1685112026300), make_iotdb_payload(DeviceId, "stoked", "BOOLEAN", "true", MS + 1000),
make_iotdb_payload(DeviceId, "enriched", "BOOLEAN", "TRUE", 1685112026300), make_iotdb_payload(DeviceId, "enriched", "BOOLEAN", "TRUE", MS + 1000),
make_iotdb_payload(DeviceId, "gutted", "BOOLEAN", "True", 1685112026300), make_iotdb_payload(DeviceId, "gutted", "BOOLEAN", "True", MS + 1000),
make_iotdb_payload(DeviceId, "drained", "BOOLEAN", "0", 1685112026300), make_iotdb_payload(DeviceId, "drained", "BOOLEAN", "0", MS + 1000),
make_iotdb_payload(DeviceId, "toasted", "BOOLEAN", 0, 1685112026300), make_iotdb_payload(DeviceId, "toasted", "BOOLEAN", 0, MS + 1000),
make_iotdb_payload(DeviceId, "uncharted", "BOOLEAN", false, 1685112026300), make_iotdb_payload(DeviceId, "uncharted", "BOOLEAN", false, MS + 1000),
make_iotdb_payload(DeviceId, "dazzled", "BOOLEAN", "false", 1685112026300), make_iotdb_payload(DeviceId, "dazzled", "BOOLEAN", "false", MS + 1000),
make_iotdb_payload(DeviceId, "unplugged", "BOOLEAN", "FALSE", 1685112026300), make_iotdb_payload(DeviceId, "unplugged", "BOOLEAN", "FALSE", MS + 1000),
make_iotdb_payload(DeviceId, "unraveled", "BOOLEAN", "False", 1685112026300), make_iotdb_payload(DeviceId, "unraveled", "BOOLEAN", "False", MS + 1000),
make_iotdb_payload(DeviceId, "undecided", "BOOLEAN", null, 1685112026300), make_iotdb_payload(DeviceId, "undecided", "BOOLEAN", null, MS + 1000),
make_iotdb_payload(DeviceId, "foo", "TEXT", "bar", 1685112026300) make_iotdb_payload(DeviceId, "foo", "TEXT", "bar", MS + 1000)
], ],
MakeMessageFun = make_message_fun(iotdb_topic(Config), Payload), MakeMessageFun = make_message_fun(iotdb_topic(Config), Payload),
ok = emqx_bridge_v2_testlib:t_sync_query( ok = emqx_bridge_v2_testlib:t_sync_query(
Config, MakeMessageFun, fun is_success_check/1, iotdb_bridge_on_query Config, MakeMessageFun, fun is_success_check/1, iotdb_bridge_on_query
), ),
%% check temp Time = integer_to_binary(MS - 20000),
QueryTemp = <<"select temp from ", DeviceId/binary>>,
{ok, {{_, 200, _}, _, ResultTemp}} = iotdb_query(Config, QueryTemp),
?assertMatch(
#{<<"values">> := [[36, 37, 38, 39, 36, 36, 36, 40, 41, 42]]},
emqx_utils_json:decode(ResultTemp)
),
%% check weight %% check weight
QueryWeight = <<"select weight from ", DeviceId/binary>>, QueryWeight = <<"select weight from ", DeviceId/binary, " where time > ", Time/binary>>,
{ok, {{_, 200, _}, _, ResultWeight}} = iotdb_query(Config, QueryWeight), {ok, {{_, 200, _}, _, ResultWeight}} = iotdb_query(Config, QueryWeight),
?assertMatch( ?assertMatch(
#{<<"values">> := [[87.3, 87.3, 87.0, 87.3, 87.3, 87.0]]}, #{<<"values">> := [[87.3, 87.3, 87.0, 87.3, 87.3, 87.0]]},
emqx_utils_json:decode(ResultWeight) emqx_utils_json:decode(ResultWeight)
), ),
%% check rest ts = 1685112026300 %% [FIXME] https://github.com/apache/iotdb/issues/12375
QueryRest = <<"select * from ", DeviceId/binary, " where time = 1685112026300">>, %% null don't seem to be supported by IoTDB insertTablet when 1.3.0
{ok, {{_, 200, _}, _, ResultRest}} = iotdb_query(Config, QueryRest), case ?config(iotdb_version, Config) of
#{<<"values">> := Values, <<"expressions">> := Expressions} = emqx_utils_json:decode( ?VSN_1_3_X ->
ResultRest skip;
), _ ->
Results = maps:from_list(lists:zipwith(fun(K, [V]) -> {K, V} end, Expressions, Values)), %% check rest ts = MS + 1000
Exp = #{ CheckTime = integer_to_binary(MS + 1000),
exp(DeviceId, "charged") => true, QueryRest = <<"select * from ", DeviceId/binary, " where time = ", CheckTime/binary>>,
exp(DeviceId, "floated") => true, {ok, {{_, 200, _}, _, ResultRest}} = iotdb_query(Config, QueryRest),
exp(DeviceId, "started") => true, #{<<"values">> := Values, <<"expressions">> := Expressions} = emqx_utils_json:decode(
exp(DeviceId, "stoked") => true, ResultRest
exp(DeviceId, "enriched") => true, ),
exp(DeviceId, "gutted") => true, Results = maps:from_list(lists:zipwith(fun(K, [V]) -> {K, V} end, Expressions, Values)),
exp(DeviceId, "drained") => false, Exp = #{
exp(DeviceId, "toasted") => false, exp(DeviceId, "charged") => true,
exp(DeviceId, "uncharted") => false, exp(DeviceId, "floated") => true,
exp(DeviceId, "dazzled") => false, exp(DeviceId, "started") => true,
exp(DeviceId, "unplugged") => false, exp(DeviceId, "stoked") => true,
exp(DeviceId, "unraveled") => false, exp(DeviceId, "enriched") => true,
exp(DeviceId, "undecided") => null, exp(DeviceId, "gutted") => true,
exp(DeviceId, "foo") => <<"bar">>, exp(DeviceId, "drained") => false,
exp(DeviceId, "temp") => null, exp(DeviceId, "toasted") => false,
exp(DeviceId, "weight") => null exp(DeviceId, "uncharted") => false,
}, exp(DeviceId, "dazzled") => false,
?assertEqual(Exp, Results), exp(DeviceId, "unplugged") => false,
exp(DeviceId, "unraveled") => false,
exp(DeviceId, "undecided") => null,
exp(DeviceId, "foo") => <<"bar">>,
exp(DeviceId, "temp") => null,
exp(DeviceId, "weight") => null
},
?assertEqual(Exp, Results),
%% check temp
QueryTemp = <<"select temp from ", DeviceId/binary, " where time > ", Time/binary>>,
{ok, {{_, 200, _}, _, ResultTemp}} = iotdb_query(Config, QueryTemp),
?assertMatch(
#{<<"values">> := [[36, 37, 38, 39, 34, 33, 32, 41]]},
emqx_utils_json:decode(ResultTemp)
)
end,
ok. ok.
exp(Dev, M0) -> exp(Dev, M0) ->

View File

@ -389,7 +389,7 @@ fields(producer_kafka_opts) ->
)}, )},
{kafka_headers, {kafka_headers,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
required => false, required => false,
validator => fun kafka_header_validator/1, validator => fun kafka_header_validator/1,
@ -462,12 +462,12 @@ fields(producer_kafka_ext_headers) ->
[ [
{kafka_ext_header_key, {kafka_ext_header_key,
mk( mk(
binary(), emqx_schema:template(),
#{required => true, desc => ?DESC(producer_kafka_ext_header_key)} #{required => true, desc => ?DESC(producer_kafka_ext_header_key)}
)}, )},
{kafka_ext_header_value, {kafka_ext_header_value,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
required => true, required => true,
validator => fun kafka_ext_header_value_validator/1, validator => fun kafka_ext_header_value_validator/1,
@ -477,11 +477,20 @@ fields(producer_kafka_ext_headers) ->
]; ];
fields(kafka_message) -> fields(kafka_message) ->
[ [
{key, mk(string(), #{default => <<"${.clientid}">>, desc => ?DESC(kafka_message_key)})}, {key,
{value, mk(string(), #{default => <<"${.}">>, desc => ?DESC(kafka_message_value)})}, mk(emqx_schema:template(), #{
default => <<"${.clientid}">>,
desc => ?DESC(kafka_message_key)
})},
{value,
mk(emqx_schema:template(), #{
default => <<"${.}">>,
desc => ?DESC(kafka_message_value)
})},
{timestamp, {timestamp,
mk(string(), #{ mk(emqx_schema:template(), #{
default => <<"${.timestamp}">>, desc => ?DESC(kafka_message_timestamp) default => <<"${.timestamp}">>,
desc => ?DESC(kafka_message_timestamp)
})} })}
]; ];
fields(producer_buffer) -> fields(producer_buffer) ->
@ -536,8 +545,11 @@ fields(consumer_topic_mapping) ->
{qos, mk(emqx_schema:qos(), #{default => 0, desc => ?DESC(consumer_mqtt_qos)})}, {qos, mk(emqx_schema:qos(), #{default => 0, desc => ?DESC(consumer_mqtt_qos)})},
{payload_template, {payload_template,
mk( mk(
string(), emqx_schema:template(),
#{default => <<"${.}">>, desc => ?DESC(consumer_mqtt_payload)} #{
default => <<"${.}">>,
desc => ?DESC(consumer_mqtt_payload)
}
)} )}
]; ];
fields(consumer_kafka_opts) -> fields(consumer_kafka_opts) ->
@ -744,8 +756,8 @@ producer_strategy_key_validator(
producer_strategy_key_validator(emqx_utils_maps:binary_key_map(Conf)); producer_strategy_key_validator(emqx_utils_maps:binary_key_map(Conf));
producer_strategy_key_validator(#{ producer_strategy_key_validator(#{
<<"partition_strategy">> := key_dispatch, <<"partition_strategy">> := key_dispatch,
<<"message">> := #{<<"key">> := ""} <<"message">> := #{<<"key">> := Key}
}) -> }) when Key =:= "" orelse Key =:= <<>> ->
{error, "Message key cannot be empty when `key_dispatch` strategy is used"}; {error, "Message key cannot be empty when `key_dispatch` strategy is used"};
producer_strategy_key_validator(_) -> producer_strategy_key_validator(_) ->
ok. ok.

View File

@ -357,7 +357,7 @@ kafka_consumer_hocon() ->
%% assert compatibility %% assert compatibility
bridge_schema_json_test() -> bridge_schema_json_test() ->
JSON = iolist_to_binary(emqx_conf:bridge_schema_json()), JSON = iolist_to_binary(emqx_dashboard_schema_api:bridge_schema_json()),
Map = emqx_utils_json:decode(JSON), Map = emqx_utils_json:decode(JSON),
Path = [<<"components">>, <<"schemas">>, <<"bridge_kafka.post_producer">>, <<"properties">>], Path = [<<"components">>, <<"schemas">>, <<"bridge_kafka.post_producer">>, <<"properties">>],
?assertMatch(#{<<"kafka">> := _}, emqx_utils_maps:deep_get(Path, Map)). ?assertMatch(#{<<"kafka">> := _}, emqx_utils_maps:deep_get(Path, Map)).

View File

@ -150,7 +150,7 @@ fields(producer) ->
[ [
{payload_template, {payload_template,
sc( sc(
binary(), emqx_schema:template(),
#{ #{
default => <<"${.}">>, default => <<"${.}">>,
desc => ?DESC("payload_template") desc => ?DESC("payload_template")

View File

@ -44,8 +44,10 @@ roots() -> [].
fields("config") -> fields("config") ->
[ [
{enable, mk(boolean(), #{desc => ?DESC("enable"), default => true})}, {enable, mk(boolean(), #{desc => ?DESC("enable"), default => true})},
{collection, mk(binary(), #{desc => ?DESC("collection"), default => <<"mqtt">>})}, {collection,
{payload_template, mk(binary(), #{required => false, desc => ?DESC("payload_template")})}, mk(emqx_schema:template(), #{desc => ?DESC("collection"), default => <<"mqtt">>})},
{payload_template,
mk(emqx_schema:template(), #{required => false, desc => ?DESC("payload_template")})},
{resource_opts, {resource_opts,
mk( mk(
ref(?MODULE, "creation_opts"), ref(?MODULE, "creation_opts"),

View File

@ -450,7 +450,6 @@ connect(Options) ->
options => emqx_utils:redact(Options) options => emqx_utils:redact(Options)
}), }),
Name = proplists:get_value(name, Options), Name = proplists:get_value(name, Options),
WorkerId = proplists:get_value(ecpool_worker_id, Options),
ClientOpts = proplists:get_value(client_opts, Options), ClientOpts = proplists:get_value(client_opts, Options),
case emqtt:start_link(mk_client_opts(Name, WorkerId, ClientOpts)) of case emqtt:start_link(mk_client_opts(Name, WorkerId, ClientOpts)) of
{ok, Pid} -> {ok, Pid} ->

View File

@ -200,7 +200,7 @@ fields("ingress_local") ->
[ [
{topic, {topic,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
validator => fun emqx_schema:non_empty_string/1, validator => fun emqx_schema:non_empty_string/1,
desc => ?DESC("ingress_local_topic"), desc => ?DESC("ingress_local_topic"),
@ -217,7 +217,7 @@ fields("ingress_local") ->
)}, )},
{retain, {retain,
mk( mk(
hoconsc:union([boolean(), binary()]), hoconsc:union([boolean(), emqx_schema:template()]),
#{ #{
default => <<"${retain}">>, default => <<"${retain}">>,
desc => ?DESC("retain") desc => ?DESC("retain")
@ -225,7 +225,7 @@ fields("ingress_local") ->
)}, )},
{payload, {payload,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
default => undefined, default => undefined,
desc => ?DESC("payload") desc => ?DESC("payload")
@ -268,7 +268,7 @@ fields("egress_remote") ->
[ [
{topic, {topic,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
required => true, required => true,
validator => fun emqx_schema:non_empty_string/1, validator => fun emqx_schema:non_empty_string/1,
@ -286,7 +286,7 @@ fields("egress_remote") ->
)}, )},
{retain, {retain,
mk( mk(
hoconsc:union([boolean(), binary()]), hoconsc:union([boolean(), emqx_schema:template()]),
#{ #{
required => false, required => false,
default => false, default => false,
@ -295,7 +295,7 @@ fields("egress_remote") ->
)}, )},
{payload, {payload,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
default => undefined, default => undefined,
desc => ?DESC("payload") desc => ?DESC("payload")
@ -344,7 +344,7 @@ desc(_) ->
undefined. undefined.
qos() -> qos() ->
hoconsc:union([emqx_schema:qos(), binary()]). hoconsc:union([emqx_schema:qos(), emqx_schema:template()]).
parse_server(Str) -> parse_server(Str) ->
#{hostname := Host, port := Port} = emqx_schema:parse_server(Str, ?MQTT_HOST_OPTS), #{hostname := Host, port := Port} = emqx_schema:parse_server(Str, ?MQTT_HOST_OPTS),

View File

@ -117,7 +117,7 @@ fields("config") ->
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})}, {enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
{sql, {sql,
mk( mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>} #{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>}
)}, )},
{local_topic, {local_topic,

View File

@ -517,7 +517,6 @@ t_write_failure(Config) ->
ok ok
end, end,
fun(Trace0) -> fun(Trace0) ->
ct:pal("trace: ~p", [Trace0]),
Trace = ?of_kind(buffer_worker_flush_nack, Trace0), Trace = ?of_kind(buffer_worker_flush_nack, Trace0),
?assertMatch([#{result := {error, _}} | _], Trace), ?assertMatch([#{result := {error, _}} | _], Trace),
[#{result := {error, Error}} | _] = Trace, [#{result := {error, Error}} | _] = Trace,

View File

@ -146,7 +146,7 @@ fields(action_parameters_data) ->
[ [
{timestamp, {timestamp,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
desc => ?DESC("config_parameters_timestamp"), desc => ?DESC("config_parameters_timestamp"),
required => false required => false
@ -154,7 +154,7 @@ fields(action_parameters_data) ->
)}, )},
{metric, {metric,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
required => true, required => true,
desc => ?DESC("config_parameters_metric") desc => ?DESC("config_parameters_metric")
@ -162,7 +162,7 @@ fields(action_parameters_data) ->
)}, )},
{tags, {tags,
mk( mk(
hoconsc:union([map(), binary()]), hoconsc:union([map(), emqx_schema:template()]),
#{ #{
required => true, required => true,
desc => ?DESC("config_parameters_tags"), desc => ?DESC("config_parameters_tags"),
@ -188,7 +188,7 @@ fields(action_parameters_data) ->
)}, )},
{value, {value,
mk( mk(
hoconsc:union([integer(), float(), binary()]), hoconsc:union([integer(), float(), emqx_schema:template()]),
#{ #{
required => true, required => true,
desc => ?DESC("config_parameters_value") desc => ?DESC("config_parameters_value")

View File

@ -158,7 +158,7 @@ fields(action_parameters) ->
[ [
{sql, {sql,
hoconsc:mk( hoconsc:mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>} #{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>}
)} )}
]; ];
@ -177,7 +177,7 @@ fields("config") ->
)}, )},
{sql, {sql,
hoconsc:mk( hoconsc:mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>} #{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>}
)}, )},
{local_topic, {local_topic,

View File

@ -61,7 +61,7 @@ fields(action_parameters) ->
[ [
{sql, {sql,
hoconsc:mk( hoconsc:mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("sql_template"), default => default_sql(), format => <<"sql">>} #{desc => ?DESC("sql_template"), default => default_sql(), format => <<"sql">>}
)} )}
]; ];

View File

@ -520,7 +520,6 @@ t_write_failure(Config) ->
) )
end), end),
fun(Trace0) -> fun(Trace0) ->
ct:pal("trace: ~p", [Trace0]),
Trace = ?of_kind(buffer_worker_flush_nack, Trace0), Trace = ?of_kind(buffer_worker_flush_nack, Trace0),
?assertMatch([#{result := {error, _}} | _], Trace), ?assertMatch([#{result := {error, _}} | _], Trace),
[#{result := {error, Error}} | _] = Trace, [#{result := {error, Error}} | _] = Trace,

View File

@ -51,12 +51,12 @@ fields(action_parameters) ->
fields(producer_pulsar_message) -> fields(producer_pulsar_message) ->
[ [
{key, {key,
?HOCON(string(), #{ ?HOCON(emqx_schema:template(), #{
default => <<"${.clientid}">>, default => <<"${.clientid}">>,
desc => ?DESC("producer_key_template") desc => ?DESC("producer_key_template")
})}, })},
{value, {value,
?HOCON(string(), #{ ?HOCON(emqx_schema:template(), #{
default => <<"${.}">>, default => <<"${.}">>,
desc => ?DESC("producer_value_template") desc => ?DESC("producer_value_template")
})} })}

View File

@ -1235,7 +1235,7 @@ t_resilience(Config) ->
after 1_000 -> ct:fail("producer didn't stop!") after 1_000 -> ct:fail("producer didn't stop!")
end, end,
Consumed = lists:flatmap( Consumed = lists:flatmap(
fun(_) -> receive_consumed(5_000) end, lists:seq(1, NumProduced) fun(_) -> receive_consumed(10_000) end, lists:seq(1, NumProduced)
), ),
?assertEqual(NumProduced, length(Consumed)), ?assertEqual(NumProduced, length(Consumed)),
ExpectedPayloads = lists:map(fun integer_to_binary/1, lists:seq(1, NumProduced)), ExpectedPayloads = lists:map(fun integer_to_binary/1, lists:seq(1, NumProduced)),

View File

@ -99,7 +99,7 @@ fields(action_parameters) ->
)}, )},
{payload_template, {payload_template,
hoconsc:mk( hoconsc:mk(
binary(), emqx_schema:template(),
#{ #{
default => <<"">>, default => <<"">>,
desc => ?DESC(?CONNECTOR_SCHEMA, "payload_template") desc => ?DESC(?CONNECTOR_SCHEMA, "payload_template")

View File

@ -52,7 +52,7 @@ init_per_group(_Group, Config) ->
common_init_per_group(Opts) -> common_init_per_group(Opts) ->
emqx_common_test_helpers:render_and_load_app_config(emqx_conf), emqx_common_test_helpers:render_and_load_app_config(emqx_conf),
ok = emqx_common_test_helpers:start_apps([ ok = emqx_common_test_helpers:start_apps([
emqx_conf, emqx_bridge, emqx_bridge_rabbitmq, emqx_rule_engine emqx_conf, emqx_bridge, emqx_bridge_rabbitmq, emqx_rule_engine, emqx_modules
]), ]),
ok = emqx_connector_test_helpers:start_apps([emqx_resource]), ok = emqx_connector_test_helpers:start_apps([emqx_resource]),
{ok, _} = application:ensure_all_started(emqx_connector), {ok, _} = application:ensure_all_started(emqx_connector),
@ -116,7 +116,9 @@ end_per_group(_Group, Config) ->
} = get_channel_connection(Config), } = get_channel_connection(Config),
amqp_channel:call(Channel, #'queue.purge'{queue = rabbit_mq_queue()}), amqp_channel:call(Channel, #'queue.purge'{queue = rabbit_mq_queue()}),
emqx_mgmt_api_test_util:end_suite(), emqx_mgmt_api_test_util:end_suite(),
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_bridge_rabbitmq, emqx_rule_engine]), ok = emqx_common_test_helpers:stop_apps([
emqx_conf, emqx_bridge_rabbitmq, emqx_rule_engine, emqx_modules
]),
ok = emqx_connector_test_helpers:stop_apps([emqx_resource]), ok = emqx_connector_test_helpers:stop_apps([emqx_resource]),
_ = application:stop(emqx_connector), _ = application:stop(emqx_connector),
_ = application:stop(emqx_bridge), _ = application:stop(emqx_bridge),

View File

@ -211,7 +211,7 @@ desc(_) ->
undefined. undefined.
command_template(type) -> command_template(type) ->
list(binary()); hoconsc:array(emqx_schema:template());
command_template(required) -> command_template(required) ->
true; true;
command_template(validator) -> command_template(validator) ->

View File

@ -162,8 +162,13 @@ fields(action_parameters) ->
[ [
{template, {template,
mk( mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("template"), default => ?DEFAULT_TEMPLATE} #{desc => ?DESC("template"), default => ?DEFAULT_TEMPLATE}
)},
{strategy,
mk(
hoconsc:union([roundrobin, binary()]),
#{desc => ?DESC("strategy"), default => roundrobin}
)} )}
] ++ emqx_bridge_rocketmq_connector:fields(config), ] ++ emqx_bridge_rocketmq_connector:fields(config),
lists:foldl( lists:foldl(
@ -173,6 +178,7 @@ fields(action_parameters) ->
Parameters, Parameters,
[ [
servers, servers,
namespace,
pool_size, pool_size,
auto_reconnect, auto_reconnect,
access_key, access_key,
@ -205,17 +211,21 @@ fields("config") ->
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})}, {enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
{template, {template,
mk( mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("template"), default => ?DEFAULT_TEMPLATE} #{desc => ?DESC("template"), default => ?DEFAULT_TEMPLATE}
)}, )},
{local_topic, {local_topic,
mk( mk(
binary(), binary(),
#{desc => ?DESC("local_topic"), required => false} #{desc => ?DESC("local_topic"), required => false}
)},
{strategy,
mk(
hoconsc:union([roundrobin, binary()]),
#{desc => ?DESC("strategy"), default => roundrobin}
)} )}
] ++ emqx_resource_schema:fields("resource_opts") ++ ] ++ emqx_resource_schema:fields("resource_opts") ++
(emqx_bridge_rocketmq_connector:fields(config) -- emqx_bridge_rocketmq_connector:fields(config);
emqx_connector_schema_lib:prepare_statement_fields());
fields("post") -> fields("post") ->
[type_field(), name_field() | fields("config")]; [type_field(), name_field() | fields("config")];
fields("put") -> fields("put") ->

View File

@ -45,9 +45,14 @@ roots() ->
fields(config) -> fields(config) ->
[ [
{servers, servers()}, {servers, servers()},
{topic, {namespace,
mk( mk(
binary(), binary(),
#{required => false, desc => ?DESC(namespace)}
)},
{topic,
mk(
emqx_schema:template(),
#{default => <<"TopicTest">>, desc => ?DESC(topic)} #{default => <<"TopicTest">>, desc => ?DESC(topic)}
)}, )},
{access_key, {access_key,
@ -107,7 +112,7 @@ on_start(
), ),
ClientId = client_id(InstanceId), ClientId = client_id(InstanceId),
ACLInfo = acl_info(AccessKey, SecretKey, SecurityToken), ACLInfo = acl_info(AccessKey, SecretKey, SecurityToken),
ClientCfg = #{acl_info => ACLInfo}, ClientCfg = namespace(#{acl_info => ACLInfo}, Config),
State = #{ State = #{
client_id => ClientId, client_id => ClientId,
@ -156,10 +161,12 @@ create_channel_state(
TopicTks = emqx_placeholder:preproc_tmpl(Topic), TopicTks = emqx_placeholder:preproc_tmpl(Topic),
ProducerOpts = make_producer_opts(Conf, ACLInfo), ProducerOpts = make_producer_opts(Conf, ACLInfo),
Templates = parse_template(Conf), Templates = parse_template(Conf),
DispatchStrategy = parse_dispatch_strategy(Conf),
State = #{ State = #{
topic => Topic, topic => Topic,
topic_tokens => TopicTks, topic_tokens => TopicTks,
templates => Templates, templates => Templates,
dispatch_strategy => DispatchStrategy,
sync_timeout => SyncTimeout, sync_timeout => SyncTimeout,
acl_info => ACLInfo, acl_info => ACLInfo,
producers_opts => ProducerOpts producers_opts => ProducerOpts
@ -202,7 +209,7 @@ on_stop(InstanceId, _State) ->
({_, client_id, ClientId}) -> ({_, client_id, ClientId}) ->
destory_producers_map(ClientId), destory_producers_map(ClientId),
ok = rocketmq:stop_and_delete_supervised_client(ClientId); ok = rocketmq:stop_and_delete_supervised_client(ClientId);
({_, _Topic, Producer}) -> ({_, _ProducerGroup, Producer}) ->
_ = rocketmq:stop_and_delete_supervised_producers(Producer) _ = rocketmq:stop_and_delete_supervised_producers(Producer)
end, end,
emqx_resource:get_allocated_resources_list(InstanceId) emqx_resource:get_allocated_resources_list(InstanceId)
@ -250,15 +257,16 @@ do_query(
#{ #{
topic_tokens := TopicTks, topic_tokens := TopicTks,
templates := Templates, templates := Templates,
dispatch_strategy := DispatchStrategy,
sync_timeout := RequestTimeout, sync_timeout := RequestTimeout,
producers_opts := ProducerOpts producers_opts := ProducerOpts
} = maps:get(ChannelId, Channels), } = maps:get(ChannelId, Channels),
TopicKey = get_topic_key(Query, TopicTks), TopicKey = get_topic_key(Query, TopicTks),
Data = apply_template(Query, Templates), Data = apply_template(Query, Templates, DispatchStrategy),
Result = safe_do_produce( Result = safe_do_produce(
InstanceId, QueryFunc, ClientId, TopicKey, Data, ProducerOpts, RequestTimeout ChannelId, InstanceId, QueryFunc, ClientId, TopicKey, Data, ProducerOpts, RequestTimeout
), ),
case Result of case Result of
{error, Reason} -> {error, Reason} ->
@ -284,9 +292,11 @@ do_query(
get_channel_id({ChannelId, _}) -> ChannelId; get_channel_id({ChannelId, _}) -> ChannelId;
get_channel_id([{ChannelId, _} | _]) -> ChannelId. get_channel_id([{ChannelId, _} | _]) -> ChannelId.
safe_do_produce(InstanceId, QueryFunc, ClientId, TopicKey, Data, ProducerOpts, RequestTimeout) -> safe_do_produce(
ChannelId, InstanceId, QueryFunc, ClientId, TopicKey, Data, ProducerOpts, RequestTimeout
) ->
try try
Producers = get_producers(InstanceId, ClientId, TopicKey, ProducerOpts), Producers = get_producers(ChannelId, InstanceId, ClientId, TopicKey, ProducerOpts),
produce(InstanceId, QueryFunc, Producers, Data, RequestTimeout) produce(InstanceId, QueryFunc, Producers, Data, RequestTimeout)
catch catch
_Type:Reason -> _Type:Reason ->
@ -315,24 +325,57 @@ parse_template([{Key, H} | T], Templates) ->
parse_template([], Templates) -> parse_template([], Templates) ->
Templates. Templates.
%% returns a procedure to generate the produce context
parse_dispatch_strategy(#{strategy := roundrobin}) ->
fun(_) ->
#{}
end;
parse_dispatch_strategy(#{strategy := Template}) ->
Tokens = emqx_placeholder:preproc_tmpl(Template),
fun(Msg) ->
#{
key =>
case emqx_placeholder:proc_tmpl(Tokens, Msg) of
<<"undefined">> ->
%% Since the key may be absent on some kinds of events (ex:
%% `topic' is absent in `client.disconnected'), and this key is
%% used for routing, we generate a random key when it's absent to
%% better distribute the load, effectively making it `random'
%% dispatch if the key is absent and we are using `key_dispatch'.
%% Otherwise, it'll be deterministic.
emqx_guid:gen();
Key ->
Key
end
}
end.
get_topic_key({_, Msg}, TopicTks) -> get_topic_key({_, Msg}, TopicTks) ->
emqx_placeholder:proc_tmpl(TopicTks, Msg); emqx_placeholder:proc_tmpl(TopicTks, Msg);
get_topic_key([Query | _], TopicTks) -> get_topic_key([Query | _], TopicTks) ->
get_topic_key(Query, TopicTks). get_topic_key(Query, TopicTks).
apply_template({Key, Msg} = _Req, Templates) -> %% return a message data and its context,
%% {binary(), rocketmq_producers:produce_context()})
apply_template({Key, Msg} = _Req, Templates, DispatchStrategy) ->
{
case maps:get(Key, Templates, undefined) of
undefined ->
emqx_utils_json:encode(Msg);
Template ->
emqx_placeholder:proc_tmpl(Template, Msg)
end,
DispatchStrategy(Msg)
};
apply_template([{Key, _} | _] = Reqs, Templates, DispatchStrategy) ->
case maps:get(Key, Templates, undefined) of case maps:get(Key, Templates, undefined) of
undefined -> undefined ->
emqx_utils_json:encode(Msg); [{emqx_utils_json:encode(Msg), DispatchStrategy(Msg)} || {_, Msg} <- Reqs];
Template -> Template ->
emqx_placeholder:proc_tmpl(Template, Msg) [
end; {emqx_placeholder:proc_tmpl(Template, Msg), DispatchStrategy(Msg)}
apply_template([{Key, _} | _] = Reqs, Templates) -> || {_, Msg} <- Reqs
case maps:get(Key, Templates, undefined) of ]
undefined ->
[emqx_utils_json:encode(Msg) || {_, Msg} <- Reqs];
Template ->
[emqx_placeholder:proc_tmpl(Template, Msg) || {_, Msg} <- Reqs]
end. end.
client_id(ResourceId) -> client_id(ResourceId) ->
@ -377,6 +420,10 @@ acl_info(AccessKey, SecretKey, SecurityToken) when is_binary(AccessKey) ->
acl_info(_, _, _) -> acl_info(_, _, _) ->
#{}. #{}.
namespace(ClientCfg, Config) ->
Namespace = maps:get(namespace, Config, <<>>),
ClientCfg#{namespace => Namespace}.
create_producers_map(ClientId) -> create_producers_map(ClientId) ->
_ = ets:new(ClientId, [public, named_table, {read_concurrency, true}]), _ = ets:new(ClientId, [public, named_table, {read_concurrency, true}]),
ok. ok.
@ -391,16 +438,21 @@ destory_producers_map(ClientId) ->
ets:delete(Tid) ets:delete(Tid)
end. end.
get_producers(InstanceId, ClientId, Topic, ProducerOpts) -> get_producers(ChannelId, InstanceId, ClientId, Topic, ProducerOpts) ->
case ets:lookup(ClientId, Topic) of %% The topic need to be included in the name since we can have multiple
%% topics per channel due to templating.
ProducerGroup = iolist_to_binary([ChannelId, "_", Topic]),
case ets:lookup(ClientId, ProducerGroup) of
[{_, Producers}] -> [{_, Producers}] ->
Producers; Producers;
_ -> _ ->
ProducerGroup = iolist_to_binary([atom_to_list(ClientId), "_", Topic]), %% TODO: the name needs to be an atom but this may cause atom leak so we
%% should figure out a way to avoid this
ProducerOpts2 = ProducerOpts#{name => binary_to_atom(ProducerGroup)},
{ok, Producers} = rocketmq:ensure_supervised_producers( {ok, Producers} = rocketmq:ensure_supervised_producers(
ClientId, ProducerGroup, Topic, ProducerOpts ClientId, ProducerGroup, Topic, ProducerOpts2
), ),
ok = emqx_resource:allocate_resource(InstanceId, Topic, Producers), ok = emqx_resource:allocate_resource(InstanceId, ProducerGroup, Producers),
ets:insert(ClientId, {Topic, Producers}), ets:insert(ClientId, {ProducerGroup, Producers}),
Producers Producers
end. end.

View File

@ -263,6 +263,60 @@ t_setup_via_http_api_and_publish(Config) ->
), ),
ok. ok.
t_setup_two_actions_via_http_api_and_publish(Config) ->
BridgeType = ?GET_CONFIG(rocketmq_bridge_type, Config),
Name = ?GET_CONFIG(rocketmq_name, Config),
RocketMQConf = ?GET_CONFIG(rocketmq_config, Config),
RocketMQConf2 = RocketMQConf#{
<<"name">> => Name,
<<"type">> => BridgeType
},
?assertMatch(
{ok, _},
create_bridge_http(RocketMQConf2)
),
{ok, #{raw_config := ActionConf}} = emqx_bridge_v2:lookup(actions, BridgeType, Name),
Topic2 = <<"Topic2">>,
ActionConf2 = emqx_utils_maps:deep_force_put(
[<<"parameters">>, <<"topic">>], ActionConf, Topic2
),
Action2Name = atom_to_binary(?FUNCTION_NAME),
{ok, _} = emqx_bridge_v2:create(BridgeType, Action2Name, ActionConf2),
SentData = #{payload => ?PAYLOAD},
?check_trace(
begin
?wait_async_action(
?assertEqual(ok, send_message(Config, SentData)),
#{?snk_kind := rocketmq_connector_query_return},
10_000
),
ok
end,
fun(Trace0) ->
Trace = ?of_kind(rocketmq_connector_query_return, Trace0),
?assertMatch([#{result := ok}], Trace),
ok
end
),
Config2 = proplists:delete(rocketmq_name, Config),
Config3 = [{rocketmq_name, Action2Name} | Config2],
?check_trace(
begin
?wait_async_action(
?assertEqual(ok, send_message(Config3, SentData)),
#{?snk_kind := rocketmq_connector_query_return},
10_000
),
ok
end,
fun(Trace0) ->
Trace = ?of_kind(rocketmq_connector_query_return, Trace0),
?assertMatch([#{result := ok}], Trace),
ok
end
),
ok.
t_get_status(Config) -> t_get_status(Config) ->
?assertMatch( ?assertMatch(
{ok, _}, {ok, _},

View File

@ -77,7 +77,7 @@ fields(s3_upload_parameters) ->
[ [
{content, {content,
hoconsc:mk( hoconsc:mk(
string(), emqx_schema:template(),
#{ #{
required => false, required => false,
default => <<"${.}">>, default => <<"${.}">>,

View File

@ -192,7 +192,7 @@ fields(action_parameters) ->
[ [
{sql, {sql,
mk( mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>} #{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>}
)} )}
]; ];

View File

@ -112,7 +112,7 @@ fields("parameters") ->
[ [
{target_topic, {target_topic,
mk( mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("target_topic"), default => <<"${topic}">>} #{desc => ?DESC("target_topic"), default => <<"${topic}">>}
)}, )},
{target_qos, {target_qos,
@ -122,7 +122,7 @@ fields("parameters") ->
)}, )},
{template, {template,
mk( mk(
binary(), emqx_schema:template(),
#{desc => ?DESC("template"), default => <<"${payload}">>} #{desc => ?DESC("template"), default => <<"${payload}">>}
)} )}
]; ];

View File

@ -83,7 +83,7 @@ fields("config") ->
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})}, {enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
{sql, {sql,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
desc => ?DESC("sql_template"), desc => ?DESC("sql_template"),
default => ?DEFAULT_SQL, default => ?DEFAULT_SQL,
@ -125,7 +125,7 @@ fields(action_parameters) ->
{database, fun emqx_connector_schema_lib:database/1}, {database, fun emqx_connector_schema_lib:database/1},
{sql, {sql,
mk( mk(
binary(), emqx_schema:template(),
#{ #{
desc => ?DESC("sql_template"), desc => ?DESC("sql_template"),
default => ?DEFAULT_SQL, default => ?DEFAULT_SQL,

View File

@ -224,6 +224,7 @@ reset() -> gen_server:call(?MODULE, reset).
status() -> status() ->
transaction(fun ?MODULE:trans_status/0, []). transaction(fun ?MODULE:trans_status/0, []).
%% DO NOT delete this on_leave_clean/0, It's use when rpc before v560.
on_leave_clean() -> on_leave_clean() ->
on_leave_clean(node()). on_leave_clean(node()).
@ -367,7 +368,7 @@ handle_call({fast_forward_to_commit, ToTnxId}, _From, State) ->
NodeId = do_fast_forward_to_commit(ToTnxId, State), NodeId = do_fast_forward_to_commit(ToTnxId, State),
{reply, NodeId, State, catch_up(State)}; {reply, NodeId, State, catch_up(State)};
handle_call(on_leave, _From, State) -> handle_call(on_leave, _From, State) ->
{atomic, ok} = transaction(fun ?MODULE:on_leave_clean/0, []), {atomic, ok} = transaction(fun ?MODULE:on_leave_clean/1, [node()]),
{reply, ok, State#{is_leaving := true}}; {reply, ok, State#{is_leaving := true}};
handle_call(_, _From, State) -> handle_call(_, _From, State) ->
{reply, ok, State, catch_up(State)}. {reply, ok, State, catch_up(State)}.

View File

@ -31,13 +31,6 @@
-export([dump_schema/2, reformat_schema_dump/2]). -export([dump_schema/2, reformat_schema_dump/2]).
-export([schema_module/0]). -export([schema_module/0]).
%% TODO: move to emqx_dashboard when we stop building api schema at build time
-export([
hotconf_schema_json/0,
bridge_schema_json/0,
hocon_schema_to_spec/2
]).
%% for rpc %% for rpc
-export([get_node_and_config/1]). -export([get_node_and_config/1]).
@ -311,12 +304,22 @@ gen_flat_doc(RootNames, #{full_name := FullName, fields := Fields} = S, DescReso
false -> false ->
ok ok
end, end,
#{ try
text => short_name(FullName), #{
hash => format_hash(FullName), text => short_name(FullName),
doc => maps:get(desc, S, <<"">>), hash => format_hash(FullName),
fields => format_fields(Fields, DescResolver) doc => maps:get(desc, S, <<"">>),
}. fields => format_fields(Fields, DescResolver)
}
catch
throw:Reason ->
io:format(
standard_error,
"failed_to_build_doc for ~s:~n~p~n",
[FullName, Reason]
),
error(failed_to_build_doc)
end.
format_fields(Fields, DescResolver) -> format_fields(Fields, DescResolver) ->
[format_field(F, DescResolver) || F <- Fields]. [format_field(F, DescResolver) || F <- Fields].
@ -456,17 +459,6 @@ warn_bad_namespace(Namespace) ->
ok ok
end. end.
%% TODO: move this function to emqx_dashboard when we stop generating this JSON at build time.
hotconf_schema_json() ->
SchemaInfo = #{title => <<"EMQX Hot Conf API Schema">>, version => <<"0.1.0">>},
gen_api_schema_json_iodata(emqx_mgmt_api_configs, SchemaInfo).
%% TODO: move this function to emqx_dashboard when we stop generating this JSON at build time.
bridge_schema_json() ->
Version = <<"0.1.0">>,
SchemaInfo = #{title => <<"EMQX Data Bridge API Schema">>, version => Version},
gen_api_schema_json_iodata(emqx_bridge_api, SchemaInfo).
%% @doc return the root schema module. %% @doc return the root schema module.
-spec schema_module() -> module(). -spec schema_module() -> module().
schema_module() -> schema_module() ->
@ -506,57 +498,6 @@ make_desc_resolver(Lang) ->
unicode:characters_to_binary(Desc) unicode:characters_to_binary(Desc)
end. end.
gen_api_schema_json_iodata(SchemaMod, SchemaInfo) ->
emqx_dashboard_swagger:gen_api_schema_json_iodata(
SchemaMod,
SchemaInfo,
fun ?MODULE:hocon_schema_to_spec/2
).
-define(TO_REF(_N_, _F_), iolist_to_binary([to_bin(_N_), ".", to_bin(_F_)])).
-define(TO_COMPONENTS_SCHEMA(_M_, _F_),
iolist_to_binary([
<<"#/components/schemas/">>,
?TO_REF(emqx_dashboard_swagger:namespace(_M_), _F_)
])
).
hocon_schema_to_spec(?R_REF(Module, StructName), _LocalModule) ->
{#{<<"$ref">> => ?TO_COMPONENTS_SCHEMA(Module, StructName)}, [{Module, StructName}]};
hocon_schema_to_spec(?REF(StructName), LocalModule) ->
{#{<<"$ref">> => ?TO_COMPONENTS_SCHEMA(LocalModule, StructName)}, [{LocalModule, StructName}]};
hocon_schema_to_spec(Type, LocalModule) when ?IS_TYPEREFL(Type) ->
{typename_to_spec(typerefl:name(Type), LocalModule), []};
hocon_schema_to_spec(?ARRAY(Item), LocalModule) ->
{Schema, Refs} = hocon_schema_to_spec(Item, LocalModule),
{#{type => array, items => Schema}, Refs};
hocon_schema_to_spec(?ENUM(Items), _LocalModule) ->
{#{type => enum, symbols => Items}, []};
hocon_schema_to_spec(?MAP(Name, Type), LocalModule) ->
{Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule),
{
#{
<<"type">> => object,
<<"properties">> => #{<<"$", (to_bin(Name))/binary>> => Schema}
},
SubRefs
};
hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) ->
{OneOf, Refs} = lists:foldl(
fun(Type, {Acc, RefsAcc}) ->
{Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule),
{[Schema | Acc], SubRefs ++ RefsAcc}
end,
{[], []},
hoconsc:union_members(Types)
),
{#{<<"oneOf">> => OneOf}, Refs};
hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
{#{type => enum, symbols => [Atom]}, []}.
typename_to_spec(TypeStr, Module) ->
emqx_conf_schema_types:readable_dashboard(Module, TypeStr).
join_format(Snippets) -> join_format(Snippets) ->
case [S || S <- Snippets, S =/= undefined] of case [S || S <- Snippets, S =/= undefined] of
[] -> [] ->

View File

@ -33,8 +33,19 @@ readable(Module, TypeStr) when is_list(TypeStr) ->
%% Module is ignored so far as all types are distinguished by their names %% Module is ignored so far as all types are distinguished by their names
readable(TypeStr) readable(TypeStr)
catch catch
throw:unknown_type -> throw:Reason ->
fail(#{reason => unknown_type, type => TypeStr, module => Module}) throw(#{
reason => Reason,
type => TypeStr,
module => Module
});
error:Reason:Stacktrace ->
throw(#{
reason => Reason,
stacktrace => Stacktrace,
type => TypeStr,
module => Module
})
end. end.
readable_swagger(Module, TypeStr) -> readable_swagger(Module, TypeStr) ->
@ -49,22 +60,28 @@ readable_docgen(Module, TypeStr) ->
get_readable(Module, TypeStr, Flavor) -> get_readable(Module, TypeStr, Flavor) ->
Map = readable(Module, TypeStr), Map = readable(Module, TypeStr),
case maps:get(Flavor, Map, undefined) of case maps:get(Flavor, Map, undefined) of
undefined -> fail(#{reason => unknown_type, module => Module, type => TypeStr}); undefined -> throw(#{reason => unknown_type, module => Module, type => TypeStr});
Value -> Value Value -> Value
end. end.
%% Fail the build or test. Production code should never get here.
-spec fail(_) -> no_return().
fail(Reason) ->
io:format(standard_error, "ERROR: ~p~n", [Reason]),
error(Reason).
readable("boolean()") -> readable("boolean()") ->
#{ #{
swagger => #{type => boolean}, swagger => #{type => boolean},
dashboard => #{type => boolean}, dashboard => #{type => boolean},
docgen => #{type => "Boolean"} docgen => #{type => "Boolean"}
}; };
readable("template()") ->
#{
swagger => #{type => string},
dashboard => #{type => string, is_template => true},
docgen => #{type => "String", desc => ?DESC(template)}
};
readable("template_str()") ->
#{
swagger => #{type => string},
dashboard => #{type => string, is_template => true},
docgen => #{type => "String", desc => ?DESC(template)}
};
readable("binary()") -> readable("binary()") ->
#{ #{
swagger => #{type => string}, swagger => #{type => string},

Some files were not shown because too many files have changed in this diff Show More