Merge pull request #4768 from emqx/resolve-conflict-to-5.0

Auto-pull-request-on-2021-05-08

commit 70d2e0e905
@@ -72,7 +72,7 @@
 
 [shell bench]
 !cd $BENCH_PATH
-!./emqtt_bench pub -c 10 -I 1000 -t t/%i -s 64 -L 600
+!./emqtt_bench pub -c 10 -I 1000 -t t/%i -s 64 -L 300
 ???sent
 
 [shell emqx]

@@ -109,7 +109,7 @@
 ???publish complete
 ??SH-PROMPT:
 !curl http://127.0.0.1:8080/counter
-???{"data":600,"code":0}
+???{"data":300,"code":0}
 ?SH-PROMPT
 
 [shell emqx2]
@@ -95,6 +95,10 @@ jobs:
 if (Test-Path rebar.lock) {
 Remove-Item -Force -Path rebar.lock
 }
+make ensure-rebar3
+copy rebar3 "${{ steps.install_erlang.outputs.erlpath }}\bin"
+ls "${{ steps.install_erlang.outputs.erlpath }}\bin"
+rebar3 --help
 make ${{ matrix.profile }}
 mkdir -p _packages/${{ matrix.profile }}
 Compress-Archive -Path _build/${{ matrix.profile }}/rel/emqx -DestinationPath _build/${{ matrix.profile }}/rel/$pkg_name

@@ -155,6 +159,8 @@ jobs:
 - name: build
 run: |
 . $HOME/.kerl/${{ matrix.erl_otp }}/activate
+make -C source ensure-rebar3
+sudo cp source/rebar3 /usr/local/bin/rebar3
 make -C source ${{ matrix.profile }}-zip
 - name: test
 run: |
@@ -53,7 +53,7 @@ jobs:
 strategy:
 matrix:
 erl_otp:
-- 23.2.7.2
+- 23.2.7.2-emqx-2
 
 steps:
 - uses: actions/checkout@v1

@@ -82,11 +82,14 @@ jobs:
 if: steps.cache.outputs.cache-hit != 'true'
 timeout-minutes: 60
 run: |
+export OTP_GITHUB_URL="https://github.com/emqx/otp"
 kerl build ${{ matrix.erl_otp }}
 kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }}
 - name: build
 run: |
 . $HOME/.kerl/${{ matrix.erl_otp }}/activate
+make ensure-rebar3
+sudo cp rebar3 /usr/local/bin/rebar3
 make ${EMQX_NAME}-zip
 - name: test
 run: |

@@ -130,7 +130,7 @@ jobs:
 docker exec --env-file .env -i erlang bash -c "make coveralls"
 - name: cat rebar.crashdump
 if: failure()
-run: if [ -f 'rebar3.crashdump' ];then cat 'rebar3.crashdump' fi
+run: if [ -f 'rebar3.crashdump' ];then cat 'rebar3.crashdump'; fi
 - uses: actions/upload-artifact@v1
 if: failure()
 with:
@@ -1,6 +1,6 @@
 {application, emqx_bridge_mqtt,
 [{description, "EMQ X Bridge to MQTT Broker"},
-{vsn, "4.3.0"}, % strict semver, bump manually!
+{vsn, "4.3.1"}, % strict semver, bump manually!
 {modules, []},
 {registered, []},
 {applications, [kernel,stdlib,replayq,emqtt]},

@@ -2,9 +2,15 @@
 
 {VSN,
 [
+{"4.3.0", [
+{load_module, emqx_bridge_worker, brutal_purge, soft_purge, []}
+]},
 {<<".*">>, []}
 ],
 [
+{"4.3.0", [
+{load_module, emqx_bridge_worker, brutal_purge, soft_purge, []}
+]},
 {<<".*">>, []}
 ]
 }.
@@ -120,7 +120,7 @@ t_observe_acl_deny(_Config) ->
 ok = meck:unload(emqx_access_control).
 
 t_observe_wildcard(_Config) ->
-Topic = <<"+/b">>, TopicStr = http_uri:encode(binary_to_list(Topic)),
+Topic = <<"+/b">>, TopicStr = emqx_http_lib:uri_encode(binary_to_list(Topic)),
 Payload = <<"123">>,
 Uri = "coap://127.0.0.1/mqtt/"++TopicStr++"?c=client1&u=tom&p=secret",
 {ok, Pid, N, Code, Content} = er_coap_observer:observe(Uri),

@@ -143,7 +143,7 @@ t_observe_wildcard(_Config) ->
 [] = emqx:subscribers(Topic).
 
 t_observe_pub(_Config) ->
-Topic = <<"+/b">>, TopicStr = http_uri:encode(binary_to_list(Topic)),
+Topic = <<"+/b">>, TopicStr = emqx_http_lib:uri_encode(binary_to_list(Topic)),
 Uri = "coap://127.0.0.1/mqtt/"++TopicStr++"?c=client1&u=tom&p=secret",
 {ok, Pid, N, Code, Content} = er_coap_observer:observe(Uri),
 ?LOGT("observer Pid=~p, N=~p, Code=~p, Content=~p", [Pid, N, Code, Content]),

@@ -152,7 +152,7 @@ t_observe_pub(_Config) ->
 ?assert(is_pid(SubPid)),
 
 Topic2 = <<"a/b">>, Payload2 = <<"UFO">>,
-TopicStr2 = http_uri:encode(binary_to_list(Topic2)),
+TopicStr2 = emqx_http_lib:uri_encode(binary_to_list(Topic2)),
 URI2 = "coap://127.0.0.1/mqtt/"++TopicStr2++"?c=client1&u=tom&p=secret",
 
 Reply2 = er_coap_client:request(put, URI2, #coap_content{format = <<"application/octet-stream">>, payload = Payload2}),

@@ -164,7 +164,7 @@ t_observe_pub(_Config) ->
 ?assertEqual(Payload2, PayloadRecv2),
 
 Topic3 = <<"j/b">>, Payload3 = <<"ET629">>,
-TopicStr3 = http_uri:encode(binary_to_list(Topic3)),
+TopicStr3 = emqx_http_lib:uri_encode(binary_to_list(Topic3)),
 URI3 = "coap://127.0.0.1/mqtt/"++TopicStr3++"?c=client2&u=mike&p=guess",
 Reply3 = er_coap_client:request(put, URI3, #coap_content{format = <<"application/octet-stream">>, payload = Payload3}),
 {ok,changed, _} = Reply3,

@@ -186,7 +186,7 @@ t_one_clientid_sub_2_topics(_Config) ->
 [SubPid] = emqx:subscribers(Topic1),
 ?assert(is_pid(SubPid)),
 
-Topic2 = <<"x/y">>, TopicStr2 = http_uri:encode(binary_to_list(Topic2)),
+Topic2 = <<"x/y">>, TopicStr2 = emqx_http_lib:uri_encode(binary_to_list(Topic2)),
 Payload2 = <<"456">>,
 Uri2 = "coap://127.0.0.1/mqtt/"++TopicStr2++"?c=client1&u=tom&p=secret",
 {ok, Pid2, N2, Code2, Content2} = er_coap_observer:observe(Uri2),

@@ -217,7 +217,7 @@ t_invalid_parameter(_Config) ->
 %% "cid=client2" is invaid
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 Topic3 = <<"a/b">>, Payload3 = <<"ET629">>,
-TopicStr3 = http_uri:encode(binary_to_list(Topic3)),
+TopicStr3 = emqx_http_lib:uri_encode(binary_to_list(Topic3)),
 URI3 = "coap://127.0.0.1/mqtt/"++TopicStr3++"?cid=client2&u=tom&p=simple",
 Reply3 = er_coap_client:request(put, URI3, #coap_content{format = <<"application/octet-stream">>, payload = Payload3}),
 ?assertMatch({error,bad_request}, Reply3),

@@ -173,7 +173,7 @@ t_case01_publish_post(_Config) ->
 ?assertEqual(<<"42">>, CT2),
 
 %% post to publish message to topic maintopic/topic1
-FullTopicStr = http_uri:encode(binary_to_list(FullTopic)),
+FullTopicStr = emqx_http_lib:uri_encode(binary_to_list(FullTopic)),
 URI2 = "coap://127.0.0.1/ps/"++FullTopicStr++"?c=client1&u=tom&p=secret",
 PubPayload = <<"PUBLISH">>,
 

@@ -286,7 +286,7 @@ t_case01_publish_put(_Config) ->
 ?assertEqual(<<"42">>, CT2),
 
 %% put to publish message to topic maintopic/topic1
-FullTopicStr = http_uri:encode(binary_to_list(FullTopic)),
+FullTopicStr = emqx_http_lib:uri_encode(binary_to_list(FullTopic)),
 URI2 = "coap://127.0.0.1/ps/"++FullTopicStr++"?c=client1&u=tom&p=secret",
 PubPayload = <<"PUBLISH">>,
 

@@ -430,7 +430,7 @@ t_case01_subscribe(_Config) ->
 t_case02_subscribe(_Config) ->
 Topic = <<"a/b">>,
 TopicStr = binary_to_list(Topic),
-PercentEncodedTopic = http_uri:encode(TopicStr),
+PercentEncodedTopic = emqx_http_lib:uri_encode(TopicStr),
 Payload = <<"payload">>,
 
 %% post to publish a new topic "a/b", and the topic is created

@@ -477,7 +477,7 @@ t_case03_subscribe(_Config) ->
 %% Subscribe to the unexisted topic "a/b", got not_found
 Topic = <<"a/b">>,
 TopicStr = binary_to_list(Topic),
-PercentEncodedTopic = http_uri:encode(TopicStr),
+PercentEncodedTopic = emqx_http_lib:uri_encode(TopicStr),
 Uri = "coap://127.0.0.1/ps/"++PercentEncodedTopic++"?c=client1&u=tom&p=secret",
 {error, not_found} = er_coap_observer:observe(Uri),
 

@@ -487,7 +487,7 @@ t_case04_subscribe(_Config) ->
 %% Subscribe to the wildcad topic "+/b", got bad_request
 Topic = <<"+/b">>,
 TopicStr = binary_to_list(Topic),
-PercentEncodedTopic = http_uri:encode(TopicStr),
+PercentEncodedTopic = emqx_http_lib:uri_encode(TopicStr),
 Uri = "coap://127.0.0.1/ps/"++PercentEncodedTopic++"?c=client1&u=tom&p=secret",
 {error, bad_request} = er_coap_observer:observe(Uri),
 

@@ -582,7 +582,7 @@ t_case04_read(_Config) ->
 t_case05_read(_Config) ->
 Topic = <<"a/b">>,
 TopicStr = binary_to_list(Topic),
-PercentEncodedTopic = http_uri:encode(TopicStr),
+PercentEncodedTopic = emqx_http_lib:uri_encode(TopicStr),
 Payload = <<"payload">>,
 
 %% post to publish a new topic "a/b", and the topic is created

@@ -609,7 +609,7 @@ t_case05_read(_Config) ->
 t_case01_delete(_Config) ->
 TopicInPayload = <<"a/b">>,
 TopicStr = binary_to_list(TopicInPayload),
-PercentEncodedTopic = http_uri:encode(TopicStr),
+PercentEncodedTopic = emqx_http_lib:uri_encode(TopicStr),
 Payload = list_to_binary("<"++PercentEncodedTopic++">;ct=42"),
 URI = "coap://127.0.0.1/ps/"++"?c=client1&u=tom&p=secret",
 

@@ -621,7 +621,7 @@ t_case01_delete(_Config) ->
 
 %% Client post to CREATE topic "a/b/c"
 TopicInPayload1 = <<"a/b/c">>,
-PercentEncodedTopic1 = http_uri:encode(binary_to_list(TopicInPayload1)),
+PercentEncodedTopic1 = emqx_http_lib:uri_encode(binary_to_list(TopicInPayload1)),
 Payload1 = list_to_binary("<"++PercentEncodedTopic1++">;ct=42"),
 Reply1 = er_coap_client:request(post, URI, #coap_content{format = <<"application/link-format">>, payload = Payload1}),
 ?LOGT("Reply =~p", [Reply1]),

@@ -643,7 +643,7 @@ t_case01_delete(_Config) ->
 t_case02_delete(_Config) ->
 TopicInPayload = <<"a/b">>,
 TopicStr = binary_to_list(TopicInPayload),
-PercentEncodedTopic = http_uri:encode(TopicStr),
+PercentEncodedTopic = emqx_http_lib:uri_encode(TopicStr),
 
 %% DELETE the unexisted topic "a/b"
 Uri1 = "coap://127.0.0.1/ps/"++PercentEncodedTopic++"?c=client1&u=tom&p=secret",
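These test changes replace OTP's deprecated http_uri:encode/1 with emqx_http_lib:uri_encode/1 as a drop-in. A minimal sketch of the calling pattern used throughout the suite (topic and credentials are illustrative):

    %% percent-encode the topic before splicing it into the CoAP request URI
    Topic = <<"a/b">>,
    TopicStr = emqx_http_lib:uri_encode(binary_to_list(Topic)),
    Uri = "coap://127.0.0.1/mqtt/" ++ TopicStr ++ "?c=client1&u=tom&p=secret".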
@@ -237,14 +237,14 @@ message EmptySuccess { }
 message ValuedResponse {
 
 // The responsed value type
-// - ignore: Ignore the responsed value
 // - contiune: Use the responsed value and execute the next hook
+// - ignore: Ignore the responsed value
 // - stop_and_return: Use the responsed value and stop the chain executing
 enum ResponsedType {
 
-IGNORE = 0;
+CONTINUE = 0;
 
-CONTINUE = 1;
+IGNORE = 1;
 
 STOP_AND_RETURN = 2;
 }
@@ -1,6 +1,6 @@
 {application, emqx_exhook,
 [{description, "EMQ X Extension for Hook"},
-{vsn, "4.3.0"},
+{vsn, "4.3.1"},
 {modules, []},
 {registered, []},
 {mod, {emqx_exhook_app, []}},

@@ -0,0 +1,15 @@
+%% -*-: erlang -*-
+{VSN,
+[
+{"4.3.0", [
+{load_module, emqx_exhook_pb, brutal_purge, soft_purge, []}
+]},
+{<<".*">>, []}
+],
+[
+{"4.3.0", [
+{load_module, emqx_exhook_pb, brutal_purge, soft_purge, []}
+]},
+{<<".*">>, []}
+]
+}.
@@ -1886,8 +1886,11 @@ std_register(UdpSock, Epn, ObjectList, MsgId1, RespTopic) ->
 timer:sleep(100).
 
 resolve_uri(Uri) ->
-{ok, {Scheme, _UserInfo, Host, PortNo, Path, Query}} =
-http_uri:parse(Uri, [{scheme_defaults, [{coap, ?DEFAULT_COAP_PORT}, {coaps, ?DEFAULT_COAPS_PORT}]}]),
+{ok, #{scheme := Scheme,
+host := Host,
+port := PortNo,
+path := Path} = URIMap} = emqx_http_lib:uri_parse(Uri),
+Query = maps:get(query, URIMap, ""),
 {ok, PeerIP} = inet:getaddr(Host, inet),
 {Scheme, {PeerIP, PortNo}, split_path(Path), split_query(Query)}.
 

@@ -1896,7 +1899,7 @@ split_path([$/]) -> [];
 split_path([$/ | Path]) -> split_segments(Path, $/, []).
 
 split_query([]) -> [];
-split_query([$? | Path]) -> split_segments(Path, $&, []).
+split_query(Path) -> split_segments(Path, $&, []).
 
 split_segments(Path, Char, Acc) ->
 case string:rchr(Path, Char) of

@@ -1908,7 +1911,7 @@ split_segments(Path, Char, Acc) ->
 end.
 
 make_segment(Seg) ->
-list_to_binary(http_uri:decode(Seg)).
+list_to_binary(emqx_http_lib:uri_decode(Seg)).
 
 
 get_coap_path(Options) ->
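For context, a minimal sketch of consuming the emqx_http_lib:uri_parse/1 result the same way resolve_uri/1 above does (the literal URI is illustrative; the port value mirrors the coap default asserted in emqx_http_lib_tests at the end of this diff):

    {ok, #{scheme := coap, host := Host, port := 5683} = URIMap} =
        emqx_http_lib:uri_parse("coap://127.0.0.1"),
    %% query is only present when the URI carries one, so read it with a default
    Query = maps:get(query, URIMap, ""),
    {ok, PeerIP} = inet:getaddr(Host, inet).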
@@ -1,6 +1,6 @@
 {application, emqx_management,
 [{description, "EMQ X Management API and CLI"},
-{vsn, "4.3.2"}, % strict semver, bump manually!
+{vsn, "4.3.3"}, % strict semver, bump manually!
 {modules, []},
 {registered, [emqx_management_sup]},
 {applications, [kernel,stdlib,minirest]},

@@ -1,14 +1,24 @@
 %% -*-: erlang -*-
-{"4.3.2",
-[ {<<"4.3.[0-1]">>,
-[ {load_module, emqx_mgmt_data_backup, brutal_purge, soft_purge, []}
-, {load_module, emqx_mgmt_cli, brutal_purge, soft_purge, []}
-]}
-],
-[
+{VSN,
+[ {"4.3.2",
+[ {load_module, emqx_mgmt, brutal_purge, soft_purge, []}
+]},
 {<<"4.3.[0-1]">>,
 [ {load_module, emqx_mgmt_data_backup, brutal_purge, soft_purge, []}
 , {load_module, emqx_mgmt_cli, brutal_purge, soft_purge, []}
-]}
+, {load_module, emqx_mgmt, brutal_purge, soft_purge, []}
+]},
+{<<".*">>, []}
+],
+[
+{"4.3.2",
+[ {load_module, emqx_mgmt, brutal_purge, soft_purge, []}
+]},
+{<<"4.3.[0-1]">>,
+[ {load_module, emqx_mgmt_data_backup, brutal_purge, soft_purge, []}
+, {load_module, emqx_mgmt_cli, brutal_purge, soft_purge, []}
+, {load_module, emqx_mgmt, brutal_purge, soft_purge, []}
+]},
+{<<".*">>, []}
 ]
 }.
@@ -139,7 +139,7 @@ node_info(Node) when Node =:= node() ->
 Info#{node => node(),
 otp_release => iolist_to_binary(otp_rel()),
 memory_total => proplists:get_value(allocated, Memory),
-memory_used => proplists:get_value(used, Memory),
+memory_used => proplists:get_value(total, Memory),
 process_available => erlang:system_info(process_limit),
 process_used => erlang:system_info(process_count),
 max_fds => proplists:get_value(max_fds, lists:usort(lists:flatten(erlang:system_info(check_io)))),
@@ -53,6 +53,12 @@
 , unregister/2
 ]).
 
+-export([ get_topic_metrics/2
+, register_topic_metrics/2
+, unregister_topic_metrics/2
+, unregister_all_topic_metrics/1
+]).
+
 list(#{topic := Topic0}, _Params) ->
 execute_when_enabled(fun() ->
 Topic = emqx_mgmt_util:urldecode(Topic0),
@@ -1,6 +1,6 @@
 {application, emqx_modules,
 [{description, "EMQ X Module Management"},
-{vsn, "4.3.1"},
+{vsn, "4.3.2"},
 {modules, []},
 {applications, [kernel,stdlib]},
 {mod, {emqx_modules_app, []}},

@@ -1,14 +1,22 @@
 %% -*-: erlang -*-
 {VSN,
 [
+{"4.3.1", [
+{load_module, emqx_mod_api_topic_metrics, brutal_purge, soft_purge, []}
+]},
 {"4.3.0", [
-{update, emqx_mod_delayed, {advanced, []}}
+{update, emqx_mod_delayed, {advanced, []}},
+{load_module, emqx_mod_api_topic_metrics, brutal_purge, soft_purge, []}
 ]},
 {<<".*">>, []}
 ],
 [
+{"4.3.1", [
+{load_module, emqx_mod_api_topic_metrics, brutal_purge, soft_purge, []}
+]},
 {"4.3.0", [
-{update, emqx_mod_delayed, {advanced, []}}
+{update, emqx_mod_delayed, {advanced, []}},
+{load_module, emqx_mod_api_topic_metrics, brutal_purge, soft_purge, []}
 ]},
 {<<".*">>, []}
 ]
@@ -1,6 +1,6 @@
 {application, emqx_telemetry,
 [{description, "EMQ X Telemetry"},
-{vsn, "4.3.0"}, % strict semver, bump manually!
+{vsn, "4.3.1"}, % strict semver, bump manually!
 {modules, []},
 {registered, [emqx_telemetry_sup]},
 {applications, [kernel,stdlib]},

@@ -0,0 +1,15 @@
+%% -*- mode: erlang -*-
+{VSN,
+[
+{"4.3.0", [
+{load_module, emqx_telemetry, brutal_purge, soft_purge, []}
+]},
+{<<".*">>, []}
+],
+[
+{"4.3.0", [
+{load_module, emqx_telemetry, brutal_purge, soft_purge, []}
+]},
+{<<".*">>, []}
+]
+}.
@@ -12,8 +12,10 @@ else
 EDITION='opensource'
 fi
 
+## emqx_release.hrl is the single source of truth for release version
 RELEASE="$(grep -E "define.+EMQX_RELEASE.+${EDITION}" include/emqx_release.hrl | cut -d '"' -f2)"
 
+## git commit hash is added as suffix in case the git tag and release version is not an exact match
 if [ -d .git ] && ! git describe --tags --match "[e|v]${RELEASE}" --exact >/dev/null 2>&1; then
 SUFFIX="-$(git rev-parse HEAD | cut -b1-8)"
 fi
@@ -2301,7 +2301,7 @@ end}.
 
 {mapping, "broker.session_locking_strategy", "emqx.session_locking_strategy", [
 {default, quorum},
-{datatype, {enum, [local,one,quorum,all]}}
+{datatype, {enum, [local,leader,quorum,all]}}
 ]}.
 
 %% @doc Shared Subscription Dispatch Strategy.
@@ -8,7 +8,8 @@
 
 {edoc_opts, [{preprocess,true}]}.
 {erl_opts, [warn_unused_vars,warn_shadow_vars,warn_unused_import,
-warn_obsolete_guard,compressed]}.
+warn_obsolete_guard,compressed,
+{d, snk_kind, msg}]}.
 
 {extra_src_dirs, [{"etc", [{recursive,true}]}]}.
 

@@ -35,7 +36,8 @@
 {erl_first_files, ["src/emqx_logger.erl", "src/emqx_rule_actions_trans.erl"]}.
 
 {deps,
-[ {ehttpc, {git, "https://github.com/emqx/ehttpc", {tag, "0.1.5"}}}
+[ {gpb, "4.11.2"} %% gpb only used to build, but not for release, pin it here to avoid fetching a wrong version due to rebar plugins scattered in all the deps
+, {ehttpc, {git, "https://github.com/emqx/ehttpc", {tag, "0.1.5"}}}
 , {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.6.5"}}}
 , {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}}
 , {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}

@@ -53,7 +55,7 @@
 , {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
 , {observer_cli, "1.6.1"} % NOTE: depends on recon 2.5.1
 , {getopt, "1.0.1"}
-, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.12.0"}}}
+, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.13.0"}}}
 ]}.
 
 {xref_ignores,
@@ -2,10 +2,15 @@
 
 -export([do/2]).
 
-do(_Dir, CONFIG) ->
-{HasElixir, C1} = deps(CONFIG),
-Config = dialyzer(C1),
-maybe_dump(Config ++ [{overrides, overrides()}] ++ coveralls() ++ config(HasElixir)).
+do(Dir, CONFIG) ->
+case iolist_to_binary(Dir) of
+<<".">> ->
+{HasElixir, C1} = deps(CONFIG),
+Config = dialyzer(C1),
+maybe_dump(Config ++ [{overrides, overrides()}] ++ coveralls() ++ config(HasElixir));
+_ ->
+CONFIG
+end.
 
 bcrypt() ->
 {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {branch, "0.6.0"}}}.

@@ -46,6 +51,8 @@ overrides() ->
 [ {add, [ {extra_src_dirs, [{"etc", [{recursive,true}]}]}
 , {erl_opts, [{compile_info, [{emqx_vsn, get_vsn()}]}]}
 ]}
+, {add, snabbkaffe,
+[{erl_opts, common_compile_opts()}]}
 ] ++ community_plugin_overrides().
 
 community_plugin_overrides() ->

@@ -106,6 +113,7 @@ test_deps() ->
 common_compile_opts() ->
 [ debug_info % alwyas include debug_info
 , {compile_info, [{emqx_vsn, get_vsn()}]}
+, {d, snk_kind, msg}
 ] ++
 [{d, 'EMQX_ENTERPRISE'} || is_enterprise()] ++
 [{d, 'EMQX_BENCHMARK'} || os:getenv("EMQX_BENCHMARK") =:= "1" ].
@@ -1,7 +1,8 @@
 #!/bin/bash
 set -euo pipefail
 
-latest_release=$(git describe --tags "$(git rev-list --tags --max-count=1 --remotes=refs/remote/origin)")
+remote="refs/remote/$(git remote -v | grep fetch | grep 'emqx/emqx' | awk '{print $1}')"
+latest_release=$(git describe --tags "$(git rev-list --tags --max-count=1 --remotes="$remote")")
 
 bad_app_count=0
 
@@ -12,11 +12,14 @@ if [ -f 'EMQX_ENTERPRISE' ]; then
 DASHBOARD_PATH='lib-ee/emqx_dashboard/priv'
 DASHBOARD_REPO='emqx-enterprise-dashboard-frontend-src'
 AUTH="Authorization: token $(cat scripts/git-token)"
+# have to be resolved with auth and redirect
+DIRECT_DOWNLOAD_URL=""
 else
 VERSION="${EMQX_CE_DASHBOARD_VERSION}"
 DASHBOARD_PATH='lib-ce/emqx_dashboard/priv'
 DASHBOARD_REPO='emqx-dashboard-frontend'
 AUTH=""
+DIRECT_DOWNLOAD_URL="https://github.com/emqx/${DASHBOARD_REPO}/releases/download/${VERSION}/emqx-dashboard.zip"
 fi
 
 case $(uname) in

@@ -32,27 +35,32 @@ if [ -d "$DASHBOARD_PATH/www" ] && [ "$(version)" = "$VERSION" ]; then
 exit 0
 fi
 
-get_assets(){
+find_url() {
 # Get the download URL of our desired asset
-download_url="$(curl --silent --show-error \
---header "${AUTH}" \
---header "Accept: application/vnd.github.v3+json" \
-"https://api.github.com/repos/emqx/${DASHBOARD_REPO}/releases/tags/${VERSION}" \
-| jq --raw-output ".assets[] | select(.name==\"${RELEASE_ASSET_FILE}\").url" \
-| tr -d '\n' | tr -d '\r')"
+release_url="https://api.github.com/repos/emqx/${DASHBOARD_REPO}/releases/tags/${VERSION}"
+release_info="$(curl --silent --show-error --header "${AUTH}" --header "Accept: application/vnd.github.v3+json" "$release_url")"
+if ! download_url="$(echo "$release_info" | jq --raw-output ".assets[] | select(.name==\"${RELEASE_ASSET_FILE}\").url" | tr -d '\n' | tr -d '\r')"; then
+echo "failed to query $release_url"
+echo "${release_info}"
+exit 1
+fi
 # Get GitHub's S3 redirect URL
-redirect_url=$(curl --silent --show-error \
---header "${AUTH}" \
---header "Accept: application/octet-stream" \
---write-out "%{redirect_url}" \
-"$download_url")
 curl --silent --show-error \
+--header "${AUTH}" \
 --header "Accept: application/octet-stream" \
---output "${RELEASE_ASSET_FILE}" \
-"$redirect_url"
+--write-out "%{redirect_url}" \
+"$download_url"
 }
 
-get_assets
+if [ -z "$DIRECT_DOWNLOAD_URL" ]; then
+DIRECT_DOWNLOAD_URL="$(find_url)"
+fi
+
+curl -L --silent --show-error \
+--header "Accept: application/octet-stream" \
+--output "${RELEASE_ASSET_FILE}" \
+"$DIRECT_DOWNLOAD_URL"
+
 unzip -q "$RELEASE_ASSET_FILE" -d "$DASHBOARD_PATH"
 rm -rf "$DASHBOARD_PATH/www"
 mv "$DASHBOARD_PATH/dist" "$DASHBOARD_PATH/www"
@@ -1,7 +1,7 @@
 {application, emqx,
 [{id, "emqx"},
 {description, "EMQ X"},
-{vsn, "4.3.2"}, % strict semver, bump manually!
+{vsn, "4.3.3"}, % strict semver, bump manually!
 {modules, []},
 {registered, []},
 {applications, [kernel,stdlib,gproc,gen_rpc,esockd,cowboy,sasl,os_mon]},
@@ -1,17 +1,25 @@
-%% -*-: erlang -*-
+%% -*- mode: erlang -*-
 {VSN,
 [
+{"4.3.2", [
+{load_module, emqx_http_lib, brutal_purge, soft_purge, []}
+]},
 {"4.3.1", [
+{load_module, emqx_ws_connection, brutal_purge, soft_purge, []},
 {load_module, emqx_connection, brutal_purge, soft_purge, []},
+{load_module, emqx_frame, brutal_purge, soft_purge, []},
 {load_module, emqx_cm, brutal_purge, soft_purge, []},
 {load_module, emqx_congestion, brutal_purge, soft_purge, []},
 {load_module, emqx_node_dump, brutal_purge, soft_purge, []},
 {load_module, emqx_channel, brutal_purge, soft_purge, []},
 {load_module, emqx_app, brutal_purge, soft_purge, []},
-{load_module, emqx_plugins, brutal_purge, soft_purge, []}
+{load_module, emqx_plugins, brutal_purge, soft_purge, []},
+{load_module, emqx_logger_textfmt, brutal_purge, soft_purge, []},
+{load_module, emqx_http_lib, brutal_purge, soft_purge, []}
 ]},
 {"4.3.0", [
 {load_module, emqx_logger_jsonfmt, brutal_purge, soft_purge, []},
+{load_module, emqx_ws_connection, brutal_purge, soft_purge, []},
 {load_module, emqx_congestion, brutal_purge, soft_purge, []},
 {load_module, emqx_connection, brutal_purge, soft_purge, []},
 {load_module, emqx_frame, brutal_purge, soft_purge, []},

@@ -21,24 +29,33 @@
 {load_module, emqx_channel, brutal_purge, soft_purge, []},
 {load_module, emqx_app, brutal_purge, soft_purge, []},
 {load_module, emqx_plugins, brutal_purge, soft_purge, []},
-%%
+{load_module, emqx_logger_textfmt, brutal_purge, soft_purge, []},
 {load_module, emqx_metrics, brutal_purge, soft_purge, []},
-{apply, {emqx_metrics, upgrade_retained_delayed_counter_type, []}}
+{apply, {emqx_metrics, upgrade_retained_delayed_counter_type, []}},
+{load_module, emqx_http_lib, brutal_purge, soft_purge, []}
 ]},
 {<<".*">>, []}
 ],
 [
+{"4.3.2", [
+{load_module, emqx_http_lib, brutal_purge, soft_purge, []}
+]},
 {"4.3.1", [
+{load_module, emqx_ws_connection, brutal_purge, soft_purge, []},
 {load_module, emqx_connection, brutal_purge, soft_purge, []},
+{load_module, emqx_frame, brutal_purge, soft_purge, []},
 {load_module, emqx_cm, brutal_purge, soft_purge, []},
 {load_module, emqx_congestion, brutal_purge, soft_purge, []},
 {load_module, emqx_node_dump, brutal_purge, soft_purge, []},
 {load_module, emqx_channel, brutal_purge, soft_purge, []},
 {load_module, emqx_app, brutal_purge, soft_purge, []},
-{load_module, emqx_plugins, brutal_purge, soft_purge, []}
+{load_module, emqx_plugins, brutal_purge, soft_purge, []},
+{load_module, emqx_logger_textfmt, brutal_purge, soft_purge, []},
+{load_module, emqx_http_lib, brutal_purge, soft_purge, []}
 ]},
 {"4.3.0", [
 {load_module, emqx_logger_jsonfmt, brutal_purge, soft_purge, []},
+{load_module, emqx_ws_connection, brutal_purge, soft_purge, []},
 {load_module, emqx_connection, brutal_purge, soft_purge, []},
 {load_module, emqx_congestion, brutal_purge, soft_purge, []},
 {load_module, emqx_frame, brutal_purge, soft_purge, []},

@@ -48,10 +65,11 @@
 {load_module, emqx_channel, brutal_purge, soft_purge, []},
 {load_module, emqx_app, brutal_purge, soft_purge, []},
 {load_module, emqx_plugins, brutal_purge, soft_purge, []},
+{load_module, emqx_logger_textfmt, brutal_purge, soft_purge, []},
 %% Just load the module. We don't need to change the 'messages.retained'
 %% and 'messages.retained' counter type.
 {load_module, emqx_metrics, brutal_purge, soft_purge, []},
-{apply, {emqx_metrics, upgrade_retained_delayed_counter_type, []}}
+{load_module, emqx_http_lib, brutal_purge, soft_purge, []}
 ]},
 {<<".*">>, []}
 ]
@@ -41,7 +41,8 @@
 , stats/1
 ]).
 
--export([ async_set_keepalive/4
+-export([ async_set_keepalive/3
+, async_set_keepalive/4
 , async_set_socket_options/2
 ]).
 

@@ -200,6 +201,9 @@ stats(#state{transport = Transport,
 %%
 %% NOTE: This API sets TCP socket options, which has nothing to do with
 %% the MQTT layer's keepalive (PINGREQ and PINGRESP).
+async_set_keepalive(Idle, Interval, Probes) ->
+async_set_keepalive(self(), Idle, Interval, Probes).
+
 async_set_keepalive(Pid, Idle, Interval, Probes) ->
 Options = [ {keepalive, true}
 , {raw, 6, 4, <<Idle:32/native>>}
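A small usage sketch of the new arity-3 wrapper (the numeric values are illustrative; the new clause simply defaults the target to self(), and the raw 6/4 option in the context lines corresponds to Linux IPPROTO_TCP/TCP_KEEPIDLE):

    %% inside the connection process: start probing after 300 s idle, every 75 s, up to 9 probes
    emqx_connection:async_set_keepalive(300, 75, 9),
    %% or target an explicit connection pid with the existing arity-4 API
    emqx_connection:async_set_keepalive(ConnPid, 300, 75, 9).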
@@ -121,17 +121,8 @@ parse(Bin, {{body, #{hdr := Header,
 len := Length,
 rest := Body}
 }, Options}) when is_binary(Bin) ->
-BodyBytes = body_bytes(Body),
-{NewBodyPart, Tail} = split(BodyBytes + size(Bin) - Length, Bin),
-NewBody = append_body(Body, NewBodyPart),
-parse_frame(NewBody, Tail, Header, Length, Options).
-
-%% split given binary with the first N bytes
-split(N, Bin) when N =< 0 ->
-{Bin, <<>>};
-split(N, Bin) when N =< size(Bin) ->
-<<H:N/binary, T/binary>> = Bin,
-{H, T}.
+NewBody = append_body(Body, Bin),
+parse_frame(NewBody, Header, Length, Options).
 
 parse_remaining_len(<<>>, Header, Options) ->
 {more, {{len, #{hdr => Header, len => {1, 0}}}, Options}};

@@ -178,19 +169,15 @@ append_body(H, T) when is_binary(H) ->
 append_body(?Q(Bytes, Q), T) ->
 ?Q(Bytes + iolist_size(T), queue:in(T, Q)).
 
-flatten_body(Body, Tail) when is_binary(Body) -> <<Body/binary, Tail/binary>>;
-flatten_body(?Q(_, Q), Tail) -> iolist_to_binary([queue:to_list(Q), Tail]).
+flatten_body(Body) when is_binary(Body) -> Body;
+flatten_body(?Q(_, Q)) -> iolist_to_binary(queue:to_list(Q)).
 
+parse_frame(Body, Header, 0, Options) ->
+{ok, packet(Header), flatten_body(Body), ?none(Options)};
 parse_frame(Body, Header, Length, Options) ->
-%% already appended
-parse_frame(Body, _SplitTail = <<>>, Header, Length, Options).
-
-parse_frame(Body, Tail, Header, 0, Options) ->
-{ok, packet(Header), flatten_body(Body, Tail), ?none(Options)};
-parse_frame(Body, Tail, Header, Length, Options) ->
 case body_bytes(Body) >= Length of
 true ->
-<<FrameBin:Length/binary, Rest/binary>> = flatten_body(Body, Tail),
+<<FrameBin:Length/binary, Rest/binary>> = flatten_body(Body),
 case parse_packet(Header, FrameBin, Options) of
 {Variable, Payload} ->
 {ok, packet(Header, Variable, Payload), Rest, ?none(Options)};

@@ -202,7 +189,7 @@ parse_frame(Body, Tail, Header, Length, Options) ->
 false ->
 {more, {{body, #{hdr => Header,
 len => Length,
-rest => append_body(Body, Tail)
+rest => Body
 }}, Options}}
 end.
 
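With this change the parser no longer splits the input itself; whatever follows a complete frame comes back as Rest. A minimal, assumed driver loop on top of emqx_frame:parse/1,2 (the function name is illustrative; the return shapes match the t_parse_sticky_frames test added below):

    parse_stream(Bin, ParseState) ->
        case emqx_frame:parse(Bin, ParseState) of
            {more, NewState} ->
                %% not enough bytes for a full packet yet; wait for the next chunk
                {[], NewState};
            {ok, Packet, <<>>, NewState} ->
                {[Packet], NewState};
            {ok, Packet, Rest, NewState} ->
                %% Rest may already contain the start of the next ("sticky") packet
                {Packets, FinalState} = parse_stream(Rest, NewState),
                {[Packet | Packets], FinalState}
        end.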
@@ -108,11 +108,7 @@ normalise_headers(Headers0) ->
 [{K, proplists:get_value(K, Headers)} || K <- Keys].
 
 normalise_parse_result(#{host := Host, scheme := Scheme0} = Map) ->
-Scheme = atom_scheme(Scheme0),
-DefaultPort = case https =:= Scheme of
-true -> 443;
-false -> 80
-end,
+{Scheme, DefaultPort} = atom_scheme_and_default_port(Scheme0),
 Port = case maps:get(port, Map, undefined) of
 N when is_number(N) -> N;
 _ -> DefaultPort

@@ -122,11 +118,14 @@ normalise_parse_result(#{host := Host, scheme := Scheme0} = Map) ->
 , port => Port
 }.
 
-%% NOTE: so far we only support http schemes.
-atom_scheme(Scheme) when is_list(Scheme) -> atom_scheme(list_to_binary(Scheme));
-atom_scheme(<<"https">>) -> https;
-atom_scheme(<<"http">>) -> http;
-atom_scheme(Other) -> throw({unsupported_scheme, Other}).
+%% NOTE: so far we only support http/coap schemes.
+atom_scheme_and_default_port(Scheme) when is_list(Scheme) ->
+atom_scheme_and_default_port(list_to_binary(Scheme));
+atom_scheme_and_default_port(<<"http">> ) -> {http, 80};
+atom_scheme_and_default_port(<<"https">>) -> {https, 443};
+atom_scheme_and_default_port(<<"coap">> ) -> {coap, 5683};
+atom_scheme_and_default_port(<<"coaps">>) -> {coaps, 5684};
+atom_scheme_and_default_port(Other) -> throw({unsupported_scheme, Other}).
 
 do_uri_encode(Char) ->
 case reserved(Char) of
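With the scheme table above, the default port now falls out of the scheme. A minimal sketch of the observable behaviour (it mirrors the assertions added to emqx_http_lib_tests further down; the error tuple for unsupported schemes is taken from the existing wss test):

    %% default ports are filled in when the URI does not carry one
    {ok, #{scheme := coap, port := 5683}} = emqx_http_lib:uri_parse("coap://127.0.0.1"),
    {ok, #{scheme := coaps, port := 5684}} = emqx_http_lib:uri_parse("coaps://127.0.0.1"),
    %% unknown schemes are rejected rather than guessed
    {error, {unsupported_scheme, <<"wss">>}} = emqx_http_lib:uri_parse("wss://127.0.0.1").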
@@ -35,15 +35,9 @@ format(#{msg := Msg0, meta := Meta} = Event, Config) ->
 logger_formatter:format(Event#{msg := Msg}, Config).
 
 maybe_merge({report, Report}, Meta) when is_map(Report) ->
-{report, maps:merge(rename(Report), filter(Meta))};
+{report, maps:merge(Report, filter(Meta))};
 maybe_merge(Report, _Meta) ->
 Report.
 
 filter(Meta) ->
 maps:without(?WITHOUT_MERGE, Meta).
-
-rename(#{'$kind' := Kind} = Meta0) -> % snabbkaffe
-Meta = maps:remove('$kind', Meta0),
-Meta#{msg => Kind};
-rename(Meta) ->
-Meta.
@@ -52,7 +52,7 @@ censor([Key | _], Val) ->
 end.
 
 is_sensitive(Key) when is_atom(Key) ->
-is_sensitive(atom_to_binary(Key));
+is_sensitive(atom_to_binary(Key, utf8));
 is_sensitive(Key) when is_list(Key) ->
 try iolist_to_binary(Key) of
 Bin ->
@@ -172,7 +172,14 @@ load_ext_plugin(PluginDir) ->
 error({plugin_app_file_not_found, AppFile})
 end,
 ok = load_plugin_app(AppName, Ebin),
-ok = load_plugin_conf(AppName, PluginDir).
+try
+ok = load_plugin_conf(AppName, PluginDir)
+catch
+throw : {conf_file_not_found, ConfFile} ->
+%% this is maybe a dependency of an external plugin
+?LOG(debug, "config_load_error_ignored for app=~p, path=~s", [AppName, ConfFile]),
+ok
+end.
 
 load_plugin_app(AppName, Ebin) ->
 _ = code:add_patha(Ebin),

@@ -180,8 +187,8 @@ load_plugin_app(AppName, Ebin) ->
 lists:foreach(
 fun(BeamFile) ->
 Module = list_to_atom(filename:basename(BeamFile, ".beam")),
-case code:ensure_loaded(Module) of
-{module, Module} -> ok;
+case code:load_file(Module) of
+{module, _} -> ok;
 {error, Reason} -> error({failed_to_load_plugin_beam, BeamFile, Reason})
 end
 end, Modules),

@@ -193,12 +200,12 @@ load_plugin_app(AppName, Ebin) ->
 load_plugin_conf(AppName, PluginDir) ->
 Priv = filename:join([PluginDir, "priv"]),
 Etc = filename:join([PluginDir, "etc"]),
-Schema = filelib:wildcard(filename:join([Priv, "*.schema"])),
 ConfFile = filename:join([Etc, atom_to_list(AppName) ++ ".conf"]),
 Conf = case filelib:is_file(ConfFile) of
 true -> cuttlefish_conf:file(ConfFile);
-false -> error({conf_file_not_found, ConfFile})
+false -> throw({conf_file_not_found, ConfFile})
 end,
+Schema = filelib:wildcard(filename:join([Priv, "*.schema"])),
 ?LOG(debug, "loading_extra_plugin_config conf=~s, schema=~s", [ConfFile, Schema]),
 AppsEnv = cuttlefish_generator:map(cuttlefish_schema:files(Schema), Conf),
 lists:foreach(fun({AppName1, Envs}) ->
@@ -257,13 +257,16 @@ websocket_init([Req, Opts]) ->
 case proplists:get_bool(proxy_protocol, Opts)
 andalso maps:get(proxy_header, Req) of
 #{src_address := SrcAddr, src_port := SrcPort, ssl := SSL} ->
-ProxyName = {SrcAddr, SrcPort},
+SourceName = {SrcAddr, SrcPort},
 %% Notice: Only CN is available in Proxy Protocol V2 additional info
-ProxySSL = case maps:get(cn, SSL, undefined) of
+SourceSSL = case maps:get(cn, SSL, undefined) of
 undeined -> nossl;
 CN -> [{pp2_ssl_cn, CN}]
 end,
-{ProxyName, ProxySSL};
+{SourceName, SourceSSL};
+#{src_address := SrcAddr, src_port := SrcPort} ->
+SourceName = {SrcAddr, SrcPort},
+{SourceName , nossl};
 _ ->
 {get_peer(Req, Opts), cowboy_req:cert(Req)}
 end,
@@ -181,16 +181,20 @@ t_discard_session(_) ->
 ok = meck:unload(emqx_connection).
 
 t_discard_session_race(_) ->
-ok = snabbkaffe:start_trace(),
-#{conninfo := ConnInfo0} = ?ChanInfo,
-ConnInfo = ConnInfo0#{conn_mod := emqx_ws_connection},
-{Pid, Ref} = spawn_monitor(fun() -> receive stop -> exit(normal) end end),
-ok = emqx_cm:register_channel(<<"clientid">>, Pid, ConnInfo),
-Pid ! stop,
-receive {'DOWN', Ref, process, Pid, normal} -> ok end,
-ok = emqx_cm:discard_session(<<"clientid">>),
-{ok, _} = ?block_until(#{?snk_kind := "session_already_gone", pid := Pid}, 1000),
-snabbkaffe:stop().
+?check_trace(
+begin
+#{conninfo := ConnInfo0} = ?ChanInfo,
+ConnInfo = ConnInfo0#{conn_mod := emqx_ws_connection},
+{Pid, Ref} = spawn_monitor(fun() -> receive stop -> exit(normal) end end),
+ok = emqx_cm:register_channel(<<"clientid">>, Pid, ConnInfo),
+Pid ! stop,
+receive {'DOWN', Ref, process, Pid, normal} -> ok end,
+ok = emqx_cm:discard_session(<<"clientid">>),
+{ok, _} = ?block_until(#{?snk_kind := "session_already_gone", pid := Pid}, 1000)
+end,
+fun(_, _) ->
+true
+end).
 
 t_takeover_session(_) ->
 #{conninfo := ConnInfo} = ?ChanInfo,
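?check_trace replaces the manual snabbkaffe:start_trace/stop pair: the first argument is run while trace collection is active, and the second argument receives the run result and the collected trace. A minimal sketch of the shape (names are illustrative; only the macros already used above are assumed from snabbkaffe):

    ?check_trace(
        begin
            %% run stage: exercise the code under test while events are being traced
            do_something(),
            ok
        end,
        fun(RunResult, Trace) ->
            %% check stage: RunResult is the run stage's value, Trace the collected events
            ?assertEqual(ok, RunResult),
            ?assertMatch([_ | _], Trace)
        end).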
@@ -58,7 +58,8 @@ groups() ->
 t_serialize_parse_connack_v5
 ]},
 {publish, [parallel],
-[t_serialize_parse_qos0_publish,
+[t_parse_sticky_frames,
+t_serialize_parse_qos0_publish,
 t_serialize_parse_qos1_publish,
 t_serialize_parse_qos2_publish,
 t_serialize_parse_publish_v5

@@ -286,6 +287,24 @@ t_serialize_parse_connack_v5(_) ->
 Packet = ?CONNACK_PACKET(?RC_SUCCESS, 0, Props),
 ?assertEqual(Packet, parse_serialize(Packet, #{version => ?MQTT_PROTO_V5})).
 
+t_parse_sticky_frames(_) ->
+Payload = lists:duplicate(10, 0),
+P = #mqtt_packet{header = #mqtt_packet_header{type = ?PUBLISH,
+dup = false,
+qos = ?QOS_0,
+retain = false},
+variable = #mqtt_packet_publish{topic_name = <<"a/b">>,
+packet_id = undefined},
+payload = iolist_to_binary(Payload)
+},
+Bin = serialize_to_binary(P),
+Size = size(Bin),
+<<H:(Size-2)/binary, TailTwoBytes/binary>> = Bin,
+{more, PState1} = emqx_frame:parse(H), %% needs 2 more bytes
+%% feed 3 bytes as if the next 1 byte belongs to the next packet.
+{ok, _, <<42>>, PState2} = emqx_frame:parse(iolist_to_binary([TailTwoBytes, 42]), PState1),
+?assertMatch({none, _}, PState2).
+
 t_serialize_parse_qos0_publish(_) ->
 Bin = <<48,14,0,7,120,120,120,47,121,121,121,104,101,108,108,111>>,
 Packet = #mqtt_packet{header = #mqtt_packet_header{type = ?PUBLISH,
@@ -66,6 +66,16 @@ uri_parse_test_() ->
 emqx_http_lib:uri_parse("HTTPS://127.0.0.1"))
 end
 }
+, {"coap default port",
+fun() -> ?assertMatch({ok, #{scheme := coap, port := 5683}},
+emqx_http_lib:uri_parse("coap://127.0.0.1"))
+end
+}
+, {"coaps default port",
+fun() -> ?assertMatch({ok, #{scheme := coaps, port := 5684}},
+emqx_http_lib:uri_parse("coaps://127.0.0.1"))
+end
+}
 , {"unsupported_scheme",
 fun() -> ?assertEqual({error, {unsupported_scheme, <<"wss">>}},
 emqx_http_lib:uri_parse("wss://127.0.0.1"))