Compare commits

master...feat-truly (258 commits)

Commits in this comparison (abbreviated SHA1s):

6d37d2df68 0955122468 8a4e0a3ecb dbb519ee0e 5cbdfa61b8 1fa7e70081
eb5e1a5cb9 9af60ac126 29be556e9a c5869b62dc 8f853982a6 2d29ba8550
28e23523a5 4b2586fec4 f8acb31f89 aa90177302 818bde1820 2fb8ffa8c2
c6c9ba400e 42695a2f9a 1d0c8a4eef f00e254bdf 3d9054d25e 0ab1b7c95d
2a3d1fcb78 fa92e61440 763f567f7d c9d39b4d35 8526032200 d28f913b94
3175d59e7b 9832a2ed00 ecb6c1c59e 831f2eda0c fb62487801 765a76fa80
7f4809f61a cfe3efed4a 791caba2ed 2ce592040e afd6fe181c b50f0b67d4
cb3d2fd6c3 18fc82855b dd4e307753 ec30fb346a ba3e7841e5 49c7eae211
6d48bbf34c ba319e1159 43ac315444 8341a4d4a7 b4c2643291 67b543f01e
3e1abbddd2 48d932af83 a198158bfb 99453df637 fd34eb1f4e 224cc0d5c7
4896c03881 d2f4c55fd7 3cae4437fa 1984e5b68c 51d2fa1359 08c2907d44
9038da0bd2 51bc9c83c3 f7d70d05ab 44d666f62b d2649eea81 14aaa4affe
cb5db8059b 703f52cec7 4b098ce3af 7e1f3c5882 b60e33ca41 4643415b0b
6dca349435 85723e4a35 75fac32c12 5ca28749ed b88674e876 58b5d3709d
230af7990d 7c1ce8bc70 668bcad4e0 20ee42be87 29ad2c04da ecf4d196eb
89fbf5fea2 ca77749281 6bee6279f8 c50c72b18e f793883e35 aca6367561
4020db8fc1 c8dda45c55 1d9f5ea133 0120f8cf45 e012e77ce6 e1a2dc9138
3fa442f4a4 feef02b639 da2c41702d 7804b39e08 44d16a26ab 42339b2e35
ebbf567fff ec89781cc4 40850d981d 188b889ed3 004160af56 2dba91d6d0
19031e21ec d59d3849e0 92a3d683cf 55d4dfde44 1ae3f8c204 9d993e1625
9f843d618d 80421651d7 d777ca7baf 3ad0678892 4664b85968 ff2d73ad3b
1a694814e0 f92ff4494b 1a291d5d97 9b097ac73f 5fc1036cf7 ed61999fdf
4eacaa29bd aaa7cd0a44 eeb44086c8 a27b75b98e eb88a0b7b6 e78967cfc3
fe343a0407 e532fff4df 7bcc67f95d ea15aa3f9e 6aa61ea78d a41e6604cf
473e600b53 dffa81120c f92cfa72d2 1245020ec0 844133c7c5 f9601804e5
d2d42ed33e 31a1942b61 391eb55324 a84b6b74bd e0336e60da e5c4277109
45b9f682b2 94712064b5 f3bd1f1c3a ced8693043 66f69e7693 7ec8dc21a6
1e4ca14476 b3db4d0f7c 22f7b0b8e5 60e830fef7 f9a9d4a6f0 5854bfab57
bf0036bf81 554879c9d7 c09cb64db6 ff96250b0b 992e094ce9 efa71d12fe
de96349ddf 5b671e5c4f ed53b859d9 c45de03ac8 87ee94b6f2 d4ce7f328d
ac0639f6b1 ff4229bb93 8fb9170df8 46e58a50d0 35ae097038 a7d67eb862
253fa9167c 36fa9f99be 65df4fd9ca 49a00c3412 2479c2a80b 07f58c0e9e
4618eb985a 2bd12a9a3d d3d019cb89 07c29e8c55 dd23ee6b15 fee3462603
a538979c3b 8809d72ee5 a9a1c71eb4 b851a7ea7c 7103324426 f6138e8971
b1d190fd3b fbaf2646f9 31cbd66f61 c878c73395 5bc33b9b5b befd22282f
5edb5351b0 2b1249ba9c 8b4b9a119b 46d0cb69dc 240a4b88a5 d3d636fc99
8cd1fa41b6 750cb2d491 780e403262 05b16c601b 8935d28ed4 0c66fcef00
637cd5e804 5fbf83e7f0 513cd001ac 868cd6e57c a8aabd5f74 002cbb6d8b
e87838f272 f18b9a92bc 49a78c8ef2 8ad42cb827 f17962e79a 98c4fff43f
bfc6c3aa42 1a438125c7 2092bedb12 a6bd1c90d5 3ddbdbc6c1 2c0916ff05
77a41ea88f b92940af29 bed45417dc 8110ef7a64 ecec9bd2f6 6724e59e7a
5962c9c83c c0367fb8dd 0ecaa80fb8 bdd9154001 bbed1b55e0 074c0bd2cc
69ef5cbdc3 42a6f2aba5 0184a1b3e8 86766ee7f1 8eebdd5cdb 1f57968c9b
@@ -0,0 +1,14 @@
#!/bin/bash

set -xe

cd "$EMQX_PATH"

rm -rf _build _upgrade_base

mkdir _upgrade_base
pushd _upgrade_base
wget "https://s3-us-west-2.amazonaws.com/packages.emqx/emqx-ce/v${EMQX_BASE}/emqx-ubuntu20.04-${EMQX_BASE}-amd64.zip"
popd

make emqx-zip
@@ -0,0 +1,15 @@
#!/bin/bash

set -xe

mkdir -p "$TEST_PATH"
cd "$TEST_PATH"

cp ../"$EMQX_PATH"/_upgrade_base/*.zip ./
unzip ./*.zip

cp ../"$EMQX_PATH"/_packages/emqx/*.zip ./emqx/releases/

git clone --depth 1 https://github.com/terry-xiaoyu/one_more_emqx.git

./one_more_emqx/one_more_emqx.sh emqx2
@@ -0,0 +1,17 @@
#!/bin/bash

set -xe

export EMQX_PATH="$1"
export EMQX_BASE="$2"

export TEST_PATH="emqx_test"

./build.sh

VERSION=$("$EMQX_PATH"/pkg-vsn.sh)
export VERSION

./prepare.sh

./test.sh
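The wrapper above takes the path of an EMQX checkout and the base version to upgrade from, then chains build.sh, prepare.sh and test.sh. A minimal invocation sketch, assuming the three scripts sit next to an `emqx` checkout and that 4.3.0 is a published base release (the same arguments the CI workflow further down passes; the local paths are assumptions):

```bash
#!/bin/bash
# Hypothetical local run of the ACL-migration suite; the checkout path and
# base version are assumptions, not part of this diff.
set -xe

# $1 -> EMQX_PATH (source checkout), $2 -> EMQX_BASE (release to download and upgrade from)
./suite.sh ./emqx 4.3.0
```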
@@ -0,0 +1,121 @@
#!/bin/bash

set -e

EMQX_ENDPOINT="http://localhost:8081/api/v4/acl"
EMQX2_ENDPOINT="http://localhost:8917/api/v4/acl"

function run() {
    emqx="$1"
    shift

    echo "[$emqx]" "$@"

    pushd "$TEST_PATH/$emqx"
    "$@"
    popd
}

function post_rule() {
    endpoint="$1"
    rule="$2"
    echo -n "->($endpoint) "
    curl -s -u admin:public -X POST "$endpoint" -d "$rule"
    echo
}

function verify_clientid_rule() {
    endpoint="$1"
    id="$2"
    echo -n "<-($endpoint) "
    curl -s -u admin:public "$endpoint/clientid/$id" | grep "$id" || (echo "verify rule for client $id failed" && return 1)
}

# Run nodes

run emqx ./bin/emqx start
run emqx2 ./bin/emqx start

run emqx ./bin/emqx_ctl plugins load emqx_auth_mnesia
run emqx2 ./bin/emqx_ctl plugins load emqx_auth_mnesia

run emqx2 ./bin/emqx_ctl cluster join 'emqx@127.0.0.1'

# Add ACL rules to the not-yet-upgraded EMQX nodes

post_rule "$EMQX_ENDPOINT" '{"clientid": "CLIENT1_A","topic": "t", "action": "pub", "access": "allow"}'
post_rule "$EMQX2_ENDPOINT" '{"clientid": "CLIENT1_B","topic": "t", "action": "pub", "access": "allow"}'

# Upgrade emqx2 node

run emqx2 ./bin/emqx install "$VERSION"
sleep 60

# Verify upgrade blocked

run emqx2 ./bin/emqx eval 'emqx_acl_mnesia_migrator:is_old_table_migrated().' | grep false || (echo "emqx2 shouldn't have migrated" && exit 1)

# Verify old rules on both nodes

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT1_A'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT1_A'

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT1_B'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT1_B'

# Add ACL on OLD and NEW node, verify on all nodes

post_rule "$EMQX_ENDPOINT" '{"clientid": "CLIENT2_A","topic": "t", "action": "pub", "access": "allow"}'
post_rule "$EMQX2_ENDPOINT" '{"clientid": "CLIENT2_B","topic": "t", "action": "pub", "access": "allow"}'

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT2_A'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT2_A'

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT2_B'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT2_B'

# Upgrade emqx node

run emqx ./bin/emqx install "$VERSION"

# Wait for upgrade

sleep 60

# Verify the upgrade occurred

run emqx ./bin/emqx eval 'emqx_acl_mnesia_migrator:is_old_table_migrated().' | grep true || (echo "emqx should have migrated" && exit 1)
run emqx2 ./bin/emqx eval 'emqx_acl_mnesia_migrator:is_old_table_migrated().' | grep true || (echo "emqx2 should have migrated" && exit 1)

# Verify rules are kept

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT1_A'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT1_A'

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT1_B'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT1_B'

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT2_A'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT2_A'

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT2_B'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT2_B'

# Add ACL on OLD and NEW node, verify on all nodes

post_rule "$EMQX_ENDPOINT" '{"clientid": "CLIENT3_A","topic": "t", "action": "pub", "access": "allow"}'
post_rule "$EMQX2_ENDPOINT" '{"clientid": "CLIENT3_B","topic": "t", "action": "pub", "access": "allow"}'

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT3_A'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT3_A'

verify_clientid_rule "$EMQX_ENDPOINT" 'CLIENT3_B'
verify_clientid_rule "$EMQX2_ENDPOINT" 'CLIENT3_B'

# Stop nodes

run emqx ./bin/emqx stop
run emqx2 ./bin/emqx stop

echo "Success!"
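For a quick manual check of what post_rule and verify_clientid_rule do, the same round trip can be driven by hand against the management API of a running node. A sketch, assuming the defaults the script uses above (management listener on port 8081 and the admin:public credentials):

```bash
# Create an ACL entry for one client id via the emqx_auth_mnesia HTTP API ...
curl -s -u admin:public -X POST "http://localhost:8081/api/v4/acl" \
     -d '{"clientid": "CLIENT1_A", "topic": "t", "action": "pub", "access": "allow"}'

# ... then read it back; grep exits non-zero (and so does the check) if the rule is missing.
curl -s -u admin:public "http://localhost:8081/api/v4/acl/clientid/CLIENT1_A" | grep CLIENT1_A
```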
@@ -0,0 +1,99 @@
version: '3.9'

services:
  haproxy:
    container_name: haproxy
    image: haproxy:2.3
    depends_on:
      - emqx1
      - emqx2
    volumes:
      - ./haproxy/haproxy.cfg:/usr/local/etc/haproxy/haproxy.cfg
      - ../../etc/certs:/usr/local/etc/haproxy/certs
    ports:
      - "18083:18083"
      # - "1883:1883"
      # - "8883:8883"
      # - "8083:8083"
      # - "5683:5683/udp"
      # - "9999:9999"
      # - "8084:8084"
    networks:
      - emqx_bridge
    working_dir: /usr/local/etc/haproxy
    command:
      - bash
      - -c
      - |
        cat /usr/local/etc/haproxy/certs/cert.pem /usr/local/etc/haproxy/certs/key.pem > /usr/local/etc/haproxy/certs/emqx.pem
        haproxy -f /usr/local/etc/haproxy/haproxy.cfg

  emqx1:
    restart: always
    container_name: node1.emqx.io
    image: $TARGET:$EMQX_TAG
    env_file:
      - conf.cluster.env
    volumes:
      - etc:/opt/emqx/etc
    environment:
      - "EMQX_HOST=node1.emqx.io"
    ports:
      - "11881:18083"
      # - "1883:1883"
    command:
      - /bin/sh
      - -c
      - |
        sed -i "s 127.0.0.1 $$(ip route show |grep "link" |awk '{print $$1}') g" /opt/emqx/etc/acl.conf
        sed -i '/emqx_telemetry/d' /opt/emqx/data/loaded_plugins
        /opt/emqx/bin/emqx foreground
    healthcheck:
      test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"]
      interval: 5s
      timeout: 25s
      retries: 5
    networks:
      emqx_bridge:
        aliases:
        - node1.emqx.io

  emqx2:
    restart: always
    container_name: node2.emqx.io
    image: $TARGET:$EMQX_TAG
    env_file:
      - conf.cluster.env
    volumes:
      - etc:/opt/emqx/etc
    environment:
      - "EMQX_HOST=node2.emqx.io"
    ports:
      - "11882:18083"
    command:
      - /bin/sh
      - -c
      - |
        sed -i "s 127.0.0.1 $$(ip route show |grep "link" |awk '{print $$1}') g" /opt/emqx/etc/acl.conf
        sed -i '/emqx_telemetry/d' /opt/emqx/data/loaded_plugins
        /opt/emqx/bin/emqx foreground
    healthcheck:
      test: ["CMD", "/opt/emqx/bin/emqx", "ping"]
      interval: 5s
      timeout: 25s
      retries: 5
    networks:
      emqx_bridge:
        aliases:
        - node2.emqx.io

volumes:
  etc:

networks:
  emqx_bridge:
    driver: bridge
    name: emqx_bridge
    ipam:
      driver: default
      config:
      - subnet: 172.100.239.0/24
        gateway: 172.100.239.1
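The compose file refers to the broker image as $TARGET:$EMQX_TAG, so both variables have to be set before bringing the cluster up. A usage sketch, assuming this hunk is the docker-compose-emqx-cluster.yaml referenced by the CI jobs below and that a locally built emqx/emqx image with the given tag exists (the tag value is an assumption; CI derives it from pkg-vsn.sh):

```bash
# Bring up haproxy plus the two-node EMQX cluster defined above.
export TARGET=emqx/emqx      # image repository expected by the compose file
export EMQX_TAG=4.3.6        # assumed tag of a locally available image
docker-compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml up -d --build

# The dashboard is then reachable through haproxy on port 18083.
```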
@@ -27,6 +27,7 @@ services:
         haproxy -f /usr/local/etc/haproxy/haproxy.cfg
 
   emqx1:
+    restart: always
     container_name: node1.emqx.io
     image: $TARGET:$EMQX_TAG
     env_file:

@@ -51,6 +52,7 @@ services:
         - node1.emqx.io
 
   emqx2:
+    restart: always
     container_name: node2.emqx.io
     image: $TARGET:$EMQX_TAG
     env_file:
@@ -0,0 +1,10 @@
version: '3.9'

services:
  web_server:
    container_name: Tomcat
    build:
      context: ./http-service
    image: web-server
    networks:
      - emqx_bridge
@@ -0,0 +1,15 @@
FROM tomcat:10.0.5

RUN wget https://downloads.apache.org/maven/maven-3/3.6.3/binaries/apache-maven-3.6.3-bin.zip \
    && unzip apache-maven-3.6.3-bin.zip \
    && mv apache-maven-3.6.3 /opt/apache-maven-3.6.3/ \
    && ln -s /opt/apache-maven-3.6.3/ /opt/maven
ENV M2_HOME=/opt/maven
ENV M2=$M2_HOME/bin
ENV PATH=$M2:$PATH
COPY ./web-server /code
WORKDIR /code
RUN mvn package -Dmaven.skip.test=true
RUN mv ./target/emqx-web-0.0.1.war /usr/local/tomcat/webapps/emqx-web.war
EXPOSE 8080
CMD ["/usr/local/tomcat/bin/catalina.sh","run"]
@@ -0,0 +1,65 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>emqx-web</groupId>
    <artifactId>emqx-web</artifactId>
    <version>0.0.1</version>
    <packaging>war</packaging>
    <dependencies>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.16</version>
        </dependency>
        <dependency>
            <groupId>commons-dbutils</groupId>
            <artifactId>commons-dbutils</artifactId>
            <version>1.7</version>
        </dependency>
        <dependency>
            <groupId>commons-logging</groupId>
            <artifactId>commons-logging</artifactId>
            <version>1.2</version>
        </dependency>
        <dependency>
            <groupId>commons-dbcp</groupId>
            <artifactId>commons-dbcp</artifactId>
            <version>1.4</version>
        </dependency>
        <dependency>
            <groupId>commons-pool</groupId>
            <artifactId>commons-pool</artifactId>
            <version>1.6</version>
        </dependency>
        <dependency>
            <groupId>jakarta.servlet</groupId>
            <artifactId>jakarta.servlet-api</artifactId>
            <version>5.0.0</version>
            <scope>provided</scope>
        </dependency>
    </dependencies>
    <build>
        <resources>
            <resource>
                <directory>src/main/reousrce</directory>
                <excludes>
                    <exclude>**/*.java</exclude>
                </excludes>
            </resource>
        </resources>
        <plugins>
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
            <plugin>
                <artifactId>maven-war-plugin</artifactId>
                <version>3.2.3</version>
            </plugin>
        </plugins>
    </build>

</project>
@@ -0,0 +1,54 @@
package com.emqx.dao;

import java.io.IOException;
import java.sql.SQLException;

import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.ScalarHandler;

import com.emqx.util.EmqxDatabaseUtil;

public class AuthDAO {

    public String getUserName(String userName) throws IOException, SQLException {
        QueryRunner runner = new QueryRunner(EmqxDatabaseUtil.getDataSource());
        String sql = "select password from http_user where username='"+userName+"'";
        String password = runner.query(sql, new ScalarHandler<String>());
        return password;
    }

    public String getClient(String clientid) throws IOException, SQLException {
        QueryRunner runner = new QueryRunner(EmqxDatabaseUtil.getDataSource());
        String sql = "select password from http_user where clientid='"+clientid+"'";
        String password = runner.query(sql, new ScalarHandler<String>());
        return password;
    }

    public String getUserAccess(String userName) throws IOException, SQLException {
        QueryRunner runner = new QueryRunner(EmqxDatabaseUtil.getDataSource());
        String sql = "select access from http_acl where username='"+userName+"'";
        String access = runner.query(sql, new ScalarHandler<String>());
        return access;
    }

    public String getUserTopic(String userName) throws IOException, SQLException {
        QueryRunner runner = new QueryRunner(EmqxDatabaseUtil.getDataSource());
        String sql = "select topic from http_acl where username='"+userName+"'";
        String topic = runner.query(sql, new ScalarHandler<String>());
        return topic;
    }

    public String getClientAccess(String clientid) throws IOException, SQLException {
        QueryRunner runner = new QueryRunner(EmqxDatabaseUtil.getDataSource());
        String sql = "select access from http_acl where clientid='"+clientid+"'";
        String access = runner.query(sql, new ScalarHandler<String>());
        return access;
    }

    public String getClientTopic(String clientid) throws IOException, SQLException {
        QueryRunner runner = new QueryRunner(EmqxDatabaseUtil.getDataSource());
        String sql = "select topic from http_acl where clientid='"+clientid+"'";
        String topic = runner.query(sql, new ScalarHandler<String>());
        return topic;
    }
}
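AuthDAO only looks up two tables, http_user and http_acl, using the columns named in the SQL above. A bootstrap sketch of the kind of schema those queries would run against; the column names come from the queries and the connection details from database.properties further down, while the types, sizes and seed row are assumptions:

```bash
# Hypothetical seeding of the MySQL database the DAO assumes (host, db and
# credentials taken from database.properties; table definitions are assumed).
mysql -h mysql_server -u root -ppublic mqtt <<'SQL'
CREATE TABLE IF NOT EXISTS http_user (
  username VARCHAR(100),
  clientid VARCHAR(100),
  password VARCHAR(100)
);
CREATE TABLE IF NOT EXISTS http_acl (
  username VARCHAR(100),
  clientid VARCHAR(100),
  access   VARCHAR(10),
  topic    VARCHAR(200)
);
INSERT INTO http_user (username, clientid, password) VALUES ('user1', 'client1', 'secret');
SQL
```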
@@ -0,0 +1,45 @@
package com.emqx.dao;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Properties;

import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.ColumnListHandler;
import org.apache.commons.dbutils.handlers.ScalarHandler;
import org.apache.commons.dbutils.handlers.columns.StringColumnHandler;

public class DBUtilsTest {

    public static void main(String args[]) throws FileNotFoundException, IOException, SQLException {
        Properties property = new Properties(); // properties file stream

        property.load(DBUtilsTest.class.getClassLoader().getResourceAsStream("database.properties"));

        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName(property.getProperty("jdbc.driver"));
        dataSource.setUrl(property.getProperty("jdbc.url"));
        dataSource.setUsername(property.getProperty("jdbc.username"));
        dataSource.setPassword(property.getProperty("jdbc.password"));

        // initial connection count: if (initialSize != null)
        //dataSource.setInitialSize(Integer.parseInt(initialSize));

        // minimum idle connections: if (minIdle != null)
        //dataSource.setMinIdle(Integer.parseInt(minIdle));

        // maximum idle connections: if (maxIdle != null)
        //dataSource.setMaxIdle(Integer.parseInt(maxIdle));

        QueryRunner runner = new QueryRunner(dataSource);
        String sql = "select username from mqtt_user where id=1";
        String result = runner.query(sql, new ScalarHandler<String>());

        System.out.println(result);

    }
}
@@ -0,0 +1,103 @@
package com.emqx.servlet;

import java.io.IOException;
import java.sql.SQLException;

import com.emqx.dao.AuthDAO;

import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;

public class AclServlet extends HttpServlet {

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        // TODO Auto-generated method stub
        doPost(req, resp);
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        String clientid = req.getParameter("clientid");
        String username = req.getParameter("username");
        String access = req.getParameter("access");
        String topic = req.getParameter("topic");
        //String password = req.getParameter("password");

        //step0: password is not null, or not pass.

        AuthDAO dao = new AuthDAO();
        try {
            //step1: check username access&topic
            if (username != null) {
                String access_1 = dao.getUserAccess(username);
                String topic_1 = dao.getUserTopic(username);

                if (access.equals(access_1)) {
                    if (topic.equals(topic_1)) {
                        resp.setStatus(200);
                    } else {
                        if (clientid != null) {
                            String access_2 = dao.getClientAccess(clientid);
                            String topic_2 = dao.getClientTopic(clientid);
                            if (access.equals(access_2)) {
                                if (topic.equals(topic_2)) {
                                    resp.setStatus(200);
                                } else {
                                    resp.setStatus(400);
                                }
                            } else {
                                resp.setStatus(400);
                            }
                        } else {
                            resp.setStatus(400);
                        }
                    }
                } else { //step2.1: username access/topic did not match, then check clientid
                    if (clientid != null) {
                        String access_3 = dao.getClientAccess(clientid);
                        String topic_3 = dao.getClientTopic(clientid);
                        if (access.equals(access_3)) {
                            if (topic.equals(topic_3)) {
                                resp.setStatus(200);
                            } else {
                                resp.setStatus(400);
                            }
                        } else {
                            resp.setStatus(400);
                        }
                    } else {
                        resp.setStatus(400);
                    }
                }
            } else { //step2.2: username is null, then check clientid
                if (clientid != null) {
                    String access_4 = dao.getClientAccess(clientid);
                    String topic_4 = dao.getClientTopic(clientid);
                    if (access.equals(access_4)) {
                        if (topic.equals(topic_4)) {
                            resp.setStatus(200);
                        } else {
                            resp.setStatus(400);
                        }
                    } else {
                        resp.setStatus(400);
                    }
                } else {
                    resp.setStatus(400);
                }
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
@@ -0,0 +1,72 @@
package com.emqx.servlet;

import java.io.IOException;
import java.sql.SQLException;

import com.emqx.dao.AuthDAO;

import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;

public class AuthServlet extends HttpServlet {

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        // TODO Auto-generated method stub
        doPost(req, resp);
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        String clientid = req.getParameter("clientid");
        String username = req.getParameter("username");
        String password = req.getParameter("password");

        //step0: password is not null, or not pass.
        if (password == null) {
            resp.setStatus(400);
            return;
        }
        AuthDAO dao = new AuthDAO();
        try {
            //step1: check username password
            if (username != null) {
                String password_d = dao.getUserName(username);

                if (password.equals(password_d)) {
                    resp.setStatus(200);
                    //200
                } else { //step2.1: username/password did not match, then check clientid password
                    if (clientid != null) {
                        String password_c = dao.getClient(clientid);
                        if (password.equals(password_c)) {
                            resp.setStatus(200);
                        } else {
                            resp.setStatus(400);
                        }
                    } else {
                        resp.setStatus(400);
                    }
                }
            } else { //step2.2: username is null, then check clientid password
                if (clientid != null) {
                    String password_c = dao.getClient(clientid);
                    if (password.equals(password_c)) {
                        resp.setStatus(200);
                    } else {
                        resp.setStatus(400);
                    }
                } else {
                    resp.setStatus(400);
                }
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (SQLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
@@ -0,0 +1,27 @@
package com.emqx.util;

import java.io.IOException;
import java.util.Properties;

import javax.sql.DataSource;

import org.apache.commons.dbcp.BasicDataSource;

import com.emqx.dao.DBUtilsTest;

public class EmqxDatabaseUtil {

    public static DataSource getDataSource() throws IOException {
        Properties property = new Properties(); // properties file stream

        property.load(EmqxDatabaseUtil.class.getClassLoader().getResourceAsStream("database.properties"));

        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName(property.getProperty("jdbc.driver"));
        dataSource.setUrl(property.getProperty("jdbc.url"));
        dataSource.setUsername(property.getProperty("jdbc.username"));
        dataSource.setPassword(property.getProperty("jdbc.password"));

        return dataSource;
    }
}
@@ -0,0 +1,4 @@
jdbc.driver= com.mysql.jdbc.Driver
jdbc.url= jdbc:mysql://mysql_server:3306/mqtt
jdbc.username= root
jdbc.password= public
@@ -0,0 +1,3 @@
Manifest-Version: 1.0
Class-Path: 

@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<web-app xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns="http://JAVA.sun.com/xml/ns/javaee"
    xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
    id="WebApp_ID" version="2.5">
    <display-name>emqx-web</display-name>
    <servlet>
        <servlet-name>Auth</servlet-name>
        <servlet-class>com.emqx.servlet.AuthServlet</servlet-class>
    </servlet>
    <servlet>
        <servlet-name>Acl</servlet-name>
        <servlet-class>com.emqx.servlet.AclServlet</servlet-class>
    </servlet>
    <servlet-mapping>
        <servlet-name>Auth</servlet-name>
        <url-pattern>/auth</url-pattern>
    </servlet-mapping>
    <servlet-mapping>
        <servlet-name>Acl</servlet-name>
        <url-pattern>/acl</url-pattern>
    </servlet-mapping>
    <welcome-file-list>
        <welcome-file>index.html</welcome-file>
        <welcome-file>index.htm</welcome-file>
        <welcome-file>index.jsp</welcome-file>
        <welcome-file>default.html</welcome-file>
        <welcome-file>default.htm</welcome-file>
        <welcome-file>default.jsp</welcome-file>
    </welcome-file-list>
</web-app>
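web.xml maps AuthServlet to /auth and AclServlet to /acl, and both servlets read their inputs from request parameters, so the deployed webapp can be exercised directly with curl. A sketch, assuming the WAR is deployed as emqx-web (the context path implied by the Dockerfile above) on a Tomcat reachable at localhost:8080; host, port and the seeded row values are assumptions:

```bash
# Password check: 200 when username/password (or clientid/password) matches a row in http_user.
curl -i "http://localhost:8080/emqx-web/auth" \
     --data "clientid=client1&username=user1&password=secret"

# ACL check: 200 when access and topic equal the values stored in http_acl for that user/client.
curl -i "http://localhost:8080/emqx-web/acl" \
     --data "clientid=client1&username=user1&access=allow&topic=t"
```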
@@ -0,0 +1,10 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>love</title>
</head>
<body>
It's lucky, jiabanxiang.
</body>
</html>
@@ -1,7 +1,7 @@
 {erl_opts, [debug_info]}.
 {deps,
     [
-     {minirest, {git, "https://github.com/emqx/minirest.git", {tag, "0.3.5"}}}
+     {minirest, {git, "https://github.com/emqx/minirest.git", {tag, "0.3.6"}}}
     ]}.
 
 {shell, [
@@ -0,0 +1,40 @@
#!/bin/bash

USAGE="$0 profile vsn old_vsn package_path"
EXAMPLE="$0 emqx 4.3.8-b3bb6075 v4.3.2 /home/alice/relup_dubug/downloaded_packages"

if [[ $# -ne 4 ]]; then
    echo "$USAGE"
    echo "$EXAMPLE"
    exit 1
fi

set -ex

PROFILE="$1"
VSN="$2"
OLD_VSN="$3"
PACKAGE_PATH="$4"

TEMPDIR=$(mktemp -d)
trap '{ rm -rf -- "$TEMPDIR"; }' EXIT

git clone --branch=master "https://github.com/terry-xiaoyu/one_more_emqx.git" "$TEMPDIR/one_more_emqx"
cp -r "$PACKAGE_PATH" "$TEMPDIR/packages"
cp relup.lux "$TEMPDIR/"
cp -r http_server "$TEMPDIR/http_server"

exec docker run \
    -v "$TEMPDIR:/relup_test" \
    -w "/relup_test" \
    -e REBAR_COLOR=none \
    -it emqx/relup-test-env:erl23.2.7.2-emqx-2-ubuntu20.04 \
    lux \
    --progress verbose \
    --case_timeout infinity \
    --var PROFILE="$PROFILE" \
    --var PACKAGE_PATH="/relup_test/packages" \
    --var ONE_MORE_EMQX_PATH="/relup_test/one_more_emqx" \
    --var VSN="$VSN" \
    --var OLD_VSN="$OLD_VSN" \
    relup.lux
@@ -1,15 +1,12 @@
 [config var=PROFILE]
 [config var=PACKAGE_PATH]
-[config var=BENCH_PATH]
 [config var=ONE_MORE_EMQX_PATH]
 [config var=VSN]
-[config var=OLD_VSNS]
+[config var=OLD_VSN]
 
 [config shell_cmd=/bin/bash]
 [config timeout=600000]
 
-[loop old_vsn $OLD_VSNS]
-
 [shell http_server]
     !cd http_server
     !rebar3 shell

@@ -22,12 +19,11 @@
 
 [shell emqx]
     !cd $PACKAGE_PATH
-    !unzip -q -o $PROFILE-ubuntu20.04-$(echo $old_vsn | sed -r 's/[v|e]//g')-amd64.zip
+    !unzip -q -o $PROFILE-ubuntu20.04-$(echo $OLD_VSN | sed -r 's/[v|e]//g')-amd64.zip
     ?SH-PROMPT
 
     !cd emqx
-    !sed -i 's|listener.wss.external[ \t]*=.*|listener.wss.external = 8085|g' etc/emqx.conf
-    !sed -i '/emqx_telemetry/d' data/loaded_plugins
+    !export EMQX_LOG__LEVEL=debug
 
     !./bin/emqx start
     ?EMQ X .* is started successfully!

@@ -40,10 +36,10 @@
     ?SH-PROMPT
     !cd emqx2
 
-    !sed -i '/emqx_telemetry/d' data/loaded_plugins
+    !export EMQX_LOG__LEVEL=debug
 
     !./bin/emqx start
-    ?EMQ X (.*) is started successfully!
+    ?EMQ X .* is started successfully!
     ?SH-PROMPT
 
     !./bin/emqx_ctl cluster join emqx@127.0.0.1

@@ -63,6 +59,8 @@
     !./bin/emqx_ctl rules create 'SELECT * FROM "t/#"' '[{"name":"data_to_webserver", "params": {"$$resource": "resource:691c29ba"}}]'
     ?created
     ?SH-PROMPT
+    !sleep 5
+    ?SH-PROMPT
 
 [shell emqx]
     !./bin/emqx_ctl resources list

@@ -71,11 +69,11 @@
     !./bin/emqx_ctl rules list
     ?691c29ba
     ?SH-PROMPT
+    !./bin/emqx_ctl broker metrics | grep "messages.publish"
+    ???SH-PROMPT
 
 [shell bench]
-    !cd $BENCH_PATH
-
-    !./emqtt_bench pub -c 10 -I 1000 -t t/%i -s 64 -L 300
+    !emqtt_bench pub -c 10 -I 1000 -t t/%i -s 64 -L 300
     ???sent
 
 [shell emqx]

@@ -99,6 +97,10 @@
     """
     ?SH-PROMPT
 
+    !./bin/emqx_ctl plugins list | grep --color=never emqx_management
+    ?Plugin\(emqx_management.*active=true\)
+    ?SH-PROMPT
+
 [shell emqx2]
     !echo "" > log/emqx.log.1
     ?SH-PROMPT

@@ -120,9 +122,29 @@
     """
     ?SH-PROMPT
 
+    !./bin/emqx_ctl plugins list | grep --color=never emqx_management
+    ?Plugin\(emqx_management.*active=true\)
+    ?SH-PROMPT
+
 [shell bench]
     ???publish complete
     ??SH-PROMPT:
+    !sleep 30
+    ?SH-PROMPT
+
+[shell emqx]
+    !./bin/emqx_ctl broker metrics | grep "messages.publish"
+    ???SH-PROMPT
+
+[shell bench]
+    !curl --user admin:public --silent --show-error http://localhost:8081/api/v4/rules | jq -M --raw-output ".data[0].metrics[] | select(.node==\"emqx@127.0.0.1\").matched"
+    ?300
+    ?SH-PROMPT
+
+    !curl --user admin:public --silent --show-error http://localhost:8081/api/v4/rules | jq -M --raw-output ".data[0].actions[0].metrics[] | select(.node==\"emqx@127.0.0.1\").success"
+    ?300
+    ?SH-PROMPT
+
     !curl http://127.0.0.1:8080/counter
     ???{"data":300,"code":0}
     ?SH-PROMPT

@@ -158,8 +180,6 @@
     !halt(3).
     ?SH-PROMPT:
 
-[endloop]
-
 [cleanup]
     !echo ==$$?==
     ?==0==
@@ -83,6 +83,7 @@ jobs:
     - name: build
       env:
         PYTHON: python
+        DIAGNOSTIC: 1
       run: |
        $env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH"

@@ -168,9 +169,11 @@ jobs:
     - name: build
       run: |
         . $HOME/.kerl/${{ matrix.erl_otp }}/activate
-        make -C source ensure-rebar3
-        sudo cp source/rebar3 /usr/local/bin/rebar3
-        make -C source ${{ matrix.profile }}-zip
+        cd source
+        make ensure-rebar3
+        sudo cp rebar3 /usr/local/bin/rebar3
+        rm -rf _build/${{ matrix.profile }}/lib
+        make ${{ matrix.profile }}-zip
     - name: test
       run: |
         cd source

@@ -465,7 +468,7 @@ jobs:
             -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
             -H "Accept: application/vnd.github.v3+json" \
             -X POST \
-            -d "{\"ref\":\"v1.0.1\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \
+            -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \
             "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
     - name: update repo.emqx.io
       if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'

@@ -474,7 +477,7 @@ jobs:
             -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
             -H "Accept: application/vnd.github.v3+json" \
             -X POST \
-            -d "{\"ref\":\"v1.0.1\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \
+            -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \
             "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
     - name: update homebrew packages
       if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'

@@ -484,7 +487,7 @@ jobs:
             -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
             -H "Accept: application/vnd.github.v3+json" \
             -X POST \
-            -d "{\"ref\":\"v1.0.1\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \
+            -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \
             "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_homebrew.yaml/dispatches"
       fi
     - uses: geekyeggo/delete-artifact@v1
@@ -38,6 +38,11 @@ jobs:
       run: make ${EMQX_NAME}-zip
     - name: build deb/rpm packages
       run: make ${EMQX_NAME}-pkg
+    - uses: actions/upload-artifact@v1
+      if: failure()
+      with:
+        name: rebar3.crashdump
+        path: ./rebar3.crashdump
     - name: pakcages test
       run: |
         export CODE_PATH=$GITHUB_WORKSPACE

@@ -94,6 +99,11 @@ jobs:
         make ensure-rebar3
         sudo cp rebar3 /usr/local/bin/rebar3
         make ${EMQX_NAME}-zip
+    - uses: actions/upload-artifact@v1
+      if: failure()
+      with:
+        name: rebar3.crashdump
+        path: ./rebar3.crashdump
     - name: test
       run: |
         pkg_name=$(basename _packages/${EMQX_NAME}/emqx-*.zip)
@@ -0,0 +1,22 @@
name: ACL fix & migration integration tests

on: workflow_dispatch

jobs:
  test:
    runs-on: ubuntu-20.04
    container: emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04
    strategy:
      fail-fast: true
    env:
      BASE_VERSION: "4.3.0"
    steps:
    - uses: actions/checkout@v2
      with:
        path: emqx
    - name: Prepare scripts
      run: |
        cp ./emqx/.ci/acl_migration_test/*.sh ./
    - name: Run tests
      run: |
        ./suite.sh emqx "$BASE_VERSION"
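Because this workflow only triggers on workflow_dispatch, it has to be started by hand. A sketch of dispatching it with the GitHub CLI; the availability of an authenticated gh CLI is an assumption, while the workflow name comes from the YAML above:

```bash
# Manually dispatch the ACL migration test workflow on the default branch.
gh workflow run "ACL fix & migration integration tests"

# Inspect the run it produced.
gh run list --workflow "ACL fix & migration integration tests" --limit 1
```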
@ -0,0 +1,437 @@
|
||||||
|
name: Integration Test Suites
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- "v4.*"
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- "main-v4.*"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
imgname: ${{ steps.build_docker.outputs.imgname}}
|
||||||
|
version: ${{ steps.build_docker.outputs.version}}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: build docker
|
||||||
|
id: build_docker
|
||||||
|
run: |
|
||||||
|
if [ -f EMQX_ENTERPRISE ]; then
|
||||||
|
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
||||||
|
git config --global credential.helper store
|
||||||
|
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
|
||||||
|
make deps-emqx-ee
|
||||||
|
fi
|
||||||
|
make docker
|
||||||
|
echo "::set-output name=version::$(./pkg-vsn.sh)"
|
||||||
|
if [ -f EMQX_ENTERPRISE ]; then
|
||||||
|
echo "::set-output name=imgname::emqx-ee"
|
||||||
|
else
|
||||||
|
echo "::set-output name=imgname::emqx"
|
||||||
|
fi
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
with:
|
||||||
|
name: emqx-docker-image-zip
|
||||||
|
path: _packages/${{ steps.build_docker.outputs.imgname }}/${{ steps.build_docker.outputs.imgname }}-docker-${{ steps.build_docker.outputs.version }}.zip
|
||||||
|
|
||||||
|
webhook:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
webhook_type:
|
||||||
|
- webhook_data_bridge
|
||||||
|
|
||||||
|
needs: build
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: emqx-docker-image-zip
|
||||||
|
path: /tmp
|
||||||
|
- name: load docker image
|
||||||
|
env:
|
||||||
|
imgname: ${{ needs.build.outputs.imgname}}
|
||||||
|
version: ${{ needs.build.outputs.version }}
|
||||||
|
run: |
|
||||||
|
unzip -q /tmp/${imgname}-docker-${version}.zip -d /tmp
|
||||||
|
docker load < /tmp/${imgname}-docker-${version}
|
||||||
|
- name: docker compose up
|
||||||
|
timeout-minutes: 5
|
||||||
|
env:
|
||||||
|
TARGET: emqx/${{ needs.build.outputs.imgname }}
|
||||||
|
EMQX_TAG: ${{ needs.build.outputs.version }}
|
||||||
|
run: |
|
||||||
|
docker-compose \
|
||||||
|
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||||
|
up -d --build
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
repository: emqx/emqx-svt-web-server
|
||||||
|
ref: web-server-1.0
|
||||||
|
path: emqx-svt-web-server
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
- name: run webserver in docker
|
||||||
|
run: |
|
||||||
|
cd ./emqx-svt-web-server/svtserver
|
||||||
|
mvn clean package
|
||||||
|
cd target
|
||||||
|
docker run --name webserver --network emqx_bridge -d -v $(pwd)/svtserver-0.0.1.jar:/webserver/svtserver-0.0.1.jar --workdir /webserver openjdk:8-jdk bash \
|
||||||
|
-c "java -jar svtserver-0.0.1.jar"
|
||||||
|
- name: wait docker compose up
|
||||||
|
timeout-minutes: 5
|
||||||
|
run: |
|
||||||
|
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||||
|
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||||
|
sleep 5;
|
||||||
|
done
|
||||||
|
docker ps -a
|
||||||
|
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||||
|
echo WEB_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' webserver) >> $GITHUB_ENV
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
repository: emqx/emqx-fvt
|
||||||
|
ref: integration_test_suites
|
||||||
|
path: scripts
|
||||||
|
- uses: actions/setup-java@v1
|
||||||
|
with:
|
||||||
|
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||||
|
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||||
|
architecture: x64 # (x64 or x86) - defaults to x64
|
||||||
|
- name: install jmeter
|
||||||
|
timeout-minutes: 10
|
||||||
|
env:
|
||||||
|
JMETER_VERSION: 5.3
|
||||||
|
run: |
|
||||||
|
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz https://downloads.apache.org/jmeter/binaries/apache-jmeter-$JMETER_VERSION.tgz
|
||||||
|
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||||
|
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||||
|
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||||
|
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
|
||||||
|
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||||
|
- name: run jmeter
|
||||||
|
run: |
|
||||||
|
/opt/jmeter/bin/jmeter.sh \
|
||||||
|
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||||
|
-t scripts/.ci/automate-test-suite/${{ matrix.webhook_type }}.jmx \
|
||||||
|
-Demqx_ip=$HAPROXY_IP \
|
||||||
|
-Dweb_ip=$WEB_IP \
|
||||||
|
-l jmeter_logs/webhook_${{ matrix.webhook_type }}.jtl \
|
||||||
|
-j jmeter_logs/logs/webhook_${{ matrix.webhook_type }}.log
|
||||||
|
- name: check logs
|
||||||
|
run: |
|
||||||
|
if cat jmeter_logs/webhook_${{ matrix.webhook_type }}.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
|
||||||
|
echo "check logs filed"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
- uses: actions/upload-artifact@v1
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: jmeter_logs
|
||||||
|
path: ./jmeter_logs
|
||||||
|
|
||||||
|
mysql:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
mysql_tag:
|
||||||
|
- 5.7
|
||||||
|
- 8
|
||||||
|
mysql_type:
|
||||||
|
- mysql_auth_acl
|
||||||
|
|
||||||
|
needs: build
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: emqx-docker-image-zip
|
||||||
|
path: /tmp
|
||||||
|
- name: load docker image
|
||||||
|
env:
|
||||||
|
imgname: ${{ needs.build.outputs.imgname }}
|
||||||
|
version: ${{ needs.build.outputs.version }}
|
||||||
|
run: |
|
||||||
|
unzip -q /tmp/${imgname}-docker-${version}.zip -d /tmp
|
||||||
|
docker load < /tmp/${imgname}-docker-${version}
|
||||||
|
- name: docker compose up
|
||||||
|
timeout-minutes: 5
|
||||||
|
env:
|
||||||
|
TARGET: emqx/${{ needs.build.outputs.imgname }}
|
||||||
|
EMQX_TAG: ${{ needs.build.outputs.version }}
|
||||||
|
MYSQL_TAG: ${{ matrix.mysql_tag }}
|
||||||
|
run: |
|
||||||
|
docker-compose \
|
||||||
|
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||||
|
-f .ci/docker-compose-file/docker-compose-mysql-tls.yaml \
|
||||||
|
up -d --build
|
||||||
|
- name: wait docker compose up
|
||||||
|
timeout-minutes: 5
|
||||||
|
run: |
|
||||||
|
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||||
|
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||||
|
sleep 5;
|
||||||
|
done
|
||||||
|
while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \
|
||||||
|
!= $(docker ps -a --filter name=client | wc -l) ]; do
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
docker ps -a
|
||||||
|
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||||
|
echo MYSQL_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' mysql) >> $GITHUB_ENV
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
repository: emqx/emqx-fvt
|
||||||
|
ref: integration_test_suites
|
||||||
|
path: scripts
|
||||||
|
- uses: actions/setup-java@v1
|
||||||
|
with:
|
||||||
|
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||||
|
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||||
|
architecture: x64 # (x64 or x86) - defaults to x64
|
||||||
|
- name: install jmeter
|
||||||
|
timeout-minutes: 10
|
||||||
|
env:
|
||||||
|
JMETER_VERSION: 5.3
|
||||||
|
run: |
|
||||||
|
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz https://downloads.apache.org/jmeter/binaries/apache-jmeter-$JMETER_VERSION.tgz
|
||||||
|
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||||
|
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||||
|
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||||
|
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
|
||||||
|
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||||
|
- name: install jmeter plugin
|
||||||
|
run: |
|
||||||
|
wget --no-verbose -O "/opt/jmeter/lib/mysql-connector-java-8.0.16.jar" https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar
|
||||||
|
- name: run jmeter
|
||||||
|
run: |
|
||||||
|
/opt/jmeter/bin/jmeter.sh \
|
||||||
|
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||||
|
-t scripts/.ci/automate-test-suite/${{ matrix.mysql_type }}.jmx \
|
||||||
|
-Droute="apps/emqx_auth_mysql/test/emqx_auth_mysql_SUITE_data" \
|
||||||
|
-Dmysql_ip=$MYSQL_IP \
|
||||||
|
-Demqx_ip=$HAPROXY_IP \
|
||||||
|
-Ddbname="mqtt" \
|
||||||
|
-Dmysql_user="ssluser" \
|
||||||
|
-Ddb_user="root" \
|
||||||
|
-Dmysql_pwd="public" \
|
||||||
|
-Dconfig_path="/tmp/etc" \
|
||||||
|
-Ddocker_path=".ci/docker-compose-file" \
|
||||||
|
-l jmeter_logs/${{ matrix.mysql_type }}_${{ matrix.mysql_tag }}.jtl \
|
||||||
|
-j jmeter_logs/logs/${{ matrix.mysql_type }}_${{ matrix.mysql_tag }}.log
|
||||||
|
- name: check logs
|
||||||
|
run: |
|
||||||
|
if cat jmeter_logs/${{ matrix.mysql_type }}_${{ matrix.mysql_tag }}.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
|
||||||
|
echo "check logs filed"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
- uses: actions/upload-artifact@v1
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: jmeter_logs
|
||||||
|
path: ./jmeter_logs
|
||||||
|
|
||||||
|
|
||||||
|
postgresql:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
pgsql_type:
|
||||||
|
- pgsql_auth_acl
|
||||||
|
pgsql_tag:
|
||||||
|
- 9
|
||||||
|
- 10
|
||||||
|
- 11
|
||||||
|
- 12
|
||||||
|
- 13
|
||||||
|
|
||||||
|
needs: build
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: emqx-docker-image-zip
|
||||||
|
path: /tmp
|
||||||
|
- name: load docker image
|
||||||
|
env:
|
||||||
|
imgname: ${{ needs.build.outputs.imgname }}
|
||||||
|
version: ${{ needs.build.outputs.version }}
|
||||||
|
run: |
|
||||||
|
unzip -q /tmp/${imgname}-docker-${version}.zip -d /tmp
|
||||||
|
docker load < /tmp/${imgname}-docker-${version}
|
||||||
|
- name: docker compose up
|
||||||
|
timeout-minutes: 5
|
||||||
|
env:
|
||||||
|
TARGET: emqx/${{ needs.build.outputs.imgname }}
|
||||||
|
EMQX_TAG: ${{ needs.build.outputs.version }}
|
||||||
|
PGSQL_TAG: ${{ matrix.pgsql_tag }}
|
||||||
|
run: |
|
||||||
|
docker-compose \
|
||||||
|
-f .ci/docker-compose-file/docker-compose-emqx-broker-cluster.yaml \
|
||||||
|
-f .ci/docker-compose-file/docker-compose-pgsql-tls.yaml \
|
||||||
|
up -d --build
|
||||||
|
- name: wait docker compose up
|
||||||
|
timeout-minutes: 5
|
||||||
|
run: |
|
||||||
|
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||||
|
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||||
|
sleep 5;
|
||||||
|
done
|
||||||
|
docker ps -a
|
||||||
|
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||||
|
echo PGSQL_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' pgsql) >> $GITHUB_ENV
|
||||||
|
echo CONFIG_PATH=$(docker inspect -f '{{ range .Mounts }}{{ if eq .Name "docker-compose-file_etc" }}{{ .Source }}{{ end }}{{ end }}' node1.emqx.io) >> $GITHUB_ENV
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
repository: emqx/emqx-fvt
|
||||||
|
ref: integration_test_suites
|
||||||
|
path: scripts
|
||||||
|
- uses: actions/setup-java@v1
|
||||||
|
with:
|
||||||
|
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||||
|
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||||
|
architecture: x64 # (x64 or x86) - defaults to x64
|
||||||
|
- name: install jmeter
|
||||||
|
timeout-minutes: 10
|
||||||
|
env:
|
||||||
|
JMETER_VERSION: 5.3
|
||||||
|
run: |
|
||||||
|
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz https://downloads.apache.org/jmeter/binaries/apache-jmeter-$JMETER_VERSION.tgz
|
||||||
|
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||||
|
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||||
|
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||||
|
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
|
||||||
|
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||||
|
- name: install jmeter plugin
|
||||||
|
run: |
|
||||||
|
wget --no-verbose -O "/opt/jmeter/lib/postgresql-42.2.18.jar" https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.18/postgresql-42.2.18.jar
|
||||||
|
- name: run jmeter
|
||||||
|
run: |
|
||||||
|
sudo /opt/jmeter/bin/jmeter.sh \
|
||||||
|
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||||
|
-t scripts/.ci/automate-test-suite/${{ matrix.pgsql_type }}.jmx \
|
||||||
|
-Droute="apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data" \
|
||||||
|
-Dca_name="ca.pem" \
|
||||||
|
-Dkey_name="client-key.pem" \
|
||||||
|
-Dcert_name="client-cert.pem" \
|
||||||
|
-Ddb_ip=$PGSQL_IP \
|
||||||
|
-Dpgsql_ip=$PGSQL_IP \
|
||||||
|
-Demqx_ip=$HAPROXY_IP \
|
||||||
|
-Dpgsql_user="root" \
|
||||||
|
-Dpgsql_pwd="public" \
|
||||||
|
-Ddbname="mqtt" \
|
||||||
|
-Dpgsql_db="mqtt" \
|
||||||
|
-Dport="5432" \
|
||||||
|
-Dconfig_path=$CONFIG_PATH \
|
||||||
|
-Ddocker_path=".ci/docker-compose-file" \
|
||||||
|
-l jmeter_logs/${{ matrix.pgsql_type }}_${{ matrix.pgsql_tag }}.jtl \
|
||||||
|
-j jmeter_logs/logs/${{ matrix.pgsql_type }}_${{ matrix.pgsql_tag }}.log
|
||||||
|
- name: check logs
|
||||||
|
run: |
|
||||||
|
if cat jmeter_logs/${{ matrix.pgsql_type }}_${{ matrix.pgsql_tag }}.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
|
||||||
|
echo "check logs filed"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
- uses: actions/upload-artifact@v1
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: jmeter_logs
|
||||||
|
path: ./jmeter_logs
http:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
needs: build
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
with:
|
||||||
|
name: emqx-docker-image-zip
|
||||||
|
path: /tmp
|
||||||
|
- name: load docker image
|
||||||
|
env:
|
||||||
|
imgname: ${{ needs.build.outputs.imgname }}
|
||||||
|
version: ${{ needs.build.outputs.version }}
|
||||||
|
run: |
|
||||||
|
unzip -q /tmp/${imgname}-docker-${version}.zip -d /tmp
|
||||||
|
docker load < /tmp/${imgname}-docker-${version}
|
||||||
|
- name: docker compose up
|
||||||
|
timeout-minutes: 5
|
||||||
|
env:
|
||||||
|
TARGET: emqx/${{ needs.build.outputs.imgname }}
|
||||||
|
EMQX_TAG: ${{ needs.build.outputs.version }}
|
||||||
|
MYSQL_TAG: 8
|
||||||
|
run: |
|
||||||
|
docker-compose \
|
||||||
|
-f .ci/docker-compose-file/docker-compose-emqx-broker-cluster.yaml \
|
||||||
|
-f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \
|
||||||
|
-f .ci/docker-compose-file/docker-compose-enterprise-tomcat-tcp.yaml \
|
||||||
|
up -d --build
|
||||||
|
- name: wait docker compose up
|
||||||
|
timeout-minutes: 5
|
||||||
|
run: |
|
||||||
|
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||||
|
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||||
|
sleep 5;
|
||||||
|
done
|
||||||
|
docker ps -a
|
||||||
|
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||||
|
echo HTTP_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' Tomcat) >> $GITHUB_ENV
|
||||||
|
echo MYSQL_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' mysql) >> $GITHUB_ENV
|
||||||
|
echo CONFIG_PATH=$(docker inspect -f '{{ range .Mounts }}{{ if eq .Name "docker-compose-file_etc" }}{{ .Source }}{{ end }}{{ end }}' node1.emqx.io) >> $GITHUB_ENV
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
repository: emqx/emqx-fvt
|
||||||
|
ref: integration_test_suites
|
||||||
|
path: scripts
|
||||||
|
- uses: actions/setup-java@v1
|
||||||
|
with:
|
||||||
|
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||||
|
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||||
|
architecture: x64 # (x64 or x86) - defaults to x64
|
||||||
|
- name: install jmeter
|
||||||
|
timeout-minutes: 10
|
||||||
|
env:
|
||||||
|
JMETER_VERSION: 5.3
|
||||||
|
run: |
|
||||||
|
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz https://downloads.apache.org/jmeter/binaries/apache-jmeter-$JMETER_VERSION.tgz
|
||||||
|
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||||
|
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||||
|
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||||
|
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
|
||||||
|
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||||
|
- name: install jmeter plugin
|
||||||
|
run: |
|
||||||
|
wget --no-verbose -O "/opt/jmeter/lib/mysql-connector-java-8.0.16.jar" https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar
|
||||||
|
- name: run jmeter
|
||||||
|
run: |
|
||||||
|
sudo /opt/jmeter/bin/jmeter.sh \
|
||||||
|
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||||
|
-t scripts/.ci/automate-test-suite/http_auth_acl.jmx \
|
||||||
|
-Dmysql_ip=$MYSQL_IP \
|
||||||
|
-Demqx_ip=$HAPROXY_IP \
|
||||||
|
-Dweb_server_ip=$HTTP_IP \
|
||||||
|
-Dconfig_path=$CONFIG_PATH \
|
||||||
|
-Ddocker_path=".ci/docker-compose-file" \
|
||||||
|
-l jmeter_logs/http_auth_acl.jtl \
|
||||||
|
-j jmeter_logs/logs/http_auth_acl.log
|
||||||
|
- name: check logs
|
||||||
|
run: |
|
||||||
|
if cat jmeter_logs/http_auth_acl.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
|
||||||
|
echo "check logs filed"
|
||||||
|
sudo cat /var/lib/docker/volumes/docker-compose-file_etc/_data/emqx.conf
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
- uses: actions/upload-artifact@v1
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: jmeter_logs
|
||||||
|
path: ./jmeter_logs
|
|
@ -1,11 +1,12 @@
|
||||||
name: Compatibility Test Suite
|
name: Compatibility Test Suite
|
||||||
|
|
||||||
on:
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 */6 * * *'
|
||||||
push:
|
push:
|
||||||
tags:
|
tags:
|
||||||
- v*
|
- v*
|
||||||
- e*
|
- e*
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
ldap:
|
ldap:
|
||||||
|
|
|
@ -130,11 +130,27 @@ jobs:
|
||||||
echo "waiting emqx started";
|
echo "waiting emqx started";
|
||||||
sleep 10;
|
sleep 10;
|
||||||
done
|
done
|
||||||
- name: get pods log
|
- name: get emqx-0 pod logs
|
||||||
if: failure()
|
if: failure()
|
||||||
env:
|
env:
|
||||||
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||||
run: kubectl describe pods emqx-0
|
run: |
|
||||||
|
kubectl describe pods emqx-0
|
||||||
|
kubectl logs emqx-0
|
||||||
|
- name: get emqx-1 pod logs
|
||||||
|
if: failure()
|
||||||
|
env:
|
||||||
|
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||||
|
run: |
|
||||||
|
kubectl describe pods emqx-1
|
||||||
|
kubectl logs emqx-1
|
||||||
|
- name: get emqx-2 pod logs
|
||||||
|
if: failure()
|
||||||
|
env:
|
||||||
|
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||||
|
run: |
|
||||||
|
kubectl describe pods emqx-2
|
||||||
|
kubectl logs emqx-2
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
with:
|
with:
|
||||||
repository: emqx/paho.mqtt.testing
|
repository: emqx/paho.mqtt.testing
|
||||||
|
@ -162,76 +178,78 @@ jobs:
|
||||||
fi
|
fi
|
||||||
exit $RESULT
|
exit $RESULT
|
||||||
|
|
||||||
relup_test:
|
relup_test_plan:
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
container: emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04
|
||||||
|
outputs:
|
||||||
|
profile: ${{ steps.profile-and-versions.outputs.profile }}
|
||||||
|
vsn: ${{ steps.profile-and-versions.outputs.vsn }}
|
||||||
|
old_vsns: ${{ steps.profile-and-versions.outputs.old_vsns }}
|
||||||
|
broker: ${{ steps.profile-and-versions.outputs.broker }}
|
||||||
|
matrix: ${{ steps.generate-matrix.outputs.matrix }}
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
name: Checkout
|
||||||
|
with:
|
||||||
|
path: emqx
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Get profile and version list
|
||||||
|
id: profile-and-versions
|
||||||
|
run: |
|
||||||
|
cd emqx
|
||||||
|
vsn="$(./pkg-vsn.sh)"
|
||||||
|
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
|
||||||
|
|
||||||
|
if make emqx-ee --dry-run > /dev/null 2>&1; then
|
||||||
|
profile="emqx-ee"
|
||||||
|
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
|
||||||
|
broker="emqx-ee"
|
||||||
|
|
||||||
|
else
|
||||||
|
profile="emqx"
|
||||||
|
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
|
||||||
|
broker="emqx-ce"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
echo "::set-output name=vsn::$vsn"
|
||||||
|
echo "::set-output name=profile::$profile"
|
||||||
|
echo "::set-output name=broker::$broker"
|
||||||
|
echo "::set-output name=old_vsns::$old_vsns"
|
||||||
|
- name: Generate matrix
|
||||||
|
id: generate-matrix
|
||||||
|
run: |
|
||||||
|
matrix=$(echo -n "$OLD_VSNS" | jq -R -s -c 'split(" ")')
|
||||||
|
echo "::set-output name=matrix::$matrix"
|
||||||
|
|
||||||
|
relup_test_build:
|
||||||
|
needs: relup_test_plan
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
container: emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04
|
container: emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
env:
|
||||||
|
OLD_VSNS: "${{ needs.relup_test_plan.outputs.old_vsns }}"
|
||||||
|
PROFILE: "${{ needs.relup_test_plan.outputs.profile }}"
|
||||||
|
BROKER: "${{ needs.relup_test_plan.outputs.broker }}"
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: '3.8'
|
|
||||||
architecture: 'x64'
|
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
name: Checkout
|
||||||
with:
|
with:
|
||||||
repository: emqx/paho.mqtt.testing
|
|
||||||
ref: develop-4.0
|
|
||||||
path: paho.mqtt.testing
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: terry-xiaoyu/one_more_emqx
|
|
||||||
ref: master
|
|
||||||
path: one_more_emqx
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: emqx/emqtt-bench
|
|
||||||
ref: master
|
|
||||||
path: emqtt-bench
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: hawk/lux
|
|
||||||
ref: lux-2.6
|
|
||||||
path: lux
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: ${{ github.repository }}
|
|
||||||
path: emqx
|
path: emqx
|
||||||
fetch-depth: 0
|
- name: Prepare credentials
|
||||||
- name: prepare
|
|
||||||
run: |
|
run: |
|
||||||
if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then
|
if [ "$PROFILE" = "emqx-ee" ]; then
|
||||||
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
||||||
git config --global credential.helper store
|
git config --global credential.helper store
|
||||||
echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token
|
echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token
|
||||||
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
echo "PROFILE=emqx" >> $GITHUB_ENV
|
|
||||||
fi
|
fi
|
||||||
- name: get version
|
- name: Download bases
|
||||||
run: |
|
|
||||||
set -e -x -u
|
|
||||||
cd emqx
|
|
||||||
if [ $PROFILE = "emqx" ];then
|
|
||||||
broker="emqx-ce"
|
|
||||||
edition='opensource'
|
|
||||||
else
|
|
||||||
broker="emqx-ee"
|
|
||||||
edition='enterprise'
|
|
||||||
fi
|
|
||||||
echo "BROKER=$broker" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
vsn="$(./pkg-vsn.sh)"
|
|
||||||
echo "VSN=$vsn" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
|
|
||||||
if [ $PROFILE = "emqx" ]; then
|
|
||||||
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
|
|
||||||
else
|
|
||||||
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
|
|
||||||
fi
|
|
||||||
echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV
|
|
||||||
- name: download emqx
|
|
||||||
run: |
|
run: |
|
||||||
set -e -x -u
|
set -e -x -u
|
||||||
mkdir -p emqx/_upgrade_base
|
mkdir -p emqx/_upgrade_base
|
||||||
|
@ -240,39 +258,71 @@ jobs:
|
||||||
for old_vsn in ${old_vsns[@]}; do
|
for old_vsn in ${old_vsns[@]}; do
|
||||||
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip
|
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip
|
||||||
done
|
done
|
||||||
- name: build emqx
|
- name: Build emqx
|
||||||
run: make -C emqx ${PROFILE}-zip
|
run: make -C emqx ${PROFILE}-zip
|
||||||
- name: build emqtt-bench
|
- uses: actions/upload-artifact@v2
|
||||||
run: make -C emqtt-bench
|
name: Upload built emqx and test scenario
|
||||||
- name: build lux
|
with:
|
||||||
run: |
|
name: emqx_built
|
||||||
set -e -u -x
|
path: |
|
||||||
cd lux
|
emqx/_packages/*/*.zip
|
||||||
autoconf
|
emqx/.ci/fvt_tests
|
||||||
./configure
|
|
||||||
make
|
relup_test_run:
|
||||||
make install
|
needs:
|
||||||
- name: run relup test
|
- relup_test_plan
|
||||||
timeout-minutes: 20
|
- relup_test_build
|
||||||
|
runs-on: ubuntu-20.04
|
||||||
|
container: emqx/relup-test-env:erl23.2.7.2-emqx-2-ubuntu20.04
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
old_vsn: ${{ fromJson(needs.relup_test_plan.outputs.matrix) }}
|
||||||
|
env:
|
||||||
|
OLD_VSN: "${{ matrix.old_vsn }}"
|
||||||
|
PROFILE: "${{ needs.relup_test_plan.outputs.profile }}"
|
||||||
|
VSN: "${{ needs.relup_test_plan.outputs.vsn }}"
|
||||||
|
BROKER: "${{ needs.relup_test_plan.outputs.broker }}"
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
steps:
|
||||||
|
- uses: actions/download-artifact@v2
|
||||||
|
name: Download built emqx and test scenario
|
||||||
|
with:
|
||||||
|
name: emqx_built
|
||||||
|
path: emqx_built
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
name: Checkout one_more_emqx
|
||||||
|
with:
|
||||||
|
repository: terry-xiaoyu/one_more_emqx
|
||||||
|
ref: master
|
||||||
|
path: one_more_emqx
|
||||||
|
- name: Prepare packages
|
||||||
run: |
|
run: |
|
||||||
set -e -x -u
|
set -e -x -u
|
||||||
if [ -n "$OLD_VSNS" ]; then
|
mkdir -p packages
|
||||||
mkdir -p packages
|
cp emqx_built/_packages/*/*.zip packages
|
||||||
cp emqx/_packages/${PROFILE}/*.zip packages
|
cd packages
|
||||||
cp emqx/_upgrade_base/*.zip packages
|
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$OLD_VSN/$PROFILE-ubuntu20.04-${OLD_VSN#[e|v]}-amd64.zip
|
||||||
lux \
|
- name: Run relup test scenario
|
||||||
--case_timeout infinity \
|
timeout-minutes: 5
|
||||||
--var PROFILE=$PROFILE \
|
run: |
|
||||||
--var PACKAGE_PATH=$(pwd)/packages \
|
lux \
|
||||||
--var BENCH_PATH=$(pwd)/emqtt-bench \
|
--progress verbose \
|
||||||
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
|
--case_timeout infinity \
|
||||||
--var VSN="$VSN" \
|
--var PROFILE=$PROFILE \
|
||||||
--var OLD_VSNS="$OLD_VSNS" \
|
--var PACKAGE_PATH=$(pwd)/packages \
|
||||||
emqx/.ci/fvt_tests/relup.lux
|
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
|
||||||
fi
|
--var VSN="$VSN" \
|
||||||
- uses: actions/upload-artifact@v1
|
--var OLD_VSN="$OLD_VSN" \
|
||||||
|
emqx_built/.ci/fvt_tests/relup.lux
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
name: Save debug data
|
||||||
if: failure()
|
if: failure()
|
||||||
with:
|
with:
|
||||||
name: lux_logs
|
name: debug_data
|
||||||
path: lux_logs
|
path: |
|
||||||
|
packages/emqx/log/emqx.log.1
|
||||||
|
packages/emqx2/log/emqx.log.1
|
||||||
|
packages/*.zip
|
||||||
|
lux_logs
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
erlang 24.0.1-emqx-1
|
erlang 23.2.7.2-emqx-2
|
||||||
|
|
2
Makefile
2
Makefile
|
@ -5,7 +5,7 @@ BUILD = $(CURDIR)/build
|
||||||
SCRIPTS = $(CURDIR)/scripts
|
SCRIPTS = $(CURDIR)/scripts
|
||||||
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
|
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
|
||||||
export EMQX_DESC ?= EMQ X
|
export EMQX_DESC ?= EMQ X
|
||||||
export EMQX_CE_DASHBOARD_VERSION ?= v4.3.1
|
export EMQX_CE_DASHBOARD_VERSION ?= v4.3.3
|
||||||
ifeq ($(OS),Windows_NT)
|
ifeq ($(OS),Windows_NT)
|
||||||
export REBAR_COLOR=none
|
export REBAR_COLOR=none
|
||||||
endif
|
endif
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
EMQ X, a highly scalable, highly available distributed MQTT messaging broker for IoT.
|
||||||
|
Copyright (c) 2017-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
|
||||||
|
This product contains code developed at EMQ Technologies Co., Ltd.
|
||||||
|
Visit https://www.emqx.com to learn more.
|
|
@ -42,18 +42,18 @@ auth.http.auth_req.params = clientid=%c,username=%u,password=%P
|
||||||
## Value: URL
|
## Value: URL
|
||||||
##
|
##
|
||||||
## Examples: http://127.0.0.1:80/mqtt/superuser, https://[::1]:80/mqtt/superuser
|
## Examples: http://127.0.0.1:80/mqtt/superuser, https://[::1]:80/mqtt/superuser
|
||||||
auth.http.super_req.url = http://127.0.0.1:80/mqtt/superuser
|
# auth.http.super_req.url = http://127.0.0.1:80/mqtt/superuser
|
||||||
|
|
||||||
## HTTP Request Method for SuperUser Request
|
## HTTP Request Method for SuperUser Request
|
||||||
##
|
##
|
||||||
## Value: post | get
|
## Value: post | get
|
||||||
auth.http.super_req.method = post
|
# auth.http.super_req.method = post
|
||||||
|
|
||||||
## HTTP Request Headers for SuperUser Request, Content-Type header is configured by default.
|
## HTTP Request Headers for SuperUser Request, Content-Type header is configured by default.
|
||||||
## The possible values of the Content-Type header: application/x-www-form-urlencoded, application/json
|
## The possible values of the Content-Type header: application/x-www-form-urlencoded, application/json
|
||||||
##
|
##
|
||||||
## Examples: auth.http.super_req.headers.accept = */*
|
## Examples: auth.http.super_req.headers.accept = */*
|
||||||
auth.http.super_req.headers.content-type = application/x-www-form-urlencoded
|
# auth.http.super_req.headers.content-type = application/x-www-form-urlencoded
|
||||||
|
|
||||||
## Parameters used to construct the request body or query string parameters
|
## Parameters used to construct the request body or query string parameters
|
||||||
## When the request method is GET, these parameters will be converted into query string parameters
|
## When the request method is GET, these parameters will be converted into query string parameters
|
||||||
|
@ -70,7 +70,7 @@ auth.http.super_req.headers.content-type = application/x-www-form-urlencoded
|
||||||
## - %d: subject of client TLS cert
|
## - %d: subject of client TLS cert
|
||||||
##
|
##
|
||||||
## Value: <K1>=<V1>,<K2>=<V2>,...
|
## Value: <K1>=<V1>,<K2>=<V2>,...
|
||||||
auth.http.super_req.params = clientid=%c,username=%u
|
# auth.http.super_req.params = clientid=%c,username=%u
|
||||||
|
|
||||||
## HTTP URL API path for ACL Request
|
## HTTP URL API path for ACL Request
|
||||||
## Comment out this config to disable ACL checks
|
## Comment out this config to disable ACL checks
|
||||||
|
@ -136,6 +136,11 @@ auth.http.connect_timeout = 5s
|
||||||
## Value: Number
|
## Value: Number
|
||||||
auth.http.pool_size = 32
|
auth.http.pool_size = 32
|
||||||
|
|
||||||
|
## Whether to enable HTTP Pipelining
|
||||||
|
##
|
||||||
|
## See: https://en.wikipedia.org/wiki/HTTP_pipelining
|
||||||
|
auth.http.enable_pipelining = true
|
||||||
|
|
||||||
##------------------------------------------------------------------------------
|
##------------------------------------------------------------------------------
|
||||||
## SSL options
|
## SSL options
|
||||||
|
|
||||||
|
|
|
@ -109,6 +109,11 @@ end}.
|
||||||
{datatype, integer}
|
{datatype, integer}
|
||||||
]}.
|
]}.
|
||||||
|
|
||||||
|
{mapping, "auth.http.enable_pipelining", "emqx_auth_http.enable_pipelining", [
|
||||||
|
{default, true},
|
||||||
|
{datatype, {enum, [true, false]}}
|
||||||
|
]}.
|
||||||
|
|
||||||
{mapping, "auth.http.ssl.cacertfile", "emqx_auth_http.cacertfile", [
|
{mapping, "auth.http.ssl.cacertfile", "emqx_auth_http.cacertfile", [
|
||||||
{datatype, string}
|
{datatype, string}
|
||||||
]}.
|
]}.
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{application, emqx_auth_http,
|
{application, emqx_auth_http,
|
||||||
[{description, "EMQ X Authentication/ACL with HTTP API"},
|
[{description, "EMQ X Authentication/ACL with HTTP API"},
|
||||||
{vsn, "4.3.0"}, % strict semver, bump manually!
|
{vsn, "4.3.2"}, % strict semver, bump manually!
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, [emqx_auth_http_sup]},
|
{registered, [emqx_auth_http_sup]},
|
||||||
{applications, [kernel,stdlib,ehttpc]},
|
{applications, [kernel,stdlib,ehttpc]},
|
||||||
|
|
|
@ -0,0 +1,16 @@
|
||||||
|
%% -*- erlang -*-
|
||||||
|
|
||||||
|
{VSN,
|
||||||
|
[
|
||||||
|
{<<"4.3.[0-1]">>, [
|
||||||
|
{restart_application, emqx_auth_http}
|
||||||
|
]},
|
||||||
|
{<<".*">>, []}
|
||||||
|
],
|
||||||
|
[
|
||||||
|
{<<"4.3.[0-1]">>, [
|
||||||
|
{restart_application, emqx_auth_http}
|
||||||
|
]},
|
||||||
|
{<<".*">>, []}
|
||||||
|
]
|
||||||
|
}.
|
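The new appup uses regular expressions as version keys: upgrading from (or downgrading to) any 4.3.0 or 4.3.1 installation restarts the whole emqx_auth_http application, while the catch-all <<".*">> clause adds no instructions. Below is a small, self-contained sketch of how such a key is matched against an installed version; the module name and the literal version strings are illustrative and not part of the diff, and the anchoring mirrors OTP's requirement that the regex match the complete version string.

%% A sketch of appup version-key matching (illustrative versions; not from the diff).
-module(appup_match_sketch).
-export([demo/0]).

%% OTP requires the regex to match the complete version string,
%% so the pattern is anchored on both ends here.
matches(Vsn, Pattern) ->
    re:run(Vsn, ["^", Pattern, "$"], [{capture, none}]) =:= match.

demo() ->
    true  = matches("4.3.0", <<"4.3.[0-1]">>),
    true  = matches("4.3.1", <<"4.3.[0-1]">>),
    false = matches("4.3.2", <<"4.3.[0-1]">>),
    true  = matches("4.3.2", <<".*">>),
    ok.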
|
@ -50,14 +50,14 @@ translate_env(EnvName) ->
|
||||||
case application:get_env(?APP, EnvName) of
|
case application:get_env(?APP, EnvName) of
|
||||||
undefined -> ok;
|
undefined -> ok;
|
||||||
{ok, Req} ->
|
{ok, Req} ->
|
||||||
|
{ok, EnablePipelining} = application:get_env(?APP, enable_pipelining),
|
||||||
{ok, PoolSize} = application:get_env(?APP, pool_size),
|
{ok, PoolSize} = application:get_env(?APP, pool_size),
|
||||||
{ok, ConnectTimeout} = application:get_env(?APP, connect_timeout),
|
{ok, ConnectTimeout} = application:get_env(?APP, connect_timeout),
|
||||||
URL = proplists:get_value(url, Req),
|
URL = proplists:get_value(url, Req),
|
||||||
{ok, #{host := Host,
|
{ok, #{host := Host,
|
||||||
path := Path0,
|
|
||||||
port := Port,
|
port := Port,
|
||||||
scheme := Scheme}} = emqx_http_lib:uri_parse(URL),
|
scheme := Scheme} = URIMap} = emqx_http_lib:uri_parse(URL),
|
||||||
Path = path(Path0),
|
Path = path(URIMap),
|
||||||
MoreOpts = case Scheme of
|
MoreOpts = case Scheme of
|
||||||
http ->
|
http ->
|
||||||
[{transport_opts, emqx_misc:ipv6_probe([])}];
|
[{transport_opts, emqx_misc:ipv6_probe([])}];
|
||||||
|
@ -89,6 +89,7 @@ translate_env(EnvName) ->
|
||||||
end,
|
end,
|
||||||
PoolOpts = [{host, Host},
|
PoolOpts = [{host, Host},
|
||||||
{port, Port},
|
{port, Port},
|
||||||
|
{enable_pipelining, EnablePipelining},
|
||||||
{pool_size, PoolSize},
|
{pool_size, PoolSize},
|
||||||
{pool_type, random},
|
{pool_type, random},
|
||||||
{connect_timeout, ConnectTimeout},
|
{connect_timeout, ConnectTimeout},
|
||||||
|
@ -151,8 +152,12 @@ ensure_content_type_header(Method, Headers)
|
||||||
ensure_content_type_header(_Method, Headers) ->
|
ensure_content_type_header(_Method, Headers) ->
|
||||||
lists:keydelete("content-type", 1, Headers).
|
lists:keydelete("content-type", 1, Headers).
|
||||||
|
|
||||||
path("") ->
|
path(#{path := "", 'query' := Query}) ->
|
||||||
|
"?" ++ Query;
|
||||||
|
path(#{path := Path, 'query' := Query}) ->
|
||||||
|
Path ++ "?" ++ Query;
|
||||||
|
path(#{path := ""}) ->
|
||||||
"/";
|
"/";
|
||||||
path(Path) ->
|
path(#{path := Path}) ->
|
||||||
Path.
|
Path.
|
||||||
|
|
||||||
|
|
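The rewritten path/1 above now receives the whole URI map returned by emqx_http_lib:uri_parse/1, so a query string configured in the auth/ACL URL is preserved instead of being dropped. A minimal, self-contained sketch of those clauses with illustrative inputs; the module name and the example URLs are not part of the diff:

%% Standalone sketch of the new path/1 clauses (module name and inputs are illustrative).
-module(path_sketch).
-export([path/1, demo/0]).

%% Keeps the query string from the parsed URI map instead of discarding it.
path(#{path := "", 'query' := Query}) ->
    "?" ++ Query;
path(#{path := Path, 'query' := Query}) ->
    Path ++ "?" ++ Query;
path(#{path := ""}) ->
    "/";
path(#{path := Path}) ->
    Path.

demo() ->
    "/mqtt/auth?tenant=a" = path(#{path => "/mqtt/auth", 'query' => "tenant=a"}),
    "?tenant=a"           = path(#{path => "", 'query' => "tenant=a"}),
    "/"                   = path(#{path => ""}),
    "/mqtt/acl"           = path(#{path => "/mqtt/acl"}),
    ok.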
|
@ -29,7 +29,7 @@
|
||||||
|
|
||||||
request(PoolName, get, Path, Headers, Params, Timeout) ->
|
request(PoolName, get, Path, Headers, Params, Timeout) ->
|
||||||
NewPath = Path ++ "?" ++ binary_to_list(cow_qs:qs(bin_kw(Params))),
|
NewPath = Path ++ "?" ++ binary_to_list(cow_qs:qs(bin_kw(Params))),
|
||||||
reply(ehttpc:request(ehttpc_pool:pick_worker(PoolName), get, {NewPath, Headers}, Timeout));
|
reply(ehttpc:request(PoolName, get, {NewPath, Headers}, Timeout));
|
||||||
|
|
||||||
request(PoolName, post, Path, Headers, Params, Timeout) ->
|
request(PoolName, post, Path, Headers, Params, Timeout) ->
|
||||||
Body = case proplists:get_value("content-type", Headers) of
|
Body = case proplists:get_value("content-type", Headers) of
|
||||||
|
@ -38,7 +38,7 @@ request(PoolName, post, Path, Headers, Params, Timeout) ->
|
||||||
"application/json" ->
|
"application/json" ->
|
||||||
emqx_json:encode(bin_kw(Params))
|
emqx_json:encode(bin_kw(Params))
|
||||||
end,
|
end,
|
||||||
reply(ehttpc:request(ehttpc_pool:pick_worker(PoolName), post, {Path, Headers, Body}, Timeout)).
|
reply(ehttpc:request(PoolName, post, {Path, Headers, Body}, Timeout)).
|
||||||
|
|
||||||
reply({ok, StatusCode, _Headers}) ->
|
reply({ok, StatusCode, _Headers}) ->
|
||||||
{ok, StatusCode, <<>>};
|
{ok, StatusCode, <<>>};
|
||||||
|
|
|
@ -27,10 +27,6 @@
|
||||||
, description/0
|
, description/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-import(proplists, [get_value/2]).
|
|
||||||
|
|
||||||
-import(emqx_auth_ldap_cli, [search/4]).
|
|
||||||
|
|
||||||
-spec(register_metrics() -> ok).
|
-spec(register_metrics() -> ok).
|
||||||
register_metrics() ->
|
register_metrics() ->
|
||||||
lists:foreach(fun emqx_metrics:ensure/1, ?ACL_METRICS).
|
lists:foreach(fun emqx_metrics:ensure/1, ?ACL_METRICS).
|
||||||
|
@ -70,14 +66,14 @@ do_check_acl(#{username := Username}, PubSub, Topic, _NoMatchAction,
|
||||||
|
|
||||||
BaseDN = emqx_auth_ldap:replace_vars(CustomBaseDN, ReplaceRules),
|
BaseDN = emqx_auth_ldap:replace_vars(CustomBaseDN, ReplaceRules),
|
||||||
|
|
||||||
case search(Pool, BaseDN, Filter, [Attribute, Attribute1]) of
|
case emqx_auth_ldap_cli:search(Pool, BaseDN, Filter, [Attribute, Attribute1]) of
|
||||||
{error, noSuchObject} ->
|
{error, noSuchObject} ->
|
||||||
ok;
|
ok;
|
||||||
{ok, #eldap_search_result{entries = []}} ->
|
{ok, #eldap_search_result{entries = []}} ->
|
||||||
ok;
|
ok;
|
||||||
{ok, #eldap_search_result{entries = [Entry]}} ->
|
{ok, #eldap_search_result{entries = [Entry]}} ->
|
||||||
Topics = get_value(Attribute, Entry#eldap_entry.attributes)
|
Topics = proplists:get_value(Attribute, Entry#eldap_entry.attributes, [])
|
||||||
++ get_value(Attribute1, Entry#eldap_entry.attributes),
|
++ proplists:get_value(Attribute1, Entry#eldap_entry.attributes, []),
|
||||||
match(Topic, Topics);
|
match(Topic, Topics);
|
||||||
Error ->
|
Error ->
|
||||||
?LOG(error, "[LDAP] search error:~p", [Error]),
|
?LOG(error, "[LDAP] search error:~p", [Error]),
|
||||||
|
@ -95,4 +91,3 @@ match(Topic, [Filter | Topics]) ->
|
||||||
|
|
||||||
description() ->
|
description() ->
|
||||||
"ACL with LDAP".
|
"ACL with LDAP".
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{application, emqx_auth_ldap,
|
{application, emqx_auth_ldap,
|
||||||
[{description, "EMQ X Authentication/ACL with LDAP"},
|
[{description, "EMQ X Authentication/ACL with LDAP"},
|
||||||
{vsn, "4.3.0"}, % strict semver, bump manually!
|
{vsn, "4.3.2"}, % strict semver, bump manually!
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, [emqx_auth_ldap_sup]},
|
{registered, [emqx_auth_ldap_sup]},
|
||||||
{applications, [kernel,stdlib,eldap2,ecpool]},
|
{applications, [kernel,stdlib,eldap2,ecpool]},
|
||||||
|
|
|
@ -0,0 +1,22 @@
|
||||||
|
%% -*- erlang -*-
|
||||||
|
{VSN,
|
||||||
|
[ {"4.3.0",
|
||||||
|
[ {load_module, emqx_acl_ldap, brutal_purge, soft_purge, []}
|
||||||
|
, {load_module, emqx_auth_ldap_cli, brutal_purge, soft_purge, []}
|
||||||
|
]},
|
||||||
|
{"4.3.1",
|
||||||
|
[ {load_module, emqx_auth_ldap_cli, brutal_purge, soft_purge, []}
|
||||||
|
]},
|
||||||
|
{<<".*">>, []}
|
||||||
|
],
|
||||||
|
[
|
||||||
|
{"4.3.0",
|
||||||
|
[ {load_module, emqx_acl_ldap, brutal_purge, soft_purge, []}
|
||||||
|
, {load_module, emqx_auth_ldap_cli, brutal_purge, soft_purge, []}
|
||||||
|
]},
|
||||||
|
{"4.3.1",
|
||||||
|
[ {load_module, emqx_auth_ldap_cli, brutal_purge, soft_purge, []}
|
||||||
|
]},
|
||||||
|
{<<".*">>, []}
|
||||||
|
]
|
||||||
|
}.
|
|
@ -76,8 +76,8 @@ connect(Opts) ->
|
||||||
search(Pool, Base, Filter) ->
|
search(Pool, Base, Filter) ->
|
||||||
ecpool:with_client(Pool,
|
ecpool:with_client(Pool,
|
||||||
fun(C) ->
|
fun(C) ->
|
||||||
case application:get_env(?APP, bind_as_user) of
|
case application:get_env(?APP, bind_as_user, false) of
|
||||||
{ok, true} ->
|
true ->
|
||||||
{ok, Opts} = application:get_env(?APP, ldap),
|
{ok, Opts} = application:get_env(?APP, ldap),
|
||||||
BindDn = get_value(bind_dn, Opts),
|
BindDn = get_value(bind_dn, Opts),
|
||||||
BindPassword = get_value(bind_password, Opts),
|
BindPassword = get_value(bind_password, Opts),
|
||||||
|
@ -91,7 +91,7 @@ search(Pool, Base, Filter) ->
|
||||||
catch
|
catch
|
||||||
error:Reason -> {error, Reason}
|
error:Reason -> {error, Reason}
|
||||||
end;
|
end;
|
||||||
{ok, false} ->
|
false ->
|
||||||
eldap2:search(C, [{base, Base},
|
eldap2:search(C, [{base, Base},
|
||||||
{filter, Filter},
|
{filter, Filter},
|
||||||
{deref, eldap2:derefFindingBaseObj()}])
|
{deref, eldap2:derefFindingBaseObj()}])
|
||||||
|
@ -101,8 +101,8 @@ search(Pool, Base, Filter) ->
|
||||||
search(Pool, Base, Filter, Attributes) ->
|
search(Pool, Base, Filter, Attributes) ->
|
||||||
ecpool:with_client(Pool,
|
ecpool:with_client(Pool,
|
||||||
fun(C) ->
|
fun(C) ->
|
||||||
case application:get_env(?APP, bind_as_user) of
|
case application:get_env(?APP, bind_as_user, false) of
|
||||||
{ok, true} ->
|
true ->
|
||||||
{ok, Opts} = application:get_env(?APP, ldap),
|
{ok, Opts} = application:get_env(?APP, ldap),
|
||||||
BindDn = get_value(bind_dn, Opts),
|
BindDn = get_value(bind_dn, Opts),
|
||||||
BindPassword = get_value(bind_password, Opts),
|
BindPassword = get_value(bind_password, Opts),
|
||||||
|
@ -117,7 +117,7 @@ search(Pool, Base, Filter, Attributes) ->
|
||||||
catch
|
catch
|
||||||
error:Reason -> {error, Reason}
|
error:Reason -> {error, Reason}
|
||||||
end;
|
end;
|
||||||
{ok, false} ->
|
false ->
|
||||||
eldap2:search(C, [{base, Base},
|
eldap2:search(C, [{base, Base},
|
||||||
{filter, Filter},
|
{filter, Filter},
|
||||||
{attributes, Attributes},
|
{attributes, Attributes},
|
||||||
|
|
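The bind_as_user lookups above switch from application:get_env/2 to get_env/3 with a default of false, which is why the case clauses change from {ok, true} / {ok, false} to bare true / false. A minimal sketch of the difference between the two calls; the application and parameter names below are placeholders, not the real emqx_auth_ldap environment:

%% Sketch of application:get_env/2 vs get_env/3 (app/key names are placeholders).
-module(get_env_sketch).
-export([demo/0]).

demo() ->
    %% Nothing configured yet: get_env/2 gives 'undefined', get_env/3 gives the default.
    undefined = application:get_env(my_app, bind_as_user),
    false     = application:get_env(my_app, bind_as_user, false),

    ok = application:set_env(my_app, bind_as_user, true),

    %% Once set: get_env/2 wraps the value, get_env/3 returns it unwrapped.
    {ok, true} = application:get_env(my_app, bind_as_user),
    true       = application:get_env(my_app, bind_as_user, false),
    ok.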
|
@ -1,21 +1,47 @@
|
||||||
-define(APP, emqx_auth_mnesia).
|
-define(APP, emqx_auth_mnesia).
|
||||||
|
|
||||||
-type(login():: {clientid, binary()}
|
-type(login() :: {clientid, binary()}
|
||||||
| {username, binary()}).
|
| {username, binary()}).
|
||||||
|
|
||||||
|
-type(acl_target() :: login() | all).
|
||||||
|
|
||||||
|
-type(acl_target_type() :: clientid | username | all).
|
||||||
|
|
||||||
|
-type(access():: allow | deny).
|
||||||
|
-type(action():: pub | sub).
|
||||||
|
-type(legacy_action():: action() | pubsub).
|
||||||
|
-type(created_at():: integer()).
|
||||||
|
|
||||||
-record(emqx_user, {
|
-record(emqx_user, {
|
||||||
login :: login(),
|
login :: login(),
|
||||||
password :: binary(),
|
password :: binary(),
|
||||||
created_at :: integer()
|
created_at :: created_at()
|
||||||
}).
|
}).
|
||||||
|
|
||||||
-record(emqx_acl, {
|
-define(ACL_TABLE, emqx_acl).
|
||||||
filter:: {login() | all, emqx_topic:topic()},
|
|
||||||
action :: pub | sub | pubsub,
|
-define(MIGRATION_MARK_KEY, emqx_acl2_migration_started).
|
||||||
access :: allow | deny,
|
|
||||||
created_at :: integer()
|
-record(?ACL_TABLE, {
|
||||||
|
filter :: {acl_target(), emqx_topic:topic()} | ?MIGRATION_MARK_KEY,
|
||||||
|
action :: legacy_action(),
|
||||||
|
access :: access(),
|
||||||
|
created_at :: created_at()
|
||||||
}).
|
}).
|
||||||
|
|
||||||
|
-define(MIGRATION_MARK_RECORD, #?ACL_TABLE{filter = ?MIGRATION_MARK_KEY, action = pub, access = deny, created_at = 0}).
|
||||||
|
|
||||||
|
-type(rule() :: {access(), action(), emqx_topic:topic(), created_at()}).
|
||||||
|
|
||||||
|
-define(ACL_TABLE2, emqx_acl2).
|
||||||
|
|
||||||
|
-record(?ACL_TABLE2, {
|
||||||
|
who :: acl_target(),
|
||||||
|
rules :: [ rule() ]
|
||||||
|
}).
|
||||||
|
|
||||||
|
-type(acl_record() :: {acl_target(), emqx_topic:topic(), action(), access(), created_at()}).
|
||||||
|
|
||||||
-record(auth_metrics, {
|
-record(auth_metrics, {
|
||||||
success = 'client.auth.success',
|
success = 'client.auth.success',
|
||||||
failure = 'client.auth.failure',
|
failure = 'client.auth.failure',
|
||||||
|
|
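With the new header, the legacy bag-style emqx_acl table keeps the old per-topic layout, while the new emqx_acl2 table stores one ordered_set row per ACL target, holding all of that target's rules. A small, self-contained sketch of one such row under the types declared above; the record definition is repeated locally so the snippet compiles on its own, and the login and topics are illustrative values, not taken from the diff:

%% One emqx_acl2 row under the new layout (record repeated locally; values illustrative).
-module(acl2_record_sketch).
-export([example/0]).

-record(emqx_acl2, {who, rules}).

example() ->
    Now = erlang:system_time(millisecond),
    %% A single row per ACL target, carrying all of its {access, action, topic, created_at} rules.
    #emqx_acl2{who   = {username, <<"alice">>},
               rules = [ {allow, pub, <<"sensors/alice/#">>, Now}
                       , {deny,  sub, <<"admin/#">>,         Now}
                       ]}.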
|
@ -18,24 +18,16 @@
|
||||||
|
|
||||||
-include("emqx_auth_mnesia.hrl").
|
-include("emqx_auth_mnesia.hrl").
|
||||||
|
|
||||||
-include_lib("stdlib/include/ms_transform.hrl").
|
|
||||||
|
|
||||||
-define(TABLE, emqx_acl).
|
|
||||||
|
|
||||||
%% ACL Callbacks
|
%% ACL Callbacks
|
||||||
-export([ init/0
|
-export([ init/0
|
||||||
, register_metrics/0
|
, register_metrics/0
|
||||||
, check_acl/5
|
, check_acl/5
|
||||||
, description/0
|
, description/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
init() ->
|
init() ->
|
||||||
ok = ekka_mnesia:create_table(emqx_acl, [
|
ok = emqx_acl_mnesia_db:create_table(),
|
||||||
{type, bag},
|
ok = emqx_acl_mnesia_db:create_table2().
|
||||||
{disc_copies, [node()]},
|
|
||||||
{attributes, record_info(fields, emqx_acl)},
|
|
||||||
{storage_properties, [{ets, [{read_concurrency, true}]}]}]),
|
|
||||||
ok = ekka_mnesia:copy_table(emqx_acl, disc_copies).
|
|
||||||
|
|
||||||
-spec(register_metrics() -> ok).
|
-spec(register_metrics() -> ok).
|
||||||
register_metrics() ->
|
register_metrics() ->
|
||||||
|
@ -46,12 +38,12 @@ check_acl(ClientInfo = #{ clientid := Clientid }, PubSub, Topic, _NoMatchAction,
|
||||||
|
|
||||||
Acls = case Username of
|
Acls = case Username of
|
||||||
undefined ->
|
undefined ->
|
||||||
emqx_acl_mnesia_cli:lookup_acl({clientid, Clientid}) ++
|
emqx_acl_mnesia_db:lookup_acl({clientid, Clientid}) ++
|
||||||
emqx_acl_mnesia_cli:lookup_acl(all);
|
emqx_acl_mnesia_db:lookup_acl(all);
|
||||||
_ ->
|
_ ->
|
||||||
emqx_acl_mnesia_cli:lookup_acl({clientid, Clientid}) ++
|
emqx_acl_mnesia_db:lookup_acl({clientid, Clientid}) ++
|
||||||
emqx_acl_mnesia_cli:lookup_acl({username, Username}) ++
|
emqx_acl_mnesia_db:lookup_acl({username, Username}) ++
|
||||||
emqx_acl_mnesia_cli:lookup_acl(all)
|
emqx_acl_mnesia_db:lookup_acl(all)
|
||||||
end,
|
end,
|
||||||
|
|
||||||
case match(ClientInfo, PubSub, Topic, Acls) of
|
case match(ClientInfo, PubSub, Topic, Acls) of
|
||||||
|
@ -83,7 +75,6 @@ match(ClientInfo, PubSub, Topic, [ {_, ACLTopic, Action, Access, _} | Acls]) ->
|
||||||
match_topic(ClientInfo, Topic, ACLTopic) when is_binary(Topic) ->
|
match_topic(ClientInfo, Topic, ACLTopic) when is_binary(Topic) ->
|
||||||
emqx_topic:match(Topic, feed_var(ClientInfo, ACLTopic)).
|
emqx_topic:match(Topic, feed_var(ClientInfo, ACLTopic)).
|
||||||
|
|
||||||
match_actions(_, pubsub) -> true;
|
|
||||||
match_actions(subscribe, sub) -> true;
|
match_actions(subscribe, sub) -> true;
|
||||||
match_actions(publish, pub) -> true;
|
match_actions(publish, pub) -> true;
|
||||||
match_actions(_, _) -> false.
|
match_actions(_, _) -> false.
|
||||||
|
|
|
@ -16,8 +16,6 @@
|
||||||
|
|
||||||
-module(emqx_acl_mnesia_api).
|
-module(emqx_acl_mnesia_api).
|
||||||
|
|
||||||
-include("emqx_auth_mnesia.hrl").
|
|
||||||
|
|
||||||
-include_lib("stdlib/include/ms_transform.hrl").
|
-include_lib("stdlib/include/ms_transform.hrl").
|
||||||
|
|
||||||
-import(proplists, [ get_value/2
|
-import(proplists, [ get_value/2
|
||||||
|
@ -99,26 +97,22 @@
|
||||||
]).
|
]).
|
||||||
|
|
||||||
list_clientid(_Bindings, Params) ->
|
list_clientid(_Bindings, Params) ->
|
||||||
MatchSpec = ets:fun2ms(
|
Table = emqx_acl_mnesia_db:login_acl_table(clientid),
|
||||||
fun({emqx_acl, {{clientid, Clientid}, Topic}, Action, Access, CreatedAt}) -> {{clientid,Clientid}, Topic, Action,Access, CreatedAt} end),
|
return({ok, emqx_auth_mnesia_api:paginate_qh(Table, count(Table), Params, fun emqx_acl_mnesia_db:comparing/2, fun format/1)}).
|
||||||
return({ok, emqx_auth_mnesia_api:paginate(emqx_acl, MatchSpec, Params, fun emqx_acl_mnesia_cli:comparing/2, fun format/1)}).
|
|
||||||
|
|
||||||
list_username(_Bindings, Params) ->
|
list_username(_Bindings, Params) ->
|
||||||
MatchSpec = ets:fun2ms(
|
Table = emqx_acl_mnesia_db:login_acl_table(username),
|
||||||
fun({emqx_acl, {{username, Username}, Topic}, Action, Access, CreatedAt}) -> {{username, Username}, Topic, Action,Access, CreatedAt} end),
|
return({ok, emqx_auth_mnesia_api:paginate_qh(Table, count(Table), Params, fun emqx_acl_mnesia_db:comparing/2, fun format/1)}).
|
||||||
return({ok, emqx_auth_mnesia_api:paginate(emqx_acl, MatchSpec, Params, fun emqx_acl_mnesia_cli:comparing/2, fun format/1)}).
|
|
||||||
|
|
||||||
list_all(_Bindings, Params) ->
|
list_all(_Bindings, Params) ->
|
||||||
MatchSpec = ets:fun2ms(
|
Table = emqx_acl_mnesia_db:login_acl_table(all),
|
||||||
fun({emqx_acl, {all, Topic}, Action, Access, CreatedAt}) -> {all, Topic, Action,Access, CreatedAt}end
|
return({ok, emqx_auth_mnesia_api:paginate_qh(Table, count(Table), Params, fun emqx_acl_mnesia_db:comparing/2, fun format/1)}).
|
||||||
),
|
|
||||||
return({ok, emqx_auth_mnesia_api:paginate(emqx_acl, MatchSpec, Params, fun emqx_acl_mnesia_cli:comparing/2, fun format/1)}).
|
|
||||||
|
|
||||||
|
|
||||||
lookup(#{clientid := Clientid}, _Params) ->
|
lookup(#{clientid := Clientid}, _Params) ->
|
||||||
return({ok, format(emqx_acl_mnesia_cli:lookup_acl({clientid, urldecode(Clientid)}))});
|
return({ok, format(emqx_acl_mnesia_db:lookup_acl({clientid, urldecode(Clientid)}))});
|
||||||
lookup(#{username := Username}, _Params) ->
|
lookup(#{username := Username}, _Params) ->
|
||||||
return({ok, format(emqx_acl_mnesia_cli:lookup_acl({username, urldecode(Username)}))}).
|
return({ok, format(emqx_acl_mnesia_db:lookup_acl({username, urldecode(Username)}))}).
|
||||||
|
|
||||||
add(_Bindings, Params) ->
|
add(_Bindings, Params) ->
|
||||||
[ P | _] = Params,
|
[ P | _] = Params,
|
||||||
|
@ -144,15 +138,15 @@ do_add(Params) ->
|
||||||
Username = get_value(<<"username">>, Params, undefined),
|
Username = get_value(<<"username">>, Params, undefined),
|
||||||
Login = case {Clientid, Username} of
|
Login = case {Clientid, Username} of
|
||||||
{undefined, undefined} -> all;
|
{undefined, undefined} -> all;
|
||||||
{_, undefined} -> {clientid, urldecode(Clientid)};
|
{_, undefined} -> {clientid, Clientid};
|
||||||
{undefined, _} -> {username, urldecode(Username)}
|
{undefined, _} -> {username, Username}
|
||||||
end,
|
end,
|
||||||
Topic = urldecode(get_value(<<"topic">>, Params)),
|
Topic = get_value(<<"topic">>, Params),
|
||||||
Action = urldecode(get_value(<<"action">>, Params)),
|
Action = get_value(<<"action">>, Params),
|
||||||
Access = urldecode(get_value(<<"access">>, Params)),
|
Access = get_value(<<"access">>, Params),
|
||||||
Re = case validate([login, topic, action, access], [Login, Topic, Action, Access]) of
|
Re = case validate([login, topic, action, access], [Login, Topic, Action, Access]) of
|
||||||
ok ->
|
ok ->
|
||||||
emqx_acl_mnesia_cli:add_acl(Login, Topic, erlang:binary_to_atom(Action, utf8), erlang:binary_to_atom(Access, utf8));
|
emqx_acl_mnesia_db:add_acl(Login, Topic, erlang:binary_to_atom(Action, utf8), erlang:binary_to_atom(Access, utf8));
|
||||||
Err -> Err
|
Err -> Err
|
||||||
end,
|
end,
|
||||||
maps:merge(#{topic => Topic,
|
maps:merge(#{topic => Topic,
|
||||||
|
@ -165,15 +159,19 @@ do_add(Params) ->
|
||||||
end).
|
end).
|
||||||
|
|
||||||
delete(#{clientid := Clientid, topic := Topic}, _) ->
|
delete(#{clientid := Clientid, topic := Topic}, _) ->
|
||||||
return(emqx_acl_mnesia_cli:remove_acl({clientid, urldecode(Clientid)}, urldecode(Topic)));
|
return(emqx_acl_mnesia_db:remove_acl({clientid, urldecode(Clientid)}, urldecode(Topic)));
|
||||||
delete(#{username := Username, topic := Topic}, _) ->
|
delete(#{username := Username, topic := Topic}, _) ->
|
||||||
return(emqx_acl_mnesia_cli:remove_acl({username, urldecode(Username)}, urldecode(Topic)));
|
return(emqx_acl_mnesia_db:remove_acl({username, urldecode(Username)}, urldecode(Topic)));
|
||||||
delete(#{topic := Topic}, _) ->
|
delete(#{topic := Topic}, _) ->
|
||||||
return(emqx_acl_mnesia_cli:remove_acl(all, urldecode(Topic))).
|
return(emqx_acl_mnesia_db:remove_acl(all, urldecode(Topic))).
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
%% Interval Funcs
|
%% Interval Funcs
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
count(QH) ->
|
||||||
|
qlc:fold(fun(_, Count) -> Count + 1 end, 0, QH).
|
||||||
|
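The paginated listing APIs now count results by folding over a QLC handle instead of selecting a whole match-spec result set. A minimal, self-contained sketch of that counting idiom; the handle below is built from an illustrative list rather than the real login_acl_table/1 handle:

%% Counting over a QLC handle (the handle here comes from a plain list, for illustration).
-module(qlc_count_sketch).
-include_lib("stdlib/include/qlc.hrl").
-export([demo/0]).

count(QH) ->
    qlc:fold(fun(_, Count) -> Count + 1 end, 0, QH).

demo() ->
    QH = qlc:q([Topic || Topic <- [<<"t/1">>, <<"t/2">>, <<"t/3">>]]),
    3 = count(QH),
    ok.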
|
||||||
format({{clientid, Clientid}, Topic, Action, Access, _CreatedAt}) ->
|
format({{clientid, Clientid}, Topic, Action, Access, _CreatedAt}) ->
|
||||||
#{clientid => Clientid, topic => Topic, action => Action, access => Access};
|
#{clientid => Clientid, topic => Topic, action => Action, access => Access};
|
||||||
format({{username, Username}, Topic, Action, Access, _CreatedAt}) ->
|
format({{username, Username}, Topic, Action, Access, _CreatedAt}) ->
|
||||||
|
|
|
@ -16,110 +16,28 @@
|
||||||
|
|
||||||
-module(emqx_acl_mnesia_cli).
|
-module(emqx_acl_mnesia_cli).
|
||||||
|
|
||||||
-include("emqx_auth_mnesia.hrl").
|
|
||||||
-include_lib("emqx/include/logger.hrl").
|
|
||||||
-include_lib("stdlib/include/ms_transform.hrl").
|
|
||||||
-define(TABLE, emqx_acl).
|
|
||||||
|
|
||||||
%% Acl APIs
|
|
||||||
-export([ add_acl/4
|
|
||||||
, lookup_acl/1
|
|
||||||
, all_acls/0
|
|
||||||
, all_acls/1
|
|
||||||
, remove_acl/2
|
|
||||||
]).
|
|
||||||
|
|
||||||
-export([cli/1]).
|
-export([cli/1]).
|
||||||
-export([comparing/2]).
|
|
||||||
%%--------------------------------------------------------------------
|
|
||||||
%% Acl API
|
|
||||||
%%--------------------------------------------------------------------
|
|
||||||
|
|
||||||
%% @doc Add Acls
|
|
||||||
-spec(add_acl(login() | all, emqx_topic:topic(), pub | sub | pubsub, allow | deny) ->
|
|
||||||
ok | {error, any()}).
|
|
||||||
add_acl(Login, Topic, Action, Access) ->
|
|
||||||
Filter = {Login, Topic},
|
|
||||||
Acl = #?TABLE{
|
|
||||||
filter = Filter,
|
|
||||||
action = Action,
|
|
||||||
access = Access,
|
|
||||||
created_at = erlang:system_time(millisecond)
|
|
||||||
},
|
|
||||||
ret(mnesia:transaction(
|
|
||||||
fun() ->
|
|
||||||
OldRecords = mnesia:wread({?TABLE, Filter}),
|
|
||||||
case Action of
|
|
||||||
pubsub ->
|
|
||||||
update_permission(pub, Acl, OldRecords),
|
|
||||||
update_permission(sub, Acl, OldRecords);
|
|
||||||
_ ->
|
|
||||||
update_permission(Action, Acl, OldRecords)
|
|
||||||
end
|
|
||||||
end)).
|
|
||||||
|
|
||||||
%% @doc Lookup acl by login
|
|
||||||
-spec(lookup_acl(login() | all) -> list()).
|
|
||||||
lookup_acl(undefined) -> [];
|
|
||||||
lookup_acl(Login) ->
|
|
||||||
MatchSpec = ets:fun2ms(fun({?TABLE, {Filter, ACLTopic}, Action, Access, CreatedAt})
|
|
||||||
when Filter =:= Login ->
|
|
||||||
{Filter, ACLTopic, Action, Access, CreatedAt}
|
|
||||||
end),
|
|
||||||
lists:sort(fun comparing/2, ets:select(?TABLE, MatchSpec)).
|
|
||||||
|
|
||||||
%% @doc Remove acl
|
|
||||||
-spec(remove_acl(login() | all, emqx_topic:topic()) -> ok | {error, any()}).
|
|
||||||
remove_acl(Login, Topic) ->
|
|
||||||
ret(mnesia:transaction(fun mnesia:delete/1, [{?TABLE, {Login, Topic}}])).
|
|
||||||
|
|
||||||
%% @doc All logins
|
|
||||||
-spec(all_acls() -> list()).
|
|
||||||
all_acls() ->
|
|
||||||
all_acls(clientid) ++
|
|
||||||
all_acls(username) ++
|
|
||||||
all_acls(all).
|
|
||||||
|
|
||||||
all_acls(clientid) ->
|
|
||||||
MatchSpec = ets:fun2ms(
|
|
||||||
fun({?TABLE, {{clientid, Clientid}, Topic}, Action, Access, CreatedAt}) ->
|
|
||||||
{{clientid, Clientid}, Topic, Action, Access, CreatedAt}
|
|
||||||
end),
|
|
||||||
lists:sort(fun comparing/2, ets:select(?TABLE, MatchSpec));
|
|
||||||
all_acls(username) ->
|
|
||||||
MatchSpec = ets:fun2ms(
|
|
||||||
fun({?TABLE, {{username, Username}, Topic}, Action, Access, CreatedAt}) ->
|
|
||||||
{{username, Username}, Topic, Action, Access, CreatedAt}
|
|
||||||
end),
|
|
||||||
lists:sort(fun comparing/2, ets:select(?TABLE, MatchSpec));
|
|
||||||
all_acls(all) ->
|
|
||||||
MatchSpec = ets:fun2ms(
|
|
||||||
fun({?TABLE, {all, Topic}, Action, Access, CreatedAt}) ->
|
|
||||||
{all, Topic, Action, Access, CreatedAt}
|
|
||||||
end
|
|
||||||
),
|
|
||||||
lists:sort(fun comparing/2, ets:select(?TABLE, MatchSpec)).
|
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% ACL Cli
|
%% ACL Cli
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
cli(["list"]) ->
|
cli(["list"]) ->
|
||||||
[print_acl(Acl) || Acl <- all_acls()];
|
[print_acl(Acl) || Acl <- emqx_acl_mnesia_db:all_acls()];
|
||||||
|
|
||||||
cli(["list", "clientid"]) ->
|
cli(["list", "clientid"]) ->
|
||||||
[print_acl(Acl) || Acl <- all_acls(clientid)];
|
[print_acl(Acl) || Acl <- emqx_acl_mnesia_db:all_acls(clientid)];
|
||||||
|
|
||||||
cli(["list", "username"]) ->
|
cli(["list", "username"]) ->
|
||||||
[print_acl(Acl) || Acl <- all_acls(username)];
|
[print_acl(Acl) || Acl <- emqx_acl_mnesia_db:all_acls(username)];
|
||||||
|
|
||||||
cli(["list", "_all"]) ->
|
cli(["list", "_all"]) ->
|
||||||
[print_acl(Acl) || Acl <- all_acls(all)];
|
[print_acl(Acl) || Acl <- emqx_acl_mnesia_db:all_acls(all)];
|
||||||
|
|
||||||
cli(["add", "clientid", Clientid, Topic, Action, Access]) ->
|
cli(["add", "clientid", Clientid, Topic, Action, Access]) ->
|
||||||
case validate(action, Action) andalso validate(access, Access) of
|
case validate(action, Action) andalso validate(access, Access) of
|
||||||
true ->
|
true ->
|
||||||
case add_acl(
|
case emqx_acl_mnesia_db:add_acl(
|
||||||
{clientid, iolist_to_binary(Clientid)},
|
{clientid, iolist_to_binary(Clientid)},
|
||||||
iolist_to_binary(Topic),
|
iolist_to_binary(Topic),
|
||||||
list_to_existing_atom(Action),
|
list_to_existing_atom(Action),
|
||||||
|
@ -135,7 +53,7 @@ cli(["add", "clientid", Clientid, Topic, Action, Access]) ->
|
||||||
cli(["add", "username", Username, Topic, Action, Access]) ->
|
cli(["add", "username", Username, Topic, Action, Access]) ->
|
||||||
case validate(action, Action) andalso validate(access, Access) of
|
case validate(action, Action) andalso validate(access, Access) of
|
||||||
true ->
|
true ->
|
||||||
case add_acl(
|
case emqx_acl_mnesia_db:add_acl(
|
||||||
{username, iolist_to_binary(Username)},
|
{username, iolist_to_binary(Username)},
|
||||||
iolist_to_binary(Topic),
|
iolist_to_binary(Topic),
|
||||||
list_to_existing_atom(Action),
|
list_to_existing_atom(Action),
|
||||||
|
@ -151,7 +69,7 @@ cli(["add", "username", Username, Topic, Action, Access]) ->
|
||||||
cli(["add", "_all", Topic, Action, Access]) ->
|
cli(["add", "_all", Topic, Action, Access]) ->
|
||||||
case validate(action, Action) andalso validate(access, Access) of
|
case validate(action, Action) andalso validate(access, Access) of
|
||||||
true ->
|
true ->
|
||||||
case add_acl(
|
case emqx_acl_mnesia_db:add_acl(
|
||||||
all,
|
all,
|
||||||
iolist_to_binary(Topic),
|
iolist_to_binary(Topic),
|
||||||
list_to_existing_atom(Action),
|
list_to_existing_atom(Action),
|
||||||
|
@ -165,16 +83,16 @@ cli(["add", "_all", Topic, Action, Access]) ->
|
||||||
end;
|
end;
|
||||||
|
|
||||||
cli(["show", "clientid", Clientid]) ->
|
cli(["show", "clientid", Clientid]) ->
|
||||||
[print_acl(Acl) || Acl <- lookup_acl({clientid, iolist_to_binary(Clientid)})];
|
[print_acl(Acl) || Acl <- emqx_acl_mnesia_db:lookup_acl({clientid, iolist_to_binary(Clientid)})];
|
||||||
|
|
||||||
cli(["show", "username", Username]) ->
|
cli(["show", "username", Username]) ->
|
||||||
[print_acl(Acl) || Acl <- lookup_acl({username, iolist_to_binary(Username)})];
|
[print_acl(Acl) || Acl <- emqx_acl_mnesia_db:lookup_acl({username, iolist_to_binary(Username)})];
|
||||||
|
|
||||||
cli(["del", "clientid", Clientid, Topic])->
|
cli(["del", "clientid", Clientid, Topic])->
|
||||||
cli(["delete", "clientid", Clientid, Topic]);
|
cli(["delete", "clientid", Clientid, Topic]);
|
||||||
|
|
||||||
cli(["delete", "clientid", Clientid, Topic])->
|
cli(["delete", "clientid", Clientid, Topic])->
|
||||||
case remove_acl({clientid, iolist_to_binary(Clientid)}, iolist_to_binary(Topic)) of
|
case emqx_acl_mnesia_db:remove_acl({clientid, iolist_to_binary(Clientid)}, iolist_to_binary(Topic)) of
|
||||||
ok -> emqx_ctl:print("ok~n");
|
ok -> emqx_ctl:print("ok~n");
|
||||||
{error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason])
|
{error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason])
|
||||||
end;
|
end;
|
||||||
|
@ -183,7 +101,7 @@ cli(["del", "username", Username, Topic])->
|
||||||
cli(["delete", "username", Username, Topic]);
|
cli(["delete", "username", Username, Topic]);
|
||||||
|
|
||||||
cli(["delete", "username", Username, Topic])->
|
cli(["delete", "username", Username, Topic])->
|
||||||
case remove_acl({username, iolist_to_binary(Username)}, iolist_to_binary(Topic)) of
|
case emqx_acl_mnesia_db:remove_acl({username, iolist_to_binary(Username)}, iolist_to_binary(Topic)) of
|
||||||
ok -> emqx_ctl:print("ok~n");
|
ok -> emqx_ctl:print("ok~n");
|
||||||
{error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason])
|
{error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason])
|
||||||
end;
|
end;
|
||||||
|
@ -192,7 +110,7 @@ cli(["del", "_all", Topic])->
|
||||||
cli(["delete", "_all", Topic]);
|
cli(["delete", "_all", Topic]);
|
||||||
|
|
||||||
cli(["delete", "_all", Topic])->
|
cli(["delete", "_all", Topic])->
|
||||||
case remove_acl(all, iolist_to_binary(Topic)) of
|
case emqx_acl_mnesia_db:remove_acl(all, iolist_to_binary(Topic)) of
|
||||||
ok -> emqx_ctl:print("ok~n");
|
ok -> emqx_ctl:print("ok~n");
|
||||||
{error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason])
|
{error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason])
|
||||||
end;
|
end;
|
||||||
|
@ -215,13 +133,6 @@ cli(_) ->
|
||||||
%% Internal functions
|
%% Internal functions
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
comparing({_, _, _, _, CreatedAt1},
|
|
||||||
{_, _, _, _, CreatedAt2}) ->
|
|
||||||
CreatedAt1 >= CreatedAt2.
|
|
||||||
|
|
||||||
ret({atomic, ok}) -> ok;
|
|
||||||
ret({aborted, Error}) -> {error, Error}.
|
|
||||||
|
|
||||||
validate(action, "pub") -> true;
|
validate(action, "pub") -> true;
|
||||||
validate(action, "sub") -> true;
|
validate(action, "sub") -> true;
|
||||||
validate(action, "pubsub") -> true;
|
validate(action, "pubsub") -> true;
|
||||||
|
@ -244,27 +155,3 @@ print_acl({all, Topic, Action, Access, _}) ->
|
||||||
"Acl($all topic = ~p action = ~p access = ~p)~n",
|
"Acl($all topic = ~p action = ~p access = ~p)~n",
|
||||||
[Topic, Action, Access]
|
[Topic, Action, Access]
|
||||||
).
|
).
|
||||||
|
|
||||||
update_permission(Action, Acl0, OldRecords) ->
|
|
||||||
Acl = Acl0 #?TABLE{action = Action},
|
|
||||||
maybe_delete_shadowed_records(Action, OldRecords),
|
|
||||||
mnesia:write(Acl).
|
|
||||||
|
|
||||||
maybe_delete_shadowed_records(_, []) ->
|
|
||||||
ok;
|
|
||||||
maybe_delete_shadowed_records(Action1, [Rec = #emqx_acl{action = Action2} | Rest]) ->
|
|
||||||
if Action1 =:= Action2 ->
|
|
||||||
ok = mnesia:delete_object(Rec);
|
|
||||||
Action2 =:= pubsub ->
|
|
||||||
%% Perform migration from the old data format on the
|
|
||||||
%% fly. This is needed only for the enterprise version,
|
|
||||||
%% delete this branch on 5.0
|
|
||||||
mnesia:delete_object(Rec),
|
|
||||||
mnesia:write(Rec#?TABLE{action = other_action(Action1)});
|
|
||||||
true ->
|
|
||||||
ok
|
|
||||||
end,
|
|
||||||
maybe_delete_shadowed_records(Action1, Rest).
|
|
||||||
|
|
||||||
other_action(pub) -> sub;
|
|
||||||
other_action(sub) -> pub.
|
|
||||||
|
|
|
@@ -0,0 +1,339 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_acl_mnesia_db).

-include("emqx_auth_mnesia.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("stdlib/include/qlc.hrl").

%% ACL APIs
-export([ create_table/0
        , create_table2/0
        ]).

-export([ add_acl/4
        , lookup_acl/1
        , all_acls_export/0
        , all_acls/0
        , all_acls/1
        , remove_acl/2
        , merge_acl_records/3
        , login_acl_table/1
        , is_migration_started/0
        ]).

-export([comparing/2]).

%%--------------------------------------------------------------------
%% ACL API
%%--------------------------------------------------------------------

%% @doc Create table `emqx_acl` of old format rules
-spec(create_table() -> ok).
create_table() ->
    ok = ekka_mnesia:create_table(?ACL_TABLE, [
            {type, bag},
            {disc_copies, [node()]},
            {attributes, record_info(fields, ?ACL_TABLE)},
            {storage_properties, [{ets, [{read_concurrency, true}]}]}]),
    ok = ekka_mnesia:copy_table(?ACL_TABLE, disc_copies).

%% @doc Create table `emqx_acl2` of new format rules
-spec(create_table2() -> ok).
create_table2() ->
    ok = ekka_mnesia:create_table(?ACL_TABLE2, [
            {type, ordered_set},
            {disc_copies, [node()]},
            {attributes, record_info(fields, ?ACL_TABLE2)},
            {storage_properties, [{ets, [{read_concurrency, true}]}]}]),
    ok = ekka_mnesia:copy_table(?ACL_TABLE2, disc_copies).

%% @doc Add Acls
-spec(add_acl(acl_target(), emqx_topic:topic(), legacy_action(), access()) ->
        ok | {error, any()}).
add_acl(Login, Topic, Action, Access) ->
    ret(mnesia:transaction(fun() ->
        case is_migration_started() of
            true -> add_acl_new(Login, Topic, Action, Access);
            false -> add_acl_old(Login, Topic, Action, Access)
        end
    end)).

%% @doc Lookup acl by login
-spec(lookup_acl(acl_target()) -> list(acl_record())).
lookup_acl(undefined) -> [];
lookup_acl(Login) ->
    % After migration to ?ACL_TABLE2, ?ACL_TABLE never has any rules. This lookup should be removed later.
    MatchSpec = ets:fun2ms(fun(#?ACL_TABLE{filter = {Filter, _}} = Rec)
                                 when Filter =:= Login -> Rec
                           end),
    OldRecs = ets:select(?ACL_TABLE, MatchSpec),

    NewAcls = ets:lookup(?ACL_TABLE2, Login),
    MergedAcl = merge_acl_records(Login, OldRecs, NewAcls),
    lists:sort(fun comparing/2, acl_to_list(MergedAcl)).

%% @doc Remove ACL
-spec remove_acl(acl_target(), emqx_topic:topic()) -> ok | {error, any()}.
remove_acl(Login, Topic) ->
    ret(mnesia:transaction(fun() ->
        mnesia:delete({?ACL_TABLE, {Login, Topic}}),
        case mnesia:wread({?ACL_TABLE2, Login}) of
            [] -> ok;
            [#?ACL_TABLE2{rules = Rules} = Acl] ->
                case delete_topic_rules(Topic, Rules) of
                    [] -> mnesia:delete({?ACL_TABLE2, Login});
                    [_ | _] = RemainingRules ->
                        mnesia:write(Acl#?ACL_TABLE2{rules = RemainingRules})
                end
        end
    end)).

%% @doc All ACL rules
-spec(all_acls() -> list(acl_record())).
all_acls() ->
    all_acls(username) ++
    all_acls(clientid) ++
    all_acls(all).

%% @doc All ACL rules of specified type
-spec(all_acls(acl_target_type()) -> list(acl_record())).
all_acls(AclTargetType) ->
    lists:sort(fun comparing/2, qlc:eval(login_acl_table(AclTargetType))).

%% @doc All ACL rules fetched transactionally
-spec(all_acls_export() -> list(acl_record())).
all_acls_export() ->
    AclTargetTypes = [username, clientid, all],
    MatchSpecNew = lists:flatmap(fun login_match_spec_new/1, AclTargetTypes),
    MatchSpecOld = lists:flatmap(fun login_match_spec_old/1, AclTargetTypes),

    {atomic, Records} = mnesia:transaction(
        fun() ->
            QH = acl_table(MatchSpecNew, MatchSpecOld, fun mnesia:table/2, fun lookup_mnesia/2),
            qlc:eval(QH)
        end),
    Records.

%% @doc QLC table of logins matching spec
-spec(login_acl_table(acl_target_type()) -> qlc:query_handle()).
login_acl_table(AclTargetType) ->
    MatchSpecNew = login_match_spec_new(AclTargetType),
    MatchSpecOld = login_match_spec_old(AclTargetType),
    acl_table(MatchSpecNew, MatchSpecOld, fun ets:table/2, fun lookup_ets/2).

%% @doc Combine old `emqx_acl` ACL records with a new `emqx_acl2` ACL record for a given login
-spec(merge_acl_records(acl_target(), [#?ACL_TABLE{}], [#?ACL_TABLE2{}]) -> #?ACL_TABLE2{}).
merge_acl_records(Login, OldRecs, Acls) ->
    OldRules = old_recs_to_rules(OldRecs),
    NewRules = case Acls of
        [] -> [];
        [#?ACL_TABLE2{rules = Rules}] -> Rules
    end,
    #?ACL_TABLE2{who = Login, rules = merge_rules(NewRules, OldRules)}.

%% @doc Checks if background migration of ACL rules from `emqx_acl` to `emqx_acl2` format started.
%% Should be run in transaction
-spec(is_migration_started() -> boolean()).
is_migration_started() ->
    case mnesia:read({?ACL_TABLE, ?MIGRATION_MARK_KEY}) of
        [?MIGRATION_MARK_RECORD | _] -> true;
        [] -> false
    end.

%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------

add_acl_new(Login, Topic, Action, Access) ->
    Rule = {Access, Action, Topic, erlang:system_time(millisecond)},
    Rules = normalize_rule(Rule),
    OldAcl = mnesia:wread({?ACL_TABLE2, Login}),
    NewAcl = case OldAcl of
        [#?ACL_TABLE2{rules = OldRules} = Acl] ->
            Acl#?ACL_TABLE2{rules = merge_rules(Rules, OldRules)};
        [] ->
            #?ACL_TABLE2{who = Login, rules = Rules}
    end,
    mnesia:write(NewAcl).

add_acl_old(Login, Topic, Action, Access) ->
    Filter = {Login, Topic},
    Acl = #?ACL_TABLE{
             filter = Filter,
             action = Action,
             access = Access,
             created_at = erlang:system_time(millisecond)
            },
    OldRecords = mnesia:wread({?ACL_TABLE, Filter}),
    case Action of
        pubsub ->
            update_permission(pub, Acl, OldRecords),
            update_permission(sub, Acl, OldRecords);
        _ ->
            update_permission(Action, Acl, OldRecords)
    end.

old_recs_to_rules(OldRecs) ->
    lists:flatmap(fun old_rec_to_rules/1, OldRecs).

old_rec_to_rules(#?ACL_TABLE{filter = {_, Topic}, action = Action, access = Access, created_at = CreatedAt}) ->
    normalize_rule({Access, Action, Topic, CreatedAt}).

normalize_rule({Access, pubsub, Topic, CreatedAt}) ->
    [{Access, pub, Topic, CreatedAt}, {Access, sub, Topic, CreatedAt}];
normalize_rule({Access, Action, Topic, CreatedAt}) ->
    [{Access, Action, Topic, CreatedAt}].

merge_rules([], OldRules) -> OldRules;
merge_rules([NewRule | RestNewRules], OldRules) ->
    merge_rules(RestNewRules, merge_rule(NewRule, OldRules)).

merge_rule({_, Action, Topic, _ } = NewRule, OldRules) ->
    [NewRule | lists:filter(
        fun({_, OldAction, OldTopic, _}) ->
            {Action, Topic} =/= {OldAction, OldTopic}
        end, OldRules)].

acl_to_list(#?ACL_TABLE2{who = Login, rules = Rules}) ->
    [{Login, Topic, Action, Access, CreatedAt} || {Access, Action, Topic, CreatedAt} <- Rules].

delete_topic_rules(Topic, Rules) ->
    [Rule || {_, _, T, _} = Rule <- Rules, T =/= Topic].

comparing({_, _, _, _, CreatedAt} = Rec1,
          {_, _, _, _, CreatedAt} = Rec2) ->
    Rec1 >= Rec2;

comparing({_, _, _, _, CreatedAt1},
          {_, _, _, _, CreatedAt2}) ->
    CreatedAt1 >= CreatedAt2.

login_match_spec_old(all) ->
    ets:fun2ms(fun(#?ACL_TABLE{filter = {all, _}} = Record) ->
                       Record
               end);

login_match_spec_old(Type) when (Type =:= username) or (Type =:= clientid) ->
    ets:fun2ms(fun(#?ACL_TABLE{filter = {{RecordType, _}, _}} = Record)
                     when RecordType =:= Type -> Record
               end).

login_match_spec_new(all) ->
    ets:fun2ms(fun(#?ACL_TABLE2{who = all} = Record) ->
                       Record
               end);

login_match_spec_new(Type) when (Type =:= username) or (Type =:= clientid) ->
    ets:fun2ms(fun(#?ACL_TABLE2{who = {RecordType, _}} = Record)
                     when RecordType =:= Type -> Record
               end).

acl_table(MatchSpecNew, MatchSpecOld, TableFun, LookupFun) ->
    TraverseFun =
        fun() ->
            CursorNew =
                qlc:cursor(
                    TableFun(?ACL_TABLE2, [{traverse, {select, MatchSpecNew}}])),
            CursorOld =
                qlc:cursor(
                    TableFun(?ACL_TABLE, [{traverse, {select, MatchSpecOld}}])),
            traverse_new(CursorNew, CursorOld, #{}, LookupFun)
        end,

    qlc:table(TraverseFun, []).


% These are traverse funs for qlc table created by `acl_table/4`.
% Traversing consumes memory: it collects logins present in `?ACL_TABLE` and
% at the same time having rules in `?ACL_TABLE2`.
% Such records appear if ACLs are inserted before migration started.
% After migration, number of such logins is zero, so traversing starts working in
% constant memory.

traverse_new(CursorNew, CursorOld, FoundKeys, LookupFun) ->
    Acls = qlc:next_answers(CursorNew, 1),
    case Acls of
        [] ->
            qlc:delete_cursor(CursorNew),
            traverse_old(CursorOld, FoundKeys);
        [#?ACL_TABLE2{who = Login, rules = Rules} = Acl] ->
            Keys = lists:usort([{Login, Topic} || {_, _, Topic, _} <- Rules]),
            OldRecs = lists:flatmap(fun(Key) -> LookupFun(?ACL_TABLE, Key) end, Keys),
            MergedAcl = merge_acl_records(Login, OldRecs, [Acl]),
            NewFoundKeys =
                lists:foldl(fun(#?ACL_TABLE{filter = Key}, Found) -> maps:put(Key, true, Found) end,
                            FoundKeys,
                            OldRecs),
            case acl_to_list(MergedAcl) of
                [] ->
                    traverse_new(CursorNew, CursorOld, NewFoundKeys, LookupFun);
                List ->
                    List ++ fun() -> traverse_new(CursorNew, CursorOld, NewFoundKeys, LookupFun) end
            end
    end.

traverse_old(CursorOld, FoundKeys) ->
    OldAcls = qlc:next_answers(CursorOld),
    case OldAcls of
        [] ->
            qlc:delete_cursor(CursorOld),
            [];
        _ ->
            Records = [ {Login, Topic, Action, Access, CreatedAt}
                        || #?ACL_TABLE{filter = {Login, Topic}, action = LegacyAction, access = Access, created_at = CreatedAt} <- OldAcls,
                           {_, Action, _, _} <- normalize_rule({Access, LegacyAction, Topic, CreatedAt}),
                           not maps:is_key({Login, Topic}, FoundKeys)
                      ],
            case Records of
                [] -> traverse_old(CursorOld, FoundKeys);
                List -> List ++ fun() -> traverse_old(CursorOld, FoundKeys) end
            end
    end.

lookup_mnesia(Tab, Key) ->
    mnesia:read({Tab, Key}).

lookup_ets(Tab, Key) ->
    ets:lookup(Tab, Key).

update_permission(Action, Acl0, OldRecords) ->
    Acl = Acl0 #?ACL_TABLE{action = Action},
    maybe_delete_shadowed_records(Action, OldRecords),
    mnesia:write(Acl).

maybe_delete_shadowed_records(_, []) ->
    ok;
maybe_delete_shadowed_records(Action1, [Rec = #emqx_acl{action = Action2} | Rest]) ->
    if Action1 =:= Action2 ->
            ok = mnesia:delete_object(Rec);
       Action2 =:= pubsub ->
            %% Perform migration from the old data format on the
            %% fly. This is needed only for the enterprise version,
            %% delete this branch on 5.0
            mnesia:delete_object(Rec),
            mnesia:write(Rec#?ACL_TABLE{action = other_action(Action1)});
       true ->
            ok
    end,
    maybe_delete_shadowed_records(Action1, Rest).

other_action(pub) -> sub;
other_action(sub) -> pub.

ret({atomic, ok}) -> ok;
ret({aborted, Error}) -> {error, Error}.
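The module above becomes the single entry point for ACL rule storage: callers no longer touch `emqx_acl`/`emqx_acl2` directly. A minimal, illustrative sketch of exercising the API from a shell (the clientid and topic are made up; record definitions come from `emqx_auth_mnesia.hrl`):

    %% sketch only: add a rule and read it back through the merged view
    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"client1">>}, <<"t/1">>, pub, allow),
    [{_Login, <<"t/1">>, pub, allow, _CreatedAt}] =
        emqx_acl_mnesia_db:lookup_acl({clientid, <<"client1">>}).

`lookup_acl/1` merges whatever is still in the old `bag` table with the new `ordered_set` record, so the same call behaves identically before, during, and after migration.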
@@ -0,0 +1,215 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_acl_mnesia_migrator).

-include("emqx_auth_mnesia.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").

-behaviour(gen_statem).

-define(CHECK_ALL_NODES_INTERVAL, 60000).

-type(migration_delay_reason() :: old_nodes | bad_nodes).

-export([
    callback_mode/0,
    init/1
]).

-export([
    waiting_all_nodes/3,
    checking_old_table/3,
    migrating/3
]).

-export([
    start_link/0,
    start_link/1,
    start_supervised/0,
    stop_supervised/0,
    migrate_records/0,
    is_migrating_on_node/1,
    is_old_table_migrated/0
]).

%%--------------------------------------------------------------------
%% External interface
%%--------------------------------------------------------------------

start_link() ->
    start_link(?MODULE).

start_link(Name) when is_atom(Name) ->
    start_link(#{
        name => Name
    });

start_link(#{name := Name} = Opts) ->
    gen_statem:start_link({local, Name}, ?MODULE, Opts, []).

start_supervised() ->
    try
        {ok, _} = supervisor:restart_child(emqx_auth_mnesia_sup, ?MODULE),
        ok
    catch
        exit:{noproc, _} -> ok
    end.

stop_supervised() ->
    try
        ok = supervisor:terminate_child(emqx_auth_mnesia_sup, ?MODULE),
        ok = supervisor:delete_child(emqx_auth_mnesia_sup, ?MODULE)
    catch
        exit:{noproc, _} -> ok
    end.

%%--------------------------------------------------------------------
%% gen_statem callbacks
%%--------------------------------------------------------------------

callback_mode() -> state_functions.

init(Opts) ->
    ok = emqx_acl_mnesia_db:create_table(),
    ok = emqx_acl_mnesia_db:create_table2(),
    Name = maps:get(name, Opts, ?MODULE),
    CheckNodesInterval = maps:get(check_nodes_interval, Opts, ?CHECK_ALL_NODES_INTERVAL),
    GetNodes = maps:get(get_nodes, Opts, fun all_nodes/0),
    Data =
        #{name => Name,
          check_nodes_interval => CheckNodesInterval,
          get_nodes => GetNodes},
    {ok, waiting_all_nodes, Data, [{state_timeout, 0, check_nodes}]}.

%%--------------------------------------------------------------------
%% state callbacks
%%--------------------------------------------------------------------

waiting_all_nodes(state_timeout, check_nodes, Data) ->
    #{name := Name, check_nodes_interval := CheckNodesInterval, get_nodes := GetNodes} = Data,
    case is_all_nodes_migrating(Name, GetNodes()) of
        true ->
            ?tp(info, emqx_acl_mnesia_migrator_check_old_table, #{}),
            {next_state, checking_old_table, Data, [{next_event, internal, check_old_table}]};
        {false, Reason, Nodes} ->
            ?tp(info,
                emqx_acl_mnesia_migrator_bad_nodes_delay,
                #{delay => CheckNodesInterval,
                  reason => Reason,
                  name => Name,
                  nodes => Nodes}),
            {keep_state_and_data, [{state_timeout, CheckNodesInterval, check_nodes}]}
    end.

checking_old_table(internal, check_old_table, Data) ->
    case is_old_table_migrated() of
        true ->
            ?tp(info, emqx_acl_mnesia_migrator_finish, #{}),
            {next_state, finished, Data, [{hibernate, true}]};
        false ->
            ?tp(info, emqx_acl_mnesia_migrator_start_migration, #{}),
            {next_state, migrating, Data, [{next_event, internal, start_migration}]}
    end.

migrating(internal, start_migration, Data) ->
    ok = migrate_records(),
    {next_state, checking_old_table, Data, [{next_event, internal, check_old_table}]}.

%% @doc Returns `true` if migration is started in the local node, otherwise crash.
-spec(is_migrating_on_node(atom()) -> true).
is_migrating_on_node(Name) ->
    true = is_pid(erlang:whereis(Name)).

%% @doc Run migration of records
-spec(migrate_records() -> ok).
migrate_records() ->
    ok = add_migration_mark(),
    Key = peek_record(),
    do_migrate_records(Key).

%% @doc Run migration of records
-spec(is_all_nodes_migrating(atom(), list(node())) -> true | {false, migration_delay_reason(), list(node())}).
is_all_nodes_migrating(Name, Nodes) ->
    case rpc:multicall(Nodes, ?MODULE, is_migrating_on_node, [Name]) of
        {Results, []} ->
            OldNodes = [ Node || {Node, Result} <- lists:zip(Nodes, Results), Result =/= true ],
            case OldNodes of
                [] -> true;
                _ -> {false, old_nodes, OldNodes}
            end;
        {_, [_BadNode | _] = BadNodes} ->
            {false, bad_nodes, BadNodes}
    end.

%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------

all_nodes() ->
    ekka_mnesia:cluster_nodes(all).

is_old_table_migrated() ->
    Result =
        mnesia:transaction(fun() ->
            case mnesia:first(?ACL_TABLE) of
                ?MIGRATION_MARK_KEY ->
                    case mnesia:next(?ACL_TABLE, ?MIGRATION_MARK_KEY) of
                        '$end_of_table' -> true;
                        _OtherKey -> false
                    end;
                '$end_of_table' -> false;
                _OtherKey -> false
            end
        end),
    case Result of
        {atomic, true} ->
            true;
        _ ->
            false
    end.

add_migration_mark() ->
    {atomic, ok} = mnesia:transaction(fun() -> mnesia:write(?MIGRATION_MARK_RECORD) end),
    ok.

peek_record() ->
    Key = mnesia:dirty_first(?ACL_TABLE),
    case Key of
        ?MIGRATION_MARK_KEY ->
            mnesia:dirty_next(?ACL_TABLE, Key);
        _ -> Key
    end.

do_migrate_records('$end_of_table') -> ok;
do_migrate_records({_Login, _Topic} = Key) ->
    ?tp(emqx_acl_mnesia_migrator_record_selected, #{key => Key}),
    _ = mnesia:transaction(fun migrate_one_record/1, [Key]),
    do_migrate_records(peek_record()).

migrate_one_record({Login, _Topic} = Key) ->
    case mnesia:wread({?ACL_TABLE, Key}) of
        [] ->
            ?tp(emqx_acl_mnesia_migrator_record_missed, #{key => Key}),
            record_missing;
        OldRecs ->
            Acls = mnesia:wread({?ACL_TABLE2, Login}),
            UpdatedAcl = emqx_acl_mnesia_db:merge_acl_records(Login, OldRecs, Acls),
            ok = mnesia:write(UpdatedAcl),
            ok = mnesia:delete({?ACL_TABLE, Key}),
            ?tp(emqx_acl_mnesia_migrator_record_migrated, #{key => Key})
    end.
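The migrator is a `gen_statem`: it waits until every cluster node answers `is_migrating_on_node/1`, writes `?MIGRATION_MARK_RECORD` into the old table, moves the remaining `emqx_acl` rows into `emqx_acl2`, and hibernates in the `finished` state. A hedged sketch of driving it by hand with the options `init/1` accepts (the registered name and the node fun below are examples, not part of the change):

    %% sketch: run a privately named migrator that rechecks the cluster every 30 ms
    {ok, _Pid} = emqx_acl_mnesia_migrator:start_link(#{
        name => my_migrator,
        check_nodes_interval => 30,
        get_nodes => fun() -> [node()] end
    }),
    %% once only the migration mark is left in the old table,
    %% is_old_table_migrated/0 starts returning true
    emqx_acl_mnesia_migrator:is_old_table_migrated().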
@@ -1,6 +1,6 @@
 {application, emqx_auth_mnesia,
  [{description, "EMQ X Authentication with Mnesia"},
-  {vsn, "4.3.0"}, % strict semver, bump manually
+  {vsn, "4.3.4"}, % strict semver, bump manually
   {modules, []},
   {registered, []},
   {applications, [kernel,stdlib,mnesia]},
@@ -0,0 +1,31 @@
%% -*- mode: erlang -*-
{VSN,
  [
   {<<"4.3.[0-3]">>, [
     {add_module,emqx_acl_mnesia_db},
     {add_module,emqx_acl_mnesia_migrator, [emqx_acl_mnesia_db]},
     {update, emqx_auth_mnesia_sup, supervisor},
     {apply, {emqx_acl_mnesia_migrator, start_supervised, []}},
     {load_module,emqx_auth_mnesia_api, brutal_purge,soft_purge,[]},
     {load_module,emqx_acl_mnesia, brutal_purge,soft_purge,[]},
     {load_module,emqx_acl_mnesia_api, brutal_purge,soft_purge,[]},
     {load_module,emqx_acl_mnesia_cli, brutal_purge,soft_purge,[]}
   ]},
   {<<".*">>, [
   ]}
  ],
  [
   {<<"4.3.[0-3]">>, [
     {apply, {emqx_acl_mnesia_migrator, stop_supervised, []}},
     {update, emqx_auth_mnesia_sup, supervisor},
     {load_module,emqx_acl_mnesia_cli, brutal_purge,soft_purge,[]},
     {load_module,emqx_acl_mnesia_api, brutal_purge,soft_purge,[]},
     {load_module,emqx_auth_mnesia_api, brutal_purge,soft_purge,[]},
     {load_module,emqx_acl_mnesia, brutal_purge,soft_purge,[]},
     {delete_module,emqx_acl_mnesia_migrator},
     {delete_module,emqx_acl_mnesia_db}
   ]},
   {<<".*">>, [
   ]}
  ]
}.
@@ -23,7 +23,7 @@
 -import(proplists, [get_value/2]).
 -import(minirest, [return/1]).
--export([paginate/5]).
+-export([paginate_qh/5]).

 -export([ list_clientid/2
         , lookup_clientid/2
@@ -133,15 +133,15 @@ add_clientid(_Bindings, Params) ->
     end.

 do_add_clientid([ Params | ParamsN ], ReList ) ->
-    Clientid = urldecode(get_value(<<"clientid">>, Params)),
+    Clientid = get_value(<<"clientid">>, Params),
     do_add_clientid(ParamsN, [{Clientid, format_msg(do_add_clientid(Params))} | ReList]);

 do_add_clientid([], ReList) ->
     {ok, ReList}.

 do_add_clientid(Params) ->
-    Clientid = urldecode(get_value(<<"clientid">>, Params)),
-    Password = urldecode(get_value(<<"password">>, Params)),
+    Clientid = get_value(<<"clientid">>, Params),
+    Password = get_value(<<"password">>, Params),
     Login = {clientid, Clientid},
     case validate([login, password], [Login, Password]) of
         ok ->
@@ -152,7 +152,7 @@ do_add_clientid(Params) ->
 update_clientid(#{clientid := Clientid}, Params) ->
     Password = get_value(<<"password">>, Params),
     case validate([password], [Password]) of
-        ok -> return(emqx_auth_mnesia_cli:update_user({clientid, urldecode(Clientid)}, urldecode(Password)));
+        ok -> return(emqx_auth_mnesia_cli:update_user({clientid, urldecode(Clientid)}, Password));
         Err -> return(Err)
     end.

@@ -182,15 +182,15 @@ add_username(_Bindings, Params) ->
     end.

 do_add_username([ Params | ParamsN ], ReList ) ->
-    Username = urldecode(get_value(<<"username">>, Params)),
+    Username = get_value(<<"username">>, Params),
     do_add_username(ParamsN, [{Username, format_msg(do_add_username(Params))} | ReList]);

 do_add_username([], ReList) ->
     {ok, ReList}.

 do_add_username(Params) ->
-    Username = urldecode(get_value(<<"username">>, Params)),
-    Password = urldecode(get_value(<<"password">>, Params)),
+    Username = get_value(<<"username">>, Params),
+    Password = get_value(<<"password">>, Params),
     Login = {username, Username},
     case validate([login, password], [Login, Password]) of
         ok ->
@@ -201,7 +201,7 @@ do_add_username(Params) ->
 update_username(#{username := Username}, Params) ->
     Password = get_value(<<"password">>, Params),
     case validate([password], [Password]) of
-        ok -> return(emqx_auth_mnesia_cli:update_user({username, urldecode(Username)}, urldecode(Password)));
+        ok -> return(emqx_auth_mnesia_cli:update_user({username, urldecode(Username)}, Password));
         Err -> return(Err)
     end.

@@ -212,9 +212,12 @@ delete_username(#{username := Username}, _) ->
 %% Paging Query
 %%------------------------------------------------------------------------------

-paginate(Tables, MatchSpec, Params, ComparingFun, RowFun) ->
-    Qh = query_handle(Tables, MatchSpec),
-    Count = count(Tables, MatchSpec),
+paginate(Table, MatchSpec, Params, ComparingFun, RowFun) ->
+    Qh = query_handle(Table, MatchSpec),
+    Count = count(Table, MatchSpec),
+    paginate_qh(Qh, Count, Params, ComparingFun, RowFun).
+
+paginate_qh(Qh, Count, Params, ComparingFun, RowFun) ->
     Page = page(Params),
     Limit = limit(Params),
     Cursor = qlc:cursor(Qh),
@@ -231,24 +234,12 @@ paginate(Tables, MatchSpec, Params, ComparingFun, RowFun) ->

 query_handle(Table, MatchSpec) when is_atom(Table) ->
     Options = {traverse, {select, MatchSpec}},
-    qlc:q([R|| R <- ets:table(Table, Options)]);
-query_handle([Table], MatchSpec) when is_atom(Table) ->
-    Options = {traverse, {select, MatchSpec}},
-    qlc:q([R|| R <- ets:table(Table, Options)]);
-query_handle(Tables, MatchSpec) ->
-    Options = {traverse, {select, MatchSpec}},
-    qlc:append([qlc:q([E || E <- ets:table(T, Options)]) || T <- Tables]).
+    qlc:q([R || R <- ets:table(Table, Options)]).

 count(Table, MatchSpec) when is_atom(Table) ->
     [{MatchPattern, Where, _Re}] = MatchSpec,
     NMatchSpec = [{MatchPattern, Where, [true]}],
-    ets:select_count(Table, NMatchSpec);
-count([Table], MatchSpec) when is_atom(Table) ->
-    [{MatchPattern, Where, _Re}] = MatchSpec,
-    NMatchSpec = [{MatchPattern, Where, [true]}],
-    ets:select_count(Table, NMatchSpec);
-count(Tables, MatchSpec) ->
-    lists:sum([count(T, MatchSpec) || T <- Tables]).
+    ets:select_count(Table, NMatchSpec).

 page(Params) ->
     binary_to_integer(proplists:get_value(<<"_page">>, Params, <<"1">>)).
@@ -263,13 +254,11 @@ limit(Params) ->
 %% Interval Funcs
 %%------------------------------------------------------------------------------

-format([{?TABLE, {clientid, ClientId}, Password, _InterTime}]) ->
-    #{clientid => ClientId,
-      password => Password};
+format([{?TABLE, {clientid, ClientId}, _Password, _InterTime}]) ->
+    #{clientid => ClientId};

-format([{?TABLE, {username, Username}, Password, _InterTime}]) ->
-    #{username => Username,
-      password => Password};
+format([{?TABLE, {username, Username}, _Password, _InterTime}]) ->
+    #{username => Username};

 format([]) ->
     #{}.
@@ -33,4 +33,16 @@ start_link() ->
 %%--------------------------------------------------------------------

 init([]) ->
-    {ok, {{one_for_one, 10, 100}, []}}.
+    {ok, {{one_for_one, 10, 100}, [
+        child_spec(emqx_acl_mnesia_migrator, worker, [])
+    ]}}.
+
+child_spec(M, worker, Args) ->
+    #{id => M,
+      start => {M, start_link, Args},
+      restart => permanent,
+      shutdown => 5000,
+      type => worker,
+      modules => [M]
+     }.
@@ -22,6 +22,7 @@
 -include("emqx_auth_mnesia.hrl").
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").

 -import(emqx_ct_http, [ request_api/3
                       , request_api/5
@@ -39,10 +40,15 @@ all() ->
     emqx_ct:all(?MODULE).

 groups() ->
-    [].
+    [{async_migration_tests, [sequence], [
+        t_old_and_new_acl_migration_by_migrator,
+        t_old_and_new_acl_migration_repeated_by_migrator,
+        t_migration_concurrency
+    ]}].

 init_per_suite(Config) ->
     emqx_ct_helpers:start_apps([emqx_modules, emqx_management, emqx_auth_mnesia], fun set_special_configs/1),
+    supervisor:terminate_child(emqx_auth_mnesia_sup, emqx_acl_mnesia_migrator),
     create_default_app(),
     Config.

@@ -50,14 +56,32 @@ end_per_suite(_Config) ->
     delete_default_app(),
     emqx_ct_helpers:stop_apps([emqx_modules, emqx_management, emqx_auth_mnesia]).

-init_per_testcase(t_check_acl_as_clientid, Config) ->
+init_per_testcase_clean(_, Config) ->
+    mnesia:clear_table(?ACL_TABLE),
+    mnesia:clear_table(?ACL_TABLE2),
+    Config.
+
+init_per_testcase_emqx_hook(t_check_acl_as_clientid, Config) ->
     emqx:hook('client.check_acl', fun emqx_acl_mnesia:check_acl/5, [#{key_as => clientid}]),
     Config;
-init_per_testcase(_, Config) ->
+init_per_testcase_emqx_hook(_, Config) ->
     emqx:hook('client.check_acl', fun emqx_acl_mnesia:check_acl/5, [#{key_as => username}]),
     Config.

+init_per_testcase_migration(t_management_before_migration, Config) ->
+    Config;
+init_per_testcase_migration(_, Config) ->
+    emqx_acl_mnesia_migrator:migrate_records(),
+    Config.
+
+init_per_testcase(Case, Config) ->
+    PerTestInitializers = [
+        fun init_per_testcase_clean/2,
+        fun init_per_testcase_migration/2,
+        fun init_per_testcase_emqx_hook/2
+    ],
+    lists:foldl(fun(Init, Conf) -> Init(Case, Conf) end, Config, PerTestInitializers).
+
 end_per_testcase(_, Config) ->
     emqx:unhook('client.check_acl', fun emqx_acl_mnesia:check_acl/5),
     Config.
@@ -76,25 +100,34 @@ set_special_configs(_App) ->
 %% Testcases
 %%------------------------------------------------------------------------------

-t_management(_Config) ->
-    clean_all_acls(),
-    ?assertEqual("Acl with Mnesia", emqx_acl_mnesia:description()),
-    ?assertEqual([], emqx_acl_mnesia_cli:all_acls()),
-
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/%c">>, sub, allow),
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/+">>, pub, deny),
-    ok = emqx_acl_mnesia_cli:add_acl({username, <<"test_username">>}, <<"topic/%u">>, sub, deny),
-    ok = emqx_acl_mnesia_cli:add_acl({username, <<"test_username">>}, <<"topic/+">>, pub, allow),
-    ok = emqx_acl_mnesia_cli:add_acl(all, <<"#">>, pubsub, deny),
+t_management_before_migration(_Config) ->
+    {atomic, IsStarted} = mnesia:transaction(fun emqx_acl_mnesia_db:is_migration_started/0),
+    ?assertNot(IsStarted),
+    run_acl_tests().
+
+t_management_after_migration(_Config) ->
+    {atomic, IsStarted} = mnesia:transaction(fun emqx_acl_mnesia_db:is_migration_started/0),
+    ?assert(IsStarted),
+    run_acl_tests().
+
+run_acl_tests() ->
+    ?assertEqual("Acl with Mnesia", emqx_acl_mnesia:description()),
+    ?assertEqual([], emqx_acl_mnesia_db:all_acls()),
+
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/%c">>, sub, allow),
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/+">>, pub, deny),
+    ok = emqx_acl_mnesia_db:add_acl({username, <<"test_username">>}, <<"topic/%u">>, sub, deny),
+    ok = emqx_acl_mnesia_db:add_acl({username, <<"test_username">>}, <<"topic/+">>, pub, allow),
+    ok = emqx_acl_mnesia_db:add_acl(all, <<"#">>, pubsub, deny),
     %% Sleeps below are needed to hide the race condition between
     %% mnesia and ets dirty select in check_acl, that make this test
     %% flaky
     timer:sleep(100),

-    ?assertEqual(2, length(emqx_acl_mnesia_cli:lookup_acl({clientid, <<"test_clientid">>}))),
-    ?assertEqual(2, length(emqx_acl_mnesia_cli:lookup_acl({username, <<"test_username">>}))),
-    ?assertEqual(2, length(emqx_acl_mnesia_cli:lookup_acl(all))),
-    ?assertEqual(6, length(emqx_acl_mnesia_cli:all_acls())),
+    ?assertEqual(2, length(emqx_acl_mnesia_db:lookup_acl({clientid, <<"test_clientid">>}))),
+    ?assertEqual(2, length(emqx_acl_mnesia_db:lookup_acl({username, <<"test_username">>}))),
+    ?assertEqual(2, length(emqx_acl_mnesia_db:lookup_acl(all))),
+    ?assertEqual(6, length(emqx_acl_mnesia_db:all_acls())),

     User1 = #{zone => external, clientid => <<"test_clientid">>},
     User2 = #{zone => external, clientid => <<"no_exist">>, username => <<"test_username">>},
@@ -110,30 +143,30 @@ t_management(_Config) ->
     deny = emqx_access_control:check_acl(User3, publish, <<"topic/A/B">>),

     %% Test merging of pubsub capability:
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pubsub, deny),
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pubsub, deny),
     timer:sleep(100),
     deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>),
     deny = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>),
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, allow),
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, allow),
     timer:sleep(100),
     deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>),
     allow = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>),
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pubsub, allow),
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pubsub, allow),
     timer:sleep(100),
     allow = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>),
     allow = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>),
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, sub, deny),
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, sub, deny),
     timer:sleep(100),
     deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>),
     allow = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>),
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, deny),
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, deny),
     timer:sleep(100),
     deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>),
     deny = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>),

     %% Test implicit migration of pubsub to pub and sub:
-    ok = emqx_acl_mnesia_cli:remove_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>),
-    ok = mnesia:dirty_write(#emqx_acl{
+    ok = emqx_acl_mnesia_db:remove_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>),
+    ok = mnesia:dirty_write(#?ACL_TABLE{
         filter = {{clientid, <<"test_clientid">>}, <<"topic/mix">>},
         action = pubsub,
         access = allow,
@@ -142,24 +175,130 @@ t_management(_Config) ->
     timer:sleep(100),
     allow = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>),
     allow = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>),
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, deny),
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, deny),
     timer:sleep(100),
     allow = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>),
     deny = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>),
-    ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, sub, deny),
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, sub, deny),
     timer:sleep(100),
     deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>),
     deny = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>),

-    ok = emqx_acl_mnesia_cli:remove_acl({clientid, <<"test_clientid">>}, <<"topic/%c">>),
-    ok = emqx_acl_mnesia_cli:remove_acl({clientid, <<"test_clientid">>}, <<"topic/+">>),
-    ok = emqx_acl_mnesia_cli:remove_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>),
-    ok = emqx_acl_mnesia_cli:remove_acl({username, <<"test_username">>}, <<"topic/%u">>),
-    ok = emqx_acl_mnesia_cli:remove_acl({username, <<"test_username">>}, <<"topic/+">>),
-    ok = emqx_acl_mnesia_cli:remove_acl(all, <<"#">>),
+    ok = emqx_acl_mnesia_db:remove_acl({clientid, <<"test_clientid">>}, <<"topic/%c">>),
+    ok = emqx_acl_mnesia_db:remove_acl({clientid, <<"test_clientid">>}, <<"topic/+">>),
+    ok = emqx_acl_mnesia_db:remove_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>),
+    ok = emqx_acl_mnesia_db:remove_acl({username, <<"test_username">>}, <<"topic/%u">>),
+    ok = emqx_acl_mnesia_db:remove_acl({username, <<"test_username">>}, <<"topic/+">>),
+    ok = emqx_acl_mnesia_db:remove_acl(all, <<"#">>),
     timer:sleep(100),

-    ?assertEqual([], emqx_acl_mnesia_cli:all_acls()).
+    ?assertEqual([], emqx_acl_mnesia_db:all_acls()).
+
+t_old_and_new_acl_combination(_Config) ->
+    create_conflicting_records(),
+
+    ?assertEqual(combined_conflicting_records(), emqx_acl_mnesia_db:all_acls()),
+    ?assertEqual(
+        lists:usort(combined_conflicting_records()),
+        lists:usort(emqx_acl_mnesia_db:all_acls_export())).
+
+t_old_and_new_acl_migration(_Config) ->
+    create_conflicting_records(),
+    emqx_acl_mnesia_migrator:migrate_records(),
+
+    ?assertEqual(combined_conflicting_records(), emqx_acl_mnesia_db:all_acls()),
+    ?assertEqual(
+        lists:usort(combined_conflicting_records()),
+        lists:usort(emqx_acl_mnesia_db:all_acls_export())),
+
+    % check that old table is not popoulated anymore
+    ok = emqx_acl_mnesia_db:add_acl({clientid, <<"test_clientid">>}, <<"topic/%c">>, sub, allow),
+    ?assert(emqx_acl_mnesia_migrator:is_old_table_migrated()).
+
+t_migration_concurrency(_Config) ->
+    Key = {{clientid,<<"client6">>}, <<"t">>},
+    Record = #?ACL_TABLE{filter = Key, action = pubsub, access = deny, created_at = 0},
+    {atomic, ok} = mnesia:transaction(fun mnesia:write/1, [Record]),
+
+    LockWaitAndDelete =
+        fun() ->
+            [_Rec] = mnesia:wread({?ACL_TABLE, Key}),
+            {{Pid, Ref}, _} =
+                ?wait_async_action(spawn_monitor(fun emqx_acl_mnesia_migrator:migrate_records/0),
+                                   #{?snk_kind := emqx_acl_mnesia_migrator_record_selected},
+                                   1000),
+            mnesia:delete({?ACL_TABLE, Key}),
+            {Pid, Ref}
+        end,
+
+    ?check_trace(
+        begin
+            {atomic, {Pid, Ref}} = mnesia:transaction(LockWaitAndDelete),
+            receive {'DOWN', Ref, process, Pid, _} -> ok end
+        end,
+        fun(_, Trace) ->
+            ?assertMatch([_], ?of_kind(emqx_acl_mnesia_migrator_record_missed, Trace))
+        end),
+
+    ?assert(emqx_acl_mnesia_migrator:is_old_table_migrated()),
+    ?assertEqual([], emqx_acl_mnesia_db:all_acls()).
+
+t_old_and_new_acl_migration_by_migrator(_Config) ->
+    create_conflicting_records(),
+
+    meck:new(fake_nodes, [non_strict]),
+    meck:expect(fake_nodes, all, fun() -> [node(), 'somebadnode@127.0.0.1'] end),
+
+    ?check_trace(
+        begin
+            % check all nodes every 30 ms
+            {ok, _} = emqx_acl_mnesia_migrator:start_link(#{
+                name => ct_migrator,
+                check_nodes_interval => 30,
+                get_nodes => fun fake_nodes:all/0
+            }),
+            timer:sleep(100)
+        end,
+        fun(_, Trace) ->
+            ?assertEqual([], ?of_kind(emqx_acl_mnesia_migrator_start_migration, Trace))
+        end),
+
+    ?check_trace(
+        begin
+            meck:expect(fake_nodes, all, fun() -> [node()] end),
+            timer:sleep(100)
+        end,
+        fun(_, Trace) ->
+            ?assertMatch([_], ?of_kind(emqx_acl_mnesia_migrator_finish, Trace))
+        end),
+
+    meck:unload(fake_nodes),
+
+    ?assertEqual(combined_conflicting_records(), emqx_acl_mnesia_db:all_acls()),
+    ?assert(emqx_acl_mnesia_migrator:is_old_table_migrated()).
+
+t_old_and_new_acl_migration_repeated_by_migrator(_Config) ->
+    create_conflicting_records(),
+    emqx_acl_mnesia_migrator:migrate_records(),
+
+    ?check_trace(
+        begin
+            {ok, _} = emqx_acl_mnesia_migrator:start_link(ct_migrator),
+            timer:sleep(100)
+        end,
+        fun(_, Trace) ->
+            ?assertEqual([], ?of_kind(emqx_acl_mnesia_migrator_start_migration, Trace)),
+            ?assertMatch([_], ?of_kind(emqx_acl_mnesia_migrator_finish, Trace))
+        end).
+
+t_start_stop_supervised(_Config) ->
+    ?assertEqual(undefined, whereis(emqx_acl_mnesia_migrator)),
+    ok = emqx_acl_mnesia_migrator:start_supervised(),
+    ?assert(is_pid(whereis(emqx_acl_mnesia_migrator))),
+    ok = emqx_acl_mnesia_migrator:stop_supervised(),
+    ?assertEqual(undefined, whereis(emqx_acl_mnesia_migrator)).

 t_acl_cli(_Config) ->
     meck:new(emqx_ctl, [non_strict, passthrough]),
@@ -168,8 +307,6 @@ t_acl_cli(_Config) ->
     meck:expect(emqx_ctl, usage, fun(Usages) -> emqx_ctl:format_usage(Usages) end),
     meck:expect(emqx_ctl, usage, fun(Cmd, Descr) -> emqx_ctl:format_usage(Cmd, Descr) end),

-    clean_all_acls(),
-
     ?assertEqual(0, length(emqx_acl_mnesia_cli:cli(["list"]))),

     emqx_acl_mnesia_cli:cli(["add", "clientid", "test_clientid", "topic/A", "pub", "deny"]),
@@ -202,8 +339,6 @@ t_acl_cli(_Config) ->
     meck:unload(emqx_ctl).

 t_rest_api(_Config) ->
-    clean_all_acls(),
-
     Params1 = [#{<<"clientid">> => <<"test_clientid">>,
                  <<"topic">> => <<"topic/A">>,
                  <<"action">> => <<"pub">>,
@@ -273,13 +408,24 @@ t_rest_api(_Config) ->
     {ok, Res3} = request_http_rest_list(["$all"]),
     ?assertMatch([], get_http_data(Res3)).

-%%------------------------------------------------------------------------------
-%% Helpers
-%%------------------------------------------------------------------------------
-
-clean_all_acls() ->
-    [ mnesia:dirty_delete({emqx_acl, Login})
-      || Login <- mnesia:dirty_all_keys(emqx_acl)].
+create_conflicting_records() ->
+    Records = [
+        #?ACL_TABLE{filter = {{clientid,<<"client6">>}, <<"t">>}, action = pubsub, access = deny, created_at = 0},
+        #?ACL_TABLE{filter = {{clientid,<<"client5">>}, <<"t">>}, action = pubsub, access = deny, created_at = 1},
+        #?ACL_TABLE2{who = {clientid,<<"client5">>}, rules = [{allow, sub, <<"t">>, 2}]}
+    ],
+    mnesia:transaction(fun() -> lists:foreach(fun mnesia:write/1, Records) end).
+
+combined_conflicting_records() ->
+    % pubsub's are split, ACL_TABLE2 rules shadow ACL_TABLE rules
+    [
+        {{clientid,<<"client5">>},<<"t">>,sub,allow,2},
+        {{clientid,<<"client5">>},<<"t">>,pub,deny,1},
+        {{clientid,<<"client6">>},<<"t">>,sub,deny,0},
+        {{clientid,<<"client6">>},<<"t">>,pub,deny,0}
+    ].

 %%--------------------------------------------------------------------
 %% HTTP Request
@@ -1,6 +1,6 @@
 {deps,
   %% NOTE: mind poolboy version when updating mongodb-erlang version
- [{mongodb, {git,"https://github.com/emqx/mongodb-erlang", {tag, "v3.0.7"}}},
+ [{mongodb, {git,"https://github.com/emqx/mongodb-erlang", {tag, "v3.0.10"}}},
   %% mongodb-erlang uses a special fork https://github.com/comtihon/poolboy.git
   %% (which has overflow_ttl feature added).
   %% However, it references `{branch, "master}` (commit 9c06a9a on 2021-04-07).
@@ -1,23 +1,23 @@
 -----BEGIN CERTIFICATE-----
-MIID1zCCAb8CCQC/+qKgZd+m/DANBgkqhkiG9w0BAQsFADA1MRMwEQYDVQQKDApS
-ZWRpcyBUZXN0MR4wHAYDVQQDDBVDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMjAx
-MDI5MDEzNDE2WhcNMjExMDI5MDEzNDE2WjAmMRMwEQYDVQQKDApSZWRpcyBUZXN0
+MIID1zCCAb8CCQC/+qKgZd+m/jANBgkqhkiG9w0BAQsFADA1MRMwEQYDVQQKDApS
+ZWRpcyBUZXN0MR4wHAYDVQQDDBVDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMjEx
+MTAxMDgwMDU1WhcNMzExMDMwMDgwMDU1WjAmMRMwEQYDVQQKDApSZWRpcyBUZXN0
 MQ8wDQYDVQQDDAZTZXJ2ZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
 AQDSs3bQ9sYi2AhFuHU75Ryk1HHSgfzA6pQAJilmJdTy0s5vyiWe1HQJaWkMcS5V
 GVzGMK+c+OBqtXtDDninL3betg1YPMjSCOjPMOTC1H9K7+effwf7Iwpnw9Zro8mb
 TEmMslIYhhcDedzT9Owli4QAgbgTn4l1BYuKX9CLrrKFtnr21miKu3ydViy9q7T1
 pib3eigvAyk7X2fadHFArGEttsXrD6cetPPkSF/1OLWNlqzUKXzhSyrBXzO44Kks
 fwR/EpTiES9g4dNOL2wvKS/YE1fNKhiCENrNxTXQo1l0yOdm2+MeyOeHFzRuS0b/
-+uGDFOPPi04KXeO6dQ5olBCPAgMBAAEwDQYJKoZIhvcNAQELBQADggIBADn0E2vG
-iQWe8/I7VbBdPhPNupVNcLvew10eIHxY2g5vSruCSVRQTgk8itVMRmDQxbb7gdDW
-jnCRbxykxbLjM9iCRljnOCsIcTi7qO7JRl8niV8dtEpPOs9lZxEdNXjIV1iZoWf3
-arBbPQSyQZvTQHG6qbFnyCdMMyyXGGvEPGQDaBiKH+Ko1qeAbCi0zupChYvxmtZ8
-hSTPlMFezDT9bKoNY0pkJSELfokEPU/Pn6Lz/NVbdzmCMjVa/xmF3s31g+DGhz95
-4AyOnCr6o0aydPVVV3pB/BCezNXPUxpp53BG0w/K2f2DnKYCvGvJbqDAaJ8bG/J1
-EFSOmwobdwVxJz3KNubmo1qJ6xOl/YT7yyqPRQRM1SY8nZW+YcoJSZjOe8wJVlob
-d0bOwN1C3HQwomyMWes187bEQP6Y36HuEbR1fK8yIOzGsGDKRFAFwQwMgw2M91lr
-EJIP5NRD3OZRuiYDiVfVhDZDaNahrAMZUcPCgeCAwc4YG6Gp2sDtdorOl4kIJYWE
-BbBZ0Jplq9+g6ciu5ChjAW8iFl0Ae5U24MxPGXnrxiRF4WWxLeZMVLXLDvlPqReD
-CHII5ifyvGEt5+RhqtZC/L+HimL+5wQgOlntqhUdLb6yWRz7YW37PFMnUXU3MXe9
-uY7m73ZLluXiLojcZxU2+cx89u5FOJxrYtrj
++uGDFOPPi04KXeO6dQ5olBCPAgMBAAEwDQYJKoZIhvcNAQELBQADggIBALRSylnk
+JJhEFRniuQ+H1kbfZlVOqnSqGkm38r8X76dnYRZfkFlxVzU2q5HPnSdiL9D3JrrH
+P7wIA5zjr76zK7GPJjkRExRZy5sTLSpsGp7YIGAZ19J3KsDVHSOvPTl38c6L217a
+YzPeQL5IrrW55URmA5PZFu3lsm9z7CNguw1wn2pCNNB+r/cRl4iELehZJT891CQe
+nV9a1YfHY/DkDoMnmrKqmeYdvje8n1uSqTnIV/wNiASU36ztxxD8ZmwprxxbjLSs
+aBjBvsR/eBHbWrz2W1dc5ppgGLuCkiEKmh6/IWX/ZQqyBCzZkmFNiTs8QiLtmoC4
+2bXkPVSyq5wT7eisGbRdcY9vGDtoW/WZOmFVA4XEDVx8M9fb4speHwoHRuTfWsA0
+6Y8P9XpYjG2tQoPpxrZaRshZ+SiHWPov7cAvY34szFePfTWR8gzbL6SgpDz30ceh
+XIuTArOMQMhfWHn3NaOc6hlkRsoviNhc5IXR9VjIdaNJCamEoLVNWZsvHJCUiP10
+yx+9/0a9vI6G+i8oKQ+eKJsfP8Ikoiolf7vU6M+/1kF+sSMxGjFwkMCxLgZB67+a
+m9kw83sVfykWLQ3eRwhdBz0/JiiYtDbbtyqgs3kPhJs9SGZUhDc/7R0lTWf4zxoJ
+l3y7pn/3nJvYrGX7uCBbWPUuqWeHVM9Ip6AZ
 -----END CERTIFICATE-----
@@ -114,6 +114,17 @@ bridge.mqtt.aws.keyfile = {{ platform_etc_dir }}/certs/client-key.pem
 ## Value: String
 bridge.mqtt.aws.ciphers = TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256,TLS_CHACHA20_POLY1305_SHA256,TLS_AES_128_CCM_SHA256,TLS_AES_128_CCM_8_SHA256,ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-DES-CBC3-SHA,ECDH-ECDSA-AES256-GCM-SHA384,ECDH-RSA-AES256-GCM-SHA384,ECDH-ECDSA-AES256-SHA384,ECDH-RSA-AES256-SHA384,DHE-DSS-AES256-GCM-SHA384,DHE-DSS-AES256-SHA256,AES256-GCM-SHA384,AES256-SHA256,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256,ECDH-ECDSA-AES128-GCM-SHA256,ECDH-RSA-AES128-GCM-SHA256,ECDH-ECDSA-AES128-SHA256,ECDH-RSA-AES128-SHA256,DHE-DSS-AES128-GCM-SHA256,DHE-DSS-AES128-SHA256,AES128-GCM-SHA256,AES128-SHA256,ECDHE-ECDSA-AES256-SHA,ECDHE-RSA-AES256-SHA,DHE-DSS-AES256-SHA,ECDH-ECDSA-AES256-SHA,ECDH-RSA-AES256-SHA,AES256-SHA,ECDHE-ECDSA-AES128-SHA,ECDHE-RSA-AES128-SHA,DHE-DSS-AES128-SHA,ECDH-ECDSA-AES128-SHA,ECDH-RSA-AES128-SHA,AES128-SHA

+## SSL peer validation with verify_peer or verify_none
+## More information at: http://erlang.org/doc/man/ssl.html
+##
+## Value: true | false
+#bridge.mqtt.aws.verify = false
+
+## SSL hostname to be used in TLS Server Name Indication extension
+##
+## Value: String | disable
+#bridge.mqtt.aws.server_name_indication = disable
+
 ## Ciphers for TLS PSK.
 ## Note that 'bridge.${BridgeName}.ciphers' and 'bridge.${BridgeName}.psk_ciphers' cannot
 ## be configured at the same time.
@ -75,6 +75,14 @@
|
||||||
{datatype, string}
|
{datatype, string}
|
||||||
]}.
|
]}.
|
||||||
|
|
||||||
|
{mapping, "bridge.mqtt.$name.verify", "emqx_bridge_mqtt.bridges", [
|
||||||
|
{datatype, {enum, [true, false]}}
|
||||||
|
]}.
|
||||||
|
|
||||||
|
{mapping, "bridge.mqtt.$name.server_name_indication", "emqx_bridge_mqtt.bridges", [
|
||||||
|
{datatype, string}
|
||||||
|
]}.
|
||||||
|
|
||||||
{mapping, "bridge.mqtt.$name.ciphers", "emqx_bridge_mqtt.bridges", [
|
{mapping, "bridge.mqtt.$name.ciphers", "emqx_bridge_mqtt.bridges", [
|
||||||
{datatype, string}
|
{datatype, string}
|
||||||
]}.
|
]}.
|
||||||
|
@ -144,6 +152,8 @@
|
||||||
(ciphers) -> true;
|
(ciphers) -> true;
|
||||||
(psk_ciphers) -> true;
|
(psk_ciphers) -> true;
|
||||||
(tls_versions) -> true;
|
(tls_versions) -> true;
|
||||||
|
(verify) -> true;
|
||||||
|
(server_name_indication) -> true;
|
||||||
(_Opt) -> false
|
(_Opt) -> false
|
||||||
end,
|
end,
|
||||||
|
|
||||||
|
@ -153,6 +163,14 @@
|
||||||
[{ciphers, Split(Ciphers)}];
|
[{ciphers, Split(Ciphers)}];
|
||||||
(psk_ciphers, Ciphers) ->
|
(psk_ciphers, Ciphers) ->
|
||||||
[{ciphers, MapPSKCiphers(Split(Ciphers))}, {user_lookup_fun, {fun emqx_psk:lookup/3, <<>>}}];
|
[{ciphers, MapPSKCiphers(Split(Ciphers))}, {user_lookup_fun, {fun emqx_psk:lookup/3, <<>>}}];
|
||||||
|
(verify, true) ->
|
||||||
|
[{verify, verify_peer}];
|
||||||
|
(verify, false) ->
|
||||||
|
[{verify, verify_none}];
|
||||||
|
(server_name_indication, "disabled") ->
|
||||||
|
[{server_name_indication, disabled}];
|
||||||
|
(server_name_indication, Hostname) ->
|
||||||
|
[{server_name_indication, Hostname}];
|
||||||
(Opt, Val) ->
|
(Opt, Val) ->
|
||||||
[{Opt, Val}]
|
[{Opt, Val}]
|
||||||
end,
|
end,
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{application, emqx_bridge_mqtt,
|
{application, emqx_bridge_mqtt,
|
||||||
[{description, "EMQ X Bridge to MQTT Broker"},
|
[{description, "EMQ X Bridge to MQTT Broker"},
|
||||||
{vsn, "4.3.1"}, % strict semver, bump manually!
|
{vsn, "4.3.2"}, % strict semver, bump manually!
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{applications, [kernel,stdlib,replayq,emqtt]},
|
{applications, [kernel,stdlib,replayq,emqtt]},
|
||||||
|
|
|
@ -5,12 +5,14 @@
|
||||||
{"4.3.0", [
|
{"4.3.0", [
|
||||||
{load_module, emqx_bridge_worker, brutal_purge, soft_purge, []}
|
{load_module, emqx_bridge_worker, brutal_purge, soft_purge, []}
|
||||||
]},
|
]},
|
||||||
|
{"4.3.1", []},
|
||||||
{<<".*">>, []}
|
{<<".*">>, []}
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
{"4.3.0", [
|
{"4.3.0", [
|
||||||
{load_module, emqx_bridge_worker, brutal_purge, soft_purge, []}
|
{load_module, emqx_bridge_worker, brutal_purge, soft_purge, []}
|
||||||
]},
|
]},
|
||||||
|
{"4.3.1", []},
|
||||||
{<<".*">>, []}
|
{<<".*">>, []}
|
||||||
]
|
]
|
||||||
}.
|
}.
|
||||||
|
|
|
@ -27,7 +27,7 @@
|
||||||
|
|
||||||
-define(wait(For, Timeout), emqx_ct_helpers:wait_for(?FUNCTION_NAME, ?LINE, fun() -> For end, Timeout)).
|
-define(wait(For, Timeout), emqx_ct_helpers:wait_for(?FUNCTION_NAME, ?LINE, fun() -> For end, Timeout)).
|
||||||
|
|
||||||
-define(SNK_WAIT(WHAT), ?assertMatch({ok, _}, ?block_until(#{?snk_kind := WHAT}, 2000, 1000))).
|
-define(SNK_WAIT(WHAT), ?assertMatch({ok, _}, ?block_until(#{?snk_kind := WHAT}, 5000, 5000))).
|
||||||
|
|
||||||
receive_messages(Count) ->
|
receive_messages(Count) ->
|
||||||
receive_messages(Count, []).
|
receive_messages(Count, []).
|
||||||
|
|
|
@ -2,8 +2,31 @@
|
||||||
## EMQ X Hooks
|
## EMQ X Hooks
|
||||||
##====================================================================
|
##====================================================================
|
||||||
|
|
||||||
|
## The default value or action will be returned, while the request to
|
||||||
|
## the gRPC server failed or no available grpc server running.
|
||||||
|
##
|
||||||
|
## Default: deny
|
||||||
|
## Value: ignore | deny
|
||||||
|
#exhook.request_failed_action = deny
|
||||||
|
|
||||||
|
## The timeout to request grpc server
|
||||||
|
##
|
||||||
|
## Default: 5s
|
||||||
|
## Value: Duration
|
||||||
|
#exhook.request_timeout = 5s
|
||||||
|
|
||||||
|
## Whether to automatically reconnect (initialize) the gRPC server
|
||||||
|
##
|
||||||
|
## When gRPC is not available, exhook tries to request the gRPC service at
|
||||||
|
## that interval and reinitialize the list of mounted hooks.
|
||||||
|
##
|
||||||
|
## Default: false
|
||||||
|
## Value: false | Duration
|
||||||
|
#exhook.auto_reconnect = 60s
|
||||||
|
|
||||||
|
|
||||||
##--------------------------------------------------------------------
|
##--------------------------------------------------------------------
|
||||||
## Server Address
|
## The Hook callback servers
|
||||||
|
|
||||||
## The gRPC server url
|
## The gRPC server url
|
||||||
##
|
##
|
||||||
|
|
|
@ -1,5 +1,31 @@
|
||||||
%%-*- mode: erlang -*-
|
%%-*- mode: erlang -*-
|
||||||
|
|
||||||
|
{mapping, "exhook.request_failed_action", "emqx_exhook.request_failed_action", [
|
||||||
|
{default, "deny"},
|
||||||
|
{datatype, {enum, [ignore, deny]}}
|
||||||
|
]}.
|
||||||
|
|
||||||
|
{mapping, "exhook.request_timeout", "emqx_exhook.request_timeout", [
|
||||||
|
{default, "5s"},
|
||||||
|
{datatype, {duration, ms}}
|
||||||
|
]}.
|
||||||
|
|
||||||
|
{mapping, "exhook.auto_reconnect", "emqx_exhook.auto_reconnect", [
|
||||||
|
{default, "60s"},
|
||||||
|
{datatype, string}
|
||||||
|
]}.
|
||||||
|
|
||||||
|
{translation, "emqx_exhook.auto_reconnect", fun(Conf) ->
|
||||||
|
case cuttlefish:conf_get("exhook.auto_reconnect", Conf) of
|
||||||
|
"false" -> false;
|
||||||
|
Dur ->
|
||||||
|
case cuttlefish_duration:parse(Dur, ms) of
|
||||||
|
Ms when is_integer(Ms) -> Ms;
|
||||||
|
{error, Reason} -> error(Reason)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end}.
|
||||||
|
|
||||||
{mapping, "exhook.server.$name.url", "emqx_exhook.servers", [
|
{mapping, "exhook.server.$name.url", "emqx_exhook.servers", [
|
||||||
{datatype, string}
|
{datatype, string}
|
||||||
]}.
|
]}.
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
]}.
|
]}.
|
||||||
|
|
||||||
{deps,
|
{deps,
|
||||||
[{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.2"}}}
|
[{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.3"}}}
|
||||||
]}.
|
]}.
|
||||||
|
|
||||||
{grpc,
|
{grpc,
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{application, emqx_exhook,
|
{application, emqx_exhook,
|
||||||
[{description, "EMQ X Extension for Hook"},
|
[{description, "EMQ X Extension for Hook"},
|
||||||
{vsn, "4.3.1"},
|
{vsn, "4.3.4"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_exhook_app, []}},
|
{mod, {emqx_exhook_app, []}},
|
||||||
|
|
|
@ -1,14 +1,14 @@
|
||||||
%% -*-: erlang -*-
|
%% -*-: erlang -*-
|
||||||
{VSN,
|
{VSN,
|
||||||
[
|
[
|
||||||
{"4.3.0", [
|
{<<"4.3.[0-3]">>, [
|
||||||
{load_module, emqx_exhook_pb, brutal_purge, soft_purge, []}
|
{restart_application, emqx_exhook}
|
||||||
]},
|
]},
|
||||||
{<<".*">>, []}
|
{<<".*">>, []}
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
{"4.3.0", [
|
{<<"4.3.[0-3]">>, [
|
||||||
{load_module, emqx_exhook_pb, brutal_purge, soft_purge, []}
|
{restart_application, emqx_exhook}
|
||||||
]},
|
]},
|
||||||
{<<".*">>, []}
|
{<<".*">>, []}
|
||||||
]
|
]
|
||||||
|
|
|
@ -21,10 +21,8 @@
|
||||||
|
|
||||||
-logger_header("[ExHook]").
|
-logger_header("[ExHook]").
|
||||||
|
|
||||||
%% Mgmt APIs
|
-export([ enable/1
|
||||||
-export([ enable/2
|
|
||||||
, disable/1
|
, disable/1
|
||||||
, disable_all/0
|
|
||||||
, list/0
|
, list/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
@ -36,101 +34,85 @@
|
||||||
%% Mgmt APIs
|
%% Mgmt APIs
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
%% XXX: Only return the running servers
|
-spec enable(atom()|string()) -> ok | {error, term()}.
|
||||||
-spec list() -> [emqx_exhook_server:server()].
|
enable(Name) ->
|
||||||
list() ->
|
with_mngr(fun(Pid) -> emqx_exhook_mngr:enable(Pid, Name) end).
|
||||||
[server(Name) || Name <- running()].
|
|
||||||
|
|
||||||
-spec enable(atom()|string(), list()) -> ok | {error, term()}.
|
|
||||||
enable(Name, Opts) ->
|
|
||||||
case lists:member(Name, running()) of
|
|
||||||
true ->
|
|
||||||
{error, already_started};
|
|
||||||
_ ->
|
|
||||||
case emqx_exhook_server:load(Name, Opts) of
|
|
||||||
{ok, ServiceState} ->
|
|
||||||
save(Name, ServiceState);
|
|
||||||
{error, Reason} ->
|
|
||||||
?LOG(error, "Load server ~p failed: ~p", [Name, Reason]),
|
|
||||||
{error, Reason}
|
|
||||||
end
|
|
||||||
end.
|
|
||||||
|
|
||||||
-spec disable(atom()|string()) -> ok | {error, term()}.
|
-spec disable(atom()|string()) -> ok | {error, term()}.
|
||||||
disable(Name) ->
|
disable(Name) ->
|
||||||
case server(Name) of
|
with_mngr(fun(Pid) -> emqx_exhook_mngr:disable(Pid, Name) end).
|
||||||
undefined -> {error, not_running};
|
|
||||||
Service ->
|
-spec list() -> [atom() | string()].
|
||||||
ok = emqx_exhook_server:unload(Service),
|
list() ->
|
||||||
unsave(Name)
|
with_mngr(fun(Pid) -> emqx_exhook_mngr:list(Pid) end).
|
||||||
|
|
||||||
|
with_mngr(Fun) ->
|
||||||
|
case lists:keyfind(emqx_exhook_mngr, 1,
|
||||||
|
supervisor:which_children(emqx_exhook_sup)) of
|
||||||
|
{_, Pid, _, _} ->
|
||||||
|
Fun(Pid);
|
||||||
|
_ ->
|
||||||
|
{error, no_manager_svr}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
-spec disable_all() -> ok.
|
%%--------------------------------------------------------------------
|
||||||
disable_all() ->
|
|
||||||
lists:foreach(fun disable/1, running()).
|
|
||||||
|
|
||||||
%%----------------------------------------------------------
|
|
||||||
%% Dispatch APIs
|
%% Dispatch APIs
|
||||||
%%----------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
-spec cast(atom(), map()) -> ok.
|
-spec cast(atom(), map()) -> ok.
|
||||||
cast(Hookpoint, Req) ->
|
cast(Hookpoint, Req) ->
|
||||||
cast(Hookpoint, Req, running()).
|
cast(Hookpoint, Req, emqx_exhook_mngr:running()).
|
||||||
|
|
||||||
cast(_, _, []) ->
|
cast(_, _, []) ->
|
||||||
ok;
|
ok;
|
||||||
cast(Hookpoint, Req, [ServiceName|More]) ->
|
cast(Hookpoint, Req, [ServerName|More]) ->
|
||||||
%% XXX: Need a real asynchronous running
|
%% XXX: Need a real asynchronous running
|
||||||
_ = emqx_exhook_server:call(Hookpoint, Req, server(ServiceName)),
|
_ = emqx_exhook_server:call(Hookpoint, Req,
|
||||||
|
emqx_exhook_mngr:server(ServerName)),
|
||||||
cast(Hookpoint, Req, More).
|
cast(Hookpoint, Req, More).
|
||||||
|
|
||||||
-spec call_fold(atom(), term(), function())
|
-spec call_fold(atom(), term(), function())
|
||||||
-> {ok, term()}
|
-> {ok, term()}
|
||||||
| {stop, term()}.
|
| {stop, term()}.
|
||||||
call_fold(Hookpoint, Req, AccFun) ->
|
call_fold(Hookpoint, Req, AccFun) ->
|
||||||
call_fold(Hookpoint, Req, AccFun, running()).
|
FailedAction = emqx_exhook_mngr:get_request_failed_action(),
|
||||||
|
ServerNames = emqx_exhook_mngr:running(),
|
||||||
|
case ServerNames == [] andalso FailedAction == deny of
|
||||||
|
true ->
|
||||||
|
{stop, deny_action_result(Hookpoint, Req)};
|
||||||
|
_ ->
|
||||||
|
call_fold(Hookpoint, Req, FailedAction, AccFun, ServerNames)
|
||||||
|
end.
|
||||||
|
|
||||||
call_fold(_, Req, _, []) ->
|
call_fold(_, Req, _, _, []) ->
|
||||||
{ok, Req};
|
{ok, Req};
|
||||||
call_fold(Hookpoint, Req, AccFun, [ServiceName|More]) ->
|
call_fold(Hookpoint, Req, FailedAction, AccFun, [ServerName|More]) ->
|
||||||
case emqx_exhook_server:call(Hookpoint, Req, server(ServiceName)) of
|
Server = emqx_exhook_mngr:server(ServerName),
|
||||||
|
case emqx_exhook_server:call(Hookpoint, Req, Server) of
|
||||||
{ok, Resp} ->
|
{ok, Resp} ->
|
||||||
case AccFun(Req, Resp) of
|
case AccFun(Req, Resp) of
|
||||||
{stop, NReq} -> {stop, NReq};
|
{stop, NReq} ->
|
||||||
{ok, NReq} -> call_fold(Hookpoint, NReq, AccFun, More);
|
{stop, NReq};
|
||||||
_ -> call_fold(Hookpoint, Req, AccFun, More)
|
{ok, NReq} ->
|
||||||
|
call_fold(Hookpoint, NReq, FailedAction, AccFun, More);
|
||||||
|
_ ->
|
||||||
|
call_fold(Hookpoint, Req, FailedAction, AccFun, More)
|
||||||
end;
|
end;
|
||||||
_ ->
|
_ ->
|
||||||
call_fold(Hookpoint, Req, AccFun, More)
|
case FailedAction of
|
||||||
|
deny ->
|
||||||
|
{stop, deny_action_result(Hookpoint, Req)};
|
||||||
|
_ ->
|
||||||
|
call_fold(Hookpoint, Req, FailedAction, AccFun, More)
|
||||||
|
end
|
||||||
end.
|
end.
|
||||||
|
|
||||||
%%----------------------------------------------------------
|
%% XXX: Hard-coded the deny response
|
||||||
%% Storage
|
deny_action_result('client.authenticate', _) ->
|
||||||
|
#{result => false};
|
||||||
-compile({inline, [save/2]}).
|
deny_action_result('client.check_acl', _) ->
|
||||||
save(Name, ServiceState) ->
|
#{result => false};
|
||||||
Saved = persistent_term:get(?APP, []),
|
deny_action_result('message.publish', Msg) ->
|
||||||
persistent_term:put(?APP, lists:reverse([Name | Saved])),
|
%% TODO: Not support to deny a message
|
||||||
persistent_term:put({?APP, Name}, ServiceState).
|
%% maybe we can put the 'allow_publish' into message header
|
||||||
|
Msg.
|
||||||
-compile({inline, [unsave/1]}).
|
|
||||||
unsave(Name) ->
|
|
||||||
case persistent_term:get(?APP, []) of
|
|
||||||
[] ->
|
|
||||||
persistent_term:erase(?APP);
|
|
||||||
Saved ->
|
|
||||||
persistent_term:put(?APP, lists:delete(Name, Saved))
|
|
||||||
end,
|
|
||||||
persistent_term:erase({?APP, Name}),
|
|
||||||
ok.
|
|
||||||
|
|
||||||
-compile({inline, [running/0]}).
|
|
||||||
running() ->
|
|
||||||
persistent_term:get(?APP, []).
|
|
||||||
|
|
||||||
-compile({inline, [server/1]}).
|
|
||||||
server(Name) ->
|
|
||||||
case catch persistent_term:get({?APP, Name}) of
|
|
||||||
{'EXIT', {badarg,_}} -> undefined;
|
|
||||||
Service -> Service
|
|
||||||
end.
|
|
||||||
|
|
|
@ -22,73 +22,23 @@
|
||||||
|
|
||||||
-emqx_plugin(extension).
|
-emqx_plugin(extension).
|
||||||
|
|
||||||
-define(CNTER, emqx_exhook_counter).
|
|
||||||
|
|
||||||
-export([ start/2
|
-export([ start/2
|
||||||
, stop/1
|
, stop/1
|
||||||
, prep_stop/1
|
, prep_stop/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
%% Internal export
|
|
||||||
-export([ load_server/2
|
|
||||||
, unload_server/1
|
|
||||||
, unload_exhooks/0
|
|
||||||
, init_hooks_cnter/0
|
|
||||||
]).
|
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Application callbacks
|
%% Application callbacks
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
start(_StartType, _StartArgs) ->
|
start(_StartType, _StartArgs) ->
|
||||||
{ok, Sup} = emqx_exhook_sup:start_link(),
|
{ok, Sup} = emqx_exhook_sup:start_link(),
|
||||||
|
|
||||||
%% Init counter
|
|
||||||
init_hooks_cnter(),
|
|
||||||
|
|
||||||
%% Load all dirvers
|
|
||||||
load_all_servers(),
|
|
||||||
|
|
||||||
%% Register CLI
|
|
||||||
emqx_ctl:register_command(exhook, {emqx_exhook_cli, cli}, []),
|
emqx_ctl:register_command(exhook, {emqx_exhook_cli, cli}, []),
|
||||||
{ok, Sup}.
|
{ok, Sup}.
|
||||||
|
|
||||||
prep_stop(State) ->
|
prep_stop(State) ->
|
||||||
emqx_ctl:unregister_command(exhook),
|
emqx_ctl:unregister_command(exhook),
|
||||||
_ = unload_exhooks(),
|
|
||||||
ok = unload_all_servers(),
|
|
||||||
State.
|
State.
|
||||||
|
|
||||||
stop(_State) ->
|
stop(_State) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
|
||||||
%% Internal funcs
|
|
||||||
%%--------------------------------------------------------------------
|
|
||||||
|
|
||||||
load_all_servers() ->
|
|
||||||
lists:foreach(fun({Name, Options}) ->
|
|
||||||
load_server(Name, Options)
|
|
||||||
end, application:get_env(?APP, servers, [])).
|
|
||||||
|
|
||||||
unload_all_servers() ->
|
|
||||||
emqx_exhook:disable_all().
|
|
||||||
|
|
||||||
load_server(Name, Options) ->
|
|
||||||
emqx_exhook:enable(Name, Options).
|
|
||||||
|
|
||||||
unload_server(Name) ->
|
|
||||||
emqx_exhook:disable(Name).
|
|
||||||
|
|
||||||
unload_exhooks() ->
|
|
||||||
[emqx:unhook(Name, {M, F}) ||
|
|
||||||
{Name, {M, F, _A}} <- ?ENABLED_HOOKS].
|
|
||||||
|
|
||||||
init_hooks_cnter() ->
|
|
||||||
try
|
|
||||||
_ = ets:new(?CNTER, [named_table, public]), ok
|
|
||||||
catch
|
|
||||||
exit:badarg:_ ->
|
|
||||||
ok
|
|
||||||
end.
|
|
||||||
|
|
||||||
|
|
|
@ -22,25 +22,18 @@
|
||||||
|
|
||||||
cli(["server", "list"]) ->
|
cli(["server", "list"]) ->
|
||||||
if_enabled(fun() ->
|
if_enabled(fun() ->
|
||||||
Services = emqx_exhook:list(),
|
ServerNames = emqx_exhook:list(),
|
||||||
[emqx_ctl:print("HookServer(~s)~n",
|
[emqx_ctl:print("Server(~s)~n", [format(Name)]) || Name <- ServerNames]
|
||||||
[emqx_exhook_server:format(Service)]) || Service <- Services]
|
|
||||||
end);
|
end);
|
||||||
|
|
||||||
cli(["server", "enable", Name0]) ->
|
cli(["server", "enable", Name]) ->
|
||||||
if_enabled(fun() ->
|
if_enabled(fun() ->
|
||||||
Name = list_to_atom(Name0),
|
print(emqx_exhook:enable(list_to_existing_atom(Name)))
|
||||||
case proplists:get_value(Name, application:get_env(?APP, servers, [])) of
|
|
||||||
undefined ->
|
|
||||||
emqx_ctl:print("not_found~n");
|
|
||||||
Opts ->
|
|
||||||
print(emqx_exhook:enable(Name, Opts))
|
|
||||||
end
|
|
||||||
end);
|
end);
|
||||||
|
|
||||||
cli(["server", "disable", Name]) ->
|
cli(["server", "disable", Name]) ->
|
||||||
if_enabled(fun() ->
|
if_enabled(fun() ->
|
||||||
print(emqx_exhook:disable(list_to_atom(Name)))
|
print(emqx_exhook:disable(list_to_existing_atom(Name)))
|
||||||
end);
|
end);
|
||||||
|
|
||||||
cli(["server", "stats"]) ->
|
cli(["server", "stats"]) ->
|
||||||
|
@ -65,7 +58,8 @@ print({error, Reason}) ->
|
||||||
|
|
||||||
if_enabled(Fun) ->
|
if_enabled(Fun) ->
|
||||||
case lists:keymember(?APP, 1, application:which_applications()) of
|
case lists:keymember(?APP, 1, application:which_applications()) of
|
||||||
true -> Fun();
|
true ->
|
||||||
|
Fun();
|
||||||
_ -> hint()
|
_ -> hint()
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
@ -79,3 +73,11 @@ stats() ->
|
||||||
_ -> Acc
|
_ -> Acc
|
||||||
end
|
end
|
||||||
end, [], emqx_metrics:all())).
|
end, [], emqx_metrics:all())).
|
||||||
|
|
||||||
|
format(Name) ->
|
||||||
|
case emqx_exhook_mngr:server(Name) of
|
||||||
|
undefined ->
|
||||||
|
io_lib:format("name=~s, hooks=#{}, active=false", [Name]);
|
||||||
|
Server ->
|
||||||
|
emqx_exhook_server:format(Server)
|
||||||
|
end.
|
||||||
|
|
|
@ -0,0 +1,311 @@
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
%% @doc Manage the server status and reload strategy
|
||||||
|
-module(emqx_exhook_mngr).
|
||||||
|
|
||||||
|
-behaviour(gen_server).
|
||||||
|
|
||||||
|
-include("emqx_exhook.hrl").
|
||||||
|
-include_lib("emqx/include/logger.hrl").
|
||||||
|
|
||||||
|
%% APIs
|
||||||
|
-export([start_link/3]).
|
||||||
|
|
||||||
|
%% Mgmt API
|
||||||
|
-export([ enable/2
|
||||||
|
, disable/2
|
||||||
|
, list/1
|
||||||
|
]).
|
||||||
|
|
||||||
|
%% Helper funcs
|
||||||
|
-export([ running/0
|
||||||
|
, server/1
|
||||||
|
, put_request_failed_action/1
|
||||||
|
, get_request_failed_action/0
|
||||||
|
]).
|
||||||
|
|
||||||
|
%% gen_server callbacks
|
||||||
|
-export([ init/1
|
||||||
|
, handle_call/3
|
||||||
|
, handle_cast/2
|
||||||
|
, handle_info/2
|
||||||
|
, terminate/2
|
||||||
|
, code_change/3
|
||||||
|
]).
|
||||||
|
|
||||||
|
-record(state, {
|
||||||
|
%% Running servers
|
||||||
|
running :: map(), %% XXX: server order?
|
||||||
|
%% Wait to reload servers
|
||||||
|
waiting :: map(),
|
||||||
|
%% Marked stopped servers
|
||||||
|
stopped :: map(),
|
||||||
|
%% Auto reconnect timer interval
|
||||||
|
auto_reconnect :: false | non_neg_integer(),
|
||||||
|
%% Request options
|
||||||
|
request_options :: grpc_client:options(),
|
||||||
|
%% Timer references
|
||||||
|
trefs :: map()
|
||||||
|
}).
|
||||||
|
|
||||||
|
-type servers() :: [{Name :: atom(), server_options()}].
|
||||||
|
|
||||||
|
-type server_options() :: [ {scheme, http | https}
|
||||||
|
| {host, string()}
|
||||||
|
| {port, inet:port_number()}
|
||||||
|
].
|
||||||
|
|
||||||
|
-define(DEFAULT_TIMEOUT, 60000).
|
||||||
|
|
||||||
|
-define(CNTER, emqx_exhook_counter).
|
||||||
|
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% APIs
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
-spec start_link(servers(), false | non_neg_integer(), grpc_client:options())
|
||||||
|
->ignore
|
||||||
|
| {ok, pid()}
|
||||||
|
| {error, any()}.
|
||||||
|
start_link(Servers, AutoReconnect, ReqOpts) ->
|
||||||
|
gen_server:start_link(?MODULE, [Servers, AutoReconnect, ReqOpts], []).
|
||||||
|
|
||||||
|
-spec enable(pid(), atom()|string()) -> ok | {error, term()}.
|
||||||
|
enable(Pid, Name) ->
|
||||||
|
call(Pid, {load, Name}).
|
||||||
|
|
||||||
|
-spec disable(pid(), atom()|string()) -> ok | {error, term()}.
|
||||||
|
disable(Pid, Name) ->
|
||||||
|
call(Pid, {unload, Name}).
|
||||||
|
|
||||||
|
list(Pid) ->
|
||||||
|
call(Pid, list).
|
||||||
|
|
||||||
|
call(Pid, Req) ->
|
||||||
|
gen_server:call(Pid, Req, ?DEFAULT_TIMEOUT).
|
||||||
|
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% gen_server callbacks
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
init([Servers, AutoReconnect, ReqOpts0]) ->
|
||||||
|
process_flag(trap_exit, true),
|
||||||
|
%% XXX: Due to the ExHook Module in the enterprise,
|
||||||
|
%% this process may start multiple times and they will share this table
|
||||||
|
try
|
||||||
|
_ = ets:new(?CNTER, [named_table, public]), ok
|
||||||
|
catch
|
||||||
|
error:badarg:_ ->
|
||||||
|
ok
|
||||||
|
end,
|
||||||
|
|
||||||
|
%% put the global option
|
||||||
|
put_request_failed_action(
|
||||||
|
maps:get(request_failed_action, ReqOpts0, deny)
|
||||||
|
),
|
||||||
|
|
||||||
|
%% Load the hook servers
|
||||||
|
ReqOpts = maps:without([request_failed_action], ReqOpts0),
|
||||||
|
{Waiting, Running} = load_all_servers(Servers, ReqOpts),
|
||||||
|
{ok, ensure_reload_timer(
|
||||||
|
#state{waiting = Waiting,
|
||||||
|
running = Running,
|
||||||
|
stopped = #{},
|
||||||
|
request_options = ReqOpts,
|
||||||
|
auto_reconnect = AutoReconnect,
|
||||||
|
trefs = #{}
|
||||||
|
}
|
||||||
|
)}.
|
||||||
|
|
||||||
|
%% @private
|
||||||
|
load_all_servers(Servers, ReqOpts) ->
|
||||||
|
load_all_servers(Servers, ReqOpts, #{}, #{}).
|
||||||
|
load_all_servers([], _Request, Waiting, Running) ->
|
||||||
|
{Waiting, Running};
|
||||||
|
load_all_servers([{Name, Options}|More], ReqOpts, Waiting, Running) ->
|
||||||
|
{NWaiting, NRunning} =
|
||||||
|
case emqx_exhook_server:load(Name, Options, ReqOpts) of
|
||||||
|
{ok, ServerState} ->
|
||||||
|
save(Name, ServerState),
|
||||||
|
{Waiting, Running#{Name => Options}};
|
||||||
|
{error, _} ->
|
||||||
|
{Waiting#{Name => Options}, Running}
|
||||||
|
end,
|
||||||
|
load_all_servers(More, ReqOpts, NWaiting, NRunning).
|
||||||
|
|
||||||
|
handle_call({load, Name}, _From, State) ->
|
||||||
|
{Result, NState} = do_load_server(Name, State),
|
||||||
|
{reply, Result, NState};
|
||||||
|
|
||||||
|
handle_call({unload, Name}, _From, State) ->
|
||||||
|
case do_unload_server(Name, State) of
|
||||||
|
{error, Reason} ->
|
||||||
|
{reply, {error, Reason}, State};
|
||||||
|
{ok, NState} ->
|
||||||
|
{reply, ok, NState}
|
||||||
|
end;
|
||||||
|
|
||||||
|
handle_call(list, _From, State = #state{
|
||||||
|
running = Running,
|
||||||
|
waiting = Waiting,
|
||||||
|
stopped = Stopped}) ->
|
||||||
|
ServerNames = maps:keys(Running)
|
||||||
|
++ maps:keys(Waiting)
|
||||||
|
++ maps:keys(Stopped),
|
||||||
|
{reply, ServerNames, State};
|
||||||
|
|
||||||
|
handle_call(_Request, _From, State) ->
|
||||||
|
Reply = ok,
|
||||||
|
{reply, Reply, State}.
|
||||||
|
|
||||||
|
handle_cast(_Msg, State) ->
|
||||||
|
{noreply, State}.
|
||||||
|
|
||||||
|
handle_info({timeout, _Ref, {reload, Name}}, State) ->
|
||||||
|
{Result, NState} = do_load_server(Name, State),
|
||||||
|
case Result of
|
||||||
|
ok ->
|
||||||
|
{noreply, NState};
|
||||||
|
{error, not_found} ->
|
||||||
|
{noreply, NState};
|
||||||
|
{error, Reason} ->
|
||||||
|
?LOG(warning, "Failed to reload exhook callback server \"~s\", "
|
||||||
|
"Reason: ~0p", [Name, Reason]),
|
||||||
|
{noreply, ensure_reload_timer(NState)}
|
||||||
|
end;
|
||||||
|
|
||||||
|
handle_info(_Info, State) ->
|
||||||
|
{noreply, State}.
|
||||||
|
|
||||||
|
terminate(_Reason, State = #state{running = Running}) ->
|
||||||
|
_ = maps:fold(fun(Name, _, AccIn) ->
|
||||||
|
{ok, NAccIn} = do_unload_server(Name, AccIn),
|
||||||
|
NAccIn
|
||||||
|
end, State, Running),
|
||||||
|
_ = unload_exhooks(),
|
||||||
|
ok.
|
||||||
|
|
||||||
|
code_change(_OldVsn, State, _Extra) ->
|
||||||
|
{ok, State}.
|
||||||
|
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Internal funcs
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
unload_exhooks() ->
|
||||||
|
[emqx:unhook(Name, {M, F}) ||
|
||||||
|
{Name, {M, F, _A}} <- ?ENABLED_HOOKS].
|
||||||
|
|
||||||
|
do_load_server(Name, State0 = #state{
|
||||||
|
waiting = Waiting,
|
||||||
|
running = Running,
|
||||||
|
stopped = Stopped,
|
||||||
|
request_options = ReqOpts}) ->
|
||||||
|
State = clean_reload_timer(Name, State0),
|
||||||
|
case maps:get(Name, Running, undefined) of
|
||||||
|
undefined ->
|
||||||
|
case maps:get(Name, Stopped,
|
||||||
|
maps:get(Name, Waiting, undefined)) of
|
||||||
|
undefined ->
|
||||||
|
{{error, not_found}, State};
|
||||||
|
Options ->
|
||||||
|
case emqx_exhook_server:load(Name, Options, ReqOpts) of
|
||||||
|
{ok, ServerState} ->
|
||||||
|
save(Name, ServerState),
|
||||||
|
?LOG(info, "Load exhook callback server "
|
||||||
|
"\"~s\" successfully!", [Name]),
|
||||||
|
{ok, State#state{
|
||||||
|
running = maps:put(Name, Options, Running),
|
||||||
|
waiting = maps:remove(Name, Waiting),
|
||||||
|
stopped = maps:remove(Name, Stopped)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
{error, Reason} ->
|
||||||
|
{{error, Reason}, State}
|
||||||
|
end
|
||||||
|
end;
|
||||||
|
_ ->
|
||||||
|
{{error, already_started}, State}
|
||||||
|
end.
|
||||||
|
|
||||||
|
do_unload_server(Name, State = #state{running = Running, stopped = Stopped}) ->
|
||||||
|
case maps:take(Name, Running) of
|
||||||
|
error -> {error, not_running};
|
||||||
|
{Options, NRunning} ->
|
||||||
|
ok = emqx_exhook_server:unload(server(Name)),
|
||||||
|
ok = unsave(Name),
|
||||||
|
{ok, State#state{running = NRunning,
|
||||||
|
stopped = maps:put(Name, Options, Stopped)
|
||||||
|
}}
|
||||||
|
end.
|
||||||
|
|
||||||
|
ensure_reload_timer(State = #state{auto_reconnect = false}) ->
|
||||||
|
State;
|
||||||
|
ensure_reload_timer(State = #state{waiting = Waiting,
|
||||||
|
trefs = TRefs,
|
||||||
|
auto_reconnect = Intv}) ->
|
||||||
|
NRefs = maps:fold(fun(Name, _, AccIn) ->
|
||||||
|
case maps:get(Name, AccIn, undefined) of
|
||||||
|
undefined ->
|
||||||
|
Ref = erlang:start_timer(Intv, self(), {reload, Name}),
|
||||||
|
AccIn#{Name => Ref};
|
||||||
|
_HasRef ->
|
||||||
|
AccIn
|
||||||
|
end
|
||||||
|
end, TRefs, Waiting),
|
||||||
|
State#state{trefs = NRefs}.
|
||||||
|
|
||||||
|
clean_reload_timer(Name, State = #state{trefs = TRefs}) ->
|
||||||
|
case maps:take(Name, TRefs) of
|
||||||
|
error -> State;
|
||||||
|
{TRef, NTRefs} ->
|
||||||
|
_ = erlang:cancel_timer(TRef),
|
||||||
|
State#state{trefs = NTRefs}
|
||||||
|
end.
|
||||||
|
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Server state persistent
|
||||||
|
|
||||||
|
put_request_failed_action(Val) ->
|
||||||
|
persistent_term:put({?APP, request_failed_action}, Val).
|
||||||
|
|
||||||
|
get_request_failed_action() ->
|
||||||
|
persistent_term:get({?APP, request_failed_action}).
|
||||||
|
|
||||||
|
save(Name, ServerState) ->
|
||||||
|
Saved = persistent_term:get(?APP, []),
|
||||||
|
persistent_term:put(?APP, lists:reverse([Name | Saved])),
|
||||||
|
persistent_term:put({?APP, Name}, ServerState).
|
||||||
|
|
||||||
|
unsave(Name) ->
|
||||||
|
case persistent_term:get(?APP, []) of
|
||||||
|
[] ->
|
||||||
|
persistent_term:erase(?APP);
|
||||||
|
Saved ->
|
||||||
|
persistent_term:put(?APP, lists:delete(Name, Saved))
|
||||||
|
end,
|
||||||
|
persistent_term:erase({?APP, Name}),
|
||||||
|
ok.
|
||||||
|
|
||||||
|
running() ->
|
||||||
|
persistent_term:get(?APP, []).
|
||||||
|
|
||||||
|
server(Name) ->
|
||||||
|
case catch persistent_term:get({?APP, Name}) of
|
||||||
|
{'EXIT', {badarg,_}} -> undefined;
|
||||||
|
Service -> Service
|
||||||
|
end.
|
|
@ -25,7 +25,7 @@
|
||||||
-define(PB_CLIENT_MOD, emqx_exhook_v_1_hook_provider_client).
|
-define(PB_CLIENT_MOD, emqx_exhook_v_1_hook_provider_client).
|
||||||
|
|
||||||
%% Load/Unload
|
%% Load/Unload
|
||||||
-export([ load/2
|
-export([ load/3
|
||||||
, unload/1
|
, unload/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
@ -40,8 +40,8 @@
|
||||||
-record(server, {
|
-record(server, {
|
||||||
%% Server name (equal to grpc client channel name)
|
%% Server name (equal to grpc client channel name)
|
||||||
name :: server_name(),
|
name :: server_name(),
|
||||||
%% The server started options
|
%% The function options
|
||||||
options :: list(),
|
options :: map(),
|
||||||
%% gRPC channel pid
|
%% gRPC channel pid
|
||||||
channel :: pid(),
|
channel :: pid(),
|
||||||
%% Registered hook names and options
|
%% Registered hook names and options
|
||||||
|
@ -81,8 +81,8 @@
|
||||||
%% Load/Unload APIs
|
%% Load/Unload APIs
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
-spec load(atom(), list()) -> {ok, server()} | {error, term()} .
|
-spec load(atom(), list(), map()) -> {ok, server()} | {error, term()} .
|
||||||
load(Name0, Opts0) ->
|
load(Name0, Opts0, ReqOpts) ->
|
||||||
Name = to_list(Name0),
|
Name = to_list(Name0),
|
||||||
{SvrAddr, ClientOpts} = channel_opts(Opts0),
|
{SvrAddr, ClientOpts} = channel_opts(Opts0),
|
||||||
case emqx_exhook_sup:start_grpc_client_channel(
|
case emqx_exhook_sup:start_grpc_client_channel(
|
||||||
|
@ -90,7 +90,7 @@ load(Name0, Opts0) ->
|
||||||
SvrAddr,
|
SvrAddr,
|
||||||
ClientOpts) of
|
ClientOpts) of
|
||||||
{ok, _ChannPoolPid} ->
|
{ok, _ChannPoolPid} ->
|
||||||
case do_init(Name) of
|
case do_init(Name, ReqOpts) of
|
||||||
{ok, HookSpecs} ->
|
{ok, HookSpecs} ->
|
||||||
%% Reigster metrics
|
%% Reigster metrics
|
||||||
Prefix = lists:flatten(
|
Prefix = lists:flatten(
|
||||||
|
@ -99,7 +99,7 @@ load(Name0, Opts0) ->
|
||||||
%% Ensure hooks
|
%% Ensure hooks
|
||||||
ensure_hooks(HookSpecs),
|
ensure_hooks(HookSpecs),
|
||||||
{ok, #server{name = Name,
|
{ok, #server{name = Name,
|
||||||
options = Opts0,
|
options = ReqOpts,
|
||||||
channel = _ChannPoolPid,
|
channel = _ChannPoolPid,
|
||||||
hookspec = HookSpecs,
|
hookspec = HookSpecs,
|
||||||
prefix = Prefix }};
|
prefix = Prefix }};
|
||||||
|
@ -122,7 +122,7 @@ channel_opts(Opts) ->
|
||||||
Scheme = proplists:get_value(scheme, Opts),
|
Scheme = proplists:get_value(scheme, Opts),
|
||||||
Host = proplists:get_value(host, Opts),
|
Host = proplists:get_value(host, Opts),
|
||||||
Port = proplists:get_value(port, Opts),
|
Port = proplists:get_value(port, Opts),
|
||||||
SvrAddr = lists:flatten(io_lib:format("~s://~s:~w", [Scheme, Host, Port])),
|
SvrAddr = format_http_uri(Scheme, Host, Port),
|
||||||
ClientOpts = case Scheme of
|
ClientOpts = case Scheme of
|
||||||
https ->
|
https ->
|
||||||
SslOpts = lists:keydelete(ssl, 1, proplists:get_value(ssl_options, Opts, [])),
|
SslOpts = lists:keydelete(ssl, 1, proplists:get_value(ssl_options, Opts, [])),
|
||||||
|
@ -133,20 +133,27 @@ channel_opts(Opts) ->
|
||||||
end,
|
end,
|
||||||
{SvrAddr, ClientOpts}.
|
{SvrAddr, ClientOpts}.
|
||||||
|
|
||||||
|
format_http_uri(Scheme, Host0, Port) ->
|
||||||
|
Host = case is_tuple(Host0) of
|
||||||
|
true -> inet:ntoa(Host0);
|
||||||
|
_ -> Host0
|
||||||
|
end,
|
||||||
|
lists:flatten(io_lib:format("~s://~s:~w", [Scheme, Host, Port])).
|
||||||
|
|
||||||
-spec unload(server()) -> ok.
|
-spec unload(server()) -> ok.
|
||||||
unload(#server{name = Name, hookspec = HookSpecs}) ->
|
unload(#server{name = Name, options = ReqOpts, hookspec = HookSpecs}) ->
|
||||||
_ = do_deinit(Name),
|
_ = do_deinit(Name, ReqOpts),
|
||||||
_ = may_unload_hooks(HookSpecs),
|
_ = may_unload_hooks(HookSpecs),
|
||||||
_ = emqx_exhook_sup:stop_grpc_client_channel(Name),
|
_ = emqx_exhook_sup:stop_grpc_client_channel(Name),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
do_deinit(Name) ->
|
do_deinit(Name, ReqOpts) ->
|
||||||
_ = do_call(Name, 'on_provider_unloaded', #{}),
|
_ = do_call(Name, 'on_provider_unloaded', #{}, ReqOpts),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
do_init(ChannName) ->
|
do_init(ChannName, ReqOpts) ->
|
||||||
Req = #{broker => maps:from_list(emqx_sys:info())},
|
Req = #{broker => maps:from_list(emqx_sys:info())},
|
||||||
case do_call(ChannName, 'on_provider_loaded', Req) of
|
case do_call(ChannName, 'on_provider_loaded', Req, ReqOpts) of
|
||||||
{ok, InitialResp} ->
|
{ok, InitialResp} ->
|
||||||
try
|
try
|
||||||
{ok, resovle_hookspec(maps:get(hooks, InitialResp, []))}
|
{ok, resovle_hookspec(maps:get(hooks, InitialResp, []))}
|
||||||
|
@ -212,7 +219,7 @@ may_unload_hooks(HookSpecs) ->
|
||||||
end, maps:keys(HookSpecs)).
|
end, maps:keys(HookSpecs)).
|
||||||
|
|
||||||
format(#server{name = Name, hookspec = Hooks}) ->
|
format(#server{name = Name, hookspec = Hooks}) ->
|
||||||
io_lib:format("name=~p, hooks=~0p", [Name, Hooks]).
|
io_lib:format("name=~s, hooks=~0p, active=true", [Name, Hooks]).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% APIs
|
%% APIs
|
||||||
|
@ -225,7 +232,8 @@ name(#server{name = Name}) ->
|
||||||
-> ignore
|
-> ignore
|
||||||
| {ok, Resp :: term()}
|
| {ok, Resp :: term()}
|
||||||
| {error, term()}.
|
| {error, term()}.
|
||||||
call(Hookpoint, Req, #server{name = ChannName, hookspec = Hooks, prefix = Prefix}) ->
|
call(Hookpoint, Req, #server{name = ChannName, options = ReqOpts,
|
||||||
|
hookspec = Hooks, prefix = Prefix}) ->
|
||||||
GrpcFunc = hk2func(Hookpoint),
|
GrpcFunc = hk2func(Hookpoint),
|
||||||
case maps:get(Hookpoint, Hooks, undefined) of
|
case maps:get(Hookpoint, Hooks, undefined) of
|
||||||
undefined -> ignore;
|
undefined -> ignore;
|
||||||
|
@ -240,7 +248,7 @@ call(Hookpoint, Req, #server{name = ChannName, hookspec = Hooks, prefix = Prefix
|
||||||
false -> ignore;
|
false -> ignore;
|
||||||
_ ->
|
_ ->
|
||||||
inc_metrics(Prefix, Hookpoint),
|
inc_metrics(Prefix, Hookpoint),
|
||||||
do_call(ChannName, GrpcFunc, Req)
|
do_call(ChannName, GrpcFunc, Req, ReqOpts)
|
||||||
end
|
end
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
@ -258,9 +266,9 @@ match_topic_filter(_, []) ->
|
||||||
match_topic_filter(TopicName, TopicFilter) ->
|
match_topic_filter(TopicName, TopicFilter) ->
|
||||||
lists:any(fun(F) -> emqx_topic:match(TopicName, F) end, TopicFilter).
|
lists:any(fun(F) -> emqx_topic:match(TopicName, F) end, TopicFilter).
|
||||||
|
|
||||||
-spec do_call(string(), atom(), map()) -> {ok, map()} | {error, term()}.
|
-spec do_call(string(), atom(), map(), map()) -> {ok, map()} | {error, term()}.
|
||||||
do_call(ChannName, Fun, Req) ->
|
do_call(ChannName, Fun, Req, ReqOpts) ->
|
||||||
Options = #{channel => ChannName},
|
Options = ReqOpts#{channel => ChannName},
|
||||||
?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, Req, Options]),
|
?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, Req, Options]),
|
||||||
case catch apply(?PB_CLIENT_MOD, Fun, [Req, Options]) of
|
case catch apply(?PB_CLIENT_MOD, Fun, [Req, Options]) of
|
||||||
{ok, Resp, _Metadata} ->
|
{ok, Resp, _Metadata} ->
|
||||||
|
|
|
@ -26,6 +26,14 @@
|
||||||
, stop_grpc_client_channel/1
|
, stop_grpc_client_channel/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
-define(CHILD(Mod, Type, Args),
|
||||||
|
#{ id => Mod
|
||||||
|
, start => {Mod, start_link, Args}
|
||||||
|
, type => Type
|
||||||
|
, shutdown => 15000
|
||||||
|
}
|
||||||
|
).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Supervisor APIs & Callbacks
|
%% Supervisor APIs & Callbacks
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
@ -34,7 +42,23 @@ start_link() ->
|
||||||
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
|
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
|
||||||
|
|
||||||
init([]) ->
|
init([]) ->
|
||||||
{ok, {{one_for_one, 10, 100}, []}}.
|
Mngr = ?CHILD(emqx_exhook_mngr, worker,
|
||||||
|
[servers(), auto_reconnect(), request_options()]),
|
||||||
|
{ok, {{one_for_one, 10, 100}, [Mngr]}}.
|
||||||
|
|
||||||
|
servers() ->
|
||||||
|
env(servers, []).
|
||||||
|
|
||||||
|
auto_reconnect() ->
|
||||||
|
env(auto_reconnect, 60000).
|
||||||
|
|
||||||
|
request_options() ->
|
||||||
|
#{timeout => env(request_timeout, 5000),
|
||||||
|
request_failed_action => env(request_failed_action, deny)
|
||||||
|
}.
|
||||||
|
|
||||||
|
env(Key, Def) ->
|
||||||
|
application:get_env(emqx_exhook, Key, Def).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% APIs
|
%% APIs
|
||||||
|
|
|
@ -52,17 +52,32 @@ set_special_cfgs(emqx_exhook) ->
|
||||||
t_noserver_nohook(_) ->
|
t_noserver_nohook(_) ->
|
||||||
emqx_exhook:disable(default),
|
emqx_exhook:disable(default),
|
||||||
?assertEqual([], ets:tab2list(emqx_hooks)),
|
?assertEqual([], ets:tab2list(emqx_hooks)),
|
||||||
|
ok = emqx_exhook:enable(default),
|
||||||
Opts = proplists:get_value(
|
|
||||||
default,
|
|
||||||
application:get_env(emqx_exhook, servers, [])
|
|
||||||
),
|
|
||||||
ok = emqx_exhook:enable(default, Opts),
|
|
||||||
?assertNotEqual([], ets:tab2list(emqx_hooks)).
|
?assertNotEqual([], ets:tab2list(emqx_hooks)).
|
||||||
|
|
||||||
|
t_access_failed_if_no_server_running(_) ->
|
||||||
|
emqx_exhook:disable(default),
|
||||||
|
ClientInfo = #{clientid => <<"user-id-1">>,
|
||||||
|
username => <<"usera">>,
|
||||||
|
peerhost => {127,0,0,1},
|
||||||
|
sockport => 1883,
|
||||||
|
protocol => mqtt,
|
||||||
|
mountpoint => undefined
|
||||||
|
},
|
||||||
|
?assertMatch({stop, #{auth_result := not_authorized}},
|
||||||
|
emqx_exhook_handler:on_client_authenticate(ClientInfo, #{auth_result => success})),
|
||||||
|
|
||||||
|
?assertMatch({stop, deny},
|
||||||
|
emqx_exhook_handler:on_client_check_acl(ClientInfo, publish, <<"t/1">>, allow)),
|
||||||
|
|
||||||
|
Message = emqx_message:make(<<"t/1">>, <<"abc">>),
|
||||||
|
?assertMatch({stop, Message},
|
||||||
|
emqx_exhook_handler:on_message_publish(Message)),
|
||||||
|
emqx_exhook:enable(default).
|
||||||
|
|
||||||
t_cli_list(_) ->
|
t_cli_list(_) ->
|
||||||
meck_print(),
|
meck_print(),
|
||||||
?assertEqual( [[emqx_exhook_server:format(Svr) || Svr <- emqx_exhook:list()]]
|
?assertEqual( [[emqx_exhook_server:format(emqx_exhook_mngr:server(Name)) || Name <- emqx_exhook:list()]]
|
||||||
, emqx_exhook_cli:cli(["server", "list"])
|
, emqx_exhook_cli:cli(["server", "list"])
|
||||||
),
|
),
|
||||||
unmeck_print().
|
unmeck_print().
|
||||||
|
@ -71,7 +86,7 @@ t_cli_enable_disable(_) ->
|
||||||
meck_print(),
|
meck_print(),
|
||||||
?assertEqual([already_started], emqx_exhook_cli:cli(["server", "enable", "default"])),
|
?assertEqual([already_started], emqx_exhook_cli:cli(["server", "enable", "default"])),
|
||||||
?assertEqual(ok, emqx_exhook_cli:cli(["server", "disable", "default"])),
|
?assertEqual(ok, emqx_exhook_cli:cli(["server", "disable", "default"])),
|
||||||
?assertEqual([], emqx_exhook_cli:cli(["server", "list"])),
|
?assertEqual([["name=default, hooks=#{}, active=false"]], emqx_exhook_cli:cli(["server", "list"])),
|
||||||
|
|
||||||
?assertEqual([not_running], emqx_exhook_cli:cli(["server", "disable", "default"])),
|
?assertEqual([not_running], emqx_exhook_cli:cli(["server", "disable", "default"])),
|
||||||
?assertEqual(ok, emqx_exhook_cli:cli(["server", "enable", "default"])),
|
?assertEqual(ok, emqx_exhook_cli:cli(["server", "enable", "default"])),
|
||||||
|
|
|
@ -13,7 +13,7 @@
|
||||||
]}.
|
]}.
|
||||||
|
|
||||||
{deps,
|
{deps,
|
||||||
[{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.2"}}}
|
[{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.3"}}}
|
||||||
]}.
|
]}.
|
||||||
|
|
||||||
{grpc,
|
{grpc,
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{application, emqx_exproto,
|
{application, emqx_exproto,
|
||||||
[{description, "EMQ X Extension for Protocol"},
|
[{description, "EMQ X Extension for Protocol"},
|
||||||
{vsn, "4.3.0"}, %% strict semver
|
{vsn, "4.3.2"}, %% strict semver
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_exproto_app, []}},
|
{mod, {emqx_exproto_app, []}},
|
||||||
|
|
|
@ -0,0 +1,31 @@
|
||||||
|
%% -*-: erlang -*-
|
||||||
|
{VSN,
|
||||||
|
[
|
||||||
|
{"4.3.1", [
|
||||||
|
{load_module, emqx_exproto_gsvr, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_gcli, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_channel, brutal_purge, soft_purge, []}
|
||||||
|
]},
|
||||||
|
{"4.3.0", [
|
||||||
|
{load_module, emqx_exproto_gsvr, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_gcli, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_conn, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_channel, brutal_purge, soft_purge, []}
|
||||||
|
]},
|
||||||
|
{<<".*">>, []}
|
||||||
|
],
|
||||||
|
[
|
||||||
|
{"4.3.1", [
|
||||||
|
{load_module, emqx_exproto_gsvr, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_gcli, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_channel, brutal_purge, soft_purge, []}
|
||||||
|
]},
|
||||||
|
{"4.3.0", [
|
||||||
|
{load_module, emqx_exproto_gsvr, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_gcli, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_conn, brutal_purge, soft_purge, []},
|
||||||
|
{load_module, emqx_exproto_channel, brutal_purge, soft_purge, []}
|
||||||
|
]},
|
||||||
|
{<<".*">>, []}
|
||||||
|
]
|
||||||
|
}.
|
|
@ -164,12 +164,21 @@ init(ConnInfo = #{socktype := Socktype,
|
||||||
try_dispatch(on_socket_created, wrap(Req), Channel).
|
try_dispatch(on_socket_created, wrap(Req), Channel).
|
||||||
|
|
||||||
%% @private
|
%% @private
|
||||||
peercert(nossl, ConnInfo) ->
|
peercert(NoSsl, ConnInfo) when NoSsl == nossl;
|
||||||
|
NoSsl == undefined ->
|
||||||
ConnInfo;
|
ConnInfo;
|
||||||
peercert(Peercert, ConnInfo) ->
|
peercert(Peercert, ConnInfo) ->
|
||||||
ConnInfo#{peercert =>
|
Fn = fun(_, V) -> V =/= undefined end,
|
||||||
#{cn => esockd_peercert:common_name(Peercert),
|
Infos = maps:filter(Fn,
|
||||||
dn => esockd_peercert:subject(Peercert)}}.
|
#{cn => esockd_peercert:common_name(Peercert),
|
||||||
|
dn => esockd_peercert:subject(Peercert)}
|
||||||
|
),
|
||||||
|
case maps:size(Infos) of
|
||||||
|
0 ->
|
||||||
|
ConnInfo;
|
||||||
|
_ ->
|
||||||
|
ConnInfo#{peercert => Infos}
|
||||||
|
end.
|
||||||
|
|
||||||
%% @private
|
%% @private
|
||||||
socktype(tcp) -> 'TCP';
|
socktype(tcp) -> 'TCP';
|
||||||
|
@ -234,7 +243,7 @@ handle_timeout(_TRef, {keepalive, StatVal},
|
||||||
end;
|
end;
|
||||||
|
|
||||||
handle_timeout(_TRef, force_close, Channel = #channel{closed_reason = Reason}) ->
|
handle_timeout(_TRef, force_close, Channel = #channel{closed_reason = Reason}) ->
|
||||||
{shutdown, {error, {force_close, Reason}}, Channel};
|
{shutdown, Reason, Channel};
|
||||||
|
|
||||||
handle_timeout(_TRef, Msg, Channel) ->
|
handle_timeout(_TRef, Msg, Channel) ->
|
||||||
?WARN("Unexpected timeout: ~p", [Msg]),
|
?WARN("Unexpected timeout: ~p", [Msg]),
|
||||||
|
@ -260,7 +269,7 @@ handle_call({auth, ClientInfo0, Password},
|
||||||
Channel = #channel{conninfo = ConnInfo,
|
Channel = #channel{conninfo = ConnInfo,
|
||||||
clientinfo = ClientInfo}) ->
|
clientinfo = ClientInfo}) ->
|
||||||
ClientInfo1 = enrich_clientinfo(ClientInfo0, ClientInfo),
|
ClientInfo1 = enrich_clientinfo(ClientInfo0, ClientInfo),
|
||||||
NConnInfo = enrich_conninfo(ClientInfo1, ConnInfo),
|
NConnInfo = enrich_conninfo(ClientInfo0, ConnInfo),
|
||||||
|
|
||||||
Channel1 = Channel#channel{conninfo = NConnInfo,
|
Channel1 = Channel#channel{conninfo = NConnInfo,
|
||||||
clientinfo = ClientInfo1},
|
clientinfo = ClientInfo1},
|
||||||
|
@ -364,13 +373,13 @@ handle_info({sock_closed, Reason},
|
||||||
case queue:len(Queue) =:= 0
|
case queue:len(Queue) =:= 0
|
||||||
andalso Inflight =:= undefined of
|
andalso Inflight =:= undefined of
|
||||||
true ->
|
true ->
|
||||||
Channel1 = ensure_disconnected({sock_closed, Reason}, Channel),
|
Channel1 = ensure_disconnected(Reason, Channel),
|
||||||
{shutdown, {sock_closed, Reason}, Channel1};
|
{shutdown, Reason, Channel1};
|
||||||
_ ->
|
_ ->
|
||||||
%% delayed close process for flushing all callback funcs to gRPC server
|
%% delayed close process for flushing all callback funcs to gRPC server
|
||||||
Channel1 = Channel#channel{closed_reason = {sock_closed, Reason}},
|
Channel1 = Channel#channel{closed_reason = Reason},
|
||||||
Channel2 = ensure_timer(force_timer, Channel1),
|
Channel2 = ensure_timer(force_timer, Channel1),
|
||||||
{ok, ensure_disconnected({sock_closed, Reason}, Channel2)}
|
{ok, ensure_disconnected(Reason, Channel2)}
|
||||||
end;
|
end;
|
||||||
|
|
||||||
handle_info({hreply, on_socket_created, ok}, Channel) ->
|
handle_info({hreply, on_socket_created, ok}, Channel) ->
|
||||||
|
|
|
@ -17,6 +17,7 @@
|
||||||
%% TCP/TLS/UDP/DTLS Connection
|
%% TCP/TLS/UDP/DTLS Connection
|
||||||
-module(emqx_exproto_conn).
|
-module(emqx_exproto_conn).
|
||||||
|
|
||||||
|
-include_lib("esockd/include/esockd.hrl").
|
||||||
-include_lib("emqx/include/types.hrl").
|
-include_lib("emqx/include/types.hrl").
|
||||||
-include_lib("emqx/include/logger.hrl").
|
-include_lib("emqx/include/logger.hrl").
|
||||||
|
|
||||||
|
@ -106,12 +107,8 @@ start_link(Socket = {udp, _SockPid, _Sock}, Peername, Options) ->
|
||||||
%% tcp/ssl/dtls
|
%% tcp/ssl/dtls
|
||||||
start_link(esockd_transport, Sock, Options) ->
|
start_link(esockd_transport, Sock, Options) ->
|
||||||
Socket = {esockd_transport, Sock},
|
Socket = {esockd_transport, Sock},
|
||||||
case esockd_transport:peername(Sock) of
|
Args = [self(), Socket, undefined, Options],
|
||||||
{ok, Peername} ->
|
{ok, proc_lib:spawn_link(?MODULE, init, Args)}.
|
||||||
Args = [self(), Socket, Peername, Options],
|
|
||||||
{ok, proc_lib:spawn_link(?MODULE, init, Args)};
|
|
||||||
R = {error, _} -> R
|
|
||||||
end.
|
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% API
|
%% API
|
||||||
|
@ -170,6 +167,12 @@ stop(Pid) ->
|
||||||
%% Wrapped funcs
|
%% Wrapped funcs
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
esockd_peername({udp, _SockPid, _Sock}, Peername) ->
|
||||||
|
Peername;
|
||||||
|
esockd_peername({esockd_transport, Sock}, _Peername) ->
|
||||||
|
{ok, Peername} = esockd_transport:ensure_ok_or_exit(peername, [Sock]),
|
||||||
|
Peername.
|
||||||
|
|
||||||
esockd_wait(Socket = {udp, _SockPid, _Sock}) ->
|
esockd_wait(Socket = {udp, _SockPid, _Sock}) ->
|
||||||
{ok, Socket};
|
{ok, Socket};
|
||||||
esockd_wait({esockd_transport, Sock}) ->
|
esockd_wait({esockd_transport, Sock}) ->
|
||||||
|
@ -195,7 +198,12 @@ esockd_ensure_ok_or_exit(Fun, {esockd_transport, Socket}) ->
|
||||||
esockd_type({udp, _, _}) ->
|
esockd_type({udp, _, _}) ->
|
||||||
udp;
|
udp;
|
||||||
esockd_type({esockd_transport, Socket}) ->
|
esockd_type({esockd_transport, Socket}) ->
|
||||||
esockd_transport:type(Socket).
|
case esockd_transport:type(Socket) of
|
||||||
|
proxy ->
|
||||||
|
esockd_transport:type(Socket#proxy_socket.socket);
|
||||||
|
Type ->
|
||||||
|
Type
|
||||||
|
end.
|
||||||
|
|
||||||
esockd_setopts({udp, _, _}, _) ->
|
esockd_setopts({udp, _, _}, _) ->
|
||||||
ok;
|
ok;
|
||||||
|
@ -221,9 +229,10 @@ send(Data, #state{socket = {esockd_transport, Sock}}) ->
|
||||||
-define(DEFAULT_IDLE_TIMEOUT, 30000).
|
-define(DEFAULT_IDLE_TIMEOUT, 30000).
|
||||||
-define(DEFAULT_OOM_POLICY, #{max_heap_size => 4194304,message_queue_len => 32000}).
|
-define(DEFAULT_OOM_POLICY, #{max_heap_size => 4194304,message_queue_len => 32000}).
|
||||||
|
|
||||||
init(Parent, WrappedSock, Peername, Options) ->
|
init(Parent, WrappedSock, Peername0, Options) ->
|
||||||
case esockd_wait(WrappedSock) of
|
case esockd_wait(WrappedSock) of
|
||||||
{ok, NWrappedSock} ->
|
{ok, NWrappedSock} ->
|
||||||
|
Peername = esockd_peername(NWrappedSock, Peername0),
|
||||||
run_loop(Parent, init_state(NWrappedSock, Peername, Options));
|
run_loop(Parent, init_state(NWrappedSock, Peername, Options));
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
ok = esockd_close(WrappedSock),
|
ok = esockd_close(WrappedSock),
|
||||||
|
|
|
@ -89,15 +89,22 @@ handle_cast({rpc, Fun, Req, Options, From}, State = #state{streams = Streams}) -
|
||||||
{ok, Stream} ->
|
{ok, Stream} ->
|
||||||
case catch grpc_client:send(Stream, Req) of
|
case catch grpc_client:send(Stream, Req) of
|
||||||
ok ->
|
ok ->
|
||||||
?LOG(debug, "Send to ~p method successfully, request: ~0p", [Fun, Req]),
|
?LOG(debug, "Send to ~s method successfully, request: ~0p", [Fun, Req]),
|
||||||
reply(From, Fun, ok),
|
reply(From, Fun, ok),
|
||||||
{noreply, State#state{streams = Streams#{Fun => Stream}}};
|
{noreply, State#state{streams = Streams#{Fun => Stream}}};
|
||||||
|
{'EXIT', {not_found, _Stk}} ->
|
||||||
|
%% Not found the stream, reopen it
|
||||||
|
?LOG(info, "Can not find the old stream ref for ~s; "
|
||||||
|
"re-try with a new stream!", [Fun]),
|
||||||
|
handle_cast({rpc, Fun, Req, Options, From},
|
||||||
|
State#state{streams = maps:remove(Fun, Streams)});
|
||||||
{'EXIT', {timeout, _Stk}} ->
|
{'EXIT', {timeout, _Stk}} ->
|
||||||
?LOG(error, "Send to ~p method timeout, request: ~0p", [Fun, Req]),
|
?LOG(error, "Send to ~s method timeout, request: ~0p", [Fun, Req]),
|
||||||
reply(From, Fun, {error, timeout}),
|
reply(From, Fun, {error, timeout}),
|
||||||
{noreply, State#state{streams = Streams#{Fun => Stream}}};
|
{noreply, State#state{streams = Streams#{Fun => Stream}}};
|
||||||
{'EXIT', {Reason1, _Stk}} ->
|
{'EXIT', {Reason1, _Stk}} ->
|
||||||
?LOG(error, "Send to ~p method failure, request: ~0p, stacktrace: ~0p", [Fun, Req, _Stk]),
|
?LOG(error, "Send to ~s method failure, request: ~0p, reason: ~p, "
|
||||||
|
"stacktrace: ~0p", [Fun, Req, Reason1, _Stk]),
|
||||||
reply(From, Fun, {error, Reason1}),
|
reply(From, Fun, {error, Reason1}),
|
||||||
{noreply, State#state{streams = Streams#{Fun => undefined}}}
|
{noreply, State#state{streams = Streams#{Fun => undefined}}}
|
||||||
end
|
end
|
||||||
|
|
|
@ -123,12 +123,14 @@ call(ConnStr, Req) ->
|
||||||
{error, ?RESP_PARAMS_TYPE_ERROR,
|
{error, ?RESP_PARAMS_TYPE_ERROR,
|
||||||
<<"The conn type error">>};
|
<<"The conn type error">>};
|
||||||
Pid when is_pid(Pid) ->
|
Pid when is_pid(Pid) ->
|
||||||
case erlang:is_process_alive(Pid) of
|
case catch emqx_exproto_conn:call(Pid, Req) of
|
||||||
true ->
|
{'EXIT',{noproc, _}} ->
|
||||||
emqx_exproto_conn:call(Pid, Req);
|
|
||||||
false ->
|
|
||||||
{error, ?RESP_CONN_PROCESS_NOT_ALIVE,
|
{error, ?RESP_CONN_PROCESS_NOT_ALIVE,
|
||||||
<<"Connection process is not alive">>}
|
<<"Connection process is not alive">>};
|
||||||
|
{'EXIT',{timeout, _}} ->
|
||||||
|
{error, ?RESP_UNKNOWN, <<"Connection is not answered">>};
|
||||||
|
Result ->
|
||||||
|
Result
|
||||||
end
|
end
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
|
|
@ -146,4 +146,4 @@ lwm2m.dtls.ciphers = ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,E
|
||||||
## Note that 'lwm2m.dtls.ciphers' and 'lwm2m.dtls.psk_ciphers' cannot
|
## Note that 'lwm2m.dtls.ciphers' and 'lwm2m.dtls.psk_ciphers' cannot
|
||||||
## be configured at the same time.
|
## be configured at the same time.
|
||||||
## See 'https://tools.ietf.org/html/rfc4279#section-2'.
|
## See 'https://tools.ietf.org/html/rfc4279#section-2'.
|
||||||
#lwm2m.dtls.psk_ciphers = PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA
|
#lwm2m.dtls.psk_ciphers = RSA-PSK-AES256-GCM-SHA384,RSA-PSK-AES256-CBC-SHA384,RSA-PSK-AES128-GCM-SHA256,RSA-PSK-AES128-CBC-SHA256,RSA-PSK-AES256-CBC-SHA,RSA-PSK-AES128-CBC-SHA
|
||||||
|
|
|
@ -185,7 +185,7 @@ end}.
|
||||||
OldCert = cuttlefish:conf_get("lwm2m.certfile", Conf, undefined),
|
OldCert = cuttlefish:conf_get("lwm2m.certfile", Conf, undefined),
|
||||||
|
|
||||||
%% Ciphers
|
%% Ciphers
|
||||||
SplitFun = fun(undefined) -> undefined; (S) -> string:tokens(S, ",") end,
|
SplitFun = fun(undefined) -> []; (S) -> string:tokens(S, ",") end,
|
||||||
Ciphers =
|
Ciphers =
|
||||||
case cuttlefish:conf_get("lwm2m.dtls.ciphers", Conf, undefined) of
|
case cuttlefish:conf_get("lwm2m.dtls.ciphers", Conf, undefined) of
|
||||||
undefined ->
|
undefined ->
|
||||||
|
@@ -198,16 +198,17 @@ end}.
 undefined ->
 [];
 C2 ->
-Psk = lists:map(fun("PSK-AES128-CBC-SHA") -> {psk, aes_128_cbc, sha};
-("PSK-AES256-CBC-SHA") -> {psk, aes_256_cbc, sha};
-("PSK-3DES-EDE-CBC-SHA") -> {psk, '3des_ede_cbc', sha};
-("PSK-RC4-SHA") -> {psk, rc4_128, sha}
-end, SplitFun(C2)),
+Psk = lists:map(fun("PSK-AES128-CBC-SHA") -> "RSA-PSK-AES128-CBC-SHA";
+("PSK-AES256-CBC-SHA") -> "RSA-PSK-AES256-CBC-SHA";
+("PSK-3DES-EDE-CBC-SHA") -> "RSA-PSK-3DES-EDE-CBC-SHA";
+("PSK-RC4-SHA") -> "RSA-PSK-RC4-SHA";
+(Suite) -> Suite
+end, SplitFun(C2)),
 [{ciphers, Psk}, {user_lookup_fun, {fun emqx_psk:lookup/3, <<>>}}]
 end,
 Ciphers /= []
 andalso PskCiphers /= []
-andalso cuttlefish:invalid("The 'lwm2m.dtls.ciphers' and 'lwm2m.dtls.psk_ciphers' cannot exist simultaneously."),
+andalso cuttlefish:invalid("The 'lwm2m.dtls.ciphers' and 'lwm2m.dtls.psk_ciphers' cannot coexist"),

 NCiphers = Ciphers ++ PskCiphers,

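For illustration only (the input string below is an example, not taken from the configuration above): the updated translation rewrites the old plain-PSK suite names to their RSA-PSK equivalents and passes any other suite through unchanged, so a mixed list comes out normalized:

%% Sketch of the mapping behaviour with example input.
SplitFun = fun(undefined) -> []; (S) -> string:tokens(S, ",") end,
Map = fun("PSK-AES128-CBC-SHA") -> "RSA-PSK-AES128-CBC-SHA";
         ("PSK-AES256-CBC-SHA") -> "RSA-PSK-AES256-CBC-SHA";
         (Suite) -> Suite
      end,
["RSA-PSK-AES128-CBC-SHA", "RSA-PSK-AES256-GCM-SHA384"] =
    lists:map(Map, SplitFun("PSK-AES128-CBC-SHA,RSA-PSK-AES256-GCM-SHA384")).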
@@ -1,5 +1,5 @@
 {deps,
-[{lwm2m_coap, {git, "https://github.com/emqx/lwm2m-coap", {tag, "v1.1.2"}}}
+[{lwm2m_coap, {git, "https://github.com/emqx/lwm2m-coap", {tag, "v1.1.5"}}}
 ]}.

 {profiles,

@@ -1,6 +1,6 @@
 {application,emqx_lwm2m,
 [{description,"EMQ X LwM2M Gateway"},
-{vsn, "4.3.1"}, % strict semver, bump manually!
+{vsn, "4.3.4"}, % strict semver, bump manually!
 {modules,[]},
 {registered,[emqx_lwm2m_sup]},
 {applications,[kernel,stdlib,lwm2m_coap]},
@@ -1,15 +1,21 @@
 %% -*-: erlang -*-
-{VSN,
+{"4.3.4",
 [
-{"4.3.0", [
-{load_module, emqx_lwm2m_protocol, brutal_purge, soft_purge, []}
+{<<"4\\.3\\.[0-1]">>, [
+{restart_application, emqx_lwm2m}
 ]},
-{<<".*">>, []}
+{"4.3.2", [
+{load_module, emqx_lwm2m_message, brutal_purge, soft_purge, []}
+]},
+{"4.3.3", []} %% only config change
 ],
 [
-{"4.3.0", [
-{load_module, emqx_lwm2m_protocol, brutal_purge, soft_purge, []}
+{<<"4\\.3\\.[0-1]">>, [
+{restart_application, emqx_lwm2m}
 ]},
-{<<".*">>, []}
+{"4.3.2", [
+{load_module, emqx_lwm2m_message, brutal_purge, soft_purge, []}
+]},
+{"4.3.3", []} %% only config change
 ]
 }.
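As a reminder of how these version patterns behave (example shell session, not part of the diff): binary entries in an .appup file are treated as regular expressions, so `<<"4\\.3\\.[0-1]">>` covers 4.3.0 and 4.3.1 (which get the full application restart), while 4.3.2 and 4.3.3 fall through to the lighter instructions:

1> re:run("4.3.1", <<"4\\.3\\.[0-1]">>, [{capture, none}]).
match
2> re:run("4.3.2", <<"4\\.3\\.[0-1]">>, [{capture, none}]).
nomatch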
@@ -0,0 +1,162 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_lwm2m_api).
+
+-import(minirest, [return/1]).
+
+-rest_api(#{name => list,
+method => 'GET',
+path => "/lwm2m_channels/",
+func => list,
+descr => "A list of all lwm2m channel"
+}).
+
+-rest_api(#{name => list,
+method => 'GET',
+path => "/nodes/:atom:node/lwm2m_channels/",
+func => list,
+descr => "A list of lwm2m channel of a node"
+}).
+
+-rest_api(#{name => lookup_cmd,
+method => 'GET',
+path => "/lookup_cmd/:bin:ep/",
+func => lookup_cmd,
+descr => "Send a lwm2m downlink command"
+}).
+
+-rest_api(#{name => lookup_cmd,
+method => 'GET',
+path => "/nodes/:atom:node/lookup_cmd/:bin:ep/",
+func => lookup_cmd,
+descr => "Send a lwm2m downlink command of a node"
+}).
+
+-export([ list/2
+, lookup_cmd/2
+]).
+
+list(#{node := Node }, Params) ->
+case Node = node() of
+true -> list(#{}, Params);
+_ -> rpc_call(Node, list, [#{}, Params])
+end;
+
+list(#{}, _Params) ->
+Channels = emqx_lwm2m_cm:all_channels(),
+return({ok, format(Channels)}).
+
+lookup_cmd(#{ep := Ep, node := Node}, Params) ->
+case Node = node() of
+true -> lookup_cmd(#{ep => Ep}, Params);
+_ -> rpc_call(Node, lookup_cmd, [#{ep => Ep}, Params])
+end;
+
+lookup_cmd(#{ep := Ep}, Params) ->
+MsgType = proplists:get_value(<<"msgType">>, Params),
+Path0 = proplists:get_value(<<"path">>, Params),
+case emqx_lwm2m_cm:lookup_cmd(Ep, Path0, MsgType) of
+[] -> return({ok, []});
+[{_, undefined} | _] -> return({ok, []});
+[{{IMEI, Path, MsgType}, undefined}] ->
+return({ok, [{imei, IMEI},
+{'msgType', IMEI},
+{'code', <<"6.01">>},
+{'codeMsg', <<"reply_not_received">>},
+{'path', Path}]});
+[{{IMEI, Path, MsgType}, {Code, CodeMsg, Content}}] ->
+Payload1 = format_cmd_content(Content, MsgType),
+return({ok, [{imei, IMEI},
+{'msgType', IMEI},
+{'code', Code},
+{'codeMsg', CodeMsg},
+{'path', Path}] ++ Payload1})
+end.
+
+rpc_call(Node, Fun, Args) ->
+case rpc:call(Node, ?MODULE, Fun, Args) of
+{badrpc, Reason} -> {error, Reason};
+Res -> Res
+end.
+
+format(Channels) ->
+lists:map(fun({IMEI, #{lifetime := LifeTime,
+peername := Peername,
+version := Version,
+reg_info := RegInfo}}) ->
+ObjectList = lists:map(fun(Path) ->
+[ObjId | _] = path_list(Path),
+case emqx_lwm2m_xml_object:get_obj_def(binary_to_integer(ObjId), true) of
+{error, _} ->
+{Path, Path};
+ObjDefinition ->
+ObjectName = emqx_lwm2m_xml_object:get_object_name(ObjDefinition),
+{Path, list_to_binary(ObjectName)}
+end
+end, maps:get(<<"objectList">>, RegInfo)),
+{IpAddr, Port} = Peername,
+[{imei, IMEI},
+{lifetime, LifeTime},
+{ip_address, iolist_to_binary(ntoa(IpAddr))},
+{port, Port},
+{version, Version},
+{'objectList', ObjectList}]
+end, Channels).
+
+format_cmd_content(undefined, _MsgType) -> [];
+format_cmd_content(Content, <<"discover">>) ->
+[H | Content1] = Content,
+{_, [HObjId]} = emqx_lwm2m_coap_resource:parse_object_list(H),
+[ObjId | _]= path_list(HObjId),
+ObjectList = case Content1 of
+[Content2 | _] ->
+{_, ObjL} = emqx_lwm2m_coap_resource:parse_object_list(Content2),
+ObjL;
+[] -> []
+end,
+R = case emqx_lwm2m_xml_object:get_obj_def(binary_to_integer(ObjId), true) of
+{error, _} ->
+lists:map(fun(Object) -> {Object, Object} end, ObjectList);
+ObjDefinition ->
+lists:map(fun(Object) ->
+[_, _, ResId| _] = path_list(Object),
+Operations = case emqx_lwm2m_xml_object:get_resource_operations(binary_to_integer(ResId), ObjDefinition) of
+"E" -> [{operations, list_to_binary("E")}];
+Oper -> [{'dataType', list_to_binary(emqx_lwm2m_xml_object:get_resource_type(binary_to_integer(ResId), ObjDefinition))},
+{operations, list_to_binary(Oper)}]
+end,
+[{path, Object},
+{name, list_to_binary(emqx_lwm2m_xml_object:get_resource_name(binary_to_integer(ResId), ObjDefinition))}
+] ++ Operations
+end, ObjectList)
+end,
+[{content, R}];
+format_cmd_content(Content, _) ->
+[{content, Content}].
+
+ntoa({0,0,0,0,0,16#ffff,AB,CD}) ->
+inet_parse:ntoa({AB bsr 8, AB rem 256, CD bsr 8, CD rem 256});
+ntoa(IP) ->
+inet_parse:ntoa(IP).
+
+path_list(Path) ->
+case binary:split(binary_util:trim(Path, $/), [<<$/>>], [global]) of
+[ObjId, ObjInsId, ResId, ResInstId] -> [ObjId, ObjInsId, ResId, ResInstId];
+[ObjId, ObjInsId, ResId] -> [ObjId, ObjInsId, ResId];
+[ObjId, ObjInsId] -> [ObjId, ObjInsId];
+[ObjId] -> [ObjId]
+end.
@@ -0,0 +1,153 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_lwm2m_cm).
+
+-export([start_link/0]).
+
+-export([ register_channel/5
+, update_reg_info/2
+, unregister_channel/1
+]).
+
+-export([ lookup_channel/1
+, all_channels/0
+]).
+
+-export([ register_cmd/3
+, register_cmd/4
+, lookup_cmd/3
+, lookup_cmd_by_imei/1
+]).
+
+%% gen_server callbacks
+-export([ init/1
+, handle_call/3
+, handle_cast/2
+, handle_info/2
+, terminate/2
+, code_change/3
+]).
+
+-define(LOG(Level, Format, Args), logger:Level("LWM2M-CM: " ++ Format, Args)).
+
+%% Server name
+-define(CM, ?MODULE).
+
+-define(LWM2M_CHANNEL_TAB, emqx_lwm2m_channel).
+-define(LWM2M_CMD_TAB, emqx_lwm2m_cmd).
+
+%% Batch drain
+-define(BATCH_SIZE, 100000).
+
+%% @doc Start the channel manager.
+start_link() ->
+gen_server:start_link({local, ?CM}, ?MODULE, [], []).
+
+%%--------------------------------------------------------------------
+%% API
+%%--------------------------------------------------------------------
+
+register_channel(IMEI, RegInfo, LifeTime, Ver, Peername) ->
+Info = #{
+reg_info => RegInfo,
+lifetime => LifeTime,
+version => Ver,
+peername => Peername
+},
+true = ets:insert(?LWM2M_CHANNEL_TAB, {IMEI, Info}),
+cast({registered, {IMEI, self()}}).
+
+update_reg_info(IMEI, RegInfo) ->
+case lookup_channel(IMEI) of
+[{_, RegInfo0}] ->
+true = ets:insert(?LWM2M_CHANNEL_TAB, {IMEI, RegInfo0#{reg_info => RegInfo}}),
+ok;
+[] ->
+ok
+end.
+
+unregister_channel(IMEI) when is_binary(IMEI) ->
+true = ets:delete(?LWM2M_CHANNEL_TAB, IMEI),
+ok.
+
+lookup_channel(IMEI) ->
+ets:lookup(?LWM2M_CHANNEL_TAB, IMEI).
+
+all_channels() ->
+ets:tab2list(?LWM2M_CHANNEL_TAB).
+
+register_cmd(IMEI, Path, Type) ->
+true = ets:insert(?LWM2M_CMD_TAB, {{IMEI, Path, Type}, undefined}).
+
+register_cmd(_IMEI, undefined, _Type, _Result) ->
+ok;
+register_cmd(IMEI, Path, Type, Result) ->
+true = ets:insert(?LWM2M_CMD_TAB, {{IMEI, Path, Type}, Result}).
+
+lookup_cmd(IMEI, Path, Type) ->
+ets:lookup(?LWM2M_CMD_TAB, {IMEI, Path, Type}).
+
+lookup_cmd_by_imei(IMEI) ->
+ets:select(?LWM2M_CHANNEL_TAB, [{{{IMEI, '_', '_'}, '$1'}, [], ['$_']}]).
+
+%% @private
+cast(Msg) -> gen_server:cast(?CM, Msg).
+
+%%--------------------------------------------------------------------
+%% gen_server callbacks
+%%--------------------------------------------------------------------
+
+init([]) ->
+TabOpts = [public, {write_concurrency, true}, {read_concurrency, true}],
+ok = emqx_tables:new(?LWM2M_CHANNEL_TAB, [set, compressed | TabOpts]),
+ok = emqx_tables:new(?LWM2M_CMD_TAB, [set, compressed | TabOpts]),
+{ok, #{chan_pmon => emqx_pmon:new()}}.
+
+handle_call(Req, _From, State) ->
+?LOG(error, "Unexpected call: ~p", [Req]),
+{reply, ignored, State}.
+
+handle_cast({registered, {IMEI, ChanPid}}, State = #{chan_pmon := PMon}) ->
+PMon1 = emqx_pmon:monitor(ChanPid, IMEI, PMon),
+{noreply, State#{chan_pmon := PMon1}};
+
+handle_cast(Msg, State) ->
+?LOG(error, "Unexpected cast: ~p", [Msg]),
+{noreply, State}.
+
+handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
+ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)],
+{Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon),
+ok = emqx_pool:async_submit(fun lists:foreach/2, [fun clean_down/1, Items]),
+{noreply, State#{chan_pmon := PMon1}};
+
+handle_info(Info, State) ->
+?LOG(error, "Unexpected info: ~p", [Info]),
+{noreply, State}.
+
+terminate(_Reason, _State) ->
+emqx_stats:cancel_update(chan_stats).
+
+code_change(_OldVsn, State, _Extra) ->
+{ok, State}.
+
+%%--------------------------------------------------------------------
+%% Internal functions
+%%--------------------------------------------------------------------
+
+clean_down({_ChanPid, IMEI}) ->
+unregister_channel(IMEI).
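For orientation, a rough sketch of how the gateway code later in this diff drives the new channel manager; the IMEI, registration info and peer address below are invented example values, not taken from the change:

%% Sketch only; assumes the manager has been started (normally by its supervisor).
{ok, _Pid} = emqx_lwm2m_cm:start_link(),
ok = emqx_lwm2m_cm:register_channel(<<"869329030000001">>,
                                    #{<<"objectList">> => [<<"/3/0">>]},
                                    300, <<"1.0">>, {{127,0,0,1}, 56830}),
[{<<"869329030000001">>, #{lifetime := 300}}] =
    emqx_lwm2m_cm:lookup_channel(<<"869329030000001">>).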
@@ -0,0 +1,41 @@
+
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_lwm2m_cm_sup).
+
+-behaviour(supervisor).
+
+-export([start_link/0]).
+
+-export([init/1]).
+
+start_link() ->
+supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+CM = #{id => emqx_lwm2m_cm,
+start => {emqx_lwm2m_cm, start_link, []},
+restart => permanent,
+shutdown => 5000,
+type => worker,
+modules => [emqx_lwm2m_cm]},
+SupFlags = #{strategy => one_for_one,
+intensity => 100,
+period => 10
+},
+{ok, {SupFlags, [CM]}}.
@@ -23,6 +23,7 @@
 -export([ mqtt2coap/2
 , coap2mqtt/4
 , ack2mqtt/1
+, extract_path/1
 ]).

 -export([path_list/1]).

@@ -48,11 +48,11 @@
 -define(LOG(Level, Format, Args), logger:Level("LWM2M-RESOURCE: " ++ Format, Args)).

 -dialyzer([{nowarn_function, [coap_discover/2]}]).
-% we use {'absolute', string(), [{atom(), binary()}]} as coap_uri()
+% we use {'absolute', list(binary()), [{atom(), binary()}]} as coap_uri()
 % https://github.com/emqx/lwm2m-coap/blob/258e9bd3762124395e83c1e68a1583b84718230f/src/lwm2m_coap_resource.erl#L61
 % resource operations
 coap_discover(_Prefix, _Args) ->
-[{absolute, "mqtt", []}].
+[{absolute, [<<"mqtt">>], []}].

 coap_get(ChId, [?PREFIX], Query, Content, Lwm2mState) ->
 ?LOG(debug, "~p ~p GET Query=~p, Content=~p", [self(),ChId, Query, Content]),

@@ -22,6 +22,9 @@
 , opaque_to_json/2
 , translate_json/1
 ]).
+-ifdef(TEST).
+-export([ bits/1 ]).
+-endif.

 -include("emqx_lwm2m.hrl").

@@ -197,7 +200,10 @@ value_ex(K, Value) when K =:= <<"Integer">>; K =:= <<"Float">>; K =:= <<"Time">>
 value_ex(K, Value) when K =:= <<"String">> ->
 Value;
 value_ex(K, Value) when K =:= <<"Opaque">> ->
-Value;
+%% XXX: force to decode it with base64
+%% This may not be a good implementation, but it is
+%% consistent with the treatment of Opaque in value/3
+base64:decode(Value);
 value_ex(K, <<"true">>) when K =:= <<"Boolean">> -> <<1>>;
 value_ex(K, <<"false">>) when K =:= <<"Boolean">> -> <<0>>;

@@ -361,23 +367,6 @@ encode_number(Int) when is_integer(Int) ->
 encode_number(Float) when is_float(Float) ->
 <<Float:64/float>>.

-encode_int(Int) when Int >= 0 ->
-binary:encode_unsigned(Int);
-encode_int(Int) when Int < 0 ->
-Size = byte_size_of_signed(-Int) * 8,
-<<Int:Size/signed>>.
-
-byte_size_of_signed(UInt) ->
-byte_size_of_signed(UInt, 0).
-
-byte_size_of_signed(UInt, N) ->
-BitSize = (8*N - 1),
-Max = (1 bsl BitSize),
-if
-UInt =< Max -> N;
-UInt > Max -> byte_size_of_signed(UInt, N+1)
-end.
-
 binary_to_number(NumStr) ->
 try
 binary_to_integer(NumStr)
@@ -385,3 +374,26 @@ binary_to_number(NumStr) ->
 error:badarg ->
 binary_to_float(NumStr)
 end.
+
+encode_int(Int) ->
+Bits = bits(Int),
+<<Int:Bits/signed>>.
+
+bits(I) when I < 0 -> bits_neg(I);
+bits(I) -> bits_pos(I).
+
+%% Quote:
+%% Integer: An 8, 16, 32 or 64-bit signed integer.
+%% The valid range of the value for a Resource SHOULD be defined.
+%% This data type is also used for the purpose of enumeration.
+%%
+%% NOTE: Integer should not be encoded to 24-bits, 40-bits, etc.
+bits_pos(I) when I < (1 bsl 7) -> 8;
+bits_pos(I) when I < (1 bsl 15) -> 16;
+bits_pos(I) when I < (1 bsl 31) -> 32;
+bits_pos(I) when I < (1 bsl 63) -> 64.
+
+bits_neg(I) when I >= -((1 bsl 7)) -> 8;
+bits_neg(I) when I >= -((1 bsl 15)) -> 16;
+bits_neg(I) when I >= -((1 bsl 31)) -> 32;
+bits_neg(I) when I >= -((1 bsl 63)) -> 64.
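A quick illustration of the replacement encoding (example values, not from the diff): bits/1 picks the smallest of 8, 16, 32 or 64 bits that can hold the signed integer, and encode_int/1 then emits a signed big-endian binary of exactly that width, so 24-bit or 40-bit encodings can no longer occur:

%% Sketch; each line is a match that succeeds.
<<127>>     = <<127:8/signed>>,    %% bits(127)  -> 8
<<0,128>>   = <<128:16/signed>>,   %% bits(128)  -> 16
<<255,127>> = <<-129:16/signed>>.  %% bits(-129) -> 16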
@@ -103,6 +103,7 @@ init(CoapPid, EndpointName, Peername = {_Peerhost, _Port}, RegInfo = #{<<"lt">>
 emqx_cm:register_channel(EndpointName, CoapPid, conninfo(Lwm2mState1))
 end),
 emqx_cm:insert_channel_info(EndpointName, info(Lwm2mState1), stats(Lwm2mState1)),
+emqx_lwm2m_cm:register_channel(EndpointName, RegInfo, LifeTime, Ver, Peername),

 {ok, Lwm2mState1#lwm2m_state{life_timer = emqx_lwm2m_timer:start_timer(LifeTime, {life_timer, expired})}};
 {error, Error} ->

@@ -120,10 +121,8 @@ post_init(Lwm2mState = #lwm2m_state{endpoint_name = _EndpointName,
 _ = send_to_broker(<<"register">>, #{<<"data">> => RegInfo}, Lwm2mState),
 Lwm2mState#lwm2m_state{mqtt_topic = Topic}.

-update_reg_info(NewRegInfo, Lwm2mState = #lwm2m_state{
-life_timer = LifeTimer, register_info = RegInfo,
-coap_pid = CoapPid}) ->
+update_reg_info(NewRegInfo, Lwm2mState=#lwm2m_state{life_timer = LifeTimer, register_info = RegInfo,
+coap_pid = CoapPid, endpoint_name = Epn}) ->

 UpdatedRegInfo = maps:merge(RegInfo, NewRegInfo),

 _ = case proplists:get_value(update_msg_publish_condition,

@@ -134,6 +133,7 @@ update_reg_info(NewRegInfo, Lwm2mState = #lwm2m_state{
 %% - report the registration info update, but only when objectList is updated.
 case NewRegInfo of
 #{<<"objectList">> := _} ->
+emqx_lwm2m_cm:update_reg_info(Epn, NewRegInfo),
 send_to_broker(<<"update">>, #{<<"data">> => UpdatedRegInfo}, Lwm2mState);
 _ -> ok
 end

@@ -151,7 +151,8 @@ update_reg_info(NewRegInfo, Lwm2mState = #lwm2m_state{
 register_info = UpdatedRegInfo}.

 replace_reg_info(NewRegInfo, Lwm2mState=#lwm2m_state{life_timer = LifeTimer,
-coap_pid = CoapPid}) ->
+coap_pid = CoapPid,
+endpoint_name = EndpointName}) ->
 _ = send_to_broker(<<"register">>, #{<<"data">> => NewRegInfo}, Lwm2mState),

 %% - flush cached donwlink commands

@@ -161,7 +162,7 @@ replace_reg_info(NewRegInfo, Lwm2mState=#lwm2m_state{life_timer = LifeTimer,
 UpdatedLifeTimer = emqx_lwm2m_timer:refresh_timer(
 maps:get(<<"lt">>, NewRegInfo), LifeTimer),

-_ = send_auto_observe(CoapPid, NewRegInfo),
+_ = send_auto_observe(CoapPid, NewRegInfo, EndpointName),

 ?LOG(debug, "Replace RegInfo to: ~p", [NewRegInfo]),
 Lwm2mState#lwm2m_state{life_timer = UpdatedLifeTimer,

@@ -174,15 +175,20 @@ send_ul_data(EventType, Payload, Lwm2mState=#lwm2m_state{coap_pid = CoapPid}) ->
 Lwm2mState.

 auto_observe(Lwm2mState = #lwm2m_state{register_info = RegInfo,
-coap_pid = CoapPid}) ->
-_ = send_auto_observe(CoapPid, RegInfo),
+coap_pid = CoapPid,
+endpoint_name = EndpointName}) ->
+_ = send_auto_observe(CoapPid, RegInfo, EndpointName),
 Lwm2mState.

-deliver(#message{topic = Topic, payload = Payload}, Lwm2mState = #lwm2m_state{coap_pid = CoapPid, register_info = RegInfo, started_at = StartedAt}) ->
+deliver(#message{topic = Topic, payload = Payload},
+Lwm2mState = #lwm2m_state{coap_pid = CoapPid,
+register_info = RegInfo,
+started_at = StartedAt,
+endpoint_name = EndpointName}) ->
 IsCacheMode = is_cache_mode(RegInfo, StartedAt),
 ?LOG(debug, "Get MQTT message from broker, IsCacheModeNow?: ~p, Topic: ~p, Payload: ~p", [IsCacheMode, Topic, Payload]),
 AlternatePath = maps:get(<<"alternatePath">>, RegInfo, <<"/">>),
-deliver_to_coap(AlternatePath, Payload, CoapPid, IsCacheMode),
+deliver_to_coap(AlternatePath, Payload, CoapPid, IsCacheMode, EndpointName),
 Lwm2mState.

 get_info(Lwm2mState = #lwm2m_state{endpoint_name = EndpointName, peername = {PeerHost, _},
@@ -238,20 +244,21 @@ time_now() -> erlang:system_time(millisecond).
 %% Deliver downlink message to coap
 %%--------------------------------------------------------------------

-deliver_to_coap(AlternatePath, JsonData, CoapPid, CacheMode) when is_binary(JsonData)->
+deliver_to_coap(AlternatePath, JsonData, CoapPid, CacheMode, EndpointName) when is_binary(JsonData)->
 try
 TermData = emqx_json:decode(JsonData, [return_maps]),
-deliver_to_coap(AlternatePath, TermData, CoapPid, CacheMode)
+deliver_to_coap(AlternatePath, TermData, CoapPid, CacheMode, EndpointName)
 catch
 C:R:Stack ->
 ?LOG(error, "deliver_to_coap - Invalid JSON: ~p, Exception: ~p, stacktrace: ~p",
 [JsonData, {C, R}, Stack])
 end;

-deliver_to_coap(AlternatePath, TermData, CoapPid, CacheMode) when is_map(TermData) ->
+deliver_to_coap(AlternatePath, TermData, CoapPid, CacheMode, EndpointName) when is_map(TermData) ->
 ?LOG(info, "SEND To CoAP, AlternatePath=~p, Data=~p", [AlternatePath, TermData]),
 {CoapRequest, Ref} = emqx_lwm2m_cmd_handler:mqtt2coap(AlternatePath, TermData),
+MsgType = maps:get(<<"msgType">>, Ref),
+emqx_lwm2m_cm:register_cmd(EndpointName, emqx_lwm2m_cmd_handler:extract_path(Ref), MsgType),
 case CacheMode of
 false ->
 do_deliver_to_coap(CoapPid, CoapRequest, Ref);

@@ -266,7 +273,12 @@ deliver_to_coap(AlternatePath, TermData, CoapPid, CacheMode) when is_map(TermDat
 send_to_broker(EventType, Payload = #{}, Lwm2mState) ->
 do_send_to_broker(EventType, Payload, Lwm2mState).

-do_send_to_broker(EventType, Payload, Lwm2mState) ->
+do_send_to_broker(EventType, #{<<"data">> := Data} = Payload, #lwm2m_state{endpoint_name = EndpointName} = Lwm2mState) ->
+ReqPath = maps:get(<<"reqPath">>, Data, undefined),
+Code = maps:get(<<"code">>, Data, undefined),
+CodeMsg = maps:get(<<"codeMsg">>, Data, undefined),
+Content = maps:get(<<"content">>, Data, undefined),
+emqx_lwm2m_cm:register_cmd(EndpointName, ReqPath, EventType, {Code, CodeMsg, Content}),
 NewPayload = maps:put(<<"msgType">>, EventType, Payload),
 Topic = uplink_topic(EventType, Lwm2mState),
 publish(Topic, emqx_json:encode(NewPayload), _Qos = 0, Lwm2mState#lwm2m_state.endpoint_name).

@@ -281,7 +293,7 @@ auto_observe_object_list(Expected, Registered) ->
 Expected1 = lists:map(fun(S) -> iolist_to_binary(S) end, Expected),
 lists:filter(fun(S) -> lists:member(S, Expected1) end, Registered).

-send_auto_observe(CoapPid, RegInfo) ->
+send_auto_observe(CoapPid, RegInfo, EndpointName) ->
 %% - auto observe the objects
 case proplists:get_value(auto_observe, lwm2m_coap_responder:options(), false) of
 false ->

@@ -292,25 +304,37 @@ send_auto_observe(CoapPid, RegInfo) ->
 maps:get(<<"objectList">>, RegInfo, [])
 ),
 AlternatePath = maps:get(<<"alternatePath">>, RegInfo, <<"/">>),
-auto_observe(AlternatePath, Objectlists, CoapPid)
+auto_observe(AlternatePath, Objectlists, CoapPid, EndpointName)
 end.

-auto_observe(AlternatePath, ObjectList, CoapPid) ->
+auto_observe(AlternatePath, ObjectList, CoapPid, EndpointName) ->
 ?LOG(info, "Auto Observe on: ~p", [ObjectList]),
 erlang:spawn(fun() ->
-observe_object_list(AlternatePath, ObjectList, CoapPid)
+observe_object_list(AlternatePath, ObjectList, CoapPid, EndpointName)
 end).

-observe_object_list(AlternatePath, ObjectList, CoapPid) ->
+observe_object_list(AlternatePath, ObjectList, CoapPid, EndpointName) ->
 lists:foreach(fun(ObjectPath) ->
-observe_object_slowly(AlternatePath, ObjectPath, CoapPid, 100)
+[ObjId| LastPath] = emqx_lwm2m_cmd_handler:path_list(ObjectPath),
+case ObjId of
+<<"19">> ->
+[ObjInsId | _LastPath1] = LastPath,
+case ObjInsId of
+<<"0">> ->
+observe_object_slowly(AlternatePath, <<"/19/0/0">>, CoapPid, 100, EndpointName);
+_ ->
+observe_object_slowly(AlternatePath, ObjectPath, CoapPid, 100, EndpointName)
+end;
+_ ->
+observe_object_slowly(AlternatePath, ObjectPath, CoapPid, 100, EndpointName)
+end
 end, ObjectList).

-observe_object_slowly(AlternatePath, ObjectPath, CoapPid, Interval) ->
-observe_object(AlternatePath, ObjectPath, CoapPid),
+observe_object_slowly(AlternatePath, ObjectPath, CoapPid, Interval, EndpointName) ->
+observe_object(AlternatePath, ObjectPath, CoapPid, EndpointName),
 timer:sleep(Interval).

-observe_object(AlternatePath, ObjectPath, CoapPid) ->
+observe_object(AlternatePath, ObjectPath, CoapPid, EndpointName) ->
 Payload = #{
 <<"msgType">> => <<"observe">>,
 <<"data">> => #{

@@ -318,7 +342,7 @@ observe_object(AlternatePath, ObjectPath, CoapPid) ->
 }
 },
 ?LOG(info, "Observe ObjectPath: ~p", [ObjectPath]),
-deliver_to_coap(AlternatePath, Payload, CoapPid, false).
+deliver_to_coap(AlternatePath, Payload, CoapPid, false, EndpointName).

 do_deliver_to_coap_slowly(CoapPid, CoapRequestList, Interval) ->
 erlang:spawn(fun() ->
@@ -29,4 +29,11 @@ start_link() ->
 supervisor:start_link({local, ?MODULE}, ?MODULE, []).

 init(_Args) ->
-{ok, { {one_for_all, 10, 3600}, [?CHILD(emqx_lwm2m_xml_object_db)] }}.
+CmSup = #{id => emqx_lwm2m_cm_sup,
+start => {emqx_lwm2m_cm_sup, start_link, []},
+restart => permanent,
+shutdown => infinity,
+type => supervisor,
+modules => [emqx_lwm2m_cm_sup]
+},
+{ok, { {one_for_all, 10, 3600}, [?CHILD(emqx_lwm2m_xml_object_db), CmSup] }}.

@@ -21,9 +21,11 @@

 -export([ get_obj_def/2
 , get_object_id/1
+, get_object_name/1
 , get_object_and_resource_id/2
 , get_resource_type/2
 , get_resource_name/2
+, get_resource_operations/2
 ]).

 -define(LOG(Level, Format, Args),

@@ -42,6 +44,10 @@ get_object_id(ObjDefinition) ->
 [#xmlText{value=ObjectId}] = xmerl_xpath:string("ObjectID/text()", ObjDefinition),
 ObjectId.

+get_object_name(ObjDefinition) ->
+[#xmlText{value=ObjectName}] = xmerl_xpath:string("Name/text()", ObjDefinition),
+ObjectName.
+

 get_object_and_resource_id(ResourceNameBinary, ObjDefinition) ->
 ResourceNameString = binary_to_list(ResourceNameBinary),

@@ -60,3 +66,8 @@ get_resource_name(ResourceIdInt, ObjDefinition) ->
 ResourceIdString = integer_to_list(ResourceIdInt),
 [#xmlText{value=Name}] = xmerl_xpath:string("Resources/Item[@ID=\""++ResourceIdString++"\"]/Name/text()", ObjDefinition),
 Name.
+
+get_resource_operations(ResourceIdInt, ObjDefinition) ->
+ResourceIdString = integer_to_list(ResourceIdInt),
+[#xmlText{value=Operations}] = xmerl_xpath:string("Resources/Item[@ID=\""++ResourceIdString++"\"]/Operations/text()", ObjDefinition),
+Operations.
@@ -58,7 +58,7 @@ find_objectid(ObjectId) ->
 false -> ObjectId
 end,
 case ets:lookup(?LWM2M_OBJECT_DEF_TAB, ObjectIdInt) of
-[] -> error(no_xml_definition);
+[] -> {error, no_xml_definition};
 [{ObjectId, Xml}] -> Xml
 end.

@@ -121,8 +121,10 @@ load(BaseDir) ->
 true -> BaseDir++"*.xml";
 false -> BaseDir++"/*.xml"
 end,
-AllXmlFiles = filelib:wildcard(Wild),
-load_loop(AllXmlFiles).
+case filelib:wildcard(Wild) of
+[] -> error(no_xml_files_found, BaseDir);
+AllXmlFiles -> load_loop(AllXmlFiles)
+end.

 load_loop([]) ->
 ok;
@@ -40,6 +40,8 @@ all() ->
 , {group, test_grp_4_discover}
 , {group, test_grp_5_write_attr}
 , {group, test_grp_6_observe}
+, {group, test_grp_8_object_19}
+, {group, test_grp_bugs}
 ].

 suite() -> [{timetrap, {seconds, 90}}].

@@ -98,13 +100,16 @@ groups() ->
 ]},
 {test_grp_8_object_19, [RepeatOpt], [
 case80_specail_object_19_1_0_write,
-case80_specail_object_19_0_0_notify,
-case80_specail_object_19_0_0_response,
-case80_normal_object_19_0_0_read
+case80_specail_object_19_0_0_notify
+%case80_specail_object_19_0_0_response,
+%case80_normal_object_19_0_0_read
 ]},
 {test_grp_9_psm_queue_mode, [RepeatOpt], [
 case90_psm_mode,
 case90_queue_mode
+]},
+{test_grp_bugs, [RepeatOpt], [
+case_bug_emqx_4989
 ]}
 ].

@@ -144,6 +149,56 @@ end_per_testcase(_AllTestCase, Config) ->
 %% Cases
 %%--------------------------------------------------------------------

+case_bug_emqx_4989(Config) ->
+%% https://github.com/emqx/emqx/issues/4989
+% step 1, device register ...
+Epn = "urn:oma:lwm2m:oma:3",
+MsgId1 = 15,
+UdpSock = ?config(sock, Config),
+ObjectList = <<"</1>, </2>, </3/0>, </4>, </5>">>,
+RespTopic = list_to_binary("lwm2m/"++Epn++"/up/resp"),
+emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0),
+timer:sleep(200),
+
+std_register(UdpSock, Epn, ObjectList, MsgId1, RespTopic),
+
+% step2, send a WRITE command to device
+CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>,
+CmdId = 307,
+Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId,
+<<"msgType">> => <<"write">>,
+<<"data">> => #{
+<<"path">> => <<"/1/0/2">>,
+<<"type">> => <<"Integer">>,
+<<"value">> => 129
+}
+},
+CommandJson = emqx_json:encode(Command),
+test_mqtt_broker:publish(CommandTopic, CommandJson, 0),
+timer:sleep(50),
+Request2 = test_recv_coap_request(UdpSock),
+#coap_message{method = Method2, options=Options2, payload=Payload2} = Request2,
+Path2 = get_coap_path(Options2),
+?assertEqual(put, Method2),
+?assertEqual(<<"/1/0/2">>, Path2),
+?assertMatch([#{value := 129}], emqx_lwm2m_message:tlv_to_json(Path2, Payload2)),
+
+timer:sleep(50),
+
+test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, changed}, #coap_content{}, Request2, true),
+timer:sleep(100),
+
+ReadResult = emqx_json:encode(#{
+<<"requestID">> => CmdId, <<"cacheID">> => CmdId,
+<<"data">> => #{
+<<"reqPath">> => <<"/1/0/2">>,
+<<"code">> => <<"2.04">>,
+<<"codeMsg">> => <<"changed">>
+},
+<<"msgType">> => <<"write">>
+}),
+?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)).
+
 case01_register(Config) ->
 % ----------------------------------------
 % REGISTER command

@@ -1655,6 +1710,7 @@ case80_specail_object_19_1_0_write(Config) ->
 <<"value">> => base64:encode(<<12345:32>>)
 }
 },
+
 CommandJson = emqx_json:encode(Command),
 test_mqtt_broker:publish(CommandTopic, CommandJson, 0),
 timer:sleep(50),

@@ -1663,7 +1719,7 @@ case80_specail_object_19_1_0_write(Config) ->
 Path2 = get_coap_path(Options2),
 ?assertEqual(put, Method2),
 ?assertEqual(<<"/19/1/0">>, Path2),
-?assertEqual(<<12345:32>>, Payload2),
+?assertEqual(<<3:2, 0:1, 0:2, 4:3, 0, 12345:32>>, Payload2),
 timer:sleep(50),

 test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, changed}, #coap_content{}, Request2, true),

@@ -1672,6 +1728,7 @@ case80_specail_object_19_1_0_write(Config) ->
 ReadResult = emqx_json:encode(#{
 <<"requestID">> => CmdId, <<"cacheID">> => CmdId,
 <<"data">> => #{
+<<"reqPath">> => <<"/19/1/0">>,
 <<"code">> => <<"2.04">>,
 <<"codeMsg">> => <<"changed">>
 },
@@ -0,0 +1,21 @@
+-module(emqx_lwm2m_message_tests).
+
+-include_lib("eunit/include/eunit.hrl").
+
+-import(emqx_lwm2m_message, [bits/1]).
+
+bits_pos_test() ->
+?assertEqual(8, bits(0)),
+?assertEqual(8, bits(1)),
+?assertEqual(8, bits(127)),
+?assertEqual(16, bits(128)),
+?assertEqual(16, bits(129)),
+ok.
+
+bits_neg_test() ->
+?assertEqual(8, bits(-1)),
+?assertEqual(8, bits(-2)),
+?assertEqual(8, bits(-127)),
+?assertEqual(8, bits(-128)),
+?assertEqual(16, bits(-129)),
+ok.

@@ -1,6 +1,6 @@
 {application, emqx_management,
 [{description, "EMQ X Management API and CLI"},
-{vsn, "4.3.3"}, % strict semver, bump manually!
+{vsn, "4.3.8"}, % strict semver, bump manually!
 {modules, []},
 {registered, [emqx_management_sup]},
 {applications, [kernel,stdlib,minirest]},
@@ -1,12 +1,16 @@
 %% -*- mode: erlang -*-
 {VSN,
-[ {<<"4.3.[0-2]">>,
-[ {restart_application, emqx_management}
+[ {<<"4\\.3\\.[0-7]+">>,
+[ {apply,{minirest,stop_http,['http:management']}},
+{apply,{minirest,stop_http,['https:management']}},
+{restart_application, emqx_management}
 ]},
 {<<".*">>, []}
 ],
-[ {<<"4.3.[0-2]">>,
-[ {restart_application, emqx_management}
+[ {<<"4\\.3\\.[0-7]+">>,
+[ {apply,{minirest,stop_http,['http:management']}},
+{apply,{minirest,stop_http,['https:management']}},
+{restart_application, emqx_management}
 ]},
 {<<".*">>, []}
 ]
@@ -334,7 +334,7 @@ query({Qs, Fuzzy}, Start, Limit) ->
 match_fun(Ms, Fuzzy) ->
 MsC = ets:match_spec_compile(Ms),
 REFuzzy = lists:map(fun({K, like, S}) ->
-{ok, RE} = re:compile(S),
+{ok, RE} = re:compile(escape(S)),
 {K, like, RE}
 end, Fuzzy),
 fun(Rows) ->

@@ -347,6 +347,9 @@ match_fun(Ms, Fuzzy) ->
 end
 end.

+escape(B) when is_binary(B) ->
+re:replace(B, <<"\\\\">>, <<"\\\\\\\\">>, [{return, binary}, global]).
+
 run_fuzzy_match(_, []) ->
 true;
 run_fuzzy_match(E = {_, #{clientinfo := ClientInfo}, _}, [{Key, _, RE}|Fuzzy]) ->

@@ -450,4 +453,9 @@ params2qs_test() ->

 [{{'$1', #{}, '_'}, [], ['$_']}] = qs2ms([]).

+escape_test() ->
+Str = <<"\\n">>,
+{ok, Re} = re:compile(escape(Str)),
+{match, _} = re:run(<<"\\name">>, Re).
+
 -endif.
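A short illustration of why the new escape step matters (the pattern and subject below are example values, not from the change): without it, a user-supplied backslash in a fuzzy "like" filter would be read by re:compile/1 as the start of an escape sequence rather than a literal character:

%% Sketch only.
Escaped = re:replace(<<"a\\b">>, <<"\\\\">>, <<"\\\\\\\\">>, [{return, binary}, global]),
{ok, RE} = re:compile(Escaped),
{match, _} = re:run(<<"xa\\by">>, RE).   %% matches the literal a\b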
@@ -147,6 +147,8 @@ loop_unsubscribe([Params | ParamsN], Acc) ->
 code => Code},
 loop_unsubscribe(ParamsN, [Result | Acc]).

+do_subscribe(ClientId, _Topics, _QoS) when not is_binary(ClientId) ->
+{ok, ?ERROR8, <<"bad clientid: must be string">>};
 do_subscribe(_ClientId, [], _QoS) ->
 {ok, ?ERROR15, bad_topic};
 do_subscribe(ClientId, Topics, QoS) ->

@@ -156,6 +158,8 @@ do_subscribe(ClientId, Topics, QoS) ->
 _ -> ok
 end.

+do_publish(ClientId, _Topics, _Qos, _Retain, _Payload) when not (is_binary(ClientId) or (ClientId =:= undefined)) ->
+{ok, ?ERROR8, <<"bad clientid: must be string">>};
 do_publish(_ClientId, [], _Qos, _Retain, _Payload) ->
 {ok, ?ERROR15, bad_topic};
 do_publish(ClientId, Topics, Qos, Retain, Payload) ->

@@ -166,6 +170,8 @@ do_publish(ClientId, Topics, Qos, Retain, Payload) ->
 end, Topics),
 {ok, MsgIds}.

+do_unsubscribe(ClientId, _Topic) when not is_binary(ClientId) ->
+{ok, ?ERROR8, <<"bad clientid: must be string">>};
 do_unsubscribe(ClientId, Topic) ->
 case validate_by_filter(Topic) of
 true ->
@@ -191,10 +191,8 @@ clients(["show", ClientId]) ->
 if_client(ClientId, fun print/1);

 clients(["kick", ClientId]) ->
-case emqx_cm:kick_session(bin(ClientId)) of
-ok -> emqx_ctl:print("ok~n");
-_ -> emqx_ctl:print("Not Found.~n")
-end;
+ok = emqx_cm:kick_session(bin(ClientId)),
+emqx_ctl:print("ok~n");

 clients(_) ->
 emqx_ctl:usage([{"clients list", "List all clients"},

@@ -616,8 +614,18 @@ dump(_Table, _, '$end_of_table', Result) ->
 lists:reverse(Result);

 dump(Table, Tag, Key, Result) ->
-PrintValue = [print({Tag, Record}) || Record <- ets:lookup(Table, Key)],
-dump(Table, Tag, ets:next(Table, Key), [PrintValue | Result]).
+Ls = lists:foldl(fun(Record, Acc) ->
+try
+[print({Tag, Record}) | Acc]
+catch
+Class : Reason : Stk ->
+logger:error("Failed to print ~p, error: {~p, ~p}. "
+"Stacktrace: ~0p",
+[Record, Class, Reason, Stk]),
+Acc
+end
+end, [], ets:lookup(Table, Key)),
+dump(Table, Tag, ets:next(Table, Key), [lists:reverse(Ls) | Result]).

 print({_, []}) ->
 ok;

@@ -634,7 +642,7 @@ print({client, {ClientId, ChanPid}}) ->
 ClientInfo = maps:get(clientinfo, Attrs, #{}),
 ConnInfo = maps:get(conninfo, Attrs, #{}),
 Session = maps:get(session, Attrs, #{}),
-Connected = case maps:get(conn_state, Attrs) of
+Connected = case maps:get(conn_state, Attrs, undefined) of
 connected -> true;
 _ -> false
 end,
@@ -118,18 +118,18 @@ export_auth_mnesia()
 end.

 export_acl_mnesia() ->
-case ets:info(emqx_acl) of
+case ets:info(emqx_acl2) of
 undefined -> [];
 _ ->
-lists:map(fun({_, Filter, Action, Access, CreatedAt}) ->
-Filter1 = case Filter of
-{{Type, TypeValue}, Topic} ->
+lists:map(fun({Login, Topic, Action, Access, CreatedAt}) ->
+Filter1 = case Login of
+{Type, TypeValue} ->
 [{type, Type}, {type_value, TypeValue}, {topic, Topic}];
-{Type, Topic} ->
+Type ->
 [{type, Type}, {topic, Topic}]
 end,
 Filter1 ++ [{action, Action}, {access, Access}, {created_at, CreatedAt}]
-end, ets:tab2list(emqx_acl))
+end, emqx_acl_mnesia_db:all_acls_export())
 end.

 -ifdef(EMQX_ENTERPRISE).

@@ -185,6 +185,7 @@ confs_to_binary(Confs) ->

 -endif.

+-dialyzer([{nowarn_function, [import_rules/1, import_rule/1]}]).
 import_rule(#{<<"id">> := RuleId,
 <<"rawsql">> := RawSQL,
 <<"actions">> := Actions,

@@ -195,9 +196,11 @@ import_rule(#{<<"id">> := RuleId,
 actions => map_to_actions(Actions),
 enabled => Enabled,
 description => Desc},
-try emqx_rule_engine:create_rule(Rule)
-catch throw:{resource_not_initialized, _ResId} ->
-emqx_rule_engine:create_rule(Rule#{enabled => false})
+case emqx_rule_engine:create_rule(Rule) of
+{ok, _} -> ok;
+{error, _} ->
+_ = emqx_rule_engine:create_rule(Rule#{enabled => false}),
+ok
 end.

 map_to_actions(Maps) ->

@@ -470,10 +473,9 @@ do_import_auth_mnesia(Auths) ->
 end.

 do_import_acl_mnesia_by_old_data(Acls) ->
-case ets:info(emqx_acl) of
+case ets:info(emqx_acl2) of
 undefined -> ok;
 _ ->
-CreatedAt = erlang:system_time(millisecond),
 lists:foreach(fun(#{<<"login">> := Login,
 <<"topic">> := Topic,
 <<"allow">> := Allow,

@@ -482,11 +484,11 @@ do_import_acl_mnesia_by_old_data(Acls) ->
 true -> allow;
 false -> deny
 end,
-mnesia:dirty_write({emqx_acl, {{get_old_type(), Login}, Topic}, any_to_atom(Action), Allow1, CreatedAt})
+emqx_acl_mnesia_db:add_acl({get_old_type(), Login}, Topic, any_to_atom(Action), Allow1)
 end, Acls)
 end.
 do_import_acl_mnesia(Acls) ->
-case ets:info(emqx_acl) of
+case ets:info(emqx_acl2) of
 undefined -> ok;
 _ ->
 lists:foreach(fun(Map = #{<<"action">> := Action,

@@ -498,7 +500,7 @@ do_import_acl_mnesia(Acls) ->
 Value ->
 {any_to_atom(maps:get(<<"type">>, Map)), Value}
 end,
-emqx_acl_mnesia_cli:add_acl(Login, Topic, any_to_atom(Action), any_to_atom(Access))
+emqx_acl_mnesia_db:add_acl(Login, Topic, any_to_atom(Action), any_to_atom(Access))
 end, Acls)
 end.

Some files were not shown because too many files have changed in this diff.