Merge remote-tracking branch 'origin/release-55'
Commit 7d14a13b7b

@@ -16,4 +16,13 @@ HSTREAMDB_ZK_TAG=3.8.1
MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server
SQLSERVER_TAG=2019-CU19-ubuntu-20.04

# Password for the 'elastic' user (at least 6 characters)
ELASTIC_PASSWORD="emqx123"
# Password for the 'kibana_system' user (at least 6 characters)
KIBANA_PASSWORD="emqx123"
# Version of Elastic products
ELASTIC_TAG=8.11.4
LICENSE=basic

TARGET=emqx/emqx

@@ -0,0 +1,109 @@
version: "3.9"

services:
  setup:
    image: public.ecr.aws/elastic/elasticsearch:${ELASTIC_TAG}
    volumes:
      - ./elastic:/usr/share/elasticsearch/config/certs
    user: "0"
    command: >
      bash -c '
        if [ x${ELASTIC_PASSWORD} == x ]; then
          echo "Set the ELASTIC_PASSWORD environment variable in the .env file";
          exit 1;
        elif [ x${KIBANA_PASSWORD} == x ]; then
          echo "Set the KIBANA_PASSWORD environment variable in the .env file";
          exit 1;
        fi;
        echo "Setting file permissions"
        chown -R root:root config/certs;
        find . -type d -exec chmod 750 \{\} \;;
        find . -type f -exec chmod 640 \{\} \;;
        echo "Waiting for Elasticsearch availability";
        until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done;
        echo "Setting kibana_system password";
        until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done;
        echo "All done!";
      '
    healthcheck:
      test: ["CMD-SHELL", "[ -f config/certs/ca/ca.crt ]"]
      interval: 1s
      timeout: 5s
      retries: 120

  es01:
    depends_on:
      setup:
        condition: service_healthy
    image: public.ecr.aws/elastic/elasticsearch:${ELASTIC_TAG}
    container_name: elasticsearch
    hostname: elasticsearch
    volumes:
      - ./elastic:/usr/share/elasticsearch/config/certs
      - esdata01:/usr/share/elasticsearch/data
    ports:
      - 9200:9200
    environment:
      - node.name=es01
      - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
      - bootstrap.memory_lock=true
      - discovery.type=single-node
      - xpack.security.enabled=true
      - xpack.security.http.ssl.enabled=true
      - xpack.security.http.ssl.key=certs/es01/es01.key
      - xpack.security.http.ssl.certificate=certs/es01/es01.crt
      - xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
      - xpack.license.self_generated.type=${LICENSE}
    mem_limit: 1073741824
    ulimits:
      memlock:
        soft: -1
        hard: -1
    healthcheck:
      test:
        [
          "CMD-SHELL",
          "curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'",
        ]
      interval: 10s
      timeout: 10s
      retries: 120
    restart: always
    networks:
      - emqx_bridge

  kibana:
    depends_on:
      es01:
        condition: service_healthy
    image: public.ecr.aws/elastic/kibana:${ELASTIC_TAG}
    volumes:
      - ./elastic:/usr/share/kibana/config/certs
      - kibanadata:/usr/share/kibana/data
    ports:
      - 5601:5601
    environment:
      - SERVERNAME=kibana
      - ELASTICSEARCH_HOSTS=https://es01:9200
      - ELASTICSEARCH_USERNAME=kibana_system
      - ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
      - ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt
    mem_limit: 1073741824
    healthcheck:
      test:
        [
          "CMD-SHELL",
          "curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'",
        ]
      interval: 10s
      timeout: 10s
      retries: 120
    restart: always
    networks:
      - emqx_bridge

volumes:
  esdata01:
    driver: local
  kibanadata:
    driver: local

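A minimal usage sketch for this new compose file (the working directory, the host-side elastic/ certificate path, and the exact invocation are assumptions, not part of this commit):

    # start the Elasticsearch + Kibana stack with the credentials from .env
    docker compose up -d es01 kibana
    # verify the TLS endpoint using the bundled CA and the 'elastic' password
    curl --cacert elastic/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" https://localhost:9200
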
@@ -0,0 +1,20 @@
-----BEGIN CERTIFICATE-----
MIIDSjCCAjKgAwIBAgIVAIrN275DCtGnotTPpxwvQ5751N4OMA0GCSqGSIb3DQEB
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
ZXJhdGVkIENBMB4XDTI0MDExNjAyMzIyMFoXDTI3MDExNTAyMzIyMFowNDEyMDAG
A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCy0nwiEurUkIPFMLV1weVM
pPk/AlwZUzqjkeL44gsY53XI9Q05w/sL9u6PzwrXgTCFWNXzI9+MoAtp8phPkn14
cmg5/3sLe9YcFVFjYK/MoljlUbPDj+4dgk8l+w5FRSi0+JN5krUm7rYk9lojAkeS
fX8RU7ekKGbjBXIFtPxX5GNadu9RidR5GkHM3XroAIoris8bFOzMgFn9iybYnkhq
0S+Hpv0A8FVxzle0KNbPpsIkxXH2DnP2iPTDym9xJNl9Iv9MPtj9XaamH7TmXcSt
MbjkAudKsCw4bRuhHonM16DIUr8sX5UcRcAWyJ1x1qpZaOzMdh2VdYAHNuOsZwzJ
AgMBAAGjUzBRMB0GA1UdDgQWBBTAyDlp8NZfPe8NCGVlHJSVclGOhTAfBgNVHSME
GDAWgBTAyDlp8NZfPe8NCGVlHJSVclGOhTAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAeIUXRKmC53iirY4P49YspLafspAMf4ndMFQAp+Oc223Vs
hQC4axNoYnUdzWDH6LioAN7P826xNPqtXvTZF9fmeX7K8Nm9Kdj+for+QQI3j6+X
zq98VVkACb8b/Mc9Nac/WBbv/1IKyKgNNta7//WNPgAFolOfti/C0NLsPcKhrM9L
mGbvRX8ZjH8pVJ0YTy4/xfDcF7G/Lxl4Yvb0ZXpuQbvE1+Y0h5aoTNshT/skJxC4
iyVseYr21s3pptKcr6H9KZuSdZe5pbEo+81nT15w+50aswFLk9GCYh5UsQ+1jkRK
cKgxP93i6x8BVbQJGKi1A1jhauSKX2IpWZQsHy4p
-----END CERTIFICATE-----

@@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAstJ8IhLq1JCDxTC1dcHlTKT5PwJcGVM6o5Hi+OILGOd1yPUN
OcP7C/buj88K14EwhVjV8yPfjKALafKYT5J9eHJoOf97C3vWHBVRY2CvzKJY5VGz
w4/uHYJPJfsORUUotPiTeZK1Ju62JPZaIwJHkn1/EVO3pChm4wVyBbT8V+RjWnbv
UYnUeRpBzN166ACKK4rPGxTszIBZ/Ysm2J5IatEvh6b9APBVcc5XtCjWz6bCJMVx
9g5z9oj0w8pvcSTZfSL/TD7Y/V2mph+05l3ErTG45ALnSrAsOG0boR6JzNegyFK/
LF+VHEXAFsidcdaqWWjszHYdlXWABzbjrGcMyQIDAQABAoIBAAZOLXYanmjpIRpX
h7h7oikYEplWDRcQBBvvKZaOyuchhznTKTiZmF0xQ3Ny8J4Ndj9ndODWSZxI6uod
FaGNp+qytwnfgDBVGSVDm6tyRfSkX1fTsA/j3/iupvmO/w9yezdZYgLaCVTyex31
yVMdchZgYjYDUpEBYzJbV2xL18+GBRmmPjdXumlpcJqcclxjOQJSu/1WCGVfn/e/
64NQpAm7NSKLqeUl32g0/DvUpmYRfmf7ZjVUjePaJQU6sw5/N+3V9F1hYs8VSWz0
OMzYIfUcvixw+VWx5bu0nWt98FirhsQPjCTThD+DHP6koXGrdXpeMOQE1YZmoV5T
vP0X+FECgYEA5dsKVDQFL67muqz3CNRVM0xDWACCoa8789hYoxvhd1iO3e4kwXBa
ABPcZckioq+HiQ4UIxC2AhQ1FuTeIUTq7LZ0HtAAdKFi48U4LzmPhNUpG1E/HbJ3
GQbi4u1cAzGYuhdywktgBhn9bJ4XB7+X3815Y9qKkuRcwtXgKGDy8HkCgYEAxyly
vc7NBkLfIAmkOsm6VXfvfBTEUBUGi6+k1rarTUxWFIgRuk4FHywwWUTdxWBKJz3n
HNNJb/g7CcufdhLTuWVHQtJDxYf2cJjoi+Kf7/i/Qs9Nyhokj5Mnh6KlZQOWXpZd
Gwn/O13NeDxt1TIVO2xp6zY4FhVEPvaHuxsMCtECgYA7/eR/P6iO3nZoCJbdXhXy
spftEw0FSCg8p53SzIcXUCzRrcM4HavP0181zb5VebzFP8Bvun/WoRGOLSPwyP0L
1T8Pf7huuGSIEERuxvY3dC8raxQvGxJMnOiA0/Ss/Lfg8hfIsEWashPb0pMuOYpZ
JlblgfejCSlQzOOZhlxB+QKBgQCKmizRLV9/0QAJAsy5YPR9UJdpCebJOKiyg806
5Ct5AvwRE9UKjAuCczU+mu+f0fApOSpi5CQCeYVUvtG90UJpjrM2LLCfgoyeNbv4
xgG6dqlcbHrdgK4bATUMbsOd9g4qy4gGLkHi5df9qkhhi5Y9Iajg2X3U2H4DN3yk
WSFbUQKBgQCLz333qWOuT3OBv+EYxHDQUS4YG+dReUos+v0iPJzu+spnfibBF5IC
RjHIhPsdN1byNB0naXOkkz4tUlLGXv6umFgDtQvy/2rxvxQmUGp/WY1VM2+164Xe
NEWdMEU6UckCoMO77kw8JosKhmXCYaSW5bWwnXuEpOj9WWpwjKtxlA==
-----END RSA PRIVATE KEY-----

@@ -0,0 +1,20 @@
-----BEGIN CERTIFICATE-----
MIIDQDCCAiigAwIBAgIUe90yOBN1KBxOEr2jro3epamZksIwDQYJKoZIhvcNAQEL
BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l
cmF0ZWQgQ0EwHhcNMjQwMTE2MDIzMjIyWhcNMjcwMTE1MDIzMjIyWjAPMQ0wCwYD
VQQDEwRlczAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxGEL71pV
j8qoUxEuL7qjRSeS1eHxeKhu2jqEZb7iA1o/7b/26QuYAkoYL+WuJNfYjg5F/O8W
VVuAYIlN6a/mC6wT2t3pX4YSrdp+i3gtAC/LX+8mAeqMQPD+4jitOwjOsYzbuFCb
nYl86dnFPl/+Pmj20mtZ+Wt7oIPD88j6+r5qgv59pHICxS7Cq304LDTRQbNoT8HO
4c9VGGGtWIdtrqiYrz1OVefkffMrvFt77v6dKHn8g5tSyfQUDCoEKtTOc3Pe5zCB
vIMs6HaapoSkl8XdpFHQ712PCZRebAMCrVcPYQ3r8e9GYmLY/NhxEn3dWTqRhHeg
UD13O8o1aBWonwIDAQABo28wbTAdBgNVHQ4EFgQUXvGJtSf2/mLOK17AzUridtCV
xWwwHwYDVR0jBBgwFoAUwMg5afDWXz3vDQhlZRyUlXJRjoUwIAYDVR0RBBkwF4IJ
bG9jYWxob3N0hwR/AAABggRlczAxMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQAD
ggEBACaNq3ZqrbsGvbEtrf6kJGIsTokTFHeVJUSYmt1ZZzDFLSepXAC/J8gphV45
B+YSlkDPNTwMYlf7TUYY872zkdqOXN9r0NUx8MzVAX0+rux0RJba5GGUvJGZDNMX
WM5z9ry1KjQSQ1bSoRQOD3QArmBmhvikHjLc97Vqt56N0wA/ztXWOpNZX/TXmast
aXlUbcfQE73Cdq9tW1ATXwbQ2Gf7vVAUT3zjZSZbNdgPuBicGJHf85Fhjm2ND4+R
sjLIOQ2YgVxNHYbueScc6lJM5RNK194K7WrEQnRyGHT3NaDUm0FFNl//aQeq1ZVw
6gaUYlkTFauXwEYMDK901cWFaBE=
-----END CERTIFICATE-----

@@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAxGEL71pVj8qoUxEuL7qjRSeS1eHxeKhu2jqEZb7iA1o/7b/2
6QuYAkoYL+WuJNfYjg5F/O8WVVuAYIlN6a/mC6wT2t3pX4YSrdp+i3gtAC/LX+8m
AeqMQPD+4jitOwjOsYzbuFCbnYl86dnFPl/+Pmj20mtZ+Wt7oIPD88j6+r5qgv59
pHICxS7Cq304LDTRQbNoT8HO4c9VGGGtWIdtrqiYrz1OVefkffMrvFt77v6dKHn8
g5tSyfQUDCoEKtTOc3Pe5zCBvIMs6HaapoSkl8XdpFHQ712PCZRebAMCrVcPYQ3r
8e9GYmLY/NhxEn3dWTqRhHegUD13O8o1aBWonwIDAQABAoIBADJ3A/Om4az5dcce
96EBU9q+IDBBh2Wr1wzSk9p3sqoM47fLqH5b4dzYwJ1yZw2FwFtFFLw6jqExyexE
7JY8gyAFwPZyJ3pKQHuX1gQuRlYxchB9quU8Kn230LA+w1mT2lXrLj2PzWWvAsAv
m837KiFMpP0O5EjB07u8kLsRr1mG6QQ24Kc8oxd7xLXIiPzSvsOpYwo9hmIWENd5
kyA7oSa9EmN3TRTkKOHI7cFQ3DqIGdO71waUofKOdx39DyHS2YKWxDE/LUjkS9zw
1AyZG09l4uowyLRqwYhivEq9Za6rdc64yheuHatAM9kC2AOcVcsCPZquIe90k4t1
L7e9CAECgYEA1W483xTW8ngzxv9MMuPiW+PwVGRpyQrbO6OZOxdWEYfhrZlk5wlW
XK2T85jqooJwMWPTk1F49vZ9WN2KuLkL65GlkEtkFbxmOiFJjXuWwycbFSk05hPs
4AESBYHieaSPcwYhvLeG6g4PFyeqmbAGnKsJaj2ylPwDBOc7LgVlqAECgYEA64wo
gZwaj5SlP8M/OqGH04UVYr1kP/Eq6eiDfMyV5exy+pyzofZyNKUfJfw6sGgyRRHx
OVxlnPMsZ8zbdOXsvUEIeavpwDfQcp5eAURL65I6GMLsx2QpfiN2mDe1MqQW0jct
UleFaURgS84KHLE0+tBBg906jOHGjsE7Q3lyUJ8CgYBYYPev4K9JZGD8bEcfY6Ie
Lvsb1yC+8VHrFkmjYHxxcfUPr89KpGEwq2fynUW72YufyBiajkgq69Ln84U4DNhU
ydDnOXDOV191fsc4YQ8C7LSYRKH1DBcwgwD1at1fRbdpCAb8YHrrfLre+bv5PBzg
zyps5fOHIfwWEbI90lpQAQKBgQDoMMqBMTtxi+r1lucOScrVtFuncOCQs5BE8cIj
1JxzAQk6iBv/LSvZP2gcDq5f1Oaw9YXfsHguJfwA+ozeiAQ9bw0Gu3N52sstIXWz
M/rO5d9FJ2k3CEJqqFSwqkGBAQXKBUA06jeF1DREpX+MVxbNo1rhvMOJusn7UPm1
gtMwKwKBgQCfRzFO10ITwrw8rcRZwO9Axgqf11V7xn6qpgRxj4h0HOErVTCN1H0b
vE3Pz7cxS/g9vFRP37TuqBLfGVzPt9LAEFwCWPeZJLROBLHyu8XrhTbQx+sI2/pe
SBEJAQAHtYasFTE0sBEKNEY2rIt1c29XZhyhhtNKD9gRN/gB355wLg==
-----END RSA PRIVATE KEY-----

@@ -0,0 +1,7 @@
instances:
  - name: es01
    dns:
      - es01
      - localhost
    ip:
      - 127.0.0.1

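The certificates above are of the kind the Elastic certificate tool generates for this instances file; a hedged sketch of regenerating them (container name, in-container paths, and output locations are assumptions, not taken from this commit):

    # create a CA, then sign a certificate for the es01 instance described in instances.yml
    docker compose exec es01 bin/elasticsearch-certutil ca --pem --out /tmp/ca.zip
    docker compose exec es01 bin/elasticsearch-certutil cert --pem --in instances.yml \
      --ca-cert ca/ca.crt --ca-key ca/ca.key --out /tmp/certs.zip
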
@@ -191,5 +191,11 @@
    "listen": "0.0.0.0:636",
    "upstream": "ldap:636",
    "enabled": true
  },
  {
    "name": "elasticsearch",
    "listen": "0.0.0.0:9200",
    "upstream": "elasticsearch:9200",
    "enabled": true
  }
]

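With Elasticsearch now routed through Toxiproxy, failure-injection tests can degrade that route via the Toxiproxy admin API; a sketch assuming the default admin port 8474 and the hostnames used in this CI setup:

    # add 1s of latency to traffic flowing to elasticsearch:9200 through the new proxy
    curl -s -X POST http://toxiproxy:8474/proxies/elasticsearch/toxics \
      -d '{"type": "latency", "stream": "downstream", "attributes": {"latency": 1000}}'
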
@@ -24,7 +24,7 @@ jobs:
      matrix:
        branch:
          - master
          - release-54
          - release-55
        language:
          - cpp
          - python

Makefile
@@ -21,7 +21,7 @@ endif
# Dashboard version
# from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.6.1
export EMQX_EE_DASHBOARD_VERSION ?= e1.4.1
export EMQX_EE_DASHBOARD_VERSION ?= e1.5.0-beta.3

PROFILE ?= emqx
REL_PROFILES := emqx emqx-enterprise

@@ -32,10 +32,10 @@
%% `apps/emqx/src/bpapi/README.md'

%% Opensource edition
-define(EMQX_RELEASE_CE, "5.4.1").
-define(EMQX_RELEASE_CE, "5.5.0").

%% Enterprise edition
-define(EMQX_RELEASE_EE, "5.4.1").
-define(EMQX_RELEASE_EE, "5.5.0-alpha.1").

%% The HTTP API version
-define(EMQX_API_VERSION, "5.0").

@@ -8,6 +8,7 @@
{emqx_bridge,3}.
{emqx_bridge,4}.
{emqx_bridge,5}.
{emqx_bridge,6}.
{emqx_broker,1}.
{emqx_cm,1}.
{emqx_cm,2}.

@@ -41,6 +42,7 @@
{emqx_mgmt_api_plugins,2}.
{emqx_mgmt_cluster,1}.
{emqx_mgmt_cluster,2}.
{emqx_mgmt_cluster,3}.
{emqx_mgmt_data_backup,1}.
{emqx_mgmt_trace,1}.
{emqx_mgmt_trace,2}.

@@ -24,7 +24,7 @@ IsQuicSupp = fun() ->
end,

Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.0"}}},
Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.311"}}}.
Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.312"}}}.

Dialyzer = fun(Config) ->
    {dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config),

@@ -135,6 +135,7 @@ is_username_defined(_) -> false.
check_authorization_cache(ClientInfo, Action, Topic) ->
    case emqx_authz_cache:get_authz_cache(Action, Topic) of
        not_found ->
            inc_authz_metrics(cache_miss),
            AuthzResult = do_authorize(ClientInfo, Action, Topic),
            emqx_authz_cache:put_authz_cache(Action, Topic, AuthzResult),
            AuthzResult;

@@ -153,7 +154,7 @@ do_authorize(ClientInfo, Action, Topic) ->
    case run_hooks('client.authorize', [ClientInfo, Action, Topic], Default) of
        AuthzResult = #{result := Result} when Result == allow; Result == deny ->
            From = maps:get(from, AuthzResult, unknown),
            ok = log_result(ClientInfo, Topic, Action, From, NoMatch),
            ok = log_result(ClientInfo, Topic, Action, From, Result),
            emqx_hooks:run(
                'client.check_authz_complete',
                [ClientInfo, Action, Topic, Result, From]

@@ -219,7 +220,9 @@ inc_authz_metrics(allow) ->
inc_authz_metrics(deny) ->
    emqx_metrics:inc('authorization.deny');
inc_authz_metrics(cache_hit) ->
    emqx_metrics:inc('authorization.cache_hit').
    emqx_metrics:inc('authorization.cache_hit');
inc_authz_metrics(cache_miss) ->
    emqx_metrics:inc('authorization.cache_miss').

inc_authn_metrics(error) ->
    emqx_metrics:inc('authentication.failure');

@@ -56,7 +56,7 @@ drain_k() -> {?MODULE, drain_timestamp}.
-spec is_enabled(emqx_types:topic()) -> boolean().
is_enabled(Topic) ->
    case emqx:get_config([authorization, cache]) of
        #{enable := true, excludes := Filters} ->
        #{enable := true, excludes := Filters} when Filters =/= [] ->
            not is_excluded(Topic, Filters);
        #{enable := IsEnabled} ->
            IsEnabled

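For reference, a sketch of the cache-exclusion configuration this clause handles, in EMQX's HOCON format (the topic filter is made up; only the `excludes` key comes from this change):

    authorization.cache {
      enable = true
      excludes = ["nocache/#"]
    }
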
@ -469,26 +469,23 @@ do_update_listener(Type, Name, OldConf, NewConf) when
|
|||
ok = ranch:set_protocol_options(Id, WsOpts),
|
||||
%% No-op if the listener was not suspended.
|
||||
ranch:resume_listener(Id);
|
||||
do_update_listener(quic = Type, Name, _OldConf, NewConf) ->
|
||||
do_update_listener(quic = Type, Name, OldConf, NewConf) ->
|
||||
case quicer:listener(listener_id(Type, Name)) of
|
||||
{ok, ListenerPid} ->
|
||||
case quicer_listener:reload(ListenerPid, to_quicer_listener_opts(NewConf)) of
|
||||
ListenOn = quic_listen_on(maps:get(bind, NewConf)),
|
||||
case quicer_listener:reload(ListenerPid, ListenOn, to_quicer_listener_opts(NewConf)) of
|
||||
ok ->
|
||||
ok;
|
||||
{error, _} = Error ->
|
||||
%% @TODO: prefer: case quicer_listener:reload(ListenerPid, to_quicer_listener_opts(OldConf)) of
|
||||
case quicer_listener:unlock(ListenerPid, 3000) of
|
||||
Error ->
|
||||
case
|
||||
quic_listener_conf_rollback(
|
||||
ListenerPid, to_quicer_listener_opts(OldConf), Error
|
||||
)
|
||||
of
|
||||
ok ->
|
||||
?ELOG("Failed to reload QUIC listener ~p, but Rollback success\n", [
|
||||
Error
|
||||
]),
|
||||
{skip, Error};
|
||||
RestoreErr ->
|
||||
?ELOG(
|
||||
"Failed to reload QUIC listener ~p, and Rollback failed as well\n",
|
||||
[Error]
|
||||
),
|
||||
{error, {rollback_fail, RestoreErr}}
|
||||
E ->
|
||||
E
|
||||
end
|
||||
end;
|
||||
E ->
|
||||
|
@ -991,7 +988,7 @@ quic_listen_on(Bind) ->
|
|||
Port
|
||||
end.
|
||||
|
||||
-spec to_quicer_listener_opts(map()) -> quicer:listener_opts().
|
||||
-spec to_quicer_listener_opts(map()) -> map().
|
||||
to_quicer_listener_opts(Opts) ->
|
||||
DefAcceptors = erlang:system_info(schedulers_online) * 8,
|
||||
SSLOpts = maps:from_list(ssl_opts(Opts)),
|
||||
|
@ -1018,3 +1015,27 @@ to_quicer_listener_opts(Opts) ->
|
|||
),
|
||||
%% @NOTE: Optional options take precedence over required options
|
||||
maps:merge(Opts2, optional_quic_listener_opts(Opts)).
|
||||
|
||||
-spec quic_listener_conf_rollback(
|
||||
pid(),
|
||||
map(),
|
||||
Error :: {error, _, _} | {error, _}
|
||||
) -> ok | {error, any()}.
|
||||
quic_listener_conf_rollback(ListenerPid, #{bind := Bind} = Conf, Error) ->
|
||||
ListenOn = quic_listen_on(Bind),
|
||||
case quicer_listener:reload(ListenerPid, ListenOn, Conf) of
|
||||
ok ->
|
||||
?ELOG(
|
||||
"Failed to reload QUIC listener ~p, but Rollback success\n",
|
||||
[
|
||||
Error
|
||||
]
|
||||
),
|
||||
ok;
|
||||
RestoreErr ->
|
||||
?ELOG(
|
||||
"Failed to reload QUIC listener ~p, and Rollback failed as well\n",
|
||||
[Error]
|
||||
),
|
||||
{error, {rollback_fail, RestoreErr}}
|
||||
end.
|
||||
|
|
|
@@ -48,12 +48,17 @@ is_list_report_acceptable(_) ->

enrich_report(ReportRaw, Meta) ->
    %% clientid and peername always in emqx_conn's process metadata.
    %% topic can be put in meta using ?SLOG/3, or put in msg's report by ?SLOG/2
    %% topic and username can be put in meta using ?SLOG/3, or put in msg's report by ?SLOG/2
    Topic =
        case maps:get(topic, Meta, undefined) of
            undefined -> maps:get(topic, ReportRaw, undefined);
            Topic0 -> Topic0
        end,
    Username =
        case maps:get(username, Meta, undefined) of
            undefined -> maps:get(username, ReportRaw, undefined);
            Username0 -> Username0
        end,
    ClientId = maps:get(clientid, Meta, undefined),
    Peer = maps:get(peername, Meta, undefined),
    MFA = emqx_utils:format_mfal(Meta),

@@ -64,8 +69,9 @@ enrich_report(ReportRaw, Meta) ->
        ({_, undefined}, Acc) -> Acc;
        (Item, Acc) -> [Item | Acc]
    end,
    maps:to_list(maps:without([topic, msg, clientid], ReportRaw)),
    maps:to_list(maps:without([topic, msg, clientid, username], ReportRaw)),
    [
        {username, try_format_unicode(Username)},
        {topic, try_format_unicode(Topic)},
        {clientid, try_format_unicode(ClientId)},
        {peername, Peer},

@@ -258,7 +258,8 @@
-define(STASTS_ACL_METRICS, [
    {counter, 'authorization.allow'},
    {counter, 'authorization.deny'},
    {counter, 'authorization.cache_hit'}
    {counter, 'authorization.cache_hit'},
    {counter, 'authorization.cache_miss'}
]).

%% Statistic metrics for auth checking

@@ -702,6 +703,7 @@ reserved_idx('session.terminated') -> 224;
reserved_idx('authorization.allow') -> 300;
reserved_idx('authorization.deny') -> 301;
reserved_idx('authorization.cache_hit') -> 302;
reserved_idx('authorization.cache_miss') -> 303;
reserved_idx('authentication.success') -> 310;
reserved_idx('authentication.success.anonymous') -> 311;
reserved_idx('authentication.failure') -> 312;

@@ -167,7 +167,8 @@
    client_ssl_opts_schema/1,
    ciphers_schema/1,
    tls_versions_schema/1,
    description_schema/0
    description_schema/0,
    tags_schema/0
]).
-export([password_converter/2, bin_str_converter/2]).
-export([authz_fields/0]).

@@ -467,7 +468,7 @@ fields(authz_cache) ->
            }
        )},
    {excludes,
        sc(hoconsc:array(string()), #{
        sc(hoconsc:array(binary()), #{
            default => [],
            desc => ?DESC(fields_authz_cache_excludes)
        })}

@@ -3825,3 +3826,13 @@ description_schema() ->
            importance => ?IMPORTANCE_LOW
        }
    ).

tags_schema() ->
    sc(
        hoconsc:array(binary()),
        #{
            desc => ?DESC(resource_tags),
            required => false,
            importance => ?IMPORTANCE_LOW
        }
    ).

@ -50,8 +50,6 @@
|
|||
client_ssl/1,
|
||||
client_mtls/0,
|
||||
client_mtls/1,
|
||||
ssl_verify_fun_allow_any_host/0,
|
||||
ssl_verify_fun_allow_any_host_impl/3,
|
||||
ensure_mnesia_stopped/0,
|
||||
ensure_quic_listener/2,
|
||||
ensure_quic_listener/3,
|
||||
|
@ -1389,29 +1387,40 @@ matrix_to_groups(Module, Cases) ->
|
|||
Cases
|
||||
).
|
||||
|
||||
add_case_matrix(Module, Case, Acc0) ->
|
||||
{RootGroup, Matrix} = Module:Case(matrix),
|
||||
add_case_matrix(Module, TestCase, Acc0) ->
|
||||
{MaybeRootGroup, Matrix} =
|
||||
case Module:TestCase(matrix) of
|
||||
{RootGroup0, Matrix0} ->
|
||||
{RootGroup0, Matrix0};
|
||||
Matrix0 ->
|
||||
{undefined, Matrix0}
|
||||
end,
|
||||
lists:foldr(
|
||||
fun(Row, Acc) ->
|
||||
add_group([RootGroup | Row], Acc, Case)
|
||||
case MaybeRootGroup of
|
||||
undefined ->
|
||||
add_group(Row, Acc, TestCase);
|
||||
RootGroup ->
|
||||
add_group([RootGroup | Row], Acc, TestCase)
|
||||
end
|
||||
end,
|
||||
Acc0,
|
||||
Matrix
|
||||
).
|
||||
|
||||
add_group([], Acc, Case) ->
|
||||
case lists:member(Case, Acc) of
|
||||
add_group([], Acc, TestCase) ->
|
||||
case lists:member(TestCase, Acc) of
|
||||
true ->
|
||||
Acc;
|
||||
false ->
|
||||
[Case | Acc]
|
||||
[TestCase | Acc]
|
||||
end;
|
||||
add_group([Name | More], Acc, Cases) ->
|
||||
add_group([Name | More], Acc, TestCases) ->
|
||||
case lists:keyfind(Name, 1, Acc) of
|
||||
false ->
|
||||
[{Name, [], add_group(More, [], Cases)} | Acc];
|
||||
[{Name, [], add_group(More, [], TestCases)} | Acc];
|
||||
{Name, [], SubGroup} ->
|
||||
New = {Name, [], add_group(More, SubGroup, Cases)},
|
||||
New = {Name, [], add_group(More, SubGroup, TestCases)},
|
||||
lists:keystore(Name, 1, Acc, New)
|
||||
end.
|
||||
|
||||
|
|
|
@ -444,14 +444,45 @@ t_quic_update_opts(Config) ->
|
|||
| ClientSSLOpts
|
||||
]),
|
||||
|
||||
%% Change the listener port
|
||||
NewPort = emqx_common_test_helpers:select_free_port(ListenerType),
|
||||
{ok, _} = emqx:update_config(
|
||||
[listeners, ListenerType, updated],
|
||||
{update, #{
|
||||
<<"bind">> => format_bind({Host, NewPort})
|
||||
}}
|
||||
),
|
||||
|
||||
%% Connect to old port fail
|
||||
?assertExceptionOneOf(
|
||||
{exit, _},
|
||||
{error, _},
|
||||
ConnectFun(Host, Port, [
|
||||
{cacertfile, filename:join(PrivDir, "ca-next.pem")},
|
||||
{certfile, filename:join(PrivDir, "client.pem")},
|
||||
{keyfile, filename:join(PrivDir, "client.key")}
|
||||
| ClientSSLOpts
|
||||
])
|
||||
),
|
||||
|
||||
%% Connect to new port successfully.
|
||||
C4 = ConnectFun(Host, NewPort, [
|
||||
{cacertfile, filename:join(PrivDir, "ca-next.pem")},
|
||||
{certfile, filename:join(PrivDir, "client.pem")},
|
||||
{keyfile, filename:join(PrivDir, "client.key")}
|
||||
| ClientSSLOpts
|
||||
]),
|
||||
|
||||
%% Both pre- and post-update clients should be alive.
|
||||
?assertEqual(pong, emqtt:ping(C1)),
|
||||
?assertEqual(pong, emqtt:ping(C2)),
|
||||
?assertEqual(pong, emqtt:ping(C3)),
|
||||
?assertEqual(pong, emqtt:ping(C4)),
|
||||
|
||||
ok = emqtt:stop(C1),
|
||||
ok = emqtt:stop(C2),
|
||||
ok = emqtt:stop(C3)
|
||||
ok = emqtt:stop(C3),
|
||||
ok = emqtt:stop(C4)
|
||||
end).
|
||||
|
||||
t_quic_update_opts_fail(Config) ->
|
||||
|
|
|
@@ -137,7 +137,7 @@ secret_base64_encoded(_) -> undefined.

public_key(type) -> string();
public_key(desc) -> ?DESC(?FUNCTION_NAME);
public_key(required) -> ture;
public_key(required) -> true;
public_key(_) -> undefined.

endpoint(type) -> string();

@ -26,7 +26,10 @@
|
|||
bridge_v1_type_to_action_type/1,
|
||||
bridge_v1_type_name/1,
|
||||
is_action_type/1,
|
||||
registered_schema_modules/0,
|
||||
is_source/1,
|
||||
is_action/1,
|
||||
registered_schema_modules_actions/0,
|
||||
registered_schema_modules_sources/0,
|
||||
connector_action_config_to_bridge_v1_config/2,
|
||||
connector_action_config_to_bridge_v1_config/3,
|
||||
bridge_v1_config_to_connector_config/2,
|
||||
|
@ -51,19 +54,26 @@
|
|||
ConnectorConfig :: map(), ActionConfig :: map()
|
||||
) -> map().
|
||||
%% Define this if the automatic config upgrade is not enough for the connector.
|
||||
-callback bridge_v1_config_to_connector_config(BridgeV1Config :: map()) -> map().
|
||||
-callback bridge_v1_config_to_connector_config(BridgeV1Config :: map()) ->
|
||||
map() | {ConnectorTypeName :: atom(), map()}.
|
||||
%% Define this if the automatic config upgrade is not enough for the bridge.
|
||||
%% If you want to make use of the automatic config upgrade, you can call
|
||||
%% emqx_action_info:transform_bridge_v1_config_to_action_config/4 in your
|
||||
%% implementation and do some adjustments on the result.
|
||||
-callback bridge_v1_config_to_action_config(BridgeV1Config :: map(), ConnectorName :: binary()) ->
|
||||
map().
|
||||
map() | {source | action, ActionTypeName :: atom(), map()} | 'none'.
|
||||
-callback is_source() ->
|
||||
boolean().
|
||||
-callback is_action() ->
|
||||
boolean().
|
||||
|
||||
-optional_callbacks([
|
||||
bridge_v1_type_name/0,
|
||||
connector_action_config_to_bridge_v1_config/2,
|
||||
bridge_v1_config_to_connector_config/1,
|
||||
bridge_v1_config_to_action_config/2
|
||||
bridge_v1_config_to_action_config/2,
|
||||
is_source/0,
|
||||
is_action/0
|
||||
]).
|
||||
|
||||
%% ====================================================================
|
||||
|
@ -87,7 +97,8 @@ hard_coded_action_info_modules_ee() ->
|
|||
emqx_bridge_syskeeper_action_info,
|
||||
emqx_bridge_timescale_action_info,
|
||||
emqx_bridge_redis_action_info,
|
||||
emqx_bridge_iotdb_action_info
|
||||
emqx_bridge_iotdb_action_info,
|
||||
emqx_bridge_es_action_info
|
||||
].
|
||||
-else.
|
||||
hard_coded_action_info_modules_ee() ->
|
||||
|
@ -95,7 +106,10 @@ hard_coded_action_info_modules_ee() ->
|
|||
-endif.
|
||||
|
||||
hard_coded_action_info_modules_common() ->
|
||||
[emqx_bridge_http_action_info].
|
||||
[
|
||||
emqx_bridge_http_action_info,
|
||||
emqx_bridge_mqtt_pubsub_action_info
|
||||
].
|
||||
|
||||
hard_coded_action_info_modules() ->
|
||||
hard_coded_action_info_modules_common() ++ hard_coded_action_info_modules_ee().
|
||||
|
@ -177,10 +191,33 @@ is_action_type(Type) ->
|
|||
_ -> true
|
||||
end.
|
||||
|
||||
registered_schema_modules() ->
|
||||
%% Returns true if the action is an ingress action, false otherwise.
|
||||
is_source(Bin) when is_binary(Bin) ->
|
||||
is_source(binary_to_existing_atom(Bin));
|
||||
is_source(Type) ->
|
||||
ActionInfoMap = info_map(),
|
||||
IsSourceMap = maps:get(is_source, ActionInfoMap),
|
||||
maps:get(Type, IsSourceMap, false).
|
||||
|
||||
%% Returns true if the action is an egress action, false otherwise.
|
||||
is_action(Bin) when is_binary(Bin) ->
|
||||
is_action(binary_to_existing_atom(Bin));
|
||||
is_action(Type) ->
|
||||
ActionInfoMap = info_map(),
|
||||
IsActionMap = maps:get(is_action, ActionInfoMap),
|
||||
maps:get(Type, IsActionMap, true).
|
||||
|
||||
registered_schema_modules_actions() ->
|
||||
InfoMap = info_map(),
|
||||
Schemas = maps:get(action_type_to_schema_module, InfoMap),
|
||||
maps:to_list(Schemas).
|
||||
All = maps:to_list(Schemas),
|
||||
[{Type, SchemaMod} || {Type, SchemaMod} <- All, is_action(Type)].
|
||||
|
||||
registered_schema_modules_sources() ->
|
||||
InfoMap = info_map(),
|
||||
Schemas = maps:get(action_type_to_schema_module, InfoMap),
|
||||
All = maps:to_list(Schemas),
|
||||
[{Type, SchemaMod} || {Type, SchemaMod} <- All, is_source(Type)].
|
||||
|
||||
connector_action_config_to_bridge_v1_config(ActionOrBridgeType, ConnectorConfig, ActionConfig) ->
|
||||
Module = get_action_info_module(ActionOrBridgeType),
|
||||
|
@ -292,7 +329,9 @@ initial_info_map() ->
|
|||
action_type_to_bridge_v1_type => #{},
|
||||
action_type_to_connector_type => #{},
|
||||
action_type_to_schema_module => #{},
|
||||
action_type_to_info_module => #{}
|
||||
action_type_to_info_module => #{},
|
||||
is_source => #{},
|
||||
is_action => #{}
|
||||
}.
|
||||
|
||||
get_info_map(Module) ->
|
||||
|
@ -311,6 +350,20 @@ get_info_map(Module) ->
|
|||
false ->
|
||||
{ActionType, [ActionType]}
|
||||
end,
|
||||
IsIngress =
|
||||
case erlang:function_exported(Module, is_source, 0) of
|
||||
true ->
|
||||
Module:is_source();
|
||||
false ->
|
||||
false
|
||||
end,
|
||||
IsEgress =
|
||||
case erlang:function_exported(Module, is_action, 0) of
|
||||
true ->
|
||||
Module:is_action();
|
||||
false ->
|
||||
true
|
||||
end,
|
||||
#{
|
||||
action_type_names =>
|
||||
lists:foldl(
|
||||
|
@ -350,5 +403,11 @@ get_info_map(Module) ->
|
|||
end,
|
||||
#{ActionType => Module},
|
||||
BridgeV1Types
|
||||
)
|
||||
),
|
||||
is_source => #{
|
||||
ActionType => IsIngress
|
||||
},
|
||||
is_action => #{
|
||||
ActionType => IsEgress
|
||||
}
|
||||
}.
|
||||
|
|
|
@ -347,18 +347,26 @@ lookup(Type, Name, RawConf) ->
|
|||
}}
|
||||
end.
|
||||
|
||||
get_metrics(Type, Name) ->
|
||||
case emqx_bridge_v2:is_bridge_v2_type(Type) of
|
||||
get_metrics(ActionType, Name) ->
|
||||
case emqx_bridge_v2:is_bridge_v2_type(ActionType) of
|
||||
true ->
|
||||
case emqx_bridge_v2:bridge_v1_is_valid(Type, Name) of
|
||||
case emqx_bridge_v2:bridge_v1_is_valid(ActionType, Name) of
|
||||
true ->
|
||||
BridgeV2Type = emqx_bridge_v2:bridge_v2_type_to_connector_type(Type),
|
||||
emqx_bridge_v2:get_metrics(BridgeV2Type, Name);
|
||||
BridgeV2Type = emqx_bridge_v2:bridge_v1_type_to_bridge_v2_type(ActionType),
|
||||
try
|
||||
ConfRootKey = emqx_bridge_v2:get_conf_root_key_if_only_one(
|
||||
BridgeV2Type, Name
|
||||
),
|
||||
emqx_bridge_v2:get_metrics(ConfRootKey, BridgeV2Type, Name)
|
||||
catch
|
||||
error:Reason ->
|
||||
{error, Reason}
|
||||
end;
|
||||
false ->
|
||||
{error, not_bridge_v1_compatible}
|
||||
end;
|
||||
false ->
|
||||
emqx_resource:get_metrics(emqx_bridge_resource:resource_id(Type, Name))
|
||||
emqx_resource:get_metrics(emqx_bridge_resource:resource_id(ActionType, Name))
|
||||
end.
|
||||
|
||||
maybe_upgrade(mqtt, Config) ->
|
||||
|
|
|
@ -49,6 +49,14 @@
|
|||
-export([lookup_from_local_node/2]).
|
||||
-export([get_metrics_from_local_node/2]).
|
||||
|
||||
%% used by actions/sources schema
|
||||
-export([mqtt_v1_example/1]).
|
||||
|
||||
%% only for testing/mocking
|
||||
-export([supported_versions/1]).
|
||||
|
||||
-define(BPAPI_NAME, emqx_bridge).
|
||||
|
||||
-define(BRIDGE_NOT_ENABLED,
|
||||
?BAD_REQUEST(<<"Forbidden operation, bridge not enabled">>)
|
||||
).
|
||||
|
@ -176,7 +184,7 @@ bridge_info_examples(Method) ->
|
|||
},
|
||||
<<"mqtt_example">> => #{
|
||||
summary => <<"MQTT Bridge">>,
|
||||
value => info_example(mqtt, Method)
|
||||
value => mqtt_v1_example(Method)
|
||||
}
|
||||
},
|
||||
emqx_enterprise_bridge_examples(Method)
|
||||
|
@ -189,6 +197,9 @@ emqx_enterprise_bridge_examples(Method) ->
|
|||
emqx_enterprise_bridge_examples(_Method) -> #{}.
|
||||
-endif.
|
||||
|
||||
mqtt_v1_example(Method) ->
|
||||
info_example(mqtt, Method).
|
||||
|
||||
info_example(Type, Method) ->
|
||||
maps:merge(
|
||||
info_example_basic(Type),
|
||||
|
@ -548,9 +559,13 @@ schema("/bridges_probe") ->
|
|||
Id,
|
||||
case emqx_bridge_v2:is_bridge_v2_type(BridgeType) of
|
||||
true ->
|
||||
BridgeV2Type = emqx_bridge_v2:bridge_v2_type_to_connector_type(BridgeType),
|
||||
ok = emqx_bridge_v2:reset_metrics(BridgeV2Type, BridgeName),
|
||||
?NO_CONTENT;
|
||||
try
|
||||
ok = emqx_bridge_v2:bridge_v1_reset_metrics(BridgeType, BridgeName),
|
||||
?NO_CONTENT
|
||||
catch
|
||||
error:Reason ->
|
||||
?BAD_REQUEST(Reason)
|
||||
end;
|
||||
false ->
|
||||
ok = emqx_bridge_resource:reset_metrics(
|
||||
emqx_bridge_resource:resource_id(BridgeType, BridgeName)
|
||||
|
@ -1094,18 +1109,18 @@ maybe_try_restart(_, _, _) ->
|
|||
|
||||
do_bpapi_call(all, Call, Args) ->
|
||||
maybe_unwrap(
|
||||
do_bpapi_call_vsn(emqx_bpapi:supported_version(emqx_bridge), Call, Args)
|
||||
do_bpapi_call_vsn(emqx_bpapi:supported_version(?BPAPI_NAME), Call, Args)
|
||||
);
|
||||
do_bpapi_call(Node, Call, Args) ->
|
||||
case lists:member(Node, mria:running_nodes()) of
|
||||
true ->
|
||||
do_bpapi_call_vsn(emqx_bpapi:supported_version(Node, emqx_bridge), Call, Args);
|
||||
do_bpapi_call_vsn(emqx_bpapi:supported_version(Node, ?BPAPI_NAME), Call, Args);
|
||||
false ->
|
||||
{error, {node_not_found, Node}}
|
||||
end.
|
||||
|
||||
do_bpapi_call_vsn(SupportedVersion, Call, Args) ->
|
||||
case lists:member(SupportedVersion, supported_versions(Call)) of
|
||||
case lists:member(SupportedVersion, ?MODULE:supported_versions(Call)) of
|
||||
true ->
|
||||
apply(emqx_bridge_proto_v4, Call, Args);
|
||||
false ->
|
||||
|
@ -1117,10 +1132,15 @@ maybe_unwrap({error, not_implemented}) ->
|
|||
maybe_unwrap(RpcMulticallResult) ->
|
||||
emqx_rpc:unwrap_erpc(RpcMulticallResult).
|
||||
|
||||
supported_versions(start_bridge_to_node) -> [2, 3, 4, 5];
|
||||
supported_versions(start_bridges_to_all_nodes) -> [2, 3, 4, 5];
|
||||
supported_versions(get_metrics_from_all_nodes) -> [4, 5];
|
||||
supported_versions(_Call) -> [1, 2, 3, 4, 5].
|
||||
supported_versions(start_bridge_to_node) -> bpapi_version_range(2, latest);
|
||||
supported_versions(start_bridges_to_all_nodes) -> bpapi_version_range(2, latest);
|
||||
supported_versions(get_metrics_from_all_nodes) -> bpapi_version_range(4, latest);
|
||||
supported_versions(_Call) -> bpapi_version_range(1, latest).
|
||||
|
||||
%% [From, To] (inclusive on both ends)
|
||||
bpapi_version_range(From, latest) ->
|
||||
ThisNodeVsn = emqx_bpapi:supported_version(node(), ?BPAPI_NAME),
|
||||
lists:seq(From, ThisNodeVsn).
|
||||
|
||||
redact(Term) ->
|
||||
emqx_utils:redact(Term).
|
||||
|
|
|
@ -82,7 +82,8 @@ external_ids(Type, Name) ->
|
|||
get_conf(BridgeType, BridgeName) ->
|
||||
case emqx_bridge_v2:is_bridge_v2_type(BridgeType) of
|
||||
true ->
|
||||
emqx_conf:get_raw([actions, BridgeType, BridgeName]);
|
||||
ConfRootName = emqx_bridge_v2:get_conf_root_key_if_only_one(BridgeType, BridgeName),
|
||||
emqx_conf:get_raw([ConfRootName, BridgeType, BridgeName]);
|
||||
false ->
|
||||
undefined
|
||||
end.
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
bridge_to_resource_type/1,
|
||||
resource_id/1,
|
||||
resource_id/2,
|
||||
resource_id/3,
|
||||
bridge_id/2,
|
||||
parse_bridge_id/1,
|
||||
parse_bridge_id/2,
|
||||
|
@ -62,6 +63,9 @@
|
|||
?IS_BI_DIR_BRIDGE(TYPE)
|
||||
).
|
||||
|
||||
-define(ROOT_KEY_ACTIONS, actions).
|
||||
-define(ROOT_KEY_SOURCES, sources).
|
||||
|
||||
-if(?EMQX_RELEASE_EDITION == ee).
|
||||
bridge_to_resource_type(BridgeType) when is_binary(BridgeType) ->
|
||||
bridge_to_resource_type(binary_to_existing_atom(BridgeType, utf8));
|
||||
|
@ -85,11 +89,21 @@ bridge_impl_module(_BridgeType) -> undefined.
|
|||
-endif.
|
||||
|
||||
resource_id(BridgeId) when is_binary(BridgeId) ->
|
||||
resource_id_for_kind(?ROOT_KEY_ACTIONS, BridgeId).
|
||||
|
||||
resource_id(BridgeType, BridgeName) ->
|
||||
resource_id(?ROOT_KEY_ACTIONS, BridgeType, BridgeName).
|
||||
|
||||
resource_id(ConfRootKey, BridgeType, BridgeName) ->
|
||||
BridgeId = bridge_id(BridgeType, BridgeName),
|
||||
resource_id_for_kind(ConfRootKey, BridgeId).
|
||||
|
||||
resource_id_for_kind(ConfRootKey, BridgeId) when is_binary(BridgeId) ->
|
||||
case binary:split(BridgeId, <<":">>) of
|
||||
[Type, _Name] ->
|
||||
case emqx_bridge_v2:is_bridge_v2_type(Type) of
|
||||
true ->
|
||||
emqx_bridge_v2:bridge_v1_id_to_connector_resource_id(BridgeId);
|
||||
emqx_bridge_v2:bridge_v1_id_to_connector_resource_id(ConfRootKey, BridgeId);
|
||||
false ->
|
||||
<<"bridge:", BridgeId/binary>>
|
||||
end;
|
||||
|
@ -97,10 +111,6 @@ resource_id(BridgeId) when is_binary(BridgeId) ->
|
|||
invalid_data(<<"should be of pattern {type}:{name}, but got ", BridgeId/binary>>)
|
||||
end.
|
||||
|
||||
resource_id(BridgeType, BridgeName) ->
|
||||
BridgeId = bridge_id(BridgeType, BridgeName),
|
||||
resource_id(BridgeId).
|
||||
|
||||
bridge_id(BridgeType, BridgeName) ->
|
||||
Name = bin(BridgeName),
|
||||
Type = bin(BridgeType),
|
||||
|
@ -137,7 +147,7 @@ reset_metrics(ResourceId) ->
|
|||
true ->
|
||||
case emqx_bridge_v2:bridge_v1_is_valid(Type, Name) of
|
||||
true ->
|
||||
BridgeV2Type = emqx_bridge_v2:bridge_v2_type_to_connector_type(Type),
|
||||
BridgeV2Type = emqx_bridge_v2:bridge_v1_type_to_bridge_v2_type(Type),
|
||||
emqx_bridge_v2:reset_metrics(BridgeV2Type, Name);
|
||||
false ->
|
||||
{error, not_bridge_v1_compatible}
|
||||
|
|
File diff suppressed because it is too large
|
@ -26,6 +26,9 @@
|
|||
-import(hoconsc, [mk/2, array/1, enum/1]).
|
||||
-import(emqx_utils, [redact/1]).
|
||||
|
||||
-define(ROOT_KEY_ACTIONS, actions).
|
||||
-define(ROOT_KEY_SOURCES, sources).
|
||||
|
||||
%% Swagger specs from hocon schema
|
||||
-export([
|
||||
api_spec/0,
|
||||
|
@ -34,7 +37,7 @@
|
|||
namespace/0
|
||||
]).
|
||||
|
||||
%% API callbacks
|
||||
%% API callbacks : actions
|
||||
-export([
|
||||
'/actions'/2,
|
||||
'/actions/:id'/2,
|
||||
|
@ -46,9 +49,28 @@
|
|||
'/actions_probe'/2,
|
||||
'/action_types'/2
|
||||
]).
|
||||
%% API callbacks : sources
|
||||
-export([
|
||||
'/sources'/2,
|
||||
'/sources/:id'/2,
|
||||
'/sources/:id/metrics'/2,
|
||||
'/sources/:id/metrics/reset'/2,
|
||||
'/sources/:id/enable/:enable'/2,
|
||||
'/sources/:id/:operation'/2,
|
||||
'/nodes/:node/sources/:id/:operation'/2,
|
||||
'/sources_probe'/2,
|
||||
'/source_types'/2
|
||||
]).
|
||||
|
||||
%% BpAPI / RPC Targets
|
||||
-export([lookup_from_local_node/2, get_metrics_from_local_node/2]).
|
||||
-export([
|
||||
lookup_from_local_node/2,
|
||||
get_metrics_from_local_node/2,
|
||||
lookup_from_local_node_v6/3,
|
||||
get_metrics_from_local_node_v6/3
|
||||
]).
|
||||
|
||||
-define(BPAPI_NAME, emqx_bridge).
|
||||
|
||||
-define(BRIDGE_NOT_FOUND(BRIDGE_TYPE, BRIDGE_NAME),
|
||||
?NOT_FOUND(
|
||||
|
@ -71,13 +93,16 @@
|
|||
end
|
||||
).
|
||||
|
||||
namespace() -> "actions".
|
||||
namespace() -> "actions_and_sources".
|
||||
|
||||
api_spec() ->
|
||||
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
|
||||
|
||||
paths() ->
|
||||
[
|
||||
%%=============
|
||||
%% Actions
|
||||
%%=============
|
||||
"/actions",
|
||||
"/actions/:id",
|
||||
"/actions/:id/enable/:enable",
|
||||
|
@ -88,7 +113,21 @@ paths() ->
|
|||
"/actions/:id/metrics",
|
||||
"/actions/:id/metrics/reset",
|
||||
"/actions_probe",
|
||||
"/action_types"
|
||||
"/action_types",
|
||||
%%=============
|
||||
%% Sources
|
||||
%%=============
|
||||
"/sources",
|
||||
"/sources/:id",
|
||||
"/sources/:id/enable/:enable",
|
||||
"/sources/:id/:operation",
|
||||
"/nodes/:node/sources/:id/:operation",
|
||||
%% %% Caveat: metrics paths must come *after* `/:operation', otherwise minirest will
|
||||
%% %% try to match the latter first, trying to interpret `metrics' as an operation...
|
||||
"/sources/:id/metrics",
|
||||
"/sources/:id/metrics/reset",
|
||||
"/sources_probe"
|
||||
%% "/source_types"
|
||||
].
|
||||
|
||||
error_schema(Code, Message) ->
|
||||
|
@ -101,17 +140,28 @@ error_schema(Codes, Message, ExtraFields) when is_list(Message) ->
|
|||
error_schema(Codes, Message, ExtraFields) when is_list(Codes) andalso is_binary(Message) ->
|
||||
ExtraFields ++ emqx_dashboard_swagger:error_codes(Codes, Message).
|
||||
|
||||
get_response_body_schema() ->
|
||||
actions_get_response_body_schema() ->
|
||||
emqx_dashboard_swagger:schema_with_examples(
|
||||
emqx_bridge_v2_schema:get_response(),
|
||||
bridge_info_examples(get)
|
||||
emqx_bridge_v2_schema:actions_get_response(),
|
||||
bridge_info_examples(get, ?ROOT_KEY_ACTIONS)
|
||||
).
|
||||
|
||||
bridge_info_examples(Method) ->
|
||||
emqx_bridge_v2_schema:examples(Method).
|
||||
sources_get_response_body_schema() ->
|
||||
emqx_dashboard_swagger:schema_with_examples(
|
||||
emqx_bridge_v2_schema:sources_get_response(),
|
||||
bridge_info_examples(get, ?ROOT_KEY_SOURCES)
|
||||
).
|
||||
|
||||
bridge_info_array_example(Method) ->
|
||||
lists:map(fun(#{value := Config}) -> Config end, maps:values(bridge_info_examples(Method))).
|
||||
bridge_info_examples(Method, ?ROOT_KEY_ACTIONS) ->
|
||||
emqx_bridge_v2_schema:actions_examples(Method);
|
||||
bridge_info_examples(Method, ?ROOT_KEY_SOURCES) ->
|
||||
emqx_bridge_v2_schema:sources_examples(Method).
|
||||
|
||||
bridge_info_array_example(Method, ConfRootKey) ->
|
||||
lists:map(
|
||||
fun(#{value := Config}) -> Config end,
|
||||
maps:values(bridge_info_examples(Method, ConfRootKey))
|
||||
).
|
||||
|
||||
param_path_id() ->
|
||||
{id,
|
||||
|
@ -185,6 +235,9 @@ param_path_enable() ->
|
|||
}
|
||||
)}.
|
||||
|
||||
%%================================================================================
|
||||
%% Actions
|
||||
%%================================================================================
|
||||
schema("/actions") ->
|
||||
#{
|
||||
'operationId' => '/actions',
|
||||
|
@ -194,8 +247,8 @@ schema("/actions") ->
|
|||
description => ?DESC("desc_api1"),
|
||||
responses => #{
|
||||
200 => emqx_dashboard_swagger:schema_with_example(
|
||||
array(emqx_bridge_v2_schema:get_response()),
|
||||
bridge_info_array_example(get)
|
||||
array(emqx_bridge_v2_schema:actions_get_response()),
|
||||
bridge_info_array_example(get, ?ROOT_KEY_ACTIONS)
|
||||
)
|
||||
}
|
||||
},
|
||||
|
@ -204,11 +257,11 @@ schema("/actions") ->
|
|||
summary => <<"Create bridge">>,
|
||||
description => ?DESC("desc_api2"),
|
||||
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
|
||||
emqx_bridge_v2_schema:post_request(),
|
||||
bridge_info_examples(post)
|
||||
emqx_bridge_v2_schema:actions_post_request(),
|
||||
bridge_info_examples(post, ?ROOT_KEY_ACTIONS)
|
||||
),
|
||||
responses => #{
|
||||
201 => get_response_body_schema(),
|
||||
201 => actions_get_response_body_schema(),
|
||||
400 => error_schema('ALREADY_EXISTS', "Bridge already exists")
|
||||
}
|
||||
}
|
||||
|
@ -222,7 +275,7 @@ schema("/actions/:id") ->
|
|||
description => ?DESC("desc_api3"),
|
||||
parameters => [param_path_id()],
|
||||
responses => #{
|
||||
200 => get_response_body_schema(),
|
||||
200 => actions_get_response_body_schema(),
|
||||
404 => error_schema('NOT_FOUND', "Bridge not found")
|
||||
}
|
||||
},
|
||||
|
@ -232,11 +285,11 @@ schema("/actions/:id") ->
|
|||
description => ?DESC("desc_api4"),
|
||||
parameters => [param_path_id()],
|
||||
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
|
||||
emqx_bridge_v2_schema:put_request(),
|
||||
bridge_info_examples(put)
|
||||
emqx_bridge_v2_schema:actions_put_request(),
|
||||
bridge_info_examples(put, ?ROOT_KEY_ACTIONS)
|
||||
),
|
||||
responses => #{
|
||||
200 => get_response_body_schema(),
|
||||
200 => actions_get_response_body_schema(),
|
||||
404 => error_schema('NOT_FOUND', "Bridge not found"),
|
||||
400 => error_schema('BAD_REQUEST', "Update bridge failed")
|
||||
}
|
||||
|
@ -361,8 +414,8 @@ schema("/actions_probe") ->
|
|||
desc => ?DESC("desc_api9"),
|
||||
summary => <<"Test creating bridge">>,
|
||||
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
|
||||
emqx_bridge_v2_schema:post_request(),
|
||||
bridge_info_examples(post)
|
||||
emqx_bridge_v2_schema:actions_post_request(),
|
||||
bridge_info_examples(post, ?ROOT_KEY_ACTIONS)
|
||||
),
|
||||
responses => #{
|
||||
204 => <<"Test bridge OK">>,
|
||||
|
@ -379,12 +432,223 @@ schema("/action_types") ->
|
|||
summary => <<"List available action types">>,
|
||||
responses => #{
|
||||
200 => emqx_dashboard_swagger:schema_with_examples(
|
||||
array(emqx_bridge_v2_schema:types_sc()),
|
||||
array(emqx_bridge_v2_schema:action_types_sc()),
|
||||
#{
|
||||
<<"types">> =>
|
||||
#{
|
||||
summary => <<"Action types">>,
|
||||
value => emqx_bridge_v2_schema:types()
|
||||
value => emqx_bridge_v2_schema:action_types()
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
%%================================================================================
|
||||
%% Sources
|
||||
%%================================================================================
|
||||
schema("/sources") ->
|
||||
#{
|
||||
'operationId' => '/sources',
|
||||
get => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"List sources">>,
|
||||
description => ?DESC("desc_api1"),
|
||||
responses => #{
|
||||
%% FIXME: examples
|
||||
200 => emqx_dashboard_swagger:schema_with_example(
|
||||
array(emqx_bridge_v2_schema:sources_get_response()),
|
||||
bridge_info_array_example(get, ?ROOT_KEY_SOURCES)
|
||||
)
|
||||
}
|
||||
},
|
||||
post => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Create source">>,
|
||||
description => ?DESC("desc_api2"),
|
||||
%% FIXME: examples
|
||||
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
|
||||
emqx_bridge_v2_schema:sources_post_request(),
|
||||
bridge_info_examples(post, ?ROOT_KEY_SOURCES)
|
||||
),
|
||||
responses => #{
|
||||
201 => sources_get_response_body_schema(),
|
||||
400 => error_schema('ALREADY_EXISTS', "Source already exists")
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/sources/:id") ->
|
||||
#{
|
||||
'operationId' => '/sources/:id',
|
||||
get => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Get source">>,
|
||||
description => ?DESC("desc_api3"),
|
||||
parameters => [param_path_id()],
|
||||
responses => #{
|
||||
200 => sources_get_response_body_schema(),
|
||||
404 => error_schema('NOT_FOUND', "Source not found")
|
||||
}
|
||||
},
|
||||
put => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Update source">>,
|
||||
description => ?DESC("desc_api4"),
|
||||
parameters => [param_path_id()],
|
||||
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
|
||||
emqx_bridge_v2_schema:sources_put_request(),
|
||||
bridge_info_examples(put, ?ROOT_KEY_SOURCES)
|
||||
),
|
||||
responses => #{
|
||||
200 => sources_get_response_body_schema(),
|
||||
404 => error_schema('NOT_FOUND', "Source not found"),
|
||||
400 => error_schema('BAD_REQUEST', "Update source failed")
|
||||
}
|
||||
},
|
||||
delete => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Delete source">>,
|
||||
description => ?DESC("desc_api5"),
|
||||
parameters => [param_path_id(), param_qs_delete_cascade()],
|
||||
responses => #{
|
||||
204 => <<"Source deleted">>,
|
||||
400 => error_schema(
|
||||
'BAD_REQUEST',
|
||||
"Cannot delete bridge while active rules are defined for this source",
|
||||
[{rules, mk(array(string()), #{desc => "Dependent Rule IDs"})}]
|
||||
),
|
||||
404 => error_schema('NOT_FOUND', "Source not found"),
|
||||
503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/sources/:id/metrics") ->
|
||||
#{
|
||||
'operationId' => '/sources/:id/metrics',
|
||||
get => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Get source metrics">>,
|
||||
description => ?DESC("desc_bridge_metrics"),
|
||||
parameters => [param_path_id()],
|
||||
responses => #{
|
||||
200 => emqx_bridge_schema:metrics_fields(),
|
||||
404 => error_schema('NOT_FOUND', "Source not found")
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/sources/:id/metrics/reset") ->
|
||||
#{
|
||||
'operationId' => '/sources/:id/metrics/reset',
|
||||
put => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Reset source metrics">>,
|
||||
description => ?DESC("desc_api6"),
|
||||
parameters => [param_path_id()],
|
||||
responses => #{
|
||||
204 => <<"Reset success">>,
|
||||
404 => error_schema('NOT_FOUND', "Source not found")
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/sources/:id/enable/:enable") ->
|
||||
#{
|
||||
'operationId' => '/sources/:id/enable/:enable',
|
||||
put =>
|
||||
#{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Enable or disable bridge">>,
|
||||
desc => ?DESC("desc_enable_bridge"),
|
||||
parameters => [param_path_id(), param_path_enable()],
|
||||
responses =>
|
||||
#{
|
||||
204 => <<"Success">>,
|
||||
404 => error_schema(
|
||||
'NOT_FOUND', "Bridge not found or invalid operation"
|
||||
),
|
||||
503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/sources/:id/:operation") ->
|
||||
#{
|
||||
'operationId' => '/sources/:id/:operation',
|
||||
post => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Manually start a bridge">>,
|
||||
description => ?DESC("desc_api7"),
|
||||
parameters => [
|
||||
param_path_id(),
|
||||
param_path_operation_cluster()
|
||||
],
|
||||
responses => #{
|
||||
204 => <<"Operation success">>,
|
||||
400 => error_schema(
|
||||
'BAD_REQUEST', "Problem with configuration of external service"
|
||||
),
|
||||
404 => error_schema('NOT_FOUND', "Bridge not found or invalid operation"),
|
||||
501 => error_schema('NOT_IMPLEMENTED', "Not Implemented"),
|
||||
503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/nodes/:node/sources/:id/:operation") ->
|
||||
#{
|
||||
'operationId' => '/nodes/:node/sources/:id/:operation',
|
||||
post => #{
|
||||
tags => [<<"sources">>],
|
||||
summary => <<"Manually start a bridge on a given node">>,
|
||||
description => ?DESC("desc_api8"),
|
||||
parameters => [
|
||||
param_path_node(),
|
||||
param_path_id(),
|
||||
param_path_operation_on_node()
|
||||
],
|
||||
responses => #{
|
||||
204 => <<"Operation success">>,
|
||||
400 => error_schema(
|
||||
'BAD_REQUEST',
|
||||
"Problem with configuration of external service or bridge not enabled"
|
||||
),
|
||||
404 => error_schema(
|
||||
'NOT_FOUND', "Bridge or node not found or invalid operation"
|
||||
),
|
||||
501 => error_schema('NOT_IMPLEMENTED', "Not Implemented"),
|
||||
503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/sources_probe") ->
|
||||
#{
|
||||
'operationId' => '/sources_probe',
|
||||
post => #{
|
||||
tags => [<<"sources">>],
|
||||
desc => ?DESC("desc_api9"),
|
||||
summary => <<"Test creating bridge">>,
|
||||
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
|
||||
emqx_bridge_v2_schema:sources_post_request(),
|
||||
bridge_info_examples(post, ?ROOT_KEY_SOURCES)
|
||||
),
|
||||
responses => #{
|
||||
204 => <<"Test bridge OK">>,
|
||||
400 => error_schema(['TEST_FAILED'], "bridge test failed")
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/source_types") ->
|
||||
#{
|
||||
'operationId' => '/source_types',
|
||||
get => #{
|
||||
tags => [<<"sources">>],
|
||||
desc => ?DESC("desc_api10"),
|
||||
summary => <<"List available source types">>,
|
||||
responses => #{
|
||||
200 => emqx_dashboard_swagger:schema_with_examples(
|
||||
array(emqx_bridge_v2_schema:action_types_sc()),
|
||||
#{
|
||||
<<"types">> =>
|
||||
#{
|
||||
summary => <<"Source types">>,
|
||||
value => emqx_bridge_v2_schema:action_types()
|
||||
}
|
||||
}
|
||||
)
|
||||
|
@ -392,21 +656,103 @@ schema("/action_types") ->
|
|||
}
|
||||
}.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Thin Handlers
|
||||
%%------------------------------------------------------------------------------
|
||||
%%================================================================================
|
||||
%% Actions
|
||||
%%================================================================================
|
||||
'/actions'(post, #{body := #{<<"type">> := BridgeType, <<"name">> := BridgeName} = Conf0}) ->
|
||||
case emqx_bridge_v2:lookup(BridgeType, BridgeName) of
|
||||
{ok, _} ->
|
||||
?BAD_REQUEST('ALREADY_EXISTS', <<"bridge already exists">>);
|
||||
{error, not_found} ->
|
||||
Conf = filter_out_request_body(Conf0),
|
||||
create_bridge(BridgeType, BridgeName, Conf)
|
||||
end;
|
||||
handle_create(?ROOT_KEY_ACTIONS, BridgeType, BridgeName, Conf0);
|
||||
'/actions'(get, _Params) ->
|
||||
Nodes = mria:running_nodes(),
|
||||
NodeReplies = emqx_bridge_proto_v5:v2_list_bridges_on_nodes(Nodes),
|
||||
handle_list(?ROOT_KEY_ACTIONS).
|
||||
|
||||
'/actions/:id'(get, #{bindings := #{id := Id}}) ->
|
||||
?TRY_PARSE_ID(Id, lookup_from_all_nodes(?ROOT_KEY_ACTIONS, BridgeType, BridgeName, 200));
|
||||
'/actions/:id'(put, #{bindings := #{id := Id}, body := Conf0}) ->
|
||||
handle_update(?ROOT_KEY_ACTIONS, Id, Conf0);
|
||||
'/actions/:id'(delete, #{bindings := #{id := Id}, query_string := Qs}) ->
|
||||
handle_delete(?ROOT_KEY_ACTIONS, Id, Qs).
|
||||
|
||||
'/actions/:id/metrics'(get, #{bindings := #{id := Id}}) ->
|
||||
?TRY_PARSE_ID(Id, get_metrics_from_all_nodes(?ROOT_KEY_ACTIONS, BridgeType, BridgeName)).
|
||||
|
||||
'/actions/:id/metrics/reset'(put, #{bindings := #{id := Id}}) ->
|
||||
handle_reset_metrics(?ROOT_KEY_ACTIONS, Id).
|
||||
|
||||
'/actions/:id/enable/:enable'(put, #{bindings := #{id := Id, enable := Enable}}) ->
|
||||
handle_disable_enable(?ROOT_KEY_ACTIONS, Id, Enable).
|
||||
|
||||
'/actions/:id/:operation'(post, #{
|
||||
bindings :=
|
||||
#{id := Id, operation := Op}
|
||||
}) ->
|
||||
handle_operation(?ROOT_KEY_ACTIONS, Id, Op).
|
||||
|
||||
'/nodes/:node/actions/:id/:operation'(post, #{
|
||||
bindings :=
|
||||
#{id := Id, operation := Op, node := Node}
|
||||
}) ->
|
||||
handle_node_operation(?ROOT_KEY_ACTIONS, Node, Id, Op).
|
||||
|
||||
'/actions_probe'(post, Request) ->
|
||||
handle_probe(?ROOT_KEY_ACTIONS, Request).
|
||||
|
||||
'/action_types'(get, _Request) ->
|
||||
?OK(emqx_bridge_v2_schema:action_types()).
|
||||
%%================================================================================
|
||||
%% Sources
|
||||
%%================================================================================
|
||||
'/sources'(post, #{body := #{<<"type">> := BridgeType, <<"name">> := BridgeName} = Conf0}) ->
|
||||
handle_create(?ROOT_KEY_SOURCES, BridgeType, BridgeName, Conf0);
|
||||
'/sources'(get, _Params) ->
|
||||
handle_list(?ROOT_KEY_SOURCES).
|
||||
|
||||
'/sources/:id'(get, #{bindings := #{id := Id}}) ->
|
||||
?TRY_PARSE_ID(Id, lookup_from_all_nodes(?ROOT_KEY_SOURCES, BridgeType, BridgeName, 200));
|
||||
'/sources/:id'(put, #{bindings := #{id := Id}, body := Conf0}) ->
|
||||
handle_update(?ROOT_KEY_SOURCES, Id, Conf0);
|
||||
'/sources/:id'(delete, #{bindings := #{id := Id}, query_string := Qs}) ->
|
||||
handle_delete(?ROOT_KEY_SOURCES, Id, Qs).
|
||||
|
||||
'/sources/:id/metrics'(get, #{bindings := #{id := Id}}) ->
|
||||
?TRY_PARSE_ID(Id, get_metrics_from_all_nodes(?ROOT_KEY_SOURCES, BridgeType, BridgeName)).
|
||||
|
||||
'/sources/:id/metrics/reset'(put, #{bindings := #{id := Id}}) ->
|
||||
handle_reset_metrics(?ROOT_KEY_SOURCES, Id).
|
||||
|
||||
'/sources/:id/enable/:enable'(put, #{bindings := #{id := Id, enable := Enable}}) ->
|
||||
handle_disable_enable(?ROOT_KEY_SOURCES, Id, Enable).
|
||||
|
||||
'/sources/:id/:operation'(post, #{
|
||||
bindings :=
|
||||
#{id := Id, operation := Op}
|
||||
}) ->
|
||||
handle_operation(?ROOT_KEY_SOURCES, Id, Op).
|
||||
|
||||
'/nodes/:node/sources/:id/:operation'(post, #{
|
||||
bindings :=
|
||||
#{id := Id, operation := Op, node := Node}
|
||||
}) ->
|
||||
handle_node_operation(?ROOT_KEY_SOURCES, Node, Id, Op).
|
||||
|
||||
'/sources_probe'(post, Request) ->
|
||||
handle_probe(?ROOT_KEY_SOURCES, Request).
|
||||
|
||||
'/source_types'(get, _Request) ->
|
||||
?OK(emqx_bridge_v2_schema:source_types()).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Handlers
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
handle_list(ConfRootKey) ->
|
||||
Nodes = emqx:running_nodes(),
|
||||
NodeReplies = emqx_bridge_proto_v6:v2_list_bridges_on_nodes_v6(Nodes, ConfRootKey),
|
||||
case is_ok(NodeReplies) of
|
||||
{ok, NodeBridges} ->
|
||||
AllBridges = [
|
||||
[format_resource(Data, Node) || Data <- Bridges]
|
||||
[format_resource(ConfRootKey, Data, Node) || Data <- Bridges]
|
||||
|| {Node, Bridges} <- lists:zip(Nodes, NodeBridges)
|
||||
],
|
||||
?OK(zip_bridges(AllBridges));
|
||||
|
@ -414,34 +760,44 @@ schema("/action_types") ->
            ?INTERNAL_ERROR(Reason)
    end.

'/actions/:id'(get, #{bindings := #{id := Id}}) ->
    ?TRY_PARSE_ID(Id, lookup_from_all_nodes(BridgeType, BridgeName, 200));
'/actions/:id'(put, #{bindings := #{id := Id}, body := Conf0}) ->
handle_create(ConfRootKey, Type, Name, Conf0) ->
    case emqx_bridge_v2:lookup(ConfRootKey, Type, Name) of
        {ok, _} ->
            ?BAD_REQUEST('ALREADY_EXISTS', <<"bridge already exists">>);
        {error, not_found} ->
            Conf = filter_out_request_body(Conf0),
            create_bridge(ConfRootKey, Type, Name, Conf)
    end.

handle_update(ConfRootKey, Id, Conf0) ->
    Conf1 = filter_out_request_body(Conf0),
    ?TRY_PARSE_ID(
        Id,
        case emqx_bridge_v2:lookup(BridgeType, BridgeName) of
        case emqx_bridge_v2:lookup(ConfRootKey, BridgeType, BridgeName) of
            {ok, _} ->
                RawConf = emqx:get_raw_config([bridges, BridgeType, BridgeName], #{}),
                Conf = emqx_utils:deobfuscate(Conf1, RawConf),
                update_bridge(BridgeType, BridgeName, Conf);
                update_bridge(ConfRootKey, BridgeType, BridgeName, Conf);
            {error, not_found} ->
                ?BRIDGE_NOT_FOUND(BridgeType, BridgeName)
        end
    );
'/actions/:id'(delete, #{bindings := #{id := Id}, query_string := Qs}) ->
    ).

handle_delete(ConfRootKey, Id, QueryStringOpts) ->
    ?TRY_PARSE_ID(
        Id,
        case emqx_bridge_v2:lookup(BridgeType, BridgeName) of
        case emqx_bridge_v2:lookup(ConfRootKey, BridgeType, BridgeName) of
            {ok, _} ->
                AlsoDeleteActions =
                    case maps:get(<<"also_delete_dep_actions">>, Qs, <<"false">>) of
                    case maps:get(<<"also_delete_dep_actions">>, QueryStringOpts, <<"false">>) of
                        <<"true">> -> true;
                        true -> true;
                        _ -> false
                    end,
                case
                    emqx_bridge_v2:check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActions)
                    emqx_bridge_v2:check_deps_and_remove(
                        ConfRootKey, BridgeType, BridgeName, AlsoDeleteActions
                    )
                of
                    ok ->
                        ?NO_CONTENT;
@ -465,23 +821,22 @@ schema("/action_types") ->
|
|||
end
|
||||
).
|
||||
|
||||
'/actions/:id/metrics'(get, #{bindings := #{id := Id}}) ->
|
||||
?TRY_PARSE_ID(Id, get_metrics_from_all_nodes(BridgeType, BridgeName)).
|
||||
|
||||
'/actions/:id/metrics/reset'(put, #{bindings := #{id := Id}}) ->
|
||||
handle_reset_metrics(ConfRootKey, Id) ->
|
||||
?TRY_PARSE_ID(
|
||||
Id,
|
||||
begin
|
||||
ActionType = emqx_bridge_v2:bridge_v2_type_to_connector_type(BridgeType),
|
||||
ok = emqx_bridge_v2:reset_metrics(ActionType, BridgeName),
|
||||
ok = emqx_bridge_v2:reset_metrics(ConfRootKey, ActionType, BridgeName),
|
||||
?NO_CONTENT
|
||||
end
|
||||
).
|
||||
|
||||
'/actions/:id/enable/:enable'(put, #{bindings := #{id := Id, enable := Enable}}) ->
|
||||
handle_disable_enable(ConfRootKey, Id, Enable) ->
|
||||
?TRY_PARSE_ID(
|
||||
Id,
|
||||
case emqx_bridge_v2:disable_enable(enable_func(Enable), BridgeType, BridgeName) of
|
||||
case
|
||||
emqx_bridge_v2:disable_enable(ConfRootKey, enable_func(Enable), BridgeType, BridgeName)
|
||||
of
|
||||
{ok, _} ->
|
||||
?NO_CONTENT;
|
||||
{error, {pre_config_update, _, bridge_not_found}} ->
|
||||
|
@ -495,41 +850,42 @@ schema("/action_types") ->
|
|||
end
|
||||
).
|
||||
|
||||
'/actions/:id/:operation'(post, #{
|
||||
bindings :=
|
||||
#{id := Id, operation := Op}
|
||||
}) ->
|
||||
handle_operation(ConfRootKey, Id, Op) ->
|
||||
?TRY_PARSE_ID(
|
||||
Id,
|
||||
begin
|
||||
OperFunc = operation_func(all, Op),
|
||||
Nodes = mria:running_nodes(),
|
||||
call_operation_if_enabled(all, OperFunc, [Nodes, BridgeType, BridgeName])
|
||||
Nodes = emqx:running_nodes(),
|
||||
call_operation_if_enabled(all, OperFunc, [Nodes, ConfRootKey, BridgeType, BridgeName])
|
||||
end
|
||||
).
|
||||
|
||||
'/nodes/:node/actions/:id/:operation'(post, #{
|
||||
bindings :=
|
||||
#{id := Id, operation := Op, node := Node}
|
||||
}) ->
|
||||
handle_node_operation(ConfRootKey, Node, Id, Op) ->
|
||||
?TRY_PARSE_ID(
|
||||
Id,
|
||||
case emqx_utils:safe_to_existing_atom(Node, utf8) of
|
||||
{ok, TargetNode} ->
|
||||
OperFunc = operation_func(TargetNode, Op),
|
||||
call_operation_if_enabled(TargetNode, OperFunc, [TargetNode, BridgeType, BridgeName]);
|
||||
call_operation_if_enabled(TargetNode, OperFunc, [
|
||||
TargetNode, ConfRootKey, BridgeType, BridgeName
|
||||
]);
|
||||
{error, _} ->
|
||||
?NOT_FOUND(<<"Invalid node name: ", Node/binary>>)
|
||||
end
|
||||
).
|
||||
|
||||
'/actions_probe'(post, Request) ->
|
||||
RequestMeta = #{module => ?MODULE, method => post, path => "/actions_probe"},
|
||||
handle_probe(ConfRootKey, Request) ->
|
||||
Path =
|
||||
case ConfRootKey of
|
||||
?ROOT_KEY_ACTIONS -> "/actions_probe";
|
||||
?ROOT_KEY_SOURCES -> "/sources_probe"
|
||||
end,
|
||||
RequestMeta = #{module => ?MODULE, method => post, path => Path},
|
||||
case emqx_dashboard_swagger:filter_check_request_and_translate_body(Request, RequestMeta) of
|
||||
{ok, #{body := #{<<"type">> := ConnType} = Params}} ->
|
||||
{ok, #{body := #{<<"type">> := Type} = Params}} ->
|
||||
Params1 = maybe_deobfuscate_bridge_probe(Params),
|
||||
Params2 = maps:remove(<<"type">>, Params1),
|
||||
case emqx_bridge_v2:create_dry_run(ConnType, Params2) of
|
||||
case emqx_bridge_v2:create_dry_run(ConfRootKey, Type, Params2) of
|
||||
ok ->
|
||||
?NO_CONTENT;
|
||||
{error, #{kind := validation_error} = Reason0} ->
|
||||
|
@ -548,9 +904,7 @@ schema("/action_types") ->
|
|||
redact(BadRequest)
|
||||
end.
|
||||
|
||||
'/action_types'(get, _Request) ->
|
||||
?OK(emqx_bridge_v2_schema:types()).
|
||||
|
||||
%%% API helpers
|
||||
maybe_deobfuscate_bridge_probe(#{<<"type">> := ActionType, <<"name">> := BridgeName} = Params) ->
|
||||
case emqx_bridge_v2:lookup(ActionType, BridgeName) of
|
||||
{ok, #{raw_config := RawConf}} ->
|
||||
|
@ -564,7 +918,6 @@ maybe_deobfuscate_bridge_probe(#{<<"type">> := ActionType, <<"name">> := BridgeN
|
|||
maybe_deobfuscate_bridge_probe(Params) ->
|
||||
Params.
|
||||
|
||||
%%% API helpers
|
||||
is_ok(ok) ->
|
||||
ok;
|
||||
is_ok(OkResult = {ok, _}) ->
|
||||
|
@ -587,9 +940,16 @@ is_ok(ResL) ->
|
|||
end.
|
||||
|
||||
%% bridge helpers
|
||||
lookup_from_all_nodes(BridgeType, BridgeName, SuccCode) ->
|
||||
Nodes = mria:running_nodes(),
|
||||
case is_ok(emqx_bridge_proto_v5:v2_lookup_from_all_nodes(Nodes, BridgeType, BridgeName)) of
|
||||
-spec lookup_from_all_nodes(emqx_bridge_v2:root_cfg_key(), _, _, _) -> _.
|
||||
lookup_from_all_nodes(ConfRootKey, BridgeType, BridgeName, SuccCode) ->
|
||||
Nodes = emqx:running_nodes(),
|
||||
case
|
||||
is_ok(
|
||||
emqx_bridge_proto_v6:v2_lookup_from_all_nodes_v6(
|
||||
Nodes, ConfRootKey, BridgeType, BridgeName
|
||||
)
|
||||
)
|
||||
of
|
||||
{ok, [{ok, _} | _] = Results} ->
|
||||
{SuccCode, format_bridge_info([R || {ok, R} <- Results])};
|
||||
{ok, [{error, not_found} | _]} ->
|
||||
|
@ -598,10 +958,10 @@ lookup_from_all_nodes(BridgeType, BridgeName, SuccCode) ->
|
|||
?INTERNAL_ERROR(Reason)
|
||||
end.
|
||||
|
||||
get_metrics_from_all_nodes(ActionType, ActionName) ->
|
||||
get_metrics_from_all_nodes(ConfRootKey, Type, Name) ->
|
||||
Nodes = emqx:running_nodes(),
|
||||
Result = maybe_unwrap(
|
||||
emqx_bridge_proto_v5:v2_get_metrics_from_all_nodes(Nodes, ActionType, ActionName)
|
||||
emqx_bridge_proto_v6:v2_get_metrics_from_all_nodes_v6(Nodes, ConfRootKey, Type, Name)
|
||||
),
|
||||
case Result of
|
||||
Metrics when is_list(Metrics) ->
|
||||
|
@ -610,22 +970,25 @@ get_metrics_from_all_nodes(ActionType, ActionName) ->
|
|||
?INTERNAL_ERROR(Reason)
|
||||
end.
|
||||
|
||||
operation_func(all, start) -> v2_start_bridge_to_all_nodes;
|
||||
operation_func(_Node, start) -> v2_start_bridge_to_node.
|
||||
operation_func(all, start) -> v2_start_bridge_on_all_nodes_v6;
|
||||
operation_func(_Node, start) -> v2_start_bridge_on_node_v6;
|
||||
operation_func(all, lookup) -> v2_lookup_from_all_nodes_v6;
|
||||
operation_func(all, list) -> v2_list_bridges_on_nodes_v6;
|
||||
operation_func(all, get_metrics) -> v2_get_metrics_from_all_nodes_v6.
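These atoms name the BPAPI functions that are ultimately invoked via apply/3 in do_bpapi_call_vsn/3. A rough, self-contained sketch of that dispatch pattern; the module, function names, and operation set here are illustrative placeholders, not the real mapping:

%% Sketch of name-based dispatch: pick a function atom per operation, then
%% apply it on the given protocol module with the prepared argument list.
-module(op_dispatch_sketch).
-export([call/3]).

op_func(start) -> v2_start_bridge_on_all_nodes_v6;
op_func(lookup) -> v2_lookup_from_all_nodes_v6.

call(ProtoMod, Op, Args) when is_atom(ProtoMod), is_list(Args) ->
    apply(ProtoMod, op_func(Op), Args).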
|
||||
|
||||
call_operation_if_enabled(NodeOrAll, OperFunc, [Nodes, BridgeType, BridgeName]) ->
|
||||
try is_enabled_bridge(BridgeType, BridgeName) of
|
||||
call_operation_if_enabled(NodeOrAll, OperFunc, [Nodes, ConfRootKey, BridgeType, BridgeName]) ->
|
||||
try is_enabled_bridge(ConfRootKey, BridgeType, BridgeName) of
|
||||
false ->
|
||||
?BRIDGE_NOT_ENABLED;
|
||||
true ->
|
||||
call_operation(NodeOrAll, OperFunc, [Nodes, BridgeType, BridgeName])
|
||||
call_operation(NodeOrAll, OperFunc, [Nodes, ConfRootKey, BridgeType, BridgeName])
|
||||
catch
|
||||
throw:not_found ->
|
||||
?BRIDGE_NOT_FOUND(BridgeType, BridgeName)
|
||||
end.
|
||||
|
||||
is_enabled_bridge(BridgeType, BridgeName) ->
|
||||
try emqx_bridge_v2:lookup(BridgeType, binary_to_existing_atom(BridgeName)) of
|
||||
is_enabled_bridge(ConfRootKey, BridgeType, BridgeName) ->
|
||||
try emqx_bridge_v2:lookup(ConfRootKey, BridgeType, binary_to_existing_atom(BridgeName)) of
|
||||
{ok, #{raw_config := ConfMap}} ->
|
||||
maps:get(<<"enable">>, ConfMap, false);
|
||||
{error, not_found} ->
|
||||
|
@ -637,7 +1000,7 @@ is_enabled_bridge(BridgeType, BridgeName) ->
|
|||
throw(not_found)
|
||||
end.
|
||||
|
||||
call_operation(NodeOrAll, OperFunc, Args = [_Nodes, BridgeType, BridgeName]) ->
|
||||
call_operation(NodeOrAll, OperFunc, Args = [_Nodes, _ConfRootKey, BridgeType, BridgeName]) ->
|
||||
case is_ok(do_bpapi_call(NodeOrAll, OperFunc, Args)) of
|
||||
Ok when Ok =:= ok; is_tuple(Ok), element(1, Ok) =:= ok ->
|
||||
?NO_CONTENT;
|
||||
|
@ -668,12 +1031,12 @@ call_operation(NodeOrAll, OperFunc, Args = [_Nodes, BridgeType, BridgeName]) ->
|
|||
|
||||
do_bpapi_call(all, Call, Args) ->
|
||||
maybe_unwrap(
|
||||
do_bpapi_call_vsn(emqx_bpapi:supported_version(emqx_bridge), Call, Args)
|
||||
do_bpapi_call_vsn(emqx_bpapi:supported_version(?BPAPI_NAME), Call, Args)
|
||||
);
|
||||
do_bpapi_call(Node, Call, Args) ->
|
||||
case lists:member(Node, mria:running_nodes()) of
|
||||
case lists:member(Node, emqx:running_nodes()) of
|
||||
true ->
|
||||
do_bpapi_call_vsn(emqx_bpapi:supported_version(Node, emqx_bridge), Call, Args);
|
||||
do_bpapi_call_vsn(emqx_bpapi:supported_version(Node, ?BPAPI_NAME), Call, Args);
|
||||
false ->
|
||||
{error, {node_not_found, Node}}
|
||||
end.
|
||||
|
@ -681,7 +1044,7 @@ do_bpapi_call(Node, Call, Args) ->
|
|||
do_bpapi_call_vsn(Version, Call, Args) ->
|
||||
case is_supported_version(Version, Call) of
|
||||
true ->
|
||||
apply(emqx_bridge_proto_v5, Call, Args);
|
||||
apply(emqx_bridge_proto_v6, Call, Args);
|
||||
false ->
|
||||
{error, not_implemented}
|
||||
end.
|
||||
|
@ -689,7 +1052,12 @@ do_bpapi_call_vsn(Version, Call, Args) ->
|
|||
is_supported_version(Version, Call) ->
    lists:member(Version, supported_versions(Call)).

supported_versions(_Call) -> [5].
supported_versions(_Call) -> bpapi_version_range(6, latest).

%% [From, To] (inclusive on both ends)
bpapi_version_range(From, latest) ->
    ThisNodeVsn = emqx_bpapi:supported_version(node(), ?BPAPI_NAME),
    lists:seq(From, ThisNodeVsn).
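A small, self-contained illustration of what bpapi_version_range(6, latest) evaluates to; the local node's protocol version is hard-coded below purely as an assumed value, whereas the code above obtains it from emqx_bpapi:supported_version/2:

%% If the local node reports protocol version 7, the supported range is [6,7];
%% a peer still on v5 is then rejected with {error, not_implemented}.
-module(bpapi_range_sketch).
-export([range/2, demo/0]).

range(From, ThisNodeVsn) ->
    lists:seq(From, ThisNodeVsn).

demo() ->
    [6, 7] = range(6, 7),
    false = lists:member(5, range(6, 7)),
    ok.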
|
||||
|
||||
maybe_unwrap({error, not_implemented}) ->
|
||||
{error, not_implemented};
|
||||
|
@ -763,7 +1131,15 @@ aggregate_status(AllStatus) ->
|
|||
%% RPC Target
|
||||
lookup_from_local_node(BridgeType, BridgeName) ->
|
||||
case emqx_bridge_v2:lookup(BridgeType, BridgeName) of
|
||||
{ok, Res} -> {ok, format_resource(Res, node())};
|
||||
{ok, Res} -> {ok, format_resource(?ROOT_KEY_ACTIONS, Res, node())};
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
%% RPC Target
|
||||
-spec lookup_from_local_node_v6(emqx_bridge_v2:root_cfg_key(), _, _) -> _.
|
||||
lookup_from_local_node_v6(ConfRootKey, BridgeType, BridgeName) ->
|
||||
case emqx_bridge_v2:lookup(ConfRootKey, BridgeType, BridgeName) of
|
||||
{ok, Res} -> {ok, format_resource(ConfRootKey, Res, node())};
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
|
@ -771,8 +1147,13 @@ lookup_from_local_node(BridgeType, BridgeName) ->
|
|||
get_metrics_from_local_node(ActionType, ActionName) ->
|
||||
format_metrics(emqx_bridge_v2:get_metrics(ActionType, ActionName)).
|
||||
|
||||
%% RPC Target
|
||||
get_metrics_from_local_node_v6(ConfRootKey, Type, Name) ->
|
||||
format_metrics(emqx_bridge_v2:get_metrics(ConfRootKey, Type, Name)).
|
||||
|
||||
%% resource
|
||||
format_resource(
|
||||
ConfRootKey,
|
||||
#{
|
||||
type := Type,
|
||||
name := Name,
|
||||
|
@ -783,7 +1164,7 @@ format_resource(
|
|||
},
|
||||
Node
|
||||
) ->
|
||||
RawConf = fill_defaults(Type, RawConf0),
|
||||
RawConf = fill_defaults(ConfRootKey, Type, RawConf0),
|
||||
redact(
|
||||
maps:merge(
|
||||
RawConf#{
|
||||
|
@ -914,17 +1295,18 @@ aggregate_metrics(
|
|||
M17 + N17
|
||||
).
|
||||
|
||||
fill_defaults(Type, RawConf) ->
    PackedConf = pack_bridge_conf(Type, RawConf),
fill_defaults(ConfRootKey, Type, RawConf) ->
    PackedConf = pack_bridge_conf(ConfRootKey, Type, RawConf),
    FullConf = emqx_config:fill_defaults(emqx_bridge_v2_schema, PackedConf, #{}),
    unpack_bridge_conf(Type, FullConf).
    unpack_bridge_conf(ConfRootKey, Type, FullConf).

pack_bridge_conf(Type, RawConf) ->
    #{<<"actions">> => #{bin(Type) => #{<<"foo">> => RawConf}}}.
pack_bridge_conf(ConfRootKey, Type, RawConf) ->
    #{bin(ConfRootKey) => #{bin(Type) => #{<<"foo">> => RawConf}}}.

unpack_bridge_conf(Type, PackedConf) ->
unpack_bridge_conf(ConfRootKey, Type, PackedConf) ->
    ConfRootKeyBin = bin(ConfRootKey),
    TypeBin = bin(Type),
    #{<<"actions">> := Bridges} = PackedConf,
    #{ConfRootKeyBin := Bridges} = PackedConf,
    #{<<"foo">> := RawConf} = maps:get(TypeBin, Bridges),
    RawConf.
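Packing just nests the raw config under the root key and type with a placeholder bridge name so the whole-schema default filler can run over it. A hedged sketch of that round trip on a literal map; the <<"sources">> root key and the config contents are example values, and the real helpers additionally go through bin/1 and emqx_config:fill_defaults/3:

%% Illustrative pack/unpack round trip with the <<"foo">> placeholder name.
-module(pack_sketch).
-export([demo/0]).

pack(RootKeyBin, TypeBin, RawConf) ->
    #{RootKeyBin => #{TypeBin => #{<<"foo">> => RawConf}}}.

unpack(RootKeyBin, TypeBin, Packed) ->
    #{RootKeyBin := Bridges} = Packed,
    #{<<"foo">> := RawConf} = maps:get(TypeBin, Bridges),
    RawConf.

demo() ->
    Raw = #{<<"connector">> => <<"my_connector">>},
    Packed = pack(<<"sources">>, <<"mqtt">>, Raw),
    Raw = unpack(<<"sources">>, <<"mqtt">>, Packed),
    Packed.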
|
||||
|
||||
|
@ -938,13 +1320,13 @@ format_resource_data(error, Error, Result) ->
|
|||
format_resource_data(K, V, Result) ->
|
||||
Result#{K => V}.
|
||||
|
||||
create_bridge(BridgeType, BridgeName, Conf) ->
|
||||
create_or_update_bridge(BridgeType, BridgeName, Conf, 201).
|
||||
create_bridge(ConfRootKey, BridgeType, BridgeName, Conf) ->
|
||||
create_or_update_bridge(ConfRootKey, BridgeType, BridgeName, Conf, 201).
|
||||
|
||||
update_bridge(BridgeType, BridgeName, Conf) ->
|
||||
create_or_update_bridge(BridgeType, BridgeName, Conf, 200).
|
||||
update_bridge(ConfRootKey, BridgeType, BridgeName, Conf) ->
|
||||
create_or_update_bridge(ConfRootKey, BridgeType, BridgeName, Conf, 200).
|
||||
|
||||
create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) ->
|
||||
create_or_update_bridge(ConfRootKey, BridgeType, BridgeName, Conf, HttpStatusCode) ->
|
||||
Check =
|
||||
try
|
||||
is_binary(BridgeType) andalso emqx_resource:validate_type(BridgeType),
|
||||
|
@ -955,15 +1337,15 @@ create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) ->
|
|||
end,
|
||||
case Check of
|
||||
ok ->
|
||||
do_create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode);
|
||||
do_create_or_update_bridge(ConfRootKey, BridgeType, BridgeName, Conf, HttpStatusCode);
|
||||
BadRequest ->
|
||||
BadRequest
|
||||
end.
|
||||
|
||||
do_create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) ->
|
||||
case emqx_bridge_v2:create(BridgeType, BridgeName, Conf) of
|
||||
do_create_or_update_bridge(ConfRootKey, BridgeType, BridgeName, Conf, HttpStatusCode) ->
|
||||
case emqx_bridge_v2:create(ConfRootKey, BridgeType, BridgeName, Conf) of
|
||||
{ok, _} ->
|
||||
lookup_from_all_nodes(BridgeType, BridgeName, HttpStatusCode);
|
||||
lookup_from_all_nodes(ConfRootKey, BridgeType, BridgeName, HttpStatusCode);
|
||||
{error, {PreOrPostConfigUpdate, _HandlerMod, Reason}} when
|
||||
PreOrPostConfigUpdate =:= pre_config_update;
|
||||
PreOrPostConfigUpdate =:= post_config_update
|
||||
|
|
|
@ -0,0 +1,196 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_proto_v6).
|
||||
|
||||
-behaviour(emqx_bpapi).
|
||||
|
||||
-export([
|
||||
introduced_in/0,
|
||||
|
||||
list_bridges_on_nodes/1,
|
||||
restart_bridge_to_node/3,
|
||||
start_bridge_to_node/3,
|
||||
stop_bridge_to_node/3,
|
||||
lookup_from_all_nodes/3,
|
||||
get_metrics_from_all_nodes/3,
|
||||
restart_bridges_to_all_nodes/3,
|
||||
start_bridges_to_all_nodes/3,
|
||||
stop_bridges_to_all_nodes/3,
|
||||
|
||||
%% introduced in v6
|
||||
v2_lookup_from_all_nodes_v6/4,
|
||||
v2_list_bridges_on_nodes_v6/2,
|
||||
v2_get_metrics_from_all_nodes_v6/4,
|
||||
v2_start_bridge_on_node_v6/4,
|
||||
v2_start_bridge_on_all_nodes_v6/4
|
||||
]).
|
||||
|
||||
-include_lib("emqx/include/bpapi.hrl").
|
||||
|
||||
-define(TIMEOUT, 15000).
|
||||
|
||||
introduced_in() ->
|
||||
"5.5.0".
|
||||
|
||||
-spec list_bridges_on_nodes([node()]) ->
|
||||
emqx_rpc:erpc_multicall([emqx_resource:resource_data()]).
|
||||
list_bridges_on_nodes(Nodes) ->
|
||||
erpc:multicall(Nodes, emqx_bridge, list, [], ?TIMEOUT).
|
||||
|
||||
-type key() :: atom() | binary() | [byte()].
|
||||
|
||||
-spec restart_bridge_to_node(node(), key(), key()) ->
|
||||
term().
|
||||
restart_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||
rpc:call(
|
||||
Node,
|
||||
emqx_bridge_resource,
|
||||
restart,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec start_bridge_to_node(node(), key(), key()) ->
|
||||
term().
|
||||
start_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||
rpc:call(
|
||||
Node,
|
||||
emqx_bridge_resource,
|
||||
start,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec stop_bridge_to_node(node(), key(), key()) ->
|
||||
term().
|
||||
stop_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||
rpc:call(
|
||||
Node,
|
||||
emqx_bridge_resource,
|
||||
stop,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec restart_bridges_to_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall(ok).
|
||||
restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_resource,
|
||||
restart,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec start_bridges_to_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall(ok).
|
||||
start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_resource,
|
||||
start,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec stop_bridges_to_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall(ok).
|
||||
stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_resource,
|
||||
stop,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec lookup_from_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall(term()).
|
||||
lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_api,
|
||||
lookup_from_local_node,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec get_metrics_from_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall(emqx_metrics_worker:metrics()).
|
||||
get_metrics_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_api,
|
||||
get_metrics_from_local_node,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
%%--------------------------------------------------------------------------------
|
||||
%% introduced in v6
|
||||
%%--------------------------------------------------------------------------------
|
||||
|
||||
%% V2 Calls
|
||||
-spec v2_lookup_from_all_nodes_v6([node()], emqx_bridge_v2:root_cfg_key(), key(), key()) ->
|
||||
emqx_rpc:erpc_multicall(term()).
|
||||
v2_lookup_from_all_nodes_v6(Nodes, ConfRootKey, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_v2_api,
|
||||
lookup_from_local_node_v6,
|
||||
[ConfRootKey, BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec v2_list_bridges_on_nodes_v6([node()], emqx_bridge_v2:root_cfg_key()) ->
|
||||
emqx_rpc:erpc_multicall([emqx_resource:resource_data()]).
|
||||
v2_list_bridges_on_nodes_v6(Nodes, ConfRootKey) ->
|
||||
erpc:multicall(Nodes, emqx_bridge_v2, list, [ConfRootKey], ?TIMEOUT).
|
||||
|
||||
-spec v2_get_metrics_from_all_nodes_v6([node()], emqx_bridge_v2:root_cfg_key(), key(), key()) ->
|
||||
emqx_rpc:erpc_multicall(term()).
|
||||
v2_get_metrics_from_all_nodes_v6(Nodes, ConfRootKey, ActionType, ActionName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_v2_api,
|
||||
get_metrics_from_local_node_v6,
|
||||
[ConfRootKey, ActionType, ActionName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec v2_start_bridge_on_all_nodes_v6([node()], emqx_bridge_v2:root_cfg_key(), key(), key()) ->
|
||||
emqx_rpc:erpc_multicall(ok).
|
||||
v2_start_bridge_on_all_nodes_v6(Nodes, ConfRootKey, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_v2,
|
||||
start,
|
||||
[ConfRootKey, BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec v2_start_bridge_on_node_v6(node(), emqx_bridge_v2:root_cfg_key(), key(), key()) ->
|
||||
term().
|
||||
v2_start_bridge_on_node_v6(Node, ConfRootKey, BridgeType, BridgeName) ->
|
||||
rpc:call(
|
||||
Node,
|
||||
emqx_bridge_v2,
|
||||
start,
|
||||
[ConfRootKey, BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
|
@ -126,6 +126,7 @@ common_bridge_fields() ->
|
|||
default => true
|
||||
}
|
||||
)},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
%% Create a v2 connector, then use the v1 /bridges_probe API to test the connector
|
||||
%% /bridges_probe should pass through v2 connector's description.
|
||||
{description, emqx_schema:description_schema()}
|
||||
|
|
|
@ -28,22 +28,33 @@
|
|||
-export([roots/0, fields/1, desc/1, namespace/0, tags/0]).
|
||||
|
||||
-export([
|
||||
get_response/0,
|
||||
put_request/0,
|
||||
post_request/0,
|
||||
examples/1,
|
||||
actions_get_response/0,
|
||||
actions_put_request/0,
|
||||
actions_post_request/0,
|
||||
actions_examples/1,
|
||||
action_values/4
|
||||
]).
|
||||
|
||||
-export([
|
||||
sources_get_response/0,
|
||||
sources_put_request/0,
|
||||
sources_post_request/0,
|
||||
sources_examples/1,
|
||||
source_values/4
|
||||
]).
|
||||
|
||||
%% Exported for mocking
|
||||
%% TODO: refactor emqx_bridge_v1_compatibility_layer_SUITE so we don't need to
|
||||
%% export this
|
||||
-export([
|
||||
registered_api_schemas/1
|
||||
registered_actions_api_schemas/1,
|
||||
registered_sources_api_schemas/1
|
||||
]).
|
||||
|
||||
-export([types/0, types_sc/0]).
|
||||
-export([resource_opts_fields/0, resource_opts_fields/1]).
|
||||
-export([action_types/0, action_types_sc/0]).
|
||||
-export([source_types/0, source_types_sc/0]).
|
||||
-export([action_resource_opts_fields/0, action_resource_opts_fields/1]).
|
||||
-export([source_resource_opts_fields/0, source_resource_opts_fields/1]).
|
||||
|
||||
-export([
|
||||
api_fields/3
|
||||
|
@ -53,38 +64,146 @@
|
|||
make_producer_action_schema/1, make_producer_action_schema/2,
|
||||
make_consumer_action_schema/1, make_consumer_action_schema/2,
|
||||
top_level_common_action_keys/0,
|
||||
project_to_actions_resource_opts/1
|
||||
project_to_actions_resource_opts/1,
|
||||
project_to_sources_resource_opts/1
|
||||
]).
|
||||
|
||||
-export([actions_convert_from_connectors/1]).
|
||||
|
||||
-export_type([action_type/0]).
|
||||
-export_type([action_type/0, source_type/0]).
|
||||
|
||||
%% Should we explicitly list them here so dialyzer may be more helpful?
|
||||
-type action_type() :: atom().
|
||||
-type source_type() :: atom().
|
||||
-type http_method() :: get | post | put.
|
||||
-type schema_example_map() :: #{atom() => term()}.
|
||||
|
||||
%%======================================================================================
|
||||
%% For HTTP APIs
|
||||
get_response() ->
|
||||
api_schema("get").
|
||||
%%======================================================================================
|
||||
|
||||
put_request() ->
|
||||
api_schema("put").
|
||||
%%---------------------------------------------
|
||||
%% Actions
|
||||
%%---------------------------------------------
|
||||
|
||||
post_request() ->
|
||||
api_schema("post").
|
||||
actions_get_response() ->
|
||||
actions_api_schema("get").
|
||||
|
||||
api_schema(Method) ->
|
||||
APISchemas = ?MODULE:registered_api_schemas(Method),
|
||||
actions_put_request() ->
|
||||
actions_api_schema("put").
|
||||
|
||||
actions_post_request() ->
|
||||
actions_api_schema("post").
|
||||
|
||||
actions_api_schema(Method) ->
|
||||
APISchemas = ?MODULE:registered_actions_api_schemas(Method),
|
||||
hoconsc:union(bridge_api_union(APISchemas)).
|
||||
|
||||
registered_api_schemas(Method) ->
|
||||
RegisteredSchemas = emqx_action_info:registered_schema_modules(),
|
||||
registered_actions_api_schemas(Method) ->
|
||||
RegisteredSchemas = emqx_action_info:registered_schema_modules_actions(),
|
||||
[
|
||||
api_ref(SchemaModule, atom_to_binary(BridgeV2Type), Method ++ "_bridge_v2")
|
||||
|| {BridgeV2Type, SchemaModule} <- RegisteredSchemas
|
||||
].
|
||||
|
||||
-spec action_values(http_method(), atom(), atom(), schema_example_map()) -> schema_example_map().
|
||||
action_values(Method, ActionType, ConnectorType, ActionValues) ->
|
||||
ActionTypeBin = atom_to_binary(ActionType),
|
||||
ConnectorTypeBin = atom_to_binary(ConnectorType),
|
||||
lists:foldl(
|
||||
fun(M1, M2) ->
|
||||
maps:merge(M1, M2)
|
||||
end,
|
||||
#{
|
||||
enable => true,
|
||||
description => <<"My example ", ActionTypeBin/binary, " action">>,
|
||||
connector => <<ConnectorTypeBin/binary, "_connector">>,
|
||||
resource_opts => #{
|
||||
health_check_interval => "30s"
|
||||
}
|
||||
},
|
||||
[
|
||||
ActionValues,
|
||||
method_values(action, Method, ActionType)
|
||||
]
|
||||
).
|
||||
|
||||
actions_examples(Method) ->
|
||||
MergeFun =
|
||||
fun(Example, Examples) ->
|
||||
maps:merge(Examples, Example)
|
||||
end,
|
||||
Fun =
|
||||
fun(Module, Examples) ->
|
||||
ConnectorExamples = erlang:apply(Module, bridge_v2_examples, [Method]),
|
||||
lists:foldl(MergeFun, Examples, ConnectorExamples)
|
||||
end,
|
||||
SchemaModules = [Mod || {_, Mod} <- emqx_action_info:registered_schema_modules_actions()],
|
||||
lists:foldl(Fun, #{}, SchemaModules).
|
||||
|
||||
%%---------------------------------------------
|
||||
%% Sources
|
||||
%%---------------------------------------------
|
||||
|
||||
sources_get_response() ->
|
||||
sources_api_schema("get").
|
||||
|
||||
sources_put_request() ->
|
||||
sources_api_schema("put").
|
||||
|
||||
sources_post_request() ->
|
||||
sources_api_schema("post").
|
||||
|
||||
sources_api_schema(Method) ->
|
||||
APISchemas = ?MODULE:registered_sources_api_schemas(Method),
|
||||
hoconsc:union(bridge_api_union(APISchemas)).
|
||||
|
||||
registered_sources_api_schemas(Method) ->
|
||||
RegisteredSchemas = emqx_action_info:registered_schema_modules_sources(),
|
||||
[
|
||||
api_ref(SchemaModule, atom_to_binary(BridgeV2Type), Method ++ "_source")
|
||||
|| {BridgeV2Type, SchemaModule} <- RegisteredSchemas
|
||||
].
|
||||
|
||||
-spec source_values(http_method(), atom(), atom(), schema_example_map()) -> schema_example_map().
|
||||
source_values(Method, SourceType, ConnectorType, SourceValues) ->
|
||||
SourceTypeBin = atom_to_binary(SourceType),
|
||||
ConnectorTypeBin = atom_to_binary(ConnectorType),
|
||||
lists:foldl(
|
||||
fun(M1, M2) ->
|
||||
maps:merge(M1, M2)
|
||||
end,
|
||||
#{
|
||||
enable => true,
|
||||
description => <<"My example ", SourceTypeBin/binary, " source">>,
|
||||
connector => <<ConnectorTypeBin/binary, "_connector">>,
|
||||
resource_opts => #{
|
||||
health_check_interval => <<"30s">>
|
||||
}
|
||||
},
|
||||
[
|
||||
SourceValues,
|
||||
method_values(source, Method, SourceType)
|
||||
]
|
||||
).
|
||||
|
||||
sources_examples(Method) ->
|
||||
MergeFun =
|
||||
fun(Example, Examples) ->
|
||||
maps:merge(Examples, Example)
|
||||
end,
|
||||
Fun =
|
||||
fun(Module, Examples) ->
|
||||
ConnectorExamples = erlang:apply(Module, source_examples, [Method]),
|
||||
lists:foldl(MergeFun, Examples, ConnectorExamples)
|
||||
end,
|
||||
SchemaModules = [Mod || {_, Mod} <- emqx_action_info:registered_schema_modules_sources()],
|
||||
lists:foldl(Fun, #{}, SchemaModules).
|
||||
|
||||
%%---------------------------------------------
|
||||
%% Common helpers
|
||||
%%---------------------------------------------
|
||||
|
||||
api_ref(Module, Type, Method) ->
|
||||
{Type, ref(Module, Method)}.
|
||||
|
||||
|
@ -111,41 +230,17 @@ bridge_api_union(Refs) ->
|
|||
end
|
||||
end.
|
||||
|
||||
-type http_method() :: get | post | put.
|
||||
-type schema_example_map() :: #{atom() => term()}.
|
||||
|
||||
-spec action_values(http_method(), atom(), atom(), schema_example_map()) -> schema_example_map().
|
||||
action_values(Method, ActionType, ConnectorType, ActionValues) ->
|
||||
ActionTypeBin = atom_to_binary(ActionType),
|
||||
ConnectorTypeBin = atom_to_binary(ConnectorType),
|
||||
lists:foldl(
|
||||
fun(M1, M2) ->
|
||||
maps:merge(M1, M2)
|
||||
end,
|
||||
#{
|
||||
enable => true,
|
||||
description => <<"My example ", ActionTypeBin/binary, " action">>,
|
||||
connector => <<ConnectorTypeBin/binary, "_connector">>,
|
||||
resource_opts => #{
|
||||
health_check_interval => "30s"
|
||||
}
|
||||
},
|
||||
[
|
||||
ActionValues,
|
||||
method_values(Method, ActionType)
|
||||
]
|
||||
).
|
||||
|
||||
-spec method_values(http_method(), atom()) -> schema_example_map().
method_values(post, Type) ->
-spec method_values(action | source, http_method(), atom()) -> schema_example_map().
method_values(Kind, post, Type) ->
    KindBin = atom_to_binary(Kind),
    TypeBin = atom_to_binary(Type),
    #{
        name => <<TypeBin/binary, "_action">>,
        name => <<TypeBin/binary, "_", KindBin/binary>>,
        type => TypeBin
    };
method_values(get, Type) ->
method_values(Kind, get, Type) ->
    maps:merge(
        method_values(post, Type),
        method_values(Kind, post, Type),
        #{
            status => <<"connected">>,
            node_status => [

@ -156,7 +251,7 @@ method_values(get, Type) ->
            ]
        }
    );
method_values(put, _Type) ->
method_values(_Kind, put, _Type) ->
    #{}.
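To see what the new three-argument form produces for the POST examples, here is a self-contained restatement of just the post clause with an assertion; the mqtt type is an illustrative choice, not a claim about which types are registered:

%% Re-states the post clause to show the generated name/type example values.
-module(method_values_sketch).
-export([demo/0]).

post_values(Kind, Type) ->
    KindBin = atom_to_binary(Kind),
    TypeBin = atom_to_binary(Type),
    #{name => <<TypeBin/binary, "_", KindBin/binary>>, type => TypeBin}.

demo() ->
    #{name := <<"mqtt_source">>, type := <<"mqtt">>} = post_values(source, mqtt),
    ok.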
|
||||
|
||||
api_fields("get_bridge_v2", Type, Fields) ->
|
||||
|
@ -175,20 +270,38 @@ api_fields("post_bridge_v2", Type, Fields) ->
|
|||
]
|
||||
);
|
||||
api_fields("put_bridge_v2", _Type, Fields) ->
|
||||
Fields;
|
||||
api_fields("get_source", Type, Fields) ->
|
||||
lists:append(
|
||||
[
|
||||
emqx_bridge_schema:type_and_name_fields(Type),
|
||||
emqx_bridge_schema:status_fields(),
|
||||
Fields
|
||||
]
|
||||
);
|
||||
api_fields("post_source", Type, Fields) ->
|
||||
lists:append(
|
||||
[
|
||||
emqx_bridge_schema:type_and_name_fields(Type),
|
||||
Fields
|
||||
]
|
||||
);
|
||||
api_fields("put_source", _Type, Fields) ->
|
||||
Fields.
|
||||
|
||||
%%======================================================================================
|
||||
%% HOCON Schema Callbacks
|
||||
%%======================================================================================
|
||||
|
||||
namespace() -> "actions".
|
||||
namespace() -> "actions_and_sources".
|
||||
|
||||
tags() ->
|
||||
[<<"Actions">>].
|
||||
[<<"Actions">>, <<"Sources">>].
|
||||
|
||||
-dialyzer({nowarn_function, roots/0}).
|
||||
|
||||
roots() ->
|
||||
ActionsRoot =
|
||||
case fields(actions) of
|
||||
[] ->
|
||||
[
|
||||
|
@ -197,38 +310,66 @@ roots() ->
|
|||
];
|
||||
_ ->
|
||||
[{actions, ?HOCON(?R_REF(actions), #{importance => ?IMPORTANCE_LOW})}]
|
||||
end.
|
||||
end,
|
||||
SourcesRoot =
|
||||
[{sources, ?HOCON(?R_REF(sources), #{importance => ?IMPORTANCE_LOW})}],
|
||||
ActionsRoot ++ SourcesRoot.
|
||||
|
||||
fields(actions) ->
|
||||
registered_schema_fields();
|
||||
fields(resource_opts) ->
|
||||
resource_opts_fields(_Overrides = []).
|
||||
registered_schema_fields_actions();
|
||||
fields(sources) ->
|
||||
registered_schema_fields_sources();
|
||||
fields(action_resource_opts) ->
|
||||
action_resource_opts_fields(_Overrides = []);
|
||||
fields(source_resource_opts) ->
|
||||
source_resource_opts_fields(_Overrides = []).
|
||||
|
||||
registered_schema_fields() ->
|
||||
registered_schema_fields_actions() ->
|
||||
[
|
||||
Module:fields(action)
|
||||
|| {_BridgeV2Type, Module} <- emqx_action_info:registered_schema_modules()
|
||||
|| {_BridgeV2Type, Module} <- emqx_action_info:registered_schema_modules_actions()
|
||||
].
|
||||
|
||||
registered_schema_fields_sources() ->
|
||||
[
|
||||
Module:fields(source)
|
||||
|| {_BridgeV2Type, Module} <- emqx_action_info:registered_schema_modules_sources()
|
||||
].
|
||||
|
||||
desc(actions) ->
|
||||
?DESC("desc_bridges_v2");
|
||||
desc(resource_opts) ->
|
||||
desc(sources) ->
|
||||
?DESC("desc_sources");
|
||||
desc(action_resource_opts) ->
|
||||
?DESC(emqx_resource_schema, "resource_opts");
|
||||
desc(source_resource_opts) ->
|
||||
?DESC(emqx_resource_schema, "resource_opts");
|
||||
desc(_) ->
|
||||
undefined.
|
||||
|
||||
-spec types() -> [action_type()].
|
||||
types() ->
|
||||
-spec action_types() -> [action_type()].
|
||||
action_types() ->
|
||||
proplists:get_keys(?MODULE:fields(actions)).
|
||||
|
||||
-spec types_sc() -> ?ENUM([action_type()]).
|
||||
types_sc() ->
|
||||
hoconsc:enum(types()).
|
||||
-spec action_types_sc() -> ?ENUM([action_type()]).
|
||||
action_types_sc() ->
|
||||
hoconsc:enum(action_types()).
|
||||
|
||||
resource_opts_fields() ->
|
||||
resource_opts_fields(_Overrides = []).
|
||||
-spec source_types() -> [source_type()].
|
||||
source_types() ->
|
||||
proplists:get_keys(?MODULE:fields(sources)).
|
||||
|
||||
common_resource_opts_subfields() ->
|
||||
-spec source_types_sc() -> ?ENUM([source_type()]).
|
||||
source_types_sc() ->
|
||||
hoconsc:enum(source_types()).
|
||||
|
||||
action_resource_opts_fields() ->
|
||||
action_resource_opts_fields(_Overrides = []).
|
||||
|
||||
source_resource_opts_fields() ->
|
||||
source_resource_opts_fields(_Overrides = []).
|
||||
|
||||
common_action_resource_opts_subfields() ->
|
||||
[
|
||||
batch_size,
|
||||
batch_time,
|
||||
|
@ -244,32 +385,36 @@ common_resource_opts_subfields() ->
|
|||
worker_pool_size
|
||||
].
|
||||
|
||||
common_resource_opts_subfields_bin() ->
|
||||
lists:map(fun atom_to_binary/1, common_resource_opts_subfields()).
|
||||
common_source_resource_opts_subfields() ->
|
||||
[
|
||||
health_check_interval,
|
||||
resume_interval
|
||||
].
|
||||
|
||||
resource_opts_fields(Overrides) ->
|
||||
ActionROFields = common_resource_opts_subfields(),
|
||||
common_action_resource_opts_subfields_bin() ->
|
||||
lists:map(fun atom_to_binary/1, common_action_resource_opts_subfields()).
|
||||
|
||||
common_source_resource_opts_subfields_bin() ->
|
||||
lists:map(fun atom_to_binary/1, common_source_resource_opts_subfields()).
|
||||
|
||||
action_resource_opts_fields(Overrides) ->
|
||||
ActionROFields = common_action_resource_opts_subfields(),
|
||||
lists:filter(
|
||||
fun({Key, _Sc}) -> lists:member(Key, ActionROFields) end,
|
||||
emqx_resource_schema:create_opts(Overrides)
|
||||
).
|
||||
|
||||
examples(Method) ->
|
||||
MergeFun =
|
||||
fun(Example, Examples) ->
|
||||
maps:merge(Examples, Example)
|
||||
end,
|
||||
Fun =
|
||||
fun(Module, Examples) ->
|
||||
ConnectorExamples = erlang:apply(Module, bridge_v2_examples, [Method]),
|
||||
lists:foldl(MergeFun, Examples, ConnectorExamples)
|
||||
end,
|
||||
SchemaModules = [Mod || {_, Mod} <- emqx_action_info:registered_schema_modules()],
|
||||
lists:foldl(Fun, #{}, SchemaModules).
|
||||
source_resource_opts_fields(Overrides) ->
|
||||
ActionROFields = common_source_resource_opts_subfields(),
|
||||
lists:filter(
|
||||
fun({Key, _Sc}) -> lists:member(Key, ActionROFields) end,
|
||||
emqx_resource_schema:create_opts(Overrides)
|
||||
).
|
||||
|
||||
top_level_common_action_keys() ->
|
||||
[
|
||||
<<"connector">>,
|
||||
<<"tags">>,
|
||||
<<"description">>,
|
||||
<<"enable">>,
|
||||
<<"local_topic">>,
|
||||
|
@ -285,24 +430,12 @@ make_producer_action_schema(ActionParametersRef) ->
|
|||
make_producer_action_schema(ActionParametersRef, _Opts = #{}).
|
||||
|
||||
make_producer_action_schema(ActionParametersRef, Opts) ->
|
||||
ResourceOptsRef = maps:get(resource_opts_ref, Opts, ref(?MODULE, action_resource_opts)),
|
||||
[
|
||||
{local_topic, mk(binary(), #{required => false, desc => ?DESC(mqtt_topic)})}
|
||||
| make_consumer_action_schema(ActionParametersRef, Opts)
|
||||
].
|
||||
|
||||
make_consumer_action_schema(ActionParametersRef) ->
|
||||
make_consumer_action_schema(ActionParametersRef, _Opts = #{}).
|
||||
|
||||
make_consumer_action_schema(ActionParametersRef, Opts) ->
|
||||
ResourceOptsRef = maps:get(resource_opts_ref, Opts, ref(?MODULE, resource_opts)),
|
||||
| common_schema(ActionParametersRef, Opts)
|
||||
] ++
|
||||
[
|
||||
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
|
||||
{connector,
|
||||
mk(binary(), #{
|
||||
desc => ?DESC(emqx_connector_schema, "connector_field"), required => true
|
||||
})},
|
||||
{description, emqx_schema:description_schema()},
|
||||
{parameters, ActionParametersRef},
|
||||
{resource_opts,
|
||||
mk(ResourceOptsRef, #{
|
||||
default => #{},
|
||||
|
@ -310,8 +443,38 @@ make_consumer_action_schema(ActionParametersRef, Opts) ->
|
|||
})}
|
||||
].
|
||||
|
||||
make_consumer_action_schema(ParametersRef) ->
|
||||
make_consumer_action_schema(ParametersRef, _Opts = #{}).
|
||||
|
||||
make_consumer_action_schema(ParametersRef, Opts) ->
|
||||
ResourceOptsRef = maps:get(resource_opts_ref, Opts, ref(?MODULE, source_resource_opts)),
|
||||
common_schema(ParametersRef, Opts) ++
|
||||
[
|
||||
{resource_opts,
|
||||
mk(ResourceOptsRef, #{
|
||||
default => #{},
|
||||
desc => ?DESC(emqx_resource_schema, "resource_opts")
|
||||
})}
|
||||
].
|
||||
|
||||
common_schema(ParametersRef, _Opts) ->
|
||||
[
|
||||
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
|
||||
{connector,
|
||||
mk(binary(), #{
|
||||
desc => ?DESC(emqx_connector_schema, "connector_field"), required => true
|
||||
})},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
{description, emqx_schema:description_schema()},
|
||||
{parameters, ParametersRef}
|
||||
].
|
||||
|
||||
project_to_actions_resource_opts(OldResourceOpts) ->
|
||||
Subfields = common_resource_opts_subfields_bin(),
|
||||
Subfields = common_action_resource_opts_subfields_bin(),
|
||||
maps:with(Subfields, OldResourceOpts).
|
||||
|
||||
project_to_sources_resource_opts(OldResourceOpts) ->
|
||||
Subfields = common_source_resource_opts_subfields_bin(),
|
||||
maps:with(Subfields, OldResourceOpts).
|
||||
|
||||
actions_convert_from_connectors(RawConf = #{<<"actions">> := Actions}) ->
|
||||
|
@ -367,12 +530,17 @@ is_bad_schema(#{type := ?MAP(_, ?R_REF(Module, TypeName))}) ->
|
|||
case MissingFields of
|
||||
[] ->
|
||||
false;
|
||||
_ ->
|
||||
%% elasticsearch is new and doesn't have local_topic
|
||||
case MissingFields of
|
||||
[local_topic] when Module =:= emqx_bridge_es -> false;
|
||||
_ ->
|
||||
{true, #{
|
||||
schema_module => Module,
|
||||
type_name => TypeName,
|
||||
missing_fields => MissingFields
|
||||
}}
|
||||
end
|
||||
end.
|
||||
|
||||
-endif.
|
||||
|
|
|
@ -32,7 +32,9 @@ init_per_suite(Config) ->
|
|||
emqx_conf,
|
||||
emqx_connector,
|
||||
emqx_bridge_http,
|
||||
emqx_bridge
|
||||
emqx_bridge_mqtt,
|
||||
emqx_bridge,
|
||||
emqx_rule_engine
|
||||
],
|
||||
#{work_dir => ?config(priv_dir, Config)}
|
||||
),
|
||||
|
@ -154,14 +156,18 @@ setup_fake_telemetry_data() ->
|
|||
ok.
|
||||
|
||||
t_update_ssl_conf(Config) ->
|
||||
Path = proplists:get_value(config_path, Config),
|
||||
CertDir = filename:join([emqx:mutable_certs_dir() | Path]),
|
||||
[_Root, Type, Name] = proplists:get_value(config_path, Config),
|
||||
CertDir = filename:join([emqx:mutable_certs_dir(), connectors, Type, Name]),
|
||||
EnableSSLConf = #{
|
||||
<<"bridge_mode">> => false,
|
||||
<<"clean_start">> => true,
|
||||
<<"keepalive">> => <<"60s">>,
|
||||
<<"proto_ver">> => <<"v4">>,
|
||||
<<"server">> => <<"127.0.0.1:1883">>,
|
||||
<<"egress">> => #{
|
||||
<<"local">> => #{<<"topic">> => <<"t">>},
|
||||
<<"remote">> => #{<<"topic">> => <<"remote/t">>}
|
||||
},
|
||||
<<"ssl">> =>
|
||||
#{
|
||||
<<"cacertfile">> => cert_file("cafile"),
|
||||
|
@ -171,10 +177,15 @@ t_update_ssl_conf(Config) ->
|
|||
<<"verify">> => <<"verify_peer">>
|
||||
}
|
||||
},
|
||||
{ok, _} = emqx:update_config(Path, EnableSSLConf),
|
||||
CreateCfg = [
|
||||
{bridge_name, Name},
|
||||
{bridge_type, Type},
|
||||
{bridge_config, #{}}
|
||||
],
|
||||
{ok, _} = emqx_bridge_testlib:create_bridge_api(CreateCfg, EnableSSLConf),
|
||||
?assertMatch({ok, [_, _, _]}, file:list_dir(CertDir)),
|
||||
NoSSLConf = EnableSSLConf#{<<"ssl">> := #{<<"enable">> => false}},
|
||||
{ok, _} = emqx:update_config(Path, NoSSLConf),
|
||||
{ok, _} = emqx_bridge_testlib:update_bridge_api(CreateCfg, NoSSLConf),
|
||||
{ok, _} = emqx_tls_certfile_gc:force(),
|
||||
?assertMatch({error, enoent}, file:list_dir(CertDir)),
|
||||
ok.
|
||||
|
|
|
@ -160,8 +160,9 @@ end_per_group(_, Config) ->
|
|||
|
||||
init_per_testcase(t_broken_bpapi_vsn, Config) ->
|
||||
meck:new(emqx_bpapi, [passthrough]),
|
||||
meck:expect(emqx_bpapi, supported_version, 1, -1),
|
||||
meck:expect(emqx_bpapi, supported_version, 2, -1),
|
||||
meck:new(emqx_bridge_api, [passthrough]),
|
||||
meck:expect(emqx_bridge_api, supported_versions, 1, []),
|
||||
init_per_testcase(common, Config);
|
||||
init_per_testcase(t_old_bpapi_vsn, Config) ->
|
||||
meck:new(emqx_bpapi, [passthrough]),
|
||||
|
@ -173,10 +174,10 @@ init_per_testcase(_, Config) ->
|
|||
[{port, Port}, {sock, Sock}, {acceptor, Acceptor} | Config].
|
||||
|
||||
end_per_testcase(t_broken_bpapi_vsn, Config) ->
|
||||
meck:unload([emqx_bpapi]),
|
||||
meck:unload(),
|
||||
end_per_testcase(common, Config);
|
||||
end_per_testcase(t_old_bpapi_vsn, Config) ->
|
||||
meck:unload([emqx_bpapi]),
|
||||
meck:unload(),
|
||||
end_per_testcase(common, Config);
|
||||
end_per_testcase(_, Config) ->
|
||||
Sock = ?config(sock, Config),
|
||||
|
@ -188,18 +189,7 @@ end_per_testcase(_, Config) ->
|
|||
ok.
|
||||
|
||||
clear_resources() ->
|
||||
lists:foreach(
|
||||
fun(#{type := Type, name := Name}) ->
|
||||
ok = emqx_bridge_v2:remove(Type, Name)
|
||||
end,
|
||||
emqx_bridge_v2:list()
|
||||
),
|
||||
lists:foreach(
|
||||
fun(#{type := Type, name := Name}) ->
|
||||
ok = emqx_connector:remove(Type, Name)
|
||||
end,
|
||||
emqx_connector:list()
|
||||
),
|
||||
emqx_bridge_v2_testlib:delete_all_bridges_and_connectors(),
|
||||
lists:foreach(
|
||||
fun(#{type := Type, name := Name}) ->
|
||||
ok = emqx_bridge:remove(Type, Name)
|
||||
|
@ -1026,9 +1016,11 @@ t_with_redact_update(Config) ->
|
|||
BridgeConf = emqx_utils:redact(Template),
|
||||
BridgeID = emqx_bridge_resource:bridge_id(Type, Name),
|
||||
{ok, 200, _} = request(put, uri(["bridges", BridgeID]), BridgeConf, Config),
|
||||
%% bridge is migrated after creation
|
||||
ConfigRootKey = connectors,
|
||||
?assertEqual(
|
||||
Password,
|
||||
get_raw_config([bridges, Type, Name, password], Config)
|
||||
get_raw_config([ConfigRootKey, Type, Name, password], Config)
|
||||
),
|
||||
|
||||
%% probe with new password; should not be considered redacted
|
||||
|
|
|
@ -196,20 +196,10 @@ delete_bridge_http_api_v1(Opts) ->
|
|||
op_bridge_api(Op, BridgeType, BridgeName) ->
|
||||
BridgeId = emqx_bridge_resource:bridge_id(BridgeType, BridgeName),
|
||||
Path = emqx_mgmt_api_test_util:api_path(["bridges", BridgeId, Op]),
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
Opts = #{return_all => true},
|
||||
ct:pal("calling bridge ~p (via http): ~p", [BridgeId, Op]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, "", Opts) of
|
||||
{ok, {Status = {_, 204, _}, Headers, Body}} ->
|
||||
{ok, {Status, Headers, Body}};
|
||||
{ok, {Status, Headers, Body}} ->
|
||||
{ok, {Status, Headers, emqx_utils_json:decode(Body, [return_maps])}};
|
||||
{error, {Status, Headers, Body}} ->
|
||||
{error, {Status, Headers, emqx_utils_json:decode(Body, [return_maps])}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
Method = post,
|
||||
Params = [],
|
||||
Res = emqx_bridge_v2_testlib:request(Method, Path, Params),
|
||||
ct:pal("bridge op result: ~p", [Res]),
|
||||
Res.
|
||||
|
||||
|
|
|
@ -104,7 +104,7 @@ setup_mocks() ->
|
|||
catch meck:new(emqx_bridge_v2_schema, MeckOpts),
|
||||
meck:expect(
|
||||
emqx_bridge_v2_schema,
|
||||
registered_api_schemas,
|
||||
registered_actions_api_schemas,
|
||||
1,
|
||||
fun(Method) ->
|
||||
[{bridge_type_bin(), hoconsc:ref(?MODULE, "api_v2_" ++ Method)}]
|
||||
|
|
File diff suppressed because it is too large
|
@ -12,6 +12,9 @@
|
|||
|
||||
-import(emqx_common_test_helpers, [on_exit/1]).
|
||||
|
||||
-define(ROOT_KEY_ACTIONS, actions).
|
||||
-define(ROOT_KEY_SOURCES, sources).
|
||||
|
||||
%% ct setup helpers
|
||||
|
||||
init_per_suite(Config, Apps) ->
|
||||
|
@ -96,9 +99,15 @@ delete_all_bridges_and_connectors() ->
|
|||
delete_all_bridges() ->
|
||||
lists:foreach(
|
||||
fun(#{name := Name, type := Type}) ->
|
||||
emqx_bridge_v2:remove(Type, Name)
|
||||
emqx_bridge_v2:remove(actions, Type, Name)
|
||||
end,
|
||||
emqx_bridge_v2:list()
|
||||
emqx_bridge_v2:list(actions)
|
||||
),
|
||||
lists:foreach(
|
||||
fun(#{name := Name, type := Type}) ->
|
||||
emqx_bridge_v2:remove(sources, Type, Name)
|
||||
end,
|
||||
emqx_bridge_v2:list(sources)
|
||||
).
|
||||
|
||||
delete_all_connectors() ->
|
||||
|
@ -146,6 +155,49 @@ create_bridge(Config, Overrides) ->
|
|||
ct:pal("creating bridge with config: ~p", [BridgeConfig]),
|
||||
emqx_bridge_v2:create(BridgeType, BridgeName, BridgeConfig).
|
||||
|
||||
get_ct_config_with_fallback(Config, [Key]) ->
    ?config(Key, Config);
get_ct_config_with_fallback(Config, [Key | Rest]) ->
    case ?config(Key, Config) of
        undefined ->
            get_ct_config_with_fallback(Config, Rest);
        X ->
            X
    end.

get_config_by_kind(Config, Overrides) ->
    Kind = ?config(bridge_kind, Config),
    get_config_by_kind(Kind, Config, Overrides).

get_config_by_kind(Kind, Config, Overrides) ->
    case Kind of
        action ->
            %% TODO: refactor tests to use action_type...
            ActionType = get_ct_config_with_fallback(Config, [action_type, bridge_type]),
            ActionName = get_ct_config_with_fallback(Config, [action_name, bridge_name]),
            ActionConfig0 = get_ct_config_with_fallback(Config, [action_config, bridge_config]),
            ActionConfig = emqx_utils_maps:deep_merge(ActionConfig0, Overrides),
            #{type => ActionType, name => ActionName, config => ActionConfig};
        source ->
            SourceType = ?config(source_type, Config),
            SourceName = ?config(source_name, Config),
            SourceConfig0 = ?config(source_config, Config),
            SourceConfig = emqx_utils_maps:deep_merge(SourceConfig0, Overrides),
            #{type => SourceType, name => SourceName, config => SourceConfig}
    end.

api_path_root(Kind) ->
    case Kind of
        action -> "actions";
        source -> "sources"
    end.

conf_root_key(Kind) ->
    case Kind of
        action -> ?ROOT_KEY_ACTIONS;
        source -> ?ROOT_KEY_SOURCES
    end.
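A hedged sketch of the kind of CT config proplist these helpers expect and the map get_config_by_kind/3 would build from it; the type, name, and config values are made up for illustration, ?config/2 is approximated with proplists:get_value/2, and deep merging is approximated with maps:merge/2 since the real helper uses emqx_utils_maps:deep_merge/2:

-module(kind_config_sketch).
-export([demo/0]).

demo() ->
    Config = [
        {bridge_kind, source},
        {source_type, mqtt},
        {source_name, <<"my_source">>},
        {source_config, #{<<"connector">> => <<"my_connector">>}}
    ],
    Overrides = #{<<"enable">> => false},
    SourceConfig = maps:merge(proplists:get_value(source_config, Config), Overrides),
    %% Shape returned for a source test case.
    #{
        type => proplists:get_value(source_type, Config),
        name => proplists:get_value(source_name, Config),
        config => SourceConfig
    }.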
|
||||
|
||||
maybe_json_decode(X) ->
|
||||
case emqx_utils_json:safe_decode(X, [return_maps]) of
|
||||
{ok, Decoded} -> Decoded;
|
||||
|
@ -212,26 +264,26 @@ create_bridge_api(Config) ->
|
|||
create_bridge_api(Config, _Overrides = #{}).
|
||||
|
||||
create_bridge_api(Config, Overrides) ->
|
||||
BridgeType = ?config(bridge_type, Config),
|
||||
BridgeName = ?config(bridge_name, Config),
|
||||
BridgeConfig0 = ?config(bridge_config, Config),
|
||||
BridgeConfig = emqx_utils_maps:deep_merge(BridgeConfig0, Overrides),
|
||||
|
||||
{ok, {{_, 201, _}, _, _}} = create_connector_api(Config),
|
||||
create_kind_api(Config, Overrides).
|
||||
|
||||
Params = BridgeConfig#{<<"type">> => BridgeType, <<"name">> => BridgeName},
|
||||
Path = emqx_mgmt_api_test_util:api_path(["actions"]),
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
Opts = #{return_all => true},
|
||||
ct:pal("creating bridge (via http): ~p", [Params]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {Status, Headers, Body0}} ->
|
||||
{ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
ct:pal("bridge create result: ~p", [Res]),
|
||||
create_kind_api(Config) ->
|
||||
create_kind_api(Config, _Overrides = #{}).
|
||||
|
||||
create_kind_api(Config, Overrides) ->
|
||||
Kind = proplists:get_value(bridge_kind, Config, action),
|
||||
#{
|
||||
type := Type,
|
||||
name := Name,
|
||||
config := BridgeConfig
|
||||
} = get_config_by_kind(Kind, Config, Overrides),
|
||||
Params = BridgeConfig#{<<"type">> => Type, <<"name">> => Name},
|
||||
PathRoot = api_path_root(Kind),
|
||||
Path = emqx_mgmt_api_test_util:api_path([PathRoot]),
|
||||
ct:pal("creating bridge (~s, http):\n ~p", [Kind, Params]),
|
||||
Method = post,
|
||||
Res = request(Method, Path, Params),
|
||||
ct:pal("bridge create (~s, http) result:\n ~p", [Kind, Res]),
|
||||
Res.
|
||||
|
||||
create_connector_api(Config) ->
|
||||
|
@ -282,41 +334,33 @@ update_bridge_api(Config) ->
|
|||
update_bridge_api(Config, _Overrides = #{}).
|
||||
|
||||
update_bridge_api(Config, Overrides) ->
|
||||
BridgeType = ?config(bridge_type, Config),
|
||||
Name = ?config(bridge_name, Config),
|
||||
BridgeConfig0 = ?config(bridge_config, Config),
|
||||
BridgeConfig = emqx_utils_maps:deep_merge(BridgeConfig0, Overrides),
|
||||
BridgeId = emqx_bridge_resource:bridge_id(BridgeType, Name),
|
||||
Path = emqx_mgmt_api_test_util:api_path(["actions", BridgeId]),
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
Opts = #{return_all => true},
|
||||
ct:pal("updating bridge (via http): ~p", [BridgeConfig]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(put, Path, "", AuthHeader, BridgeConfig, Opts) of
|
||||
{ok, {_Status, _Headers, Body0}} -> {ok, emqx_utils_json:decode(Body0, [return_maps])};
|
||||
Error -> Error
|
||||
end,
|
||||
ct:pal("bridge update result: ~p", [Res]),
|
||||
Kind = proplists:get_value(bridge_kind, Config, action),
|
||||
#{
|
||||
type := Type,
|
||||
name := Name,
|
||||
config := Params
|
||||
} = get_config_by_kind(Kind, Config, Overrides),
|
||||
BridgeId = emqx_bridge_resource:bridge_id(Type, Name),
|
||||
PathRoot = api_path_root(Kind),
|
||||
Path = emqx_mgmt_api_test_util:api_path([PathRoot, BridgeId]),
|
||||
ct:pal("updating bridge (~s, http):\n ~p", [Kind, Params]),
|
||||
Method = put,
|
||||
Res = request(Method, Path, Params),
|
||||
ct:pal("update bridge (~s, http) result:\n ~p", [Kind, Res]),
|
||||
Res.
|
||||
|
||||
op_bridge_api(Op, BridgeType, BridgeName) ->
|
||||
op_bridge_api(_Kind = action, Op, BridgeType, BridgeName).
|
||||
|
||||
op_bridge_api(Kind, Op, BridgeType, BridgeName) ->
|
||||
BridgeId = emqx_bridge_resource:bridge_id(BridgeType, BridgeName),
|
||||
Path = emqx_mgmt_api_test_util:api_path(["actions", BridgeId, Op]),
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
Opts = #{return_all => true},
|
||||
ct:pal("calling bridge ~p (via http): ~p", [BridgeId, Op]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, "", Opts) of
|
||||
{ok, {Status = {_, 204, _}, Headers, Body}} ->
|
||||
{ok, {Status, Headers, Body}};
|
||||
{ok, {Status, Headers, Body}} ->
|
||||
{ok, {Status, Headers, emqx_utils_json:decode(Body, [return_maps])}};
|
||||
{error, {Status, Headers, Body}} ->
|
||||
{error, {Status, Headers, emqx_utils_json:decode(Body, [return_maps])}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
ct:pal("bridge op result: ~p", [Res]),
|
||||
PathRoot = api_path_root(Kind),
|
||||
Path = emqx_mgmt_api_test_util:api_path([PathRoot, BridgeId, Op]),
|
||||
ct:pal("calling bridge ~p (~s, http):\n ~p", [BridgeId, Kind, Op]),
|
||||
Method = post,
|
||||
Params = [],
|
||||
Res = request(Method, Path, Params),
|
||||
ct:pal("bridge op result:\n ~p", [Res]),
|
||||
Res.
|
||||
|
||||
probe_bridge_api(Config) ->
|
||||
|
@ -330,17 +374,16 @@ probe_bridge_api(Config, Overrides) ->
|
|||
probe_bridge_api(BridgeType, BridgeName, BridgeConfig).
|
||||
|
||||
probe_bridge_api(BridgeType, BridgeName, BridgeConfig) ->
|
||||
probe_bridge_api(action, BridgeType, BridgeName, BridgeConfig).
|
||||
|
||||
probe_bridge_api(Kind, BridgeType, BridgeName, BridgeConfig) ->
|
||||
Params = BridgeConfig#{<<"type">> => BridgeType, <<"name">> => BridgeName},
|
||||
Path = emqx_mgmt_api_test_util:api_path(["actions_probe"]),
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
Opts = #{return_all => true},
|
||||
ct:pal("probing bridge (via http): ~p", [Params]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {{_, 204, _}, _Headers, _Body0} = Res0} -> {ok, Res0};
|
||||
Error -> Error
|
||||
end,
|
||||
ct:pal("bridge probe result: ~p", [Res]),
|
||||
PathRoot = api_path_root(Kind),
|
||||
Path = emqx_mgmt_api_test_util:api_path([PathRoot ++ "_probe"]),
|
||||
ct:pal("probing bridge (~s, http):\n ~p", [Kind, Params]),
|
||||
Method = post,
|
||||
Res = request(Method, Path, Params),
|
||||
ct:pal("bridge probe (~s, http) result:\n ~p", [Kind, Res]),
|
||||
Res.
|
||||
|
||||
list_bridges_http_api_v1() ->
|
||||
|
@ -357,6 +400,13 @@ list_actions_http_api() ->
|
|||
ct:pal("list actions (http v2) result:\n ~p", [Res]),
|
||||
Res.
|
||||
|
||||
list_sources_http_api() ->
|
||||
Path = emqx_mgmt_api_test_util:api_path(["sources"]),
|
||||
ct:pal("list sources (http v2)"),
|
||||
Res = request(get, Path, _Params = []),
|
||||
ct:pal("list sources (http v2) result:\n ~p", [Res]),
|
||||
Res.
|
||||
|
||||
list_connectors_http_api() ->
|
||||
Path = emqx_mgmt_api_test_util:api_path(["connectors"]),
|
||||
ct:pal("list connectors"),
|
||||
|
@ -392,6 +442,23 @@ try_decode_error(Body0) ->
|
|||
Body0
|
||||
end.
|
||||
|
||||
create_rule_api(Opts) ->
|
||||
#{
|
||||
sql := SQL,
|
||||
actions := RuleActions
|
||||
} = Opts,
|
||||
Params = #{
|
||||
enable => true,
|
||||
sql => SQL,
|
||||
actions => RuleActions
|
||||
},
|
||||
Path = emqx_mgmt_api_test_util:api_path(["rules"]),
|
||||
ct:pal("create rule:\n ~p", [Params]),
|
||||
Method = post,
|
||||
Res = request(Method, Path, Params),
|
||||
ct:pal("create rule results:\n ~p", [Res]),
|
||||
Res.
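The Opts map for create_rule_api/1 only needs sql and actions. An illustrative call, assuming the function is exported from the testlib; the SQL, topic, and rule action id are placeholder values:

%% Placeholder values; rule actions reference bridges by <<"type:name">> ids.
Opts = #{
    sql => <<"SELECT * FROM \"t/#\"">>,
    actions => [<<"mqtt:my_source">>]
},
Res = emqx_bridge_v2_testlib:create_rule_api(Opts).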
|
||||
|
||||
create_rule_and_action_http(BridgeType, RuleTopic, Config) ->
|
||||
create_rule_and_action_http(BridgeType, RuleTopic, Config, _Opts = #{}).
|
||||
|
||||
|
@ -510,13 +577,6 @@ t_create_via_http(Config) ->
|
|||
begin
|
||||
?assertMatch({ok, _}, create_bridge_api(Config)),
|
||||
|
||||
%% lightweight matrix testing some configs
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
update_bridge_api(
|
||||
Config
|
||||
)
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
update_bridge_api(
|
||||
|
@ -530,23 +590,26 @@ t_create_via_http(Config) ->
|
|||
ok.
|
||||
|
||||
t_start_stop(Config, StopTracePoint) ->
|
||||
BridgeType = ?config(bridge_type, Config),
|
||||
BridgeName = ?config(bridge_name, Config),
|
||||
BridgeConfig = ?config(bridge_config, Config),
|
||||
Kind = proplists:get_value(bridge_kind, Config, action),
|
||||
ConnectorName = ?config(connector_name, Config),
|
||||
ConnectorType = ?config(connector_type, Config),
|
||||
ConnectorConfig = ?config(connector_config, Config),
|
||||
#{
|
||||
type := Type,
|
||||
name := Name,
|
||||
config := BridgeConfig
|
||||
} = get_config_by_kind(Kind, Config, _Overrides = #{}),
|
||||
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_connector:create(ConnectorType, ConnectorName, ConnectorConfig)
|
||||
{ok, {{_, 201, _}, _, _}},
|
||||
create_connector_api(Config)
|
||||
),
|
||||
|
||||
?check_trace(
|
||||
begin
|
||||
ProbeRes0 = probe_bridge_api(
|
||||
BridgeType,
|
||||
BridgeName,
|
||||
Kind,
|
||||
Type,
|
||||
Name,
|
||||
BridgeConfig
|
||||
),
|
||||
?assertMatch({ok, {{_, 204, _}, _Headers, _Body}}, ProbeRes0),
|
||||
|
@ -554,8 +617,9 @@ t_start_stop(Config, StopTracePoint) ->
|
|||
AtomsBefore = erlang:system_info(atom_count),
|
||||
%% Probe again; shouldn't have created more atoms.
|
||||
ProbeRes1 = probe_bridge_api(
|
||||
BridgeType,
|
||||
BridgeName,
|
||||
Kind,
|
||||
Type,
|
||||
Name,
|
||||
BridgeConfig
|
||||
),
|
||||
|
||||
|
@ -563,9 +627,9 @@ t_start_stop(Config, StopTracePoint) ->
|
|||
AtomsAfter = erlang:system_info(atom_count),
|
||||
?assertEqual(AtomsBefore, AtomsAfter),
|
||||
|
||||
?assertMatch({ok, _}, emqx_bridge_v2:create(BridgeType, BridgeName, BridgeConfig)),
|
||||
?assertMatch({ok, _}, create_kind_api(Config)),
|
||||
|
||||
ResourceId = emqx_bridge_resource:resource_id(BridgeType, BridgeName),
|
||||
ResourceId = emqx_bridge_resource:resource_id(conf_root_key(Kind), Type, Name),
|
||||
|
||||
%% Since the connection process is async, we give it some time to
|
||||
%% stabilize and avoid flakiness.
|
||||
|
@ -578,7 +642,7 @@ t_start_stop(Config, StopTracePoint) ->
|
|||
%% `start` bridge to trigger `already_started`
|
||||
?assertMatch(
|
||||
{ok, {{_, 204, _}, _Headers, []}},
|
||||
emqx_bridge_v2_testlib:op_bridge_api("start", BridgeType, BridgeName)
|
||||
op_bridge_api(Kind, "start", Type, Name)
|
||||
),
|
||||
|
||||
?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceId)),
|
||||
|
@ -628,10 +692,10 @@ t_start_stop(Config, StopTracePoint) ->
|
|||
)
|
||||
),
|
||||
|
||||
ok
|
||||
#{resource_id => ResourceId}
|
||||
end,
|
||||
fun(Trace) ->
|
||||
ResourceId = emqx_bridge_resource:resource_id(BridgeType, BridgeName),
|
||||
fun(Res, Trace) ->
|
||||
#{resource_id := ResourceId} = Res,
|
||||
%% one for each probe, one for real
|
||||
?assertMatch(
|
||||
[_, _, #{instance_id := ResourceId}],
|
||||
|
|
|
@ -48,7 +48,9 @@ resource_opts_union_connector_actions_test() ->
|
|||
%% consciously between connector and actions, in particular when/if we introduce new
|
||||
%% fields there.
|
||||
AllROFields = non_deprecated_fields(emqx_resource_schema:create_opts([])),
|
||||
ActionROFields = non_deprecated_fields(emqx_bridge_v2_schema:resource_opts_fields()),
|
||||
ActionROFields = non_deprecated_fields(
|
||||
emqx_bridge_v2_schema:action_resource_opts_fields()
|
||||
),
|
||||
ConnectorROFields = non_deprecated_fields(emqx_connector_schema:resource_opts_fields()),
|
||||
UnionROFields = lists:usort(ConnectorROFields ++ ActionROFields),
|
||||
?assertEqual(
|
||||
|
@ -108,7 +110,7 @@ connector_resource_opts_test() ->
|
|||
ok.
|
||||
|
||||
actions_api_spec_post_fields_test() ->
|
||||
?UNION(Union) = emqx_bridge_v2_schema:post_request(),
|
||||
?UNION(Union) = emqx_bridge_v2_schema:actions_post_request(),
|
||||
Schemas =
|
||||
lists:map(
|
||||
fun(?R_REF(SchemaMod, StructName)) ->
|
||||
|
|
|
@ -134,6 +134,7 @@ fields(actions) ->
|
|||
mk(binary(), #{
|
||||
desc => ?DESC(emqx_connector_schema, "connector_field"), required => true
|
||||
})},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
{description, emqx_schema:description_schema()}
|
||||
],
|
||||
override_documentations(Fields);
|
||||
|
|
|
@ -121,6 +121,7 @@ fields(actions) ->
|
|||
mk(binary(), #{
|
||||
desc => ?DESC(emqx_connector_schema, "connector_field"), required => true
|
||||
})},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
{description, emqx_schema:description_schema()}
|
||||
],
|
||||
override_documentations(Fields);
|
||||
|
|
|
@ -72,7 +72,7 @@ parse(Hocon) ->
|
|||
Conf.
|
||||
|
||||
check(SchemaMod, Conf) when is_map(Conf) ->
|
||||
hocon_tconf:check_plain(SchemaMod, Conf).
|
||||
hocon_tconf:check_plain(SchemaMod, Conf, #{required => false}).
|
||||
|
||||
check_action(Conf) when is_map(Conf) ->
|
||||
check(emqx_bridge_v2_schema, Conf).
|
||||
|
|
|
@ -0,0 +1,94 @@
|
|||
Business Source License 1.1
|
||||
|
||||
Licensor: Hangzhou EMQ Technologies Co., Ltd.
|
||||
Licensed Work: EMQX Enterprise Edition
|
||||
The Licensed Work is (c) 2023
|
||||
Hangzhou EMQ Technologies Co., Ltd.
|
||||
Additional Use Grant: Students and educators are granted right to copy,
|
||||
modify, and create derivative work for research
|
||||
or education.
|
||||
Change Date: 2027-02-01
|
||||
Change License: Apache License, Version 2.0
|
||||
|
||||
For information about alternative licensing arrangements for the Software,
|
||||
please contact Licensor: https://www.emqx.com/en/contact
|
||||
|
||||
Notice
|
||||
|
||||
The Business Source License (this document, or the “License”) is not an Open
|
||||
Source license. However, the Licensed Work will eventually be made available
|
||||
under an Open Source License, as stated in this License.
|
||||
|
||||
License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved.
|
||||
“Business Source License” is a trademark of MariaDB Corporation Ab.
|
||||
|
||||
-----------------------------------------------------------------------------
|
||||
|
||||
Business Source License 1.1
|
||||
|
||||
Terms
|
||||
|
||||
The Licensor hereby grants you the right to copy, modify, create derivative
|
||||
works, redistribute, and make non-production use of the Licensed Work. The
|
||||
Licensor may make an Additional Use Grant, above, permitting limited
|
||||
production use.
|
||||
|
||||
Effective on the Change Date, or the fourth anniversary of the first publicly
|
||||
available distribution of a specific version of the Licensed Work under this
|
||||
License, whichever comes first, the Licensor hereby grants you rights under
|
||||
the terms of the Change License, and the rights granted in the paragraph
|
||||
above terminate.
|
||||
|
||||
If your use of the Licensed Work does not comply with the requirements
|
||||
currently in effect as described in this License, you must purchase a
|
||||
commercial license from the Licensor, its affiliated entities, or authorized
|
||||
resellers, or you must refrain from using the Licensed Work.
|
||||
|
||||
All copies of the original and modified Licensed Work, and derivative works
|
||||
of the Licensed Work, are subject to this License. This License applies
|
||||
separately for each version of the Licensed Work and the Change Date may vary
|
||||
for each version of the Licensed Work released by Licensor.
|
||||
|
||||
You must conspicuously display this License on each original or modified copy
|
||||
of the Licensed Work. If you receive the Licensed Work in original or
|
||||
modified form from a third party, the terms and conditions set forth in this
|
||||
License apply to your use of that work.
|
||||
|
||||
Any use of the Licensed Work in violation of this License will automatically
|
||||
terminate your rights under this License for the current and all other
|
||||
versions of the Licensed Work.
|
||||
|
||||
This License does not grant you any right in any trademark or logo of
|
||||
Licensor or its affiliates (provided that you may use a trademark or logo of
|
||||
Licensor as expressly required by this License).
|
||||
|
||||
TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
|
||||
AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
|
||||
EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
|
||||
TITLE.
|
||||
|
||||
MariaDB hereby grants you permission to use this License’s text to license
|
||||
your works, and to refer to it using the trademark “Business Source License”,
|
||||
as long as you comply with the Covenants of Licensor below.
|
||||
|
||||
Covenants of Licensor
|
||||
|
||||
In consideration of the right to use this License’s text and the “Business
|
||||
Source License” name and trademark, Licensor covenants to MariaDB, and to all
|
||||
other recipients of the licensed work to be provided by Licensor:
|
||||
|
||||
1. To specify as the Change License the GPL Version 2.0 or any later version,
|
||||
or a license that is compatible with GPL Version 2.0 or a later version,
|
||||
where “compatible” means that software provided under the Change License can
|
||||
be included in a program with software provided under GPL Version 2.0 or a
|
||||
later version. Licensor may specify additional Change Licenses without
|
||||
limitation.
|
||||
|
||||
2. To either: (a) specify an additional grant of rights to use that does not
|
||||
impose any additional restriction on the right granted in this License, as
|
||||
the Additional Use Grant; or (b) insert the text “None”.
|
||||
|
||||
3. To specify a Change Date.
|
||||
|
||||
4. Not to modify this License in any other way.
|
|
@ -0,0 +1,23 @@
|
|||
# Elasticsearch Data Integration Bridge

This application houses the Elasticsearch data integration bridge for EMQX Enterprise
Edition. It provides the means to connect to Elasticsearch and publish messages to it.

It implements the connection management and interaction without the need for a
separate connector app, since it is not used by the authentication and authorization
applications.

<!---
# Configurations

Please see [our official
documentation](https://www.emqx.io/docs/en/v5.0/data-integration/data-bridge-elasticsearch.html)
for more detailed info.
--->

# Contributing
Please see our [contributing.md](../../CONTRIBUTING.md).

# License

See [BSL](./BSL.txt).
@ -0,0 +1,2 @@
|
|||
toxiproxy
|
||||
elasticsearch
|
|
@ -0,0 +1,8 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-ifndef(EMQX_BRIDGE_ES_HRL).
|
||||
-define(EMQX_BRIDGE_ES_HRL, true).
|
||||
|
||||
-endif.
|
|
@ -0,0 +1,15 @@
|
|||
%% -*- mode: erlang -*-
|
||||
|
||||
{erl_opts, [
|
||||
debug_info
|
||||
]}.
|
||||
|
||||
{deps, [
|
||||
{emqx, {path, "../../apps/emqx"}},
|
||||
{emqx_connector, {path, "../../apps/emqx_connector"}},
|
||||
{emqx_resource, {path, "../../apps/emqx_resource"}},
|
||||
{emqx_bridge, {path, "../../apps/emqx_bridge"}},
|
||||
{emqx_bridge_http, {path, "../emqx_bridge_http"}}
|
||||
]}.
|
||||
{plugins, [rebar3_path_deps]}.
|
||||
{project_plugins, [erlfmt]}.
|
|
@ -0,0 +1,23 @@
|
|||
%% -*- mode: erlang -*-
|
||||
{application, emqx_bridge_es, [
|
||||
{description, "EMQX Enterprise Elastic Search Bridge"},
|
||||
{vsn, "0.1.0"},
|
||||
{modules, [
|
||||
emqx_bridge_es,
|
||||
emqx_bridge_es_connector
|
||||
]},
|
||||
{registered, []},
|
||||
{applications, [
|
||||
kernel,
|
||||
stdlib,
|
||||
emqx_resource,
|
||||
emqx_connector
|
||||
]},
|
||||
{env, []},
|
||||
{licenses, ["Business Source License 1.1"]},
|
||||
{maintainers, ["EMQX Team <contact@emqx.io>"]},
|
||||
{links, [
|
||||
{"Homepage", "https://emqx.io/"},
|
||||
{"Github", "https://github.com/emqx/emqx"}
|
||||
]}
|
||||
]}.
|
|
@ -0,0 +1,243 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_bridge_es).
|
||||
|
||||
-include("emqx_bridge_es.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
-include_lib("hocon/include/hoconsc.hrl").
|
||||
-include_lib("emqx_resource/include/emqx_resource.hrl").
|
||||
|
||||
-export([bridge_v2_examples/1]).
|
||||
|
||||
%% hocon_schema API
|
||||
-export([namespace/0, roots/0, fields/1, desc/1]).
|
||||
|
||||
-define(CONNECTOR_TYPE, elasticsearch).
|
||||
-define(ACTION_TYPE, ?CONNECTOR_TYPE).
|
||||
|
||||
namespace() -> "bridge_elasticsearch".
|
||||
|
||||
roots() -> [].
|
||||
|
||||
fields(action) ->
|
||||
{elasticsearch,
|
||||
?HOCON(
|
||||
?MAP(action_name, ?R_REF(action_config)),
|
||||
#{
|
||||
required => false,
|
||||
desc => ?DESC(elasticsearch)
|
||||
}
|
||||
)};
|
||||
fields(action_config) ->
|
||||
emqx_resource_schema:override(
|
||||
emqx_bridge_v2_schema:make_consumer_action_schema(
|
||||
?HOCON(
|
||||
?UNION(fun action_union_member_selector/1),
|
||||
#{
|
||||
required => true, desc => ?DESC("action_parameters")
|
||||
}
|
||||
)
|
||||
),
|
||||
[
|
||||
{resource_opts,
|
||||
?HOCON(?R_REF(action_resource_opts), #{
|
||||
default => #{},
|
||||
desc => ?DESC(emqx_resource_schema, "resource_opts")
|
||||
})}
|
||||
]
|
||||
);
|
||||
fields(action_resource_opts) ->
|
||||
lists:filter(
|
||||
fun({K, _V}) ->
|
||||
not lists:member(K, unsupported_opts())
|
||||
end,
|
||||
emqx_bridge_v2_schema:action_resource_opts_fields()
|
||||
);
|
||||
fields(action_create) ->
|
||||
[
|
||||
action(create),
|
||||
index(),
|
||||
id(false),
|
||||
doc(),
|
||||
routing(),
|
||||
require_alias(),
|
||||
overwrite()
|
||||
| http_common_opts()
|
||||
];
|
||||
fields(action_delete) ->
|
||||
[action(delete), index(), id(true), routing() | http_common_opts()];
|
||||
fields(action_update) ->
|
||||
[
|
||||
action(update),
|
||||
index(),
|
||||
id(true),
|
||||
doc(),
|
||||
doc_as_upsert(),
|
||||
routing(),
|
||||
require_alias()
|
||||
| http_common_opts()
|
||||
];
|
||||
fields("post_bridge_v2") ->
|
||||
emqx_bridge_schema:type_and_name_fields(elasticsearch) ++ fields(action_config);
|
||||
fields("put_bridge_v2") ->
|
||||
fields(action_config);
|
||||
fields("get_bridge_v2") ->
|
||||
emqx_bridge_schema:status_fields() ++ fields("post_bridge_v2").
|
||||
|
||||
action_union_member_selector(all_union_members) ->
|
||||
[
|
||||
?R_REF(action_create),
|
||||
?R_REF(action_delete),
|
||||
?R_REF(action_update)
|
||||
];
|
||||
action_union_member_selector({value, Value}) ->
|
||||
case Value of
|
||||
#{<<"action">> := <<"create">>} ->
|
||||
[?R_REF(action_create)];
|
||||
#{<<"action">> := <<"delete">>} ->
|
||||
[?R_REF(action_delete)];
|
||||
#{<<"action">> := <<"update">>} ->
|
||||
[?R_REF(action_update)];
|
||||
#{<<"action">> := Action} when is_atom(Action) ->
|
||||
Value1 = Value#{<<"action">> => atom_to_binary(Action)},
|
||||
action_union_member_selector({value, Value1});
|
||||
Actual ->
|
||||
Expected = "create | delete | update",
|
||||
throw(#{
|
||||
field_name => action,
|
||||
actual => Actual,
|
||||
expected => Expected
|
||||
})
|
||||
end.
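%% Example of how the selector narrows the union (field values are illustrative):
%% a raw config such as #{<<"action">> => <<"update">>, <<"index">> => <<"idx">>,
%% <<"id">> => <<"${payload.id}">>} resolves to [?R_REF(action_update)], so only
%% the update-specific fields are validated; any other <<"action">> value hits
%% the throw above with expected => "create | delete | update".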
|
||||
|
||||
action(Action) ->
|
||||
{action,
|
||||
?HOCON(
|
||||
Action,
|
||||
#{
|
||||
required => true,
|
||||
desc => atom_to_binary(Action)
|
||||
}
|
||||
)}.
|
||||
|
||||
overwrite() ->
|
||||
{overwrite,
|
||||
?HOCON(
|
||||
boolean(),
|
||||
#{
|
||||
required => false,
|
||||
default => true,
|
||||
desc => ?DESC("config_overwrite")
|
||||
}
|
||||
)}.
|
||||
|
||||
index() ->
|
||||
{index,
|
||||
?HOCON(
|
||||
binary(),
|
||||
#{
|
||||
required => true,
|
||||
example => <<"${payload.index}">>,
|
||||
desc => ?DESC("config_parameters_index")
|
||||
}
|
||||
)}.
|
||||
|
||||
id(Required) ->
|
||||
{id,
|
||||
?HOCON(
|
||||
binary(),
|
||||
#{
|
||||
required => Required,
|
||||
example => <<"${payload.id}">>,
|
||||
desc => ?DESC("config_parameters_id")
|
||||
}
|
||||
)}.
|
||||
|
||||
doc() ->
|
||||
{doc,
|
||||
?HOCON(
|
||||
binary(),
|
||||
#{
|
||||
required => false,
|
||||
example => <<"${payload.doc}">>,
|
||||
desc => ?DESC("config_parameters_doc")
|
||||
}
|
||||
)}.
|
||||
|
||||
http_common_opts() ->
|
||||
lists:filter(
|
||||
fun({K, _}) ->
|
||||
not lists:member(K, [path, method, body, headers, request_timeout])
|
||||
end,
|
||||
emqx_bridge_http_schema:fields("parameters_opts")
|
||||
).
|
||||
|
||||
doc_as_upsert() ->
|
||||
{doc_as_upsert,
|
||||
?HOCON(
|
||||
boolean(),
|
||||
#{
|
||||
required => false,
|
||||
default => false,
|
||||
desc => ?DESC("config_doc_as_upsert")
|
||||
}
|
||||
)}.
|
||||
|
||||
routing() ->
|
||||
{routing,
|
||||
?HOCON(
|
||||
binary(),
|
||||
#{
|
||||
required => false,
|
||||
example => <<"${payload.routing}">>,
|
||||
desc => ?DESC("config_routing")
|
||||
}
|
||||
)}.
|
||||
|
||||
require_alias() ->
|
||||
{require_alias,
|
||||
?HOCON(
|
||||
boolean(),
|
||||
#{
|
||||
required => false,
|
||||
desc => ?DESC("config_require_alias")
|
||||
}
|
||||
)}.
|
||||
|
||||
bridge_v2_examples(Method) ->
|
||||
[
|
||||
#{
|
||||
<<"elasticsearch">> =>
|
||||
#{
|
||||
summary => <<"Elastic Search Bridge">>,
|
||||
value => emqx_bridge_v2_schema:action_values(
|
||||
Method, ?ACTION_TYPE, ?CONNECTOR_TYPE, action_values()
|
||||
)
|
||||
}
|
||||
}
|
||||
].
|
||||
|
||||
action_values() ->
|
||||
#{
|
||||
parameters => #{
|
||||
action => create,
|
||||
index => <<"${payload.index}">>,
|
||||
overwrite => true,
|
||||
doc => <<"${payload.doc}">>
|
||||
}
|
||||
}.
|
||||
|
||||
unsupported_opts() ->
|
||||
[
|
||||
batch_size,
|
||||
batch_time
|
||||
].
|
||||
|
||||
desc(elasticsearch) -> ?DESC(elasticsearch);
|
||||
desc(action_config) -> ?DESC(action_config);
|
||||
desc(action_create) -> ?DESC(action_create);
|
||||
desc(action_delete) -> ?DESC(action_delete);
|
||||
desc(action_update) -> ?DESC(action_update);
|
||||
desc(action_resource_opts) -> ?DESC(action_resource_opts);
|
||||
desc(_) -> undefined.
|
|
@ -0,0 +1,22 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_bridge_es_action_info).
|
||||
|
||||
-behaviour(emqx_action_info).
|
||||
|
||||
-elvis([{elvis_style, invalid_dynamic_call, disable}]).
|
||||
|
||||
%% behaviour callbacks
|
||||
-export([
|
||||
action_type_name/0,
|
||||
connector_type_name/0,
|
||||
schema_module/0
|
||||
]).
|
||||
|
||||
-define(ACTION_TYPE, elasticsearch).
|
||||
|
||||
action_type_name() -> ?ACTION_TYPE.
|
||||
connector_type_name() -> ?ACTION_TYPE.
|
||||
|
||||
schema_module() -> emqx_bridge_es.
|
|
@ -0,0 +1,401 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_bridge_es_connector).
|
||||
|
||||
-behaviour(emqx_resource).
|
||||
|
||||
-include("emqx_bridge_es.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
-include_lib("hocon/include/hoconsc.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
|
||||
%% `emqx_resource' API
|
||||
-export([
|
||||
callback_mode/0,
|
||||
on_start/2,
|
||||
on_stop/2,
|
||||
on_get_status/2,
|
||||
on_query/3,
|
||||
on_query_async/4,
|
||||
on_add_channel/4,
|
||||
on_remove_channel/3,
|
||||
on_get_channels/1,
|
||||
on_get_channel_status/3
|
||||
]).
|
||||
|
||||
-export([
|
||||
namespace/0,
|
||||
roots/0,
|
||||
fields/1,
|
||||
desc/1,
|
||||
connector_examples/1,
|
||||
connector_example_values/0
|
||||
]).
|
||||
|
||||
-export([render_template/2]).
|
||||
|
||||
%% emqx_connector_resource behaviour callbacks
|
||||
-export([connector_config/2]).
|
||||
|
||||
-type config() ::
|
||||
#{
|
||||
base_url := #{
|
||||
scheme := http | https,
|
||||
host := iolist(),
|
||||
port := inet:port_number(),
|
||||
path := _
|
||||
},
|
||||
connect_timeout := pos_integer(),
|
||||
pool_type := random | hash,
|
||||
pool_size := pos_integer(),
|
||||
request => undefined | map(),
|
||||
atom() => _
|
||||
}.
|
||||
|
||||
-type state() ::
|
||||
#{
|
||||
base_path := _,
|
||||
connect_timeout := pos_integer(),
|
||||
pool_type := random | hash,
|
||||
channels := map(),
|
||||
request => undefined | map(),
|
||||
atom() => _
|
||||
}.
|
||||
|
||||
-type manager_id() :: binary().
|
||||
|
||||
-define(CONNECTOR_TYPE, elasticsearch).
|
||||
|
||||
%%-------------------------------------------------------------------------------------
|
||||
%% connector examples
|
||||
%%-------------------------------------------------------------------------------------
|
||||
connector_examples(Method) ->
|
||||
[
|
||||
#{
|
||||
<<"elasticsearch">> =>
|
||||
#{
|
||||
summary => <<"Elastic Search Connector">>,
|
||||
value => emqx_connector_schema:connector_values(
|
||||
Method, ?CONNECTOR_TYPE, connector_example_values()
|
||||
)
|
||||
}
|
||||
}
|
||||
].
|
||||
|
||||
connector_example_values() ->
|
||||
#{
|
||||
name => <<"elasticsearch_connector">>,
|
||||
type => elasticsearch,
|
||||
enable => true,
|
||||
authentication => #{
|
||||
<<"username">> => <<"root">>,
|
||||
<<"password">> => <<"******">>
|
||||
},
|
||||
base_url => <<"http://127.0.0.1:9200/">>,
|
||||
connect_timeout => <<"15s">>,
|
||||
pool_type => <<"random">>,
|
||||
pool_size => 8,
|
||||
enable_pipelining => 100,
|
||||
ssl => #{enable => false}
|
||||
}.
|
||||
|
||||
%%-------------------------------------------------------------------------------------
|
||||
%% schema
|
||||
%%-------------------------------------------------------------------------------------
|
||||
namespace() -> "elasticsearch".
|
||||
|
||||
roots() ->
|
||||
[{config, #{type => ?R_REF(config)}}].
|
||||
|
||||
fields(config) ->
|
||||
lists:filter(
|
||||
fun({K, _}) -> not lists:member(K, [url, request, retry_interval, headers]) end,
|
||||
emqx_bridge_http_schema:fields("config_connector")
|
||||
) ++
|
||||
fields("connection_fields");
|
||||
fields("connection_fields") ->
|
||||
[
|
||||
{base_url,
|
||||
?HOCON(
|
||||
emqx_schema:url(),
|
||||
#{
|
||||
required => true,
|
||||
desc => ?DESC(emqx_bridge_es, "config_base_url")
|
||||
}
|
||||
)},
|
||||
{authentication,
|
||||
?HOCON(
|
||||
?UNION([?R_REF(auth_basic)]),
|
||||
#{
|
||||
desc => ?DESC("config_authentication")
|
||||
}
|
||||
)}
|
||||
];
|
||||
fields(auth_basic) ->
|
||||
[
|
||||
{username,
|
||||
?HOCON(binary(), #{
|
||||
required => true,
|
||||
desc => ?DESC("config_auth_basic_username")
|
||||
})},
|
||||
{password,
|
||||
emqx_schema_secret:mk(#{
|
||||
required => true,
|
||||
desc => ?DESC("config_auth_basic_password")
|
||||
})}
|
||||
];
|
||||
fields("post") ->
|
||||
emqx_connector_schema:type_and_name_fields(elasticsearch) ++ fields(config);
|
||||
fields("put") ->
|
||||
fields(config);
|
||||
fields("get") ->
|
||||
emqx_bridge_schema:status_fields() ++ fields("post").
|
||||
|
||||
desc(config) ->
|
||||
?DESC("desc_config");
|
||||
desc(auth_basic) ->
|
||||
"Basic Authentication";
|
||||
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
|
||||
["Configuration for Elastic Search using `", string:to_upper(Method), "` method."];
|
||||
desc(_) ->
|
||||
undefined.
|
||||
|
||||
connector_config(Conf, #{name := Name, parse_confs := ParseConfs}) ->
|
||||
#{
|
||||
base_url := BaseUrl,
|
||||
authentication :=
|
||||
#{
|
||||
username := Username,
|
||||
password := Password0
|
||||
}
|
||||
} = Conf,
|
||||
|
||||
Password = emqx_secret:unwrap(Password0),
|
||||
Base64 = base64:encode(<<Username/binary, ":", Password/binary>>),
|
||||
BasicToken = <<"Basic ", Base64/binary>>,
|
||||
|
||||
WebhookConfig =
|
||||
Conf#{
|
||||
method => <<"post">>,
|
||||
url => BaseUrl,
|
||||
headers => [
|
||||
{<<"Content-type">>, <<"application/json">>},
|
||||
{<<"Authorization">>, BasicToken}
|
||||
]
|
||||
},
|
||||
ParseConfs(
|
||||
<<"http">>,
|
||||
Name,
|
||||
WebhookConfig
|
||||
).
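%% Sketch of the effective request settings (credentials are made up): with
%% username <<"elastic">> and password <<"secret">>, the Authorization header
%% becomes <<"Basic ", (base64:encode(<<"elastic:secret">>))/binary>>, and the
%% remaining options are handed to ParseConfs as an ordinary <<"http">> connector.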
|
||||
|
||||
%%-------------------------------------------------------------------------------------
|
||||
%% `emqx_resource' API
|
||||
%%-------------------------------------------------------------------------------------
|
||||
callback_mode() -> async_if_possible.
|
||||
|
||||
-spec on_start(manager_id(), config()) -> {ok, state()} | no_return().
|
||||
on_start(InstanceId, Config) ->
|
||||
case emqx_bridge_http_connector:on_start(InstanceId, Config) of
|
||||
{ok, State} ->
|
||||
?SLOG(info, #{
|
||||
msg => "elasticsearch_bridge_started",
|
||||
instance_id => InstanceId,
|
||||
request => emqx_utils:redact(maps:get(request, State, <<>>))
|
||||
}),
|
||||
?tp(elasticsearch_bridge_started, #{instance_id => InstanceId}),
|
||||
{ok, State#{channels => #{}}};
|
||||
{error, Reason} ->
|
||||
?SLOG(error, #{
|
||||
msg => "failed_to_start_elasticsearch_bridge",
|
||||
instance_id => InstanceId,
|
||||
request => emqx_utils:redact(maps:get(request, Config, <<>>)),
|
||||
reason => Reason
|
||||
}),
|
||||
throw(failed_to_start_elasticsearch_bridge)
|
||||
end.
|
||||
|
||||
-spec on_stop(manager_id(), state()) -> ok | {error, term()}.
|
||||
on_stop(InstanceId, State) ->
|
||||
?SLOG(info, #{
|
||||
msg => "stopping_elasticsearch_bridge",
|
||||
connector => InstanceId
|
||||
}),
|
||||
Res = emqx_bridge_http_connector:on_stop(InstanceId, State),
|
||||
?tp(elasticsearch_bridge_stopped, #{instance_id => InstanceId}),
|
||||
Res.
|
||||
|
||||
-spec on_get_status(manager_id(), state()) ->
|
||||
{connected, state()} | {disconnected, state(), term()}.
|
||||
on_get_status(InstanceId, State) ->
|
||||
emqx_bridge_http_connector:on_get_status(InstanceId, State).
|
||||
|
||||
-spec on_query(manager_id(), tuple(), state()) ->
|
||||
{ok, pos_integer(), [term()], term()}
|
||||
| {ok, pos_integer(), [term()]}
|
||||
| {error, term()}.
|
||||
on_query(InstanceId, {ChannelId, Msg} = Req, State) ->
|
||||
?tp(elasticsearch_bridge_on_query, #{instance_id => InstanceId}),
|
||||
?SLOG(debug, #{
|
||||
msg => "elasticsearch_bridge_on_query_called",
|
||||
instance_id => InstanceId,
|
||||
send_message => Req,
|
||||
state => emqx_utils:redact(State)
|
||||
}),
|
||||
handle_response(
|
||||
emqx_bridge_http_connector:on_query(
|
||||
InstanceId, {ChannelId, Msg}, State
|
||||
)
|
||||
).
|
||||
|
||||
-spec on_query_async(manager_id(), tuple(), {function(), [term()]}, state()) ->
|
||||
{ok, pid()} | {error, empty_request}.
|
||||
on_query_async(
|
||||
InstanceId, {ChannelId, Msg} = Req, ReplyFunAndArgs0, State
|
||||
) ->
|
||||
?tp(elasticsearch_bridge_on_query_async, #{instance_id => InstanceId}),
|
||||
?SLOG(debug, #{
|
||||
msg => "elasticsearch_bridge_on_query_async_called",
|
||||
instance_id => InstanceId,
|
||||
send_message => Req,
|
||||
state => emqx_utils:redact(State)
|
||||
}),
|
||||
ReplyFunAndArgs =
|
||||
{
|
||||
fun(Result) ->
|
||||
Response = handle_response(Result),
|
||||
emqx_resource:apply_reply_fun(ReplyFunAndArgs0, Response)
|
||||
end,
|
||||
[]
|
||||
},
|
||||
emqx_bridge_http_connector:on_query_async(
|
||||
InstanceId, {ChannelId, Msg}, ReplyFunAndArgs, State
|
||||
).
|
||||
|
||||
on_add_channel(
|
||||
InstanceId,
|
||||
#{channels := Channels} = State0,
|
||||
ChannelId,
|
||||
#{parameters := Parameter}
|
||||
) ->
|
||||
case maps:is_key(ChannelId, Channels) of
|
||||
true ->
|
||||
{error, already_exists};
|
||||
_ ->
|
||||
Parameter1 = Parameter#{
|
||||
path => path(Parameter),
|
||||
method => method(Parameter),
|
||||
body => get_body_template(Parameter)
|
||||
},
|
||||
ChannelConfig = #{
|
||||
parameters => Parameter1,
|
||||
render_template_func => fun ?MODULE:render_template/2
|
||||
},
|
||||
{ok, State} = emqx_bridge_http_connector:on_add_channel(
|
||||
InstanceId, State0, ChannelId, ChannelConfig
|
||||
),
|
||||
Channel = Parameter1,
|
||||
Channels2 = Channels#{ChannelId => Channel},
|
||||
{ok, State#{channels => Channels2}}
|
||||
end.
|
||||
|
||||
on_remove_channel(InstanceId, #{channels := Channels} = OldState0, ChannelId) ->
|
||||
{ok, OldState} = emqx_bridge_http_connector:on_remove_channel(InstanceId, OldState0, ChannelId),
|
||||
Channels2 = maps:remove(ChannelId, Channels),
|
||||
{ok, OldState#{channels => Channels2}}.
|
||||
|
||||
on_get_channels(InstanceId) ->
|
||||
emqx_bridge_v2:get_channels_for_connector(InstanceId).
|
||||
|
||||
on_get_channel_status(_InstanceId, ChannelId, #{channels := Channels}) ->
|
||||
case maps:is_key(ChannelId, Channels) of
|
||||
true ->
|
||||
connected;
|
||||
_ ->
|
||||
{error, not_exists}
|
||||
end.
|
||||
|
||||
render_template(Template, Msg) ->
|
||||
% Ignoring errors here, undefined bindings will be replaced with empty string.
|
||||
Opts = #{var_trans => fun to_string/2},
|
||||
{String, _Errors} = emqx_template:render(Template, {emqx_jsonish, Msg}, Opts),
|
||||
String.
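%% Illustrative rendering (message contents are hypothetical): a template built
%% from <<"${payload.index}">> rendered against a message whose payload.index is
%% <<"idx-1">> yields an iolist equal to <<"idx-1">>, while an unbound
%% placeholder renders as an empty string because render_var/2 maps undefined
%% bindings to <<>>.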
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Internal Functions
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
to_string(Name, Value) ->
|
||||
emqx_template:to_string(render_var(Name, Value)).
|
||||
render_var(_, undefined) ->
|
||||
% NOTE Any allowed but undefined binding will be replaced with empty string
|
||||
<<>>;
|
||||
render_var(_Name, Value) ->
|
||||
Value.
|
||||
%% delete DELETE /<index>/_doc/<_id>
|
||||
path(#{action := delete, id := Id, index := Index} = Action) ->
|
||||
BasePath = ["/", Index, "/_doc/", Id],
|
||||
Qs = add_query_string([routing], Action),
|
||||
BasePath ++ Qs;
|
||||
%% update POST /<index>/_update/<_id>
|
||||
path(#{action := update, id := Id, index := Index} = Action) ->
|
||||
BasePath = ["/", Index, "/_update/", Id],
|
||||
Qs = add_query_string([routing, require_alias], Action),
|
||||
BasePath ++ Qs;
|
||||
%% create with id /<index>/_doc/_id
|
||||
path(#{action := create, index := Index, id := Id} = Action) ->
|
||||
BasePath = ["/", Index, "/_doc/", Id],
|
||||
Qs =
|
||||
case maps:get(overwrite, Action, true) of
|
||||
true ->
|
||||
add_query_string([routing, require_alias], Action);
|
||||
false ->
|
||||
Action1 = Action#{op_type => "create"},
|
||||
add_query_string([routing, require_alias, op_type], Action1)
|
||||
end,
|
||||
BasePath ++ Qs;
|
||||
%% create without id POST /<index>/_doc/
|
||||
path(#{action := create, index := Index} = Action) ->
|
||||
BasePath = ["/", Index, "/_doc/"],
|
||||
Qs = add_query_string([routing, require_alias], Action),
|
||||
BasePath ++ Qs.
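%% Path construction examples (values are made up): an update action with
%% index <<"idx">>, id <<"1">> and routing <<"node1">> produces an iolist that
%% flattens to "/idx/_update/1?routing=node1", while a create action without an
%% id falls through to the last clause and targets "/idx/_doc/".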
|
||||
|
||||
method(#{action := create}) -> <<"POST">>;
|
||||
method(#{action := delete}) -> <<"DELETE">>;
|
||||
method(#{action := update}) -> <<"POST">>.
|
||||
|
||||
add_query_string(Keys, Param0) ->
|
||||
Param1 = maps:with(Keys, Param0),
|
||||
FoldFun = fun(K, V, Acc) -> [[atom_to_list(K), "=", to_str(V)] | Acc] end,
|
||||
case maps:fold(FoldFun, [], Param1) of
|
||||
"" -> "";
|
||||
QString -> "?" ++ lists:join("&", QString)
|
||||
end.
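%% For instance (hypothetical input): add_query_string([routing, require_alias],
%% #{routing => <<"r1">>, require_alias => true, index => <<"idx">>}) keeps only
%% the listed keys and flattens to "?require_alias=true&routing=r1"; the key
%% order follows maps:fold/3 and is not significant.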
|
||||
|
||||
to_str(List) when is_list(List) -> List;
|
||||
to_str(Bin) when is_binary(Bin) -> binary_to_list(Bin);
|
||||
to_str(false) -> "false";
|
||||
to_str(true) -> "true";
|
||||
to_str(Atom) when is_atom(Atom) -> atom_to_list(Atom).
|
||||
|
||||
handle_response({ok, Code, _Headers, _Body} = Resp) when Code =:= 200; Code =:= 201 ->
|
||||
Resp;
|
||||
handle_response({ok, Code, _Body} = Resp) when Code =:= 200; Code =:= 201 ->
|
||||
Resp;
|
||||
handle_response({ok, Code, _Headers, Body}) ->
|
||||
{error, #{code => Code, body => Body}};
|
||||
handle_response({ok, Code, Body}) ->
|
||||
{error, #{code => Code, body => Body}};
|
||||
handle_response({error, _} = Error) ->
|
||||
Error.
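%% In short: 200/201 responses are passed through unchanged, while any other
%% status (e.g. a hypothetical 409 conflict) is normalized to
%% {error, #{code => Code, body => Body}} so the caller accounts for it as a
%% failed query.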
|
||||
|
||||
get_body_template(#{action := update, doc := Doc} = Template) ->
|
||||
case maps:get(doc_as_upsert, Template, false) of
|
||||
false -> <<"{\"doc\":", Doc/binary, "}">>;
|
||||
true -> <<"{\"doc\":", Doc/binary, ",\"doc_as_upsert\": true}">>
|
||||
end;
|
||||
get_body_template(#{doc := Doc}) ->
|
||||
Doc;
|
||||
get_body_template(_) ->
|
||||
undefined.
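%% Body template example (the doc template is illustrative): for an update
%% action with Doc = <<"${payload.doc}">> and doc_as_upsert enabled, the
%% resulting template is <<"{\"doc\":${payload.doc},\"doc_as_upsert\": true}">>,
%% which is later rendered per message by render_template/2.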
|
|
@ -0,0 +1,379 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_es_SUITE).
|
||||
|
||||
-compile(nowarn_export_all).
|
||||
-compile(export_all).
|
||||
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
-include_lib("emqx_resource/include/emqx_resource.hrl").
|
||||
|
||||
-import(emqx_common_test_helpers, [on_exit/1]).
|
||||
|
||||
-define(TYPE, elasticsearch).
|
||||
-define(CA, "es.crt").
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% CT boilerplate
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
all() ->
|
||||
emqx_common_test_helpers:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
ProxyName = "elasticsearch",
|
||||
ESHost = os:getenv("ELASTICSEARCH_HOST", "elasticsearch"),
|
||||
ESPort = list_to_integer(os:getenv("ELASTICSEARCH_PORT", "9200")),
|
||||
Apps = emqx_cth_suite:start(
|
||||
[
|
||||
emqx,
|
||||
emqx_conf,
|
||||
emqx_connector,
|
||||
emqx_bridge_es,
|
||||
emqx_bridge,
|
||||
emqx_rule_engine,
|
||||
emqx_management,
|
||||
{emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}
|
||||
],
|
||||
#{work_dir => emqx_cth_suite:work_dir(Config)}
|
||||
),
|
||||
{ok, _} = emqx_common_test_http:create_default_app(),
|
||||
wait_until_elasticsearch_is_up(ESHost, ESPort),
|
||||
[
|
||||
{apps, Apps},
|
||||
{proxy_name, ProxyName},
|
||||
{es_host, ESHost},
|
||||
{es_port, ESPort}
|
||||
| Config
|
||||
].
|
||||
|
||||
es_checks() ->
|
||||
case os:getenv("IS_CI") of
|
||||
"yes" -> 10;
|
||||
_ -> 1
|
||||
end.
|
||||
|
||||
wait_until_elasticsearch_is_up(Host, Port) ->
|
||||
wait_until_elasticsearch_is_up(es_checks(), Host, Port).
|
||||
|
||||
wait_until_elasticsearch_is_up(0, Host, Port) ->
|
||||
throw({{Host, Port}, not_available});
|
||||
wait_until_elasticsearch_is_up(Count, Host, Port) ->
|
||||
timer:sleep(1000),
|
||||
case emqx_common_test_helpers:is_all_tcp_servers_available([{Host, Port}]) of
|
||||
true -> ok;
|
||||
false -> wait_until_elasticsearch_is_up(Count - 1, Host, Port)
|
||||
end.
|
||||
|
||||
end_per_suite(Config) ->
|
||||
Apps = ?config(apps, Config),
|
||||
%ProxyHost = ?config(proxy_host, Config),
|
||||
%ProxyPort = ?config(proxy_port, Config),
|
||||
%emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
|
||||
emqx_cth_suite:stop(Apps),
|
||||
ok.
|
||||
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
Config.
|
||||
|
||||
end_per_testcase(_TestCase, _Config) ->
|
||||
%ProxyHost = ?config(proxy_host, Config),
|
||||
%ProxyPort = ?config(proxy_port, Config),
|
||||
%emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
|
||||
emqx_bridge_v2_testlib:delete_all_bridges_and_connectors(),
|
||||
emqx_common_test_helpers:call_janitor(60_000),
|
||||
ok.
|
||||
|
||||
%%-------------------------------------------------------------------------------------
|
||||
%% Helper fns
|
||||
%%-------------------------------------------------------------------------------------
|
||||
|
||||
check_send_message_with_action(Topic, ActionName, ConnectorName) ->
|
||||
send_message(Topic),
|
||||
%% ######################################
|
||||
%% Check if message is sent to es
|
||||
%% ######################################
|
||||
timer:sleep(500),
|
||||
check_action_metrics(ActionName, ConnectorName).
|
||||
|
||||
send_message(Topic) ->
|
||||
Now = emqx_utils_calendar:now_to_rfc3339(microsecond),
|
||||
Doc = #{<<"name">> => <<"emqx">>, <<"release_date">> => Now},
|
||||
Index = <<"emqx-test-index">>,
|
||||
Payload = emqx_utils_json:encode(#{doc => Doc, index => Index}),
|
||||
|
||||
ClientId = emqx_guid:to_hexstr(emqx_guid:gen()),
|
||||
{ok, Client} = emqtt:start_link([{clientid, ClientId}, {port, 1883}]),
|
||||
{ok, _} = emqtt:connect(Client),
|
||||
ok = emqtt:publish(Client, Topic, Payload, [{qos, 0}]),
|
||||
ok.
|
||||
|
||||
check_action_metrics(ActionName, ConnectorName) ->
|
||||
ActionId = emqx_bridge_v2:id(?TYPE, ActionName, ConnectorName),
|
||||
Metrics =
|
||||
#{
|
||||
match => emqx_resource_metrics:matched_get(ActionId),
|
||||
success => emqx_resource_metrics:success_get(ActionId),
|
||||
failed => emqx_resource_metrics:failed_get(ActionId),
|
||||
queuing => emqx_resource_metrics:queuing_get(ActionId),
|
||||
dropped => emqx_resource_metrics:dropped_get(ActionId)
|
||||
},
|
||||
?assertEqual(
|
||||
#{
|
||||
match => 1,
|
||||
success => 1,
|
||||
dropped => 0,
|
||||
failed => 0,
|
||||
queuing => 0
|
||||
},
|
||||
Metrics,
|
||||
{ActionName, ConnectorName, ActionId}
|
||||
).
|
||||
|
||||
action_config(ConnectorName) ->
|
||||
action_config(ConnectorName, _Overrides = #{}).
|
||||
|
||||
action_config(ConnectorName, Overrides) ->
|
||||
Cfg0 = action(ConnectorName),
|
||||
emqx_utils_maps:deep_merge(Cfg0, Overrides).
|
||||
|
||||
action(ConnectorName) ->
|
||||
#{
|
||||
<<"description">> => <<"My elasticsearch test action">>,
|
||||
<<"enable">> => true,
|
||||
<<"parameters">> => #{
|
||||
<<"index">> => <<"${payload.index}">>,
|
||||
<<"action">> => <<"create">>,
|
||||
<<"doc">> => <<"${payload.doc}">>,
|
||||
<<"overwrite">> => true
|
||||
},
|
||||
<<"connector">> => ConnectorName,
|
||||
<<"resource_opts">> => #{
|
||||
<<"health_check_interval">> => <<"30s">>,
|
||||
<<"query_mode">> => <<"sync">>
|
||||
}
|
||||
}.
|
||||
|
||||
base_url(Config) ->
|
||||
Host = ?config(es_host, Config),
|
||||
Port = ?config(es_port, Config),
|
||||
iolist_to_binary([
|
||||
"https://",
|
||||
Host,
|
||||
":",
|
||||
integer_to_binary(Port)
|
||||
]).
|
||||
|
||||
connector_config(Config) ->
|
||||
connector_config(_Overrides = #{}, Config).
|
||||
|
||||
connector_config(Overrides, Config) ->
|
||||
Defaults =
|
||||
#{
|
||||
<<"base_url">> => base_url(Config),
|
||||
<<"enable">> => true,
|
||||
<<"authentication">> => #{
|
||||
<<"password">> => <<"emqx123">>,
|
||||
<<"username">> => <<"elastic">>
|
||||
},
|
||||
<<"description">> => <<"My elasticsearch test connector">>,
|
||||
<<"connect_timeout">> => <<"15s">>,
|
||||
<<"pool_size">> => 2,
|
||||
<<"pool_type">> => <<"random">>,
|
||||
<<"enable_pipelining">> => 100,
|
||||
<<"ssl">> => #{
|
||||
<<"enable">> => true,
|
||||
<<"hibernate_after">> => <<"5s">>,
|
||||
<<"cacertfile">> => filename:join(?config(data_dir, Config), ?CA)
|
||||
}
|
||||
},
|
||||
emqx_utils_maps:deep_merge(Defaults, Overrides).
|
||||
|
||||
create_connector(Name, Config) ->
|
||||
Res = emqx_connector:create(?TYPE, Name, Config),
|
||||
on_exit(fun() -> emqx_connector:remove(?TYPE, Name) end),
|
||||
Res.
|
||||
|
||||
create_action(Name, Config) ->
|
||||
Res = emqx_bridge_v2:create(?TYPE, Name, Config),
|
||||
on_exit(fun() -> emqx_bridge_v2:remove(?TYPE, Name) end),
|
||||
Res.
|
||||
|
||||
action_api_spec_props_for_get() ->
|
||||
#{
|
||||
<<"bridge_elasticsearch.get_bridge_v2">> :=
|
||||
#{<<"properties">> := Props}
|
||||
} =
|
||||
emqx_bridge_v2_testlib:actions_api_spec_schemas(),
|
||||
Props.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Testcases
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
t_create_remove_list(Config) ->
|
||||
[] = emqx_bridge_v2:list(),
|
||||
ConnectorConfig = connector_config(Config),
|
||||
{ok, _} = emqx_connector:create(?TYPE, test_connector, ConnectorConfig),
|
||||
ActionConfig = action(<<"test_connector">>),
|
||||
{ok, _} = emqx_bridge_v2:create(?TYPE, test_action_1, ActionConfig),
|
||||
[ActionInfo] = emqx_bridge_v2:list(),
|
||||
#{
|
||||
name := <<"test_action_1">>,
|
||||
type := <<"elasticsearch">>,
|
||||
raw_config := _,
|
||||
status := connected
|
||||
} = ActionInfo,
|
||||
{ok, _} = emqx_bridge_v2:create(?TYPE, test_action_2, ActionConfig),
|
||||
2 = length(emqx_bridge_v2:list()),
|
||||
ok = emqx_bridge_v2:remove(?TYPE, test_action_1),
|
||||
1 = length(emqx_bridge_v2:list()),
|
||||
ok = emqx_bridge_v2:remove(?TYPE, test_action_2),
|
||||
[] = emqx_bridge_v2:list(),
|
||||
emqx_connector:remove(?TYPE, test_connector),
|
||||
ok.
|
||||
|
||||
%% Test sending a message to a bridge V2
|
||||
t_send_message(Config) ->
|
||||
ConnectorConfig = connector_config(Config),
|
||||
{ok, _} = emqx_connector:create(?TYPE, test_connector2, ConnectorConfig),
|
||||
ActionConfig = action(<<"test_connector2">>),
|
||||
{ok, _} = emqx_bridge_v2:create(?TYPE, test_action_1, ActionConfig),
|
||||
Rule = #{
|
||||
id => <<"rule:t_es">>,
|
||||
sql => <<"SELECT\n *\nFROM\n \"es/#\"">>,
|
||||
actions => [<<"elasticsearch:test_action_1">>],
|
||||
description => <<"sink doc to elasticsearch">>
|
||||
},
|
||||
{ok, _} = emqx_rule_engine:create_rule(Rule),
|
||||
%% Use the action to send a message
|
||||
check_send_message_with_action(<<"es/1">>, test_action_1, test_connector2),
|
||||
%% Create a few more bridges with the same connector and test them
|
||||
ActionNames1 =
|
||||
lists:foldl(
|
||||
fun(I, Acc) ->
|
||||
Seq = integer_to_binary(I),
|
||||
ActionNameStr = "test_action_" ++ integer_to_list(I),
|
||||
ActionName = list_to_atom(ActionNameStr),
|
||||
{ok, _} = emqx_bridge_v2:create(?TYPE, ActionName, ActionConfig),
|
||||
Rule1 = #{
|
||||
id => <<"rule:t_es", Seq/binary>>,
|
||||
sql => <<"SELECT\n *\nFROM\n \"es/", Seq/binary, "\"">>,
|
||||
actions => [<<"elasticsearch:", (list_to_binary(ActionNameStr))/binary>>],
|
||||
description => <<"sink doc to elasticsearch">>
|
||||
},
|
||||
{ok, _} = emqx_rule_engine:create_rule(Rule1),
|
||||
Topic = <<"es/", Seq/binary>>,
|
||||
check_send_message_with_action(Topic, ActionName, test_connector2),
|
||||
[ActionName | Acc]
|
||||
end,
|
||||
[],
|
||||
lists:seq(2, 10)
|
||||
),
|
||||
ActionNames = [test_action_1 | ActionNames1],
|
||||
%% Remove all the bridges
|
||||
lists:foreach(
|
||||
fun(BridgeName) ->
|
||||
ok = emqx_bridge_v2:remove(?TYPE, BridgeName)
|
||||
end,
|
||||
ActionNames
|
||||
),
|
||||
emqx_connector:remove(?TYPE, test_connector2),
|
||||
ok.
|
||||
|
||||
%% Test that we can get the status of the bridge V2
|
||||
t_health_check(Config) ->
|
||||
BridgeV2Config = action(<<"test_connector3">>),
|
||||
ConnectorConfig = connector_config(Config),
|
||||
{ok, _} = emqx_connector:create(?TYPE, test_connector3, ConnectorConfig),
|
||||
{ok, _} = emqx_bridge_v2:create(?TYPE, test_bridge_v2, BridgeV2Config),
|
||||
#{status := connected} = emqx_bridge_v2:health_check(?TYPE, test_bridge_v2),
|
||||
ok = emqx_bridge_v2:remove(?TYPE, test_bridge_v2),
|
||||
%% Check behaviour when bridge does not exist
|
||||
{error, bridge_not_found} = emqx_bridge_v2:health_check(?TYPE, test_bridge_v2),
|
||||
ok = emqx_connector:remove(?TYPE, test_connector3),
|
||||
ok.
|
||||
|
||||
t_bad_url(Config) ->
|
||||
ConnectorName = <<"test_connector">>,
|
||||
ActionName = <<"test_action">>,
|
||||
ActionConfig = action(<<"test_connector">>),
|
||||
ConnectorConfig0 = connector_config(Config),
|
||||
ConnectorConfig = ConnectorConfig0#{<<"base_url">> := <<"bad_host:9092">>},
|
||||
?assertMatch({ok, _}, create_connector(ConnectorName, ConnectorConfig)),
|
||||
?assertMatch({ok, _}, create_action(ActionName, ActionConfig)),
|
||||
?assertMatch(
|
||||
{ok, #{
|
||||
resource_data :=
|
||||
#{
|
||||
status := ?status_disconnected,
|
||||
error := failed_to_start_elasticsearch_bridge
|
||||
}
|
||||
}},
|
||||
emqx_connector:lookup(?TYPE, ConnectorName)
|
||||
),
|
||||
?assertMatch({ok, #{status := ?status_disconnected}}, emqx_bridge_v2:lookup(?TYPE, ActionName)),
|
||||
ok.
|
||||
|
||||
t_parameters_key_api_spec(_Config) ->
|
||||
ActionProps = action_api_spec_props_for_get(),
|
||||
?assertNot(is_map_key(<<"elasticsearch">>, ActionProps), #{action_props => ActionProps}),
|
||||
?assert(is_map_key(<<"parameters">>, ActionProps), #{action_props => ActionProps}),
|
||||
ok.
|
||||
|
||||
t_http_api_get(Config) ->
|
||||
ConnectorName = <<"test_connector">>,
|
||||
ActionName = <<"test_action">>,
|
||||
ActionConfig = action(ConnectorName),
|
||||
ConnectorConfig = connector_config(Config),
|
||||
?assertMatch({ok, _}, create_connector(ConnectorName, ConnectorConfig)),
|
||||
?assertMatch({ok, _}, create_action(ActionName, ActionConfig)),
|
||||
?assertMatch(
|
||||
{ok,
|
||||
{{_, 200, _}, _, [
|
||||
#{
|
||||
<<"connector">> := ConnectorName,
|
||||
<<"description">> := <<"My elasticsearch test action">>,
|
||||
<<"enable">> := true,
|
||||
<<"error">> := <<>>,
|
||||
<<"name">> := ActionName,
|
||||
<<"node_status">> :=
|
||||
[
|
||||
#{
|
||||
<<"node">> := _,
|
||||
<<"status">> := <<"connected">>,
|
||||
<<"status_reason">> := <<>>
|
||||
}
|
||||
],
|
||||
<<"parameters">> :=
|
||||
#{
|
||||
<<"action">> := <<"create">>,
|
||||
<<"doc">> := <<"${payload.doc}">>,
|
||||
<<"index">> := <<"${payload.index}">>,
|
||||
<<"max_retries">> := 2,
|
||||
<<"overwrite">> := true
|
||||
},
|
||||
<<"resource_opts">> := #{<<"query_mode">> := <<"sync">>},
|
||||
<<"status">> := <<"connected">>,
|
||||
<<"status_reason">> := <<>>,
|
||||
<<"type">> := <<"elasticsearch">>
|
||||
}
|
||||
]}},
|
||||
emqx_bridge_v2_testlib:list_bridges_api()
|
||||
),
|
||||
ok.
|
|
@ -0,0 +1,20 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIDSjCCAjKgAwIBAgIVAIrN275DCtGnotTPpxwvQ5751N4OMA0GCSqGSIb3DQEB
|
||||
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
|
||||
ZXJhdGVkIENBMB4XDTI0MDExNjAyMzIyMFoXDTI3MDExNTAyMzIyMFowNDEyMDAG
|
||||
A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew
|
||||
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCy0nwiEurUkIPFMLV1weVM
|
||||
pPk/AlwZUzqjkeL44gsY53XI9Q05w/sL9u6PzwrXgTCFWNXzI9+MoAtp8phPkn14
|
||||
cmg5/3sLe9YcFVFjYK/MoljlUbPDj+4dgk8l+w5FRSi0+JN5krUm7rYk9lojAkeS
|
||||
fX8RU7ekKGbjBXIFtPxX5GNadu9RidR5GkHM3XroAIoris8bFOzMgFn9iybYnkhq
|
||||
0S+Hpv0A8FVxzle0KNbPpsIkxXH2DnP2iPTDym9xJNl9Iv9MPtj9XaamH7TmXcSt
|
||||
MbjkAudKsCw4bRuhHonM16DIUr8sX5UcRcAWyJ1x1qpZaOzMdh2VdYAHNuOsZwzJ
|
||||
AgMBAAGjUzBRMB0GA1UdDgQWBBTAyDlp8NZfPe8NCGVlHJSVclGOhTAfBgNVHSME
|
||||
GDAWgBTAyDlp8NZfPe8NCGVlHJSVclGOhTAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
|
||||
SIb3DQEBCwUAA4IBAQAeIUXRKmC53iirY4P49YspLafspAMf4ndMFQAp+Oc223Vs
|
||||
hQC4axNoYnUdzWDH6LioAN7P826xNPqtXvTZF9fmeX7K8Nm9Kdj+for+QQI3j6+X
|
||||
zq98VVkACb8b/Mc9Nac/WBbv/1IKyKgNNta7//WNPgAFolOfti/C0NLsPcKhrM9L
|
||||
mGbvRX8ZjH8pVJ0YTy4/xfDcF7G/Lxl4Yvb0ZXpuQbvE1+Y0h5aoTNshT/skJxC4
|
||||
iyVseYr21s3pptKcr6H9KZuSdZe5pbEo+81nT15w+50aswFLk9GCYh5UsQ+1jkRK
|
||||
cKgxP93i6x8BVbQJGKi1A1jhauSKX2IpWZQsHy4p
|
||||
-----END CERTIFICATE-----
|
|
@ -159,7 +159,7 @@ generate_config(Config0) ->
|
|||
} = gcp_pubsub_config(Config0),
|
||||
%% FIXME
|
||||
%% `emqx_bridge_resource:resource_id' requires an existing connector in the config.....
|
||||
ConnectorName = <<"connector_", ActionName/binary>>,
|
||||
ConnectorName = ActionName,
|
||||
ConnectorResourceId = <<"connector:", ?CONNECTOR_TYPE_BIN/binary, ":", ConnectorName/binary>>,
|
||||
ActionResourceId = emqx_bridge_v2:id(?ACTION_TYPE_BIN, ActionName, ConnectorName),
|
||||
BridgeId = emqx_bridge_resource:bridge_id(?BRIDGE_V1_TYPE_BIN, ActionName),
|
||||
|
@ -1228,7 +1228,11 @@ do_econnrefused_or_timeout_test(Config, Error) ->
|
|||
%% _Msg = "The connection was lost."
|
||||
ok;
|
||||
Trace0 ->
|
||||
error({unexpected_trace, Trace0})
|
||||
error(
|
||||
{unexpected_trace, Trace0, #{
|
||||
expected_connector_id => ConnectorResourceId
|
||||
}}
|
||||
)
|
||||
end;
|
||||
timeout ->
|
||||
?assertMatch(
|
||||
|
|
|
@ -38,6 +38,7 @@
|
|||
]).
|
||||
|
||||
-export([reply_delegator/3]).
|
||||
-export([render_template/2]).
|
||||
|
||||
-export([
|
||||
roots/0,
|
||||
|
@ -266,7 +267,9 @@ on_add_channel(
|
|||
) ->
|
||||
InstalledActions = maps:get(installed_actions, OldState, #{}),
|
||||
{ok, ActionState} = do_create_http_action(ActionConfig),
|
||||
NewInstalledActions = maps:put(ActionId, ActionState, InstalledActions),
|
||||
RenderTmplFunc = maps:get(render_template_func, ActionConfig, fun ?MODULE:render_template/2),
|
||||
ActionState1 = ActionState#{render_template_func => RenderTmplFunc},
|
||||
NewInstalledActions = maps:put(ActionId, ActionState1, InstalledActions),
|
||||
NewState = maps:put(installed_actions, NewInstalledActions, OldState),
|
||||
{ok, NewState}.
|
||||
|
||||
|
@ -337,7 +340,7 @@ on_query(
|
|||
} = process_request_and_action(Request, ActionState, Msg),
|
||||
%% bridge buffer worker has retry, do not let ehttpc retry
|
||||
Retry = 2,
|
||||
ClientId = maps:get(clientid, Msg, undefined),
|
||||
ClientId = clientid(Msg),
|
||||
on_query(
|
||||
InstId,
|
||||
{ClientId, Method, {Path, Headers, Body}, Timeout, Retry},
|
||||
|
@ -449,7 +452,7 @@ on_query_async(
|
|||
headers := Headers,
|
||||
request_timeout := Timeout
|
||||
} = process_request_and_action(Request, ActionState, Msg),
|
||||
ClientId = maps:get(clientid, Msg, undefined),
|
||||
ClientId = clientid(Msg),
|
||||
on_query_async(
|
||||
InstId,
|
||||
{ClientId, Method, {Path, Headers, Body}, Timeout},
|
||||
|
@ -631,12 +634,10 @@ parse_template(String) ->
|
|||
|
||||
process_request_and_action(Request, ActionState, Msg) ->
|
||||
MethodTemplate = maps:get(method, ActionState),
|
||||
Method = make_method(render_template_string(MethodTemplate, Msg)),
|
||||
BodyTemplate = maps:get(body, ActionState),
|
||||
Body = render_request_body(BodyTemplate, Msg),
|
||||
|
||||
PathPrefix = unicode:characters_to_list(render_template(maps:get(path, Request), Msg)),
|
||||
PathSuffix = unicode:characters_to_list(render_template(maps:get(path, ActionState), Msg)),
|
||||
RenderTmplFunc = maps:get(render_template_func, ActionState),
|
||||
Method = make_method(render_template_string(MethodTemplate, RenderTmplFunc, Msg)),
|
||||
PathPrefix = unicode:characters_to_list(RenderTmplFunc(maps:get(path, Request), Msg)),
|
||||
PathSuffix = unicode:characters_to_list(RenderTmplFunc(maps:get(path, ActionState), Msg)),
|
||||
|
||||
Path =
|
||||
case PathSuffix of
|
||||
|
@ -647,9 +648,11 @@ process_request_and_action(Request, ActionState, Msg) ->
|
|||
HeadersTemplate1 = maps:get(headers, Request),
|
||||
HeadersTemplate2 = maps:get(headers, ActionState),
|
||||
Headers = merge_proplist(
|
||||
render_headers(HeadersTemplate1, Msg),
|
||||
render_headers(HeadersTemplate2, Msg)
|
||||
render_headers(HeadersTemplate1, RenderTmplFunc, Msg),
|
||||
render_headers(HeadersTemplate2, RenderTmplFunc, Msg)
|
||||
),
|
||||
BodyTemplate = maps:get(body, ActionState),
|
||||
Body = render_request_body(BodyTemplate, RenderTmplFunc, Msg),
|
||||
#{
|
||||
method => Method,
|
||||
path => Path,
|
||||
|
@ -682,25 +685,26 @@ process_request(
|
|||
} = Conf,
|
||||
Msg
|
||||
) ->
|
||||
RenderTemplateFun = fun render_template/2,
|
||||
Conf#{
|
||||
method => make_method(render_template_string(MethodTemplate, Msg)),
|
||||
path => unicode:characters_to_list(render_template(PathTemplate, Msg)),
|
||||
body => render_request_body(BodyTemplate, Msg),
|
||||
headers => render_headers(HeadersTemplate, Msg),
|
||||
method => make_method(render_template_string(MethodTemplate, RenderTemplateFun, Msg)),
|
||||
path => unicode:characters_to_list(RenderTemplateFun(PathTemplate, Msg)),
|
||||
body => render_request_body(BodyTemplate, RenderTemplateFun, Msg),
|
||||
headers => render_headers(HeadersTemplate, RenderTemplateFun, Msg),
|
||||
request_timeout => ReqTimeout
|
||||
}.
|
||||
|
||||
render_request_body(undefined, Msg) ->
|
||||
render_request_body(undefined, _, Msg) ->
|
||||
emqx_utils_json:encode(Msg);
|
||||
render_request_body(BodyTks, Msg) ->
|
||||
render_template(BodyTks, Msg).
|
||||
render_request_body(BodyTks, RenderTmplFunc, Msg) ->
|
||||
RenderTmplFunc(BodyTks, Msg).
|
||||
|
||||
render_headers(HeaderTks, Msg) ->
|
||||
render_headers(HeaderTks, RenderTmplFunc, Msg) ->
|
||||
lists:map(
|
||||
fun({K, V}) ->
|
||||
{
|
||||
render_template_string(K, Msg),
|
||||
render_template_string(emqx_secret:unwrap(V), Msg)
|
||||
render_template_string(K, RenderTmplFunc, Msg),
|
||||
render_template_string(emqx_secret:unwrap(V), RenderTmplFunc, Msg)
|
||||
}
|
||||
end,
|
||||
HeaderTks
|
||||
|
@ -711,8 +715,8 @@ render_template(Template, Msg) ->
|
|||
{String, _Errors} = emqx_template:render(Template, {emqx_jsonish, Msg}),
|
||||
String.
|
||||
|
||||
render_template_string(Template, Msg) ->
|
||||
unicode:characters_to_binary(render_template(Template, Msg)).
|
||||
render_template_string(Template, RenderTmplFunc, Msg) ->
|
||||
unicode:characters_to_binary(RenderTmplFunc(Template, Msg)).
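%% Note: RenderTmplFunc is whatever render function was installed for the
%% channel (fun ?MODULE:render_template/2 unless an action, such as the
%% Elasticsearch one, overrides it via render_template_func); the rendered
%% iolist is converted to a binary here.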
|
||||
|
||||
make_method(M) when M == <<"POST">>; M == <<"post">> -> post;
|
||||
make_method(M) when M == <<"PUT">>; M == <<"put">> -> put;
|
||||
|
@ -732,7 +736,7 @@ formalize_request(_Method, BasePath, {Path, Headers}) ->
|
|||
%% because an HTTP server may handle paths like
|
||||
%% "/a/b/c/", "/a/b/c" and "/a//b/c" differently.
|
||||
%%
|
||||
%% So we try to avoid unneccessary path normalization.
|
||||
%% So we try to avoid unnecessary path normalization.
|
||||
%%
|
||||
%% See also: `join_paths_test_/0`
|
||||
join_paths(Path1, Path2) ->
|
||||
|
@ -876,6 +880,8 @@ redact_request({Path, Headers}) ->
|
|||
redact_request({Path, Headers, _Body}) ->
|
||||
{Path, Headers, <<"******">>}.
|
||||
|
||||
clientid(Msg) -> maps:get(clientid, Msg, undefined).
|
||||
|
||||
-ifdef(TEST).
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
|
||||
|
|
|
@ -76,6 +76,7 @@ fields("http_action") ->
|
|||
mk(binary(), #{
|
||||
desc => ?DESC(emqx_connector_schema, "connector_field"), required => true
|
||||
})},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
{description, emqx_schema:description_schema()},
|
||||
%% Note: there's an implicit convention in `emqx_bridge' that,
|
||||
%% for egress bridges with this config, the published messages
|
||||
|
@ -105,7 +106,7 @@ fields(action_resource_opts) ->
|
|||
UnsupportedOpts = [batch_size, batch_time],
|
||||
lists:filter(
|
||||
fun({K, _V}) -> not lists:member(K, UnsupportedOpts) end,
|
||||
emqx_bridge_v2_schema:resource_opts_fields()
|
||||
emqx_bridge_v2_schema:action_resource_opts_fields()
|
||||
);
|
||||
fields("parameters_opts") ->
|
||||
[
|
||||
|
@ -175,6 +176,7 @@ basic_config() ->
|
|||
default => true
|
||||
}
|
||||
)},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
{description, emqx_schema:description_schema()}
|
||||
] ++ connector_opts().
|
||||
|
||||
|
|
|
@ -146,7 +146,7 @@ end_per_testcase(_TestCase, Config) ->
|
|||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% HTTP server for testing
|
||||
%% (Orginally copied from emqx_bridge_api_SUITE)
|
||||
%% (Originally copied from emqx_bridge_api_SUITE)
|
||||
%%------------------------------------------------------------------------------
|
||||
start_http_server(HTTPServerConfig) ->
|
||||
process_flag(trap_exit, true),
|
||||
|
@ -244,6 +244,12 @@ parse_http_request_assertive(ReqStr0) ->
|
|||
%% Helper functions
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
get_metrics(Name) ->
|
||||
%% Note: `emqx_bridge:get_metrics/2' is currently *only* called in prod by
|
||||
%% `emqx_bridge_api:lookup_from_local_node' with an action (not v1 bridge) type.
|
||||
Type = <<"http">>,
|
||||
emqx_bridge:get_metrics(Type, Name).
|
||||
|
||||
bridge_async_config(#{port := Port} = Config) ->
|
||||
Type = maps:get(type, Config, ?BRIDGE_TYPE),
|
||||
Name = maps:get(name, Config, ?BRIDGE_NAME),
|
||||
|
@ -570,7 +576,7 @@ t_path_not_found(Config) ->
|
|||
success := 0
|
||||
}
|
||||
},
|
||||
emqx_bridge:get_metrics(?BRIDGE_TYPE, ?BRIDGE_NAME)
|
||||
get_metrics(?BRIDGE_NAME)
|
||||
)
|
||||
),
|
||||
ok
|
||||
|
@ -611,7 +617,7 @@ t_too_many_requests(Config) ->
|
|||
success := 1
|
||||
}
|
||||
},
|
||||
emqx_bridge:get_metrics(?BRIDGE_TYPE, ?BRIDGE_NAME)
|
||||
get_metrics(?BRIDGE_NAME)
|
||||
)
|
||||
),
|
||||
ok
|
||||
|
@ -654,7 +660,7 @@ t_rule_action_expired(Config) ->
|
|||
dropped := 1
|
||||
}
|
||||
},
|
||||
emqx_bridge:get_metrics(?BRIDGE_TYPE, ?BRIDGE_NAME)
|
||||
get_metrics(?BRIDGE_NAME)
|
||||
)
|
||||
),
|
||||
?retry(
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
{erl_opts, [debug_info]}.
|
||||
|
||||
{deps, [
|
||||
{influxdb, {git, "https://github.com/emqx/influxdb-client-erl", {tag, "1.1.12"}}},
|
||||
{influxdb, {git, "https://github.com/emqx/influxdb-client-erl", {tag, "1.1.13"}}},
|
||||
{emqx_connector, {path, "../../apps/emqx_connector"}},
|
||||
{emqx_resource, {path, "../../apps/emqx_resource"}},
|
||||
{emqx_bridge, {path, "../../apps/emqx_bridge"}}
|
||||
|
|
|
@ -382,7 +382,7 @@ field(Line) ->
|
|||
field_val([$" | Line]) ->
|
||||
{Val, [$" | Line1]} = unescape(?FIELD_VAL_ESC_CHARS, [$"], Line, []),
|
||||
%% Quoted val can be empty
|
||||
{Val, strip_l(Line1, ?VAL_SEP)};
|
||||
{{quoted, Val}, strip_l(Line1, ?VAL_SEP)};
|
||||
field_val(Line) ->
|
||||
%% Unquoted value should not be un-escaped according to InfluxDB protocol,
|
||||
%% as it can only hold float, integer, uinteger or boolean value.
|
||||
|
|
|
@ -59,6 +59,11 @@
|
|||
|
||||
-define(DEFAULT_TIMESTAMP_TMPL, "${timestamp}").
|
||||
|
||||
-define(IS_HTTP_ERROR(STATUS_CODE),
|
||||
(is_integer(STATUS_CODE) andalso
|
||||
(STATUS_CODE < 200 orelse STATUS_CODE >= 300))
|
||||
).
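%% Illustration of the guard above (sample status codes only): any status outside
%% the 2xx range is treated as an error, e.g. ?IS_HTTP_ERROR(204) =:= false,
%% ?IS_HTTP_ERROR(404) =:= true, ?IS_HTTP_ERROR(503) =:= true.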
%% -------------------------------------------------------------------------------------------------
|
||||
%% resource callback
|
||||
callback_mode() -> async_if_possible.
|
||||
|
@ -541,7 +546,12 @@ reply_callback(ReplyFunAndArgs, {ok, 401, _, _}) ->
|
|||
?tp(influxdb_connector_do_query_failure, #{error => <<"authorization failure">>}),
|
||||
Result = {error, {unrecoverable_error, <<"authorization failure">>}},
|
||||
emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result);
|
||||
reply_callback(ReplyFunAndArgs, {ok, Code, _, Body}) when ?IS_HTTP_ERROR(Code) ->
|
||||
?tp(influxdb_connector_do_query_failure, #{error => Body}),
|
||||
Result = {error, {unrecoverable_error, Body}},
|
||||
emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result);
|
||||
reply_callback(ReplyFunAndArgs, Result) ->
|
||||
?tp(influxdb_connector_do_query_ok, #{result => Result}),
|
||||
emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result).
|
||||
|
||||
%% -------------------------------------------------------------------------------------------------
|
||||
|
@ -589,8 +599,17 @@ to_kv_config(KVfields) ->
|
|||
|
||||
to_maps_config(K, V, Res) ->
|
||||
NK = emqx_placeholder:preproc_tmpl(bin(K)),
|
||||
NV = emqx_placeholder:preproc_tmpl(bin(V)),
|
||||
Res#{NK => NV}.
|
||||
Res#{NK => preproc_quoted(V)}.
|
||||
|
||||
preproc_quoted({quoted, V}) ->
|
||||
{quoted, emqx_placeholder:preproc_tmpl(bin(V))};
|
||||
preproc_quoted(V) ->
|
||||
emqx_placeholder:preproc_tmpl(bin(V)).
|
||||
|
||||
proc_quoted({quoted, V}, Data, TransOpts) ->
|
||||
{quoted, emqx_placeholder:proc_tmpl(V, Data, TransOpts)};
|
||||
proc_quoted(V, Data, TransOpts) ->
|
||||
emqx_placeholder:proc_tmpl(V, Data, TransOpts).
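%% Rough sketch of how the wrapper travels (values below are illustrative):
%%   preproc_quoted({quoted, <<"${payload.foo}">>}) -> {quoted, Tokens}
%%   proc_quoted({quoted, Tokens}, Data, Opts)      -> {quoted, RenderedList}
%% so a field value that was double-quoted in the write syntax stays tagged and is
%% later written as an InfluxDB string, while an unquoted template is rendered by
%% emqx_placeholder:proc_tmpl/3 exactly as before.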
%% -------------------------------------------------------------------------------------------------
|
||||
%% Tags & Fields Data Trans
|
||||
|
@ -711,56 +730,115 @@ time_unit(ns) -> nanosecond.
|
|||
maps_config_to_data(K, V, {Data, Res}) ->
|
||||
KTransOptions = #{return => rawlist, var_trans => fun key_filter/1},
|
||||
VTransOptions = #{return => rawlist, var_trans => fun data_filter/1},
|
||||
NK0 = emqx_placeholder:proc_tmpl(K, Data, KTransOptions),
|
||||
NV = emqx_placeholder:proc_tmpl(V, Data, VTransOptions),
|
||||
case {NK0, NV} of
|
||||
NK = emqx_placeholder:proc_tmpl(K, Data, KTransOptions),
|
||||
NV = proc_quoted(V, Data, VTransOptions),
|
||||
case {NK, NV} of
|
||||
{[undefined], _} ->
|
||||
{Data, Res};
|
||||
%% undefined value in normal format [undefined] or int/uint format [undefined, <<"i">>]
|
||||
{_, [undefined | _]} ->
|
||||
{Data, Res};
|
||||
{_, {quoted, [undefined | _]}} ->
|
||||
{Data, Res};
|
||||
_ ->
|
||||
NK = list_to_binary(NK0),
|
||||
{Data, Res#{NK => value_type(NV)}}
|
||||
{Data, Res#{
|
||||
list_to_binary(NK) => value_type(NV, tmpl_type(V))
|
||||
}}
|
||||
end.
|
||||
|
||||
value_type([Int, <<"i">>]) when
|
||||
is_integer(Int)
|
||||
->
|
||||
value_type({quoted, ValList}, _) ->
|
||||
{string_list, ValList};
|
||||
value_type([Int, <<"i">>], mixed) when is_integer(Int) ->
|
||||
{int, Int};
|
||||
value_type([UInt, <<"u">>]) when
|
||||
is_integer(UInt)
|
||||
->
|
||||
value_type([UInt, <<"u">>], mixed) when is_integer(UInt) ->
|
||||
{uint, UInt};
|
||||
%% write `1`, `1.0`, `-1.0` all as float
|
||||
%% see also: https://docs.influxdata.com/influxdb/v2.7/reference/syntax/line-protocol/#float
|
||||
value_type([Number]) when is_number(Number) ->
|
||||
Number;
|
||||
value_type([<<"t">>]) ->
|
||||
value_type([Number], _) when is_number(Number) ->
|
||||
{float, Number};
|
||||
value_type([<<"t">>], _) ->
|
||||
't';
|
||||
value_type([<<"T">>]) ->
|
||||
value_type([<<"T">>], _) ->
|
||||
'T';
|
||||
value_type([true]) ->
|
||||
value_type([true], _) ->
|
||||
'true';
|
||||
value_type([<<"TRUE">>]) ->
|
||||
value_type([<<"TRUE">>], _) ->
|
||||
'TRUE';
|
||||
value_type([<<"True">>]) ->
|
||||
value_type([<<"True">>], _) ->
|
||||
'True';
|
||||
value_type([<<"f">>]) ->
|
||||
value_type([<<"f">>], _) ->
|
||||
'f';
|
||||
value_type([<<"F">>]) ->
|
||||
value_type([<<"F">>], _) ->
|
||||
'F';
|
||||
value_type([false]) ->
|
||||
value_type([false], _) ->
|
||||
'false';
|
||||
value_type([<<"FALSE">>]) ->
|
||||
value_type([<<"FALSE">>], _) ->
|
||||
'FALSE';
|
||||
value_type([<<"False">>]) ->
|
||||
value_type([<<"False">>], _) ->
|
||||
'False';
|
||||
value_type(Val) ->
|
||||
Val.
|
||||
value_type([Str], variable) when is_binary(Str) ->
|
||||
Str;
|
||||
value_type([Str], literal) when is_binary(Str) ->
|
||||
%% if Str is a literal string suffixed with `i` or `u`, we should convert it to int/uint.
|
||||
%% otherwise, we should convert it to float.
|
||||
NumStr = binary:part(Str, 0, byte_size(Str) - 1),
|
||||
case binary:part(Str, byte_size(Str), -1) of
|
||||
<<"i">> ->
|
||||
maybe_convert_to_integer(NumStr, Str, int);
|
||||
<<"u">> ->
|
||||
maybe_convert_to_integer(NumStr, Str, uint);
|
||||
_ ->
|
||||
maybe_convert_to_float_str(Str)
|
||||
end;
|
||||
value_type(Str, _) ->
|
||||
Str.
|
||||
|
||||
tmpl_type([{str, _}]) ->
|
||||
literal;
|
||||
tmpl_type([{var, _}]) ->
|
||||
variable;
|
||||
tmpl_type(_) ->
|
||||
mixed.
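%% Taken together, value_type/2 and tmpl_type/1 map the write syntax onto InfluxDB
%% line-protocol types roughly as follows (examples are illustrative):
%%   bar=5i               literal with an `i` suffix -> {int, 5}
%%   baz=1.1              literal number             -> {float, <<"1.1">>}
%%   foo=${payload.foo}i  mixed template             -> {int, Foo} when Foo renders to an integer
%%   qux="${payload.x}"   quoted value               -> written as a string
%%   quux=${payload.y}    bare variable              -> {float, N} when it renders to a number,
%%                                                      otherwise passed through unchanged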
maybe_convert_to_integer(NumStr, String, Type) ->
|
||||
try
|
||||
Int = binary_to_integer(NumStr),
|
||||
{Type, Int}
|
||||
catch
|
||||
error:badarg ->
|
||||
maybe_convert_to_integer_f(NumStr, String, Type)
|
||||
end.
|
||||
|
||||
maybe_convert_to_integer_f(NumStr, String, Type) ->
|
||||
try
|
||||
Float = binary_to_float(NumStr),
|
||||
{Type, erlang:floor(Float)}
|
||||
catch
|
||||
error:badarg ->
|
||||
String
|
||||
end.
|
||||
|
||||
maybe_convert_to_float_str(NumStr) ->
|
||||
try
|
||||
_ = binary_to_float(NumStr),
|
||||
%% NOTE: return a {float, String} to avoid precision loss when converting to float
|
||||
{float, NumStr}
|
||||
catch
|
||||
error:badarg ->
|
||||
maybe_convert_to_float_str_i(NumStr)
|
||||
end.
|
||||
|
||||
maybe_convert_to_float_str_i(NumStr) ->
|
||||
try
|
||||
_ = binary_to_integer(NumStr),
|
||||
{float, NumStr}
|
||||
catch
|
||||
error:badarg ->
|
||||
NumStr
|
||||
end.
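%% Examples of the fallback conversions above (values are illustrative):
%%   maybe_convert_to_integer(<<"42">>, <<"42i">>, int)   -> {int, 42}
%%   maybe_convert_to_integer(<<"4.2">>, <<"4.2i">>, int) -> {int, 4}          (floored)
%%   maybe_convert_to_float_str(<<"1.1">>)                -> {float, <<"1.1">>}
%%   maybe_convert_to_float_str(<<"5">>)                  -> {float, <<"5">>}
%%   maybe_convert_to_float_str(<<"abc">>)                -> <<"abc">>         (left as-is)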
key_filter(undefined) -> undefined;
|
||||
key_filter(Value) -> emqx_utils_conv:bin(Value).
|
||||
key_filter(Value) -> bin(Value).
|
||||
|
||||
data_filter(undefined) -> undefined;
|
||||
data_filter(Int) when is_integer(Int) -> Int;
|
||||
|
@ -799,6 +877,10 @@ str(S) when is_list(S) ->
|
|||
|
||||
is_unrecoverable_error({error, {unrecoverable_error, _}}) ->
|
||||
true;
|
||||
is_unrecoverable_error({error, {Code, _}}) when ?IS_HTTP_ERROR(Code) ->
|
||||
true;
|
||||
is_unrecoverable_error({error, {Code, _, _Body}}) when ?IS_HTTP_ERROR(Code) ->
|
||||
true;
|
||||
is_unrecoverable_error(_) ->
|
||||
false.
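%% For instance (illustrative terms): {error, {unrecoverable_error, timeout}} and
%% {error, {503, Headers}} are unrecoverable, whereas {error, {200, Headers, Body}} is not,
%% since 200 does not satisfy ?IS_HTTP_ERROR, and any other error shape falls through to false.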
|
||||
|
||||
|
|
|
@ -445,6 +445,7 @@ query_by_clientid(ClientId, Config) ->
|
|||
query => Query,
|
||||
dialect => #{
|
||||
header => true,
|
||||
annotations => [<<"datatype">>],
|
||||
delimiter => <<";">>
|
||||
}
|
||||
}),
|
||||
|
@ -456,6 +457,7 @@ query_by_clientid(ClientId, Config) ->
|
|||
_Timeout = 10_000,
|
||||
_Retry = 0
|
||||
),
|
||||
%ct:pal("raw body: ~p", [RawBody0]),
|
||||
RawBody1 = iolist_to_binary(string:replace(RawBody0, <<"\r\n">>, <<"\n">>, all)),
|
||||
{ok, DecodedCSV0} = erl_csv:decode(RawBody1, #{separator => <<$;>>}),
|
||||
DecodedCSV1 = [
|
||||
|
@ -465,21 +467,26 @@ query_by_clientid(ClientId, Config) ->
|
|||
DecodedCSV2 = csv_lines_to_maps(DecodedCSV1),
|
||||
index_by_field(DecodedCSV2).
|
||||
|
||||
csv_lines_to_maps([Title | Rest]) ->
|
||||
csv_lines_to_maps(Rest, Title, _Acc = []);
|
||||
csv_lines_to_maps([[<<"#datatype">> | DataType], Title | Rest]) ->
|
||||
csv_lines_to_maps(Rest, Title, _Acc = [], DataType);
|
||||
csv_lines_to_maps([]) ->
|
||||
[].
|
||||
|
||||
csv_lines_to_maps([[<<"_result">> | _] = Data | RestData], Title, Acc) ->
|
||||
csv_lines_to_maps([[<<"_result">> | _] = Data | RestData], Title, Acc, DataType) ->
|
||||
%ct:pal("data: ~p, title: ~p, datatype: ~p", [Data, Title, DataType]),
|
||||
Map = maps:from_list(lists:zip(Title, Data)),
|
||||
csv_lines_to_maps(RestData, Title, [Map | Acc]);
|
||||
MapT = lists:zip(Title, DataType),
|
||||
[Type] = [T || {<<"_value">>, T} <- MapT],
|
||||
csv_lines_to_maps(RestData, Title, [Map#{'_value_type' => Type} | Acc], DataType);
|
||||
%% ignore the csv title line
|
||||
%% it's always like this:
|
||||
%% [<<"result">>,<<"table">>,<<"_start">>,<<"_stop">>,
|
||||
%% <<"_time">>,<<"_value">>,<<"_field">>,<<"_measurement">>, Measurement],
|
||||
csv_lines_to_maps([[<<"result">> | _] = _Title | RestData], Title, Acc) ->
|
||||
csv_lines_to_maps(RestData, Title, Acc);
|
||||
csv_lines_to_maps([], _Title, Acc) ->
|
||||
csv_lines_to_maps([[<<"result">> | _] = _Title | RestData], Title, Acc, DataType) ->
|
||||
csv_lines_to_maps(RestData, Title, Acc, DataType);
|
||||
csv_lines_to_maps([[<<"#datatype">> | DataType] | RestData], Title, Acc, _) ->
|
||||
csv_lines_to_maps(RestData, Title, Acc, DataType);
|
||||
csv_lines_to_maps([], _Title, Acc, _DataType) ->
|
||||
lists:reverse(Acc).
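%% The `#datatype` annotation row is zipped against the title row and the type found
%% under the `_value` column is attached to every decoded data row, so a row whose
%% `_value` column is annotated as <<"long">> ends up as (illustrative values)
%%   #{<<"_value">> => <<"5">>, <<"_field">> => <<"bar">>, ..., '_value_type' => <<"long">>}.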
index_by_field(DecodedCSV) ->
|
||||
|
@ -494,11 +501,21 @@ assert_persisted_data(ClientId, Expected, PersistedData) ->
|
|||
#{<<"_value">> := ExpectedValue},
|
||||
maps:get(ClientIdIntKey, PersistedData)
|
||||
);
|
||||
(Key, {ExpectedValue, ExpectedType}) ->
|
||||
?assertMatch(
|
||||
#{<<"_value">> := ExpectedValue, '_value_type' := ExpectedType},
|
||||
maps:get(atom_to_binary(Key), PersistedData),
|
||||
#{
|
||||
key => Key,
|
||||
expected_value => ExpectedValue,
|
||||
expected_data_type => ExpectedType
|
||||
}
|
||||
);
|
||||
(Key, ExpectedValue) ->
|
||||
?assertMatch(
|
||||
#{<<"_value">> := ExpectedValue},
|
||||
maps:get(atom_to_binary(Key), PersistedData),
|
||||
#{expected => ExpectedValue}
|
||||
#{key => Key, expected_value => ExpectedValue}
|
||||
)
|
||||
end,
|
||||
Expected
|
||||
|
@ -689,7 +706,15 @@ t_const_timestamp(Config) ->
|
|||
Config,
|
||||
#{
|
||||
<<"write_syntax">> =>
|
||||
<<"mqtt,clientid=${clientid} foo=${payload.foo}i,bar=5i ", ConstBin/binary>>
|
||||
<<
|
||||
"mqtt,clientid=${clientid} "
|
||||
"foo=${payload.foo}i,"
|
||||
"foo1=${payload.foo},"
|
||||
"foo2=\"${payload.foo}\","
|
||||
"foo3=\"${payload.foo}somestr\","
|
||||
"bar=5i,baz0=1.1,baz1=\"a\",baz2=\"ai\",baz3=\"au\",baz4=\"1u\" ",
|
||||
ConstBin/binary
|
||||
>>
|
||||
}
|
||||
)
|
||||
),
|
||||
|
@ -709,7 +734,18 @@ t_const_timestamp(Config) ->
|
|||
end,
|
||||
ct:sleep(1500),
|
||||
PersistedData = query_by_clientid(ClientId, Config),
|
||||
Expected = #{foo => <<"123">>},
|
||||
Expected = #{
|
||||
foo => {<<"123">>, <<"long">>},
|
||||
foo1 => {<<"123">>, <<"double">>},
|
||||
foo2 => {<<"123">>, <<"string">>},
|
||||
foo3 => {<<"123somestr">>, <<"string">>},
|
||||
bar => {<<"5">>, <<"long">>},
|
||||
baz0 => {<<"1.1">>, <<"double">>},
|
||||
baz1 => {<<"a">>, <<"string">>},
|
||||
baz2 => {<<"ai">>, <<"string">>},
|
||||
baz3 => {<<"au">>, <<"string">>},
|
||||
baz4 => {<<"1u">>, <<"string">>}
|
||||
},
|
||||
assert_persisted_data(ClientId, Expected, PersistedData),
|
||||
TimeReturned0 = maps:get(<<"_time">>, maps:get(<<"foo">>, PersistedData)),
|
||||
TimeReturned = pad_zero(TimeReturned0),
|
||||
|
@ -945,6 +981,7 @@ t_create_disconnected(Config) ->
|
|||
econnrefused -> ok;
|
||||
closed -> ok;
|
||||
{closed, _} -> ok;
|
||||
{shutdown, closed} -> ok;
|
||||
_ -> ct:fail("influxdb_client_not_alive with wrong reason: ~p", [Reason])
|
||||
end,
|
||||
ok
|
||||
|
|
|
@ -102,27 +102,51 @@
|
|||
#{
|
||||
measurement => "m7",
|
||||
tags => [{"tag", "tag7"}, {"tag_a", "\"tag7a\""}, {"tag_b", "tag7b"}],
|
||||
fields => [{"field", "field7"}, {"field_a", "field7a"}, {"field_b", "field7b"}],
|
||||
fields => [
|
||||
{"field", {quoted, "field7"}},
|
||||
{"field_a", "field7a"},
|
||||
{"field_b", {quoted, "field7b"}}
|
||||
],
|
||||
timestamp => undefined
|
||||
}},
|
||||
{"m8,tag=tag8,tag_a=\"tag8a\",tag_b=tag8b field=\"field8\",field_a=field8a,field_b=\"field8b\" ${timestamp8}",
|
||||
#{
|
||||
measurement => "m8",
|
||||
tags => [{"tag", "tag8"}, {"tag_a", "\"tag8a\""}, {"tag_b", "tag8b"}],
|
||||
fields => [{"field", "field8"}, {"field_a", "field8a"}, {"field_b", "field8b"}],
|
||||
fields => [
|
||||
{"field", {quoted, "field8"}},
|
||||
{"field_a", "field8a"},
|
||||
{"field_b", {quoted, "field8b"}}
|
||||
],
|
||||
timestamp => "${timestamp8}"
|
||||
}},
|
||||
{
|
||||
"m8a,tag=tag8,tag_a=\"${tag8a}\",tag_b=tag8b field=\"${field8}\","
|
||||
"field_a=field8a,field_b=\"${field8b}\" ${timestamp8}",
|
||||
#{
|
||||
measurement => "m8a",
|
||||
tags => [{"tag", "tag8"}, {"tag_a", "\"${tag8a}\""}, {"tag_b", "tag8b"}],
|
||||
fields => [
|
||||
{"field", {quoted, "${field8}"}},
|
||||
{"field_a", "field8a"},
|
||||
{"field_b", {quoted, "${field8b}"}}
|
||||
],
|
||||
timestamp => "${timestamp8}"
|
||||
}
|
||||
},
|
||||
{"m9,tag=tag9,tag_a=\"tag9a\",tag_b=tag9b field=\"field9\",field_a=field9a,field_b=\"\" ${timestamp9}",
|
||||
#{
|
||||
measurement => "m9",
|
||||
tags => [{"tag", "tag9"}, {"tag_a", "\"tag9a\""}, {"tag_b", "tag9b"}],
|
||||
fields => [{"field", "field9"}, {"field_a", "field9a"}, {"field_b", ""}],
|
||||
fields => [
|
||||
{"field", {quoted, "field9"}}, {"field_a", "field9a"}, {"field_b", {quoted, ""}}
|
||||
],
|
||||
timestamp => "${timestamp9}"
|
||||
}},
|
||||
{"m10 field=\"\" ${timestamp10}", #{
|
||||
measurement => "m10",
|
||||
tags => [],
|
||||
fields => [{"field", ""}],
|
||||
fields => [{"field", {quoted, ""}}],
|
||||
timestamp => "${timestamp10}"
|
||||
}}
|
||||
]).
|
||||
|
@ -177,19 +201,19 @@
|
|||
{"m2,tag=tag2 field=\"field \\\"2\\\",\n\"", #{
|
||||
measurement => "m2",
|
||||
tags => [{"tag", "tag2"}],
|
||||
fields => [{"field", "field \"2\",\n"}],
|
||||
fields => [{"field", {quoted, "field \"2\",\n"}}],
|
||||
timestamp => undefined
|
||||
}},
|
||||
{"m\\ 3 field=\"field3\" ${payload.timestamp\\ 3}", #{
|
||||
measurement => "m 3",
|
||||
tags => [],
|
||||
fields => [{"field", "field3"}],
|
||||
fields => [{"field", {quoted, "field3"}}],
|
||||
timestamp => "${payload.timestamp 3}"
|
||||
}},
|
||||
{"m4 field=\"\\\"field\\\\4\\\"\"", #{
|
||||
measurement => "m4",
|
||||
tags => [],
|
||||
fields => [{"field", "\"field\\4\""}],
|
||||
fields => [{"field", {quoted, "\"field\\4\""}}],
|
||||
timestamp => undefined
|
||||
}},
|
||||
{
|
||||
|
@ -208,7 +232,11 @@
|
|||
#{
|
||||
measurement => "m6",
|
||||
tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}],
|
||||
fields => [{"field", "field6"}, {"field_a", "field6a"}, {"field_b", "field6b"}],
|
||||
fields => [
|
||||
{"field", {quoted, "field6"}},
|
||||
{"field_a", {quoted, "field6a"}},
|
||||
{"field_b", {quoted, "field6b"}}
|
||||
],
|
||||
timestamp => undefined
|
||||
}},
|
||||
{
|
||||
|
@ -217,7 +245,11 @@
|
|||
#{
|
||||
measurement => " m7 ",
|
||||
tags => [{"tag", " tag,7 "}, {"tag_a", "\"tag7a\""}, {"tag_b,tag1", "tag7b"}],
|
||||
fields => [{"field", "field7"}, {"field_a", "field7a"}, {"field_b", "field7b\\\n"}],
|
||||
fields => [
|
||||
{"field", {quoted, "field7"}},
|
||||
{"field_a", "field7a"},
|
||||
{"field_b", {quoted, "field7b\\\n"}}
|
||||
],
|
||||
timestamp => undefined
|
||||
}
|
||||
},
|
||||
|
@ -227,7 +259,11 @@
|
|||
#{
|
||||
measurement => "m8",
|
||||
tags => [{"tag", "tag8"}, {"tag_a", "\"tag8a\""}, {"tag_b", "tag8b"}],
|
||||
fields => [{"field", "field8"}, {"field_a", "field8a"}, {"field_b", "\"field\" = 8b"}],
|
||||
fields => [
|
||||
{"field", {quoted, "field8"}},
|
||||
{"field_a", "field8a"},
|
||||
{"field_b", {quoted, "\"field\" = 8b"}}
|
||||
],
|
||||
timestamp => "${timestamp8}"
|
||||
}
|
||||
},
|
||||
|
@ -235,14 +271,18 @@
|
|||
#{
|
||||
measurement => "m\\9",
|
||||
tags => [{"tag", "tag9"}, {"tag_a", "\"tag9a\""}, {"tag_b", "tag9b"}],
|
||||
fields => [{"field=field", "field9"}, {"field_a", "field9a"}, {"field_b", ""}],
|
||||
fields => [
|
||||
{"field=field", {quoted, "field9"}},
|
||||
{"field_a", "field9a"},
|
||||
{"field_b", {quoted, ""}}
|
||||
],
|
||||
timestamp => "${timestamp9}"
|
||||
}},
|
||||
{"m\\,10 \"field\\\\\"=\"\" ${timestamp10}", #{
|
||||
measurement => "m,10",
|
||||
tags => [],
|
||||
%% backslash should not be un-escaped in tag key
|
||||
fields => [{"\"field\\\\\"", ""}],
|
||||
fields => [{"\"field\\\\\"", {quoted, ""}}],
|
||||
timestamp => "${timestamp10}"
|
||||
}}
|
||||
]).
|
||||
|
@ -257,19 +297,19 @@
|
|||
{" m2,tag=tag2 field=\"field \\\"2\\\",\n\" ", #{
|
||||
measurement => "m2",
|
||||
tags => [{"tag", "tag2"}],
|
||||
fields => [{"field", "field \"2\",\n"}],
|
||||
fields => [{"field", {quoted, "field \"2\",\n"}}],
|
||||
timestamp => undefined
|
||||
}},
|
||||
{" m\\ 3 field=\"field3\" ${payload.timestamp\\ 3} ", #{
|
||||
measurement => "m 3",
|
||||
tags => [],
|
||||
fields => [{"field", "field3"}],
|
||||
fields => [{"field", {quoted, "field3"}}],
|
||||
timestamp => "${payload.timestamp 3}"
|
||||
}},
|
||||
{" m4 field=\"\\\"field\\\\4\\\"\" ", #{
|
||||
measurement => "m4",
|
||||
tags => [],
|
||||
fields => [{"field", "\"field\\4\""}],
|
||||
fields => [{"field", {quoted, "\"field\\4\""}}],
|
||||
timestamp => undefined
|
||||
}},
|
||||
{
|
||||
|
@ -288,7 +328,11 @@
|
|||
#{
|
||||
measurement => "m6",
|
||||
tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}],
|
||||
fields => [{"field", "field6"}, {"field_a", "field6a"}, {"field_b", "field6b"}],
|
||||
fields => [
|
||||
{"field", {quoted, "field6"}},
|
||||
{"field_a", {quoted, "field6a"}},
|
||||
{"field_b", {quoted, "field6b"}}
|
||||
],
|
||||
timestamp => undefined
|
||||
}}
|
||||
]).
|
||||
|
|
|
@ -70,7 +70,7 @@ fields(action_resource_opts) ->
|
|||
fun({K, _V}) ->
|
||||
not lists:member(K, unsupported_opts())
|
||||
end,
|
||||
emqx_bridge_v2_schema:resource_opts_fields()
|
||||
emqx_bridge_v2_schema:action_resource_opts_fields()
|
||||
);
|
||||
fields(action_parameters) ->
|
||||
[
|
||||
|
@ -93,7 +93,8 @@ fields(action_parameters) ->
|
|||
mk(
|
||||
array(ref(?MODULE, action_parameters_data)),
|
||||
#{
|
||||
desc => ?DESC("action_parameters_data")
|
||||
desc => ?DESC("action_parameters_data"),
|
||||
required => true
|
||||
}
|
||||
)}
|
||||
] ++
|
||||
|
|
|
@ -93,7 +93,7 @@ connector_example_values() ->
|
|||
iotdb_version => ?VSN_1_1_X,
|
||||
authentication => #{
|
||||
<<"username">> => <<"root">>,
|
||||
<<"password">> => <<"*****">>
|
||||
<<"password">> => <<"******">>
|
||||
},
|
||||
base_url => <<"http://iotdb.local:18080/">>,
|
||||
connect_timeout => <<"15s">>,
|
||||
|
@ -112,7 +112,10 @@ roots() ->
|
|||
[{config, #{type => hoconsc:ref(?MODULE, config)}}].
|
||||
|
||||
fields(config) ->
|
||||
proplists_without([url, headers], emqx_bridge_http_schema:fields("config_connector")) ++
|
||||
proplists_without(
|
||||
[url, request, retry_interval, headers],
|
||||
emqx_bridge_http_schema:fields("config_connector")
|
||||
) ++
|
||||
fields("connection_fields");
|
||||
fields("connection_fields") ->
|
||||
[
|
||||
|
@ -209,7 +212,7 @@ on_start(InstanceId, #{iotdb_version := Version} = Config) ->
|
|||
?SLOG(info, #{
|
||||
msg => "iotdb_bridge_started",
|
||||
instance_id => InstanceId,
|
||||
request => maps:get(request, State, <<>>)
|
||||
request => emqx_utils:redact(maps:get(request, State, <<>>))
|
||||
}),
|
||||
?tp(iotdb_bridge_started, #{instance_id => InstanceId}),
|
||||
{ok, State#{iotdb_version => Version, channels => #{}}};
|
||||
|
@ -217,7 +220,7 @@ on_start(InstanceId, #{iotdb_version := Version} = Config) ->
|
|||
?SLOG(error, #{
|
||||
msg => "failed_to_start_iotdb_bridge",
|
||||
instance_id => InstanceId,
|
||||
base_url => maps:get(request, Config, <<>>),
|
||||
request => emqx_utils:redact(maps:get(request, Config, <<>>)),
|
||||
reason => Reason
|
||||
}),
|
||||
throw(failed_to_start_iotdb_bridge)
|
||||
|
@ -441,19 +444,22 @@ proc_data(
|
|||
DataType = list_to_binary(
|
||||
string:uppercase(binary_to_list(emqx_placeholder:proc_tmpl(DataType0, Msg)))
|
||||
),
|
||||
case proc_value(DataType, ValueTkn, Msg) of
|
||||
{ok, Value} ->
|
||||
try
|
||||
proc_data(T, Msg, Nows, [
|
||||
#{
|
||||
timestamp => iot_timestamp(TimestampTkn, Msg, Nows),
|
||||
measurement => emqx_placeholder:proc_tmpl(Measurement, Msg),
|
||||
data_type => DataType,
|
||||
value => Value
|
||||
value => proc_value(DataType, ValueTkn, Msg)
|
||||
}
|
||||
| Acc
|
||||
]);
|
||||
Error ->
|
||||
Error
|
||||
])
|
||||
catch
|
||||
throw:Reason ->
|
||||
{error, Reason};
|
||||
Error:Reason:Stacktrace ->
|
||||
?SLOG(debug, #{exception => Error, reason => Reason, stacktrace => Stacktrace}),
|
||||
{error, invalid_data}
|
||||
end;
|
||||
proc_data([], _Msg, _Nows, Acc) ->
|
||||
{ok, lists:reverse(Acc)}.
|
||||
|
@ -475,19 +481,18 @@ iot_timestamp(Timestamp, _) when is_binary(Timestamp) ->
|
|||
binary_to_integer(Timestamp).
|
||||
|
||||
proc_value(<<"TEXT">>, ValueTkn, Msg) ->
|
||||
{ok,
|
||||
case emqx_placeholder:proc_tmpl(ValueTkn, Msg) of
|
||||
<<"undefined">> -> null;
|
||||
Val -> Val
|
||||
end};
|
||||
end;
|
||||
proc_value(<<"BOOLEAN">>, ValueTkn, Msg) ->
|
||||
{ok, convert_bool(replace_var(ValueTkn, Msg))};
|
||||
convert_bool(replace_var(ValueTkn, Msg));
|
||||
proc_value(Int, ValueTkn, Msg) when Int =:= <<"INT32">>; Int =:= <<"INT64">> ->
|
||||
{ok, convert_int(replace_var(ValueTkn, Msg))};
|
||||
convert_int(replace_var(ValueTkn, Msg));
|
||||
proc_value(Int, ValueTkn, Msg) when Int =:= <<"FLOAT">>; Int =:= <<"DOUBLE">> ->
|
||||
{ok, convert_float(replace_var(ValueTkn, Msg))};
|
||||
convert_float(replace_var(ValueTkn, Msg));
|
||||
proc_value(Type, _, _) ->
|
||||
{error, {invalid_type, Type}}.
|
||||
throw(#{reason => invalid_type, type => Type}).
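%% Sketch of the rewritten behaviour (example inputs/outputs are illustrative, and the
%% convert_* helpers are assumed to parse the rendered binaries):
%%   proc_value(<<"INT32">>, ValueTkn, Msg)  -> convert_int(...), e.g. 36 for a "36" payload
%%   proc_value(<<"TEXT">>,  ValueTkn, Msg)  -> null when the placeholder is missing
%%   proc_value(<<"IxT32">>, ValueTkn, Msg)  -> throws #{reason => invalid_type, type => <<"IxT32">>}
%% The throw is caught in proc_data/4 above and surfaces to the caller as {error, Reason}.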
replace_var(Tokens, Data) when is_list(Tokens) ->
|
||||
[Val] = emqx_placeholder:proc_tmpl(Tokens, Data, #{return => rawlist}),
|
||||
|
|
|
@ -664,7 +664,16 @@ t_sync_query_invalid_type(Config) ->
|
|||
DeviceId = iotdb_device(Config),
|
||||
Payload = make_iotdb_payload(DeviceId, "temp", "IxT32", "36"),
|
||||
MakeMessageFun = make_message_fun(iotdb_topic(Config), Payload),
|
||||
IsInvalidType = fun(Result) -> ?assertMatch({error, {invalid_type, _}}, Result) end,
|
||||
IsInvalidType = fun(Result) -> ?assertMatch({error, #{reason := invalid_type}}, Result) end,
|
||||
ok = emqx_bridge_v2_testlib:t_sync_query(
|
||||
Config, MakeMessageFun, IsInvalidType, iotdb_bridge_on_query
|
||||
).
|
||||
|
||||
t_sync_query_unmatched_type(Config) ->
|
||||
DeviceId = iotdb_device(Config),
|
||||
Payload = make_iotdb_payload(DeviceId, "temp", "BOOLEAN", "not boolean"),
|
||||
MakeMessageFun = make_message_fun(iotdb_topic(Config), Payload),
|
||||
IsInvalidType = fun(Result) -> ?assertMatch({error, invalid_data}, Result) end,
|
||||
ok = emqx_bridge_v2_testlib:t_sync_query(
|
||||
Config, MakeMessageFun, IsInvalidType, iotdb_bridge_on_query
|
||||
).
|
||||
|
|
|
@ -297,6 +297,7 @@ fields(kafka_producer_action) ->
|
|||
mk(binary(), #{
|
||||
desc => ?DESC(emqx_connector_schema, "connector_field"), required => true
|
||||
})},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
{description, emqx_schema:description_schema()}
|
||||
] ++ producer_opts(action);
|
||||
fields(kafka_consumer) ->
|
||||
|
@ -552,7 +553,7 @@ fields(connector_resource_opts) ->
|
|||
emqx_connector_schema:resource_opts_fields();
|
||||
fields(resource_opts) ->
|
||||
SupportedFields = [health_check_interval],
|
||||
CreationOpts = emqx_bridge_v2_schema:resource_opts_fields(),
|
||||
CreationOpts = emqx_bridge_v2_schema:action_resource_opts_fields(),
|
||||
lists:filter(fun({Field, _}) -> lists:member(Field, SupportedFields) end, CreationOpts);
|
||||
fields(action_field) ->
|
||||
{kafka_producer,
|
||||
|
|
|
@ -135,7 +135,7 @@ create_producers_for_bridge_v2(
|
|||
KafkaHeadersTokens = preproc_kafka_headers(maps:get(kafka_headers, KafkaConfig, undefined)),
|
||||
KafkaExtHeadersTokens = preproc_ext_headers(maps:get(kafka_ext_headers, KafkaConfig, [])),
|
||||
KafkaHeadersValEncodeMode = maps:get(kafka_header_value_encode_mode, KafkaConfig, none),
|
||||
{_BridgeType, BridgeName} = emqx_bridge_v2:parse_id(BridgeV2Id),
|
||||
#{name := BridgeName} = emqx_bridge_v2:parse_id(BridgeV2Id),
|
||||
TestIdStart = string:find(BridgeV2Id, ?TEST_ID_PREFIX),
|
||||
IsDryRun =
|
||||
case TestIdStart of
|
||||
|
|
|
@ -97,7 +97,7 @@ fields(action_parameters) ->
|
|||
fields(connector_resource_opts) ->
|
||||
emqx_connector_schema:resource_opts_fields();
|
||||
fields(action_resource_opts) ->
|
||||
emqx_bridge_v2_schema:resource_opts_fields([
|
||||
emqx_bridge_v2_schema:action_resource_opts_fields([
|
||||
{batch_size, #{
|
||||
importance => ?IMPORTANCE_HIDDEN,
|
||||
converter => fun(_, _) -> 1 end,
|
||||
|
|
|
@ -20,8 +20,13 @@
|
|||
-include_lib("emqx_resource/include/emqx_resource.hrl").
|
||||
|
||||
-behaviour(emqx_resource).
|
||||
-behaviour(ecpool_worker).
|
||||
|
||||
%% ecpool
|
||||
-export([connect/1]).
|
||||
|
||||
-export([on_message_received/3]).
|
||||
-export([handle_disconnect/1]).
|
||||
|
||||
%% callbacks of behaviour emqx_resource
|
||||
-export([
|
||||
|
@ -30,11 +35,25 @@
|
|||
on_stop/2,
|
||||
on_query/3,
|
||||
on_query_async/4,
|
||||
on_get_status/2
|
||||
on_get_status/2,
|
||||
on_add_channel/4,
|
||||
on_remove_channel/3,
|
||||
on_get_channel_status/3,
|
||||
on_get_channels/1
|
||||
]).
|
||||
|
||||
-export([on_async_result/2]).
|
||||
|
||||
-type name() :: term().
|
||||
|
||||
-type option() ::
|
||||
{name, name()}
|
||||
| {ingress, map()}
|
||||
%% see `emqtt:option()`
|
||||
| {client_opts, map()}.
|
||||
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
|
||||
-define(HEALTH_CHECK_TIMEOUT, 1000).
|
||||
-define(INGRESS, "I").
|
||||
-define(EGRESS, "E").
|
||||
|
@ -42,142 +61,211 @@
|
|||
%% ===================================================================
|
||||
%% When this bridge is used as a data source, ?MODULE:on_message_received will be called
%% when the bridge receives messages from the remote broker.
|
||||
on_message_received(Msg, HookPoint, ResId) ->
|
||||
|
||||
on_message_received(Msg, HookPoints, ResId) ->
|
||||
emqx_resource_metrics:received_inc(ResId),
|
||||
emqx_hooks:run(HookPoint, [Msg]).
|
||||
lists:foreach(
|
||||
fun(HookPoint) ->
|
||||
emqx_hooks:run(HookPoint, [Msg])
|
||||
end,
|
||||
HookPoints
|
||||
),
|
||||
ok.
|
||||
|
||||
%% ===================================================================
|
||||
callback_mode() -> async_if_possible.
|
||||
|
||||
on_start(ResourceId, Conf) ->
|
||||
on_start(ResourceId, #{server := Server} = Conf) ->
|
||||
?SLOG(info, #{
|
||||
msg => "starting_mqtt_connector",
|
||||
connector => ResourceId,
|
||||
config => emqx_utils:redact(Conf)
|
||||
}),
|
||||
case start_ingress(ResourceId, Conf) of
|
||||
TopicToHandlerIndex = emqx_topic_index:new(),
|
||||
StartConf = Conf#{topic_to_handler_index => TopicToHandlerIndex},
|
||||
case start_mqtt_clients(ResourceId, StartConf) of
|
||||
{ok, Result1} ->
|
||||
case start_egress(ResourceId, Conf) of
|
||||
{ok, Result2} ->
|
||||
{ok, maps:merge(Result1, Result2)};
|
||||
{error, Reason} ->
|
||||
_ = stop_ingress(Result1),
|
||||
{error, Reason}
|
||||
end;
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end.
|
||||
|
||||
start_ingress(ResourceId, Conf) ->
|
||||
ClientOpts = mk_client_opts(ResourceId, ?INGRESS, Conf),
|
||||
case mk_ingress_config(ResourceId, Conf) of
|
||||
Ingress = #{} ->
|
||||
start_ingress(ResourceId, Ingress, ClientOpts);
|
||||
undefined ->
|
||||
{ok, #{}}
|
||||
end.
|
||||
|
||||
start_ingress(ResourceId, Ingress, ClientOpts) ->
|
||||
PoolName = <<ResourceId/binary, ":ingress">>,
|
||||
PoolSize = choose_ingress_pool_size(ResourceId, Ingress),
|
||||
Options = [
|
||||
{name, PoolName},
|
||||
{pool_size, PoolSize},
|
||||
{ingress, Ingress},
|
||||
{client_opts, ClientOpts}
|
||||
],
|
||||
ok = emqx_resource:allocate_resource(ResourceId, ingress_pool_name, PoolName),
|
||||
case emqx_resource_pool:start(PoolName, emqx_bridge_mqtt_ingress, Options) of
|
||||
ok ->
|
||||
{ok, #{ingress_pool_name => PoolName}};
|
||||
{error, {start_pool_failed, _, Reason}} ->
|
||||
{error, Reason}
|
||||
end.
|
||||
|
||||
choose_ingress_pool_size(<<?TEST_ID_PREFIX, _/binary>>, _) ->
|
||||
1;
|
||||
choose_ingress_pool_size(
|
||||
ResourceId,
|
||||
#{remote := #{topic := RemoteTopic}, pool_size := PoolSize}
|
||||
) ->
|
||||
case emqx_topic:parse(RemoteTopic) of
|
||||
{#share{} = _Filter, _SubOpts} ->
|
||||
% NOTE: this is shared subscription, many workers may subscribe
|
||||
PoolSize;
|
||||
{_Filter, #{}} when PoolSize > 1 ->
|
||||
% NOTE: this is regular subscription, only one worker should subscribe
|
||||
?SLOG(warning, #{
|
||||
msg => "mqtt_bridge_ingress_pool_size_ignored",
|
||||
connector => ResourceId,
|
||||
reason =>
|
||||
"Remote topic filter is not a shared subscription, "
|
||||
"ingress pool will start with a single worker",
|
||||
config_pool_size => PoolSize,
|
||||
pool_size => 1
|
||||
}),
|
||||
1;
|
||||
{_Filter, #{}} when PoolSize == 1 ->
|
||||
1
|
||||
end.
|
||||
|
||||
start_egress(ResourceId, Conf) ->
|
||||
% NOTE
|
||||
% We are ignoring the user configuration here because there's currently no reliable way
|
||||
% to ensure proper session recovery according to the MQTT spec.
|
||||
ClientOpts = maps:put(clean_start, true, mk_client_opts(ResourceId, ?EGRESS, Conf)),
|
||||
case mk_egress_config(Conf) of
|
||||
Egress = #{} ->
|
||||
start_egress(ResourceId, Egress, ClientOpts);
|
||||
undefined ->
|
||||
{ok, #{}}
|
||||
end.
|
||||
|
||||
start_egress(ResourceId, Egress, ClientOpts) ->
|
||||
PoolName = <<ResourceId/binary, ":egress">>,
|
||||
PoolSize = maps:get(pool_size, Egress),
|
||||
Options = [
|
||||
{name, PoolName},
|
||||
{pool_size, PoolSize},
|
||||
{client_opts, ClientOpts}
|
||||
],
|
||||
ok = emqx_resource:allocate_resource(ResourceId, egress_pool_name, PoolName),
|
||||
case emqx_resource_pool:start(PoolName, emqx_bridge_mqtt_egress, Options) of
|
||||
ok ->
|
||||
{ok, #{
|
||||
egress_pool_name => PoolName,
|
||||
egress_config => emqx_bridge_mqtt_egress:config(Egress)
|
||||
{ok, Result1#{
|
||||
installed_channels => #{},
|
||||
clean_start => maps:get(clean_start, Conf),
|
||||
topic_to_handler_index => TopicToHandlerIndex,
|
||||
server => Server
|
||||
}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end.
|
||||
|
||||
on_add_channel(
|
||||
_InstId,
|
||||
#{
|
||||
installed_channels := InstalledChannels,
|
||||
clean_start := CleanStart
|
||||
} = OldState,
|
||||
ChannelId,
|
||||
#{config_root := actions} = ChannelConfig
|
||||
) ->
|
||||
%% Publisher channel
|
||||
%% make a warning if clean_start is set to false
|
||||
case CleanStart of
|
||||
false ->
|
||||
?tp(
|
||||
mqtt_clean_start_egress_action_warning,
|
||||
#{
|
||||
channel_id => ChannelId,
|
||||
resource_id => _InstId
|
||||
}
|
||||
),
|
||||
?SLOG(warning, #{
|
||||
msg => "mqtt_publisher_clean_start_false",
|
||||
reason => "clean_start is set to false when using MQTT publisher action, " ++
|
||||
"which may cause unexpected behavior. " ++
|
||||
"For example, if the client ID is already subscribed to topics, " ++
|
||||
"we might receive messages that are unhanded.",
|
||||
channel => ChannelId,
|
||||
config => emqx_utils:redact(ChannelConfig)
|
||||
});
|
||||
true ->
|
||||
ok
|
||||
end,
|
||||
RemoteParams0 = maps:get(parameters, ChannelConfig),
|
||||
{LocalParams, RemoteParams} = take(local, RemoteParams0, #{}),
|
||||
ChannelState = emqx_bridge_mqtt_egress:config(#{remote => RemoteParams, local => LocalParams}),
|
||||
NewInstalledChannels = maps:put(ChannelId, ChannelState, InstalledChannels),
|
||||
NewState = OldState#{installed_channels => NewInstalledChannels},
|
||||
{ok, NewState};
|
||||
on_add_channel(
|
||||
_ResourceId,
|
||||
#{
|
||||
installed_channels := InstalledChannels,
|
||||
pool_name := PoolName,
|
||||
topic_to_handler_index := TopicToHandlerIndex,
|
||||
server := Server
|
||||
} = OldState,
|
||||
ChannelId,
|
||||
#{hookpoints := HookPoints} = ChannelConfig
|
||||
) ->
|
||||
%% Add ingress channel
|
||||
RemoteParams0 = maps:get(parameters, ChannelConfig),
|
||||
{LocalParams, RemoteParams} = take(local, RemoteParams0, #{}),
|
||||
ChannelState0 = #{
|
||||
hookpoints => HookPoints,
|
||||
server => Server,
|
||||
config_root => sources,
|
||||
local => LocalParams,
|
||||
remote => RemoteParams
|
||||
},
|
||||
ChannelState1 = mk_ingress_config(ChannelId, ChannelState0, TopicToHandlerIndex),
|
||||
ok = emqx_bridge_mqtt_ingress:subscribe_channel(PoolName, ChannelState1),
|
||||
NewInstalledChannels = maps:put(ChannelId, ChannelState1, InstalledChannels),
|
||||
NewState = OldState#{installed_channels => NewInstalledChannels},
|
||||
{ok, NewState}.
|
||||
|
||||
on_remove_channel(
|
||||
_InstId,
|
||||
#{
|
||||
installed_channels := InstalledChannels,
|
||||
pool_name := PoolName,
|
||||
topic_to_handler_index := TopicToHandlerIndex
|
||||
} = OldState,
|
||||
ChannelId
|
||||
) ->
|
||||
ChannelState = maps:get(ChannelId, InstalledChannels),
|
||||
case ChannelState of
|
||||
#{
|
||||
config_root := sources
|
||||
} ->
|
||||
emqx_bridge_mqtt_ingress:unsubscribe_channel(
|
||||
PoolName, ChannelState, ChannelId, TopicToHandlerIndex
|
||||
),
|
||||
ok;
|
||||
_ ->
|
||||
ok
|
||||
end,
|
||||
NewInstalledChannels = maps:remove(ChannelId, InstalledChannels),
|
||||
%% Update state
|
||||
NewState = OldState#{installed_channels => NewInstalledChannels},
|
||||
{ok, NewState}.
|
||||
|
||||
on_get_channel_status(
|
||||
_ResId,
|
||||
ChannelId,
|
||||
#{
|
||||
installed_channels := Channels
|
||||
} = _State
|
||||
) when is_map_key(ChannelId, Channels) ->
|
||||
%% The channel should be ok as long as the MQTT client is ok
|
||||
connected.
|
||||
|
||||
on_get_channels(ResId) ->
|
||||
emqx_bridge_v2:get_channels_for_connector(ResId).
|
||||
|
||||
start_mqtt_clients(ResourceId, Conf) ->
|
||||
ClientOpts = mk_client_opts(ResourceId, Conf),
|
||||
start_mqtt_clients(ResourceId, Conf, ClientOpts).
|
||||
|
||||
start_mqtt_clients(ResourceId, StartConf, ClientOpts) ->
|
||||
PoolName = <<ResourceId/binary>>,
|
||||
#{
|
||||
pool_size := PoolSize
|
||||
} = StartConf,
|
||||
Options = [
|
||||
{name, PoolName},
|
||||
{pool_size, PoolSize},
|
||||
{client_opts, ClientOpts}
|
||||
],
|
||||
ok = emqx_resource:allocate_resource(ResourceId, pool_name, PoolName),
|
||||
case emqx_resource_pool:start(PoolName, ?MODULE, Options) of
|
||||
ok ->
|
||||
{ok, #{pool_name => PoolName}};
|
||||
{error, {start_pool_failed, _, Reason}} ->
|
||||
{error, Reason}
|
||||
end.
|
||||
|
||||
on_stop(ResourceId, _State) ->
|
||||
on_stop(ResourceId, State) ->
|
||||
?SLOG(info, #{
|
||||
msg => "stopping_mqtt_connector",
|
||||
connector => ResourceId
|
||||
}),
|
||||
%% on_stop can be called with State = undefined
|
||||
StateMap =
|
||||
case State of
|
||||
Map when is_map(State) ->
|
||||
Map;
|
||||
_ ->
|
||||
#{}
|
||||
end,
|
||||
case maps:get(topic_to_handler_index, StateMap, undefined) of
|
||||
undefined ->
|
||||
ok;
|
||||
TopicToHandlerIndex ->
|
||||
ets:delete(TopicToHandlerIndex)
|
||||
end,
|
||||
Allocated = emqx_resource:get_allocated_resources(ResourceId),
|
||||
ok = stop_ingress(Allocated),
|
||||
ok = stop_egress(Allocated).
|
||||
|
||||
stop_ingress(#{ingress_pool_name := PoolName}) ->
|
||||
emqx_resource_pool:stop(PoolName);
|
||||
stop_ingress(#{}) ->
|
||||
ok = stop_helper(Allocated),
|
||||
?tp(mqtt_connector_stopped, #{instance_id => ResourceId}),
|
||||
ok.
|
||||
|
||||
stop_egress(#{egress_pool_name := PoolName}) ->
|
||||
emqx_resource_pool:stop(PoolName);
|
||||
stop_egress(#{}) ->
|
||||
ok.
|
||||
stop_helper(#{pool_name := PoolName}) ->
|
||||
emqx_resource_pool:stop(PoolName).
|
||||
|
||||
on_query(
|
||||
ResourceId,
|
||||
{send_message, Msg},
|
||||
#{egress_pool_name := PoolName, egress_config := Config}
|
||||
{ChannelId, Msg},
|
||||
#{pool_name := PoolName} = State
|
||||
) ->
|
||||
?TRACE("QUERY", "send_msg_to_remote_node", #{message => Msg, connector => ResourceId}),
|
||||
handle_send_result(with_egress_client(PoolName, send, [Msg, Config]));
|
||||
on_query(ResourceId, {send_message, Msg}, #{}) ->
|
||||
?TRACE(
|
||||
"QUERY",
|
||||
"send_msg_to_remote_node",
|
||||
#{
|
||||
message => Msg,
|
||||
connector => ResourceId,
|
||||
channel_id => ChannelId
|
||||
}
|
||||
),
|
||||
Channels = maps:get(installed_channels, State),
|
||||
ChannelConfig = maps:get(ChannelId, Channels),
|
||||
handle_send_result(with_egress_client(PoolName, send, [Msg, ChannelConfig]));
|
||||
on_query(ResourceId, {_ChannelId, Msg}, #{}) ->
|
||||
?SLOG(error, #{
|
||||
msg => "forwarding_unavailable",
|
||||
connector => ResourceId,
|
||||
|
@ -187,13 +275,15 @@ on_query(ResourceId, {send_message, Msg}, #{}) ->
|
|||
|
||||
on_query_async(
|
||||
ResourceId,
|
||||
{send_message, Msg},
|
||||
{ChannelId, Msg},
|
||||
CallbackIn,
|
||||
#{egress_pool_name := PoolName, egress_config := Config}
|
||||
#{pool_name := PoolName} = State
|
||||
) ->
|
||||
?TRACE("QUERY", "async_send_msg_to_remote_node", #{message => Msg, connector => ResourceId}),
|
||||
Callback = {fun on_async_result/2, [CallbackIn]},
|
||||
Result = with_egress_client(PoolName, send_async, [Msg, Callback, Config]),
|
||||
Channels = maps:get(installed_channels, State),
|
||||
ChannelConfig = maps:get(ChannelId, Channels),
|
||||
Result = with_egress_client(PoolName, send_async, [Msg, Callback, ChannelConfig]),
|
||||
case Result of
|
||||
ok ->
|
||||
ok;
|
||||
|
@ -202,7 +292,7 @@ on_query_async(
|
|||
{error, Reason} ->
|
||||
{error, classify_error(Reason)}
|
||||
end;
|
||||
on_query_async(ResourceId, {send_message, Msg}, _Callback, #{}) ->
|
||||
on_query_async(ResourceId, {_ChannelId, Msg}, _Callback, #{}) ->
|
||||
?SLOG(error, #{
|
||||
msg => "forwarding_unavailable",
|
||||
connector => ResourceId,
|
||||
|
@ -251,7 +341,7 @@ classify_error(Reason) ->
|
|||
{unrecoverable_error, Reason}.
|
||||
|
||||
on_get_status(_ResourceId, State) ->
|
||||
Pools = maps:to_list(maps:with([ingress_pool_name, egress_pool_name], State)),
|
||||
Pools = maps:to_list(maps:with([pool_name], State)),
|
||||
Workers = [{Pool, Worker} || {Pool, PN} <- Pools, {_Name, Worker} <- ecpool:workers(PN)],
|
||||
try emqx_utils:pmap(fun get_status/1, Workers, ?HEALTH_CHECK_TIMEOUT) of
|
||||
Statuses ->
|
||||
|
@ -261,12 +351,10 @@ on_get_status(_ResourceId, State) ->
|
|||
connecting
|
||||
end.
|
||||
|
||||
get_status({Pool, Worker}) ->
|
||||
get_status({_Pool, Worker}) ->
|
||||
case ecpool_worker:client(Worker) of
|
||||
{ok, Client} when Pool == ingress_pool_name ->
|
||||
{ok, Client} ->
|
||||
emqx_bridge_mqtt_ingress:status(Client);
|
||||
{ok, Client} when Pool == egress_pool_name ->
|
||||
emqx_bridge_mqtt_egress:status(Client);
|
||||
{error, _} ->
|
||||
disconnected
|
||||
end.
|
||||
|
@ -284,30 +372,19 @@ combine_status(Statuses) ->
|
|||
end.
|
||||
|
||||
mk_ingress_config(
|
||||
ResourceId,
|
||||
#{
|
||||
ingress := Ingress = #{remote := _},
|
||||
server := Server,
|
||||
hookpoint := HookPoint
|
||||
}
|
||||
ChannelId,
|
||||
IngressChannelConfig,
|
||||
TopicToHandlerIndex
|
||||
) ->
|
||||
Ingress#{
|
||||
server => Server,
|
||||
on_message_received => {?MODULE, on_message_received, [HookPoint, ResourceId]}
|
||||
};
|
||||
mk_ingress_config(ResourceId, #{ingress := #{remote := _}} = Conf) ->
|
||||
error({no_hookpoint_provided, ResourceId, Conf});
|
||||
mk_ingress_config(_ResourceId, #{}) ->
|
||||
undefined.
|
||||
|
||||
mk_egress_config(#{egress := Egress = #{remote := _}}) ->
|
||||
Egress;
|
||||
mk_egress_config(#{}) ->
|
||||
undefined.
|
||||
HookPoints = maps:get(hookpoints, IngressChannelConfig, []),
|
||||
NewConf = IngressChannelConfig#{
|
||||
on_message_received => {?MODULE, on_message_received, [HookPoints, ChannelId]},
|
||||
ingress_list => [IngressChannelConfig]
|
||||
},
|
||||
emqx_bridge_mqtt_ingress:config(NewConf, ChannelId, TopicToHandlerIndex).
|
||||
|
||||
mk_client_opts(
|
||||
ResourceId,
|
||||
ClientScope,
|
||||
Config = #{
|
||||
server := Server,
|
||||
keepalive := KeepAlive,
|
||||
|
@ -327,14 +404,15 @@ mk_client_opts(
|
|||
% A load balancing server (such as haproxy) is often set up before the emqx broker server.
|
||||
% When the load balancing server enables mqtt connection packet inspection,
|
||||
% non-standard mqtt connection packets might be filtered out by LB.
|
||||
bridge_mode
|
||||
bridge_mode,
|
||||
topic_to_handler_index
|
||||
],
|
||||
Config
|
||||
),
|
||||
Name = parse_id_to_name(ResourceId),
|
||||
mk_client_opt_password(Options#{
|
||||
hosts => [HostPort],
|
||||
clientid => clientid(Name, ClientScope, Config),
|
||||
clientid => clientid(Name, Config),
|
||||
connect_timeout => 30,
|
||||
keepalive => ms_to_s(KeepAlive),
|
||||
force_ping => true,
|
||||
|
@ -342,10 +420,8 @@ mk_client_opts(
|
|||
ssl_opts => maps:to_list(maps:remove(enable, Ssl))
|
||||
}).
|
||||
|
||||
parse_id_to_name(<<?TEST_ID_PREFIX, Name/binary>>) ->
|
||||
Name;
|
||||
parse_id_to_name(Id) ->
|
||||
{_Type, Name} = emqx_bridge_resource:parse_bridge_id(Id, #{atom_name => false}),
|
||||
{_Type, Name} = emqx_connector_resource:parse_connector_id(Id, #{atom_name => false}),
|
||||
Name.
|
||||
|
||||
mk_client_opt_password(Options = #{password := Secret}) ->
|
||||
|
@ -357,9 +433,82 @@ mk_client_opt_password(Options) ->
|
|||
ms_to_s(Ms) ->
|
||||
erlang:ceil(Ms / 1000).
|
||||
|
||||
clientid(Name, ClientScope, _Conf = #{clientid_prefix := Prefix}) when
|
||||
clientid(Name, _Conf = #{clientid_prefix := Prefix}) when
|
||||
is_binary(Prefix) andalso Prefix =/= <<>>
|
||||
->
|
||||
emqx_bridge_mqtt_lib:clientid_base([Prefix, $:, Name, ClientScope]);
|
||||
clientid(Name, ClientScope, _Conf) ->
|
||||
emqx_bridge_mqtt_lib:clientid_base([Name, ClientScope]).
|
||||
emqx_bridge_mqtt_lib:clientid_base([Prefix, $:, Name]);
|
||||
clientid(Name, _Conf) ->
|
||||
emqx_bridge_mqtt_lib:clientid_base([Name]).
|
||||
|
||||
%% @doc Start an ingress bridge worker.
|
||||
-spec connect([option() | {ecpool_worker_id, pos_integer()}]) ->
|
||||
{ok, pid()} | {error, _Reason}.
|
||||
connect(Options) ->
|
||||
WorkerId = proplists:get_value(ecpool_worker_id, Options),
|
||||
?SLOG(debug, #{
|
||||
msg => "ingress_client_starting",
|
||||
options => emqx_utils:redact(Options)
|
||||
}),
|
||||
Name = proplists:get_value(name, Options),
|
||||
WorkerId = proplists:get_value(ecpool_worker_id, Options),
|
||||
ClientOpts = proplists:get_value(client_opts, Options),
|
||||
case emqtt:start_link(mk_client_opts(Name, WorkerId, ClientOpts)) of
|
||||
{ok, Pid} ->
|
||||
connect(Pid, Name);
|
||||
{error, Reason} = Error ->
|
||||
?SLOG(error, #{
|
||||
msg => "client_start_failed",
|
||||
config => emqx_utils:redact(ClientOpts),
|
||||
reason => Reason
|
||||
}),
|
||||
Error
|
||||
end.
|
||||
|
||||
mk_client_opts(
|
||||
Name,
|
||||
WorkerId,
|
||||
ClientOpts = #{
|
||||
clientid := ClientId,
|
||||
topic_to_handler_index := TopicToHandlerIndex
|
||||
}
|
||||
) ->
|
||||
ClientOpts#{
|
||||
clientid := mk_clientid(WorkerId, ClientId),
|
||||
msg_handler => mk_client_event_handler(Name, TopicToHandlerIndex)
|
||||
}.
|
||||
|
||||
mk_clientid(WorkerId, ClientId) ->
|
||||
emqx_bridge_mqtt_lib:bytes23([ClientId], WorkerId).
|
||||
|
||||
mk_client_event_handler(Name, TopicToHandlerIndex) ->
|
||||
#{
|
||||
publish => {fun emqx_bridge_mqtt_ingress:handle_publish/3, [Name, TopicToHandlerIndex]},
|
||||
disconnected => {fun ?MODULE:handle_disconnect/1, []}
|
||||
}.
|
||||
|
||||
-spec connect(pid(), name()) ->
|
||||
{ok, pid()} | {error, _Reason}.
|
||||
connect(Pid, Name) ->
|
||||
case emqtt:connect(Pid) of
|
||||
{ok, _Props} ->
|
||||
{ok, Pid};
|
||||
{error, Reason} = Error ->
|
||||
?SLOG(warning, #{
|
||||
msg => "ingress_client_connect_failed",
|
||||
reason => Reason,
|
||||
name => Name
|
||||
}),
|
||||
_ = catch emqtt:stop(Pid),
|
||||
Error
|
||||
end.
|
||||
|
||||
handle_disconnect(_Reason) ->
|
||||
ok.
|
||||
|
||||
take(Key, Map0, Default) ->
|
||||
case maps:take(Key, Map0) of
|
||||
{Value, Map} ->
|
||||
{Value, Map};
|
||||
error ->
|
||||
{Default, Map0}
|
||||
end.
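%% For example: take(local, #{local => L, remote => R}, #{}) returns {L, #{remote => R}},
%% while take(local, #{remote => R}, #{}) returns {#{}, #{remote => R}}, i.e. the default
%% is used when the key is absent. This is how the channel parameters are split into
%% local/remote parts in on_add_channel/4 above.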
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%%-------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
|
@ -30,10 +30,15 @@
|
|||
parse_server/1
|
||||
]).
|
||||
|
||||
-export([
|
||||
connector_examples/1
|
||||
]).
|
||||
|
||||
-import(emqx_schema, [mk_duration/2]).
|
||||
|
||||
-import(hoconsc, [mk/2, ref/2]).
|
||||
|
||||
-define(CONNECTOR_TYPE, mqtt).
|
||||
-define(MQTT_HOST_OPTS, #{default_port => 1883}).
|
||||
|
||||
namespace() -> "connector_mqtt".
|
||||
|
@ -61,6 +66,14 @@ fields("config") ->
|
|||
}
|
||||
)}
|
||||
];
|
||||
fields("config_connector") ->
|
||||
emqx_connector_schema:common_fields() ++ fields("specific_connector_config");
|
||||
fields("specific_connector_config") ->
|
||||
[{pool_size, fun egress_pool_size/1}] ++
|
||||
emqx_connector_schema:resource_opts_ref(?MODULE, resource_opts) ++
|
||||
fields("server_configs");
|
||||
fields(resource_opts) ->
|
||||
emqx_connector_schema:resource_opts_fields();
|
||||
fields("server_configs") ->
|
||||
[
|
||||
{mode,
|
||||
|
@ -131,6 +144,7 @@ fields("server_configs") ->
|
|||
fields("ingress") ->
|
||||
[
|
||||
{pool_size, fun ingress_pool_size/1},
|
||||
%% array
|
||||
{remote,
|
||||
mk(
|
||||
ref(?MODULE, "ingress_remote"),
|
||||
|
@ -144,6 +158,22 @@ fields("ingress") ->
|
|||
}
|
||||
)}
|
||||
];
|
||||
fields(connector_ingress) ->
|
||||
[
|
||||
{remote,
|
||||
mk(
|
||||
ref(?MODULE, "ingress_remote"),
|
||||
#{desc => ?DESC("ingress_remote")}
|
||||
)},
|
||||
{local,
|
||||
mk(
|
||||
ref(?MODULE, "ingress_local"),
|
||||
#{
|
||||
desc => ?DESC("ingress_local"),
|
||||
importance => ?IMPORTANCE_HIDDEN
|
||||
}
|
||||
)}
|
||||
];
|
||||
fields("ingress_remote") ->
|
||||
[
|
||||
{topic,
|
||||
|
@ -269,7 +299,16 @@ fields("egress_remote") ->
|
|||
desc => ?DESC("payload")
|
||||
}
|
||||
)}
|
||||
].
|
||||
];
|
||||
fields(Field) when
|
||||
Field == "get_connector";
|
||||
Field == "put_connector";
|
||||
Field == "post_connector"
|
||||
->
|
||||
Fields = fields("specific_connector_config"),
|
||||
emqx_connector_schema:api_fields(Field, ?CONNECTOR_TYPE, Fields);
|
||||
fields(What) ->
|
||||
error({emqx_bridge_mqtt_connector_schema, missing_field_handler, What}).
|
||||
|
||||
ingress_pool_size(desc) ->
|
||||
?DESC("ingress_pool_size");
|
||||
|
@ -283,6 +322,8 @@ egress_pool_size(Prop) ->
|
|||
|
||||
desc("server_configs") ->
|
||||
?DESC("server_configs");
|
||||
desc("config_connector") ->
|
||||
?DESC("config_connector");
|
||||
desc("ingress") ->
|
||||
?DESC("ingress_desc");
|
||||
desc("ingress_remote") ->
|
||||
|
@ -295,6 +336,8 @@ desc("egress_remote") ->
|
|||
?DESC("egress_remote");
|
||||
desc("egress_local") ->
|
||||
?DESC("egress_local");
|
||||
desc(resource_opts) ->
|
||||
?DESC(emqx_resource_schema, <<"resource_opts">>);
|
||||
desc(_) ->
|
||||
undefined.
|
||||
|
||||
|
@ -304,3 +347,6 @@ qos() ->
|
|||
parse_server(Str) ->
|
||||
#{hostname := Host, port := Port} = emqx_schema:parse_server(Str, ?MQTT_HOST_OPTS),
|
||||
{Host, Port}.
|
||||
|
||||
connector_examples(_Method) ->
|
||||
[#{}].
|
||||
|
|
|
@ -20,33 +20,16 @@
|
|||
-include_lib("emqx/include/emqx.hrl").
|
||||
-include_lib("emqx/include/emqx_mqtt.hrl").
|
||||
|
||||
-behaviour(ecpool_worker).
|
||||
|
||||
%% ecpool
|
||||
-export([connect/1]).
|
||||
|
||||
-export([
|
||||
config/1,
|
||||
send/3,
|
||||
send_async/4
|
||||
]).
|
||||
|
||||
%% management APIs
|
||||
-export([
|
||||
status/1,
|
||||
info/1
|
||||
]).
|
||||
|
||||
-type name() :: term().
|
||||
-type message() :: emqx_types:message() | map().
|
||||
-type callback() :: {function(), [_Arg]} | {module(), atom(), [_Arg]}.
|
||||
-type remote_message() :: #mqtt_msg{}.
|
||||
|
||||
-type option() ::
|
||||
{name, name()}
|
||||
%% see `emqtt:option()`
|
||||
| {client_opts, map()}.
|
||||
|
||||
-type egress() :: #{
|
||||
local => #{
|
||||
topic => emqx_types:topic()
|
||||
|
@ -54,51 +37,6 @@
|
|||
remote := emqx_bridge_mqtt_msg:msgvars()
|
||||
}.
|
||||
|
||||
%% @doc Start an ingress bridge worker.
|
||||
-spec connect([option() | {ecpool_worker_id, pos_integer()}]) ->
|
||||
{ok, pid()} | {error, _Reason}.
|
||||
connect(Options) ->
|
||||
?SLOG(debug, #{
|
||||
msg => "egress_client_starting",
|
||||
options => emqx_utils:redact(Options)
|
||||
}),
|
||||
Name = proplists:get_value(name, Options),
|
||||
WorkerId = proplists:get_value(ecpool_worker_id, Options),
|
||||
ClientOpts = proplists:get_value(client_opts, Options),
|
||||
case emqtt:start_link(mk_client_opts(WorkerId, ClientOpts)) of
|
||||
{ok, Pid} ->
|
||||
connect(Pid, Name);
|
||||
{error, Reason} = Error ->
|
||||
?SLOG(error, #{
|
||||
msg => "egress_client_start_failed",
|
||||
config => emqx_utils:redact(ClientOpts),
|
||||
reason => Reason
|
||||
}),
|
||||
Error
|
||||
end.
|
||||
|
||||
mk_client_opts(WorkerId, ClientOpts = #{clientid := ClientId}) ->
|
||||
ClientOpts#{clientid := mk_clientid(WorkerId, ClientId)}.
|
||||
|
||||
mk_clientid(WorkerId, ClientId) ->
|
||||
emqx_bridge_mqtt_lib:bytes23(ClientId, WorkerId).
|
||||
|
||||
connect(Pid, Name) ->
|
||||
case emqtt:connect(Pid) of
|
||||
{ok, _Props} ->
|
||||
{ok, Pid};
|
||||
{error, Reason} = Error ->
|
||||
?SLOG(warning, #{
|
||||
msg => "egress_client_connect_failed",
|
||||
reason => Reason,
|
||||
name => Name
|
||||
}),
|
||||
_ = catch emqtt:stop(Pid),
|
||||
Error
|
||||
end.
|
||||
|
||||
%%
|
||||
|
||||
-spec config(map()) ->
|
||||
egress().
|
||||
config(#{remote := RC = #{}} = Conf) ->
|
||||
|
@ -137,25 +75,3 @@ to_remote_msg(Msg = #{}, Remote) ->
|
|||
props = emqx_utils:pub_props_to_packet(PubProps),
|
||||
payload = Payload
|
||||
}.
|
||||
|
||||
%%
|
||||
|
||||
-spec info(pid()) ->
|
||||
[{atom(), term()}].
|
||||
info(Pid) ->
|
||||
emqtt:info(Pid).
|
||||
|
||||
-spec status(pid()) ->
|
||||
emqx_resource:resource_status().
|
||||
status(Pid) ->
|
||||
try
|
||||
case proplists:get_value(socket, info(Pid)) of
|
||||
Socket when Socket /= undefined ->
|
||||
connected;
|
||||
undefined ->
|
||||
connecting
|
||||
end
|
||||
catch
|
||||
exit:{noproc, _} ->
|
||||
disconnected
|
||||
end.
|
||||
|
|
|
@ -17,95 +17,50 @@
|
|||
-module(emqx_bridge_mqtt_ingress).
|
||||
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
|
||||
-behaviour(ecpool_worker).
|
||||
|
||||
%% ecpool
|
||||
-export([connect/1]).
|
||||
-include_lib("emqx/include/emqx_mqtt.hrl").
|
||||
|
||||
%% management APIs
|
||||
-export([
|
||||
status/1,
|
||||
info/1
|
||||
info/1,
|
||||
subscribe_channel/2,
|
||||
unsubscribe_channel/4,
|
||||
config/3
|
||||
]).
|
||||
|
||||
-export([handle_publish/5]).
|
||||
-export([handle_disconnect/1]).
|
||||
-export([handle_publish/3]).
|
||||
|
||||
-type name() :: term().
|
||||
|
||||
-type option() ::
|
||||
{name, name()}
|
||||
| {ingress, map()}
|
||||
%% see `emqtt:option()`
|
||||
| {client_opts, map()}.
|
||||
|
||||
-type ingress() :: #{
|
||||
server := string(),
|
||||
remote := #{
|
||||
topic := emqx_types:topic(),
|
||||
qos => emqx_types:qos()
|
||||
},
|
||||
local := emqx_bridge_mqtt_msg:msgvars(),
|
||||
on_message_received := {module(), atom(), [term()]}
|
||||
}.
|
||||
|
||||
%% @doc Start an ingress bridge worker.
|
||||
-spec connect([option() | {ecpool_worker_id, pos_integer()}]) ->
|
||||
{ok, pid()} | {error, _Reason}.
|
||||
connect(Options) ->
|
||||
?SLOG(debug, #{
|
||||
msg => "ingress_client_starting",
|
||||
options => emqx_utils:redact(Options)
|
||||
}),
|
||||
Name = proplists:get_value(name, Options),
|
||||
WorkerId = proplists:get_value(ecpool_worker_id, Options),
|
||||
Ingress = config(proplists:get_value(ingress, Options), Name),
|
||||
ClientOpts = proplists:get_value(client_opts, Options),
|
||||
case emqtt:start_link(mk_client_opts(Name, WorkerId, Ingress, ClientOpts)) of
|
||||
{ok, Pid} ->
|
||||
connect(Pid, Name, Ingress);
|
||||
{error, Reason} = Error ->
|
||||
?SLOG(error, #{
|
||||
msg => "client_start_failed",
|
||||
config => emqx_utils:redact(ClientOpts),
|
||||
reason => Reason
|
||||
}),
|
||||
subscribe_channel(PoolName, ChannelConfig) ->
|
||||
Workers = ecpool:workers(PoolName),
|
||||
PoolSize = length(Workers),
|
||||
Results = [
|
||||
subscribe_channel(Pid, Name, ChannelConfig, Idx, PoolSize)
|
||||
|| {{Name, Idx}, Pid} <- Workers
|
||||
],
|
||||
case proplists:get_value(error, Results, ok) of
|
||||
ok ->
|
||||
ok;
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
mk_client_opts(Name, WorkerId, Ingress, ClientOpts = #{clientid := ClientId}) ->
|
||||
ClientOpts#{
|
||||
clientid := mk_clientid(WorkerId, ClientId),
|
||||
msg_handler => mk_client_event_handler(Name, Ingress)
|
||||
}.
|
||||
subscribe_channel(WorkerPid, Name, Ingress, WorkerIdx, PoolSize) ->
|
||||
case ecpool_worker:client(WorkerPid) of
|
||||
{ok, Client} ->
|
||||
subscribe_channel_helper(Client, Name, Ingress, WorkerIdx, PoolSize);
|
||||
{error, Reason} ->
|
||||
error({client_not_found, Reason})
|
||||
end.
|
||||
|
||||
mk_clientid(WorkerId, ClientId) ->
|
||||
emqx_bridge_mqtt_lib:bytes23(ClientId, WorkerId).
|
||||
|
||||
mk_client_event_handler(Name, Ingress = #{}) ->
|
||||
IngressVars = maps:with([server], Ingress),
|
||||
OnMessage = maps:get(on_message_received, Ingress, undefined),
|
||||
LocalPublish =
|
||||
case Ingress of
|
||||
#{local := Local = #{topic := _}} ->
|
||||
Local;
|
||||
#{} ->
|
||||
undefined
|
||||
end,
|
||||
#{
|
||||
publish => {fun ?MODULE:handle_publish/5, [Name, OnMessage, LocalPublish, IngressVars]},
|
||||
disconnected => {fun ?MODULE:handle_disconnect/1, []}
|
||||
}.
|
||||
|
||||
-spec connect(pid(), name(), ingress()) ->
|
||||
{ok, pid()} | {error, _Reason}.
|
||||
connect(Pid, Name, Ingress) ->
|
||||
case emqtt:connect(Pid) of
|
||||
{ok, _Props} ->
|
||||
case subscribe_remote_topic(Pid, Ingress) of
|
||||
{ok, _, _RCs} ->
|
||||
{ok, Pid};
|
||||
subscribe_channel_helper(Client, Name, Ingress, WorkerIdx, PoolSize) ->
|
||||
IngressList = maps:get(ingress_list, Ingress, []),
|
||||
SubscribeResults = subscribe_remote_topics(
|
||||
Client, IngressList, WorkerIdx, PoolSize, Name
|
||||
),
|
||||
%% Find error if any using proplists:get_value/2
|
||||
case proplists:get_value(error, SubscribeResults, ok) of
|
||||
ok ->
|
||||
ok;
|
||||
{error, Reason} = Error ->
|
||||
?SLOG(error, #{
|
||||
msg => "ingress_client_subscribe_failed",
|
||||
|
@ -113,33 +68,139 @@ connect(Pid, Name, Ingress) ->
|
|||
name => Name,
|
||||
reason => Reason
|
||||
}),
|
||||
_ = catch emqtt:stop(Pid),
|
||||
Error
|
||||
end;
|
||||
{error, Reason} = Error ->
|
||||
?SLOG(warning, #{
|
||||
msg => "ingress_client_connect_failed",
|
||||
reason => Reason,
|
||||
name => Name
|
||||
}),
|
||||
_ = catch emqtt:stop(Pid),
|
||||
Error
|
||||
end.
|
||||
|
||||
subscribe_remote_topic(Pid, #{remote := #{topic := RemoteTopic, qos := QoS}}) ->
|
||||
emqtt:subscribe(Pid, RemoteTopic, QoS).
|
||||
subscribe_remote_topics(Pid, IngressList, WorkerIdx, PoolSize, Name) ->
|
||||
[subscribe_remote_topic(Pid, Ingress, WorkerIdx, PoolSize, Name) || Ingress <- IngressList].
|
||||
|
||||
%%
|
||||
subscribe_remote_topic(
|
||||
Pid, #{remote := #{topic := RemoteTopic, qos := QoS}} = _Remote, WorkerIdx, PoolSize, Name
|
||||
) ->
|
||||
case should_subscribe(RemoteTopic, WorkerIdx, PoolSize, Name, _LogWarn = true) of
|
||||
true ->
|
||||
emqtt:subscribe(Pid, RemoteTopic, QoS);
|
||||
false ->
|
||||
ok
|
||||
end.
|
||||
|
||||
-spec config(map(), name()) ->
|
||||
ingress().
|
||||
config(#{remote := RC, local := LC} = Conf, BridgeName) ->
|
||||
Conf#{
|
||||
remote => parse_remote(RC, BridgeName),
|
||||
local => emqx_bridge_mqtt_msg:parse(LC)
|
||||
}.
|
||||
should_subscribe(RemoteTopic, WorkerIdx, PoolSize, Name, LogWarn) ->
|
||||
IsFirstWorker = WorkerIdx == 1,
|
||||
case emqx_topic:parse(RemoteTopic) of
|
||||
{#share{} = _Filter, _SubOpts} ->
|
||||
% NOTE: this is shared subscription, many workers may subscribe
|
||||
true;
|
||||
{_Filter, #{}} when PoolSize > 1, IsFirstWorker, LogWarn ->
|
||||
% NOTE: this is regular subscription, only one worker should subscribe
|
||||
?SLOG(warning, #{
|
||||
msg => "mqtt_pool_size_ignored",
|
||||
connector => Name,
|
||||
reason =>
|
||||
"Remote topic filter is not a shared subscription, "
|
||||
"only a single connection will be used from the connection pool",
|
||||
config_pool_size => PoolSize,
|
||||
pool_size => PoolSize
|
||||
}),
|
||||
IsFirstWorker;
|
||||
{_Filter, #{}} ->
|
||||
% NOTE: this is regular subscription, only one worker should subscribe
|
||||
IsFirstWorker
|
||||
end.
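%% Illustrative sketch (not part of the change; the connector name and pool
%% sizes below are hypothetical): how should_subscribe/5 treats shared vs.
%% regular topic filters.
should_subscribe_examples() ->
    %% Shared subscription: every pool worker subscribes.
    true = should_subscribe(<<"$share/g1/t/#">>, 3, 8, my_connector, true),
    %% Regular filter: only the first worker subscribes, the rest skip.
    true = should_subscribe(<<"t/#">>, 1, 8, my_connector, true),
    false = should_subscribe(<<"t/#">>, 2, 8, my_connector, false),
    ok.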
|
||||
|
||||
parse_remote(#{qos := QoSIn} = Conf, BridgeName) ->
|
||||
unsubscribe_channel(PoolName, ChannelConfig, ChannelId, TopicToHandlerIndex) ->
|
||||
Workers = ecpool:workers(PoolName),
|
||||
PoolSize = length(Workers),
|
||||
_ = [
|
||||
unsubscribe_channel(Pid, Name, ChannelConfig, Idx, PoolSize, ChannelId, TopicToHandlerIndex)
|
||||
|| {{Name, Idx}, Pid} <- Workers
|
||||
],
|
||||
ok.
|
||||
|
||||
unsubscribe_channel(WorkerPid, Name, Ingress, WorkerIdx, PoolSize, ChannelId, TopicToHandlerIndex) ->
|
||||
case ecpool_worker:client(WorkerPid) of
|
||||
{ok, Client} ->
|
||||
unsubscribe_channel_helper(
|
||||
Client, Name, Ingress, WorkerIdx, PoolSize, ChannelId, TopicToHandlerIndex
|
||||
);
|
||||
{error, Reason} ->
|
||||
error({client_not_found, Reason})
|
||||
end.
|
||||
|
||||
unsubscribe_channel_helper(
|
||||
Client, Name, Ingress, WorkerIdx, PoolSize, ChannelId, TopicToHandlerIndex
|
||||
) ->
|
||||
IngressList = maps:get(ingress_list, Ingress, []),
|
||||
unsubscribe_remote_topics(
|
||||
Client, IngressList, WorkerIdx, PoolSize, Name, ChannelId, TopicToHandlerIndex
|
||||
).
|
||||
|
||||
unsubscribe_remote_topics(
|
||||
Pid, IngressList, WorkerIdx, PoolSize, Name, ChannelId, TopicToHandlerIndex
|
||||
) ->
|
||||
[
|
||||
unsubscribe_remote_topic(
|
||||
Pid, Ingress, WorkerIdx, PoolSize, Name, ChannelId, TopicToHandlerIndex
|
||||
)
|
||||
|| Ingress <- IngressList
|
||||
].
|
||||
|
||||
unsubscribe_remote_topic(
|
||||
Pid,
|
||||
#{remote := #{topic := RemoteTopic}} = _Remote,
|
||||
WorkerIdx,
|
||||
PoolSize,
|
||||
Name,
|
||||
ChannelId,
|
||||
TopicToHandlerIndex
|
||||
) ->
|
||||
emqx_topic_index:delete(RemoteTopic, ChannelId, TopicToHandlerIndex),
|
||||
case should_subscribe(RemoteTopic, WorkerIdx, PoolSize, Name, _NoWarn = false) of
|
||||
true ->
|
||||
case emqtt:unsubscribe(Pid, RemoteTopic) of
|
||||
{ok, _Properties, _ReasonCodes} ->
|
||||
ok;
|
||||
{error, Reason} ->
|
||||
?SLOG(warning, #{
|
||||
msg => "unsubscribe_mqtt_topic_failed",
|
||||
channel_id => Name,
|
||||
reason => Reason
|
||||
}),
|
||||
ok
|
||||
end;
|
||||
false ->
|
||||
ok
|
||||
end.
|
||||
|
||||
config(#{ingress_list := IngressList} = Conf, Name, TopicToHandlerIndex) ->
|
||||
NewIngressList = [
|
||||
fix_remote_config(Ingress, Name, TopicToHandlerIndex, Conf)
|
||||
|| Ingress <- IngressList
|
||||
],
|
||||
Conf#{ingress_list => NewIngressList}.
|
||||
|
||||
fix_remote_config(#{remote := RC}, BridgeName, TopicToHandlerIndex, Conf) ->
|
||||
FixedConf0 = Conf#{
|
||||
remote => parse_remote(RC, BridgeName)
|
||||
},
|
||||
FixedConf = emqx_utils_maps:update_if_present(
|
||||
local, fun emqx_bridge_mqtt_msg:parse/1, FixedConf0
|
||||
),
|
||||
insert_to_topic_to_handler_index(FixedConf, TopicToHandlerIndex, BridgeName),
|
||||
FixedConf.
|
||||
|
||||
insert_to_topic_to_handler_index(
|
||||
#{remote := #{topic := Topic}} = Conf, TopicToHandlerIndex, BridgeName
|
||||
) ->
|
||||
TopicPattern =
|
||||
case emqx_topic:parse(Topic) of
|
||||
{#share{group = _Group, topic = TP}, _} ->
|
||||
TP;
|
||||
_ ->
|
||||
Topic
|
||||
end,
|
||||
emqx_topic_index:insert(TopicPattern, BridgeName, Conf, TopicToHandlerIndex).
|
||||
|
||||
parse_remote(#{qos := QoSIn} = Remote, BridgeName) ->
|
||||
QoS = downgrade_ingress_qos(QoSIn),
|
||||
case QoS of
|
||||
QoSIn ->
|
||||
|
@ -152,7 +213,7 @@ parse_remote(#{qos := QoSIn} = Conf, BridgeName) ->
|
|||
name => BridgeName
|
||||
})
|
||||
end,
|
||||
Conf#{qos => QoS}.
|
||||
Remote#{qos => QoS}.
|
||||
|
||||
downgrade_ingress_qos(2) ->
|
||||
1;
|
||||
|
@ -183,17 +244,39 @@ status(Pid) ->
|
|||
|
||||
%%
|
||||
|
||||
handle_publish(#{properties := Props} = MsgIn, Name, OnMessage, LocalPublish, IngressVars) ->
|
||||
Msg = import_msg(MsgIn, IngressVars),
|
||||
handle_publish(
|
||||
#{properties := Props, topic := Topic} = MsgIn,
|
||||
Name,
|
||||
TopicToHandlerIndex
|
||||
) ->
|
||||
?SLOG(debug, #{
|
||||
msg => "ingress_publish_local",
|
||||
message => Msg,
|
||||
message => MsgIn,
|
||||
name => Name
|
||||
}),
|
||||
maybe_on_message_received(Msg, OnMessage),
|
||||
maybe_publish_local(Msg, LocalPublish, Props).
|
||||
Matches = emqx_topic_index:matches(Topic, TopicToHandlerIndex, []),
|
||||
lists:foreach(
|
||||
fun(Match) ->
|
||||
handle_match(TopicToHandlerIndex, Match, MsgIn, Name, Props)
|
||||
end,
|
||||
Matches
|
||||
),
|
||||
ok.
|
||||
|
||||
handle_disconnect(_Reason) ->
|
||||
handle_match(
|
||||
TopicToHandlerIndex,
|
||||
Match,
|
||||
MsgIn,
|
||||
_Name,
|
||||
Props
|
||||
) ->
|
||||
[ChannelConfig] = emqx_topic_index:get_record(Match, TopicToHandlerIndex),
|
||||
#{on_message_received := OnMessage} = ChannelConfig,
|
||||
Msg = import_msg(MsgIn, ChannelConfig),
|
||||
|
||||
maybe_on_message_received(Msg, OnMessage),
|
||||
LocalPublish = maps:get(local, ChannelConfig, undefined),
|
||||
_ = maybe_publish_local(Msg, LocalPublish, Props),
|
||||
ok.
|
||||
|
||||
maybe_on_message_received(Msg, {Mod, Func, Args}) ->
|
||||
|
|
|
@ -0,0 +1,256 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_mqtt_pubsub_action_info).
|
||||
|
||||
-behaviour(emqx_action_info).
|
||||
|
||||
-export([
|
||||
bridge_v1_type_name/0,
|
||||
action_type_name/0,
|
||||
connector_type_name/0,
|
||||
schema_module/0,
|
||||
bridge_v1_config_to_connector_config/1,
|
||||
bridge_v1_config_to_action_config/2,
|
||||
connector_action_config_to_bridge_v1_config/2,
|
||||
is_source/0
|
||||
]).
|
||||
|
||||
bridge_v1_type_name() -> mqtt.
|
||||
|
||||
action_type_name() -> mqtt.
|
||||
|
||||
connector_type_name() -> mqtt.
|
||||
|
||||
schema_module() -> emqx_bridge_mqtt_pubsub_schema.
|
||||
|
||||
is_source() -> true.
|
||||
|
||||
bridge_v1_config_to_connector_config(Config) ->
|
||||
%% Transform the egress part to mqtt_publisher connector config
|
||||
SimplifiedConfig = check_and_simplify_bridge_v1_config(Config),
|
||||
ConnectorConfigMap = make_connector_config_from_bridge_v1_config(SimplifiedConfig),
|
||||
{mqtt, ConnectorConfigMap}.
|
||||
|
||||
make_connector_config_from_bridge_v1_config(Config) ->
|
||||
ConnectorConfigSchema = emqx_bridge_mqtt_connector_schema:fields("config_connector"),
|
||||
ConnectorTopFields = [
|
||||
erlang:atom_to_binary(FieldName, utf8)
|
||||
|| {FieldName, _} <- ConnectorConfigSchema
|
||||
],
|
||||
ConnectorConfigMap = maps:with(ConnectorTopFields, Config),
|
||||
ResourceOptsMap = maps:get(<<"resource_opts">>, ConnectorConfigMap, #{}),
|
||||
ResourceOptsMap2 = emqx_connector_schema:project_to_connector_resource_opts(ResourceOptsMap),
|
||||
ConnectorConfigMap2 = maps:put(<<"resource_opts">>, ResourceOptsMap2, ConnectorConfigMap),
|
||||
IngressMap0 = maps:get(<<"ingress">>, Config, #{}),
|
||||
EgressMap = maps:get(<<"egress">>, Config, #{}),
|
||||
%% Move pool_size to the top level
|
||||
PoolSizeIngress = maps:get(<<"pool_size">>, IngressMap0, undefined),
|
||||
PoolSize =
|
||||
case PoolSizeIngress of
|
||||
undefined ->
|
||||
DefaultPoolSize = emqx_connector_schema_lib:pool_size(default),
|
||||
maps:get(<<"pool_size">>, EgressMap, DefaultPoolSize);
|
||||
_ ->
|
||||
PoolSizeIngress
|
||||
end,
|
||||
%% Remove ingress part from the config
|
||||
ConnectorConfigMap3 = maps:remove(<<"ingress">>, ConnectorConfigMap2),
|
||||
%% Remove egress part from the config
|
||||
ConnectorConfigMap4 = maps:remove(<<"egress">>, ConnectorConfigMap3),
|
||||
ConnectorConfigMap5 = maps:put(<<"pool_size">>, PoolSize, ConnectorConfigMap4),
|
||||
ConnectorConfigMap5.
|
||||
|
||||
bridge_v1_config_to_action_config(BridgeV1Config, ConnectorName) ->
|
||||
SimplifiedConfig = check_and_simplify_bridge_v1_config(BridgeV1Config),
|
||||
bridge_v1_config_to_action_config_helper(
|
||||
SimplifiedConfig, ConnectorName
|
||||
).
|
||||
|
||||
bridge_v1_config_to_action_config_helper(
|
||||
#{
|
||||
<<"egress">> := EgressMap0
|
||||
} = Config,
|
||||
ConnectorName
|
||||
) ->
|
||||
%% Transform the egress part to the mqtt publisher action config
|
||||
SchemaFields = emqx_bridge_mqtt_pubsub_schema:fields("mqtt_publisher_action"),
|
||||
ResourceOptsSchemaFields = emqx_bridge_mqtt_pubsub_schema:fields(action_resource_opts),
|
||||
ConfigMap1 = general_action_conf_map_from_bridge_v1_config(
|
||||
Config, ConnectorName, SchemaFields, ResourceOptsSchemaFields
|
||||
),
|
||||
LocalTopicMap = maps:get(<<"local">>, EgressMap0, #{}),
|
||||
LocalTopic = maps:get(<<"topic">>, LocalTopicMap, undefined),
|
||||
EgressMap1 = maps:without([<<"local">>, <<"pool_size">>], EgressMap0),
|
||||
LocalParams = maps:get(<<"local">>, EgressMap0, #{}),
|
||||
EgressMap2 = emqx_utils_maps:unindent(<<"remote">>, EgressMap1),
|
||||
EgressMap = maps:put(<<"local">>, LocalParams, EgressMap2),
|
||||
%% Add parameters field (Egress map) to the action config
|
||||
ConfigMap2 = maps:put(<<"parameters">>, EgressMap, ConfigMap1),
|
||||
ConfigMap3 =
|
||||
case LocalTopic of
|
||||
undefined ->
|
||||
ConfigMap2;
|
||||
_ ->
|
||||
maps:put(<<"local_topic">>, LocalTopic, ConfigMap2)
|
||||
end,
|
||||
{action, mqtt, ConfigMap3};
|
||||
bridge_v1_config_to_action_config_helper(
|
||||
#{
|
||||
<<"ingress">> := IngressMap0
|
||||
} = Config,
|
||||
ConnectorName
|
||||
) ->
|
||||
%% Transform the ingress part to the mqtt subscriber source config
|
||||
SchemaFields = emqx_bridge_mqtt_pubsub_schema:fields("mqtt_subscriber_source"),
|
||||
ResourceOptsSchemaFields = emqx_bridge_mqtt_pubsub_schema:fields(source_resource_opts),
|
||||
ConfigMap1 = general_action_conf_map_from_bridge_v1_config(
|
||||
Config, ConnectorName, SchemaFields, ResourceOptsSchemaFields
|
||||
),
|
||||
IngressMap1 = maps:without([<<"pool_size">>, <<"local">>], IngressMap0),
|
||||
LocalParams = maps:get(<<"local">>, IngressMap0, #{}),
|
||||
IngressMap2 = emqx_utils_maps:unindent(<<"remote">>, IngressMap1),
|
||||
IngressMap = maps:put(<<"local">>, LocalParams, IngressMap2),
|
||||
%% Add parameters field (Ingress map) to the source config
|
||||
ConfigMap2 = maps:put(<<"parameters">>, IngressMap, ConfigMap1),
|
||||
{source, mqtt, ConfigMap2};
|
||||
bridge_v1_config_to_action_config_helper(
|
||||
_Config,
|
||||
_ConnectorName
|
||||
) ->
|
||||
error({incompatible_bridge_v1, no_matching_action_or_source}).
|
||||
|
||||
general_action_conf_map_from_bridge_v1_config(
|
||||
Config, ConnectorName, SchemaFields, ResourceOptsSchemaFields
|
||||
) ->
|
||||
SchemaFieldsNames = [
|
||||
erlang:atom_to_binary(FieldName, utf8)
|
||||
|| {FieldName, _} <- SchemaFields
|
||||
],
|
||||
ActionConfig0 = maps:with(SchemaFieldsNames, Config),
|
||||
ResourceOptsSchemaFieldsNames = [
|
||||
erlang:atom_to_binary(FieldName, utf8)
|
||||
|| {FieldName, _} <- ResourceOptsSchemaFields
|
||||
],
|
||||
ResourceOptsMap = maps:get(<<"resource_opts">>, ActionConfig0, #{}),
|
||||
ResourceOptsMap2 = maps:with(ResourceOptsSchemaFieldsNames, ResourceOptsMap),
|
||||
%% Only put resource_opts if the original config has it
|
||||
ActionConfig1 =
|
||||
case maps:is_key(<<"resource_opts">>, ActionConfig0) of
|
||||
true ->
|
||||
maps:put(<<"resource_opts">>, ResourceOptsMap2, ActionConfig0);
|
||||
false ->
|
||||
ActionConfig0
|
||||
end,
|
||||
ActionConfig2 = maps:put(<<"connector">>, ConnectorName, ActionConfig1),
|
||||
ActionConfig2.
|
||||
|
||||
check_and_simplify_bridge_v1_config(
|
||||
#{
|
||||
<<"egress">> := EgressMap
|
||||
} = Config
|
||||
) when map_size(EgressMap) =:= 0 ->
|
||||
check_and_simplify_bridge_v1_config(maps:remove(<<"egress">>, Config));
|
||||
check_and_simplify_bridge_v1_config(
|
||||
#{
|
||||
<<"ingress">> := IngressMap
|
||||
} = Config
|
||||
) when map_size(IngressMap) =:= 0 ->
|
||||
check_and_simplify_bridge_v1_config(maps:remove(<<"ingress">>, Config));
|
||||
check_and_simplify_bridge_v1_config(#{
|
||||
<<"egress">> := _EGressMap,
|
||||
<<"ingress">> := _InGressMap
|
||||
}) ->
|
||||
%% We should crash because we don't support upgrading when ingress and egress exist at the same time
|
||||
error(
|
||||
{unsupported_config, <<
|
||||
"Upgrade not supported when ingress and egress exist in the "
|
||||
"same MQTT bridge. Please divide the egress and ingress part "
|
||||
"to separate bridges in the configuration."
|
||||
>>}
|
||||
);
|
||||
check_and_simplify_bridge_v1_config(SimplifiedConfig) ->
|
||||
SimplifiedConfig.
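%% Illustrative sketch (hypothetical maps, not part of the change): an empty
%% egress section is dropped during simplification, while a bridge that
%% defines both ingress and egress is rejected as unsupported.
check_and_simplify_examples() ->
    OnlyIngress = #{<<"ingress">> => #{<<"remote">> => #{<<"topic">> => <<"t">>}}},
    OnlyIngress = check_and_simplify_bridge_v1_config(OnlyIngress#{<<"egress">> => #{}}),
    {'EXIT', {{unsupported_config, _}, _}} =
        (catch check_and_simplify_bridge_v1_config(
            OnlyIngress#{<<"egress">> => #{<<"remote">> => #{<<"topic">> => <<"t">>}}}
        )),
    ok.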
|
||||
|
||||
connector_action_config_to_bridge_v1_config(
|
||||
ConnectorConfig, ActionConfig
|
||||
) ->
|
||||
Params0 = maps:get(<<"parameters">>, ActionConfig, #{}),
|
||||
ResourceOptsConnector = maps:get(<<"resource_opts">>, ConnectorConfig, #{}),
|
||||
ResourceOptsAction = maps:get(<<"resource_opts">>, ActionConfig, #{}),
|
||||
ResourceOpts0 = maps:merge(ResourceOptsConnector, ResourceOptsAction),
|
||||
V1ResourceOptsFields =
|
||||
lists:map(
|
||||
fun({Field, _}) -> atom_to_binary(Field) end,
|
||||
emqx_bridge_mqtt_schema:fields("creation_opts")
|
||||
),
|
||||
ResourceOpts = maps:with(V1ResourceOptsFields, ResourceOpts0),
|
||||
%% Check the direction of the action
|
||||
Direction =
|
||||
case is_map_key(<<"retain">>, Params0) of
|
||||
%% Only the publisher (egress) parameters contain retain
|
||||
true ->
|
||||
<<"publisher">>;
|
||||
false ->
|
||||
<<"subscriber">>
|
||||
end,
|
||||
Params1 = maps:remove(<<"direction">>, Params0),
|
||||
Params = maps:remove(<<"local">>, Params1),
|
||||
%% hidden; for backwards compatibility
|
||||
LocalParams = maps:get(<<"local">>, Params1, #{}),
|
||||
DefaultPoolSize = emqx_connector_schema_lib:pool_size(default),
|
||||
PoolSize = maps:get(<<"pool_size">>, ConnectorConfig, DefaultPoolSize),
|
||||
ConnectorConfig2 = maps:remove(<<"pool_size">>, ConnectorConfig),
|
||||
LocalTopic = maps:get(<<"local_topic">>, ActionConfig, undefined),
|
||||
BridgeV1Conf0 =
|
||||
case {Direction, LocalTopic} of
|
||||
{<<"publisher">>, undefined} ->
|
||||
#{
|
||||
<<"egress">> =>
|
||||
#{
|
||||
<<"pool_size">> => PoolSize,
|
||||
<<"remote">> => Params,
|
||||
<<"local">> => LocalParams
|
||||
}
|
||||
};
|
||||
{<<"publisher">>, LocalT} ->
|
||||
#{
|
||||
<<"egress">> =>
|
||||
#{
|
||||
<<"pool_size">> => PoolSize,
|
||||
<<"remote">> => Params,
|
||||
<<"local">> =>
|
||||
maps:merge(
|
||||
LocalParams,
|
||||
#{<<"topic">> => LocalT}
|
||||
)
|
||||
}
|
||||
};
|
||||
{<<"subscriber">>, _} ->
|
||||
#{
|
||||
<<"ingress">> =>
|
||||
#{
|
||||
<<"pool_size">> => PoolSize,
|
||||
<<"remote">> => Params,
|
||||
<<"local">> => LocalParams
|
||||
}
|
||||
}
|
||||
end,
|
||||
BridgeV1Conf1 = maps:merge(BridgeV1Conf0, ConnectorConfig2),
|
||||
BridgeV1Conf2 = BridgeV1Conf1#{
|
||||
<<"resource_opts">> => ResourceOpts
|
||||
},
|
||||
BridgeV1Conf2.
|
|
@ -0,0 +1,206 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_bridge_mqtt_pubsub_schema).
|
||||
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
-include_lib("hocon/include/hoconsc.hrl").
|
||||
|
||||
-import(hoconsc, [mk/2, ref/2]).
|
||||
|
||||
-export([roots/0, fields/1, desc/1, namespace/0]).
|
||||
|
||||
-export([
|
||||
bridge_v2_examples/1,
|
||||
source_examples/1,
|
||||
conn_bridge_examples/1
|
||||
]).
|
||||
|
||||
-define(ACTION_TYPE, mqtt).
|
||||
-define(SOURCE_TYPE, mqtt).
|
||||
|
||||
%%======================================================================================
|
||||
%% Hocon Schema Definitions
|
||||
namespace() -> "bridge_mqtt_publisher".
|
||||
|
||||
roots() -> [].
|
||||
|
||||
fields(action) ->
|
||||
{mqtt,
|
||||
mk(
|
||||
hoconsc:map(name, ref(?MODULE, "mqtt_publisher_action")),
|
||||
#{
|
||||
desc => <<"MQTT Publisher Action Config">>,
|
||||
required => false
|
||||
}
|
||||
)};
|
||||
fields("mqtt_publisher_action") ->
|
||||
emqx_bridge_v2_schema:make_producer_action_schema(
|
||||
hoconsc:mk(
|
||||
hoconsc:ref(?MODULE, action_parameters),
|
||||
#{
|
||||
required => true,
|
||||
desc => ?DESC("action_parameters")
|
||||
}
|
||||
)
|
||||
);
|
||||
fields(action_parameters) ->
|
||||
[
|
||||
%% for backwards compatibility
|
||||
{local,
|
||||
mk(
|
||||
ref(emqx_bridge_mqtt_connector_schema, "egress_local"),
|
||||
#{
|
||||
default => #{},
|
||||
importance => ?IMPORTANCE_HIDDEN
|
||||
}
|
||||
)}
|
||||
| emqx_bridge_mqtt_connector_schema:fields("egress_remote")
|
||||
];
|
||||
fields(source) ->
|
||||
{mqtt,
|
||||
mk(
|
||||
hoconsc:map(name, ref(?MODULE, "mqtt_subscriber_source")),
|
||||
#{
|
||||
desc => <<"MQTT Subscriber Source Config">>,
|
||||
required => false
|
||||
}
|
||||
)};
|
||||
fields("mqtt_subscriber_source") ->
|
||||
emqx_bridge_v2_schema:make_consumer_action_schema(
|
||||
mk(
|
||||
ref(?MODULE, ingress_parameters),
|
||||
#{
|
||||
required => true,
|
||||
desc => ?DESC("source_parameters")
|
||||
}
|
||||
)
|
||||
);
|
||||
fields(ingress_parameters) ->
|
||||
[
|
||||
%% for backwards compatibility
|
||||
{local,
|
||||
mk(
|
||||
ref(emqx_bridge_mqtt_connector_schema, "ingress_local"),
|
||||
#{
|
||||
default => #{},
|
||||
importance => ?IMPORTANCE_HIDDEN
|
||||
}
|
||||
)}
|
||||
| emqx_bridge_mqtt_connector_schema:fields("ingress_remote")
|
||||
];
|
||||
fields(action_resource_opts) ->
|
||||
UnsupportedOpts = [enable_batch, batch_size, batch_time],
|
||||
lists:filter(
|
||||
fun({K, _V}) -> not lists:member(K, UnsupportedOpts) end,
|
||||
emqx_bridge_v2_schema:action_resource_opts_fields()
|
||||
);
|
||||
fields(source_resource_opts) ->
|
||||
emqx_bridge_v2_schema:source_resource_opts_fields();
|
||||
fields(Field) when
|
||||
Field == "get_bridge_v2";
|
||||
Field == "post_bridge_v2";
|
||||
Field == "put_bridge_v2"
|
||||
->
|
||||
emqx_bridge_v2_schema:api_fields(Field, ?ACTION_TYPE, fields("mqtt_publisher_action"));
|
||||
fields(Field) when
|
||||
Field == "get_source";
|
||||
Field == "post_source";
|
||||
Field == "put_source"
|
||||
->
|
||||
emqx_bridge_v2_schema:api_fields(Field, ?SOURCE_TYPE, fields("mqtt_subscriber_source"));
|
||||
fields(What) ->
|
||||
error({emqx_bridge_mqtt_pubsub_schema, missing_field_handler, What}).
|
||||
%% v2: api schema
|
||||
%% The parameter equals one of
|
||||
%% `get_bridge_v2`, `post_bridge_v2`, `put_bridge_v2` from emqx_bridge_v2_schema:api_schema/1
|
||||
%% `get_connector`, `post_connector`, `put_connector` from emqx_connector_schema:api_schema/1
|
||||
%%--------------------------------------------------------------------
|
||||
%% v1/v2
|
||||
|
||||
desc("config") ->
|
||||
?DESC("desc_config");
|
||||
desc(action_resource_opts) ->
|
||||
?DESC(emqx_resource_schema, "creation_opts");
|
||||
desc(source_resource_opts) ->
|
||||
?DESC(emqx_resource_schema, "creation_opts");
|
||||
desc(action_parameters) ->
|
||||
?DESC(action_parameters);
|
||||
desc(ingress_parameters) ->
|
||||
?DESC(ingress_parameters);
|
||||
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
|
||||
["Configuration for WebHook using `", string:to_upper(Method), "` method."];
|
||||
desc("http_action") ->
|
||||
?DESC("desc_config");
|
||||
desc("parameters_opts") ->
|
||||
?DESC("config_parameters_opts");
|
||||
desc("mqtt_publisher_action") ->
|
||||
?DESC("mqtt_publisher_action");
|
||||
desc("mqtt_subscriber_source") ->
|
||||
?DESC("mqtt_subscriber_source");
|
||||
desc(_) ->
|
||||
undefined.
|
||||
|
||||
bridge_v2_examples(Method) ->
|
||||
[
|
||||
#{
|
||||
<<"mqtt">> => #{
|
||||
summary => <<"MQTT Producer Action">>,
|
||||
value => emqx_bridge_v2_schema:action_values(
|
||||
Method,
|
||||
_ActionType = mqtt,
|
||||
_ConnectorType = mqtt,
|
||||
#{
|
||||
parameters => #{
|
||||
topic => <<"remote/topic">>,
|
||||
qos => 2,
|
||||
retain => false,
|
||||
payload => <<"${.payload}">>
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
].
|
||||
|
||||
source_examples(Method) ->
|
||||
[
|
||||
#{
|
||||
<<"mqtt">> => #{
|
||||
summary => <<"MQTT Subscriber Source">>,
|
||||
value => emqx_bridge_v2_schema:source_values(
|
||||
Method,
|
||||
_SourceType = mqtt,
|
||||
_ConnectorType = mqtt,
|
||||
#{
|
||||
parameters => #{
|
||||
topic => <<"remote/topic">>,
|
||||
qos => 2
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
].
|
||||
|
||||
conn_bridge_examples(Method) ->
|
||||
[
|
||||
#{
|
||||
<<"mqtt">> => #{
|
||||
summary => <<"MQTT Producer Action">>,
|
||||
value => emqx_bridge_api:mqtt_v1_example(Method)
|
||||
}
|
||||
}
|
||||
].
|
|
@ -13,6 +13,7 @@
|
|||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_mqtt_schema).
|
||||
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
|
|
@ -238,7 +238,8 @@ t_conf_bridge_authn_passfile(Config) ->
|
|||
post,
|
||||
uri(["bridges"]),
|
||||
?SERVER_CONF(<<>>, <<"file://im/pretty/sure/theres/no/such/file">>)#{
|
||||
<<"name">> => <<"t_conf_bridge_authn_no_passfile">>
|
||||
<<"name">> => <<"t_conf_bridge_authn_no_passfile">>,
|
||||
<<"ingress">> => ?INGRESS_CONF#{<<"pool_size">> => 1}
|
||||
}
|
||||
),
|
||||
?assertMatch({match, _}, re:run(Reason, <<"failed_to_read_secret_file">>)).
|
||||
|
@ -397,8 +398,9 @@ t_mqtt_conn_bridge_ingress_shared_subscription(_) ->
|
|||
{ok, 204, <<>>} = request(delete, uri(["bridges", BridgeID]), []),
|
||||
ok.
|
||||
|
||||
t_mqtt_egress_bridge_ignores_clean_start(_) ->
|
||||
t_mqtt_egress_bridge_warns_clean_start(_) ->
|
||||
BridgeName = atom_to_binary(?FUNCTION_NAME),
|
||||
Action = fun() ->
|
||||
BridgeID = create_bridge(
|
||||
?SERVER_CONF#{
|
||||
<<"name">> => BridgeName,
|
||||
|
@ -407,22 +409,17 @@ t_mqtt_egress_bridge_ignores_clean_start(_) ->
|
|||
}
|
||||
),
|
||||
|
||||
ResourceID = emqx_bridge_resource:resource_id(BridgeID),
|
||||
{ok, _Group, #{state := #{egress_pool_name := EgressPoolName}}} =
|
||||
emqx_resource_manager:lookup_cached(ResourceID),
|
||||
ClientInfo = ecpool:pick_and_do(
|
||||
EgressPoolName,
|
||||
{emqx_bridge_mqtt_egress, info, []},
|
||||
no_handover
|
||||
),
|
||||
?assertMatch(
|
||||
#{clean_start := true},
|
||||
maps:from_list(ClientInfo)
|
||||
),
|
||||
|
||||
%% delete the bridge
|
||||
{ok, 204, <<>>} = request(delete, uri(["bridges", BridgeID]), []),
|
||||
|
||||
ok
|
||||
end,
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
Action(),
|
||||
#{?snk_kind := mqtt_clean_start_egress_action_warning},
|
||||
10000
|
||||
),
|
||||
ok.
|
||||
|
||||
t_mqtt_conn_bridge_ingress_downgrades_qos_2(_) ->
|
||||
|
@ -567,17 +564,17 @@ t_mqtt_conn_bridge_egress_no_payload_template(_) ->
|
|||
|
||||
t_egress_short_clientid(_Config) ->
|
||||
%% Name is short, expect the actual client ID in use is hashed from
|
||||
%% <name>E<nodename-hash>:<pool_worker_id>
|
||||
Name = "abc01234",
|
||||
BaseId = emqx_bridge_mqtt_lib:clientid_base([Name, "E"]),
|
||||
%% <name><nodename-hash>:<pool_worker_id>
|
||||
Name = <<"abc01234">>,
|
||||
BaseId = emqx_bridge_mqtt_lib:clientid_base([Name]),
|
||||
ExpectedClientId = iolist_to_binary([BaseId, $:, "1"]),
|
||||
test_egress_clientid(Name, ExpectedClientId).
|
||||
|
||||
t_egress_long_clientid(_Config) ->
|
||||
%% Expect the actual client ID in use is hashed from
|
||||
%% <name>E<nodename-hash>:<pool_worker_id>
|
||||
Name = "abc01234567890123456789",
|
||||
BaseId = emqx_bridge_mqtt_lib:clientid_base([Name, "E"]),
|
||||
%% <name><nodename-hash>:<pool_worker_id>
|
||||
Name = <<"abc012345678901234567890">>,
|
||||
BaseId = emqx_bridge_mqtt_lib:clientid_base([Name]),
|
||||
ExpectedClientId = emqx_bridge_mqtt_lib:bytes23(BaseId, 1),
|
||||
test_egress_clientid(Name, ExpectedClientId).
|
||||
|
||||
|
@ -1086,7 +1083,8 @@ create_bridge(Config = #{<<"type">> := Type, <<"name">> := Name}) ->
|
|||
<<"type">> := Type,
|
||||
<<"name">> := Name
|
||||
},
|
||||
emqx_utils_json:decode(Bridge)
|
||||
emqx_utils_json:decode(Bridge),
|
||||
#{expected_type => Type, expected_name => Name}
|
||||
),
|
||||
emqx_bridge_resource:bridge_id(Type, Name).
|
||||
|
||||
|
|
|
@ -0,0 +1,244 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_bridge_mqtt_v2_subscriber_SUITE).
|
||||
|
||||
-compile(nowarn_export_all).
|
||||
-compile(export_all).
|
||||
|
||||
-include_lib("emqx/include/emqx.hrl").
|
||||
-include_lib("emqx/include/emqx_mqtt.hrl").
|
||||
-include_lib("emqx/include/emqx_hooks.hrl").
|
||||
-include_lib("stdlib/include/assert.hrl").
|
||||
-include_lib("emqx/include/asserts.hrl").
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
|
||||
-import(emqx_common_test_helpers, [on_exit/1]).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% CT boilerplate
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
all() ->
|
||||
emqx_common_test_helpers:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
Apps = emqx_cth_suite:start(
|
||||
[
|
||||
emqx,
|
||||
emqx_conf,
|
||||
emqx_connector,
|
||||
emqx_bridge_mqtt,
|
||||
emqx_bridge,
|
||||
emqx_rule_engine,
|
||||
emqx_management,
|
||||
{emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}
|
||||
],
|
||||
#{work_dir => emqx_cth_suite:work_dir(Config)}
|
||||
),
|
||||
{ok, Api} = emqx_common_test_http:create_default_app(),
|
||||
[
|
||||
{apps, Apps},
|
||||
{api, Api}
|
||||
| Config
|
||||
].
|
||||
|
||||
end_per_suite(Config) ->
|
||||
Apps = ?config(apps, Config),
|
||||
emqx_cth_suite:stop(Apps),
|
||||
ok.
|
||||
|
||||
init_per_testcase(TestCase, Config) ->
|
||||
UniqueNum = integer_to_binary(erlang:unique_integer()),
|
||||
Name = iolist_to_binary([atom_to_binary(TestCase), UniqueNum]),
|
||||
ConnectorConfig = connector_config(),
|
||||
SourceConfig = source_config(#{connector => Name}),
|
||||
[
|
||||
{bridge_kind, source},
|
||||
{source_type, mqtt},
|
||||
{source_name, Name},
|
||||
{source_config, SourceConfig},
|
||||
{connector_type, mqtt},
|
||||
{connector_name, Name},
|
||||
{connector_config, ConnectorConfig}
|
||||
| Config
|
||||
].
|
||||
|
||||
end_per_testcase(_TestCase, _Config) ->
|
||||
emqx_common_test_helpers:call_janitor(),
|
||||
emqx_bridge_v2_testlib:delete_all_bridges_and_connectors(),
|
||||
ok.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Helper fns
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
connector_config() ->
|
||||
%% !!!!!!!!!!!! FIXME!!!!!! add more fields ("server_configs")
|
||||
#{
|
||||
<<"enable">> => true,
|
||||
<<"description">> => <<"my connector">>,
|
||||
<<"pool_size">> => 3,
|
||||
<<"proto_ver">> => <<"v5">>,
|
||||
<<"server">> => <<"127.0.0.1:1883">>,
|
||||
<<"resource_opts">> => #{
|
||||
<<"health_check_interval">> => <<"15s">>,
|
||||
<<"start_after_created">> => true,
|
||||
<<"start_timeout">> => <<"5s">>
|
||||
}
|
||||
}.
|
||||
|
||||
source_config(Overrides0) ->
|
||||
Overrides = emqx_utils_maps:binary_key_map(Overrides0),
|
||||
CommonConfig =
|
||||
#{
|
||||
<<"enable">> => true,
|
||||
<<"connector">> => <<"please override">>,
|
||||
<<"parameters">> =>
|
||||
#{
|
||||
<<"topic">> => <<"remote/topic">>,
|
||||
<<"qos">> => 2
|
||||
},
|
||||
<<"resource_opts">> => #{
|
||||
<<"health_check_interval">> => <<"15s">>,
|
||||
<<"resume_interval">> => <<"15s">>
|
||||
}
|
||||
},
|
||||
maps:merge(CommonConfig, Overrides).
|
||||
|
||||
replace(Key, Value, Proplist) ->
|
||||
lists:keyreplace(Key, 1, Proplist, {Key, Value}).
|
||||
|
||||
bridge_id(Config) ->
|
||||
Type = ?config(source_type, Config),
|
||||
Name = ?config(source_name, Config),
|
||||
emqx_bridge_resource:bridge_id(Type, Name).
|
||||
|
||||
hookpoint(Config) ->
|
||||
BridgeId = bridge_id(Config),
|
||||
emqx_bridge_resource:bridge_hookpoint(BridgeId).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Testcases
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
t_create_via_http(Config) ->
|
||||
ConnectorName = ?config(connector_name, Config),
|
||||
ok = emqx_bridge_v2_testlib:t_create_via_http(Config),
|
||||
?assertMatch(
|
||||
{ok,
|
||||
{{_, 200, _}, _, [
|
||||
#{
|
||||
<<"enable">> := true,
|
||||
<<"status">> := <<"connected">>
|
||||
}
|
||||
]}},
|
||||
emqx_bridge_v2_testlib:list_bridges_http_api_v1()
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, {{_, 200, _}, _, [#{<<"enable">> := true}]}},
|
||||
emqx_bridge_v2_testlib:list_connectors_http_api()
|
||||
),
|
||||
|
||||
NewSourceName = <<"my_other_source">>,
|
||||
{ok, {{_, 201, _}, _, _}} =
|
||||
emqx_bridge_v2_testlib:create_kind_api(
|
||||
replace(source_name, NewSourceName, Config)
|
||||
),
|
||||
?assertMatch(
|
||||
{ok,
|
||||
{{_, 200, _}, _, [
|
||||
#{<<"connector">> := ConnectorName},
|
||||
#{<<"connector">> := ConnectorName}
|
||||
]}},
|
||||
emqx_bridge_v2_testlib:list_sources_http_api()
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, {{_, 200, _}, _, []}},
|
||||
emqx_bridge_v2_testlib:list_bridges_http_api_v1()
|
||||
),
|
||||
ok.
|
||||
|
||||
t_start_stop(Config) ->
|
||||
ok = emqx_bridge_v2_testlib:t_start_stop(Config, mqtt_connector_stopped),
|
||||
ok.
|
||||
|
||||
t_receive_via_rule(Config) ->
|
||||
SourceConfig = ?config(source_config, Config),
|
||||
?check_trace(
|
||||
begin
|
||||
{ok, {{_, 201, _}, _, _}} = emqx_bridge_v2_testlib:create_connector_api(Config),
|
||||
{ok, {{_, 201, _}, _, _}} = emqx_bridge_v2_testlib:create_kind_api(Config),
|
||||
Hookpoint = hookpoint(Config),
|
||||
RepublishTopic = <<"rep/t">>,
|
||||
RemoteTopic = emqx_utils_maps:deep_get(
|
||||
[<<"parameters">>, <<"topic">>],
|
||||
SourceConfig
|
||||
),
|
||||
RuleOpts = #{
|
||||
sql => <<"select * from \"", Hookpoint/binary, "\"">>,
|
||||
actions => [
|
||||
%% #{function => console},
|
||||
#{
|
||||
function => republish,
|
||||
args => #{
|
||||
topic => RepublishTopic,
|
||||
payload => <<"${.}">>,
|
||||
qos => 0,
|
||||
retain => false,
|
||||
user_properties => <<"${.pub_props.'User-Property'}">>
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{ok, {{_, 201, _}, _, #{<<"id">> := RuleId}}} =
|
||||
emqx_bridge_v2_testlib:create_rule_api(RuleOpts),
|
||||
on_exit(fun() -> emqx_rule_engine:delete_rule(RuleId) end),
|
||||
{ok, Client} = emqtt:start_link([{proto_ver, v5}]),
|
||||
{ok, _} = emqtt:connect(Client),
|
||||
{ok, _, [?RC_GRANTED_QOS_0]} = emqtt:subscribe(Client, RepublishTopic),
|
||||
ok = emqtt:publish(
|
||||
Client,
|
||||
RemoteTopic,
|
||||
#{'User-Property' => [{<<"key">>, <<"value">>}]},
|
||||
<<"mypayload">>,
|
||||
_Opts = []
|
||||
),
|
||||
{publish, Msg} =
|
||||
?assertReceive(
|
||||
{publish, #{
|
||||
topic := RepublishTopic,
|
||||
retain := false,
|
||||
qos := 0,
|
||||
properties := #{'User-Property' := [{<<"key">>, <<"value">>}]}
|
||||
}}
|
||||
),
|
||||
Payload = emqx_utils_json:decode(maps:get(payload, Msg), [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"event">> := Hookpoint,
|
||||
<<"payload">> := <<"mypayload">>
|
||||
},
|
||||
Payload
|
||||
),
|
||||
emqtt:stop(Client),
|
||||
ok
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("action_references_nonexistent_bridges", Trace)),
|
||||
ok
|
||||
end
|
||||
),
|
||||
ok.
|
|
@ -166,11 +166,6 @@ common_init(Config0) ->
|
|||
#{work_dir => emqx_cth_suite:work_dir(Config0)}
|
||||
),
|
||||
{ok, _Api} = emqx_common_test_http:create_default_app(),
|
||||
|
||||
%% ok = emqx_common_test_helpers:start_apps([emqx, emqx_postgresql, emqx_conf, emqx_bridge]),
|
||||
%% _ = emqx_bridge_enterprise:module_info(),
|
||||
%% emqx_mgmt_api_test_util:init_suite(),
|
||||
|
||||
% Connect to pgsql directly and create the table
|
||||
connect_and_create_table(Config0),
|
||||
{Name, PGConf} = pgsql_config(BridgeType, Config0),
|
||||
|
|
|
@ -76,7 +76,7 @@ fields(redis_action) ->
|
|||
[ResOpts] = emqx_connector_schema:resource_opts_ref(?MODULE, action_resource_opts),
|
||||
lists:keyreplace(resource_opts, 1, Schema, ResOpts);
|
||||
fields(action_resource_opts) ->
|
||||
emqx_bridge_v2_schema:resource_opts_fields([
|
||||
emqx_bridge_v2_schema:action_resource_opts_fields([
|
||||
{batch_size, #{desc => ?DESC(batch_size)}},
|
||||
{batch_time, #{desc => ?DESC(batch_time)}}
|
||||
]);
|
||||
|
|
|
@ -86,6 +86,7 @@ fields(action) ->
|
|||
fields(config) ->
|
||||
[
|
||||
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
{description, emqx_schema:description_schema()},
|
||||
{connector,
|
||||
mk(binary(), #{
|
||||
|
|
|
@ -123,8 +123,11 @@ fields(Field) when
|
|||
Field == "post";
|
||||
Field == "put"
|
||||
->
|
||||
Fields =
|
||||
fields("connection_fields") ++
|
||||
emqx_connector_schema:resource_opts_ref(?MODULE, connector_resource_opts),
|
||||
emqx_connector_schema:api_fields(
|
||||
Field ++ "_connector", ?CONNECTOR_TYPE, fields("connection_fields")
|
||||
Field ++ "_connector", ?CONNECTOR_TYPE, Fields
|
||||
).
|
||||
|
||||
desc(config) ->
|
||||
|
|
|
@ -150,6 +150,9 @@ post_config_update([?ROOT_KEY, Type, Name], '$remove', _, _OldConf, _AppEnvs) ->
|
|||
ok = emqx_connector_resource:remove(Type, Name),
|
||||
?tp(connector_post_config_update_done, #{}),
|
||||
ok;
|
||||
{error, not_found} ->
|
||||
?tp(connector_post_config_update_done, #{}),
|
||||
ok;
|
||||
{ok, Channels} ->
|
||||
{error, {active_channels, Channels}}
|
||||
end;
|
||||
|
|
|
@ -326,7 +326,7 @@ schema("/connectors_probe") ->
|
|||
create_connector(ConnectorType, ConnectorName, Conf)
|
||||
end;
|
||||
'/connectors'(get, _Params) ->
|
||||
Nodes = mria:running_nodes(),
|
||||
Nodes = emqx:running_nodes(),
|
||||
NodeReplies = emqx_connector_proto_v1:list_connectors_on_nodes(Nodes),
|
||||
case is_ok(NodeReplies) of
|
||||
{ok, NodeConnectors} ->
|
||||
|
@ -674,7 +674,10 @@ unpack_connector_conf(Type, PackedConf) ->
|
|||
RawConf.
|
||||
|
||||
format_action(ActionId) ->
|
||||
element(2, emqx_bridge_v2:parse_id(ActionId)).
|
||||
case emqx_bridge_v2:parse_id(ActionId) of
|
||||
#{name := Name} ->
|
||||
Name
|
||||
end.
|
||||
|
||||
is_ok(ok) ->
|
||||
ok;
|
||||
|
|
|
@ -51,11 +51,11 @@
|
|||
|
||||
-export([parse_url/1]).
|
||||
|
||||
-callback connector_config(ParsedConfig) ->
|
||||
-callback connector_config(ParsedConfig, Context) ->
|
||||
ParsedConfig
|
||||
when
|
||||
ParsedConfig :: #{atom() => any()}.
|
||||
-optional_callbacks([connector_config/1]).
|
||||
ParsedConfig :: #{atom() => any()}, Context :: #{atom() => any()}.
|
||||
-optional_callbacks([connector_config/2]).
|
||||
|
||||
-if(?EMQX_RELEASE_EDITION == ee).
|
||||
connector_to_resource_type(ConnectorType) ->
|
||||
|
@ -81,6 +81,10 @@ connector_impl_module(_ConnectorType) ->
|
|||
|
||||
connector_to_resource_type_ce(http) ->
|
||||
emqx_bridge_http_connector;
|
||||
connector_to_resource_type_ce(mqtt) ->
|
||||
emqx_bridge_mqtt_connector;
|
||||
% connector_to_resource_type_ce(mqtt_subscriber) ->
|
||||
% emqx_bridge_mqtt_subscriber_connector;
|
||||
connector_to_resource_type_ce(ConnectorType) ->
|
||||
error({no_bridge_v2, ConnectorType}).
|
||||
|
||||
|
@ -276,6 +280,12 @@ remove(Type, Name, _Conf, _Opts) ->
|
|||
emqx_resource:remove_local(resource_id(Type, Name)).
|
||||
|
||||
%% convert connector configs to what the connector modules want
|
||||
parse_confs(
|
||||
<<"mqtt">> = Type,
|
||||
Name,
|
||||
Conf
|
||||
) ->
|
||||
insert_hookpoints(Type, Name, Conf);
|
||||
parse_confs(
|
||||
<<"http">>,
|
||||
_Name,
|
||||
|
@ -307,6 +317,13 @@ parse_confs(
|
|||
parse_confs(ConnectorType, Name, Config) ->
|
||||
connector_config(ConnectorType, Name, Config).
|
||||
|
||||
insert_hookpoints(Type, Name, Conf) ->
|
||||
BId = emqx_bridge_resource:bridge_id(Type, Name),
|
||||
BridgeHookpoint = emqx_bridge_resource:bridge_hookpoint(BId),
|
||||
ConnectorHookpoint = connector_hookpoint(BId),
|
||||
HookPoints = [BridgeHookpoint, ConnectorHookpoint],
|
||||
Conf#{hookpoints => HookPoints}.
|
||||
|
||||
connector_config(ConnectorType, Name, Config) ->
|
||||
Mod = connector_impl_module(ConnectorType),
|
||||
case erlang:function_exported(Mod, connector_config, 2) of
|
||||
|
|
|
@ -50,6 +50,8 @@ resource_type(redis) ->
|
|||
emqx_bridge_redis_connector;
|
||||
resource_type(iotdb) ->
|
||||
emqx_bridge_iotdb_connector;
|
||||
resource_type(elasticsearch) ->
|
||||
emqx_bridge_es_connector;
|
||||
resource_type(Type) ->
|
||||
error({unknown_connector_type, Type}).
|
||||
|
||||
|
@ -62,6 +64,8 @@ connector_impl_module(confluent_producer) ->
|
|||
emqx_bridge_confluent_producer;
|
||||
connector_impl_module(iotdb) ->
|
||||
emqx_bridge_iotdb_connector;
|
||||
connector_impl_module(elasticsearch) ->
|
||||
emqx_bridge_es_connector;
|
||||
connector_impl_module(_ConnectorType) ->
|
||||
undefined.
|
||||
|
||||
|
@ -181,6 +185,14 @@ connector_structs() ->
|
|||
desc => <<"IoTDB Connector Config">>,
|
||||
required => false
|
||||
}
|
||||
)},
|
||||
{elasticsearch,
|
||||
mk(
|
||||
hoconsc:map(name, ref(emqx_bridge_es_connector, config)),
|
||||
#{
|
||||
desc => <<"ElasticSearch Connector Config">>,
|
||||
required => false
|
||||
}
|
||||
)}
|
||||
].
|
||||
|
||||
|
@ -199,7 +211,8 @@ schema_modules() ->
|
|||
emqx_bridge_timescale,
|
||||
emqx_postgresql_connector_schema,
|
||||
emqx_bridge_redis_schema,
|
||||
emqx_bridge_iotdb_connector
|
||||
emqx_bridge_iotdb_connector,
|
||||
emqx_bridge_es_connector
|
||||
].
|
||||
|
||||
api_schemas(Method) ->
|
||||
|
@ -227,7 +240,8 @@ api_schemas(Method) ->
|
|||
api_ref(emqx_bridge_timescale, <<"timescale">>, Method ++ "_connector"),
|
||||
api_ref(emqx_postgresql_connector_schema, <<"pgsql">>, Method ++ "_connector"),
|
||||
api_ref(emqx_bridge_redis_schema, <<"redis">>, Method ++ "_connector"),
|
||||
api_ref(emqx_bridge_iotdb_connector, <<"iotdb">>, Method)
|
||||
api_ref(emqx_bridge_iotdb_connector, <<"iotdb">>, Method),
|
||||
api_ref(emqx_bridge_es_connector, <<"elasticsearch">>, Method)
|
||||
].
|
||||
|
||||
api_ref(Module, Type, Method) ->
|
||||
|
|
|
@ -90,7 +90,9 @@ api_schemas(Method) ->
|
|||
[
|
||||
%% We need to map the `type' field of a request (binary) to a
|
||||
%% connector schema module.
|
||||
api_ref(emqx_bridge_http_schema, <<"http">>, Method ++ "_connector")
|
||||
api_ref(emqx_bridge_http_schema, <<"http">>, Method ++ "_connector"),
|
||||
% api_ref(emqx_bridge_mqtt_connector_schema, <<"mqtt_subscriber">>, Method ++ "_connector"),
|
||||
api_ref(emqx_bridge_mqtt_connector_schema, <<"mqtt">>, Method ++ "_connector")
|
||||
].
|
||||
|
||||
api_ref(Module, Type, Method) ->
|
||||
|
@ -110,10 +112,11 @@ examples(Method) ->
|
|||
|
||||
-if(?EMQX_RELEASE_EDITION == ee).
|
||||
schema_modules() ->
|
||||
[emqx_bridge_http_schema] ++ emqx_connector_ee_schema:schema_modules().
|
||||
[emqx_bridge_http_schema, emqx_bridge_mqtt_connector_schema] ++
|
||||
emqx_connector_ee_schema:schema_modules().
|
||||
-else.
|
||||
schema_modules() ->
|
||||
[emqx_bridge_http_schema].
|
||||
[emqx_bridge_http_schema, emqx_bridge_mqtt_connector_schema].
|
||||
-endif.
|
||||
|
||||
%% @doc Return old bridge(v1) and/or connector(v2) type
|
||||
|
@ -136,6 +139,8 @@ connector_type_to_bridge_types(influxdb) ->
|
|||
[influxdb, influxdb_api_v1, influxdb_api_v2];
|
||||
connector_type_to_bridge_types(mysql) ->
|
||||
[mysql];
|
||||
connector_type_to_bridge_types(mqtt) ->
|
||||
[mqtt];
|
||||
connector_type_to_bridge_types(pgsql) ->
|
||||
[pgsql];
|
||||
connector_type_to_bridge_types(redis) ->
|
||||
|
@ -147,9 +152,12 @@ connector_type_to_bridge_types(syskeeper_proxy) ->
|
|||
connector_type_to_bridge_types(timescale) ->
|
||||
[timescale];
|
||||
connector_type_to_bridge_types(iotdb) ->
|
||||
[iotdb].
|
||||
[iotdb];
|
||||
connector_type_to_bridge_types(elasticsearch) ->
|
||||
[elasticsearch].
|
||||
|
||||
actions_config_name() -> <<"actions">>.
|
||||
actions_config_name(action) -> <<"actions">>;
|
||||
actions_config_name(source) -> <<"sources">>.
|
||||
|
||||
has_connector_field(BridgeConf, ConnectorFields) ->
|
||||
lists:any(
|
||||
|
@ -183,18 +191,29 @@ bridge_configs_to_transform(
|
|||
end.
|
||||
|
||||
split_bridge_to_connector_and_action(
|
||||
{ConnectorsMap, {BridgeType, BridgeName, BridgeV1Conf, ConnectorFields, PreviousRawConfig}}
|
||||
{
|
||||
{ConnectorsMap, OrgConnectorType},
|
||||
{BridgeType, BridgeName, BridgeV1Conf, ConnectorFields, PreviousRawConfig}
|
||||
}
|
||||
) ->
|
||||
ConnectorMap =
|
||||
{ConnectorMap, ConnectorType} =
|
||||
case emqx_action_info:has_custom_bridge_v1_config_to_connector_config(BridgeType) of
|
||||
true ->
|
||||
case
|
||||
emqx_action_info:bridge_v1_config_to_connector_config(
|
||||
BridgeType, BridgeV1Conf
|
||||
);
|
||||
)
|
||||
of
|
||||
{ConType, ConMap} ->
|
||||
{ConMap, ConType};
|
||||
ConMap ->
|
||||
{ConMap, OrgConnectorType}
|
||||
end;
|
||||
false ->
|
||||
%% We do an automatic transformation to get the connector config
|
||||
%% if the callback is not defined.
|
||||
%% Get connector fields from bridge config
|
||||
NewCConMap =
|
||||
lists:foldl(
|
||||
fun({ConnectorFieldName, _Spec}, ToTransformSoFar) ->
|
||||
ConnectorFieldNameBin = to_bin(ConnectorFieldName),
|
||||
|
@ -216,7 +235,8 @@ split_bridge_to_connector_and_action(
|
|||
end,
|
||||
#{},
|
||||
ConnectorFields
|
||||
)
|
||||
),
|
||||
{NewCConMap, OrgConnectorType}
|
||||
end,
|
||||
%% Generate a connector name, if needed. Avoid doing so if there was a previous config.
|
||||
ConnectorName =
|
||||
|
@ -224,18 +244,29 @@ split_bridge_to_connector_and_action(
|
|||
#{<<"connector">> := ConnectorName0} -> ConnectorName0;
|
||||
_ -> generate_connector_name(ConnectorsMap, BridgeName, 0)
|
||||
end,
|
||||
ActionMap =
|
||||
OrgActionType = emqx_action_info:bridge_v1_type_to_action_type(BridgeType),
|
||||
{ActionMap, ActionType, ActionOrSource} =
|
||||
case emqx_action_info:has_custom_bridge_v1_config_to_action_config(BridgeType) of
|
||||
true ->
|
||||
case
|
||||
emqx_action_info:bridge_v1_config_to_action_config(
|
||||
BridgeType, BridgeV1Conf, ConnectorName
|
||||
);
|
||||
)
|
||||
of
|
||||
{ActionOrSource0, ActionType0, ActionMap0} ->
|
||||
{ActionMap0, ActionType0, ActionOrSource0};
|
||||
ActionMap0 ->
|
||||
{ActionMap0, OrgActionType, action}
|
||||
end;
|
||||
false ->
|
||||
ActionMap0 =
|
||||
transform_bridge_v1_config_to_action_config(
|
||||
BridgeV1Conf, ConnectorName, ConnectorFields
|
||||
)
|
||||
),
|
||||
{ActionMap0, OrgActionType, action}
|
||||
end,
|
||||
{BridgeType, BridgeName, ActionMap, ConnectorName, ConnectorMap}.
|
||||
{BridgeType, BridgeName, ActionMap, ActionType, ActionOrSource, ConnectorName, ConnectorMap,
|
||||
ConnectorType}.
|
||||
|
||||
maybe_project_to_connector_resource_opts(<<"resource_opts">>, OldResourceOpts) ->
|
||||
project_to_connector_resource_opts(OldResourceOpts);
|
||||
|
@ -305,9 +336,9 @@ generate_connector_name(ConnectorsMap, BridgeName, Attempt) ->
|
|||
ConnectorNameList =
|
||||
case Attempt of
|
||||
0 ->
|
||||
io_lib:format("connector_~s", [BridgeName]);
|
||||
io_lib:format("~s", [BridgeName]);
|
||||
_ ->
|
||||
io_lib:format("connector_~s_~p", [BridgeName, Attempt + 1])
|
||||
io_lib:format("~s_~p", [BridgeName, Attempt + 1])
|
||||
end,
|
||||
ConnectorName = iolist_to_binary(ConnectorNameList),
|
||||
case maps:is_key(ConnectorName, ConnectorsMap) of
|
||||
|
@ -338,7 +369,10 @@ transform_old_style_bridges_to_connector_and_actions_of_type(
|
|||
),
|
||||
ConnectorsWithTypeMap = maps:get(to_bin(ConnectorType), ConnectorsConfMap, #{}),
|
||||
BridgeConfigsToTransformWithConnectorConf = lists:zip(
|
||||
lists:duplicate(length(BridgeConfigsToTransform), ConnectorsWithTypeMap),
|
||||
lists:duplicate(
|
||||
length(BridgeConfigsToTransform),
|
||||
{ConnectorsWithTypeMap, ConnectorType}
|
||||
),
|
||||
BridgeConfigsToTransform
|
||||
),
|
||||
ActionConnectorTuples = lists:map(
|
||||
|
@ -347,10 +381,14 @@ transform_old_style_bridges_to_connector_and_actions_of_type(
|
|||
),
|
||||
%% Add connectors and actions and remove bridges
|
||||
lists:foldl(
|
||||
fun({BridgeType, BridgeName, ActionMap, ConnectorName, ConnectorMap}, RawConfigSoFar) ->
|
||||
fun(
|
||||
{BridgeType, BridgeName, ActionMap, NewActionType, ActionOrSource, ConnectorName,
|
||||
ConnectorMap, NewConnectorType},
|
||||
RawConfigSoFar
|
||||
) ->
|
||||
%% Add connector
|
||||
RawConfigSoFar1 = emqx_utils_maps:deep_put(
|
||||
[<<"connectors">>, to_bin(ConnectorType), ConnectorName],
|
||||
[<<"connectors">>, to_bin(NewConnectorType), ConnectorName],
|
||||
RawConfigSoFar,
|
||||
ConnectorMap
|
||||
),
|
||||
|
@ -360,12 +398,21 @@ transform_old_style_bridges_to_connector_and_actions_of_type(
|
|||
RawConfigSoFar1
|
||||
),
|
||||
%% Add action
|
||||
ActionType = emqx_action_info:bridge_v1_type_to_action_type(to_bin(BridgeType)),
|
||||
RawConfigSoFar3 = emqx_utils_maps:deep_put(
|
||||
[actions_config_name(), to_bin(ActionType), BridgeName],
|
||||
RawConfigSoFar3 =
|
||||
case ActionMap of
|
||||
none ->
|
||||
RawConfigSoFar2;
|
||||
_ ->
|
||||
emqx_utils_maps:deep_put(
|
||||
[
|
||||
actions_config_name(ActionOrSource),
|
||||
to_bin(NewActionType),
|
||||
BridgeName
|
||||
],
|
||||
RawConfigSoFar2,
|
||||
ActionMap
|
||||
),
|
||||
)
|
||||
end,
|
||||
RawConfigSoFar3
|
||||
end,
|
||||
RawConfig,
|
||||
|
@ -452,7 +499,23 @@ fields(connectors) ->
|
|||
desc => <<"HTTP Connector Config">>,
|
||||
required => false
|
||||
}
|
||||
)},
|
||||
{mqtt,
|
||||
mk(
|
||||
hoconsc:map(name, ref(emqx_bridge_mqtt_connector_schema, "config_connector")),
|
||||
#{
|
||||
desc => <<"MQTT Publisher Connector Config">>,
|
||||
required => false
|
||||
}
|
||||
)}
|
||||
% {mqtt_subscriber,
|
||||
% mk(
|
||||
% hoconsc:map(name, ref(emqx_bridge_mqtt_connector_schema, "config_connector")),
|
||||
% #{
|
||||
% desc => <<"MQTT Subscriber Connector Config">>,
|
||||
% required => false
|
||||
% }
|
||||
% )}
|
||||
] ++ enterprise_fields_connectors();
|
||||
fields("node_status") ->
|
||||
[
|
||||
|
@ -501,6 +564,7 @@ api_fields("put_connector", _Type, Fields) ->
|
|||
common_fields() ->
|
||||
[
|
||||
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
|
||||
{tags, emqx_schema:tags_schema()},
|
||||
{description, emqx_schema:description_schema()}
|
||||
].
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{application, emqx_dashboard_sso, [
|
||||
{description, "EMQX Dashboard Single Sign-On"},
|
||||
{vsn, "0.1.3"},
|
||||
{vsn, "0.1.4"},
|
||||
{registered, [emqx_dashboard_sso_sup]},
|
||||
{applications, [
|
||||
kernel,
|
||||
|
|
|
@ -163,7 +163,7 @@ convert_certs(
|
|||
) ->
|
||||
case
|
||||
emqx_tls_lib:ensure_ssl_files(
|
||||
Dir, #{enable => ture, certfile => Cert, keyfile => Key}, #{}
|
||||
Dir, #{enable => true, certfile => Cert, keyfile => Key}, #{}
|
||||
)
|
||||
of
|
||||
{ok, #{certfile := CertPath, keyfile := KeyPath}} ->
|
||||
|
|
|
@ -99,6 +99,7 @@
|
|||
emqx_bridge_hstreamdb,
|
||||
emqx_bridge_influxdb,
|
||||
emqx_bridge_iotdb,
|
||||
emqx_bridge_es,
|
||||
emqx_bridge_matrix,
|
||||
emqx_bridge_mongodb,
|
||||
emqx_bridge_mysql,
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
{id, "emqx_machine"},
|
||||
{description, "The EMQX Machine"},
|
||||
% strict semver, bump manually!
|
||||
{vsn, "0.2.17"},
|
||||
{vsn, "0.2.18"},
|
||||
{modules, []},
|
||||
{registered, []},
|
||||
{applications, [kernel, stdlib, emqx_ctl]},
|
||||
|
|
|
@ -26,11 +26,15 @@
|
|||
cluster_info/2,
|
||||
cluster_topology/2,
|
||||
invite_node/2,
|
||||
invite_node_async/2,
|
||||
get_invitation_status/2,
|
||||
force_leave/2,
|
||||
join/1,
|
||||
connected_replicants/0
|
||||
]).
|
||||
|
||||
-define(DEFAULT_INVITE_TIMEOUT, 15000).
|
||||
|
||||
namespace() -> "cluster".
|
||||
|
||||
api_spec() ->
|
||||
|
@ -40,7 +44,9 @@ paths() ->
|
|||
[
|
||||
"/cluster",
|
||||
"/cluster/topology",
|
||||
"/cluster/invitation",
|
||||
"/cluster/:node/invite",
|
||||
"/cluster/:node/invite_async",
|
||||
"/cluster/:node/force_leave"
|
||||
].
|
||||
|
||||
|
@ -70,6 +76,20 @@ schema("/cluster/topology") ->
|
|||
}
|
||||
}
|
||||
};
|
||||
schema("/cluster/invitation") ->
|
||||
#{
|
||||
'operationId' => get_invitation_status,
|
||||
get => #{
|
||||
desc => ?DESC(get_invitation_status),
|
||||
tags => [<<"Cluster">>],
|
||||
responses => #{
|
||||
200 => ?HOCON(
|
||||
?REF(invitation_status),
|
||||
#{desc => <<"Get invitation progress created by async operation">>}
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/cluster/:node/invite") ->
|
||||
#{
|
||||
'operationId' => invite_node,
|
||||
|
@ -77,6 +97,20 @@ schema("/cluster/:node/invite") ->
|
|||
desc => ?DESC(invite_node),
|
||||
tags => [<<"Cluster">>],
|
||||
parameters => [hoconsc:ref(node)],
|
||||
'requestBody' => hoconsc:ref(timeout),
|
||||
responses => #{
|
||||
200 => <<"ok">>,
|
||||
400 => emqx_dashboard_swagger:error_codes(['BAD_REQUEST'])
|
||||
}
|
||||
}
|
||||
};
|
||||
schema("/cluster/:node/invite_async") ->
|
||||
#{
|
||||
'operationId' => invite_node_async,
|
||||
put => #{
|
||||
desc => ?DESC(invite_node_async),
|
||||
tags => [<<"Cluster">>],
|
||||
parameters => [hoconsc:ref(node)],
|
||||
responses => #{
|
||||
200 => <<"ok">>,
|
||||
400 => emqx_dashboard_swagger:error_codes(['BAD_REQUEST'])
|
||||
|
@ -131,6 +165,71 @@ fields(core_replicants) ->
|
|||
#{desc => <<"Core node name">>, example => <<"emqx-core@127.0.0.1">>}
|
||||
)},
|
||||
{replicant_nodes, ?HOCON(?ARRAY(?REF(replicant_info)))}
|
||||
];
|
||||
fields(timeout) ->
|
||||
[
|
||||
{timeout,
|
||||
?HOCON(
|
||||
non_neg_integer(),
|
||||
#{desc => <<"Timeout in milliseconds">>, example => <<"15000">>}
|
||||
)}
|
||||
];
|
||||
fields(invitation_status) ->
|
||||
[
|
||||
{succeed,
|
||||
?HOCON(
|
||||
?ARRAY(?REF(node_invitation_succeed)),
|
||||
#{desc => <<"A list of information about nodes which are successfully invited">>}
|
||||
)},
|
||||
{in_progress,
|
||||
?HOCON(
|
||||
?ARRAY(?REF(node_invitation_in_progress)),
|
||||
#{desc => <<"A list of information about nodes that are processing invitations">>}
|
||||
)},
|
||||
{failed,
|
||||
?HOCON(
|
||||
?ARRAY(?REF(node_invitation_failed)),
|
||||
#{desc => <<"A list of information about nodes that failed to be invited">>}
|
||||
)}
|
||||
];
|
||||
fields(node_invitation_failed) ->
|
||||
fields(node_invitation_succeed) ++
|
||||
[
|
||||
{reason,
|
||||
?HOCON(
|
||||
binary(),
|
||||
#{desc => <<"Failure reason">>, example => <<"Bad RPC to target node">>}
|
||||
)}
|
||||
];
|
||||
fields(node_invitation_succeed) ->
|
||||
fields(node_invitation_in_progress) ++
|
||||
[
|
||||
{finished_at,
|
||||
?HOCON(
|
||||
emqx_utils_calendar:epoch_millisecond(),
|
||||
#{
|
||||
desc =>
|
||||
<<"The time of the async invitation result is received, millisecond precision epoch">>,
|
||||
example => <<"1705044829915">>
|
||||
}
|
||||
)}
|
||||
];
|
||||
fields(node_invitation_in_progress) ->
|
||||
[
|
||||
{node,
|
||||
?HOCON(
|
||||
binary(),
|
||||
#{desc => <<"Node name">>, example => <<"emqx2@127.0.0.1">>}
|
||||
)},
|
||||
{started_at,
|
||||
?HOCON(
|
||||
emqx_utils_calendar:epoch_millisecond(),
|
||||
#{
|
||||
desc =>
|
||||
<<"The start timestamp of the invitation, millisecond precision epoch">>,
|
||||
example => <<"1705044829915">>
|
||||
}
|
||||
)}
|
||||
].
|
||||
|
||||
validate_node(Node) ->
|
||||
|
@ -188,19 +287,43 @@ running_cores() ->
|
|||
Running = emqx:running_nodes(),
|
||||
lists:filter(fun(C) -> lists:member(C, Running) end, emqx:cluster_nodes(cores)).
|
||||
|
||||
invite_node(put, #{bindings := #{node := Node0}}) ->
|
||||
invite_node(put, #{bindings := #{node := Node0}, body := Body}) ->
|
||||
Node = ekka_node:parse_name(binary_to_list(Node0)),
|
||||
case emqx_mgmt_cluster_proto_v1:invite_node(Node, node()) of
|
||||
case maps:get(<<"timeout">>, Body, ?DEFAULT_INVITE_TIMEOUT) of
|
||||
T when not is_integer(T) ->
|
||||
{400, #{code => 'BAD_REQUEST', message => <<"timeout must be an integer">>}};
|
||||
T when T < 5000 ->
|
||||
{400, #{code => 'BAD_REQUEST', message => <<"timeout cannot be less than 5000ms">>}};
|
||||
Timeout ->
|
||||
case emqx_mgmt_cluster_proto_v3:invite_node(Node, node(), Timeout) of
|
||||
ok ->
|
||||
{200};
|
||||
ignore ->
|
||||
{400, #{code => 'BAD_REQUEST', message => <<"Can't invite self">>}};
|
||||
{400, #{code => 'BAD_REQUEST', message => <<"Cannot invite self">>}};
|
||||
{badrpc, Error} ->
|
||||
{400, #{code => 'BAD_REQUEST', message => error_message(Error)}};
|
||||
{error, Error} ->
|
||||
{400, #{code => 'BAD_REQUEST', message => error_message(Error)}}
|
||||
end
|
||||
end.
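%% Illustrative sketch (hypothetical request map, not part of the change):
%% timeouts below 5000 ms are rejected before any RPC is attempted.
invite_node_timeout_example() ->
    Req = #{
        bindings => #{node => <<"emqx2@127.0.0.1">>},
        body => #{<<"timeout">> => 1000}
    },
    {400, #{code := 'BAD_REQUEST'}} = invite_node(put, Req),
    ok.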
|
||||
|
||||
invite_node_async(put, #{bindings := #{node := Node0}}) ->
|
||||
Node = ekka_node:parse_name(binary_to_list(Node0)),
|
||||
case emqx_mgmt_cluster:invite_async(Node) of
|
||||
ok ->
|
||||
{200};
|
||||
ignore ->
|
||||
{400, #{code => 'BAD_REQUEST', message => <<"Can't invite self">>}};
|
||||
{error, {already_started, _Pid}} ->
|
||||
{400, #{
|
||||
code => 'BAD_REQUEST',
|
||||
message => <<"The invitation task already created for this node">>
|
||||
}}
|
||||
end.
|
||||
|
||||
get_invitation_status(get, _) ->
|
||||
{200, format_invitation_status(emqx_mgmt_cluster:invitation_status())}.
|
||||
|
||||
force_leave(delete, #{bindings := #{node := Node0}}) ->
|
||||
Node = ekka_node:parse_name(binary_to_list(Node0)),
|
||||
case ekka:force_leave(Node) of
|
||||
|
@ -222,3 +345,27 @@ connected_replicants() ->
|
|||
|
||||
error_message(Msg) ->
|
||||
iolist_to_binary(io_lib:format("~p", [Msg])).
|
||||
|
||||
format_invitation_status(#{
|
||||
succeed := Succeed,
|
||||
in_progress := InProgress,
|
||||
failed := Failed
|
||||
}) ->
|
||||
#{
|
||||
succeed => format_invitation_info(Succeed),
|
||||
in_progress => format_invitation_info(InProgress),
|
||||
failed => format_invitation_info(Failed)
|
||||
}.
|
||||
|
||||
format_invitation_info(L) when is_list(L) ->
|
||||
lists:map(
|
||||
fun(Info) ->
|
||||
Info1 = emqx_utils_maps:update_if_present(
|
||||
started_at, fun emqx_utils_calendar:epoch_to_rfc3339/1, Info
|
||||
),
|
||||
emqx_utils_maps:update_if_present(
|
||||
finished_at, fun emqx_utils_calendar:epoch_to_rfc3339/1, Info1
|
||||
)
|
||||
end,
|
||||
L
|
||||
).
|
||||
|
|
|
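For reference, the started_at/finished_at conversion performed by format_invitation_info/1 can be reproduced with plain OTP. This is only a sketch: it assumes emqx_utils_calendar:epoch_to_rfc3339/1 behaves like calendar:system_time_to_rfc3339/2 with millisecond units, and epoch_ms_to_rfc3339/1 is a hypothetical helper name used purely for illustration.

%% Sketch only: convert a millisecond-precision epoch (as stored in
%% started_at/finished_at) to an RFC 3339 string using the standard library.
epoch_ms_to_rfc3339(EpochMs) when is_integer(EpochMs) ->
    calendar:system_time_to_rfc3339(EpochMs, [{unit, millisecond}, {offset, "Z"}]).
%% epoch_ms_to_rfc3339(1705044829915) returns roughly "2024-01-12T07:33:49.915Z".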
@@ -0,0 +1,201 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_mgmt_cluster).

-behaviour(gen_server).

%% APIs
-export([start_link/0]).

-export([invite_async/1, invitation_status/0]).

%% gen_server callbacks
-export([
    init/1,
    handle_call/3,
    handle_cast/2,
    handle_info/2,
    terminate/2,
    code_change/3
]).

%%--------------------------------------------------------------------
%% APIs
%%--------------------------------------------------------------------

start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

-spec invite_async(atom()) -> ok | ignore | {error, {already_started, pid()}}.
invite_async(Node) ->
    %% Proxy the invitation task to the leader node
    JoinTo = mria_membership:leader(),
    case Node =/= JoinTo of
        true ->
            gen_server:call({?MODULE, JoinTo}, {invite_async, Node, JoinTo}, infinity);
        false ->
            ignore
    end.

-spec invitation_status() -> map().
invitation_status() ->
    Leader = mria_membership:leader(),
    gen_server:call({?MODULE, Leader}, invitation_status, infinity).

%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------

init([]) ->
    process_flag(trap_exit, true),
    {ok, #{}}.

handle_call({invite_async, Node, JoinTo}, _From, State) ->
    case maps:get(Node, State, undefined) of
        undefined ->
            Caller = self(),
            Task = spawn_link_invite_worker(Node, JoinTo, Caller),
            {reply, ok, State#{Node => Task}};
        WorkerPid ->
            {reply, {error, {already_started, WorkerPid}}, State}
    end;
handle_call(invitation_status, _From, State) ->
    {reply, state_to_invitation_status(State), State};
handle_call(_Request, _From, State) ->
    Reply = ok,
    {reply, Reply, State}.

handle_cast(_Msg, State) ->
    {noreply, State}.

handle_info({task_done, _WorkerPid, Node, Result}, State) ->
    case maps:take(Node, State) of
        {Task, State1} ->
            History = maps:get(history, State1, #{}),
            Task1 = Task#{
                result => Result,
                finished_at => erlang:system_time(millisecond)
            },
            {noreply, State1#{history => History#{Node => Task1}}};
        error ->
            {noreply, State}
    end;
handle_info({'EXIT', WorkerPid, Reason}, State) ->
    case take_node_name_via_worker_pid(WorkerPid, State) of
        {key_value, Node, Task, State1} ->
            History = maps:get(history, State1, #{}),
            Task1 = Task#{
                result => {error, Reason},
                finished_at => erlang:system_time(millisecond)
            },
            {noreply, State1#{history => History#{Node => Task1}}};
        error ->
            {noreply, State}
    end;
handle_info(_Info, State) ->
    {noreply, State}.

terminate(_Reason, _State) ->
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.

%%--------------------------------------------------------------------
%% Internal funcs
%%--------------------------------------------------------------------

spawn_link_invite_worker(Node, JoinTo, Caller) ->
    Pid = erlang:spawn_link(
        fun() ->
            Result =
                case emqx_mgmt_cluster_proto_v3:invite_node(Node, JoinTo, infinity) of
                    ok ->
                        ok;
                    {error, {already_in_cluster, _Node}} ->
                        ok;
                    {error, _} = E ->
                        E;
                    {badrpc, Reason} ->
                        {error, {badrpc, Reason}}
                end,
            Caller ! {task_done, self(), Node, Result}
        end
    ),
    #{worker => Pid, started_at => erlang:system_time(millisecond)}.

take_node_name_via_worker_pid(WorkerPid, Map) when is_map(Map) ->
    Key = find_node_name_via_worker_pid(WorkerPid, maps:next(maps:iterator(Map))),
    case maps:take(Key, Map) of
        error ->
            error;
        {Value, Map1} ->
            {key_value, Key, Value, Map1}
    end.

find_node_name_via_worker_pid(_WorkerPid, none) ->
    error;
find_node_name_via_worker_pid(WorkerPid, {Key, Task, I}) ->
    case maps:get(worker, Task, undefined) of
        WorkerPid ->
            Key;
        _ ->
            find_node_name_via_worker_pid(WorkerPid, maps:next(I))
    end.

state_to_invitation_status(State) ->
    History = maps:get(history, State, #{}),
    {Succ, Failed} = lists:foldl(
        fun({Node, Task}, {SuccAcc, FailedAcc}) ->
            #{
                started_at := StartedAt,
                finished_at := FinishedAt,
                result := Result
            } = Task,
            Ret = #{node => Node, started_at => StartedAt, finished_at => FinishedAt},
            case is_succeed_result(Result) of
                true ->
                    {[Ret | SuccAcc], FailedAcc};
                false ->
                    {SuccAcc, [Ret#{reason => format_error_reason(Result)} | FailedAcc]}
            end
        end,
        {[], []},
        maps:to_list(History)
    ),

    InPro = maps:fold(
        fun(Node, _Task = #{started_at := StartedAt}, Acc) ->
            [#{node => Node, started_at => StartedAt} | Acc]
        end,
        [],
        maps:without([history], State)
    ),
    #{succeed => Succ, in_progress => InPro, failed => Failed}.

is_succeed_result(Result) ->
    case Result of
        ok ->
            true;
        {error, {already_in_cluster, _Node}} ->
            true;
        _ ->
            false
    end.

format_error_reason(Term) ->
    iolist_to_binary(io_lib:format("~0p", [Term])).
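As a rough usage sketch (not part of the change itself): the asynchronous flow above is driven entirely by invite_async/1 and invitation_status/0. The node name and timestamps below are placeholders, and the session is assumed to run on a core node where this server is started.

%% Hypothetical remote-shell session on a running core node.
1> emqx_mgmt_cluster:invite_async('emqx2@127.0.0.1').
ok
%% While the linked worker is still joining the node, the task is in_progress:
2> emqx_mgmt_cluster:invitation_status().
#{succeed => [], failed => [],
  in_progress => [#{node => 'emqx2@127.0.0.1', started_at => 1705044829915}]}
%% After the worker sends {task_done, Pid, Node, Result}, the task moves into the history:
3> emqx_mgmt_cluster:invitation_status().
#{succeed =>
      [#{node => 'emqx2@127.0.0.1', started_at => 1705044829915, finished_at => 1705044830101}],
  in_progress => [], failed => []}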
@@ -33,7 +33,8 @@ init([]) ->
        _ ->
            []
    end,
    {ok, {{one_for_one, 1, 5}, Workers}}.
    Cluster = child_spec(emqx_mgmt_cluster, 5000, worker),
    {ok, {{one_for_one, 1, 5}, [Cluster | Workers]}}.

child_spec(Mod, Shutdown, Type) ->
    #{
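The hunk above is cut off at the opening of the returned map, so the body of child_spec/3 is not shown in this diff. Purely as an illustration of the OTP convention such a helper presumably follows, a conventional child specification looks like the sketch below; the field values are assumptions, not the file's actual contents.

%% Illustrative OTP child specification only; not the actual child_spec/3 body.
child_spec_example(Mod, Shutdown, Type) ->
    #{
        id => Mod,
        start => {Mod, start_link, []},
        restart => permanent,
        shutdown => Shutdown,
        type => Type,
        modules => [Mod]
    }.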
@@ -0,0 +1,38 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_mgmt_cluster_proto_v3).

-behaviour(emqx_bpapi).

-export([
    introduced_in/0,
    invite_node/3,
    connected_replicants/1
]).

-include_lib("emqx/include/bpapi.hrl").

introduced_in() ->
    "5.5.0".

-spec invite_node(node(), node(), timeout()) -> ok | ignore | {error, term()} | emqx_rpc:badrpc().
invite_node(Node, Self, Timeout) when is_integer(Timeout); Timeout =:= infinity ->
    rpc:call(Node, emqx_mgmt_api_cluster, join, [Self], Timeout).

-spec connected_replicants([node()]) -> emqx_rpc:multicall_result().
connected_replicants(Nodes) ->
    rpc:multicall(Nodes, emqx_mgmt_api_cluster, connected_replicants, [], 30_000).
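The behavioural difference from the v1 protocol is the explicit timeout passed to rpc:call/5. The following self-contained sketch only illustrates that OTP behaviour; call_with_timeout/2 and the target node are hypothetical, and nothing EMQX-specific is assumed.

%% rpc:call/5 returns {badrpc, timeout} when the remote call does not complete
%% within the given time; invite_node/3 propagates such tuples to its caller,
%% and the API handler turns them into a 400 response via error_message/1.
call_with_timeout(Node, Timeout) ->
    case rpc:call(Node, erlang, node, [], Timeout) of
        {badrpc, Reason} -> {error, {badrpc, Reason}};
        Result -> {ok, Result}
    end.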
@@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.

@@ -35,6 +35,12 @@ end_per_suite(_) ->
init_per_testcase(TC = t_cluster_topology_api_replicants, Config0) ->
    Config = [{tc_name, TC} | Config0],
    [{cluster, cluster(Config)} | setup(Config)];
init_per_testcase(TC = t_cluster_invite_api_timeout, Config0) ->
    Config = [{tc_name, TC} | Config0],
    [{cluster, cluster(Config)} | setup(Config)];
init_per_testcase(TC = t_cluster_invite_async, Config0) ->
    Config = [{tc_name, TC} | Config0],
    [{cluster, cluster(Config)} | setup(Config)];
init_per_testcase(_TC, Config) ->
    emqx_mgmt_api_test_util:init_suite(?APPS),
    Config.

@@ -42,6 +48,12 @@ init_per_testcase(_TC, Config) ->
end_per_testcase(t_cluster_topology_api_replicants, Config) ->
    emqx_cth_cluster:stop(?config(cluster, Config)),
    cleanup(Config);
end_per_testcase(t_cluster_invite_api_timeout, Config) ->
    emqx_cth_cluster:stop(?config(cluster, Config)),
    cleanup(Config);
end_per_testcase(t_cluster_invite_async, Config) ->
    emqx_cth_cluster:stop(?config(cluster, Config)),
    cleanup(Config);
end_per_testcase(_TC, _Config) ->
    emqx_mgmt_api_test_util:end_suite(?APPS).

@@ -77,12 +89,186 @@ t_cluster_topology_api_replicants(Config) ->
        || Resp <- [lists:sort(R) || R <- [Core1Resp, Core2Resp, ReplResp]]
    ].

t_cluster_invite_api_timeout(Config) ->
    %% assert the cluster is created
    [Core1, Core2, Replicant] = _NodesList = ?config(cluster, Config),
    {200, Core1Resp} = rpc:call(Core1, emqx_mgmt_api_cluster, cluster_topology, [get, #{}]),
    ?assertMatch(
        [
            #{
                core_node := Core1,
                replicant_nodes :=
                    [#{node := Replicant, streams := _}]
            },
            #{
                core_node := Core2,
                replicant_nodes :=
                    [#{node := Replicant, streams := _}]
            }
        ],
        lists:sort(Core1Resp)
    ),

    %% force leave the core2
    {204} = rpc:call(
        Core1,
        emqx_mgmt_api_cluster,
        force_leave,
        [delete, #{bindings => #{node => atom_to_binary(Core2)}}]
    ),

    %% assert the cluster is updated
    {200, Core1Resp2} = rpc:call(Core1, emqx_mgmt_api_cluster, cluster_topology, [get, #{}]),
    ?assertMatch(
        [
            #{
                core_node := Core1,
                replicant_nodes :=
                    [#{node := Replicant, streams := _}]
            }
        ],
        lists:sort(Core1Resp2)
    ),

    %% assert timeout parameter checking
    Invite = fun(Node, Timeout) ->
        Node1 = atom_to_binary(Node),
        rpc:call(
            Core1,
            emqx_mgmt_api_cluster,
            invite_node,
            [put, #{bindings => #{node => Node1}, body => #{<<"timeout">> => Timeout}}]
        )
    end,
    ?assertMatch(
        {400, #{code := 'BAD_REQUEST', message := <<"timeout must be an integer">>}},
        Invite(Core2, not_a_integer_timeout)
    ),
    ?assertMatch(
        {400, #{code := 'BAD_REQUEST', message := <<"timeout cannot be less than 5000ms">>}},
        Invite(Core2, 3000)
    ),

    %% assert cluster is updated after invite
    ?assertMatch(
        {200},
        Invite(Core2, 15000)
    ),
    {200, Core1Resp3} = rpc:call(Core1, emqx_mgmt_api_cluster, cluster_topology, [get, #{}]),
    ?assertMatch(
        [
            #{
                core_node := Core1,
                replicant_nodes :=
                    [#{node := Replicant, streams := _}]
            },
            #{
                core_node := Core2,
                replicant_nodes := _
            }
        ],
        lists:sort(Core1Resp3)
    ).

t_cluster_invite_async(Config) ->
    %% assert the cluster is created
    [Core1, Core2, Replicant] = _NodesList = ?config(cluster, Config),
    {200, Core1Resp} = rpc:call(Core1, emqx_mgmt_api_cluster, cluster_topology, [get, #{}]),
    ?assertMatch(
        [
            #{
                core_node := Core1,
                replicant_nodes :=
                    [#{node := Replicant, streams := _}]
            },
            #{
                core_node := Core2,
                replicant_nodes :=
                    [#{node := Replicant, streams := _}]
            }
        ],
        lists:sort(Core1Resp)
    ),

    %% force leave the core2 and replicant
    {204} = rpc:call(
        Core1,
        emqx_mgmt_api_cluster,
        force_leave,
        [delete, #{bindings => #{node => atom_to_binary(Core2)}}]
    ),
    %% assert the cluster is updated
    {200, Core1Resp2} = rpc:call(Core1, emqx_mgmt_api_cluster, cluster_topology, [get, #{}]),
    ?assertMatch(
        [
            #{
                core_node := Core1,
                replicant_nodes := [_]
            }
        ],
        lists:sort(Core1Resp2)
    ),

    Invite = fun(Node) ->
        Node1 = atom_to_binary(Node),
        rpc:call(
            Core1,
            emqx_mgmt_api_cluster,
            invite_node_async,
            [put, #{bindings => #{node => Node1}}]
        )
    end,

    %% parameter checking
    ?assertMatch(
        {400, #{code := 'BAD_REQUEST', message := <<"Can't invite self">>}},
        Invite(Core1)
    ),
    ?assertMatch(
        {200},
        Invite(Core2)
    ),
    %% already invited
    ?assertMatch(
        {400, #{
            code := 'BAD_REQUEST',
            message := <<"The invitation task already created for this node">>
        }},
        Invite(Core2)
    ),

    %% assert: core2 is in the in_progress status
    ?assertMatch(
        {200, #{in_progress := [#{node := Core2}]}},
        rpc:call(Core1, emqx_mgmt_api_cluster, get_invitation_status, [get, #{}])
    ),

    %% wait for the async invitation to succeed
    ?assertMatch({succeed, _}, waiting_the_async_invitation_succeed(Core1, Core2)),

    {200, Core1Resp3} = rpc:call(Core1, emqx_mgmt_api_cluster, cluster_topology, [get, #{}]),
    ?assertMatch(
        [
            #{
                core_node := Core1,
                replicant_nodes :=
                    [#{node := Replicant, streams := _}]
            },
            #{
                core_node := Core2,
                replicant_nodes := _
            }
        ],
        lists:sort(Core1Resp3)
    ).

cluster(Config) ->
    NodeSpec = #{apps => ?APPS},
    Nodes = emqx_cth_cluster:start(
        [
            {data_backup_core1, #{role => core, apps => ?APPS}},
            {data_backup_core2, #{role => core, apps => ?APPS}},
            {data_backup_replicant, #{role => replicant, apps => ?APPS}}
            {data_backup_core1, NodeSpec#{role => core}},
            {data_backup_core2, NodeSpec#{role => core}},
            {data_backup_replicant, NodeSpec#{role => replicant}}
        ],
        #{work_dir => work_dir(Config)}
    ),

@@ -98,3 +284,37 @@ cleanup(Config) ->

work_dir(Config) ->
    filename:join(?config(priv_dir, Config), ?config(tc_name, Config)).

waiting_the_async_invitation_succeed(Node, TargetNode) ->
    waiting_the_async_invitation_succeed(Node, TargetNode, 100).

waiting_the_async_invitation_succeed(_Node, _TargetNode, 0) ->
    error(timeout);
waiting_the_async_invitation_succeed(Node, TargetNode, N) ->
    {200, #{
        in_progress := InProgress,
        succeed := Succeed,
        failed := Failed
    }} = rpc:call(Node, emqx_mgmt_api_cluster, get_invitation_status, [get, #{}]),
    case find_node_info_list(TargetNode, InProgress) of
        error ->
            case find_node_info_list(TargetNode, Succeed) of
                error ->
                    case find_node_info_list(TargetNode, Failed) of
                        error -> error;
                        Info1 -> {failed, Info1}
                    end;
                Info2 ->
                    {succeed, Info2}
            end;
        _Info ->
            timer:sleep(1000),
            waiting_the_async_invitation_succeed(Node, TargetNode, N - 1)
    end.

find_node_info_list(Node, List) ->
    L = lists:filter(fun(#{node := N}) -> N =:= Node end, List),
    case L of
        [] -> error;
        [Info] -> Info
    end.
@@ -412,7 +412,7 @@ t_create_webhook_v1_bridges_api(Config) ->
        #{
            <<"webhook_name">> =>
                #{
                    <<"connector">> => <<"connector_webhook_name">>,
                    <<"connector">> => <<"webhook_name">>,
                    <<"description">> => <<>>,
                    <<"enable">> => true,
                    <<"parameters">> =>

@@ -440,7 +440,7 @@ t_create_webhook_v1_bridges_api(Config) ->
        #{
            <<"http">> =>
                #{
                    <<"connector_webhook_name">> =>
                    <<"webhook_name">> =>
                        #{
                            <<"connect_timeout">> => <<"15s">>,
                            <<"description">> => <<>>,
@@ -2,7 +2,7 @@
{application, emqx_prometheus, [
    {description, "Prometheus for EMQX"},
    % strict semver, bump manually!
    {vsn, "5.0.18"},
    {vsn, "5.0.19"},
    {modules, []},
    {registered, [emqx_prometheus_sup]},
    {applications, [kernel, stdlib, prometheus, emqx, emqx_management]},
@@ -486,6 +486,8 @@ emqx_collect(emqx_authorization_deny, Stats) ->
    counter_metric(?C('authorization.deny', Stats));
emqx_collect(emqx_authorization_cache_hit, Stats) ->
    counter_metric(?C('authorization.cache_hit', Stats));
emqx_collect(emqx_authorization_cache_miss, Stats) ->
    counter_metric(?C('authorization.cache_miss', Stats));
emqx_collect(emqx_authorization_superuser, Stats) ->
    counter_metric(?C('authorization.superuser', Stats));
emqx_collect(emqx_authorization_nomatch, Stats) ->

@@ -591,6 +593,7 @@ emqx_metrics_acl() ->
        emqx_authorization_allow,
        emqx_authorization_deny,
        emqx_authorization_cache_hit,
        emqx_authorization_cache_miss,
        emqx_authorization_superuser,
        emqx_authorization_nomatch,
        emqx_authorization_matched_allow,
@@ -241,6 +241,12 @@ parse_user_properties(<<"${pub_props.'User-Property'}">>) ->
    %% we do not want to force users to select the value
    %% the value will be taken from Env.pub_props directly
    ?ORIGINAL_USER_PROPERTIES;
parse_user_properties(<<"${.pub_props.'User-Property'}">>) ->
    %% keep the original
    %% avoid processing this special variable because
    %% we do not want to force users to select the value
    %% the value will be taken from Env.pub_props directly
    ?ORIGINAL_USER_PROPERTIES;
parse_user_properties(<<"${", _/binary>> = V) ->
    %% use a variable
    emqx_template:parse(V);