Compare commits


2 Commits

Author SHA1 Message Date
Shawn dd6b214231 fix: write quoted strings to influxdb failed 2024-01-12 20:11:22 +08:00
Shawn 456a14fe20 fix: cannot write literal numbers to influxdb 2024-01-12 14:31:55 +08:00
2466 changed files with 28236 additions and 137804 deletions
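The two listed commits concern how field values are encoded in InfluxDB line protocol: string values must be double-quoted and integer values need an `i` suffix, otherwise the write is rejected or the value is coerced. A minimal sketch of the expected wire format, assuming an InfluxDB 1.x `/write` endpoint and hypothetical measurement/field names (not taken from this diff):
```
# String field values are double-quoted; integers carry an 'i' suffix; bare numbers are floats.
curl -sS -XPOST 'http://localhost:8086/write?db=emqx' --data-binary \
  'mqtt_msg,clientid=c1 payload="a quoted string",qos=1i,temperature=23.5 1705060282000000000'
```
Without the quotes a string payload fails to parse as a field value, and without the `i` suffix a whole number is stored as a float rather than an integer.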

View File

@ -10,19 +10,10 @@ CASSANDRA_TAG=3.11
MINIO_TAG=RELEASE.2023-03-20T20-16-18Z
OPENTS_TAG=9aa7f88
KINESIS_TAG=2.1
HSTREAMDB_TAG=v0.19.3
HSTREAMDB_TAG=v0.16.1
HSTREAMDB_ZK_TAG=3.8.1
MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server
SQLSERVER_TAG=2019-CU19-ubuntu-20.04
# Password for the 'elastic' user (at least 6 characters)
ELASTIC_PASSWORD="emqx123"
# Password for the 'kibana_system' user (at least 6 characters)
KIBANA_PASSWORD="emqx123"
# Version of Elastic products
ELASTIC_TAG=8.11.4
LICENSE=basic
TARGET=emqx/emqx

View File

@ -1,24 +0,0 @@
version: '3.9'
services:
azurite:
container_name: azurite
image: mcr.microsoft.com/azure-storage/azurite:3.30.0
restart: always
expose:
- "10000"
# ports:
# - "10000:10000"
networks:
- emqx_bridge
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:10000"]
interval: 30s
timeout: 5s
retries: 4
command:
- azurite-blob
- "--blobHost"
- 0.0.0.0
- "-d"
- debug.log

View File

@ -1,30 +0,0 @@
version: '3.9'
services:
couchbase:
container_name: couchbase
hostname: couchbase
image: ghcr.io/emqx/couchbase:1.0.0
restart: always
expose:
- 8091-8093
# ports:
# - "8091-8093:8091-8093"
networks:
- emqx_bridge
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8093/admin/ping"]
interval: 30s
timeout: 5s
retries: 4
environment:
- CLUSTER=localhost
- USER=admin
- PASS=public
- PORT=8091
- RAMSIZEMB=2048
- RAMSIZEINDEXMB=512
- RAMSIZEFTSMB=512
- BUCKETS=mqtt
- BUCKETSIZES=100
- AUTOREBALANCE=true

View File

@ -1,111 +0,0 @@
version: "3.9"
# hint: run the following if the container fails to start locally
# sysctl -w vm.max_map_count=262144
services:
setup:
image: public.ecr.aws/elastic/elasticsearch:${ELASTIC_TAG}
volumes:
- ./elastic:/usr/share/elasticsearch/config/certs
user: "0"
command: >
bash -c '
if [ x${ELASTIC_PASSWORD} == x ]; then
echo "Set the ELASTIC_PASSWORD environment variable in the .env file";
exit 1;
elif [ x${KIBANA_PASSWORD} == x ]; then
echo "Set the KIBANA_PASSWORD environment variable in the .env file";
exit 1;
fi;
echo "Setting file permissions"
chown -R root:root config/certs;
find . -type d -exec chmod 750 \{\} \;;
find . -type f -exec chmod 640 \{\} \;;
echo "Waiting for Elasticsearch availability";
until curl -s --cacert config/certs/ca/ca.crt https://es01:9200 | grep -q "missing authentication credentials"; do sleep 30; done;
echo "Setting kibana_system password";
until curl -s -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://es01:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}" | grep -q "^{}"; do sleep 10; done;
echo "All done!";
'
healthcheck:
test: ["CMD-SHELL", "[ -f config/certs/ca/ca.crt ]"]
interval: 1s
timeout: 5s
retries: 120
es01:
depends_on:
setup:
condition: service_healthy
image: public.ecr.aws/elastic/elasticsearch:${ELASTIC_TAG}
container_name: elasticsearch
hostname: elasticsearch
volumes:
- ./elastic:/usr/share/elasticsearch/config/certs
- esdata01:/usr/share/elasticsearch/data
ports:
- 9200:9200
environment:
- node.name=es01
- ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
- bootstrap.memory_lock=true
- discovery.type=single-node
- xpack.security.enabled=true
- xpack.security.http.ssl.enabled=true
- xpack.security.http.ssl.key=certs/es01/es01.key
- xpack.security.http.ssl.certificate=certs/es01/es01.crt
- xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
- xpack.license.self_generated.type=${LICENSE}
mem_limit: 4G
ulimits:
memlock:
soft: -1
hard: -1
healthcheck:
test:
[
"CMD-SHELL",
"curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'",
]
interval: 10s
timeout: 10s
retries: 120
restart: always
networks:
- emqx_bridge
kibana:
depends_on:
es01:
condition: service_healthy
image: public.ecr.aws/elastic/kibana:${ELASTIC_TAG}
volumes:
- ./elastic:/usr/share/kibana/config/certs
- kibanadata:/usr/share/kibana/data
ports:
- 5601:5601
environment:
- SERVERNAME=kibana
- ELASTICSEARCH_HOSTS=https://es01:9200
- ELASTICSEARCH_USERNAME=kibana_system
- ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
- ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt
mem_limit: 1073741824
healthcheck:
test:
[
"CMD-SHELL",
"curl -s -I http://localhost:5601 | grep -q 'HTTP/1.1 302 Found'",
]
interval: 10s
timeout: 10s
retries: 120
restart: always
networks:
- emqx_bridge
volumes:
esdata01:
driver: local
kibanadata:
driver: local

View File

@ -4,7 +4,7 @@ services:
greptimedb:
container_name: greptimedb
hostname: greptimedb
image: greptime/greptimedb:v0.7.1
image: greptime/greptimedb:v0.4.4
expose:
- "4000"
- "4001"

View File

@ -1,53 +1,24 @@
version: '3.9'
services:
iotdb_1_3_0:
container_name: iotdb130
hostname: iotdb130
image: apache/iotdb:1.3.0-standalone
restart: always
environment:
- enable_rest_service=true
- cn_internal_address=iotdb130
- cn_internal_port=10710
- cn_consensus_port=10720
- cn_seed_config_node=iotdb130:10710
- dn_rpc_address=iotdb130
- dn_internal_address=iotdb130
- dn_rpc_port=6667
- dn_mpp_data_exchange_port=10740
- dn_schema_region_consensus_port=10750
- dn_data_region_consensus_port=10760
- dn_seed_config_node=iotdb130:10710
# volumes:
# - ./data:/iotdb/data
# - ./logs:/iotdb/logs
expose:
- "18080"
# IoTDB's REST interface, uncomment for local testing
# ports:
# - "18080:18080"
networks:
- emqx_bridge
iotdb_1_1_0:
container_name: iotdb110
hostname: iotdb110
iotdb:
container_name: iotdb
hostname: iotdb
image: apache/iotdb:1.1.0-standalone
restart: always
environment:
- enable_rest_service=true
- cn_internal_address=iotdb110
- cn_internal_address=iotdb
- cn_internal_port=10710
- cn_consensus_port=10720
- cn_target_config_node_list=iotdb110:10710
- dn_rpc_address=iotdb110
- dn_internal_address=iotdb110
- cn_target_config_node_list=iotdb:10710
- dn_rpc_address=iotdb
- dn_internal_address=iotdb
- dn_rpc_port=6667
- dn_mpp_data_exchange_port=10740
- dn_schema_region_consensus_port=10750
- dn_data_region_consensus_port=10760
- dn_target_config_node_list=iotdb110:10710
- dn_target_config_node_list=iotdb:10710
# volumes:
# - ./data:/iotdb/data
# - ./logs:/iotdb/logs

View File

@ -18,7 +18,7 @@ services:
- /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
kdc:
hostname: kdc.emqx.net
image: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04
image: ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu22.04
container_name: kdc.emqx.net
expose:
- 88 # kdc

View File

@ -10,7 +10,7 @@ services:
nofile: 1024
image: openldap
#ports:
# - "389:389"
# - 389:389
volumes:
- ./certs/ca.crt:/etc/certs/ca.crt
restart: always

View File

@ -9,12 +9,10 @@ services:
expose:
- "15672"
- "5672"
- "5671"
# We don't want to take ports from the host
#ports:
# ports:
# - "15672:15672"
# - "5672:5672"
# - "5671:5671"
volumes:
- ./certs/ca.crt:/opt/certs/ca.crt
- ./certs/server.crt:/opt/certs/server.crt

View File

@ -1,41 +0,0 @@
version: '3.9'
services:
mqnamesrvssl:
image: apache/rocketmq:4.9.4
container_name: rocketmq_namesrv_ssl
# ports:
# - 9876:9876
volumes:
- ./rocketmq/logs_ssl:/opt/logs
- ./rocketmq/store_ssl:/opt/store
environment:
JAVA_OPT: "-Dtls.server.mode=enforcing"
command: ./mqnamesrv
networks:
- emqx_bridge
mqbrokerssl:
image: apache/rocketmq:4.9.4
container_name: rocketmq_broker_ssl
# ports:
# - 10909:10909
# - 10911:10911
volumes:
- ./rocketmq/logs_ssl:/opt/logs
- ./rocketmq/store_ssl:/opt/store
- ./rocketmq/conf_ssl/broker.conf:/etc/rocketmq/broker.conf
- ./rocketmq/conf_ssl/plain_acl.yml:/home/rocketmq/rocketmq-4.9.4/conf/plain_acl.yml
environment:
NAMESRV_ADDR: "rocketmq_namesrv_ssl:9876"
JAVA_OPTS: " -Duser.home=/opt -Drocketmq.broker.diskSpaceWarningLevelRatio=0.99"
JAVA_OPT_EXT: "-server -Xms512m -Xmx512m -Xmn512m -Dtls.server.mode=enforcing"
command: ./mqbroker -c /etc/rocketmq/broker.conf
depends_on:
- mqnamesrvssl
networks:
- emqx_bridge
networks:
emqx_bridge:
driver: bridge

View File

@ -39,10 +39,6 @@ services:
- 19042:9042
# Cassandra TLS
- 19142:9142
# Cassandra No Auth
- 19043:9043
# Cassandra TLS No Auth
- 19143:9143
# S3
- 19000:19000
# S3 TLS

View File

@ -3,7 +3,7 @@ version: '3.9'
services:
erlang:
container_name: erlang
image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04}
image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu22.04}
env_file:
- credentials.env
- conf.env

View File

@ -1,20 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDSjCCAjKgAwIBAgIVAIrN275DCtGnotTPpxwvQ5751N4OMA0GCSqGSIb3DQEB
CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu
ZXJhdGVkIENBMB4XDTI0MDExNjAyMzIyMFoXDTI3MDExNTAyMzIyMFowNDEyMDAG
A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew
ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCy0nwiEurUkIPFMLV1weVM
pPk/AlwZUzqjkeL44gsY53XI9Q05w/sL9u6PzwrXgTCFWNXzI9+MoAtp8phPkn14
cmg5/3sLe9YcFVFjYK/MoljlUbPDj+4dgk8l+w5FRSi0+JN5krUm7rYk9lojAkeS
fX8RU7ekKGbjBXIFtPxX5GNadu9RidR5GkHM3XroAIoris8bFOzMgFn9iybYnkhq
0S+Hpv0A8FVxzle0KNbPpsIkxXH2DnP2iPTDym9xJNl9Iv9MPtj9XaamH7TmXcSt
MbjkAudKsCw4bRuhHonM16DIUr8sX5UcRcAWyJ1x1qpZaOzMdh2VdYAHNuOsZwzJ
AgMBAAGjUzBRMB0GA1UdDgQWBBTAyDlp8NZfPe8NCGVlHJSVclGOhTAfBgNVHSME
GDAWgBTAyDlp8NZfPe8NCGVlHJSVclGOhTAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
SIb3DQEBCwUAA4IBAQAeIUXRKmC53iirY4P49YspLafspAMf4ndMFQAp+Oc223Vs
hQC4axNoYnUdzWDH6LioAN7P826xNPqtXvTZF9fmeX7K8Nm9Kdj+for+QQI3j6+X
zq98VVkACb8b/Mc9Nac/WBbv/1IKyKgNNta7//WNPgAFolOfti/C0NLsPcKhrM9L
mGbvRX8ZjH8pVJ0YTy4/xfDcF7G/Lxl4Yvb0ZXpuQbvE1+Y0h5aoTNshT/skJxC4
iyVseYr21s3pptKcr6H9KZuSdZe5pbEo+81nT15w+50aswFLk9GCYh5UsQ+1jkRK
cKgxP93i6x8BVbQJGKi1A1jhauSKX2IpWZQsHy4p
-----END CERTIFICATE-----

View File

@ -1,27 +0,0 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAstJ8IhLq1JCDxTC1dcHlTKT5PwJcGVM6o5Hi+OILGOd1yPUN
OcP7C/buj88K14EwhVjV8yPfjKALafKYT5J9eHJoOf97C3vWHBVRY2CvzKJY5VGz
w4/uHYJPJfsORUUotPiTeZK1Ju62JPZaIwJHkn1/EVO3pChm4wVyBbT8V+RjWnbv
UYnUeRpBzN166ACKK4rPGxTszIBZ/Ysm2J5IatEvh6b9APBVcc5XtCjWz6bCJMVx
9g5z9oj0w8pvcSTZfSL/TD7Y/V2mph+05l3ErTG45ALnSrAsOG0boR6JzNegyFK/
LF+VHEXAFsidcdaqWWjszHYdlXWABzbjrGcMyQIDAQABAoIBAAZOLXYanmjpIRpX
h7h7oikYEplWDRcQBBvvKZaOyuchhznTKTiZmF0xQ3Ny8J4Ndj9ndODWSZxI6uod
FaGNp+qytwnfgDBVGSVDm6tyRfSkX1fTsA/j3/iupvmO/w9yezdZYgLaCVTyex31
yVMdchZgYjYDUpEBYzJbV2xL18+GBRmmPjdXumlpcJqcclxjOQJSu/1WCGVfn/e/
64NQpAm7NSKLqeUl32g0/DvUpmYRfmf7ZjVUjePaJQU6sw5/N+3V9F1hYs8VSWz0
OMzYIfUcvixw+VWx5bu0nWt98FirhsQPjCTThD+DHP6koXGrdXpeMOQE1YZmoV5T
vP0X+FECgYEA5dsKVDQFL67muqz3CNRVM0xDWACCoa8789hYoxvhd1iO3e4kwXBa
ABPcZckioq+HiQ4UIxC2AhQ1FuTeIUTq7LZ0HtAAdKFi48U4LzmPhNUpG1E/HbJ3
GQbi4u1cAzGYuhdywktgBhn9bJ4XB7+X3815Y9qKkuRcwtXgKGDy8HkCgYEAxyly
vc7NBkLfIAmkOsm6VXfvfBTEUBUGi6+k1rarTUxWFIgRuk4FHywwWUTdxWBKJz3n
HNNJb/g7CcufdhLTuWVHQtJDxYf2cJjoi+Kf7/i/Qs9Nyhokj5Mnh6KlZQOWXpZd
Gwn/O13NeDxt1TIVO2xp6zY4FhVEPvaHuxsMCtECgYA7/eR/P6iO3nZoCJbdXhXy
spftEw0FSCg8p53SzIcXUCzRrcM4HavP0181zb5VebzFP8Bvun/WoRGOLSPwyP0L
1T8Pf7huuGSIEERuxvY3dC8raxQvGxJMnOiA0/Ss/Lfg8hfIsEWashPb0pMuOYpZ
JlblgfejCSlQzOOZhlxB+QKBgQCKmizRLV9/0QAJAsy5YPR9UJdpCebJOKiyg806
5Ct5AvwRE9UKjAuCczU+mu+f0fApOSpi5CQCeYVUvtG90UJpjrM2LLCfgoyeNbv4
xgG6dqlcbHrdgK4bATUMbsOd9g4qy4gGLkHi5df9qkhhi5Y9Iajg2X3U2H4DN3yk
WSFbUQKBgQCLz333qWOuT3OBv+EYxHDQUS4YG+dReUos+v0iPJzu+spnfibBF5IC
RjHIhPsdN1byNB0naXOkkz4tUlLGXv6umFgDtQvy/2rxvxQmUGp/WY1VM2+164Xe
NEWdMEU6UckCoMO77kw8JosKhmXCYaSW5bWwnXuEpOj9WWpwjKtxlA==
-----END RSA PRIVATE KEY-----

View File

@ -1,20 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDQDCCAiigAwIBAgIUe90yOBN1KBxOEr2jro3epamZksIwDQYJKoZIhvcNAQEL
BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l
cmF0ZWQgQ0EwHhcNMjQwMTE2MDIzMjIyWhcNMjcwMTE1MDIzMjIyWjAPMQ0wCwYD
VQQDEwRlczAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxGEL71pV
j8qoUxEuL7qjRSeS1eHxeKhu2jqEZb7iA1o/7b/26QuYAkoYL+WuJNfYjg5F/O8W
VVuAYIlN6a/mC6wT2t3pX4YSrdp+i3gtAC/LX+8mAeqMQPD+4jitOwjOsYzbuFCb
nYl86dnFPl/+Pmj20mtZ+Wt7oIPD88j6+r5qgv59pHICxS7Cq304LDTRQbNoT8HO
4c9VGGGtWIdtrqiYrz1OVefkffMrvFt77v6dKHn8g5tSyfQUDCoEKtTOc3Pe5zCB
vIMs6HaapoSkl8XdpFHQ712PCZRebAMCrVcPYQ3r8e9GYmLY/NhxEn3dWTqRhHeg
UD13O8o1aBWonwIDAQABo28wbTAdBgNVHQ4EFgQUXvGJtSf2/mLOK17AzUridtCV
xWwwHwYDVR0jBBgwFoAUwMg5afDWXz3vDQhlZRyUlXJRjoUwIAYDVR0RBBkwF4IJ
bG9jYWxob3N0hwR/AAABggRlczAxMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQAD
ggEBACaNq3ZqrbsGvbEtrf6kJGIsTokTFHeVJUSYmt1ZZzDFLSepXAC/J8gphV45
B+YSlkDPNTwMYlf7TUYY872zkdqOXN9r0NUx8MzVAX0+rux0RJba5GGUvJGZDNMX
WM5z9ry1KjQSQ1bSoRQOD3QArmBmhvikHjLc97Vqt56N0wA/ztXWOpNZX/TXmast
aXlUbcfQE73Cdq9tW1ATXwbQ2Gf7vVAUT3zjZSZbNdgPuBicGJHf85Fhjm2ND4+R
sjLIOQ2YgVxNHYbueScc6lJM5RNK194K7WrEQnRyGHT3NaDUm0FFNl//aQeq1ZVw
6gaUYlkTFauXwEYMDK901cWFaBE=
-----END CERTIFICATE-----

View File

@ -1,27 +0,0 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAxGEL71pVj8qoUxEuL7qjRSeS1eHxeKhu2jqEZb7iA1o/7b/2
6QuYAkoYL+WuJNfYjg5F/O8WVVuAYIlN6a/mC6wT2t3pX4YSrdp+i3gtAC/LX+8m
AeqMQPD+4jitOwjOsYzbuFCbnYl86dnFPl/+Pmj20mtZ+Wt7oIPD88j6+r5qgv59
pHICxS7Cq304LDTRQbNoT8HO4c9VGGGtWIdtrqiYrz1OVefkffMrvFt77v6dKHn8
g5tSyfQUDCoEKtTOc3Pe5zCBvIMs6HaapoSkl8XdpFHQ712PCZRebAMCrVcPYQ3r
8e9GYmLY/NhxEn3dWTqRhHegUD13O8o1aBWonwIDAQABAoIBADJ3A/Om4az5dcce
96EBU9q+IDBBh2Wr1wzSk9p3sqoM47fLqH5b4dzYwJ1yZw2FwFtFFLw6jqExyexE
7JY8gyAFwPZyJ3pKQHuX1gQuRlYxchB9quU8Kn230LA+w1mT2lXrLj2PzWWvAsAv
m837KiFMpP0O5EjB07u8kLsRr1mG6QQ24Kc8oxd7xLXIiPzSvsOpYwo9hmIWENd5
kyA7oSa9EmN3TRTkKOHI7cFQ3DqIGdO71waUofKOdx39DyHS2YKWxDE/LUjkS9zw
1AyZG09l4uowyLRqwYhivEq9Za6rdc64yheuHatAM9kC2AOcVcsCPZquIe90k4t1
L7e9CAECgYEA1W483xTW8ngzxv9MMuPiW+PwVGRpyQrbO6OZOxdWEYfhrZlk5wlW
XK2T85jqooJwMWPTk1F49vZ9WN2KuLkL65GlkEtkFbxmOiFJjXuWwycbFSk05hPs
4AESBYHieaSPcwYhvLeG6g4PFyeqmbAGnKsJaj2ylPwDBOc7LgVlqAECgYEA64wo
gZwaj5SlP8M/OqGH04UVYr1kP/Eq6eiDfMyV5exy+pyzofZyNKUfJfw6sGgyRRHx
OVxlnPMsZ8zbdOXsvUEIeavpwDfQcp5eAURL65I6GMLsx2QpfiN2mDe1MqQW0jct
UleFaURgS84KHLE0+tBBg906jOHGjsE7Q3lyUJ8CgYBYYPev4K9JZGD8bEcfY6Ie
Lvsb1yC+8VHrFkmjYHxxcfUPr89KpGEwq2fynUW72YufyBiajkgq69Ln84U4DNhU
ydDnOXDOV191fsc4YQ8C7LSYRKH1DBcwgwD1at1fRbdpCAb8YHrrfLre+bv5PBzg
zyps5fOHIfwWEbI90lpQAQKBgQDoMMqBMTtxi+r1lucOScrVtFuncOCQs5BE8cIj
1JxzAQk6iBv/LSvZP2gcDq5f1Oaw9YXfsHguJfwA+ozeiAQ9bw0Gu3N52sstIXWz
M/rO5d9FJ2k3CEJqqFSwqkGBAQXKBUA06jeF1DREpX+MVxbNo1rhvMOJusn7UPm1
gtMwKwKBgQCfRzFO10ITwrw8rcRZwO9Axgqf11V7xn6qpgRxj4h0HOErVTCN1H0b
vE3Pz7cxS/g9vFRP37TuqBLfGVzPt9LAEFwCWPeZJLROBLHyu8XrhTbQx+sI2/pe
SBEJAQAHtYasFTE0sBEKNEY2rIt1c29XZhyhhtNKD9gRN/gB355wLg==
-----END RSA PRIVATE KEY-----

View File

@ -1,7 +0,0 @@
instances:
- name: es01
dns:
- es01
- localhost
ip:
- 127.0.0.1

View File

@ -49,9 +49,6 @@ echo "+++++++ Creating Kafka Topics ++++++++"
# there seems to be a race condition when creating the topics (too early)
env KAFKA_CREATE_TOPICS="$KAFKA_CREATE_TOPICS_NG" KAFKA_PORT="$PORT1" create-topics.sh
# create a topic with max.message.bytes=100
/opt/kafka/bin/kafka-topics.sh --create --bootstrap-server "${SERVER}:${PORT1}" --topic max-100-bytes --partitions 1 --replication-factor 1 --config max.message.bytes=100
echo "+++++++ Wait until Kafka ports are down ++++++++"
bash -c 'while printf "" 2>>/dev/null >>/dev/tcp/$0/$1; do sleep 1; done' $SERVER $PORT1

View File

@ -1,4 +1,4 @@
FROM docker.io/zmstone/openldap:2.5.16@sha256:a813922115a1d1f1b974399595921d1778fae22b3f1ee15dcfa8cfa89700dbc7
FROM docker.io/zmstone/openldap:2.5.16
COPY .ci/docker-compose-file/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf
COPY apps/emqx_ldap/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif

View File

@ -1,61 +0,0 @@
# LDAP authentication
To run manual tests with the default docker-compose files, expose the openldap container port by uncommenting the `ports` config in `docker-compose-ldap.yaml`.
To start openldap:
```
docker-compose -f ./.ci/docker-compose-file/docker-compose.yaml -f ./.ci/docker-compose-file/docker-compose-ldap.yaml up -d
```
## LDAP database
The LDAP database is populated from the files below:
```
apps/emqx_ldap/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
apps/emqx_ldap/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
```
## Minimal EMQX config
```
authentication = [
{
backend = ldap
base_dn = "uid=${username},ou=testdevice,dc=emqx,dc=io"
filter = "(& (objectClass=mqttUser) (uid=${username}))"
mechanism = password_based
method {
is_superuser_attribute = isSuperuser
password_attribute = userPassword
type = hash
}
password = public
pool_size = 8
query_timeout = "5s"
request_timeout = "10s"
server = "localhost:1389"
username = "cn=root,dc=emqx,dc=io"
}
]
```
## Example ldapsearch command
```
ldapsearch -x -H ldap://localhost:389 -D "cn=root,dc=emqx,dc=io" -W -b "uid=mqttuser0007,ou=testdevice,dc=emqx,dc=io" "(&(objectClass=mqttUser)(uid=mqttuser0007))"
```
## Example mqttx command
The client password hashes are generated from their usernames.
```
# disabled user
mqttx pub -t 't/1' -h localhost -p 1883 -m x -u mqttuser0006 -P mqttuser0006
# enabled super-user
mqttx pub -t 't/1' -h localhost -p 1883 -m x -u mqttuser0007 -P mqttuser0007
```
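A complementary check, not part of the original README: read the super-user flag and stored password hash back from LDAP. This sketch reuses the root bind DN, the `public` bind password, and the `isSuperuser` attribute from the minimal config above:
```
ldapsearch -x -H ldap://localhost:389 -D "cn=root,dc=emqx,dc=io" -w public \
  -b "uid=mqttuser0007,ou=testdevice,dc=emqx,dc=io" \
  "(objectClass=mqttUser)" isSuperuser userPassword
```
If the entry carries the super-user attribute as provisioned in `emqx.io.ldif`, the mqttx super-user publish above should succeed.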

View File

@ -1,4 +1,4 @@
ARG BUILD_FROM=public.ecr.aws/docker/library/postgres:13@sha256:fa69de30d02652cfdfb68166692e5186f6972c17f83c89c71ac8ff0916d46ae3
ARG BUILD_FROM=public.ecr.aws/docker/library/postgres:13
FROM ${BUILD_FROM}
ARG POSTGRES_USER=postgres
COPY --chown=$POSTGRES_USER ./pgsql/pg_hba_tls.conf /var/lib/postgresql/pg_hba.conf

View File

@ -6,9 +6,6 @@
set -x
set +e
# shellcheck disable=SC3028 disable=SC3054
SCRIPT_DIR="$( dirname -- "$( readlink -f -- "$0"; )"; )"
EMQX_TEST_DB_BACKEND=$1
if [ "$EMQX_TEST_DB_BACKEND" = "rlog" ]
then
@ -23,7 +20,7 @@ fi
apk update && apk add git curl
git clone -b develop-5.0 https://github.com/emqx/paho.mqtt.testing.git /paho.mqtt.testing
pip install --require-hashes -r "$SCRIPT_DIR/requirements.txt"
pip install pytest==7.1.2 pytest-retry==1.3.0
pytest --retries 3 -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$TARGET_HOST"
RESULT=$?

View File

@ -1,21 +0,0 @@
pytest-retry==1.6.1 \
--hash=sha256:3d420afc08e61ed3be28ecbb544371041b1b8e5fea7c94eb97cefa0d4ea9825c \
--hash=sha256:3d663159a9be4d6878705822cf27a0976f99ec1bc4f2d9494e80403b17f700f2
pytest==7.4.4 \
--hash=sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280 \
--hash=sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8
pluggy==1.3.0 \
--hash=sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12 \
--hash=sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7
iniconfig==2.0.0 \
--hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
--hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
tomli==2.0.1 \
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
exceptiongroup==1.2.0 \
--hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14 \
--hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68
packaging==23.2 \
--hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \
--hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7

View File

@ -9,4 +9,3 @@ accounts:
defaultGroupPerm: PUB|SUB
topicPerms:
- TopicTest=PUB|SUB
- Topic2=PUB|SUB

View File

@ -1,24 +0,0 @@
brokerClusterName=DefaultClusterSSL
brokerName=broker-a
brokerId=0
brokerIP1=rocketmq_broker_ssl
defaultTopicQueueNums=4
autoCreateTopicEnable=true
autoCreateSubscriptionGroup=true
listenPort=10911
deleteWhen=04
fileReservedTime=120
mapedFileSizeCommitLog=1073741824
mapedFileSizeConsumeQueue=300000
diskMaxUsedSpaceRatio=100
maxMessageSize=65536
brokerRole=ASYNC_MASTER
flushDiskType=ASYNC_FLUSH
aclEnable=true

View File

@ -1,12 +0,0 @@
globalWhiteRemoteAddresses:
accounts:
- accessKey: RocketMQ
secretKey: 12345678
whiteRemoteAddress:
admin: false
defaultTopicPerm: DENY
defaultGroupPerm: PUB|SUB
topicPerms:
- TopicTest=PUB|SUB
- Topic2=PUB|SUB

View File

@ -96,18 +96,6 @@
"upstream": "cassandra:9142",
"enabled": true
},
{
"name": "cassa_no_auth_tcp",
"listen": "0.0.0.0:9043",
"upstream": "cassandra_noauth:9042",
"enabled": true
},
{
"name": "cassa_no_auth_tls",
"listen": "0.0.0.0:9143",
"upstream": "cassandra_noauth:9142",
"enabled": true
},
{
"name": "sqlserver",
"listen": "0.0.0.0:1433",
@ -139,15 +127,9 @@
"enabled": true
},
{
"name": "iotdb110",
"name": "iotdb",
"listen": "0.0.0.0:18080",
"upstream": "iotdb110:18080",
"enabled": true
},
{
"name": "iotdb130",
"listen": "0.0.0.0:28080",
"upstream": "iotdb130:18080",
"upstream": "iotdb:18080",
"enabled": true
},
{
@ -209,23 +191,5 @@
"listen": "0.0.0.0:636",
"upstream": "ldap:636",
"enabled": true
},
{
"name": "elasticsearch",
"listen": "0.0.0.0:9200",
"upstream": "elasticsearch:9200",
"enabled": true
},
{
"name": "azurite_plain",
"listen": "0.0.0.0:10000",
"upstream": "azurite:10000",
"enabled": true
},
{
"name": "couchbase",
"listen": "0.0.0.0:8093",
"upstream": "couchbase:8093",
"enabled": true
}
]

View File

@ -1,18 +1,18 @@
%% -*- mode: erlang -*-
{application, http_server, [
{description, "An HTTP server application"},
{vsn, "0.2.0"},
{registered, []},
% {mod, {http_server_app, []}},
{modules, []},
{applications, [
kernel,
stdlib,
minirest
]},
{env, []},
{modules, []},
{application, http_server,
[{description, "An HTTP server application"},
{vsn, "0.2.0"},
{registered, []},
% {mod, {http_server_app, []}},
{modules, []},
{applications,
[kernel,
stdlib,
minirest
]},
{env,[]},
{modules, []},
{licenses, ["Apache 2.0"]},
{links, []}
]}.
{licenses, ["Apache 2.0"]},
{links, []}
]}.

View File

@ -1,14 +0,0 @@
arrow==1.2.3 --hash=sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2
click==8.1.3 --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
exceptiongroup==1.2.0 --hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14
gitlint==0.19.1 --hash=sha256:26bb085959148d99fbbc178b4e56fda6c3edd7646b7c2a24d8ee1f8e036ed85d
gitlint-core==0.19.1 --hash=sha256:f41effd1dcbc06ffbfc56b6888cce72241796f517b46bd9fd4ab1b145056988c
iniconfig==2.0.0 --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
packaging==23.2 --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7
pluggy==1.3.0 --hash=sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7
pytest==7.4.4 --hash=sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8
pytest-retry==1.6.1 --hash=sha256:3d420afc08e61ed3be28ecbb544371041b1b8e5fea7c94eb97cefa0d4ea9825c
python-dateutil==2.8.2 --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
sh==1.14.3 --hash=sha256:e4045b6c732d9ce75d571c79f5ac2234edd9ae4f5fa9d59b09705082bdca18c7
six==1.16.0 --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
tomli==2.0.1 --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc

.github/CODEOWNERS vendored
View File

@ -1,29 +1,18 @@
## Default
* @emqx/emqx-review-board
# emqx-review-board members
## HJianBo
## id
## ieQu1
## keynslug
## qzhuyan
## savonarola
## terry-xiaoyu
## thalesmg
## zhongwencool
## zmstone
## apps
/apps/emqx/ @emqx/emqx-review-board @lafirest
/apps/emqx_auth/ @emqx/emqx-review-board @JimMoen
/apps/emqx_connector/ @emqx/emqx-review-board
/apps/emqx_auth/ @emqx/emqx-review-board @JimMoen @savonarola
/apps/emqx_connector/ @emqx/emqx-review-board @JimMoen
/apps/emqx_dashboard/ @emqx/emqx-review-board @JimMoen @lafirest
/apps/emqx_dashboard_rbac/ @emqx/emqx-review-board @lafirest
/apps/emqx_dashboard_sso/ @emqx/emqx-review-board @JimMoen @lafirest
/apps/emqx_exhook/ @emqx/emqx-review-board @JimMoen
/apps/emqx_exhook/ @emqx/emqx-review-board @JimMoen @HJianBo
/apps/emqx_ft/ @emqx/emqx-review-board @savonarola @keynslug
/apps/emqx_gateway/ @emqx/emqx-review-board @lafirest
/apps/emqx_management/ @emqx/emqx-review-board @lafirest
/apps/emqx_opentelemetry @emqx/emqx-review-board @SergeTupchiy
/apps/emqx_management/ @emqx/emqx-review-board @lafirest @sstrigler
/apps/emqx_plugins/ @emqx/emqx-review-board @JimMoen
/apps/emqx_prometheus/ @emqx/emqx-review-board @JimMoen
/apps/emqx_psk/ @emqx/emqx-review-board @lafirest
@ -31,7 +20,7 @@
/apps/emqx_rule_engine/ @emqx/emqx-review-board @kjellwinblad
/apps/emqx_slow_subs/ @emqx/emqx-review-board @lafirest
/apps/emqx_statsd/ @emqx/emqx-review-board @JimMoen
/apps/emqx_durable_storage/ @emqx/emqx-review-board @keynslug
/apps/emqx_durable_storage/ @emqx/emqx-review-board @ieQu1 @keynslug
## CI
/deploy/ @emqx/emqx-review-board @Rory-Z

View File

@ -33,7 +33,7 @@ runs:
HOMEBREW_NO_INSTALL_UPGRADE: 1
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
run: |
brew install curl zip unzip coreutils openssl@1.1 unixodbc
brew install curl zip unzip coreutils openssl@1.1
echo "/usr/local/opt/bison/bin" >> $GITHUB_PATH
echo "/usr/local/bin" >> $GITHUB_PATH
echo "emqx_name=${emqx_name}" >> $GITHUB_OUTPUT
@ -51,12 +51,12 @@ runs:
echo "SELF_HOSTED=false" >> $GITHUB_OUTPUT
;;
esac
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
- uses: actions/cache@v3
id: cache
if: steps.prepare.outputs.SELF_HOSTED != 'true'
with:
path: ${{ steps.prepare.outputs.OTP_INSTALL_PATH }}
key: otp-install-${{ inputs.otp }}-${{ inputs.os }}-static-ssl-disable-hipe-disable-jit-20240524-1
key: otp-install-${{ inputs.otp }}-${{ inputs.os }}-static-ssl-disable-hipe-disable-jit
- name: build erlang
if: steps.cache.outputs.cache-hit != 'true'
shell: bash
@ -80,10 +80,9 @@ runs:
git clone --depth 1 --branch OTP-${{ inputs.otp }} https://github.com/emqx/otp.git "$OTP_SOURCE_PATH"
cd "$OTP_SOURCE_PATH"
if [ "$(arch)" = arm64 ]; then
ODBCHOME="$(brew --prefix unixodbc)"
export CFLAGS="-O2 -g -I${ODBCHOME}/include"
export LDFLAGS="-L${ODBCHOME}/lib"
WITH_ODBC="--with-odbc=${ODBCHOME}"
export CFLAGS="-O2 -g -I$(brew --prefix unixodbc)/include"
export LDFLAGS="-L$(brew --prefix unixodbc)/lib"
WITH_ODBC="--with-odbc=$(brew --prefix unixodbc)"
else
WITH_ODBC=""
fi

View File

@ -1,21 +1,37 @@
name: 'Prepare jmeter'
inputs:
version-emqx:
required: true
type: string
runs:
using: composite
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
- uses: actions/download-artifact@v3
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
env:
PKG_VSN: ${{ inputs.version-emqx }}
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@99b8673ff64fbf99d8d325f52d9a5bdedb8483e9 # v4.2.1
- uses: actions/setup-java@v3
with:
java-version: '8.0.282' # The JDK version to make available on the path.
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
architecture: x64 # (x64 or x86) - defaults to x64
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
distribution: 'zulu'
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
- uses: actions/download-artifact@v3
with:
name: apache-jmeter.tgz
- name: install jmeter

View File

@ -1,32 +0,0 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: "/"
reviewers:
- "emqx/emqx-review-board"
schedule:
interval: weekly
groups:
actions:
patterns:
- "*"
- package-ecosystem: github-actions
directory: "/.github/actions/package-macos/" # All subdirectories outside of "/.github/workflows" must be explicitly included.
reviewers:
- "emqx/emqx-review-board"
schedule:
interval: weekly
groups:
actions-package-macos:
patterns:
- "*"
- package-ecosystem: github-actions
directory: "/.github/actions/prepare-jmeter/" # All subdirectories outside of "/.github/workflows" must be explicitly included.
reviewers:
- "emqx/emqx-review-board"
schedule:
interval: weekly
groups:
actions-prepare-jmeter:
patterns:
- "*"

View File

@ -1 +0,0 @@
*/.github/*

View File

@ -11,48 +11,26 @@ on:
ref:
required: false
defaults:
run:
shell: bash
env:
IS_CI: "yes"
jobs:
init:
runs-on: ubuntu-22.04
outputs:
BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }}
OTP_VSN: ${{ steps.env.outputs.OTP_VSN }}
ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
BUILDER: ${{ steps.env.outputs.BUILDER }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.event.inputs.ref }}
- name: Set up environment
id: env
run: |
source ./env.sh
echo "BUILDER_VSN=$EMQX_BUILDER_VSN" | tee -a "$GITHUB_OUTPUT"
echo "OTP_VSN=$OTP_VSN" | tee -a "$GITHUB_OUTPUT"
echo "ELIXIR_VSN=$ELIXIR_VSN" | tee -a "$GITHUB_OUTPUT"
echo "BUILDER=$EMQX_BUILDER" | tee -a "$GITHUB_OUTPUT"
sanity-checks:
runs-on: ubuntu-22.04
needs: init
container: ${{ needs.init.outputs.BUILDER }}
container: "ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu22.04"
outputs:
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
ct-host: ${{ steps.matrix.outputs.ct-host }}
ct-docker: ${{ steps.matrix.outputs.ct-docker }}
permissions:
contents: read
version-emqx: ${{ steps.matrix.outputs.version-emqx }}
version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }}
builder: "ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu22.04"
builder_vsn: "5.2-8"
otp_vsn: "26.1.2-2"
elixir_vsn: "1.15.7"
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.ref }}
fetch-depth: 0
@ -64,7 +42,7 @@ jobs:
BEFORE_REF: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
AFTER_REF: ${{ github.sha }}
run: |
pip install --require-hashes -r .ci/gitlint.requirements.txt
pip install gitlint
gitlint --commits $BEFORE_REF..$AFTER_REF --config .github/workflows/.gitlint
- name: Run shellcheck
run: |
@ -97,8 +75,7 @@ jobs:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
run: |
# mix local.hex --force --if-missing && mix local.rebar --force --if-missing
mix local.hex 2.0.6 --force --if-missing && mix local.rebar --force --if-missing
mix local.hex --force --if-missing && mix local.rebar --force --if-missing
- name: Check formatting
env:
MIX_ENV: emqx-enterprise
@ -111,20 +88,35 @@ jobs:
- name: Generate CT Matrix
id: matrix
run: |
MATRIX="$(./scripts/find-apps.sh --ci)"
APPS="$(./scripts/find-apps.sh --ci)"
MATRIX="$(echo "${APPS}" | jq -c '
[
(.[] | select(.profile == "emqx") | . + {
builder: "5.2-8",
otp: "26.1.2-2",
elixir: "1.15.7"
}),
(.[] | select(.profile == "emqx-enterprise") | . + {
builder: "5.2-8",
otp: ["26.1.2-2"][],
elixir: "1.15.7"
})
]
')"
echo "${MATRIX}" | jq
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')"
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT
echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT
echo "version-emqx=$(./pkg-vsn.sh emqx)" | tee -a $GITHUB_OUTPUT
echo "version-emqx-enterprise=$(./pkg-vsn.sh emqx-enterprise)" | tee -a $GITHUB_OUTPUT
compile:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral-xl","linux","x64"]') }}
container: ${{ needs.init.outputs.BUILDER }}
container: ${{ needs.sanity-checks.outputs.builder }}
needs:
- init
- sanity-checks
strategy:
matrix:
@ -132,11 +124,8 @@ jobs:
- emqx
- emqx-enterprise
permissions:
contents: read
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Work around https://github.com/actions/checkout/issues/766
@ -152,55 +141,61 @@ jobs:
echo "PROFILE=${PROFILE}" | tee -a .env
echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env
zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.profile }}
path: ${{ matrix.profile }}.zip
retention-days: 7
retention-days: 1
run_emqx_app_tests:
needs:
- init
- sanity-checks
- compile
uses: ./.github/workflows/run_emqx_app_tests.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.sanity-checks.outputs.builder }}
before_ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
after_ref: ${{ github.sha }}
run_test_cases:
needs:
- init
- sanity-checks
- compile
uses: ./.github/workflows/run_test_cases.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.sanity-checks.outputs.builder }}
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
ct-host: ${{ needs.sanity-checks.outputs.ct-host }}
ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }}
static_checks:
needs:
- init
- sanity-checks
- compile
uses: ./.github/workflows/static_checks.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.sanity-checks.outputs.builder }}
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
build_slim_packages:
needs:
- sanity-checks
uses: ./.github/workflows/build_slim_packages.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder_vsn: ${{ needs.sanity-checks.outputs.builder_vsn }}
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
build_docker_for_test:
needs:
- init
- sanity-checks
uses: ./.github/workflows/build_docker_for_test.yaml
with:
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
spellcheck:
needs:
@ -210,35 +205,41 @@ jobs:
run_conf_tests:
needs:
- init
- sanity-checks
- compile
uses: ./.github/workflows/run_conf_tests.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.sanity-checks.outputs.builder }}
check_deps_integrity:
needs:
- init
- sanity-checks
uses: ./.github/workflows/check_deps_integrity.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.sanity-checks.outputs.builder }}
run_jmeter_tests:
needs:
- sanity-checks
- build_docker_for_test
uses: ./.github/workflows/run_jmeter_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
run_docker_tests:
needs:
- sanity-checks
- build_docker_for_test
uses: ./.github/workflows/run_docker_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
run_helm_tests:
needs:
- sanity-checks
- build_docker_for_test
uses: ./.github/workflows/run_helm_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}

View File

@ -8,64 +8,34 @@ on:
push:
tags:
- 'v*'
- 'e*'
branches:
- 'master'
- 'release-5[0-9]'
- 'ci/**'
workflow_dispatch:
inputs:
ref:
required: false
defaults:
run:
shell: bash
env:
IS_CI: 'yes'
jobs:
init:
runs-on: ubuntu-22.04
outputs:
BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }}
OTP_VSN: ${{ steps.env.outputs.OTP_VSN }}
ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
BUILDER: ${{ steps.env.outputs.BUILDER }}
BUILD_FROM: ${{ steps.env.outputs.BUILD_FROM }}
RUN_FROM: ${{ steps.env.outputs.BUILD_FROM }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.event.inputs.ref }}
- name: Set up environment
id: env
run: |
source env.sh
echo "BUILDER_VSN=$EMQX_BUILDER_VSN" >> "$GITHUB_OUTPUT"
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
echo "ELIXIR_VSN=$ELIXIR_VSN" >> "$GITHUB_OUTPUT"
echo "BUILDER=$EMQX_BUILDER" >> "$GITHUB_OUTPUT"
echo "BUILD_FROM=$EMQX_DOCKER_BUILD_FROM" >> "$GITHUB_OUTPUT"
echo "RUN_FROM=$EMQX_DOCKER_RUN_FROM" >> "$GITHUB_OUTPUT"
prepare:
runs-on: ubuntu-22.04
needs: init
container: ${{ needs.init.outputs.BUILDER }}
container: 'ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu22.04'
outputs:
profile: ${{ steps.parse-git-ref.outputs.profile }}
release: ${{ steps.parse-git-ref.outputs.release }}
latest: ${{ steps.parse-git-ref.outputs.latest }}
version: ${{ steps.parse-git-ref.outputs.version }}
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
ct-host: ${{ steps.matrix.outputs.ct-host }}
ct-docker: ${{ steps.matrix.outputs.ct-docker }}
permissions:
contents: read
builder: 'ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu22.04'
builder_vsn: '5.2-8'
otp_vsn: '26.1.2-2'
elixir_vsn: '1.15.7'
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.ref }}
fetch-depth: 0
@ -73,22 +43,38 @@ jobs:
shell: bash
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
- name: Detect emqx profile
- name: Detect emqx profile and version
id: parse-git-ref
run: |
JSON="$(./scripts/parse-git-ref.sh $GITHUB_REF)"
PROFILE=$(echo "$JSON" | jq -cr '.profile')
RELEASE=$(echo "$JSON" | jq -cr '.release')
LATEST=$(echo "$JSON" | jq -cr '.latest')
VERSION="$(./pkg-vsn.sh "$PROFILE")"
echo "profile=$PROFILE" | tee -a $GITHUB_OUTPUT
echo "release=$RELEASE" | tee -a $GITHUB_OUTPUT
echo "latest=$LATEST" | tee -a $GITHUB_OUTPUT
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
- name: Build matrix
id: matrix
run: |
MATRIX="$(./scripts/find-apps.sh --ci)"
APPS="$(./scripts/find-apps.sh --ci)"
MATRIX="$(echo "${APPS}" | jq -c '
[
(.[] | select(.profile == "emqx") | . + {
builder: "5.2-8",
otp: "26.1.2-2",
elixir: "1.15.7"
}),
(.[] | select(.profile == "emqx-enterprise") | . + {
builder: "5.2-8",
otp: ["26.1.2-2"][],
elixir: "1.15.7"
})
]
')"
echo "${MATRIX}" | jq
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')"
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
@ -98,44 +84,47 @@ jobs:
build_packages:
if: needs.prepare.outputs.release == 'true'
needs:
- init
- prepare
uses: ./.github/workflows/build_packages.yaml
with:
profile: ${{ needs.prepare.outputs.profile }}
publish: true
otp_vsn: ${{ needs.init.outputs.OTP_VSN }}
elixir_vsn: ${{ needs.init.outputs.ELIXIR_VSN }}
builder_vsn: ${{ needs.init.outputs.BUILDER_VSN }}
publish: ${{ needs.prepare.outputs.release }}
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
secrets: inherit
build_and_push_docker_images:
if: needs.prepare.outputs.release == 'true'
needs:
- init
- prepare
uses: ./.github/workflows/build_and_push_docker_images.yaml
with:
profile: ${{ needs.prepare.outputs.profile }}
publish: true
version: ${{ needs.prepare.outputs.version }}
publish: ${{ needs.prepare.outputs.release }}
latest: ${{ needs.prepare.outputs.latest }}
build_from: ${{ needs.init.outputs.BUILD_FROM }}
run_from: ${{ needs.init.outputs.RUN_FROM }}
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
secrets: inherit
build_slim_packages:
if: needs.prepare.outputs.release != 'true'
needs:
- init
- prepare
uses: ./.github/workflows/build_slim_packages.yaml
with:
builder: ${{ needs.prepare.outputs.builder }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
compile:
if: needs.prepare.outputs.release != 'true'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ needs.init.outputs.BUILDER }}
container: ${{ needs.prepare.outputs.builder }}
needs:
- init
- prepare
strategy:
matrix:
@ -143,11 +132,8 @@ jobs:
- emqx
- emqx-enterprise
permissions:
contents: read
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.ref }}
fetch-depth: 0
@ -163,7 +149,7 @@ jobs:
echo "PROFILE=${PROFILE}" | tee -a .env
echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env
zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.profile }}
path: ${{ matrix.profile }}.zip
@ -171,23 +157,22 @@ jobs:
run_emqx_app_tests:
needs:
- init
- prepare
- compile
uses: ./.github/workflows/run_emqx_app_tests.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.prepare.outputs.builder }}
before_ref: ${{ github.event.before }}
after_ref: ${{ github.sha }}
run_test_cases:
if: needs.prepare.outputs.release != 'true'
needs:
- init
- prepare
- compile
uses: ./.github/workflows/run_test_cases.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.prepare.outputs.builder }}
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}
ct-host: ${{ needs.prepare.outputs.ct-host }}
ct-docker: ${{ needs.prepare.outputs.ct-docker }}
@ -195,20 +180,18 @@ jobs:
run_conf_tests:
if: needs.prepare.outputs.release != 'true'
needs:
- init
- prepare
- compile
uses: ./.github/workflows/run_conf_tests.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.prepare.outputs.builder }}
static_checks:
if: needs.prepare.outputs.release != 'true'
needs:
- init
- prepare
- compile
uses: ./.github/workflows/static_checks.yaml
with:
builder: ${{ needs.init.outputs.BUILDER }}
builder: ${{ needs.prepare.outputs.builder }}
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}

View File

@ -10,16 +10,22 @@ on:
profile:
required: true
type: string
version:
required: true
type: string
latest:
required: true
type: string
publish:
required: true
type: boolean
build_from:
type: string
otp_vsn:
required: true
type: string
run_from:
elixir_vsn:
required: true
type: string
builder_vsn:
required: true
type: string
secrets:
@ -39,6 +45,8 @@ on:
required: false
type: string
default: 'emqx'
version:
required: true
latest:
required: false
type: boolean
@ -47,22 +55,25 @@ on:
required: false
type: boolean
default: false
build_from:
otp_vsn:
required: false
type: string
default: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-debian12
run_from:
default: public.ecr.aws/debian/debian:stable-20240612-slim
default: '26.1.2-2'
elixir_vsn:
required: false
type: string
default: '1.15.7'
builder_vsn:
required: false
type: string
default: '5.2-8'
permissions:
contents: read
jobs:
build:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.arch)) || 'ubuntu-22.04' }}
container: ${{ inputs.build_from }}
outputs:
PKG_VSN: ${{ steps.build.outputs.PKG_VSN }}
docker:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
strategy:
fail-fast: false
@ -70,141 +81,54 @@ jobs:
profile:
- ${{ inputs.profile }}
- ${{ inputs.profile }}-elixir
arch:
- x64
- arm64
registry:
- 'docker.io'
- 'public.ecr.aws'
exclude:
- profile: emqx-enterprise
registry: 'public.ecr.aws'
- profile: emqx-enterprise-elixir
registry: 'public.ecr.aws'
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.event.inputs.ref }}
- run: git config --global --add safe.directory "$PWD"
- name: build release tarball
id: build
run: |
make ${{ matrix.profile }}-tgz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
with:
name: "${{ matrix.profile }}-${{ matrix.arch }}.tar.gz"
path: "_packages/emqx*/emqx-*.tar.gz"
retention-days: 7
overwrite: true
if-no-files-found: error
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.ref }}
fetch-depth: 0
docker:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
needs:
- build
defaults:
run:
shell: bash
- uses: docker/setup-qemu-action@v2
- uses: docker/setup-buildx-action@v2
strategy:
fail-fast: false
matrix:
profile:
- ["${{ inputs.profile }}", "${{ inputs.profile == 'emqx' && 'docker.io,public.ecr.aws' || 'docker.io' }}"]
- ["${{ inputs.profile }}-elixir", "${{ inputs.profile == 'emqx' && 'docker.io,public.ecr.aws' || 'docker.io' }}"]
- name: Login to hub.docker.com
uses: docker/login-action@v2
if: matrix.registry == 'docker.io'
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.event.inputs.ref }}
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
pattern: "${{ matrix.profile[0] }}-*.tar.gz"
path: _packages
merge-multiple: true
- name: Login to AWS ECR
uses: docker/login-action@v2
if: matrix.registry == 'public.ecr.aws'
with:
registry: public.ecr.aws
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ecr: true
- name: Move artifacts to root directory
env:
PROFILE: ${{ inputs.profile }}
run: |
ls -lR _packages/$PROFILE
mv _packages/$PROFILE/*.tar.gz ./
- name: Enable containerd image store on Docker Engine
run: |
echo "$(sudo cat /etc/docker/daemon.json | jq '. += {"features": {"containerd-snapshotter": true}}')" > daemon.json
sudo mv daemon.json /etc/docker/daemon.json
sudo systemctl restart docker
- uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0
- uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Login to hub.docker.com
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
if: inputs.publish && contains(matrix.profile[1], 'docker.io')
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Login to AWS ECR
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
if: inputs.publish && contains(matrix.profile[1], 'public.ecr.aws')
with:
registry: public.ecr.aws
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ecr: true
- name: Build docker image for smoke test
env:
PROFILE: ${{ matrix.profile[0] }}
DOCKER_REGISTRY: ${{ matrix.profile[1] }}
DOCKER_ORG: ${{ github.repository_owner }}
DOCKER_LATEST: ${{ inputs.latest }}
DOCKER_PUSH: false
DOCKER_BUILD_NOCACHE: true
BUILD_FROM: ${{ inputs.build_from }}
RUN_FROM: ${{ inputs.run_from }}
PKG_VSN: ${{ needs.build.outputs.PKG_VSN }}
EMQX_SOURCE_TYPE: tgz
run: |
./build ${PROFILE} docker
echo "Built tags:"
echo "==========="
cat .emqx_docker_image_tags
echo "==========="
echo "_EMQX_DOCKER_IMAGE_TAG=$(head -n 1 .emqx_docker_image_tags)" >> $GITHUB_ENV
- name: smoke test
timeout-minutes: 1
run: |
for tag in $(cat .emqx_docker_image_tags); do
CID=$(docker run -d -p 18083:18083 $tag)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker rm -f $CID
done
- name: dashboard tests
working-directory: ./scripts/ui-tests
timeout-minutes: 5
run: |
set -eu
docker compose up --abort-on-container-exit --exit-code-from selenium
docker compose rm -fsv
- name: test node_dump
run: |
CID=$(docker run -d -P $_EMQX_DOCKER_IMAGE_TAG)
docker exec -t -u root -w /root $CID bash -c 'apt-get -y update && apt-get -y install net-tools'
docker exec -t -u root $CID node_dump
docker rm -f $CID
- name: Build and push docker image
if: inputs.publish || github.repository_owner != 'emqx'
env:
PROFILE: ${{ matrix.profile[0] }}
DOCKER_REGISTRY: ${{ matrix.profile[1] }}
DOCKER_ORG: ${{ github.repository_owner }}
DOCKER_LATEST: ${{ inputs.latest }}
DOCKER_PUSH: true
DOCKER_BUILD_NOCACHE: false
DOCKER_PLATFORMS: linux/amd64,linux/arm64
DOCKER_LOAD: false
BUILD_FROM: ${{ inputs.build_from }}
RUN_FROM: ${{ inputs.run_from }}
PKG_VSN: ${{ needs.build.outputs.PKG_VSN }}
EMQX_SOURCE_TYPE: tgz
run: |
./build ${PROFILE} docker
- name: Build docker image
env:
PROFILE: ${{ matrix.profile }}
DOCKER_REGISTRY: ${{ matrix.registry }}
DOCKER_ORG: ${{ github.repository_owner }}
DOCKER_LATEST: ${{ inputs.latest }}
DOCKER_PUSH: ${{ inputs.publish == 'true' || inputs.publish || github.repository_owner != 'emqx' }}
DOCKER_BUILD_NOCACHE: true
DOCKER_PLATFORMS: linux/amd64,linux/arm64
EMQX_RUNNER: 'debian:11-slim'
EMQX_DOCKERFILE: 'deploy/docker/Dockerfile'
PKG_VSN: ${{ inputs.version }}
EMQX_BUILDER_VSN: ${{ inputs.builder_vsn }}
EMQX_OTP_VSN: ${{ inputs.otp_vsn }}
EMQX_ELIXIR_VSN: ${{ inputs.elixir_vsn }}
run: |
./build ${PROFILE} docker

View File

@ -6,6 +6,19 @@ concurrency:
on:
workflow_call:
inputs:
otp_vsn:
required: true
type: string
elixir_vsn:
required: true
type: string
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions:
contents: read
@ -15,6 +28,9 @@ jobs:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
env:
EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ startsWith(matrix.profile, 'emqx-enterprise') && inputs.version-emqx-enterprise || inputs.version-emqx }}
OTP_VSN: ${{ inputs.otp_vsn }}
ELIXIR_VSN: ${{ inputs.elixir_vsn }}
strategy:
fail-fast: false
@ -26,32 +42,22 @@ jobs:
- emqx-enterprise-elixir
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/checkout@v3
- name: build and export to Docker
id: build
run: |
make ${EMQX_NAME}-docker
echo "_EMQX_DOCKER_IMAGE_TAG=$(head -n 1 .emqx_docker_image_tags)" >> $GITHUB_ENV
echo "EMQX_IMAGE_TAG=$(cat .docker_image_tag)" >> $GITHUB_ENV
- name: smoke test
run: |
CID=$(docker run -d --rm -P $_EMQX_DOCKER_IMAGE_TAG)
CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT || {
docker logs $CID
exit 1
}
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker stop $CID
- name: export docker image
if: always()
run: |
docker save $_EMQX_DOCKER_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
docker save $EMQX_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz
- uses: actions/upload-artifact@v3
with:
name: "${{ env.EMQX_NAME }}-docker"
path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz"

View File

@ -12,7 +12,7 @@ on:
type: string
publish:
required: true
type: boolean
type: string
otp_vsn:
required: true
type: string
@ -46,8 +46,7 @@ on:
ref:
required: false
profile:
required: true
default: 'emqx'
required: false
publish:
required: false
type: boolean
@ -55,7 +54,7 @@ on:
otp_vsn:
required: false
type: string
default: '26.2.5-3'
default: '26.1.2-2'
elixir_vsn:
required: false
type: string
@ -63,10 +62,7 @@ on:
builder_vsn:
required: false
type: string
default: '5.3-9'
permissions:
contents: read
default: '5.2-8'
jobs:
mac:
@ -75,14 +71,16 @@ jobs:
matrix:
profile:
- ${{ inputs.profile }}
os:
- macos-13
- macos-14
otp:
- ${{ inputs.otp_vsn }}
os:
- macos-12
- macos-12-arm64
- macos-13
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: emqx/self-hosted-cleanup-action@v1.0.3
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.ref }}
fetch-depth: 0
@ -95,94 +93,106 @@ jobs:
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: success()
with:
name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.otp }}
name: ${{ matrix.profile }}
path: _packages/${{ matrix.profile }}/
retention-days: 7
linux:
runs-on: [self-hosted, ephemeral, linux, "${{ matrix.arch == 'arm64' && 'arm64' || 'x64' }}"]
runs-on: [self-hosted, ephemeral, linux, "${{ matrix.arch }}"]
# always run in the builder container because the host might have the wrong OTP version etc.
# otherwise buildx.sh does not run docker if arch and os match the target arch and os.
container:
image: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}"
strategy:
fail-fast: false
matrix:
profile:
- ${{ inputs.profile }}
otp:
- ${{ inputs.otp_vsn }}
arch:
- x64
- arm64
os:
- ubuntu24.04
- ubuntu22.04
- ubuntu20.04
- ubuntu18.04
- debian12
- debian11
- debian10
- el9
- el8
- el7
- amzn2
- amzn2023
arch:
- amd64
- arm64
with_elixir:
- 'no'
otp:
- ${{ inputs.otp_vsn }}
builder:
- ${{ inputs.builder_vsn }}
elixir:
- ${{ inputs.elixir_vsn }}
with_elixir:
- 'no'
include:
- profile: ${{ inputs.profile }}
os: ubuntu22.04
arch: amd64
with_elixir: 'yes'
- profile: emqx
otp: ${{ inputs.otp_vsn }}
arch: x64
os: ubuntu22.04
builder: ${{ inputs.builder_vsn }}
elixir: ${{ inputs.elixir_vsn }}
with_elixir: 'yes'
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.ref }}
fetch-depth: 0
- name: build tgz
- name: fix workdir
run: |
set -eu
git config --global --add safe.directory "$GITHUB_WORKSPACE"
# Align path for CMake caches
if [ ! "$PWD" = "/emqx" ]; then
ln -s $PWD /emqx
cd /emqx
fi
echo "pwd is $PWD"
- name: build emqx packages
env:
PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }}
OS: ${{ matrix.os }}
IS_ELIXIR: ${{ matrix.with_elixir }}
BUILDER: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}"
BUILDER_SYSTEM: force_docker
ACLOCAL_PATH: "/usr/share/aclocal:/usr/local/share/aclocal"
run: |
./scripts/buildx.sh \
--profile $PROFILE \
--arch $ARCH \
--builder $BUILDER \
--elixir $IS_ELIXIR \
--pkgtype tgz
- name: build pkg
if: matrix.with_elixir == 'no'
set -eu
if [ "${IS_ELIXIR:-}" == 'yes' ]; then
make "${PROFILE}-elixir-tgz"
else
make "${PROFILE}-tgz"
make "${PROFILE}-pkg"
fi
- name: test emqx packages
env:
PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }}
OS: ${{ matrix.os }}
IS_ELIXIR: ${{ matrix.with_elixir }}
BUILDER: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}"
BUILDER_SYSTEM: force_docker
run: |
./scripts/buildx.sh \
--profile $PROFILE \
--arch $ARCH \
--builder $BUILDER \
--elixir $IS_ELIXIR \
--pkgtype pkg
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
set -eu
if [ "${IS_ELIXIR:-}" == 'yes' ]; then
./scripts/pkg-tests.sh "${PROFILE}-elixir-tgz"
else
./scripts/pkg-tests.sh "${PROFILE}-tgz"
./scripts/pkg-tests.sh "${PROFILE}-pkg"
fi
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.arch }}${{ matrix.with_elixir == 'yes' && '-elixir' || '' }}-${{ matrix.builder }}-${{ matrix.otp }}-${{ matrix.elixir }}
name: ${{ matrix.profile }}
path: _packages/${{ matrix.profile }}/
retention-days: 7
@ -191,19 +201,30 @@ jobs:
needs:
- mac
- linux
if: inputs.publish
if: inputs.publish == 'true' || inputs.publish
strategy:
fail-fast: false
matrix:
profile:
- ${{ inputs.profile }}
steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
pattern: "${{ matrix.profile }}-*"
name: ${{ matrix.profile }}
path: packages/${{ matrix.profile }}
merge-multiple: true
- uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
- name: install dos2unix
run: sudo apt-get update -y && sudo apt install -y dos2unix
- name: get packages
run: |
set -eu
cd packages/${{ matrix.profile }}
# fix the .sha256 file format
for var in $(ls | grep emqx | grep -v sha256); do
dos2unix $var.sha256
echo "$(cat $var.sha256) $var" | sha256sum -c || exit 1
done
cd -
- uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@ -211,9 +232,6 @@ jobs:
- name: upload to aws s3
env:
PROFILE: ${{ matrix.profile }}
REF_NAME: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || github.ref_name }}
AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
AWS_CLOUDFRONT_ID: ${{ secrets.AWS_CLOUDFRONT_ID }}
run: |
set -eu
if [ $PROFILE = 'emqx' ]; then
@ -224,5 +242,5 @@ jobs:
echo "unknown profile $PROFILE"
exit 1
fi
aws s3 cp --recursive packages/$PROFILE s3://$AWS_S3_BUCKET/$s3dir/$REF_NAME
aws cloudfront create-invalidation --distribution-id "$AWS_CLOUDFRONT_ID" --paths "/$s3dir/$REF_NAME/*"
aws s3 cp --recursive packages/$PROFILE s3://${{ secrets.AWS_S3_BUCKET }}/$s3dir/${{ github.ref_name }}
aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CLOUDFRONT_ID }} --paths "/$s3dir/${{ github.ref_name }}/*"

View File

@ -9,59 +9,68 @@ on:
- cron: '0 */6 * * *'
workflow_dispatch:
permissions:
contents: read
jobs:
linux:
if: github.repository_owner == 'emqx'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container:
image: "ghcr.io/emqx/emqx-builder/${{ matrix.profile[2] }}-${{ matrix.os }}"
strategy:
fail-fast: false
matrix:
profile:
- ['emqx', 'master']
- ['emqx', 'release-57']
- ['emqx', 'release-58']
- ['emqx', 'master', '5.2-8:1.15.7-26.1.2-2']
- ['emqx-enterprise', 'release-54', '5.2-3:1.14.5-25.3.2-2']
os:
- debian10
- ubuntu22.04
- amzn2023
env:
PROFILE: ${{ matrix.profile[0] }}
OS: ${{ matrix.os }}
BUILDER_SYSTEM: force_docker
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ matrix.profile[1] }}
fetch-depth: 0
- name: Set up environment
id: env
- name: fix workdir
run: |
source env.sh
BUILDER="ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VSN}:${ELIXIR_VSN}-${OTP_VSN}-${OS}"
echo "BUILDER=$BUILDER" >> "$GITHUB_ENV"
- name: build tgz
set -eu
git config --global --add safe.directory "$GITHUB_WORKSPACE"
# Align path for CMake caches
if [ ! "$PWD" = "/emqx" ]; then
ln -s $PWD /emqx
cd /emqx
fi
echo "pwd is $PWD"
- name: build emqx packages
env:
PROFILE: ${{ matrix.profile[0] }}
ACLOCAL_PATH: "/usr/share/aclocal:/usr/local/share/aclocal"
run: |
./scripts/buildx.sh --profile "$PROFILE" --pkgtype tgz --builder "$BUILDER"
- name: build pkg
set -eu
make "${PROFILE}-tgz"
make "${PROFILE}-pkg"
- name: test emqx packages
env:
PROFILE: ${{ matrix.profile[0] }}
run: |
./scripts/buildx.sh --profile "$PROFILE" --pkgtype pkg --builder "$BUILDER"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
set -eu
./scripts/pkg-tests.sh "${PROFILE}-tgz"
./scripts/pkg-tests.sh "${PROFILE}-pkg"
- uses: actions/upload-artifact@v3
if: success()
with:
name: ${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.os }}
name: ${{ matrix.profile[0] }}
path: _packages/${{ matrix.profile[0] }}/
retention-days: 7
- name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
uses: slackapi/slack-github-action@v1.23.0
if: failure()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@ -80,36 +89,33 @@ jobs:
- emqx
branch:
- master
otp:
- 26.1.2-2
os:
- macos-14-arm64
- macos-12-arm64
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ matrix.branch }}
fetch-depth: 0
- name: Set up environment
id: env
run: |
source env.sh
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
- uses: ./.github/actions/package-macos
with:
profile: ${{ matrix.profile }}
otp: ${{ steps.env.outputs.OTP_VSN }}
otp: ${{ matrix.otp }}
os: ${{ matrix.os }}
apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }}
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: success()
with:
name: ${{ matrix.profile }}-${{ matrix.os }}
name: ${{ matrix.profile }}
path: _packages/${{ matrix.profile }}/
retention-days: 7
- name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
uses: slackapi/slack-github-action@v1.23.0
if: failure()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -6,50 +6,92 @@ concurrency:
on:
workflow_call:
inputs:
builder:
required: true
type: string
builder_vsn:
required: true
type: string
otp_vsn:
required: true
type: string
elixir_vsn:
required: true
type: string
workflow_dispatch:
inputs:
ref:
required: false
permissions:
contents: read
builder:
required: false
type: string
default: 'ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu22.04'
builder_vsn:
required: false
type: string
default: '5.2-8'
otp_vsn:
required: false
type: string
default: '26.1.2-2'
elixir_vsn:
required: false
type: string
default: '1.15.7'
jobs:
linux:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[2])) || 'ubuntu-22.04' }}
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
env:
PROFILE: ${{ matrix.profile[0] }}
ELIXIR: ${{ matrix.profile[1] == 'elixir' && 'yes' || 'no' }}
ARCH: ${{ matrix.profile[2] == 'x64' && 'amd64' || 'arm64' }}
BUILDER_SYSTEM: force_docker
EMQX_NAME: ${{ matrix.profile[0] }}
strategy:
fail-fast: false
matrix:
profile:
- ["emqx", "elixir", "x64"]
- ["emqx", "elixir", "arm64"]
- ["emqx-enterprise", "erlang", "x64"]
- ["emqx", "26.1.2-2", "ubuntu20.04", "elixir"]
- ["emqx-enterprise", "26.1.2-2", "ubuntu20.04", "erlang"]
container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: build tgz
- name: Work around https://github.com/actions/checkout/issues/766
run: |
./scripts/buildx.sh --profile $PROFILE --pkgtype tgz --elixir $ELIXIR --arch $ARCH
- name: build pkg
git config --global --add safe.directory "$GITHUB_WORKSPACE"
echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
- name: build and test tgz package
if: matrix.profile[3] == 'erlang'
run: |
./scripts/buildx.sh --profile $PROFILE --pkgtype pkg --elixir $ELIXIR --arch $ARCH
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
make ${EMQX_NAME}-tgz
./scripts/pkg-tests.sh ${EMQX_NAME}-tgz
- name: build and test deb/rpm packages
if: matrix.profile[3] == 'erlang'
run: |
make ${EMQX_NAME}-pkg
./scripts/pkg-tests.sh ${EMQX_NAME}-pkg
- name: build and test tgz package (Elixir)
if: matrix.profile[3] == 'elixir'
run: |
make ${EMQX_NAME}-elixir-tgz
./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-tgz
- name: build and test deb/rpm packages (Elixir)
if: matrix.profile[3] == 'elixir'
run: |
make ${EMQX_NAME}-elixir-pkg
./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-pkg
- uses: actions/upload-artifact@v3
with:
name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
path: _packages/${{ matrix.profile[0] }}/*
retention-days: 7
compression-level: 0
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
with:
name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
name: "${{ matrix.profile[0] }}_schema_dump"
path: |
scripts/spellcheck
_build/docgen/${{ matrix.profile[0] }}/schema-en.json
@ -61,30 +103,27 @@ jobs:
matrix:
profile:
- emqx
otp:
- ${{ inputs.otp_vsn }}
os:
- macos-14-arm64
- macos-12-arm64
runs-on: ${{ matrix.os }}
env:
EMQX_NAME: ${{ matrix.profile }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
- uses: actions/checkout@v3
- uses: ./.github/actions/package-macos
with:
profile: ${{ matrix.profile }}
otp: ${{ steps.env.outputs.OTP_VSN }}
otp: ${{ matrix.otp }}
os: ${{ matrix.os }}
apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }}
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.os }}
path: _packages/**/*

View File

@ -14,33 +14,32 @@ jobs:
check_deps_integrity:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }}
env:
MIX_ENV: ${{ matrix.profile }}
PROFILE: ${{ matrix.profile }}
strategy:
matrix:
profile:
- emqx-enterprise
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- run: make ensure-rebar3
- run: ./scripts/check-deps-integrity.escript
- name: Setup mix
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
run: |
# mix local.hex --force
mix local.hex 2.0.6 --force
mix local.hex --force
mix local.rebar --force
mix deps.get
- name: print mix dependency tree
run: mix deps.tree
- run: ./scripts/check-elixir-deps-discrepancies.exs
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
- run: ./scripts/check-elixir-applications.exs
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
- name: Upload produced lock files
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
uses: actions/upload-artifact@v3
if: failure()
with:
name: ${{ matrix.profile }}_produced_lock_files
name: produced_lock_files
path: |
mix.lock
rebar.lock

View File

@ -4,39 +4,42 @@ on:
schedule:
- cron: '33 14 * * 4'
workflow_dispatch:
inputs:
ref:
required: false
permissions:
contents: read
jobs:
analyze:
if: github.repository == 'emqx/emqx'
name: Analyze
runs-on: ubuntu-22.04
timeout-minutes: 360
permissions:
actions: read
security-events: write
container:
image: ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu22.04
strategy:
fail-fast: false
matrix:
branch:
- master
- release-57
- release-58
language:
- cpp
- python
language: [ 'cpp', 'python' ]
steps:
- name: Checkout repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
uses: actions/checkout@v3
with:
ref: ${{ matrix.branch }}
ref: ${{ github.event.inputs.ref }}
- name: Ensure git safe dir
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
make ensure-rebar3
- name: Initialize CodeQL
uses: github/codeql-action/init@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
@ -45,9 +48,16 @@ jobs:
env:
PROFILE: emqx-enterprise
run: |
./scripts/buildx.sh --profile emqx-enterprise --pkgtype rel
make emqx-enterprise-compile
- name: Fetch deps
if: matrix.language == 'python'
env:
PROFILE: emqx-enterprise
run: |
make deps-emqx-enterprise
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"

View File

@ -7,6 +7,9 @@ on:
# run hourly
- cron: "0 * * * *"
workflow_dispatch:
inputs:
ref:
required: false
permissions:
contents: read
@ -14,30 +17,22 @@ permissions:
jobs:
rerun-failed-jobs:
if: github.repository_owner == 'emqx'
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
permissions:
checks: read
actions: write
strategy:
fail-fast: false
matrix:
ref:
- master
- release-57
- release-58
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ matrix.ref }}
ref: ${{ github.event.inputs.ref || 'master' }}
- name: run script
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPO: ${{ github.repository }}
run: |
gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/${GITHUB_REPO}/actions/runs > runs.json
for id in $(jq -r '.workflow_runs[] | select((."conclusion" == "failure") and (."name" != "Keep master green") and .run_attempt < 3) | .id' runs.json); do
echo "rerun https://github.com/${GITHUB_REPO}/actions/runs/$id"
gh api --method POST /repos/${GITHUB_REPO}/actions/runs/$id/rerun-failed-jobs || true
gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/emqx/emqx/actions/runs > runs.json
for id in $(jq -r '.workflow_runs[] | select((."conclusion" != "success") and .run_attempt < 3) | .id' runs.json); do
echo "rerun https://github.com/emqx/emqx/actions/runs/$id"
gh api --method POST /repos/emqx/emqx/actions/runs/$id/rerun-failed-jobs
done

View File

@ -26,13 +26,13 @@ jobs:
prepare:
runs-on: ubuntu-latest
if: github.repository_owner == 'emqx'
container: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu20.04
container: ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-ubuntu20.04
outputs:
BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }}
PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ github.event.inputs.ref }}
@ -52,7 +52,7 @@ jobs:
id: package_file
run: |
echo "PACKAGE_FILE=$(find _packages/emqx -name 'emqx-*.deb' | head -n 1 | xargs basename)" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
with:
name: emqx-ubuntu20.04
path: _packages/emqx/${{ steps.package_file.outputs.PACKAGE_FILE }}
@ -66,23 +66,23 @@ jobs:
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
uses: actions/checkout@v3
with:
repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test
ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: emqx-ubuntu20.04
path: tf-emqx-performance-test/
- name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: run scenario
@ -105,7 +105,7 @@ jobs:
terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
uses: slackapi/slack-github-action@v1.24.0
with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy
@ -113,13 +113,13 @@ jobs:
working-directory: ./tf-emqx-performance-test
run: |
terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: success()
with:
name: metrics
path: |
"./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: failure()
with:
name: terraform
@ -137,23 +137,23 @@ jobs:
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
uses: actions/checkout@v3
with:
repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test
ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: emqx-ubuntu20.04
path: tf-emqx-performance-test/
- name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: run scenario
@ -176,7 +176,7 @@ jobs:
terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
uses: slackapi/slack-github-action@v1.24.0
with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy
@ -184,13 +184,13 @@ jobs:
working-directory: ./tf-emqx-performance-test
run: |
terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: success()
with:
name: metrics
path: |
"./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: failure()
with:
name: terraform
@ -209,23 +209,23 @@ jobs:
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
uses: actions/checkout@v3
with:
repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test
ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: emqx-ubuntu20.04
path: tf-emqx-performance-test/
- name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: run scenario
@ -249,7 +249,7 @@ jobs:
terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
uses: slackapi/slack-github-action@v1.24.0
with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy
@ -257,13 +257,13 @@ jobs:
working-directory: ./tf-emqx-performance-test
run: |
terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: success()
with:
name: metrics
path: |
"./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: failure()
with:
name: terraform
@ -283,23 +283,23 @@ jobs:
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
uses: actions/checkout@v3
with:
repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test
ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: emqx-ubuntu20.04
path: tf-emqx-performance-test/
- name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1
uses: hashicorp/setup-terraform@v2
with:
terraform_wrapper: false
- name: run scenario
@ -322,7 +322,7 @@ jobs:
terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
uses: slackapi/slack-github-action@v1.24.0
with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy
@ -330,13 +330,13 @@ jobs:
working-directory: ./tf-emqx-performance-test
run: |
terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: success()
with:
name: metrics
path: |
"./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: failure()
with:
name: terraform

View File

@ -8,7 +8,7 @@ on:
tag:
type: string
required: true
publish_release_artifacts:
publish_release_artefacts:
type: boolean
required: true
default: false
@ -31,12 +31,12 @@ jobs:
strategy:
fail-fast: false
steps:
- uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
- uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.tag }}
- name: Detect profile
@ -67,15 +67,14 @@ jobs:
BUCKET=${{ secrets.AWS_S3_BUCKET }}
OUTPUT_DIR=${{ steps.profile.outputs.s3dir }}
aws s3 cp --recursive s3://$BUCKET/$OUTPUT_DIR/${{ env.ref_name }} packages
- uses: emqx/upload-assets@974befcf0e72a1811360a81c798855efb66b0551 # 0.5.2
- uses: emqx/upload-assets@8d2083b4dbe3151b0b735572eaa153b6acb647fe # 0.5.0
env:
GITHUB_TOKEN: ${{ github.token }}
with:
asset_paths: '["packages/*"]'
tag_name: "${{ env.ref_name }}"
skip_existing: true
- name: update to emqx.io
if: github.event_name == 'release' || inputs.publish_release_artifacts
if: startsWith(env.ref_name, 'v') && ((github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artefacts)
run: |
set -eux
curl -w %{http_code} \
@ -86,7 +85,7 @@ jobs:
-d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.ref_name }}\" }" \
${{ secrets.EMQX_IO_RELEASE_API }}
- name: Push to packagecloud.io
if: (github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artifacts
if: (github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artefacts
env:
PROFILE: ${{ steps.profile.outputs.profile }}
VERSION: ${{ steps.profile.outputs.version }}
@ -106,12 +105,14 @@ jobs:
push "debian/bullseye" "packages/$PROFILE-$VERSION-debian11-arm64.deb"
push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-amd64.deb"
push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-arm64.deb"
push "ubuntu/bionic" "packages/$PROFILE-$VERSION-ubuntu18.04-amd64.deb"
push "ubuntu/bionic" "packages/$PROFILE-$VERSION-ubuntu18.04-arm64.deb"
push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-amd64.deb"
push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-arm64.deb"
push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-amd64.deb"
push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-arm64.deb"
push "ubuntu/noble" "packages/$PROFILE-$VERSION-ubuntu24.04-amd64.deb"
push "ubuntu/noble" "packages/$PROFILE-$VERSION-ubuntu24.04-arm64.deb"
push "el/7" "packages/$PROFILE-$VERSION-el7-amd64.rpm"
push "el/7" "packages/$PROFILE-$VERSION-el7-arm64.rpm"
push "el/8" "packages/$PROFILE-$VERSION-el8-amd64.rpm"
push "el/8" "packages/$PROFILE-$VERSION-el8-arm64.rpm"
push "el/9" "packages/$PROFILE-$VERSION-el9-amd64.rpm"
@ -131,7 +132,7 @@ jobs:
checks: write
actions: write
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
- name: trigger re-run of app versions check on open PRs
shell: bash
env:

View File

@ -25,7 +25,7 @@ jobs:
- emqx
- emqx-enterprise
steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.profile }}
- name: extract artifact
@ -39,10 +39,10 @@ jobs:
- name: print erlang log
if: failure()
run: |
cat _build/${{ matrix.profile }}/rel/emqx/log/erlang.log.*
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
cat _build/${{ matrix.profile }}/rel/emqx/logs/erlang.log.*
- uses: actions/upload-artifact@v3
if: failure()
with:
name: conftest-logs-${{ matrix.profile }}
path: _build/${{ matrix.profile }}/rel/emqx/log
name: logs-${{ matrix.profile }}
path: _build/${{ matrix.profile }}/rel/emqx/logs
retention-days: 7

View File

@ -6,6 +6,13 @@ concurrency:
on:
workflow_call:
inputs:
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions:
contents: read
@ -25,24 +32,19 @@ jobs:
env:
EMQX_NAME: ${{ matrix.profile[0] }}
PKG_VSN: ${{ matrix.profile[0] == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
name: ${{ env.EMQX_NAME }}-docker
path: /tmp
- name: load docker image
run: |
_EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/${EMQX_NAME}-docker-${PKG_VSN}.tar.gz 2>/dev/null | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$_EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
EMQX_IMAGE_TAG=$(docker load < /tmp/${EMQX_NAME}-docker-${PKG_VSN}.tar.gz 2>/dev/null | sed 's/Loaded image: //g')
echo "EMQX_IMAGE_TAG=$EMQX_IMAGE_TAG" >> $GITHUB_ENV
- name: dashboard tests
working-directory: ./scripts/ui-tests
run: |
@ -50,11 +52,9 @@ jobs:
docker compose up --abort-on-container-exit --exit-code-from selenium
- name: test two nodes cluster with proto_dist=inet_tls in docker
run: |
## -d 1 means only put node 1 (latest version) behind haproxy
./scripts/test/start-two-nodes-in-docker.sh -d 1 -P $_EMQX_DOCKER_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
## -c means 'cleanup'
./scripts/test/start-two-nodes-in-docker.sh -c
- name: cleanup
if: always()
@ -69,6 +69,8 @@ jobs:
shell: bash
env:
EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
_EMQX_TEST_DB_BACKEND: ${{ matrix.cluster_db_backend }}
strategy:
fail-fast: false
@ -77,20 +79,12 @@ jobs:
- emqx
- emqx-enterprise
- emqx-elixir
cluster_db_backend:
- mnesia
- rlog
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
if [ "$EMQX_NAME" = "emqx-enterprise" ]; then
_EMQX_TEST_DB_BACKEND='rlog'
else
_EMQX_TEST_DB_BACKEND='mnesia'
fi
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
name: ${{ env.EMQX_NAME }}-docker
path: /tmp
@ -119,4 +113,4 @@ jobs:
- name: test node_dump
run: |
docker exec -t -u root node1.emqx.io bash -c 'apt-get -y update && apt-get -y install net-tools'
docker exec -t -u root node1.emqx.io node_dump
docker exec node1.emqx.io node_dump

View File

@ -27,21 +27,19 @@ permissions:
contents: read
jobs:
prepare_matrix:
run_emqx_app_tests:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }}
defaults:
run:
shell: bash
outputs:
matrix: ${{ steps.matrix.outputs.matrix }}
skip: ${{ steps.matrix.outputs.skip }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: prepare test matrix
id: matrix
- name: run
env:
BEFORE_REF: ${{ inputs.before_ref }}
AFTER_REF: ${{ inputs.after_ref }}
@ -50,54 +48,19 @@ jobs:
changed_files="$(git diff --name-only ${BEFORE_REF} ${AFTER_REF} apps/emqx)"
if [ "$changed_files" = '' ]; then
echo "nothing changed in apps/emqx, ignored."
echo 'matrix=[]' | tee -a $GITHUB_OUTPUT
echo 'skip=true' | tee -a $GITHUB_OUTPUT
exit 0
else
echo 'skip=false' | tee -a $GITHUB_OUTPUT
echo 'matrix=[{"type": "eunit_proper_and_static"},{"type": "1_3"},{"type": "2_3"},{"type": "3_3"}]' | tee -a $GITHUB_OUTPUT
fi
run_emqx_app_tests:
if: needs.prepare_matrix.outputs.skip != 'true'
needs:
- prepare_matrix
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }}
strategy:
fail-fast: false
matrix:
include: ${{ fromJson(needs.prepare_matrix.outputs.matrix) }}
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0
- name: run
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
make ensure-rebar3
cp rebar3 apps/emqx/
cd apps/emqx
if [[ ${{ matrix.type }} == "eunit_proper_and_static" ]]; then
./rebar3 xref
./rebar3 dialyzer
./rebar3 eunit -v --name 'eunit@127.0.0.1'
./rebar3 proper -d test/props
else
export SUITEGROUP=${{ matrix.type }}
SUITES=$(../../scripts/find-suites.sh apps/emqx | \
sed -e 's|apps/emqx/test/||g' | \
sed -Ee 's|,?apps/emqx/integration_test/.*||g' | \
sed -e 's/\.erl//g')
echo "Suites: $SUITES"
./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true --suite="$SUITES"
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
./rebar3 xref
./rebar3 dialyzer
./rebar3 eunit -v --name 'eunit@127.0.0.1'
./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true
./rebar3 proper -d test/props
- uses: actions/upload-artifact@v3
if: failure()
with:
name: logs-emqx-app-tests-${{ matrix.type }}
name: logs-emqx-app-tests
path: apps/emqx/_build/test/logs
retention-days: 7

View File

@ -6,6 +6,13 @@ concurrency:
on:
workflow_call:
inputs:
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions:
contents: read
@ -18,6 +25,7 @@ jobs:
shell: bash
env:
EMQX_NAME: ${{ matrix.profile }}
EMQX_TAG: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
REPOSITORY: "emqx/${{ matrix.profile }}"
strategy:
@ -34,17 +42,10 @@ jobs:
- ssl1.3
- ssl1.2
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
path: source
- name: Set up environment
id: env
run: |
cd source
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "EMQX_TAG=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: "${{ env.EMQX_NAME }}-docker"
path: /tmp
@ -164,14 +165,14 @@ jobs:
fi
sleep 1;
done
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
repository: emqx/paho.mqtt.testing
ref: develop-5.0
path: paho.mqtt.testing
- name: install pytest
run: |
pip install --require-hashes -r source/.ci/docker-compose-file/python/requirements.txt
pip install pytest==7.1.2 pytest-retry==1.3.0
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: run paho test
timeout-minutes: 10

View File

@ -2,9 +2,10 @@ name: JMeter integration tests
on:
workflow_call:
permissions:
contents: read
inputs:
version-emqx:
required: true
type: string
jobs:
jmeter_artifact:
@ -12,7 +13,7 @@ jobs:
steps:
- name: Cache Jmeter
id: cache-jmeter
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
uses: actions/cache@v3
with:
path: /tmp/apache-jmeter.tgz
key: apache-jmeter-5.4.3.tgz
@ -31,7 +32,7 @@ jobs:
else
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz $ARCHIVE_URL
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
with:
name: apache-jmeter.tgz
path: /tmp/apache-jmeter.tgz
@ -51,23 +52,10 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@v3
- uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up
timeout-minutes: 5
run: |
@ -95,10 +83,10 @@ jobs:
echo "check logs failed"
exit 1
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: always()
with:
name: jmeter_logs-advanced_feat-${{ matrix.scripts_type }}
name: jmeter_logs
path: ./jmeter_logs
retention-days: 3
@ -120,23 +108,10 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@v3
- uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up
timeout-minutes: 5
env:
@ -175,10 +150,10 @@ jobs:
if: failure()
run: |
docker compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: always()
with:
name: jmeter_logs-pgsql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.pgsql_tag }}
name: jmeter_logs
path: ./jmeter_logs
retention-days: 3
@ -197,23 +172,10 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@v3
- uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up
timeout-minutes: 5
env:
@ -248,10 +210,10 @@ jobs:
echo "check logs failed"
exit 1
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: always()
with:
name: jmeter_logs-mysql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}
name: jmeter_logs
path: ./jmeter_logs
retention-days: 3
@ -266,23 +228,10 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@v3
- uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up
timeout-minutes: 5
run: |
@ -313,10 +262,10 @@ jobs:
echo "check logs failed"
exit 1
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: always()
with:
name: jmeter_logs-JWT_authn-${{ matrix.scripts_type }}
name: jmeter_logs
path: ./jmeter_logs
retention-days: 3
@ -332,23 +281,10 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@v3
- uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up
timeout-minutes: 5
run: |
@ -370,9 +306,9 @@ jobs:
echo "check logs failed"
exit 1
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: always()
with:
name: jmeter_logs-built_in_database_authn_authz-${{ matrix.scripts_type }}
name: jmeter_logs
path: ./jmeter_logs
retention-days: 3

View File

@ -25,7 +25,7 @@ jobs:
run:
shell: bash
steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: emqx-enterprise
- name: extract artifact
@ -45,10 +45,10 @@ jobs:
run: |
export PROFILE='emqx-enterprise'
make emqx-enterprise-tgz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
name: Upload built emqx and test scenario
with:
name: relup_tests_emqx_built
name: emqx_built
path: |
_upgrade_base
_packages
@ -72,10 +72,10 @@ jobs:
run:
shell: bash
steps:
- uses: erlef/setup-beam@b9c58b0450cd832ccdb3c17cc156a47065d2114f # v1.18.1
- uses: erlef/setup-beam@v1.16.0
with:
otp-version: 26.2.5
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
otp-version: 26.1.2
- uses: actions/checkout@v3
with:
repository: hawk/lux
ref: lux-2.8.1
@ -88,10 +88,10 @@ jobs:
./configure
make
echo "$(pwd)/bin" >> $GITHUB_PATH
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
name: Download built emqx and test scenario
with:
name: relup_tests_emqx_built
name: emqx_built
path: .
- name: run relup test
run: |
@ -111,11 +111,11 @@ jobs:
docker logs node2.emqx.io | tee lux_logs/emqx2.log
exit 1
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
name: Save debug data
if: failure()
with:
name: relup_test_run_debug_data
name: debug_data
path: |
lux_logs
retention-days: 3

View File

@ -35,41 +35,37 @@ jobs:
defaults:
run:
shell: bash
container: ${{ inputs.builder }}
env:
PROFILE: ${{ matrix.profile }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}
permissions:
contents: read
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.profile }}
- name: extract artifact
run: |
unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE"
# produces eunit.coverdata
- run: make eunit
- name: eunit
env:
PROFILE: ${{ matrix.profile }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
run: make eunit
# produces proper.coverdata
- run: make proper
- run: make cover
- name: send to coveralls
if: github.repository == 'emqx/emqx'
- name: proper
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: make coveralls
PROFILE: ${{ matrix.profile }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
run: make proper
- run: cat rebar3.crashdump
if: failure()
- uses: actions/upload-artifact@v3
with:
name: coverdata
path: _build/test/cover
retention-days: 7
ct_docker:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON('["self-hosted","ephemeral","linux","x64"]') || 'ubuntu-22.04' }}
@ -83,24 +79,19 @@ jobs:
run:
shell: bash
env:
PROFILE: ${{ matrix.profile }}
permissions:
contents: read
steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.profile }}
- name: extract artifact
run: |
unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE"
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
- name: run common tests
env:
DOCKER_CT_RUNNER_IMAGE: ${{ inputs.builder }}
DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
MONGO_TAG: "5"
MYSQL_TAG: "8"
PGSQL_TAG: "13"
@ -109,36 +100,24 @@ jobs:
TDENGINE_TAG: "3.0.2.4"
OPENTS_TAG: "9aa7f88"
MINIO_TAG: "RELEASE.2023-03-20T20-16-18Z"
PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }} --keep-up
- name: make cover
run: |
docker exec -e PROFILE="$PROFILE" -t erlang make cover
- name: send to coveralls
if: github.repository == 'emqx/emqx'
run: |
ls _build/test/cover/*.coverdata || exit 0
docker exec -e PROFILE="$PROFILE" -t erlang make coveralls
- name: rebar3.crashdump
if: failure()
run: cat rebar3.crashdump
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }}
- uses: actions/upload-artifact@v3
with:
name: coverdata
path: _build/test/cover
retention-days: 7
- name: compress logs
if: failure()
run: tar -czf logs.tar.gz _build/test/logs
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: failure()
with:
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }}
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
path: logs.tar.gz
compression-level: 0
retention-days: 7
ct:
@ -149,22 +128,13 @@ jobs:
matrix:
include: ${{ fromJson(inputs.ct-host) }}
container: ${{ inputs.builder }}
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
defaults:
run:
shell: bash
permissions:
contents: read
env:
PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }}
steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.profile }}
- name: extract artifact
@ -174,31 +144,27 @@ jobs:
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
- name: run common tests
run: make "${{ matrix.app }}-ct"
- run: make cover
- name: send to coveralls
if: github.repository == 'emqx/emqx'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
run: |
ls _build/test/cover/*.coverdata || exit 0
make coveralls
- run: cat rebar3.crashdump
if: failure()
make "${{ matrix.app }}-ct"
- uses: actions/upload-artifact@v3
with:
name: coverdata
path: _build/test/cover
if-no-files-found: warn # do not fail if no coverdata found
retention-days: 7
- name: compress logs
if: failure()
run: tar -czf logs.tar.gz _build/test/logs
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
- uses: actions/upload-artifact@v3
if: failure()
with:
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }}
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
path: logs.tar.gz
compression-level: 0
retention-days: 7
tests_passed:
@ -209,18 +175,60 @@ jobs:
runs-on: ubuntu-22.04
strategy:
fail-fast: false
permissions:
pull-requests: write
steps:
- name: Coveralls finished
if: github.repository == 'emqx/emqx'
uses: coverallsapp/github-action@643bc377ffa44ace6394b2b5d0d3950076de9f63 # v2.3.0
with:
parallel-finished: true
git-branch: ${{ github.ref }}
git-commit: ${{ github.sha }}
- run: echo "All tests passed"
make_cover:
needs:
- eunit_and_proper
- ct
- ct_docker
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }}
strategy:
fail-fast: false
matrix:
profile:
- emqx-enterprise
steps:
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.profile }}
- name: extract artifact
run: |
unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE"
- uses: actions/download-artifact@v3
name: download coverdata
with:
name: coverdata
path: _build/test/cover
- name: make cover
env:
PROFILE: emqx-enterprise
run: make cover
- name: send to coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PROFILE: emqx-enterprise
run: make coveralls
- name: get coveralls logs
if: failure()
run: cat rebar3.crashdump
# do this in a separate job
upload_coverdata:
needs: make_cover
runs-on: ubuntu-22.04
steps:
- name: Coveralls Finished
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
curl -v -k https://coveralls.io/webhook \
--header "Content-Type: application/json" \
--data "{\"repo_name\":\"$GITHUB_REPOSITORY\",\"repo_token\":\"$GITHUB_TOKEN\",\"payload\":{\"build_num\":$GITHUB_RUN_ID,\"status\":\"done\"}}" || true

View File

@ -8,29 +8,27 @@ on:
schedule:
- cron: '25 21 * * 6'
push:
branches:
- master
branches: [ "master" ]
workflow_dispatch:
permissions: read-all
jobs:
analysis:
if: github.repository == 'emqx/emqx'
name: Scorecard analysis
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
permissions:
security-events: write
id-token: write
steps:
- name: "Checkout code"
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
uses: ossf/scorecard-action@483ef80eb98fb506c348f7d62e28055e49fe2398 # v2.3.0
with:
results_file: results.sarif
results_format: sarif
@ -40,7 +38,7 @@ jobs:
publish_results: true
- name: "Upload artifact"
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
with:
name: SARIF file
path: results.sarif
@ -48,6 +46,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.22.1
uses: github/codeql-action/upload-sarif@8e0b1c74b1d5a0077b04d064c76ee714d3da7637 # v2.22.1
with:
sarif_file: results.sarif

View File

@ -19,10 +19,10 @@ jobs:
- emqx-enterprise
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
pattern: "${{ matrix.profile }}-schema-dump-*-x64"
merge-multiple: true
name: "${{ matrix.profile }}_schema_dump"
path: /tmp/
- name: Run spellcheck
run: |
bash scripts/spellcheck/spellcheck.sh _build/docgen/${{ matrix.profile }}/schema-en.json
bash /tmp/scripts/spellcheck/spellcheck.sh /tmp/_build/docgen/${{ matrix.profile }}/schema-en.json

View File

@ -13,15 +13,15 @@ permissions:
jobs:
stale:
if: github.repository == 'emqx/emqx'
runs-on: ubuntu-22.04
if: github.repository_owner == 'emqx'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
permissions:
issues: write
pull-requests: none
steps:
- name: Close Stale Issues
uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0
uses: actions/stale@v6
with:
days-before-stale: 7
days-before-close: 7

View File

@ -28,21 +28,21 @@ jobs:
fail-fast: false
matrix:
include: ${{ fromJson(inputs.ct-matrix) }}
container: "${{ inputs.builder }}"
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
- uses: actions/download-artifact@v3
with:
name: ${{ matrix.profile }}
- name: extract artifact
run: |
unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE"
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
- uses: actions/cache@v3
with:
path: "emqx_dialyzer_${{ matrix.profile }}_plt"
key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }}
path: "emqx_dialyzer_${{ matrix.otp }}_plt"
key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }}
restore-keys: |
rebar3-dialyzer-plt-${{ matrix.profile }}-
rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-
- run: cat .env | tee -a $GITHUB_ENV
- name: run static checks
run: make static_checks

View File

@ -1,88 +0,0 @@
name: Sync release branch
concurrency:
group: sync-release-branch-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on:
schedule:
- cron: '0 2 * * *'
workflow_dispatch:
permissions:
contents: read
jobs:
create-pr:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
strategy:
fail-fast: false
matrix:
branch:
- release-57
env:
SYNC_BRANCH: ${{ matrix.branch }}
defaults:
run:
shell: bash
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0
- name: create new branch
run: |
set -euxo pipefail
NEW_BRANCH_NAME=sync-${SYNC_BRANCH}-$(date +"%Y%m%d-%H%M%S")
echo "NEW_BRANCH_NAME=${NEW_BRANCH_NAME}" >> $GITHUB_ENV
git config --global user.name "${GITHUB_ACTOR}"
git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com"
git checkout -b ${NEW_BRANCH_NAME}
git merge origin/${SYNC_BRANCH} 2>&1 | tee merge.log
git push origin ${NEW_BRANCH_NAME}:${NEW_BRANCH_NAME}
- name: create pull request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euxo pipefail
for pr in $(gh pr list --state open --base master --label sync-release-branch --search "Sync ${SYNC_BRANCH} in:title" --repo ${{ github.repository }} --json number --jq '.[] | .number'); do
gh pr close $pr --repo ${{ github.repository }} --delete-branch || true
done
gh pr create --title "Sync ${SYNC_BRANCH}" --body "Sync ${SYNC_BRANCH}" --base master --head ${NEW_BRANCH_NAME} --label sync-release-branch --repo ${{ github.repository }}
- name: Send notification to Slack
if: failure()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
run: |
awk '{printf "%s\\n", $0}' merge.log > merge.log.1
cat <<EOF > payload.json
{
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Automatic sync of ${SYNC_BRANCH} branch failed: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "\`\`\`$(cat merge.log.1)\`\`\`"
}
}
]
}
EOF
curl -X POST -H 'Content-type: application/json' --data @payload.json "$SLACK_WEBHOOK_URL"

View File

@ -9,21 +9,18 @@ on:
type: string
required: true
permissions:
contents: read
jobs:
upload:
runs-on: ubuntu-22.04
strategy:
fail-fast: false
steps:
- uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
- uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.tag }}
- name: Detect profile
@ -45,7 +42,7 @@ jobs:
echo "version=$(./pkg-vsn.sh emqx-enterprise)" >> $GITHUB_OUTPUT
;;
esac
- uses: emqx/push-helm-action@5ca37070f42cf874fc843a0e0c7c10ac76de5255 # v1.1
- uses: emqx/push-helm-action@v1.1
if: github.event_name == 'release' && !github.event.release.prerelease
with:
charts_dir: "${{ github.workspace }}/deploy/charts/${{ steps.profile.outputs.profile }}"

6 .gitignore vendored
View File

@ -72,10 +72,4 @@ ct_run*/
apps/emqx_conf/etc/emqx.conf.all.rendered*
rebar-git-cache.tar
# build docker image locally
.dockerignore
.docker_image_tag
.emqx_docker_image_tags
.git/
apps/emqx_utils/src/emqx_variform_parser.erl
apps/emqx_utils/src/emqx_variform_scan.erl
default-profile.mk

View File

@ -1,2 +1,2 @@
erlang 26.2.5-3
erlang 26.1.2-2
elixir 1.15.7-otp-26

View File

@ -1,5 +1,5 @@
## This is a fast-build Dockerfile only for testing
FROM ubuntu:20.04@sha256:f2034e7195f61334e6caff6ecf2e965f92d11e888309065da85ff50c617732b8
FROM ubuntu:20.04
ARG PROFILE=emqx
RUN apt-get update; \

View File

@ -6,17 +6,23 @@ endif
REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts
include env.sh
export EMQX_RELUP ?= true
export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.2-8:1.15.7-26.1.2-2-debian11
export EMQX_DEFAULT_RUNNER = public.ecr.aws/debian/debian:11-slim
export EMQX_REL_FORM ?= tgz
export QUICER_DOWNLOAD_FROM_RELEASE = 1
ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none
FIND=/usr/bin/find
else
FIND=find
endif
# Dashboard version
# from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.10.0-beta.1
export EMQX_EE_DASHBOARD_VERSION ?= e1.8.0-beta.1
export EMQX_DASHBOARD_VERSION ?= v1.6.1
export EMQX_EE_DASHBOARD_VERSION ?= e1.4.1
export EMQX_RELUP ?= true
export EMQX_REL_FORM ?= tgz
-include default-profile.mk
PROFILE ?= emqx
REL_PROFILES := emqx emqx-enterprise
PKG_PROFILES := emqx-pkg emqx-enterprise-pkg
@ -28,8 +34,6 @@ CT_COVER_EXPORT_PREFIX ?= $(PROFILE)
export REBAR_GIT_CLONE_OPTIONS += --depth=1
ELIXIR_COMMON_DEPS := ensure-hex ensure-mix-rebar3 ensure-mix-rebar
.PHONY: default
default: $(REBAR) $(PROFILE)
@ -49,8 +53,7 @@ $(REBAR): .prepare ensure-rebar3
.PHONY: ensure-hex
ensure-hex:
# @mix local.hex --if-missing --force
@mix local.hex 2.0.6 --if-missing --force
@mix local.hex --if-missing --force
.PHONY: ensure-mix-rebar3
ensure-mix-rebar3: $(REBAR)
@ -60,12 +63,8 @@ ensure-mix-rebar3: $(REBAR)
ensure-mix-rebar: $(REBAR)
@mix local.rebar --if-missing --force
.PHONY: elixir-common-deps
elixir-common-deps: $(ELIXIR_COMMON_DEPS)
.PHONY: mix-deps-get
mix-deps-get: elixir-common-deps
mix-deps-get: $(ELIXIR_COMMON_DEPS)
@mix deps.get
.PHONY: eunit
@ -195,8 +194,8 @@ $(PROFILES:%=clean-%):
@if [ -d _build/$(@:clean-%=%) ]; then \
rm -f rebar.lock; \
rm -rf _build/$(@:clean-%=%)/rel; \
find _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \
find _build/$(@:clean-%=%) -type l -delete; \
$(FIND) _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \
$(FIND) _build/$(@:clean-%=%) -type l -delete; \
fi
.PHONY: clean-all
@ -244,7 +243,7 @@ $(foreach zt,$(ALL_ZIPS),$(eval $(call download-relup-packages,$(zt))))
## relup target is to create relup instructions
.PHONY: $(REL_PROFILES:%=%-relup)
define gen-relup-target
$1-relup: $(COMMON_DEPS)
$1-relup: $1-relup-downloads $(COMMON_DEPS)
@$(BUILD) $1 relup
endef
ALL_TGZS = $(REL_PROFILES)
@ -253,7 +252,7 @@ $(foreach zt,$(ALL_TGZS),$(eval $(call gen-relup-target,$(zt))))
## tgz target is to create a release package .tar.gz with relup
.PHONY: $(REL_PROFILES:%=%-tgz)
define gen-tgz-target
$1-tgz: $(COMMON_DEPS)
$1-tgz: $1-relup
@$(BUILD) $1 tgz
endef
ALL_TGZS = $(REL_PROFILES)
@ -316,19 +315,10 @@ $(foreach tt,$(ALL_ELIXIR_TGZS),$(eval $(call gen-elixir-tgz-target,$(tt))))
.PHONY: fmt
fmt: $(REBAR)
@find . \( -name '*.app.src' -o \
-name '*.erl' -o \
-name '*.hrl' -o \
-name 'rebar.config' -o \
-name '*.eterm' -o \
-name '*.escript' \) \
-not -path '*/_build/*' \
-not -path '*/deps/*' \
-not -path '*/_checkouts/*' \
-type f \
| xargs $(SCRIPTS)/erlfmt -w
@$(SCRIPTS)/erlfmt -w 'apps/emqx/rebar.config.script'
@$(SCRIPTS)/erlfmt -w 'elvis.config'
@$(SCRIPTS)/erlfmt -w 'apps/*/{src,include,priv,test,integration_test}/**/*.{erl,hrl,app.src,eterm}'
@$(SCRIPTS)/erlfmt -w '**/*.escript' --exclude-files '_build/**'
@$(SCRIPTS)/erlfmt -w '**/rebar.config'
@$(SCRIPTS)/erlfmt -w 'rebar.config.erl'
@$(SCRIPTS)/erlfmt -w 'bin/nodetool'
@mix format

2
NOTICE
View File

@ -1,5 +1,5 @@
EMQX, highly scalable, highly available distributed MQTT messaging platform for IoT.
Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
This product contains code developed at EMQ Technologies Co., Ltd.
Visit https://www.emqx.com to learn more.

View File

@ -1,12 +1,9 @@
简体中文 | [English](./README.md) | [Русский](./README-RU.md)
# EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
[![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
[![Twitter](https://img.shields.io/badge/Twitter-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
@ -88,7 +85,7 @@ EMQX Cloud docs: [docs.emqx.com/zh/cloud/latest/](https://docs.emqx.com/zh/cl
The `master` branch is the latest version 5; `main-v4.4` is the 4.4 branch.
EMQX 4.4 requires OTP 24; version 5 can be built with OTP 25 and 26.
EMQX 4.4 requires OTP 24; version 5 can be built with OTP 24 and 25.
```bash
git clone https://github.com/emqx/emqx.git

View File

@ -1,12 +1,9 @@
Русский | [简体中文](./README-CN.md) | [English](./README.md)
# Брокер EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
[![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
[![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)

View File

@ -1,12 +1,9 @@
English | [简体中文](./README-CN.md) | [Русский](./README-RU.md)
# EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
[![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
[![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
@ -100,7 +97,7 @@ The `master` branch tracks the latest version 5. For version 4.4 checkout the `m
EMQX 4.4 requires OTP 24.
EMQX 5.0 ~ 5.3 can be built with OTP 24 or 25.
EMQX 5.4 and newer can be built with OTP 25 or 26.
EMQX 5.4 and newer can be built with OTP 24 or 25.
```bash
git clone https://github.com/emqx/emqx.git

131
Windows.md Normal file
View File

@ -0,0 +1,131 @@
# Build and run EMQX on Windows
NOTE: The instructions and examples are based on Windows 10.
## Build Environment
### Visual studio for C/C++ compile and link
EMQX includes Erlang NIF (Native Implemented Function) components, implemented
in C/C++. To compile and link C/C++ libraries, the easiest way is perhaps to
install Visual Studio.
Visual Studio 2019 is used in our tests.
If you, like me (@zmstone), do not know where to start,
please follow this OTP guide:
https://github.com/erlang/otp/blob/master/HOWTO/INSTALL-WIN32.md
NOTE: To avoid surprises, you may need to add the two paths below to the `Path` environment variable
and order them before other paths.
```
C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.28.29910\bin\Hostx64\x64
C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build
```
Depending on your Visual Studio version and OS, the paths may differ.
The first path is for the rebar3 port compiler to find `cl.exe` and `link.exe`.
The second path is for CMD to set up environment variables.
### Erlang/OTP
Install Erlang/OTP 24 from https://www.erlang.org/downloads
You may need to edit the `Path` environment variable to allow running
Erlang commands such as `erl` from powershell.
To validate Erlang installation in CMD or powershell:
* Start (or restart) CMD or powershell
* Execute `erl` command to enter Erlang shell
* Evaluate Erlang expression `halt().` to exit Erlang shell.
e.g.
```
PS C:\Users\zmsto> erl
Eshell V12.2.1 (abort with ^G)
1> halt().
```
### bash
All EMQX build/run scripts are either in `bash` or `escript`.
`escript` is installed as a part of Erlang. To install a `bash`
environment in Windows, there are quite a few options.
Cygwin is what we tested with.
* Add `cygwin\bin` dir to `Path` environment variable
To do so, search for Edit environment variables in the Control Panel and
add `C:\tools\cygwin\bin` (depending on where it was installed)
to the `Path` list.
* Validate installation.
Start (or restart) the CMD or powershell console and execute `which bash`; it should
print out `/usr/bin/bash`
NOTE: Make sure cygwin's bin dir is added before `C:\Windows\system32` in `Path`,
otherwise the build scripts may end up using binaries from wsl instead of cygwin.
### Other tools
Some unix-world tools are required to build EMQX, including:
* git
* curl
* make
* cmake
* jq
* zip / unzip
We recommend using [scoop](https://scoop.sh/), or [Chocolatey](https://chocolatey.org/install) to install the tools.
When using scoop:
```
scoop install git curl make cmake jq zip unzip
```
## Build EMQX source code
* Clone the repo: `git clone https://github.com/emqx/emqx.git`
* Start CMD console
* Execute `vcvarsall.bat x86_amd64` to load environment variables
* Change to emqx directory and execute `make`
### Possible errors
* `'cl.exe' is not recognized as an internal or external command`
This error is likely because the Visual Studio executables are not in the `Path` environment variable.
To fix it, either add a path like `C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.28.29910\bin\Hostx64\x64`
to `Path`, or make sure `vcvarsall.bat x86_amd64` is executed prior to the `make` command
* `fatal error C1083: Cannot open include file: 'assert.h': No such file or directory`
If Visual Studio is installed correctly, this is likely `LIB` and `LIB_PATH` environment
variables are not set. Make sure `vcvarsall.bat x86_amd64` is executed prior to the `make` command
* `link: extra operand 'some.obj'`
This is likely due to the use of GNU `link.exe` rather than the one from Visual Studio.
Execute `link.exe --version` to inspect which one is in use. The one installed from
Visual Studio should print out `Microsoft (R) Incremental Linker`.
To fix it, Visual Studio's bin paths should be ordered prior to Cygwin's (or similar installation's)
bin paths in `Path` environment variable.
## Run EMQX
To start the EMQX broker:
Execute `_build\emqx\rel\emqx>.\bin\emqx console` or `_build\emqx\rel\emqx>.\bin\emqx start` to start EMQX.
Then execute `_build\emqx\rel\emqx>.\bin\emqx_ctl status` to check the status.
If everything works fine, it should print out
```
Node 'emqx@127.0.0.1' 4.3-beta.1 is started
Application emqx 4.3.0 is running
```

View File

@ -1,5 +1,5 @@
EMQX, highly scalable, highly available distributed MQTT messaging platform for IoT.
Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
This product contains code developed at EMQ Technologies Co., Ltd.
Visit https://www.emqx.com to learn more.

View File

@ -1,20 +0,0 @@
## EMQX provides support for two primary log handlers: `file` and `console`, with an additional `audit` handler specifically designed to always direct logs to files.
## The system's default log handling behavior can be configured via the environment variable `EMQX_DEFAULT_LOG_HANDLER`, which accepts the following settings:
##
## - `file`: Directs log output exclusively to files.
## - `console`: Channels log output solely to the console.
##
## It's noteworthy that `EMQX_DEFAULT_LOG_HANDLER` is set to `file` when EMQX is initiated via systemd `emqx.service` file.
## In scenarios outside systemd initiation, `console` serves as the default log handler.
## Read more about configs here: {{ emqx_configuration_doc_log }}
log {
# file {
# level = warning
# }
# console {
# level = warning
# }
}

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -45,10 +45,6 @@
).
-define(assertReceive(PATTERN, TIMEOUT),
?assertReceive(PATTERN, TIMEOUT, #{})
).
-define(assertReceive(PATTERN, TIMEOUT, EXTRA),
(fun() ->
receive
X__V = PATTERN -> X__V
@ -58,8 +54,7 @@
{module, ?MODULE},
{line, ?LINE},
{expression, (??PATTERN)},
{mailbox, ?drainMailbox()},
{extra_info, EXTRA}
{mailbox, ?drainMailbox()}
]}
)
end
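As a usage illustration (not part of the diff), a test could rely on this macro roughly as in the sketch below; the message shape and timeout are made-up values, and the macro is assumed to be in scope via the asserts header shown above.

```erlang
%% Hedged sketch: assumes the ?assertReceive macro above is in scope
%% in the test module.
t_assert_receive_sketch() ->
    self() ! {publish, #{topic => <<"t/1">>, payload => <<"hello">>}},
    %% Returns the received message, or fails with the error info built
    %% above (module, line, mailbox, ...) after 1000 ms.
    _Msg = ?assertReceive({publish, #{topic := _}}, 1000),
    ok.
```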

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -65,20 +65,9 @@
%% Route
%%--------------------------------------------------------------------
-record(share_dest, {
session_id :: emqx_session:session_id(),
group :: emqx_types:group()
}).
-record(route, {
topic :: binary(),
dest ::
node()
| {binary(), node()}
| emqx_session:session_id()
%% One session can also have multiple subscriptions to the same topic through different groups
| #share_dest{}
| emqx_external_broker:dest()
dest :: node() | {binary(), node()} | emqx_session:session_id()
}).
%%--------------------------------------------------------------------
@ -99,17 +88,14 @@
%%--------------------------------------------------------------------
-record(banned, {
who :: emqx_types:banned_who(),
who ::
{clientid, binary()}
| {peerhost, inet:ip_address()}
| {username, binary()},
by :: binary(),
reason :: binary(),
at :: integer(),
until :: integer()
}).
%%--------------------------------------------------------------------
%% Configurations
%%--------------------------------------------------------------------
-define(KIND_REPLICATE, replicate).
-define(KIND_INITIATE, initiate).
-endif.
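Purely to illustrate the record shapes in this hunk (the `#share_dest{}` destination appears on only one side of the diff), a route entry might be built as below; the topic, group, and session id are illustrative values.

```erlang
%% Hedged sketch: the plain and shared-subscription destination shapes
%% of the #route{} record defined above. Values are illustrative only.
route_sketch(SessionId) ->
    PlainDest = #route{topic = <<"t/1">>, dest = node()},
    SharedDest = #route{
        topic = <<"t/1">>,
        dest = #share_dest{session_id = SessionId, group = <<"g1">>}
    },
    {PlainDest, SharedDest}.
```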

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -23,20 +23,11 @@
-define(CHAN_INFO_TAB, emqx_channel_info).
-define(CHAN_LIVE_TAB, emqx_channel_live).
%% Mria table for session registration.
%% Mria/Mnesia Tables for channel management.
-define(CHAN_REG_TAB, emqx_channel_registry).
-define(T_KICK, 5_000).
-define(T_GET_INFO, 5_000).
-define(T_TAKEOVER, 15_000).
-define(CM_POOL, emqx_cm_pool).
%% Registered sessions.
-record(channel, {
chid :: emqx_types:clientid() | '_',
%% pid field is extended in 5.6.0 to support recording unregistration timestamp.
pid :: pid() | non_neg_integer() | '$1'
}).
-endif.
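To illustrate the `#channel{}` registry entry above (the timestamp form is the 5.6.0 extension mentioned in the comment on one side of this diff), a sketch follows; values are illustrative.

```erlang
%% Hedged sketch: the two shapes the registry entry above can take.
channel_sketch(ConnPid) ->
    Live = #channel{chid = <<"client-1">>, pid = ConnPid},
    %% After unregistration the pid slot may hold a timestamp instead.
    Tombstone = #channel{chid = <<"client-1">>, pid = erlang:system_time(millisecond)},
    {Live, Tombstone}.
```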

View File

@ -1,35 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022, 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc This header contains definitions of durable session metadata
%% keys, that can be consumed by the external code.
-ifndef(EMQX_DURABLE_SESSION_META_HRL).
-define(EMQX_DURABLE_SESSION_META_HRL, true).
%% Session metadata keys:
-define(created_at, created_at).
-define(last_alive_at, last_alive_at).
-define(expiry_interval, expiry_interval).
%% Unique integer used to create unique identities:
-define(last_id, last_id).
%% Connection info (relevant for the dashboard):
-define(peername, peername).
-define(will_message, will_message).
-define(clientinfo, clientinfo).
-define(protocol, protocol).
-define(offline_info, offline_info).
-endif.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2021-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -25,8 +25,6 @@
-define(HP_AUTHN, 970).
-define(HP_AUTHZ, 960).
-define(HP_SYS_MSGS, 950).
-define(HP_SCHEMA_VALIDATION, 945).
-define(HP_MESSAGE_TRANSFORMATION, 943).
-define(HP_TOPIC_METRICS, 940).
-define(HP_RETAINER, 930).
-define(HP_AUTO_SUB, 920).
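For context on how these priority constants are consumed, a plugin-style hook registration might look like the sketch below; it assumes the `emqx_hooks:add/3` API and a `'message.publish'` callback returning `{ok, Message}`, and the module name is hypothetical.

```erlang
%% Hedged sketch: registering a callback between ?HP_RETAINER (930) and
%% ?HP_AUTO_SUB (920). Higher priorities run earlier on the hook chain.
-module(my_hooks_sketch).
-export([load/0, on_message_publish/1]).

load() ->
    emqx_hooks:add('message.publish', {?MODULE, on_message_publish, []}, 925).

on_message_publish(Message) ->
    %% Hand the (possibly modified) message on to later callbacks.
    {ok, Message}.
```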

View File

@ -1,258 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_METRICS_HRL).
-define(EMQX_METRICS_HRL, true).
%% Bytes sent and received
-define(BYTES_METRICS, [
{counter, 'bytes.received', <<"Number of bytes received ">>},
{counter, 'bytes.sent', <<"Number of bytes sent on this connection">>}
]).
%% Packets sent and received
-define(PACKET_METRICS, [
{counter, 'packets.received', <<"Number of received packet">>},
{counter, 'packets.sent', <<"Number of sent packet">>},
{counter, 'packets.connect.received', <<"Number of received CONNECT packet">>},
{counter, 'packets.connack.sent', <<"Number of sent CONNACK packet">>},
{counter, 'packets.connack.error',
<<"Number of received CONNECT packet with unsuccessful connections">>},
{counter, 'packets.connack.auth_error',
<<"Number of received CONNECT packet with failed Authentication">>},
{counter, 'packets.publish.received', <<"Number of received PUBLISH packet">>},
%% PUBLISH packets sent
{counter, 'packets.publish.sent', <<"Number of sent PUBLISH packet">>},
%% PUBLISH packet_id inuse
{counter, 'packets.publish.inuse',
<<"Number of received PUBLISH packet with occupied identifiers">>},
%% PUBLISH failed for error
{counter, 'packets.publish.error',
<<"Number of received PUBLISH packet that cannot be published">>},
%% PUBLISH failed for auth error
{counter, 'packets.publish.auth_error',
<<"Number of received PUBLISH packets with failed the Authorization check">>},
%% PUBLISH(QoS2) packets dropped
{counter, 'packets.publish.dropped',
<<"Number of messages discarded due to the receiving limit">>},
%% PUBACK packets received
{counter, 'packets.puback.received', <<"Number of received PUBACK packet">>},
%% PUBACK packets sent
{counter, 'packets.puback.sent', <<"Number of sent PUBACK packet">>},
%% PUBACK packet_id inuse
{counter, 'packets.puback.inuse',
<<"Number of received PUBACK packet with occupied identifiers">>},
%% PUBACK packets missed
{counter, 'packets.puback.missed', <<"Number of received packet with identifiers.">>},
%% PUBREC packets received
{counter, 'packets.pubrec.received', <<"Number of received PUBREC packet">>},
%% PUBREC packets sent
{counter, 'packets.pubrec.sent', <<"Number of sent PUBREC packet">>},
%% PUBREC packet_id inuse
{counter, 'packets.pubrec.inuse',
<<"Number of received PUBREC packet with occupied identifiers">>},
%% PUBREC packets missed
{counter, 'packets.pubrec.missed',
<<"Number of received PUBREC packet with unknown identifiers">>},
%% PUBREL packets received
{counter, 'packets.pubrel.received', <<"Number of received PUBREL packet">>},
%% PUBREL packets sent
{counter, 'packets.pubrel.sent', <<"Number of sent PUBREL packet">>},
%% PUBREL packets missed
{counter, 'packets.pubrel.missed',
<<"Number of received PUBREC packet with unknown identifiers">>},
%% PUBCOMP packets received
{counter, 'packets.pubcomp.received', <<"Number of received PUBCOMP packet">>},
%% PUBCOMP packets sent
{counter, 'packets.pubcomp.sent', <<"Number of sent PUBCOMP packet">>},
%% PUBCOMP packet_id inuse
{counter, 'packets.pubcomp.inuse',
<<"Number of received PUBCOMP packet with occupied identifiers">>},
%% PUBCOMP packets missed
{counter, 'packets.pubcomp.missed', <<"Number of missed PUBCOMP packet">>},
%% SUBSCRIBE Packets received
{counter, 'packets.subscribe.received', <<"Number of received SUBSCRIBE packet">>},
%% SUBSCRIBE error
{counter, 'packets.subscribe.error',
<<"Number of received SUBSCRIBE packet with failed subscriptions">>},
%% SUBSCRIBE failed for not auth
{counter, 'packets.subscribe.auth_error',
<<"Number of received SUBACK packet with failed Authorization check">>},
%% SUBACK packets sent
{counter, 'packets.suback.sent', <<"Number of sent SUBACK packet">>},
%% UNSUBSCRIBE Packets received
{counter, 'packets.unsubscribe.received', <<"Number of received UNSUBSCRIBE packet">>},
%% UNSUBSCRIBE error
{counter, 'packets.unsubscribe.error',
<<"Number of received UNSUBSCRIBE packet with failed unsubscriptions">>},
%% UNSUBACK Packets sent
{counter, 'packets.unsuback.sent', <<"Number of sent UNSUBACK packet">>},
%% PINGREQ packets received
{counter, 'packets.pingreq.received', <<"Number of received PINGREQ packet">>},
%% PINGRESP Packets sent
{counter, 'packets.pingresp.sent', <<"Number of sent PUBRESP packet">>},
%% DISCONNECT Packets received
{counter, 'packets.disconnect.received', <<"Number of received DISCONNECT packet">>},
%% DISCONNECT Packets sent
{counter, 'packets.disconnect.sent', <<"Number of sent DISCONNECT packet">>},
%% Auth Packets received
{counter, 'packets.auth.received', <<"Number of received AUTH packet">>},
%% Auth Packets sent
{counter, 'packets.auth.sent', <<"Number of sent AUTH packet">>}
]).
%% Messages sent/received and pubsub
-define(MESSAGE_METRICS, [
%% All Messages received
{counter, 'messages.received', <<
"Number of messages received from the client, equal to the sum of "
"messages.qos0.received, messages.qos1.received and messages.qos2.received"
>>},
%% All Messages sent
{counter, 'messages.sent', <<
"Number of messages sent to the client, equal to the sum of "
"messages.qos0.sent, messages.qos1.sent and messages.qos2.sent"
>>},
%% QoS0 Messages received
{counter, 'messages.qos0.received', <<"Number of QoS 0 messages received from clients">>},
%% QoS0 Messages sent
{counter, 'messages.qos0.sent', <<"Number of QoS 0 messages sent to clients">>},
%% QoS1 Messages received
{counter, 'messages.qos1.received', <<"Number of QoS 1 messages received from clients">>},
%% QoS1 Messages sent
{counter, 'messages.qos1.sent', <<"Number of QoS 1 messages sent to clients">>},
%% QoS2 Messages received
{counter, 'messages.qos2.received', <<"Number of QoS 2 messages received from clients">>},
%% QoS2 Messages sent
{counter, 'messages.qos2.sent', <<"Number of QoS 2 messages sent to clients">>},
%% PubSub Metrics
%% Messages Publish
{counter, 'messages.publish',
<<"Number of messages published in addition to system messages">>},
%% Messages dropped due to no subscribers
{counter, 'messages.dropped',
<<"Number of messages dropped before forwarding to the subscription process">>},
%% Messages that failed validations
{counter, 'messages.validation_failed', <<"Number of message validation failed">>},
%% Messages that passed validations
{counter, 'messages.validation_succeeded', <<"Number of message validation successful">>},
%% % Messages that failed transformations
{counter, 'messages.transformation_failed', <<"Number of message transformation failed">>},
%% % Messages that passed transformations
{counter, 'messages.transformation_succeeded',
<<"Number fo message transformation succeeded">>},
%% QoS2 Messages expired
{counter, 'messages.dropped.await_pubrel_timeout',
<<"Number of messages dropped due to waiting PUBREL timeout">>},
%% Messages dropped
{counter, 'messages.dropped.no_subscribers',
<<"Number of messages dropped due to no subscribers">>},
%% Messages forward
{counter, 'messages.forward', <<"Number of messages forwarded to other nodes">>},
%% Messages delayed
{counter, 'messages.delayed', <<"Number of delay-published messages">>},
%% Messages delivered
{counter, 'messages.delivered',
<<"Number of messages forwarded to the subscription process internally">>},
%% Messages acked
{counter, 'messages.acked', <<"Number of received PUBACK and PUBREC packet">>},
%% Messages persistently stored
{counter, 'messages.persisted', <<"Number of message persisted">>}
]).
%% Delivery metrics
-define(DELIVERY_METRICS, [
%% All Dropped during delivery
{counter, 'delivery.dropped', <<"Total number of discarded messages when sending">>},
%% Dropped due to no_local
{counter, 'delivery.dropped.no_local', <<
"Number of messages that were dropped due to the No Local subscription "
"option when sending"
>>},
%% Dropped due to message too large
{counter, 'delivery.dropped.too_large', <<
"The number of messages that were dropped because the length exceeded "
"the limit when sending"
>>},
%% Dropped qos0 message
{counter, 'delivery.dropped.qos0_msg', <<
"Number of messages with QoS 0 that were dropped because the message "
"queue was full when sending"
>>},
%% Dropped due to queue full
{counter, 'delivery.dropped.queue_full', <<
"Number of messages with a non-zero QoS that were dropped because the "
"message queue was full when sending"
>>},
%% Dropped due to expired
{counter, 'delivery.dropped.expired',
<<"Number of messages dropped due to message expiration on sending">>}
]).
%% Client Lifecircle metrics
-define(CLIENT_METRICS, [
{counter, 'client.connect', <<"Number of client connections">>},
{counter, 'client.connack', <<"Number of CONNACK packet sent">>},
{counter, 'client.connected', <<"Number of successful client connected">>},
{counter, 'client.authenticate', <<"Number of client Authentication">>},
{counter, 'client.auth.anonymous', <<"Number of clients who log in anonymously">>},
{counter, 'client.authorize', <<"Number of Authorization rule checks">>},
{counter, 'client.subscribe', <<"Number of client subscriptions">>},
{counter, 'client.unsubscribe', <<"Number of client unsubscriptions">>},
{counter, 'client.disconnected', <<"Number of client disconnects">>}
]).
%% Session Lifecircle metrics
-define(SESSION_METRICS, [
{counter, 'session.created', <<"Number of sessions created">>},
{counter, 'session.resumed',
<<"Number of sessions resumed because Clean Session or Clean Start is false">>},
{counter, 'session.takenover',
<<"Number of sessions takenover because Clean Session or Clean Start is false">>},
{counter, 'session.discarded',
<<"Number of sessions dropped because Clean Session or Clean Start is true">>},
{counter, 'session.terminated', <<"Number of terminated sessions">>}
]).
%% Statistic metrics for ACL checking
-define(STASTS_ACL_METRICS, [
{counter, 'authorization.allow', <<"Number of Authorization allow">>},
{counter, 'authorization.deny', <<"Number of Authorization deny">>},
{counter, 'authorization.cache_hit', <<"Number of Authorization hits the cache">>},
{counter, 'authorization.cache_miss', <<"Number of Authorization cache missing">>}
]).
%% Statistic metrics for auth checking
-define(STASTS_AUTHN_METRICS, [
{counter, 'authentication.success', <<"Number of successful client Authentication">>},
{counter, 'authentication.success.anonymous',
<<"Number of successful client Authentication due to anonymous">>},
{counter, 'authentication.failure', <<"Number of failed client Authentication">>}
]).
%% Overload protection counters
-define(OLP_METRICS, [
{counter, 'overload_protection.delay.ok', <<"Number of overload protection delayed">>},
{counter, 'overload_protection.delay.timeout',
<<"Number of overload protection delay timeout">>},
{counter, 'overload_protection.hibernation', <<"Number of overload protection hibernation">>},
{counter, 'overload_protection.gc', <<"Number of overload protection garbage collection">>},
{counter, 'overload_protection.new_conn',
<<"Number of overload protection close new incoming connection">>}
]).
-endif.
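For orientation, the counters declared in this header are typically bumped and read through the emqx_metrics API; a rough sketch (assuming `emqx_metrics:inc/1` and `emqx_metrics:val/1`) follows.

```erlang
%% Hedged sketch: incrementing and reading counters declared above.
metrics_sketch() ->
    ok = emqx_metrics:inc('packets.publish.received'),
    ok = emqx_metrics:inc('messages.qos1.received'),
    emqx_metrics:val('packets.publish.received').
```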

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -673,6 +673,7 @@ end).
-define(SHARE, "$share").
-define(QUEUE, "$queue").
-define(SHARE(Group, Topic), emqx_topic:join([<<?SHARE>>, Group, Topic])).
-define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}).
@ -683,7 +684,6 @@ end).
-define(FRAME_PARSE_ERROR, frame_parse_error).
-define(FRAME_SERIALIZE_ERROR, frame_serialize_error).
-define(THROW_FRAME_ERROR(Reason), erlang:throw({?FRAME_PARSE_ERROR, Reason})).
-define(THROW_SERIALIZE_ERROR(Reason), erlang:throw({?FRAME_SERIALIZE_ERROR, Reason})).
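As a small illustration of the `?SHARE/2` macro added in the first hunk above (assuming `emqx_topic:join/1` joins segments with "/"), a shared-subscription filter would be built like this:

```erlang
%% Hedged sketch: what ?SHARE(<<"g1">>, <<"t/1">>) expands to.
shared_topic_sketch() ->
    <<"$share/g1/t/1">> = emqx_topic:join([<<"$share">>, <<"g1">>, <<"t/1">>]),
    ok.
```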

View File

@ -1,29 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_PERSISTENT_MESSAGE_HRL).
-define(EMQX_PERSISTENT_MESSAGE_HRL, true).
-define(PERSISTENT_MESSAGE_DB, messages).
-define(PERSISTENCE_ENABLED, emqx_message_persistence_enabled).
-define(WITH_DURABILITY_ENABLED(DO),
case is_persistence_enabled() of
true -> DO;
false -> {skipped, disabled}
end
).
-endif.
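To show how the `?WITH_DURABILITY_ENABLED/1` wrapper above is meant to be used, here is a hedged sketch; it assumes an `emqx_ds:store_batch/2` call and that `is_persistence_enabled/0` is defined (or imported) in the calling module, as the macro requires.

```erlang
%% Hedged sketch: skip the durable-storage write when persistence is off.
persist_sketch(Msg) ->
    ?WITH_DURABILITY_ENABLED(
        emqx_ds:store_batch(?PERSISTENT_MESSAGE_DB, [Msg])
    ).
```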

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -28,19 +28,14 @@
%% cert
-define(VAR_CERT_SUBJECT, "cert_subject").
-define(VAR_CERT_CN_NAME, "cert_common_name").
-define(VAR_CERT_PEM, "cert_pem").
-define(PH_CERT_SUBJECT, ?PH(?VAR_CERT_SUBJECT)).
-define(PH_CERT_CN_NAME, ?PH(?VAR_CERT_CN_NAME)).
-define(PH_CERT_PEM, ?PH(?VAR_CERT_PEM)).
%% MQTT/Gateway
%% MQTT
-define(VAR_PASSWORD, "password").
-define(VAR_CLIENTID, "clientid").
-define(VAR_USERNAME, "username").
-define(VAR_TOPIC, "topic").
-define(VAR_ENDPOINT_NAME, "endpoint_name").
-define(VAR_NS_CLIENT_ATTRS, {var_namespace, "client_attrs"}).
-define(PH_PASSWORD, ?PH(?VAR_PASSWORD)).
-define(PH_CLIENTID, ?PH(?VAR_CLIENTID)).
-define(PH_FROM_CLIENTID, ?PH("from_clientid")).
@ -94,7 +89,7 @@
-define(PH_NODE, ?PH("node")).
-define(PH_REASON, ?PH("reason")).
-define(PH_ENDPOINT_NAME, ?PH(?VAR_ENDPOINT_NAME)).
-define(PH_ENDPOINT_NAME, ?PH("endpoint_name")).
-define(VAR_RETAIN, "retain").
-define(PH_RETAIN, ?PH(?VAR_RETAIN)).

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2021-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -32,7 +32,10 @@
%% `apps/emqx/src/bpapi/README.md'
%% Opensource edition
-define(EMQX_RELEASE_CE, "5.8.0-alpha.1").
-define(EMQX_RELEASE_CE, "5.4.1").
%% Enterprise edition
-define(EMQX_RELEASE_EE, "5.8.0-alpha.1").
-define(EMQX_RELEASE_EE, "5.4.1").
%% The HTTP API version
-define(EMQX_API_VERSION, "5.0").

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -21,9 +21,4 @@
-define(TOMBSTONE_CONFIG_CHANGE_REQ, mark_it_for_deletion).
-define(CONFIG_NOT_FOUND_MAGIC, '$0tFound').
%%--------------------------------------------------------------------
%% EE injections
%%--------------------------------------------------------------------
-define(EMQX_SSL_FUN_MFA(Name), {emqx_ssl_fun_mfa, Name}).
-endif.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -20,11 +20,4 @@
-define(IS_SESSION_IMPL_MEM(S), (is_tuple(S) andalso element(1, S) =:= session)).
-define(IS_SESSION_IMPL_DS(S), (is_map_key(id, S))).
%% (Erlang) messages that a connection process should forward to the
%% session handler.
-record(session_message, {
message :: term()
}).
-define(session_message(MSG), #session_message{message = MSG}).
-endif.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.

View File

@ -1,28 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2018-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_SHARED_SUB_HRL).
-define(EMQX_SHARED_SUB_HRL, true).
%% Mnesia table for shared sub message routing
-define(SHARED_SUBSCRIPTION, emqx_shared_subscription).
%% ETS tables for Shared PubSub
-define(SHARED_SUBSCRIBER, emqx_shared_subscriber).
-define(ALIVE_SHARED_SUBSCRIBERS, emqx_alive_shared_subscribers).
-define(SHARED_SUBS_ROUND_ROBIN_COUNTER, emqx_shared_subscriber_round_robin_counter).
-endif.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -20,33 +20,17 @@
-record(?TRACE, {
name :: binary() | undefined | '_',
type :: clientid | topic | ip_address | ruleid | undefined | '_',
type :: clientid | topic | ip_address | undefined | '_',
filter ::
emqx_types:topic()
| emqx_types:clientid()
| emqx_trace:ip_address()
| emqx_trace:ruleid()
| undefined
| '_',
emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address() | undefined | '_',
enable = true :: boolean() | '_',
payload_encode = text :: hex | text | hidden | '_',
extra = #{formatter => text} :: #{formatter => text | json} | '_',
extra = #{} :: map() | '_',
start_at :: integer() | undefined | '_',
end_at :: integer() | undefined | '_'
}).
-record(emqx_trace_format_func_data, {
function :: fun((any()) -> any()),
data :: any()
}).
-define(SHARD, ?COMMON_SHARD).
-define(MAX_SIZE, 30).
-define(EMQX_TRACE_STOP_ACTION(REASON),
{unrecoverable_error, {action_stopped_after_template_rendering, REASON}}
).
-define(EMQX_TRACE_STOP_ACTION_MATCH, ?EMQX_TRACE_STOP_ACTION(_)).
-endif.
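As a shape reference for the trace record in this hunk (`?TRACE` is the record-name macro from the surrounding header; the `ruleid` type and `formatter` field appear on only one side of the diff), a client-id trace entry could be constructed as below; values are illustrative.

```erlang
%% Hedged sketch: a clientid trace entry matching the record above.
trace_sketch() ->
    Now = erlang:system_time(second),
    #?TRACE{
        name = <<"debug_one_client">>,
        type = clientid,
        filter = <<"my-client-id">>,
        start_at = Now,
        end_at = Now + 600
    }.
```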

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -17,7 +17,6 @@
%% HTTP API Auth
-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
-define(BAD_API_KEY_OR_SECRET, 'BAD_API_KEY_OR_SECRET').
-define(API_KEY_NOT_ALLOW, 'API_KEY_NOT_ALLOW').
-define(API_KEY_NOT_ALLOW_MSG, <<"This API Key doesn't have permission to access this resource">>).
%% Bad Request
@ -86,6 +85,5 @@
{'SOURCE_ERROR', <<"Source error">>},
{'UPDATE_FAILED', <<"Update failed">>},
{'REST_FAILED', <<"Reset source or config failed">>},
{'CLIENT_NOT_RESPONSE', <<"Client not responding">>},
{'UNSUPPORTED_MEDIA_TYPE', <<"Unsupported media type">>}
{'CLIENT_NOT_RESPONSE', <<"Client not responding">>}
]).

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2018-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2018-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -30,7 +30,7 @@
logger:log(
Level,
(Data),
maps:merge(Meta, #{
(Meta#{
mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY},
line => ?LINE
})
@ -40,40 +40,14 @@
end
).
%% NOTE: do not forget to use atom for msg and add every used msg to
%% the default value of `log.throttling.msgs` list.
-define(SLOG_THROTTLE(Level, Data),
?SLOG_THROTTLE(Level, Data, #{})
).
-define(SLOG_THROTTLE(Level, Data, Meta),
?SLOG_THROTTLE(Level, undefined, Data, Meta)
).
-define(SLOG_THROTTLE(Level, UniqueKey, Data, Meta),
case logger:allow(Level, ?MODULE) of
true ->
(fun(#{msg := __Msg} = __Data) ->
case emqx_log_throttler:allow(__Msg, UniqueKey) of
true ->
logger:log(Level, __Data, Meta);
false ->
?_DO_TRACE(Level, __Msg, maps:merge(__Data, Meta))
end
end)(
Data
);
false ->
ok
end
).
-define(AUDIT_HANDLER, emqx_audit).
-define(TRACE_FILTER, emqx_trace_filter).
-define(OWN_KEYS, [level, filters, filter_default, handlers]).
%% Internal macro
-define(_DO_TRACE(Tag, Msg, Meta),
-define(TRACE(Tag, Msg, Meta), ?TRACE(debug, Tag, Msg, Meta)).
%% Only evaluate when necessary
-define(TRACE(Level, Tag, Msg, Meta), begin
case persistent_term:get(?TRACE_FILTER, []) of
[] -> ok;
%% We can't bind filter list to a variable because we pollute the calling scope with it.
@ -81,17 +55,10 @@
%% because this adds overhead to the happy path.
%% So evaluate `persistent_term:get` twice.
_ -> emqx_trace:log(persistent_term:get(?TRACE_FILTER, []), Msg, (Meta)#{trace_tag => Tag})
end
).
-define(TRACE(Tag, Msg, Meta), ?TRACE(debug, Tag, Msg, Meta)).
%% Only evaluate when necessary
-define(TRACE(Level, Tag, Msg, Meta), begin
?_DO_TRACE(Tag, Msg, Meta),
end,
?SLOG(
Level,
(Meta)#{msg => Msg, tag => Tag},
(emqx_trace_formatter:format_meta_map(Meta))#{msg => Msg, tag => Tag},
#{is_trace => false}
)
end).
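For reference, callers use these macros roughly as in the sketch below; the message atoms and metadata are illustrative, and the throttled variant assumes the msg atom is listed in `log.throttling.msgs`, as the comment on one side of the diff notes.

```erlang
%% Hedged sketch: structured and throttled structured logging.
log_sketch(ClientId) ->
    ?SLOG(info, #{msg => client_connected, clientid => ClientId}),
    ?SLOG_THROTTLE(warning, #{msg => dropped_msg_due_to_mqueue_is_full, clientid => ClientId}).
```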

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2019-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2019-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@ -14,7 +14,7 @@
%% limitations under the License.
%%--------------------------------------------------------------------
-type option(T) :: undefined | T.
-type maybe(T) :: undefined | T.
-type startlink_ret() :: {ok, pid()} | ignore | {error, term()}.

View File

@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------
-module(emqx_persistent_session_ds_SUITE).
@ -18,23 +18,15 @@
%% CT boilerplate
%%------------------------------------------------------------------------------
suite() ->
[{timetrap, {seconds, 60}}].
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
case emqx_ds_test_helpers:skip_if_norepl() of
false ->
TCApps = emqx_cth_suite:start(
app_specs(),
#{work_dir => emqx_cth_suite:work_dir(Config)}
),
[{tc_apps, TCApps} | Config];
Yes ->
Yes
end.
TCApps = emqx_cth_suite:start(
app_specs(),
#{work_dir => emqx_cth_suite:work_dir(Config)}
),
[{tc_apps, TCApps} | Config].
end_per_suite(Config) ->
TCApps = ?config(tc_apps, Config),
@ -59,12 +51,12 @@ init_per_testcase(TestCase, Config) when
init_per_testcase(t_session_gc = TestCase, Config) ->
Opts = #{
n => 3,
roles => [core, core, core],
roles => [core, core, replicant],
extra_emqx_conf =>
"\n durable_sessions {"
"\n heartbeat_interval = 500ms "
"\n session_gc_interval = 1s "
"\n session_gc_batch_size = 2 "
"\n session_persistence {"
"\n last_alive_update_interval = 500ms "
"\n session_gc_interval = 2s "
"\n session_gc_batch_size = 1 "
"\n }"
},
Cluster = cluster(Opts),
@ -90,15 +82,13 @@ end_per_testcase(TestCase, Config) when
Nodes = ?config(nodes, Config),
emqx_common_test_helpers:call_janitor(60_000),
ok = emqx_cth_cluster:stop(Nodes),
snabbkaffe:stop(),
ok;
end_per_testcase(_TestCase, _Config) ->
emqx_common_test_helpers:call_janitor(60_000),
snabbkaffe:stop(),
ok.
%%------------------------------------------------------------------------------
%% Helper functions
%% Helper fns
%%------------------------------------------------------------------------------
cluster(#{n := N} = Opts) ->
@ -123,10 +113,10 @@ app_specs() ->
app_specs(_Opts = #{}).
app_specs(Opts) ->
DefaultEMQXConf = "durable_sessions {enable = true, renew_streams_interval = 1s}",
ExtraEMQXConf = maps:get(extra_emqx_conf, Opts, ""),
[
{emqx, DefaultEMQXConf ++ ExtraEMQXConf}
emqx_durable_storage,
{emqx, "session_persistence = {enable = true}" ++ ExtraEMQXConf}
].
get_mqtt_port(Node, Type) ->
@ -140,30 +130,29 @@ wait_nodeup(Node) ->
pong = net_adm:ping(Node)
).
wait_gen_rpc_down(_NodeSpec = #{apps := Apps}) ->
#{override_env := Env} = proplists:get_value(gen_rpc, Apps),
Port = proplists:get_value(tcp_server_port, Env),
?retry(
_Sleep0 = 500,
_Attempts0 = 50,
false = emqx_common_test_helpers:is_tcp_server_available("127.0.0.1", Port)
).
start_client(Opts0 = #{}) ->
Defaults = #{
port => 1883,
proto_ver => v5,
properties => #{'Session-Expiry-Interval' => 300}
},
Opts = emqx_utils_maps:deep_merge(Defaults, Opts0),
?tp(notice, "starting client", Opts),
{ok, Client} = emqtt:start_link(maps:to_list(Opts)),
unlink(Client),
Opts = maps:to_list(emqx_utils_maps:deep_merge(Defaults, Opts0)),
ct:pal("starting client with opts:\n ~p", [Opts]),
{ok, Client} = emqtt:start_link(Opts),
on_exit(fun() -> catch emqtt:stop(Client) end),
Client.
start_connect_client(Opts = #{}) ->
Client = start_client(Opts),
?assertMatch({ok, _}, emqtt:connect(Client)),
Client.
mk_clientid(Prefix, ID) ->
iolist_to_binary(io_lib:format("~p/~p", [Prefix, ID])).
restart_node(Node, NodeSpec) ->
?tp(will_restart_node, #{}),
emqx_cth_cluster:restart(NodeSpec),
emqx_cth_cluster:restart(Node, NodeSpec),
wait_nodeup(Node),
?tp(restarted_node, #{}),
ok.
@ -172,44 +161,58 @@ is_persistent_connect_opts(#{properties := #{'Session-Expiry-Interval' := EI}})
EI > 0.
list_all_sessions(Node) ->
erpc:call(Node, emqx_persistent_session_ds_state, list_sessions, []).
erpc:call(Node, emqx_persistent_session_ds, list_all_sessions, []).
list_all_subscriptions(Node) ->
Sessions = list_all_sessions(Node),
lists:flatmap(
fun(ClientId) ->
#{s := #{subscriptions := Subs}} = erpc:call(
Node, emqx_persistent_session_ds, print_session, [ClientId]
),
maps:to_list(Subs)
end,
Sessions
).
erpc:call(Node, emqx_persistent_session_ds, list_all_subscriptions, []).
list_all_pubranges(Node) ->
erpc:call(Node, emqx_persistent_session_ds, list_all_pubranges, []).
session_open(Node, ClientId) ->
ClientInfo = #{},
ConnInfo = #{peername => {undefined, undefined}, proto_name => <<"MQTT">>, proto_ver => 5},
WillMsg = undefined,
erpc:call(
Node,
emqx_persistent_session_ds,
session_open,
[ClientId, ClientInfo, ConnInfo, WillMsg]
).
force_last_alive_at(ClientId, Time) ->
{ok, S0} = emqx_persistent_session_ds_state:open(ClientId),
S = emqx_persistent_session_ds_state:set_last_alive_at(Time, S0),
_ = emqx_persistent_session_ds_state:commit(S),
ok.
prop_only_cores_run_gc(CoreNodes) ->
{"only core nodes run gc", fun(Trace) -> ?MODULE:prop_only_cores_run_gc(Trace, CoreNodes) end}.
prop_only_cores_run_gc(Trace, CoreNodes) ->
GCNodes = lists:usort([
N
|| #{
?snk_kind := K,
?snk_meta := #{node := N}
} <- Trace,
lists:member(K, [ds_session_gc, ds_session_gc_lock_taken]),
N =/= node()
]),
?assertEqual(lists:usort(CoreNodes), GCNodes).
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
t_non_persistent_session_subscription(_Config) ->
ClientId = atom_to_binary(?FUNCTION_NAME),
SubTopicFilter = <<"t/#">>,
?check_trace(
begin
?tp(notice, "starting", #{}),
Client = start_client(#{
clientid => ClientId,
properties => #{'Session-Expiry-Interval' => 0}
}),
{ok, _} = emqtt:connect(Client),
?tp(notice, "subscribing", #{}),
{ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client, SubTopicFilter, qos2),
ok = emqtt:stop(Client),
ok
end,
fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
?assertEqual([], ?of_kind(ds_session_subscription_added, Trace)),
ok
end
),
ok.
t_session_subscription_idempotency(Config) ->
[Node1Spec | _] = ?config(node_specs, Config),
[Node1] = ?config(nodes, Config),
@ -217,7 +220,6 @@ t_session_subscription_idempotency(Config) ->
SubTopicFilter = <<"t/+">>,
ClientId = <<"myclientid">>,
?check_trace(
#{timetrap => 30_000},
begin
?force_ordering(
#{?snk_kind := persistent_session_ds_subscription_added},
@ -258,8 +260,11 @@ t_session_subscription_idempotency(Config) ->
ok
end,
fun(_Trace) ->
Session = session_open(Node1, ClientId),
fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
Session = erpc:call(
Node1, emqx_persistent_session_ds, session_open, [ClientId, _ConnInfo = #{}]
),
?assertMatch(
#{SubTopicFilter := #{}},
emqx_session:info(subscriptions, Session)
@ -276,11 +281,11 @@ t_session_unsubscription_idempotency(Config) ->
SubTopicFilter = <<"t/+">>,
ClientId = <<"myclientid">>,
?check_trace(
#{timetrap => 30_000},
begin
?force_ordering(
#{
?snk_kind := persistent_session_ds_subscription_delete
?snk_kind := persistent_session_ds_subscription_delete,
?snk_span := {complete, _}
},
_NEvents0 = 1,
#{?snk_kind := will_restart_node},
@ -331,8 +336,11 @@ t_session_unsubscription_idempotency(Config) ->
ok
end,
fun(_Trace) ->
Session = session_open(Node1, ClientId),
fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
Session = erpc:call(
Node1, emqx_persistent_session_ds, session_open, [ClientId, _ConnInfo = #{}]
),
?assertEqual(
#{},
emqx_session:info(subscriptions, Session)
@ -377,7 +385,6 @@ do_t_session_discard(Params) ->
ReconnectOpts = ReconnectOpts0#{clientid => ClientId},
SubTopicFilter = <<"t/+">>,
?check_trace(
#{timetrap => 30_000},
begin
?tp(notice, "starting", #{}),
Client0 = start_client(#{
@ -395,38 +402,40 @@ do_t_session_discard(Params) ->
?retry(
_Sleep0 = 100,
_Attempts0 = 50,
#{} = emqx_persistent_session_ds_state:print_session(ClientId)
true = map_size(emqx_persistent_session_ds:list_all_streams()) > 0
),
ok = emqtt:stop(Client0),
?tp(notice, "disconnected", #{}),
?tp(notice, "reconnecting", #{}),
%% we still have the session:
?assertMatch(#{}, emqx_persistent_session_ds_state:print_session(ClientId)),
%% we still have streams
?assert(map_size(emqx_persistent_session_ds:list_all_streams()) > 0),
Client1 = start_client(ReconnectOpts),
{ok, _} = emqtt:connect(Client1),
?assertEqual([], emqtt:subscriptions(Client1)),
case is_persistent_connect_opts(ReconnectOpts) of
true ->
?assertMatch(#{}, emqx_persistent_session_ds_state:print_session(ClientId));
?assertMatch(#{ClientId := _}, emqx_persistent_session_ds:list_all_sessions());
false ->
?assertEqual(
undefined, emqx_persistent_session_ds_state:print_session(ClientId)
)
?assertEqual(#{}, emqx_persistent_session_ds:list_all_sessions())
end,
?assertEqual(#{}, emqx_persistent_session_ds:list_all_subscriptions()),
?assertEqual([], emqx_persistent_session_ds_router:topics()),
?assertEqual(#{}, emqx_persistent_session_ds:list_all_streams()),
?assertEqual(#{}, emqx_persistent_session_ds:list_all_pubranges()),
ok = emqtt:stop(Client1),
?tp(notice, "disconnected", #{}),
ok
end,
[]
fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
ok
end
),
ok.
t_session_expiration1(Config) ->
%% This testcase verifies that the properties passed in the
%% CONNECT packet are respected by the GC process:
ClientId = atom_to_binary(?FUNCTION_NAME),
Opts = #{
clientid => ClientId,
@ -439,9 +448,6 @@ t_session_expiration1(Config) ->
do_t_session_expiration(Config, Opts).
t_session_expiration2(Config) ->
%% This testcase updates the expiry interval for the session in
%% the _DISCONNECT_ packet. This setting should be respected by GC
%% process:
ClientId = atom_to_binary(?FUNCTION_NAME),
Opts = #{
clientid => ClientId,
@ -456,8 +462,6 @@ t_session_expiration2(Config) ->
do_t_session_expiration(Config, Opts).
do_t_session_expiration(_Config, Opts) ->
%% Sequence is a list of pairs of properties passed through the
%% CONNECT and for the DISCONNECT for each session:
#{
clientid := ClientId,
sequence := [
@ -468,14 +472,13 @@ do_t_session_expiration(_Config, Opts) ->
} = Opts,
CommonParams = #{proto_ver => v5, clientid => ClientId},
?check_trace(
#{timetrap => 30_000},
begin
Topic = <<"some/topic">>,
Params0 = maps:merge(CommonParams, FirstConn),
Client0 = start_client(Params0),
{ok, _} = emqtt:connect(Client0),
{ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client0, Topic, ?QOS_2),
#{s := #{subscriptions := Subs0}} = emqx_persistent_session_ds:print_session(ClientId),
Subs0 = emqx_persistent_session_ds:list_all_subscriptions(),
?assertEqual(1, map_size(Subs0), #{subs => Subs0}),
Info0 = maps:from_list(emqtt:info(Client0)),
?assertEqual(0, maps:get(session_present, Info0), #{info => Info0}),
@ -490,7 +493,7 @@ do_t_session_expiration(_Config, Opts) ->
?assertEqual([], Subs1),
emqtt:disconnect(Client1, ?RC_NORMAL_DISCONNECTION, SecondDisconn),
ct:sleep(2_500),
ct:sleep(1_500),
Params2 = maps:merge(CommonParams, ThirdConn),
Client2 = start_client(Params2),
@ -502,9 +505,9 @@ do_t_session_expiration(_Config, Opts) ->
emqtt:publish(Client2, Topic, <<"payload">>),
?assertNotReceive({publish, #{topic := Topic}}),
%% ensure subscriptions are absent from table.
#{s := #{subscriptions := Subs3}} = emqx_persistent_session_ds:print_session(ClientId),
?assertEqual([], maps:to_list(Subs3)),
?assertEqual(#{}, emqx_persistent_session_ds:list_all_subscriptions()),
emqtt:disconnect(Client2, ?RC_NORMAL_DISCONNECTION, ThirdDisconn),
ok
end,
[]
@ -512,13 +515,14 @@ do_t_session_expiration(_Config, Opts) ->
ok.
t_session_gc(Config) ->
[Node1, _Node2, _Node3] = Nodes = ?config(nodes, Config),
GCInterval = ?config(gc_interval, Config),
[Node1, Node2, _Node3] = Nodes = ?config(nodes, Config),
CoreNodes = [Node1, Node2],
[
Port1,
Port2,
Port3
] = lists:map(fun(N) -> get_mqtt_port(N, tcp) end, Nodes),
ct:pal("Ports: ~p", [[Port1, Port2, Port3]]),
CommonParams = #{
clean_start => false,
proto_ver => v5
@ -535,16 +539,15 @@ t_session_gc(Config) ->
end,
?check_trace(
#{timetrap => 30_000},
begin
ClientId0 = <<"session_gc0">>,
Client0 = StartClient(ClientId0, Port1, 30),
ClientId1 = <<"session_gc1">>,
Client1 = StartClient(ClientId1, Port1, 30),
Client1 = StartClient(ClientId1, Port2, 1),
ClientId2 = <<"session_gc2">>,
Client2 = StartClient(ClientId2, Port2, 1),
ClientId3 = <<"session_gc3">>,
Client3 = StartClient(ClientId3, Port3, 1),
Client2 = StartClient(ClientId2, Port3, 1),
lists:foreach(
fun(Client) ->
@ -554,51 +557,55 @@ t_session_gc(Config) ->
{ok, _} = emqtt:publish(Client, Topic, Payload, ?QOS_1),
ok
end,
[Client1, Client2, Client3]
[Client0, Client1, Client2]
),
%% Clients are still alive; no session is garbage collected.
?tp(notice, "waiting for gc", #{}),
?assertMatch(
{ok, _},
?block_until(
#{
?snk_kind := ds_session_gc,
?snk_span := {complete, _},
?snk_meta := #{node := N}
} when N =/= node()
)
Res0 = ?block_until(
#{
?snk_kind := ds_session_gc,
?snk_span := {complete, _},
?snk_meta := #{node := N}
} when
N =/= node(),
3 * GCInterval + 1_000
),
?assertMatch([_, _, _], list_all_sessions(Node1), sessions),
?assertMatch([_, _, _], list_all_subscriptions(Node1), subscriptions),
?tp(notice, "gc ran", #{}),
?assertMatch({ok, _}, Res0),
{ok, #{?snk_meta := #{time := T0}}} = Res0,
Sessions0 = list_all_sessions(Node1),
Subs0 = list_all_subscriptions(Node1),
?assertEqual(3, map_size(Sessions0), #{sessions => Sessions0}),
?assertEqual(3, map_size(Subs0), #{subs => Subs0}),
%% Now we disconnect 2 of them; only those should be GC'ed.
?tp(notice, "disconnecting client1", #{}),
?assertMatch(
{ok, {ok, _}},
?wait_async_action(
emqtt:stop(Client1),
#{?snk_kind := terminate},
1_000
)
),
ct:pal("disconnected client1"),
?assertMatch(
{ok, {ok, _}},
?wait_async_action(
emqtt:stop(Client2),
#{?snk_kind := terminate}
#{?snk_kind := terminate},
1_000
)
),
?tp(notice, "disconnected client1", #{}),
?assertMatch(
{ok, {ok, _}},
?wait_async_action(
emqtt:stop(Client3),
#{?snk_kind := terminate}
)
),
?tp(notice, "disconnected client2", #{}),
ct:pal("disconnected client2"),
?assertMatch(
{ok, _},
?block_until(
#{
?snk_kind := ds_session_gc_cleaned,
session_id := ClientId2
}
?snk_meta := #{node := N, time := T},
session_ids := [ClientId1]
} when
N =/= node() andalso T > T0,
4 * GCInterval + 1_000
)
),
?assertMatch(
@ -606,116 +613,22 @@ t_session_gc(Config) ->
?block_until(
#{
?snk_kind := ds_session_gc_cleaned,
session_id := ClientId3
}
?snk_meta := #{node := N, time := T},
session_ids := [ClientId2]
} when
N =/= node() andalso T > T0,
4 * GCInterval + 1_000
)
),
?retry(50, 3, [ClientId1] = list_all_sessions(Node1)),
?assertMatch([_], list_all_subscriptions(Node1), subscriptions),
ok
end,
[]
),
ok.
t_session_replay_retry(_Config) ->
%% Verify that the session recovers smoothly from transient errors during
%% replay.
ok = emqx_ds_test_helpers:mock_rpc(),
NClients = 10,
ClientSubOpts = #{
clientid => mk_clientid(?FUNCTION_NAME, sub),
auto_ack => never
},
ClientSub = start_connect_client(ClientSubOpts),
?assertMatch(
{ok, _, [?RC_GRANTED_QOS_1]},
emqtt:subscribe(ClientSub, <<"t/#">>, ?QOS_1)
),
ClientsPub = [
start_connect_client(#{
clientid => mk_clientid(?FUNCTION_NAME, I),
properties => #{'Session-Expiry-Interval' => 0}
})
|| I <- lists:seq(1, NClients)
],
lists:foreach(
fun(Client) ->
Index = integer_to_binary(rand:uniform(NClients)),
Topic = <<"t/", Index/binary>>,
?assertMatch({ok, #{}}, emqtt:publish(Client, Topic, Index, 1))
end,
ClientsPub
),
Pubs0 = emqx_common_test_helpers:wait_publishes(NClients, 5_000),
NPubs = length(Pubs0),
?assertEqual(NClients, NPubs, ?drainMailbox(1_500)),
ok = emqtt:stop(ClientSub),
%% Make `emqx_ds` believe that roughly half of the shards are unavailable.
ok = emqx_ds_test_helpers:mock_rpc_result(
fun(_Node, emqx_ds_replication_layer, _Function, [_DB, Shard | _]) ->
case erlang:phash2(Shard) rem 2 of
0 -> unavailable;
1 -> passthrough
end
end
),
_ClientSub = start_connect_client(ClientSubOpts#{clean_start => false}),
Pubs1 = emqx_common_test_helpers:wait_publishes(NPubs, 5_000),
?assert(length(Pubs1) < length(Pubs0), Pubs1),
%% "Recover" the shards.
emqx_ds_test_helpers:unmock_rpc(),
Pubs2 = emqx_common_test_helpers:wait_publishes(NPubs - length(Pubs1), 5_000),
?assertEqual(
[maps:with([topic, payload, qos], P) || P <- Pubs0],
[maps:with([topic, payload, qos], P) || P <- Pubs1 ++ Pubs2]
).
%% Check that we send will messages when performing GC without relying on timers set by
%% the channel process.
t_session_gc_will_message(_Config) ->
?check_trace(
#{timetrap => 10_000},
begin
WillTopic = <<"will/t">>,
ok = emqx:subscribe(WillTopic, #{qos => 2}),
ClientId = <<"will_msg_client">>,
Client = start_client(#{
clientid => ClientId,
will_topic => WillTopic,
will_payload => <<"will payload">>,
will_qos => 0,
will_props => #{'Will-Delay-Interval' => 300}
}),
{ok, _} = emqtt:connect(Client),
%% Use reason code =/= `?RC_SUCCESS' to allow will message
{ok, {ok, _}} =
?wait_async_action(
emqtt:disconnect(Client, ?RC_UNSPECIFIED_ERROR),
#{?snk_kind := emqx_cm_clean_down}
),
?assertNotReceive({deliver, WillTopic, _}),
%% Set fake `last_alive_at' to trigger immediate will message.
force_last_alive_at(ClientId, _Time = 0),
{ok, {ok, _}} =
?wait_async_action(
emqx_persistent_session_ds_gc_worker:check_session(ClientId),
#{?snk_kind := session_gc_published_will_msg}
),
?assertReceive({deliver, WillTopic, _}),
Sessions1 = list_all_sessions(Node1),
Subs1 = list_all_subscriptions(Node1),
?assertEqual(1, map_size(Sessions1), #{sessions => Sessions1}),
?assertEqual(1, map_size(Subs1), #{subs => Subs1}),
ok
end,
[]
[
prop_only_cores_run_gc(CoreNodes)
]
),
ok.
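%% Note: the prop_only_cores_run_gc(CoreNodes) trace check added above is defined
%% elsewhere in the suite and is not part of this hunk. The following is only an
%% illustrative sketch of what such a snabbkaffe trace property typically looks
%% like (it assumes the #{?snk_meta := #{node := N}} event metadata used in the
%% assertions above); it is not the committed implementation.
prop_only_cores_run_gc(CoreNodes) ->
    {"only core nodes run session GC", fun(Trace) ->
        GCNodes = lists:usort([
            N
         || #{?snk_kind := Kind, ?snk_meta := #{node := N}} <- Trace,
            lists:member(Kind, [ds_session_gc, ds_session_gc_cleaned])
        ]),
        ?assertEqual([], GCNodes -- CoreNodes)
    end}.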

View File

@ -1,74 +0,0 @@
defmodule EMQX.MixProject do
use Mix.Project
alias EMQXUmbrella.MixProject, as: UMP
def project do
[
app: :emqx,
version: "0.1.0",
build_path: "../../_build",
erlc_paths: erlc_paths(),
erlc_options: [
{:i, "src"}
| UMP.erlc_options()
],
compilers: Mix.compilers() ++ [:copy_srcs],
# used by our `Mix.Tasks.Compile.CopySrcs` compiler
extra_dirs: extra_dirs(),
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.14",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications
def application do
[
## FIXME!!! go through emqx.app.src and add missing stuff...
extra_applications: [:public_key, :ssl, :os_mon, :logger, :mnesia, :sasl] ++ UMP.extra_applications(),
mod: {:emqx_app, []}
]
end
def deps() do
## FIXME!!! go through emqx.app.src and add missing stuff...
[
{:emqx_mix_utils, in_umbrella: true, runtime: false},
{:emqx_utils, in_umbrella: true},
{:emqx_ds_backends, in_umbrella: true},
UMP.common_dep(:gproc),
UMP.common_dep(:gen_rpc),
UMP.common_dep(:ekka),
UMP.common_dep(:esockd),
UMP.common_dep(:cowboy),
UMP.common_dep(:lc),
UMP.common_dep(:hocon),
UMP.common_dep(:ranch),
UMP.common_dep(:bcrypt),
UMP.common_dep(:pbkdf2),
UMP.common_dep(:emqx_http_lib),
] ++ UMP.quicer_dep()
end
defp erlc_paths() do
paths = UMP.erlc_paths()
if UMP.test_env?() do
["integration_test" | paths]
else
paths
end
end
defp extra_dirs() do
dirs = ["src", "etc"]
if UMP.test_env?() do
["test", "integration_test" | dirs]
else
dirs
end
end
end

View File

@ -8,29 +8,20 @@
{emqx_bridge,3}.
{emqx_bridge,4}.
{emqx_bridge,5}.
{emqx_bridge,6}.
{emqx_broker,1}.
{emqx_cluster_link,1}.
{emqx_cm,1}.
{emqx_cm,2}.
{emqx_cm,3}.
{emqx_conf,1}.
{emqx_conf,2}.
{emqx_conf,3}.
{emqx_conf,4}.
{emqx_connector,1}.
{emqx_dashboard,1}.
{emqx_delayed,1}.
{emqx_delayed,2}.
{emqx_delayed,3}.
{emqx_ds,1}.
{emqx_ds,2}.
{emqx_ds,3}.
{emqx_ds,4}.
{emqx_ds_shared_sub,1}.
{emqx_eviction_agent,1}.
{emqx_eviction_agent,2}.
{emqx_eviction_agent,3}.
{emqx_exhook,1}.
{emqx_ft_storage_exporter_fs,1}.
{emqx_ft_storage_fs,1}.
@ -44,16 +35,11 @@
{emqx_management,2}.
{emqx_management,3}.
{emqx_management,4}.
{emqx_management,5}.
{emqx_metrics,1}.
{emqx_metrics,2}.
{emqx_mgmt_api_plugins,1}.
{emqx_mgmt_api_plugins,2}.
{emqx_mgmt_api_plugins,3}.
{emqx_mgmt_api_relup,1}.
{emqx_mgmt_cluster,1}.
{emqx_mgmt_cluster,2}.
{emqx_mgmt_cluster,3}.
{emqx_mgmt_data_backup,1}.
{emqx_mgmt_trace,1}.
{emqx_mgmt_trace,2}.
@ -63,18 +49,15 @@
{emqx_node_rebalance_api,1}.
{emqx_node_rebalance_api,2}.
{emqx_node_rebalance_evacuation,1}.
{emqx_node_rebalance_purge,1}.
{emqx_node_rebalance_status,1}.
{emqx_node_rebalance_status,2}.
{emqx_persistent_session_ds,1}.
{emqx_plugins,1}.
{emqx_plugins,2}.
{emqx_prometheus,1}.
{emqx_prometheus,2}.
{emqx_resource,1}.
{emqx_resource,2}.
{emqx_retainer,1}.
{emqx_retainer,2}.
{emqx_router,1}.
{emqx_rule_engine,1}.
{emqx_shared_sub,1}.
{emqx_slow_subs,1}.
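%% Each entry in this file is a plain Erlang term -- one {Api, Version} tuple per
%% line -- so the released BPAPI versions can be loaded with file:consult/1.
%% Minimal sketch; the relative path below is an assumption for illustration.
{ok, Pairs} = file:consult("apps/emqx/priv/bpapi.versions"),
true = lists:member({emqx_bridge, 5}, Pairs),
LatestBridge = lists:max([V || {emqx_bridge, V} <- Pairs]).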

View File

@ -24,18 +24,17 @@
{deps, [
{emqx_utils, {path, "../emqx_utils"}},
{emqx_durable_storage, {path, "../emqx_durable_storage"}},
{emqx_ds_backends, {path, "../emqx_ds_backends"}},
{lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}},
{gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}},
{cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}},
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.12.0"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.5"}}},
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.11.1"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.17.0"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.43.2"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.3"}}},
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},
{pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
{recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.10"}}}
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.8"}}}
]}.
{plugins, [{rebar3_proper, "0.12.1"}, rebar3_path_deps]}.
@ -72,7 +71,7 @@
{statistics, true}
]}.
{project_plugins, [{erlfmt, "1.3.0"}]}.
{project_plugins, [erlfmt]}.
{erlfmt, [
{files, [

View File

@ -24,8 +24,7 @@ IsQuicSupp = fun() ->
end,
Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.0"}}},
Quicer =
{quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.500"}}}.
Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.308"}}}.
Dialyzer = fun(Config) ->
{dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config),

Some files were not shown because too many files have changed in this diff.