Compare commits

master..0131-ci-use-otp25-for-docker

No commits in common. "master" and "0131-ci-use-otp25-for-docker" have entirely different histories.

2395 changed files with 25513 additions and 125004 deletions
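The two branches share no merge base, so a three-dot (merge-base) comparison is not meaningful here; assuming both branches exist on an `origin` remote, a rough local equivalent of this file listing is a plain two-dot diff of the branch tips:
```
# hypothetical sketch: reproduce this comparison locally
git fetch origin
# no common ancestor exists, so diff the branch tips directly
git diff --stat origin/master origin/0131-ci-use-otp25-for-docker
```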

View File

@@ -10,7 +10,7 @@ CASSANDRA_TAG=3.11
MINIO_TAG=RELEASE.2023-03-20T20-16-18Z MINIO_TAG=RELEASE.2023-03-20T20-16-18Z
OPENTS_TAG=9aa7f88 OPENTS_TAG=9aa7f88
KINESIS_TAG=2.1 KINESIS_TAG=2.1
HSTREAMDB_TAG=v0.19.3 HSTREAMDB_TAG=v0.16.1
HSTREAMDB_ZK_TAG=3.8.1 HSTREAMDB_ZK_TAG=3.8.1
MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server

View File

@@ -1,24 +0,0 @@
version: '3.9'
services:
azurite:
container_name: azurite
image: mcr.microsoft.com/azure-storage/azurite:3.30.0
restart: always
expose:
- "10000"
# ports:
# - "10000:10000"
networks:
- emqx_bridge
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:10000"]
interval: 30s
timeout: 5s
retries: 4
command:
- azurite-blob
- "--blobHost"
- 0.0.0.0
- "-d"
- debug.log

View File

@@ -1,30 +0,0 @@
version: '3.9'
services:
couchbase:
container_name: couchbase
hostname: couchbase
image: ghcr.io/emqx/couchbase:1.0.0
restart: always
expose:
- 8091-8093
# ports:
# - "8091-8093:8091-8093"
networks:
- emqx_bridge
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8093/admin/ping"]
interval: 30s
timeout: 5s
retries: 4
environment:
- CLUSTER=localhost
- USER=admin
- PASS=public
- PORT=8091
- RAMSIZEMB=2048
- RAMSIZEINDEXMB=512
- RAMSIZEFTSMB=512
- BUCKETS=mqtt
- BUCKETSIZES=100
- AUTOREBALANCE=true

View File

@@ -1,7 +1,5 @@
version: "3.9" version: "3.9"
# hint: run the following if the container fails to start locally
# sysctl -w vm.max_map_count=262144
services: services:
setup: setup:
image: public.ecr.aws/elastic/elasticsearch:${ELASTIC_TAG} image: public.ecr.aws/elastic/elasticsearch:${ELASTIC_TAG}
@@ -56,7 +54,7 @@ services:
- xpack.security.http.ssl.certificate=certs/es01/es01.crt - xpack.security.http.ssl.certificate=certs/es01/es01.crt
- xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt - xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
- xpack.license.self_generated.type=${LICENSE} - xpack.license.self_generated.type=${LICENSE}
mem_limit: 4G mem_limit: 1073741824
ulimits: ulimits:
memlock: memlock:
soft: -1 soft: -1

View File

@@ -4,7 +4,7 @@ services:
greptimedb: greptimedb:
container_name: greptimedb container_name: greptimedb
hostname: greptimedb hostname: greptimedb
image: greptime/greptimedb:v0.7.1 image: greptime/greptimedb:v0.4.4
expose: expose:
- "4000" - "4000"
- "4001" - "4001"

View File

@@ -1,53 +1,24 @@
version: '3.9' version: '3.9'
services: services:
iotdb_1_3_0: iotdb:
container_name: iotdb130 container_name: iotdb
hostname: iotdb130 hostname: iotdb
image: apache/iotdb:1.3.0-standalone
restart: always
environment:
- enable_rest_service=true
- cn_internal_address=iotdb130
- cn_internal_port=10710
- cn_consensus_port=10720
- cn_seed_config_node=iotdb130:10710
- dn_rpc_address=iotdb130
- dn_internal_address=iotdb130
- dn_rpc_port=6667
- dn_mpp_data_exchange_port=10740
- dn_schema_region_consensus_port=10750
- dn_data_region_consensus_port=10760
- dn_seed_config_node=iotdb130:10710
# volumes:
# - ./data:/iotdb/data
# - ./logs:/iotdb/logs
expose:
- "18080"
# IoTDB's REST interface, uncomment for local testing
# ports:
# - "18080:18080"
networks:
- emqx_bridge
iotdb_1_1_0:
container_name: iotdb110
hostname: iotdb110
image: apache/iotdb:1.1.0-standalone image: apache/iotdb:1.1.0-standalone
restart: always restart: always
environment: environment:
- enable_rest_service=true - enable_rest_service=true
- cn_internal_address=iotdb110 - cn_internal_address=iotdb
- cn_internal_port=10710 - cn_internal_port=10710
- cn_consensus_port=10720 - cn_consensus_port=10720
- cn_target_config_node_list=iotdb110:10710 - cn_target_config_node_list=iotdb:10710
- dn_rpc_address=iotdb110 - dn_rpc_address=iotdb
- dn_internal_address=iotdb110 - dn_internal_address=iotdb
- dn_rpc_port=6667 - dn_rpc_port=6667
- dn_mpp_data_exchange_port=10740 - dn_mpp_data_exchange_port=10740
- dn_schema_region_consensus_port=10750 - dn_schema_region_consensus_port=10750
- dn_data_region_consensus_port=10760 - dn_data_region_consensus_port=10760
- dn_target_config_node_list=iotdb110:10710 - dn_target_config_node_list=iotdb:10710
# volumes: # volumes:
# - ./data:/iotdb/data # - ./data:/iotdb/data
# - ./logs:/iotdb/logs # - ./logs:/iotdb/logs

View File

@@ -18,7 +18,7 @@ services:
- /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret - /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
kdc: kdc:
hostname: kdc.emqx.net hostname: kdc.emqx.net
image: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04 image: ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04
container_name: kdc.emqx.net container_name: kdc.emqx.net
expose: expose:
- 88 # kdc - 88 # kdc

View File

@@ -10,7 +10,7 @@ services:
nofile: 1024 nofile: 1024
image: openldap image: openldap
#ports: #ports:
# - "389:389" # - 389:389
volumes: volumes:
- ./certs/ca.crt:/etc/certs/ca.crt - ./certs/ca.crt:/etc/certs/ca.crt
restart: always restart: always

View File

@@ -9,12 +9,10 @@ services:
expose: expose:
- "15672" - "15672"
- "5672" - "5672"
- "5671"
# We don't want to take ports from the host # We don't want to take ports from the host
#ports: # ports:
# - "15672:15672" # - "15672:15672"
# - "5672:5672" # - "5672:5672"
# - "5671:5671"
volumes: volumes:
- ./certs/ca.crt:/opt/certs/ca.crt - ./certs/ca.crt:/opt/certs/ca.crt
- ./certs/server.crt:/opt/certs/server.crt - ./certs/server.crt:/opt/certs/server.crt

View File

@@ -1,41 +0,0 @@
version: '3.9'
services:
mqnamesrvssl:
image: apache/rocketmq:4.9.4
container_name: rocketmq_namesrv_ssl
# ports:
# - 9876:9876
volumes:
- ./rocketmq/logs_ssl:/opt/logs
- ./rocketmq/store_ssl:/opt/store
environment:
JAVA_OPT: "-Dtls.server.mode=enforcing"
command: ./mqnamesrv
networks:
- emqx_bridge
mqbrokerssl:
image: apache/rocketmq:4.9.4
container_name: rocketmq_broker_ssl
# ports:
# - 10909:10909
# - 10911:10911
volumes:
- ./rocketmq/logs_ssl:/opt/logs
- ./rocketmq/store_ssl:/opt/store
- ./rocketmq/conf_ssl/broker.conf:/etc/rocketmq/broker.conf
- ./rocketmq/conf_ssl/plain_acl.yml:/home/rocketmq/rocketmq-4.9.4/conf/plain_acl.yml
environment:
NAMESRV_ADDR: "rocketmq_namesrv_ssl:9876"
JAVA_OPTS: " -Duser.home=/opt -Drocketmq.broker.diskSpaceWarningLevelRatio=0.99"
JAVA_OPT_EXT: "-server -Xms512m -Xmx512m -Xmn512m -Dtls.server.mode=enforcing"
command: ./mqbroker -c /etc/rocketmq/broker.conf
depends_on:
- mqnamesrvssl
networks:
- emqx_bridge
networks:
emqx_bridge:
driver: bridge

View File

@@ -39,10 +39,6 @@ services:
- 19042:9042 - 19042:9042
# Cassandra TLS # Cassandra TLS
- 19142:9142 - 19142:9142
# Cassandra No Auth
- 19043:9043
# Cassandra TLS No Auth
- 19143:9143
# S3 # S3
- 19000:19000 - 19000:19000
# S3 TLS # S3 TLS

View File

@@ -3,7 +3,7 @@ version: '3.9'
services: services:
erlang: erlang:
container_name: erlang container_name: erlang
image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04} image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04}
env_file: env_file:
- credentials.env - credentials.env
- conf.env - conf.env

View File

@@ -49,9 +49,6 @@ echo "+++++++ Creating Kafka Topics ++++++++"
# there seem to be a race condition when creating the topics (too early) # there seem to be a race condition when creating the topics (too early)
env KAFKA_CREATE_TOPICS="$KAFKA_CREATE_TOPICS_NG" KAFKA_PORT="$PORT1" create-topics.sh env KAFKA_CREATE_TOPICS="$KAFKA_CREATE_TOPICS_NG" KAFKA_PORT="$PORT1" create-topics.sh
# create a topic with max.message.bytes=100
/opt/kafka/bin/kafka-topics.sh --create --bootstrap-server "${SERVER}:${PORT1}" --topic max-100-bytes --partitions 1 --replication-factor 1 --config max.message.bytes=100
echo "+++++++ Wait until Kafka ports are down ++++++++" echo "+++++++ Wait until Kafka ports are down ++++++++"
bash -c 'while printf "" 2>>/dev/null >>/dev/tcp/$0/$1; do sleep 1; done' $SERVER $PORT1 bash -c 'while printf "" 2>>/dev/null >>/dev/tcp/$0/$1; do sleep 1; done' $SERVER $PORT1

View File

@@ -1,61 +0,0 @@
# LDAP authentication
To run manual tests with the default docker-compose files, expose the openldap container port by uncommenting the `ports` config in `docker-compose-ldap.yaml`.
To start openldap:
```
docker-compose -f ./.ci/docker-compose-file/docker-compose.yaml -f ./.ci/docker-compose-file/docker-compose-ldap.yaml up -d
```
## LDAP database
The LDAP database is populated from the files below (local source file on the left, path inside the container on the right):
```
apps/emqx_ldap/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
apps/emqx_ldap/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
```
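The compose setup populates these automatically; to (re)load the test entries by hand, a sketch using `ldapadd` could look like this, assuming the server is reachable on localhost:389 and the custom schema is already installed (bind DN and password are taken from the config section below):
```
# hypothetical: manually load the test entries into a running openldap
ldapadd -x -H ldap://localhost:389 -D "cn=root,dc=emqx,dc=io" -w public \
  -f apps/emqx_ldap/test/data/emqx.io.ldif
```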
## Minimal EMQX config
```
authentication = [
{
backend = ldap
base_dn = "uid=${username},ou=testdevice,dc=emqx,dc=io"
filter = "(& (objectClass=mqttUser) (uid=${username}))"
mechanism = password_based
method {
is_superuser_attribute = isSuperuser
password_attribute = userPassword
type = hash
}
password = public
pool_size = 8
query_timeout = "5s"
request_timeout = "10s"
server = "localhost:1389"
username = "cn=root,dc=emqx,dc=io"
}
]
```
## Example ldapsearch command
```
ldapsearch -x -H ldap://localhost:389 -D "cn=root,dc=emqx,dc=io" -W -b "uid=mqttuser0007,ou=testdevice,dc=emqx,dc=io" "(&(objectClass=mqttUser)(uid=mqttuser0007))"
```
## Example mqttx commands
Each client's password hash is generated from its username.
```
# disabled user
mqttx pub -t 't/1' -h localhost -p 1883 -m x -u mqttuser0006 -P mqttuser0006
# enabled super-user
mqttx pub -t 't/1' -h localhost -p 1883 -m x -u mqttuser0007 -P mqttuser0007
```
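The exact hash scheme comes from the LDAP data itself (the config above only names the `userPassword` attribute), but as an illustration, a `userPassword` value could be derived from a username with OpenLDAP's `slappasswd`, assuming a simple `{SHA}` scheme:
```
# hypothetical: generate a {SHA} userPassword value from the username
slappasswd -h '{SHA}' -s mqttuser0007
```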

View File

@@ -9,4 +9,3 @@ accounts:
defaultGroupPerm: PUB|SUB defaultGroupPerm: PUB|SUB
topicPerms: topicPerms:
- TopicTest=PUB|SUB - TopicTest=PUB|SUB
- Topic2=PUB|SUB

View File

@@ -1,24 +0,0 @@
brokerClusterName=DefaultClusterSSL
brokerName=broker-a
brokerId=0
brokerIP1=rocketmq_broker_ssl
defaultTopicQueueNums=4
autoCreateTopicEnable=true
autoCreateSubscriptionGroup=true
listenPort=10911
deleteWhen=04
fileReservedTime=120
mapedFileSizeCommitLog=1073741824
mapedFileSizeConsumeQueue=300000
diskMaxUsedSpaceRatio=100
maxMessageSize=65536
brokerRole=ASYNC_MASTER
flushDiskType=ASYNC_FLUSH
aclEnable=true

View File

@@ -1,12 +0,0 @@
globalWhiteRemoteAddresses:
accounts:
- accessKey: RocketMQ
secretKey: 12345678
whiteRemoteAddress:
admin: false
defaultTopicPerm: DENY
defaultGroupPerm: PUB|SUB
topicPerms:
- TopicTest=PUB|SUB
- Topic2=PUB|SUB

View File

@@ -96,18 +96,6 @@
"upstream": "cassandra:9142", "upstream": "cassandra:9142",
"enabled": true "enabled": true
}, },
{
"name": "cassa_no_auth_tcp",
"listen": "0.0.0.0:9043",
"upstream": "cassandra_noauth:9042",
"enabled": true
},
{
"name": "cassa_no_auth_tls",
"listen": "0.0.0.0:9143",
"upstream": "cassandra_noauth:9142",
"enabled": true
},
{ {
"name": "sqlserver", "name": "sqlserver",
"listen": "0.0.0.0:1433", "listen": "0.0.0.0:1433",
@@ -139,15 +127,9 @@
"enabled": true "enabled": true
}, },
{ {
"name": "iotdb110", "name": "iotdb",
"listen": "0.0.0.0:18080", "listen": "0.0.0.0:18080",
"upstream": "iotdb110:18080", "upstream": "iotdb:18080",
"enabled": true
},
{
"name": "iotdb130",
"listen": "0.0.0.0:28080",
"upstream": "iotdb130:18080",
"enabled": true "enabled": true
}, },
{ {
@@ -215,17 +197,5 @@
"listen": "0.0.0.0:9200", "listen": "0.0.0.0:9200",
"upstream": "elasticsearch:9200", "upstream": "elasticsearch:9200",
"enabled": true "enabled": true
},
{
"name": "azurite_plain",
"listen": "0.0.0.0:10000",
"upstream": "azurite:10000",
"enabled": true
},
{
"name": "couchbase",
"listen": "0.0.0.0:8093",
"upstream": "couchbase:8093",
"enabled": true
} }
] ]

View File

@@ -1,18 +1,18 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{application, http_server, [ {application, http_server,
{description, "An HTTP server application"}, [{description, "An HTTP server application"},
{vsn, "0.2.0"}, {vsn, "0.2.0"},
{registered, []}, {registered, []},
% {mod, {http_server_app, []}}, % {mod, {http_server_app, []}},
{modules, []}, {modules, []},
{applications, [ {applications,
kernel, [kernel,
stdlib, stdlib,
minirest minirest
]}, ]},
{env, []}, {env,[]},
{modules, []}, {modules, []},
{licenses, ["Apache 2.0"]}, {licenses, ["Apache 2.0"]},
{links, []} {links, []}
]}. ]}.

.github/CODEOWNERS (vendored): 23 changes
View File

@@ -1,29 +1,18 @@
## Default ## Default
* @emqx/emqx-review-board * @emqx/emqx-review-board
# emqx-review-board members
## HJianBo
## id
## ieQu1
## keynslug
## qzhuyan
## savonarola
## terry-xiaoyu
## thalesmg
## zhongwencool
## zmstone
## apps ## apps
/apps/emqx/ @emqx/emqx-review-board @lafirest /apps/emqx/ @emqx/emqx-review-board @lafirest
/apps/emqx_auth/ @emqx/emqx-review-board @JimMoen /apps/emqx_connector/ @emqx/emqx-review-board
/apps/emqx_auth/ @emqx/emqx-review-board @JimMoen @savonarola
/apps/emqx_connector/ @emqx/emqx-review-board @JimMoen /apps/emqx_connector/ @emqx/emqx-review-board @JimMoen
/apps/emqx_dashboard/ @emqx/emqx-review-board @JimMoen @lafirest /apps/emqx_dashboard/ @emqx/emqx-review-board @JimMoen @lafirest
/apps/emqx_dashboard_rbac/ @emqx/emqx-review-board @lafirest /apps/emqx_dashboard_rbac/ @emqx/emqx-review-board @lafirest
/apps/emqx_dashboard_sso/ @emqx/emqx-review-board @JimMoen @lafirest /apps/emqx_dashboard_sso/ @emqx/emqx-review-board @JimMoen @lafirest
/apps/emqx_exhook/ @emqx/emqx-review-board @JimMoen /apps/emqx_exhook/ @emqx/emqx-review-board @JimMoen @HJianBo
/apps/emqx_ft/ @emqx/emqx-review-board @savonarola @keynslug
/apps/emqx_gateway/ @emqx/emqx-review-board @lafirest /apps/emqx_gateway/ @emqx/emqx-review-board @lafirest
/apps/emqx_management/ @emqx/emqx-review-board @lafirest /apps/emqx_management/ @emqx/emqx-review-board @lafirest @sstrigler
/apps/emqx_opentelemetry @emqx/emqx-review-board @SergeTupchiy
/apps/emqx_plugins/ @emqx/emqx-review-board @JimMoen /apps/emqx_plugins/ @emqx/emqx-review-board @JimMoen
/apps/emqx_prometheus/ @emqx/emqx-review-board @JimMoen /apps/emqx_prometheus/ @emqx/emqx-review-board @JimMoen
/apps/emqx_psk/ @emqx/emqx-review-board @lafirest /apps/emqx_psk/ @emqx/emqx-review-board @lafirest
@@ -31,7 +20,7 @@
/apps/emqx_rule_engine/ @emqx/emqx-review-board @kjellwinblad /apps/emqx_rule_engine/ @emqx/emqx-review-board @kjellwinblad
/apps/emqx_slow_subs/ @emqx/emqx-review-board @lafirest /apps/emqx_slow_subs/ @emqx/emqx-review-board @lafirest
/apps/emqx_statsd/ @emqx/emqx-review-board @JimMoen /apps/emqx_statsd/ @emqx/emqx-review-board @JimMoen
/apps/emqx_durable_storage/ @emqx/emqx-review-board @keynslug /apps/emqx_durable_storage/ @emqx/emqx-review-board @ieQu1 @keynslug
## CI ## CI
/deploy/ @emqx/emqx-review-board @Rory-Z /deploy/ @emqx/emqx-review-board @Rory-Z

View File

@@ -33,7 +33,7 @@ runs:
HOMEBREW_NO_INSTALL_UPGRADE: 1 HOMEBREW_NO_INSTALL_UPGRADE: 1
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
run: | run: |
brew install curl zip unzip coreutils openssl@1.1 unixodbc brew install curl zip unzip coreutils openssl@1.1
echo "/usr/local/opt/bison/bin" >> $GITHUB_PATH echo "/usr/local/opt/bison/bin" >> $GITHUB_PATH
echo "/usr/local/bin" >> $GITHUB_PATH echo "/usr/local/bin" >> $GITHUB_PATH
echo "emqx_name=${emqx_name}" >> $GITHUB_OUTPUT echo "emqx_name=${emqx_name}" >> $GITHUB_OUTPUT
@@ -51,12 +51,12 @@
echo "SELF_HOSTED=false" >> $GITHUB_OUTPUT echo "SELF_HOSTED=false" >> $GITHUB_OUTPUT
;; ;;
esac esac
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
id: cache id: cache
if: steps.prepare.outputs.SELF_HOSTED != 'true' if: steps.prepare.outputs.SELF_HOSTED != 'true'
with: with:
path: ${{ steps.prepare.outputs.OTP_INSTALL_PATH }} path: ${{ steps.prepare.outputs.OTP_INSTALL_PATH }}
key: otp-install-${{ inputs.otp }}-${{ inputs.os }}-static-ssl-disable-hipe-disable-jit-20240524-1 key: otp-install-${{ inputs.otp }}-${{ inputs.os }}-static-ssl-disable-hipe-disable-jit
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
shell: bash shell: bash
@@ -80,10 +80,9 @@ runs:
git clone --depth 1 --branch OTP-${{ inputs.otp }} https://github.com/emqx/otp.git "$OTP_SOURCE_PATH" git clone --depth 1 --branch OTP-${{ inputs.otp }} https://github.com/emqx/otp.git "$OTP_SOURCE_PATH"
cd "$OTP_SOURCE_PATH" cd "$OTP_SOURCE_PATH"
if [ "$(arch)" = arm64 ]; then if [ "$(arch)" = arm64 ]; then
ODBCHOME="$(brew --prefix unixodbc)" export CFLAGS="-O2 -g -I$(brew --prefix unixodbc)/include"
export CFLAGS="-O2 -g -I${ODBCHOME}/include" export LDFLAGS="-L$(brew --prefix unixodbc)/lib"
export LDFLAGS="-L${ODBCHOME}/lib" WITH_ODBC="--with-odbc=$(brew --prefix unixodbc)"
WITH_ODBC="--with-odbc=${ODBCHOME}"
else else
WITH_ODBC="" WITH_ODBC=""
fi fi

View File

@@ -1,21 +1,37 @@
name: 'Prepare jmeter' name: 'Prepare jmeter'
inputs:
version-emqx:
required: true
type: string
runs: runs:
using: composite using: composite
steps: steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
env:
PKG_VSN: ${{ inputs.version-emqx }}
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/emqx-fvt repository: emqx/emqx-fvt
ref: broker-autotest-v5 ref: broker-autotest-v5
path: scripts path: scripts
- uses: actions/setup-java@99b8673ff64fbf99d8d325f52d9a5bdedb8483e9 # v4.2.1 - uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 # v4.0.0
with: with:
java-version: '8.0.282' # The JDK version to make available on the path. java-version: '8.0.282' # The JDK version to make available on the path.
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
architecture: x64 # (x64 or x86) - defaults to x64 architecture: x64 # (x64 or x86) - defaults to x64
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
distribution: 'zulu' distribution: 'zulu'
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: apache-jmeter.tgz name: apache-jmeter.tgz
- name: install jmeter - name: install jmeter

View File

@@ -1 +0,0 @@
*/.github/*

View File

@@ -11,48 +11,29 @@ on:
ref: ref:
required: false required: false
defaults:
run:
shell: bash
env: env:
IS_CI: "yes" IS_CI: "yes"
jobs: permissions:
init: contents: read
runs-on: ubuntu-22.04
outputs:
BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }}
OTP_VSN: ${{ steps.env.outputs.OTP_VSN }}
ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
BUILDER: ${{ steps.env.outputs.BUILDER }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.event.inputs.ref }}
- name: Set up environment
id: env
run: |
source ./env.sh
echo "BUILDER_VSN=$EMQX_BUILDER_VSN" | tee -a "$GITHUB_OUTPUT"
echo "OTP_VSN=$OTP_VSN" | tee -a "$GITHUB_OUTPUT"
echo "ELIXIR_VSN=$ELIXIR_VSN" | tee -a "$GITHUB_OUTPUT"
echo "BUILDER=$EMQX_BUILDER" | tee -a "$GITHUB_OUTPUT"
jobs:
sanity-checks: sanity-checks:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: init container: "ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04"
container: ${{ needs.init.outputs.BUILDER }}
outputs: outputs:
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
ct-host: ${{ steps.matrix.outputs.ct-host }} ct-host: ${{ steps.matrix.outputs.ct-host }}
ct-docker: ${{ steps.matrix.outputs.ct-docker }} ct-docker: ${{ steps.matrix.outputs.ct-docker }}
version-emqx: ${{ steps.matrix.outputs.version-emqx }}
permissions: version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }}
contents: read builder: "ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04"
builder_vsn: "5.3-2"
otp_vsn: "26.2.1-2"
elixir_vsn: "1.15.7"
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
@@ -97,8 +78,7 @@ jobs:
MIX_ENV: emqx-enterprise MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise PROFILE: emqx-enterprise
run: | run: |
# mix local.hex --force --if-missing && mix local.rebar --force --if-missing mix local.hex --force --if-missing && mix local.rebar --force --if-missing
mix local.hex 2.0.6 --force --if-missing && mix local.rebar --force --if-missing
- name: Check formatting - name: Check formatting
env: env:
MIX_ENV: emqx-enterprise MIX_ENV: emqx-enterprise
@@ -111,20 +91,35 @@ jobs:
- name: Generate CT Matrix - name: Generate CT Matrix
id: matrix id: matrix
run: | run: |
MATRIX="$(./scripts/find-apps.sh --ci)" APPS="$(./scripts/find-apps.sh --ci)"
MATRIX="$(echo "${APPS}" | jq -c '
[
(.[] | select(.profile == "emqx") | . + {
builder: "5.3-2",
otp: "26.2.1-2",
elixir: "1.15.7"
}),
(.[] | select(.profile == "emqx-enterprise") | . + {
builder: "5.3-2",
otp: ["26.2.1-2"][],
elixir: "1.15.7"
})
]
')"
echo "${MATRIX}" | jq echo "${MATRIX}" | jq
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')" CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT
echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT
echo "version-emqx=$(./pkg-vsn.sh emqx)" | tee -a $GITHUB_OUTPUT
echo "version-emqx-enterprise=$(./pkg-vsn.sh emqx-enterprise)" | tee -a $GITHUB_OUTPUT
compile: compile:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral-xl","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral-xl","linux","x64"]') }}
container: ${{ needs.init.outputs.BUILDER }} container: ${{ needs.sanity-checks.outputs.builder }}
needs: needs:
- init
- sanity-checks - sanity-checks
strategy: strategy:
matrix: matrix:
@@ -132,11 +127,8 @@ jobs:
- emqx - emqx
- emqx-enterprise - emqx-enterprise
permissions:
contents: read
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Work around https://github.com/actions/checkout/issues/766 - name: Work around https://github.com/actions/checkout/issues/766
@@ -152,55 +144,61 @@ jobs:
echo "PROFILE=${PROFILE}" | tee -a .env echo "PROFILE=${PROFILE}" | tee -a .env
echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env
zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip . zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
path: ${{ matrix.profile }}.zip path: ${{ matrix.profile }}.zip
retention-days: 7 retention-days: 1
run_emqx_app_tests: run_emqx_app_tests:
needs: needs:
- init
- sanity-checks - sanity-checks
- compile - compile
uses: ./.github/workflows/run_emqx_app_tests.yaml uses: ./.github/workflows/run_emqx_app_tests.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
before_ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }} before_ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
after_ref: ${{ github.sha }} after_ref: ${{ github.sha }}
run_test_cases: run_test_cases:
needs: needs:
- init
- sanity-checks - sanity-checks
- compile - compile
uses: ./.github/workflows/run_test_cases.yaml uses: ./.github/workflows/run_test_cases.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }} ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
ct-host: ${{ needs.sanity-checks.outputs.ct-host }} ct-host: ${{ needs.sanity-checks.outputs.ct-host }}
ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }} ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }}
static_checks: static_checks:
needs: needs:
- init
- sanity-checks - sanity-checks
- compile - compile
uses: ./.github/workflows/static_checks.yaml uses: ./.github/workflows/static_checks.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }} ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
build_slim_packages: build_slim_packages:
needs: needs:
- sanity-checks - sanity-checks
uses: ./.github/workflows/build_slim_packages.yaml uses: ./.github/workflows/build_slim_packages.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder_vsn: ${{ needs.sanity-checks.outputs.builder_vsn }}
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
build_docker_for_test: build_docker_for_test:
needs: needs:
- init
- sanity-checks - sanity-checks
uses: ./.github/workflows/build_docker_for_test.yaml uses: ./.github/workflows/build_docker_for_test.yaml
with:
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
spellcheck: spellcheck:
needs: needs:
@@ -210,35 +208,41 @@ jobs:
run_conf_tests: run_conf_tests:
needs: needs:
- init
- sanity-checks - sanity-checks
- compile - compile
uses: ./.github/workflows/run_conf_tests.yaml uses: ./.github/workflows/run_conf_tests.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
check_deps_integrity: check_deps_integrity:
needs: needs:
- init
- sanity-checks - sanity-checks
uses: ./.github/workflows/check_deps_integrity.yaml uses: ./.github/workflows/check_deps_integrity.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
run_jmeter_tests: run_jmeter_tests:
needs: needs:
- sanity-checks - sanity-checks
- build_docker_for_test - build_docker_for_test
uses: ./.github/workflows/run_jmeter_tests.yaml uses: ./.github/workflows/run_jmeter_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
run_docker_tests: run_docker_tests:
needs: needs:
- sanity-checks - sanity-checks
- build_docker_for_test - build_docker_for_test
uses: ./.github/workflows/run_docker_tests.yaml uses: ./.github/workflows/run_docker_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
run_helm_tests: run_helm_tests:
needs: needs:
- sanity-checks - sanity-checks
- build_docker_for_test - build_docker_for_test
uses: ./.github/workflows/run_helm_tests.yaml uses: ./.github/workflows/run_helm_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}

View File

@@ -8,64 +8,37 @@ on:
push: push:
tags: tags:
- 'v*' - 'v*'
- 'e*'
branches: branches:
- 'master' - 'master'
- 'release-5[0-9]' - 'release-5[0-9]'
- 'ci/**' - 'ci/**'
workflow_dispatch:
inputs:
ref:
required: false
defaults: permissions:
run: contents: read
shell: bash
env: env:
IS_CI: 'yes' IS_CI: 'yes'
jobs: jobs:
init:
runs-on: ubuntu-22.04
outputs:
BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }}
OTP_VSN: ${{ steps.env.outputs.OTP_VSN }}
ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
BUILDER: ${{ steps.env.outputs.BUILDER }}
BUILD_FROM: ${{ steps.env.outputs.BUILD_FROM }}
RUN_FROM: ${{ steps.env.outputs.BUILD_FROM }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.event.inputs.ref }}
- name: Set up environment
id: env
run: |
source env.sh
echo "BUILDER_VSN=$EMQX_BUILDER_VSN" >> "$GITHUB_OUTPUT"
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
echo "ELIXIR_VSN=$ELIXIR_VSN" >> "$GITHUB_OUTPUT"
echo "BUILDER=$EMQX_BUILDER" >> "$GITHUB_OUTPUT"
echo "BUILD_FROM=$EMQX_DOCKER_BUILD_FROM" >> "$GITHUB_OUTPUT"
echo "RUN_FROM=$EMQX_DOCKER_RUN_FROM" >> "$GITHUB_OUTPUT"
prepare: prepare:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: init container: 'ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04'
container: ${{ needs.init.outputs.BUILDER }}
outputs: outputs:
profile: ${{ steps.parse-git-ref.outputs.profile }} profile: ${{ steps.parse-git-ref.outputs.profile }}
release: ${{ steps.parse-git-ref.outputs.release }} release: ${{ steps.parse-git-ref.outputs.release }}
latest: ${{ steps.parse-git-ref.outputs.latest }} latest: ${{ steps.parse-git-ref.outputs.latest }}
version: ${{ steps.parse-git-ref.outputs.version }}
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
ct-host: ${{ steps.matrix.outputs.ct-host }} ct-host: ${{ steps.matrix.outputs.ct-host }}
ct-docker: ${{ steps.matrix.outputs.ct-docker }} ct-docker: ${{ steps.matrix.outputs.ct-docker }}
builder: 'ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04'
permissions: builder_vsn: '5.3-2'
contents: read otp_vsn: '26.2.1-2'
elixir_vsn: '1.15.7'
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
@@ -73,22 +46,38 @@ jobs:
shell: bash shell: bash
run: | run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE" git config --global --add safe.directory "$GITHUB_WORKSPACE"
- name: Detect emqx profile - name: Detect emqx profile and version
id: parse-git-ref id: parse-git-ref
run: | run: |
JSON="$(./scripts/parse-git-ref.sh $GITHUB_REF)" JSON="$(./scripts/parse-git-ref.sh $GITHUB_REF)"
PROFILE=$(echo "$JSON" | jq -cr '.profile') PROFILE=$(echo "$JSON" | jq -cr '.profile')
RELEASE=$(echo "$JSON" | jq -cr '.release') RELEASE=$(echo "$JSON" | jq -cr '.release')
LATEST=$(echo "$JSON" | jq -cr '.latest') LATEST=$(echo "$JSON" | jq -cr '.latest')
VERSION="$(./pkg-vsn.sh "$PROFILE")"
echo "profile=$PROFILE" | tee -a $GITHUB_OUTPUT echo "profile=$PROFILE" | tee -a $GITHUB_OUTPUT
echo "release=$RELEASE" | tee -a $GITHUB_OUTPUT echo "release=$RELEASE" | tee -a $GITHUB_OUTPUT
echo "latest=$LATEST" | tee -a $GITHUB_OUTPUT echo "latest=$LATEST" | tee -a $GITHUB_OUTPUT
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
- name: Build matrix - name: Build matrix
id: matrix id: matrix
run: | run: |
MATRIX="$(./scripts/find-apps.sh --ci)" APPS="$(./scripts/find-apps.sh --ci)"
MATRIX="$(echo "${APPS}" | jq -c '
[
(.[] | select(.profile == "emqx") | . + {
builder: "5.3-2",
otp: "26.2.1-2",
elixir: "1.15.7"
}),
(.[] | select(.profile == "emqx-enterprise") | . + {
builder: "5.3-2",
otp: ["26.2.1-2"][],
elixir: "1.15.7"
})
]
')"
echo "${MATRIX}" | jq echo "${MATRIX}" | jq
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')" CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
@@ -98,44 +87,48 @@ jobs:
build_packages: build_packages:
if: needs.prepare.outputs.release == 'true' if: needs.prepare.outputs.release == 'true'
needs: needs:
- init
- prepare - prepare
uses: ./.github/workflows/build_packages.yaml uses: ./.github/workflows/build_packages.yaml
with: with:
profile: ${{ needs.prepare.outputs.profile }} profile: ${{ needs.prepare.outputs.profile }}
publish: true publish: ${{ needs.prepare.outputs.release }}
otp_vsn: ${{ needs.init.outputs.OTP_VSN }} otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.init.outputs.ELIXIR_VSN }} elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
builder_vsn: ${{ needs.init.outputs.BUILDER_VSN }} builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
secrets: inherit secrets: inherit
build_and_push_docker_images: build_and_push_docker_images:
if: needs.prepare.outputs.release == 'true' if: needs.prepare.outputs.release == 'true'
needs: needs:
- init
- prepare - prepare
uses: ./.github/workflows/build_and_push_docker_images.yaml uses: ./.github/workflows/build_and_push_docker_images.yaml
with: with:
profile: ${{ needs.prepare.outputs.profile }} profile: ${{ needs.prepare.outputs.profile }}
publish: true version: ${{ needs.prepare.outputs.version }}
publish: ${{ needs.prepare.outputs.release }}
latest: ${{ needs.prepare.outputs.latest }} latest: ${{ needs.prepare.outputs.latest }}
build_from: ${{ needs.init.outputs.BUILD_FROM }} # TODO: revert this back to needs.prepare.outputs.otp_vsn when OTP 26 bug is fixed
run_from: ${{ needs.init.outputs.RUN_FROM }} otp_vsn: 25.3.2
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
secrets: inherit secrets: inherit
build_slim_packages: build_slim_packages:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
needs: needs:
- init
- prepare - prepare
uses: ./.github/workflows/build_slim_packages.yaml uses: ./.github/workflows/build_slim_packages.yaml
with:
builder: ${{ needs.prepare.outputs.builder }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
compile: compile:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ needs.init.outputs.BUILDER }} container: ${{ needs.prepare.outputs.builder }}
needs: needs:
- init
- prepare - prepare
strategy: strategy:
matrix: matrix:
@@ -143,11 +136,8 @@ jobs:
- emqx - emqx
- emqx-enterprise - emqx-enterprise
permissions:
contents: read
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
@@ -163,7 +153,7 @@ jobs:
echo "PROFILE=${PROFILE}" | tee -a .env echo "PROFILE=${PROFILE}" | tee -a .env
echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env
zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip . zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
path: ${{ matrix.profile }}.zip path: ${{ matrix.profile }}.zip
@@ -171,23 +161,22 @@ jobs:
run_emqx_app_tests: run_emqx_app_tests:
needs: needs:
- init - prepare
- compile - compile
uses: ./.github/workflows/run_emqx_app_tests.yaml uses: ./.github/workflows/run_emqx_app_tests.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.prepare.outputs.builder }}
before_ref: ${{ github.event.before }} before_ref: ${{ github.event.before }}
after_ref: ${{ github.sha }} after_ref: ${{ github.sha }}
run_test_cases: run_test_cases:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
needs: needs:
- init
- prepare - prepare
- compile - compile
uses: ./.github/workflows/run_test_cases.yaml uses: ./.github/workflows/run_test_cases.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.prepare.outputs.builder }}
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }} ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}
ct-host: ${{ needs.prepare.outputs.ct-host }} ct-host: ${{ needs.prepare.outputs.ct-host }}
ct-docker: ${{ needs.prepare.outputs.ct-docker }} ct-docker: ${{ needs.prepare.outputs.ct-docker }}
@@ -195,20 +184,18 @@ jobs:
run_conf_tests: run_conf_tests:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
needs: needs:
- init
- prepare - prepare
- compile - compile
uses: ./.github/workflows/run_conf_tests.yaml uses: ./.github/workflows/run_conf_tests.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.prepare.outputs.builder }}
static_checks: static_checks:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
needs: needs:
- init
- prepare - prepare
- compile - compile
uses: ./.github/workflows/static_checks.yaml uses: ./.github/workflows/static_checks.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.prepare.outputs.builder }}
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }} ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}

View File

@@ -10,16 +10,22 @@ on:
profile: profile:
required: true required: true
type: string type: string
version:
required: true
type: string
latest: latest:
required: true required: true
type: string type: string
publish: publish:
required: true required: true
type: boolean type: string
build_from: otp_vsn:
required: true required: true
type: string type: string
run_from: elixir_vsn:
required: true
type: string
builder_vsn:
required: true required: true
type: string type: string
secrets: secrets:
@@ -39,6 +45,8 @@ on:
required: false required: false
type: string type: string
default: 'emqx' default: 'emqx'
version:
required: true
latest: latest:
required: false required: false
type: boolean type: boolean
@@ -47,22 +55,25 @@ on:
required: false required: false
type: boolean type: boolean
default: false default: false
build_from: otp_vsn:
required: false required: false
type: string type: string
default: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-debian12 default: '25.3.2-2'
run_from: elixir_vsn:
default: public.ecr.aws/debian/debian:stable-20240612-slim required: false
type: string
default: '1.15.7'
builder_vsn:
required: false
type: string
default: '5.3-2'
permissions: permissions:
contents: read contents: read
jobs: jobs:
build: docker:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.arch)) || 'ubuntu-22.04' }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.build_from }}
outputs:
PKG_VSN: ${{ steps.build.outputs.PKG_VSN }}
strategy: strategy:
fail-fast: false fail-fast: false
@@ -70,141 +81,54 @@
profile: profile:
- ${{ inputs.profile }} - ${{ inputs.profile }}
- ${{ inputs.profile }}-elixir - ${{ inputs.profile }}-elixir
arch: registry:
- x64 - 'docker.io'
- arm64 - 'public.ecr.aws'
exclude:
- profile: emqx-enterprise
registry: 'public.ecr.aws'
- profile: emqx-enterprise-elixir
registry: 'public.ecr.aws'
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
- run: git config --global --add safe.directory "$PWD" fetch-depth: 0
- name: build release tarball
id: build
run: |
make ${{ matrix.profile }}-tgz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
with:
name: "${{ matrix.profile }}-${{ matrix.arch }}.tar.gz"
path: "_packages/emqx*/emqx-*.tar.gz"
retention-days: 7
overwrite: true
if-no-files-found: error
docker: - uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} - uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
needs:
- build
defaults:
run:
shell: bash
strategy: - name: Login to hub.docker.com
fail-fast: false uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
matrix: if: matrix.registry == 'docker.io'
profile: with:
- ["${{ inputs.profile }}", "${{ inputs.profile == 'emqx' && 'docker.io,public.ecr.aws' || 'docker.io' }}"] username: ${{ secrets.DOCKER_HUB_USER }}
- ["${{ inputs.profile }}-elixir", "${{ inputs.profile == 'emqx' && 'docker.io,public.ecr.aws' || 'docker.io' }}"] password: ${{ secrets.DOCKER_HUB_TOKEN }}
steps: - name: Login to AWS ECR
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with: if: matrix.registry == 'public.ecr.aws'
ref: ${{ github.event.inputs.ref }} with:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 registry: public.ecr.aws
with: username: ${{ secrets.AWS_ACCESS_KEY_ID }}
pattern: "${{ matrix.profile[0] }}-*.tar.gz" password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
path: _packages ecr: true
merge-multiple: true
- name: Move artifacts to root directory - name: Build docker image
env: env:
PROFILE: ${{ inputs.profile }} PROFILE: ${{ matrix.profile }}
run: | DOCKER_REGISTRY: ${{ matrix.registry }}
ls -lR _packages/$PROFILE DOCKER_ORG: ${{ github.repository_owner }}
mv _packages/$PROFILE/*.tar.gz ./ DOCKER_LATEST: ${{ inputs.latest }}
DOCKER_PUSH: ${{ inputs.publish == 'true' || inputs.publish || github.repository_owner != 'emqx' }}
- name: Enable containerd image store on Docker Engine DOCKER_BUILD_NOCACHE: true
run: | DOCKER_PLATFORMS: linux/amd64,linux/arm64
echo "$(sudo cat /etc/docker/daemon.json | jq '. += {"features": {"containerd-snapshotter": true}}')" > daemon.json EMQX_RUNNER: 'debian:11-slim'
sudo mv daemon.json /etc/docker/daemon.json EMQX_DOCKERFILE: 'deploy/docker/Dockerfile'
sudo systemctl restart docker PKG_VSN: ${{ inputs.version }}
EMQX_BUILDER_VERSION: ${{ inputs.builder_vsn }}
- uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0 EMQX_BUILDER_OTP: ${{ inputs.otp_vsn }}
- uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 EMQX_BUILDER_ELIXIR: ${{ inputs.elixir_vsn }}
run: |
- name: Login to hub.docker.com ./build ${PROFILE} docker
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
if: inputs.publish && contains(matrix.profile[1], 'docker.io')
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Login to AWS ECR
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
if: inputs.publish && contains(matrix.profile[1], 'public.ecr.aws')
with:
registry: public.ecr.aws
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ecr: true
- name: Build docker image for smoke test
env:
PROFILE: ${{ matrix.profile[0] }}
DOCKER_REGISTRY: ${{ matrix.profile[1] }}
DOCKER_ORG: ${{ github.repository_owner }}
DOCKER_LATEST: ${{ inputs.latest }}
DOCKER_PUSH: false
DOCKER_BUILD_NOCACHE: true
BUILD_FROM: ${{ inputs.build_from }}
RUN_FROM: ${{ inputs.run_from }}
PKG_VSN: ${{ needs.build.outputs.PKG_VSN }}
EMQX_SOURCE_TYPE: tgz
run: |
./build ${PROFILE} docker
echo "Built tags:"
echo "==========="
cat .emqx_docker_image_tags
echo "==========="
echo "_EMQX_DOCKER_IMAGE_TAG=$(head -n 1 .emqx_docker_image_tags)" >> $GITHUB_ENV
- name: smoke test
timeout-minutes: 1
run: |
for tag in $(cat .emqx_docker_image_tags); do
CID=$(docker run -d -p 18083:18083 $tag)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker rm -f $CID
done
- name: dashboard tests
working-directory: ./scripts/ui-tests
timeout-minutes: 5
run: |
set -eu
docker compose up --abort-on-container-exit --exit-code-from selenium
docker compose rm -fsv
- name: test node_dump
run: |
CID=$(docker run -d -P $_EMQX_DOCKER_IMAGE_TAG)
docker exec -t -u root -w /root $CID bash -c 'apt-get -y update && apt-get -y install net-tools'
docker exec -t -u root $CID node_dump
docker rm -f $CID
- name: Build and push docker image
if: inputs.publish || github.repository_owner != 'emqx'
env:
PROFILE: ${{ matrix.profile[0] }}
DOCKER_REGISTRY: ${{ matrix.profile[1] }}
DOCKER_ORG: ${{ github.repository_owner }}
DOCKER_LATEST: ${{ inputs.latest }}
DOCKER_PUSH: true
DOCKER_BUILD_NOCACHE: false
DOCKER_PLATFORMS: linux/amd64,linux/arm64
DOCKER_LOAD: false
BUILD_FROM: ${{ inputs.build_from }}
RUN_FROM: ${{ inputs.run_from }}
PKG_VSN: ${{ needs.build.outputs.PKG_VSN }}
EMQX_SOURCE_TYPE: tgz
run: |
./build ${PROFILE} docker

View File

@@ -6,6 +6,19 @@ concurrency:
on: on:
workflow_call: workflow_call:
inputs:
otp_vsn:
required: true
type: string
elixir_vsn:
required: true
type: string
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions: permissions:
contents: read contents: read
@@ -15,6 +28,9 @@ jobs:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
env: env:
EMQX_NAME: ${{ matrix.profile }} EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ startsWith(matrix.profile, 'emqx-enterprise') && inputs.version-emqx-enterprise || inputs.version-emqx }}
OTP_VSN: ${{ inputs.otp_vsn }}
ELIXIR_VSN: ${{ inputs.elixir_vsn }}
strategy: strategy:
fail-fast: false fail-fast: false
@@ -26,32 +42,22 @@ jobs:
- emqx-enterprise-elixir - emqx-enterprise-elixir
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- name: build and export to Docker - name: build and export to Docker
id: build id: build
run: | run: |
make ${EMQX_NAME}-docker make ${EMQX_NAME}-docker
echo "_EMQX_DOCKER_IMAGE_TAG=$(head -n 1 .emqx_docker_image_tags)" >> $GITHUB_ENV echo "EMQX_IMAGE_TAG=$(cat .docker_image_tag)" >> $GITHUB_ENV
- name: smoke test - name: smoke test
run: | run: |
CID=$(docker run -d --rm -P $_EMQX_DOCKER_IMAGE_TAG) CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT || { ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker logs $CID
exit 1
}
docker stop $CID docker stop $CID
- name: export docker image - name: export docker image
if: always()
run: | run: |
docker save $_EMQX_DOCKER_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz docker save $EMQX_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: "${{ env.EMQX_NAME }}-docker" name: "${{ env.EMQX_NAME }}-docker"
path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz" path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz"

View File

@@ -12,7 +12,7 @@ on:
type: string type: string
publish: publish:
required: true required: true
type: boolean type: string
otp_vsn: otp_vsn:
required: true required: true
type: string type: string
@@ -46,8 +46,7 @@ on:
ref: ref:
required: false required: false
profile: profile:
required: true required: false
default: 'emqx'
publish: publish:
required: false required: false
type: boolean type: boolean
@@ -55,7 +54,7 @@ on:
otp_vsn: otp_vsn:
required: false required: false
type: string type: string
default: '26.2.5-3' default: '26.2.1-2'
elixir_vsn: elixir_vsn:
required: false required: false
type: string type: string
@@ -63,7 +62,7 @@ on:
builder_vsn: builder_vsn:
required: false required: false
type: string type: string
default: '5.3-9' default: '5.3-2'
permissions: permissions:
contents: read contents: read
@@ -75,14 +74,15 @@ jobs:
matrix: matrix:
profile: profile:
- ${{ inputs.profile }} - ${{ inputs.profile }}
os:
- macos-13
- macos-14
otp: otp:
- ${{ inputs.otp_vsn }} - ${{ inputs.otp_vsn }}
os:
- macos-12
- macos-12-arm64
- macos-13
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
@@ -95,94 +95,107 @@ jobs:
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }} apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }} apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }} apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: success() if: success()
with: with:
name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.otp }} name: ${{ matrix.profile }}-${{ matrix.otp }}-${{ matrix.os }}
path: _packages/${{ matrix.profile }}/ path: _packages/${{ matrix.profile }}/
retention-days: 7 retention-days: 7
compression-level: 0
linux: linux:
runs-on: [self-hosted, ephemeral, linux, "${{ matrix.arch == 'arm64' && 'arm64' || 'x64' }}"] runs-on: [self-hosted, ephemeral, linux, "${{ matrix.arch }}"]
# always run in builder container because the host might have the wrong OTP version etc.
# otherwise buildx.sh does not run docker if arch and os matches the target arch and os.
container:
image: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}"
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
profile: profile:
- ${{ inputs.profile }} - ${{ inputs.profile }}
otp:
- ${{ inputs.otp_vsn }}
arch:
- x64
- arm64
os: os:
- ubuntu24.04
- ubuntu22.04 - ubuntu22.04
- ubuntu20.04 - ubuntu20.04
- ubuntu18.04
- debian12 - debian12
- debian11 - debian11
- debian10 - debian10
- el9 - el9
- el8 - el8
- el7
- amzn2 - amzn2
- amzn2023 - amzn2023
arch:
- amd64
- arm64
with_elixir:
- 'no'
otp:
- ${{ inputs.otp_vsn }}
builder: builder:
- ${{ inputs.builder_vsn }} - ${{ inputs.builder_vsn }}
elixir: elixir:
- ${{ inputs.elixir_vsn }} - ${{ inputs.elixir_vsn }}
with_elixir:
- 'no'
include: include:
- profile: ${{ inputs.profile }} - profile: emqx
os: ubuntu22.04
arch: amd64
with_elixir: 'yes'
otp: ${{ inputs.otp_vsn }} otp: ${{ inputs.otp_vsn }}
arch: x64
os: ubuntu22.04
builder: ${{ inputs.builder_vsn }} builder: ${{ inputs.builder_vsn }}
elixir: ${{ inputs.elixir_vsn }} elixir: ${{ inputs.elixir_vsn }}
with_elixir: 'yes'
defaults: defaults:
run: run:
shell: bash shell: bash
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # v3.6.0
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
- name: build tgz
- name: fix workdir
run: |
set -eu
git config --global --add safe.directory "$GITHUB_WORKSPACE"
# Align path for CMake caches
if [ ! "$PWD" = "/emqx" ]; then
ln -s $PWD /emqx
cd /emqx
fi
echo "pwd is $PWD"
- name: build emqx packages
env: env:
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }}
OS: ${{ matrix.os }}
IS_ELIXIR: ${{ matrix.with_elixir }} IS_ELIXIR: ${{ matrix.with_elixir }}
BUILDER: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}" ACLOCAL_PATH: "/usr/share/aclocal:/usr/local/share/aclocal"
BUILDER_SYSTEM: force_docker
run: | run: |
./scripts/buildx.sh \ set -eu
--profile $PROFILE \ if [ "${IS_ELIXIR:-}" == 'yes' ]; then
--arch $ARCH \ make "${PROFILE}-elixir-tgz"
--builder $BUILDER \ else
--elixir $IS_ELIXIR \ make "${PROFILE}-tgz"
--pkgtype tgz make "${PROFILE}-pkg"
- name: build pkg fi
if: matrix.with_elixir == 'no' - name: test emqx packages
env: env:
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }}
OS: ${{ matrix.os }}
IS_ELIXIR: ${{ matrix.with_elixir }} IS_ELIXIR: ${{ matrix.with_elixir }}
BUILDER: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}"
BUILDER_SYSTEM: force_docker
run: | run: |
./scripts/buildx.sh \ set -eu
--profile $PROFILE \ if [ "${IS_ELIXIR:-}" == 'yes' ]; then
--arch $ARCH \ ./scripts/pkg-tests.sh "${PROFILE}-elixir-tgz"
--builder $BUILDER \ else
--elixir $IS_ELIXIR \ ./scripts/pkg-tests.sh "${PROFILE}-tgz"
--pkgtype pkg ./scripts/pkg-tests.sh "${PROFILE}-pkg"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 fi
- uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
with: with:
name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.arch }}${{ matrix.with_elixir == 'yes' && '-elixir' || '' }}-${{ matrix.builder }}-${{ matrix.otp }}-${{ matrix.elixir }} name: ${{ matrix.profile }}-${{ matrix.otp }}-${{ matrix.arch }}-${{ matrix.os }}-${{ matrix.with_elixir == 'yes' && 'elixir' || 'erlang' }}
path: _packages/${{ matrix.profile }}/ path: _packages/${{ matrix.profile }}/
retention-days: 7 retention-days: 7
@@ -191,19 +204,31 @@ jobs:
needs: needs:
- mac - mac
- linux - linux
if: inputs.publish if: inputs.publish == 'true' || inputs.publish
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
profile: profile:
- ${{ inputs.profile }} - ${{ inputs.profile }}
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
pattern: "${{ matrix.profile }}-*" pattern: ${{ matrix.profile }}-*
path: packages/${{ matrix.profile }} path: packages/${{ matrix.profile }}
merge-multiple: true merge-multiple: true
- uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 - name: install dos2unix
run: sudo apt-get update -y && sudo apt install -y dos2unix
- name: get packages
run: |
set -eu
cd packages/${{ matrix.profile }}
# fix the .sha256 file format
for var in $(ls | grep emqx | grep -v sha256); do
dos2unix $var.sha256
echo "$(cat $var.sha256) $var" | sha256sum -c || exit 1
done
cd -
- uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
with: with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -211,9 +236,6 @@ jobs:
- name: upload to aws s3 - name: upload to aws s3
env: env:
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
REF_NAME: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.ref || github.ref_name }}
AWS_S3_BUCKET: ${{ secrets.AWS_S3_BUCKET }}
AWS_CLOUDFRONT_ID: ${{ secrets.AWS_CLOUDFRONT_ID }}
run: | run: |
set -eu set -eu
if [ $PROFILE = 'emqx' ]; then if [ $PROFILE = 'emqx' ]; then
@ -224,5 +246,5 @@ jobs:
echo "unknown profile $PROFILE" echo "unknown profile $PROFILE"
exit 1 exit 1
fi fi
aws s3 cp --recursive packages/$PROFILE s3://$AWS_S3_BUCKET/$s3dir/$REF_NAME aws s3 cp --recursive packages/$PROFILE s3://${{ secrets.AWS_S3_BUCKET }}/$s3dir/${{ github.ref_name }}
aws cloudfront create-invalidation --distribution-id "$AWS_CLOUDFRONT_ID" --paths "/$s3dir/$REF_NAME/*" aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CLOUDFRONT_ID }} --paths "/$s3dir/${{ github.ref_name }}/*"
View File
@ -16,52 +16,64 @@ jobs:
linux: linux:
if: github.repository_owner == 'emqx' if: github.repository_owner == 'emqx'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container:
image: "ghcr.io/emqx/emqx-builder/${{ matrix.profile[2] }}-${{ matrix.os }}"
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
profile: profile:
- ['emqx', 'master'] - ['emqx', 'master', '5.3-2:1.15.7-26.2.1-2']
- ['emqx', 'release-57'] - ['emqx-enterprise', 'release-55', '5.3-2:1.15.7-25.3.2-2']
- ['emqx', 'release-58']
os: os:
- debian10
- ubuntu22.04 - ubuntu22.04
- amzn2023 - amzn2023
env:
PROFILE: ${{ matrix.profile[0] }}
OS: ${{ matrix.os }}
BUILDER_SYSTEM: force_docker
defaults: defaults:
run: run:
shell: bash shell: bash
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ matrix.profile[1] }} ref: ${{ matrix.profile[1] }}
fetch-depth: 0 fetch-depth: 0
- name: Set up environment
id: env - name: fix workdir
run: | run: |
source env.sh set -eu
BUILDER="ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VSN}:${ELIXIR_VSN}-${OTP_VSN}-${OS}" git config --global --add safe.directory "$GITHUB_WORKSPACE"
echo "BUILDER=$BUILDER" >> "$GITHUB_ENV" # Align path for CMake caches
- name: build tgz if [ ! "$PWD" = "/emqx" ]; then
ln -s $PWD /emqx
cd /emqx
fi
echo "pwd is $PWD"
- name: build emqx packages
env:
PROFILE: ${{ matrix.profile[0] }}
ACLOCAL_PATH: "/usr/share/aclocal:/usr/local/share/aclocal"
run: | run: |
./scripts/buildx.sh --profile "$PROFILE" --pkgtype tgz --builder "$BUILDER" set -eu
- name: build pkg make "${PROFILE}-tgz"
make "${PROFILE}-pkg"
- name: test emqx packages
env:
PROFILE: ${{ matrix.profile[0] }}
run: | run: |
./scripts/buildx.sh --profile "$PROFILE" --pkgtype pkg --builder "$BUILDER" set -eu
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 ./scripts/pkg-tests.sh "${PROFILE}-tgz"
./scripts/pkg-tests.sh "${PROFILE}-pkg"
- uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: success() if: success()
with: with:
name: ${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.os }} name: ${{ matrix.profile[0] }}-${{ matrix.os }}
path: _packages/${{ matrix.profile[0] }}/ path: _packages/${{ matrix.profile[0] }}/
retention-days: 7 retention-days: 7
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
if: failure() if: failure()
env: env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@ -80,36 +92,33 @@ jobs:
- emqx - emqx
branch: branch:
- master - master
otp:
- 26.2.1-2
os: os:
- macos-14-arm64 - macos-12-arm64
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ matrix.branch }} ref: ${{ matrix.branch }}
fetch-depth: 0 fetch-depth: 0
- name: Set up environment
id: env
run: |
source env.sh
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
- uses: ./.github/actions/package-macos - uses: ./.github/actions/package-macos
with: with:
profile: ${{ matrix.profile }} profile: ${{ matrix.profile }}
otp: ${{ steps.env.outputs.OTP_VSN }} otp: ${{ matrix.otp }}
os: ${{ matrix.os }} os: ${{ matrix.os }}
apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }} apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }}
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }} apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }} apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }} apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: success() if: success()
with: with:
name: ${{ matrix.profile }}-${{ matrix.os }} name: ${{ matrix.profile }}-${{ matrix.os }}
path: _packages/${{ matrix.profile }}/ path: _packages/${{ matrix.profile }}/
retention-days: 7 retention-days: 7
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
if: failure() if: failure()
env: env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
View File
@ -6,50 +6,97 @@ concurrency:
on: on:
workflow_call: workflow_call:
inputs:
builder:
required: true
type: string
builder_vsn:
required: true
type: string
otp_vsn:
required: true
type: string
elixir_vsn:
required: true
type: string
workflow_dispatch: workflow_dispatch:
inputs: inputs:
ref: ref:
required: false required: false
builder:
required: false
type: string
default: 'ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04'
builder_vsn:
required: false
type: string
default: '5.3-2'
otp_vsn:
required: false
type: string
default: '26.2.1-2'
elixir_vsn:
required: false
type: string
default: '1.15.7'
permissions: permissions:
contents: read contents: read
jobs: jobs:
linux: linux:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[2])) || 'ubuntu-22.04' }} runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[4])) || 'ubuntu-22.04' }}
env: env:
PROFILE: ${{ matrix.profile[0] }} EMQX_NAME: ${{ matrix.profile[0] }}
ELIXIR: ${{ matrix.profile[1] == 'elixir' && 'yes' || 'no' }}
ARCH: ${{ matrix.profile[2] == 'x64' && 'amd64' || 'arm64' }}
BUILDER_SYSTEM: force_docker
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
profile: profile:
- ["emqx", "elixir", "x64"] - ["emqx", "26.2.1-2", "ubuntu22.04", "elixir", "x64"]
- ["emqx", "elixir", "arm64"] - ["emqx", "26.2.1-2", "ubuntu22.04", "elixir", "arm64"]
- ["emqx-enterprise", "erlang", "x64"] - ["emqx-enterprise", "26.2.1-2", "ubuntu22.04", "erlang", "x64"]
container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
- name: build tgz - name: Work around https://github.com/actions/checkout/issues/766
run: | run: |
./scripts/buildx.sh --profile $PROFILE --pkgtype tgz --elixir $ELIXIR --arch $ARCH git config --global --add safe.directory "$GITHUB_WORKSPACE"
- name: build pkg echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
- name: build and test tgz package
if: matrix.profile[3] == 'erlang'
run: | run: |
./scripts/buildx.sh --profile $PROFILE --pkgtype pkg --elixir $ELIXIR --arch $ARCH make ${EMQX_NAME}-tgz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 ./scripts/pkg-tests.sh ${EMQX_NAME}-tgz
- name: build and test deb/rpm packages
if: matrix.profile[3] == 'erlang'
run: |
make ${EMQX_NAME}-pkg
./scripts/pkg-tests.sh ${EMQX_NAME}-pkg
- name: build and test tgz package (Elixir)
if: matrix.profile[3] == 'elixir'
run: |
make ${EMQX_NAME}-elixir-tgz
./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-tgz
- name: build and test deb/rpm packages (Elixir)
if: matrix.profile[3] == 'elixir'
run: |
make ${EMQX_NAME}-elixir-pkg
./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-pkg
- uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}-${{ matrix.profile[3] }}-${{ matrix.profile[4] }}"
path: _packages/${{ matrix.profile[0] }}/* path: _packages/${{ matrix.profile[0] }}/*
retention-days: 7 retention-days: 7
compression-level: 0 compression-level: 0
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}-${{ matrix.profile[3] }}-${{ matrix.profile[4] }}"
path: | path: |
scripts/spellcheck scripts/spellcheck
_build/docgen/${{ matrix.profile[0] }}/schema-en.json _build/docgen/${{ matrix.profile[0] }}/schema-en.json
@ -61,30 +108,27 @@ jobs:
matrix: matrix:
profile: profile:
- emqx - emqx
otp:
- ${{ inputs.otp_vsn }}
os: os:
- macos-14-arm64 - macos-12-arm64
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
env: env:
EMQX_NAME: ${{ matrix.profile }} EMQX_NAME: ${{ matrix.profile }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
- uses: ./.github/actions/package-macos - uses: ./.github/actions/package-macos
with: with:
profile: ${{ matrix.profile }} profile: ${{ matrix.profile }}
otp: ${{ steps.env.outputs.OTP_VSN }} otp: ${{ matrix.otp }}
os: ${{ matrix.os }} os: ${{ matrix.os }}
apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }} apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }}
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }} apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }} apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }} apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: ${{ matrix.os }} name: ${{ matrix.os }}
path: _packages/**/* path: _packages/**/*
View File
@ -14,33 +14,32 @@ jobs:
check_deps_integrity: check_deps_integrity:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }} container: ${{ inputs.builder }}
env:
MIX_ENV: ${{ matrix.profile }}
PROFILE: ${{ matrix.profile }}
strategy:
matrix:
profile:
- emqx-enterprise
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE" - run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- run: make ensure-rebar3 - run: make ensure-rebar3
- run: ./scripts/check-deps-integrity.escript - run: ./scripts/check-deps-integrity.escript
- name: Setup mix - name: Setup mix
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
run: | run: |
# mix local.hex --force mix local.hex --force
mix local.hex 2.0.6 --force
mix local.rebar --force mix local.rebar --force
mix deps.get mix deps.get
- name: print mix dependency tree
run: mix deps.tree
- run: ./scripts/check-elixir-deps-discrepancies.exs - run: ./scripts/check-elixir-deps-discrepancies.exs
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
- run: ./scripts/check-elixir-applications.exs - run: ./scripts/check-elixir-applications.exs
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
- name: Upload produced lock files - name: Upload produced lock files
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: failure() if: failure()
with: with:
name: ${{ matrix.profile }}_produced_lock_files name: produced_lock_files
path: | path: |
mix.lock mix.lock
rebar.lock rebar.lock
View File
@ -10,31 +10,36 @@ permissions:
jobs: jobs:
analyze: analyze:
if: github.repository == 'emqx/emqx'
name: Analyze name: Analyze
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
timeout-minutes: 360 timeout-minutes: 360
permissions: permissions:
actions: read actions: read
security-events: write security-events: write
container:
image: ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
branch: branch:
- master - master
- release-57 - release-55
- release-58
language: language:
- cpp - cpp
- python - python
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ matrix.branch }} ref: ${{ matrix.branch }}
- name: Ensure git safe dir
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
make ensure-rebar3
- name: Initialize CodeQL - name: Initialize CodeQL
uses: github/codeql-action/init@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5 uses: github/codeql-action/init@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5
with: with:
@ -45,7 +50,14 @@ jobs:
env: env:
PROFILE: emqx-enterprise PROFILE: emqx-enterprise
run: | run: |
./scripts/buildx.sh --profile emqx-enterprise --pkgtype rel make emqx-enterprise-compile
- name: Fetch deps
if: matrix.language == 'python'
env:
PROFILE: emqx-enterprise
run: |
make deps-emqx-enterprise
- name: Perform CodeQL Analysis - name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5 uses: github/codeql-action/analyze@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5
View File
@ -7,6 +7,9 @@ on:
# run hourly # run hourly
- cron: "0 * * * *" - cron: "0 * * * *"
workflow_dispatch: workflow_dispatch:
inputs:
ref:
required: false
permissions: permissions:
contents: read contents: read
@ -14,30 +17,22 @@ permissions:
jobs: jobs:
rerun-failed-jobs: rerun-failed-jobs:
if: github.repository_owner == 'emqx' if: github.repository_owner == 'emqx'
runs-on: ubuntu-latest runs-on: ubuntu-22.04
permissions: permissions:
checks: read checks: read
actions: write actions: write
strategy:
fail-fast: false
matrix:
ref:
- master
- release-57
- release-58
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ matrix.ref }} ref: ${{ github.event.inputs.ref || 'master' }}
- name: run script - name: run script
shell: bash shell: bash
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPO: ${{ github.repository }}
run: | run: |
gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/${GITHUB_REPO}/actions/runs > runs.json gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/emqx/emqx/actions/runs > runs.json
for id in $(jq -r '.workflow_runs[] | select((."conclusion" == "failure") and (."name" != "Keep master green") and .run_attempt < 3) | .id' runs.json); do for id in $(jq -r '.workflow_runs[] | select((."conclusion" != "success") and .run_attempt < 3) | .id' runs.json); do
echo "rerun https://github.com/${GITHUB_REPO}/actions/runs/$id" echo "rerun https://github.com/emqx/emqx/actions/runs/$id"
gh api --method POST /repos/${GITHUB_REPO}/actions/runs/$id/rerun-failed-jobs || true gh api --method POST /repos/emqx/emqx/actions/runs/$id/rerun-failed-jobs
done done
View File
@ -26,13 +26,13 @@ jobs:
prepare: prepare:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.repository_owner == 'emqx' if: github.repository_owner == 'emqx'
container: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu20.04
outputs: outputs:
BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }} BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }}
PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }} PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
@ -52,7 +52,7 @@ jobs:
id: package_file id: package_file
run: | run: |
echo "PACKAGE_FILE=$(find _packages/emqx -name 'emqx-*.deb' | head -n 1 | xargs basename)" >> $GITHUB_OUTPUT echo "PACKAGE_FILE=$(find _packages/emqx -name 'emqx-*.deb' | head -n 1 | xargs basename)" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: _packages/emqx/${{ steps.package_file.outputs.PACKAGE_FILE }} path: _packages/emqx/${{ steps.package_file.outputs.PACKAGE_FILE }}
@ -66,23 +66,23 @@ jobs:
steps: steps:
- name: Configure AWS Credentials - name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
with: with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1 aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test - name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/tf-emqx-performance-test repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test path: tf-emqx-performance-test
ref: v0.2.3 ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: tf-emqx-performance-test/ path: tf-emqx-performance-test/
- name: Setup Terraform - name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1 uses: hashicorp/setup-terraform@a1502cd9e758c50496cc9ac5308c4843bcd56d36 # v3.0.0
with: with:
terraform_wrapper: false terraform_wrapper: false
- name: run scenario - name: run scenario
@ -105,7 +105,7 @@ jobs:
terraform destroy -auto-approve terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id . aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
with: with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json" payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy - name: terraform destroy
@ -113,13 +113,13 @@ jobs:
working-directory: ./tf-emqx-performance-test working-directory: ./tf-emqx-performance-test
run: | run: |
terraform destroy -auto-approve terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: success() if: success()
with: with:
name: metrics name: metrics
path: | path: |
"./tf-emqx-performance-test/*.tar.gz" "./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: failure() if: failure()
with: with:
name: terraform name: terraform
@ -137,23 +137,23 @@ jobs:
steps: steps:
- name: Configure AWS Credentials - name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
with: with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1 aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test - name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/tf-emqx-performance-test repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test path: tf-emqx-performance-test
ref: v0.2.3 ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: tf-emqx-performance-test/ path: tf-emqx-performance-test/
- name: Setup Terraform - name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1 uses: hashicorp/setup-terraform@a1502cd9e758c50496cc9ac5308c4843bcd56d36 # v3.0.0
with: with:
terraform_wrapper: false terraform_wrapper: false
- name: run scenario - name: run scenario
@ -176,7 +176,7 @@ jobs:
terraform destroy -auto-approve terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id . aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
with: with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json" payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy - name: terraform destroy
@ -184,13 +184,13 @@ jobs:
working-directory: ./tf-emqx-performance-test working-directory: ./tf-emqx-performance-test
run: | run: |
terraform destroy -auto-approve terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: success() if: success()
with: with:
name: metrics name: metrics
path: | path: |
"./tf-emqx-performance-test/*.tar.gz" "./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: failure() if: failure()
with: with:
name: terraform name: terraform
@ -209,23 +209,23 @@ jobs:
steps: steps:
- name: Configure AWS Credentials - name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
with: with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1 aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test - name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/tf-emqx-performance-test repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test path: tf-emqx-performance-test
ref: v0.2.3 ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: tf-emqx-performance-test/ path: tf-emqx-performance-test/
- name: Setup Terraform - name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1 uses: hashicorp/setup-terraform@a1502cd9e758c50496cc9ac5308c4843bcd56d36 # v3.0.0
with: with:
terraform_wrapper: false terraform_wrapper: false
- name: run scenario - name: run scenario
@ -249,7 +249,7 @@ jobs:
terraform destroy -auto-approve terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id . aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
with: with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json" payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy - name: terraform destroy
@ -257,13 +257,13 @@ jobs:
working-directory: ./tf-emqx-performance-test working-directory: ./tf-emqx-performance-test
run: | run: |
terraform destroy -auto-approve terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: success() if: success()
with: with:
name: metrics name: metrics
path: | path: |
"./tf-emqx-performance-test/*.tar.gz" "./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: failure() if: failure()
with: with:
name: terraform name: terraform
@ -283,23 +283,23 @@ jobs:
steps: steps:
- name: Configure AWS Credentials - name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
with: with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_PERF_TEST }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1 aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test - name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/tf-emqx-performance-test repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test path: tf-emqx-performance-test
ref: v0.2.3 ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: tf-emqx-performance-test/ path: tf-emqx-performance-test/
- name: Setup Terraform - name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1 uses: hashicorp/setup-terraform@a1502cd9e758c50496cc9ac5308c4843bcd56d36 # v3.0.0
with: with:
terraform_wrapper: false terraform_wrapper: false
- name: run scenario - name: run scenario
@ -322,7 +322,7 @@ jobs:
terraform destroy -auto-approve terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id . aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
with: with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json" payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy - name: terraform destroy
@ -330,13 +330,13 @@ jobs:
working-directory: ./tf-emqx-performance-test working-directory: ./tf-emqx-performance-test
run: | run: |
terraform destroy -auto-approve terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: success() if: success()
with: with:
name: metrics name: metrics
path: | path: |
"./tf-emqx-performance-test/*.tar.gz" "./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: failure() if: failure()
with: with:
name: terraform name: terraform
View File
@ -8,7 +8,7 @@ on:
tag: tag:
type: string type: string
required: true required: true
publish_release_artifacts: publish_release_artefacts:
type: boolean type: boolean
required: true required: true
default: false default: false
@ -31,12 +31,12 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
steps: steps:
- uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
with: with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.tag }} ref: ${{ github.event.inputs.tag }}
- name: Detect profile - name: Detect profile
@ -67,15 +67,14 @@ jobs:
BUCKET=${{ secrets.AWS_S3_BUCKET }} BUCKET=${{ secrets.AWS_S3_BUCKET }}
OUTPUT_DIR=${{ steps.profile.outputs.s3dir }} OUTPUT_DIR=${{ steps.profile.outputs.s3dir }}
aws s3 cp --recursive s3://$BUCKET/$OUTPUT_DIR/${{ env.ref_name }} packages aws s3 cp --recursive s3://$BUCKET/$OUTPUT_DIR/${{ env.ref_name }} packages
- uses: emqx/upload-assets@974befcf0e72a1811360a81c798855efb66b0551 # 0.5.2 - uses: emqx/upload-assets@8d2083b4dbe3151b0b735572eaa153b6acb647fe # 0.5.0
env: env:
GITHUB_TOKEN: ${{ github.token }} GITHUB_TOKEN: ${{ github.token }}
with: with:
asset_paths: '["packages/*"]' asset_paths: '["packages/*"]'
tag_name: "${{ env.ref_name }}" tag_name: "${{ env.ref_name }}"
skip_existing: true
- name: update to emqx.io - name: update to emqx.io
if: github.event_name == 'release' || inputs.publish_release_artifacts if: startsWith(env.ref_name, 'v') && ((github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artefacts)
run: | run: |
set -eux set -eux
curl -w %{http_code} \ curl -w %{http_code} \
@ -86,7 +85,7 @@ jobs:
-d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.ref_name }}\" }" \ -d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.ref_name }}\" }" \
${{ secrets.EMQX_IO_RELEASE_API }} ${{ secrets.EMQX_IO_RELEASE_API }}
- name: Push to packagecloud.io - name: Push to packagecloud.io
if: (github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artifacts if: (github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artefacts
env: env:
PROFILE: ${{ steps.profile.outputs.profile }} PROFILE: ${{ steps.profile.outputs.profile }}
VERSION: ${{ steps.profile.outputs.version }} VERSION: ${{ steps.profile.outputs.version }}
@ -106,12 +105,14 @@ jobs:
push "debian/bullseye" "packages/$PROFILE-$VERSION-debian11-arm64.deb" push "debian/bullseye" "packages/$PROFILE-$VERSION-debian11-arm64.deb"
push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-amd64.deb" push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-amd64.deb"
push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-arm64.deb" push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-arm64.deb"
push "ubuntu/bionic" "packages/$PROFILE-$VERSION-ubuntu18.04-amd64.deb"
push "ubuntu/bionic" "packages/$PROFILE-$VERSION-ubuntu18.04-arm64.deb"
push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-amd64.deb" push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-amd64.deb"
push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-arm64.deb" push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-arm64.deb"
push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-amd64.deb" push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-amd64.deb"
push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-arm64.deb" push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-arm64.deb"
push "ubuntu/noble" "packages/$PROFILE-$VERSION-ubuntu24.04-amd64.deb" push "el/7" "packages/$PROFILE-$VERSION-el7-amd64.rpm"
push "ubuntu/noble" "packages/$PROFILE-$VERSION-ubuntu24.04-arm64.deb" push "el/7" "packages/$PROFILE-$VERSION-el7-arm64.rpm"
push "el/8" "packages/$PROFILE-$VERSION-el8-amd64.rpm" push "el/8" "packages/$PROFILE-$VERSION-el8-amd64.rpm"
push "el/8" "packages/$PROFILE-$VERSION-el8-arm64.rpm" push "el/8" "packages/$PROFILE-$VERSION-el8-arm64.rpm"
push "el/9" "packages/$PROFILE-$VERSION-el9-amd64.rpm" push "el/9" "packages/$PROFILE-$VERSION-el9-amd64.rpm"
@ -131,7 +132,7 @@ jobs:
checks: write checks: write
actions: write actions: write
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: trigger re-run of app versions check on open PRs - name: trigger re-run of app versions check on open PRs
shell: bash shell: bash
env: env:
View File
@ -25,7 +25,7 @@ jobs:
- emqx - emqx
- emqx-enterprise - emqx-enterprise
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
@ -39,10 +39,10 @@ jobs:
- name: print erlang log - name: print erlang log
if: failure() if: failure()
run: | run: |
cat _build/${{ matrix.profile }}/rel/emqx/log/erlang.log.* cat _build/${{ matrix.profile }}/rel/emqx/logs/erlang.log.*
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: failure() if: failure()
with: with:
name: conftest-logs-${{ matrix.profile }} name: conftest-logs-${{ matrix.profile }}
path: _build/${{ matrix.profile }}/rel/emqx/log path: _build/${{ matrix.profile }}/rel/emqx/logs
retention-days: 7 retention-days: 7
View File
@ -6,6 +6,13 @@ concurrency:
on: on:
workflow_call: workflow_call:
inputs:
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions: permissions:
contents: read contents: read
@ -25,24 +32,19 @@ jobs:
env: env:
EMQX_NAME: ${{ matrix.profile[0] }} EMQX_NAME: ${{ matrix.profile[0] }}
PKG_VSN: ${{ matrix.profile[0] == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }} EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with: with:
name: ${{ env.EMQX_NAME }}-docker name: ${{ env.EMQX_NAME }}-docker
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: | run: |
_EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/${EMQX_NAME}-docker-${PKG_VSN}.tar.gz 2>/dev/null | sed 's/Loaded image: //g') EMQX_IMAGE_TAG=$(docker load < /tmp/${EMQX_NAME}-docker-${PKG_VSN}.tar.gz 2>/dev/null | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$_EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV echo "EMQX_IMAGE_TAG=$EMQX_IMAGE_TAG" >> $GITHUB_ENV
- name: dashboard tests - name: dashboard tests
working-directory: ./scripts/ui-tests working-directory: ./scripts/ui-tests
run: | run: |
@ -50,11 +52,9 @@ jobs:
docker compose up --abort-on-container-exit --exit-code-from selenium docker compose up --abort-on-container-exit --exit-code-from selenium
- name: test two nodes cluster with proto_dist=inet_tls in docker - name: test two nodes cluster with proto_dist=inet_tls in docker
run: | run: |
## -d 1 means only put node 1 (latest version) behind haproxy ./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
./scripts/test/start-two-nodes-in-docker.sh -d 1 -P $_EMQX_DOCKER_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
## -c means 'cleanup'
./scripts/test/start-two-nodes-in-docker.sh -c ./scripts/test/start-two-nodes-in-docker.sh -c
- name: cleanup - name: cleanup
if: always() if: always()
@ -69,6 +69,8 @@ jobs:
shell: bash shell: bash
env: env:
EMQX_NAME: ${{ matrix.profile }} EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
_EMQX_TEST_DB_BACKEND: ${{ matrix.cluster_db_backend }}
strategy: strategy:
fail-fast: false fail-fast: false
@ -77,20 +79,12 @@ jobs:
- emqx - emqx
- emqx-enterprise - emqx-enterprise
- emqx-elixir - emqx-elixir
cluster_db_backend:
- mnesia
- rlog
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
id: env
run: |
source env.sh
if [ "$EMQX_NAME" = "emqx-enterprise" ]; then
_EMQX_TEST_DB_BACKEND='rlog'
else
_EMQX_TEST_DB_BACKEND='mnesia'
fi
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with: with:
name: ${{ env.EMQX_NAME }}-docker name: ${{ env.EMQX_NAME }}-docker
path: /tmp path: /tmp
@ -119,4 +113,4 @@ jobs:
- name: test node_dump - name: test node_dump
run: | run: |
docker exec -t -u root node1.emqx.io bash -c 'apt-get -y update && apt-get -y install net-tools' docker exec -t -u root node1.emqx.io bash -c 'apt-get -y update && apt-get -y install net-tools'
docker exec -t -u root node1.emqx.io node_dump docker exec node1.emqx.io node_dump
View File
@ -27,21 +27,19 @@ permissions:
contents: read contents: read
jobs: jobs:
prepare_matrix: run_emqx_app_tests:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }} container: ${{ inputs.builder }}
defaults: defaults:
run: run:
shell: bash shell: bash
outputs:
matrix: ${{ steps.matrix.outputs.matrix }}
skip: ${{ steps.matrix.outputs.skip }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
- name: prepare test matrix - name: run
id: matrix
env: env:
BEFORE_REF: ${{ inputs.before_ref }} BEFORE_REF: ${{ inputs.before_ref }}
AFTER_REF: ${{ inputs.after_ref }} AFTER_REF: ${{ inputs.after_ref }}
@ -50,54 +48,19 @@ jobs:
changed_files="$(git diff --name-only ${BEFORE_REF} ${AFTER_REF} apps/emqx)" changed_files="$(git diff --name-only ${BEFORE_REF} ${AFTER_REF} apps/emqx)"
if [ "$changed_files" = '' ]; then if [ "$changed_files" = '' ]; then
echo "nothing changed in apps/emqx, ignored." echo "nothing changed in apps/emqx, ignored."
echo 'matrix=[]' | tee -a $GITHUB_OUTPUT
echo 'skip=true' | tee -a $GITHUB_OUTPUT
exit 0 exit 0
else
echo 'skip=false' | tee -a $GITHUB_OUTPUT
echo 'matrix=[{"type": "eunit_proper_and_static"},{"type": "1_3"},{"type": "2_3"},{"type": "3_3"}]' | tee -a $GITHUB_OUTPUT
fi fi
run_emqx_app_tests:
if: needs.prepare_matrix.outputs.skip != 'true'
needs:
- prepare_matrix
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }}
strategy:
fail-fast: false
matrix:
include: ${{ fromJson(needs.prepare_matrix.outputs.matrix) }}
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0
- name: run
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
make ensure-rebar3 make ensure-rebar3
cp rebar3 apps/emqx/ cp rebar3 apps/emqx/
cd apps/emqx cd apps/emqx
if [[ ${{ matrix.type }} == "eunit_proper_and_static" ]]; then ./rebar3 xref
./rebar3 xref ./rebar3 dialyzer
./rebar3 dialyzer ./rebar3 eunit -v --name 'eunit@127.0.0.1'
./rebar3 eunit -v --name 'eunit@127.0.0.1' ./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true
./rebar3 proper -d test/props ./rebar3 proper -d test/props
else - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
export SUITEGROUP=${{ matrix.type }}
SUITES=$(../../scripts/find-suites.sh apps/emqx | \
sed -e 's|apps/emqx/test/||g' | \
sed -Ee 's|,?apps/emqx/integration_test/.*||g' | \
sed -e 's/\.erl//g')
echo "Suites: $SUITES"
./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true --suite="$SUITES"
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
if: failure() if: failure()
with: with:
name: logs-emqx-app-tests-${{ matrix.type }} name: logs-emqx-app-tests
path: apps/emqx/_build/test/logs path: apps/emqx/_build/test/logs
retention-days: 7 retention-days: 7
View File
@ -6,6 +6,13 @@ concurrency:
on: on:
workflow_call: workflow_call:
inputs:
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions: permissions:
contents: read contents: read
@ -18,6 +25,7 @@ jobs:
shell: bash shell: bash
env: env:
EMQX_NAME: ${{ matrix.profile }} EMQX_NAME: ${{ matrix.profile }}
EMQX_TAG: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
REPOSITORY: "emqx/${{ matrix.profile }}" REPOSITORY: "emqx/${{ matrix.profile }}"
strategy: strategy:
@ -34,17 +42,10 @@ jobs:
- ssl1.3 - ssl1.3
- ssl1.2 - ssl1.2
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
path: source path: source
- name: Set up environment - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
id: env
run: |
cd source
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "EMQX_TAG=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with: with:
name: "${{ env.EMQX_NAME }}-docker" name: "${{ env.EMQX_NAME }}-docker"
path: /tmp path: /tmp
@ -164,7 +165,7 @@ jobs:
fi fi
sleep 1; sleep 1;
done done
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/paho.mqtt.testing repository: emqx/paho.mqtt.testing
ref: develop-5.0 ref: develop-5.0
View File
@ -2,6 +2,10 @@ name: JMeter integration tests
on: on:
workflow_call: workflow_call:
inputs:
version-emqx:
required: true
type: string
permissions: permissions:
contents: read contents: read
@ -12,7 +16,7 @@ jobs:
steps: steps:
- name: Cache Jmeter - name: Cache Jmeter
id: cache-jmeter id: cache-jmeter
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
with: with:
path: /tmp/apache-jmeter.tgz path: /tmp/apache-jmeter.tgz
key: apache-jmeter-5.4.3.tgz key: apache-jmeter-5.4.3.tgz
@ -31,7 +35,7 @@ jobs:
else else
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz $ARCHIVE_URL wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz $ARCHIVE_URL
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: apache-jmeter.tgz name: apache-jmeter.tgz
path: /tmp/apache-jmeter.tgz path: /tmp/apache-jmeter.tgz
@ -51,23 +55,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -95,7 +86,7 @@ jobs:
echo "check logs failed" echo "check logs failed"
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: always() if: always()
with: with:
name: jmeter_logs-advanced_feat-${{ matrix.scripts_type }} name: jmeter_logs-advanced_feat-${{ matrix.scripts_type }}
@ -120,23 +111,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -175,7 +153,7 @@ jobs:
if: failure() if: failure()
run: | run: |
docker compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log docker compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: always() if: always()
with: with:
name: jmeter_logs-pgsql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.pgsql_tag }} name: jmeter_logs-pgsql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.pgsql_tag }}
@ -197,23 +175,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -248,7 +213,7 @@ jobs:
echo "check logs failed" echo "check logs failed"
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: always() if: always()
with: with:
name: jmeter_logs-mysql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.mysql_tag }} name: jmeter_logs-mysql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}
@ -266,23 +231,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -313,7 +265,7 @@ jobs:
echo "check logs failed" echo "check logs failed"
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: always() if: always()
with: with:
name: jmeter_logs-JWT_authn-${{ matrix.scripts_type }} name: jmeter_logs-JWT_authn-${{ matrix.scripts_type }}
@ -332,23 +284,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -370,7 +309,7 @@ jobs:
echo "check logs failed" echo "check logs failed"
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: always() if: always()
with: with:
name: jmeter_logs-built_in_database_authn_authz-${{ matrix.scripts_type }} name: jmeter_logs-built_in_database_authn_authz-${{ matrix.scripts_type }}
View File
@ -25,7 +25,7 @@ jobs:
run: run:
shell: bash shell: bash
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: emqx-enterprise name: emqx-enterprise
- name: extract artifact - name: extract artifact
@ -45,7 +45,7 @@ jobs:
run: | run: |
export PROFILE='emqx-enterprise' export PROFILE='emqx-enterprise'
make emqx-enterprise-tgz make emqx-enterprise-tgz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
name: Upload built emqx and test scenario name: Upload built emqx and test scenario
with: with:
name: relup_tests_emqx_built name: relup_tests_emqx_built
@ -72,10 +72,10 @@ jobs:
run: run:
shell: bash shell: bash
steps: steps:
- uses: erlef/setup-beam@b9c58b0450cd832ccdb3c17cc156a47065d2114f # v1.18.1 - uses: erlef/setup-beam@a34c98fd51e370b4d4981854aba1eb817ce4e483 # v1.17.0
with: with:
otp-version: 26.2.5 otp-version: 26.2.1
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: hawk/lux repository: hawk/lux
ref: lux-2.8.1 ref: lux-2.8.1
@ -88,7 +88,7 @@ jobs:
./configure ./configure
make make
echo "$(pwd)/bin" >> $GITHUB_PATH echo "$(pwd)/bin" >> $GITHUB_PATH
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
name: Download built emqx and test scenario name: Download built emqx and test scenario
with: with:
name: relup_tests_emqx_built name: relup_tests_emqx_built
@ -111,7 +111,7 @@ jobs:
docker logs node2.emqx.io | tee lux_logs/emqx2.log docker logs node2.emqx.io | tee lux_logs/emqx2.log
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
name: Save debug data name: Save debug data
if: failure() if: failure()
with: with:

View File

@ -20,6 +20,9 @@ on:
required: true required: true
type: string type: string
permissions:
contents: read
env: env:
IS_CI: "yes" IS_CI: "yes"
@ -35,41 +38,37 @@ jobs:
defaults: defaults:
run: run:
shell: bash shell: bash
container: ${{ inputs.builder }} container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
env:
PROFILE: ${{ matrix.profile }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}
permissions:
contents: read
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
run: | run: |
unzip -o -q ${{ matrix.profile }}.zip unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE" git config --global --add safe.directory "$GITHUB_WORKSPACE"
# produces eunit.coverdata # produces eunit.coverdata
- run: make eunit - name: eunit
env:
PROFILE: ${{ matrix.profile }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
run: make eunit
# produces proper.coverdata # produces proper.coverdata
- run: make proper - name: proper
- run: make cover
- name: send to coveralls
if: github.repository == 'emqx/emqx'
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} PROFILE: ${{ matrix.profile }}
run: make coveralls ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
run: make proper
- run: cat rebar3.crashdump - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
if: failure() with:
name: coverdata-${{ matrix.profile }}-${{ matrix.otp }}
path: _build/test/cover
retention-days: 7
ct_docker: ct_docker:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON('["self-hosted","ephemeral","linux","x64"]') || 'ubuntu-22.04' }} runs-on: ${{ github.repository_owner == 'emqx' && fromJSON('["self-hosted","ephemeral","linux","x64"]') || 'ubuntu-22.04' }}
@ -83,24 +82,19 @@ jobs:
run: run:
shell: bash shell: bash
env:
PROFILE: ${{ matrix.profile }}
permissions:
contents: read
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
run: | run: |
unzip -o -q ${{ matrix.profile }}.zip unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE"
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata # produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
- name: run common tests - name: run common tests
env: env:
DOCKER_CT_RUNNER_IMAGE: ${{ inputs.builder }} DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
MONGO_TAG: "5" MONGO_TAG: "5"
MYSQL_TAG: "8" MYSQL_TAG: "8"
PGSQL_TAG: "13" PGSQL_TAG: "13"
@ -109,34 +103,23 @@ jobs:
TDENGINE_TAG: "3.0.2.4" TDENGINE_TAG: "3.0.2.4"
OPENTS_TAG: "9aa7f88" OPENTS_TAG: "9aa7f88"
MINIO_TAG: "RELEASE.2023-03-20T20-16-18Z" MINIO_TAG: "RELEASE.2023-03-20T20-16-18Z"
PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }} SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1 ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }} CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }}
run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }} --keep-up - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with:
- name: make cover name: coverdata-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
run: | path: _build/test/cover
docker exec -e PROFILE="$PROFILE" -t erlang make cover retention-days: 7
- name: send to coveralls
if: github.repository == 'emqx/emqx'
run: |
ls _build/test/cover/*.coverdata || exit 0
docker exec -e PROFILE="$PROFILE" -t erlang make coveralls
- name: rebar3.crashdump
if: failure()
run: cat rebar3.crashdump
- name: compress logs - name: compress logs
if: failure() if: failure()
run: tar -czf logs.tar.gz _build/test/logs run: tar -czf logs.tar.gz _build/test/logs
- uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
if: failure() if: failure()
with: with:
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }} name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
path: logs.tar.gz path: logs.tar.gz
compression-level: 0 compression-level: 0
retention-days: 7 retention-days: 7
@ -149,22 +132,13 @@ jobs:
matrix: matrix:
include: ${{ fromJson(inputs.ct-host) }} include: ${{ fromJson(inputs.ct-host) }}
container: ${{ inputs.builder }} container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
defaults: defaults:
run: run:
shell: bash shell: bash
permissions:
contents: read
env:
PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }}
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
@ -174,29 +148,26 @@ jobs:
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata # produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
- name: run common tests - name: run common tests
run: make "${{ matrix.app }}-ct"
- run: make cover
- name: send to coveralls
if: github.repository == 'emqx/emqx'
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
run: | run: |
ls _build/test/cover/*.coverdata || exit 0 make "${{ matrix.app }}-ct"
make coveralls - uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with:
- run: cat rebar3.crashdump name: coverdata-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
if: failure() path: _build/test/cover
if-no-files-found: warn # do not fail if no coverdata found
retention-days: 7
- name: compress logs - name: compress logs
if: failure() if: failure()
run: tar -czf logs.tar.gz _build/test/logs run: tar -czf logs.tar.gz _build/test/logs
- uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
if: failure() if: failure()
with: with:
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }} name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
path: logs.tar.gz path: logs.tar.gz
compression-level: 0 compression-level: 0
retention-days: 7 retention-days: 7
@ -209,18 +180,61 @@ jobs:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
strategy: strategy:
fail-fast: false fail-fast: false
permissions:
pull-requests: write
steps: steps:
- name: Coveralls finished
if: github.repository == 'emqx/emqx'
uses: coverallsapp/github-action@643bc377ffa44ace6394b2b5d0d3950076de9f63 # v2.3.0
with:
parallel-finished: true
git-branch: ${{ github.ref }}
git-commit: ${{ github.sha }}
- run: echo "All tests passed" - run: echo "All tests passed"
make_cover:
needs:
- eunit_and_proper
- ct
- ct_docker
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }}
strategy:
fail-fast: false
matrix:
profile:
- emqx-enterprise
steps:
- uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with:
name: ${{ matrix.profile }}
- name: extract artifact
run: |
unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE"
- uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
name: download coverdata
with:
pattern: coverdata-${{ matrix.profile }}-*
path: _build/test/cover
merge-multiple: true
- name: make cover
env:
PROFILE: emqx-enterprise
run: make cover
- name: send to coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PROFILE: emqx-enterprise
run: make coveralls
- name: get coveralls logs
if: failure()
run: cat rebar3.crashdump
# do this in a separate job
upload_coverdata:
needs: make_cover
runs-on: ubuntu-22.04
steps:
- name: Coveralls Finished
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
curl -v -k https://coveralls.io/webhook \
--header "Content-Type: application/json" \
--data "{\"repo_name\":\"$GITHUB_REPOSITORY\",\"repo_token\":\"$GITHUB_TOKEN\",\"payload\":{\"build_num\":$GITHUB_RUN_ID,\"status\":\"done\"}}" || true

View File

@ -10,27 +10,27 @@ on:
push: push:
branches: branches:
- master - master
- 'release-5[0-9]'
workflow_dispatch: workflow_dispatch:
permissions: read-all permissions: read-all
jobs: jobs:
analysis: analysis:
if: github.repository == 'emqx/emqx'
name: Scorecard analysis name: Scorecard analysis
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
permissions: permissions:
security-events: write security-events: write
id-token: write id-token: write
steps: steps:
- name: "Checkout code" - name: "Checkout code"
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
persist-credentials: false persist-credentials: false
- name: "Run analysis" - name: "Run analysis"
uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
with: with:
results_file: results.sarif results_file: results.sarif
results_format: sarif results_format: sarif
@ -40,7 +40,7 @@ jobs:
publish_results: true publish_results: true
- name: "Upload artifact" - name: "Upload artifact"
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 uses: actions/upload-artifact@c7d193f32edcb7bfad88892161225aeda64e9392 # v4.0.0
with: with:
name: SARIF file name: SARIF file
path: results.sarif path: results.sarif
@ -48,6 +48,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard. # Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning" - name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.22.1 uses: github/codeql-action/upload-sarif@8e0b1c74b1d5a0077b04d064c76ee714d3da7637 # v2.22.1
with: with:
sarif_file: results.sarif sarif_file: results.sarif

View File

@ -19,7 +19,7 @@ jobs:
- emqx-enterprise - emqx-enterprise
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
pattern: "${{ matrix.profile }}-schema-dump-*-x64" pattern: "${{ matrix.profile }}-schema-dump-*-x64"
merge-multiple: true merge-multiple: true

View File

@ -13,8 +13,8 @@ permissions:
jobs: jobs:
stale: stale:
if: github.repository == 'emqx/emqx' if: github.repository_owner == 'emqx'
runs-on: ubuntu-22.04 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
permissions: permissions:
issues: write issues: write
pull-requests: none pull-requests: none

View File

@ -28,21 +28,21 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
include: ${{ fromJson(inputs.ct-matrix) }} include: ${{ fromJson(inputs.ct-matrix) }}
container: "${{ inputs.builder }}" container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
run: | run: |
unzip -o -q ${{ matrix.profile }}.zip unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE" git config --global --add safe.directory "$GITHUB_WORKSPACE"
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84 # v3.3.2
with: with:
path: "emqx_dialyzer_${{ matrix.profile }}_plt" path: "emqx_dialyzer_${{ matrix.otp }}_plt"
key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }} key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }}
restore-keys: | restore-keys: |
rebar3-dialyzer-plt-${{ matrix.profile }}- rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-
- run: cat .env | tee -a $GITHUB_ENV - run: cat .env | tee -a $GITHUB_ENV
- name: run static checks - name: run static checks
run: make static_checks run: make static_checks

View File

@ -1,88 +0,0 @@
name: Sync release branch
concurrency:
group: sync-release-branch-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on:
schedule:
- cron: '0 2 * * *'
workflow_dispatch:
permissions:
contents: read
jobs:
create-pr:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
strategy:
fail-fast: false
matrix:
branch:
- release-57
env:
SYNC_BRANCH: ${{ matrix.branch }}
defaults:
run:
shell: bash
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0
- name: create new branch
run: |
set -euxo pipefail
NEW_BRANCH_NAME=sync-${SYNC_BRANCH}-$(date +"%Y%m%d-%H%M%S")
echo "NEW_BRANCH_NAME=${NEW_BRANCH_NAME}" >> $GITHUB_ENV
git config --global user.name "${GITHUB_ACTOR}"
git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com"
git checkout -b ${NEW_BRANCH_NAME}
git merge origin/${SYNC_BRANCH} 2>&1 | tee merge.log
git push origin ${NEW_BRANCH_NAME}:${NEW_BRANCH_NAME}
- name: create pull request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euxo pipefail
for pr in $(gh pr list --state open --base master --label sync-release-branch --search "Sync ${SYNC_BRANCH} in:title" --repo ${{ github.repository }} --json number --jq '.[] | .number'); do
gh pr close $pr --repo ${{ github.repository }} --delete-branch || true
done
gh pr create --title "Sync ${SYNC_BRANCH}" --body "Sync ${SYNC_BRANCH}" --base master --head ${NEW_BRANCH_NAME} --label sync-release-branch --repo ${{ github.repository }}
- name: Send notification to Slack
if: failure()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
run: |
awk '{printf "%s\\n", $0}' merge.log > merge.log.1
cat <<EOF > payload.json
{
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Automatic sync of ${SYNC_BRANCH} branch failed: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "\`\`\`$(cat merge.log.1)\`\`\`"
}
}
]
}
EOF
curl -X POST -H 'Content-type: application/json' --data @payload.json "$SLACK_WEBHOOK_URL"

View File

@ -18,12 +18,12 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
steps: steps:
- uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 - uses: aws-actions/configure-aws-credentials@010d0da01d0b5a38af31e9c3470dbfdabdecca3a # v4.0.1
with: with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.tag }} ref: ${{ github.event.inputs.tag }}
- name: Detect profile - name: Detect profile

6
.gitignore vendored
View File

@ -72,10 +72,4 @@ ct_run*/
apps/emqx_conf/etc/emqx.conf.all.rendered* apps/emqx_conf/etc/emqx.conf.all.rendered*
rebar-git-cache.tar rebar-git-cache.tar
# build docker image locally # build docker image locally
.dockerignore
.docker_image_tag .docker_image_tag
.emqx_docker_image_tags
.git/
apps/emqx_utils/src/emqx_variform_parser.erl
apps/emqx_utils/src/emqx_variform_scan.erl
default-profile.mk

View File

@ -1,2 +1,2 @@
erlang 26.2.5-3 erlang 26.2.1-2
elixir 1.15.7-otp-26 elixir 1.15.7-otp-26

View File

@ -6,17 +6,23 @@ endif
REBAR = $(CURDIR)/rebar3 REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts SCRIPTS = $(CURDIR)/scripts
include env.sh export EMQX_RELUP ?= true
export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-debian11
export EMQX_DEFAULT_RUNNER = public.ecr.aws/debian/debian:11-slim
export EMQX_REL_FORM ?= tgz
export QUICER_DOWNLOAD_FROM_RELEASE = 1
ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none
FIND=/usr/bin/find
else
FIND=find
endif
# Dashboard version # Dashboard version
# from https://github.com/emqx/emqx-dashboard5 # from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.10.0-beta.1 export EMQX_DASHBOARD_VERSION ?= v1.7.0
export EMQX_EE_DASHBOARD_VERSION ?= e1.8.0-beta.1 export EMQX_EE_DASHBOARD_VERSION ?= e1.5.0
export EMQX_RELUP ?= true
export EMQX_REL_FORM ?= tgz
-include default-profile.mk
PROFILE ?= emqx PROFILE ?= emqx
REL_PROFILES := emqx emqx-enterprise REL_PROFILES := emqx emqx-enterprise
PKG_PROFILES := emqx-pkg emqx-enterprise-pkg PKG_PROFILES := emqx-pkg emqx-enterprise-pkg
@ -28,8 +34,6 @@ CT_COVER_EXPORT_PREFIX ?= $(PROFILE)
export REBAR_GIT_CLONE_OPTIONS += --depth=1 export REBAR_GIT_CLONE_OPTIONS += --depth=1
ELIXIR_COMMON_DEPS := ensure-hex ensure-mix-rebar3 ensure-mix-rebar
.PHONY: default .PHONY: default
default: $(REBAR) $(PROFILE) default: $(REBAR) $(PROFILE)
@ -49,8 +53,7 @@ $(REBAR): .prepare ensure-rebar3
.PHONY: ensure-hex .PHONY: ensure-hex
ensure-hex: ensure-hex:
# @mix local.hex --if-missing --force @mix local.hex --if-missing --force
@mix local.hex 2.0.6 --if-missing --force
.PHONY: ensure-mix-rebar3 .PHONY: ensure-mix-rebar3
ensure-mix-rebar3: $(REBAR) ensure-mix-rebar3: $(REBAR)
@ -60,12 +63,8 @@ ensure-mix-rebar3: $(REBAR)
ensure-mix-rebar: $(REBAR) ensure-mix-rebar: $(REBAR)
@mix local.rebar --if-missing --force @mix local.rebar --if-missing --force
.PHONY: elixir-common-deps
elixir-common-deps: $(ELIXIR_COMMON_DEPS)
.PHONY: mix-deps-get .PHONY: mix-deps-get
mix-deps-get: elixir-common-deps mix-deps-get: $(ELIXIR_COMMON_DEPS)
@mix deps.get @mix deps.get
.PHONY: eunit .PHONY: eunit
@ -195,8 +194,8 @@ $(PROFILES:%=clean-%):
@if [ -d _build/$(@:clean-%=%) ]; then \ @if [ -d _build/$(@:clean-%=%) ]; then \
rm -f rebar.lock; \ rm -f rebar.lock; \
rm -rf _build/$(@:clean-%=%)/rel; \ rm -rf _build/$(@:clean-%=%)/rel; \
find _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \ $(FIND) _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \
find _build/$(@:clean-%=%) -type l -delete; \ $(FIND) _build/$(@:clean-%=%) -type l -delete; \
fi fi
.PHONY: clean-all .PHONY: clean-all
@ -244,7 +243,7 @@ $(foreach zt,$(ALL_ZIPS),$(eval $(call download-relup-packages,$(zt))))
## relup target is to create relup instructions ## relup target is to create relup instructions
.PHONY: $(REL_PROFILES:%=%-relup) .PHONY: $(REL_PROFILES:%=%-relup)
define gen-relup-target define gen-relup-target
$1-relup: $(COMMON_DEPS) $1-relup: $1-relup-downloads $(COMMON_DEPS)
@$(BUILD) $1 relup @$(BUILD) $1 relup
endef endef
ALL_TGZS = $(REL_PROFILES) ALL_TGZS = $(REL_PROFILES)
@ -253,7 +252,7 @@ $(foreach zt,$(ALL_TGZS),$(eval $(call gen-relup-target,$(zt))))
## tgz target is to create a release package .tar.gz with relup ## tgz target is to create a release package .tar.gz with relup
.PHONY: $(REL_PROFILES:%=%-tgz) .PHONY: $(REL_PROFILES:%=%-tgz)
define gen-tgz-target define gen-tgz-target
$1-tgz: $(COMMON_DEPS) $1-tgz: $1-relup
@$(BUILD) $1 tgz @$(BUILD) $1 tgz
endef endef
ALL_TGZS = $(REL_PROFILES) ALL_TGZS = $(REL_PROFILES)
@ -316,19 +315,10 @@ $(foreach tt,$(ALL_ELIXIR_TGZS),$(eval $(call gen-elixir-tgz-target,$(tt))))
.PHONY: fmt .PHONY: fmt
fmt: $(REBAR) fmt: $(REBAR)
@find . \( -name '*.app.src' -o \ @$(SCRIPTS)/erlfmt -w 'apps/*/{src,include,priv,test,integration_test}/**/*.{erl,hrl,app.src,eterm}'
-name '*.erl' -o \ @$(SCRIPTS)/erlfmt -w '**/*.escript' --exclude-files '_build/**'
-name '*.hrl' -o \ @$(SCRIPTS)/erlfmt -w '**/rebar.config' --exclude-files '_build/**'
-name 'rebar.config' -o \ @$(SCRIPTS)/erlfmt -w 'rebar.config.erl'
-name '*.eterm' -o \
-name '*.escript' \) \
-not -path '*/_build/*' \
-not -path '*/deps/*' \
-not -path '*/_checkouts/*' \
-type f \
| xargs $(SCRIPTS)/erlfmt -w
@$(SCRIPTS)/erlfmt -w 'apps/emqx/rebar.config.script'
@$(SCRIPTS)/erlfmt -w 'elvis.config'
@$(SCRIPTS)/erlfmt -w 'bin/nodetool' @$(SCRIPTS)/erlfmt -w 'bin/nodetool'
@mix format @mix format

2
NOTICE
View File

@ -1,5 +1,5 @@
EMQX, highly scalable, highly available distributed MQTT messaging platform for IoT. EMQX, highly scalable, highly available distributed MQTT messaging platform for IoT.
Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
This product contains code developed at EMQ Technologies Co., Ltd. This product contains code developed at EMQ Technologies Co., Ltd.
Visit https://www.emqx.com to learn more. Visit https://www.emqx.com to learn more.

View File

@ -1,12 +1,9 @@
简体中文 | [English](./README.md) | [Русский](./README-RU.md)
# EMQX # EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml) [![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master) [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx) [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/) [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
[![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES) [![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
[![Twitter](https://img.shields.io/badge/Twitter-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech) [![Twitter](https://img.shields.io/badge/Twitter-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
@ -88,7 +85,7 @@ EMQX Cloud 文档:[docs.emqx.com/zh/cloud/latest/](https://docs.emqx.com/zh/cl
`master` 分支是最新的 5 版本,`main-v4.4` 是 4.4 版本。 `master` 分支是最新的 5 版本,`main-v4.4` 是 4.4 版本。
EMQX 4.4 版本需要 OTP 24;5 版本则可以使用 OTP 25 和 26 构建。 EMQX 4.4 版本需要 OTP 24;5 版本则可以使用 OTP 24 和 25 构建。
```bash ```bash
git clone https://github.com/emqx/emqx.git git clone https://github.com/emqx/emqx.git

View File

@ -1,12 +1,9 @@
Русский | [简体中文](./README-CN.md) | [English](./README.md)
# Брокер EMQX # Брокер EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml) [![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master) [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx) [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/) [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
[![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES) [![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
[![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech) [![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)

View File

@ -1,12 +1,9 @@
English | [简体中文](./README-CN.md) | [Русский](./README-RU.md)
# EMQX # EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml) [![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master) [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx) [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/emqx/emqx/badge)](https://securityscorecards.dev/viewer/?uri=github.com/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/) [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)
[![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES) [![Discord](https://img.shields.io/discord/931086341838622751?label=Discord&logo=discord)](https://discord.gg/xYGf3fQnES)
[![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech) [![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
@ -100,7 +97,7 @@ The `master` branch tracks the latest version 5. For version 4.4 checkout the `m
EMQX 4.4 requires OTP 24. EMQX 4.4 requires OTP 24.
EMQX 5.0 ~ 5.3 can be built with OTP 24 or 25. EMQX 5.0 ~ 5.3 can be built with OTP 24 or 25.
EMQX 5.4 and newer can be built with OTP 25 or 26. EMQX 5.4 and newer can be built with OTP 24 or 25.
```bash ```bash
git clone https://github.com/emqx/emqx.git git clone https://github.com/emqx/emqx.git

131
Windows.md Normal file
View File

@ -0,0 +1,131 @@
# Build and run EMQX on Windows
NOTE: The instructions and examples are based on Windows 10.
## Build Environment
### Visual Studio for C/C++ compilation and linking
EMQX includes Erlang NIF (Native Implemented Function) components, implemented
in C/C++. To compile and link C/C++ libraries, the easiest way is perhaps to
install Visual Studio.
Visual Studio 2019 is used in our tests.
If you, like me (@zmstone), do not know where to start,
please follow this OTP guide:
https://github.com/erlang/otp/blob/master/HOWTO/INSTALL-WIN32.md
NOTE: To avoid surprises, you may need to add the two paths below to the `Path` environment variable
and order them before other paths.
```
C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.28.29910\bin\Hostx64\x64
C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build
```
Depending on your Visual Studio version and OS, the paths may differ.
The first path is for the rebar3 port compiler to find `cl.exe` and `link.exe`.
The second path is for CMD to set up environment variables.
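For a single CMD session, the paths can also be prepended on the fly; the following is only a sketch, and the directories are examples that depend on your Visual Studio version and install location.
```
REM Prepend the Visual Studio tool paths for the current CMD session only (example paths)
set "PATH=C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.28.29910\bin\Hostx64\x64;C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build;%PATH%"
```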
### Erlang/OTP
Install Erlang/OTP 24 from https://www.erlang.org/downloads
You may need to edit the `Path` environment variable to allow running
Erlang commands such as `erl` from PowerShell.
To validate the Erlang installation in CMD or PowerShell:
* Start (or restart) CMD or PowerShell
* Execute the `erl` command to enter the Erlang shell
* Evaluate the Erlang expression `halt().` to exit the Erlang shell.
For example:
```
PS C:\Users\zmsto> erl
Eshell V12.2.1 (abort with ^G)
1> halt().
```
### bash
All EMQX build/run scripts are written in either `bash` or `escript`.
`escript` is installed as part of Erlang. To install a `bash`
environment on Windows, there are quite a few options;
Cygwin is what we tested with.
* Add the `cygwin\bin` dir to the `Path` environment variable.
To do so, search for "Edit environment variables" in Control Panel and
add `C:\tools\cygwin\bin` (depending on where it was installed)
to the `Path` list.
* Validate the installation.
Start (or restart) a CMD or PowerShell console and execute `which bash`; it should
print `/usr/bin/bash`.
NOTE: Make sure Cygwin's bin dir is added before `C:\Windows\system32` in `Path`,
otherwise the build scripts may end up using binaries from WSL instead of Cygwin; a quick way to check the ordering is shown below.
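As a quick check (a sketch assuming Cygwin was installed under `C:\tools\cygwin`), CMD's `where` lists every `bash` found on `Path` in resolution order, so the Cygwin binary should appear first:
```
where bash
REM expected first match: C:\tools\cygwin\bin\bash.exe
```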
### Other tools
Some Unix-world tools are required to build EMQX, including:
* git
* curl
* make
* cmake
* jq
* zip / unzip
We recommend using [scoop](https://scoop.sh/) or [Chocolatey](https://chocolatey.org/install) to install the tools.
When using scoop:
```
scoop install git curl make cmake jq zip unzip
```
## Build EMQX source code
* Clone the repo: `git clone https://github.com/emqx/emqx.git`
* Start CMD console
* Execute `vcvarsall.bat x86_amd64` to load environment variables
* Change to the emqx directory and execute `make` (see the consolidated sketch below)
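Putting the build steps together, a typical CMD session might look like the sketch below; the `vcvarsall.bat` location is an example and varies with the installed Visual Studio version and edition.
```
git clone https://github.com/emqx/emqx.git
cd emqx
REM load the MSVC environment first (path is an example)
"C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat" x86_amd64
make
```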
### Possible errors
* `'cl.exe' is not recognized as an internal or external command`
This error most likely means the Visual Studio executables are not on the `Path` environment variable.
To fix it, either add a path like `C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.28.29910\bin\Hostx64\x64`
to `Path`, or make sure `vcvarsall.bat x86_amd64` is executed prior to the `make` command.
* `fatal error C1083: Cannot open include file: 'assert.h': No such file or directory`
If Visual Studio is installed correctly, this likely means the `LIB` and `LIB_PATH` environment
variables are not set. Make sure `vcvarsall.bat x86_amd64` is executed prior to the `make` command.
* `link: extra operand 'some.obj'`
This is likely due to the use of GNU `link.exe` rather than the one from Visual Studio.
Execute `link.exe --version` to inspect which one is in use. The one installed with
Visual Studio should print `Microsoft (R) Incremental Linker`.
To fix it, order Visual Studio's bin paths before Cygwin's (or similar installation's)
bin paths in the `Path` environment variable.
## Run EMQX
To start the EMQX broker, execute `.\bin\emqx console` or `.\bin\emqx start` from the `_build\emqx\rel\emqx` directory.
Then execute `.\bin\emqx_ctl status` to check its status.
If everything works fine, it should print out
```
Node 'emqx@127.0.0.1' 4.3-beta.1 is started
Application emqx 4.3.0 is running
```

View File

@ -1,5 +1,5 @@
EMQX, highly scalable, highly available distributed MQTT messaging platform for IoT. EMQX, highly scalable, highly available distributed MQTT messaging platform for IoT.
Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
This product contains code developed at EMQ Technologies Co., Ltd. This product contains code developed at EMQ Technologies Co., Ltd.
Visit https://www.emqx.com to learn more. Visit https://www.emqx.com to learn more.

View File

@ -1,20 +0,0 @@
## EMQX provides support for two primary log handlers: `file` and `console`, with an additional `audit` handler specifically designed to always direct logs to files.
## The system's default log handling behavior can be configured via the environment variable `EMQX_DEFAULT_LOG_HANDLER`, which accepts the following settings:
##
## - `file`: Directs log output exclusively to files.
## - `console`: Channels log output solely to the console.
##
## It's noteworthy that `EMQX_DEFAULT_LOG_HANDLER` is set to `file` when EMQX is initiated via systemd `emqx.service` file.
## In scenarios outside systemd initiation, `console` serves as the default log handler.
## Read more about configs here: {{ emqx_configuration_doc_log }}
log {
# file {
# level = warning
# }
# console {
# level = warning
# }
}

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -45,10 +45,6 @@
). ).
-define(assertReceive(PATTERN, TIMEOUT), -define(assertReceive(PATTERN, TIMEOUT),
?assertReceive(PATTERN, TIMEOUT, #{})
).
-define(assertReceive(PATTERN, TIMEOUT, EXTRA),
(fun() -> (fun() ->
receive receive
X__V = PATTERN -> X__V X__V = PATTERN -> X__V
@ -58,8 +54,7 @@
{module, ?MODULE}, {module, ?MODULE},
{line, ?LINE}, {line, ?LINE},
{expression, (??PATTERN)}, {expression, (??PATTERN)},
{mailbox, ?drainMailbox()}, {mailbox, ?drainMailbox()}
{extra_info, EXTRA}
]} ]}
) )
end end

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -65,20 +65,9 @@
%% Route %% Route
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-record(share_dest, {
session_id :: emqx_session:session_id(),
group :: emqx_types:group()
}).
-record(route, { -record(route, {
topic :: binary(), topic :: binary(),
dest :: dest :: node() | {binary(), node()} | emqx_session:session_id()
node()
| {binary(), node()}
| emqx_session:session_id()
%% One session can also have multiple subscriptions to the same topic through different groups
| #share_dest{}
| emqx_external_broker:dest()
}). }).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -99,17 +88,14 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-record(banned, { -record(banned, {
who :: emqx_types:banned_who(), who ::
{clientid, binary()}
| {peerhost, inet:ip_address()}
| {username, binary()},
by :: binary(), by :: binary(),
reason :: binary(), reason :: binary(),
at :: integer(), at :: integer(),
until :: integer() until :: integer()
}). }).
%%--------------------------------------------------------------------
%% Configurations
%%--------------------------------------------------------------------
-define(KIND_REPLICATE, replicate).
-define(KIND_INITIATE, initiate).
-endif. -endif.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%------------------------------------------------------------------- %%-------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -23,20 +23,11 @@
-define(CHAN_INFO_TAB, emqx_channel_info). -define(CHAN_INFO_TAB, emqx_channel_info).
-define(CHAN_LIVE_TAB, emqx_channel_live). -define(CHAN_LIVE_TAB, emqx_channel_live).
%% Mria table for session registration. %% Mria/Mnesia Tables for channel management.
-define(CHAN_REG_TAB, emqx_channel_registry). -define(CHAN_REG_TAB, emqx_channel_registry).
-define(T_KICK, 5_000). -define(T_KICK, 5_000).
-define(T_GET_INFO, 5_000). -define(T_GET_INFO, 5_000).
-define(T_TAKEOVER, 15_000). -define(T_TAKEOVER, 15_000).
-define(CM_POOL, emqx_cm_pool).
%% Registered sessions.
-record(channel, {
chid :: emqx_types:clientid() | '_',
%% pid field is extended in 5.6.0 to support recording unregistration timestamp.
pid :: pid() | non_neg_integer() | '$1'
}).
-endif. -endif.

View File

@ -1,35 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022, 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc This header contains definitions of durable session metadata
%% keys, that can be consumed by the external code.
-ifndef(EMQX_DURABLE_SESSION_META_HRL).
-define(EMQX_DURABLE_SESSION_META_HRL, true).
%% Session metadata keys:
-define(created_at, created_at).
-define(last_alive_at, last_alive_at).
-define(expiry_interval, expiry_interval).
%% Unique integer used to create unique identities:
-define(last_id, last_id).
%% Connection info (relevent for the dashboard):
-define(peername, peername).
-define(will_message, will_message).
-define(clientinfo, clientinfo).
-define(protocol, protocol).
-define(offline_info, offline_info).
-endif.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2021-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2021-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -25,8 +25,6 @@
-define(HP_AUTHN, 970). -define(HP_AUTHN, 970).
-define(HP_AUTHZ, 960). -define(HP_AUTHZ, 960).
-define(HP_SYS_MSGS, 950). -define(HP_SYS_MSGS, 950).
-define(HP_SCHEMA_VALIDATION, 945).
-define(HP_MESSAGE_TRANSFORMATION, 943).
-define(HP_TOPIC_METRICS, 940). -define(HP_TOPIC_METRICS, 940).
-define(HP_RETAINER, 930). -define(HP_RETAINER, 930).
-define(HP_AUTO_SUB, 920). -define(HP_AUTO_SUB, 920).

View File

@ -1,258 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_METRICS_HRL).
-define(EMQX_METRICS_HRL, true).
%% Bytes sent and received
-define(BYTES_METRICS, [
{counter, 'bytes.received', <<"Number of bytes received ">>},
{counter, 'bytes.sent', <<"Number of bytes sent on this connection">>}
]).
%% Packets sent and received
-define(PACKET_METRICS, [
{counter, 'packets.received', <<"Number of received packet">>},
{counter, 'packets.sent', <<"Number of sent packet">>},
{counter, 'packets.connect.received', <<"Number of received CONNECT packet">>},
{counter, 'packets.connack.sent', <<"Number of sent CONNACK packet">>},
{counter, 'packets.connack.error',
<<"Number of received CONNECT packet with unsuccessful connections">>},
{counter, 'packets.connack.auth_error',
<<"Number of received CONNECT packet with failed Authentication">>},
{counter, 'packets.publish.received', <<"Number of received PUBLISH packet">>},
%% PUBLISH packets sent
{counter, 'packets.publish.sent', <<"Number of sent PUBLISH packet">>},
%% PUBLISH packet_id inuse
{counter, 'packets.publish.inuse',
<<"Number of received PUBLISH packet with occupied identifiers">>},
%% PUBLISH failed for error
{counter, 'packets.publish.error',
<<"Number of received PUBLISH packet that cannot be published">>},
%% PUBLISH failed for auth error
{counter, 'packets.publish.auth_error',
<<"Number of received PUBLISH packets with failed the Authorization check">>},
%% PUBLISH(QoS2) packets dropped
{counter, 'packets.publish.dropped',
<<"Number of messages discarded due to the receiving limit">>},
%% PUBACK packets received
{counter, 'packets.puback.received', <<"Number of received PUBACK packet">>},
%% PUBACK packets sent
{counter, 'packets.puback.sent', <<"Number of sent PUBACK packet">>},
%% PUBACK packet_id inuse
{counter, 'packets.puback.inuse',
<<"Number of received PUBACK packet with occupied identifiers">>},
%% PUBACK packets missed
{counter, 'packets.puback.missed', <<"Number of received packet with identifiers.">>},
%% PUBREC packets received
{counter, 'packets.pubrec.received', <<"Number of received PUBREC packet">>},
%% PUBREC packets sent
{counter, 'packets.pubrec.sent', <<"Number of sent PUBREC packet">>},
%% PUBREC packet_id inuse
{counter, 'packets.pubrec.inuse',
<<"Number of received PUBREC packet with occupied identifiers">>},
%% PUBREC packets missed
{counter, 'packets.pubrec.missed',
<<"Number of received PUBREC packet with unknown identifiers">>},
%% PUBREL packets received
{counter, 'packets.pubrel.received', <<"Number of received PUBREL packet">>},
%% PUBREL packets sent
{counter, 'packets.pubrel.sent', <<"Number of sent PUBREL packet">>},
%% PUBREL packets missed
{counter, 'packets.pubrel.missed',
<<"Number of received PUBREC packet with unknown identifiers">>},
%% PUBCOMP packets received
{counter, 'packets.pubcomp.received', <<"Number of received PUBCOMP packet">>},
%% PUBCOMP packets sent
{counter, 'packets.pubcomp.sent', <<"Number of sent PUBCOMP packet">>},
%% PUBCOMP packet_id inuse
{counter, 'packets.pubcomp.inuse',
<<"Number of received PUBCOMP packet with occupied identifiers">>},
%% PUBCOMP packets missed
{counter, 'packets.pubcomp.missed', <<"Number of missed PUBCOMP packet">>},
%% SUBSCRIBE Packets received
{counter, 'packets.subscribe.received', <<"Number of received SUBSCRIBE packet">>},
%% SUBSCRIBE error
{counter, 'packets.subscribe.error',
<<"Number of received SUBSCRIBE packet with failed subscriptions">>},
%% SUBSCRIBE failed for not auth
{counter, 'packets.subscribe.auth_error',
<<"Number of received SUBACK packet with failed Authorization check">>},
%% SUBACK packets sent
{counter, 'packets.suback.sent', <<"Number of sent SUBACK packet">>},
%% UNSUBSCRIBE Packets received
{counter, 'packets.unsubscribe.received', <<"Number of received UNSUBSCRIBE packet">>},
%% UNSUBSCRIBE error
{counter, 'packets.unsubscribe.error',
<<"Number of received UNSUBSCRIBE packet with failed unsubscriptions">>},
%% UNSUBACK Packets sent
{counter, 'packets.unsuback.sent', <<"Number of sent UNSUBACK packet">>},
%% PINGREQ packets received
{counter, 'packets.pingreq.received', <<"Number of received PINGREQ packet">>},
%% PINGRESP Packets sent
{counter, 'packets.pingresp.sent', <<"Number of sent PUBRESP packet">>},
%% DISCONNECT Packets received
{counter, 'packets.disconnect.received', <<"Number of received DISCONNECT packet">>},
%% DISCONNECT Packets sent
{counter, 'packets.disconnect.sent', <<"Number of sent DISCONNECT packet">>},
%% Auth Packets received
{counter, 'packets.auth.received', <<"Number of received AUTH packet">>},
%% Auth Packets sent
{counter, 'packets.auth.sent', <<"Number of sent AUTH packet">>}
]).
%% Messages sent/received and pubsub
-define(MESSAGE_METRICS, [
%% All Messages received
{counter, 'messages.received', <<
"Number of messages received from the client, equal to the sum of "
"messages.qos0.received, messages.qos1.received and messages.qos2.received"
>>},
%% All Messages sent
{counter, 'messages.sent', <<
"Number of messages sent to the client, equal to the sum of "
"messages.qos0.sent, messages.qos1.sent and messages.qos2.sent"
>>},
%% QoS0 Messages received
{counter, 'messages.qos0.received', <<"Number of QoS 0 messages received from clients">>},
%% QoS0 Messages sent
{counter, 'messages.qos0.sent', <<"Number of QoS 0 messages sent to clients">>},
%% QoS1 Messages received
{counter, 'messages.qos1.received', <<"Number of QoS 1 messages received from clients">>},
%% QoS1 Messages sent
{counter, 'messages.qos1.sent', <<"Number of QoS 1 messages sent to clients">>},
%% QoS2 Messages received
{counter, 'messages.qos2.received', <<"Number of QoS 2 messages received from clients">>},
%% QoS2 Messages sent
{counter, 'messages.qos2.sent', <<"Number of QoS 2 messages sent to clients">>},
%% PubSub Metrics
%% Messages Publish
{counter, 'messages.publish',
<<"Number of messages published in addition to system messages">>},
%% Messages dropped due to no subscribers
{counter, 'messages.dropped',
<<"Number of messages dropped before forwarding to the subscription process">>},
%% Messages that failed validations
{counter, 'messages.validation_failed', <<"Number of message validation failed">>},
%% Messages that passed validations
{counter, 'messages.validation_succeeded', <<"Number of message validation successful">>},
%% % Messages that failed transformations
{counter, 'messages.transformation_failed', <<"Number of message transformation failed">>},
%% % Messages that passed transformations
{counter, 'messages.transformation_succeeded',
<<"Number fo message transformation succeeded">>},
%% QoS2 Messages expired
{counter, 'messages.dropped.await_pubrel_timeout',
<<"Number of messages dropped due to waiting PUBREL timeout">>},
%% Messages dropped
{counter, 'messages.dropped.no_subscribers',
<<"Number of messages dropped due to no subscribers">>},
%% Messages forward
{counter, 'messages.forward', <<"Number of messages forwarded to other nodes">>},
%% Messages delayed
{counter, 'messages.delayed', <<"Number of delay-published messages">>},
%% Messages delivered
{counter, 'messages.delivered',
<<"Number of messages forwarded to the subscription process internally">>},
%% Messages acked
{counter, 'messages.acked', <<"Number of received PUBACK and PUBREC packet">>},
%% Messages persistently stored
{counter, 'messages.persisted', <<"Number of message persisted">>}
]).
%% Delivery metrics
-define(DELIVERY_METRICS, [
%% All Dropped during delivery
{counter, 'delivery.dropped', <<"Total number of discarded messages when sending">>},
%% Dropped due to no_local
{counter, 'delivery.dropped.no_local', <<
"Number of messages that were dropped due to the No Local subscription "
"option when sending"
>>},
%% Dropped due to message too large
{counter, 'delivery.dropped.too_large', <<
"The number of messages that were dropped because the length exceeded "
"the limit when sending"
>>},
%% Dropped qos0 message
{counter, 'delivery.dropped.qos0_msg', <<
"Number of messages with QoS 0 that were dropped because the message "
"queue was full when sending"
>>},
%% Dropped due to queue full
{counter, 'delivery.dropped.queue_full', <<
"Number of messages with a non-zero QoS that were dropped because the "
"message queue was full when sending"
>>},
%% Dropped due to expired
{counter, 'delivery.dropped.expired',
<<"Number of messages dropped due to message expiration on sending">>}
]).
%% Client Lifecircle metrics
-define(CLIENT_METRICS, [
{counter, 'client.connect', <<"Number of client connections">>},
{counter, 'client.connack', <<"Number of CONNACK packet sent">>},
{counter, 'client.connected', <<"Number of successful client connected">>},
{counter, 'client.authenticate', <<"Number of client Authentication">>},
{counter, 'client.auth.anonymous', <<"Number of clients who log in anonymously">>},
{counter, 'client.authorize', <<"Number of Authorization rule checks">>},
{counter, 'client.subscribe', <<"Number of client subscriptions">>},
{counter, 'client.unsubscribe', <<"Number of client unsubscriptions">>},
{counter, 'client.disconnected', <<"Number of client disconnects">>}
]).
%% Session Lifecircle metrics
-define(SESSION_METRICS, [
{counter, 'session.created', <<"Number of sessions created">>},
{counter, 'session.resumed',
<<"Number of sessions resumed because Clean Session or Clean Start is false">>},
{counter, 'session.takenover',
<<"Number of sessions takenover because Clean Session or Clean Start is false">>},
{counter, 'session.discarded',
<<"Number of sessions dropped because Clean Session or Clean Start is true">>},
{counter, 'session.terminated', <<"Number of terminated sessions">>}
]).
%% Statistic metrics for ACL checking
-define(STASTS_ACL_METRICS, [
{counter, 'authorization.allow', <<"Number of Authorization allow">>},
{counter, 'authorization.deny', <<"Number of Authorization deny">>},
{counter, 'authorization.cache_hit', <<"Number of Authorization hits the cache">>},
{counter, 'authorization.cache_miss', <<"Number of Authorization cache missing">>}
]).
%% Statistic metrics for auth checking
-define(STASTS_AUTHN_METRICS, [
{counter, 'authentication.success', <<"Number of successful client Authentication">>},
{counter, 'authentication.success.anonymous',
<<"Number of successful client Authentication due to anonymous">>},
{counter, 'authentication.failure', <<"Number of failed client Authentication">>}
]).
%% Overload protection counters
-define(OLP_METRICS, [
{counter, 'overload_protection.delay.ok', <<"Number of overload protection delayed">>},
{counter, 'overload_protection.delay.timeout',
<<"Number of overload protection delay timeout">>},
{counter, 'overload_protection.hibernation', <<"Number of overload protection hibernation">>},
{counter, 'overload_protection.gc', <<"Number of overload protection garbage collection">>},
{counter, 'overload_protection.new_conn',
<<"Number of overload protection close new incoming connection">>}
]).
-endif.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -673,6 +673,7 @@ end).
-define(SHARE, "$share"). -define(SHARE, "$share").
-define(QUEUE, "$queue"). -define(QUEUE, "$queue").
-define(SHARE(Group, Topic), emqx_topic:join([<<?SHARE>>, Group, Topic])).
-define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}). -define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}).
@ -683,7 +684,6 @@ end).
-define(FRAME_PARSE_ERROR, frame_parse_error). -define(FRAME_PARSE_ERROR, frame_parse_error).
-define(FRAME_SERIALIZE_ERROR, frame_serialize_error). -define(FRAME_SERIALIZE_ERROR, frame_serialize_error).
-define(THROW_FRAME_ERROR(Reason), erlang:throw({?FRAME_PARSE_ERROR, Reason})). -define(THROW_FRAME_ERROR(Reason), erlang:throw({?FRAME_PARSE_ERROR, Reason})).
-define(THROW_SERIALIZE_ERROR(Reason), erlang:throw({?FRAME_SERIALIZE_ERROR, Reason})). -define(THROW_SERIALIZE_ERROR(Reason), erlang:throw({?FRAME_SERIALIZE_ERROR, Reason})).
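
A hedged sketch of how the frame error macros above are commonly used on the parse path; the function names below are made up for illustration:

%% Illustrative only: raise a frame parse error and turn it into an error tuple.
parse_frame(<<>>) ->
    ?THROW_FRAME_ERROR(#{cause => incomplete_frame});
parse_frame(Bin) when is_binary(Bin) ->
    {ok, Bin}.

handle_incoming(Data) ->
    try
        parse_frame(Data)
    catch
        throw:{?FRAME_PARSE_ERROR, Reason} ->
            {error, Reason}
    end.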

View File

@ -1,29 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_PERSISTENT_MESSAGE_HRL).
-define(EMQX_PERSISTENT_MESSAGE_HRL, true).
-define(PERSISTENT_MESSAGE_DB, messages).
-define(PERSISTENCE_ENABLED, emqx_message_persistence_enabled).
-define(WITH_DURABILITY_ENABLED(DO),
case is_persistence_enabled() of
true -> DO;
false -> {skipped, disabled}
end
).
-endif.
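
A hedged usage sketch for the ?WITH_DURABILITY_ENABLED/1 macro in this header; as the macro body implies, the calling module must define is_persistence_enabled/0, and the store function below is illustrative:

%% Illustrative only: perform a durable write only when persistence is enabled.
maybe_persist(Msg) ->
    ?WITH_DURABILITY_ENABLED(do_persist(Msg)).

do_persist(Msg) ->
    %% Stand-in for the real write into the ?PERSISTENT_MESSAGE_DB database.
    {ok, Msg}.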

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -28,19 +28,14 @@
%% cert %% cert
-define(VAR_CERT_SUBJECT, "cert_subject"). -define(VAR_CERT_SUBJECT, "cert_subject").
-define(VAR_CERT_CN_NAME, "cert_common_name"). -define(VAR_CERT_CN_NAME, "cert_common_name").
-define(VAR_CERT_PEM, "cert_pem").
-define(PH_CERT_SUBJECT, ?PH(?VAR_CERT_SUBJECT)). -define(PH_CERT_SUBJECT, ?PH(?VAR_CERT_SUBJECT)).
-define(PH_CERT_CN_NAME, ?PH(?VAR_CERT_CN_NAME)). -define(PH_CERT_CN_NAME, ?PH(?VAR_CERT_CN_NAME)).
-define(PH_CERT_PEM, ?PH(?VAR_CERT_PEM)).
%% MQTT/Gateway %% MQTT
-define(VAR_PASSWORD, "password"). -define(VAR_PASSWORD, "password").
-define(VAR_CLIENTID, "clientid"). -define(VAR_CLIENTID, "clientid").
-define(VAR_USERNAME, "username"). -define(VAR_USERNAME, "username").
-define(VAR_TOPIC, "topic"). -define(VAR_TOPIC, "topic").
-define(VAR_ENDPOINT_NAME, "endpoint_name").
-define(VAR_NS_CLIENT_ATTRS, {var_namespace, "client_attrs"}).
-define(PH_PASSWORD, ?PH(?VAR_PASSWORD)). -define(PH_PASSWORD, ?PH(?VAR_PASSWORD)).
-define(PH_CLIENTID, ?PH(?VAR_CLIENTID)). -define(PH_CLIENTID, ?PH(?VAR_CLIENTID)).
-define(PH_FROM_CLIENTID, ?PH("from_clientid")). -define(PH_FROM_CLIENTID, ?PH("from_clientid")).
@ -94,7 +89,7 @@
-define(PH_NODE, ?PH("node")). -define(PH_NODE, ?PH("node")).
-define(PH_REASON, ?PH("reason")). -define(PH_REASON, ?PH("reason")).
-define(PH_ENDPOINT_NAME, ?PH(?VAR_ENDPOINT_NAME)). -define(PH_ENDPOINT_NAME, ?PH("endpoint_name")).
-define(VAR_RETAIN, "retain"). -define(VAR_RETAIN, "retain").
-define(PH_RETAIN, ?PH(?VAR_RETAIN)). -define(PH_RETAIN, ?PH(?VAR_RETAIN)).

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2021-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2021-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -32,7 +32,10 @@
%% `apps/emqx/src/bpapi/README.md' %% `apps/emqx/src/bpapi/README.md'
%% Opensource edition %% Opensource edition
-define(EMQX_RELEASE_CE, "5.8.0-alpha.1"). -define(EMQX_RELEASE_CE, "5.5.0-rc.2").
%% Enterprise edition %% Enterprise edition
-define(EMQX_RELEASE_EE, "5.8.0-alpha.1"). -define(EMQX_RELEASE_EE, "5.5.0-rc.2").
%% The HTTP API version
-define(EMQX_API_VERSION, "5.0").

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -21,9 +21,4 @@
-define(TOMBSTONE_CONFIG_CHANGE_REQ, mark_it_for_deletion). -define(TOMBSTONE_CONFIG_CHANGE_REQ, mark_it_for_deletion).
-define(CONFIG_NOT_FOUND_MAGIC, '$0tFound'). -define(CONFIG_NOT_FOUND_MAGIC, '$0tFound').
%%--------------------------------------------------------------------
%% EE injections
%%--------------------------------------------------------------------
-define(EMQX_SSL_FUN_MFA(Name), {emqx_ssl_fun_mfa, Name}).
-endif. -endif.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -20,11 +20,4 @@
-define(IS_SESSION_IMPL_MEM(S), (is_tuple(S) andalso element(1, S) =:= session)). -define(IS_SESSION_IMPL_MEM(S), (is_tuple(S) andalso element(1, S) =:= session)).
-define(IS_SESSION_IMPL_DS(S), (is_map_key(id, S))). -define(IS_SESSION_IMPL_DS(S), (is_map_key(id, S))).
%% (Erlang) messages that a connection process should forward to the
%% session handler.
-record(session_message, {
message :: term()
}).
-define(session_message(MSG), #session_message{message = MSG}).
-endif. -endif.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

View File

@ -1,28 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2018-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_SHARED_SUB_HRL).
-define(EMQX_SHARED_SUB_HRL, true).
%% Mnesia table for shared sub message routing
-define(SHARED_SUBSCRIPTION, emqx_shared_subscription).
%% ETS tables for Shared PubSub
-define(SHARED_SUBSCRIBER, emqx_shared_subscriber).
-define(ALIVE_SHARED_SUBSCRIBERS, emqx_alive_shared_subscribers).
-define(SHARED_SUBS_ROUND_ROBIN_COUNTER, emqx_shared_subscriber_round_robin_counter).
-endif.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -20,33 +20,17 @@
-record(?TRACE, { -record(?TRACE, {
name :: binary() | undefined | '_', name :: binary() | undefined | '_',
type :: clientid | topic | ip_address | ruleid | undefined | '_', type :: clientid | topic | ip_address | undefined | '_',
filter :: filter ::
emqx_types:topic() emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address() | undefined | '_',
| emqx_types:clientid()
| emqx_trace:ip_address()
| emqx_trace:ruleid()
| undefined
| '_',
enable = true :: boolean() | '_', enable = true :: boolean() | '_',
payload_encode = text :: hex | text | hidden | '_', payload_encode = text :: hex | text | hidden | '_',
extra = #{formatter => text} :: #{formatter => text | json} | '_', extra = #{} :: map() | '_',
start_at :: integer() | undefined | '_', start_at :: integer() | undefined | '_',
end_at :: integer() | undefined | '_' end_at :: integer() | undefined | '_'
}). }).
-record(emqx_trace_format_func_data, {
function :: fun((any()) -> any()),
data :: any()
}).
-define(SHARD, ?COMMON_SHARD). -define(SHARD, ?COMMON_SHARD).
-define(MAX_SIZE, 30). -define(MAX_SIZE, 30).
-define(EMQX_TRACE_STOP_ACTION(REASON),
{unrecoverable_error, {action_stopped_after_template_rendering, REASON}}
).
-define(EMQX_TRACE_STOP_ACTION_MATCH, ?EMQX_TRACE_STOP_ACTION(_)).
-endif. -endif.
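
A hedged sketch of building a trace entry with the extended record above; the ?TRACE record name macro and the new ruleid type come from this header, everything else is illustrative:

%% Illustrative only: a trace record targeting a rule id, using the new fields.
new_rule_trace(Name, RuleId, NowSec) ->
    #?TRACE{
        name = Name,
        type = ruleid,
        filter = RuleId,
        enable = true,
        payload_encode = text,
        extra = #{formatter => json},
        start_at = NowSec,
        end_at = NowSec + 600
    }.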

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -17,7 +17,6 @@
%% HTTP API Auth %% HTTP API Auth
-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD'). -define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
-define(BAD_API_KEY_OR_SECRET, 'BAD_API_KEY_OR_SECRET'). -define(BAD_API_KEY_OR_SECRET, 'BAD_API_KEY_OR_SECRET').
-define(API_KEY_NOT_ALLOW, 'API_KEY_NOT_ALLOW').
-define(API_KEY_NOT_ALLOW_MSG, <<"This API Key doesn't have permission to access this resource">>). -define(API_KEY_NOT_ALLOW_MSG, <<"This API Key doesn't have permission to access this resource">>).
%% Bad Request %% Bad Request
@ -86,6 +85,5 @@
{'SOURCE_ERROR', <<"Source error">>}, {'SOURCE_ERROR', <<"Source error">>},
{'UPDATE_FAILED', <<"Update failed">>}, {'UPDATE_FAILED', <<"Update failed">>},
{'REST_FAILED', <<"Reset source or config failed">>}, {'REST_FAILED', <<"Reset source or config failed">>},
{'CLIENT_NOT_RESPONSE', <<"Client not responding">>}, {'CLIENT_NOT_RESPONSE', <<"Client not responding">>}
{'UNSUPPORTED_MEDIA_TYPE', <<"Unsupported media type">>}
]). ]).

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2018-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2018-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -30,7 +30,7 @@
logger:log( logger:log(
Level, Level,
(Data), (Data),
maps:merge(Meta, #{ (Meta#{
mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY},
line => ?LINE line => ?LINE
}) })
@ -40,40 +40,14 @@
end end
). ).
%% NOTE: do not forget to use an atom for msg, and to add every msg used with
%% this macro to the default value of the `log.throttling.msgs` list.
-define(SLOG_THROTTLE(Level, Data),
?SLOG_THROTTLE(Level, Data, #{})
).
-define(SLOG_THROTTLE(Level, Data, Meta),
?SLOG_THROTTLE(Level, undefined, Data, Meta)
).
-define(SLOG_THROTTLE(Level, UniqueKey, Data, Meta),
case logger:allow(Level, ?MODULE) of
true ->
(fun(#{msg := __Msg} = __Data) ->
case emqx_log_throttler:allow(__Msg, UniqueKey) of
true ->
logger:log(Level, __Data, Meta);
false ->
?_DO_TRACE(Level, __Msg, maps:merge(__Data, Meta))
end
end)(
Data
);
false ->
ok
end
).
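
A hedged usage sketch for the ?SLOG_THROTTLE macros defined above; the message atom is illustrative and, per the note, would have to appear in the default value of log.throttling.msgs:

%% Illustrative only: a throttled warning keyed on the msg atom.
log_mqueue_full(ClientId) ->
    ?SLOG_THROTTLE(warning, #{
        msg => example_dropped_msg_due_to_mqueue_is_full,
        clientid => ClientId
    }).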
-define(AUDIT_HANDLER, emqx_audit). -define(AUDIT_HANDLER, emqx_audit).
-define(TRACE_FILTER, emqx_trace_filter). -define(TRACE_FILTER, emqx_trace_filter).
-define(OWN_KEYS, [level, filters, filter_default, handlers]). -define(OWN_KEYS, [level, filters, filter_default, handlers]).
%% Internal macro -define(TRACE(Tag, Msg, Meta), ?TRACE(debug, Tag, Msg, Meta)).
-define(_DO_TRACE(Tag, Msg, Meta),
%% Only evaluate when necessary
-define(TRACE(Level, Tag, Msg, Meta), begin
case persistent_term:get(?TRACE_FILTER, []) of case persistent_term:get(?TRACE_FILTER, []) of
[] -> ok; [] -> ok;
%% We can't bind filter list to a variable because we pollute the calling scope with it. %% We can't bind filter list to a variable because we pollute the calling scope with it.
@ -81,17 +55,10 @@
%% because this adds overhead to the happy path. %% because this adds overhead to the happy path.
%% So evaluate `persistent_term:get` twice. %% So evaluate `persistent_term:get` twice.
_ -> emqx_trace:log(persistent_term:get(?TRACE_FILTER, []), Msg, (Meta)#{trace_tag => Tag}) _ -> emqx_trace:log(persistent_term:get(?TRACE_FILTER, []), Msg, (Meta)#{trace_tag => Tag})
end end,
).
-define(TRACE(Tag, Msg, Meta), ?TRACE(debug, Tag, Msg, Meta)).
%% Only evaluate when necessary
-define(TRACE(Level, Tag, Msg, Meta), begin
?_DO_TRACE(Tag, Msg, Meta),
?SLOG( ?SLOG(
Level, Level,
(Meta)#{msg => Msg, tag => Tag}, (emqx_trace_formatter:format_meta_map(Meta))#{msg => Msg, tag => Tag},
#{is_trace => false} #{is_trace => false}
) )
end). end).
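
And a hedged sketch of a ?TRACE call site this macro supports; the tag, message, and metadata below are illustrative:

%% Illustrative only: a debug-level trace plus the matching structured log entry.
trace_publish(ClientId, Topic) ->
    ?TRACE("MQTT", "publish_received", #{clientid => ClientId, topic => Topic}).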

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2019-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2019-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -14,7 +14,7 @@
%% limitations under the License. %% limitations under the License.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-type option(T) :: undefined | T. -type maybe(T) :: undefined | T.
-type startlink_ret() :: {ok, pid()} | ignore | {error, term()}. -type startlink_ret() :: {ok, pid()} | ignore | {error, term()}.
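
A hedged example of how the renamed option/1 type (formerly maybe/1) reads in a spec; the function is made up:

%% Illustrative only: option(T) expresses "T or undefined".
-spec takeover_pid(binary()) -> option(pid()).
takeover_pid(_ClientId) ->
    undefined.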

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-module(emqx_persistent_session_ds_SUITE). -module(emqx_persistent_session_ds_SUITE).
@ -18,23 +18,15 @@
%% CT boilerplate %% CT boilerplate
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
suite() ->
[{timetrap, {seconds, 60}}].
all() -> all() ->
emqx_common_test_helpers:all(?MODULE). emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) -> init_per_suite(Config) ->
case emqx_ds_test_helpers:skip_if_norepl() of TCApps = emqx_cth_suite:start(
false -> app_specs(),
TCApps = emqx_cth_suite:start( #{work_dir => emqx_cth_suite:work_dir(Config)}
app_specs(), ),
#{work_dir => emqx_cth_suite:work_dir(Config)} [{tc_apps, TCApps} | Config].
),
[{tc_apps, TCApps} | Config];
Yes ->
Yes
end.
end_per_suite(Config) -> end_per_suite(Config) ->
TCApps = ?config(tc_apps, Config), TCApps = ?config(tc_apps, Config),
@ -59,12 +51,12 @@ init_per_testcase(TestCase, Config) when
init_per_testcase(t_session_gc = TestCase, Config) -> init_per_testcase(t_session_gc = TestCase, Config) ->
Opts = #{ Opts = #{
n => 3, n => 3,
roles => [core, core, core], roles => [core, core, replicant],
extra_emqx_conf => extra_emqx_conf =>
"\n durable_sessions {" "\n session_persistence {"
"\n heartbeat_interval = 500ms " "\n last_alive_update_interval = 500ms "
"\n session_gc_interval = 1s " "\n session_gc_interval = 2s "
"\n session_gc_batch_size = 2 " "\n session_gc_batch_size = 1 "
"\n }" "\n }"
}, },
Cluster = cluster(Opts), Cluster = cluster(Opts),
@ -90,15 +82,13 @@ end_per_testcase(TestCase, Config) when
Nodes = ?config(nodes, Config), Nodes = ?config(nodes, Config),
emqx_common_test_helpers:call_janitor(60_000), emqx_common_test_helpers:call_janitor(60_000),
ok = emqx_cth_cluster:stop(Nodes), ok = emqx_cth_cluster:stop(Nodes),
snabbkaffe:stop(),
ok; ok;
end_per_testcase(_TestCase, _Config) -> end_per_testcase(_TestCase, _Config) ->
emqx_common_test_helpers:call_janitor(60_000), emqx_common_test_helpers:call_janitor(60_000),
snabbkaffe:stop(),
ok. ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Helper functions %% Helper fns
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
cluster(#{n := N} = Opts) -> cluster(#{n := N} = Opts) ->
@ -123,10 +113,10 @@ app_specs() ->
app_specs(_Opts = #{}). app_specs(_Opts = #{}).
app_specs(Opts) -> app_specs(Opts) ->
DefaultEMQXConf = "durable_sessions {enable = true, renew_streams_interval = 1s}",
ExtraEMQXConf = maps:get(extra_emqx_conf, Opts, ""), ExtraEMQXConf = maps:get(extra_emqx_conf, Opts, ""),
[ [
{emqx, DefaultEMQXConf ++ ExtraEMQXConf} emqx_durable_storage,
{emqx, "session_persistence = {enable = true}" ++ ExtraEMQXConf}
]. ].
get_mqtt_port(Node, Type) -> get_mqtt_port(Node, Type) ->
@ -140,30 +130,29 @@ wait_nodeup(Node) ->
pong = net_adm:ping(Node) pong = net_adm:ping(Node)
). ).
wait_gen_rpc_down(_NodeSpec = #{apps := Apps}) ->
#{override_env := Env} = proplists:get_value(gen_rpc, Apps),
Port = proplists:get_value(tcp_server_port, Env),
?retry(
_Sleep0 = 500,
_Attempts0 = 50,
false = emqx_common_test_helpers:is_tcp_server_available("127.0.0.1", Port)
).
start_client(Opts0 = #{}) -> start_client(Opts0 = #{}) ->
Defaults = #{ Defaults = #{
port => 1883,
proto_ver => v5, proto_ver => v5,
properties => #{'Session-Expiry-Interval' => 300} properties => #{'Session-Expiry-Interval' => 300}
}, },
Opts = emqx_utils_maps:deep_merge(Defaults, Opts0), Opts = maps:to_list(emqx_utils_maps:deep_merge(Defaults, Opts0)),
?tp(notice, "starting client", Opts), ct:pal("starting client with opts:\n ~p", [Opts]),
{ok, Client} = emqtt:start_link(maps:to_list(Opts)), {ok, Client} = emqtt:start_link(Opts),
unlink(Client),
on_exit(fun() -> catch emqtt:stop(Client) end), on_exit(fun() -> catch emqtt:stop(Client) end),
Client. Client.
start_connect_client(Opts = #{}) ->
Client = start_client(Opts),
?assertMatch({ok, _}, emqtt:connect(Client)),
Client.
mk_clientid(Prefix, ID) ->
iolist_to_binary(io_lib:format("~p/~p", [Prefix, ID])).
restart_node(Node, NodeSpec) -> restart_node(Node, NodeSpec) ->
?tp(will_restart_node, #{}), ?tp(will_restart_node, #{}),
emqx_cth_cluster:restart(NodeSpec), emqx_cth_cluster:restart(Node, NodeSpec),
wait_nodeup(Node), wait_nodeup(Node),
?tp(restarted_node, #{}), ?tp(restarted_node, #{}),
ok. ok.
@ -172,44 +161,58 @@ is_persistent_connect_opts(#{properties := #{'Session-Expiry-Interval' := EI}})
EI > 0. EI > 0.
list_all_sessions(Node) -> list_all_sessions(Node) ->
erpc:call(Node, emqx_persistent_session_ds_state, list_sessions, []). erpc:call(Node, emqx_persistent_session_ds, list_all_sessions, []).
list_all_subscriptions(Node) -> list_all_subscriptions(Node) ->
Sessions = list_all_sessions(Node), erpc:call(Node, emqx_persistent_session_ds, list_all_subscriptions, []).
lists:flatmap(
fun(ClientId) ->
#{s := #{subscriptions := Subs}} = erpc:call(
Node, emqx_persistent_session_ds, print_session, [ClientId]
),
maps:to_list(Subs)
end,
Sessions
).
list_all_pubranges(Node) -> list_all_pubranges(Node) ->
erpc:call(Node, emqx_persistent_session_ds, list_all_pubranges, []). erpc:call(Node, emqx_persistent_session_ds, list_all_pubranges, []).
session_open(Node, ClientId) -> prop_only_cores_run_gc(CoreNodes) ->
ClientInfo = #{}, {"only core nodes run gc", fun(Trace) -> ?MODULE:prop_only_cores_run_gc(Trace, CoreNodes) end}.
ConnInfo = #{peername => {undefined, undefined}, proto_name => <<"MQTT">>, proto_ver => 5}, prop_only_cores_run_gc(Trace, CoreNodes) ->
WillMsg = undefined, GCNodes = lists:usort([
erpc:call( N
Node, || #{
emqx_persistent_session_ds, ?snk_kind := K,
session_open, ?snk_meta := #{node := N}
[ClientId, ClientInfo, ConnInfo, WillMsg] } <- Trace,
). lists:member(K, [ds_session_gc, ds_session_gc_lock_taken]),
N =/= node()
force_last_alive_at(ClientId, Time) -> ]),
{ok, S0} = emqx_persistent_session_ds_state:open(ClientId), ?assertEqual(lists:usort(CoreNodes), GCNodes).
S = emqx_persistent_session_ds_state:set_last_alive_at(Time, S0),
_ = emqx_persistent_session_ds_state:commit(S),
ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Testcases %% Testcases
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
t_non_persistent_session_subscription(_Config) ->
ClientId = atom_to_binary(?FUNCTION_NAME),
SubTopicFilter = <<"t/#">>,
?check_trace(
begin
?tp(notice, "starting", #{}),
Client = start_client(#{
clientid => ClientId,
properties => #{'Session-Expiry-Interval' => 0}
}),
{ok, _} = emqtt:connect(Client),
?tp(notice, "subscribing", #{}),
{ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client, SubTopicFilter, qos2),
ok = emqtt:stop(Client),
ok
end,
fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
?assertEqual([], ?of_kind(ds_session_subscription_added, Trace)),
ok
end
),
ok.
t_session_subscription_idempotency(Config) -> t_session_subscription_idempotency(Config) ->
[Node1Spec | _] = ?config(node_specs, Config), [Node1Spec | _] = ?config(node_specs, Config),
[Node1] = ?config(nodes, Config), [Node1] = ?config(nodes, Config),
@ -217,7 +220,6 @@ t_session_subscription_idempotency(Config) ->
SubTopicFilter = <<"t/+">>, SubTopicFilter = <<"t/+">>,
ClientId = <<"myclientid">>, ClientId = <<"myclientid">>,
?check_trace( ?check_trace(
#{timetrap => 30_000},
begin begin
?force_ordering( ?force_ordering(
#{?snk_kind := persistent_session_ds_subscription_added}, #{?snk_kind := persistent_session_ds_subscription_added},
@ -258,8 +260,11 @@ t_session_subscription_idempotency(Config) ->
ok ok
end, end,
fun(_Trace) -> fun(Trace) ->
Session = session_open(Node1, ClientId), ct:pal("trace:\n ~p", [Trace]),
Session = erpc:call(
Node1, emqx_persistent_session_ds, session_open, [ClientId, _ConnInfo = #{}]
),
?assertMatch( ?assertMatch(
#{SubTopicFilter := #{}}, #{SubTopicFilter := #{}},
emqx_session:info(subscriptions, Session) emqx_session:info(subscriptions, Session)
@ -276,11 +281,11 @@ t_session_unsubscription_idempotency(Config) ->
SubTopicFilter = <<"t/+">>, SubTopicFilter = <<"t/+">>,
ClientId = <<"myclientid">>, ClientId = <<"myclientid">>,
?check_trace( ?check_trace(
#{timetrap => 30_000},
begin begin
?force_ordering( ?force_ordering(
#{ #{
?snk_kind := persistent_session_ds_subscription_delete ?snk_kind := persistent_session_ds_subscription_delete,
?snk_span := {complete, _}
}, },
_NEvents0 = 1, _NEvents0 = 1,
#{?snk_kind := will_restart_node}, #{?snk_kind := will_restart_node},
@ -331,8 +336,11 @@ t_session_unsubscription_idempotency(Config) ->
ok ok
end, end,
fun(_Trace) -> fun(Trace) ->
Session = session_open(Node1, ClientId), ct:pal("trace:\n ~p", [Trace]),
Session = erpc:call(
Node1, emqx_persistent_session_ds, session_open, [ClientId, _ConnInfo = #{}]
),
?assertEqual( ?assertEqual(
#{}, #{},
emqx_session:info(subscriptions, Session) emqx_session:info(subscriptions, Session)
@ -377,7 +385,6 @@ do_t_session_discard(Params) ->
ReconnectOpts = ReconnectOpts0#{clientid => ClientId}, ReconnectOpts = ReconnectOpts0#{clientid => ClientId},
SubTopicFilter = <<"t/+">>, SubTopicFilter = <<"t/+">>,
?check_trace( ?check_trace(
#{timetrap => 30_000},
begin begin
?tp(notice, "starting", #{}), ?tp(notice, "starting", #{}),
Client0 = start_client(#{ Client0 = start_client(#{
@ -395,38 +402,40 @@ do_t_session_discard(Params) ->
?retry( ?retry(
_Sleep0 = 100, _Sleep0 = 100,
_Attempts0 = 50, _Attempts0 = 50,
#{} = emqx_persistent_session_ds_state:print_session(ClientId) true = map_size(emqx_persistent_session_ds:list_all_streams()) > 0
), ),
ok = emqtt:stop(Client0), ok = emqtt:stop(Client0),
?tp(notice, "disconnected", #{}), ?tp(notice, "disconnected", #{}),
?tp(notice, "reconnecting", #{}), ?tp(notice, "reconnecting", #{}),
%% we still have the session: %% we still have streams
?assertMatch(#{}, emqx_persistent_session_ds_state:print_session(ClientId)), ?assert(map_size(emqx_persistent_session_ds:list_all_streams()) > 0),
Client1 = start_client(ReconnectOpts), Client1 = start_client(ReconnectOpts),
{ok, _} = emqtt:connect(Client1), {ok, _} = emqtt:connect(Client1),
?assertEqual([], emqtt:subscriptions(Client1)), ?assertEqual([], emqtt:subscriptions(Client1)),
case is_persistent_connect_opts(ReconnectOpts) of case is_persistent_connect_opts(ReconnectOpts) of
true -> true ->
?assertMatch(#{}, emqx_persistent_session_ds_state:print_session(ClientId)); ?assertMatch(#{ClientId := _}, emqx_persistent_session_ds:list_all_sessions());
false -> false ->
?assertEqual( ?assertEqual(#{}, emqx_persistent_session_ds:list_all_sessions())
undefined, emqx_persistent_session_ds_state:print_session(ClientId)
)
end, end,
?assertEqual(#{}, emqx_persistent_session_ds:list_all_subscriptions()),
?assertEqual([], emqx_persistent_session_ds_router:topics()), ?assertEqual([], emqx_persistent_session_ds_router:topics()),
?assertEqual(#{}, emqx_persistent_session_ds:list_all_streams()),
?assertEqual(#{}, emqx_persistent_session_ds:list_all_pubranges()),
ok = emqtt:stop(Client1), ok = emqtt:stop(Client1),
?tp(notice, "disconnected", #{}), ?tp(notice, "disconnected", #{}),
ok ok
end, end,
[] fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
ok
end
), ),
ok. ok.
t_session_expiration1(Config) -> t_session_expiration1(Config) ->
%% This testcase verifies that the properties passed in the
%% CONNECT packet are respected by the GC process:
ClientId = atom_to_binary(?FUNCTION_NAME), ClientId = atom_to_binary(?FUNCTION_NAME),
Opts = #{ Opts = #{
clientid => ClientId, clientid => ClientId,
@ -439,9 +448,6 @@ t_session_expiration1(Config) ->
do_t_session_expiration(Config, Opts). do_t_session_expiration(Config, Opts).
t_session_expiration2(Config) -> t_session_expiration2(Config) ->
%% This testcase updates the expiry interval for the session in
%% the _DISCONNECT_ packet. This setting should be respected by GC
%% process:
ClientId = atom_to_binary(?FUNCTION_NAME), ClientId = atom_to_binary(?FUNCTION_NAME),
Opts = #{ Opts = #{
clientid => ClientId, clientid => ClientId,
@ -456,8 +462,6 @@ t_session_expiration2(Config) ->
do_t_session_expiration(Config, Opts). do_t_session_expiration(Config, Opts).
do_t_session_expiration(_Config, Opts) -> do_t_session_expiration(_Config, Opts) ->
%% Sequence is a list of pairs of properties passed through the
%% CONNECT and for the DISCONNECT for each session:
#{ #{
clientid := ClientId, clientid := ClientId,
sequence := [ sequence := [
@ -468,14 +472,13 @@ do_t_session_expiration(_Config, Opts) ->
} = Opts, } = Opts,
CommonParams = #{proto_ver => v5, clientid => ClientId}, CommonParams = #{proto_ver => v5, clientid => ClientId},
?check_trace( ?check_trace(
#{timetrap => 30_000},
begin begin
Topic = <<"some/topic">>, Topic = <<"some/topic">>,
Params0 = maps:merge(CommonParams, FirstConn), Params0 = maps:merge(CommonParams, FirstConn),
Client0 = start_client(Params0), Client0 = start_client(Params0),
{ok, _} = emqtt:connect(Client0), {ok, _} = emqtt:connect(Client0),
{ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client0, Topic, ?QOS_2), {ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client0, Topic, ?QOS_2),
#{s := #{subscriptions := Subs0}} = emqx_persistent_session_ds:print_session(ClientId), Subs0 = emqx_persistent_session_ds:list_all_subscriptions(),
?assertEqual(1, map_size(Subs0), #{subs => Subs0}), ?assertEqual(1, map_size(Subs0), #{subs => Subs0}),
Info0 = maps:from_list(emqtt:info(Client0)), Info0 = maps:from_list(emqtt:info(Client0)),
?assertEqual(0, maps:get(session_present, Info0), #{info => Info0}), ?assertEqual(0, maps:get(session_present, Info0), #{info => Info0}),
@ -490,7 +493,7 @@ do_t_session_expiration(_Config, Opts) ->
?assertEqual([], Subs1), ?assertEqual([], Subs1),
emqtt:disconnect(Client1, ?RC_NORMAL_DISCONNECTION, SecondDisconn), emqtt:disconnect(Client1, ?RC_NORMAL_DISCONNECTION, SecondDisconn),
ct:sleep(2_500), ct:sleep(1_500),
Params2 = maps:merge(CommonParams, ThirdConn), Params2 = maps:merge(CommonParams, ThirdConn),
Client2 = start_client(Params2), Client2 = start_client(Params2),
@ -502,9 +505,9 @@ do_t_session_expiration(_Config, Opts) ->
emqtt:publish(Client2, Topic, <<"payload">>), emqtt:publish(Client2, Topic, <<"payload">>),
?assertNotReceive({publish, #{topic := Topic}}), ?assertNotReceive({publish, #{topic := Topic}}),
%% ensure subscriptions are absent from table. %% ensure subscriptions are absent from table.
#{s := #{subscriptions := Subs3}} = emqx_persistent_session_ds:print_session(ClientId), ?assertEqual(#{}, emqx_persistent_session_ds:list_all_subscriptions()),
?assertEqual([], maps:to_list(Subs3)),
emqtt:disconnect(Client2, ?RC_NORMAL_DISCONNECTION, ThirdDisconn), emqtt:disconnect(Client2, ?RC_NORMAL_DISCONNECTION, ThirdDisconn),
ok ok
end, end,
[] []
@ -512,13 +515,14 @@ do_t_session_expiration(_Config, Opts) ->
ok. ok.
t_session_gc(Config) -> t_session_gc(Config) ->
[Node1, _Node2, _Node3] = Nodes = ?config(nodes, Config), GCInterval = ?config(gc_interval, Config),
[Node1, Node2, _Node3] = Nodes = ?config(nodes, Config),
CoreNodes = [Node1, Node2],
[ [
Port1, Port1,
Port2, Port2,
Port3 Port3
] = lists:map(fun(N) -> get_mqtt_port(N, tcp) end, Nodes), ] = lists:map(fun(N) -> get_mqtt_port(N, tcp) end, Nodes),
ct:pal("Ports: ~p", [[Port1, Port2, Port3]]),
CommonParams = #{ CommonParams = #{
clean_start => false, clean_start => false,
proto_ver => v5 proto_ver => v5
@ -535,16 +539,15 @@ t_session_gc(Config) ->
end, end,
?check_trace( ?check_trace(
#{timetrap => 30_000},
begin begin
ClientId0 = <<"session_gc0">>,
Client0 = StartClient(ClientId0, Port1, 30),
ClientId1 = <<"session_gc1">>, ClientId1 = <<"session_gc1">>,
Client1 = StartClient(ClientId1, Port1, 30), Client1 = StartClient(ClientId1, Port2, 1),
ClientId2 = <<"session_gc2">>, ClientId2 = <<"session_gc2">>,
Client2 = StartClient(ClientId2, Port2, 1), Client2 = StartClient(ClientId2, Port3, 1),
ClientId3 = <<"session_gc3">>,
Client3 = StartClient(ClientId3, Port3, 1),
lists:foreach( lists:foreach(
fun(Client) -> fun(Client) ->
@ -554,51 +557,55 @@ t_session_gc(Config) ->
{ok, _} = emqtt:publish(Client, Topic, Payload, ?QOS_1), {ok, _} = emqtt:publish(Client, Topic, Payload, ?QOS_1),
ok ok
end, end,
[Client1, Client2, Client3] [Client0, Client1, Client2]
), ),
%% Clients are still alive; no session is garbage collected. %% Clients are still alive; no session is garbage collected.
?tp(notice, "waiting for gc", #{}), Res0 = ?block_until(
?assertMatch( #{
{ok, _}, ?snk_kind := ds_session_gc,
?block_until( ?snk_span := {complete, _},
#{ ?snk_meta := #{node := N}
?snk_kind := ds_session_gc, } when
?snk_span := {complete, _}, N =/= node(),
?snk_meta := #{node := N} 3 * GCInterval + 1_000
} when N =/= node()
)
), ),
?assertMatch([_, _, _], list_all_sessions(Node1), sessions), ?assertMatch({ok, _}, Res0),
?assertMatch([_, _, _], list_all_subscriptions(Node1), subscriptions), {ok, #{?snk_meta := #{time := T0}}} = Res0,
?tp(notice, "gc ran", #{}), Sessions0 = list_all_sessions(Node1),
Subs0 = list_all_subscriptions(Node1),
?assertEqual(3, map_size(Sessions0), #{sessions => Sessions0}),
?assertEqual(3, map_size(Subs0), #{subs => Subs0}),
%% Now we disconnect 2 of them; only those should be GC'ed. %% Now we disconnect 2 of them; only those should be GC'ed.
?assertMatch(
?tp(notice, "disconnecting client1", #{}), {ok, {ok, _}},
?wait_async_action(
emqtt:stop(Client1),
#{?snk_kind := terminate},
1_000
)
),
ct:pal("disconnected client1"),
?assertMatch( ?assertMatch(
{ok, {ok, _}}, {ok, {ok, _}},
?wait_async_action( ?wait_async_action(
emqtt:stop(Client2), emqtt:stop(Client2),
#{?snk_kind := terminate} #{?snk_kind := terminate},
1_000
) )
), ),
?tp(notice, "disconnected client1", #{}), ct:pal("disconnected client2"),
?assertMatch(
{ok, {ok, _}},
?wait_async_action(
emqtt:stop(Client3),
#{?snk_kind := terminate}
)
),
?tp(notice, "disconnected client2", #{}),
?assertMatch( ?assertMatch(
{ok, _}, {ok, _},
?block_until( ?block_until(
#{ #{
?snk_kind := ds_session_gc_cleaned, ?snk_kind := ds_session_gc_cleaned,
session_id := ClientId2 ?snk_meta := #{node := N, time := T},
} session_ids := [ClientId1]
} when
N =/= node() andalso T > T0,
4 * GCInterval + 1_000
) )
), ),
?assertMatch( ?assertMatch(
@ -606,116 +613,22 @@ t_session_gc(Config) ->
?block_until( ?block_until(
#{ #{
?snk_kind := ds_session_gc_cleaned, ?snk_kind := ds_session_gc_cleaned,
session_id := ClientId3 ?snk_meta := #{node := N, time := T},
} session_ids := [ClientId2]
} when
N =/= node() andalso T > T0,
4 * GCInterval + 1_000
) )
), ),
?retry(50, 3, [ClientId1] = list_all_sessions(Node1)), Sessions1 = list_all_sessions(Node1),
?assertMatch([_], list_all_subscriptions(Node1), subscriptions), Subs1 = list_all_subscriptions(Node1),
ok ?assertEqual(1, map_size(Sessions1), #{sessions => Sessions1}),
end, ?assertEqual(1, map_size(Subs1), #{subs => Subs1}),
[]
),
ok.
t_session_replay_retry(_Config) ->
%% Verify that the session recovers smoothly from transient errors during
%% replay.
ok = emqx_ds_test_helpers:mock_rpc(),
NClients = 10,
ClientSubOpts = #{
clientid => mk_clientid(?FUNCTION_NAME, sub),
auto_ack => never
},
ClientSub = start_connect_client(ClientSubOpts),
?assertMatch(
{ok, _, [?RC_GRANTED_QOS_1]},
emqtt:subscribe(ClientSub, <<"t/#">>, ?QOS_1)
),
ClientsPub = [
start_connect_client(#{
clientid => mk_clientid(?FUNCTION_NAME, I),
properties => #{'Session-Expiry-Interval' => 0}
})
|| I <- lists:seq(1, NClients)
],
lists:foreach(
fun(Client) ->
Index = integer_to_binary(rand:uniform(NClients)),
Topic = <<"t/", Index/binary>>,
?assertMatch({ok, #{}}, emqtt:publish(Client, Topic, Index, 1))
end,
ClientsPub
),
Pubs0 = emqx_common_test_helpers:wait_publishes(NClients, 5_000),
NPubs = length(Pubs0),
?assertEqual(NClients, NPubs, ?drainMailbox(1_500)),
ok = emqtt:stop(ClientSub),
%% Make `emqx_ds` believe that roughly half of the shards are unavailable.
ok = emqx_ds_test_helpers:mock_rpc_result(
fun(_Node, emqx_ds_replication_layer, _Function, [_DB, Shard | _]) ->
case erlang:phash2(Shard) rem 2 of
0 -> unavailable;
1 -> passthrough
end
end
),
_ClientSub = start_connect_client(ClientSubOpts#{clean_start => false}),
Pubs1 = emqx_common_test_helpers:wait_publishes(NPubs, 5_000),
?assert(length(Pubs1) < length(Pubs0), Pubs1),
%% "Recover" the shards.
emqx_ds_test_helpers:unmock_rpc(),
Pubs2 = emqx_common_test_helpers:wait_publishes(NPubs - length(Pubs1), 5_000),
?assertEqual(
[maps:with([topic, payload, qos], P) || P <- Pubs0],
[maps:with([topic, payload, qos], P) || P <- Pubs1 ++ Pubs2]
).
%% Check that we send will messages when performing GC without relying on timers set by
%% the channel process.
t_session_gc_will_message(_Config) ->
?check_trace(
#{timetrap => 10_000},
begin
WillTopic = <<"will/t">>,
ok = emqx:subscribe(WillTopic, #{qos => 2}),
ClientId = <<"will_msg_client">>,
Client = start_client(#{
clientid => ClientId,
will_topic => WillTopic,
will_payload => <<"will payload">>,
will_qos => 0,
will_props => #{'Will-Delay-Interval' => 300}
}),
{ok, _} = emqtt:connect(Client),
%% Use reason code =/= `?RC_SUCCESS' to allow will message
{ok, {ok, _}} =
?wait_async_action(
emqtt:disconnect(Client, ?RC_UNSPECIFIED_ERROR),
#{?snk_kind := emqx_cm_clean_down}
),
?assertNotReceive({deliver, WillTopic, _}),
%% Set fake `last_alive_at' to trigger immediate will message.
force_last_alive_at(ClientId, _Time = 0),
{ok, {ok, _}} =
?wait_async_action(
emqx_persistent_session_ds_gc_worker:check_session(ClientId),
#{?snk_kind := session_gc_published_will_msg}
),
?assertReceive({deliver, WillTopic, _}),
ok ok
end, end,
[] [
prop_only_cores_run_gc(CoreNodes)
]
), ),
ok. ok.

View File

@ -1,74 +0,0 @@
defmodule EMQX.MixProject do
use Mix.Project
alias EMQXUmbrella.MixProject, as: UMP
def project do
[
app: :emqx,
version: "0.1.0",
build_path: "../../_build",
erlc_paths: erlc_paths(),
erlc_options: [
{:i, "src"}
| UMP.erlc_options()
],
compilers: Mix.compilers() ++ [:copy_srcs],
# used by our `Mix.Tasks.Compile.CopySrcs` compiler
extra_dirs: extra_dirs(),
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.14",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications
def application do
[
## FIXME!!! go through emqx.app.src and add missing stuff...
extra_applications: [:public_key, :ssl, :os_mon, :logger, :mnesia, :sasl] ++ UMP.extra_applications(),
mod: {:emqx_app, []}
]
end
def deps() do
## FIXME!!! go through emqx.app.src and add missing stuff...
[
{:emqx_mix_utils, in_umbrella: true, runtime: false},
{:emqx_utils, in_umbrella: true},
{:emqx_ds_backends, in_umbrella: true},
UMP.common_dep(:gproc),
UMP.common_dep(:gen_rpc),
UMP.common_dep(:ekka),
UMP.common_dep(:esockd),
UMP.common_dep(:cowboy),
UMP.common_dep(:lc),
UMP.common_dep(:hocon),
UMP.common_dep(:ranch),
UMP.common_dep(:bcrypt),
UMP.common_dep(:pbkdf2),
UMP.common_dep(:emqx_http_lib),
] ++ UMP.quicer_dep()
end
defp erlc_paths() do
paths = UMP.erlc_paths()
if UMP.test_env?() do
["integration_test" | paths]
else
paths
end
end
defp extra_dirs() do
dirs = ["src", "etc"]
if UMP.test_env?() do
["test", "integration_test" | dirs]
else
dirs
end
end
end

View File

@ -10,14 +10,11 @@
{emqx_bridge,5}. {emqx_bridge,5}.
{emqx_bridge,6}. {emqx_bridge,6}.
{emqx_broker,1}. {emqx_broker,1}.
{emqx_cluster_link,1}.
{emqx_cm,1}. {emqx_cm,1}.
{emqx_cm,2}. {emqx_cm,2}.
{emqx_cm,3}.
{emqx_conf,1}. {emqx_conf,1}.
{emqx_conf,2}. {emqx_conf,2}.
{emqx_conf,3}. {emqx_conf,3}.
{emqx_conf,4}.
{emqx_connector,1}. {emqx_connector,1}.
{emqx_dashboard,1}. {emqx_dashboard,1}.
{emqx_delayed,1}. {emqx_delayed,1}.
@ -25,12 +22,8 @@
{emqx_delayed,3}. {emqx_delayed,3}.
{emqx_ds,1}. {emqx_ds,1}.
{emqx_ds,2}. {emqx_ds,2}.
{emqx_ds,3}.
{emqx_ds,4}.
{emqx_ds_shared_sub,1}.
{emqx_eviction_agent,1}. {emqx_eviction_agent,1}.
{emqx_eviction_agent,2}. {emqx_eviction_agent,2}.
{emqx_eviction_agent,3}.
{emqx_exhook,1}. {emqx_exhook,1}.
{emqx_ft_storage_exporter_fs,1}. {emqx_ft_storage_exporter_fs,1}.
{emqx_ft_storage_fs,1}. {emqx_ft_storage_fs,1}.
@ -44,13 +37,9 @@
{emqx_management,2}. {emqx_management,2}.
{emqx_management,3}. {emqx_management,3}.
{emqx_management,4}. {emqx_management,4}.
{emqx_management,5}.
{emqx_metrics,1}. {emqx_metrics,1}.
{emqx_metrics,2}.
{emqx_mgmt_api_plugins,1}. {emqx_mgmt_api_plugins,1}.
{emqx_mgmt_api_plugins,2}. {emqx_mgmt_api_plugins,2}.
{emqx_mgmt_api_plugins,3}.
{emqx_mgmt_api_relup,1}.
{emqx_mgmt_cluster,1}. {emqx_mgmt_cluster,1}.
{emqx_mgmt_cluster,2}. {emqx_mgmt_cluster,2}.
{emqx_mgmt_cluster,3}. {emqx_mgmt_cluster,3}.
@ -63,18 +52,16 @@
{emqx_node_rebalance_api,1}. {emqx_node_rebalance_api,1}.
{emqx_node_rebalance_api,2}. {emqx_node_rebalance_api,2}.
{emqx_node_rebalance_evacuation,1}. {emqx_node_rebalance_evacuation,1}.
{emqx_node_rebalance_purge,1}.
{emqx_node_rebalance_status,1}. {emqx_node_rebalance_status,1}.
{emqx_node_rebalance_status,2}. {emqx_node_rebalance_status,2}.
{emqx_persistent_session_ds,1}. {emqx_persistent_session_ds,1}.
{emqx_plugins,1}. {emqx_plugins,1}.
{emqx_plugins,2}.
{emqx_prometheus,1}. {emqx_prometheus,1}.
{emqx_prometheus,2}. {emqx_prometheus,2}.
{emqx_resource,1}. {emqx_resource,1}.
{emqx_resource,2}.
{emqx_retainer,1}. {emqx_retainer,1}.
{emqx_retainer,2}. {emqx_retainer,2}.
{emqx_router,1}.
{emqx_rule_engine,1}. {emqx_rule_engine,1}.
{emqx_shared_sub,1}. {emqx_shared_sub,1}.
{emqx_slow_subs,1}. {emqx_slow_subs,1}.

View File

@ -24,18 +24,17 @@
{deps, [ {deps, [
{emqx_utils, {path, "../emqx_utils"}}, {emqx_utils, {path, "../emqx_utils"}},
{emqx_durable_storage, {path, "../emqx_durable_storage"}}, {emqx_durable_storage, {path, "../emqx_durable_storage"}},
{emqx_ds_backends, {path, "../emqx_ds_backends"}},
{lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}}, {lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}},
{gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}}, {gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}},
{cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}}, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}},
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.12.0"}}}, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.11.1"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.5"}}}, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.18.3"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.43.2"}}}, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.4"}}},
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},
{pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
{recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.10"}}} {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.8"}}}
]}. ]}.
{plugins, [{rebar3_proper, "0.12.1"}, rebar3_path_deps]}. {plugins, [{rebar3_proper, "0.12.1"}, rebar3_path_deps]}.
@ -72,7 +71,7 @@
{statistics, true} {statistics, true}
]}. ]}.
{project_plugins, [{erlfmt, "1.3.0"}]}. {project_plugins, [erlfmt]}.
{erlfmt, [ {erlfmt, [
{files, [ {files, [

View File

@ -24,8 +24,7 @@ IsQuicSupp = fun() ->
end, end,
Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.0"}}}, Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.0"}}},
Quicer = Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.313"}}}.
{quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.500"}}}.
Dialyzer = fun(Config) -> Dialyzer = fun(Config) ->
{dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config), {dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config),

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -16,14 +16,9 @@
-module(emqx_config_backup). -module(emqx_config_backup).
-type ok_result() :: #{
root_key => emqx_utils_maps:config_key(),
changed => [emqx_utils_maps:config_key_path()]
}.
-type error_result() :: #{root_key => emqx_utils_maps:config_key(), reason => term()}.
-callback import_config(RawConf :: map()) -> -callback import_config(RawConf :: map()) ->
{ok, ok_result()} {ok, #{
| {error, error_result()} root_key => emqx_utils_maps:config_key(),
| {results, {[ok_result()], [error_result()]}}. changed => [emqx_utils_maps:config_key_path()]
}}
| {error, #{root_key => emqx_utils_maps:config_key(), reason => term()}}.
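
A hedged sketch of a module implementing the widened import_config/1 contract shown above; the module name and config root key are illustrative:

%% Illustrative only: a backup provider returning the {ok, ...} result shape.
-module(example_thing_config_backup).
-behaviour(emqx_config_backup).
-export([import_config/1]).

import_config(#{<<"example_thing">> := _Conf}) ->
    {ok, #{root_key => example_thing, changed => [[example_thing]]}};
import_config(_RawConf) ->
    %% Nothing relevant for this root key in the backup.
    {ok, #{root_key => example_thing, changed => []}}.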

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -18,8 +18,6 @@
-type traverse_break_reason() :: over | migrate. -type traverse_break_reason() :: over | migrate.
-type opts() :: #{print_fun => fun((io:format(), [term()]) -> ok)}.
-callback backup_tables() -> [mria:table()]. -callback backup_tables() -> [mria:table()].
%% validate the backup %% validate the backup
@ -33,9 +31,6 @@
-callback migrate_mnesia_backup(tuple()) -> {ok, tuple()} | {error, term()}. -callback migrate_mnesia_backup(tuple()) -> {ok, tuple()} | {error, term()}.
%% NOTE: currently, this is called only when the table has been restored successfully. -optional_callbacks([validate_mnesia_backup/1, migrate_mnesia_backup/1]).
-callback on_backup_table_imported(mria:table(), opts()) -> ok | {error, term()}.
-optional_callbacks([validate_mnesia_backup/1, migrate_mnesia_backup/1, on_backup_table_imported/2]).
-export_type([traverse_break_reason/0]). -export_type([traverse_break_reason/0]).
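
A hedged sketch of a table-backup provider using the optional on_backup_table_imported/2 hook added above, assuming the behaviour module here is emqx_db_backup; the table and module names are illustrative:

%% Illustrative only: declare a backup table and react once it has been restored.
-module(example_thing_db_backup).
-behaviour(emqx_db_backup).
-export([backup_tables/0, on_backup_table_imported/2]).

backup_tables() ->
    [example_thing_table].

on_backup_table_imported(example_thing_table, #{print_fun := Print}) ->
    Print("example_thing_table imported~n", []),
    ok;
on_backup_table_imported(_Table, _Opts) ->
    ok.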

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -18,15 +18,14 @@
%% API: %% API:
-export([ -export([
start/0, start/0,
announce/2, announce/1,
supported_version/1, supported_version/2, supported_version/1, supported_version/2,
versions_file/1 versions_file/1
]). ]).
%% Internal exports (RPC) %% Internal exports (RPC)
-export([ -export([
announce_fun/1, announce_fun/1
announce_fun/2
]). ]).
-export_type([api/0, api_version/0, var_name/0, call/0, rpc/0, bpapi_meta/0]). -export_type([api/0, api_version/0, var_name/0, call/0, rpc/0, bpapi_meta/0]).
@ -67,7 +66,7 @@ start() ->
{rlog_shard, ?COMMON_SHARD} {rlog_shard, ?COMMON_SHARD}
]), ]),
ok = mria:wait_for_tables([?TAB]), ok = mria:wait_for_tables([?TAB]),
announce(node(), emqx). announce(emqx).
%% @doc Get maximum version of the backplane API supported by the node %% @doc Get maximum version of the backplane API supported by the node
-spec supported_version(node(), api()) -> api_version() | undefined. -spec supported_version(node(), api()) -> api_version() | undefined.
@ -83,38 +82,11 @@ supported_version(Node, API) ->
supported_version(API) -> supported_version(API) ->
ets:lookup_element(?TAB, {?multicall, API}, #?TAB.version). ets:lookup_element(?TAB, {?multicall, API}, #?TAB.version).
-spec announce(node(), atom()) -> ok. -spec announce(atom()) -> ok.
announce(Node, App) -> announce(App) ->
{ok, Data} = file:consult(?MODULE:versions_file(App)), {ok, Data} = file:consult(?MODULE:versions_file(App)),
%% A replicant (>= 5.6.0) may call announce_fun/2 on an old core (< 5.6.0) where it is undefined, {atomic, ok} = mria:transaction(?COMMON_SHARD, fun ?MODULE:announce_fun/1, [Data]),
%% so we fall back to an anonymous function to do the update. ok.
case mria:transaction(?COMMON_SHARD, fun ?MODULE:announce_fun/2, [Node, Data]) of
{atomic, ok} ->
ok;
{aborted, {undef, [{?MODULE, announce_fun, _, _} | _]}} ->
{atomic, ok} = mria:transaction(
?COMMON_SHARD,
fun() ->
MS = ets:fun2ms(fun(#?TAB{key = {N, API}}) when N =:= Node ->
{N, API}
end),
OldKeys = mnesia:select(?TAB, MS, write),
_ = [
mnesia:delete({?TAB, Key})
|| Key <- OldKeys
],
%% Insert new records:
_ = [
mnesia:write(#?TAB{key = {Node, API}, version = Version})
|| {API, Version} <- Data
],
%% Update maximum supported version:
_ = [update_minimum(API) || {API, _} <- Data],
ok
end
),
ok
end.
-spec versions_file(atom()) -> file:filename_all(). -spec versions_file(atom()) -> file:filename_all().
versions_file(App) -> versions_file(App) ->
@ -124,18 +96,11 @@ versions_file(App) ->
%% Internal functions %% Internal functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Attention:
%% This function exists only to avoid errors when it is called during a rolling
%% upgrade from a version older than 5.5.0. The 'node' it uses is wrong!
-spec announce_fun([{api(), api_version()}]) -> ok. -spec announce_fun([{api(), api_version()}]) -> ok.
announce_fun(Data) -> announce_fun(Data) ->
announce_fun(node(), Data).
-spec announce_fun(node(), [{api(), api_version()}]) -> ok.
announce_fun(Node, Data) ->
%% Delete old records, if present: %% Delete old records, if present:
MS = ets:fun2ms(fun(#?TAB{key = {N, API}}) when N =:= Node -> MS = ets:fun2ms(fun(#?TAB{key = {node(), API}}) ->
{N, API} {node(), API}
end), end),
OldKeys = mnesia:select(?TAB, MS, write), OldKeys = mnesia:select(?TAB, MS, write),
_ = [ _ = [
@ -144,7 +109,7 @@ announce_fun(Node, Data) ->
], ],
%% Insert new records: %% Insert new records:
_ = [ _ = [
mnesia:write(#?TAB{key = {Node, API}, version = Version}) mnesia:write(#?TAB{key = {node(), API}, version = Version})
|| {API, Version} <- Data || {API, Version} <- Data
], ],
%% Update maximum supported version: %% Update maximum supported version:

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2022-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2020-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -237,38 +237,25 @@ log_formatter(HandlerName, Conf) ->
_ -> _ ->
conf_get("formatter", Conf) conf_get("formatter", Conf)
end, end,
TsFormat = timestamp_format(Conf),
WithMfa = conf_get("with_mfa", Conf),
PayloadEncode = conf_get("payload_encode", Conf, text),
do_formatter( do_formatter(
Format, CharsLimit, SingleLine, TimeOffSet, Depth, TsFormat, WithMfa, PayloadEncode Format, CharsLimit, SingleLine, TimeOffSet, Depth
). ).
%% auto | epoch | rfc3339
timestamp_format(Conf) ->
conf_get("timestamp_format", Conf).
%% helpers %% helpers
do_formatter(json, CharsLimit, SingleLine, TimeOffSet, Depth, TsFormat, WithMfa, PayloadEncode) -> do_formatter(json, CharsLimit, SingleLine, TimeOffSet, Depth) ->
{emqx_logger_jsonfmt, #{ {emqx_logger_jsonfmt, #{
chars_limit => CharsLimit, chars_limit => CharsLimit,
single_line => SingleLine, single_line => SingleLine,
time_offset => TimeOffSet, time_offset => TimeOffSet,
depth => Depth, depth => Depth
timestamp_format => TsFormat,
with_mfa => WithMfa,
payload_encode => PayloadEncode
}}; }};
do_formatter(text, CharsLimit, SingleLine, TimeOffSet, Depth, TsFormat, WithMfa, PayloadEncode) -> do_formatter(text, CharsLimit, SingleLine, TimeOffSet, Depth) ->
{emqx_logger_textfmt, #{ {emqx_logger_textfmt, #{
template => ["[", level, "] ", msg, "\n"], template => [time, " [", level, "] ", msg, "\n"],
chars_limit => CharsLimit, chars_limit => CharsLimit,
single_line => SingleLine, single_line => SingleLine,
time_offset => TimeOffSet, time_offset => TimeOffSet,
depth => Depth, depth => Depth
timestamp_format => TsFormat,
with_mfa => WithMfa,
payload_encode => PayloadEncode
}}. }}.
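
A hedged sketch of wiring the text formatter configuration produced above into an OTP logger handler; the option values below are illustrative defaults, not the ones computed from the actual config:

%% Illustrative only: install emqx_logger_textfmt with the new formatter options.
set_console_formatter() ->
    Formatter =
        {emqx_logger_textfmt, #{
            template => ["[", level, "] ", msg, "\n"],
            chars_limit => unlimited,
            single_line => true,
            time_offset => "+00:00",
            depth => 100,
            timestamp_format => auto,
            with_mfa => false,
            payload_encode => text
        }},
    logger:update_handler_config(default, formatter, Formatter).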
%% Don't record all logger message %% Don't record all logger message

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2020-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -20,7 +20,6 @@
%% API %% API
-export([add_handler/0, remove_handler/0, pre_config_update/3]). -export([add_handler/0, remove_handler/0, pre_config_update/3]).
-export([is_olp_enabled/0]). -export([is_olp_enabled/0]).
-export([assert_zone_exists/1]).
-define(ZONES, [zones]). -define(ZONES, [zones]).
@ -45,26 +44,3 @@ is_olp_enabled() ->
false, false,
emqx_config:get([zones], #{}) emqx_config:get([zones], #{})
). ).
-spec assert_zone_exists(binary() | atom()) -> ok.
assert_zone_exists(Name0) when is_binary(Name0) ->
%% an existing zone must have already an atom-name
Name =
try
binary_to_existing_atom(Name0)
catch
_:_ ->
throw({unknown_zone, Name0})
end,
assert_zone_exists(Name);
assert_zone_exists(default) ->
%% there is always a 'default' zone
ok;
assert_zone_exists(Name) when is_atom(Name) ->
try
_ = emqx_config:get([zones, Name]),
ok
catch
error:{config_not_found, _} ->
throw({unknown_zone, Name})
end.
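
Aside: a small usage sketch for the new assert_zone_exists/1, assuming it is exported from emqx_config_zones (the file name is not shown in this diff) and using a made-up zone name:

    -module(zone_check_sketch).
    -export([validate_zone/1]).

    %% Accepts a binary or atom zone name; returns {error, ...} instead of throwing.
    validate_zone(ZoneName) ->
        try
            ok = emqx_config_zones:assert_zone_exists(ZoneName),
            {ok, ZoneName}
        catch
            throw:{unknown_zone, Name} ->
                {error, {unknown_zone, Name}}
        end.

For example, validate_zone(<<"external">>) would return {error, {unknown_zone, <<"external">>}} unless such a zone is configured.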

View File

@ -2,7 +2,7 @@
{application, emqx, [ {application, emqx, [
{id, "emqx"}, {id, "emqx"},
{description, "EMQX Core"}, {description, "EMQX Core"},
{vsn, "5.3.4"}, {vsn, "5.1.19"},
{modules, []}, {modules, []},
{registered, []}, {registered, []},
{applications, [ {applications, [
@ -18,7 +18,7 @@
sasl, sasl,
lc, lc,
hocon, hocon,
emqx_ds_backends, emqx_durable_storage,
bcrypt, bcrypt,
pbkdf2, pbkdf2,
emqx_http_lib, emqx_http_lib,

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -61,12 +61,9 @@
get_raw_config/2, get_raw_config/2,
update_config/2, update_config/2,
update_config/3, update_config/3,
update_config/4,
remove_config/1, remove_config/1,
remove_config/2, remove_config/2,
remove_config/3,
reset_config/2, reset_config/2,
reset_config/3,
data_dir/0, data_dir/0,
etc_file/1, etc_file/1,
cert_file/1, cert_file/1,
@ -198,7 +195,7 @@ get_raw_config(KeyPath, Default) ->
-spec update_config(emqx_utils_maps:config_key_path(), emqx_config:update_request()) -> -spec update_config(emqx_utils_maps:config_key_path(), emqx_config:update_request()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(KeyPath, UpdateReq) -> update_config(KeyPath, UpdateReq) ->
update_config(KeyPath, UpdateReq, #{}, #{}). update_config(KeyPath, UpdateReq, #{}).
-spec update_config( -spec update_config(
emqx_utils_maps:config_key_path(), emqx_utils_maps:config_key_path(),
@ -206,56 +203,30 @@ update_config(KeyPath, UpdateReq) ->
emqx_config:update_opts() emqx_config:update_opts()
) -> ) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(KeyPath, UpdateReq, Opts) -> update_config([RootName | _] = KeyPath, UpdateReq, Opts) ->
update_config(KeyPath, UpdateReq, Opts, #{}).
-spec update_config(
emqx_utils_maps:config_key_path(),
emqx_config:update_request(),
emqx_config:update_opts(),
emqx_config:cluster_rpc_opts()
) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config([RootName | _] = KeyPath, UpdateReq, Opts, ClusterRpcOpts) ->
emqx_config_handler:update_config( emqx_config_handler:update_config(
emqx_config:get_schema_mod(RootName), emqx_config:get_schema_mod(RootName),
KeyPath, KeyPath,
{{update, UpdateReq}, Opts}, {{update, UpdateReq}, Opts}
ClusterRpcOpts
). ).
-spec remove_config(emqx_utils_maps:config_key_path()) -> -spec remove_config(emqx_utils_maps:config_key_path()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
remove_config(KeyPath) -> remove_config(KeyPath) ->
remove_config(KeyPath, #{}, #{}). remove_config(KeyPath, #{}).
-spec remove_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) -> -spec remove_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
remove_config([_RootName | _] = KeyPath, Opts) -> remove_config([RootName | _] = KeyPath, Opts) ->
remove_config(KeyPath, Opts, #{}).
-spec remove_config(
emqx_utils_maps:config_key_path(), emqx_config:update_opts(), emqx_config:cluster_rpc_opts()
) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
remove_config([RootName | _] = KeyPath, Opts, ClusterRpcOpts) ->
emqx_config_handler:update_config( emqx_config_handler:update_config(
emqx_config:get_schema_mod(RootName), emqx_config:get_schema_mod(RootName),
KeyPath, KeyPath,
{remove, Opts}, {remove, Opts}
ClusterRpcOpts
). ).
-spec reset_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) -> -spec reset_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
reset_config([RootName | SubKeys] = KeyPath, Opts) -> reset_config([RootName | SubKeys] = KeyPath, Opts) ->
reset_config([RootName | SubKeys] = KeyPath, Opts, #{}).
-spec reset_config(
emqx_utils_maps:config_key_path(), emqx_config:update_opts(), emqx_config:cluster_rpc_opts()
) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
reset_config([RootName | SubKeys] = KeyPath, Opts, ClusterRpcOpts) ->
case emqx_config:get_default_value(KeyPath) of case emqx_config:get_default_value(KeyPath) of
{ok, Default} -> {ok, Default} ->
Mod = emqx_config:get_schema_mod(RootName), Mod = emqx_config:get_schema_mod(RootName),
@ -264,8 +235,7 @@ reset_config([RootName | SubKeys] = KeyPath, Opts, ClusterRpcOpts) ->
emqx_config_handler:update_config( emqx_config_handler:update_config(
Mod, Mod,
KeyPath, KeyPath,
{{update, Default}, Opts}, {{update, Default}, Opts}
ClusterRpcOpts
); );
false -> false ->
NewConf = NewConf =
@ -277,8 +247,7 @@ reset_config([RootName | SubKeys] = KeyPath, Opts, ClusterRpcOpts) ->
emqx_config_handler:update_config( emqx_config_handler:update_config(
Mod, Mod,
[RootName], [RootName],
{{update, NewConf}, Opts}, {{update, NewConf}, Opts}
ClusterRpcOpts
) )
end; end;
{error, _} = Error -> {error, _} = Error ->
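
Aside: the widened API above threads a cluster-RPC options map through update/remove/reset, while the lower arities still default it to #{}. A minimal sketch of a caller, where the key path, value and update options are assumptions, not taken from this diff:

    -module(config_update_sketch).
    -export([bump_max_packet_size/1]).

    bump_max_packet_size(Bytes) when is_integer(Bytes), Bytes > 0 ->
        KeyPath = [mqtt, max_packet_size],          %% assumed key path
        %% emqx:update_config(KeyPath, Bytes) would default both option maps to #{};
        %% the new 4-arity form passes them explicitly (the opts keys are assumptions):
        {ok, _Result} = emqx:update_config(KeyPath, Bytes, #{override_to => cluster}, #{}),
        ok.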

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -56,31 +56,31 @@ authenticate(Credential) ->
NotSuperUser = #{is_superuser => false}, NotSuperUser = #{is_superuser => false},
case pre_hook_authenticate(Credential) of case pre_hook_authenticate(Credential) of
ok -> ok ->
on_authentication_complete(Credential, NotSuperUser, anonymous), inc_authn_metrics(anonymous),
{ok, NotSuperUser}; {ok, NotSuperUser};
continue -> continue ->
case run_hooks('client.authenticate', [Credential], ignore) of case run_hooks('client.authenticate', [Credential], ignore) of
ignore -> ignore ->
on_authentication_complete(Credential, NotSuperUser, anonymous), inc_authn_metrics(anonymous),
{ok, NotSuperUser}; {ok, NotSuperUser};
ok -> ok ->
on_authentication_complete(Credential, NotSuperUser, ok), inc_authn_metrics(ok),
{ok, NotSuperUser}; {ok, NotSuperUser};
{ok, AuthResult} = OkResult -> {ok, _AuthResult} = OkResult ->
on_authentication_complete(Credential, AuthResult, ok), inc_authn_metrics(ok),
OkResult; OkResult;
{ok, AuthResult, _AuthData} = OkResult -> {ok, _AuthResult, _AuthData} = OkResult ->
on_authentication_complete(Credential, AuthResult, ok), inc_authn_metrics(ok),
OkResult; OkResult;
{error, Reason} = Error -> {error, _Reason} = Error ->
on_authentication_complete(Credential, Reason, error), inc_authn_metrics(error),
Error; Error;
%% {continue, AuthCache} | {continue, AuthData, AuthCache} %% {continue, AuthCache} | {continue, AuthData, AuthCache}
Other -> Other ->
Other Other
end; end;
{error, Reason} = Error -> {error, _Reason} = Error ->
on_authentication_complete(Credential, Reason, error), inc_authn_metrics(error),
Error Error
end. end.
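
Aside: a simplified restatement of the hook-result shapes handled in the case expression above (ignore, ok, {ok, Result}, {ok, Result, Data}, {error, Reason}, and the {continue, ...} passthrough); the is_superuser default mirrors NotSuperUser, everything else follows the hunk:

    -module(authn_result_sketch).
    -export([normalize_authn_result/1]).

    normalize_authn_result(ignore) -> {ok, #{is_superuser => false}};
    normalize_authn_result(ok) -> {ok, #{is_superuser => false}};
    normalize_authn_result({ok, AuthResult}) -> {ok, AuthResult};
    normalize_authn_result({ok, AuthResult, _AuthData}) -> {ok, AuthResult};
    normalize_authn_result({error, _Reason} = Error) -> Error;
    normalize_authn_result(Other) -> Other.  %% {continue, AuthCache} | {continue, AuthData, AuthCache}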
@ -154,7 +154,7 @@ do_authorize(ClientInfo, Action, Topic) ->
case run_hooks('client.authorize', [ClientInfo, Action, Topic], Default) of case run_hooks('client.authorize', [ClientInfo, Action, Topic], Default) of
AuthzResult = #{result := Result} when Result == allow; Result == deny -> AuthzResult = #{result := Result} when Result == allow; Result == deny ->
From = maps:get(from, AuthzResult, unknown), From = maps:get(from, AuthzResult, unknown),
ok = log_result(Topic, Action, From, Result), ok = log_result(ClientInfo, Topic, Action, From, Result),
emqx_hooks:run( emqx_hooks:run(
'client.check_authz_complete', 'client.check_authz_complete',
[ClientInfo, Action, Topic, Result, From] [ClientInfo, Action, Topic, Result, From]
@ -173,28 +173,19 @@ do_authorize(ClientInfo, Action, Topic) ->
deny deny
end. end.
log_result(Topic, Action, From, Result) -> log_result(#{username := Username}, Topic, Action, From, Result) ->
LogMeta = fun() -> LogMeta = fun() ->
#{ #{
username => Username,
topic => Topic, topic => Topic,
action => format_action(Action), action => format_action(Action),
source => format_from(From) source => format_from(From)
} }
end, end,
do_log_result(Action, Result, LogMeta). case Result of
allow -> ?SLOG(info, (LogMeta())#{msg => "authorization_permission_allowed"});
do_log_result(_Action, allow, LogMeta) -> deny -> ?SLOG(warning, (LogMeta())#{msg => "authorization_permission_denied"})
?SLOG(info, (LogMeta())#{msg => "authorization_permission_allowed"}, #{tag => "AUTHZ"}); end.
do_log_result(?AUTHZ_PUBLISH_MATCH_MAP(_, _), deny, LogMeta) ->
%% for publish action, we do not log permission deny at warning level here
%% because it will be logged as cannot_publish_to_topic_due_to_not_authorized
?SLOG(info, (LogMeta())#{msg => "authorization_permission_denied"}, #{tag => "AUTHZ"});
do_log_result(_, deny, LogMeta) ->
?SLOG_THROTTLE(
warning,
(LogMeta())#{msg => authorization_permission_denied},
#{tag => "AUTHZ"}
).
%% @private Format authorization rules source. %% @private Format authorization rules source.
format_from(default) -> format_from(default) ->
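
Aside: the do_log_result change above keeps allows at info level, keeps publish denies at info level (they surface later as cannot_publish_to_topic_due_to_not_authorized), and rate-limits all other denies. A rough sketch of the same decision table using the plain OTP logger in place of the EMQX-internal ?SLOG/?SLOG_THROTTLE macros, and a plain publish atom in place of the ?AUTHZ_PUBLISH_MATCH_MAP pattern:

    -module(authz_log_sketch).
    -export([log_authz_result/3]).

    %% Meta is a map such as #{topic => ..., action => ..., source => ...}.
    log_authz_result(allow, _Action, Meta) ->
        logger:info(Meta#{msg => "authorization_permission_allowed"});
    log_authz_result(deny, publish, Meta) ->
        %% publish denies are reported again elsewhere, so keep this one quiet
        logger:info(Meta#{msg => "authorization_permission_denied"});
    log_authz_result(deny, _Action, Meta) ->
        %% the real code rate-limits this warning via ?SLOG_THROTTLE
        logger:warning(Meta#{msg => authorization_permission_denied}).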
@ -238,30 +229,5 @@ inc_authn_metrics(error) ->
inc_authn_metrics(ok) -> inc_authn_metrics(ok) ->
emqx_metrics:inc('authentication.success'); emqx_metrics:inc('authentication.success');
inc_authn_metrics(anonymous) -> inc_authn_metrics(anonymous) ->
emqx_metrics:inc('client.auth.anonymous'),
emqx_metrics:inc('authentication.success.anonymous'), emqx_metrics:inc('authentication.success.anonymous'),
emqx_metrics:inc('authentication.success'). emqx_metrics:inc('authentication.success').
on_authentication_complete(Credential, Reason, error) ->
emqx_hooks:run(
'client.check_authn_complete',
[
Credential,
#{
reason_code => Reason
}
]
),
inc_authn_metrics(error);
on_authentication_complete(Credential, Result, Type) ->
emqx_hooks:run(
'client.check_authn_complete',
[
Credential,
Result#{
reason_code => success,
is_anonymous => (Type =:= anonymous)
}
]
),
inc_authn_metrics(Type).
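
Aside: the new on_authentication_complete/3 runs the 'client.check_authn_complete' hook with the credential plus a result map. A sketch of a callback that pattern-matches that map; only reason_code and is_anonymous are taken from the hunk, the return values and everything else are assumptions:

    -module(authn_complete_sketch).
    -export([on_check_authn_complete/2]).

    %% Success: reason_code is the atom 'success' and is_anonymous says whether
    %% the client was accepted anonymously; failure: reason_code carries the error.
    on_check_authn_complete(_Credential, #{reason_code := success, is_anonymous := IsAnonymous}) ->
        {ok, IsAnonymous};
    on_check_authn_complete(_Credential, #{reason_code := Reason}) ->
        {failed, Reason}.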

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2020-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.
@ -21,9 +21,12 @@
-include("emqx.hrl"). -include("emqx.hrl").
-include("logger.hrl"). -include("logger.hrl").
-export([create_tables/0]). %% Mnesia bootstrap
-export([start_link/0]). -export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-export([start_link/0]).
%% API %% API
-export([ -export([
activate/1, activate/1,
@ -83,7 +86,7 @@
%% Mnesia bootstrap %% Mnesia bootstrap
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
create_tables() -> mnesia(boot) ->
ok = mria:create_table( ok = mria:create_table(
?ACTIVATED_ALARM, ?ACTIVATED_ALARM,
[ [
@ -103,8 +106,7 @@ create_tables() ->
{record_name, deactivated_alarm}, {record_name, deactivated_alarm},
{attributes, record_info(fields, deactivated_alarm)} {attributes, record_info(fields, deactivated_alarm)}
] ]
), ).
[?ACTIVATED_ALARM, ?DEACTIVATED_ALARM].
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% API %% API
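
Aside: create_tables/0 above replaces the -boot_mnesia/mnesia(boot) hook; it creates the tables via mria and returns their names so boot code can wait on them. A minimal sketch of the same pattern with a made-up table; the exact mria option set and the waiting helper are assumptions:

    -module(alarm_tables_sketch).
    -export([create_tables/0]).

    -record(my_alarm, {name, details, activated_at}).

    create_tables() ->
        ok = mria:create_table(my_alarm, [
            {type, set},
            {storage, disc_copies},
            {record_name, my_alarm},
            {attributes, record_info(fields, my_alarm)}
        ]),
        %% callers typically wait on the returned list, e.g. via mria:wait_for_tables/1
        [my_alarm].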

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2019-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2019-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

View File

@ -1,5 +1,5 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Copyright (c) 2017-2024 EMQ Technologies Co., Ltd. All Rights Reserved. %% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%% %%
%% Licensed under the Apache License, Version 2.0 (the "License"); %% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License. %% you may not use this file except in compliance with the License.

Some files were not shown because too many files have changed in this diff.