Compare commits

..

1 Commits

Author SHA1 Message Date
Ivan Dyachkov 3a90d0b83c build(docker): optimize docker image layers
use COPY --chown instead of chown in Dockerfile to reduce the number of layers
2024-03-18 13:57:45 +01:00
1524 changed files with 15785 additions and 95550 deletions

View File

@ -1,24 +0,0 @@
version: '3.9'
services:
azurite:
container_name: azurite
image: mcr.microsoft.com/azure-storage/azurite:3.30.0
restart: always
expose:
- "10000"
# ports:
# - "10000:10000"
networks:
- emqx_bridge
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:10000"]
interval: 30s
timeout: 5s
retries: 4
command:
- azurite-blob
- "--blobHost"
- 0.0.0.0
- "-d"
- debug.log

View File

@ -1,30 +0,0 @@
version: '3.9'
services:
couchbase:
container_name: couchbase
hostname: couchbase
image: ghcr.io/emqx/couchbase:1.0.0
restart: always
expose:
- 8091-8093
# ports:
# - "8091-8093:8091-8093"
networks:
- emqx_bridge
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8093/admin/ping"]
interval: 30s
timeout: 5s
retries: 4
environment:
- CLUSTER=localhost
- USER=admin
- PASS=public
- PORT=8091
- RAMSIZEMB=2048
- RAMSIZEINDEXMB=512
- RAMSIZEFTSMB=512
- BUCKETS=mqtt
- BUCKETSIZES=100
- AUTOREBALANCE=true

View File

@ -4,7 +4,7 @@ services:
greptimedb: greptimedb:
container_name: greptimedb container_name: greptimedb
hostname: greptimedb hostname: greptimedb
image: greptime/greptimedb:v0.7.1 image: greptime/greptimedb:v0.4.4
expose: expose:
- "4000" - "4000"
- "4001" - "4001"

View File

@ -1,53 +1,24 @@
version: '3.9' version: '3.9'
services: services:
iotdb_1_3_0: iotdb:
container_name: iotdb130 container_name: iotdb
hostname: iotdb130 hostname: iotdb
image: apache/iotdb:1.3.0-standalone
restart: always
environment:
- enable_rest_service=true
- cn_internal_address=iotdb130
- cn_internal_port=10710
- cn_consensus_port=10720
- cn_seed_config_node=iotdb130:10710
- dn_rpc_address=iotdb130
- dn_internal_address=iotdb130
- dn_rpc_port=6667
- dn_mpp_data_exchange_port=10740
- dn_schema_region_consensus_port=10750
- dn_data_region_consensus_port=10760
- dn_seed_config_node=iotdb130:10710
# volumes:
# - ./data:/iotdb/data
# - ./logs:/iotdb/logs
expose:
- "18080"
# IoTDB's REST interface, uncomment for local testing
# ports:
# - "18080:18080"
networks:
- emqx_bridge
iotdb_1_1_0:
container_name: iotdb110
hostname: iotdb110
image: apache/iotdb:1.1.0-standalone image: apache/iotdb:1.1.0-standalone
restart: always restart: always
environment: environment:
- enable_rest_service=true - enable_rest_service=true
- cn_internal_address=iotdb110 - cn_internal_address=iotdb
- cn_internal_port=10710 - cn_internal_port=10710
- cn_consensus_port=10720 - cn_consensus_port=10720
- cn_target_config_node_list=iotdb110:10710 - cn_target_config_node_list=iotdb:10710
- dn_rpc_address=iotdb110 - dn_rpc_address=iotdb
- dn_internal_address=iotdb110 - dn_internal_address=iotdb
- dn_rpc_port=6667 - dn_rpc_port=6667
- dn_mpp_data_exchange_port=10740 - dn_mpp_data_exchange_port=10740
- dn_schema_region_consensus_port=10750 - dn_schema_region_consensus_port=10750
- dn_data_region_consensus_port=10760 - dn_data_region_consensus_port=10760
- dn_target_config_node_list=iotdb110:10710 - dn_target_config_node_list=iotdb:10710
# volumes: # volumes:
# - ./data:/iotdb/data # - ./data:/iotdb/data
# - ./logs:/iotdb/logs # - ./logs:/iotdb/logs

View File

@ -18,7 +18,7 @@ services:
- /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret - /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
kdc: kdc:
hostname: kdc.emqx.net hostname: kdc.emqx.net
image: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04 image: ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04
container_name: kdc.emqx.net container_name: kdc.emqx.net
expose: expose:
- 88 # kdc - 88 # kdc

View File

@ -10,7 +10,7 @@ services:
nofile: 1024 nofile: 1024
image: openldap image: openldap
#ports: #ports:
# - "389:389" # - 389:389
volumes: volumes:
- ./certs/ca.crt:/etc/certs/ca.crt - ./certs/ca.crt:/etc/certs/ca.crt
restart: always restart: always

View File

@ -1,41 +0,0 @@
version: '3.9'
services:
mqnamesrvssl:
image: apache/rocketmq:4.9.4
container_name: rocketmq_namesrv_ssl
# ports:
# - 9876:9876
volumes:
- ./rocketmq/logs_ssl:/opt/logs
- ./rocketmq/store_ssl:/opt/store
environment:
JAVA_OPT: "-Dtls.server.mode=enforcing"
command: ./mqnamesrv
networks:
- emqx_bridge
mqbrokerssl:
image: apache/rocketmq:4.9.4
container_name: rocketmq_broker_ssl
# ports:
# - 10909:10909
# - 10911:10911
volumes:
- ./rocketmq/logs_ssl:/opt/logs
- ./rocketmq/store_ssl:/opt/store
- ./rocketmq/conf_ssl/broker.conf:/etc/rocketmq/broker.conf
- ./rocketmq/conf_ssl/plain_acl.yml:/home/rocketmq/rocketmq-4.9.4/conf/plain_acl.yml
environment:
NAMESRV_ADDR: "rocketmq_namesrv_ssl:9876"
JAVA_OPTS: " -Duser.home=/opt -Drocketmq.broker.diskSpaceWarningLevelRatio=0.99"
JAVA_OPT_EXT: "-server -Xms512m -Xmx512m -Xmn512m -Dtls.server.mode=enforcing"
command: ./mqbroker -c /etc/rocketmq/broker.conf
depends_on:
- mqnamesrvssl
networks:
- emqx_bridge
networks:
emqx_bridge:
driver: bridge

View File

@ -3,7 +3,7 @@ version: '3.9'
services: services:
erlang: erlang:
container_name: erlang container_name: erlang
image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04} image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04}
env_file: env_file:
- credentials.env - credentials.env
- conf.env - conf.env

View File

@ -49,9 +49,6 @@ echo "+++++++ Creating Kafka Topics ++++++++"
# there seem to be a race condition when creating the topics (too early) # there seem to be a race condition when creating the topics (too early)
env KAFKA_CREATE_TOPICS="$KAFKA_CREATE_TOPICS_NG" KAFKA_PORT="$PORT1" create-topics.sh env KAFKA_CREATE_TOPICS="$KAFKA_CREATE_TOPICS_NG" KAFKA_PORT="$PORT1" create-topics.sh
# create a topic with max.message.bytes=100
/opt/kafka/bin/kafka-topics.sh --create --bootstrap-server "${SERVER}:${PORT1}" --topic max-100-bytes --partitions 1 --replication-factor 1 --config max.message.bytes=100
echo "+++++++ Wait until Kafka ports are down ++++++++" echo "+++++++ Wait until Kafka ports are down ++++++++"
bash -c 'while printf "" 2>>/dev/null >>/dev/tcp/$0/$1; do sleep 1; done' $SERVER $PORT1 bash -c 'while printf "" 2>>/dev/null >>/dev/tcp/$0/$1; do sleep 1; done' $SERVER $PORT1

View File

@ -1,61 +0,0 @@
# LDAP authentication
To run manual tests with the default docker-compose files.
Expose the openldap container port by uncommenting the `ports` config in `docker-compose-ldap.yaml`
To start openldap:
```
docker-compose -f ./.ci/docker-compose-file/docker-compose.yaml -f ./.ci/docker-compose-file/docker-compose-ldap.yaml up -d
```
## LDAP database
LDAP database is populated from below files:
```
apps/emqx_ldap/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
apps/emqx_ldap/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
```
## Minimal EMQX config
```
authentication = [
{
backend = ldap
base_dn = "uid=${username},ou=testdevice,dc=emqx,dc=io"
filter = "(& (objectClass=mqttUser) (uid=${username}))"
mechanism = password_based
method {
is_superuser_attribute = isSuperuser
password_attribute = userPassword
type = hash
}
password = public
pool_size = 8
query_timeout = "5s"
request_timeout = "10s"
server = "localhost:1389"
username = "cn=root,dc=emqx,dc=io"
}
]
```
## Example ldapsearch command
```
ldapsearch -x -H ldap://localhost:389 -D "cn=root,dc=emqx,dc=io" -W -b "uid=mqttuser0007,ou=testdevice,dc=emqx,dc=io" "(&(objectClass=mqttUser)(uid=mqttuser0007))"
```
## Example mqttx command
The client password hashes are generated from their username.
```
# disabled user
mqttx pub -t 't/1' -h localhost -p 1883 -m x -u mqttuser0006 -P mqttuser0006
# enabled super-user
mqttx pub -t 't/1' -h localhost -p 1883 -m x -u mqttuser0007 -P mqttuser0007
```

View File

@ -9,4 +9,3 @@ accounts:
defaultGroupPerm: PUB|SUB defaultGroupPerm: PUB|SUB
topicPerms: topicPerms:
- TopicTest=PUB|SUB - TopicTest=PUB|SUB
- Topic2=PUB|SUB

View File

@ -1,24 +0,0 @@
brokerClusterName=DefaultClusterSSL
brokerName=broker-a
brokerId=0
brokerIP1=rocketmq_broker_ssl
defaultTopicQueueNums=4
autoCreateTopicEnable=true
autoCreateSubscriptionGroup=true
listenPort=10911
deleteWhen=04
fileReservedTime=120
mapedFileSizeCommitLog=1073741824
mapedFileSizeConsumeQueue=300000
diskMaxUsedSpaceRatio=100
maxMessageSize=65536
brokerRole=ASYNC_MASTER
flushDiskType=ASYNC_FLUSH
aclEnable=true

View File

@ -1,12 +0,0 @@
globalWhiteRemoteAddresses:
accounts:
- accessKey: RocketMQ
secretKey: 12345678
whiteRemoteAddress:
admin: false
defaultTopicPerm: DENY
defaultGroupPerm: PUB|SUB
topicPerms:
- TopicTest=PUB|SUB
- Topic2=PUB|SUB

View File

@ -139,15 +139,9 @@
"enabled": true "enabled": true
}, },
{ {
"name": "iotdb110", "name": "iotdb",
"listen": "0.0.0.0:18080", "listen": "0.0.0.0:18080",
"upstream": "iotdb110:18080", "upstream": "iotdb:18080",
"enabled": true
},
{
"name": "iotdb130",
"listen": "0.0.0.0:28080",
"upstream": "iotdb130:18080",
"enabled": true "enabled": true
}, },
{ {
@ -215,17 +209,5 @@
"listen": "0.0.0.0:9200", "listen": "0.0.0.0:9200",
"upstream": "elasticsearch:9200", "upstream": "elasticsearch:9200",
"enabled": true "enabled": true
},
{
"name": "azurite_plain",
"listen": "0.0.0.0:10000",
"upstream": "azurite:10000",
"enabled": true
},
{
"name": "couchbase",
"listen": "0.0.0.0:8093",
"upstream": "couchbase:8093",
"enabled": true
} }
] ]

View File

@ -1,18 +1,18 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{application, http_server, [ {application, http_server,
{description, "An HTTP server application"}, [{description, "An HTTP server application"},
{vsn, "0.2.0"}, {vsn, "0.2.0"},
{registered, []}, {registered, []},
% {mod, {http_server_app, []}}, % {mod, {http_server_app, []}},
{modules, []}, {modules, []},
{applications, [ {applications,
kernel, [kernel,
stdlib, stdlib,
minirest minirest
]}, ]},
{env, []}, {env,[]},
{modules, []}, {modules, []},
{licenses, ["Apache 2.0"]}, {licenses, ["Apache 2.0"]},
{links, []} {links, []}
]}. ]}.

23
.github/CODEOWNERS vendored
View File

@ -1,29 +1,18 @@
## Default ## Default
* @emqx/emqx-review-board * @emqx/emqx-review-board
# emqx-review-board members
## HJianBo
## id
## ieQu1
## keynslug
## qzhuyan
## savonarola
## terry-xiaoyu
## thalesmg
## zhongwencool
## zmstone
## apps ## apps
/apps/emqx/ @emqx/emqx-review-board @lafirest /apps/emqx/ @emqx/emqx-review-board @lafirest
/apps/emqx_auth/ @emqx/emqx-review-board @JimMoen /apps/emqx_connector/ @emqx/emqx-review-board
/apps/emqx_auth/ @emqx/emqx-review-board @JimMoen @savonarola
/apps/emqx_connector/ @emqx/emqx-review-board @JimMoen /apps/emqx_connector/ @emqx/emqx-review-board @JimMoen
/apps/emqx_dashboard/ @emqx/emqx-review-board @JimMoen @lafirest /apps/emqx_dashboard/ @emqx/emqx-review-board @JimMoen @lafirest
/apps/emqx_dashboard_rbac/ @emqx/emqx-review-board @lafirest /apps/emqx_dashboard_rbac/ @emqx/emqx-review-board @lafirest
/apps/emqx_dashboard_sso/ @emqx/emqx-review-board @JimMoen @lafirest /apps/emqx_dashboard_sso/ @emqx/emqx-review-board @JimMoen @lafirest
/apps/emqx_exhook/ @emqx/emqx-review-board @JimMoen /apps/emqx_exhook/ @emqx/emqx-review-board @JimMoen @HJianBo
/apps/emqx_ft/ @emqx/emqx-review-board @savonarola @keynslug
/apps/emqx_gateway/ @emqx/emqx-review-board @lafirest /apps/emqx_gateway/ @emqx/emqx-review-board @lafirest
/apps/emqx_management/ @emqx/emqx-review-board @lafirest /apps/emqx_management/ @emqx/emqx-review-board @lafirest @sstrigler
/apps/emqx_opentelemetry @emqx/emqx-review-board @SergeTupchiy
/apps/emqx_plugins/ @emqx/emqx-review-board @JimMoen /apps/emqx_plugins/ @emqx/emqx-review-board @JimMoen
/apps/emqx_prometheus/ @emqx/emqx-review-board @JimMoen /apps/emqx_prometheus/ @emqx/emqx-review-board @JimMoen
/apps/emqx_psk/ @emqx/emqx-review-board @lafirest /apps/emqx_psk/ @emqx/emqx-review-board @lafirest
@ -31,7 +20,7 @@
/apps/emqx_rule_engine/ @emqx/emqx-review-board @kjellwinblad /apps/emqx_rule_engine/ @emqx/emqx-review-board @kjellwinblad
/apps/emqx_slow_subs/ @emqx/emqx-review-board @lafirest /apps/emqx_slow_subs/ @emqx/emqx-review-board @lafirest
/apps/emqx_statsd/ @emqx/emqx-review-board @JimMoen /apps/emqx_statsd/ @emqx/emqx-review-board @JimMoen
/apps/emqx_durable_storage/ @emqx/emqx-review-board @keynslug /apps/emqx_durable_storage/ @emqx/emqx-review-board @ieQu1 @keynslug
## CI ## CI
/deploy/ @emqx/emqx-review-board @Rory-Z /deploy/ @emqx/emqx-review-board @Rory-Z

View File

@ -33,7 +33,7 @@ runs:
HOMEBREW_NO_INSTALL_UPGRADE: 1 HOMEBREW_NO_INSTALL_UPGRADE: 1
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1 HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
run: | run: |
brew install curl zip unzip coreutils openssl@1.1 unixodbc brew install curl zip unzip coreutils openssl@1.1
echo "/usr/local/opt/bison/bin" >> $GITHUB_PATH echo "/usr/local/opt/bison/bin" >> $GITHUB_PATH
echo "/usr/local/bin" >> $GITHUB_PATH echo "/usr/local/bin" >> $GITHUB_PATH
echo "emqx_name=${emqx_name}" >> $GITHUB_OUTPUT echo "emqx_name=${emqx_name}" >> $GITHUB_OUTPUT
@ -51,12 +51,12 @@ runs:
echo "SELF_HOSTED=false" >> $GITHUB_OUTPUT echo "SELF_HOSTED=false" >> $GITHUB_OUTPUT
;; ;;
esac esac
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
id: cache id: cache
if: steps.prepare.outputs.SELF_HOSTED != 'true' if: steps.prepare.outputs.SELF_HOSTED != 'true'
with: with:
path: ${{ steps.prepare.outputs.OTP_INSTALL_PATH }} path: ${{ steps.prepare.outputs.OTP_INSTALL_PATH }}
key: otp-install-${{ inputs.otp }}-${{ inputs.os }}-static-ssl-disable-hipe-disable-jit-20240524-1 key: otp-install-${{ inputs.otp }}-${{ inputs.os }}-static-ssl-disable-hipe-disable-jit
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
shell: bash shell: bash
@ -80,10 +80,9 @@ runs:
git clone --depth 1 --branch OTP-${{ inputs.otp }} https://github.com/emqx/otp.git "$OTP_SOURCE_PATH" git clone --depth 1 --branch OTP-${{ inputs.otp }} https://github.com/emqx/otp.git "$OTP_SOURCE_PATH"
cd "$OTP_SOURCE_PATH" cd "$OTP_SOURCE_PATH"
if [ "$(arch)" = arm64 ]; then if [ "$(arch)" = arm64 ]; then
ODBCHOME="$(brew --prefix unixodbc)" export CFLAGS="-O2 -g -I$(brew --prefix unixodbc)/include"
export CFLAGS="-O2 -g -I${ODBCHOME}/include" export LDFLAGS="-L$(brew --prefix unixodbc)/lib"
export LDFLAGS="-L${ODBCHOME}/lib" WITH_ODBC="--with-odbc=$(brew --prefix unixodbc)"
WITH_ODBC="--with-odbc=${ODBCHOME}"
else else
WITH_ODBC="" WITH_ODBC=""
fi fi

View File

@ -1,21 +1,37 @@
name: 'Prepare jmeter' name: 'Prepare jmeter'
inputs:
version-emqx:
required: true
type: string
runs: runs:
using: composite using: composite
steps: steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
env:
PKG_VSN: ${{ inputs.version-emqx }}
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/emqx-fvt repository: emqx/emqx-fvt
ref: broker-autotest-v5 ref: broker-autotest-v5
path: scripts path: scripts
- uses: actions/setup-java@99b8673ff64fbf99d8d325f52d9a5bdedb8483e9 # v4.2.1 - uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 # v4.0.0
with: with:
java-version: '8.0.282' # The JDK version to make available on the path. java-version: '8.0.282' # The JDK version to make available on the path.
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
architecture: x64 # (x64 or x86) - defaults to x64 architecture: x64 # (x64 or x86) - defaults to x64
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
distribution: 'zulu' distribution: 'zulu'
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: apache-jmeter.tgz name: apache-jmeter.tgz
- name: install jmeter - name: install jmeter

View File

@ -1 +0,0 @@
*/.github/*

View File

@ -11,48 +11,29 @@ on:
ref: ref:
required: false required: false
defaults:
run:
shell: bash
env: env:
IS_CI: "yes" IS_CI: "yes"
jobs: permissions:
init: contents: read
runs-on: ubuntu-22.04
outputs:
BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }}
OTP_VSN: ${{ steps.env.outputs.OTP_VSN }}
ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
BUILDER: ${{ steps.env.outputs.BUILDER }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.event.inputs.ref }}
- name: Set up environment
id: env
run: |
source ./env.sh
echo "BUILDER_VSN=$EMQX_BUILDER_VSN" | tee -a "$GITHUB_OUTPUT"
echo "OTP_VSN=$OTP_VSN" | tee -a "$GITHUB_OUTPUT"
echo "ELIXIR_VSN=$ELIXIR_VSN" | tee -a "$GITHUB_OUTPUT"
echo "BUILDER=$EMQX_BUILDER" | tee -a "$GITHUB_OUTPUT"
jobs:
sanity-checks: sanity-checks:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: init container: "ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04"
container: ${{ needs.init.outputs.BUILDER }}
outputs: outputs:
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
ct-host: ${{ steps.matrix.outputs.ct-host }} ct-host: ${{ steps.matrix.outputs.ct-host }}
ct-docker: ${{ steps.matrix.outputs.ct-docker }} ct-docker: ${{ steps.matrix.outputs.ct-docker }}
version-emqx: ${{ steps.matrix.outputs.version-emqx }}
permissions: version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }}
contents: read builder: "ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04"
builder_vsn: "5.3-2"
otp_vsn: "26.2.1-2"
elixir_vsn: "1.15.7"
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
@ -97,8 +78,7 @@ jobs:
MIX_ENV: emqx-enterprise MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise PROFILE: emqx-enterprise
run: | run: |
# mix local.hex --force --if-missing && mix local.rebar --force --if-missing mix local.hex --force --if-missing && mix local.rebar --force --if-missing
mix local.hex 2.0.6 --force --if-missing && mix local.rebar --force --if-missing
- name: Check formatting - name: Check formatting
env: env:
MIX_ENV: emqx-enterprise MIX_ENV: emqx-enterprise
@ -111,20 +91,35 @@ jobs:
- name: Generate CT Matrix - name: Generate CT Matrix
id: matrix id: matrix
run: | run: |
MATRIX="$(./scripts/find-apps.sh --ci)" APPS="$(./scripts/find-apps.sh --ci)"
MATRIX="$(echo "${APPS}" | jq -c '
[
(.[] | select(.profile == "emqx") | . + {
builder: "5.3-2",
otp: "26.2.1-2",
elixir: "1.15.7"
}),
(.[] | select(.profile == "emqx-enterprise") | . + {
builder: "5.3-2",
otp: ["26.2.1-2"][],
elixir: "1.15.7"
})
]
')"
echo "${MATRIX}" | jq echo "${MATRIX}" | jq
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')" CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT
echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT
echo "version-emqx=$(./pkg-vsn.sh emqx)" | tee -a $GITHUB_OUTPUT
echo "version-emqx-enterprise=$(./pkg-vsn.sh emqx-enterprise)" | tee -a $GITHUB_OUTPUT
compile: compile:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral-xl","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral-xl","linux","x64"]') }}
container: ${{ needs.init.outputs.BUILDER }} container: ${{ needs.sanity-checks.outputs.builder }}
needs: needs:
- init
- sanity-checks - sanity-checks
strategy: strategy:
matrix: matrix:
@ -132,11 +127,8 @@ jobs:
- emqx - emqx
- emqx-enterprise - emqx-enterprise
permissions:
contents: read
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Work around https://github.com/actions/checkout/issues/766 - name: Work around https://github.com/actions/checkout/issues/766
@ -152,7 +144,7 @@ jobs:
echo "PROFILE=${PROFILE}" | tee -a .env echo "PROFILE=${PROFILE}" | tee -a .env
echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env
zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip . zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
path: ${{ matrix.profile }}.zip path: ${{ matrix.profile }}.zip
@ -160,47 +152,53 @@ jobs:
run_emqx_app_tests: run_emqx_app_tests:
needs: needs:
- init
- sanity-checks - sanity-checks
- compile - compile
uses: ./.github/workflows/run_emqx_app_tests.yaml uses: ./.github/workflows/run_emqx_app_tests.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
before_ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }} before_ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
after_ref: ${{ github.sha }} after_ref: ${{ github.sha }}
run_test_cases: run_test_cases:
needs: needs:
- init
- sanity-checks - sanity-checks
- compile - compile
uses: ./.github/workflows/run_test_cases.yaml uses: ./.github/workflows/run_test_cases.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }} ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
ct-host: ${{ needs.sanity-checks.outputs.ct-host }} ct-host: ${{ needs.sanity-checks.outputs.ct-host }}
ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }} ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }}
static_checks: static_checks:
needs: needs:
- init
- sanity-checks - sanity-checks
- compile - compile
uses: ./.github/workflows/static_checks.yaml uses: ./.github/workflows/static_checks.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }} ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
build_slim_packages: build_slim_packages:
needs: needs:
- sanity-checks - sanity-checks
uses: ./.github/workflows/build_slim_packages.yaml uses: ./.github/workflows/build_slim_packages.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder_vsn: ${{ needs.sanity-checks.outputs.builder_vsn }}
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
build_docker_for_test: build_docker_for_test:
needs: needs:
- init
- sanity-checks - sanity-checks
uses: ./.github/workflows/build_docker_for_test.yaml uses: ./.github/workflows/build_docker_for_test.yaml
with:
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
spellcheck: spellcheck:
needs: needs:
@ -210,35 +208,41 @@ jobs:
run_conf_tests: run_conf_tests:
needs: needs:
- init
- sanity-checks - sanity-checks
- compile - compile
uses: ./.github/workflows/run_conf_tests.yaml uses: ./.github/workflows/run_conf_tests.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
check_deps_integrity: check_deps_integrity:
needs: needs:
- init
- sanity-checks - sanity-checks
uses: ./.github/workflows/check_deps_integrity.yaml uses: ./.github/workflows/check_deps_integrity.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.sanity-checks.outputs.builder }}
run_jmeter_tests: run_jmeter_tests:
needs: needs:
- sanity-checks - sanity-checks
- build_docker_for_test - build_docker_for_test
uses: ./.github/workflows/run_jmeter_tests.yaml uses: ./.github/workflows/run_jmeter_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
run_docker_tests: run_docker_tests:
needs: needs:
- sanity-checks - sanity-checks
- build_docker_for_test - build_docker_for_test
uses: ./.github/workflows/run_docker_tests.yaml uses: ./.github/workflows/run_docker_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
run_helm_tests: run_helm_tests:
needs: needs:
- sanity-checks - sanity-checks
- build_docker_for_test - build_docker_for_test
uses: ./.github/workflows/run_helm_tests.yaml uses: ./.github/workflows/run_helm_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}

View File

@ -8,51 +8,22 @@ on:
push: push:
tags: tags:
- 'v*' - 'v*'
- 'e*'
branches: branches:
- 'master' - 'master'
- 'release-5[0-9]' - 'release-5[0-9]'
- 'ci/**' - 'ci/**'
workflow_dispatch:
inputs:
ref:
required: false
defaults: permissions:
run: contents: read
shell: bash
env: env:
IS_CI: 'yes' IS_CI: 'yes'
jobs: jobs:
init:
runs-on: ubuntu-22.04
outputs:
BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }}
OTP_VSN: ${{ steps.env.outputs.OTP_VSN }}
ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
BUILDER: ${{ steps.env.outputs.BUILDER }}
BUILD_FROM: ${{ steps.env.outputs.BUILD_FROM }}
RUN_FROM: ${{ steps.env.outputs.BUILD_FROM }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
ref: ${{ github.event.inputs.ref }}
- name: Set up environment
id: env
run: |
source env.sh
echo "BUILDER_VSN=$EMQX_BUILDER_VSN" >> "$GITHUB_OUTPUT"
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
echo "ELIXIR_VSN=$ELIXIR_VSN" >> "$GITHUB_OUTPUT"
echo "BUILDER=$EMQX_BUILDER" >> "$GITHUB_OUTPUT"
echo "BUILD_FROM=$EMQX_DOCKER_BUILD_FROM" >> "$GITHUB_OUTPUT"
echo "RUN_FROM=$EMQX_DOCKER_RUN_FROM" >> "$GITHUB_OUTPUT"
prepare: prepare:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
needs: init container: 'ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04'
container: ${{ needs.init.outputs.BUILDER }}
outputs: outputs:
profile: ${{ steps.parse-git-ref.outputs.profile }} profile: ${{ steps.parse-git-ref.outputs.profile }}
release: ${{ steps.parse-git-ref.outputs.release }} release: ${{ steps.parse-git-ref.outputs.release }}
@ -60,12 +31,13 @@ jobs:
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
ct-host: ${{ steps.matrix.outputs.ct-host }} ct-host: ${{ steps.matrix.outputs.ct-host }}
ct-docker: ${{ steps.matrix.outputs.ct-docker }} ct-docker: ${{ steps.matrix.outputs.ct-docker }}
builder: 'ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04'
permissions: builder_vsn: '5.3-2'
contents: read otp_vsn: '26.2.1-2'
elixir_vsn: '1.15.7'
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
@ -86,9 +58,23 @@ jobs:
- name: Build matrix - name: Build matrix
id: matrix id: matrix
run: | run: |
MATRIX="$(./scripts/find-apps.sh --ci)" APPS="$(./scripts/find-apps.sh --ci)"
MATRIX="$(echo "${APPS}" | jq -c '
[
(.[] | select(.profile == "emqx") | . + {
builder: "5.3-2",
otp: "26.2.1-2",
elixir: "1.15.7"
}),
(.[] | select(.profile == "emqx-enterprise") | . + {
builder: "5.3-2",
otp: ["26.2.1-2"][],
elixir: "1.15.7"
})
]
')"
echo "${MATRIX}" | jq echo "${MATRIX}" | jq
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')" CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
@ -98,44 +84,47 @@ jobs:
build_packages: build_packages:
if: needs.prepare.outputs.release == 'true' if: needs.prepare.outputs.release == 'true'
needs: needs:
- init
- prepare - prepare
uses: ./.github/workflows/build_packages.yaml uses: ./.github/workflows/build_packages.yaml
with: with:
profile: ${{ needs.prepare.outputs.profile }} profile: ${{ needs.prepare.outputs.profile }}
publish: true publish: true
otp_vsn: ${{ needs.init.outputs.OTP_VSN }} otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.init.outputs.ELIXIR_VSN }} elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
builder_vsn: ${{ needs.init.outputs.BUILDER_VSN }} builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
secrets: inherit secrets: inherit
build_and_push_docker_images: build_and_push_docker_images:
if: needs.prepare.outputs.release == 'true' if: needs.prepare.outputs.release == 'true'
needs: needs:
- init
- prepare - prepare
uses: ./.github/workflows/build_and_push_docker_images.yaml uses: ./.github/workflows/build_and_push_docker_images.yaml
with: with:
profile: ${{ needs.prepare.outputs.profile }} profile: ${{ needs.prepare.outputs.profile }}
publish: true publish: true
latest: ${{ needs.prepare.outputs.latest }} latest: ${{ needs.prepare.outputs.latest }}
build_from: ${{ needs.init.outputs.BUILD_FROM }} # TODO: revert this back to needs.prepare.outputs.otp_vsn when OTP 26 bug is fixed
run_from: ${{ needs.init.outputs.RUN_FROM }} otp_vsn: 25.3.2-2
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
secrets: inherit secrets: inherit
build_slim_packages: build_slim_packages:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
needs: needs:
- init
- prepare - prepare
uses: ./.github/workflows/build_slim_packages.yaml uses: ./.github/workflows/build_slim_packages.yaml
with:
builder: ${{ needs.prepare.outputs.builder }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
compile: compile:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ needs.init.outputs.BUILDER }} container: ${{ needs.prepare.outputs.builder }}
needs: needs:
- init
- prepare - prepare
strategy: strategy:
matrix: matrix:
@ -143,11 +132,8 @@ jobs:
- emqx - emqx
- emqx-enterprise - emqx-enterprise
permissions:
contents: read
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
@ -163,7 +149,7 @@ jobs:
echo "PROFILE=${PROFILE}" | tee -a .env echo "PROFILE=${PROFILE}" | tee -a .env
echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env
zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip . zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
path: ${{ matrix.profile }}.zip path: ${{ matrix.profile }}.zip
@ -171,23 +157,22 @@ jobs:
run_emqx_app_tests: run_emqx_app_tests:
needs: needs:
- init - prepare
- compile - compile
uses: ./.github/workflows/run_emqx_app_tests.yaml uses: ./.github/workflows/run_emqx_app_tests.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.prepare.outputs.builder }}
before_ref: ${{ github.event.before }} before_ref: ${{ github.event.before }}
after_ref: ${{ github.sha }} after_ref: ${{ github.sha }}
run_test_cases: run_test_cases:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
needs: needs:
- init
- prepare - prepare
- compile - compile
uses: ./.github/workflows/run_test_cases.yaml uses: ./.github/workflows/run_test_cases.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.prepare.outputs.builder }}
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }} ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}
ct-host: ${{ needs.prepare.outputs.ct-host }} ct-host: ${{ needs.prepare.outputs.ct-host }}
ct-docker: ${{ needs.prepare.outputs.ct-docker }} ct-docker: ${{ needs.prepare.outputs.ct-docker }}
@ -195,20 +180,18 @@ jobs:
run_conf_tests: run_conf_tests:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
needs: needs:
- init
- prepare - prepare
- compile - compile
uses: ./.github/workflows/run_conf_tests.yaml uses: ./.github/workflows/run_conf_tests.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.prepare.outputs.builder }}
static_checks: static_checks:
if: needs.prepare.outputs.release != 'true' if: needs.prepare.outputs.release != 'true'
needs: needs:
- init
- prepare - prepare
- compile - compile
uses: ./.github/workflows/static_checks.yaml uses: ./.github/workflows/static_checks.yaml
with: with:
builder: ${{ needs.init.outputs.BUILDER }} builder: ${{ needs.prepare.outputs.builder }}
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }} ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}

View File

@ -16,10 +16,13 @@ on:
publish: publish:
required: true required: true
type: boolean type: boolean
build_from: otp_vsn:
required: true required: true
type: string type: string
run_from: elixir_vsn:
required: true
type: string
builder_vsn:
required: true required: true
type: string type: string
secrets: secrets:
@ -47,12 +50,18 @@ on:
required: false required: false
type: boolean type: boolean
default: false default: false
build_from: otp_vsn:
required: false required: false
type: string type: string
default: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-debian12 default: '25.3.2-2'
run_from: elixir_vsn:
default: public.ecr.aws/debian/debian:stable-20240612-slim required: false
type: string
default: '1.15.7'
builder_vsn:
required: false
type: string
default: '5.3-2'
permissions: permissions:
contents: read contents: read
@ -60,7 +69,7 @@ permissions:
jobs: jobs:
build: build:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.arch)) || 'ubuntu-22.04' }} runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.arch)) || 'ubuntu-22.04' }}
container: ${{ inputs.build_from }} container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ inputs.otp_vsn }}-debian12"
outputs: outputs:
PKG_VSN: ${{ steps.build.outputs.PKG_VSN }} PKG_VSN: ${{ steps.build.outputs.PKG_VSN }}
@ -75,7 +84,7 @@ jobs:
- arm64 - arm64
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
- run: git config --global --add safe.directory "$PWD" - run: git config --global --add safe.directory "$PWD"
@ -83,7 +92,7 @@ jobs:
id: build id: build
run: | run: |
make ${{ matrix.profile }}-tgz make ${{ matrix.profile }}-tgz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: "${{ matrix.profile }}-${{ matrix.arch }}.tar.gz" name: "${{ matrix.profile }}-${{ matrix.arch }}.tar.gz"
path: "_packages/emqx*/emqx-*.tar.gz" path: "_packages/emqx*/emqx-*.tar.gz"
@ -107,10 +116,10 @@ jobs:
- ["${{ inputs.profile }}-elixir", "${{ inputs.profile == 'emqx' && 'docker.io,public.ecr.aws' || 'docker.io' }}"] - ["${{ inputs.profile }}-elixir", "${{ inputs.profile == 'emqx' && 'docker.io,public.ecr.aws' || 'docker.io' }}"]
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
pattern: "${{ matrix.profile[0] }}-*.tar.gz" pattern: "${{ matrix.profile[0] }}-*.tar.gz"
path: _packages path: _packages
@ -122,33 +131,32 @@ jobs:
run: | run: |
ls -lR _packages/$PROFILE ls -lR _packages/$PROFILE
mv _packages/$PROFILE/*.tar.gz ./ mv _packages/$PROFILE/*.tar.gz ./
- name: Enable containerd image store on Docker Engine - name: Enable containerd image store on Docker Engine
run: | run: |
echo "$(sudo cat /etc/docker/daemon.json | jq '. += {"features": {"containerd-snapshotter": true}}')" > daemon.json echo "$(jq '. += {"features": {"containerd-snapshotter": true}}' /etc/docker/daemon.json)" > daemon.json
sudo mv daemon.json /etc/docker/daemon.json sudo mv daemon.json /etc/docker/daemon.json
sudo systemctl restart docker sudo systemctl restart docker
- uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0 - uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
- uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 - uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0
- name: Login to hub.docker.com - name: Login to hub.docker.com
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
if: inputs.publish && contains(matrix.profile[1], 'docker.io') if: inputs.publish || github.repository_owner != 'emqx'
with: with:
username: ${{ secrets.DOCKER_HUB_USER }} username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }} password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Login to AWS ECR - name: Login to AWS ECR
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
if: inputs.publish && contains(matrix.profile[1], 'public.ecr.aws') if: inputs.publish || github.repository_owner != 'emqx'
with: with:
registry: public.ecr.aws registry: public.ecr.aws
username: ${{ secrets.AWS_ACCESS_KEY_ID }} username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }} password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ecr: true ecr: true
- name: Build docker image for smoke test - name: Build docker image
env: env:
PROFILE: ${{ matrix.profile[0] }} PROFILE: ${{ matrix.profile[0] }}
DOCKER_REGISTRY: ${{ matrix.profile[1] }} DOCKER_REGISTRY: ${{ matrix.profile[1] }}
@ -156,9 +164,14 @@ jobs:
DOCKER_LATEST: ${{ inputs.latest }} DOCKER_LATEST: ${{ inputs.latest }}
DOCKER_PUSH: false DOCKER_PUSH: false
DOCKER_BUILD_NOCACHE: true DOCKER_BUILD_NOCACHE: true
BUILD_FROM: ${{ inputs.build_from }} DOCKER_PLATFORMS: linux/amd64,linux/arm64
RUN_FROM: ${{ inputs.run_from }} DOCKER_LOAD: true
EMQX_RUNNER: 'public.ecr.aws/debian/debian:12-slim'
EMQX_DOCKERFILE: 'deploy/docker/Dockerfile'
PKG_VSN: ${{ needs.build.outputs.PKG_VSN }} PKG_VSN: ${{ needs.build.outputs.PKG_VSN }}
EMQX_BUILDER_VERSION: ${{ inputs.builder_vsn }}
EMQX_BUILDER_OTP: ${{ inputs.otp_vsn }}
EMQX_BUILDER_ELIXIR: ${{ inputs.elixir_vsn }}
EMQX_SOURCE_TYPE: tgz EMQX_SOURCE_TYPE: tgz
run: | run: |
./build ${PROFILE} docker ./build ${PROFILE} docker
@ -172,7 +185,7 @@ jobs:
timeout-minutes: 1 timeout-minutes: 1
run: | run: |
for tag in $(cat .emqx_docker_image_tags); do for tag in $(cat .emqx_docker_image_tags); do
CID=$(docker run -d -p 18083:18083 $tag) CID=$(docker run -d -P $tag)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker rm -f $CID docker rm -f $CID
@ -190,9 +203,7 @@ jobs:
docker exec -t -u root -w /root $CID bash -c 'apt-get -y update && apt-get -y install net-tools' docker exec -t -u root -w /root $CID bash -c 'apt-get -y update && apt-get -y install net-tools'
docker exec -t -u root $CID node_dump docker exec -t -u root $CID node_dump
docker rm -f $CID docker rm -f $CID
- name: Push docker image
- name: Build and push docker image
if: inputs.publish || github.repository_owner != 'emqx'
env: env:
PROFILE: ${{ matrix.profile[0] }} PROFILE: ${{ matrix.profile[0] }}
DOCKER_REGISTRY: ${{ matrix.profile[1] }} DOCKER_REGISTRY: ${{ matrix.profile[1] }}
@ -202,9 +213,12 @@ jobs:
DOCKER_BUILD_NOCACHE: false DOCKER_BUILD_NOCACHE: false
DOCKER_PLATFORMS: linux/amd64,linux/arm64 DOCKER_PLATFORMS: linux/amd64,linux/arm64
DOCKER_LOAD: false DOCKER_LOAD: false
BUILD_FROM: ${{ inputs.build_from }} EMQX_RUNNER: 'public.ecr.aws/debian/debian:12-slim'
RUN_FROM: ${{ inputs.run_from }} EMQX_DOCKERFILE: 'deploy/docker/Dockerfile'
PKG_VSN: ${{ needs.build.outputs.PKG_VSN }} PKG_VSN: ${{ needs.build.outputs.PKG_VSN }}
EMQX_BUILDER_VERSION: ${{ inputs.builder_vsn }}
EMQX_BUILDER_OTP: ${{ inputs.otp_vsn }}
EMQX_BUILDER_ELIXIR: ${{ inputs.elixir_vsn }}
EMQX_SOURCE_TYPE: tgz EMQX_SOURCE_TYPE: tgz
run: | run: |
./build ${PROFILE} docker ./build ${PROFILE} docker

View File

@ -6,6 +6,19 @@ concurrency:
on: on:
workflow_call: workflow_call:
inputs:
otp_vsn:
required: true
type: string
elixir_vsn:
required: true
type: string
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions: permissions:
contents: read contents: read
@ -15,6 +28,9 @@ jobs:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
env: env:
EMQX_NAME: ${{ matrix.profile }} EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ startsWith(matrix.profile, 'emqx-enterprise') && inputs.version-emqx-enterprise || inputs.version-emqx }}
OTP_VSN: ${{ inputs.otp_vsn }}
ELIXIR_VSN: ${{ inputs.elixir_vsn }}
strategy: strategy:
fail-fast: false fail-fast: false
@ -26,13 +42,7 @@ jobs:
- emqx-enterprise-elixir - emqx-enterprise-elixir
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- name: build and export to Docker - name: build and export to Docker
id: build id: build
run: | run: |
@ -42,16 +52,12 @@ jobs:
run: | run: |
CID=$(docker run -d --rm -P $_EMQX_DOCKER_IMAGE_TAG) CID=$(docker run -d --rm -P $_EMQX_DOCKER_IMAGE_TAG)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT || { ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker logs $CID
exit 1
}
docker stop $CID docker stop $CID
- name: export docker image - name: export docker image
if: always()
run: | run: |
docker save $_EMQX_DOCKER_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz docker save $_EMQX_DOCKER_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: "${{ env.EMQX_NAME }}-docker" name: "${{ env.EMQX_NAME }}-docker"
path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz" path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz"

View File

@ -55,7 +55,7 @@ on:
otp_vsn: otp_vsn:
required: false required: false
type: string type: string
default: '26.2.5-3' default: '26.2.1-2'
elixir_vsn: elixir_vsn:
required: false required: false
type: string type: string
@ -63,7 +63,7 @@ on:
builder_vsn: builder_vsn:
required: false required: false
type: string type: string
default: '5.3-9' default: '5.3-2'
permissions: permissions:
contents: read contents: read
@ -76,13 +76,14 @@ jobs:
profile: profile:
- ${{ inputs.profile }} - ${{ inputs.profile }}
os: os:
- macos-12
- macos-12-arm64
- macos-13 - macos-13
- macos-14
otp: otp:
- ${{ inputs.otp_vsn }} - ${{ inputs.otp_vsn }}
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
@ -95,7 +96,7 @@ jobs:
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }} apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }} apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }} apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: success() if: success()
with: with:
name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.otp }} name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.otp }}
@ -110,14 +111,15 @@ jobs:
profile: profile:
- ${{ inputs.profile }} - ${{ inputs.profile }}
os: os:
- ubuntu24.04
- ubuntu22.04 - ubuntu22.04
- ubuntu20.04 - ubuntu20.04
- ubuntu18.04
- debian12 - debian12
- debian11 - debian11
- debian10 - debian10
- el9 - el9
- el8 - el8
- el7
- amzn2 - amzn2
- amzn2023 - amzn2023
arch: arch:
@ -145,27 +147,11 @@ jobs:
shell: bash shell: bash
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
fetch-depth: 0 fetch-depth: 0
- name: build tgz - name: build emqx packages
env:
PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }}
OS: ${{ matrix.os }}
IS_ELIXIR: ${{ matrix.with_elixir }}
BUILDER: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}"
BUILDER_SYSTEM: force_docker
run: |
./scripts/buildx.sh \
--profile $PROFILE \
--arch $ARCH \
--builder $BUILDER \
--elixir $IS_ELIXIR \
--pkgtype tgz
- name: build pkg
if: matrix.with_elixir == 'no'
env: env:
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }} ARCH: ${{ matrix.arch }}
@ -180,7 +166,7 @@ jobs:
--builder $BUILDER \ --builder $BUILDER \
--elixir $IS_ELIXIR \ --elixir $IS_ELIXIR \
--pkgtype pkg --pkgtype pkg
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.arch }}${{ matrix.with_elixir == 'yes' && '-elixir' || '' }}-${{ matrix.builder }}-${{ matrix.otp }}-${{ matrix.elixir }} name: ${{ matrix.profile }}-${{ matrix.os }}-${{ matrix.arch }}${{ matrix.with_elixir == 'yes' && '-elixir' || '' }}-${{ matrix.builder }}-${{ matrix.otp }}-${{ matrix.elixir }}
path: _packages/${{ matrix.profile }}/ path: _packages/${{ matrix.profile }}/
@ -198,11 +184,23 @@ jobs:
profile: profile:
- ${{ inputs.profile }} - ${{ inputs.profile }}
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
pattern: "${{ matrix.profile }}-*" pattern: "${{ matrix.profile }}-*"
path: packages/${{ matrix.profile }} path: packages/${{ matrix.profile }}
merge-multiple: true merge-multiple: true
- name: install dos2unix
run: sudo apt-get update -y && sudo apt install -y dos2unix
- name: get packages
run: |
set -eu
cd packages/${{ matrix.profile }}
# fix the .sha256 file format
for f in *.sha256; do
dos2unix $f
echo "$(cat $f) ${f%.*}" | sha256sum -c || exit 1
done
cd -
- uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 - uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
with: with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}

View File

@ -16,52 +16,64 @@ jobs:
linux: linux:
if: github.repository_owner == 'emqx' if: github.repository_owner == 'emqx'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container:
image: "ghcr.io/emqx/emqx-builder/${{ matrix.profile[2] }}-${{ matrix.os }}"
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
profile: profile:
- ['emqx', 'master'] - ['emqx', 'master', '5.3-2:1.15.7-26.2.1-2']
- ['emqx', 'release-57'] - ['emqx-enterprise', 'release-56', '5.3-2:1.15.7-25.3.2-2']
- ['emqx', 'release-58']
os: os:
- debian10
- ubuntu22.04 - ubuntu22.04
- amzn2023 - amzn2023
env:
PROFILE: ${{ matrix.profile[0] }}
OS: ${{ matrix.os }}
BUILDER_SYSTEM: force_docker
defaults: defaults:
run: run:
shell: bash shell: bash
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ matrix.profile[1] }} ref: ${{ matrix.profile[1] }}
fetch-depth: 0 fetch-depth: 0
- name: Set up environment
id: env - name: fix workdir
run: | run: |
source env.sh set -eu
BUILDER="ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VSN}:${ELIXIR_VSN}-${OTP_VSN}-${OS}" git config --global --add safe.directory "$GITHUB_WORKSPACE"
echo "BUILDER=$BUILDER" >> "$GITHUB_ENV" # Align path for CMake caches
- name: build tgz if [ ! "$PWD" = "/emqx" ]; then
ln -s $PWD /emqx
cd /emqx
fi
echo "pwd is $PWD"
- name: build emqx packages
env:
PROFILE: ${{ matrix.profile[0] }}
ACLOCAL_PATH: "/usr/share/aclocal:/usr/local/share/aclocal"
run: | run: |
./scripts/buildx.sh --profile "$PROFILE" --pkgtype tgz --builder "$BUILDER" set -eu
- name: build pkg make "${PROFILE}-tgz"
make "${PROFILE}-pkg"
- name: test emqx packages
env:
PROFILE: ${{ matrix.profile[0] }}
run: | run: |
./scripts/buildx.sh --profile "$PROFILE" --pkgtype pkg --builder "$BUILDER" set -eu
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 ./scripts/pkg-tests.sh "${PROFILE}-tgz"
./scripts/pkg-tests.sh "${PROFILE}-pkg"
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: success() if: success()
with: with:
name: ${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.os }} name: ${{ matrix.profile[0] }}-${{ matrix.os }}
path: _packages/${{ matrix.profile[0] }}/ path: _packages/${{ matrix.profile[0] }}/
retention-days: 7 retention-days: 7
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
if: failure() if: failure()
env: env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
@ -80,36 +92,33 @@ jobs:
- emqx - emqx
branch: branch:
- master - master
otp:
- 26.2.1-2
os: os:
- macos-14-arm64 - macos-12-arm64
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ matrix.branch }} ref: ${{ matrix.branch }}
fetch-depth: 0 fetch-depth: 0
- name: Set up environment
id: env
run: |
source env.sh
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
- uses: ./.github/actions/package-macos - uses: ./.github/actions/package-macos
with: with:
profile: ${{ matrix.profile }} profile: ${{ matrix.profile }}
otp: ${{ steps.env.outputs.OTP_VSN }} otp: ${{ matrix.otp }}
os: ${{ matrix.os }} os: ${{ matrix.os }}
apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }} apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }}
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }} apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }} apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }} apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: success() if: success()
with: with:
name: ${{ matrix.profile }}-${{ matrix.os }} name: ${{ matrix.profile }}-${{ matrix.os }}
path: _packages/${{ matrix.profile }}/ path: _packages/${{ matrix.profile }}/
retention-days: 7 retention-days: 7
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
if: failure() if: failure()
env: env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -6,50 +6,97 @@ concurrency:
on: on:
workflow_call: workflow_call:
inputs:
builder:
required: true
type: string
builder_vsn:
required: true
type: string
otp_vsn:
required: true
type: string
elixir_vsn:
required: true
type: string
workflow_dispatch: workflow_dispatch:
inputs: inputs:
ref: ref:
required: false required: false
builder:
required: false
type: string
default: 'ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04'
builder_vsn:
required: false
type: string
default: '5.3-2'
otp_vsn:
required: false
type: string
default: '26.2.1-2'
elixir_vsn:
required: false
type: string
default: '1.15.7'
permissions: permissions:
contents: read contents: read
jobs: jobs:
linux: linux:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[2])) || 'ubuntu-22.04' }} runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[4])) || 'ubuntu-22.04' }}
env: env:
PROFILE: ${{ matrix.profile[0] }} EMQX_NAME: ${{ matrix.profile[0] }}
ELIXIR: ${{ matrix.profile[1] == 'elixir' && 'yes' || 'no' }}
ARCH: ${{ matrix.profile[2] == 'x64' && 'amd64' || 'arm64' }}
BUILDER_SYSTEM: force_docker
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
profile: profile:
- ["emqx", "elixir", "x64"] - ["emqx", "26.2.1-2", "ubuntu22.04", "elixir", "x64"]
- ["emqx", "elixir", "arm64"] - ["emqx", "26.2.1-2", "ubuntu22.04", "elixir", "arm64"]
- ["emqx-enterprise", "erlang", "x64"] - ["emqx-enterprise", "26.2.1-2", "ubuntu22.04", "erlang", "x64"]
container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
- name: build tgz - name: Work around https://github.com/actions/checkout/issues/766
run: | run: |
./scripts/buildx.sh --profile $PROFILE --pkgtype tgz --elixir $ELIXIR --arch $ARCH git config --global --add safe.directory "$GITHUB_WORKSPACE"
- name: build pkg echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
- name: build and test tgz package
if: matrix.profile[3] == 'erlang'
run: | run: |
./scripts/buildx.sh --profile $PROFILE --pkgtype pkg --elixir $ELIXIR --arch $ARCH make ${EMQX_NAME}-tgz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 ./scripts/pkg-tests.sh ${EMQX_NAME}-tgz
- name: build and test deb/rpm packages
if: matrix.profile[3] == 'erlang'
run: |
make ${EMQX_NAME}-pkg
./scripts/pkg-tests.sh ${EMQX_NAME}-pkg
- name: build and test tgz package (Elixir)
if: matrix.profile[3] == 'elixir'
run: |
make ${EMQX_NAME}-elixir-tgz
./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-tgz
- name: build and test deb/rpm packages (Elixir)
if: matrix.profile[3] == 'elixir'
run: |
make ${EMQX_NAME}-elixir-pkg
./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-pkg
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}-${{ matrix.profile[3] }}-${{ matrix.profile[4] }}"
path: _packages/${{ matrix.profile[0] }}/* path: _packages/${{ matrix.profile[0] }}/*
retention-days: 7 retention-days: 7
compression-level: 0 compression-level: 0
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}-${{ matrix.profile[3] }}-${{ matrix.profile[4] }}"
path: | path: |
scripts/spellcheck scripts/spellcheck
_build/docgen/${{ matrix.profile[0] }}/schema-en.json _build/docgen/${{ matrix.profile[0] }}/schema-en.json
@ -61,30 +108,27 @@ jobs:
matrix: matrix:
profile: profile:
- emqx - emqx
otp:
- ${{ inputs.otp_vsn }}
os: os:
- macos-14-arm64 - macos-12-arm64
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
env: env:
EMQX_NAME: ${{ matrix.profile }} EMQX_NAME: ${{ matrix.profile }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
- uses: ./.github/actions/package-macos - uses: ./.github/actions/package-macos
with: with:
profile: ${{ matrix.profile }} profile: ${{ matrix.profile }}
otp: ${{ steps.env.outputs.OTP_VSN }} otp: ${{ matrix.otp }}
os: ${{ matrix.os }} os: ${{ matrix.os }}
apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }} apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }}
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }} apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}
apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }} apple_developer_id_bundle: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE }}
apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }} apple_developer_id_bundle_password: ${{ secrets.APPLE_DEVELOPER_ID_BUNDLE_PASSWORD }}
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: ${{ matrix.os }} name: ${{ matrix.os }}
path: _packages/**/* path: _packages/**/*

View File

@ -14,33 +14,32 @@ jobs:
check_deps_integrity: check_deps_integrity:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }} container: ${{ inputs.builder }}
env:
MIX_ENV: ${{ matrix.profile }}
PROFILE: ${{ matrix.profile }}
strategy:
matrix:
profile:
- emqx-enterprise
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE" - run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- run: make ensure-rebar3 - run: make ensure-rebar3
- run: ./scripts/check-deps-integrity.escript - run: ./scripts/check-deps-integrity.escript
- name: Setup mix - name: Setup mix
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
run: | run: |
# mix local.hex --force mix local.hex --force
mix local.hex 2.0.6 --force
mix local.rebar --force mix local.rebar --force
mix deps.get mix deps.get
- name: print mix dependency tree
run: mix deps.tree
- run: ./scripts/check-elixir-deps-discrepancies.exs - run: ./scripts/check-elixir-deps-discrepancies.exs
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
- run: ./scripts/check-elixir-applications.exs - run: ./scripts/check-elixir-applications.exs
env:
MIX_ENV: emqx-enterprise
PROFILE: emqx-enterprise
- name: Upload produced lock files - name: Upload produced lock files
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: failure() if: failure()
with: with:
name: ${{ matrix.profile }}_produced_lock_files name: produced_lock_files
path: | path: |
mix.lock mix.lock
rebar.lock rebar.lock

View File

@ -10,31 +10,37 @@ permissions:
jobs: jobs:
analyze: analyze:
if: github.repository == 'emqx/emqx'
name: Analyze name: Analyze
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
timeout-minutes: 360 timeout-minutes: 360
permissions: permissions:
actions: read actions: read
security-events: write security-events: write
container:
image: ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu22.04
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
branch: branch:
- master - master
- release-57 - release-55
- release-58 - release-56
language: language:
- cpp - cpp
- python - python
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ matrix.branch }} ref: ${{ matrix.branch }}
- name: Ensure git safe dir
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
make ensure-rebar3
- name: Initialize CodeQL - name: Initialize CodeQL
uses: github/codeql-action/init@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5 uses: github/codeql-action/init@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5
with: with:
@ -45,7 +51,14 @@ jobs:
env: env:
PROFILE: emqx-enterprise PROFILE: emqx-enterprise
run: | run: |
./scripts/buildx.sh --profile emqx-enterprise --pkgtype rel make emqx-enterprise-compile
- name: Fetch deps
if: matrix.language == 'python'
env:
PROFILE: emqx-enterprise
run: |
make deps-emqx-enterprise
- name: Perform CodeQL Analysis - name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5 uses: github/codeql-action/analyze@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5

View File

@ -7,6 +7,9 @@ on:
# run hourly # run hourly
- cron: "0 * * * *" - cron: "0 * * * *"
workflow_dispatch: workflow_dispatch:
inputs:
ref:
required: false
permissions: permissions:
contents: read contents: read
@ -14,30 +17,22 @@ permissions:
jobs: jobs:
rerun-failed-jobs: rerun-failed-jobs:
if: github.repository_owner == 'emqx' if: github.repository_owner == 'emqx'
runs-on: ubuntu-latest runs-on: ubuntu-22.04
permissions: permissions:
checks: read checks: read
actions: write actions: write
strategy:
fail-fast: false
matrix:
ref:
- master
- release-57
- release-58
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ matrix.ref }} ref: ${{ github.event.inputs.ref || 'master' }}
- name: run script - name: run script
shell: bash shell: bash
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPO: ${{ github.repository }}
run: | run: |
gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/${GITHUB_REPO}/actions/runs > runs.json gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/emqx/emqx/actions/runs > runs.json
for id in $(jq -r '.workflow_runs[] | select((."conclusion" == "failure") and (."name" != "Keep master green") and .run_attempt < 3) | .id' runs.json); do for id in $(jq -r '.workflow_runs[] | select((."conclusion" != "success") and .run_attempt < 3) | .id' runs.json); do
echo "rerun https://github.com/${GITHUB_REPO}/actions/runs/$id" echo "rerun https://github.com/emqx/emqx/actions/runs/$id"
gh api --method POST /repos/${GITHUB_REPO}/actions/runs/$id/rerun-failed-jobs || true gh api --method POST /repos/emqx/emqx/actions/runs/$id/rerun-failed-jobs
done done

View File

@ -26,13 +26,13 @@ jobs:
prepare: prepare:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.repository_owner == 'emqx' if: github.repository_owner == 'emqx'
container: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-ubuntu20.04
outputs: outputs:
BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }} BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }}
PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }} PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
ref: ${{ github.event.inputs.ref }} ref: ${{ github.event.inputs.ref }}
@ -52,7 +52,7 @@ jobs:
id: package_file id: package_file
run: | run: |
echo "PACKAGE_FILE=$(find _packages/emqx -name 'emqx-*.deb' | head -n 1 | xargs basename)" >> $GITHUB_OUTPUT echo "PACKAGE_FILE=$(find _packages/emqx -name 'emqx-*.deb' | head -n 1 | xargs basename)" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: _packages/emqx/${{ steps.package_file.outputs.PACKAGE_FILE }} path: _packages/emqx/${{ steps.package_file.outputs.PACKAGE_FILE }}
@ -72,17 +72,17 @@ jobs:
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1 aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test - name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/tf-emqx-performance-test repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test path: tf-emqx-performance-test
ref: v0.2.3 ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: tf-emqx-performance-test/ path: tf-emqx-performance-test/
- name: Setup Terraform - name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1 uses: hashicorp/setup-terraform@a1502cd9e758c50496cc9ac5308c4843bcd56d36 # v3.0.0
with: with:
terraform_wrapper: false terraform_wrapper: false
- name: run scenario - name: run scenario
@ -105,7 +105,7 @@ jobs:
terraform destroy -auto-approve terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id . aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
with: with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json" payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy - name: terraform destroy
@ -113,13 +113,13 @@ jobs:
working-directory: ./tf-emqx-performance-test working-directory: ./tf-emqx-performance-test
run: | run: |
terraform destroy -auto-approve terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: success() if: success()
with: with:
name: metrics name: metrics
path: | path: |
"./tf-emqx-performance-test/*.tar.gz" "./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: failure() if: failure()
with: with:
name: terraform name: terraform
@ -143,17 +143,17 @@ jobs:
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1 aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test - name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/tf-emqx-performance-test repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test path: tf-emqx-performance-test
ref: v0.2.3 ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: tf-emqx-performance-test/ path: tf-emqx-performance-test/
- name: Setup Terraform - name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1 uses: hashicorp/setup-terraform@a1502cd9e758c50496cc9ac5308c4843bcd56d36 # v3.0.0
with: with:
terraform_wrapper: false terraform_wrapper: false
- name: run scenario - name: run scenario
@ -176,7 +176,7 @@ jobs:
terraform destroy -auto-approve terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id . aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
with: with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json" payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy - name: terraform destroy
@ -184,13 +184,13 @@ jobs:
working-directory: ./tf-emqx-performance-test working-directory: ./tf-emqx-performance-test
run: | run: |
terraform destroy -auto-approve terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: success() if: success()
with: with:
name: metrics name: metrics
path: | path: |
"./tf-emqx-performance-test/*.tar.gz" "./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: failure() if: failure()
with: with:
name: terraform name: terraform
@ -215,17 +215,17 @@ jobs:
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1 aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test - name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/tf-emqx-performance-test repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test path: tf-emqx-performance-test
ref: v0.2.3 ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: tf-emqx-performance-test/ path: tf-emqx-performance-test/
- name: Setup Terraform - name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1 uses: hashicorp/setup-terraform@a1502cd9e758c50496cc9ac5308c4843bcd56d36 # v3.0.0
with: with:
terraform_wrapper: false terraform_wrapper: false
- name: run scenario - name: run scenario
@ -249,7 +249,7 @@ jobs:
terraform destroy -auto-approve terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id . aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
with: with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json" payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy - name: terraform destroy
@ -257,13 +257,13 @@ jobs:
working-directory: ./tf-emqx-performance-test working-directory: ./tf-emqx-performance-test
run: | run: |
terraform destroy -auto-approve terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: success() if: success()
with: with:
name: metrics name: metrics
path: | path: |
"./tf-emqx-performance-test/*.tar.gz" "./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: failure() if: failure()
with: with:
name: terraform name: terraform
@ -289,17 +289,17 @@ jobs:
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_PERF_TEST }}
aws-region: eu-west-1 aws-region: eu-west-1
- name: Checkout tf-emqx-performance-test - name: Checkout tf-emqx-performance-test
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/tf-emqx-performance-test repository: emqx/tf-emqx-performance-test
path: tf-emqx-performance-test path: tf-emqx-performance-test
ref: v0.2.3 ref: v0.2.3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: emqx-ubuntu20.04 name: emqx-ubuntu20.04
path: tf-emqx-performance-test/ path: tf-emqx-performance-test/
- name: Setup Terraform - name: Setup Terraform
uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3.1.1 uses: hashicorp/setup-terraform@a1502cd9e758c50496cc9ac5308c4843bcd56d36 # v3.0.0
with: with:
terraform_wrapper: false terraform_wrapper: false
- name: run scenario - name: run scenario
@ -322,7 +322,7 @@ jobs:
terraform destroy -auto-approve terraform destroy -auto-approve
aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id . aws s3 sync --exclude '*' --include '*.tar.gz' s3://$TF_VAR_s3_bucket_name/$TF_VAR_bench_id .
- name: Send notification to Slack - name: Send notification to Slack
uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0 uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
with: with:
payload-file-path: "./tf-emqx-performance-test/slack-payload.json" payload-file-path: "./tf-emqx-performance-test/slack-payload.json"
- name: terraform destroy - name: terraform destroy
@ -330,13 +330,13 @@ jobs:
working-directory: ./tf-emqx-performance-test working-directory: ./tf-emqx-performance-test
run: | run: |
terraform destroy -auto-approve terraform destroy -auto-approve
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: success() if: success()
with: with:
name: metrics name: metrics
path: | path: |
"./tf-emqx-performance-test/*.tar.gz" "./tf-emqx-performance-test/*.tar.gz"
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: failure() if: failure()
with: with:
name: terraform name: terraform

View File

@ -8,7 +8,7 @@ on:
tag: tag:
type: string type: string
required: true required: true
publish_release_artifacts: publish_release_artefacts:
type: boolean type: boolean
required: true required: true
default: false default: false
@ -36,7 +36,7 @@ jobs:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.tag }} ref: ${{ github.event.inputs.tag }}
- name: Detect profile - name: Detect profile
@ -67,15 +67,14 @@ jobs:
BUCKET=${{ secrets.AWS_S3_BUCKET }} BUCKET=${{ secrets.AWS_S3_BUCKET }}
OUTPUT_DIR=${{ steps.profile.outputs.s3dir }} OUTPUT_DIR=${{ steps.profile.outputs.s3dir }}
aws s3 cp --recursive s3://$BUCKET/$OUTPUT_DIR/${{ env.ref_name }} packages aws s3 cp --recursive s3://$BUCKET/$OUTPUT_DIR/${{ env.ref_name }} packages
- uses: emqx/upload-assets@974befcf0e72a1811360a81c798855efb66b0551 # 0.5.2 - uses: emqx/upload-assets@8d2083b4dbe3151b0b735572eaa153b6acb647fe # 0.5.0
env: env:
GITHUB_TOKEN: ${{ github.token }} GITHUB_TOKEN: ${{ github.token }}
with: with:
asset_paths: '["packages/*"]' asset_paths: '["packages/*"]'
tag_name: "${{ env.ref_name }}" tag_name: "${{ env.ref_name }}"
skip_existing: true
- name: update to emqx.io - name: update to emqx.io
if: github.event_name == 'release' || inputs.publish_release_artifacts if: startsWith(env.ref_name, 'v') && ((github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artefacts)
run: | run: |
set -eux set -eux
curl -w %{http_code} \ curl -w %{http_code} \
@ -86,7 +85,7 @@ jobs:
-d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.ref_name }}\" }" \ -d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.ref_name }}\" }" \
${{ secrets.EMQX_IO_RELEASE_API }} ${{ secrets.EMQX_IO_RELEASE_API }}
- name: Push to packagecloud.io - name: Push to packagecloud.io
if: (github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artifacts if: (github.event_name == 'release' && !github.event.release.prerelease) || inputs.publish_release_artefacts
env: env:
PROFILE: ${{ steps.profile.outputs.profile }} PROFILE: ${{ steps.profile.outputs.profile }}
VERSION: ${{ steps.profile.outputs.version }} VERSION: ${{ steps.profile.outputs.version }}
@ -106,12 +105,14 @@ jobs:
push "debian/bullseye" "packages/$PROFILE-$VERSION-debian11-arm64.deb" push "debian/bullseye" "packages/$PROFILE-$VERSION-debian11-arm64.deb"
push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-amd64.deb" push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-amd64.deb"
push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-arm64.deb" push "debian/bookworm" "packages/$PROFILE-$VERSION-debian12-arm64.deb"
push "ubuntu/bionic" "packages/$PROFILE-$VERSION-ubuntu18.04-amd64.deb"
push "ubuntu/bionic" "packages/$PROFILE-$VERSION-ubuntu18.04-arm64.deb"
push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-amd64.deb" push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-amd64.deb"
push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-arm64.deb" push "ubuntu/focal" "packages/$PROFILE-$VERSION-ubuntu20.04-arm64.deb"
push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-amd64.deb" push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-amd64.deb"
push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-arm64.deb" push "ubuntu/jammy" "packages/$PROFILE-$VERSION-ubuntu22.04-arm64.deb"
push "ubuntu/noble" "packages/$PROFILE-$VERSION-ubuntu24.04-amd64.deb" push "el/7" "packages/$PROFILE-$VERSION-el7-amd64.rpm"
push "ubuntu/noble" "packages/$PROFILE-$VERSION-ubuntu24.04-arm64.deb" push "el/7" "packages/$PROFILE-$VERSION-el7-arm64.rpm"
push "el/8" "packages/$PROFILE-$VERSION-el8-amd64.rpm" push "el/8" "packages/$PROFILE-$VERSION-el8-amd64.rpm"
push "el/8" "packages/$PROFILE-$VERSION-el8-arm64.rpm" push "el/8" "packages/$PROFILE-$VERSION-el8-arm64.rpm"
push "el/9" "packages/$PROFILE-$VERSION-el9-amd64.rpm" push "el/9" "packages/$PROFILE-$VERSION-el9-amd64.rpm"
@ -131,7 +132,7 @@ jobs:
checks: write checks: write
actions: write actions: write
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: trigger re-run of app versions check on open PRs - name: trigger re-run of app versions check on open PRs
shell: bash shell: bash
env: env:

View File

@ -25,7 +25,7 @@ jobs:
- emqx - emqx
- emqx-enterprise - emqx-enterprise
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
@ -39,10 +39,10 @@ jobs:
- name: print erlang log - name: print erlang log
if: failure() if: failure()
run: | run: |
cat _build/${{ matrix.profile }}/rel/emqx/log/erlang.log.* cat _build/${{ matrix.profile }}/rel/emqx/logs/erlang.log.*
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: failure() if: failure()
with: with:
name: conftest-logs-${{ matrix.profile }} name: conftest-logs-${{ matrix.profile }}
path: _build/${{ matrix.profile }}/rel/emqx/log path: _build/${{ matrix.profile }}/rel/emqx/logs
retention-days: 7 retention-days: 7

View File

@ -6,6 +6,13 @@ concurrency:
on: on:
workflow_call: workflow_call:
inputs:
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions: permissions:
contents: read contents: read
@ -25,17 +32,12 @@ jobs:
env: env:
EMQX_NAME: ${{ matrix.profile[0] }} EMQX_NAME: ${{ matrix.profile[0] }}
PKG_VSN: ${{ matrix.profile[0] == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }} EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with: with:
name: ${{ env.EMQX_NAME }}-docker name: ${{ env.EMQX_NAME }}-docker
path: /tmp path: /tmp
@ -50,11 +52,9 @@ jobs:
docker compose up --abort-on-container-exit --exit-code-from selenium docker compose up --abort-on-container-exit --exit-code-from selenium
- name: test two nodes cluster with proto_dist=inet_tls in docker - name: test two nodes cluster with proto_dist=inet_tls in docker
run: | run: |
## -d 1 means only put node 1 (latest version) behind haproxy ./scripts/test/start-two-nodes-in-docker.sh -P $_EMQX_DOCKER_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
./scripts/test/start-two-nodes-in-docker.sh -d 1 -P $_EMQX_DOCKER_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
## -c menas 'cleanup'
./scripts/test/start-two-nodes-in-docker.sh -c ./scripts/test/start-two-nodes-in-docker.sh -c
- name: cleanup - name: cleanup
if: always() if: always()
@ -69,6 +69,8 @@ jobs:
shell: bash shell: bash
env: env:
EMQX_NAME: ${{ matrix.profile }} EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
_EMQX_TEST_DB_BACKEND: ${{ matrix.cluster_db_backend }}
strategy: strategy:
fail-fast: false fail-fast: false
@ -77,20 +79,12 @@ jobs:
- emqx - emqx
- emqx-enterprise - emqx-enterprise
- emqx-elixir - emqx-elixir
cluster_db_backend:
- mnesia
- rlog
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
id: env
run: |
source env.sh
if [ "$EMQX_NAME" = "emqx-enterprise" ]; then
_EMQX_TEST_DB_BACKEND='rlog'
else
_EMQX_TEST_DB_BACKEND='mnesia'
fi
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with: with:
name: ${{ env.EMQX_NAME }}-docker name: ${{ env.EMQX_NAME }}-docker
path: /tmp path: /tmp

View File

@ -27,21 +27,19 @@ permissions:
contents: read contents: read
jobs: jobs:
prepare_matrix: run_emqx_app_tests:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }} container: ${{ inputs.builder }}
defaults: defaults:
run: run:
shell: bash shell: bash
outputs:
matrix: ${{ steps.matrix.outputs.matrix }}
skip: ${{ steps.matrix.outputs.skip }}
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
fetch-depth: 0 fetch-depth: 0
- name: prepare test matrix - name: run
id: matrix
env: env:
BEFORE_REF: ${{ inputs.before_ref }} BEFORE_REF: ${{ inputs.before_ref }}
AFTER_REF: ${{ inputs.after_ref }} AFTER_REF: ${{ inputs.after_ref }}
@ -50,54 +48,19 @@ jobs:
changed_files="$(git diff --name-only ${BEFORE_REF} ${AFTER_REF} apps/emqx)" changed_files="$(git diff --name-only ${BEFORE_REF} ${AFTER_REF} apps/emqx)"
if [ "$changed_files" = '' ]; then if [ "$changed_files" = '' ]; then
echo "nothing changed in apps/emqx, ignored." echo "nothing changed in apps/emqx, ignored."
echo 'matrix=[]' | tee -a $GITHUB_OUTPUT
echo 'skip=true' | tee -a $GITHUB_OUTPUT
exit 0 exit 0
else
echo 'skip=false' | tee -a $GITHUB_OUTPUT
echo 'matrix=[{"type": "eunit_proper_and_static"},{"type": "1_3"},{"type": "2_3"},{"type": "3_3"}]' | tee -a $GITHUB_OUTPUT
fi fi
run_emqx_app_tests:
if: needs.prepare_matrix.outputs.skip != 'true'
needs:
- prepare_matrix
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }}
strategy:
fail-fast: false
matrix:
include: ${{ fromJson(needs.prepare_matrix.outputs.matrix) }}
defaults:
run:
shell: bash
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0
- name: run
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
make ensure-rebar3 make ensure-rebar3
cp rebar3 apps/emqx/ cp rebar3 apps/emqx/
cd apps/emqx cd apps/emqx
if [[ ${{ matrix.type }} == "eunit_proper_and_static" ]]; then ./rebar3 xref
./rebar3 xref ./rebar3 dialyzer
./rebar3 dialyzer ./rebar3 eunit -v --name 'eunit@127.0.0.1'
./rebar3 eunit -v --name 'eunit@127.0.0.1' ./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true
./rebar3 proper -d test/props ./rebar3 proper -d test/props
else - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
export SUITEGROUP=${{ matrix.type }}
SUITES=$(../../scripts/find-suites.sh apps/emqx | \
sed -e 's|apps/emqx/test/||g' | \
sed -Ee 's|,?apps/emqx/integration_test/.*||g' | \
sed -e 's/\.erl//g')
echo "Suites: $SUITES"
./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true --suite="$SUITES"
fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
if: failure() if: failure()
with: with:
name: logs-emqx-app-tests-${{ matrix.type }} name: logs-emqx-app-tests
path: apps/emqx/_build/test/logs path: apps/emqx/_build/test/logs
retention-days: 7 retention-days: 7

View File

@ -6,6 +6,13 @@ concurrency:
on: on:
workflow_call: workflow_call:
inputs:
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions: permissions:
contents: read contents: read
@ -18,6 +25,7 @@ jobs:
shell: bash shell: bash
env: env:
EMQX_NAME: ${{ matrix.profile }} EMQX_NAME: ${{ matrix.profile }}
EMQX_TAG: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
REPOSITORY: "emqx/${{ matrix.profile }}" REPOSITORY: "emqx/${{ matrix.profile }}"
strategy: strategy:
@ -34,17 +42,10 @@ jobs:
- ssl1.3 - ssl1.3
- ssl1.2 - ssl1.2
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
path: source path: source
- name: Set up environment - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
id: env
run: |
cd source
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "EMQX_TAG=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with: with:
name: "${{ env.EMQX_NAME }}-docker" name: "${{ env.EMQX_NAME }}-docker"
path: /tmp path: /tmp
@ -164,7 +165,7 @@ jobs:
fi fi
sleep 1; sleep 1;
done done
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: emqx/paho.mqtt.testing repository: emqx/paho.mqtt.testing
ref: develop-5.0 ref: develop-5.0

View File

@ -2,6 +2,10 @@ name: JMeter integration tests
on: on:
workflow_call: workflow_call:
inputs:
version-emqx:
required: true
type: string
permissions: permissions:
contents: read contents: read
@ -12,7 +16,7 @@ jobs:
steps: steps:
- name: Cache Jmeter - name: Cache Jmeter
id: cache-jmeter id: cache-jmeter
uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with: with:
path: /tmp/apache-jmeter.tgz path: /tmp/apache-jmeter.tgz
key: apache-jmeter-5.4.3.tgz key: apache-jmeter-5.4.3.tgz
@ -31,7 +35,7 @@ jobs:
else else
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz $ARCHIVE_URL wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz $ARCHIVE_URL
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: apache-jmeter.tgz name: apache-jmeter.tgz
path: /tmp/apache-jmeter.tgz path: /tmp/apache-jmeter.tgz
@ -51,23 +55,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -95,7 +86,7 @@ jobs:
echo "check logs failed" echo "check logs failed"
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: always() if: always()
with: with:
name: jmeter_logs-advanced_feat-${{ matrix.scripts_type }} name: jmeter_logs-advanced_feat-${{ matrix.scripts_type }}
@ -120,23 +111,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -175,7 +153,7 @@ jobs:
if: failure() if: failure()
run: | run: |
docker compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log docker compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: always() if: always()
with: with:
name: jmeter_logs-pgsql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.pgsql_tag }} name: jmeter_logs-pgsql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.pgsql_tag }}
@ -197,23 +175,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -248,7 +213,7 @@ jobs:
echo "check logs failed" echo "check logs failed"
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: always() if: always()
with: with:
name: jmeter_logs-mysql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.mysql_tag }} name: jmeter_logs-mysql_authn_authz-${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}
@ -266,23 +231,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -313,7 +265,7 @@ jobs:
echo "check logs failed" echo "check logs failed"
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: always() if: always()
with: with:
name: jmeter_logs-JWT_authn-${{ matrix.scripts_type }} name: jmeter_logs-JWT_authn-${{ matrix.scripts_type }}
@ -332,23 +284,10 @@ jobs:
needs: jmeter_artifact needs: jmeter_artifact
steps: steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter - uses: ./.github/actions/prepare-jmeter
with:
version-emqx: ${{ inputs.version-emqx }}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -370,7 +309,7 @@ jobs:
echo "check logs failed" echo "check logs failed"
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: always() if: always()
with: with:
name: jmeter_logs-built_in_database_authn_authz-${{ matrix.scripts_type }} name: jmeter_logs-built_in_database_authn_authz-${{ matrix.scripts_type }}

View File

@ -25,7 +25,7 @@ jobs:
run: run:
shell: bash shell: bash
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: emqx-enterprise name: emqx-enterprise
- name: extract artifact - name: extract artifact
@ -45,7 +45,7 @@ jobs:
run: | run: |
export PROFILE='emqx-enterprise' export PROFILE='emqx-enterprise'
make emqx-enterprise-tgz make emqx-enterprise-tgz
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
name: Upload built emqx and test scenario name: Upload built emqx and test scenario
with: with:
name: relup_tests_emqx_built name: relup_tests_emqx_built
@ -72,10 +72,10 @@ jobs:
run: run:
shell: bash shell: bash
steps: steps:
- uses: erlef/setup-beam@b9c58b0450cd832ccdb3c17cc156a47065d2114f # v1.18.1 - uses: erlef/setup-beam@8b9cac4c04dbcd7bf8fd673e16f988225d89b09b # v1.17.2
with: with:
otp-version: 26.2.5 otp-version: 26.2.1
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
repository: hawk/lux repository: hawk/lux
ref: lux-2.8.1 ref: lux-2.8.1
@ -88,7 +88,7 @@ jobs:
./configure ./configure
make make
echo "$(pwd)/bin" >> $GITHUB_PATH echo "$(pwd)/bin" >> $GITHUB_PATH
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
name: Download built emqx and test scenario name: Download built emqx and test scenario
with: with:
name: relup_tests_emqx_built name: relup_tests_emqx_built
@ -111,7 +111,7 @@ jobs:
docker logs node2.emqx.io | tee lux_logs/emqx2.log docker logs node2.emqx.io | tee lux_logs/emqx2.log
exit 1 exit 1
fi fi
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
name: Save debug data name: Save debug data
if: failure() if: failure()
with: with:

View File

@ -20,6 +20,9 @@ on:
required: true required: true
type: string type: string
permissions:
contents: read
env: env:
IS_CI: "yes" IS_CI: "yes"
@ -35,41 +38,37 @@ jobs:
defaults: defaults:
run: run:
shell: bash shell: bash
container: ${{ inputs.builder }} container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
env:
PROFILE: ${{ matrix.profile }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}
permissions:
contents: read
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
run: | run: |
unzip -o -q ${{ matrix.profile }}.zip unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE" git config --global --add safe.directory "$GITHUB_WORKSPACE"
# produces eunit.coverdata # produces eunit.coverdata
- run: make eunit - name: eunit
env:
PROFILE: ${{ matrix.profile }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
run: make eunit
# produces proper.coverdata # produces proper.coverdata
- run: make proper - name: proper
- run: make cover
- name: send to coveralls
if: github.repository == 'emqx/emqx'
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} PROFILE: ${{ matrix.profile }}
run: make coveralls ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
run: make proper
- run: cat rebar3.crashdump - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
if: failure() with:
name: coverdata-${{ matrix.profile }}-${{ matrix.otp }}
path: _build/test/cover
retention-days: 7
ct_docker: ct_docker:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON('["self-hosted","ephemeral","linux","x64"]') || 'ubuntu-22.04' }} runs-on: ${{ github.repository_owner == 'emqx' && fromJSON('["self-hosted","ephemeral","linux","x64"]') || 'ubuntu-22.04' }}
@ -83,24 +82,19 @@ jobs:
run: run:
shell: bash shell: bash
env:
PROFILE: ${{ matrix.profile }}
permissions:
contents: read
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
run: | run: |
unzip -o -q ${{ matrix.profile }}.zip unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE"
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata # produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
- name: run common tests - name: run common tests
env: env:
DOCKER_CT_RUNNER_IMAGE: ${{ inputs.builder }} DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
MONGO_TAG: "5" MONGO_TAG: "5"
MYSQL_TAG: "8" MYSQL_TAG: "8"
PGSQL_TAG: "13" PGSQL_TAG: "13"
@ -109,34 +103,23 @@ jobs:
TDENGINE_TAG: "3.0.2.4" TDENGINE_TAG: "3.0.2.4"
OPENTS_TAG: "9aa7f88" OPENTS_TAG: "9aa7f88"
MINIO_TAG: "RELEASE.2023-03-20T20-16-18Z" MINIO_TAG: "RELEASE.2023-03-20T20-16-18Z"
PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }} SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1 ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }} CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }}
run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }} --keep-up - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
- name: make cover name: coverdata-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
run: | path: _build/test/cover
docker exec -e PROFILE="$PROFILE" -t erlang make cover retention-days: 7
- name: send to coveralls
if: github.repository == 'emqx/emqx'
run: |
ls _build/test/cover/*.coverdata || exit 0
docker exec -e PROFILE="$PROFILE" -t erlang make coveralls
- name: rebar3.crashdump
if: failure()
run: cat rebar3.crashdump
- name: compress logs - name: compress logs
if: failure() if: failure()
run: tar -czf logs.tar.gz _build/test/logs run: tar -czf logs.tar.gz _build/test/logs
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
if: failure() if: failure()
with: with:
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }} name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
path: logs.tar.gz path: logs.tar.gz
compression-level: 0 compression-level: 0
retention-days: 7 retention-days: 7
@ -149,22 +132,13 @@ jobs:
matrix: matrix:
include: ${{ fromJson(inputs.ct-host) }} include: ${{ fromJson(inputs.ct-host) }}
container: ${{ inputs.builder }} container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
defaults: defaults:
run: run:
shell: bash shell: bash
permissions:
contents: read
env:
PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }}
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
@ -174,29 +148,26 @@ jobs:
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata # produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
- name: run common tests - name: run common tests
run: make "${{ matrix.app }}-ct"
- run: make cover
- name: send to coveralls
if: github.repository == 'emqx/emqx'
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
run: | run: |
ls _build/test/cover/*.coverdata || exit 0 make "${{ matrix.app }}-ct"
make coveralls - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with:
- run: cat rebar3.crashdump name: coverdata-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
if: failure() path: _build/test/cover
if-no-files-found: warn # do not fail if no coverdata found
retention-days: 7
- name: compress logs - name: compress logs
if: failure() if: failure()
run: tar -czf logs.tar.gz _build/test/logs run: tar -czf logs.tar.gz _build/test/logs
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
if: failure() if: failure()
with: with:
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }} name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
path: logs.tar.gz path: logs.tar.gz
compression-level: 0 compression-level: 0
retention-days: 7 retention-days: 7
@ -209,18 +180,61 @@ jobs:
runs-on: ubuntu-22.04 runs-on: ubuntu-22.04
strategy: strategy:
fail-fast: false fail-fast: false
permissions:
pull-requests: write
steps: steps:
- name: Coveralls finished
if: github.repository == 'emqx/emqx'
uses: coverallsapp/github-action@643bc377ffa44ace6394b2b5d0d3950076de9f63 # v2.3.0
with:
parallel-finished: true
git-branch: ${{ github.ref }}
git-commit: ${{ github.sha }}
- run: echo "All tests passed" - run: echo "All tests passed"
make_cover:
needs:
- eunit_and_proper
- ct
- ct_docker
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ inputs.builder }}
strategy:
fail-fast: false
matrix:
profile:
- emqx-enterprise
steps:
- uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with:
name: ${{ matrix.profile }}
- name: extract artifact
run: |
unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE"
- uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
name: download coverdata
with:
pattern: coverdata-${{ matrix.profile }}-*
path: _build/test/cover
merge-multiple: true
- name: make cover
env:
PROFILE: emqx-enterprise
run: make cover
- name: send to coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PROFILE: emqx-enterprise
run: make coveralls
- name: get coveralls logs
if: failure()
run: cat rebar3.crashdump
# do this in a separate job
upload_coverdata:
needs: make_cover
runs-on: ubuntu-22.04
steps:
- name: Coveralls Finished
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
curl -v -k https://coveralls.io/webhook \
--header "Content-Type: application/json" \
--data "{\"repo_name\":\"$GITHUB_REPOSITORY\",\"repo_token\":\"$GITHUB_TOKEN\",\"payload\":{\"build_num\":$GITHUB_RUN_ID,\"status\":\"done\"}}" || true

View File

@ -16,21 +16,20 @@ permissions: read-all
jobs: jobs:
analysis: analysis:
if: github.repository == 'emqx/emqx'
name: Scorecard analysis name: Scorecard analysis
runs-on: ubuntu-22.04 runs-on: ubuntu-latest
permissions: permissions:
security-events: write security-events: write
id-token: write id-token: write
steps: steps:
- name: "Checkout code" - name: "Checkout code"
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
persist-credentials: false persist-credentials: false
- name: "Run analysis" - name: "Run analysis"
uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
with: with:
results_file: results.sarif results_file: results.sarif
results_format: sarif results_format: sarif
@ -40,7 +39,7 @@ jobs:
publish_results: true publish_results: true
- name: "Upload artifact" - name: "Upload artifact"
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5 uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
with: with:
name: SARIF file name: SARIF file
path: results.sarif path: results.sarif

View File

@ -19,7 +19,7 @@ jobs:
- emqx-enterprise - emqx-enterprise
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
pattern: "${{ matrix.profile }}-schema-dump-*-x64" pattern: "${{ matrix.profile }}-schema-dump-*-x64"
merge-multiple: true merge-multiple: true

View File

@ -13,8 +13,8 @@ permissions:
jobs: jobs:
stale: stale:
if: github.repository == 'emqx/emqx' if: github.repository_owner == 'emqx'
runs-on: ubuntu-22.04 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
permissions: permissions:
issues: write issues: write
pull-requests: none pull-requests: none

View File

@ -28,21 +28,21 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
include: ${{ fromJson(inputs.ct-matrix) }} include: ${{ fromJson(inputs.ct-matrix) }}
container: "${{ inputs.builder }}" container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
steps: steps:
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}
- name: extract artifact - name: extract artifact
run: | run: |
unzip -o -q ${{ matrix.profile }}.zip unzip -o -q ${{ matrix.profile }}.zip
git config --global --add safe.directory "$GITHUB_WORKSPACE" git config --global --add safe.directory "$GITHUB_WORKSPACE"
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 - uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2 # v4.0.0
with: with:
path: "emqx_dialyzer_${{ matrix.profile }}_plt" path: "emqx_dialyzer_${{ matrix.otp }}_plt"
key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }} key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }}
restore-keys: | restore-keys: |
rebar3-dialyzer-plt-${{ matrix.profile }}- rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-
- run: cat .env | tee -a $GITHUB_ENV - run: cat .env | tee -a $GITHUB_ENV
- name: run static checks - name: run static checks
run: make static_checks run: make static_checks

View File

@ -1,88 +0,0 @@
name: Sync release branch
concurrency:
group: sync-release-branch-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on:
schedule:
- cron: '0 2 * * *'
workflow_dispatch:
permissions:
contents: read
jobs:
create-pr:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
strategy:
fail-fast: false
matrix:
branch:
- release-57
env:
SYNC_BRANCH: ${{ matrix.branch }}
defaults:
run:
shell: bash
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0
- name: create new branch
run: |
set -euxo pipefail
NEW_BRANCH_NAME=sync-${SYNC_BRANCH}-$(date +"%Y%m%d-%H%M%S")
echo "NEW_BRANCH_NAME=${NEW_BRANCH_NAME}" >> $GITHUB_ENV
git config --global user.name "${GITHUB_ACTOR}"
git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com"
git checkout -b ${NEW_BRANCH_NAME}
git merge origin/${SYNC_BRANCH} 2>&1 | tee merge.log
git push origin ${NEW_BRANCH_NAME}:${NEW_BRANCH_NAME}
- name: create pull request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euxo pipefail
for pr in $(gh pr list --state open --base master --label sync-release-branch --search "Sync ${SYNC_BRANCH} in:title" --repo ${{ github.repository }} --json number --jq '.[] | .number'); do
gh pr close $pr --repo ${{ github.repository }} --delete-branch || true
done
gh pr create --title "Sync ${SYNC_BRANCH}" --body "Sync ${SYNC_BRANCH}" --base master --head ${NEW_BRANCH_NAME} --label sync-release-branch --repo ${{ github.repository }}
- name: Send notification to Slack
if: failure()
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
run: |
awk '{printf "%s\\n", $0}' merge.log > merge.log.1
cat <<EOF > payload.json
{
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Automatic sync of ${SYNC_BRANCH} branch failed: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "\`\`\`$(cat merge.log.1)\`\`\`"
}
}
]
}
EOF
curl -X POST -H 'Content-type: application/json' --data @payload.json "$SLACK_WEBHOOK_URL"

View File

@ -23,7 +23,7 @@ jobs:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
ref: ${{ github.event.inputs.tag }} ref: ${{ github.event.inputs.tag }}
- name: Detect profile - name: Detect profile

3
.gitignore vendored
View File

@ -76,6 +76,3 @@ rebar-git-cache.tar
.docker_image_tag .docker_image_tag
.emqx_docker_image_tags .emqx_docker_image_tags
.git/ .git/
apps/emqx_utils/src/emqx_variform_parser.erl
apps/emqx_utils/src/emqx_variform_scan.erl
default-profile.mk

View File

@ -1,2 +1,2 @@
erlang 26.2.5-3 erlang 26.2.1-2
elixir 1.15.7-otp-26 elixir 1.15.7-otp-26

View File

@ -6,17 +6,23 @@ endif
REBAR = $(CURDIR)/rebar3 REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts SCRIPTS = $(CURDIR)/scripts
include env.sh export EMQX_RELUP ?= true
export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.3-2:1.15.7-26.2.1-2-debian12
export EMQX_DEFAULT_RUNNER = public.ecr.aws/debian/debian:12-slim
export EMQX_REL_FORM ?= tgz
export QUICER_DOWNLOAD_FROM_RELEASE = 1
ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none
FIND=/usr/bin/find
else
FIND=find
endif
# Dashboard version # Dashboard version
# from https://github.com/emqx/emqx-dashboard5 # from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.10.0-beta.1 export EMQX_DASHBOARD_VERSION ?= v1.7.0
export EMQX_EE_DASHBOARD_VERSION ?= e1.8.0-beta.1 export EMQX_EE_DASHBOARD_VERSION ?= e1.6.0-beta.5
export EMQX_RELUP ?= true
export EMQX_REL_FORM ?= tgz
-include default-profile.mk
PROFILE ?= emqx PROFILE ?= emqx
REL_PROFILES := emqx emqx-enterprise REL_PROFILES := emqx emqx-enterprise
PKG_PROFILES := emqx-pkg emqx-enterprise-pkg PKG_PROFILES := emqx-pkg emqx-enterprise-pkg
@ -28,8 +34,6 @@ CT_COVER_EXPORT_PREFIX ?= $(PROFILE)
export REBAR_GIT_CLONE_OPTIONS += --depth=1 export REBAR_GIT_CLONE_OPTIONS += --depth=1
ELIXIR_COMMON_DEPS := ensure-hex ensure-mix-rebar3 ensure-mix-rebar
.PHONY: default .PHONY: default
default: $(REBAR) $(PROFILE) default: $(REBAR) $(PROFILE)
@ -49,8 +53,7 @@ $(REBAR): .prepare ensure-rebar3
.PHONY: ensure-hex .PHONY: ensure-hex
ensure-hex: ensure-hex:
# @mix local.hex --if-missing --force @mix local.hex --if-missing --force
@mix local.hex 2.0.6 --if-missing --force
.PHONY: ensure-mix-rebar3 .PHONY: ensure-mix-rebar3
ensure-mix-rebar3: $(REBAR) ensure-mix-rebar3: $(REBAR)
@ -60,12 +63,8 @@ ensure-mix-rebar3: $(REBAR)
ensure-mix-rebar: $(REBAR) ensure-mix-rebar: $(REBAR)
@mix local.rebar --if-missing --force @mix local.rebar --if-missing --force
.PHONY: elixir-common-deps
elixir-common-deps: $(ELIXIR_COMMON_DEPS)
.PHONY: mix-deps-get .PHONY: mix-deps-get
mix-deps-get: elixir-common-deps mix-deps-get: $(ELIXIR_COMMON_DEPS)
@mix deps.get @mix deps.get
.PHONY: eunit .PHONY: eunit
@ -195,8 +194,8 @@ $(PROFILES:%=clean-%):
@if [ -d _build/$(@:clean-%=%) ]; then \ @if [ -d _build/$(@:clean-%=%) ]; then \
rm -f rebar.lock; \ rm -f rebar.lock; \
rm -rf _build/$(@:clean-%=%)/rel; \ rm -rf _build/$(@:clean-%=%)/rel; \
find _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \ $(FIND) _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \
find _build/$(@:clean-%=%) -type l -delete; \ $(FIND) _build/$(@:clean-%=%) -type l -delete; \
fi fi
.PHONY: clean-all .PHONY: clean-all
@ -244,7 +243,7 @@ $(foreach zt,$(ALL_ZIPS),$(eval $(call download-relup-packages,$(zt))))
## relup target is to create relup instructions ## relup target is to create relup instructions
.PHONY: $(REL_PROFILES:%=%-relup) .PHONY: $(REL_PROFILES:%=%-relup)
define gen-relup-target define gen-relup-target
$1-relup: $(COMMON_DEPS) $1-relup: $1-relup-downloads $(COMMON_DEPS)
@$(BUILD) $1 relup @$(BUILD) $1 relup
endef endef
ALL_TGZS = $(REL_PROFILES) ALL_TGZS = $(REL_PROFILES)
@ -253,7 +252,7 @@ $(foreach zt,$(ALL_TGZS),$(eval $(call gen-relup-target,$(zt))))
## tgz target is to create a release package .tar.gz with relup ## tgz target is to create a release package .tar.gz with relup
.PHONY: $(REL_PROFILES:%=%-tgz) .PHONY: $(REL_PROFILES:%=%-tgz)
define gen-tgz-target define gen-tgz-target
$1-tgz: $(COMMON_DEPS) $1-tgz: $1-relup
@$(BUILD) $1 tgz @$(BUILD) $1 tgz
endef endef
ALL_TGZS = $(REL_PROFILES) ALL_TGZS = $(REL_PROFILES)
@ -316,20 +315,10 @@ $(foreach tt,$(ALL_ELIXIR_TGZS),$(eval $(call gen-elixir-tgz-target,$(tt))))
.PHONY: fmt .PHONY: fmt
fmt: $(REBAR) fmt: $(REBAR)
@find . \( -name '*.app.src' -o \ @$(SCRIPTS)/erlfmt -w 'apps/*/{src,include,priv,test,integration_test}/**/*.{erl,hrl,app.src,eterm}'
-name '*.erl' -o \ @$(SCRIPTS)/erlfmt -w 'apps/*/rebar.config' 'apps/emqx/rebar.config.script' '.ci/fvt_tests/http_server/rebar.config'
-name '*.hrl' -o \ @$(SCRIPTS)/erlfmt -w 'rebar.config' 'rebar.config.erl'
-name 'rebar.config' -o \ @$(SCRIPTS)/erlfmt -w 'scripts/*.escript' 'bin/*.escript' 'bin/nodetool'
-name '*.eterm' -o \
-name '*.escript' \) \
-not -path '*/_build/*' \
-not -path '*/deps/*' \
-not -path '*/_checkouts/*' \
-type f \
| xargs $(SCRIPTS)/erlfmt -w
@$(SCRIPTS)/erlfmt -w 'apps/emqx/rebar.config.script'
@$(SCRIPTS)/erlfmt -w 'elvis.config'
@$(SCRIPTS)/erlfmt -w 'bin/nodetool'
@mix format @mix format
.PHONY: clean-test-cluster-config .PHONY: clean-test-cluster-config

View File

@ -1,5 +1,3 @@
简体中文 | [English](./README.md) | [Русский](./README-RU.md)
# EMQX # EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
@ -88,7 +86,7 @@ EMQX Cloud 文档:[docs.emqx.com/zh/cloud/latest/](https://docs.emqx.com/zh/cl
`master` 分支是最新的 5 版本,`main-v4.4` 是 4.4 版本。 `master` 分支是最新的 5 版本,`main-v4.4` 是 4.4 版本。
EMQX 4.4 版本需要 OTP 245 版本则可以使用 OTP 25 和 26 构建。 EMQX 4.4 版本需要 OTP 245 版本则可以使用 OTP 24 和 25 构建。
```bash ```bash
git clone https://github.com/emqx/emqx.git git clone https://github.com/emqx/emqx.git

View File

@ -1,5 +1,3 @@
Русский | [简体中文](./README-CN.md) | [English](./README.md)
# Брокер EMQX # Брокер EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)

View File

@ -1,5 +1,3 @@
English | [简体中文](./README-CN.md) | [Русский](./README-RU.md)
# EMQX # EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
@ -100,7 +98,7 @@ The `master` branch tracks the latest version 5. For version 4.4 checkout the `m
EMQX 4.4 requires OTP 24. EMQX 4.4 requires OTP 24.
EMQX 5.0 ~ 5.3 can be built with OTP 24 or 25. EMQX 5.0 ~ 5.3 can be built with OTP 24 or 25.
EMQX 5.4 and newer can be built with OTP 25 or 26. EMQX 5.4 and newer can be built with OTP 24 or 25.
```bash ```bash
git clone https://github.com/emqx/emqx.git git clone https://github.com/emqx/emqx.git

View File

@ -45,10 +45,6 @@
). ).
-define(assertReceive(PATTERN, TIMEOUT), -define(assertReceive(PATTERN, TIMEOUT),
?assertReceive(PATTERN, TIMEOUT, #{})
).
-define(assertReceive(PATTERN, TIMEOUT, EXTRA),
(fun() -> (fun() ->
receive receive
X__V = PATTERN -> X__V X__V = PATTERN -> X__V
@ -58,8 +54,7 @@
{module, ?MODULE}, {module, ?MODULE},
{line, ?LINE}, {line, ?LINE},
{expression, (??PATTERN)}, {expression, (??PATTERN)},
{mailbox, ?drainMailbox()}, {mailbox, ?drainMailbox()}
{extra_info, EXTRA}
]} ]}
) )
end end

View File

@ -65,20 +65,9 @@
%% Route %% Route
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-record(share_dest, {
session_id :: emqx_session:session_id(),
group :: emqx_types:group()
}).
-record(route, { -record(route, {
topic :: binary(), topic :: binary(),
dest :: dest :: node() | {binary(), node()} | emqx_session:session_id()
node()
| {binary(), node()}
| emqx_session:session_id()
%% One session can also have multiple subscriptions to the same topic through different groups
| #share_dest{}
| emqx_external_broker:dest()
}). }).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -106,10 +95,4 @@
until :: integer() until :: integer()
}). }).
%%--------------------------------------------------------------------
%% Configurations
%%--------------------------------------------------------------------
-define(KIND_REPLICATE, replicate).
-define(KIND_INITIATE, initiate).
-endif. -endif.

View File

@ -1,35 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022, 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc This header contains definitions of durable session metadata
%% keys, that can be consumed by the external code.
-ifndef(EMQX_DURABLE_SESSION_META_HRL).
-define(EMQX_DURABLE_SESSION_META_HRL, true).
%% Session metadata keys:
-define(created_at, created_at).
-define(last_alive_at, last_alive_at).
-define(expiry_interval, expiry_interval).
%% Unique integer used to create unique identities:
-define(last_id, last_id).
%% Connection info (relevent for the dashboard):
-define(peername, peername).
-define(will_message, will_message).
-define(clientinfo, clientinfo).
-define(protocol, protocol).
-define(offline_info, offline_info).
-endif.

View File

@ -25,8 +25,6 @@
-define(HP_AUTHN, 970). -define(HP_AUTHN, 970).
-define(HP_AUTHZ, 960). -define(HP_AUTHZ, 960).
-define(HP_SYS_MSGS, 950). -define(HP_SYS_MSGS, 950).
-define(HP_SCHEMA_VALIDATION, 945).
-define(HP_MESSAGE_TRANSFORMATION, 943).
-define(HP_TOPIC_METRICS, 940). -define(HP_TOPIC_METRICS, 940).
-define(HP_RETAINER, 930). -define(HP_RETAINER, 930).
-define(HP_AUTO_SUB, 920). -define(HP_AUTO_SUB, 920).

View File

@ -1,258 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_METRICS_HRL).
-define(EMQX_METRICS_HRL, true).
%% Bytes sent and received
-define(BYTES_METRICS, [
{counter, 'bytes.received', <<"Number of bytes received ">>},
{counter, 'bytes.sent', <<"Number of bytes sent on this connection">>}
]).
%% Packets sent and received
-define(PACKET_METRICS, [
{counter, 'packets.received', <<"Number of received packet">>},
{counter, 'packets.sent', <<"Number of sent packet">>},
{counter, 'packets.connect.received', <<"Number of received CONNECT packet">>},
{counter, 'packets.connack.sent', <<"Number of sent CONNACK packet">>},
{counter, 'packets.connack.error',
<<"Number of received CONNECT packet with unsuccessful connections">>},
{counter, 'packets.connack.auth_error',
<<"Number of received CONNECT packet with failed Authentication">>},
{counter, 'packets.publish.received', <<"Number of received PUBLISH packet">>},
%% PUBLISH packets sent
{counter, 'packets.publish.sent', <<"Number of sent PUBLISH packet">>},
%% PUBLISH packet_id inuse
{counter, 'packets.publish.inuse',
<<"Number of received PUBLISH packet with occupied identifiers">>},
%% PUBLISH failed for error
{counter, 'packets.publish.error',
<<"Number of received PUBLISH packet that cannot be published">>},
%% PUBLISH failed for auth error
{counter, 'packets.publish.auth_error',
<<"Number of received PUBLISH packets with failed the Authorization check">>},
%% PUBLISH(QoS2) packets dropped
{counter, 'packets.publish.dropped',
<<"Number of messages discarded due to the receiving limit">>},
%% PUBACK packets received
{counter, 'packets.puback.received', <<"Number of received PUBACK packet">>},
%% PUBACK packets sent
{counter, 'packets.puback.sent', <<"Number of sent PUBACK packet">>},
%% PUBACK packet_id inuse
{counter, 'packets.puback.inuse',
<<"Number of received PUBACK packet with occupied identifiers">>},
%% PUBACK packets missed
{counter, 'packets.puback.missed', <<"Number of received packet with identifiers.">>},
%% PUBREC packets received
{counter, 'packets.pubrec.received', <<"Number of received PUBREC packet">>},
%% PUBREC packets sent
{counter, 'packets.pubrec.sent', <<"Number of sent PUBREC packet">>},
%% PUBREC packet_id inuse
{counter, 'packets.pubrec.inuse',
<<"Number of received PUBREC packet with occupied identifiers">>},
%% PUBREC packets missed
{counter, 'packets.pubrec.missed',
<<"Number of received PUBREC packet with unknown identifiers">>},
%% PUBREL packets received
{counter, 'packets.pubrel.received', <<"Number of received PUBREL packet">>},
%% PUBREL packets sent
{counter, 'packets.pubrel.sent', <<"Number of sent PUBREL packet">>},
%% PUBREL packets missed
{counter, 'packets.pubrel.missed',
<<"Number of received PUBREC packet with unknown identifiers">>},
%% PUBCOMP packets received
{counter, 'packets.pubcomp.received', <<"Number of received PUBCOMP packet">>},
%% PUBCOMP packets sent
{counter, 'packets.pubcomp.sent', <<"Number of sent PUBCOMP packet">>},
%% PUBCOMP packet_id inuse
{counter, 'packets.pubcomp.inuse',
<<"Number of received PUBCOMP packet with occupied identifiers">>},
%% PUBCOMP packets missed
{counter, 'packets.pubcomp.missed', <<"Number of missed PUBCOMP packet">>},
%% SUBSCRIBE Packets received
{counter, 'packets.subscribe.received', <<"Number of received SUBSCRIBE packet">>},
%% SUBSCRIBE error
{counter, 'packets.subscribe.error',
<<"Number of received SUBSCRIBE packet with failed subscriptions">>},
%% SUBSCRIBE failed for not auth
{counter, 'packets.subscribe.auth_error',
<<"Number of received SUBACK packet with failed Authorization check">>},
%% SUBACK packets sent
{counter, 'packets.suback.sent', <<"Number of sent SUBACK packet">>},
%% UNSUBSCRIBE Packets received
{counter, 'packets.unsubscribe.received', <<"Number of received UNSUBSCRIBE packet">>},
%% UNSUBSCRIBE error
{counter, 'packets.unsubscribe.error',
<<"Number of received UNSUBSCRIBE packet with failed unsubscriptions">>},
%% UNSUBACK Packets sent
{counter, 'packets.unsuback.sent', <<"Number of sent UNSUBACK packet">>},
%% PINGREQ packets received
{counter, 'packets.pingreq.received', <<"Number of received PINGREQ packet">>},
%% PINGRESP Packets sent
{counter, 'packets.pingresp.sent', <<"Number of sent PUBRESP packet">>},
%% DISCONNECT Packets received
{counter, 'packets.disconnect.received', <<"Number of received DISCONNECT packet">>},
%% DISCONNECT Packets sent
{counter, 'packets.disconnect.sent', <<"Number of sent DISCONNECT packet">>},
%% Auth Packets received
{counter, 'packets.auth.received', <<"Number of received AUTH packet">>},
%% Auth Packets sent
{counter, 'packets.auth.sent', <<"Number of sent AUTH packet">>}
]).
%% Messages sent/received and pubsub
-define(MESSAGE_METRICS, [
%% All Messages received
{counter, 'messages.received', <<
"Number of messages received from the client, equal to the sum of "
"messages.qos0.received, messages.qos1.received and messages.qos2.received"
>>},
%% All Messages sent
{counter, 'messages.sent', <<
"Number of messages sent to the client, equal to the sum of "
"messages.qos0.sent, messages.qos1.sent and messages.qos2.sent"
>>},
%% QoS0 Messages received
{counter, 'messages.qos0.received', <<"Number of QoS 0 messages received from clients">>},
%% QoS0 Messages sent
{counter, 'messages.qos0.sent', <<"Number of QoS 0 messages sent to clients">>},
%% QoS1 Messages received
{counter, 'messages.qos1.received', <<"Number of QoS 1 messages received from clients">>},
%% QoS1 Messages sent
{counter, 'messages.qos1.sent', <<"Number of QoS 1 messages sent to clients">>},
%% QoS2 Messages received
{counter, 'messages.qos2.received', <<"Number of QoS 2 messages received from clients">>},
%% QoS2 Messages sent
{counter, 'messages.qos2.sent', <<"Number of QoS 2 messages sent to clients">>},
%% PubSub Metrics
%% Messages Publish
{counter, 'messages.publish',
<<"Number of messages published in addition to system messages">>},
%% Messages dropped due to no subscribers
{counter, 'messages.dropped',
<<"Number of messages dropped before forwarding to the subscription process">>},
%% Messages that failed validations
{counter, 'messages.validation_failed', <<"Number of message validation failed">>},
%% Messages that passed validations
{counter, 'messages.validation_succeeded', <<"Number of message validation successful">>},
%% % Messages that failed transformations
{counter, 'messages.transformation_failed', <<"Number fo message transformation failed">>},
%% % Messages that passed transformations
{counter, 'messages.transformation_succeeded',
<<"Number fo message transformation succeeded">>},
%% QoS2 Messages expired
{counter, 'messages.dropped.await_pubrel_timeout',
<<"Number of messages dropped due to waiting PUBREL timeout">>},
%% Messages dropped
{counter, 'messages.dropped.no_subscribers',
<<"Number of messages dropped due to no subscribers">>},
%% Messages forward
{counter, 'messages.forward', <<"Number of messages forwarded to other nodes">>},
%% Messages delayed
{counter, 'messages.delayed', <<"Number of delay-published messages">>},
%% Messages delivered
{counter, 'messages.delivered',
<<"Number of messages forwarded to the subscription process internally">>},
%% Messages acked
{counter, 'messages.acked', <<"Number of received PUBACK and PUBREC packet">>},
%% Messages persistently stored
{counter, 'messages.persisted', <<"Number of message persisted">>}
]).
%% Delivery metrics
-define(DELIVERY_METRICS, [
%% All Dropped during delivery
{counter, 'delivery.dropped', <<"Total number of discarded messages when sending">>},
%% Dropped due to no_local
{counter, 'delivery.dropped.no_local', <<
"Number of messages that were dropped due to the No Local subscription "
"option when sending"
>>},
%% Dropped due to message too large
{counter, 'delivery.dropped.too_large', <<
"The number of messages that were dropped because the length exceeded "
"the limit when sending"
>>},
%% Dropped qos0 message
{counter, 'delivery.dropped.qos0_msg', <<
"Number of messages with QoS 0 that were dropped because the message "
"queue was full when sending"
>>},
%% Dropped due to queue full
{counter, 'delivery.dropped.queue_full', <<
"Number of messages with a non-zero QoS that were dropped because the "
"message queue was full when sending"
>>},
%% Dropped due to expired
{counter, 'delivery.dropped.expired',
<<"Number of messages dropped due to message expiration on sending">>}
]).
%% Client Lifecircle metrics
-define(CLIENT_METRICS, [
{counter, 'client.connect', <<"Number of client connections">>},
{counter, 'client.connack', <<"Number of CONNACK packet sent">>},
{counter, 'client.connected', <<"Number of successful client connected">>},
{counter, 'client.authenticate', <<"Number of client Authentication">>},
{counter, 'client.auth.anonymous', <<"Number of clients who log in anonymously">>},
{counter, 'client.authorize', <<"Number of Authorization rule checks">>},
{counter, 'client.subscribe', <<"Number of client subscriptions">>},
{counter, 'client.unsubscribe', <<"Number of client unsubscriptions">>},
{counter, 'client.disconnected', <<"Number of client disconnects">>}
]).
%% Session Lifecircle metrics
-define(SESSION_METRICS, [
{counter, 'session.created', <<"Number of sessions created">>},
{counter, 'session.resumed',
<<"Number of sessions resumed because Clean Session or Clean Start is false">>},
{counter, 'session.takenover',
<<"Number of sessions takenover because Clean Session or Clean Start is false">>},
{counter, 'session.discarded',
<<"Number of sessions dropped because Clean Session or Clean Start is true">>},
{counter, 'session.terminated', <<"Number of terminated sessions">>}
]).
%% Statistic metrics for ACL checking
-define(STASTS_ACL_METRICS, [
{counter, 'authorization.allow', <<"Number of Authorization allow">>},
{counter, 'authorization.deny', <<"Number of Authorization deny">>},
{counter, 'authorization.cache_hit', <<"Number of Authorization hits the cache">>},
{counter, 'authorization.cache_miss', <<"Number of Authorization cache missing">>}
]).
%% Statistic metrics for auth checking
-define(STASTS_AUTHN_METRICS, [
{counter, 'authentication.success', <<"Number of successful client Authentication">>},
{counter, 'authentication.success.anonymous',
<<"Number of successful client Authentication due to anonymous">>},
{counter, 'authentication.failure', <<"Number of failed client Authentication">>}
]).
%% Overload protection counters
-define(OLP_METRICS, [
{counter, 'overload_protection.delay.ok', <<"Number of overload protection delayed">>},
{counter, 'overload_protection.delay.timeout',
<<"Number of overload protection delay timeout">>},
{counter, 'overload_protection.hibernation', <<"Number of overload protection hibernation">>},
{counter, 'overload_protection.gc', <<"Number of overload protection garbage collection">>},
{counter, 'overload_protection.new_conn',
<<"Number of overload protection close new incoming connection">>}
]).
-endif.

View File

@ -673,6 +673,7 @@ end).
-define(SHARE, "$share"). -define(SHARE, "$share").
-define(QUEUE, "$queue"). -define(QUEUE, "$queue").
-define(SHARE(Group, Topic), emqx_topic:join([<<?SHARE>>, Group, Topic])).
-define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}). -define(REDISPATCH_TO(GROUP, TOPIC), {GROUP, TOPIC}).
@ -683,7 +684,6 @@ end).
-define(FRAME_PARSE_ERROR, frame_parse_error). -define(FRAME_PARSE_ERROR, frame_parse_error).
-define(FRAME_SERIALIZE_ERROR, frame_serialize_error). -define(FRAME_SERIALIZE_ERROR, frame_serialize_error).
-define(THROW_FRAME_ERROR(Reason), erlang:throw({?FRAME_PARSE_ERROR, Reason})). -define(THROW_FRAME_ERROR(Reason), erlang:throw({?FRAME_PARSE_ERROR, Reason})).
-define(THROW_SERIALIZE_ERROR(Reason), erlang:throw({?FRAME_SERIALIZE_ERROR, Reason})). -define(THROW_SERIALIZE_ERROR(Reason), erlang:throw({?FRAME_SERIALIZE_ERROR, Reason})).

View File

@ -28,19 +28,14 @@
%% cert %% cert
-define(VAR_CERT_SUBJECT, "cert_subject"). -define(VAR_CERT_SUBJECT, "cert_subject").
-define(VAR_CERT_CN_NAME, "cert_common_name"). -define(VAR_CERT_CN_NAME, "cert_common_name").
-define(VAR_CERT_PEM, "cert_pem").
-define(PH_CERT_SUBJECT, ?PH(?VAR_CERT_SUBJECT)). -define(PH_CERT_SUBJECT, ?PH(?VAR_CERT_SUBJECT)).
-define(PH_CERT_CN_NAME, ?PH(?VAR_CERT_CN_NAME)). -define(PH_CERT_CN_NAME, ?PH(?VAR_CERT_CN_NAME)).
-define(PH_CERT_PEM, ?PH(?VAR_CERT_PEM)).
%% MQTT/Gateway %% MQTT
-define(VAR_PASSWORD, "password"). -define(VAR_PASSWORD, "password").
-define(VAR_CLIENTID, "clientid"). -define(VAR_CLIENTID, "clientid").
-define(VAR_USERNAME, "username"). -define(VAR_USERNAME, "username").
-define(VAR_TOPIC, "topic"). -define(VAR_TOPIC, "topic").
-define(VAR_ENDPOINT_NAME, "endpoint_name").
-define(VAR_NS_CLIENT_ATTRS, {var_namespace, "client_attrs"}).
-define(PH_PASSWORD, ?PH(?VAR_PASSWORD)). -define(PH_PASSWORD, ?PH(?VAR_PASSWORD)).
-define(PH_CLIENTID, ?PH(?VAR_CLIENTID)). -define(PH_CLIENTID, ?PH(?VAR_CLIENTID)).
-define(PH_FROM_CLIENTID, ?PH("from_clientid")). -define(PH_FROM_CLIENTID, ?PH("from_clientid")).
@ -94,7 +89,7 @@
-define(PH_NODE, ?PH("node")). -define(PH_NODE, ?PH("node")).
-define(PH_REASON, ?PH("reason")). -define(PH_REASON, ?PH("reason")).
-define(PH_ENDPOINT_NAME, ?PH(?VAR_ENDPOINT_NAME)). -define(PH_ENDPOINT_NAME, ?PH("endpoint_name")).
-define(VAR_RETAIN, "retain"). -define(VAR_RETAIN, "retain").
-define(PH_RETAIN, ?PH(?VAR_RETAIN)). -define(PH_RETAIN, ?PH(?VAR_RETAIN)).

View File

@ -32,7 +32,7 @@
%% `apps/emqx/src/bpapi/README.md' %% `apps/emqx/src/bpapi/README.md'
%% Opensource edition %% Opensource edition
-define(EMQX_RELEASE_CE, "5.8.0-alpha.1"). -define(EMQX_RELEASE_CE, "5.6.0-rc.1").
%% Enterprise edition %% Enterprise edition
-define(EMQX_RELEASE_EE, "5.8.0-alpha.1"). -define(EMQX_RELEASE_EE, "5.6.0-rc.1").

View File

@ -21,9 +21,4 @@
-define(TOMBSTONE_CONFIG_CHANGE_REQ, mark_it_for_deletion). -define(TOMBSTONE_CONFIG_CHANGE_REQ, mark_it_for_deletion).
-define(CONFIG_NOT_FOUND_MAGIC, '$0tFound'). -define(CONFIG_NOT_FOUND_MAGIC, '$0tFound').
%%--------------------------------------------------------------------
%% EE injections
%%--------------------------------------------------------------------
-define(EMQX_SSL_FUN_MFA(Name), {emqx_ssl_fun_mfa, Name}).
-endif. -endif.

View File

@ -20,11 +20,4 @@
-define(IS_SESSION_IMPL_MEM(S), (is_tuple(S) andalso element(1, S) =:= session)). -define(IS_SESSION_IMPL_MEM(S), (is_tuple(S) andalso element(1, S) =:= session)).
-define(IS_SESSION_IMPL_DS(S), (is_map_key(id, S))). -define(IS_SESSION_IMPL_DS(S), (is_map_key(id, S))).
%% (Erlang) messages that a connection process should forward to the
%% session handler.
-record(session_message, {
message :: term()
}).
-define(session_message(MSG), #session_message{message = MSG}).
-endif. -endif.

View File

@ -1,28 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2018-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_SHARED_SUB_HRL).
-define(EMQX_SHARED_SUB_HRL, true).
%% Mnesia table for shared sub message routing
-define(SHARED_SUBSCRIPTION, emqx_shared_subscription).
%% ETS tables for Shared PubSub
-define(SHARED_SUBSCRIBER, emqx_shared_subscriber).
-define(ALIVE_SHARED_SUBSCRIBERS, emqx_alive_shared_subscribers).
-define(SHARED_SUBS_ROUND_ROBIN_COUNTER, emqx_shared_subscriber_round_robin_counter).
-endif.

View File

@ -20,33 +20,17 @@
-record(?TRACE, { -record(?TRACE, {
name :: binary() | undefined | '_', name :: binary() | undefined | '_',
type :: clientid | topic | ip_address | ruleid | undefined | '_', type :: clientid | topic | ip_address | undefined | '_',
filter :: filter ::
emqx_types:topic() emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address() | undefined | '_',
| emqx_types:clientid()
| emqx_trace:ip_address()
| emqx_trace:ruleid()
| undefined
| '_',
enable = true :: boolean() | '_', enable = true :: boolean() | '_',
payload_encode = text :: hex | text | hidden | '_', payload_encode = text :: hex | text | hidden | '_',
extra = #{formatter => text} :: #{formatter => text | json} | '_', extra = #{} :: map() | '_',
start_at :: integer() | undefined | '_', start_at :: integer() | undefined | '_',
end_at :: integer() | undefined | '_' end_at :: integer() | undefined | '_'
}). }).
-record(emqx_trace_format_func_data, {
function :: fun((any()) -> any()),
data :: any()
}).
-define(SHARD, ?COMMON_SHARD). -define(SHARD, ?COMMON_SHARD).
-define(MAX_SIZE, 30). -define(MAX_SIZE, 30).
-define(EMQX_TRACE_STOP_ACTION(REASON),
{unrecoverable_error, {action_stopped_after_template_rendering, REASON}}
).
-define(EMQX_TRACE_STOP_ACTION_MATCH, ?EMQX_TRACE_STOP_ACTION(_)).
-endif. -endif.

View File

@ -86,6 +86,5 @@
{'SOURCE_ERROR', <<"Source error">>}, {'SOURCE_ERROR', <<"Source error">>},
{'UPDATE_FAILED', <<"Update failed">>}, {'UPDATE_FAILED', <<"Update failed">>},
{'REST_FAILED', <<"Reset source or config failed">>}, {'REST_FAILED', <<"Reset source or config failed">>},
{'CLIENT_NOT_RESPONSE', <<"Client not responding">>}, {'CLIENT_NOT_RESPONSE', <<"Client not responding">>}
{'UNSUPPORTED_MEDIA_TYPE', <<"Unsupported media type">>}
]). ]).

View File

@ -30,10 +30,7 @@
logger:log( logger:log(
Level, Level,
(Data), (Data),
maps:merge(Meta, #{ Meta
mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY},
line => ?LINE
})
); );
false -> false ->
ok ok
@ -41,30 +38,17 @@
). ).
%% NOTE: do not forget to use atom for msg and add every used msg to %% NOTE: do not forget to use atom for msg and add every used msg to
%% the default value of `log.throttling.msgs` list. %% the default value of `log.thorttling.msgs` list.
-define(SLOG_THROTTLE(Level, Data), -define(SLOG_THROTTLE(Level, Data),
?SLOG_THROTTLE(Level, Data, #{}) ?SLOG_THROTTLE(Level, Data, #{})
). ).
-define(SLOG_THROTTLE(Level, Data, Meta), -define(SLOG_THROTTLE(Level, Data, Meta),
?SLOG_THROTTLE(Level, undefined, Data, Meta) case emqx_log_throttler:allow(maps:get(msg, Data)) of
).
-define(SLOG_THROTTLE(Level, UniqueKey, Data, Meta),
case logger:allow(Level, ?MODULE) of
true -> true ->
(fun(#{msg := __Msg} = __Data) -> ?SLOG(Level, Data, Meta);
case emqx_log_throttler:allow(__Msg, UniqueKey) of
true ->
logger:log(Level, __Data, Meta);
false ->
?_DO_TRACE(Level, __Msg, maps:merge(__Data, Meta))
end
end)(
Data
);
false -> false ->
ok ?_DO_TRACE(Level, maps:get(msg, Data), maps:merge(Data, Meta))
end end
). ).
@ -91,7 +75,7 @@
?_DO_TRACE(Tag, Msg, Meta), ?_DO_TRACE(Tag, Msg, Meta),
?SLOG( ?SLOG(
Level, Level,
(Meta)#{msg => Msg, tag => Tag}, (emqx_trace_formatter:format_meta_map(Meta))#{msg => Msg, tag => Tag},
#{is_trace => false} #{is_trace => false}
) )
end). end).

View File

@ -25,16 +25,11 @@ all() ->
emqx_common_test_helpers:all(?MODULE). emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) -> init_per_suite(Config) ->
case emqx_ds_test_helpers:skip_if_norepl() of TCApps = emqx_cth_suite:start(
false -> app_specs(),
TCApps = emqx_cth_suite:start( #{work_dir => emqx_cth_suite:work_dir(Config)}
app_specs(), ),
#{work_dir => emqx_cth_suite:work_dir(Config)} [{tc_apps, TCApps} | Config].
),
[{tc_apps, TCApps} | Config];
Yes ->
Yes
end.
end_per_suite(Config) -> end_per_suite(Config) ->
TCApps = ?config(tc_apps, Config), TCApps = ?config(tc_apps, Config),
@ -61,8 +56,8 @@ init_per_testcase(t_session_gc = TestCase, Config) ->
n => 3, n => 3,
roles => [core, core, core], roles => [core, core, core],
extra_emqx_conf => extra_emqx_conf =>
"\n durable_sessions {" "\n session_persistence {"
"\n heartbeat_interval = 500ms " "\n last_alive_update_interval = 500ms "
"\n session_gc_interval = 1s " "\n session_gc_interval = 1s "
"\n session_gc_batch_size = 2 " "\n session_gc_batch_size = 2 "
"\n }" "\n }"
@ -90,11 +85,9 @@ end_per_testcase(TestCase, Config) when
Nodes = ?config(nodes, Config), Nodes = ?config(nodes, Config),
emqx_common_test_helpers:call_janitor(60_000), emqx_common_test_helpers:call_janitor(60_000),
ok = emqx_cth_cluster:stop(Nodes), ok = emqx_cth_cluster:stop(Nodes),
snabbkaffe:stop(),
ok; ok;
end_per_testcase(_TestCase, _Config) -> end_per_testcase(_TestCase, _Config) ->
emqx_common_test_helpers:call_janitor(60_000), emqx_common_test_helpers:call_janitor(60_000),
snabbkaffe:stop(),
ok. ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -123,10 +116,9 @@ app_specs() ->
app_specs(_Opts = #{}). app_specs(_Opts = #{}).
app_specs(Opts) -> app_specs(Opts) ->
DefaultEMQXConf = "durable_sessions {enable = true, renew_streams_interval = 1s}",
ExtraEMQXConf = maps:get(extra_emqx_conf, Opts, ""), ExtraEMQXConf = maps:get(extra_emqx_conf, Opts, ""),
[ [
{emqx, DefaultEMQXConf ++ ExtraEMQXConf} {emqx, "session_persistence = {enable = true}" ++ ExtraEMQXConf}
]. ].
get_mqtt_port(Node, Type) -> get_mqtt_port(Node, Type) ->
@ -140,9 +132,17 @@ wait_nodeup(Node) ->
pong = net_adm:ping(Node) pong = net_adm:ping(Node)
). ).
wait_gen_rpc_down(_NodeSpec = #{apps := Apps}) ->
#{override_env := Env} = proplists:get_value(gen_rpc, Apps),
Port = proplists:get_value(tcp_server_port, Env),
?retry(
_Sleep0 = 500,
_Attempts0 = 50,
false = emqx_common_test_helpers:is_tcp_server_available("127.0.0.1", Port)
).
start_client(Opts0 = #{}) -> start_client(Opts0 = #{}) ->
Defaults = #{ Defaults = #{
port => 1883,
proto_ver => v5, proto_ver => v5,
properties => #{'Session-Expiry-Interval' => 300} properties => #{'Session-Expiry-Interval' => 300}
}, },
@ -163,7 +163,7 @@ mk_clientid(Prefix, ID) ->
restart_node(Node, NodeSpec) -> restart_node(Node, NodeSpec) ->
?tp(will_restart_node, #{}), ?tp(will_restart_node, #{}),
emqx_cth_cluster:restart(NodeSpec), emqx_cth_cluster:restart(Node, NodeSpec),
wait_nodeup(Node), wait_nodeup(Node),
?tp(restarted_node, #{}), ?tp(restarted_node, #{}),
ok. ok.
@ -189,23 +189,6 @@ list_all_subscriptions(Node) ->
list_all_pubranges(Node) -> list_all_pubranges(Node) ->
erpc:call(Node, emqx_persistent_session_ds, list_all_pubranges, []). erpc:call(Node, emqx_persistent_session_ds, list_all_pubranges, []).
session_open(Node, ClientId) ->
ClientInfo = #{},
ConnInfo = #{peername => {undefined, undefined}, proto_name => <<"MQTT">>, proto_ver => 5},
WillMsg = undefined,
erpc:call(
Node,
emqx_persistent_session_ds,
session_open,
[ClientId, ClientInfo, ConnInfo, WillMsg]
).
force_last_alive_at(ClientId, Time) ->
{ok, S0} = emqx_persistent_session_ds_state:open(ClientId),
S = emqx_persistent_session_ds_state:set_last_alive_at(Time, S0),
_ = emqx_persistent_session_ds_state:commit(S),
ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Testcases %% Testcases
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -258,8 +241,11 @@ t_session_subscription_idempotency(Config) ->
ok ok
end, end,
fun(_Trace) -> fun(Trace) ->
Session = session_open(Node1, ClientId), ct:pal("trace:\n ~p", [Trace]),
Session = erpc:call(
Node1, emqx_persistent_session_ds, session_open, [ClientId, _ConnInfo = #{}]
),
?assertMatch( ?assertMatch(
#{SubTopicFilter := #{}}, #{SubTopicFilter := #{}},
emqx_session:info(subscriptions, Session) emqx_session:info(subscriptions, Session)
@ -331,8 +317,11 @@ t_session_unsubscription_idempotency(Config) ->
ok ok
end, end,
fun(_Trace) -> fun(Trace) ->
Session = session_open(Node1, ClientId), ct:pal("trace:\n ~p", [Trace]),
Session = erpc:call(
Node1, emqx_persistent_session_ds, session_open, [ClientId, _ConnInfo = #{}]
),
?assertEqual( ?assertEqual(
#{}, #{},
emqx_session:info(subscriptions, Session) emqx_session:info(subscriptions, Session)
@ -420,7 +409,10 @@ do_t_session_discard(Params) ->
ok ok
end, end,
[] fun(Trace) ->
ct:pal("trace:\n ~p", [Trace]),
ok
end
), ),
ok. ok.
@ -558,7 +550,6 @@ t_session_gc(Config) ->
), ),
%% Clients are still alive; no session is garbage collected. %% Clients are still alive; no session is garbage collected.
?tp(notice, "waiting for gc", #{}),
?assertMatch( ?assertMatch(
{ok, _}, {ok, _},
?block_until( ?block_until(
@ -571,11 +562,9 @@ t_session_gc(Config) ->
), ),
?assertMatch([_, _, _], list_all_sessions(Node1), sessions), ?assertMatch([_, _, _], list_all_sessions(Node1), sessions),
?assertMatch([_, _, _], list_all_subscriptions(Node1), subscriptions), ?assertMatch([_, _, _], list_all_subscriptions(Node1), subscriptions),
?tp(notice, "gc ran", #{}),
%% Now we disconnect 2 of them; only those should be GC'ed. %% Now we disconnect 2 of them; only those should be GC'ed.
?tp(notice, "disconnecting client1", #{}),
?assertMatch( ?assertMatch(
{ok, {ok, _}}, {ok, {ok, _}},
?wait_async_action( ?wait_async_action(
@ -653,7 +642,7 @@ t_session_replay_retry(_Config) ->
Pubs0 = emqx_common_test_helpers:wait_publishes(NClients, 5_000), Pubs0 = emqx_common_test_helpers:wait_publishes(NClients, 5_000),
NPubs = length(Pubs0), NPubs = length(Pubs0),
?assertEqual(NClients, NPubs, ?drainMailbox(1_500)), ?assertEqual(NClients, NPubs, ?drainMailbox()),
ok = emqtt:stop(ClientSub), ok = emqtt:stop(ClientSub),
@ -680,42 +669,3 @@ t_session_replay_retry(_Config) ->
[maps:with([topic, payload, qos], P) || P <- Pubs0], [maps:with([topic, payload, qos], P) || P <- Pubs0],
[maps:with([topic, payload, qos], P) || P <- Pubs1 ++ Pubs2] [maps:with([topic, payload, qos], P) || P <- Pubs1 ++ Pubs2]
). ).
%% Check that we send will messages when performing GC without relying on timers set by
%% the channel process.
t_session_gc_will_message(_Config) ->
?check_trace(
#{timetrap => 10_000},
begin
WillTopic = <<"will/t">>,
ok = emqx:subscribe(WillTopic, #{qos => 2}),
ClientId = <<"will_msg_client">>,
Client = start_client(#{
clientid => ClientId,
will_topic => WillTopic,
will_payload => <<"will payload">>,
will_qos => 0,
will_props => #{'Will-Delay-Interval' => 300}
}),
{ok, _} = emqtt:connect(Client),
%% Use reason code =/= `?RC_SUCCESS' to allow will message
{ok, {ok, _}} =
?wait_async_action(
emqtt:disconnect(Client, ?RC_UNSPECIFIED_ERROR),
#{?snk_kind := emqx_cm_clean_down}
),
?assertNotReceive({deliver, WillTopic, _}),
%% Set fake `last_alive_at' to trigger immediate will message.
force_last_alive_at(ClientId, _Time = 0),
{ok, {ok, _}} =
?wait_async_action(
emqx_persistent_session_ds_gc_worker:check_session(ClientId),
#{?snk_kind := session_gc_published_will_msg}
),
?assertReceive({deliver, WillTopic, _}),
ok
end,
[]
),
ok.

View File

@ -1,74 +0,0 @@
# Mix project definition for the `:emqx` core application inside the EMQX
# umbrella. Shared compiler options, common dependency declarations and
# build paths are delegated to `EMQXUmbrella.MixProject` (aliased `UMP`);
# build artifacts, deps and the lockfile live at the umbrella root (../../).
defmodule EMQX.MixProject do
  use Mix.Project

  alias EMQXUmbrella.MixProject, as: UMP

  # Project configuration consumed by Mix.
  def project do
    [
      app: :emqx,
      version: "0.1.0",
      build_path: "../../_build",
      erlc_paths: erlc_paths(),
      erlc_options: [
        # Let `-include("...").` resolve headers located under src/.
        {:i, "src"}
        | UMP.erlc_options()
      ],
      # `:copy_srcs` runs after the standard compilers; it copies the
      # directories returned by extra_dirs/0 into the build output.
      compilers: Mix.compilers() ++ [:copy_srcs],
      # used by our `Mix.Tasks.Compile.CopySrcs` compiler
      extra_dirs: extra_dirs(),
      deps_path: "../../deps",
      lockfile: "../../mix.lock",
      elixir: "~> 1.14",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Run "mix help compile.app" to learn about applications
  def application do
    [
      ## FIXME!!! go though emqx.app.src and add missing stuff...
      extra_applications: [:public_key, :ssl, :os_mon, :logger, :mnesia, :sasl] ++ UMP.extra_applications(),
      mod: {:emqx_app, []}
    ]
  end

  # Dependencies: in-umbrella apps plus common third-party deps declared
  # centrally via `UMP.common_dep/1`; `UMP.quicer_dep/0` is appended
  # conditionally (QUIC support is platform/flag dependent).
  def deps() do
    ## FIXME!!! go though emqx.app.src and add missing stuff...
    [
      {:emqx_mix_utils, in_umbrella: true, runtime: false},
      {:emqx_utils, in_umbrella: true},
      {:emqx_ds_backends, in_umbrella: true},
      UMP.common_dep(:gproc),
      UMP.common_dep(:gen_rpc),
      UMP.common_dep(:ekka),
      UMP.common_dep(:esockd),
      UMP.common_dep(:cowboy),
      UMP.common_dep(:lc),
      UMP.common_dep(:hocon),
      UMP.common_dep(:ranch),
      UMP.common_dep(:bcrypt),
      UMP.common_dep(:pbkdf2),
      UMP.common_dep(:emqx_http_lib),
    ] ++ UMP.quicer_dep()
  end

  # Erlang compile paths; adds `integration_test` when building for tests.
  defp erlc_paths() do
    paths = UMP.erlc_paths()

    if UMP.test_env?() do
      ["integration_test" | paths]
    else
      paths
    end
  end

  # Directories copied by the CopySrcs compiler; test dirs only in test env.
  defp extra_dirs() do
    dirs = ["src", "etc"]

    if UMP.test_env?() do
      ["test", "integration_test" | dirs]
    else
      dirs
    end
  end
end

View File

@ -10,14 +10,11 @@
{emqx_bridge,5}. {emqx_bridge,5}.
{emqx_bridge,6}. {emqx_bridge,6}.
{emqx_broker,1}. {emqx_broker,1}.
{emqx_cluster_link,1}.
{emqx_cm,1}. {emqx_cm,1}.
{emqx_cm,2}. {emqx_cm,2}.
{emqx_cm,3}.
{emqx_conf,1}. {emqx_conf,1}.
{emqx_conf,2}. {emqx_conf,2}.
{emqx_conf,3}. {emqx_conf,3}.
{emqx_conf,4}.
{emqx_connector,1}. {emqx_connector,1}.
{emqx_dashboard,1}. {emqx_dashboard,1}.
{emqx_delayed,1}. {emqx_delayed,1}.
@ -27,10 +24,8 @@
{emqx_ds,2}. {emqx_ds,2}.
{emqx_ds,3}. {emqx_ds,3}.
{emqx_ds,4}. {emqx_ds,4}.
{emqx_ds_shared_sub,1}.
{emqx_eviction_agent,1}. {emqx_eviction_agent,1}.
{emqx_eviction_agent,2}. {emqx_eviction_agent,2}.
{emqx_eviction_agent,3}.
{emqx_exhook,1}. {emqx_exhook,1}.
{emqx_ft_storage_exporter_fs,1}. {emqx_ft_storage_exporter_fs,1}.
{emqx_ft_storage_fs,1}. {emqx_ft_storage_fs,1}.
@ -46,11 +41,8 @@
{emqx_management,4}. {emqx_management,4}.
{emqx_management,5}. {emqx_management,5}.
{emqx_metrics,1}. {emqx_metrics,1}.
{emqx_metrics,2}.
{emqx_mgmt_api_plugins,1}. {emqx_mgmt_api_plugins,1}.
{emqx_mgmt_api_plugins,2}. {emqx_mgmt_api_plugins,2}.
{emqx_mgmt_api_plugins,3}.
{emqx_mgmt_api_relup,1}.
{emqx_mgmt_cluster,1}. {emqx_mgmt_cluster,1}.
{emqx_mgmt_cluster,2}. {emqx_mgmt_cluster,2}.
{emqx_mgmt_cluster,3}. {emqx_mgmt_cluster,3}.
@ -63,18 +55,17 @@
{emqx_node_rebalance_api,1}. {emqx_node_rebalance_api,1}.
{emqx_node_rebalance_api,2}. {emqx_node_rebalance_api,2}.
{emqx_node_rebalance_evacuation,1}. {emqx_node_rebalance_evacuation,1}.
{emqx_node_rebalance_purge,1}.
{emqx_node_rebalance_status,1}. {emqx_node_rebalance_status,1}.
{emqx_node_rebalance_status,2}. {emqx_node_rebalance_status,2}.
{emqx_persistent_session_ds,1}. {emqx_persistent_session_ds,1}.
{emqx_plugins,1}. {emqx_plugins,1}.
{emqx_plugins,2}.
{emqx_prometheus,1}. {emqx_prometheus,1}.
{emqx_prometheus,2}. {emqx_prometheus,2}.
{emqx_resource,1}. {emqx_resource,1}.
{emqx_resource,2}. {emqx_resource,2}.
{emqx_retainer,1}. {emqx_retainer,1}.
{emqx_retainer,2}. {emqx_retainer,2}.
{emqx_router,1}.
{emqx_rule_engine,1}. {emqx_rule_engine,1}.
{emqx_shared_sub,1}. {emqx_shared_sub,1}.
{emqx_slow_subs,1}. {emqx_slow_subs,1}.

View File

@ -24,18 +24,17 @@
{deps, [ {deps, [
{emqx_utils, {path, "../emqx_utils"}}, {emqx_utils, {path, "../emqx_utils"}},
{emqx_durable_storage, {path, "../emqx_durable_storage"}}, {emqx_durable_storage, {path, "../emqx_durable_storage"}},
{emqx_ds_backends, {path, "../emqx_ds_backends"}},
{lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}}, {lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}},
{gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}}, {gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}},
{cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}}, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}},
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.12.0"}}}, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.11.1"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.5"}}}, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.0"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.43.2"}}}, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.42.1"}}},
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},
{pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
{recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.10"}}} {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.8"}}}
]}. ]}.
{plugins, [{rebar3_proper, "0.12.1"}, rebar3_path_deps]}. {plugins, [{rebar3_proper, "0.12.1"}, rebar3_path_deps]}.

View File

@ -24,8 +24,7 @@ IsQuicSupp = fun() ->
end, end,
Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.0"}}}, Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.0"}}},
Quicer = Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.313"}}}.
{quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.500"}}}.
Dialyzer = fun(Config) -> Dialyzer = fun(Config) ->
{dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config), {dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config),

View File

@ -16,14 +16,9 @@
-module(emqx_config_backup). -module(emqx_config_backup).
-type ok_result() :: #{
root_key => emqx_utils_maps:config_key(),
changed => [emqx_utils_maps:config_key_path()]
}.
-type error_result() :: #{root_key => emqx_utils_maps:config_key(), reason => term()}.
-callback import_config(RawConf :: map()) -> -callback import_config(RawConf :: map()) ->
{ok, ok_result()} {ok, #{
| {error, error_result()} root_key => emqx_utils_maps:config_key(),
| {results, {[ok_result()], [error_result()]}}. changed => [emqx_utils_maps:config_key_path()]
}}
| {error, #{root_key => emqx_utils_maps:config_key(), reason => term()}}.

View File

@ -18,8 +18,6 @@
-type traverse_break_reason() :: over | migrate. -type traverse_break_reason() :: over | migrate.
-type opts() :: #{print_fun => fun((io:format(), [term()]) -> ok)}.
-callback backup_tables() -> [mria:table()]. -callback backup_tables() -> [mria:table()].
%% validate the backup %% validate the backup
@ -33,9 +31,6 @@
-callback migrate_mnesia_backup(tuple()) -> {ok, tuple()} | {error, term()}. -callback migrate_mnesia_backup(tuple()) -> {ok, tuple()} | {error, term()}.
%% NOTE: currently, this is called only when the table has been restored successfully. -optional_callbacks([validate_mnesia_backup/1, migrate_mnesia_backup/1]).
-callback on_backup_table_imported(mria:table(), opts()) -> ok | {error, term()}.
-optional_callbacks([validate_mnesia_backup/1, migrate_mnesia_backup/1, on_backup_table_imported/2]).
-export_type([traverse_break_reason/0]). -export_type([traverse_break_reason/0]).

View File

@ -86,35 +86,8 @@ supported_version(API) ->
-spec announce(node(), atom()) -> ok. -spec announce(node(), atom()) -> ok.
announce(Node, App) -> announce(Node, App) ->
{ok, Data} = file:consult(?MODULE:versions_file(App)), {ok, Data} = file:consult(?MODULE:versions_file(App)),
%% replicant(5.6.0) will call old core(<5.6.0) announce_fun/2 is undef on old core {atomic, ok} = mria:transaction(?COMMON_SHARD, fun ?MODULE:announce_fun/2, [Node, Data]),
%% so we just use anonymous function to update. ok.
case mria:transaction(?COMMON_SHARD, fun ?MODULE:announce_fun/2, [Node, Data]) of
{atomic, ok} ->
ok;
{aborted, {undef, [{?MODULE, announce_fun, _, _} | _]}} ->
{atomic, ok} = mria:transaction(
?COMMON_SHARD,
fun() ->
MS = ets:fun2ms(fun(#?TAB{key = {N, API}}) when N =:= Node ->
{N, API}
end),
OldKeys = mnesia:select(?TAB, MS, write),
_ = [
mnesia:delete({?TAB, Key})
|| Key <- OldKeys
],
%% Insert new records:
_ = [
mnesia:write(#?TAB{key = {Node, API}, version = Version})
|| {API, Version} <- Data
],
%% Update maximum supported version:
_ = [update_minimum(API) || {API, _} <- Data],
ok
end
),
ok
end.
-spec versions_file(atom()) -> file:filename_all(). -spec versions_file(atom()) -> file:filename_all().
versions_file(App) -> versions_file(App) ->

View File

@ -237,38 +237,25 @@ log_formatter(HandlerName, Conf) ->
_ -> _ ->
conf_get("formatter", Conf) conf_get("formatter", Conf)
end, end,
TsFormat = timestamp_format(Conf),
WithMfa = conf_get("with_mfa", Conf),
PayloadEncode = conf_get("payload_encode", Conf, text),
do_formatter( do_formatter(
Format, CharsLimit, SingleLine, TimeOffSet, Depth, TsFormat, WithMfa, PayloadEncode Format, CharsLimit, SingleLine, TimeOffSet, Depth
). ).
%% auto | epoch | rfc3339
timestamp_format(Conf) ->
conf_get("timestamp_format", Conf).
%% helpers %% helpers
do_formatter(json, CharsLimit, SingleLine, TimeOffSet, Depth, TsFormat, WithMfa, PayloadEncode) -> do_formatter(json, CharsLimit, SingleLine, TimeOffSet, Depth) ->
{emqx_logger_jsonfmt, #{ {emqx_logger_jsonfmt, #{
chars_limit => CharsLimit, chars_limit => CharsLimit,
single_line => SingleLine, single_line => SingleLine,
time_offset => TimeOffSet, time_offset => TimeOffSet,
depth => Depth, depth => Depth
timestamp_format => TsFormat,
with_mfa => WithMfa,
payload_encode => PayloadEncode
}}; }};
do_formatter(text, CharsLimit, SingleLine, TimeOffSet, Depth, TsFormat, WithMfa, PayloadEncode) -> do_formatter(text, CharsLimit, SingleLine, TimeOffSet, Depth) ->
{emqx_logger_textfmt, #{ {emqx_logger_textfmt, #{
template => ["[", level, "] ", msg, "\n"], template => [time, " [", level, "] ", msg, "\n"],
chars_limit => CharsLimit, chars_limit => CharsLimit,
single_line => SingleLine, single_line => SingleLine,
time_offset => TimeOffSet, time_offset => TimeOffSet,
depth => Depth, depth => Depth
timestamp_format => TsFormat,
with_mfa => WithMfa,
payload_encode => PayloadEncode
}}. }}.
%% Don't record all logger message %% Don't record all logger message

View File

@ -20,7 +20,6 @@
%% API %% API
-export([add_handler/0, remove_handler/0, pre_config_update/3]). -export([add_handler/0, remove_handler/0, pre_config_update/3]).
-export([is_olp_enabled/0]). -export([is_olp_enabled/0]).
-export([assert_zone_exists/1]).
-define(ZONES, [zones]). -define(ZONES, [zones]).
@ -45,26 +44,3 @@ is_olp_enabled() ->
false, false,
emqx_config:get([zones], #{}) emqx_config:get([zones], #{})
). ).
-spec assert_zone_exists(binary() | atom()) -> ok.
assert_zone_exists(Name0) when is_binary(Name0) ->
%% an existing zone must have already an atom-name
Name =
try
binary_to_existing_atom(Name0)
catch
_:_ ->
throw({unknown_zone, Name0})
end,
assert_zone_exists(Name);
assert_zone_exists(default) ->
%% there is always a 'default' zone
ok;
assert_zone_exists(Name) when is_atom(Name) ->
try
_ = emqx_config:get([zones, Name]),
ok
catch
error:{config_not_found, _} ->
throw({unknown_zone, Name})
end.

View File

@ -2,7 +2,7 @@
{application, emqx, [ {application, emqx, [
{id, "emqx"}, {id, "emqx"},
{description, "EMQX Core"}, {description, "EMQX Core"},
{vsn, "5.3.4"}, {vsn, "5.2.0"},
{modules, []}, {modules, []},
{registered, []}, {registered, []},
{applications, [ {applications, [
@ -18,7 +18,7 @@
sasl, sasl,
lc, lc,
hocon, hocon,
emqx_ds_backends, emqx_durable_storage,
bcrypt, bcrypt,
pbkdf2, pbkdf2,
emqx_http_lib, emqx_http_lib,

View File

@ -61,12 +61,9 @@
get_raw_config/2, get_raw_config/2,
update_config/2, update_config/2,
update_config/3, update_config/3,
update_config/4,
remove_config/1, remove_config/1,
remove_config/2, remove_config/2,
remove_config/3,
reset_config/2, reset_config/2,
reset_config/3,
data_dir/0, data_dir/0,
etc_file/1, etc_file/1,
cert_file/1, cert_file/1,
@ -198,7 +195,7 @@ get_raw_config(KeyPath, Default) ->
-spec update_config(emqx_utils_maps:config_key_path(), emqx_config:update_request()) -> -spec update_config(emqx_utils_maps:config_key_path(), emqx_config:update_request()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(KeyPath, UpdateReq) -> update_config(KeyPath, UpdateReq) ->
update_config(KeyPath, UpdateReq, #{}, #{}). update_config(KeyPath, UpdateReq, #{}).
-spec update_config( -spec update_config(
emqx_utils_maps:config_key_path(), emqx_utils_maps:config_key_path(),
@ -206,56 +203,30 @@ update_config(KeyPath, UpdateReq) ->
emqx_config:update_opts() emqx_config:update_opts()
) -> ) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(KeyPath, UpdateReq, Opts) -> update_config([RootName | _] = KeyPath, UpdateReq, Opts) ->
update_config(KeyPath, UpdateReq, Opts, #{}).
-spec update_config(
emqx_utils_maps:config_key_path(),
emqx_config:update_request(),
emqx_config:update_opts(),
emqx_config:cluster_rpc_opts()
) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config([RootName | _] = KeyPath, UpdateReq, Opts, ClusterRpcOpts) ->
emqx_config_handler:update_config( emqx_config_handler:update_config(
emqx_config:get_schema_mod(RootName), emqx_config:get_schema_mod(RootName),
KeyPath, KeyPath,
{{update, UpdateReq}, Opts}, {{update, UpdateReq}, Opts}
ClusterRpcOpts
). ).
-spec remove_config(emqx_utils_maps:config_key_path()) -> -spec remove_config(emqx_utils_maps:config_key_path()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
remove_config(KeyPath) -> remove_config(KeyPath) ->
remove_config(KeyPath, #{}, #{}). remove_config(KeyPath, #{}).
-spec remove_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) -> -spec remove_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
remove_config([_RootName | _] = KeyPath, Opts) -> remove_config([RootName | _] = KeyPath, Opts) ->
remove_config(KeyPath, Opts, #{}).
-spec remove_config(
emqx_utils_maps:config_key_path(), emqx_config:update_opts(), emqx_config:cluster_rpc_opts()
) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
remove_config([RootName | _] = KeyPath, Opts, ClusterRpcOpts) ->
emqx_config_handler:update_config( emqx_config_handler:update_config(
emqx_config:get_schema_mod(RootName), emqx_config:get_schema_mod(RootName),
KeyPath, KeyPath,
{remove, Opts}, {remove, Opts}
ClusterRpcOpts
). ).
-spec reset_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) -> -spec reset_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
reset_config([RootName | SubKeys] = KeyPath, Opts) -> reset_config([RootName | SubKeys] = KeyPath, Opts) ->
reset_config([RootName | SubKeys] = KeyPath, Opts, #{}).
-spec reset_config(
emqx_utils_maps:config_key_path(), emqx_config:update_opts(), emqx_config:cluster_rpc_opts()
) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
reset_config([RootName | SubKeys] = KeyPath, Opts, ClusterRpcOpts) ->
case emqx_config:get_default_value(KeyPath) of case emqx_config:get_default_value(KeyPath) of
{ok, Default} -> {ok, Default} ->
Mod = emqx_config:get_schema_mod(RootName), Mod = emqx_config:get_schema_mod(RootName),
@ -264,8 +235,7 @@ reset_config([RootName | SubKeys] = KeyPath, Opts, ClusterRpcOpts) ->
emqx_config_handler:update_config( emqx_config_handler:update_config(
Mod, Mod,
KeyPath, KeyPath,
{{update, Default}, Opts}, {{update, Default}, Opts}
ClusterRpcOpts
); );
false -> false ->
NewConf = NewConf =
@ -277,8 +247,7 @@ reset_config([RootName | SubKeys] = KeyPath, Opts, ClusterRpcOpts) ->
emqx_config_handler:update_config( emqx_config_handler:update_config(
Mod, Mod,
[RootName], [RootName],
{{update, NewConf}, Opts}, {{update, NewConf}, Opts}
ClusterRpcOpts
) )
end; end;
{error, _} = Error -> {error, _} = Error ->

View File

@ -56,31 +56,31 @@ authenticate(Credential) ->
NotSuperUser = #{is_superuser => false}, NotSuperUser = #{is_superuser => false},
case pre_hook_authenticate(Credential) of case pre_hook_authenticate(Credential) of
ok -> ok ->
on_authentication_complete(Credential, NotSuperUser, anonymous), inc_authn_metrics(anonymous),
{ok, NotSuperUser}; {ok, NotSuperUser};
continue -> continue ->
case run_hooks('client.authenticate', [Credential], ignore) of case run_hooks('client.authenticate', [Credential], ignore) of
ignore -> ignore ->
on_authentication_complete(Credential, NotSuperUser, anonymous), inc_authn_metrics(anonymous),
{ok, NotSuperUser}; {ok, NotSuperUser};
ok -> ok ->
on_authentication_complete(Credential, NotSuperUser, ok), inc_authn_metrics(ok),
{ok, NotSuperUser}; {ok, NotSuperUser};
{ok, AuthResult} = OkResult -> {ok, _AuthResult} = OkResult ->
on_authentication_complete(Credential, AuthResult, ok), inc_authn_metrics(ok),
OkResult; OkResult;
{ok, AuthResult, _AuthData} = OkResult -> {ok, _AuthResult, _AuthData} = OkResult ->
on_authentication_complete(Credential, AuthResult, ok), inc_authn_metrics(ok),
OkResult; OkResult;
{error, Reason} = Error -> {error, _Reason} = Error ->
on_authentication_complete(Credential, Reason, error), inc_authn_metrics(error),
Error; Error;
%% {continue, AuthCache} | {continue, AuthData, AuthCache} %% {continue, AuthCache} | {continue, AuthData, AuthCache}
Other -> Other ->
Other Other
end; end;
{error, Reason} = Error -> {error, _Reason} = Error ->
on_authentication_complete(Credential, Reason, error), inc_authn_metrics(error),
Error Error
end. end.
@ -154,7 +154,7 @@ do_authorize(ClientInfo, Action, Topic) ->
case run_hooks('client.authorize', [ClientInfo, Action, Topic], Default) of case run_hooks('client.authorize', [ClientInfo, Action, Topic], Default) of
AuthzResult = #{result := Result} when Result == allow; Result == deny -> AuthzResult = #{result := Result} when Result == allow; Result == deny ->
From = maps:get(from, AuthzResult, unknown), From = maps:get(from, AuthzResult, unknown),
ok = log_result(Topic, Action, From, Result), ok = log_result(ClientInfo, Topic, Action, From, Result),
emqx_hooks:run( emqx_hooks:run(
'client.check_authz_complete', 'client.check_authz_complete',
[ClientInfo, Action, Topic, Result, From] [ClientInfo, Action, Topic, Result, From]
@ -173,28 +173,24 @@ do_authorize(ClientInfo, Action, Topic) ->
deny deny
end. end.
log_result(Topic, Action, From, Result) -> log_result(#{username := Username}, Topic, Action, From, Result) ->
LogMeta = fun() -> LogMeta = fun() ->
#{ #{
username => Username,
topic => Topic, topic => Topic,
action => format_action(Action), action => format_action(Action),
source => format_from(From) source => format_from(From)
} }
end, end,
do_log_result(Action, Result, LogMeta). case Result of
allow ->
do_log_result(_Action, allow, LogMeta) -> ?SLOG(info, (LogMeta())#{msg => "authorization_permission_allowed"});
?SLOG(info, (LogMeta())#{msg => "authorization_permission_allowed"}, #{tag => "AUTHZ"}); deny ->
do_log_result(?AUTHZ_PUBLISH_MATCH_MAP(_, _), deny, LogMeta) -> ?SLOG_THROTTLE(
%% for publish action, we do not log permission deny at warning level here warning,
%% because it will be logged as cannot_publish_to_topic_due_to_not_authorized (LogMeta())#{msg => authorization_permission_denied}
?SLOG(info, (LogMeta())#{msg => "authorization_permission_denied"}, #{tag => "AUTHZ"}); )
do_log_result(_, deny, LogMeta) -> end.
?SLOG_THROTTLE(
warning,
(LogMeta())#{msg => authorization_permission_denied},
#{tag => "AUTHZ"}
).
%% @private Format authorization rules source. %% @private Format authorization rules source.
format_from(default) -> format_from(default) ->
@ -238,30 +234,5 @@ inc_authn_metrics(error) ->
inc_authn_metrics(ok) -> inc_authn_metrics(ok) ->
emqx_metrics:inc('authentication.success'); emqx_metrics:inc('authentication.success');
inc_authn_metrics(anonymous) -> inc_authn_metrics(anonymous) ->
emqx_metrics:inc('client.auth.anonymous'),
emqx_metrics:inc('authentication.success.anonymous'), emqx_metrics:inc('authentication.success.anonymous'),
emqx_metrics:inc('authentication.success'). emqx_metrics:inc('authentication.success').
on_authentication_complete(Credential, Reason, error) ->
emqx_hooks:run(
'client.check_authn_complete',
[
Credential,
#{
reason_code => Reason
}
]
),
inc_authn_metrics(error);
on_authentication_complete(Credential, Result, Type) ->
emqx_hooks:run(
'client.check_authn_complete',
[
Credential,
Result#{
reason_code => success,
is_anonymous => (Type =:= anonymous)
}
]
),
inc_authn_metrics(Type).

View File

@ -16,8 +16,6 @@
-module(emqx_banned). -module(emqx_banned).
-feature(maybe_expr, enable).
-behaviour(gen_server). -behaviour(gen_server).
-behaviour(emqx_db_backup). -behaviour(emqx_db_backup).
@ -33,7 +31,6 @@
-export([ -export([
check/1, check/1,
check_clientid/1,
create/1, create/1,
look_up/1, look_up/1,
delete/1, delete/1,
@ -51,7 +48,6 @@
handle_call/3, handle_call/3,
handle_cast/2, handle_cast/2,
handle_info/2, handle_info/2,
handle_continue/2,
terminate/2, terminate/2,
code_change/3 code_change/3
]). ]).
@ -118,10 +114,6 @@ check(ClientInfo) ->
do_check({peerhost, maps:get(peerhost, ClientInfo, undefined)}) orelse do_check({peerhost, maps:get(peerhost, ClientInfo, undefined)}) orelse
do_check_rules(ClientInfo). do_check_rules(ClientInfo).
-spec check_clientid(emqx_types:clientid()) -> boolean().
check_clientid(ClientId) ->
do_check({clientid, ClientId}) orelse do_check_rules(#{clientid => ClientId}).
-spec format(emqx_types:banned()) -> map(). -spec format(emqx_types:banned()) -> map().
format(#banned{ format(#banned{
who = Who0, who = Who0,
@ -140,7 +132,7 @@ format(#banned{
until => to_rfc3339(Until) until => to_rfc3339(Until)
}. }.
-spec parse(map()) -> {ok, emqx_types:banned()} | {error, term()}. -spec parse(map()) -> emqx_types:banned() | {error, term()}.
parse(Params) -> parse(Params) ->
case parse_who(Params) of case parse_who(Params) of
{error, Reason} -> {error, Reason} ->
@ -152,13 +144,13 @@ parse(Params) ->
Until = maps:get(<<"until">>, Params, At + ?EXPIRATION_TIME), Until = maps:get(<<"until">>, Params, At + ?EXPIRATION_TIME),
case Until > erlang:system_time(second) of case Until > erlang:system_time(second) of
true -> true ->
{ok, #banned{ #banned{
who = Who, who = Who,
by = By, by = By,
reason = Reason, reason = Reason,
at = At, at = At,
until = Until until = Until
}}; };
false -> false ->
ErrorReason = ErrorReason =
io_lib:format("Cannot create expired banned, ~p to ~p", [At, Until]), io_lib:format("Cannot create expired banned, ~p to ~p", [At, Until]),
@ -242,139 +234,12 @@ who(peerhost_net, CIDR) when is_tuple(CIDR) -> {peerhost_net, CIDR};
who(peerhost_net, CIDR) when is_binary(CIDR) -> who(peerhost_net, CIDR) when is_binary(CIDR) ->
{peerhost_net, esockd_cidr:parse(binary_to_list(CIDR), true)}. {peerhost_net, esockd_cidr:parse(binary_to_list(CIDR), true)}.
%%--------------------------------------------------------------------
%% Import From CSV
%%--------------------------------------------------------------------
init_from_csv(undefined) ->
ok;
init_from_csv(File) ->
maybe
core ?= mria_rlog:role(),
'$end_of_table' ?= mnesia:dirty_first(?BANNED_RULE_TAB),
'$end_of_table' ?= mnesia:dirty_first(?BANNED_INDIVIDUAL_TAB),
{ok, Bin} ?= file:read_file(File),
Stream = emqx_utils_stream:csv(Bin, #{nullable => true, filter_null => true}),
{ok, List} ?= parse_stream(Stream),
import_from_stream(List),
?SLOG(info, #{
msg => "load_banned_bootstrap_file_succeeded",
file => File
})
else
replicant ->
ok;
{Name, _} when
Name == peerhost;
Name == peerhost_net;
Name == clientid_re;
Name == username_re;
Name == clientid;
Name == username
->
ok;
{error, Reason} = Error ->
?SLOG(error, #{
msg => "load_banned_bootstrap_file_failed",
reason => Reason,
file => File
}),
Error
end.
import_from_stream(Stream) ->
Groups = maps:groups_from_list(
fun(#banned{who = Who}) -> table(Who) end, Stream
),
maps:foreach(
fun(Tab, Items) ->
Trans = fun() ->
lists:foreach(
fun(Item) ->
mnesia:write(Tab, Item, write)
end,
Items
)
end,
case trans(Trans) of
{ok, _} ->
?SLOG(info, #{
msg => "import_banned_from_stream_succeeded",
items => Items
});
{error, Reason} ->
?SLOG(error, #{
msg => "import_banned_from_stream_failed",
reason => Reason,
items => Items
})
end
end,
Groups
).
parse_stream(Stream) ->
try
List = emqx_utils_stream:consume(Stream),
parse_stream(List, [], [])
catch
error:Reason ->
{error, Reason}
end.
parse_stream([Item | List], Ok, Error) ->
maybe
{ok, Item1} ?= normalize_parse_item(Item),
{ok, Banned} ?= parse(Item1),
parse_stream(List, [Banned | Ok], Error)
else
{error, _} ->
parse_stream(List, Ok, [Item | Error])
end;
parse_stream([], Ok, []) ->
{ok, Ok};
parse_stream([], Ok, Error) ->
?SLOG(warning, #{
msg => "invalid_banned_items",
items => Error
}),
{ok, Ok}.
normalize_parse_item(#{<<"as">> := As} = Item) ->
ParseTime = fun(Name, Input) ->
maybe
#{Name := Time} ?= Input,
{ok, Epoch} ?= emqx_utils_calendar:to_epoch_second(emqx_utils_conv:str(Time)),
{ok, Input#{Name := Epoch}}
else
{error, _} = Error ->
Error;
NoTime when is_map(NoTime) ->
{ok, NoTime}
end
end,
maybe
{ok, Type} ?= emqx_utils:safe_to_existing_atom(As),
{ok, Item1} ?= ParseTime(<<"at">>, Item#{<<"as">> := Type}),
ParseTime(<<"until">>, Item1)
end;
normalize_parse_item(_Item) ->
{error, invalid_item}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% gen_server callbacks %% gen_server callbacks
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init([]) -> init([]) ->
{ok, ensure_expiry_timer(#{expiry_timer => undefined}), {continue, init_from_csv}}. {ok, ensure_expiry_timer(#{expiry_timer => undefined})}.
handle_continue(init_from_csv, State) ->
File = emqx_schema:naive_env_interpolation(
emqx:get_config([banned, bootstrap_file], undefined)
),
_ = init_from_csv(File),
{noreply, State}.
handle_call(Req, _From, State) -> handle_call(Req, _From, State) ->
?SLOG(error, #{msg => "unexpected_call", call => Req}), ?SLOG(error, #{msg => "unexpected_call", call => Req}),
@ -385,7 +250,7 @@ handle_cast(Msg, State) ->
{noreply, State}. {noreply, State}.
handle_info({timeout, TRef, expire}, State = #{expiry_timer := TRef}) -> handle_info({timeout, TRef, expire}, State = #{expiry_timer := TRef}) ->
_ = trans(fun ?MODULE:expire_banned_items/1, [ _ = mria:transaction(?COMMON_SHARD, fun ?MODULE:expire_banned_items/1, [
erlang:system_time(second) erlang:system_time(second)
]), ]),
{noreply, ensure_expiry_timer(State), hibernate}; {noreply, ensure_expiry_timer(State), hibernate};
@ -526,15 +391,3 @@ on_banned(_) ->
all_rules() -> all_rules() ->
ets:tab2list(?BANNED_RULE_TAB). ets:tab2list(?BANNED_RULE_TAB).
trans(Fun) ->
case mria:transaction(?COMMON_SHARD, Fun) of
{atomic, Res} -> {ok, Res};
{aborted, Reason} -> {error, Reason}
end.
trans(Fun, Args) ->
case mria:transaction(?COMMON_SHARD, Fun, Args) of
{atomic, Res} -> {ok, Res};
{aborted, Reason} -> {error, Reason}
end.

View File

@ -60,6 +60,9 @@
-export([topics/0]). -export([topics/0]).
%% Stats fun
-export([stats_fun/0]).
%% gen_server callbacks %% gen_server callbacks
-export([ -export([
init/1, init/1,
@ -232,46 +235,23 @@ publish(Msg) when is_record(Msg, message) ->
_ = emqx_trace:publish(Msg), _ = emqx_trace:publish(Msg),
emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'), emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'),
case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of
#message{headers = #{should_disconnect := true}, topic = Topic} ->
?TRACE("MQTT", "msg_publish_not_allowed_disconnect", #{
message => emqx_message:to_log_map(Msg),
topic => Topic
}),
disconnect;
#message{headers = #{allow_publish := false}, topic = Topic} -> #message{headers = #{allow_publish := false}, topic = Topic} ->
?TRACE("MQTT", "msg_publish_not_allowed", #{ ?TRACE("MQTT", "msg_publish_not_allowed", #{
message => emqx_message:to_log_map(Msg), message => emqx_message:to_log_map(Msg),
topic => Topic topic => Topic
}), }),
[]; [];
Msg1 = #message{} -> Msg1 = #message{topic = Topic} ->
do_publish(Msg1); PersistRes = persist_publish(Msg1),
Msgs when is_list(Msgs) -> route(aggre(emqx_router:match_routes(Topic)), delivery(Msg1), PersistRes)
do_publish_many(Msgs)
end. end.
do_publish_many([]) ->
[];
do_publish_many([Msg | T]) ->
do_publish(Msg) ++ do_publish_many(T).
do_publish(#message{topic = Topic} = Msg) ->
PersistRes = persist_publish(Msg),
Routes = aggre(emqx_router:match_routes(Topic)),
Delivery = delivery(Msg),
RouteRes = route(Routes, Delivery, PersistRes),
do_forward_external(Delivery, RouteRes).
persist_publish(Msg) -> persist_publish(Msg) ->
case emqx_persistent_message:persist(Msg) of case emqx_persistent_message:persist(Msg) of
ok -> ok ->
[persisted]; [persisted];
{skipped, _} -> {_SkipOrError, _Reason} ->
[]; % TODO: log errors?
{error, Recoverable, Reason} ->
?SLOG(debug, #{
msg => "failed_to_persist_message", is_recoverable => Recoverable, reason => Reason
}),
[] []
end. end.
@ -345,9 +325,6 @@ aggre([], false, Acc) ->
aggre([], true, Acc) -> aggre([], true, Acc) ->
lists:usort(Acc). lists:usort(Acc).
do_forward_external(Delivery, RouteRes) ->
emqx_external_broker:forward(Delivery) ++ RouteRes.
%% @doc Forward message to another node. %% @doc Forward message to another node.
-spec forward( -spec forward(
node(), emqx_types:topic() | emqx_types:share(), emqx_types:delivery(), RpcMode :: sync | async node(), emqx_types:topic() | emqx_types:share(), emqx_types:delivery(), RpcMode :: sync | async
@ -492,6 +469,21 @@ set_subopts(SubPid, Topic, NewOpts) ->
topics() -> topics() ->
emqx_router:topics(). emqx_router:topics().
%%--------------------------------------------------------------------
%% Stats fun
%%--------------------------------------------------------------------
stats_fun() ->
safe_update_stats(?SUBSCRIBER, 'subscribers.count', 'subscribers.max'),
safe_update_stats(?SUBSCRIPTION, 'subscriptions.count', 'subscriptions.max'),
safe_update_stats(?SUBOPTION, 'suboptions.count', 'suboptions.max').
safe_update_stats(Tab, Stat, MaxStat) ->
case ets:info(Tab, size) of
undefined -> ok;
Size -> emqx_stats:setstat(Stat, MaxStat, Size)
end.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% call, cast, pick %% call, cast, pick
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -659,27 +651,19 @@ maybe_delete_route(Topic) ->
sync_route(Action, Topic, ReplyTo) -> sync_route(Action, Topic, ReplyTo) ->
EnabledOn = emqx_config:get([broker, routing, batch_sync, enable_on]), EnabledOn = emqx_config:get([broker, routing, batch_sync, enable_on]),
Res = case EnabledOn of
case EnabledOn of all ->
all -> push_sync_route(Action, Topic, ReplyTo);
push_sync_route(Action, Topic, ReplyTo); none ->
none -> regular_sync_route(Action, Topic);
regular_sync_route(Action, Topic); Role ->
Role -> case Role =:= mria_config:whoami() of
case Role =:= mria_config:whoami() of true ->
true -> push_sync_route(Action, Topic, ReplyTo);
push_sync_route(Action, Topic, ReplyTo); false ->
false -> regular_sync_route(Action, Topic)
regular_sync_route(Action, Topic) end
end end.
end,
_ = external_sync_route(Action, Topic),
Res.
external_sync_route(add, Topic) ->
emqx_external_broker:add_route(Topic);
external_sync_route(delete, Topic) ->
emqx_external_broker:delete_route(Topic).
push_sync_route(Action, Topic, Opts) -> push_sync_route(Action, Topic, Opts) ->
emqx_router_syncer:push(Action, Topic, node(), Opts). emqx_router_syncer:push(Action, Topic, node(), Opts).

View File

@ -18,8 +18,6 @@
-behaviour(gen_server). -behaviour(gen_server).
-include("emqx_router.hrl").
-include("emqx_shared_sub.hrl").
-include("logger.hrl"). -include("logger.hrl").
-include("types.hrl"). -include("types.hrl").
@ -35,9 +33,6 @@
reclaim_seq/1 reclaim_seq/1
]). ]).
%% Stats fun
-export([stats_fun/0]).
%% gen_server callbacks %% gen_server callbacks
-export([ -export([
init/1, init/1,
@ -104,43 +99,6 @@ create_seq(Topic) ->
reclaim_seq(Topic) -> reclaim_seq(Topic) ->
emqx_sequence:reclaim(?SUBSEQ, Topic). emqx_sequence:reclaim(?SUBSEQ, Topic).
%%--------------------------------------------------------------------
%% Stats fun
%%--------------------------------------------------------------------
stats_fun() ->
safe_update_stats(subscriber_val(), 'subscribers.count', 'subscribers.max'),
safe_update_stats(subscription_count(), 'subscriptions.count', 'subscriptions.max'),
safe_update_stats(
durable_subscription_count(),
'durable_subscriptions.count',
'durable_subscriptions.max'
),
safe_update_stats(table_size(?SUBOPTION), 'suboptions.count', 'suboptions.max').
safe_update_stats(undefined, _Stat, _MaxStat) ->
ok;
safe_update_stats(Val, Stat, MaxStat) when is_integer(Val) ->
emqx_stats:setstat(Stat, MaxStat, Val).
%% N.B.: subscriptions from durable sessions are not tied to any particular node.
%% Therefore, do not sum them with node-local subscriptions.
subscription_count() ->
table_size(?SUBSCRIPTION).
durable_subscription_count() ->
emqx_persistent_session_bookkeeper:get_subscription_count().
subscriber_val() ->
sum_subscriber(table_size(?SUBSCRIBER), table_size(?SHARED_SUBSCRIBER)).
sum_subscriber(undefined, undefined) -> undefined;
sum_subscriber(undefined, V2) when is_integer(V2) -> V2;
sum_subscriber(V1, undefined) when is_integer(V1) -> V1;
sum_subscriber(V1, V2) when is_integer(V1), is_integer(V2) -> V1 + V2.
table_size(Tab) when is_atom(Tab) -> ets:info(Tab, size).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% gen_server callbacks %% gen_server callbacks
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -157,7 +115,7 @@ init([]) ->
%% SubMon: SubPid -> SubId %% SubMon: SubPid -> SubId
ok = emqx_utils_ets:new(?SUBMON, [public, {read_concurrency, true}, {write_concurrency, true}]), ok = emqx_utils_ets:new(?SUBMON, [public, {read_concurrency, true}, {write_concurrency, true}]),
%% Stats timer %% Stats timer
ok = emqx_stats:update_interval(broker_stats, fun ?MODULE:stats_fun/0), ok = emqx_stats:update_interval(broker_stats, fun emqx_broker:stats_fun/0),
{ok, #{pmon => emqx_pmon:new()}}. {ok, #{pmon => emqx_pmon:new()}}.
handle_call(Req, _From, State) -> handle_call(Req, _From, State) ->

View File

@ -47,7 +47,7 @@ init([]) ->
router_syncer_pool, router_syncer_pool,
hash, hash,
PoolSize, PoolSize,
{emqx_router_syncer, start_link_pooled, []} {emqx_router_syncer, start_link, []}
]), ]),
%% Shared subscription %% Shared subscription

View File

@ -19,7 +19,6 @@
-include("emqx.hrl"). -include("emqx.hrl").
-include("emqx_channel.hrl"). -include("emqx_channel.hrl").
-include("emqx_session.hrl").
-include("emqx_mqtt.hrl"). -include("emqx_mqtt.hrl").
-include("emqx_access_control.hrl"). -include("emqx_access_control.hrl").
-include("logger.hrl"). -include("logger.hrl").
@ -65,12 +64,6 @@
maybe_nack/1 maybe_nack/1
]). ]).
%% Export for DS session GC worker and session implementations
-export([
will_delay_interval/1,
prepare_will_message_for_publishing/2
]).
%% Exports for CT %% Exports for CT
-export([set_field/3]). -export([set_field/3]).
@ -146,9 +139,7 @@
-type replies() :: emqx_types:packet() | reply() | [reply()]. -type replies() :: emqx_types:packet() | reply() | [reply()].
-define(IS_MQTT_V5, #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5}}). -define(IS_MQTT_V5, #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5}}).
-define(IS_CONNECTED_OR_REAUTHENTICATING(ConnState),
((ConnState == connected) orelse (ConnState == reauthenticating))
).
-define(IS_COMMON_SESSION_TIMER(N), -define(IS_COMMON_SESSION_TIMER(N),
((N == retry_delivery) orelse (N == expire_awaiting_rel)) ((N == retry_delivery) orelse (N == expire_awaiting_rel))
). ).
@ -237,7 +228,7 @@ caps(#channel{clientinfo = #{zone := Zone}}) ->
-spec init(emqx_types:conninfo(), opts()) -> channel(). -spec init(emqx_types:conninfo(), opts()) -> channel().
init( init(
ConnInfo = #{ ConnInfo = #{
peername := {PeerHost, PeerPort} = PeerName, peername := {PeerHost, PeerPort},
sockname := {_Host, SockPort} sockname := {_Host, SockPort}
}, },
#{ #{
@ -261,9 +252,6 @@ init(
listener => ListenerId, listener => ListenerId,
protocol => Protocol, protocol => Protocol,
peerhost => PeerHost, peerhost => PeerHost,
%% We copy peername to clientinfo because some event contexts only have access
%% to client info (e.g.: authn/authz).
peername => PeerName,
peerport => PeerPort, peerport => PeerPort,
sockport => SockPort, sockport => SockPort,
clientid => undefined, clientid => undefined,
@ -275,7 +263,7 @@ init(
}, },
Zone Zone
), ),
{NClientInfo, NConnInfo} = take_conn_info_fields([ws_cookie, peersni], ClientInfo, ConnInfo), {NClientInfo, NConnInfo} = take_ws_cookie(ClientInfo, ConnInfo),
#channel{ #channel{
conninfo = NConnInfo, conninfo = NConnInfo,
clientinfo = NClientInfo, clientinfo = NClientInfo,
@ -315,19 +303,13 @@ set_peercert_infos(Peercert, ClientInfo, Zone) ->
ClientId = PeercetAs(peer_cert_as_clientid), ClientId = PeercetAs(peer_cert_as_clientid),
ClientInfo#{username => Username, clientid => ClientId, dn => DN, cn => CN}. ClientInfo#{username => Username, clientid => ClientId, dn => DN, cn => CN}.
take_conn_info_fields(Fields, ClientInfo, ConnInfo) -> take_ws_cookie(ClientInfo, ConnInfo) ->
lists:foldl( case maps:take(ws_cookie, ConnInfo) of
fun(Field, {ClientInfo0, ConnInfo0}) -> {WsCookie, NConnInfo} ->
case maps:take(Field, ConnInfo0) of {ClientInfo#{ws_cookie => WsCookie}, NConnInfo};
{Value, NConnInfo} -> _ ->
{ClientInfo0#{Field => Value}, NConnInfo}; {ClientInfo, ConnInfo}
_ -> end.
{ClientInfo0, ConnInfo0}
end
end,
{ClientInfo, ConnInfo},
Fields
).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Handle incoming packet %% Handle incoming packet
@ -339,7 +321,7 @@ take_conn_info_fields(Fields, ClientInfo, ConnInfo) ->
| {shutdown, Reason :: term(), channel()} | {shutdown, Reason :: term(), channel()}
| {shutdown, Reason :: term(), replies(), channel()}. | {shutdown, Reason :: term(), replies(), channel()}.
handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = ConnState}) when handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = ConnState}) when
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState) ConnState =:= connected orelse ConnState =:= reauthenticating
-> ->
handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel); handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel);
handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connecting}) -> handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connecting}) ->
@ -556,10 +538,8 @@ handle_in(
{error, ReasonCode} -> {error, ReasonCode} ->
handle_out(disconnect, ReasonCode, Channel) handle_out(disconnect, ReasonCode, Channel)
end; end;
handle_in(?PACKET(?PINGREQ), Channel = #channel{keepalive = Keepalive}) -> handle_in(?PACKET(?PINGREQ), Channel) ->
{ok, NKeepalive} = emqx_keepalive:check(Keepalive), {ok, ?PACKET(?PINGRESP), Channel};
NChannel = Channel#channel{keepalive = NKeepalive},
{ok, ?PACKET(?PINGRESP), reset_timer(keepalive, NChannel)};
handle_in( handle_in(
?DISCONNECT_PACKET(ReasonCode, Properties), ?DISCONNECT_PACKET(ReasonCode, Properties),
Channel = #channel{conninfo = ConnInfo} Channel = #channel{conninfo = ConnInfo}
@ -569,8 +549,29 @@ handle_in(
process_disconnect(ReasonCode, Properties, NChannel); process_disconnect(ReasonCode, Properties, NChannel);
handle_in(?AUTH_PACKET(), Channel) -> handle_in(?AUTH_PACKET(), Channel) ->
handle_out(disconnect, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, Channel); handle_out(disconnect, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, Channel);
handle_in({frame_error, Reason}, Channel) -> handle_in({frame_error, Reason}, Channel = #channel{conn_state = idle}) ->
handle_frame_error(Reason, Channel); shutdown(shutdown_count(frame_error, Reason), Channel);
handle_in(
{frame_error, #{cause := frame_too_large} = R}, Channel = #channel{conn_state = connecting}
) ->
shutdown(
shutdown_count(frame_error, R), ?CONNACK_PACKET(?RC_PACKET_TOO_LARGE), Channel
);
handle_in({frame_error, Reason}, Channel = #channel{conn_state = connecting}) ->
shutdown(shutdown_count(frame_error, Reason), ?CONNACK_PACKET(?RC_MALFORMED_PACKET), Channel);
handle_in(
{frame_error, #{cause := frame_too_large}}, Channel = #channel{conn_state = ConnState}
) when
ConnState =:= connected orelse ConnState =:= reauthenticating
->
handle_out(disconnect, {?RC_PACKET_TOO_LARGE, frame_too_large}, Channel);
handle_in({frame_error, Reason}, Channel = #channel{conn_state = ConnState}) when
ConnState =:= connected orelse ConnState =:= reauthenticating
->
handle_out(disconnect, {?RC_MALFORMED_PACKET, Reason}, Channel);
handle_in({frame_error, Reason}, Channel = #channel{conn_state = disconnected}) ->
?SLOG(error, #{msg => "malformed_mqtt_message", reason => Reason}),
{ok, Channel};
handle_in(Packet, Channel) -> handle_in(Packet, Channel) ->
?SLOG(error, #{msg => "disconnecting_due_to_unexpected_message", packet => Packet}), ?SLOG(error, #{msg => "disconnecting_due_to_unexpected_message", packet => Packet}),
handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel). handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel).
@ -583,12 +584,11 @@ process_connect(
AckProps, AckProps,
Channel = #channel{ Channel = #channel{
conninfo = ConnInfo, conninfo = ConnInfo,
clientinfo = ClientInfo, clientinfo = ClientInfo
will_msg = MaybeWillMsg
} }
) -> ) ->
#{clean_start := CleanStart} = ConnInfo, #{clean_start := CleanStart} = ConnInfo,
case emqx_cm:open_session(CleanStart, ClientInfo, ConnInfo, MaybeWillMsg) of case emqx_cm:open_session(CleanStart, ClientInfo, ConnInfo) of
{ok, #{session := Session, present := false}} -> {ok, #{session := Session, present := false}} ->
NChannel = Channel#channel{session = Session}, NChannel = Channel#channel{session = Session},
handle_out(connack, {?RC_SUCCESS, sp(false), AckProps}, ensure_connected(NChannel)); handle_out(connack, {?RC_SUCCESS, sp(false), AckProps}, ensure_connected(NChannel));
@ -633,7 +633,7 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) ->
msg => cannot_publish_to_topic_due_to_not_authorized, msg => cannot_publish_to_topic_due_to_not_authorized,
reason => emqx_reason_codes:name(Rc) reason => emqx_reason_codes:name(Rc)
}, },
#{topic => Topic, tag => "AUTHZ"} #{topic => Topic}
), ),
case emqx:get_config([authorization, deny_action], ignore) of case emqx:get_config([authorization, deny_action], ignore) of
ignore -> ignore ->
@ -652,7 +652,7 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) ->
msg => cannot_publish_to_topic_due_to_quota_exceeded, msg => cannot_publish_to_topic_due_to_quota_exceeded,
reason => emqx_reason_codes:name(Rc) reason => emqx_reason_codes:name(Rc)
}, },
#{topic => Topic, tag => "AUTHZ"} #{topic => Topic}
), ),
case QoS of case QoS of
?QOS_0 -> ?QOS_0 ->
@ -677,28 +677,21 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) ->
end. end.
packet_to_message(Packet, #channel{ packet_to_message(Packet, #channel{
conninfo = #{ conninfo = #{proto_ver := ProtoVer},
peername := PeerName, clientinfo = #{
proto_ver := ProtoVer protocol := Protocol,
}, clientid := ClientId,
clientinfo = username := Username,
#{ peerhost := PeerHost,
protocol := Protocol, mountpoint := MountPoint
clientid := ClientId, }
username := Username,
peerhost := PeerHost,
mountpoint := MountPoint
} = ClientInfo
}) -> }) ->
ClientAttrs = maps:get(client_attrs, ClientInfo, #{}),
emqx_mountpoint:mount( emqx_mountpoint:mount(
MountPoint, MountPoint,
emqx_packet:to_message( emqx_packet:to_message(
Packet, Packet,
ClientId, ClientId,
#{ #{
client_attrs => ClientAttrs,
peername => PeerName,
proto_ver => ProtoVer, proto_ver => ProtoVer,
protocol => Protocol, protocol => Protocol,
username => Username, username => Username,
@ -709,21 +702,14 @@ packet_to_message(Packet, #channel{
do_publish(_PacketId, Msg = #message{qos = ?QOS_0}, Channel) -> do_publish(_PacketId, Msg = #message{qos = ?QOS_0}, Channel) ->
Result = emqx_broker:publish(Msg), Result = emqx_broker:publish(Msg),
case Result of NChannel = ensure_quota(Result, Channel),
disconnect -> {ok, NChannel};
handle_out(disconnect, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, Channel);
_ ->
NChannel = ensure_quota(Result, Channel),
{ok, NChannel}
end;
do_publish(PacketId, Msg = #message{qos = ?QOS_1}, Channel) -> do_publish(PacketId, Msg = #message{qos = ?QOS_1}, Channel) ->
PubRes = emqx_broker:publish(Msg), PubRes = emqx_broker:publish(Msg),
RC = puback_reason_code(PacketId, Msg, PubRes), RC = puback_reason_code(PacketId, Msg, PubRes),
case RC of case RC of
undefined -> undefined ->
{ok, Channel}; {ok, Channel};
disconnect ->
handle_out(disconnect, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, Channel);
_Value -> _Value ->
do_finish_publish(PacketId, PubRes, RC, Channel) do_finish_publish(PacketId, PubRes, RC, Channel)
end; end;
@ -733,8 +719,6 @@ do_publish(
Channel = #channel{clientinfo = ClientInfo, session = Session} Channel = #channel{clientinfo = ClientInfo, session = Session}
) -> ) ->
case emqx_session:publish(ClientInfo, PacketId, Msg, Session) of case emqx_session:publish(ClientInfo, PacketId, Msg, Session) of
{ok, disconnect, _NSession} ->
handle_out(disconnect, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, Channel);
{ok, PubRes, NSession} -> {ok, PubRes, NSession} ->
RC = pubrec_reason_code(PubRes), RC = pubrec_reason_code(PubRes),
NChannel0 = Channel#channel{session = NSession}, NChannel0 = Channel#channel{session = NSession},
@ -779,9 +763,7 @@ pubrec_reason_code([_ | _]) -> ?RC_SUCCESS.
puback_reason_code(PacketId, Msg, [] = PubRes) -> puback_reason_code(PacketId, Msg, [] = PubRes) ->
emqx_hooks:run_fold('message.puback', [PacketId, Msg, PubRes], ?RC_NO_MATCHING_SUBSCRIBERS); emqx_hooks:run_fold('message.puback', [PacketId, Msg, PubRes], ?RC_NO_MATCHING_SUBSCRIBERS);
puback_reason_code(PacketId, Msg, [_ | _] = PubRes) -> puback_reason_code(PacketId, Msg, [_ | _] = PubRes) ->
emqx_hooks:run_fold('message.puback', [PacketId, Msg, PubRes], ?RC_SUCCESS); emqx_hooks:run_fold('message.puback', [PacketId, Msg, PubRes], ?RC_SUCCESS).
puback_reason_code(_PacketId, _Msg, disconnect) ->
disconnect.
-compile({inline, [after_message_acked/3]}). -compile({inline, [after_message_acked/3]}).
after_message_acked(ClientInfo, Msg, PubAckProps) -> after_message_acked(ClientInfo, Msg, PubAckProps) ->
@ -891,10 +873,9 @@ do_unsubscribe(
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% MQTT-v5.0: 3.14.4 DISCONNECT Actions %% MQTT-v5.0: 3.14.4 DISCONNECT Actions
maybe_clean_will_msg(?RC_SUCCESS, Channel = #channel{session = Session0}) -> maybe_clean_will_msg(?RC_SUCCESS, Channel) ->
%% [MQTT-3.14.4-3] %% [MQTT-3.14.4-3]
Session = emqx_session:clear_will_message(Session0), Channel#channel{will_msg = undefined};
Channel#channel{will_msg = undefined, session = Session};
maybe_clean_will_msg(_ReasonCode, Channel) -> maybe_clean_will_msg(_ReasonCode, Channel) ->
Channel. Channel.
@ -1002,68 +983,6 @@ not_nacked({deliver, _Topic, Msg}) ->
true true
end. end.
%%--------------------------------------------------------------------
%% Handle Frame Error
%%--------------------------------------------------------------------
handle_frame_error(
Reason = #{cause := frame_too_large},
Channel = #channel{conn_state = ConnState, conninfo = ConnInfo}
) when
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
ShutdownCount = shutdown_count(frame_error, Reason),
case proto_ver(Reason, ConnInfo) of
?MQTT_PROTO_V5 ->
handle_out(disconnect, {?RC_PACKET_TOO_LARGE, frame_too_large}, Channel);
_ ->
shutdown(ShutdownCount, Channel)
end;
%% Only send CONNACK with reason code `frame_too_large` for MQTT-v5.0 when connecting,
%% otherwise DONOT send any CONNACK or DISCONNECT packet.
handle_frame_error(
Reason,
Channel = #channel{conn_state = ConnState, conninfo = ConnInfo}
) when
is_map(Reason) andalso
(ConnState == idle orelse ConnState == connecting)
->
ShutdownCount = shutdown_count(frame_error, Reason),
ProtoVer = proto_ver(Reason, ConnInfo),
NChannel = Channel#channel{conninfo = ConnInfo#{proto_ver => ProtoVer}},
case ProtoVer of
?MQTT_PROTO_V5 ->
shutdown(ShutdownCount, ?CONNACK_PACKET(?RC_PACKET_TOO_LARGE), NChannel);
_ ->
shutdown(ShutdownCount, NChannel)
end;
handle_frame_error(
Reason,
Channel = #channel{conn_state = connecting}
) ->
shutdown(
shutdown_count(frame_error, Reason),
?CONNACK_PACKET(?RC_MALFORMED_PACKET),
Channel
);
handle_frame_error(
Reason,
Channel = #channel{conn_state = ConnState}
) when
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
handle_out(
disconnect,
{?RC_MALFORMED_PACKET, Reason},
Channel
);
handle_frame_error(
Reason,
Channel = #channel{conn_state = disconnected}
) ->
?SLOG(error, #{msg => "malformed_mqtt_message", reason => Reason}),
{ok, Channel}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Handle outgoing packet %% Handle outgoing packet
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -1089,6 +1008,7 @@ handle_out(connack, {?RC_SUCCESS, SP, Props}, Channel = #channel{conninfo = Conn
[ConnInfo, emqx_reason_codes:name(?RC_SUCCESS)], [ConnInfo, emqx_reason_codes:name(?RC_SUCCESS)],
AckProps AckProps
), ),
return_connack( return_connack(
?CONNACK_PACKET(?RC_SUCCESS, SP, NAckProps), ?CONNACK_PACKET(?RC_SUCCESS, SP, NAckProps),
ensure_keepalive(NAckProps, Channel) ensure_keepalive(NAckProps, Channel)
@ -1137,7 +1057,7 @@ handle_out(disconnect, {ReasonCode, ReasonName, Props}, Channel = ?IS_MQTT_V5) -
Packet = ?DISCONNECT_PACKET(ReasonCode, Props), Packet = ?DISCONNECT_PACKET(ReasonCode, Props),
{ok, [?REPLY_OUTGOING(Packet), ?REPLY_CLOSE(ReasonName)], Channel}; {ok, [?REPLY_OUTGOING(Packet), ?REPLY_CLOSE(ReasonName)], Channel};
handle_out(disconnect, {_ReasonCode, ReasonName, _Props}, Channel) -> handle_out(disconnect, {_ReasonCode, ReasonName, _Props}, Channel) ->
{ok, ?REPLY_CLOSE(ReasonName), Channel}; {ok, {close, ReasonName}, Channel};
handle_out(auth, {ReasonCode, Properties}, Channel) -> handle_out(auth, {ReasonCode, Properties}, Channel) ->
{ok, ?AUTH_PACKET(ReasonCode, Properties), Channel}; {ok, ?AUTH_PACKET(ReasonCode, Properties), Channel};
handle_out(Type, Data, Channel) -> handle_out(Type, Data, Channel) ->
@ -1226,11 +1146,9 @@ handle_call(
kick, kick,
Channel = #channel{ Channel = #channel{
conn_state = ConnState, conn_state = ConnState,
conninfo = #{proto_ver := ProtoVer}, conninfo = #{proto_ver := ProtoVer}
session = Session
} }
) -> ) ->
emqx_session:destroy(Session),
Channel0 = maybe_publish_will_msg(kicked, Channel), Channel0 = maybe_publish_will_msg(kicked, Channel),
Channel1 = Channel1 =
case ConnState of case ConnState of
@ -1275,21 +1193,17 @@ handle_call(
), ),
Channel0 = maybe_publish_will_msg(takenover, Channel), Channel0 = maybe_publish_will_msg(takenover, Channel),
disconnect_and_shutdown(takenover, AllPendings, Channel0); disconnect_and_shutdown(takenover, AllPendings, Channel0);
handle_call(takeover_kick, Channel) ->
Channel0 = maybe_publish_will_msg(takenover, Channel),
disconnect_and_shutdown(takenover, ok, Channel0);
handle_call(list_authz_cache, Channel) -> handle_call(list_authz_cache, Channel) ->
{reply, emqx_authz_cache:list_authz_cache(), Channel}; {reply, emqx_authz_cache:list_authz_cache(), Channel};
handle_call( handle_call(
{keepalive, Interval}, {keepalive, Interval},
Channel = #channel{ Channel = #channel{
keepalive = KeepAlive, keepalive = KeepAlive,
conninfo = ConnInfo, conninfo = ConnInfo
clientinfo = #{zone := Zone}
} }
) -> ) ->
ClientId = info(clientid, Channel), ClientId = info(clientid, Channel),
NKeepalive = emqx_keepalive:update(Zone, Interval, KeepAlive), NKeepalive = emqx_keepalive:update(timer:seconds(Interval), KeepAlive),
NConnInfo = maps:put(keepalive, Interval, ConnInfo), NConnInfo = maps:put(keepalive, Interval, ConnInfo),
NChannel = Channel#channel{keepalive = NKeepalive, conninfo = NConnInfo}, NChannel = Channel#channel{keepalive = NKeepalive, conninfo = NConnInfo},
SockInfo = maps:get(sockinfo, emqx_cm:get_chan_info(ClientId), #{}), SockInfo = maps:get(sockinfo, emqx_cm:get_chan_info(ClientId), #{}),
@ -1332,7 +1246,7 @@ handle_info(
session = Session session = Session
} }
) when ) when
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState) ConnState =:= connected orelse ConnState =:= reauthenticating
-> ->
{Intent, Session1} = session_disconnect(ClientInfo, ConnInfo, Session), {Intent, Session1} = session_disconnect(ClientInfo, ConnInfo, Session),
Channel1 = ensure_disconnected(Reason, maybe_publish_will_msg(sock_closed, Channel)), Channel1 = ensure_disconnected(Reason, maybe_publish_will_msg(sock_closed, Channel)),
@ -1362,9 +1276,6 @@ handle_info({'DOWN', Ref, process, Pid, Reason}, Channel) ->
[] -> {ok, Channel}; [] -> {ok, Channel};
Msgs -> {ok, Msgs, Channel} Msgs -> {ok, Msgs, Channel}
end; end;
handle_info(?session_message(Message), #channel{session = Session} = Channel) ->
NSession = emqx_session:handle_info(Message, Session),
{ok, Channel#channel{session = NSession}};
handle_info(Info, Channel) -> handle_info(Info, Channel) ->
?SLOG(error, #{msg => "unexpected_info", info => Info}), ?SLOG(error, #{msg => "unexpected_info", info => Info}),
{ok, Channel}. {ok, Channel}.
@ -1392,22 +1303,22 @@ die_if_test_compiled() ->
| {shutdown, Reason :: term(), channel()}. | {shutdown, Reason :: term(), channel()}.
handle_timeout( handle_timeout(
_TRef, _TRef,
keepalive, {keepalive, _StatVal},
Channel = #channel{keepalive = undefined} Channel = #channel{keepalive = undefined}
) -> ) ->
{ok, Channel}; {ok, Channel};
handle_timeout( handle_timeout(
_TRef, _TRef,
keepalive, {keepalive, _StatVal},
Channel = #channel{conn_state = disconnected} Channel = #channel{conn_state = disconnected}
) -> ) ->
{ok, Channel}; {ok, Channel};
handle_timeout( handle_timeout(
_TRef, _TRef,
keepalive, {keepalive, StatVal},
Channel = #channel{keepalive = Keepalive} Channel = #channel{keepalive = Keepalive}
) -> ) ->
case emqx_keepalive:check(Keepalive) of case emqx_keepalive:check(StatVal, Keepalive) of
{ok, NKeepalive} -> {ok, NKeepalive} ->
NChannel = Channel#channel{keepalive = NKeepalive}, NChannel = Channel#channel{keepalive = NKeepalive},
{ok, reset_timer(keepalive, NChannel)}; {ok, reset_timer(keepalive, NChannel)};
@ -1456,9 +1367,9 @@ handle_timeout(_TRef, expire_session, Channel = #channel{session = Session}) ->
handle_timeout( handle_timeout(
_TRef, _TRef,
will_message = TimerName, will_message = TimerName,
Channel = #channel{will_msg = WillMsg} Channel = #channel{clientinfo = ClientInfo, will_msg = WillMsg}
) -> ) ->
(WillMsg =/= undefined) andalso publish_will_msg(Channel), (WillMsg =/= undefined) andalso publish_will_msg(ClientInfo, WillMsg),
{ok, clean_timer(TimerName, Channel#channel{will_msg = undefined})}; {ok, clean_timer(TimerName, Channel#channel{will_msg = undefined})};
handle_timeout( handle_timeout(
_TRef, _TRef,
@ -1472,16 +1383,6 @@ handle_timeout(
{_, Quota2} -> {_, Quota2} ->
{ok, clean_timer(TimerName, Channel#channel{quota = Quota2})} {ok, clean_timer(TimerName, Channel#channel{quota = Quota2})}
end; end;
handle_timeout(
_TRef,
connection_expire,
#channel{conn_state = ConnState} = Channel0
) ->
Channel1 = clean_timer(connection_expire, Channel0),
case ConnState of
disconnected -> {ok, Channel1};
_ -> handle_out(disconnect, ?RC_NOT_AUTHORIZED, Channel1)
end;
handle_timeout(TRef, Msg, Channel) -> handle_timeout(TRef, Msg, Channel) ->
case emqx_hooks:run_fold('client.timeout', [TRef, Msg], []) of case emqx_hooks:run_fold('client.timeout', [TRef, Msg], []) of
[] -> [] ->
@ -1518,16 +1419,10 @@ reset_timer(Name, Time, Channel) ->
ensure_timer(Name, Time, clean_timer(Name, Channel)). ensure_timer(Name, Time, clean_timer(Name, Channel)).
clean_timer(Name, Channel = #channel{timers = Timers}) -> clean_timer(Name, Channel = #channel{timers = Timers}) ->
case maps:take(Name, Timers) of Channel#channel{timers = maps:remove(Name, Timers)}.
error ->
Channel;
{TRef, NTimers} ->
ok = emqx_utils:cancel_timer(TRef),
Channel#channel{timers = NTimers}
end.
interval(keepalive, #channel{keepalive = KeepAlive}) -> interval(keepalive, #channel{keepalive = KeepAlive}) ->
emqx_keepalive:info(check_interval, KeepAlive); emqx_keepalive:info(interval, KeepAlive);
interval(retry_delivery, #channel{session = Session}) -> interval(retry_delivery, #channel{session = Session}) ->
emqx_session:info(retry_interval, Session); emqx_session:info(retry_interval, Session);
interval(expire_awaiting_rel, #channel{session = Session}) -> interval(expire_awaiting_rel, #channel{session = Session}) ->
@ -1654,8 +1549,7 @@ enrich_client(ConnPkt, Channel = #channel{clientinfo = ClientInfo}) ->
fun set_bridge_mode/2, fun set_bridge_mode/2,
fun maybe_username_as_clientid/2, fun maybe_username_as_clientid/2,
fun maybe_assign_clientid/2, fun maybe_assign_clientid/2,
%% attr init should happen after clientid and username assign fun fix_mountpoint/2
fun maybe_set_client_initial_attrs/2
], ],
ConnPkt, ConnPkt,
ClientInfo ClientInfo
@ -1707,73 +1601,17 @@ maybe_assign_clientid(#mqtt_packet_connect{clientid = <<>>}, ClientInfo) ->
maybe_assign_clientid(#mqtt_packet_connect{clientid = ClientId}, ClientInfo) -> maybe_assign_clientid(#mqtt_packet_connect{clientid = ClientId}, ClientInfo) ->
{ok, ClientInfo#{clientid => ClientId}}. {ok, ClientInfo#{clientid => ClientId}}.
get_client_attrs_init_config(Zone) -> fix_mountpoint(_ConnPkt, #{mountpoint := undefined}) ->
get_mqtt_conf(Zone, client_attrs_init, []). ok;
fix_mountpoint(_ConnPkt, ClientInfo = #{mountpoint := MountPoint}) ->
maybe_set_client_initial_attrs(ConnPkt, #{zone := Zone} = ClientInfo) ->
Inits = get_client_attrs_init_config(Zone),
UserProperty = get_user_property_as_map(ConnPkt),
{ok, initialize_client_attrs(Inits, ClientInfo#{user_property => UserProperty})}.
initialize_client_attrs(Inits, ClientInfo) ->
lists:foldl(
fun(#{expression := Variform, set_as_attr := Name}, Acc) ->
Attrs = maps:get(client_attrs, Acc, #{}),
case emqx_variform:render(Variform, ClientInfo) of
{ok, <<>>} ->
?SLOG(
debug,
#{
msg => "client_attr_rednered_to_empty_string",
set_as_attr => Name
}
),
Acc;
{ok, Value} ->
?SLOG(
debug,
#{
msg => "client_attr_initialized",
set_as_attr => Name,
attr_value => Value
}
),
Acc#{client_attrs => Attrs#{Name => Value}};
{error, Reason} ->
?SLOG(
warning,
#{
msg => "client_attr_initialization_failed",
reason => Reason
}
),
Acc
end
end,
ClientInfo,
Inits
).
get_user_property_as_map(#mqtt_packet_connect{properties = #{'User-Property' := UserProperty}}) when
is_list(UserProperty)
->
maps:from_list(UserProperty);
get_user_property_as_map(_) ->
#{}.
fix_mountpoint(#{mountpoint := undefined} = ClientInfo) ->
ClientInfo;
fix_mountpoint(ClientInfo = #{mountpoint := MountPoint}) ->
MountPoint1 = emqx_mountpoint:replvar(MountPoint, ClientInfo), MountPoint1 = emqx_mountpoint:replvar(MountPoint, ClientInfo),
ClientInfo#{mountpoint := MountPoint1}. {ok, ClientInfo#{mountpoint := MountPoint1}}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Set log metadata %% Set log metadata
set_log_meta(_ConnPkt, #channel{clientinfo = #{clientid := ClientId} = ClientInfo}) -> set_log_meta(_ConnPkt, #channel{clientinfo = #{clientid := ClientId}}) ->
Username = maps:get(username, ClientInfo, undefined), emqx_logger:set_metadata_clientid(ClientId).
emqx_logger:set_metadata_clientid(ClientId),
emqx_logger:set_metadata_username(Username).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Check banned %% Check banned
@ -1794,16 +1632,6 @@ count_flapping_event(_ConnPkt, #channel{clientinfo = ClientInfo}) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Authenticate %% Authenticate
%% If peercert exists, add it as `cert_pem` credential field.
maybe_add_cert(Map, #channel{conninfo = ConnInfo}) ->
maybe_add_cert(Map, ConnInfo);
maybe_add_cert(Map, #{peercert := PeerCert}) when is_binary(PeerCert) ->
%% NOTE: it's raw binary at this point,
%% encoding to PEM (base64) is done lazy in emqx_auth_utils:render_var
Map#{cert_pem => PeerCert};
maybe_add_cert(Map, _) ->
Map.
authenticate( authenticate(
?CONNECT_PACKET( ?CONNECT_PACKET(
#mqtt_packet_connect{ #mqtt_packet_connect{
@ -1816,23 +1644,20 @@ authenticate(
auth_cache = AuthCache auth_cache = AuthCache
} = Channel } = Channel
) -> ) ->
%% Auth with CONNECT packet for MQTT v5
AuthData = emqx_mqtt_props:get('Authentication-Data', Properties, undefined), AuthData = emqx_mqtt_props:get('Authentication-Data', Properties, undefined),
Credential0 = do_authenticate(
ClientInfo#{ ClientInfo#{
auth_method => AuthMethod, auth_method => AuthMethod,
auth_data => AuthData, auth_data => AuthData,
auth_cache => AuthCache auth_cache => AuthCache
}, },
Credential = maybe_add_cert(Credential0, Channel), Channel
do_authenticate(Credential, Channel); );
authenticate( authenticate(
?CONNECT_PACKET(#mqtt_packet_connect{password = Password}), ?CONNECT_PACKET(#mqtt_packet_connect{password = Password}),
#channel{clientinfo = ClientInfo} = Channel #channel{clientinfo = ClientInfo} = Channel
) -> ) ->
%% Auth with CONNECT packet for MQTT v3 do_authenticate(ClientInfo#{password => Password}, Channel);
Credential = maybe_add_cert(ClientInfo#{password => Password}, Channel),
do_authenticate(Credential, Channel);
authenticate( authenticate(
?AUTH_PACKET(_, #{'Authentication-Method' := AuthMethod} = Properties), ?AUTH_PACKET(_, #{'Authentication-Method' := AuthMethod} = Properties),
#channel{ #channel{
@ -1841,7 +1666,6 @@ authenticate(
auth_cache = AuthCache auth_cache = AuthCache
} = Channel } = Channel
) -> ) ->
%% Enhanced auth
case emqx_mqtt_props:get('Authentication-Method', ConnProps, undefined) of case emqx_mqtt_props:get('Authentication-Method', ConnProps, undefined) of
AuthMethod -> AuthMethod ->
AuthData = emqx_mqtt_props:get('Authentication-Data', Properties, undefined), AuthData = emqx_mqtt_props:get('Authentication-Data', Properties, undefined),
@ -1854,7 +1678,6 @@ authenticate(
Channel Channel
); );
_ -> _ ->
log_auth_failure("bad_authentication_method"),
{error, ?RC_BAD_AUTHENTICATION_METHOD} {error, ?RC_BAD_AUTHENTICATION_METHOD}
end. end.
@ -1881,7 +1704,6 @@ do_authenticate(
auth_cache = AuthCache auth_cache = AuthCache
}}; }};
{error, Reason} -> {error, Reason} ->
log_auth_failure(Reason),
{error, emqx_reason_codes:connack_error(Reason)} {error, emqx_reason_codes:connack_error(Reason)}
end; end;
do_authenticate(Credential, #channel{clientinfo = ClientInfo} = Channel) -> do_authenticate(Credential, #channel{clientinfo = ClientInfo} = Channel) ->
@ -1889,42 +1711,12 @@ do_authenticate(Credential, #channel{clientinfo = ClientInfo} = Channel) ->
{ok, AuthResult} -> {ok, AuthResult} ->
{ok, #{}, Channel#channel{clientinfo = merge_auth_result(ClientInfo, AuthResult)}}; {ok, #{}, Channel#channel{clientinfo = merge_auth_result(ClientInfo, AuthResult)}};
{error, Reason} -> {error, Reason} ->
log_auth_failure(Reason),
{error, emqx_reason_codes:connack_error(Reason)} {error, emqx_reason_codes:connack_error(Reason)}
end. end.
log_auth_failure(Reason) -> merge_auth_result(ClientInfo, AuthResult) when is_map(ClientInfo) andalso is_map(AuthResult) ->
?SLOG_THROTTLE( IsSuperuser = maps:get(is_superuser, AuthResult, false),
warning, maps:merge(ClientInfo, AuthResult#{is_superuser => IsSuperuser}).
#{
msg => authentication_failure,
reason => Reason
},
#{tag => "AUTHN"}
).
%% Merge authentication result into ClientInfo
%% Authentication result may include:
%% 1. `is_superuser': The superuser flag from various backends
%% 2. `expire_at`: Authentication validity deadline, the client will be disconnected after this time
%% 3. `acl': ACL rules from JWT, HTTP auth backend
%% 4. `client_attrs': Extra client attributes from JWT, HTTP auth backend
%% 5. Maybe more non-standard fields used by hook callbacks
merge_auth_result(ClientInfo, AuthResult0) when is_map(ClientInfo) andalso is_map(AuthResult0) ->
IsSuperuser = maps:get(is_superuser, AuthResult0, false),
ExpireAt = maps:get(expire_at, AuthResult0, undefined),
AuthResult = maps:without([client_attrs, expire_at], AuthResult0),
Attrs0 = maps:get(client_attrs, ClientInfo, #{}),
Attrs1 = maps:get(client_attrs, AuthResult0, #{}),
Attrs = maps:merge(Attrs0, Attrs1),
NewClientInfo = maps:merge(
ClientInfo#{client_attrs => Attrs},
AuthResult#{
is_superuser => IsSuperuser,
auth_expire_at => ExpireAt
}
),
fix_mountpoint(NewClientInfo).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Process Topic Alias %% Process Topic Alias
@ -2329,16 +2121,10 @@ ensure_connected(
) -> ) ->
NConnInfo = ConnInfo#{connected_at => erlang:system_time(millisecond)}, NConnInfo = ConnInfo#{connected_at => erlang:system_time(millisecond)},
ok = run_hooks('client.connected', [ClientInfo, NConnInfo]), ok = run_hooks('client.connected', [ClientInfo, NConnInfo]),
schedule_connection_expire(Channel#channel{ Channel#channel{
conninfo = trim_conninfo(NConnInfo), conninfo = trim_conninfo(NConnInfo),
conn_state = connected conn_state = connected
}). }.
schedule_connection_expire(Channel = #channel{clientinfo = #{auth_expire_at := undefined}}) ->
Channel;
schedule_connection_expire(Channel = #channel{clientinfo = #{auth_expire_at := ExpireAt}}) ->
Interval = max(0, ExpireAt - erlang:system_time(millisecond)),
ensure_timer(connection_expire, Interval, Channel).
trim_conninfo(ConnInfo) -> trim_conninfo(ConnInfo) ->
maps:without( maps:without(
@ -2385,7 +2171,9 @@ ensure_keepalive_timer(0, Channel) ->
ensure_keepalive_timer(disabled, Channel) -> ensure_keepalive_timer(disabled, Channel) ->
Channel; Channel;
ensure_keepalive_timer(Interval, Channel = #channel{clientinfo = #{zone := Zone}}) -> ensure_keepalive_timer(Interval, Channel = #channel{clientinfo = #{zone := Zone}}) ->
Keepalive = emqx_keepalive:init(Zone, Interval), Multiplier = get_mqtt_conf(Zone, keepalive_multiplier),
RecvCnt = emqx_pd:get_counter(recv_pkt),
Keepalive = emqx_keepalive:init(RecvCnt, round(timer:seconds(Interval) * Multiplier)),
ensure_timer(keepalive, Channel#channel{keepalive = Keepalive}). ensure_timer(keepalive, Channel#channel{keepalive = Keepalive}).
clear_keepalive(Channel = #channel{timers = Timers}) -> clear_keepalive(Channel = #channel{timers = Timers}) ->
@ -2502,18 +2290,20 @@ maybe_publish_will_msg(
Channel; Channel;
maybe_publish_will_msg( maybe_publish_will_msg(
_Reason, _Reason,
Channel0 = #channel{ Channel = #channel{
conninfo = #{proto_ver := ?MQTT_PROTO_V3, clientid := ClientId} conninfo = #{proto_ver := ?MQTT_PROTO_V3, clientid := ClientId}, will_msg = WillMsg
} }
) -> ) ->
%% Unconditionally publish will message for MQTT 3.1.1 %% Unconditionally publish will message for MQTT 3.1.1
?tp(debug, maybe_publish_willmsg_v3, #{clientid => ClientId}), ?tp(debug, maybe_publish_willmsg_v3, #{clientid => ClientId}),
Channel = publish_will_msg(Channel0), _ = publish_will_msg(Channel#channel.clientinfo, WillMsg),
remove_willmsg(Channel); Channel#channel{will_msg = undefined};
maybe_publish_will_msg( maybe_publish_will_msg(
Reason, Reason,
Channel0 = #channel{ Channel = #channel{
conninfo = #{clientid := ClientId} clientinfo = ClientInfo,
conninfo = #{clientid := ClientId},
will_msg = WillMsg
} }
) when ) when
Reason =:= expired orelse Reason =:= expired orelse
@ -2530,20 +2320,13 @@ maybe_publish_will_msg(
%% d. internal_error (maybe not recoverable) %% d. internal_error (maybe not recoverable)
%% This ensures willmsg will be published if the willmsg timer is scheduled but not fired %% This ensures willmsg will be published if the willmsg timer is scheduled but not fired
%% OR fired but not yet handled %% OR fired but not yet handled
%% NOTE! For durable sessions, `?chan_terminating' does NOT imply that the session is ?tp(debug, maybe_publish_willmsg_session_ends, #{clientid => ClientId, reason => Reason}),
%% gone. _ = publish_will_msg(ClientInfo, WillMsg),
case is_durable_session(Channel0) andalso Reason =:= ?chan_terminating of remove_willmsg(Channel);
false ->
?tp(debug, maybe_publish_willmsg_session_ends, #{clientid => ClientId, reason => Reason}),
Channel = publish_will_msg(Channel0),
remove_willmsg(Channel);
true ->
Channel0
end;
maybe_publish_will_msg( maybe_publish_will_msg(
takenover, takenover,
Channel0 = #channel{ Channel = #channel{
clientinfo = ClientInfo,
will_msg = WillMsg, will_msg = WillMsg,
conninfo = #{clientid := ClientId} conninfo = #{clientid := ClientId}
} }
@ -2561,8 +2344,7 @@ maybe_publish_will_msg(
case will_delay_interval(WillMsg) of case will_delay_interval(WillMsg) of
0 -> 0 ->
?tp(debug, maybe_publish_willmsg_takenover_pub, #{clientid => ClientId}), ?tp(debug, maybe_publish_willmsg_takenover_pub, #{clientid => ClientId}),
Channel = publish_will_msg(Channel0), _ = publish_will_msg(ClientInfo, WillMsg);
ok;
I when I > 0 -> I when I > 0 ->
%% @NOTE Non-normative comment in MQTT 5.0 spec %% @NOTE Non-normative comment in MQTT 5.0 spec
%% """ %% """
@ -2571,13 +2353,13 @@ maybe_publish_will_msg(
%% before the Will Message is published. %% before the Will Message is published.
%% """ %% """
?tp(debug, maybe_publish_willmsg_takenover_skip, #{clientid => ClientId}), ?tp(debug, maybe_publish_willmsg_takenover_skip, #{clientid => ClientId}),
Channel = Channel0,
skip skip
end, end,
remove_willmsg(Channel); remove_willmsg(Channel);
maybe_publish_will_msg( maybe_publish_will_msg(
Reason, Reason,
Channel0 = #channel{ Channel = #channel{
clientinfo = ClientInfo,
will_msg = WillMsg, will_msg = WillMsg,
conninfo = #{clientid := ClientId} conninfo = #{clientid := ClientId}
} }
@ -2588,11 +2370,11 @@ maybe_publish_will_msg(
?tp(debug, maybe_publish_will_msg_other_publish, #{ ?tp(debug, maybe_publish_will_msg_other_publish, #{
clientid => ClientId, reason => Reason clientid => ClientId, reason => Reason
}), }),
Channel = publish_will_msg(Channel0), _ = publish_will_msg(ClientInfo, WillMsg),
remove_willmsg(Channel); remove_willmsg(Channel);
I when I > 0 -> I when I > 0 ->
?tp(debug, maybe_publish_will_msg_other_delay, #{clientid => ClientId, reason => Reason}), ?tp(debug, maybe_publish_will_msg_other_delay, #{clientid => ClientId, reason => Reason}),
ensure_timer(will_message, timer:seconds(I), Channel0) ensure_timer(will_message, timer:seconds(I), Channel)
end. end.
will_delay_interval(WillMsg) -> will_delay_interval(WillMsg) ->
@ -2603,17 +2385,14 @@ will_delay_interval(WillMsg) ->
). ).
publish_will_msg( publish_will_msg(
#channel{ ClientInfo = #{mountpoint := MountPoint},
session = Session, Msg = #message{topic = Topic}
clientinfo = ClientInfo,
will_msg = Msg = #message{topic = Topic}
} = Channel
) -> ) ->
case prepare_will_message_for_publishing(ClientInfo, Msg) of Action = authz_action(Msg),
{ok, PreparedMessage} -> PublishingDisallowed = emqx_access_control:authorize(ClientInfo, Action, Topic) =/= allow,
NSession = emqx_session:publish_will_message_now(Session, PreparedMessage), ClientBanned = emqx_banned:check(ClientInfo),
Channel#channel{session = NSession}; case PublishingDisallowed orelse ClientBanned of
{error, #{client_banned := ClientBanned, publishing_disallowed := PublishingDisallowed}} -> true ->
?tp( ?tp(
warning, warning,
last_will_testament_publish_denied, last_will_testament_publish_denied,
@ -2623,23 +2402,12 @@ publish_will_msg(
publishing_disallowed => PublishingDisallowed publishing_disallowed => PublishingDisallowed
} }
), ),
Channel ok;
end.
prepare_will_message_for_publishing(
ClientInfo = #{mountpoint := MountPoint},
Msg = #message{topic = Topic}
) ->
Action = authz_action(Msg),
PublishingDisallowed = emqx_access_control:authorize(ClientInfo, Action, Topic) =/= allow,
ClientBanned = emqx_banned:check(ClientInfo),
case PublishingDisallowed orelse ClientBanned of
true ->
{error, #{client_banned => ClientBanned, publishing_disallowed => PublishingDisallowed}};
false -> false ->
NMsg = emqx_mountpoint:mount(MountPoint, Msg), NMsg = emqx_mountpoint:mount(MountPoint, Msg),
PreparedMessage = NMsg#message{timestamp = emqx_message:timestamp_now()}, NMsg2 = NMsg#message{timestamp = erlang:system_time(millisecond)},
{ok, PreparedMessage} _ = emqx_broker:publish(NMsg2),
ok
end. end.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -2679,7 +2447,8 @@ save_alias(outbound, AliasId, Topic, TopicAliases = #{outbound := Aliases}) ->
NAliases = maps:put(Topic, AliasId, Aliases), NAliases = maps:put(Topic, AliasId, Aliases),
TopicAliases#{outbound => NAliases}. TopicAliases#{outbound => NAliases}.
-compile({inline, [reply/2, shutdown/2, shutdown/3]}). -compile({inline, [reply/2, shutdown/2, shutdown/3, sp/1, flag/1]}).
reply(Reply, Channel) -> reply(Reply, Channel) ->
{reply, Reply, Channel}. {reply, Reply, Channel}.
@ -2715,19 +2484,14 @@ disconnect_and_shutdown(
?IS_MQTT_V5 = ?IS_MQTT_V5 =
#channel{conn_state = ConnState} #channel{conn_state = ConnState}
) when ) when
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState) ConnState =:= connected orelse ConnState =:= reauthenticating
-> ->
NChannel = ensure_disconnected(Reason, Channel), NChannel = ensure_disconnected(Reason, Channel),
shutdown(Reason, Reply, ?DISCONNECT_PACKET(reason_code(Reason)), NChannel); shutdown(Reason, Reply, ?DISCONNECT_PACKET(reason_code(Reason)), NChannel);
%% mqtt v3/v4 connected sessions %% mqtt v3/v4 sessions, mqtt v5 other conn_state sessions
disconnect_and_shutdown(Reason, Reply, Channel = #channel{conn_state = ConnState}) when
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
NChannel = ensure_disconnected(Reason, Channel),
shutdown(Reason, Reply, NChannel);
%% other conn_state sessions
disconnect_and_shutdown(Reason, Reply, Channel) -> disconnect_and_shutdown(Reason, Reply, Channel) ->
shutdown(Reason, Reply, Channel). NChannel = ensure_disconnected(Reason, Channel),
shutdown(Reason, Reply, NChannel).
-compile({inline, [sp/1, flag/1]}). -compile({inline, [sp/1, flag/1]}).
sp(true) -> 1; sp(true) -> 1;
@ -2755,22 +2519,6 @@ remove_willmsg(Channel = #channel{timers = Timers}) ->
timers = maps:remove(will_message, Timers) timers = maps:remove(will_message, Timers)
} }
end. end.
is_durable_session(#channel{session = Session}) ->
case emqx_session:info(impl, Session) of
emqx_persistent_session_ds ->
true;
_ ->
false
end.
proto_ver(#{proto_ver := ProtoVer}, _ConnInfo) ->
ProtoVer;
proto_ver(_Reason, #{proto_ver := ProtoVer}) ->
ProtoVer;
proto_ver(_, _) ->
?MQTT_PROTO_V4.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% For CT tests %% For CT tests
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -47,14 +47,13 @@
]). ]).
-export([ -export([
open_session/4, open_session/3,
discard_session/1, discard_session/1,
discard_session/2, discard_session/2,
takeover_session_begin/1, takeover_session_begin/1,
takeover_session_end/1, takeover_session_end/1,
kick_session/1, kick_session/1,
kick_session/2, kick_session/2
takeover_kick/1
]). ]).
-export([ -export([
@ -101,7 +100,6 @@
takeover_session/2, takeover_session/2,
takeover_finish/2, takeover_finish/2,
do_kick_session/3, do_kick_session/3,
do_takeover_kick_session_v3/2,
do_get_chan_info/2, do_get_chan_info/2,
do_get_chan_stats/2, do_get_chan_stats/2,
do_get_chann_conn_mod/2 do_get_chann_conn_mod/2
@ -112,8 +110,6 @@
chan_pid/0 chan_pid/0
]). ]).
-type message() :: emqx_types:message().
-type chan_pid() :: pid(). -type chan_pid() :: pid().
-type channel_info() :: { -type channel_info() :: {
@ -124,8 +120,6 @@
-type takeover_state() :: {_ConnMod :: module(), _ChanPid :: pid()}. -type takeover_state() :: {_ConnMod :: module(), _ChanPid :: pid()}.
-define(BPAPI_NAME, emqx_cm).
-define(CHAN_STATS, [ -define(CHAN_STATS, [
{?CHAN_TAB, 'channels.count', 'channels.max'}, {?CHAN_TAB, 'channels.count', 'channels.max'},
{?CHAN_TAB, 'sessions.count', 'sessions.max'}, {?CHAN_TAB, 'sessions.count', 'sessions.max'},
@ -227,7 +221,7 @@ get_chan_info(ClientId, ChanPid) ->
wrap_rpc(emqx_cm_proto_v2:get_chan_info(ClientId, ChanPid)). wrap_rpc(emqx_cm_proto_v2:get_chan_info(ClientId, ChanPid)).
%% @doc Update infos of the channel. %% @doc Update infos of the channel.
-spec set_chan_info(emqx_types:clientid(), emqx_types:channel_attrs()) -> boolean(). -spec set_chan_info(emqx_types:clientid(), emqx_types:attrs()) -> boolean().
set_chan_info(ClientId, Info) when ?IS_CLIENTID(ClientId) -> set_chan_info(ClientId, Info) when ?IS_CLIENTID(ClientId) ->
Chan = {ClientId, self()}, Chan = {ClientId, self()},
try try
@ -272,29 +266,24 @@ set_chan_stats(ClientId, ChanPid, Stats) when ?IS_CLIENTID(ClientId) ->
end. end.
%% @doc Open a session. %% @doc Open a session.
-spec open_session( -spec open_session(_CleanStart :: boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) ->
_CleanStart :: boolean(),
emqx_types:clientinfo(),
emqx_types:conninfo(),
emqx_maybe:t(message())
) ->
{ok, #{ {ok, #{
session := emqx_session:t(), session := emqx_session:t(),
present := boolean(), present := boolean(),
replay => _ReplayContext replay => _ReplayContext
}} }}
| {error, Reason :: term()}. | {error, Reason :: term()}.
open_session(_CleanStart = true, ClientInfo = #{clientid := ClientId}, ConnInfo, MaybeWillMsg) -> open_session(_CleanStart = true, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
Self = self(), Self = self(),
emqx_cm_locker:trans(ClientId, fun(_) -> emqx_cm_locker:trans(ClientId, fun(_) ->
ok = discard_session(ClientId), ok = discard_session(ClientId),
ok = emqx_session:destroy(ClientInfo, ConnInfo), ok = emqx_session:destroy(ClientInfo, ConnInfo),
create_register_session(ClientInfo, ConnInfo, MaybeWillMsg, Self) create_register_session(ClientInfo, ConnInfo, Self)
end); end);
open_session(_CleanStart = false, ClientInfo = #{clientid := ClientId}, ConnInfo, MaybeWillMsg) -> open_session(_CleanStart = false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
Self = self(), Self = self(),
emqx_cm_locker:trans(ClientId, fun(_) -> emqx_cm_locker:trans(ClientId, fun(_) ->
case emqx_session:open(ClientInfo, ConnInfo, MaybeWillMsg) of case emqx_session:open(ClientInfo, ConnInfo) of
{true, Session, ReplayContext} -> {true, Session, ReplayContext} ->
ok = register_channel(ClientId, Self, ConnInfo), ok = register_channel(ClientId, Self, ConnInfo),
{ok, #{session => Session, present => true, replay => ReplayContext}}; {ok, #{session => Session, present => true, replay => ReplayContext}};
@ -304,8 +293,8 @@ open_session(_CleanStart = false, ClientInfo = #{clientid := ClientId}, ConnInfo
end end
end). end).
create_register_session(ClientInfo = #{clientid := ClientId}, ConnInfo, MaybeWillMsg, ChanPid) -> create_register_session(ClientInfo = #{clientid := ClientId}, ConnInfo, ChanPid) ->
Session = emqx_session:create(ClientInfo, ConnInfo, MaybeWillMsg), Session = emqx_session:create(ClientInfo, ConnInfo),
ok = register_channel(ClientId, ChanPid, ConnInfo), ok = register_channel(ClientId, ChanPid, ConnInfo),
{ok, #{session => Session, present => false}}. {ok, #{session => Session, present => false}}.
@ -356,38 +345,6 @@ pick_channel(ClientId) ->
ChanPid ChanPid
end. end.
%% Used by `emqx_persistent_session_ds'
-spec takeover_kick(emqx_types:clientid()) -> ok.
takeover_kick(ClientId) ->
case lookup_channels(ClientId) of
[] ->
ok;
ChanPids ->
lists:foreach(
fun(Pid) ->
do_takeover_session(ClientId, Pid)
end,
ChanPids
)
end.
%% Used by `emqx_persistent_session_ds'.
%% We stop any running channels with reason `takenover' so that correct reason codes and
%% will message processing may take place. For older BPAPI nodes, we don't have much
%% choice other than calling the old `discard_session' code.
do_takeover_session(ClientId, Pid) ->
Node = node(Pid),
case emqx_bpapi:supported_version(Node, ?BPAPI_NAME) of
undefined ->
%% Race: node (re)starting? Assume v2.
discard_session(ClientId, Pid);
Vsn when Vsn =< 2 ->
discard_session(ClientId, Pid);
_Vsn ->
takeover_kick_session(ClientId, Pid)
end.
%% Used only by `emqx_session_mem'
takeover_finish(ConnMod, ChanPid) -> takeover_finish(ConnMod, ChanPid) ->
request_stepdown( request_stepdown(
{takeover, 'end'}, {takeover, 'end'},
@ -396,7 +353,6 @@ takeover_finish(ConnMod, ChanPid) ->
). ).
%% @doc RPC Target @ emqx_cm_proto_v2:takeover_session/2 %% @doc RPC Target @ emqx_cm_proto_v2:takeover_session/2
%% Used only by `emqx_session_mem'
takeover_session(ClientId, Pid) -> takeover_session(ClientId, Pid) ->
try try
do_takeover_begin(ClientId, Pid) do_takeover_begin(ClientId, Pid)
@ -452,7 +408,7 @@ discard_session(ClientId) when is_binary(ClientId) ->
| {ok, emqx_session:t() | _ReplayContext} | {ok, emqx_session:t() | _ReplayContext}
| {error, term()} | {error, term()}
when when
Action :: kick | discard | {takeover, 'begin'} | {takeover, 'end'} | takeover_kick. Action :: kick | discard | {takeover, 'begin'} | {takeover, 'end'}.
request_stepdown(Action, ConnMod, Pid) -> request_stepdown(Action, ConnMod, Pid) ->
Timeout = Timeout =
case Action == kick orelse Action == discard of case Action == kick orelse Action == discard of
@ -533,19 +489,7 @@ do_kick_session(Action, ClientId, ChanPid) when node(ChanPid) =:= node() ->
ok = request_stepdown(Action, ConnMod, ChanPid) ok = request_stepdown(Action, ConnMod, ChanPid)
end. end.
%% @doc RPC Target for emqx_cm_proto_v3:takeover_kick_session/3 %% @private This function is shared for session 'kick' and 'discard' (as the first arg Action).
-spec do_takeover_kick_session_v3(emqx_types:clientid(), chan_pid()) -> ok.
do_takeover_kick_session_v3(ClientId, ChanPid) when node(ChanPid) =:= node() ->
case do_get_chann_conn_mod(ClientId, ChanPid) of
undefined ->
%% already deregistered
ok;
ConnMod when is_atom(ConnMod) ->
ok = request_stepdown(takeover_kick, ConnMod, ChanPid)
end.
%% @private This function is shared for session `kick' and `discard' (as the first arg
%% Action).
kick_session(Action, ClientId, ChanPid) -> kick_session(Action, ClientId, ChanPid) ->
try try
wrap_rpc(emqx_cm_proto_v2:kick_session(Action, ClientId, ChanPid)) wrap_rpc(emqx_cm_proto_v2:kick_session(Action, ClientId, ChanPid))
@ -568,28 +512,6 @@ kick_session(Action, ClientId, ChanPid) ->
) )
end. end.
takeover_kick_session(ClientId, ChanPid) ->
try
wrap_rpc(emqx_cm_proto_v3:takeover_kick_session(ClientId, ChanPid))
catch
Error:Reason ->
%% This should mostly be RPC failures.
%% However, if the node is still running the old version
%% code (prior to emqx app 4.3.10) some of the RPC handler
%% exceptions may get propagated to a new version node
?SLOG(
error,
#{
msg => "failed_to_kick_session_on_remote_node",
node => node(ChanPid),
action => takeover,
error => Error,
reason => Reason
},
#{clientid => ClientId}
)
end.
kick_session(ClientId) -> kick_session(ClientId) ->
case lookup_channels(ClientId) of case lookup_channels(ClientId) of
[] -> [] ->

View File

@ -53,7 +53,6 @@ init([]) ->
RegistryKeeper = child_spec(emqx_cm_registry_keeper, 5000, worker), RegistryKeeper = child_spec(emqx_cm_registry_keeper, 5000, worker),
Manager = child_spec(emqx_cm, 5000, worker), Manager = child_spec(emqx_cm, 5000, worker),
DSSessionGCSup = child_spec(emqx_persistent_session_ds_sup, infinity, supervisor), DSSessionGCSup = child_spec(emqx_persistent_session_ds_sup, infinity, supervisor),
DSSessionBookkeeper = child_spec(emqx_persistent_session_bookkeeper, 5_000, worker),
Children = Children =
[ [
Banned, Banned,
@ -63,8 +62,7 @@ init([]) ->
Registry, Registry,
RegistryKeeper, RegistryKeeper,
Manager, Manager,
DSSessionGCSup, DSSessionGCSup
DSSessionBookkeeper
], ],
{ok, {SupFlags, Children}}. {ok, {SupFlags, Children}}.

View File

@ -118,7 +118,6 @@
config/0, config/0,
app_envs/0, app_envs/0,
update_opts/0, update_opts/0,
cluster_rpc_opts/0,
update_cmd/0, update_cmd/0,
update_args/0, update_args/0,
update_error/0, update_error/0,
@ -148,7 +147,6 @@
raw_config => emqx_config:raw_config(), raw_config => emqx_config:raw_config(),
post_config_update => #{module() => any()} post_config_update => #{module() => any()}
}. }.
-type cluster_rpc_opts() :: #{kind => ?KIND_INITIATE | ?KIND_REPLICATE}.
%% raw_config() is the config that is NOT parsed and translated by hocon schema %% raw_config() is the config that is NOT parsed and translated by hocon schema
-type raw_config() :: #{binary() => term()} | list() | undefined. -type raw_config() :: #{binary() => term()} | list() | undefined.
@ -499,14 +497,15 @@ fill_defaults(RawConf, Opts) ->
). ).
-spec fill_defaults(module(), raw_config(), hocon_tconf:opts()) -> map(). -spec fill_defaults(module(), raw_config(), hocon_tconf:opts()) -> map().
fill_defaults(SchemaMod, RawConf = #{<<"durable_storage">> := Ds}, Opts) -> fill_defaults(_SchemaMod, RawConf = #{<<"durable_storage">> := _}, _) ->
%% FIXME: kludge to prevent `emqx_config' module from filling in %% FIXME: kludge to prevent `emqx_config' module from filling in
%% the default values for backends and layouts. These records are %% the default values for backends and layouts. These records are
%% inside unions, and adding default values there will add %% inside unions, and adding default values there will add
%% incompatible fields. %% incompatible fields.
RawConf1 = maps:remove(<<"durable_storage">>, RawConf), %%
Conf = fill_defaults(SchemaMod, RawConf1, Opts), %% Note: this function is called for each individual conf root, so
Conf#{<<"durable_storage">> => Ds}; %% this clause only affects this particular subtree.
RawConf;
fill_defaults(SchemaMod, RawConf, Opts0) -> fill_defaults(SchemaMod, RawConf, Opts0) ->
Opts = maps:merge(#{required => false, make_serializable => true}, Opts0), Opts = maps:merge(#{required => false, make_serializable => true}, Opts0),
hocon_tconf:check_plain( hocon_tconf:check_plain(

View File

@ -18,7 +18,6 @@
-module(emqx_config_handler). -module(emqx_config_handler).
-include("logger.hrl"). -include("logger.hrl").
-include("emqx.hrl").
-include("emqx_schema.hrl"). -include("emqx_schema.hrl").
-include_lib("hocon/include/hocon_types.hrl"). -include_lib("hocon/include/hocon_types.hrl").
@ -31,7 +30,6 @@
add_handler/2, add_handler/2,
remove_handler/1, remove_handler/1,
update_config/3, update_config/3,
update_config/4,
get_raw_cluster_override_conf/0, get_raw_cluster_override_conf/0,
info/0 info/0
]). ]).
@ -55,13 +53,9 @@
-optional_callbacks([ -optional_callbacks([
pre_config_update/3, pre_config_update/3,
pre_config_update/4,
propagated_pre_config_update/3, propagated_pre_config_update/3,
propagated_pre_config_update/4,
post_config_update/5, post_config_update/5,
post_config_update/6, propagated_post_config_update/5
propagated_post_config_update/5,
propagated_post_config_update/6
]). ]).
-callback pre_config_update([atom()], emqx_config:update_request(), emqx_config:raw_config()) -> -callback pre_config_update([atom()], emqx_config:update_request(), emqx_config:raw_config()) ->
@ -89,38 +83,6 @@
) -> ) ->
ok | {ok, Result :: any()} | {error, Reason :: term()}. ok | {ok, Result :: any()} | {error, Reason :: term()}.
-callback pre_config_update(
[atom()], emqx_config:update_request(), emqx_config:raw_config(), emqx_config:cluster_rpc_opts()
) ->
ok | {ok, emqx_config:update_request()} | {error, term()}.
-callback propagated_pre_config_update(
[binary()],
emqx_config:update_request(),
emqx_config:raw_config(),
emqx_config:cluster_rpc_opts()
) ->
ok | {ok, emqx_config:update_request()} | {error, term()}.
-callback post_config_update(
[atom()],
emqx_config:update_request(),
emqx_config:config(),
emqx_config:config(),
emqx_config:app_envs(),
emqx_config:cluster_rpc_opts()
) ->
ok | {ok, Result :: any()} | {error, Reason :: term()}.
-callback propagated_post_config_update(
[atom()],
emqx_config:update_request(),
emqx_config:config(),
emqx_config:config(),
emqx_config:app_envs(),
emqx_config:cluster_rpc_opts()
) ->
ok | {ok, Result :: any()} | {error, Reason :: term()}.
-type state() :: #{handlers := any()}. -type state() :: #{handlers := any()}.
-type config_key_path() :: emqx_utils_maps:config_key_path(). -type config_key_path() :: emqx_utils_maps:config_key_path().
@ -130,17 +92,12 @@ start_link() ->
stop() -> stop() ->
gen_server:stop(?MODULE). gen_server:stop(?MODULE).
update_config(SchemaModule, ConfKeyPath, UpdateArgs) -> -spec update_config(module(), config_key_path(), emqx_config:update_args()) ->
update_config(SchemaModule, ConfKeyPath, UpdateArgs, #{}).
-spec update_config(module(), config_key_path(), emqx_config:update_args(), map()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(SchemaModule, ConfKeyPath, UpdateArgs, ClusterOpts) -> update_config(SchemaModule, ConfKeyPath, UpdateArgs) ->
%% force convert the path to a list of atoms, as there maybe some wildcard names/ids in the path %% force convert the path to a list of atoms, as there maybe some wildcard names/ids in the path
AtomKeyPath = [atom(Key) || Key <- ConfKeyPath], AtomKeyPath = [atom(Key) || Key <- ConfKeyPath],
gen_server:call( gen_server:call(?MODULE, {change_config, SchemaModule, AtomKeyPath, UpdateArgs}, infinity).
?MODULE, {change_config, SchemaModule, AtomKeyPath, UpdateArgs, ClusterOpts}, infinity
).
-spec add_handler(config_key_path(), handler_name()) -> -spec add_handler(config_key_path(), handler_name()) ->
ok | {error, {conflict, list()}}. ok | {error, {conflict, list()}}.
@ -173,11 +130,11 @@ handle_call({add_handler, ConfKeyPath, HandlerName}, _From, State = #{handlers :
{error, _Reason} = Error -> {reply, Error, State} {error, _Reason} = Error -> {reply, Error, State}
end; end;
handle_call( handle_call(
{change_config, SchemaModule, ConfKeyPath, UpdateArgs, ClusterRpcOpts}, {change_config, SchemaModule, ConfKeyPath, UpdateArgs},
_From, _From,
#{handlers := Handlers} = State #{handlers := Handlers} = State
) -> ) ->
Result = handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs, ClusterRpcOpts), Result = handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs),
{reply, Result, State}; {reply, Result, State};
handle_call(get_raw_cluster_override_conf, _From, State) -> handle_call(get_raw_cluster_override_conf, _From, State) ->
Reply = emqx_config:read_override_conf(#{override_to => cluster}), Reply = emqx_config:read_override_conf(#{override_to => cluster}),
@ -246,9 +203,9 @@ filter_top_level_handlers(Handlers) ->
Handlers Handlers
). ).
handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs, ClusterRpcOpts) -> handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs) ->
try try
do_handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs, ClusterRpcOpts) do_handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs)
catch catch
throw:Reason -> throw:Reason ->
{error, Reason}; {error, Reason};
@ -260,14 +217,13 @@ handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs, ClusterRp
update_req => UpdateArgs, update_req => UpdateArgs,
module => SchemaModule, module => SchemaModule,
key_path => ConfKeyPath, key_path => ConfKeyPath,
cluster_rpc_opts => ClusterRpcOpts,
stacktrace => ST stacktrace => ST
}), }),
{error, {config_update_crashed, Reason}} {error, {config_update_crashed, Reason}}
end. end.
do_handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs, ClusterOpts) -> do_handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs) ->
case process_update_request(ConfKeyPath, Handlers, UpdateArgs, ClusterOpts) of case process_update_request(ConfKeyPath, Handlers, UpdateArgs) of
{ok, NewRawConf, OverrideConf, Opts} -> {ok, NewRawConf, OverrideConf, Opts} ->
check_and_save_configs( check_and_save_configs(
SchemaModule, SchemaModule,
@ -276,24 +232,23 @@ do_handle_update_request(SchemaModule, ConfKeyPath, Handlers, UpdateArgs, Cluste
NewRawConf, NewRawConf,
OverrideConf, OverrideConf,
UpdateArgs, UpdateArgs,
Opts, Opts
ClusterOpts
); );
{error, Result} -> {error, Result} ->
{error, Result} {error, Result}
end. end.
process_update_request([_], _Handlers, {remove, _Opts}, _ClusterRpcOpts) -> process_update_request([_], _Handlers, {remove, _Opts}) ->
{error, "remove_root_is_forbidden"}; {error, "remove_root_is_forbidden"};
process_update_request(ConfKeyPath, _Handlers, {remove, Opts}, _ClusterRpcOpts) -> process_update_request(ConfKeyPath, _Handlers, {remove, Opts}) ->
OldRawConf = emqx_config:get_root_raw(ConfKeyPath), OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
BinKeyPath = bin_path(ConfKeyPath), BinKeyPath = bin_path(ConfKeyPath),
NewRawConf = emqx_utils_maps:deep_remove(BinKeyPath, OldRawConf), NewRawConf = emqx_utils_maps:deep_remove(BinKeyPath, OldRawConf),
OverrideConf = remove_from_override_config(BinKeyPath, Opts), OverrideConf = remove_from_override_config(BinKeyPath, Opts),
{ok, NewRawConf, OverrideConf, Opts}; {ok, NewRawConf, OverrideConf, Opts};
process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}, ClusterRpcOpts) -> process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
OldRawConf = emqx_config:get_root_raw(ConfKeyPath), OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
case do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, ClusterRpcOpts) of case do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) of
{ok, NewRawConf} -> {ok, NewRawConf} ->
OverrideConf = merge_to_override_config(NewRawConf, Opts), OverrideConf = merge_to_override_config(NewRawConf, Opts),
{ok, NewRawConf, OverrideConf, Opts}; {ok, NewRawConf, OverrideConf, Opts};
@ -301,16 +256,15 @@ process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}, Clust
Error Error
end. end.
do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, ClusterRpcOpts) -> do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) ->
do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, ClusterRpcOpts, []). do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, []).
do_update_config([], Handlers, OldRawConf, UpdateReq, ClusterRpcOpts, ConfKeyPath) -> do_update_config([], Handlers, OldRawConf, UpdateReq, ConfKeyPath) ->
call_pre_config_update(#{ call_pre_config_update(#{
handlers => Handlers, handlers => Handlers,
old_raw_conf => OldRawConf, old_raw_conf => OldRawConf,
update_req => UpdateReq, update_req => UpdateReq,
conf_key_path => ConfKeyPath, conf_key_path => ConfKeyPath,
cluster_rpc_opts => ClusterRpcOpts,
callback => pre_config_update, callback => pre_config_update,
is_propagated => false is_propagated => false
}); });
@ -319,18 +273,13 @@ do_update_config(
Handlers, Handlers,
OldRawConf, OldRawConf,
UpdateReq, UpdateReq,
ClusterRpcOpts,
ConfKeyPath0 ConfKeyPath0
) -> ) ->
ConfKeyPath = ConfKeyPath0 ++ [ConfKey], ConfKeyPath = ConfKeyPath0 ++ [ConfKey],
ConfKeyBin = bin(ConfKey), ConfKeyBin = bin(ConfKey),
SubOldRawConf = get_sub_config(ConfKeyBin, OldRawConf), SubOldRawConf = get_sub_config(ConfKeyBin, OldRawConf),
SubHandlers = get_sub_handlers(ConfKey, Handlers), SubHandlers = get_sub_handlers(ConfKey, Handlers),
case case do_update_config(SubConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq, ConfKeyPath) of
do_update_config(
SubConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq, ClusterRpcOpts, ConfKeyPath
)
of
{ok, NewUpdateReq} -> {ok, NewUpdateReq} ->
merge_to_old_config(#{ConfKeyBin => NewUpdateReq}, OldRawConf); merge_to_old_config(#{ConfKeyBin => NewUpdateReq}, OldRawConf);
Error -> Error ->
@ -344,18 +293,12 @@ check_and_save_configs(
NewRawConf, NewRawConf,
OverrideConf, OverrideConf,
UpdateArgs, UpdateArgs,
Opts, Opts
ClusterOpts
) -> ) ->
Schema = schema(SchemaModule, ConfKeyPath), Schema = schema(SchemaModule, ConfKeyPath),
Kind = maps:get(kind, ClusterOpts, ?KIND_INITIATE),
{AppEnvs, NewConf} = emqx_config:check_config(Schema, NewRawConf), {AppEnvs, NewConf} = emqx_config:check_config(Schema, NewRawConf),
OldConf = emqx_config:get_root(ConfKeyPath), OldConf = emqx_config:get_root(ConfKeyPath),
case case do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, #{}) of
do_post_config_update(
ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, ClusterOpts, #{}
)
of
{ok, Result0} -> {ok, Result0} ->
post_update_ok( post_update_ok(
AppEnvs, AppEnvs,
@ -367,24 +310,21 @@ check_and_save_configs(
UpdateArgs, UpdateArgs,
Result0 Result0
); );
{error, {post_config_update, HandlerName, Reason}} when Kind =/= ?KIND_INITIATE -> {error, {post_config_update, HandlerName, Reason}} ->
?SLOG(critical, #{ HandlePostFailureFun =
msg => "post_config_update_failed_but_save_the_config_anyway", fun() ->
handler => HandlerName, post_update_ok(
reason => Reason AppEnvs,
}), NewConf,
post_update_ok( NewRawConf,
AppEnvs, OverrideConf,
NewConf, Opts,
NewRawConf, ConfKeyPath,
OverrideConf, UpdateArgs,
Opts, #{}
ConfKeyPath, )
UpdateArgs, end,
#{} {error, {post_config_update, HandlerName, {Reason, HandlePostFailureFun}}}
);
{error, _} = Error ->
Error
end. end.
post_update_ok(AppEnvs, NewConf, NewRawConf, OverrideConf, Opts, ConfKeyPath, UpdateArgs, Result0) -> post_update_ok(AppEnvs, NewConf, NewRawConf, OverrideConf, Opts, ConfKeyPath, UpdateArgs, Result0) ->
@ -392,9 +332,7 @@ post_update_ok(AppEnvs, NewConf, NewRawConf, OverrideConf, Opts, ConfKeyPath, Up
Result1 = return_change_result(ConfKeyPath, UpdateArgs), Result1 = return_change_result(ConfKeyPath, UpdateArgs),
{ok, Result1#{post_config_update => Result0}}. {ok, Result1#{post_config_update => Result0}}.
do_post_config_update( do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) ->
ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, ClusterOpts, Result
) ->
do_post_config_update( do_post_config_update(
ConfKeyPath, ConfKeyPath,
Handlers, Handlers,
@ -402,7 +340,6 @@ do_post_config_update(
NewConf, NewConf,
AppEnvs, AppEnvs,
UpdateArgs, UpdateArgs,
ClusterOpts,
Result, Result,
[] []
). ).
@ -415,7 +352,6 @@ do_post_config_update(
AppEnvs, AppEnvs,
UpdateArgs, UpdateArgs,
Result, Result,
ClusterOpts,
ConfKeyPath ConfKeyPath
) -> ) ->
call_post_config_update(#{ call_post_config_update(#{
@ -426,7 +362,6 @@ do_post_config_update(
update_req => up_req(UpdateArgs), update_req => up_req(UpdateArgs),
result => Result, result => Result,
conf_key_path => ConfKeyPath, conf_key_path => ConfKeyPath,
cluster_rpc_opts => ClusterOpts,
callback => post_config_update callback => post_config_update
}); });
do_post_config_update( do_post_config_update(
@ -436,7 +371,6 @@ do_post_config_update(
NewConf, NewConf,
AppEnvs, AppEnvs,
UpdateArgs, UpdateArgs,
ClusterOpts,
Result, Result,
ConfKeyPath0 ConfKeyPath0
) -> ) ->
@ -451,7 +385,6 @@ do_post_config_update(
SubNewConf, SubNewConf,
AppEnvs, AppEnvs,
UpdateArgs, UpdateArgs,
ClusterOpts,
Result, Result,
ConfKeyPath ConfKeyPath
). ).
@ -495,41 +428,37 @@ call_proper_pre_config_update(
#{ #{
handlers := #{?MOD := Module}, handlers := #{?MOD := Module},
callback := Callback, callback := Callback,
update_req := UpdateReq update_req := UpdateReq,
old_raw_conf := OldRawConf
} = Ctx } = Ctx
) -> ) ->
Arity = get_function_arity(Module, Callback, [3, 4]), case erlang:function_exported(Module, Callback, 3) of
case apply_pre_config_update(Module, Callback, Arity, Ctx) of true ->
{ok, NewUpdateReq} -> case apply_pre_config_update(Module, Ctx) of
{ok, NewUpdateReq}; {ok, NewUpdateReq} ->
ok -> {ok, NewUpdateReq};
{ok, UpdateReq}; ok ->
{error, Reason} -> {ok, UpdateReq};
{error, {pre_config_update, Module, Reason}} {error, Reason} ->
{error, {pre_config_update, Module, Reason}}
end;
false ->
merge_to_old_config(UpdateReq, OldRawConf)
end; end;
call_proper_pre_config_update( call_proper_pre_config_update(
#{update_req := UpdateReq} #{update_req := UpdateReq}
) -> ) ->
{ok, UpdateReq}. {ok, UpdateReq}.
apply_pre_config_update(Module, Callback, 3, #{ apply_pre_config_update(Module, #{
conf_key_path := ConfKeyPath,
update_req := UpdateReq,
old_raw_conf := OldRawConf
}) ->
Module:Callback(ConfKeyPath, UpdateReq, OldRawConf);
apply_pre_config_update(Module, Callback, 4, #{
conf_key_path := ConfKeyPath, conf_key_path := ConfKeyPath,
update_req := UpdateReq, update_req := UpdateReq,
old_raw_conf := OldRawConf, old_raw_conf := OldRawConf,
cluster_rpc_opts := ClusterRpcOpts callback := Callback
}) -> }) ->
Module:Callback(ConfKeyPath, UpdateReq, OldRawConf, ClusterRpcOpts); Module:Callback(
apply_pre_config_update(_Module, _Callback, false, #{ ConfKeyPath, UpdateReq, OldRawConf
update_req := UpdateReq, ).
old_raw_conf := OldRawConf
}) ->
merge_to_old_config(UpdateReq, OldRawConf).
propagate_pre_config_updates_to_subconf( propagate_pre_config_updates_to_subconf(
#{handlers := #{?WKEY := _}} = Ctx #{handlers := #{?WKEY := _}} = Ctx
@ -631,23 +560,28 @@ call_proper_post_config_update(
result := Result result := Result
} = Ctx } = Ctx
) -> ) ->
Arity = get_function_arity(Module, Callback, [5, 6]), case erlang:function_exported(Module, Callback, 5) of
case apply_post_config_update(Module, Callback, Arity, Ctx) of true ->
ok -> {ok, Result}; case apply_post_config_update(Module, Ctx) of
{ok, Result1} -> {ok, Result#{Module => Result1}}; ok -> {ok, Result};
{error, Reason} -> {error, {post_config_update, Module, Reason}} {ok, Result1} -> {ok, Result#{Module => Result1}};
{error, Reason} -> {error, {post_config_update, Module, Reason}}
end;
false ->
{ok, Result}
end; end;
call_proper_post_config_update( call_proper_post_config_update(
#{result := Result} = _Ctx #{result := Result} = _Ctx
) -> ) ->
{ok, Result}. {ok, Result}.
apply_post_config_update(Module, Callback, 5, #{ apply_post_config_update(Module, #{
conf_key_path := ConfKeyPath, conf_key_path := ConfKeyPath,
update_req := UpdateReq, update_req := UpdateReq,
new_conf := NewConf, new_conf := NewConf,
old_conf := OldConf, old_conf := OldConf,
app_envs := AppEnvs app_envs := AppEnvs,
callback := Callback
}) -> }) ->
Module:Callback( Module:Callback(
ConfKeyPath, ConfKeyPath,
@ -655,25 +589,7 @@ apply_post_config_update(Module, Callback, 5, #{
NewConf, NewConf,
OldConf, OldConf,
AppEnvs AppEnvs
); ).
apply_post_config_update(Module, Callback, 6, #{
conf_key_path := ConfKeyPath,
update_req := UpdateReq,
cluster_rpc_opts := ClusterRpcOpts,
new_conf := NewConf,
old_conf := OldConf,
app_envs := AppEnvs
}) ->
Module:Callback(
ConfKeyPath,
UpdateReq,
NewConf,
OldConf,
AppEnvs,
ClusterRpcOpts
);
apply_post_config_update(_Module, _Callback, false, _Ctx) ->
ok.
propagate_post_config_updates_to_subconf( propagate_post_config_updates_to_subconf(
#{handlers := #{?WKEY := _}} = Ctx #{handlers := #{?WKEY := _}} = Ctx
@ -852,9 +768,7 @@ assert_callback_function(Mod) ->
_ = apply(Mod, module_info, []), _ = apply(Mod, module_info, []),
case case
erlang:function_exported(Mod, pre_config_update, 3) orelse erlang:function_exported(Mod, pre_config_update, 3) orelse
erlang:function_exported(Mod, post_config_update, 5) orelse erlang:function_exported(Mod, post_config_update, 5)
erlang:function_exported(Mod, pre_config_update, 4) orelse
erlang:function_exported(Mod, post_config_update, 6)
of of
true -> ok; true -> ok;
false -> error(#{msg => "bad_emqx_config_handler_callback", module => Mod}) false -> error(#{msg => "bad_emqx_config_handler_callback", module => Mod})
@ -897,13 +811,3 @@ load_prev_handlers() ->
save_handlers(Handlers) -> save_handlers(Handlers) ->
application:set_env(emqx, ?MODULE, Handlers). application:set_env(emqx, ?MODULE, Handlers).
get_function_arity(_Module, _Callback, []) ->
false;
get_function_arity(Module, Callback, [Arity | Opts]) ->
%% ensure module is loaded
Module = Module:module_info(module),
case erlang:function_exported(Module, Callback, Arity) of
true -> Arity;
false -> get_function_arity(Module, Callback, Opts)
end.

View File

@ -158,6 +158,31 @@
-define(ENABLED(X), (X =/= undefined)). -define(ENABLED(X), (X =/= undefined)).
-define(ALARM_TCP_CONGEST(Channel),
list_to_binary(
io_lib:format(
"mqtt_conn/congested/~ts/~ts",
[
emqx_channel:info(clientid, Channel),
emqx_channel:info(username, Channel)
]
)
)
).
-define(ALARM_CONN_INFO_KEYS, [
socktype,
sockname,
peername,
clientid,
username,
proto_name,
proto_ver,
connected_at
]).
-define(ALARM_SOCK_STATS_KEYS, [send_pend, recv_cnt, recv_oct, send_cnt, send_oct]).
-define(ALARM_SOCK_OPTS_KEYS, [high_watermark, high_msgq_watermark, sndbuf, recbuf, buffer]).
-define(LIMITER_BYTES_IN, bytes). -define(LIMITER_BYTES_IN, bytes).
-define(LIMITER_MESSAGE_IN, messages). -define(LIMITER_MESSAGE_IN, messages).
@ -173,9 +198,7 @@
system_code_change/4 system_code_change/4
]} ]}
). ).
-dialyzer({no_missing_calls, [handle_msg/2]}).
-ifndef(BUILD_WITHOUT_QUIC).
-spec start_link -spec start_link
(esockd:transport(), esockd:socket(), emqx_channel:opts()) -> (esockd:transport(), esockd:socket(), emqx_channel:opts()) ->
{ok, pid()}; {ok, pid()};
@ -185,9 +208,6 @@
emqx_quic_connection:cb_state() emqx_quic_connection:cb_state()
) -> ) ->
{ok, pid()}. {ok, pid()}.
-else.
-spec start_link(esockd:transport(), esockd:socket(), emqx_channel:opts()) -> {ok, pid()}.
-endif.
start_link(Transport, Socket, Options) -> start_link(Transport, Socket, Options) ->
Args = [self(), Transport, Socket, Options], Args = [self(), Transport, Socket, Options],
@ -310,13 +330,11 @@ init_state(
{ok, Peername} = Transport:ensure_ok_or_exit(peername, [Socket]), {ok, Peername} = Transport:ensure_ok_or_exit(peername, [Socket]),
{ok, Sockname} = Transport:ensure_ok_or_exit(sockname, [Socket]), {ok, Sockname} = Transport:ensure_ok_or_exit(sockname, [Socket]),
Peercert = Transport:ensure_ok_or_exit(peercert, [Socket]), Peercert = Transport:ensure_ok_or_exit(peercert, [Socket]),
PeerSNI = Transport:ensure_ok_or_exit(peersni, [Socket]),
ConnInfo = #{ ConnInfo = #{
socktype => Transport:type(Socket), socktype => Transport:type(Socket),
peername => Peername, peername => Peername,
sockname => Sockname, sockname => Sockname,
peercert => Peercert, peercert => Peercert,
peersni => PeerSNI,
conn_mod => ?MODULE conn_mod => ?MODULE
}, },
@ -328,7 +346,7 @@ init_state(
max_size => emqx_config:get_zone_conf(Zone, [mqtt, max_packet_size]) max_size => emqx_config:get_zone_conf(Zone, [mqtt, max_packet_size])
}, },
ParseState = emqx_frame:initial_parse_state(FrameOpts), ParseState = emqx_frame:initial_parse_state(FrameOpts),
Serialize = emqx_frame:initial_serialize_opts(FrameOpts), Serialize = emqx_frame:serialize_opts(),
%% Init Channel %% Init Channel
Channel = emqx_channel:init(ConnInfo, Opts), Channel = emqx_channel:init(ConnInfo, Opts),
GcState = GcState =
@ -473,17 +491,19 @@ cancel_stats_timer(State) ->
process_msg([], State) -> process_msg([], State) ->
{ok, State}; {ok, State};
process_msg([Msg | More], State) -> process_msg([Msg | More], State) ->
try handle_msg(Msg, State) of try
ok -> case handle_msg(Msg, State) of
process_msg(More, State); ok ->
{ok, NState} -> process_msg(More, State);
process_msg(More, NState); {ok, NState} ->
{ok, Msgs, NState} -> process_msg(More, NState);
process_msg(append_msg(More, Msgs), NState); {ok, Msgs, NState} ->
{stop, Reason, NState} -> process_msg(append_msg(More, Msgs), NState);
{stop, Reason, NState}; {stop, Reason, NState} ->
{stop, Reason} -> {stop, Reason, NState};
{stop, Reason, State} {stop, Reason} ->
{stop, Reason, State}
end
catch catch
exit:normal -> exit:normal ->
{stop, normal, State}; {stop, normal, State};
@ -569,10 +589,12 @@ handle_msg({Closed, _Sock}, State) when
handle_msg({Passive, _Sock}, State) when handle_msg({Passive, _Sock}, State) when
Passive == tcp_passive; Passive == ssl_passive; Passive =:= quic_passive Passive == tcp_passive; Passive == ssl_passive; Passive =:= quic_passive
-> ->
%% In Stats
Pubs = emqx_pd:reset_counter(incoming_pubs), Pubs = emqx_pd:reset_counter(incoming_pubs),
Bytes = emqx_pd:reset_counter(incoming_bytes), Bytes = emqx_pd:reset_counter(incoming_bytes),
InStats = #{cnt => Pubs, oct => Bytes},
%% Run GC and Check OOM %% Run GC and Check OOM
NState1 = check_oom(Pubs, Bytes, run_gc(Pubs, Bytes, State)), NState1 = check_oom(run_gc(InStats, State)),
handle_info(activate_socket, NState1); handle_info(activate_socket, NState1);
handle_msg( handle_msg(
Deliver = {deliver, _Topic, _Msg}, Deliver = {deliver, _Topic, _Msg},
@ -581,6 +603,17 @@ handle_msg(
ActiveN = get_active_n(Type, Listener), ActiveN = get_active_n(Type, Listener),
Delivers = [Deliver | emqx_utils:drain_deliver(ActiveN)], Delivers = [Deliver | emqx_utils:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State); with_channel(handle_deliver, [Delivers], State);
%% Something sent
handle_msg({inet_reply, _Sock, ok}, State = #state{listener = {Type, Listener}}) ->
case emqx_pd:get_counter(outgoing_pubs) > get_active_n(Type, Listener) of
true ->
Pubs = emqx_pd:reset_counter(outgoing_pubs),
Bytes = emqx_pd:reset_counter(outgoing_bytes),
OutStats = #{cnt => Pubs, oct => Bytes},
{ok, check_oom(run_gc(OutStats, State))};
false ->
ok
end;
handle_msg({inet_reply, _Sock, {error, Reason}}, State) -> handle_msg({inet_reply, _Sock, {error, Reason}}, State) ->
handle_info({sock_error, Reason}, State); handle_info({sock_error, Reason}, State);
handle_msg({connack, ConnAck}, State) -> handle_msg({connack, ConnAck}, State) ->
@ -696,9 +729,9 @@ handle_call(_From, Req, State = #state{channel = Channel}) ->
shutdown(Reason, Reply, State#state{channel = NChannel}); shutdown(Reason, Reply, State#state{channel = NChannel});
{shutdown, Reason, Reply, OutPacket, NChannel} -> {shutdown, Reason, Reply, OutPacket, NChannel} ->
NState = State#state{channel = NChannel}, NState = State#state{channel = NChannel},
{ok, NState2} = handle_outgoing(OutPacket, NState), ok = handle_outgoing(OutPacket, NState),
NState3 = graceful_shutdown_transport(Reason, NState2), NState2 = graceful_shutdown_transport(Reason, NState),
shutdown(Reason, Reply, NState3) shutdown(Reason, Reply, NState2)
end. end.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -732,7 +765,9 @@ handle_timeout(
disconnected -> disconnected ->
{ok, State}; {ok, State};
_ -> _ ->
with_channel(handle_timeout, [TRef, keepalive], State) %% recv_pkt: valid MQTT message
RecvCnt = emqx_pd:get_counter(recv_pkt),
handle_timeout(TRef, {keepalive, RecvCnt}, State)
end; end;
handle_timeout(TRef, Msg, State) -> handle_timeout(TRef, Msg, State) ->
with_channel(handle_timeout, [TRef, Msg], State). with_channel(handle_timeout, [TRef, Msg], State).
@ -783,8 +818,7 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
input_bytes => Data, input_bytes => Data,
parsed_packets => Packets parsed_packets => Packets
}), }),
NState = enrich_state(Reason, State), {[{frame_error, Reason} | Packets], State};
{[{frame_error, Reason} | Packets], NState};
error:Reason:Stacktrace -> error:Reason:Stacktrace ->
?LOG(error, #{ ?LOG(error, #{
at_state => emqx_frame:describe_state(ParseState), at_state => emqx_frame:describe_state(ParseState),
@ -820,8 +854,8 @@ with_channel(Fun, Args, State = #state{channel = Channel}) ->
shutdown(Reason, State#state{channel = NChannel}); shutdown(Reason, State#state{channel = NChannel});
{shutdown, Reason, Packet, NChannel} -> {shutdown, Reason, Packet, NChannel} ->
NState = State#state{channel = NChannel}, NState = State#state{channel = NChannel},
{ok, NState2} = handle_outgoing(Packet, NState), ok = handle_outgoing(Packet, NState),
shutdown(Reason, NState2) shutdown(Reason, NState)
end. end.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -875,35 +909,20 @@ serialize_and_inc_stats_fun(#state{serialize = Serialize}) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Send data %% Send data
-spec send(iodata(), state()) -> {ok, state()}. -spec send(iodata(), state()) -> ok.
send(IoData, #state{transport = Transport, socket = Socket} = State) -> send(IoData, #state{transport = Transport, socket = Socket, channel = Channel}) ->
Oct = iolist_size(IoData), Oct = iolist_size(IoData),
emqx_metrics:inc('bytes.sent', Oct), ok = emqx_metrics:inc('bytes.sent', Oct),
inc_counter(outgoing_bytes, Oct), inc_counter(outgoing_bytes, Oct),
case Transport:send(Socket, IoData) of emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
case Transport:async_send(Socket, IoData, []) of
ok -> ok ->
%% NOTE: for Transport=emqx_quic_stream, it's actually an ok;
%% async_send, sent/1 should technically be called when
%% {quic, send_complete, _Stream, true | false} is received,
%% but it is handled early for simplicity
sent(State);
Error = {error, _Reason} -> Error = {error, _Reason} ->
%% Defer error handling %% Send an inet_reply to postpone handling the error
%% so it's handled the same as tcp_closed or ssl_closed %% @FIXME: why not just return error?
self() ! {inet_reply, Socket, Error}, self() ! {inet_reply, Socket, Error},
{ok, State} ok
end.
%% Some bytes sent
sent(#state{listener = {Type, Listener}} = State) ->
%% Run GC and check OOM after certain amount of messages or bytes sent.
case emqx_pd:get_counter(outgoing_pubs) > get_active_n(Type, Listener) of
true ->
Pubs = emqx_pd:reset_counter(outgoing_pubs),
Bytes = emqx_pd:reset_counter(outgoing_bytes),
{ok, check_oom(Pubs, Bytes, run_gc(Pubs, Bytes, State))};
false ->
{ok, State}
end. end.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -994,23 +1013,17 @@ check_limiter(
Data, Data,
WhenOk, WhenOk,
Msgs, Msgs,
#state{channel = Channel, limiter_timer = undefined, limiter = Limiter} = State #state{limiter_timer = undefined, limiter = Limiter} = State
) -> ) ->
case emqx_limiter_container:check_list(Needs, Limiter) of case emqx_limiter_container:check_list(Needs, Limiter) of
{ok, Limiter2} -> {ok, Limiter2} ->
WhenOk(Data, Msgs, State#state{limiter = Limiter2}); WhenOk(Data, Msgs, State#state{limiter = Limiter2});
{pause, Time, Limiter2} -> {pause, Time, Limiter2} ->
?SLOG_THROTTLE( ?SLOG(debug, #{
warning, msg => "pause_time_due_to_rate_limit",
#{ needs => Needs,
msg => socket_receive_paused_by_rate_limit, time_in_ms => Time
paused_ms => Time }),
},
#{
tag => "RATE",
clientid => emqx_channel:info(clientid, Channel)
}
),
Retry = #retry{ Retry = #retry{
types = [Type || {_, Type} <- Needs], types = [Type || {_, Type} <- Needs],
@ -1044,7 +1057,7 @@ check_limiter(
%% try to perform a retry %% try to perform a retry
-spec retry_limiter(state()) -> _. -spec retry_limiter(state()) -> _.
retry_limiter(#state{channel = Channel, limiter = Limiter} = State) -> retry_limiter(#state{limiter = Limiter} = State) ->
#retry{types = Types, data = Data, next = Next} = #retry{types = Types, data = Data, next = Next} =
emqx_limiter_container:get_retry_context(Limiter), emqx_limiter_container:get_retry_context(Limiter),
case emqx_limiter_container:retry_list(Types, Limiter) of case emqx_limiter_container:retry_list(Types, Limiter) of
@ -1058,17 +1071,11 @@ retry_limiter(#state{channel = Channel, limiter = Limiter} = State) ->
} }
); );
{pause, Time, Limiter2} -> {pause, Time, Limiter2} ->
?SLOG_THROTTLE( ?SLOG(debug, #{
warning, msg => "pause_time_due_to_rate_limit",
#{ types => Types,
msg => socket_receive_paused_by_rate_limit, time_in_ms => Time
paused_ms => Time }),
},
#{
tag => "RATE",
clientid => emqx_channel:info(clientid, Channel)
}
),
TRef = start_timer(Time, limit_timeout), TRef = start_timer(Time, limit_timeout),
@ -1081,36 +1088,25 @@ retry_limiter(#state{channel = Channel, limiter = Limiter} = State) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Run GC and Check OOM %% Run GC and Check OOM
run_gc(Pubs, Bytes, State = #state{gc_state = GcSt, zone = Zone}) -> run_gc(Stats, State = #state{gc_state = GcSt, zone = Zone}) ->
case case
?ENABLED(GcSt) andalso not emqx_olp:backoff_gc(Zone) andalso ?ENABLED(GcSt) andalso not emqx_olp:backoff_gc(Zone) andalso
emqx_gc:run(Pubs, Bytes, GcSt) emqx_gc:run(Stats, GcSt)
of of
false -> State; false -> State;
{_IsGC, GcSt1} -> State#state{gc_state = GcSt1} {_IsGC, GcSt1} -> State#state{gc_state = GcSt1}
end. end.
check_oom(Pubs, Bytes, State = #state{channel = Channel}) -> check_oom(State = #state{channel = Channel}) ->
ShutdownPolicy = emqx_config:get_zone_conf( ShutdownPolicy = emqx_config:get_zone_conf(
emqx_channel:info(zone, Channel), [force_shutdown] emqx_channel:info(zone, Channel), [force_shutdown]
), ),
?tp(debug, check_oom, #{policy => ShutdownPolicy}),
case emqx_utils:check_oom(ShutdownPolicy) of case emqx_utils:check_oom(ShutdownPolicy) of
{shutdown, Reason} -> {shutdown, Reason} ->
%% triggers terminate/2 callback immediately %% triggers terminate/2 callback immediately
?tp(warning, check_oom_shutdown, #{
policy => ShutdownPolicy,
incoming_pubs => Pubs,
incoming_bytes => Bytes,
shutdown => Reason
}),
erlang:exit({shutdown, Reason}); erlang:exit({shutdown, Reason});
Result -> _ ->
?tp(debug, check_oom_ok, #{
policy => ShutdownPolicy,
incoming_pubs => Pubs,
incoming_bytes => Bytes,
result => Result
}),
ok ok
end, end,
State. State.
@ -1228,12 +1224,6 @@ inc_counter(Key, Inc) ->
_ = emqx_pd:inc_counter(Key, Inc), _ = emqx_pd:inc_counter(Key, Inc),
ok. ok.
enrich_state(#{parse_state := NParseState}, State) ->
Serialize = emqx_frame:serialize_opts(NParseState),
State#state{parse_state = NParseState, serialize = Serialize};
enrich_state(_, State) ->
State.
set_tcp_keepalive({quic, _Listener}) -> set_tcp_keepalive({quic, _Listener}) ->
ok; ok;
set_tcp_keepalive({Type, Id}) -> set_tcp_keepalive({Type, Id}) ->

View File

@ -18,7 +18,7 @@
-module(emqx_ds_schema). -module(emqx_ds_schema).
%% API: %% API:
-export([schema/0, translate_builtin_raft/1, translate_builtin_local/1]). -export([schema/0, translate_builtin/1]).
%% Behavior callbacks: %% Behavior callbacks:
-export([fields/1, desc/1, namespace/0]). -export([fields/1, desc/1, namespace/0]).
@ -32,51 +32,37 @@
%% Type declarations %% Type declarations
%%================================================================================ %%================================================================================
-ifndef(EMQX_RELEASE_EDITION).
-define(EMQX_RELEASE_EDITION, ce).
-endif.
-if(?EMQX_RELEASE_EDITION == ee).
-define(DEFAULT_BACKEND, builtin_raft).
-define(BUILTIN_BACKENDS, [ref(builtin_raft), ref(builtin_local)]).
-else.
-define(DEFAULT_BACKEND, builtin_local).
-define(BUILTIN_BACKENDS, [ref(builtin_local)]).
-endif.
%%================================================================================ %%================================================================================
%% API %% API
%%================================================================================ %%================================================================================
translate_builtin_raft( translate_builtin(#{
Backend = #{ backend := builtin,
backend := builtin_raft, n_shards := NShards,
n_shards := NShards, replication_factor := ReplFactor,
n_sites := NSites, layout := Layout
replication_factor := ReplFactor, }) ->
layout := Layout Storage =
} case Layout of
) -> #{
type := wildcard_optimized,
bits_per_topic_level := BitsPerTopicLevel,
epoch_bits := EpochBits,
topic_index_bytes := TIBytes
} ->
{emqx_ds_storage_bitfield_lts, #{
bits_per_topic_level => BitsPerTopicLevel,
topic_index_bytes => TIBytes,
epoch_bits => EpochBits
}};
#{type := reference} ->
{emqx_ds_storage_reference, #{}}
end,
#{ #{
backend => builtin_raft, backend => builtin,
n_shards => NShards, n_shards => NShards,
n_sites => NSites,
replication_factor => ReplFactor, replication_factor => ReplFactor,
replication_options => maps:get(replication_options, Backend, #{}), storage => Storage
storage => translate_layout(Layout)
}.
translate_builtin_local(
#{
backend := builtin_local,
n_shards := NShards,
layout := Layout
}
) ->
#{
backend => builtin_local,
n_shards => NShards,
storage => translate_layout(Layout)
}. }.
%%================================================================================ %%================================================================================
@ -92,24 +78,24 @@ schema() ->
ds_schema(#{ ds_schema(#{
default => default =>
#{ #{
<<"backend">> => ?DEFAULT_BACKEND <<"backend">> => builtin
}, },
importance => ?IMPORTANCE_MEDIUM, importance => ?IMPORTANCE_MEDIUM,
desc => ?DESC(messages) desc => ?DESC(messages)
})} })}
]. ].
fields(builtin_local) -> fields(builtin) ->
%% Schema for the builtin_raft backend: %% Schema for the builtin backend:
[ [
{backend, {backend,
sc( sc(
builtin_local, builtin,
#{ #{
'readOnly' => true, 'readOnly' => true,
default => builtin_local, default => builtin,
importance => ?IMPORTANCE_MEDIUM, importance => ?IMPORTANCE_MEDIUM,
desc => ?DESC(backend_type) desc => ?DESC(builtin_backend)
} }
)}, )},
{'_config_handler', {'_config_handler',
@ -117,42 +103,27 @@ fields(builtin_local) ->
{module(), atom()}, {module(), atom()},
#{ #{
'readOnly' => true, 'readOnly' => true,
default => {?MODULE, translate_builtin_local}, default => {?MODULE, translate_builtin},
importance => ?IMPORTANCE_HIDDEN importance => ?IMPORTANCE_HIDDEN
} }
)} )},
| common_builtin_fields() {data_dir,
];
fields(builtin_raft) ->
%% Schema for the builtin_raft backend:
[
{backend,
sc( sc(
builtin_raft, string(),
#{ #{
'readOnly' => true, mapping => "emqx_durable_storage.db_data_dir",
default => builtin_raft, required => false,
importance => ?IMPORTANCE_MEDIUM, importance => ?IMPORTANCE_MEDIUM,
desc => ?DESC(backend_type) desc => ?DESC(builtin_data_dir)
} }
)}, )},
{'_config_handler', {n_shards,
sc(
{module(), atom()},
#{
'readOnly' => true,
default => {?MODULE, translate_builtin_raft},
importance => ?IMPORTANCE_HIDDEN
}
)},
%% TODO: Deprecate once cluster management and rebalancing is implemented.
{"n_sites",
sc( sc(
pos_integer(), pos_integer(),
#{ #{
default => 1, default => 16,
importance => ?IMPORTANCE_HIDDEN, importance => ?IMPORTANCE_MEDIUM,
desc => ?DESC(builtin_n_sites) desc => ?DESC(builtin_n_shards)
} }
)}, )},
{replication_factor, {replication_factor,
@ -163,18 +134,28 @@ fields(builtin_raft) ->
importance => ?IMPORTANCE_HIDDEN importance => ?IMPORTANCE_HIDDEN
} }
)}, )},
%% TODO: Elaborate. {local_write_buffer,
{"replication_options",
sc( sc(
hoconsc:map(name, any()), ref(builtin_local_write_buffer),
#{ #{
default => #{}, importance => ?IMPORTANCE_HIDDEN,
importance => ?IMPORTANCE_HIDDEN desc => ?DESC(builtin_local_write_buffer)
}
)},
{layout,
sc(
hoconsc:union(builtin_layouts()),
#{
desc => ?DESC(builtin_layout),
importance => ?IMPORTANCE_MEDIUM,
default =>
#{
<<"type">> => wildcard_optimized
}
} }
)} )}
| common_builtin_fields()
]; ];
fields(builtin_write_buffer) -> fields(builtin_local_write_buffer) ->
[ [
{max_items, {max_items,
sc( sc(
@ -183,7 +164,7 @@ fields(builtin_write_buffer) ->
default => 1000, default => 1000,
mapping => "emqx_durable_storage.egress_batch_size", mapping => "emqx_durable_storage.egress_batch_size",
importance => ?IMPORTANCE_HIDDEN, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(builtin_write_buffer_max_items) desc => ?DESC(builtin_local_write_buffer_max_items)
} }
)}, )},
{flush_interval, {flush_interval,
@ -193,7 +174,7 @@ fields(builtin_write_buffer) ->
default => 100, default => 100,
mapping => "emqx_durable_storage.egress_flush_interval", mapping => "emqx_durable_storage.egress_flush_interval",
importance => ?IMPORTANCE_HIDDEN, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(builtin_write_buffer_flush_interval) desc => ?DESC(builtin_local_write_buffer_flush_interval)
} }
)} )}
]; ];
@ -220,7 +201,7 @@ fields(layout_builtin_wildcard_optimized) ->
sc( sc(
range(0, 64), range(0, 64),
#{ #{
default => 20, default => 10,
importance => ?IMPORTANCE_HIDDEN, importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(wildcard_optimized_epoch_bits) desc => ?DESC(wildcard_optimized_epoch_bits)
} }
@ -234,42 +215,6 @@ fields(layout_builtin_wildcard_optimized) ->
} }
)} )}
]; ];
fields(layout_builtin_wildcard_optimized_v2) ->
[
{type,
sc(
wildcard_optimized_v2,
#{
'readOnly' => true,
default => wildcard_optimized_v2,
desc => ?DESC(layout_builtin_wildcard_optimized_type)
}
)},
{bytes_per_topic_level,
sc(
range(1, 16),
#{
default => 8,
importance => ?IMPORTANCE_HIDDEN
}
)},
{topic_index_bytes,
sc(
pos_integer(),
#{
default => 8,
importance => ?IMPORTANCE_HIDDEN
}
)},
{serialization_schema,
sc(
emqx_ds_msg_serializer:schema(),
#{
default => v1,
importance => ?IMPORTANCE_HIDDEN
}
)}
];
fields(layout_builtin_reference) -> fields(layout_builtin_reference) ->
[ [
{type, {type,
@ -277,68 +222,17 @@ fields(layout_builtin_reference) ->
reference, reference,
#{ #{
'readOnly' => true, 'readOnly' => true,
importance => ?IMPORTANCE_LOW, importance => ?IMPORTANCE_HIDDEN
default => reference,
desc => ?DESC(layout_builtin_reference_type)
} }
)} )}
]. ].
common_builtin_fields() -> desc(builtin) ->
[ ?DESC(builtin);
{data_dir, desc(builtin_local_write_buffer) ->
sc( ?DESC(builtin_local_write_buffer);
string(),
#{
mapping => "emqx_durable_storage.db_data_dir",
required => false,
importance => ?IMPORTANCE_MEDIUM,
desc => ?DESC(builtin_data_dir)
}
)},
{n_shards,
sc(
pos_integer(),
#{
default => 16,
importance => ?IMPORTANCE_MEDIUM,
desc => ?DESC(builtin_n_shards)
}
)},
{local_write_buffer,
sc(
ref(builtin_write_buffer),
#{
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(builtin_write_buffer)
}
)},
{layout,
sc(
hoconsc:union(builtin_layouts()),
#{
desc => ?DESC(builtin_layout),
importance => ?IMPORTANCE_MEDIUM,
default =>
#{
<<"type">> => wildcard_optimized_v2
}
}
)}
].
desc(builtin_raft) ->
?DESC(builtin_raft);
desc(builtin_local) ->
?DESC(builtin_local);
desc(builtin_write_buffer) ->
?DESC(builtin_write_buffer);
desc(layout_builtin_wildcard_optimized) -> desc(layout_builtin_wildcard_optimized) ->
?DESC(layout_builtin_wildcard_optimized); ?DESC(layout_builtin_wildcard_optimized);
desc(layout_builtin_wildcard_optimized_v2) ->
?DESC(layout_builtin_wildcard_optimized);
desc(layout_builtin_reference) ->
?DESC(layout_builtin_reference);
desc(_) -> desc(_) ->
undefined. undefined.
@ -346,53 +240,26 @@ desc(_) ->
%% Internal functions %% Internal functions
%%================================================================================ %%================================================================================
translate_layout(
#{
type := wildcard_optimized_v2,
bytes_per_topic_level := BytesPerTopicLevel,
topic_index_bytes := TopicIndexBytes,
serialization_schema := SSchema
}
) ->
{emqx_ds_storage_skipstream_lts, #{
wildcard_hash_bytes => BytesPerTopicLevel,
topic_index_bytes => TopicIndexBytes,
serialization_schema => SSchema
}};
translate_layout(
#{
type := wildcard_optimized,
bits_per_topic_level := BitsPerTopicLevel,
epoch_bits := EpochBits,
topic_index_bytes := TIBytes
}
) ->
{emqx_ds_storage_bitfield_lts, #{
bits_per_topic_level => BitsPerTopicLevel,
topic_index_bytes => TIBytes,
epoch_bits => EpochBits
}};
translate_layout(#{type := reference}) ->
{emqx_ds_storage_reference, #{}}.
ds_schema(Options) -> ds_schema(Options) ->
sc( sc(
hoconsc:union( hoconsc:union([
?BUILTIN_BACKENDS ++ emqx_schema_hooks:injection_point('durable_storage.backends', []) ref(builtin)
), | emqx_schema_hooks:injection_point('durable_storage.backends', [])
]),
Options Options
). ).
-ifndef(TEST).
builtin_layouts() ->
[ref(layout_builtin_wildcard_optimized)].
-else.
builtin_layouts() -> builtin_layouts() ->
%% Reference layout stores everything in one stream, so it's not %% Reference layout stores everything in one stream, so it's not
%% suitable for production use. However, it's very simple and %% suitable for production use. However, it's very simple and
%% produces a very predictabale replay order, which can be useful %% produces a very predictabale replay order, which can be useful
%% for testing and debugging: %% for testing and debugging:
[ [ref(layout_builtin_wildcard_optimized), ref(layout_builtin_reference)].
ref(layout_builtin_wildcard_optimized_v2), -endif.
ref(layout_builtin_wildcard_optimized),
ref(layout_builtin_reference)
].
sc(Type, Meta) -> hoconsc:mk(Type, Meta). sc(Type, Meta) -> hoconsc:mk(Type, Meta).

View File

@ -117,13 +117,6 @@ try_subscribe(ClientId, Topic) ->
write write
), ),
allow; allow;
[#exclusive_subscription{clientid = ClientId, topic = Topic}] ->
%% Fixed the issue-13476
%% In this feature, the user must manually call `unsubscribe` to release the lock,
%% but sometimes the node may go down for some reason,
%% then the client will reconnect to this node and resubscribe.
%% We need to allow resubscription, otherwise the lock will never be released.
allow;
[_] -> [_] ->
deny deny
end. end.

View File

@ -1,148 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_external_broker).
-callback forward(emqx_types:delivery()) ->
emqx_types:publish_result().
-callback add_route(emqx_types:topic()) -> ok.
-callback delete_route(emqx_types:topic()) -> ok.
-callback add_shared_route(emqx_types:topic(), emqx_types:group()) -> ok.
-callback delete_shared_route(emqx_types:topic(), emqx_types:group()) -> ok.
-callback add_persistent_route(emqx_types:topic(), emqx_persistent_session_ds:id()) -> ok.
-callback delete_persistent_route(emqx_types:topic(), emqx_persistent_session_ds:id()) -> ok.
-type dest() :: term().
-export([
%% Registration
provider/0,
register_provider/1,
unregister_provider/1,
%% Forwarding
forward/1,
%% Routing updates
add_route/1,
delete_route/1,
add_shared_route/2,
delete_shared_route/2,
add_persistent_route/2,
delete_persistent_route/2,
add_persistent_shared_route/3,
delete_persistent_shared_route/3
]).
-export_type([dest/0]).
-include("logger.hrl").
-define(PROVIDER, {?MODULE, external_broker}).
-define(safe_with_provider(IfRegistered, IfNotRegistered),
case persistent_term:get(?PROVIDER, undefined) of
undefined ->
IfNotRegistered;
Provider ->
try
Provider:IfRegistered
catch
Err:Reason:St ->
?SLOG_THROTTLE(error, #{
msg => external_broker_crashed,
provider => Provider,
callback => ?FUNCTION_NAME,
stacktrace => St,
error => Err,
reason => Reason
}),
{error, Reason}
end
end
).
%% TODO: provider API copied from emqx_external_traces,
%% but it can be moved to a common module.
%%--------------------------------------------------------------------
%% Provider API
%%--------------------------------------------------------------------
-spec register_provider(module()) -> ok | {error, term()}.
register_provider(Module) when is_atom(Module) ->
case is_valid_provider(Module) of
true ->
persistent_term:put(?PROVIDER, Module);
false ->
{error, invalid_provider}
end.
-spec unregister_provider(module()) -> ok | {error, term()}.
unregister_provider(Module) ->
case persistent_term:get(?PROVIDER, undefined) of
Module ->
persistent_term:erase(?PROVIDER),
ok;
_ ->
{error, not_registered}
end.
-spec provider() -> module() | undefined.
provider() ->
persistent_term:get(?PROVIDER, undefined).
%%--------------------------------------------------------------------
%% Broker API
%%--------------------------------------------------------------------
forward(Delivery) ->
?safe_with_provider(?FUNCTION_NAME(Delivery), []).
add_route(Topic) ->
?safe_with_provider(?FUNCTION_NAME(Topic), ok).
delete_route(Topic) ->
?safe_with_provider(?FUNCTION_NAME(Topic), ok).
add_shared_route(Topic, Group) ->
?safe_with_provider(?FUNCTION_NAME(Topic, Group), ok).
delete_shared_route(Topic, Group) ->
?safe_with_provider(?FUNCTION_NAME(Topic, Group), ok).
add_persistent_route(Topic, ID) ->
?safe_with_provider(?FUNCTION_NAME(Topic, ID), ok).
delete_persistent_route(Topic, ID) ->
?safe_with_provider(?FUNCTION_NAME(Topic, ID), ok).
add_persistent_shared_route(Topic, Group, ID) ->
?safe_with_provider(?FUNCTION_NAME(Topic, Group, ID), ok).
delete_persistent_shared_route(Topic, Group, ID) ->
?safe_with_provider(?FUNCTION_NAME(Topic, Group, ID), ok).
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
is_valid_provider(Module) ->
lists:all(
fun({F, A}) -> erlang:function_exported(Module, F, A) end,
?MODULE:behaviour_info(callbacks)
).

View File

@ -29,12 +29,11 @@
parse/2, parse/2,
serialize_fun/0, serialize_fun/0,
serialize_fun/1, serialize_fun/1,
initial_serialize_opts/1, serialize_opts/0,
serialize_opts/1, serialize_opts/1,
serialize_pkt/2, serialize_pkt/2,
serialize/1, serialize/1,
serialize/2, serialize/2
serialize/3
]). ]).
-export([describe_state/1]). -export([describe_state/1]).
@ -85,7 +84,7 @@
-define(MULTIPLIER_MAX, 16#200000). -define(MULTIPLIER_MAX, 16#200000).
-dialyzer({no_match, [serialize_utf8_string/3]}). -dialyzer({no_match, [serialize_utf8_string/2]}).
%% @doc Describe state for logging. %% @doc Describe state for logging.
describe_state(?NONE(_Opts)) -> describe_state(?NONE(_Opts)) ->
@ -267,76 +266,47 @@ packet(Header, Variable) ->
packet(Header, Variable, Payload) -> packet(Header, Variable, Payload) ->
#mqtt_packet{header = Header, variable = Variable, payload = Payload}. #mqtt_packet{header = Header, variable = Variable, payload = Payload}.
parse_connect(FrameBin, Options = #{strict_mode := StrictMode}) -> parse_connect(FrameBin, StrictMode) ->
{ProtoName, Rest0} = parse_utf8_string_with_cause(FrameBin, StrictMode, invalid_proto_name), {ProtoName, Rest} = parse_utf8_string_with_cause(FrameBin, StrictMode, invalid_proto_name),
%% No need to parse and check proto_ver if proto_name is invalid, check it first case ProtoName of
%% And the matching check of `proto_name` and `proto_ver` fields will be done in `emqx_packet:check_proto_ver/2` <<"MQTT">> ->
_ = validate_proto_name(ProtoName), ok;
{IsBridge, ProtoVer, Rest2} = parse_connect_proto_ver(Rest0), <<"MQIsdp">> ->
NOptions = Options#{version => ProtoVer}, ok;
try _ ->
do_parse_connect(ProtoName, IsBridge, ProtoVer, Rest2, StrictMode) %% from spec: the server MAY send disconnect with reason code 0x84
catch %% we chose to close socket because the client is likely not talking MQTT anyway
throw:{?FRAME_PARSE_ERROR, ReasonM} when is_map(ReasonM) -> ?PARSE_ERR(#{
?PARSE_ERR( cause => invalid_proto_name,
ReasonM#{ expected => <<"'MQTT' or 'MQIsdp'">>,
proto_ver => ProtoVer, received => ProtoName
proto_name => ProtoName, })
parse_state => ?NONE(NOptions) end,
} parse_connect2(ProtoName, Rest, StrictMode).
);
throw:{?FRAME_PARSE_ERROR, Reason} ->
?PARSE_ERR(
#{
cause => Reason,
proto_ver => ProtoVer,
proto_name => ProtoName,
parse_state => ?NONE(NOptions)
}
)
end.
do_parse_connect( % Note: return malformed if reserved flag is not 0.
parse_connect2(
ProtoName, ProtoName,
IsBridge, <<BridgeTag:4, ProtoVer:4, UsernameFlag:1, PasswordFlag:1, WillRetain:1, WillQoS:2, WillFlag:1,
ProtoVer, CleanStart:1, Reserved:1, KeepAlive:16/big, Rest2/binary>>,
<<
UsernameFlagB:1,
PasswordFlagB:1,
WillRetainB:1,
WillQoS:2,
WillFlagB:1,
CleanStart:1,
Reserved:1,
KeepAlive:16/big,
Rest/binary
>>,
StrictMode StrictMode
) -> ) ->
_ = validate_connect_reserved(Reserved), case Reserved of
_ = validate_connect_will( 0 -> ok;
WillFlag = bool(WillFlagB), 1 -> ?PARSE_ERR(reserved_connect_flag)
WillRetain = bool(WillRetainB), end,
WillQoS {Properties, Rest3} = parse_properties(Rest2, ProtoVer, StrictMode),
),
_ = validate_connect_password_flag(
StrictMode,
ProtoVer,
UsernameFlag = bool(UsernameFlagB),
PasswordFlag = bool(PasswordFlagB)
),
{Properties, Rest3} = parse_properties(Rest, ProtoVer, StrictMode),
{ClientId, Rest4} = parse_utf8_string_with_cause(Rest3, StrictMode, invalid_clientid), {ClientId, Rest4} = parse_utf8_string_with_cause(Rest3, StrictMode, invalid_clientid),
ConnPacket = #mqtt_packet_connect{ ConnPacket = #mqtt_packet_connect{
proto_name = ProtoName, proto_name = ProtoName,
proto_ver = ProtoVer, proto_ver = ProtoVer,
%% For bridge mode, non-standard implementation %% For bridge mode, non-standard implementation
%% Invented by mosquitto, named 'try_private': https://mosquitto.org/man/mosquitto-conf-5.html %% Invented by mosquitto, named 'try_private': https://mosquitto.org/man/mosquitto-conf-5.html
is_bridge = IsBridge, is_bridge = (BridgeTag =:= 8),
clean_start = bool(CleanStart), clean_start = bool(CleanStart),
will_flag = WillFlag, will_flag = bool(WillFlag),
will_qos = WillQoS, will_qos = WillQoS,
will_retain = WillRetain, will_retain = bool(WillRetain),
keepalive = KeepAlive, keepalive = KeepAlive,
properties = Properties, properties = Properties,
clientid = ClientId clientid = ClientId
@ -347,14 +317,14 @@ do_parse_connect(
fun(Bin) -> fun(Bin) ->
parse_utf8_string_with_cause(Bin, StrictMode, invalid_username) parse_utf8_string_with_cause(Bin, StrictMode, invalid_username)
end, end,
UsernameFlag bool(UsernameFlag)
), ),
{Password, Rest7} = parse_optional( {Password, Rest7} = parse_optional(
Rest6, Rest6,
fun(Bin) -> fun(Bin) ->
parse_utf8_string_with_cause(Bin, StrictMode, invalid_password) parse_utf8_string_with_cause(Bin, StrictMode, invalid_password)
end, end,
PasswordFlag bool(PasswordFlag)
), ),
case Rest7 of case Rest7 of
<<>> -> <<>> ->
@ -365,16 +335,16 @@ do_parse_connect(
unexpected_trailing_bytes => size(Rest7) unexpected_trailing_bytes => size(Rest7)
}) })
end; end;
do_parse_connect(_ProtoName, _IsBridge, _ProtoVer, Bin, _StrictMode) -> parse_connect2(_ProtoName, Bin, _StrictMode) ->
%% sent less than 24 bytes %% sent less than 32 bytes
?PARSE_ERR(#{cause => malformed_connect, header_bytes => Bin}). ?PARSE_ERR(#{cause => malformed_connect, header_bytes => Bin}).
parse_packet( parse_packet(
#mqtt_packet_header{type = ?CONNECT}, #mqtt_packet_header{type = ?CONNECT},
FrameBin, FrameBin,
Options #{strict_mode := StrictMode}
) -> ) ->
parse_connect(FrameBin, Options); parse_connect(FrameBin, StrictMode);
parse_packet( parse_packet(
#mqtt_packet_header{type = ?CONNACK}, #mqtt_packet_header{type = ?CONNACK},
<<AckFlags:8, ReasonCode:8, Rest/binary>>, <<AckFlags:8, ReasonCode:8, Rest/binary>>,
@ -538,12 +508,6 @@ parse_packet_id(<<PacketId:16/big, Rest/binary>>) ->
parse_packet_id(_) -> parse_packet_id(_) ->
?PARSE_ERR(invalid_packet_id). ?PARSE_ERR(invalid_packet_id).
parse_connect_proto_ver(<<BridgeTag:4, ProtoVer:4, Rest/binary>>) ->
{_IsBridge = (BridgeTag =:= 8), ProtoVer, Rest};
parse_connect_proto_ver(Bin) ->
%% sent less than 1 bytes or empty
?PARSE_ERR(#{cause => malformed_connect, header_bytes => Bin}).
parse_properties(Bin, Ver, _StrictMode) when Ver =/= ?MQTT_PROTO_V5 -> parse_properties(Bin, Ver, _StrictMode) when Ver =/= ?MQTT_PROTO_V5 ->
{#{}, Bin}; {#{}, Bin};
%% TODO: version mess? %% TODO: version mess?
@ -754,53 +718,43 @@ serialize_fun() -> serialize_fun(?DEFAULT_OPTIONS).
serialize_fun(#mqtt_packet_connect{proto_ver = ProtoVer, properties = ConnProps}) -> serialize_fun(#mqtt_packet_connect{proto_ver = ProtoVer, properties = ConnProps}) ->
MaxSize = get_property('Maximum-Packet-Size', ConnProps, ?MAX_PACKET_SIZE), MaxSize = get_property('Maximum-Packet-Size', ConnProps, ?MAX_PACKET_SIZE),
serialize_fun(#{version => ProtoVer, max_size => MaxSize, strict_mode => false}); serialize_fun(#{version => ProtoVer, max_size => MaxSize});
serialize_fun(#{version := Ver, max_size := MaxSize, strict_mode := StrictMode}) -> serialize_fun(#{version := Ver, max_size := MaxSize}) ->
fun(Packet) -> fun(Packet) ->
IoData = serialize(Packet, Ver, StrictMode), IoData = serialize(Packet, Ver),
case is_too_large(IoData, MaxSize) of case is_too_large(IoData, MaxSize) of
true -> <<>>; true -> <<>>;
false -> IoData false -> IoData
end end
end. end.
initial_serialize_opts(Opts) -> serialize_opts() ->
maps:merge(?DEFAULT_OPTIONS, Opts). ?DEFAULT_OPTIONS.
serialize_opts(?NONE(Options)) ->
maps:merge(?DEFAULT_OPTIONS, Options);
serialize_opts(#mqtt_packet_connect{proto_ver = ProtoVer, properties = ConnProps}) -> serialize_opts(#mqtt_packet_connect{proto_ver = ProtoVer, properties = ConnProps}) ->
MaxSize = get_property('Maximum-Packet-Size', ConnProps, ?MAX_PACKET_SIZE), MaxSize = get_property('Maximum-Packet-Size', ConnProps, ?MAX_PACKET_SIZE),
#{version => ProtoVer, max_size => MaxSize, strict_mode => false}. #{version => ProtoVer, max_size => MaxSize}.
serialize_pkt(Packet, #{version := Ver, max_size := MaxSize, strict_mode := StrictMode}) -> serialize_pkt(Packet, #{version := Ver, max_size := MaxSize}) ->
IoData = serialize(Packet, Ver, StrictMode), IoData = serialize(Packet, Ver),
case is_too_large(IoData, MaxSize) of case is_too_large(IoData, MaxSize) of
true -> <<>>; true -> <<>>;
false -> IoData false -> IoData
end. end.
-spec serialize(emqx_types:packet()) -> iodata(). -spec serialize(emqx_types:packet()) -> iodata().
serialize(Packet) -> serialize(Packet, ?MQTT_PROTO_V4, false). serialize(Packet) -> serialize(Packet, ?MQTT_PROTO_V4).
serialize(Packet, Ver) -> serialize(Packet, Ver, false). -spec serialize(emqx_types:packet(), emqx_types:proto_ver()) -> iodata().
-spec serialize(emqx_types:packet(), emqx_types:proto_ver(), boolean()) -> iodata().
serialize( serialize(
#mqtt_packet{ #mqtt_packet{
header = Header, header = Header,
variable = Variable, variable = Variable,
payload = Payload payload = Payload
}, },
Ver, Ver
StrictMode
) -> ) ->
serialize( serialize(Header, serialize_variable(Variable, Ver), serialize_payload(Payload)).
Header,
serialize_variable(Variable, Ver, StrictMode),
serialize_payload(Payload),
StrictMode
).
serialize( serialize(
#mqtt_packet_header{ #mqtt_packet_header{
@ -810,8 +764,7 @@ serialize(
retain = Retain retain = Retain
}, },
VariableBin, VariableBin,
PayloadBin, PayloadBin
_StrictMode
) when ) when
?CONNECT =< Type andalso Type =< ?AUTH ?CONNECT =< Type andalso Type =< ?AUTH
-> ->
@ -843,8 +796,7 @@ serialize_variable(
username = Username, username = Username,
password = Password password = Password
}, },
_Ver, _Ver
StrictMode
) -> ) ->
[ [
serialize_binary_data(ProtoName), serialize_binary_data(ProtoName),
@ -862,20 +814,20 @@ serialize_variable(
0:1, 0:1,
KeepAlive:16/big-unsigned-integer KeepAlive:16/big-unsigned-integer
>>, >>,
serialize_properties(Properties, ProtoVer, StrictMode), serialize_properties(Properties, ProtoVer),
serialize_utf8_string(ClientId, StrictMode), serialize_utf8_string(ClientId),
case WillFlag of case WillFlag of
true -> true ->
[ [
serialize_properties(WillProps, ProtoVer, StrictMode), serialize_properties(WillProps, ProtoVer),
serialize_utf8_string(WillTopic, StrictMode), serialize_utf8_string(WillTopic),
serialize_binary_data(WillPayload) serialize_binary_data(WillPayload)
]; ];
false -> false ->
<<>> <<>>
end, end,
serialize_utf8_string(Username, true, StrictMode), serialize_utf8_string(Username, true),
serialize_utf8_string(Password, true, StrictMode) serialize_utf8_string(Password, true)
]; ];
serialize_variable( serialize_variable(
#mqtt_packet_connack{ #mqtt_packet_connack{
@ -883,28 +835,26 @@ serialize_variable(
reason_code = ReasonCode, reason_code = ReasonCode,
properties = Properties properties = Properties
}, },
Ver, Ver
StrictMode
) -> ) ->
[AckFlags, ReasonCode, serialize_properties(Properties, Ver, StrictMode)]; [AckFlags, ReasonCode, serialize_properties(Properties, Ver)];
serialize_variable( serialize_variable(
#mqtt_packet_publish{ #mqtt_packet_publish{
topic_name = TopicName, topic_name = TopicName,
packet_id = PacketId, packet_id = PacketId,
properties = Properties properties = Properties
}, },
Ver, Ver
StrictMode
) -> ) ->
[ [
serialize_utf8_string(TopicName, StrictMode), serialize_utf8_string(TopicName),
case PacketId of case PacketId of
undefined -> <<>>; undefined -> <<>>;
_ -> <<PacketId:16/big-unsigned-integer>> _ -> <<PacketId:16/big-unsigned-integer>>
end, end,
serialize_properties(Properties, Ver, StrictMode) serialize_properties(Properties, Ver)
]; ];
serialize_variable(#mqtt_packet_puback{packet_id = PacketId}, Ver, _StrictMode) when serialize_variable(#mqtt_packet_puback{packet_id = PacketId}, Ver) when
Ver == ?MQTT_PROTO_V3; Ver == ?MQTT_PROTO_V4 Ver == ?MQTT_PROTO_V3; Ver == ?MQTT_PROTO_V4
-> ->
<<PacketId:16/big-unsigned-integer>>; <<PacketId:16/big-unsigned-integer>>;
@ -914,13 +864,12 @@ serialize_variable(
reason_code = ReasonCode, reason_code = ReasonCode,
properties = Properties properties = Properties
}, },
Ver = ?MQTT_PROTO_V5, Ver = ?MQTT_PROTO_V5
StrictMode
) -> ) ->
[ [
<<PacketId:16/big-unsigned-integer>>, <<PacketId:16/big-unsigned-integer>>,
ReasonCode, ReasonCode,
serialize_properties(Properties, Ver, StrictMode) serialize_properties(Properties, Ver)
]; ];
serialize_variable( serialize_variable(
#mqtt_packet_subscribe{ #mqtt_packet_subscribe{
@ -928,13 +877,12 @@ serialize_variable(
properties = Properties, properties = Properties,
topic_filters = TopicFilters topic_filters = TopicFilters
}, },
Ver, Ver
StrictMode
) -> ) ->
[ [
<<PacketId:16/big-unsigned-integer>>, <<PacketId:16/big-unsigned-integer>>,
serialize_properties(Properties, Ver, StrictMode), serialize_properties(Properties, Ver),
serialize_topic_filters(subscribe, TopicFilters, Ver, StrictMode) serialize_topic_filters(subscribe, TopicFilters, Ver)
]; ];
serialize_variable( serialize_variable(
#mqtt_packet_suback{ #mqtt_packet_suback{
@ -942,12 +890,11 @@ serialize_variable(
properties = Properties, properties = Properties,
reason_codes = ReasonCodes reason_codes = ReasonCodes
}, },
Ver, Ver
StrictMode
) -> ) ->
[ [
<<PacketId:16/big-unsigned-integer>>, <<PacketId:16/big-unsigned-integer>>,
serialize_properties(Properties, Ver, StrictMode), serialize_properties(Properties, Ver),
serialize_reason_codes(ReasonCodes) serialize_reason_codes(ReasonCodes)
]; ];
serialize_variable( serialize_variable(
@ -956,13 +903,12 @@ serialize_variable(
properties = Properties, properties = Properties,
topic_filters = TopicFilters topic_filters = TopicFilters
}, },
Ver, Ver
StrictMode
) -> ) ->
[ [
<<PacketId:16/big-unsigned-integer>>, <<PacketId:16/big-unsigned-integer>>,
serialize_properties(Properties, Ver, StrictMode), serialize_properties(Properties, Ver),
serialize_topic_filters(unsubscribe, TopicFilters, Ver, StrictMode) serialize_topic_filters(unsubscribe, TopicFilters, Ver)
]; ];
serialize_variable( serialize_variable(
#mqtt_packet_unsuback{ #mqtt_packet_unsuback{
@ -970,15 +916,14 @@ serialize_variable(
properties = Properties, properties = Properties,
reason_codes = ReasonCodes reason_codes = ReasonCodes
}, },
Ver, Ver
StrictMode
) -> ) ->
[ [
<<PacketId:16/big-unsigned-integer>>, <<PacketId:16/big-unsigned-integer>>,
serialize_properties(Properties, Ver, StrictMode), serialize_properties(Properties, Ver),
serialize_reason_codes(ReasonCodes) serialize_reason_codes(ReasonCodes)
]; ];
serialize_variable(#mqtt_packet_disconnect{}, Ver, _StrictMode) when serialize_variable(#mqtt_packet_disconnect{}, Ver) when
Ver == ?MQTT_PROTO_V3; Ver == ?MQTT_PROTO_V4 Ver == ?MQTT_PROTO_V3; Ver == ?MQTT_PROTO_V4
-> ->
<<>>; <<>>;
@ -987,115 +932,110 @@ serialize_variable(
reason_code = ReasonCode, reason_code = ReasonCode,
properties = Properties properties = Properties
}, },
Ver = ?MQTT_PROTO_V5, Ver = ?MQTT_PROTO_V5
StrictMode
) -> ) ->
[ReasonCode, serialize_properties(Properties, Ver, StrictMode)]; [ReasonCode, serialize_properties(Properties, Ver)];
serialize_variable(#mqtt_packet_disconnect{}, _Ver, _StrictMode) -> serialize_variable(#mqtt_packet_disconnect{}, _Ver) ->
<<>>; <<>>;
serialize_variable( serialize_variable(
#mqtt_packet_auth{ #mqtt_packet_auth{
reason_code = ReasonCode, reason_code = ReasonCode,
properties = Properties properties = Properties
}, },
Ver = ?MQTT_PROTO_V5, Ver = ?MQTT_PROTO_V5
StrictMode
) -> ) ->
[ReasonCode, serialize_properties(Properties, Ver, StrictMode)]; [ReasonCode, serialize_properties(Properties, Ver)];
serialize_variable(PacketId, ?MQTT_PROTO_V3, _StrictMode) when is_integer(PacketId) -> serialize_variable(PacketId, ?MQTT_PROTO_V3) when is_integer(PacketId) ->
<<PacketId:16/big-unsigned-integer>>; <<PacketId:16/big-unsigned-integer>>;
serialize_variable(PacketId, ?MQTT_PROTO_V4, _StrictMode) when is_integer(PacketId) -> serialize_variable(PacketId, ?MQTT_PROTO_V4) when is_integer(PacketId) ->
<<PacketId:16/big-unsigned-integer>>; <<PacketId:16/big-unsigned-integer>>;
serialize_variable(undefined, _Ver, _StrictMode) -> serialize_variable(undefined, _Ver) ->
<<>>. <<>>.
serialize_payload(undefined) -> <<>>; serialize_payload(undefined) -> <<>>;
serialize_payload(Bin) -> Bin. serialize_payload(Bin) -> Bin.
serialize_properties(_Props, Ver, _StrictMode) when Ver =/= ?MQTT_PROTO_V5 -> serialize_properties(_Props, Ver) when Ver =/= ?MQTT_PROTO_V5 ->
<<>>; <<>>;
serialize_properties(Props, ?MQTT_PROTO_V5, StrictMode) -> serialize_properties(Props, ?MQTT_PROTO_V5) ->
serialize_properties(Props, StrictMode). serialize_properties(Props).
serialize_properties(undefined, _StrictMode) -> serialize_properties(undefined) ->
<<0>>; <<0>>;
serialize_properties(Props, _StrictMode) when map_size(Props) == 0 -> serialize_properties(Props) when map_size(Props) == 0 ->
<<0>>; <<0>>;
serialize_properties(Props, StrictMode) when is_map(Props) -> serialize_properties(Props) when is_map(Props) ->
Bin = << Bin = <<<<(serialize_property(Prop, Val))/binary>> || {Prop, Val} <- maps:to_list(Props)>>,
<<(serialize_property(Prop, Val, StrictMode))/binary>>
|| {Prop, Val} <- maps:to_list(Props)
>>,
[serialize_variable_byte_integer(byte_size(Bin)), Bin]. [serialize_variable_byte_integer(byte_size(Bin)), Bin].
serialize_property(_, Disabled, _StrictMode) when Disabled =:= disabled; Disabled =:= undefined -> serialize_property(_, Disabled) when Disabled =:= disabled; Disabled =:= undefined ->
<<>>; <<>>;
serialize_property(internal_extra, _, _StrictMode) -> serialize_property(internal_extra, _) ->
<<>>; <<>>;
serialize_property('Payload-Format-Indicator', Val, _StrictMode) -> serialize_property('Payload-Format-Indicator', Val) ->
<<16#01, Val>>; <<16#01, Val>>;
serialize_property('Message-Expiry-Interval', Val, _StrictMode) -> serialize_property('Message-Expiry-Interval', Val) ->
<<16#02, Val:32/big>>; <<16#02, Val:32/big>>;
serialize_property('Content-Type', Val, StrictMode) -> serialize_property('Content-Type', Val) ->
<<16#03, (serialize_utf8_string(Val, StrictMode))/binary>>; <<16#03, (serialize_utf8_string(Val))/binary>>;
serialize_property('Response-Topic', Val, StrictMode) -> serialize_property('Response-Topic', Val) ->
<<16#08, (serialize_utf8_string(Val, StrictMode))/binary>>; <<16#08, (serialize_utf8_string(Val))/binary>>;
serialize_property('Correlation-Data', Val, _StrictMode) -> serialize_property('Correlation-Data', Val) ->
<<16#09, (byte_size(Val)):16, Val/binary>>; <<16#09, (byte_size(Val)):16, Val/binary>>;
serialize_property('Subscription-Identifier', Val, _StrictMode) -> serialize_property('Subscription-Identifier', Val) ->
<<16#0B, (serialize_variable_byte_integer(Val))/binary>>; <<16#0B, (serialize_variable_byte_integer(Val))/binary>>;
serialize_property('Session-Expiry-Interval', Val, _StrictMode) -> serialize_property('Session-Expiry-Interval', Val) ->
<<16#11, Val:32/big>>; <<16#11, Val:32/big>>;
serialize_property('Assigned-Client-Identifier', Val, StrictMode) -> serialize_property('Assigned-Client-Identifier', Val) ->
<<16#12, (serialize_utf8_string(Val, StrictMode))/binary>>; <<16#12, (serialize_utf8_string(Val))/binary>>;
serialize_property('Server-Keep-Alive', Val, _StrictMode) -> serialize_property('Server-Keep-Alive', Val) ->
<<16#13, Val:16/big>>; <<16#13, Val:16/big>>;
serialize_property('Authentication-Method', Val, StrictMode) -> serialize_property('Authentication-Method', Val) ->
<<16#15, (serialize_utf8_string(Val, StrictMode))/binary>>; <<16#15, (serialize_utf8_string(Val))/binary>>;
serialize_property('Authentication-Data', Val, _StrictMode) -> serialize_property('Authentication-Data', Val) ->
<<16#16, (iolist_size(Val)):16, Val/binary>>; <<16#16, (iolist_size(Val)):16, Val/binary>>;
serialize_property('Request-Problem-Information', Val, _StrictMode) -> serialize_property('Request-Problem-Information', Val) ->
<<16#17, Val>>; <<16#17, Val>>;
serialize_property('Will-Delay-Interval', Val, _StrictMode) -> serialize_property('Will-Delay-Interval', Val) ->
<<16#18, Val:32/big>>; <<16#18, Val:32/big>>;
serialize_property('Request-Response-Information', Val, _StrictMode) -> serialize_property('Request-Response-Information', Val) ->
<<16#19, Val>>; <<16#19, Val>>;
serialize_property('Response-Information', Val, StrictMode) -> serialize_property('Response-Information', Val) ->
<<16#1A, (serialize_utf8_string(Val, StrictMode))/binary>>; <<16#1A, (serialize_utf8_string(Val))/binary>>;
serialize_property('Server-Reference', Val, StrictMode) -> serialize_property('Server-Reference', Val) ->
<<16#1C, (serialize_utf8_string(Val, StrictMode))/binary>>; <<16#1C, (serialize_utf8_string(Val))/binary>>;
serialize_property('Reason-String', Val, StrictMode) -> serialize_property('Reason-String', Val) ->
<<16#1F, (serialize_utf8_string(Val, StrictMode))/binary>>; <<16#1F, (serialize_utf8_string(Val))/binary>>;
serialize_property('Receive-Maximum', Val, _StrictMode) -> serialize_property('Receive-Maximum', Val) ->
<<16#21, Val:16/big>>; <<16#21, Val:16/big>>;
serialize_property('Topic-Alias-Maximum', Val, _StrictMode) -> serialize_property('Topic-Alias-Maximum', Val) ->
<<16#22, Val:16/big>>; <<16#22, Val:16/big>>;
serialize_property('Topic-Alias', Val, _StrictMode) -> serialize_property('Topic-Alias', Val) ->
<<16#23, Val:16/big>>; <<16#23, Val:16/big>>;
serialize_property('Maximum-QoS', Val, _StrictMode) -> serialize_property('Maximum-QoS', Val) ->
<<16#24, Val>>; <<16#24, Val>>;
serialize_property('Retain-Available', Val, _StrictMode) -> serialize_property('Retain-Available', Val) ->
<<16#25, Val>>; <<16#25, Val>>;
serialize_property('User-Property', {Key, Val}, StrictMode) -> serialize_property('User-Property', {Key, Val}) ->
<<16#26, (serialize_utf8_pair(Key, Val, StrictMode))/binary>>; <<16#26, (serialize_utf8_pair({Key, Val}))/binary>>;
serialize_property('User-Property', Props, StrictMode) when is_list(Props) -> serialize_property('User-Property', Props) when is_list(Props) ->
<< <<
<<(serialize_property('User-Property', {Key, Val}, StrictMode))/binary>> <<(serialize_property('User-Property', {Key, Val}))/binary>>
|| {Key, Val} <- Props || {Key, Val} <- Props
>>; >>;
serialize_property('Maximum-Packet-Size', Val, _StrictMode) -> serialize_property('Maximum-Packet-Size', Val) ->
<<16#27, Val:32/big>>; <<16#27, Val:32/big>>;
serialize_property('Wildcard-Subscription-Available', Val, _StrictMode) -> serialize_property('Wildcard-Subscription-Available', Val) ->
<<16#28, Val>>; <<16#28, Val>>;
serialize_property('Subscription-Identifier-Available', Val, _StrictMode) -> serialize_property('Subscription-Identifier-Available', Val) ->
<<16#29, Val>>; <<16#29, Val>>;
serialize_property('Shared-Subscription-Available', Val, _StrictMode) -> serialize_property('Shared-Subscription-Available', Val) ->
<<16#2A, Val>>. <<16#2A, Val>>.
serialize_topic_filters(subscribe, TopicFilters, ?MQTT_PROTO_V5, StrictMode) -> serialize_topic_filters(subscribe, TopicFilters, ?MQTT_PROTO_V5) ->
<< <<
<< <<
(serialize_utf8_string(Topic, StrictMode))/binary, (serialize_utf8_string(Topic))/binary,
?RESERVED:2, ?RESERVED:2,
Rh:2, Rh:2,
(flag(Rap)):1, (flag(Rap)):1,
@ -1104,42 +1044,37 @@ serialize_topic_filters(subscribe, TopicFilters, ?MQTT_PROTO_V5, StrictMode) ->
>> >>
|| {Topic, #{rh := Rh, rap := Rap, nl := Nl, qos := QoS}} <- TopicFilters || {Topic, #{rh := Rh, rap := Rap, nl := Nl, qos := QoS}} <- TopicFilters
>>; >>;
serialize_topic_filters(subscribe, TopicFilters, _Ver, StrictMode) -> serialize_topic_filters(subscribe, TopicFilters, _Ver) ->
<< <<
<<(serialize_utf8_string(Topic, StrictMode))/binary, ?RESERVED:6, QoS:2>> <<(serialize_utf8_string(Topic))/binary, ?RESERVED:6, QoS:2>>
|| {Topic, #{qos := QoS}} <- TopicFilters || {Topic, #{qos := QoS}} <- TopicFilters
>>; >>;
serialize_topic_filters(unsubscribe, TopicFilters, _Ver, StrictMode) -> serialize_topic_filters(unsubscribe, TopicFilters, _Ver) ->
<<<<(serialize_utf8_string(Topic, StrictMode))/binary>> || Topic <- TopicFilters>>. <<<<(serialize_utf8_string(Topic))/binary>> || Topic <- TopicFilters>>.
serialize_reason_codes(undefined) -> serialize_reason_codes(undefined) ->
<<>>; <<>>;
serialize_reason_codes(ReasonCodes) when is_list(ReasonCodes) -> serialize_reason_codes(ReasonCodes) when is_list(ReasonCodes) ->
<<<<Code>> || Code <- ReasonCodes>>. <<<<Code>> || Code <- ReasonCodes>>.
serialize_utf8_pair(Name, Value, StrictMode) -> serialize_utf8_pair({Name, Value}) ->
<< <<(serialize_utf8_string(Name))/binary, (serialize_utf8_string(Value))/binary>>.
(serialize_utf8_string(Name, StrictMode))/binary,
(serialize_utf8_string(Value, StrictMode))/binary
>>.
serialize_binary_data(Bin) -> serialize_binary_data(Bin) ->
[<<(byte_size(Bin)):16/big-unsigned-integer>>, Bin]. [<<(byte_size(Bin)):16/big-unsigned-integer>>, Bin].
serialize_utf8_string(undefined, false, _StrictMode) -> serialize_utf8_string(undefined, false) ->
?SERIALIZE_ERR(utf8_string_undefined); ?SERIALIZE_ERR(utf8_string_undefined);
serialize_utf8_string(undefined, true, _StrictMode) -> serialize_utf8_string(undefined, true) ->
<<>>; <<>>;
serialize_utf8_string(String, _AllowNull, StrictMode) -> serialize_utf8_string(String, _AllowNull) ->
serialize_utf8_string(String, StrictMode). serialize_utf8_string(String).
serialize_utf8_string(String, true) -> serialize_utf8_string(String) ->
StringBin = unicode:characters_to_binary(String), StringBin = unicode:characters_to_binary(String),
serialize_utf8_string(StringBin, false); Len = byte_size(StringBin),
serialize_utf8_string(String, false) ->
Len = byte_size(String),
true = (Len =< 16#ffff), true = (Len =< 16#ffff),
<<Len:16/big, String/binary>>. <<Len:16/big, StringBin/binary>>.
serialize_remaining_len(I) -> serialize_remaining_len(I) ->
serialize_variable_byte_integer(I). serialize_variable_byte_integer(I).
@ -1187,49 +1122,6 @@ validate_subqos([3 | _]) -> ?PARSE_ERR(bad_subqos);
validate_subqos([_ | T]) -> validate_subqos(T); validate_subqos([_ | T]) -> validate_subqos(T);
validate_subqos([]) -> ok. validate_subqos([]) -> ok.
%% from spec: the server MAY send disconnect with reason code 0x84
%% we chose to close socket because the client is likely not talking MQTT anyway
validate_proto_name(<<"MQTT">>) ->
ok;
validate_proto_name(<<"MQIsdp">>) ->
ok;
validate_proto_name(ProtoName) ->
?PARSE_ERR(#{
cause => invalid_proto_name,
expected => <<"'MQTT' or 'MQIsdp'">>,
received => ProtoName
}).
%% MQTT-v3.1.1-[MQTT-3.1.2-3], MQTT-v5.0-[MQTT-3.1.2-3]
-compile({inline, [validate_connect_reserved/1]}).
validate_connect_reserved(0) -> ok;
validate_connect_reserved(1) -> ?PARSE_ERR(reserved_connect_flag).
-compile({inline, [validate_connect_will/3]}).
%% MQTT-v3.1.1-[MQTT-3.1.2-13], MQTT-v5.0-[MQTT-3.1.2-11]
validate_connect_will(false, _, WillQoS) when WillQoS > 0 -> ?PARSE_ERR(invalid_will_qos);
%% MQTT-v3.1.1-[MQTT-3.1.2-14], MQTT-v5.0-[MQTT-3.1.2-12]
validate_connect_will(true, _, WillQoS) when WillQoS > 2 -> ?PARSE_ERR(invalid_will_qos);
%% MQTT-v3.1.1-[MQTT-3.1.2-15], MQTT-v5.0-[MQTT-3.1.2-13]
validate_connect_will(false, WillRetain, _) when WillRetain -> ?PARSE_ERR(invalid_will_retain);
validate_connect_will(_, _, _) -> ok.
-compile({inline, [validate_connect_password_flag/4]}).
%% MQTT-v3.1
%% Username flag and password flag are not strongly related
%% https://public.dhe.ibm.com/software/dw/webservices/ws-mqtt/mqtt-v3r1.html#connect
validate_connect_password_flag(true, ?MQTT_PROTO_V3, _, _) ->
ok;
%% MQTT-v3.1.1-[MQTT-3.1.2-22]
validate_connect_password_flag(true, ?MQTT_PROTO_V4, UsernameFlag, PasswordFlag) ->
%% BUG-FOR-BUG compatible, only check when `strict-mode`
UsernameFlag orelse PasswordFlag andalso ?PARSE_ERR(invalid_password_flag);
validate_connect_password_flag(true, ?MQTT_PROTO_V5, _, _) ->
ok;
validate_connect_password_flag(_, _, _, _) ->
ok.
-compile({inline, [bool/1]}).
bool(0) -> false; bool(0) -> false;
bool(1) -> true. bool(1) -> true.

View File

@ -30,6 +30,7 @@
-export([ -export([
init/1, init/1,
run/2,
run/3, run/3,
info/1, info/1,
reset/1 reset/1
@ -61,7 +62,12 @@ init(#{count := Count, bytes := Bytes}) ->
Oct = [{oct, {Bytes, Bytes}} || ?ENABLED(Bytes)], Oct = [{oct, {Bytes, Bytes}} || ?ENABLED(Bytes)],
?GCS(maps:from_list(Cnt ++ Oct)). ?GCS(maps:from_list(Cnt ++ Oct)).
%% @doc Try to run GC based on reductions of count or bytes. %% @doc Try to run GC based on reduntions of count or bytes.
-spec run(#{cnt := pos_integer(), oct := pos_integer()}, gc_state()) ->
{boolean(), gc_state()}.
run(#{cnt := Cnt, oct := Oct}, GcSt) ->
run(Cnt, Oct, GcSt).
-spec run(pos_integer(), pos_integer(), gc_state()) -> -spec run(pos_integer(), pos_integer(), gc_state()) ->
{boolean(), gc_state()}. {boolean(), gc_state()}.
run(Cnt, Oct, ?GCS(St)) -> run(Cnt, Oct, ?GCS(St)) ->

View File

@ -44,7 +44,6 @@
'client.disconnected', 'client.disconnected',
'client.authorize', 'client.authorize',
'client.check_authz_complete', 'client.check_authz_complete',
'client.check_authn_complete',
'client.authenticate', 'client.authenticate',
'client.subscribe', 'client.subscribe',
'client.unsubscribe', 'client.unsubscribe',
@ -60,8 +59,6 @@
'message.publish', 'message.publish',
'message.puback', 'message.puback',
'message.dropped', 'message.dropped',
'message.transformation_failed',
'schema.validation_failed',
'message.delivered', 'message.delivered',
'message.acked', 'message.acked',
'delivery.dropped', 'delivery.dropped',
@ -185,9 +182,6 @@ when
-callback 'message.dropped'(emqx_types:message(), #{node => node()}, _Reason :: atom()) -> -callback 'message.dropped'(emqx_types:message(), #{node => node()}, _Reason :: atom()) ->
callback_result(). callback_result().
-callback 'schema.validation_failed'(emqx_types:message(), #{node => node()}, _Ctx :: map()) ->
callback_result().
-callback 'message.delivered'(emqx_types:clientinfo(), Msg) -> fold_callback_result(Msg) when -callback 'message.delivered'(emqx_types:clientinfo(), Msg) -> fold_callback_result(Msg) when
Msg :: emqx_types:message(). Msg :: emqx_types:message().

View File

@ -36,7 +36,8 @@
max_size/1, max_size/1,
is_full/1, is_full/1,
is_empty/1, is_empty/1,
window/1 window/1,
query/2
]). ]).
-export_type([inflight/0]). -export_type([inflight/0]).
@ -138,3 +139,47 @@ size(?INFLIGHT(Tree)) ->
-spec max_size(inflight()) -> non_neg_integer(). -spec max_size(inflight()) -> non_neg_integer().
max_size(?INFLIGHT(MaxSize, _Tree)) -> max_size(?INFLIGHT(MaxSize, _Tree)) ->
MaxSize. MaxSize.
-spec query(inflight(), #{continuation => Cont, limit := L}) ->
{[{key(), term()}], #{continuation := Cont, count := C}}
when
Cont :: none | end_of_data | key(),
L :: non_neg_integer(),
C :: non_neg_integer().
query(?INFLIGHT(Tree), #{limit := Limit} = Pager) ->
Count = gb_trees:size(Tree),
ContKey = maps:get(continuation, Pager, none),
{List, NextCont} = sublist(iterator_from(ContKey, Tree), Limit),
{List, #{continuation => NextCont, count => Count}}.
iterator_from(none, Tree) ->
gb_trees:iterator(Tree);
iterator_from(ContKey, Tree) ->
It = gb_trees:iterator_from(ContKey, Tree),
case gb_trees:next(It) of
{ContKey, _Val, ItNext} -> ItNext;
_ -> It
end.
sublist(_It, 0) ->
{[], none};
sublist(It, Len) ->
{ListAcc, HasNext} = sublist(It, Len, []),
{lists:reverse(ListAcc), next_cont(ListAcc, HasNext)}.
sublist(It, 0, Acc) ->
{Acc, gb_trees:next(It) =/= none};
sublist(It, Len, Acc) ->
case gb_trees:next(It) of
none ->
{Acc, false};
{Key, Val, ItNext} ->
sublist(ItNext, Len - 1, [{Key, Val} | Acc])
end.
next_cont(_Acc, false) ->
end_of_data;
next_cont([{LastKey, _LastVal} | _Acc], _HasNext) ->
LastKey;
next_cont([], _HasNext) ->
end_of_data.

View File

@ -19,12 +19,10 @@
-export([ -export([
init/1, init/1,
init/2, init/2,
init/3,
info/1, info/1,
info/2, info/2,
check/1,
check/2, check/2,
update/3 update/2
]). ]).
-elvis([{elvis_style, no_if_expression, disable}]). -elvis([{elvis_style, no_if_expression, disable}]).
@ -32,12 +30,8 @@
-export_type([keepalive/0]). -export_type([keepalive/0]).
-record(keepalive, { -record(keepalive, {
check_interval :: pos_integer(), interval :: pos_integer(),
%% the received packets since last keepalive check statval :: non_neg_integer()
statval :: non_neg_integer(),
%% The number of idle intervals allowed before disconnecting the client.
idle_milliseconds = 0 :: non_neg_integer(),
max_idle_millisecond :: pos_integer()
}). }).
-opaque keepalive() :: #keepalive{}. -opaque keepalive() :: #keepalive{}.
@ -45,11 +39,7 @@
%% @doc Init keepalive. %% @doc Init keepalive.
-spec init(Interval :: non_neg_integer()) -> keepalive(). -spec init(Interval :: non_neg_integer()) -> keepalive().
init(Interval) -> init(default, 0, Interval). init(Interval) -> init(0, Interval).
init(Zone, Interval) ->
RecvCnt = emqx_pd:get_counter(recv_pkt),
init(Zone, RecvCnt, Interval).
%% from mqtt-v3.1.1 specific %% from mqtt-v3.1.1 specific
%% A Keep Alive value of zero (0) has the effect of turning off the keep alive mechanism. %% A Keep Alive value of zero (0) has the effect of turning off the keep alive mechanism.
@ -63,88 +53,42 @@ init(Zone, Interval) ->
%% typically this is a few minutes. %% typically this is a few minutes.
%% The maximum value is (65535s) 18 hours 12 minutes and 15 seconds. %% The maximum value is (65535s) 18 hours 12 minutes and 15 seconds.
%% @doc Init keepalive. %% @doc Init keepalive.
-spec init( -spec init(StatVal :: non_neg_integer(), Interval :: non_neg_integer()) -> keepalive() | undefined.
Zone :: atom(), init(StatVal, Interval) when Interval > 0 andalso Interval =< ?MAX_INTERVAL ->
StatVal :: non_neg_integer(), #keepalive{interval = Interval, statval = StatVal};
Second :: non_neg_integer() init(_, 0) ->
) -> keepalive() | undefined.
init(Zone, StatVal, Second) when Second > 0 andalso Second =< ?MAX_INTERVAL ->
#{keepalive_multiplier := Mul, keepalive_check_interval := CheckInterval} =
emqx_config:get_zone_conf(Zone, [mqtt]),
MilliSeconds = timer:seconds(Second),
Interval = emqx_utils:clamp(CheckInterval, 1000, max(MilliSeconds div 2, 1000)),
MaxIdleMs = ceil(MilliSeconds * Mul),
#keepalive{
check_interval = Interval,
statval = StatVal,
idle_milliseconds = 0,
max_idle_millisecond = MaxIdleMs
};
init(_Zone, _, 0) ->
undefined; undefined;
init(Zone, StatVal, Interval) when Interval > ?MAX_INTERVAL -> init(Zone, StatVal, ?MAX_INTERVAL). init(StatVal, Interval) when Interval > ?MAX_INTERVAL -> init(StatVal, ?MAX_INTERVAL).
%% @doc Get Info of the keepalive. %% @doc Get Info of the keepalive.
-spec info(keepalive()) -> emqx_types:infos(). -spec info(keepalive()) -> emqx_types:infos().
info(#keepalive{ info(#keepalive{
check_interval = Interval, interval = Interval,
statval = StatVal, statval = StatVal
idle_milliseconds = IdleIntervals,
max_idle_millisecond = MaxMs
}) -> }) ->
#{ #{
check_interval => Interval, interval => Interval,
statval => StatVal, statval => StatVal
idle_milliseconds => IdleIntervals,
max_idle_millisecond => MaxMs
}. }.
-spec info(check_interval | statval | idle_milliseconds, keepalive()) -> -spec info(interval | statval, keepalive()) ->
non_neg_integer(). non_neg_integer().
info(check_interval, #keepalive{check_interval = Interval}) -> info(interval, #keepalive{interval = Interval}) ->
Interval; Interval;
info(statval, #keepalive{statval = StatVal}) -> info(statval, #keepalive{statval = StatVal}) ->
StatVal; StatVal;
info(idle_milliseconds, #keepalive{idle_milliseconds = Val}) -> info(interval, undefined) ->
Val;
info(check_interval, undefined) ->
0. 0.
check(Keepalive = #keepalive{}) ->
RecvCnt = emqx_pd:get_counter(recv_pkt),
check(RecvCnt, Keepalive);
check(Keepalive) ->
{ok, Keepalive}.
%% @doc Check keepalive. %% @doc Check keepalive.
-spec check(non_neg_integer(), keepalive()) -> -spec check(non_neg_integer(), keepalive()) ->
{ok, keepalive()} | {error, timeout}. {ok, keepalive()} | {error, timeout}.
check(Val, #keepalive{statval = Val}) -> {error, timeout};
check( check(Val, KeepAlive) -> {ok, KeepAlive#keepalive{statval = Val}}.
NewVal,
#keepalive{
statval = NewVal,
idle_milliseconds = IdleAcc,
check_interval = Interval,
max_idle_millisecond = Max
}
) when IdleAcc + Interval >= Max ->
{error, timeout};
check(
NewVal,
#keepalive{
statval = NewVal,
idle_milliseconds = IdleAcc,
check_interval = Interval
} = KeepAlive
) ->
{ok, KeepAlive#keepalive{statval = NewVal, idle_milliseconds = IdleAcc + Interval}};
check(NewVal, #keepalive{} = KeepAlive) ->
{ok, KeepAlive#keepalive{statval = NewVal, idle_milliseconds = 0}}.
%% @doc Update keepalive. %% @doc Update keepalive.
%% The statval of the previous keepalive will be used, %% The statval of the previous keepalive will be used,
%% and normal checks will begin from the next cycle. %% and normal checks will begin from the next cycle.
-spec update(atom(), non_neg_integer(), keepalive() | undefined) -> keepalive() | undefined. -spec update(non_neg_integer(), keepalive() | undefined) -> keepalive() | undefined.
update(Zone, Interval, undefined) -> init(Zone, 0, Interval); update(Interval, undefined) -> init(0, Interval);
update(Zone, Interval, #keepalive{statval = StatVal}) -> init(Zone, StatVal, Interval). update(Interval, #keepalive{statval = StatVal}) -> init(StatVal, Interval).

View File

@ -212,29 +212,16 @@ short_paths_fields() ->
short_paths_fields(Importance) -> short_paths_fields(Importance) ->
[ [
{Name, {Name,
?HOCON( ?HOCON(rate_type(), #{
rate_type(), desc => ?DESC(Name),
maps:merge( required => false,
#{ importance => Importance,
desc => ?DESC(Name), example => Example
required => false, })}
importance => Importance,
example => Example
},
short_paths_fields_extra(Name)
)
)}
|| {Name, Example} <- || {Name, Example} <-
lists:zip(short_paths(), [<<"1000/s">>, <<"1000/s">>, <<"100MB/s">>]) lists:zip(short_paths(), [<<"1000/s">>, <<"1000/s">>, <<"100MB/s">>])
]. ].
short_paths_fields_extra(max_conn_rate) ->
#{
default => infinity
};
short_paths_fields_extra(_Name) ->
#{}.
desc(limiter) -> desc(limiter) ->
"Settings for the rate limiter."; "Settings for the rate limiter.";
desc(node_opts) -> desc(node_opts) ->

View File

@ -64,17 +64,6 @@
-export_type([listener_id/0]). -export_type([listener_id/0]).
-dialyzer(
{no_unknown, [
is_running/3,
current_conns/3,
do_stop_listener/3,
do_start_listener/4,
do_update_listener/4,
quic_listener_conf_rollback/3
]}
).
-type listener_id() :: atom() | binary(). -type listener_id() :: atom() | binary().
-type listener_type() :: tcp | ssl | ws | wss | quic | dtls. -type listener_type() :: tcp | ssl | ws | wss | quic | dtls.
@ -135,7 +124,7 @@ format_raw_listeners({Type0, Conf}) ->
Bind = parse_bind(LConf0), Bind = parse_bind(LConf0),
MaxConn = maps:get(<<"max_connections">>, LConf0, default_max_conn()), MaxConn = maps:get(<<"max_connections">>, LConf0, default_max_conn()),
Running = is_running(Type, listener_id(Type, LName), LConf0#{bind => Bind}), Running = is_running(Type, listener_id(Type, LName), LConf0#{bind => Bind}),
LConf1 = maps:without([<<"authentication">>], LConf0), LConf1 = maps:without([<<"authentication">>, <<"zone">>], LConf0),
LConf2 = maps:put(<<"running">>, Running, LConf1), LConf2 = maps:put(<<"running">>, Running, LConf1),
CurrConn = CurrConn =
case Running of case Running of
@ -432,7 +421,7 @@ do_start_listener(Type, Name, Id, #{bind := ListenOn} = Opts) when ?ESOCKD_LISTE
esockd:open( esockd:open(
Id, Id,
ListenOn, ListenOn,
merge_default(esockd_opts(Id, Type, Name, Opts, _OldOpts = undefined)) merge_default(esockd_opts(Id, Type, Name, Opts))
); );
%% Start MQTT/WS listener %% Start MQTT/WS listener
do_start_listener(Type, Name, Id, Opts) when ?COWBOY_LISTENER(Type) -> do_start_listener(Type, Name, Id, Opts) when ?COWBOY_LISTENER(Type) ->
@ -476,7 +465,7 @@ do_update_listener(Type, Name, OldConf, NewConf = #{bind := ListenOn}) when
Id = listener_id(Type, Name), Id = listener_id(Type, Name),
case maps:get(bind, OldConf) of case maps:get(bind, OldConf) of
ListenOn -> ListenOn ->
esockd:set_options({Id, ListenOn}, esockd_opts(Id, Type, Name, NewConf, OldConf)); esockd:set_options({Id, ListenOn}, esockd_opts(Id, Type, Name, NewConf));
_Different -> _Different ->
%% TODO %% TODO
%% Again, we're not strictly required to drop live connections in this case. %% Again, we're not strictly required to drop live connections in this case.
@ -537,7 +526,6 @@ pre_config_update([?ROOT_KEY, _Type, _Name], {update, _Request}, undefined) ->
pre_config_update([?ROOT_KEY, Type, Name], {update, Request}, RawConf) -> pre_config_update([?ROOT_KEY, Type, Name], {update, Request}, RawConf) ->
RawConf1 = emqx_utils_maps:deep_merge(RawConf, Request), RawConf1 = emqx_utils_maps:deep_merge(RawConf, Request),
RawConf2 = ensure_override_limiter_conf(RawConf1, Request), RawConf2 = ensure_override_limiter_conf(RawConf1, Request),
ok = assert_zone_exists(RawConf2),
{ok, convert_certs(Type, Name, RawConf2)}; {ok, convert_certs(Type, Name, RawConf2)};
pre_config_update([?ROOT_KEY, _Type, _Name], {action, _Action, Updated}, RawConf) -> pre_config_update([?ROOT_KEY, _Type, _Name], {action, _Action, Updated}, RawConf) ->
{ok, emqx_utils_maps:deep_merge(RawConf, Updated)}; {ok, emqx_utils_maps:deep_merge(RawConf, Updated)};
@ -588,7 +576,7 @@ perform_listener_change(update, {{Type, Name, ConfOld}, {_, _, ConfNew}}) ->
perform_listener_change(stop, {Type, Name, Conf}) -> perform_listener_change(stop, {Type, Name, Conf}) ->
stop_listener(Type, Name, Conf). stop_listener(Type, Name, Conf).
esockd_opts(ListenerId, Type, Name, Opts0, OldOpts) -> esockd_opts(ListenerId, Type, Name, Opts0) ->
Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0), Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0),
Limiter = limiter(Opts0), Limiter = limiter(Opts0),
Opts2 = Opts2 =
@ -620,11 +608,9 @@ esockd_opts(ListenerId, Type, Name, Opts0, OldOpts) ->
tcp -> tcp ->
Opts3#{tcp_options => tcp_opts(Opts0)}; Opts3#{tcp_options => tcp_opts(Opts0)};
ssl -> ssl ->
OptsWithCRL = inject_crl_config(Opts0, OldOpts), OptsWithCRL = inject_crl_config(Opts0),
OptsWithSNI = inject_sni_fun(ListenerId, OptsWithCRL), OptsWithSNI = inject_sni_fun(ListenerId, OptsWithCRL),
OptsWithRootFun = inject_root_fun(OptsWithSNI), SSLOpts = ssl_opts(OptsWithSNI),
OptsWithVerifyFun = inject_verify_fun(OptsWithRootFun),
SSLOpts = ssl_opts(OptsWithVerifyFun),
Opts3#{ssl_options => SSLOpts, tcp_options => tcp_opts(Opts0)} Opts3#{ssl_options => SSLOpts, tcp_options => tcp_opts(Opts0)}
end end
). ).
@ -648,18 +634,8 @@ ranch_opts(Type, Opts = #{bind := ListenOn}) ->
MaxConnections = maps:get(max_connections, Opts, 1024), MaxConnections = maps:get(max_connections, Opts, 1024),
SocketOpts = SocketOpts =
case Type of case Type of
wss -> wss -> tcp_opts(Opts) ++ proplists:delete(handshake_timeout, ssl_opts(Opts));
tcp_opts(Opts) ++ ws -> tcp_opts(Opts)
lists:filter(
fun
({partial_chain, _}) -> false;
({handshake_timeout, _}) -> false;
(_) -> true
end,
ssl_opts(Opts)
);
ws ->
tcp_opts(Opts)
end, end,
#{ #{
num_acceptors => NumAcceptors, num_acceptors => NumAcceptors,
@ -908,11 +884,6 @@ convert_certs(Type, Name, Conf) ->
filter_stacktrace({Reason, _Stacktrace}) -> Reason; filter_stacktrace({Reason, _Stacktrace}) -> Reason;
filter_stacktrace(Reason) -> Reason. filter_stacktrace(Reason) -> Reason.
assert_zone_exists(#{<<"zone">> := Zone}) ->
emqx_config_zones:assert_zone_exists(Zone);
assert_zone_exists(_) ->
ok.
%% limiter config should override, not merge %% limiter config should override, not merge
ensure_override_limiter_conf(Conf, #{<<"limiter">> := Limiter}) -> ensure_override_limiter_conf(Conf, #{<<"limiter">> := Limiter}) ->
Conf#{<<"limiter">> => Limiter}; Conf#{<<"limiter">> => Limiter};
@ -985,18 +956,13 @@ quic_listener_optional_settings() ->
stateless_operation_expiration_ms stateless_operation_expiration_ms
]. ].
inject_root_fun(#{ssl_options := SSLOpts} = Opts) ->
Opts#{ssl_options := emqx_tls_lib:maybe_inject_ssl_fun(root_fun, SSLOpts)}.
inject_verify_fun(#{ssl_options := SSLOpts} = Opts) ->
Opts#{ssl_options := emqx_tls_lib:maybe_inject_ssl_fun(verify_fun, SSLOpts)}.
inject_sni_fun(ListenerId, Conf = #{ssl_options := #{ocsp := #{enable_ocsp_stapling := true}}}) -> inject_sni_fun(ListenerId, Conf = #{ssl_options := #{ocsp := #{enable_ocsp_stapling := true}}}) ->
emqx_ocsp_cache:inject_sni_fun(ListenerId, Conf); emqx_ocsp_cache:inject_sni_fun(ListenerId, Conf);
inject_sni_fun(_ListenerId, Conf) -> inject_sni_fun(_ListenerId, Conf) ->
Conf. Conf.
inject_crl_config( inject_crl_config(
Conf = #{ssl_options := #{enable_crl_check := true} = SSLOpts}, _OldOpts Conf = #{ssl_options := #{enable_crl_check := true} = SSLOpts}
) -> ) ->
HTTPTimeout = emqx_config:get([crl_cache, http_timeout], timer:seconds(15)), HTTPTimeout = emqx_config:get([crl_cache, http_timeout], timer:seconds(15)),
Conf#{ Conf#{
@ -1006,16 +972,7 @@ inject_crl_config(
crl_cache => {emqx_ssl_crl_cache, {internal, [{http, HTTPTimeout}]}} crl_cache => {emqx_ssl_crl_cache, {internal, [{http, HTTPTimeout}]}}
} }
}; };
inject_crl_config(#{ssl_options := SSLOpts0} = Conf0, #{} = OldOpts) -> inject_crl_config(Conf) ->
%% Note: we must set crl options to `undefined' to unset them. Otherwise,
%% `esockd' will retain such options when `esockd:merge_opts/2' is called and the SSL
%% options were previously enabled.
WasEnabled = emqx_utils_maps:deep_get([ssl_options, enable_crl_check], OldOpts, false),
Undefine = fun(Acc, K) -> emqx_utils_maps:put_if(Acc, K, undefined, WasEnabled) end,
SSLOpts1 = Undefine(SSLOpts0, crl_check),
SSLOpts = Undefine(SSLOpts1, crl_cache),
Conf0#{ssl_options := SSLOpts};
inject_crl_config(Conf, undefined = _OldOpts) ->
Conf. Conf.
maybe_unregister_ocsp_stapling_refresh( maybe_unregister_ocsp_stapling_refresh(
@ -1038,6 +995,7 @@ ensure_max_conns(<<"infinity">>) -> <<"infinity">>;
ensure_max_conns(MaxConn) when is_binary(MaxConn) -> binary_to_integer(MaxConn); ensure_max_conns(MaxConn) when is_binary(MaxConn) -> binary_to_integer(MaxConn);
ensure_max_conns(MaxConn) -> MaxConn. ensure_max_conns(MaxConn) -> MaxConn.
-spec quic_listen_on(X :: any()) -> quicer:listen_on().
quic_listen_on(Bind) -> quic_listen_on(Bind) ->
case Bind of case Bind of
{Addr, Port} when tuple_size(Addr) == 4 -> {Addr, Port} when tuple_size(Addr) == 4 ->
@ -1056,8 +1014,8 @@ to_quicer_listener_opts(Opts) ->
SSLOpts = maps:from_list(ssl_opts(Opts)), SSLOpts = maps:from_list(ssl_opts(Opts)),
Opts1 = maps:filter( Opts1 = maps:filter(
fun fun
(cacertfile, undefined) -> false; (cacertfile, undefined) -> fasle;
(password, undefined) -> false; (password, undefined) -> fasle;
(_, _) -> true (_, _) -> true
end, end,
Opts Opts

View File

@ -25,7 +25,7 @@
-export([start_link/0]). -export([start_link/0]).
%% throttler API %% throttler API
-export([allow/2]). -export([allow/1]).
%% gen_server callbacks %% gen_server callbacks
-export([ -export([
@ -40,29 +40,23 @@
-define(SEQ_ID(Msg), {?MODULE, Msg}). -define(SEQ_ID(Msg), {?MODULE, Msg}).
-define(NEW_SEQ, atomics:new(1, [{signed, false}])). -define(NEW_SEQ, atomics:new(1, [{signed, false}])).
-define(GET_SEQ(Msg), persistent_term:get(?SEQ_ID(Msg), undefined)). -define(GET_SEQ(Msg), persistent_term:get(?SEQ_ID(Msg), undefined)).
-define(ERASE_SEQ(Msg), persistent_term:erase(?SEQ_ID(Msg))).
-define(RESET_SEQ(SeqRef), atomics:put(SeqRef, 1, 0)). -define(RESET_SEQ(SeqRef), atomics:put(SeqRef, 1, 0)).
-define(INC_SEQ(SeqRef), atomics:add(SeqRef, 1, 1)). -define(INC_SEQ(SeqRef), atomics:add(SeqRef, 1, 1)).
-define(GET_DROPPED(SeqRef), atomics:get(SeqRef, 1) - 1). -define(GET_DROPPED(SeqRef), atomics:get(SeqRef, 1) - 1).
-define(IS_ALLOWED(SeqRef), atomics:add_get(SeqRef, 1, 1) =:= 1). -define(IS_ALLOWED(SeqRef), atomics:add_get(SeqRef, 1, 1) =:= 1).
-define(NEW_THROTTLE(Msg, SeqRef), persistent_term:put(?SEQ_ID(Msg), SeqRef)).
-define(MSGS_LIST, emqx:get_config([log, throttling, msgs], [])). -define(MSGS_LIST, emqx:get_config([log, throttling, msgs], [])).
-define(TIME_WINDOW_MS, timer:seconds(emqx:get_config([log, throttling, time_window], 60))). -define(TIME_WINDOW_MS, timer:seconds(emqx:get_config([log, throttling, time_window], 60))).
%% @doc Check if a throttled log message is allowed to pass down to the logger this time. -spec allow(atom()) -> boolean().
%% The Msg has to be an atom, and the second argument `UniqueKey' should be `undefined' allow(Msg) when is_atom(Msg) ->
%% for predefined message IDs.
%% For relatively static resources created from configurations such as data integration
%% resource IDs `UniqueKey' should be of `binary()' type.
-spec allow(atom(), undefined | binary()) -> boolean().
allow(Msg, UniqueKey) when
is_atom(Msg) andalso (is_binary(UniqueKey) orelse UniqueKey =:= undefined)
->
case emqx_logger:get_primary_log_level() of case emqx_logger:get_primary_log_level() of
debug -> debug ->
true; true;
_ -> _ ->
do_allow(Msg, UniqueKey) do_allow(Msg)
end. end.
-spec start_link() -> startlink_ret(). -spec start_link() -> startlink_ret().
@ -74,8 +68,7 @@ start_link() ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init([]) -> init([]) ->
process_flag(trap_exit, true), ok = lists:foreach(fun(Msg) -> ?NEW_THROTTLE(Msg, ?NEW_SEQ) end, ?MSGS_LIST),
ok = lists:foreach(fun new_throttler/1, ?MSGS_LIST),
CurrentPeriodMs = ?TIME_WINDOW_MS, CurrentPeriodMs = ?TIME_WINDOW_MS,
TimerRef = schedule_refresh(CurrentPeriodMs), TimerRef = schedule_refresh(CurrentPeriodMs),
{ok, #{timer_ref => TimerRef, current_period_ms => CurrentPeriodMs}}. {ok, #{timer_ref => TimerRef, current_period_ms => CurrentPeriodMs}}.
@ -93,22 +86,16 @@ handle_info(refresh, #{current_period_ms := PeriodMs} = State) ->
DroppedStats = lists:foldl( DroppedStats = lists:foldl(
fun(Msg, Acc) -> fun(Msg, Acc) ->
case ?GET_SEQ(Msg) of case ?GET_SEQ(Msg) of
%% Should not happen, unless the static ids list is updated at run-time.
undefined -> undefined ->
%% Should not happen, unless the static ids list is updated at run-time. ?NEW_THROTTLE(Msg, ?NEW_SEQ),
new_throttler(Msg),
?tp(log_throttler_new_msg, #{throttled_msg => Msg}), ?tp(log_throttler_new_msg, #{throttled_msg => Msg}),
Acc; Acc;
SeqMap when is_map(SeqMap) ->
maps:fold(
fun(Key, Ref, Acc0) ->
ID = iolist_to_binary([atom_to_binary(Msg), $:, Key]),
drop_stats(Ref, ID, Acc0)
end,
Acc,
SeqMap
);
SeqRef -> SeqRef ->
drop_stats(SeqRef, Msg, Acc) Dropped = ?GET_DROPPED(SeqRef),
ok = ?RESET_SEQ(SeqRef),
?tp(log_throttler_dropped, #{dropped_count => Dropped, throttled_msg => Msg}),
maybe_add_dropped(Msg, Dropped, Acc)
end end
end, end,
#{}, #{},
@ -125,16 +112,7 @@ handle_info(Info, State) ->
?SLOG(error, #{msg => "unxpected_info", info => Info}), ?SLOG(error, #{msg => "unxpected_info", info => Info}),
{noreply, State}. {noreply, State}.
drop_stats(SeqRef, Msg, Acc) ->
Dropped = ?GET_DROPPED(SeqRef),
ok = ?RESET_SEQ(SeqRef),
?tp(log_throttler_dropped, #{dropped_count => Dropped, throttled_msg => Msg}),
maybe_add_dropped(Msg, Dropped, Acc).
terminate(_Reason, _State) -> terminate(_Reason, _State) ->
%% atomics do not have delete/remove/release/deallocate API
%% after the reference is garbage-collected the resource is released
lists:foreach(fun(Msg) -> ?ERASE_SEQ(Msg) end, ?MSGS_LIST),
ok. ok.
code_change(_OldVsn, State, _Extra) -> code_change(_OldVsn, State, _Extra) ->
@ -144,27 +122,17 @@ code_change(_OldVsn, State, _Extra) ->
%% internal functions %% internal functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
do_allow(Msg, UniqueKey) -> do_allow(Msg) ->
case persistent_term:get(?SEQ_ID(Msg), undefined) of case persistent_term:get(?SEQ_ID(Msg), undefined) of
undefined -> undefined ->
%% This is either a race condition (emqx_log_throttler is not started yet) %% This is either a race condition (emqx_log_throttler is not started yet)
%% or a developer mistake (msg used in ?SLOG_THROTTLE/2,3 macro is %% or a developer mistake (msg used in ?SLOG_THROTTLE/2,3 macro is
%% not added to the default value of `log.throttling.msgs`. %% not added to the default value of `log.throttling.msgs`.
?SLOG(debug, #{ ?SLOG(info, #{
msg => "log_throttle_disabled", msg => "missing_log_throttle_sequence",
throttled_msg => Msg throttled_msg => Msg
}), }),
true; true;
%% e.g: unrecoverable msg throttle according resource_id
SeqMap when is_map(SeqMap) ->
case maps:find(UniqueKey, SeqMap) of
{ok, SeqRef} ->
?IS_ALLOWED(SeqRef);
error ->
SeqRef = ?NEW_SEQ,
new_throttler(Msg, SeqMap#{UniqueKey => SeqRef}),
true
end;
SeqRef -> SeqRef ->
?IS_ALLOWED(SeqRef) ?IS_ALLOWED(SeqRef)
end. end.
@ -186,11 +154,3 @@ maybe_log_dropped(_DroppedStats, _PeriodMs) ->
schedule_refresh(PeriodMs) -> schedule_refresh(PeriodMs) ->
?tp(log_throttler_sched_refresh, #{new_period_ms => PeriodMs}), ?tp(log_throttler_sched_refresh, #{new_period_ms => PeriodMs}),
erlang:send_after(PeriodMs, ?MODULE, refresh). erlang:send_after(PeriodMs, ?MODULE, refresh).
new_throttler(unrecoverable_resource_error = Msg) ->
new_throttler(Msg, #{});
new_throttler(Msg) ->
new_throttler(Msg, ?NEW_SEQ).
new_throttler(Msg, AtomicOrEmptyMap) ->
persistent_term:put(?SEQ_ID(Msg), AtomicOrEmptyMap).

Some files were not shown because too many files have changed in this diff Show More