diff --git a/.editorconfig b/.editorconfig
index c563aa10d..719028b4d 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -20,8 +20,3 @@ indent_size = 4
# Tab indentation (no size specified)
[Makefile]
indent_style = tab
-
-# Matches the exact files either package.json or .travis.yml
-[{.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/.github/actions/docker-meta/action.yaml b/.github/actions/docker-meta/action.yaml
deleted file mode 100644
index 13ab21da6..000000000
--- a/.github/actions/docker-meta/action.yaml
+++ /dev/null
@@ -1,81 +0,0 @@
-name: 'Docker meta'
-inputs:
- profile:
- required: true
- type: string
- registry:
- required: true
- type: string
- arch:
- required: true
- type: string
- otp:
- required: true
- type: string
- elixir:
- required: false
- type: string
- default: ''
- builder_base:
- required: true
- type: string
- owner:
- required: true
- type: string
- docker_tags:
- required: true
- type: string
-
-outputs:
- emqx_name:
- description: "EMQX name"
- value: ${{ steps.pre-meta.outputs.emqx_name }}
- version:
- description: "docker image version"
- value: ${{ steps.meta.outputs.version }}
- tags:
- description: "docker image tags"
- value: ${{ steps.meta.outputs.tags }}
- labels:
- description: "docker image labels"
- value: ${{ steps.meta.outputs.labels }}
-
-runs:
- using: composite
- steps:
- - name: prepare for docker/metadata-action
- id: pre-meta
- shell: bash
- run: |
- emqx_name=${{ inputs.profile }}
- img_suffix=${{ inputs.arch }}
- img_labels="org.opencontainers.image.otp.version=${{ inputs.otp }}"
- if [ -n "${{ inputs.elixir }}" ]; then
- emqx_name="emqx-elixir"
- img_suffix="elixir-${{ inputs.arch }}"
- img_labels="org.opencontainers.image.elixir.version=${{ inputs.elixir }}\n${img_labels}"
- fi
- if [ "${{ inputs.profile }}" = "emqx" ]; then
- img_labels="org.opencontainers.image.edition=Opensource\n${img_labels}"
- fi
- if [ "${{ inputs.profile }}" = "emqx-enterprise" ]; then
- img_labels="org.opencontainers.image.edition=Enterprise\n${img_labels}"
- fi
- if [[ "${{ inputs.builder_base }}" =~ "alpine" ]]; then
- img_suffix="${img_suffix}-alpine"
- fi
- echo "emqx_name=${emqx_name}" >> $GITHUB_OUTPUT
- echo "img_suffix=${img_suffix}" >> $GITHUB_OUTPUT
- echo "img_labels=${img_labels}" >> $GITHUB_OUTPUT
- echo "img_name=${{ inputs.registry }}/${{ inputs.owner }}/${{ inputs.profile }}" >> $GITHUB_OUTPUT
- - uses: docker/metadata-action@v4
- id: meta
- with:
- images:
- ${{ steps.pre-meta.outputs.img_name }}
- flavor: |
- suffix=-${{ steps.pre-meta.outputs.img_suffix }}
- tags: |
- type=raw,value=${{ inputs.docker_tags }}
- labels:
- ${{ steps.pre-meta.outputs.img_labels }}
diff --git a/.github/workflows/build_and_push_docker_images.yaml b/.github/workflows/build_and_push_docker_images.yaml
index 76238c75f..71515f699 100644
--- a/.github/workflows/build_and_push_docker_images.yaml
+++ b/.github/workflows/build_and_push_docker_images.yaml
@@ -9,15 +9,17 @@ on:
tags:
- v*
- e*
- release:
- types:
- - published
+ - docker-latest-*
workflow_dispatch:
inputs:
branch_or_tag:
required: false
profile:
required: false
+ default: 'emqx'
+ is_latest:
+ required: false
+ default: false
jobs:
prepare:
@@ -26,10 +28,11 @@ jobs:
container: "ghcr.io/emqx/emqx-builder/5.0-26:1.13.4-24.3.4.2-1-ubuntu20.04"
outputs:
- BUILD_PROFILE: ${{ steps.get_profile.outputs.BUILD_PROFILE }}
- IS_DOCKER_LATEST: ${{ steps.get_profile.outputs.IS_DOCKER_LATEST }}
+ PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
+ EDITION: ${{ steps.get_profile.outputs.EDITION }}
+ IS_LATEST: ${{ steps.get_profile.outputs.IS_LATEST }}
IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }}
- DOCKER_TAG_VERSION: ${{ steps.get_profile.outputs.DOCKER_TAG_VERSION }}
+ VERSION: ${{ steps.get_profile.outputs.VERSION }}
steps:
- uses: actions/checkout@v3
@@ -45,14 +48,14 @@ jobs:
tag=${{ github.ref }}
# tag docker-latest-ce or docker-latest-ee
if git describe --tags --exact --match 'docker-latest-*' 2>/dev/null; then
- echo 'docker_latest=true due to docker-latest-* tag'
- docker_latest=true
- elif [ "${{ github.event_name }}" = "release" ]; then
- echo 'docker_latest=true due to release'
- docker_latest=true
+ echo 'is_latest=true due to docker-latest-* tag'
+ is_latest=true
+ elif [ "${{ inputs.is_latest }}" = "true" ]; then
+ echo 'is_latest=true due to manual input from workflow_dispatch'
+ is_latest=true
else
- echo 'docker_latest=false'
- docker_latest=false
+ echo 'is_latest=false'
+ is_latest=false
fi
if git describe --tags --match "[v|e]*" --exact; then
echo "This is an exact git tag, will publish images"
@@ -64,18 +67,20 @@ jobs:
case $tag in
refs/tags/v*)
PROFILE='emqx'
+ EDITION='Opensource'
;;
refs/tags/e*)
PROFILE=emqx-enterprise
+ EDITION='Enterprise'
;;
*)
PROFILE=${{ github.event.inputs.profile }}
case "$PROFILE" in
emqx)
- true
+ EDITION='Opensource'
;;
emqx-enterprise)
- true
+ EDITION='Enterprise'
;;
*)
echo "ERROR: Failed to resolve build profile"
@@ -85,14 +90,18 @@ jobs:
;;
esac
VSN="$(./pkg-vsn.sh "$PROFILE")"
- echo "Building $PROFILE image with tag $VSN (latest=$docker_latest)"
- echo "IS_DOCKER_LATEST=$docker_latest" >> $GITHUB_OUTPUT
+ echo "Building emqx/$PROFILE:$VSN image (latest=$is_latest)"
+ echo "Push = $is_exact"
+ echo "IS_LATEST=$is_latest" >> $GITHUB_OUTPUT
echo "IS_EXACT_TAG=$is_exact" >> $GITHUB_OUTPUT
- echo "BUILD_PROFILE=$PROFILE" >> $GITHUB_OUTPUT
- echo "DOCKER_TAG_VERSION=$VSN" >> $GITHUB_OUTPUT
+ echo "PROFILE=$PROFILE" >> $GITHUB_OUTPUT
+ echo "EDITION=$EDITION" >> $GITHUB_OUTPUT
+ echo "VERSION=$VSN" >> $GITHUB_OUTPUT
- name: get_all_deps
+ env:
+ PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
run: |
- make -C source deps-all
+ PROFILE=$PROFILE make -C source deps-$PROFILE
zip -ryq source.zip source/* source/.[^.]*
- uses: actions/upload-artifact@v3
with:
@@ -100,17 +109,17 @@ jobs:
path: source.zip
docker:
- runs-on: ${{ matrix.arch[1] }}
+ runs-on: ubuntu-20.04
needs: prepare
strategy:
fail-fast: false
matrix:
- arch:
- - [amd64, ubuntu-20.04]
- - [arm64, aws-arm64]
profile:
- - ${{ needs.prepare.outputs.BUILD_PROFILE }}
+ - "${{ needs.prepare.outputs.PROFILE }}"
+ flavor:
+ - ''
+ - '-elixir'
registry:
- 'docker.io'
- 'public.ecr.aws'
@@ -128,9 +137,10 @@ jobs:
exclude: # TODO: publish enterprise to ecr too?
- registry: 'public.ecr.aws'
profile: emqx-enterprise
+ - flavor: '-elixir'
+ os: [alpine3.15.1, "alpine:3.15.1", "deploy/docker/Dockerfile.alpine"]
+
steps:
- - uses: AutoModality/action-clean@v1
- if: matrix.arch[1] == 'aws-arm64'
- uses: actions/download-artifact@v3
with:
name: source
@@ -138,16 +148,17 @@ jobs:
- name: unzip source code
run: unzip -q source.zip
+ - uses: docker/setup-qemu-action@v2
- uses: docker/setup-buildx-action@v2
- - name: Login for docker.
+ - name: Login to hub.docker.com
uses: docker/login-action@v2
if: matrix.registry == 'docker.io'
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- - name: Login for AWS ECR
+ - name: Login to AWS ECR
uses: docker/login-action@v2
if: matrix.registry == 'public.ecr.aws'
with:
@@ -156,229 +167,48 @@ jobs:
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ecr: true
- - uses: ./source/.github/actions/docker-meta
+ - name: prepare for docker/metadata-action
+ id: pre-meta
+ shell: bash
+ run: |
+ extra_labels=
+ img_suffix=
+ flavor="${{ matrix.flavor }}"
+ if [ "${{ matrix.flavor }}" = '-elixir' ]; then
+ img_suffix="-elixir"
+ extra_labels="org.opencontainers.image.elixir.version=${{ matrix.elixir }}"
+ fi
+ if [[ "${{ matrix.os[0] }}" =~ "alpine" ]]; then
+ img_suffix="${img_suffix}-alpine"
+ fi
+
+ echo "img_suffix=$img_suffix" >> $GITHUB_OUTPUT
+ echo "extra_labels=$extra_labels" >> $GITHUB_OUTPUT
+
+ - uses: docker/metadata-action@v4
id: meta
with:
- profile: ${{ matrix.profile }}
- registry: ${{ matrix.registry }}
- arch: ${{ matrix.arch[0] }}
- otp: ${{ matrix.otp }}
- builder_base: ${{ matrix.os[0] }}
- owner: ${{ github.repository_owner }}
- docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
+ images: |
+ ${{ matrix.registry }}/${{ github.repository_owner }}/${{ matrix.profile }}
+ flavor: |
+ suffix=${{ steps.pre-meta.outputs.img_suffix }}
+ tags: |
+ type=raw,value=${{ needs.prepare.outputs.VERSION }}
+ type=raw,value=latest,enable=${{ needs.prepare.outputs.IS_LATEST }}
+ labels: |
+ org.opencontainers.image.otp.version=${{ matrix.otp }}
+ org.opencontainers.image.edition=${{ needs.prepare.outputs.EDITION }}
+ ${{ steps.pre-meta.outputs.extra_labels }}
- uses: docker/build-push-action@v3
with:
push: ${{ needs.prepare.outputs.IS_EXACT_TAG == 'true' || github.repository_owner != 'emqx' }}
pull: true
no-cache: true
- platforms: linux/${{ matrix.arch[0] }}
+ platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
- BUILD_FROM=ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }}
- RUN_FROM=${{ matrix.os[1] }}
- EMQX_NAME=${{ steps.meta.outputs.emqx_name }}
+ EMQX_NAME=${{ matrix.profile }}${{ matrix.flavor }}
file: source/${{ matrix.os[2] }}
context: source
-
- - name: Docker Hub Description
- if: matrix.registry == 'docker.io'
- uses: peter-evans/dockerhub-description@v3
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_PASSWORD }}
- repository: "emqx/${{ needs.prepare.outputs.BUILD_PROFILE }}"
- readme-filepath: ./source/deploy/docker/README.md
- short-description: "The most scalable open-source MQTT broker for IoT, IIoT, connected vehicles, and more."
-
- docker-elixir:
- runs-on: ${{ matrix.arch[1] }}
- needs: prepare
- # do not build elixir images for ee for now
- if: needs.prepare.outputs.BUILD_PROFILE == 'emqx'
-
- strategy:
- fail-fast: false
- matrix:
- arch:
- - [amd64, ubuntu-20.04]
- - [arm64, aws-arm64]
- profile:
- - ${{ needs.prepare.outputs.BUILD_PROFILE }}
- registry:
- - 'docker.io'
- os:
- - [debian11, "debian:11-slim", "deploy/docker/Dockerfile"]
- builder:
- - 5.0-26 # update to latest
- otp:
- - 25.1.2-2 # update to latest
- elixir:
- - 1.13.4 # update to latest
-
- steps:
- - uses: AutoModality/action-clean@v1
- if: matrix.arch[1] == 'aws-arm64'
- - uses: actions/download-artifact@v3
- with:
- name: source
- path: .
- - name: unzip source code
- run: unzip -q source.zip
-
- - uses: docker/setup-buildx-action@v2
-
- - name: Login for docker.
- uses: docker/login-action@v2
- with:
- username: ${{ secrets.DOCKER_HUB_USER }}
- password: ${{ secrets.DOCKER_HUB_TOKEN }}
-
- - uses: ./source/.github/actions/docker-meta
- id: meta
- with:
- profile: ${{ matrix.profile }}
- registry: ${{ matrix.registry }}
- arch: ${{ matrix.arch[0] }}
- otp: ${{ matrix.otp }}
- elixir: ${{ matrix.elixir }}
- builder_base: ${{ matrix.os[0] }}
- owner: ${{ github.repository_owner }}
- docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
-
- - uses: docker/build-push-action@v3
- with:
- push: ${{ needs.prepare.outputs.IS_EXACT_TAG == 'true' || github.repository_owner != 'emqx' }}
- pull: true
- no-cache: true
- platforms: linux/${{ matrix.arch[0] }}
- tags: ${{ steps.meta.outputs.tags }}
- labels: ${{ steps.meta.outputs.labels }}
- build-args: |
- BUILD_FROM=ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }}
- RUN_FROM=${{ matrix.os[1] }}
- EMQX_NAME=${{ steps.meta.outputs.emqx_name }}
- file: source/${{ matrix.os[2] }}
- context: source
-
- docker-push-multi-arch-manifest:
- # note, we only run on amd64
- if: needs.prepare.outputs.IS_EXACT_TAG
- needs:
- - prepare
- - docker
- runs-on: ${{ matrix.arch[1] }}
- strategy:
- fail-fast: false
- matrix:
- arch:
- - [amd64, ubuntu-20.04]
- profile:
- - ${{ needs.prepare.outputs.BUILD_PROFILE }}
- os:
- - [alpine3.15.1, "alpine:3.15.1", "deploy/docker/Dockerfile.alpine"]
- - [debian11, "debian:11-slim", "deploy/docker/Dockerfile"]
- # NOTE: only support latest otp version, not a matrix
- otp:
- - 24.3.4.2-1 # switch to 25 once ready to release 5.1
- registry:
- - 'docker.io'
- - 'public.ecr.aws'
- exclude:
- - registry: 'public.ecr.aws'
- profile: emqx-enterprise
-
- steps:
- - uses: actions/download-artifact@v3
- with:
- name: source
- path: .
-
- - name: unzip source code
- run: unzip -q source.zip
-
- - uses: docker/login-action@v2
- if: matrix.registry == 'docker.io'
- with:
- username: ${{ secrets.DOCKER_HUB_USER }}
- password: ${{ secrets.DOCKER_HUB_TOKEN }}
-
- - uses: docker/login-action@v2
- if: matrix.registry == 'public.ecr.aws'
- with:
- registry: public.ecr.aws
- username: ${{ secrets.AWS_ACCESS_KEY_ID }}
- password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- ecr: true
-
- - uses: ./source/.github/actions/docker-meta
- id: meta
- with:
- profile: ${{ matrix.profile }}
- registry: ${{ matrix.registry }}
- arch: ${{ matrix.arch[0] }}
- otp: ${{ matrix.otp }}
- builder_base: ${{ matrix.os[0] }}
- owner: ${{ github.repository_owner }}
- docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
-
- - name: update manifest for multiarch image
- working-directory: source
- run: |
- is_latest="${{ needs.prepare.outputs.IS_DOCKER_LATEST }}"
- scripts/docker-create-push-manifests.sh "${{ steps.meta.outputs.tags }}" "$is_latest"
-
- docker-elixir-push-multi-arch-manifest:
- # note, we only run on amd64
- # do not build enterprise elixir images for now
- if: needs.prepare.outputs.IS_EXACT_TAG == 'true' && needs.prepare.outputs.BUILD_PROFILE == 'emqx'
- needs:
- - prepare
- - docker-elixir
- runs-on: ${{ matrix.arch[1] }}
- strategy:
- fail-fast: false
- matrix:
- arch:
- - [amd64, ubuntu-20.04]
- profile:
- - ${{ needs.prepare.outputs.BUILD_PROFILE }}
- # NOTE: for docker, only support latest otp version, not a matrix
- otp:
- - 25.1.2-2 # update to latest
- elixir:
- - 1.13.4 # update to latest
- registry:
- - 'docker.io'
-
- steps:
- - uses: actions/download-artifact@v3
- with:
- name: source
- path: .
-
- - name: unzip source code
- run: unzip -q source.zip
-
- - uses: docker/login-action@v2
- with:
- username: ${{ secrets.DOCKER_HUB_USER }}
- password: ${{ secrets.DOCKER_HUB_TOKEN }}
-
- - uses: ./source/.github/actions/docker-meta
- id: meta
- with:
- profile: ${{ matrix.profile }}
- registry: ${{ matrix.registry }}
- arch: ${{ matrix.arch[0] }}
- otp: ${{ matrix.otp }}
- elixir: ${{ matrix.elixir }}
- builder_base: ${{ matrix.os[0] }}
- owner: ${{ github.repository_owner }}
- docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
-
- - name: update manifest for multiarch image
- working-directory: source
- run: |
- scripts/docker-create-push-manifests.sh "${{ steps.meta.outputs.tags }}" false
diff --git a/.github/workflows/run_fvt_tests.yaml b/.github/workflows/run_fvt_tests.yaml
index 9e44034fb..a54bb68dd 100644
--- a/.github/workflows/run_fvt_tests.yaml
+++ b/.github/workflows/run_fvt_tests.yaml
@@ -201,12 +201,25 @@ jobs:
echo "waiting emqx started";
sleep 10;
done
+ - name: Get Token
+ timeout-minutes: 1
+ run: |
+ kubectl port-forward service/${{ matrix.profile }} 18083:18083 > /dev/null &
+
+ while
+ [ "$(curl --silent -X 'GET' 'http://127.0.0.1:18083/api/v5/status' | tail -n1)" != "emqx is running" ]
+ do
+ echo "waiting emqx"
+ sleep 1
+ done
+
+ echo "TOKEN=$(curl --silent -X 'POST' 'http://127.0.0.1:18083/api/v5/login' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"username": "admin","password": "public"}' | jq -r ".token")" >> $GITHUB_ENV
+
- name: Check cluster
timeout-minutes: 10
run: |
- kubectl port-forward service/${{ matrix.profile }} 18083:18083 > /dev/null &
while
- [ "$(curl --silent --basic -u admin:public -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" != "3" ];
+ [ "$(curl --silent -H "Authorization: Bearer $TOKEN" -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" != "3" ];
do
echo "waiting ${{ matrix.profile }} cluster scale"
sleep 1
diff --git a/.github/workflows/run_jmeter_tests.yaml b/.github/workflows/run_jmeter_tests.yaml
index ba64b6d94..81923dba5 100644
--- a/.github/workflows/run_jmeter_tests.yaml
+++ b/.github/workflows/run_jmeter_tests.yaml
@@ -92,7 +92,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
- ref: broker-autotest-v3
+ ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@v3
with:
@@ -191,7 +191,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
- ref: broker-autotest-v3
+ ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@v3
with:
@@ -297,7 +297,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
- ref: broker-autotest-v3
+ ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@v3
with:
@@ -396,7 +396,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
- ref: broker-autotest-v3
+ ref: broker-autotest-v5
path: scripts
- name: run jwks_server
timeout-minutes: 10
@@ -496,7 +496,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
- ref: broker-autotest-v3
+ ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@v3
with:
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 118e9a046..272a602e9 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -55,7 +55,7 @@ Must be one of the following:
- **chore**: Updating grunt tasks etc; no production code change
- **perf**: A code change that improves performance
- **test**: Adding missing tests, refactoring tests; no production code change
-- **build**: Changes that affect the CI/CD pipeline or build system or external dependencies (example scopes: travis, jenkins, makefile)
+- **build**: Changes that affect the CI/CD pipeline or build system or external dependencies (example scopes: jenkins, makefile)
- **ci**: Changes provided by DevOps for CI purposes.
- **revert**: Reverts a previous commit.
diff --git a/Makefile b/Makefile
index 79107cba9..faa866753 100644
--- a/Makefile
+++ b/Makefile
@@ -6,8 +6,8 @@ export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.0-26:1.13.4-24.3.4.2-1
export EMQX_DEFAULT_RUNNER = debian:11-slim
export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
export ELIXIR_VSN ?= $(shell $(CURDIR)/scripts/get-elixir-vsn.sh)
-export EMQX_DASHBOARD_VERSION ?= v1.1.4
-export EMQX_EE_DASHBOARD_VERSION ?= e1.0.1-beta.9
+export EMQX_DASHBOARD_VERSION ?= v1.1.5
+export EMQX_EE_DASHBOARD_VERSION ?= e1.0.1-beta.12
export EMQX_REL_FORM ?= tgz
export QUICER_DOWNLOAD_FROM_RELEASE = 1
ifeq ($(OS),Windows_NT)
diff --git a/README-CN.md b/README-CN.md
index 7e8cdd8a7..193e5ab98 100644
--- a/README-CN.md
+++ b/README-CN.md
@@ -1,7 +1,7 @@
# EMQX
[](https://github.com/emqx/emqx/releases)
-[](https://travis-ci.org/emqx/emqx)
+[](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
[](https://coveralls.io/github/emqx/emqx?branch=master)
[](https://hub.docker.com/r/emqx/emqx)
[](https://slack-invite.emqx.io/)
diff --git a/README-RU.md b/README-RU.md
index 8a35177af..fb5ff9608 100644
--- a/README-RU.md
+++ b/README-RU.md
@@ -1,7 +1,7 @@
# Брокер EMQX
[](https://github.com/emqx/emqx/releases)
-[](https://travis-ci.org/emqx/emqx)
+[](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
[](https://coveralls.io/github/emqx/emqx?branch=master)
[](https://hub.docker.com/r/emqx/emqx)
[](https://slack-invite.emqx.io/)
diff --git a/README.md b/README.md
index 1831ced60..94baba04f 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
# EMQX
[](https://github.com/emqx/emqx/releases)
-[](https://travis-ci.org/emqx/emqx)
+[](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
[](https://coveralls.io/github/emqx/emqx?branch=master)
[](https://hub.docker.com/r/emqx/emqx)
[](https://slack-invite.emqx.io/)
diff --git a/apps/emqx/i18n/emqx_schema_i18n.conf b/apps/emqx/i18n/emqx_schema_i18n.conf
index 750c0c2cd..0665cfb09 100644
--- a/apps/emqx/i18n/emqx_schema_i18n.conf
+++ b/apps/emqx/i18n/emqx_schema_i18n.conf
@@ -2050,7 +2050,7 @@ base_listener_enable_authn {
Set true (default) to enable client authentication on this listener, the authentication
process goes through the configured authentication chain.
When set to false to allow any clients with or without authentication information such as username or password to log in.
-When set to quick_deny_anonymous, it behaves like when set to true but clients will be
+When set to quick_deny_anonymous, it behaves like when set to true, but clients will be
denied immediately without going through any authenticators if username is not provided. This is useful to fence off
anonymous clients early.
"""
diff --git a/apps/emqx/include/emqx_release.hrl b/apps/emqx/include/emqx_release.hrl
index 98b17eae7..e749ebc2e 100644
--- a/apps/emqx/include/emqx_release.hrl
+++ b/apps/emqx/include/emqx_release.hrl
@@ -32,7 +32,7 @@
%% `apps/emqx/src/bpapi/README.md'
%% Community edition
--define(EMQX_RELEASE_CE, "5.0.13").
+-define(EMQX_RELEASE_CE, "5.0.14").
%% Enterprise edition
-define(EMQX_RELEASE_EE, "5.0.0-beta.6").
diff --git a/apps/emqx/include/http_api.hrl b/apps/emqx/include/http_api.hrl
index 858ce96ce..08dd08362 100644
--- a/apps/emqx/include/http_api.hrl
+++ b/apps/emqx/include/http_api.hrl
@@ -15,10 +15,8 @@
%%--------------------------------------------------------------------
%% HTTP API Auth
--define(WRONG_USERNAME_OR_PWD, 'WRONG_USERNAME_OR_PWD').
--define(WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET,
- 'WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET'
-).
+-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
+-define(BAD_API_KEY_OR_SECRET, 'BAD_API_KEY_OR_SECRET').
%% Bad Request
-define(BAD_REQUEST, 'BAD_REQUEST').
@@ -57,8 +55,8 @@
%% All codes
-define(ERROR_CODES, [
- {'WRONG_USERNAME_OR_PWD', <<"Wrong username or pwd">>},
- {'WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET', <<"Wrong username & pwd or key & secret">>},
+ {?BAD_USERNAME_OR_PWD, <<"Bad username or password">>},
+ {?BAD_API_KEY_OR_SECRET, <<"Bad API key or secret">>},
{'BAD_REQUEST', <<"Request parameters are not legal">>},
{'NOT_MATCH', <<"Conditions are not matched">>},
{'ALREADY_EXISTS', <<"Resource already existed">>},
diff --git a/apps/emqx/rebar.config b/apps/emqx/rebar.config
index 8ff4fea58..82be1cb9a 100644
--- a/apps/emqx/rebar.config
+++ b/apps/emqx/rebar.config
@@ -29,7 +29,7 @@
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.13.7"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
- {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.33.0"}}},
+ {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.35.0"}}},
{pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
{recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.0"}}}
diff --git a/apps/emqx/src/emqx.app.src b/apps/emqx/src/emqx.app.src
index bd7617e74..40d2796cd 100644
--- a/apps/emqx/src/emqx.app.src
+++ b/apps/emqx/src/emqx.app.src
@@ -3,7 +3,7 @@
{id, "emqx"},
{description, "EMQX Core"},
% strict semver, bump manually!
- {vsn, "5.0.14"},
+ {vsn, "5.0.15"},
{modules, []},
{registered, []},
{applications, [
diff --git a/apps/emqx/src/emqx_config.erl b/apps/emqx/src/emqx_config.erl
index 49962e490..2fa39d094 100644
--- a/apps/emqx/src/emqx_config.erl
+++ b/apps/emqx/src/emqx_config.erl
@@ -362,8 +362,8 @@ schema_default(Schema) ->
[];
?LAZY(?ARRAY(_)) ->
[];
- ?LAZY(?UNION(Unions)) ->
- case [A || ?ARRAY(A) <- Unions] of
+ ?LAZY(?UNION(Members)) ->
+ case [A || ?ARRAY(A) <- hoconsc:union_members(Members)] of
[_ | _] -> [];
_ -> #{}
end;
@@ -402,7 +402,6 @@ merge_envs(SchemaMod, RawConf) ->
required => false,
format => map,
apply_override_envs => true,
- remove_env_meta => true,
check_lazy => true
},
hocon_tconf:merge_env_overrides(SchemaMod, RawConf, all, Opts).
diff --git a/apps/emqx/src/emqx_hocon.erl b/apps/emqx/src/emqx_hocon.erl
index 7e9dbca77..4391a9a0b 100644
--- a/apps/emqx/src/emqx_hocon.erl
+++ b/apps/emqx/src/emqx_hocon.erl
@@ -21,7 +21,8 @@
format_path/1,
check/2,
format_error/1,
- format_error/2
+ format_error/2,
+ make_schema/1
]).
%% @doc Format hocon config field path to dot-separated string in iolist format.
@@ -79,6 +80,9 @@ format_error({_Schema, [#{kind := K} = First | Rest] = All}, Opts) when
format_error(_Other, _) ->
false.
+make_schema(Fields) ->
+ #{roots => Fields, fields => #{}}.
+
%% Ensure iolist()
iol(B) when is_binary(B) -> B;
iol(A) when is_atom(A) -> atom_to_binary(A, utf8);
diff --git a/apps/emqx/src/emqx_listeners.erl b/apps/emqx/src/emqx_listeners.erl
index fb6096e80..003c8785e 100644
--- a/apps/emqx/src/emqx_listeners.erl
+++ b/apps/emqx/src/emqx_listeners.erl
@@ -57,6 +57,10 @@
-export([format_bind/1]).
+-ifdef(TEST).
+-export([certs_dir/2]).
+-endif.
+
-define(CONF_KEY_PATH, [listeners, '?', '?']).
-define(TYPES_STRING, ["tcp", "ssl", "ws", "wss", "quic"]).
@@ -415,6 +419,7 @@ pre_config_update(_Path, _Request, RawConf) ->
post_config_update([listeners, Type, Name], {create, _Request}, NewConf, undefined, _AppEnvs) ->
start_listener(Type, Name, NewConf);
post_config_update([listeners, Type, Name], {update, _Request}, NewConf, OldConf, _AppEnvs) ->
+ try_clear_ssl_files(certs_dir(Type, Name), NewConf, OldConf),
case NewConf of
#{enabled := true} -> restart_listener(Type, Name, {OldConf, NewConf});
_ -> ok
@@ -670,7 +675,7 @@ certs_dir(Type, Name) ->
iolist_to_binary(filename:join(["listeners", Type, Name])).
convert_certs(CertsDir, Conf) ->
- case emqx_tls_lib:ensure_ssl_files(CertsDir, maps:get(<<"ssl_options">>, Conf, undefined)) of
+ case emqx_tls_lib:ensure_ssl_files(CertsDir, get_ssl_options(Conf)) of
{ok, undefined} ->
Conf;
{ok, SSL} ->
@@ -681,7 +686,7 @@ convert_certs(CertsDir, Conf) ->
end.
clear_certs(CertsDir, Conf) ->
- OldSSL = maps:get(<<"ssl_options">>, Conf, undefined),
+ OldSSL = get_ssl_options(Conf),
emqx_tls_lib:delete_ssl_files(CertsDir, undefined, OldSSL).
filter_stacktrace({Reason, _Stacktrace}) -> Reason;
@@ -692,3 +697,16 @@ ensure_override_limiter_conf(Conf, #{<<"limiter">> := Limiter}) ->
Conf#{<<"limiter">> => Limiter};
ensure_override_limiter_conf(Conf, _) ->
Conf.
+
+try_clear_ssl_files(CertsDir, NewConf, OldConf) ->
+ NewSSL = get_ssl_options(NewConf),
+ OldSSL = get_ssl_options(OldConf),
+ emqx_tls_lib:delete_ssl_files(CertsDir, NewSSL, OldSSL).
+
+get_ssl_options(Conf) ->
+ case maps:find(ssl_options, Conf) of
+ {ok, SSL} ->
+ SSL;
+ error ->
+ maps:get(<<"ssl_options">>, Conf, undefined)
+ end.
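
A small sketch of what the new get_ssl_options/1 helper accepts: listener config may reach this module either as raw config (binary keys) or as checked config (atom keys), and the helper handles both. The maps below are placeholders, and the helper is internal to emqx_listeners, so this is illustrative only:

    %% Placeholder config maps; only the key shape matters here.
    Raw = #{<<"ssl_options">> => #{<<"certfile">> => <<"cert.pem">>}},
    Checked = #{ssl_options => #{certfile => <<"cert.pem">>}},
    #{<<"certfile">> := _} = get_ssl_options(Raw),      %% raw (binary-keyed) config
    #{certfile := _} = get_ssl_options(Checked),        %% checked (atom-keyed) config
    undefined = get_ssl_options(#{bind => 1883}).       %% no ssl_options present
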
diff --git a/apps/emqx/src/emqx_metrics_worker.erl b/apps/emqx/src/emqx_metrics_worker.erl
index 241ba599f..5f41346cb 100644
--- a/apps/emqx/src/emqx_metrics_worker.erl
+++ b/apps/emqx/src/emqx_metrics_worker.erl
@@ -31,6 +31,7 @@
-export([
inc/3,
inc/4,
+ observe/4,
get/3,
get_gauge/3,
set_gauge/5,
@@ -38,6 +39,8 @@
get_gauges/2,
delete_gauges/2,
get_rate/2,
+ get_slide/2,
+ get_slide/3,
get_counters/2,
create_metrics/3,
create_metrics/4,
@@ -67,7 +70,16 @@
-define(SAMPLING, 1).
-endif.
--export_type([metrics/0, handler_name/0, metric_id/0]).
+-export_type([metrics/0, handler_name/0, metric_id/0, metric_spec/0]).
+
+% Default
+-type metric_type() ::
+ %% Simple counter
+ counter
+ %% Sliding window average
+ | slide.
+
+-type metric_spec() :: {metric_type(), atom()}.
-type rate() :: #{
current := float(),
@@ -77,6 +89,7 @@
-type metrics() :: #{
counters := #{metric_name() => integer()},
gauges := #{metric_name() => integer()},
+ slides := #{metric_name() => number()},
rate := #{metric_name() => rate()}
}.
-type handler_name() :: atom().
@@ -103,9 +116,22 @@
last5m_smpl = [] :: list()
}).
+-record(slide_datapoint, {
+ sum :: non_neg_integer(),
+ samples :: non_neg_integer(),
+ time :: non_neg_integer()
+}).
+
+-record(slide, {
+ %% Total number of samples through the history
+ n_samples = 0 :: non_neg_integer(),
+ datapoints = [] :: [#slide_datapoint{}]
+}).
+
-record(state, {
metric_ids = sets:new(),
- rates :: undefined | #{metric_id() => #rate{}}
+ rates :: #{metric_id() => #{metric_name() => #rate{}}} | undefined,
+ slides = #{} :: #{metric_id() => #{metric_name() => #slide{}}}
}).
%%------------------------------------------------------------------------------
@@ -126,14 +152,18 @@ child_spec(ChldName, Name) ->
modules => [emqx_metrics_worker]
}.
--spec create_metrics(handler_name(), metric_id(), [metric_name()]) -> ok | {error, term()}.
+-spec create_metrics(handler_name(), metric_id(), [metric_spec() | metric_name()]) ->
+ ok | {error, term()}.
create_metrics(Name, Id, Metrics) ->
- create_metrics(Name, Id, Metrics, Metrics).
+ Metrics1 = desugar(Metrics),
+ Counters = filter_counters(Metrics1),
+ create_metrics(Name, Id, Metrics1, Counters).
--spec create_metrics(handler_name(), metric_id(), [metric_name()], [metric_name()]) ->
+-spec create_metrics(handler_name(), metric_id(), [metric_spec() | metric_name()], [atom()]) ->
ok | {error, term()}.
create_metrics(Name, Id, Metrics, RateMetrics) ->
- gen_server:call(Name, {create_metrics, Id, Metrics, RateMetrics}).
+ Metrics1 = desugar(Metrics),
+ gen_server:call(Name, {create_metrics, Id, Metrics1, RateMetrics}).
-spec clear_metrics(handler_name(), metric_id()) -> ok.
clear_metrics(Name, Id) ->
@@ -156,7 +186,7 @@ get(Name, Id, Metric) ->
not_found ->
0;
Ref when is_atom(Metric) ->
- counters:get(Ref, idx_metric(Name, Id, Metric));
+ counters:get(Ref, idx_metric(Name, Id, counter, Metric));
Ref when is_integer(Metric) ->
counters:get(Ref, Metric)
end.
@@ -171,21 +201,37 @@ get_counters(Name, Id) ->
fun(_Metric, Index) ->
get(Name, Id, Index)
end,
- get_indexes(Name, Id)
+ get_indexes(Name, counter, Id)
).
+-spec get_slide(handler_name(), metric_id()) -> map().
+get_slide(Name, Id) ->
+ gen_server:call(Name, {get_slide, Id}).
+
+%% Get the average for a specified sliding window period.
+%%
+%% It will only account for the samples recorded in the past `Window' seconds.
+-spec get_slide(handler_name(), metric_id(), non_neg_integer()) -> number().
+get_slide(Name, Id, Window) ->
+ gen_server:call(Name, {get_slide, Id, Window}).
+
-spec reset_counters(handler_name(), metric_id()) -> ok.
reset_counters(Name, Id) ->
- Indexes = maps:values(get_indexes(Name, Id)),
- Ref = get_ref(Name, Id),
- lists:foreach(fun(Idx) -> counters:put(Ref, Idx, 0) end, Indexes).
+ case get_ref(Name, Id) of
+ not_found ->
+ ok;
+ Ref ->
+ #{size := Size} = counters:info(Ref),
+ lists:foreach(fun(Idx) -> counters:put(Ref, Idx, 0) end, lists:seq(1, Size))
+ end.
-spec get_metrics(handler_name(), metric_id()) -> metrics().
get_metrics(Name, Id) ->
#{
rate => get_rate(Name, Id),
counters => get_counters(Name, Id),
- gauges => get_gauges(Name, Id)
+ gauges => get_gauges(Name, Id),
+ slides => get_slide(Name, Id)
}.
-spec inc(handler_name(), metric_id(), atom()) -> ok.
@@ -194,7 +240,37 @@ inc(Name, Id, Metric) ->
-spec inc(handler_name(), metric_id(), metric_name(), integer()) -> ok.
inc(Name, Id, Metric, Val) ->
- counters:add(get_ref(Name, Id), idx_metric(Name, Id, Metric), Val).
+ counters:add(get_ref(Name, Id), idx_metric(Name, Id, counter, Metric), Val).
+
+%% Add a sample to the slide.
+%%
+%% Slide is short for "sliding window average" type of metric.
+%%
+%% It allows to monitor an average of some observed values in time,
+%% and it's mainly used for performance analysis. For example, it can
+%% be used to report run time of operations.
+%%
+%% Consider an example:
+%%
+%% ```
+%% emqx_metrics_worker:create_metrics(Name, Id, [{slide, a}]),
+%% emqx_metrics_worker:observe(Name, Id, a, 10),
+%% emqx_metrics_worker:observe(Name, Id, a, 30),
+%% #{a := 20} = emqx_metrics_worker:get_slide(Name, Id, _Window = 1).
+%% '''
+%%
+%% After recording 2 samples, this metric becomes 20 (the average of 10 and 30).
+%%
+%% But after 1 second it becomes 0 again, unless new samples are recorded.
+%%
+-spec observe(handler_name(), metric_id(), atom(), integer()) -> ok.
+observe(Name, Id, Metric, Val) ->
+ #{ref := CRef, slide := Idx} = maps:get(Id, get_pterm(Name)),
+ Index = maps:get(Metric, Idx),
+ %% Update sum:
+ counters:add(CRef, Index, Val),
+ %% Update number of samples:
+ counters:add(CRef, Index + 1, 1).
-spec set_gauge(handler_name(), metric_id(), worker_id(), metric_name(), integer()) -> ok.
set_gauge(Name, Id, WorkerId, Metric, Val) ->
@@ -300,9 +376,9 @@ handle_call({get_rate, Id}, _From, State = #state{rates = Rates}) ->
handle_call(
{create_metrics, Id, Metrics, RateMetrics},
_From,
- State = #state{metric_ids = MIDs, rates = Rates}
+ State = #state{metric_ids = MIDs, rates = Rates, slides = Slides}
) ->
- case RateMetrics -- Metrics of
+ case RateMetrics -- filter_counters(Metrics) of
[] ->
RatePerId = maps:from_list([{M, #rate{}} || M <- RateMetrics]),
Rate1 =
@@ -310,9 +386,11 @@ handle_call(
undefined -> #{Id => RatePerId};
_ -> Rates#{Id => RatePerId}
end,
+ Slides1 = Slides#{Id => create_slides(Metrics)},
{reply, create_counters(get_self_name(), Id, Metrics), State#state{
metric_ids = sets:add_element(Id, MIDs),
- rates = Rate1
+ rates = Rate1,
+ slides = Slides1
}};
_ ->
{reply, {error, not_super_set_of, {RateMetrics, Metrics}}, State}
@@ -320,7 +398,7 @@ handle_call(
handle_call(
{delete_metrics, Id},
_From,
- State = #state{metric_ids = MIDs, rates = Rates}
+ State = #state{metric_ids = MIDs, rates = Rates, slides = Slides}
) ->
Name = get_self_name(),
delete_counters(Name, Id),
@@ -331,29 +409,43 @@ handle_call(
case Rates of
undefined -> undefined;
_ -> maps:remove(Id, Rates)
- end
+ end,
+ slides = maps:remove(Id, Slides)
}};
handle_call(
{reset_metrics, Id},
_From,
- State = #state{rates = Rates}
+ State = #state{rates = Rates, slides = Slides}
) ->
- Name = get_self_name(),
- delete_gauges(Name, Id),
- {reply, reset_counters(Name, Id), State#state{
+ delete_gauges(get_self_name(), Id),
+ NewRates =
+ case Rates of
+ undefined ->
+ undefined;
+ _ ->
+ ResetRate =
+ maps:map(
+ fun(_Key, _Value) -> #rate{} end,
+ maps:get(Id, Rates, #{})
+ ),
+ maps:put(Id, ResetRate, Rates)
+ end,
+ SlideSpecs = [{slide, I} || I <- maps:keys(maps:get(Id, Slides, #{}))],
+ NewSlides = Slides#{Id => create_slides(SlideSpecs)},
+ {reply, reset_counters(get_self_name(), Id), State#state{
rates =
- case Rates of
- undefined ->
- undefined;
- _ ->
- ResetRate =
- maps:map(
- fun(_Key, _Value) -> #rate{} end,
- maps:get(Id, Rates, #{})
- ),
- maps:put(Id, ResetRate, Rates)
- end
+ NewRates,
+ slides = NewSlides
}};
+handle_call({get_slide, Id}, _From, State = #state{slides = Slides}) ->
+ SlidesForID = maps:get(Id, Slides, #{}),
+ {reply, maps:map(fun(Metric, Slide) -> do_get_slide(Id, Metric, Slide) end, SlidesForID),
+ State};
+handle_call({get_slide, Id, Window}, _From, State = #state{slides = Slides}) ->
+ SlidesForID = maps:get(Id, Slides, #{}),
+ {reply,
+ maps:map(fun(Metric, Slide) -> do_get_slide(Window, Id, Metric, Slide) end, SlidesForID),
+ State};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
@@ -363,7 +455,7 @@ handle_cast(_Msg, State) ->
handle_info(ticking, State = #state{rates = undefined}) ->
erlang:send_after(timer:seconds(?SAMPLING), self(), ticking),
{noreply, State};
-handle_info(ticking, State = #state{rates = Rates0}) ->
+handle_info(ticking, State = #state{rates = Rates0, slides = Slides0}) ->
Rates =
maps:map(
fun(Id, RatesPerID) ->
@@ -376,8 +468,20 @@ handle_info(ticking, State = #state{rates = Rates0}) ->
end,
Rates0
),
+ Slides =
+ maps:map(
+ fun(Id, SlidesPerID) ->
+ maps:map(
+ fun(Metric, Slide) ->
+ update_slide(Id, Metric, Slide)
+ end,
+ SlidesPerID
+ )
+ end,
+ Slides0
+ ),
erlang:send_after(timer:seconds(?SAMPLING), self(), ticking),
- {noreply, State#state{rates = Rates}};
+ {noreply, State#state{rates = Rates, slides = Slides}};
handle_info(_Info, State) ->
{noreply, State}.
@@ -408,17 +512,18 @@ create_counters(_Name, _Id, []) ->
error({create_counter_error, must_provide_a_list_of_metrics});
create_counters(Name, Id, Metrics) ->
%% backup the old counters
- OlderCounters = maps:with(Metrics, get_counters(Name, Id)),
+ OlderCounters = maps:with(filter_counters(Metrics), get_counters(Name, Id)),
%% create the new counter
- Size = length(Metrics),
- Indexes = maps:from_list(lists:zip(Metrics, lists:seq(1, Size))),
+ {Size, Indexes} = create_metric_indexes(Metrics),
Counters = get_pterm(Name),
CntrRef = counters:new(Size, [write_concurrency]),
persistent_term:put(
?CntrRef(Name),
- Counters#{Id => #{ref => CntrRef, indexes => Indexes}}
+ Counters#{Id => Indexes#{ref => CntrRef}}
),
- %% restore the old counters
+ %% Restore the old counters. Slides are not restored, since they
+ %% are periodically zeroed anyway. We do lose some samples in the
+ %% current interval, but that's acceptable for now.
lists:foreach(
fun({Metric, N}) ->
inc(Name, Id, Metric, N)
@@ -426,6 +531,16 @@ create_counters(Name, Id, Metrics) ->
maps:to_list(OlderCounters)
).
+create_metric_indexes(Metrics) ->
+ create_metric_indexes(Metrics, 1, [], []).
+
+create_metric_indexes([], Size, Counters, Slides) ->
+ {Size, #{counter => maps:from_list(Counters), slide => maps:from_list(Slides)}};
+create_metric_indexes([{counter, Id} | Rest], Index, Counters, Slides) ->
+ create_metric_indexes(Rest, Index + 1, [{Id, Index} | Counters], Slides);
+create_metric_indexes([{slide, Id} | Rest], Index, Counters, Slides) ->
+ create_metric_indexes(Rest, Index + 2, Counters, [{Id, Index} | Slides]).
+
delete_counters(Name, Id) ->
persistent_term:put(?CntrRef(Name), maps:remove(Id, get_pterm(Name))).
@@ -435,12 +550,12 @@ get_ref(Name, Id) ->
error -> not_found
end.
-idx_metric(Name, Id, Metric) ->
- maps:get(Metric, get_indexes(Name, Id)).
+idx_metric(Name, Id, Type, Metric) ->
+ maps:get(Metric, get_indexes(Name, Type, Id)).
-get_indexes(Name, Id) ->
+get_indexes(Name, Type, Id) ->
case maps:find(Id, get_pterm(Name)) of
- {ok, #{indexes := Indexes}} -> Indexes;
+ {ok, #{Type := Indexes}} -> Indexes;
error -> #{}
end.
@@ -488,6 +603,53 @@ calculate_rate(CurrVal, #rate{
tick = Tick + 1
}.
+do_get_slide(Id, Metric, S = #slide{n_samples = NSamples}) ->
+ #{
+ n_samples => NSamples,
+ current => do_get_slide(2, Id, Metric, S),
+ last5m => do_get_slide(?SECS_5M, Id, Metric, S)
+ }.
+
+do_get_slide(Window, Id, Metric, #slide{datapoints = DP0}) ->
+ Datapoint = get_slide_datapoint(Id, Metric),
+ {N, Sum} = get_slide_window(os:system_time(second) - Window, [Datapoint | DP0], 0, 0),
+ case N > 0 of
+ true -> Sum div N;
+ false -> 0
+ end.
+
+get_slide_window(_StartTime, [], N, S) ->
+ {N, S};
+get_slide_window(StartTime, [#slide_datapoint{time = T} | _], N, S) when T < StartTime ->
+ {N, S};
+get_slide_window(StartTime, [#slide_datapoint{samples = N, sum = S} | Rest], AccN, AccS) ->
+ get_slide_window(StartTime, Rest, AccN + N, AccS + S).
+
+get_slide_datapoint(Id, Metric) ->
+ Name = get_self_name(),
+ CRef = get_ref(Name, Id),
+ Index = idx_metric(Name, Id, slide, Metric),
+ Total = counters:get(CRef, Index),
+ N = counters:get(CRef, Index + 1),
+ #slide_datapoint{
+ sum = Total,
+ samples = N,
+ time = os:system_time(second)
+ }.
+
+update_slide(Id, Metric, Slide0 = #slide{n_samples = NSamples, datapoints = DPs}) ->
+ Datapoint = get_slide_datapoint(Id, Metric),
+ %% Reset counters:
+ Name = get_self_name(),
+ CRef = get_ref(Name, Id),
+ Index = idx_metric(Name, Id, slide, Metric),
+ counters:put(CRef, Index, 0),
+ counters:put(CRef, Index + 1, 0),
+ Slide0#slide{
+ datapoints = [Datapoint | lists:droplast(DPs)],
+ n_samples = Datapoint#slide_datapoint.samples + NSamples
+ }.
+
format_rates_of_id(RatesPerId) ->
maps:map(
fun(_Metric, Rates) ->
@@ -510,6 +672,27 @@ precision(Float, N) ->
Base = math:pow(10, N),
round(Float * Base) / Base.
+desugar(Metrics) ->
+ lists:map(
+ fun
+ (Atom) when is_atom(Atom) ->
+ {counter, Atom};
+ (Spec = {_, _}) ->
+ Spec
+ end,
+ Metrics
+ ).
+
+filter_counters(Metrics) ->
+ [K || {counter, K} <- Metrics].
+
+create_slides(Metrics) ->
+ EmptyDatapoints = [
+ #slide_datapoint{sum = 0, samples = 0, time = 0}
+ || _ <- lists:seq(1, ?SECS_5M div ?SAMPLING)
+ ],
+ maps:from_list([{K, #slide{datapoints = EmptyDatapoints}} || {slide, K} <- Metrics]).
+
get_self_name() ->
{registered_name, Name} = process_info(self(), registered_name),
Name.
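
For context, a minimal usage sketch of the new slide metrics, pieced together from the doc comment on observe/4 and the updated test suite below. The handler name and metric id are made up, and the metrics worker for that handler is assumed to be already running (e.g. started from child_spec/2):

    %% Illustrative only; names are hypothetical, the calls are the ones added above.
    Name = my_metrics_handler,
    Id = <<"resource:1">>,
    ok = emqx_metrics_worker:create_metrics(Name, Id, [ok_count, {slide, latency}]),
    ok = emqx_metrics_worker:inc(Name, Id, ok_count),
    ok = emqx_metrics_worker:observe(Name, Id, latency, 10),
    ok = emqx_metrics_worker:observe(Name, Id, latency, 30),
    %% Average over an explicit window (in seconds): (10 + 30) div 2 = 20.
    #{latency := 20} = emqx_metrics_worker:get_slide(Name, Id, 1),
    %% Without a window: sample count plus current/last-5-minute averages.
    #{latency := #{n_samples := _, current := _, last5m := _}} =
        emqx_metrics_worker:get_slide(Name, Id).
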
diff --git a/apps/emqx/src/emqx_packet.erl b/apps/emqx/src/emqx_packet.erl
index 8f539563e..d82810d15 100644
--- a/apps/emqx/src/emqx_packet.erl
+++ b/apps/emqx/src/emqx_packet.erl
@@ -16,6 +16,8 @@
-module(emqx_packet).
+-elvis([{elvis_style, no_spec_with_records, disable}]).
+
-include("emqx.hrl").
-include("emqx_mqtt.hrl").
@@ -492,7 +494,7 @@ format_variable(undefined, _, _) ->
format_variable(Variable, undefined, PayloadEncode) ->
format_variable(Variable, PayloadEncode);
format_variable(Variable, Payload, PayloadEncode) ->
- [format_variable(Variable, PayloadEncode), format_payload(Payload, PayloadEncode)].
+ [format_variable(Variable, PayloadEncode), ",", format_payload(Payload, PayloadEncode)].
format_variable(
#mqtt_packet_connect{
diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl
index 4cd78b575..e9f455611 100644
--- a/apps/emqx/src/emqx_schema.erl
+++ b/apps/emqx/src/emqx_schema.erl
@@ -111,15 +111,19 @@
comma_separated_atoms/0
]).
--export([namespace/0, roots/0, roots/1, fields/1, desc/1]).
+-export([namespace/0, roots/0, roots/1, fields/1, desc/1, tags/0]).
-export([conf_get/2, conf_get/3, keys/2, filter/1]).
-export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1]).
+-export([authz_fields/0]).
-export([sc/2, map/2]).
-elvis([{elvis_style, god_modules, disable}]).
namespace() -> broker.
+tags() ->
+ [<<"EMQX">>].
+
roots() ->
%% TODO change config importance to a field metadata
roots(high) ++ roots(medium) ++ roots(low).
@@ -323,31 +327,7 @@ fields("stats") ->
)}
];
fields("authorization") ->
- [
- {"no_match",
- sc(
- hoconsc:enum([allow, deny]),
- #{
- default => allow,
- required => true,
- desc => ?DESC(fields_authorization_no_match)
- }
- )},
- {"deny_action",
- sc(
- hoconsc:enum([ignore, disconnect]),
- #{
- default => ignore,
- required => true,
- desc => ?DESC(fields_authorization_deny_action)
- }
- )},
- {"cache",
- sc(
- ref(?MODULE, "cache"),
- #{}
- )}
- ];
+ authz_fields();
fields("cache") ->
[
{"enable",
@@ -1644,7 +1624,7 @@ base_listener(Bind) ->
sc(
hoconsc:union([infinity, pos_integer()]),
#{
- default => infinity,
+ default => <<"infinity">>,
desc => ?DESC(base_listener_max_connections)
}
)},
@@ -2088,6 +2068,33 @@ do_default_ciphers(_) ->
%% otherwise resolve default ciphers list at runtime
[].
+authz_fields() ->
+ [
+ {"no_match",
+ sc(
+ hoconsc:enum([allow, deny]),
+ #{
+ default => allow,
+ required => true,
+ desc => ?DESC(fields_authorization_no_match)
+ }
+ )},
+ {"deny_action",
+ sc(
+ hoconsc:enum([ignore, disconnect]),
+ #{
+ default => ignore,
+ required => true,
+ desc => ?DESC(fields_authorization_deny_action)
+ }
+ )},
+ {"cache",
+ sc(
+ ref(?MODULE, "cache"),
+ #{}
+ )}
+ ].
+
%% @private return a list of keys in a parent field
-spec keys(string(), hocon:config()) -> [string()].
keys(Parent, Conf) ->
@@ -2342,7 +2349,7 @@ authentication(Which) ->
undefined -> hoconsc:array(typerefl:map());
Module -> Module:root_type()
end,
- %% It is a lazy type because when handing runtime update requests
+ %% It is a lazy type because when handling runtime update requests
%% the config is not checked by emqx_schema, but by the injected schema
Type = hoconsc:lazy(Type0),
#{
diff --git a/apps/emqx/src/emqx_session_router.erl b/apps/emqx/src/emqx_session_router.erl
index 94f7fb64d..0d4972e8c 100644
--- a/apps/emqx/src/emqx_session_router.erl
+++ b/apps/emqx/src/emqx_session_router.erl
@@ -71,24 +71,15 @@
%%--------------------------------------------------------------------
create_router_tab(disc) ->
- ok = mria:create_table(?ROUTE_DISC_TAB, [
- {type, bag},
- {rlog_shard, ?ROUTE_SHARD},
- {storage, disc_copies},
- {record_name, route},
- {attributes, record_info(fields, route)},
- {storage_properties, [
- {ets, [
- {read_concurrency, true},
- {write_concurrency, true}
- ]}
- ]}
- ]);
+ create_table(?ROUTE_DISC_TAB, disc_copies);
create_router_tab(ram) ->
- ok = mria:create_table(?ROUTE_RAM_TAB, [
+ create_table(?ROUTE_RAM_TAB, ram_copies).
+
+create_table(Tab, Storage) ->
+ ok = mria:create_table(Tab, [
{type, bag},
{rlog_shard, ?ROUTE_SHARD},
- {storage, ram_copies},
+ {storage, Storage},
{record_name, route},
{attributes, record_info(fields, route)},
{storage_properties, [
diff --git a/apps/emqx/test/data/certs/certfile b/apps/emqx/test/data/certs/certfile
new file mode 100644
index 000000000..a198faf61
--- /dev/null
+++ b/apps/emqx/test/data/certs/certfile
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID/jCCAeagAwIBAgIJAKTICmq1Lg6dMA0GCSqGSIb3DQEBCwUAMDQxEjAQBgNV
+BAoMCUVNUVggVGVzdDEeMBwGA1UEAwwVQ2VydGlmaWNhdGUgQXV0aG9yaXR5MB4X
+DTIxMTIzMDA4NDExMloXDTQ5MDUxNzA4NDExMlowJTESMBAGA1UECgwJRU1RWCBU
+ZXN0MQ8wDQYDVQQDDAZjbGllbnQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQDzrujfx6XZTH0MWqLO6kNAeHndUZ+OGaURXvxKMPMF5dA40lxNG6cEzzlq
+0Rm61adlv8tF4kRJrs6EnRjEVoMImrdh07vGFdOTYqP01LjiBhErAzyRtSn2X8FT
+Te8ExoCRs3x61SPebGY2hOvFxuO6YDPVOSDvbbxvRgqIlM1ZXC8dOvPSSGZ+P8hV
+56EPayRthfu1FVptnkW9CyZCRI0gg95Hv8RC7bGG+tuWpkN9ZrRvohhgGR1+bDUi
+BNBpncEsSh+UgWaj8KRN8D16H6m/Im6ty467j0at49FvPx5nACL48/ghtYvzgKLc
+uKHtokKUuuzebDK/hQxN3mUSAJStAgMBAAGjIjAgMAsGA1UdDwQEAwIFoDARBglg
+hkgBhvhCAQEEBAMCB4AwDQYJKoZIhvcNAQELBQADggIBAIlVyPhOpkz3MNzQmjX7
+xgJ3vGPK5uK11n/wfjRwe2qXwZbrI2sYLVtTpUgvLDuP0gB73Vwfu7xAMdue6TRm
+CKr9z0lkQsVBtgoqzZCjd4PYLfHm4EhsOMi98OGKU5uOGD4g3yLwQWXHhbYtiZMO
+Jsj0hebYveYJt/BYTd1syGQcIcYCyVExWvSWjidfpAqjT6EF7whdubaFtuF2kaGF
+IO9yn9rWtXB5yK99uCguEmKhx3fAQxomzqweTu3WRvy9axsUH3WAUW9a4DIBSz2+
+ZSJNheFn5GktgggygJUGYqpSZHooUJW0UBs/8vX6AP+8MtINmqOGZUawmNwLWLOq
+wHyVt2YGD5TXjzzsWNSQ4mqXxM6AXniZVZK0yYNjA4ATikX1AtwunyWBR4IjyE/D
+FxYPORdZCOtywRFE1R5KLTUq/C8BNGCkYnoO78DJBO+pT0oagkQGQb0CnmC6C1db
+4lWzA9K0i4B0PyooZA+gp+5FFgaLuX1DkyeaY1J204QhHR1z/Vcyl5dpqR9hqnYP
+t8raLk9ogMDKqKA9iG0wc3CBNckD4sjVWAEeovXhElG55fD21wwhF+AnDCvX8iVK
+cBfKV6z6uxfKjGIxc2I643I5DiIn+V3DnPxYyY74Ln1lWFYmt5JREhAxPu42zq74
+e6+eIMYFszB+5gKgt6pa6ZNI
+-----END CERTIFICATE-----
diff --git a/apps/emqx/test/data/certs/keyfile b/apps/emqx/test/data/certs/keyfile
new file mode 100644
index 000000000..2f0af5d41
--- /dev/null
+++ b/apps/emqx/test/data/certs/keyfile
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEA867o38el2Ux9DFqizupDQHh53VGfjhmlEV78SjDzBeXQONJc
+TRunBM85atEZutWnZb/LReJESa7OhJ0YxFaDCJq3YdO7xhXTk2Kj9NS44gYRKwM8
+kbUp9l/BU03vBMaAkbN8etUj3mxmNoTrxcbjumAz1Tkg7228b0YKiJTNWVwvHTrz
+0khmfj/IVeehD2skbYX7tRVabZ5FvQsmQkSNIIPeR7/EQu2xhvrblqZDfWa0b6IY
+YBkdfmw1IgTQaZ3BLEoflIFmo/CkTfA9eh+pvyJurcuOu49GrePRbz8eZwAi+PP4
+IbWL84Ci3Lih7aJClLrs3mwyv4UMTd5lEgCUrQIDAQABAoIBAQDwEbBgznrIwn8r
+jZt5x/brbAV7Ea/kOcWSgIaCvQifFdJ2OGAwov5/UXwajNgRZe2d4z7qoUhvYuUY
+ZwCAZU6ASpRBr2v9cYFYYURvrqZaHmoJew3P6q/lhl6aqFvC06DUagRHqvXEafyk
+13zEAvZVpfNKrBaTawPKiDFWb2qDDc9D6hC07EuJ/DNeehiHvzHrSZSDVV5Ut7Bw
+YDm33XygheUPAlHfeCnaixzcs3osiVyFEmVjxcIaM0ZS1NgcSaohSpJHMzvEaohX
+e+v9vccraSVlw01AlvFwI2vHYUV8jT6HwglTPKKGOCzK/ace3wPdYSU9qLcqfuHn
+EFhNc3tNAoGBAPugLMgbReJg2gpbIPUkYyoMMAAU7llFU1WvPWwXzo1a9EBjBACw
+WfCZISNtANXR38zIYXzoH547uXi4YPks1Nne3sYuCDpvuX+iz7fIo4zHf1nFmxH7
+eE6GtQr2ubmuuipTc28S0wBMGT1/KybH0e2NKL6GaOkNDmAI0IbEMBrvAoGBAPfr
+Y1QYLhPhan6m5g/5s+bQpKtHfNH9TNkk13HuYu72zNuY3qL2GC7oSadR8vTbRXZg
+KQqfaO0IGRcdkSFTq/AEhSSqr2Ld5nPadMbKvSGrSCc1s8rFH97jRVQY56yhM7ti
+IW4+6cE8ylCMbdYB6wuduK/GIgNpqoF4xs1i2XojAoGACacBUMPLEH4Kny8TupOk
+wi4pgTdMVVxVcAoC3yyincWJbRbfRm99Y79cCBHcYFdmsGJXawU0gUtlN/5KqgRQ
+PfNQtGV7p1I12XGTakdmDrZwai8sXao52TlNpJgGU9siBRGicfZU5cQFi9he/WPY
+57XshDJ/v8DidkigRysrdT0CgYEA5iuO22tblC+KvK1dGOXeZWO+DhrfwuGlcFBp
+CaimB2/w/8vsn2VVTG9yujo2E6hj1CQw1mDrfG0xRim4LTXOgpbfugwRqvuTUmo2
+Ur21XEX2RhjwpEfhcACWxB4fMUG0krrniMA2K6axupi1/KNpQi6bYe3UdFCs8Wld
+QSAOAvsCgYBk/X5PmD44DvndE5FShM2w70YOoMr3Cgl5sdwAFUFE9yDuC14UhVxk
+oxnYxwtVI9uVVirET+LczP9JEvcvxnN/Xg3tH/qm0WlIxmTxyYrFFIK9j0rqeu9z
+blPu56OzNI2VMrR1GbOBLxQINLTIpaacjNJAlr8XOlegdUJsW/Jwqw==
+-----END RSA PRIVATE KEY-----
diff --git a/apps/emqx/test/data/certs/keyfile2 b/apps/emqx/test/data/certs/keyfile2
new file mode 100644
index 000000000..2b3f30cf6
--- /dev/null
+++ b/apps/emqx/test/data/certs/keyfile2
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAzLiGiSwpxkENtjrzS7pNLblTnWe4HUUFwYyUX0H+3TnvA86X
+EX85yZvFjkzB6lLjUkMY+C6UTVXt+mxeSJbUtSKZhX+2yoF/KYh7SaVjug5FqEqO
+LvMpDZQEhUWF2W9DG6eUgOfDoX2milSDIe10yG2WBkryipHAfE7l1t+i6Rh3on+v
+561LmrbqyBWR/cLp23RN3sHbkf2pb5/ugtU9twdgJr6Lve73rvSeulewL5BzszKD
+BrYqr+PBT5+3ItCc55bTsO7M7CzOIL99BlqdvFH7xT0U1+2BFwLe4/8kwphSqyJE
+C5oOiQBFnFVNXmFQSV+k7rPr80i1IO++HeJ6KQIDAQABAoIBAGWgvPjfuaU3qizq
+uti/FY07USz0zkuJdkANH6LiSjlchzDmn8wJ0pApCjuIE0PV/g9aS8z4opp5q/gD
+UBLM/a8mC/xf2EhTXOMrY7i9p/I3H5FZ4ZehEqIw9sWKK9YzC6dw26HabB2BGOnW
+5nozPSQ6cp2RGzJ7BIkxSZwPzPnVTgy3OAuPOiJytvK+hGLhsNaT+Y9bNDvplVT2
+ZwYTV8GlHZC+4b2wNROILm0O86v96O+Qd8nn3fXjGHbMsAnONBq10bZS16L4fvkH
+5G+W/1PeSXmtZFppdRRDxIW+DWcXK0D48WRliuxcV4eOOxI+a9N2ZJZZiNLQZGwg
+w3A8+mECgYEA8HuJFrlRvdoBe2U/EwUtG74dcyy30L4yEBnN5QscXmEEikhaQCfX
+Wm6EieMcIB/5I5TQmSw0cmBMeZjSXYoFdoI16/X6yMMuATdxpvhOZGdUGXxhAH+x
+xoTUavWZnEqW3fkUU71kT5E2f2i+0zoatFESXHeslJyz85aAYpP92H0CgYEA2e5A
+Yozt5eaA1Gyhd8SeptkEU4xPirNUnVQHStpMWUb1kzTNXrPmNWccQ7JpfpG6DcYl
+zUF6p6mlzY+zkMiyPQjwEJlhiHM2NlL1QS7td0R8ewgsFoyn8WsBI4RejWrEG9td
+EDniuIw+pBFkcWthnTLHwECHdzgquToyTMjrBB0CgYEA28tdGbrZXhcyAZEhHAZA
+Gzog+pKlkpEzeonLKIuGKzCrEKRecIK5jrqyQsCjhS0T7ZRnL4g6i0s+umiV5M5w
+fcc292pEA1h45L3DD6OlKplSQVTv55/OYS4oY3YEJtf5mfm8vWi9lQeY8sxOlQpn
+O+VZTdBHmTC8PGeTAgZXHZUCgYA6Tyv88lYowB7SN2qQgBQu8jvdGtqhcs/99GCr
+H3N0I69LPsKAR0QeH8OJPXBKhDUywESXAaEOwS5yrLNP1tMRz5Vj65YUCzeDG3kx
+gpvY4IMp7ArX0bSRvJ6mYSFnVxy3k174G3TVCfksrtagHioVBGQ7xUg5ltafjrms
+n8l55QKBgQDVzU8tQvBVqY8/1lnw11Vj4fkE/drZHJ5UkdC1eenOfSWhlSLfUJ8j
+ds7vEWpRPPoVuPZYeR1y78cyxKe1GBx6Wa2lF5c7xjmiu0xbRnrxYeLolce9/ntp
+asClqpnHT8/VJYTD7Kqj0fouTTZf0zkig/y+2XERppd8k+pSKjUCPQ==
+-----END RSA PRIVATE KEY-----
diff --git a/apps/emqx/test/emqx_common_test_http.erl b/apps/emqx/test/emqx_common_test_http.erl
index 87a35a1e2..575bed5c3 100644
--- a/apps/emqx/test/emqx_common_test_http.erl
+++ b/apps/emqx/test/emqx_common_test_http.erl
@@ -29,6 +29,9 @@
auth_header/2
]).
+-define(DEFAULT_APP_ID, <<"default_appid">>).
+-define(DEFAULT_APP_SECRET, <<"default_app_secret">>).
+
request_api(Method, Url, Auth) ->
request_api(Method, Url, [], Auth, []).
@@ -74,12 +77,18 @@ auth_header(User, Pass) ->
{"Authorization", "Basic " ++ Encoded}.
default_auth_header() ->
- AppId = <<"myappid">>,
- AppSecret = emqx_mgmt_auth:get_appsecret(AppId),
- auth_header(erlang:binary_to_list(AppId), erlang:binary_to_list(AppSecret)).
+ {ok, #{api_key := APIKey}} = emqx_mgmt_auth:read(?DEFAULT_APP_ID),
+ auth_header(
+ erlang:binary_to_list(APIKey), erlang:binary_to_list(?DEFAULT_APP_SECRET)
+ ).
create_default_app() ->
- emqx_mgmt_auth:add_app(<<"myappid">>, <<"test">>).
+ Now = erlang:system_time(second),
+ ExpiredAt = Now + timer:minutes(10),
+ emqx_mgmt_auth:create(
+ ?DEFAULT_APP_ID, ?DEFAULT_APP_SECRET, true, ExpiredAt, <<"default app key for test">>
+ ),
+ ok.
delete_default_app() ->
- emqx_mgmt_auth:del_app(<<"myappid">>).
+ emqx_mgmt_auth:delete(?DEFAULT_APP_ID).
diff --git a/apps/emqx/test/emqx_metrics_worker_SUITE.erl b/apps/emqx/test/emqx_metrics_worker_SUITE.erl
index 113e8650f..194c9cc99 100644
--- a/apps/emqx/test/emqx_metrics_worker_SUITE.erl
+++ b/apps/emqx/test/emqx_metrics_worker_SUITE.erl
@@ -46,7 +46,7 @@ end_per_testcase(_, _Config) ->
ok.
t_get_metrics(_) ->
- Metrics = [a, b, c],
+ Metrics = [a, b, c, {slide, d}],
Id = <<"testid">>,
ok = emqx_metrics_worker:create_metrics(?NAME, Id, Metrics),
%% all the metrics are set to zero at start
@@ -73,6 +73,8 @@ t_get_metrics(_) ->
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id0, inflight, 5),
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id1, inflight, 7),
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id2, queuing, 9),
+ ok = emqx_metrics_worker:observe(?NAME, Id, d, 10),
+ ok = emqx_metrics_worker:observe(?NAME, Id, d, 30),
ct:sleep(1500),
?LET(
#{
@@ -89,6 +91,9 @@ t_get_metrics(_) ->
a := 1,
b := 1,
c := 2
+ } = Counters,
+ slides := #{
+ d := #{n_samples := 2, last5m := 20, current := _}
}
},
emqx_metrics_worker:get_metrics(?NAME, Id),
@@ -100,7 +105,8 @@ t_get_metrics(_) ->
?assert(MaxB > 0),
?assert(MaxC > 0),
?assert(Inflight == 12),
- ?assert(Queuing == 9)
+ ?assert(Queuing == 9),
+ ?assertNot(maps:is_key(d, Counters))
}
),
ok = emqx_metrics_worker:clear_metrics(?NAME, Id).
@@ -117,6 +123,7 @@ t_clear_metrics(_Config) ->
c := #{current := 0.0, max := 0.0, last5m := 0.0}
},
gauges := #{},
+ slides := #{},
counters := #{
a := 0,
b := 0,
@@ -138,14 +145,15 @@ t_clear_metrics(_Config) ->
#{
counters => #{},
gauges => #{},
- rate => #{current => 0.0, last5m => 0.0, max => 0.0}
+ rate => #{current => 0.0, last5m => 0.0, max => 0.0},
+ slides => #{}
},
emqx_metrics_worker:get_metrics(?NAME, Id)
),
ok.
t_reset_metrics(_) ->
- Metrics = [a, b, c],
+ Metrics = [a, b, c, {slide, d}],
Id = <<"testid">>,
ok = emqx_metrics_worker:create_metrics(?NAME, Id, Metrics),
%% all the metrics are set to zero at start
@@ -161,6 +169,9 @@ t_reset_metrics(_) ->
a := 0,
b := 0,
c := 0
+ },
+ slides := #{
+ d := #{n_samples := 0, last5m := 0, current := 0}
}
},
emqx_metrics_worker:get_metrics(?NAME, Id)
@@ -172,7 +183,12 @@ t_reset_metrics(_) ->
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id0, inflight, 5),
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id1, inflight, 7),
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id2, queuing, 9),
+ ok = emqx_metrics_worker:observe(?NAME, Id, d, 100),
+ ok = emqx_metrics_worker:observe(?NAME, Id, d, 200),
ct:sleep(1500),
+ ?assertMatch(
+ #{d := #{n_samples := 2}}, emqx_metrics_worker:get_slide(?NAME, <<"testid">>)
+ ),
ok = emqx_metrics_worker:reset_metrics(?NAME, Id),
?LET(
#{
@@ -186,6 +202,9 @@ t_reset_metrics(_) ->
a := 0,
b := 0,
c := 0
+ },
+ slides := #{
+ d := #{n_samples := 0, last5m := 0, current := 0}
}
},
emqx_metrics_worker:get_metrics(?NAME, Id),
@@ -202,7 +221,7 @@ t_reset_metrics(_) ->
ok = emqx_metrics_worker:clear_metrics(?NAME, Id).
t_get_metrics_2(_) ->
- Metrics = [a, b, c],
+ Metrics = [a, b, c, {slide, d}],
Id = <<"testid">>,
ok = emqx_metrics_worker:create_metrics(
?NAME,
diff --git a/apps/emqx/test/emqx_schema_tests.erl b/apps/emqx/test/emqx_schema_tests.erl
index e4fadb192..b249dea92 100644
--- a/apps/emqx/test/emqx_schema_tests.erl
+++ b/apps/emqx/test/emqx_schema_tests.erl
@@ -153,7 +153,7 @@ ssl_opts_gc_after_handshake_test_rancher_listener_test() ->
#{
kind := validation_error,
reason := unknown_fields,
- unknown := <<"gc_after_handshake">>
+ unknown := "gc_after_handshake"
}
]},
validate(Sc, #{<<"gc_after_handshake">> => true})
diff --git a/apps/emqx_authn/src/emqx_authn.app.src b/apps/emqx_authn/src/emqx_authn.app.src
index ea21e5bdc..7f01d94c0 100644
--- a/apps/emqx_authn/src/emqx_authn.app.src
+++ b/apps/emqx_authn/src/emqx_authn.app.src
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_authn, [
{description, "EMQX Authentication"},
- {vsn, "0.1.11"},
+ {vsn, "0.1.12"},
{modules, []},
{registered, [emqx_authn_sup, emqx_authn_registry]},
{applications, [kernel, stdlib, emqx_resource, emqx_connector, ehttpc, epgsql, mysql, jose]},
diff --git a/apps/emqx_authn/src/emqx_authn_schema.erl b/apps/emqx_authn/src/emqx_authn_schema.erl
index 88d8955c5..f40e759f0 100644
--- a/apps/emqx_authn/src/emqx_authn_schema.erl
+++ b/apps/emqx_authn/src/emqx_authn_schema.erl
@@ -22,6 +22,7 @@
-export([
common_fields/0,
roots/0,
+ tags/0,
fields/1,
authenticator_type/0,
authenticator_type_without_scram/0,
@@ -32,6 +33,9 @@
roots() -> [].
+tags() ->
+ [<<"Authentication">>].
+
common_fields() ->
[{enable, fun enable/1}].
diff --git a/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl b/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl
index ba13bd069..ac39e2cda 100644
--- a/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl
+++ b/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl
@@ -25,6 +25,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1
@@ -105,6 +106,9 @@ mnesia(boot) ->
namespace() -> "authn-scram-builtin_db".
+tags() ->
+ [<<"Authentication">>].
+
roots() -> [?CONF_NS].
fields(?CONF_NS) ->
diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl
index 0a9aaa825..faa06b71a 100644
--- a/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl
+++ b/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl
@@ -26,6 +26,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1,
@@ -51,6 +52,9 @@
namespace() -> "authn-http".
+tags() ->
+ [<<"Authentication">>].
+
roots() ->
[
{?CONF_NS,
diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_jwt.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_jwt.erl
index 5709a1fe7..1c44b4d1f 100644
--- a/apps/emqx_authn/src/simple_authn/emqx_authn_jwt.erl
+++ b/apps/emqx_authn/src/simple_authn/emqx_authn_jwt.erl
@@ -25,6 +25,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1
@@ -44,6 +45,9 @@
namespace() -> "authn-jwt".
+tags() ->
+ [<<"Authentication">>].
+
roots() ->
[
{?CONF_NS,
diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl
index e915744e1..7c51644b7 100644
--- a/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl
+++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl
@@ -26,6 +26,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1
@@ -107,6 +108,9 @@ mnesia(boot) ->
namespace() -> "authn-builtin_db".
+tags() ->
+ [<<"Authentication">>].
+
roots() -> [?CONF_NS].
fields(?CONF_NS) ->
diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl
index 3fac0ed7d..3f140a8eb 100644
--- a/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl
+++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl
@@ -25,6 +25,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1
@@ -44,6 +45,9 @@
namespace() -> "authn-mongodb".
+tags() ->
+ [<<"Authentication">>].
+
roots() ->
[
{?CONF_NS,
diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl
index 68913443f..ffce42bb3 100644
--- a/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl
+++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl
@@ -27,6 +27,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1
@@ -46,6 +47,9 @@
namespace() -> "authn-mysql".
+tags() ->
+ [<<"Authentication">>].
+
roots() -> [?CONF_NS].
fields(?CONF_NS) ->
diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl
index 1cadf9c56..2d7974301 100644
--- a/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl
+++ b/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl
@@ -26,6 +26,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1
@@ -50,6 +51,9 @@
namespace() -> "authn-postgresql".
+tags() ->
+ [<<"Authentication">>].
+
roots() -> [?CONF_NS].
fields(?CONF_NS) ->
diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl
index 0c8fedfb5..12b7422b5 100644
--- a/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl
+++ b/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl
@@ -25,6 +25,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1
@@ -44,6 +45,9 @@
namespace() -> "authn-redis".
+tags() ->
+ [<<"Authentication">>].
+
roots() ->
[
{?CONF_NS,
diff --git a/apps/emqx_authn/test/emqx_authn_api_SUITE.erl b/apps/emqx_authn/test/emqx_authn_api_SUITE.erl
index 1a867b0be..11e2c6773 100644
--- a/apps/emqx_authn/test/emqx_authn_api_SUITE.erl
+++ b/apps/emqx_authn/test/emqx_authn_api_SUITE.erl
@@ -18,7 +18,8 @@
-compile(nowarn_export_all).
-compile(export_all).
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1, multipart_formdata_request/3]).
+-import(emqx_dashboard_api_test_helpers, [multipart_formdata_request/3]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include("emqx_authn.hrl").
-include_lib("eunit/include/eunit.hrl").
@@ -65,9 +66,8 @@ end_per_testcase(_, Config) ->
init_per_suite(Config) ->
emqx_config:erase(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY),
_ = application:load(emqx_conf),
- ok = emqx_common_test_helpers:start_apps(
- [emqx_authn, emqx_dashboard],
- fun set_special_configs/1
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_authn]
),
?AUTHN:delete_chain(?GLOBAL),
@@ -76,12 +76,7 @@ init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
- emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authn]),
- ok.
-
-set_special_configs(emqx_dashboard) ->
- emqx_dashboard_api_test_helpers:set_default_config();
-set_special_configs(_App) ->
+ emqx_mgmt_api_test_util:end_suite([emqx_authn]),
ok.
%%------------------------------------------------------------------------------
diff --git a/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl b/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl
index 8191fe2e9..cd97a15d9 100644
--- a/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl
+++ b/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl
@@ -197,7 +197,7 @@ t_list_users(_) ->
#{is_superuser := false, user_id := _},
#{is_superuser := false, user_id := _}
],
- meta := #{page := 1, limit := 2, count := 3}
+ meta := #{page := 1, limit := 2, count := 3, hasnext := true}
} = emqx_authn_mnesia:list_users(
#{<<"page">> => 1, <<"limit">> => 2},
State
@@ -205,7 +205,7 @@ t_list_users(_) ->
#{
data := [#{is_superuser := false, user_id := _}],
- meta := #{page := 2, limit := 2, count := 3}
+ meta := #{page := 2, limit := 2, count := 3, hasnext := false}
} = emqx_authn_mnesia:list_users(
#{<<"page">> => 2, <<"limit">> => 2},
State
@@ -213,7 +213,7 @@ t_list_users(_) ->
#{
data := [#{is_superuser := false, user_id := <<"u3">>}],
- meta := #{page := 1, limit := 20, count := 0}
+ meta := #{page := 1, limit := 20, hasnext := false}
} = emqx_authn_mnesia:list_users(
#{
<<"page">> => 1,
diff --git a/apps/emqx_authn/test/emqx_enhanced_authn_scram_mnesia_SUITE.erl b/apps/emqx_authn/test/emqx_enhanced_authn_scram_mnesia_SUITE.erl
index e1a2586cd..b143903b5 100644
--- a/apps/emqx_authn/test/emqx_enhanced_authn_scram_mnesia_SUITE.erl
+++ b/apps/emqx_authn/test/emqx_enhanced_authn_scram_mnesia_SUITE.erl
@@ -300,14 +300,14 @@ t_list_users(_) ->
#{
data := [?USER_MAP, ?USER_MAP],
- meta := #{page := 1, limit := 2, count := 3}
+ meta := #{page := 1, limit := 2, count := 3, hasnext := true}
} = emqx_enhanced_authn_scram_mnesia:list_users(
#{<<"page">> => 1, <<"limit">> => 2},
State
),
#{
data := [?USER_MAP],
- meta := #{page := 2, limit := 2, count := 3}
+ meta := #{page := 2, limit := 2, count := 3, hasnext := false}
} = emqx_enhanced_authn_scram_mnesia:list_users(
#{<<"page">> => 2, <<"limit">> => 2},
State
@@ -319,7 +319,7 @@ t_list_users(_) ->
is_superuser := _
}
],
- meta := #{page := 1, limit := 3, count := 0}
+ meta := #{page := 1, limit := 3, hasnext := false}
} = emqx_enhanced_authn_scram_mnesia:list_users(
#{
<<"page">> => 1,
diff --git a/apps/emqx_authz/README.md b/apps/emqx_authz/README.md
index 8c05f21be..af543e478 100644
--- a/apps/emqx_authz/README.md
+++ b/apps/emqx_authz/README.md
@@ -15,7 +15,6 @@ authz:{
pool_size: 1
username: root
password: public
- auto_reconnect: true
ssl: {
enable: true
cacertfile: "etc/certs/cacert.pem"
@@ -33,7 +32,6 @@ authz:{
pool_size: 1
username: root
password: public
- auto_reconnect: true
ssl: {enable: false}
}
sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = ${peerhost} or username = ${username} or username = '$all' or clientid = ${clientid}"
@@ -45,7 +43,6 @@ authz:{
database: 0
pool_size: 1
password: public
- auto_reconnect: true
ssl: {enable: false}
}
cmd: "HGETALL mqtt_authz:${username}"
diff --git a/apps/emqx_authz/etc/emqx_authz.conf b/apps/emqx_authz/etc/emqx_authz.conf
index e7fd73498..3bdc180c5 100644
--- a/apps/emqx_authz/etc/emqx_authz.conf
+++ b/apps/emqx_authz/etc/emqx_authz.conf
@@ -1,6 +1,7 @@
authorization {
deny_action = ignore
no_match = allow
+ cache = { enable = true }
sources = [
{
type = file
diff --git a/apps/emqx_authz/src/emqx_authz.app.src b/apps/emqx_authz/src/emqx_authz.app.src
index f5b9f9da6..6a4b721e9 100644
--- a/apps/emqx_authz/src/emqx_authz.app.src
+++ b/apps/emqx_authz/src/emqx_authz.app.src
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_authz, [
{description, "An OTP application"},
- {vsn, "0.1.11"},
+ {vsn, "0.1.12"},
{registered, []},
{mod, {emqx_authz_app, []}},
{applications, [
diff --git a/apps/emqx_authz/src/emqx_authz.erl b/apps/emqx_authz/src/emqx_authz.erl
index bf07f3083..682ad7f2e 100644
--- a/apps/emqx_authz/src/emqx_authz.erl
+++ b/apps/emqx_authz/src/emqx_authz.erl
@@ -20,6 +20,7 @@
-include("emqx_authz.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_hooks.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-ifdef(TEST).
-compile(export_all).
@@ -358,6 +359,7 @@ authorize_non_superuser(
emqx_metrics:inc(?METRIC_DENY),
{stop, #{result => deny, from => AuthzSource}};
nomatch ->
+ ?tp(authz_non_superuser, #{result => nomatch}),
?SLOG(info, #{
msg => "authorization_failed_nomatch",
username => Username,
@@ -388,6 +390,12 @@ do_authorize(
nomatch ->
emqx_metrics_worker:inc(authz_metrics, Type, nomatch),
do_authorize(Client, PubSub, Topic, Tail);
+ %% {matched, allow | deny | ignore}
+ {matched, ignore} ->
+ do_authorize(Client, PubSub, Topic, Tail);
+ ignore ->
+ do_authorize(Client, PubSub, Topic, Tail);
+ %% {matched, allow | deny}
Matched ->
{Matched, Type}
catch
diff --git a/apps/emqx_authz/src/emqx_authz_api_settings.erl b/apps/emqx_authz/src/emqx_authz_api_settings.erl
index 72a2db35c..db915a795 100644
--- a/apps/emqx_authz/src/emqx_authz_api_settings.erl
+++ b/apps/emqx_authz/src/emqx_authz_api_settings.erl
@@ -64,7 +64,7 @@ schema("/authorization/settings") ->
}.
ref_authz_schema() ->
- proplists:delete(sources, emqx_conf_schema:fields("authorization")).
+ emqx_schema:authz_fields().
settings(get, _Params) ->
{200, authorization_settings()};
@@ -83,4 +83,6 @@ settings(put, #{
{200, authorization_settings()}.
authorization_settings() ->
- maps:remove(<<"sources">>, emqx:get_raw_config([authorization], #{})).
+ C = maps:remove(<<"sources">>, emqx:get_raw_config([authorization], #{})),
+ Schema = emqx_hocon:make_schema(emqx_schema:authz_fields()),
+ hocon_tconf:make_serializable(Schema, C, #{}).
diff --git a/apps/emqx_authz/src/emqx_authz_api_sources.erl b/apps/emqx_authz/src/emqx_authz_api_sources.erl
index f5570f1f1..c692154b1 100644
--- a/apps/emqx_authz/src/emqx_authz_api_sources.erl
+++ b/apps/emqx_authz/src/emqx_authz_api_sources.erl
@@ -449,7 +449,7 @@ is_ok(ResL) ->
get_raw_sources() ->
RawSources = emqx:get_raw_config([authorization, sources], []),
- Schema = #{roots => emqx_authz_schema:fields("authorization"), fields => #{}},
+ Schema = emqx_hocon:make_schema(emqx_authz_schema:authz_fields()),
Conf = #{<<"sources">> => RawSources},
#{<<"sources">> := Sources} = hocon_tconf:make_serializable(Schema, Conf, #{}),
merge_default_headers(Sources).
diff --git a/apps/emqx_authz/src/emqx_authz_http.erl b/apps/emqx_authz/src/emqx_authz_http.erl
index ea12214ec..4479d1483 100644
--- a/apps/emqx_authz/src/emqx_authz_http.erl
+++ b/apps/emqx_authz/src/emqx_authz_http.erl
@@ -20,6 +20,7 @@
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_placeholder.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-behaviour(emqx_authz).
@@ -104,6 +105,7 @@ authorize(
log_nomtach_msg(Status, Headers, Body),
nomatch;
{error, Reason} ->
+ ?tp(authz_http_request_failure, #{error => Reason}),
?SLOG(error, #{
msg => "http_server_query_failed",
resource => ResourceID,
diff --git a/apps/emqx_authz/src/emqx_authz_schema.erl b/apps/emqx_authz/src/emqx_authz_schema.erl
index d03747b84..5527c26d6 100644
--- a/apps/emqx_authz/src/emqx_authz_schema.erl
+++ b/apps/emqx_authz/src/emqx_authz_schema.erl
@@ -33,9 +33,11 @@
-export([
namespace/0,
roots/0,
+ tags/0,
fields/1,
validations/0,
- desc/1
+ desc/1,
+ authz_fields/0
]).
-export([
@@ -47,14 +49,8 @@
%% Hocon Schema
%%--------------------------------------------------------------------
-namespace() -> authz.
-
-%% @doc authorization schema is not exported
-%% but directly used by emqx_schema
-roots() -> [].
-
-fields("authorization") ->
- Types = [
+type_names() ->
+ [
file,
http_get,
http_post,
@@ -67,18 +63,19 @@ fields("authorization") ->
redis_single,
redis_sentinel,
redis_cluster
- ],
- Unions = [?R_REF(Type) || Type <- Types],
- [
- {sources,
- ?HOCON(
- ?ARRAY(?UNION(Unions)),
- #{
- default => [],
- desc => ?DESC(sources)
- }
- )}
- ];
+ ].
+
+namespace() -> authz.
+
+tags() ->
+ [<<"Authorization">>].
+
+%% @doc authorization schema is not exported
+%% but directly used by emqx_schema
+roots() -> [].
+
+fields("authorization") ->
+ authz_fields();
fields(file) ->
authz_common_fields(file) ++
[{path, ?HOCON(string(), #{required => true, desc => ?DESC(path)})}];
@@ -408,9 +405,94 @@ common_rate_field() ->
].
method(Method) ->
- ?HOCON(Method, #{default => Method, required => true, desc => ?DESC(method)}).
+ ?HOCON(Method, #{required => true, desc => ?DESC(method)}).
array(Ref) -> array(Ref, Ref).
array(Ref, DescId) ->
?HOCON(?ARRAY(?R_REF(Ref)), #{desc => ?DESC(DescId)}).
+
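+%% Pick the schema reference for a raw authorization source, dispatching on the
+%% `type` field and, for mongodb/redis/http sources, on their sub-type
+%% discriminator (`mongo_type`, `redis_type` or `method`).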
+select_union_member(#{<<"type">> := <<"mongodb">>} = Value) ->
+ MongoType = maps:get(<<"mongo_type">>, Value, undefined),
+ case MongoType of
+ <<"single">> ->
+ ?R_REF(mongo_single);
+ <<"rs">> ->
+ ?R_REF(mongo_rs);
+ <<"sharded">> ->
+ ?R_REF(mongo_sharded);
+ Else ->
+ throw(#{
+ reason => "unknown_mongo_type",
+ expected => "single | rs | sharded",
+ got => Else
+ })
+ end;
+select_union_member(#{<<"type">> := <<"redis">>} = Value) ->
+ RedisType = maps:get(<<"redis_type">>, Value, undefined),
+ case RedisType of
+ <<"single">> ->
+ ?R_REF(redis_single);
+ <<"cluster">> ->
+ ?R_REF(redis_cluster);
+ <<"sentinel">> ->
+ ?R_REF(redis_sentinel);
+ Else ->
+ throw(#{
+ reason => "unknown_redis_type",
+ expected => "single | cluster | sentinel",
+ got => Else
+ })
+ end;
+select_union_member(#{<<"type">> := <<"http">>} = Value) ->
+ Method = maps:get(<<"method">>, Value, undefined),
+ case Method of
+ <<"get">> ->
+ ?R_REF(http_get);
+ <<"post">> ->
+ ?R_REF(http_post);
+ Else ->
+ throw(#{
+ reason => "unknown_http_method",
+ expected => "get | post",
+ got => Else
+ })
+ end;
+select_union_member(#{<<"type">> := <<"built_in_database">>}) ->
+ ?R_REF(mnesia);
+select_union_member(#{<<"type">> := Type}) ->
+ select_union_member_loop(Type, type_names());
+select_union_member(_) ->
+ throw("missing_type_field").
+
+select_union_member_loop(TypeValue, []) ->
+ throw(#{
+ reason => "unknown_authz_type",
+ got => TypeValue
+ });
+select_union_member_loop(TypeValue, [Type | Types]) ->
+ case TypeValue =:= atom_to_binary(Type) of
+ true ->
+ ?R_REF(Type);
+ false ->
+ select_union_member_loop(TypeValue, Types)
+ end.
+
+authz_fields() ->
+ Types = [?R_REF(Type) || Type <- type_names()],
+ UnionMemberSelector =
+ fun
+ (all_union_members) -> Types;
+ %% must return list
+ ({value, Value}) -> [select_union_member(Value)]
+ end,
+ [
+ {sources,
+ ?HOCON(
+ ?ARRAY(?UNION(UnionMemberSelector)),
+ #{
+ default => [],
+ desc => ?DESC(sources)
+ }
+ )}
+ ].
diff --git a/apps/emqx_authz/test/emqx_authz_api_cache_SUITE.erl b/apps/emqx_authz/test/emqx_authz_api_cache_SUITE.erl
index 24b8fe25e..45e6d7287 100644
--- a/apps/emqx_authz/test/emqx_authz_api_cache_SUITE.erl
+++ b/apps/emqx_authz/test/emqx_authz_api_cache_SUITE.erl
@@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
--import(emqx_dashboard_api_test_helpers, [request/2, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/2, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@@ -32,8 +32,8 @@ groups() ->
[].
init_per_suite(Config) ->
- ok = emqx_common_test_helpers:start_apps(
- [emqx_conf, emqx_authz, emqx_dashboard, emqx_management],
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_conf, emqx_authz],
fun set_special_configs/1
),
Config.
@@ -47,7 +47,7 @@ end_per_suite(_Config) ->
<<"sources">> => []
}
),
- emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf, emqx_management]),
+ emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
ok.
set_special_configs(emqx_dashboard) ->
@@ -67,12 +67,12 @@ t_clean_cahce(_) ->
ok = emqtt:publish(C, <<"a/b/c">>, <<"{\"x\":1,\"y\":1}">>, 0),
{ok, 200, Result3} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
- ?assertEqual(2, length(jsx:decode(Result3))),
+ ?assertEqual(2, length(emqx_json:decode(Result3))),
request(delete, uri(["authorization", "cache"])),
{ok, 200, Result4} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
- ?assertEqual(0, length(jsx:decode(Result4))),
+ ?assertEqual(0, length(emqx_json:decode(Result4))),
ok.
diff --git a/apps/emqx_authz/test/emqx_authz_api_mnesia_SUITE.erl b/apps/emqx_authz/test/emqx_authz_api_mnesia_SUITE.erl
index 7b91f9b1c..3ee3ba4d8 100644
--- a/apps/emqx_authz/test/emqx_authz_api_mnesia_SUITE.erl
+++ b/apps/emqx_authz/test/emqx_authz_api_mnesia_SUITE.erl
@@ -22,7 +22,7 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
all() ->
emqx_common_test_helpers:all(?MODULE).
@@ -31,8 +31,8 @@ groups() ->
[].
init_per_suite(Config) ->
- ok = emqx_common_test_helpers:start_apps(
- [emqx_conf, emqx_authz, emqx_dashboard],
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_conf, emqx_authz],
fun set_special_configs/1
),
Config.
@@ -46,7 +46,7 @@ end_per_suite(_Config) ->
<<"sources">> => []
}
),
- emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
+ emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
ok.
set_special_configs(emqx_dashboard) ->
@@ -92,7 +92,8 @@ t_api(_) ->
<<"meta">> := #{
<<"count">> := 1,
<<"limit">> := 100,
- <<"page">> := 1
+ <<"page">> := 1,
+ <<"hasnext">> := false
}
} = jsx:decode(Request1),
?assertEqual(3, length(Rules1)),
@@ -109,14 +110,17 @@ t_api(_) ->
]),
[]
),
- #{
- <<"data">> := [],
- <<"meta">> := #{
- <<"count">> := 0,
- <<"limit">> := 20,
- <<"page">> := 1
- }
- } = jsx:decode(Request1_1),
+ ?assertEqual(
+ #{
+ <<"data">> => [],
+ <<"meta">> => #{
+ <<"limit">> => 20,
+ <<"page">> => 1,
+ <<"hasnext">> => false
+ }
+ },
+ jsx:decode(Request1_1)
+ ),
{ok, 200, Request2} =
request(
@@ -160,6 +164,14 @@ t_api(_) ->
[]
),
+ % ensure that the db contains a mix of records
+ {ok, 204, _} =
+ request(
+ post,
+ uri(["authorization", "sources", "built_in_database", "username"]),
+ [?USERNAME_RULES_EXAMPLE]
+ ),
+
{ok, 204, _} =
request(
post,
diff --git a/apps/emqx_authz/test/emqx_authz_api_settings_SUITE.erl b/apps/emqx_authz/test/emqx_authz_api_settings_SUITE.erl
index 275b04e40..41eba109e 100644
--- a/apps/emqx_authz/test/emqx_authz_api_settings_SUITE.erl
+++ b/apps/emqx_authz/test/emqx_authz_api_settings_SUITE.erl
@@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@@ -30,7 +30,7 @@ groups() ->
[].
init_per_suite(Config) ->
- ok = emqx_common_test_helpers:start_apps(
+ ok = emqx_mgmt_api_test_util:init_suite(
[emqx_conf, emqx_authz, emqx_dashboard],
fun set_special_configs/1
),
@@ -46,7 +46,7 @@ end_per_suite(_Config) ->
}
),
ok = stop_apps([emqx_resource]),
- emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
+ emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
ok.
set_special_configs(emqx_dashboard) ->
diff --git a/apps/emqx_authz/test/emqx_authz_api_sources_SUITE.erl b/apps/emqx_authz/test/emqx_authz_api_sources_SUITE.erl
index 34638d0aa..76b025716 100644
--- a/apps/emqx_authz/test/emqx_authz_api_sources_SUITE.erl
+++ b/apps/emqx_authz/test/emqx_authz_api_sources_SUITE.erl
@@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@@ -115,8 +115,8 @@ init_per_suite(Config) ->
end
),
- ok = emqx_common_test_helpers:start_apps(
- [emqx_conf, emqx_authz, emqx_dashboard],
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_conf, emqx_authz],
fun set_special_configs/1
),
ok = start_apps([emqx_resource]),
@@ -134,7 +134,7 @@ end_per_suite(_Config) ->
%% resource and connector should be stop first,
%% or authz_[mysql|pgsql|redis..]_SUITE would be failed
ok = stop_apps([emqx_resource]),
- emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
+ emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
meck:unload(emqx_resource),
ok.
diff --git a/apps/emqx_authz/test/emqx_authz_http_SUITE.erl b/apps/emqx_authz/test/emqx_authz_http_SUITE.erl
index b95192cb7..e91da9829 100644
--- a/apps/emqx_authz/test/emqx_authz_http_SUITE.erl
+++ b/apps/emqx_authz/test/emqx_authz_http_SUITE.erl
@@ -23,6 +23,7 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("emqx/include/emqx_placeholder.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-define(HTTP_PORT, 33333).
-define(HTTP_PATH, "/authz/[...]").
@@ -64,7 +65,14 @@ init_per_testcase(_Case, Config) ->
Config.
end_per_testcase(_Case, _Config) ->
- ok = emqx_authz_http_test_server:stop().
+ try
+ ok = emqx_authz_http_test_server:stop()
+ catch
+ exit:noproc ->
+ ok
+ end,
+ snabbkaffe:stop(),
+ ok.
%%------------------------------------------------------------------------------
%% Tests
@@ -148,7 +156,39 @@ t_response_handling(_Config) ->
?assertEqual(
deny,
emqx_access_control:authorize(ClientInfo, publish, <<"t">>)
- ).
+ ),
+
+ %% the server cannot be reached; should skip to the next
+ %% authorizer in the chain.
+ ok = emqx_authz_http_test_server:stop(),
+
+ ?check_trace(
+ ?assertEqual(
+ deny,
+ emqx_access_control:authorize(ClientInfo, publish, <<"t">>)
+ ),
+ fun(Trace) ->
+ ?assertMatch(
+ [
+ #{
+ ?snk_kind := authz_http_request_failure,
+ error := {recoverable_error, econnrefused}
+ }
+ ],
+ ?of_kind(authz_http_request_failure, Trace)
+ ),
+ ?assert(
+ ?strict_causality(
+ #{?snk_kind := authz_http_request_failure},
+ #{?snk_kind := authz_non_superuser, result := nomatch},
+ Trace
+ )
+ ),
+ ok
+ end
+ ),
+
+ ok.
t_query_params(_Config) ->
ok = setup_handler_and_config(
diff --git a/apps/emqx_authz/test/emqx_authz_schema_tests.erl b/apps/emqx_authz/test/emqx_authz_schema_tests.erl
new file mode 100644
index 000000000..f7b2e3c10
--- /dev/null
+++ b/apps/emqx_authz/test/emqx_authz_schema_tests.erl
@@ -0,0 +1,116 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_authz_schema_tests).
+
+-include_lib("eunit/include/eunit.hrl").
+
+bad_authz_type_test() ->
+ Txt = "[{type: foobar}]",
+ ?assertThrow(
+ [
+ #{
+ reason := "unknown_authz_type",
+ got := <<"foobar">>
+ }
+ ],
+ check(Txt)
+ ).
+
+bad_mongodb_type_test() ->
+ Txt = "[{type: mongodb, mongo_type: foobar}]",
+ ?assertThrow(
+ [
+ #{
+ reason := "unknown_mongo_type",
+ got := <<"foobar">>
+ }
+ ],
+ check(Txt)
+ ).
+
+missing_mongodb_type_test() ->
+ Txt = "[{type: mongodb}]",
+ ?assertThrow(
+ [
+ #{
+ reason := "unknown_mongo_type",
+ got := undefined
+ }
+ ],
+ check(Txt)
+ ).
+
+unknown_redis_type_test() ->
+ Txt = "[{type: redis, redis_type: foobar}]",
+ ?assertThrow(
+ [
+ #{
+ reason := "unknown_redis_type",
+ got := <<"foobar">>
+ }
+ ],
+ check(Txt)
+ ).
+
+missing_redis_type_test() ->
+ Txt = "[{type: redis}]",
+ ?assertThrow(
+ [
+ #{
+ reason := "unknown_redis_type",
+ got := undefined
+ }
+ ],
+ check(Txt)
+ ).
+
+unknown_http_method_test() ->
+ Txt = "[{type: http, method: getx}]",
+ ?assertThrow(
+ [
+ #{
+ reason := "unknown_http_method",
+ got := <<"getx">>
+ }
+ ],
+ check(Txt)
+ ).
+
+missing_http_method_test() ->
+ Txt = "[{type: http, methodx: get}]",
+ ?assertThrow(
+ [
+ #{
+ reason := "unknown_http_method",
+ got := undefined
+ }
+ ],
+ check(Txt)
+ ).
+
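+%% Check the config against the authorization schema, re-throwing only the
+%% error list so test cases can match on the error maps directly.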
+check(Txt0) ->
+ Txt = ["sources: ", Txt0],
+ {ok, RawConf} = hocon:binary(Txt),
+ try
+ hocon_tconf:check_plain(schema(), RawConf, #{})
+ catch
+ throw:{_Schema, Errors} ->
+ throw(Errors)
+ end.
+
+schema() ->
+ #{roots => emqx_authz_schema:fields("authorization")}.
diff --git a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.app.src b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.app.src
index 1c5627a1f..a273face1 100644
--- a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.app.src
+++ b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.app.src
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_auto_subscribe, [
- {description, "An OTP application"},
- {vsn, "0.1.2"},
+ {description, "Auto Subscribe Application"},
+ {vsn, "0.1.3"},
{registered, []},
{mod, {emqx_auto_subscribe_app, []}},
{applications, [
diff --git a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.erl b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.erl
index 878fc2ad7..7453eabdb 100644
--- a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.erl
+++ b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.erl
@@ -51,8 +51,21 @@ max_limit() ->
list() ->
format(emqx_conf:get([auto_subscribe, topics], [])).
-update(Topics) ->
- update_(Topics).
+update(Topics) when length(Topics) =< ?MAX_AUTO_SUBSCRIBE ->
+ case
+ emqx_conf:update(
+ [auto_subscribe, topics],
+ Topics,
+ #{rawconf_with_defaults => true, override_to => cluster}
+ )
+ of
+ {ok, #{raw_config := NewTopics}} ->
+ {ok, NewTopics};
+ {error, Reason} ->
+ {error, Reason}
+ end;
+update(_Topics) ->
+ {error, quota_exceeded}.
post_config_update(_KeyPath, _Req, NewTopics, _OldConf, _AppEnvs) ->
Config = emqx_conf:get([auto_subscribe], #{}),
@@ -95,22 +108,6 @@ format(Rule = #{topic := Topic}) when is_map(Rule) ->
nl => maps:get(nl, Rule, 0)
}.
-update_(Topics) when length(Topics) =< ?MAX_AUTO_SUBSCRIBE ->
- case
- emqx_conf:update(
- [auto_subscribe, topics],
- Topics,
- #{rawconf_with_defaults => true, override_to => cluster}
- )
- of
- {ok, #{raw_config := NewTopics}} ->
- {ok, NewTopics};
- {error, Reason} ->
- {error, Reason}
- end;
-update_(_Topics) ->
- {error, quota_exceeded}.
-
update_hook() ->
update_hook(emqx_conf:get([auto_subscribe], #{})).
diff --git a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe_api.erl b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe_api.erl
index f30482d4c..678c8e9b7 100644
--- a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe_api.erl
+++ b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe_api.erl
@@ -34,7 +34,7 @@
-include_lib("emqx/include/emqx_placeholder.hrl").
api_spec() ->
- emqx_dashboard_swagger:spec(?MODULE).
+ emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
paths() ->
["/mqtt/auto_subscribe"].
@@ -46,15 +46,15 @@ schema("/mqtt/auto_subscribe") ->
description => ?DESC(list_auto_subscribe_api),
tags => [<<"Auto Subscribe">>],
responses => #{
- 200 => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe")
+ 200 => topics()
}
},
put => #{
description => ?DESC(update_auto_subscribe_api),
tags => [<<"Auto Subscribe">>],
- 'requestBody' => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe"),
+ 'requestBody' => topics(),
responses => #{
- 200 => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe"),
+ 200 => topics(),
409 => emqx_dashboard_swagger:error_codes(
[?EXCEED_LIMIT],
?DESC(update_auto_subscribe_api_response409)
@@ -63,14 +63,17 @@ schema("/mqtt/auto_subscribe") ->
}
}.
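+%% Reuse the `topics` field schema from emqx_auto_subscribe_schema, so the API
+%% request and response bodies are validated against the same `topics` array.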
+topics() ->
+ Fields = emqx_auto_subscribe_schema:fields("auto_subscribe"),
+ {topics, Topics} = lists:keyfind(topics, 1, Fields),
+ Topics.
+
%%%==============================================================================================
%% api apply
auto_subscribe(get, _) ->
{200, emqx_auto_subscribe:list()};
-auto_subscribe(put, #{body := #{}}) ->
- {400, #{code => ?BAD_REQUEST, message => <<"Request body required">>}};
-auto_subscribe(put, #{body := Params}) ->
- case emqx_auto_subscribe:update(Params) of
+auto_subscribe(put, #{body := Topics}) when is_list(Topics) ->
+ case emqx_auto_subscribe:update(Topics) of
{error, quota_exceeded} ->
Message = list_to_binary(
io_lib:format(
diff --git a/apps/emqx_auto_subscribe/test/emqx_auto_subscribe_SUITE.erl b/apps/emqx_auto_subscribe/test/emqx_auto_subscribe_SUITE.erl
index 36c4e708e..900f39ebb 100644
--- a/apps/emqx_auto_subscribe/test/emqx_auto_subscribe_SUITE.erl
+++ b/apps/emqx_auto_subscribe/test/emqx_auto_subscribe_SUITE.erl
@@ -93,9 +93,8 @@ init_per_suite(Config) ->
" }"
>>
),
- emqx_common_test_helpers:start_apps(
- [emqx_conf, emqx_dashboard, ?APP],
- fun set_special_configs/1
+ emqx_mgmt_api_test_util:init_suite(
+ [emqx_conf, ?APP]
),
Config.
@@ -111,12 +110,6 @@ end_per_testcase(t_get_basic_usage_info, _Config) ->
end_per_testcase(_TestCase, _Config) ->
ok.
-set_special_configs(emqx_dashboard) ->
- emqx_dashboard_api_test_helpers:set_default_config(),
- ok;
-set_special_configs(_) ->
- ok.
-
topic_config(T) ->
#{
topic => T,
@@ -132,7 +125,7 @@ end_per_suite(_) ->
application:unload(?APP),
meck:unload(emqx_resource),
meck:unload(emqx_schema),
- emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_conf, ?APP]).
+ emqx_mgmt_api_test_util:end_suite([emqx_conf, ?APP]).
t_auto_subscribe(_) ->
emqx_auto_subscribe:update([#{<<"topic">> => Topic} || Topic <- ?TOPICS]),
@@ -151,6 +144,32 @@ t_update(_) ->
ResponseMap = emqx_json:decode(Response, [return_maps]),
?assertEqual(1, erlang:length(ResponseMap)),
+ BadBody1 = #{topic => ?TOPIC_S},
+ ?assertMatch(
+ {error, {"HTTP/1.1", 400, "Bad Request"}},
+ emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody1)
+ ),
+ BadBody2 = [#{topic => ?TOPIC_S, qos => 3}],
+ ?assertMatch(
+ {error, {"HTTP/1.1", 400, "Bad Request"}},
+ emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody2)
+ ),
+ BadBody3 = [#{topic => ?TOPIC_S, rh => 10}],
+ ?assertMatch(
+ {error, {"HTTP/1.1", 400, "Bad Request"}},
+ emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody3)
+ ),
+ BadBody4 = [#{topic => ?TOPIC_S, rap => -1}],
+ ?assertMatch(
+ {error, {"HTTP/1.1", 400, "Bad Request"}},
+ emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody4)
+ ),
+ BadBody5 = [#{topic => ?TOPIC_S, nl => -1}],
+ ?assertMatch(
+ {error, {"HTTP/1.1", 400, "Bad Request"}},
+ emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody5)
+ ),
+
{ok, Client} = emqtt:start_link(#{username => ?CLIENT_USERNAME, clientid => ?CLIENT_ID}),
{ok, _} = emqtt:connect(Client),
timer:sleep(100),
diff --git a/apps/emqx_bridge/i18n/emqx_bridge_webhook_schema.conf b/apps/emqx_bridge/i18n/emqx_bridge_webhook_schema.conf
index d9d2d0c40..f58b59aad 100644
--- a/apps/emqx_bridge/i18n/emqx_bridge_webhook_schema.conf
+++ b/apps/emqx_bridge/i18n/emqx_bridge_webhook_schema.conf
@@ -93,11 +93,20 @@ HTTP 请求的标头。
desc {
en: """
The body of the HTTP request.
+If not provided, the body will be a JSON object of all the available fields.
+Here, 'all the available fields' means the context of an MQTT message when
+this webhook is triggered by receiving an MQTT message (i.e. when `local_topic` is set),
+or the context of the event when this webhook is triggered by a rule (i.e. this
+webhook is used as an action of a rule).
Template with variables is allowed.
"""
zh: """
HTTP 请求的正文。
-允许使用带有变量的模板。"""
+如果没有设置该字段,请求正文将是包含所有可用字段的 JSON object。
+如果该 webhook 是由于收到 MQTT 消息触发的,'所有可用字段' 将是 MQTT 消息的
+上下文信息;如果该 webhook 是由于规则触发的,'所有可用字段' 则为触发事件的上下文信息。
+允许使用带有变量的模板。
+"""
}
label: {
en: "HTTP Body"
diff --git a/apps/emqx_bridge/src/emqx_bridge.app.src b/apps/emqx_bridge/src/emqx_bridge.app.src
index 89fb7adaf..39cb1b18b 100644
--- a/apps/emqx_bridge/src/emqx_bridge.app.src
+++ b/apps/emqx_bridge/src/emqx_bridge.app.src
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_bridge, [
{description, "EMQX bridges"},
- {vsn, "0.1.8"},
+ {vsn, "0.1.9"},
{registered, []},
{mod, {emqx_bridge_app, []}},
{applications, [
diff --git a/apps/emqx_bridge/src/emqx_bridge_api.erl b/apps/emqx_bridge/src/emqx_bridge_api.erl
index 6de7d8695..6a419d512 100644
--- a/apps/emqx_bridge/src/emqx_bridge_api.erl
+++ b/apps/emqx_bridge/src/emqx_bridge_api.erl
@@ -330,6 +330,7 @@ schema("/bridges/:id") ->
responses => #{
204 => <<"Bridge deleted">>,
400 => error_schema(['INVALID_ID'], "Update bridge failed"),
+ 404 => error_schema('NOT_FOUND', "Bridge not found"),
403 => error_schema('FORBIDDEN_REQUEST', "Forbidden operation"),
503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
}
@@ -452,19 +453,24 @@ schema("/bridges_probe") ->
end,
?TRY_PARSE_ID(
Id,
- case emqx_bridge:check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActs) of
+ case emqx_bridge:lookup(BridgeType, BridgeName) of
{ok, _} ->
- 204;
- {error, {rules_deps_on_this_bridge, RuleIds}} ->
- {403,
- error_msg(
- 'FORBIDDEN_REQUEST',
- {<<"There're some rules dependent on this bridge">>, RuleIds}
- )};
- {error, timeout} ->
- {503, error_msg('SERVICE_UNAVAILABLE', <<"request timeout">>)};
- {error, Reason} ->
- {500, error_msg('INTERNAL_ERROR', Reason)}
+ case emqx_bridge:check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActs) of
+ {ok, _} ->
+ 204;
+ {error, {rules_deps_on_this_bridge, RuleIds}} ->
+ {403,
+ error_msg(
+ 'FORBIDDEN_REQUEST',
+ {<<"There're some rules dependent on this bridge">>, RuleIds}
+ )};
+ {error, timeout} ->
+ {503, error_msg('SERVICE_UNAVAILABLE', <<"request timeout">>)};
+ {error, Reason} ->
+ {500, error_msg('INTERNAL_ERROR', Reason)}
+ end;
+ {error, not_found} ->
+ {404, error_msg('NOT_FOUND', <<"Bridge not found">>)}
end
).
diff --git a/apps/emqx_bridge/src/emqx_bridge_resource.erl b/apps/emqx_bridge/src/emqx_bridge_resource.erl
index dce7b9f1a..4f8d248b2 100644
--- a/apps/emqx_bridge/src/emqx_bridge_resource.erl
+++ b/apps/emqx_bridge/src/emqx_bridge_resource.erl
@@ -132,13 +132,14 @@ create(BridgeId, Conf) ->
create(Type, Name, Conf) ->
create(Type, Name, Conf, #{}).
-create(Type, Name, Conf, Opts) ->
+create(Type, Name, Conf, Opts0) ->
?SLOG(info, #{
msg => "create bridge",
type => Type,
name => Name,
config => Conf
}),
+ Opts = override_start_after_created(Conf, Opts0),
{ok, _Data} = emqx_resource:create_local(
resource_id(Type, Name),
<<"emqx_bridge">>,
@@ -146,7 +147,7 @@ create(Type, Name, Conf, Opts) ->
parse_confs(bin(Type), Name, Conf),
Opts
),
- maybe_disable_bridge(Type, Name, Conf).
+ ok.
update(BridgeId, {OldConf, Conf}) ->
{BridgeType, BridgeName} = parse_bridge_id(BridgeId),
@@ -155,7 +156,7 @@ update(BridgeId, {OldConf, Conf}) ->
update(Type, Name, {OldConf, Conf}) ->
update(Type, Name, {OldConf, Conf}, #{}).
-update(Type, Name, {OldConf, Conf}, Opts) ->
+update(Type, Name, {OldConf, Conf}, Opts0) ->
%% TODO: sometimes its not necessary to restart the bridge connection.
%%
%% - if the connection related configs like `servers` is updated, we should restart/start
@@ -164,6 +165,7 @@ update(Type, Name, {OldConf, Conf}, Opts) ->
%% the `method` or `headers` of a WebHook is changed, then the bridge can be updated
%% without restarting the bridge.
%%
+ Opts = override_start_after_created(Conf, Opts0),
case emqx_map_lib:if_only_to_toggle_enable(OldConf, Conf) of
false ->
?SLOG(info, #{
@@ -174,10 +176,10 @@ update(Type, Name, {OldConf, Conf}, Opts) ->
}),
case recreate(Type, Name, Conf, Opts) of
{ok, _} ->
- maybe_disable_bridge(Type, Name, Conf);
+ ok;
{error, not_found} ->
?SLOG(warning, #{
- msg => "updating_a_non-exist_bridge_need_create_a_new_one",
+ msg => "updating_a_non_existing_bridge",
type => Type,
name => Name,
config => Conf
@@ -244,12 +246,6 @@ remove(Type, Name, _Conf, _Opts) ->
{error, Reason} -> {error, Reason}
end.
-maybe_disable_bridge(Type, Name, Conf) ->
- case maps:get(enable, Conf, true) of
- false -> stop(Type, Name);
- true -> ok
- end.
-
maybe_clear_certs(TmpPath, #{ssl := SslConf} = Conf) ->
%% don't remove the cert files if they are in use
case is_tmp_path_conf(TmpPath, SslConf) of
@@ -276,7 +272,6 @@ parse_confs(
#{
url := Url,
method := Method,
- body := Body,
headers := Headers,
request_timeout := ReqTimeout,
max_retries := Retry
@@ -290,7 +285,7 @@ parse_confs(
#{
path => Path,
method => Method,
- body => Body,
+ body => maps:get(body, Conf, undefined),
headers => Headers,
request_timeout => ReqTimeout,
max_retries => Retry
@@ -324,3 +319,8 @@ str(Str) when is_list(Str) -> Str.
bin(Bin) when is_binary(Bin) -> Bin;
bin(Str) when is_list(Str) -> list_to_binary(Str);
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
+
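+%% A bridge resource is always created; whether it is started right away is
+%% controlled via `start_after_created`. A bridge configured with
+%% `enable => false` is thus created but left stopped, replacing the old
+%% create-then-stop behaviour of maybe_disable_bridge/3.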
+override_start_after_created(Config, Opts) ->
+ Enabled = maps:get(enable, Config, true),
+ StartAfterCreated = Enabled andalso maps:get(start_after_created, Opts, Enabled),
+ Opts#{start_after_created => StartAfterCreated}.
diff --git a/apps/emqx_bridge/src/schema/emqx_bridge_schema.erl b/apps/emqx_bridge/src/schema/emqx_bridge_schema.erl
index b4159e0a0..845c1ef90 100644
--- a/apps/emqx_bridge/src/schema/emqx_bridge_schema.erl
+++ b/apps/emqx_bridge/src/schema/emqx_bridge_schema.erl
@@ -20,7 +20,7 @@
-import(hoconsc, [mk/2, ref/2]).
--export([roots/0, fields/1, desc/1, namespace/0]).
+-export([roots/0, fields/1, desc/1, namespace/0, tags/0]).
-export([
get_response/0,
@@ -104,6 +104,9 @@ metrics_status_fields() ->
namespace() -> "bridge".
+tags() ->
+ [<<"Bridge">>].
+
roots() -> [bridges].
fields(bridges) ->
@@ -122,7 +125,9 @@ fields(bridges) ->
#{
desc => ?DESC("bridges_mqtt"),
required => false,
- converter => fun emqx_bridge_mqtt_config:upgrade_pre_ee/1
+ converter => fun(X, _HoconOpts) ->
+ emqx_bridge_mqtt_config:upgrade_pre_ee(X)
+ end
}
)}
] ++ ee_fields_bridges();
diff --git a/apps/emqx_bridge/src/schema/emqx_bridge_webhook_schema.erl b/apps/emqx_bridge/src/schema/emqx_bridge_webhook_schema.erl
index a41fc35f5..0495911e7 100644
--- a/apps/emqx_bridge/src/schema/emqx_bridge_webhook_schema.erl
+++ b/apps/emqx_bridge/src/schema/emqx_bridge_webhook_schema.erl
@@ -115,7 +115,7 @@ request_config() ->
mk(
binary(),
#{
- default => <<"${payload}">>,
+ default => undefined,
desc => ?DESC("config_body")
}
)},
diff --git a/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl
index a77da7544..9d5c57855 100644
--- a/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl
+++ b/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl
@@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
--import(emqx_dashboard_api_test_helpers, [request/4, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@@ -68,9 +68,8 @@ init_per_suite(Config) ->
%% some testcases (may from other app) already get emqx_connector started
_ = application:stop(emqx_resource),
_ = application:stop(emqx_connector),
- ok = emqx_common_test_helpers:start_apps(
- [emqx_rule_engine, emqx_bridge, emqx_dashboard],
- fun set_special_configs/1
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_rule_engine, emqx_bridge]
),
ok = emqx_common_test_helpers:load_config(
emqx_rule_engine_schema,
@@ -80,12 +79,7 @@ init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
- emqx_common_test_helpers:stop_apps([emqx_rule_engine, emqx_bridge, emqx_dashboard]),
- ok.
-
-set_special_configs(emqx_dashboard) ->
- emqx_dashboard_api_test_helpers:set_default_config(<<"bridge_admin">>);
-set_special_configs(_) ->
+ emqx_mgmt_api_test_util:end_suite([emqx_rule_engine, emqx_bridge]),
ok.
init_per_testcase(_, Config) ->
@@ -311,6 +305,15 @@ t_http_crud_apis(Config) ->
},
jsx:decode(ErrMsg2)
),
+ %% Deleting a non-existing bridge should result in an error
+ {ok, 404, ErrMsg3} = request(delete, uri(["bridges", BridgeID]), []),
+ ?assertMatch(
+ #{
+ <<"code">> := _,
+ <<"message">> := <<"Bridge not found">>
+ },
+ jsx:decode(ErrMsg3)
+ ),
ok.
t_http_bridges_local_topic(Config) ->
diff --git a/apps/emqx_conf/src/emqx_conf.app.src b/apps/emqx_conf/src/emqx_conf.app.src
index 3d18083f8..b13c0d055 100644
--- a/apps/emqx_conf/src/emqx_conf.app.src
+++ b/apps/emqx_conf/src/emqx_conf.app.src
@@ -1,6 +1,6 @@
{application, emqx_conf, [
{description, "EMQX configuration management"},
- {vsn, "0.1.9"},
+ {vsn, "0.1.10"},
{registered, []},
{mod, {emqx_conf_app, []}},
{applications, [kernel, stdlib]},
diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl
index 6fd9ac009..8b471a137 100644
--- a/apps/emqx_conf/src/emqx_conf.erl
+++ b/apps/emqx_conf/src/emqx_conf.erl
@@ -316,7 +316,7 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) ->
{[Schema | Acc], SubRefs ++ RefsAcc}
end,
{[], []},
- Types
+ hoconsc:union_members(Types)
),
{#{<<"oneOf">> => OneOf}, Refs};
hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl
index 2a46e95a5..90af47aca 100644
--- a/apps/emqx_conf/src/emqx_conf_schema.erl
+++ b/apps/emqx_conf/src/emqx_conf_schema.erl
@@ -38,7 +38,9 @@
cipher/0
]).
--export([namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1]).
+-export([
+ namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1, tags/0
+]).
-export([conf_get/2, conf_get/3, keys/2, filter/1]).
%% Static apps which merge their configs into the merged emqx.conf
@@ -60,12 +62,16 @@
emqx_exhook_schema,
emqx_psk_schema,
emqx_limiter_schema,
- emqx_slow_subs_schema
+ emqx_slow_subs_schema,
+ emqx_mgmt_api_key_schema
]).
%% root config should not have a namespace
namespace() -> undefined.
+tags() ->
+ [<<"EMQX">>].
+
roots() ->
PtKey = ?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY,
case persistent_term:get(PtKey, undefined) of
@@ -941,8 +947,8 @@ fields("log_burst_limit") ->
)}
];
fields("authorization") ->
- emqx_schema:fields("authorization") ++
- emqx_authz_schema:fields("authorization").
+ emqx_schema:authz_fields() ++
+ emqx_authz_schema:authz_fields().
desc("cluster") ->
?DESC("desc_cluster");
diff --git a/apps/emqx_connector/README.md b/apps/emqx_connector/README.md
index 7ef3a8c4a..6baba29de 100644
--- a/apps/emqx_connector/README.md
+++ b/apps/emqx_connector/README.md
@@ -14,7 +14,7 @@ An MySQL connector can be used as following:
```
(emqx@127.0.0.1)5> emqx_resource:list_instances_verbose().
[#{config =>
- #{auto_reconnect => true,cacertfile => [],certfile => [],
+ #{cacertfile => [],certfile => [],
database => "mqtt",keyfile => [],password => "public",
pool_size => 1,
server => {{127,0,0,1},3306},
diff --git a/apps/emqx_connector/i18n/emqx_connector_ldap.conf b/apps/emqx_connector/i18n/emqx_connector_ldap.conf
new file mode 100644
index 000000000..0bcb4869e
--- /dev/null
+++ b/apps/emqx_connector/i18n/emqx_connector_ldap.conf
@@ -0,0 +1,37 @@
+emqx_connector_ldap {
+
+ bind_dn {
+ desc {
+ en: """LDAP's Binding Distinguished Name (DN)"""
+ zh: """LDAP 绑定的 DN 的值"""
+ }
+ label: {
+ en: "Bind DN"
+ zh: "Bind DN"
+ }
+ }
+
+ port {
+ desc {
+ en: """LDAP Port"""
+ zh: """LDAP 端口"""
+ }
+ label: {
+ en: "Port"
+ zh: "端口"
+ }
+ }
+
+
+ timeout {
+ desc {
+ en: """LDAP's query timeout"""
+ zh: """LDAP 查询超时时间"""
+ }
+ label: {
+ en: "Timeout"
+ zh: "超时时间"
+ }
+ }
+
+}
diff --git a/apps/emqx_connector/i18n/emqx_connector_mongo.conf b/apps/emqx_connector/i18n/emqx_connector_mongo.conf
index a598c084d..619a8e3b4 100644
--- a/apps/emqx_connector/i18n/emqx_connector_mongo.conf
+++ b/apps/emqx_connector/i18n/emqx_connector_mongo.conf
@@ -2,34 +2,34 @@ emqx_connector_mongo {
single_mongo_type {
desc {
- en: "Standalone instance."
- zh: "Standalone模式。"
+ en: "Standalone instance. Must be set to 'single' when MongoDB server is running in standalone mode."
+ zh: "Standalone 模式。当 MongoDB 服务运行在 standalone 模式下,该配置必须设置为 'single'。 "
}
label: {
en: "Standalone instance"
- zh: "Standalone模式"
+ zh: "Standalone 模式"
}
}
rs_mongo_type {
desc {
- en: "Replica set."
- zh: "Replica set模式。"
+ en: "Replica set. Must be set to 'rs' when MongoDB server is running in 'replica set' mode."
+ zh: "Replica set 模式。当 MongoDB 服务运行在 replica-set 模式下,该配置必须设置为 'rs'。"
}
label: {
en: "Replica set"
- zh: "Replica set模式"
+ zh: "Replica set 模式"
}
}
sharded_mongo_type {
desc {
- en: "Sharded cluster."
- zh: "Sharded cluster模式。"
+ en: "Sharded cluster. Must be set to 'sharded' when MongoDB server is running in 'sharded' mode."
+ zh: "Sharded cluster 模式。当 MongoDB 服务运行在 sharded 模式下,该配置必须设置为 'sharded'。"
}
label: {
en: "Sharded cluster"
- zh: "Sharded cluster模式"
+ zh: "Sharded cluster 模式"
}
}
diff --git a/apps/emqx_connector/i18n/emqx_connector_redis.conf b/apps/emqx_connector/i18n/emqx_connector_redis.conf
index 228d0805a..f42f38f30 100644
--- a/apps/emqx_connector/i18n/emqx_connector_redis.conf
+++ b/apps/emqx_connector/i18n/emqx_connector_redis.conf
@@ -2,8 +2,8 @@ emqx_connector_redis {
single {
desc {
- en: "Single mode"
- zh: "单机模式。"
+ en: "Single mode. Must be set to 'single' when Redis server is running in single mode."
+ zh: "单机模式。当 Redis 服务运行在单机模式下,该配置必须设置为 'single'。"
}
label: {
en: "Single Mode"
@@ -13,8 +13,8 @@ emqx_connector_redis {
cluster {
desc {
- en: "Cluster mode"
- zh: "集群模式。"
+ en: "Cluster mode. Must be set to 'cluster' when Redis server is running in clustered mode."
+ zh: "集群模式。当 Redis 服务运行在集群模式下,该配置必须设置为 'cluster'。"
}
label: {
en: "Cluster Mode"
@@ -24,8 +24,8 @@ emqx_connector_redis {
sentinel {
desc {
- en: "Sentinel mode"
- zh: "哨兵模式。"
+ en: "Sentinel mode. Must be set to 'sentinel' when Redis server is running in sentinel mode."
+ zh: "哨兵模式。当 Redis 服务运行在哨兵模式下,该配置必须设置为 'sentinel'。"
}
label: {
en: "Sentinel Mode"
diff --git a/apps/emqx_connector/i18n/emqx_connector_schema_lib.conf b/apps/emqx_connector/i18n/emqx_connector_schema_lib.conf
index f5caf29c4..8f25e0352 100644
--- a/apps/emqx_connector/i18n/emqx_connector_schema_lib.conf
+++ b/apps/emqx_connector/i18n/emqx_connector_schema_lib.conf
@@ -68,13 +68,13 @@ emqx_connector_schema_lib {
auto_reconnect {
desc {
- en: "Enable automatic reconnect to the database."
- zh: "自动重连数据库。"
+ en: "Deprecated. Enable automatic reconnect to the database."
+ zh: "已弃用。自动重连数据库。"
}
label: {
- en: "Auto Reconnect Database"
- zh: "自动重连数据库"
- }
+ en: "Deprecated. Auto Reconnect Database"
+ zh: "已弃用。自动重连数据库"
+    }
}
}
diff --git a/apps/emqx_connector/include/emqx_connector.hrl b/apps/emqx_connector/include/emqx_connector.hrl
index 96b6ba4d6..82c946cfc 100644
--- a/apps/emqx_connector/include/emqx_connector.hrl
+++ b/apps/emqx_connector/include/emqx_connector.hrl
@@ -24,6 +24,8 @@
-define(REDIS_DEFAULT_PORT, 6379).
-define(PGSQL_DEFAULT_PORT, 5432).
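+%% Fixed reconnect interval shared by all connectors, replacing the
+%% deprecated per-connector `auto_reconnect` option.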
+-define(AUTO_RECONNECT_INTERVAL, 2).
+
-define(SERVERS_DESC,
"A Node list for Cluster to connect to. The nodes should be separated with commas, such as: `Node[,Node].`
"
"For each Node should be: "
diff --git a/apps/emqx_connector/rebar.config b/apps/emqx_connector/rebar.config
index 98490a91c..ed0bc827d 100644
--- a/apps/emqx_connector/rebar.config
+++ b/apps/emqx_connector/rebar.config
@@ -12,9 +12,9 @@
{mysql, {git, "https://github.com/emqx/mysql-otp", {tag, "1.7.1"}}},
{epgsql, {git, "https://github.com/emqx/epgsql", {tag, "4.7-emqx.2"}}},
%% NOTE: mind poolboy version when updating mongodb-erlang version
- {mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.13"}}},
+ {mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.19"}}},
%% NOTE: mind poolboy version when updating eredis_cluster version
- {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.7.1"}}},
+ {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.7.5"}}},
%% mongodb-erlang uses a special fork https://github.com/comtihon/poolboy.git
%% (which has overflow_ttl feature added).
%% However, it references `{branch, "master}` (commit 9c06a9a on 2021-04-07).
diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src
index 65ef49c6b..9a82bda27 100644
--- a/apps/emqx_connector/src/emqx_connector.app.src
+++ b/apps/emqx_connector/src/emqx_connector.app.src
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_connector, [
{description, "EMQX Data Integration Connectors"},
- {vsn, "0.1.11"},
+ {vsn, "0.1.12"},
{registered, []},
{mod, {emqx_connector_app, []}},
{applications, [
diff --git a/apps/emqx_connector/src/emqx_connector_http.erl b/apps/emqx_connector/src/emqx_connector_http.erl
index 286e0e4e6..7f684a858 100644
--- a/apps/emqx_connector/src/emqx_connector_http.erl
+++ b/apps/emqx_connector/src/emqx_connector_http.erl
@@ -209,7 +209,7 @@ on_start(
?SLOG(info, #{
msg => "starting_http_connector",
connector => InstId,
- config => Config
+ config => emqx_misc:redact(Config)
}),
{Transport, TransportOpts} =
case Scheme of
@@ -431,14 +431,13 @@ preprocess_request(
#{
method := Method,
path := Path,
- body := Body,
headers := Headers
} = Req
) ->
#{
method => emqx_plugin_libs_rule:preproc_tmpl(bin(Method)),
path => emqx_plugin_libs_rule:preproc_tmpl(Path),
- body => emqx_plugin_libs_rule:preproc_tmpl(Body),
+ body => maybe_preproc_tmpl(body, Req),
headers => preproc_headers(Headers),
request_timeout => maps:get(request_timeout, Req, 30000),
max_retries => maps:get(max_retries, Req, 2)
@@ -469,6 +468,12 @@ preproc_headers(Headers) when is_list(Headers) ->
Headers
).
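+%% `body` is optional now: return `undefined` when it is absent so that
+%% process_request_body/2 falls back to JSON-encoding the whole message.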
+maybe_preproc_tmpl(Key, Conf) ->
+ case maps:get(Key, Conf, undefined) of
+ undefined -> undefined;
+ Val -> emqx_plugin_libs_rule:preproc_tmpl(Val)
+ end.
+
process_request(
#{
method := MethodTks,
@@ -487,7 +492,7 @@ process_request(
request_timeout => ReqTimeout
}.
-process_request_body([], Msg) ->
+process_request_body(undefined, Msg) ->
emqx_json:encode(Msg);
process_request_body(BodyTks, Msg) ->
emqx_plugin_libs_rule:proc_tmpl(BodyTks, Msg).
diff --git a/apps/emqx_connector/src/emqx_connector_ldap.erl b/apps/emqx_connector/src/emqx_connector_ldap.erl
index 1cb65034d..82d622e09 100644
--- a/apps/emqx_connector/src/emqx_connector_ldap.erl
+++ b/apps/emqx_connector/src/emqx_connector_ldap.erl
@@ -59,14 +59,13 @@ on_start(
bind_password := BindPassword,
timeout := Timeout,
pool_size := PoolSize,
- auto_reconnect := AutoReconn,
ssl := SSL
} = Config
) ->
?SLOG(info, #{
msg => "starting_ldap_connector",
connector => InstId,
- config => Config
+ config => emqx_misc:redact(Config)
}),
Servers = emqx_schema:parse_servers(Servers0, ?LDAP_HOST_OPTIONS),
SslOpts =
@@ -86,11 +85,11 @@ on_start(
{bind_password, BindPassword},
{timeout, Timeout},
{pool_size, PoolSize},
- {auto_reconnect, reconn_interval(AutoReconn)}
+ {auto_reconnect, ?AUTO_RECONNECT_INTERVAL}
],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts ++ SslOpts) of
- ok -> {ok, #{poolname => PoolName, auto_reconnect => AutoReconn}};
+ ok -> {ok, #{poolname => PoolName}};
{error, Reason} -> {error, Reason}
end.
@@ -129,9 +128,6 @@ on_query(InstId, {search, Base, Filter, Attributes}, #{poolname := PoolName} = S
on_get_status(_InstId, _State) -> connected.
-reconn_interval(true) -> 15;
-reconn_interval(false) -> false.
-
search(Conn, Base, Filter, Attributes) ->
eldap2:search(Conn, [
{base, Base},
diff --git a/apps/emqx_connector/src/emqx_connector_mongo.erl b/apps/emqx_connector/src/emqx_connector_mongo.erl
index f9e703d20..1b0bcf94d 100644
--- a/apps/emqx_connector/src/emqx_connector_mongo.erl
+++ b/apps/emqx_connector/src/emqx_connector_mongo.erl
@@ -68,7 +68,6 @@ fields(single) ->
{mongo_type, #{
type => single,
default => single,
- required => true,
desc => ?DESC("single_mongo_type")
}},
{server, server()},
@@ -79,7 +78,6 @@ fields(rs) ->
{mongo_type, #{
type => rs,
default => rs,
- required => true,
desc => ?DESC("rs_mongo_type")
}},
{servers, servers()},
@@ -92,7 +90,6 @@ fields(sharded) ->
{mongo_type, #{
type => sharded,
default => sharded,
- required => true,
desc => ?DESC("sharded_mongo_type")
}},
{servers, servers()},
@@ -158,7 +155,7 @@ on_start(
rs -> "starting_mongodb_replica_set_connector";
sharded -> "starting_mongodb_sharded_connector"
end,
- ?SLOG(info, #{msg => Msg, connector => InstId, config => Config}),
+ ?SLOG(info, #{msg => Msg, connector => InstId, config => emqx_misc:redact(Config)}),
NConfig = #{hosts := Hosts} = maybe_resolve_srv_and_txt_records(Config),
SslOpts =
case maps:get(enable, SSL) of
diff --git a/apps/emqx_connector/src/emqx_connector_mqtt.erl b/apps/emqx_connector/src/emqx_connector_mqtt.erl
index 522f15ccf..585122539 100644
--- a/apps/emqx_connector/src/emqx_connector_mqtt.erl
+++ b/apps/emqx_connector/src/emqx_connector_mqtt.erl
@@ -15,6 +15,8 @@
%%--------------------------------------------------------------------
-module(emqx_connector_mqtt).
+-include("emqx_connector.hrl").
+
-include_lib("typerefl/include/types.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-include_lib("emqx/include/logger.hrl").
@@ -147,7 +149,7 @@ on_start(InstId, Conf) ->
?SLOG(info, #{
msg => "starting_mqtt_connector",
connector => InstanceId,
- config => Conf
+ config => emqx_misc:redact(Conf)
}),
BasicConf = basic_config(Conf),
BridgeConf = BasicConf#{
@@ -198,12 +200,10 @@ on_query_async(
?TRACE("QUERY", "async_send_msg_to_remote_node", #{message => Msg, connector => InstanceId}),
emqx_connector_mqtt_worker:send_to_remote_async(InstanceId, Msg, {ReplyFun, Args}).
-on_get_status(_InstId, #{name := InstanceId, bridge_conf := Conf}) ->
- AutoReconn = maps:get(auto_reconnect, Conf, true),
+on_get_status(_InstId, #{name := InstanceId}) ->
case emqx_connector_mqtt_worker:status(InstanceId) of
connected -> connected;
- _ when AutoReconn == true -> connecting;
- _ when AutoReconn == false -> disconnected
+ _ -> connecting
end.
ensure_mqtt_worker_started(InstanceId, BridgeConf) ->
@@ -236,7 +236,6 @@ make_forward_confs(FrowardConf) ->
basic_config(
#{
server := Server,
- reconnect_interval := ReconnIntv,
proto_ver := ProtoVer,
bridge_mode := BridgeMode,
clean_start := CleanStart,
@@ -252,7 +251,7 @@ basic_config(
%% 30s
connect_timeout => 30,
auto_reconnect => true,
- reconnect_interval => ReconnIntv,
+ reconnect_interval => ?AUTO_RECONNECT_INTERVAL,
proto_ver => ProtoVer,
%% Opening bridge_mode will form a non-standard mqtt connection message.
%% A load balancing server (such as haproxy) is often set up before the emqx broker server.
diff --git a/apps/emqx_connector/src/emqx_connector_mysql.erl b/apps/emqx_connector/src/emqx_connector_mysql.erl
index 693917a27..b162a21b4 100644
--- a/apps/emqx_connector/src/emqx_connector_mysql.erl
+++ b/apps/emqx_connector/src/emqx_connector_mysql.erl
@@ -52,7 +52,6 @@
-type state() ::
#{
poolname := atom(),
- auto_reconnect := boolean(),
prepare_statement := prepares(),
params_tokens := params_tokens(),
batch_inserts := sqls(),
@@ -95,7 +94,6 @@ on_start(
server := Server,
database := DB,
username := Username,
- auto_reconnect := AutoReconn,
pool_size := PoolSize,
ssl := SSL
} = Config
@@ -104,7 +102,7 @@ on_start(
?SLOG(info, #{
msg => "starting_mysql_connector",
connector => InstId,
- config => Config
+ config => emqx_misc:redact(Config)
}),
SslOpts =
case maps:get(enable, SSL) of
@@ -120,13 +118,13 @@ on_start(
{port, Port},
{user, Username},
{database, DB},
- {auto_reconnect, reconn_interval(AutoReconn)},
+ {auto_reconnect, ?AUTO_RECONNECT_INTERVAL},
{pool_size, PoolSize}
]
],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
Prepares = parse_prepare_sql(Config),
- State = maps:merge(#{poolname => PoolName, auto_reconnect => AutoReconn}, Prepares),
+ State = maps:merge(#{poolname => PoolName}, Prepares),
case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Options ++ SslOpts) of
ok ->
{ok, init_prepare(State)};
@@ -211,7 +209,7 @@ mysql_function(prepared_query) ->
mysql_function(_) ->
mysql_function(prepared_query).
-on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State) ->
+on_get_status(_InstId, #{poolname := Pool} = State) ->
case emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1) of
true ->
case do_check_prepares(State) of
@@ -222,10 +220,10 @@ on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State
{connected, NState};
{error, _Reason} ->
%% do not log error, it is logged in prepare_sql_to_conn
- conn_status(AutoReconn)
+ connecting
end;
false ->
- conn_status(AutoReconn)
+ connecting
end.
do_get_status(Conn) ->
@@ -244,11 +242,6 @@ do_check_prepares(State = #{poolname := PoolName, prepare_statement := {error, P
end.
%% ===================================================================
-conn_status(_AutoReconn = true) -> connecting;
-conn_status(_AutoReconn = false) -> disconnected.
-
-reconn_interval(true) -> 15;
-reconn_interval(false) -> false.
connect(Options) ->
mysql:start_link(Options).
diff --git a/apps/emqx_connector/src/emqx_connector_pgsql.erl b/apps/emqx_connector/src/emqx_connector_pgsql.erl
index 4b565a614..9965ff3b4 100644
--- a/apps/emqx_connector/src/emqx_connector_pgsql.erl
+++ b/apps/emqx_connector/src/emqx_connector_pgsql.erl
@@ -56,7 +56,6 @@
-type state() ::
#{
poolname := atom(),
- auto_reconnect := boolean(),
prepare_sql := prepares(),
params_tokens := params_tokens(),
prepare_statement := epgsql:statement()
@@ -87,8 +86,6 @@ on_start(
server := Server,
database := DB,
username := User,
- password := Password,
- auto_reconnect := AutoReconn,
pool_size := PoolSize,
ssl := SSL
} = Config
@@ -97,7 +94,7 @@ on_start(
?SLOG(info, #{
msg => "starting_postgresql_connector",
connector => InstId,
- config => Config
+ config => emqx_misc:redact(Config)
}),
SslOpts =
case maps:get(enable, SSL) of
@@ -113,14 +110,14 @@ on_start(
{host, Host},
{port, Port},
{username, User},
- {password, emqx_secret:wrap(Password)},
+ {password, emqx_secret:wrap(maps:get(password, Config, ""))},
{database, DB},
- {auto_reconnect, reconn_interval(AutoReconn)},
+ {auto_reconnect, ?AUTO_RECONNECT_INTERVAL},
{pool_size, PoolSize}
],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
Prepares = parse_prepare_sql(Config),
- InitState = #{poolname => PoolName, auto_reconnect => AutoReconn, prepare_statement => #{}},
+ InitState = #{poolname => PoolName, prepare_statement => #{}},
State = maps:merge(InitState, Prepares),
case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Options ++ SslOpts) of
ok ->
@@ -247,7 +244,7 @@ on_sql_query(InstId, PoolName, Type, NameOrSQL, Data) ->
end,
Result.
-on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State) ->
+on_get_status(_InstId, #{poolname := Pool} = State) ->
case emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1) of
true ->
case do_check_prepares(State) of
@@ -258,10 +255,10 @@ on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State
{connected, NState};
false ->
%% do not log error, it is logged in prepare_sql_to_conn
- conn_status(AutoReconn)
+ connecting
end;
false ->
- conn_status(AutoReconn)
+ connecting
end.
do_get_status(Conn) ->
@@ -280,11 +277,6 @@ do_check_prepares(State = #{poolname := PoolName, prepare_sql := {error, Prepare
end.
%% ===================================================================
-conn_status(_AutoReconn = true) -> connecting;
-conn_status(_AutoReconn = false) -> disconnected.
-
-reconn_interval(true) -> 15;
-reconn_interval(false) -> false.
connect(Opts) ->
Host = proplists:get_value(host, Opts),
diff --git a/apps/emqx_connector/src/emqx_connector_redis.erl b/apps/emqx_connector/src/emqx_connector_redis.erl
index 350d49f01..7fdf9d28d 100644
--- a/apps/emqx_connector/src/emqx_connector_redis.erl
+++ b/apps/emqx_connector/src/emqx_connector_redis.erl
@@ -64,7 +64,7 @@ fields(single) ->
{redis_type, #{
type => single,
default => single,
- required => true,
+ required => false,
desc => ?DESC("single")
}}
] ++
@@ -76,7 +76,7 @@ fields(cluster) ->
{redis_type, #{
type => cluster,
default => cluster,
- required => true,
+ required => false,
desc => ?DESC("cluster")
}}
] ++
@@ -88,7 +88,7 @@ fields(sentinel) ->
{redis_type, #{
type => sentinel,
default => sentinel,
- required => true,
+ required => false,
desc => ?DESC("sentinel")
}},
{sentinel, #{
@@ -117,14 +117,13 @@ on_start(
#{
redis_type := Type,
pool_size := PoolSize,
- auto_reconnect := AutoReconn,
ssl := SSL
} = Config
) ->
?SLOG(info, #{
msg => "starting_redis_connector",
connector => InstId,
- config => Config
+ config => emqx_misc:redact(Config)
}),
ConfKey =
case Type of
@@ -142,7 +141,7 @@ on_start(
[
{pool_size, PoolSize},
{password, maps:get(password, Config, "")},
- {auto_reconnect, reconn_interval(AutoReconn)}
+ {auto_reconnect, ?AUTO_RECONNECT_INTERVAL}
] ++ Database ++ Servers,
Options =
case maps:get(enable, SSL) of
@@ -155,7 +154,7 @@ on_start(
[{ssl, false}]
end ++ [{sentinel, maps:get(sentinel, Config, undefined)}],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
- State = #{poolname => PoolName, type => Type, auto_reconnect => AutoReconn},
+ State = #{poolname => PoolName, type => Type},
case Type of
cluster ->
case eredis_cluster:start_pool(PoolName, Opts ++ [{options, Options}]) of
@@ -229,18 +228,18 @@ eredis_cluster_workers_exist_and_are_connected(Workers) ->
Workers
).
-on_get_status(_InstId, #{type := cluster, poolname := PoolName, auto_reconnect := AutoReconn}) ->
+on_get_status(_InstId, #{type := cluster, poolname := PoolName}) ->
case eredis_cluster:pool_exists(PoolName) of
true ->
Workers = extract_eredis_cluster_workers(PoolName),
Health = eredis_cluster_workers_exist_and_are_connected(Workers),
- status_result(Health, AutoReconn);
+ status_result(Health);
false ->
disconnected
end;
-on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn}) ->
+on_get_status(_InstId, #{poolname := Pool}) ->
Health = emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1),
- status_result(Health, AutoReconn).
+ status_result(Health).
do_get_status(Conn) ->
case eredis:q(Conn, ["PING"]) of
@@ -248,12 +247,8 @@ do_get_status(Conn) ->
_ -> false
end.
-status_result(_Status = true, _AutoReconn) -> connected;
-status_result(_Status = false, _AutoReconn = true) -> connecting;
-status_result(_Status = false, _AutoReconn = false) -> disconnected.
-
-reconn_interval(true) -> 15;
-reconn_interval(false) -> false.
+status_result(_Status = true) -> connected;
+status_result(_Status = false) -> connecting.
do_cmd(PoolName, cluster, {cmd, Command}) ->
eredis_cluster:q(PoolName, Command);
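For orientation, here is a minimal sketch (not part of this patch; names are illustrative) of the health-status mapping that the MySQL, PostgreSQL and Redis connectors now share: with auto-reconnect always enabled and the retry interval fixed by the ?AUTO_RECONNECT_INTERVAL macro, a failed worker check reports `connecting` so the resource manager keeps retrying, rather than `disconnected`.

%% Illustrative sketch only. `?AUTO_RECONNECT_INTERVAL' is assumed to be defined
%% in an emqx_connector header as a small interval in seconds.
example_on_get_status(PoolName) ->
    case emqx_plugin_libs_pool:health_check_ecpool_workers(
        PoolName, fun(_Conn) -> true end
    ) of
        true -> connected;
        %% reconnection is always attempted now, so report `connecting'
        false -> connecting
    end.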
diff --git a/apps/emqx_connector/src/emqx_connector_schema_lib.erl b/apps/emqx_connector/src/emqx_connector_schema_lib.erl
index 2364aeeaa..5d8f6941c 100644
--- a/apps/emqx_connector/src/emqx_connector_schema_lib.erl
+++ b/apps/emqx_connector/src/emqx_connector_schema_lib.erl
@@ -106,4 +106,5 @@ password(_) -> undefined.
auto_reconnect(type) -> boolean();
auto_reconnect(desc) -> ?DESC("auto_reconnect");
auto_reconnect(default) -> true;
+auto_reconnect(deprecated) -> {since, "v5.0.15"};
auto_reconnect(_) -> undefined.
diff --git a/apps/emqx_connector/test/emqx_connector_redis_SUITE.erl b/apps/emqx_connector/test/emqx_connector_redis_SUITE.erl
index 060340fed..a1d8fe9d5 100644
--- a/apps/emqx_connector/test/emqx_connector_redis_SUITE.erl
+++ b/apps/emqx_connector/test/emqx_connector_redis_SUITE.erl
@@ -75,9 +75,9 @@ wait_for_redis(Checks) ->
wait_for_redis(Checks - 1)
end.
-% %%------------------------------------------------------------------------------
-% %% Testcases
-% %%------------------------------------------------------------------------------
+%%------------------------------------------------------------------------------
+%% Testcases
+%%------------------------------------------------------------------------------
t_single_lifecycle(_Config) ->
perform_lifecycle_check(
diff --git a/apps/emqx_dashboard/i18n/emqx_dashboard_i18n.conf b/apps/emqx_dashboard/i18n/emqx_dashboard_i18n.conf
index e6758d0de..872cfdf26 100644
--- a/apps/emqx_dashboard/i18n/emqx_dashboard_i18n.conf
+++ b/apps/emqx_dashboard/i18n/emqx_dashboard_i18n.conf
@@ -199,23 +199,12 @@ its own from which a browser should permit loading resources."""
}
bootstrap_users_file {
desc {
- en: "Initialize users file."
- zh: "初始化用户文件"
+ en: "Deprecated, use api_key.bootstrap_file"
+ zh: "已废弃,请使用 api_key.bootstrap_file"
}
label {
- en: """Is used to add an administrative user to Dashboard when emqx is first launched,
- the format is:
- ```
- username1:password1
- username2:password2
- ```
-"""
- zh: """用于在首次启动 emqx 时,为 Dashboard 添加管理用户,其格式为:
- ```
- username1:password1
- username2:password2
- ```
-"""
+ en: """Deprecated"""
+ zh: """已废弃"""
}
}
}
diff --git a/apps/emqx_dashboard/src/emqx_dashboard.app.src b/apps/emqx_dashboard/src/emqx_dashboard.app.src
index 2698d5534..57d63247b 100644
--- a/apps/emqx_dashboard/src/emqx_dashboard.app.src
+++ b/apps/emqx_dashboard/src/emqx_dashboard.app.src
@@ -2,7 +2,7 @@
{application, emqx_dashboard, [
{description, "EMQX Web Dashboard"},
% strict semver, bump manually!
- {vsn, "5.0.11"},
+ {vsn, "5.0.12"},
{modules, []},
{registered, [emqx_dashboard_sup]},
{applications, [kernel, stdlib, mnesia, minirest, emqx]},
diff --git a/apps/emqx_dashboard/src/emqx_dashboard.erl b/apps/emqx_dashboard/src/emqx_dashboard.erl
index f15467658..36c7660cc 100644
--- a/apps/emqx_dashboard/src/emqx_dashboard.erl
+++ b/apps/emqx_dashboard/src/emqx_dashboard.erl
@@ -65,8 +65,12 @@ start_listeners(Listeners) ->
components => #{
schemas => #{},
'securitySchemes' => #{
- 'basicAuth' => #{type => http, scheme => basic},
- 'bearerAuth' => #{type => http, scheme => bearer}
+ 'basicAuth' => #{
+ type => http,
+ scheme => basic,
+ description =>
+ <<"Authorize with [API Keys](https://www.emqx.io/docs/en/v5.0/admin/api.html#api-keys)">>
+ }
}
}
},
@@ -215,28 +219,7 @@ listener_name(Protocol) ->
authorize(Req) ->
case cowboy_req:parse_header(<<"authorization">>, Req) of
{basic, Username, Password} ->
- case emqx_dashboard_admin:check(Username, Password) of
- ok ->
- ok;
- {error, <<"username_not_found">>} ->
- Path = cowboy_req:path(Req),
- case emqx_mgmt_auth:authorize(Path, Username, Password) of
- ok ->
- ok;
- {error, <<"not_allowed">>} ->
- return_unauthorized(
- ?WRONG_USERNAME_OR_PWD,
- <<"Check username/password">>
- );
- {error, _} ->
- return_unauthorized(
- ?WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET,
- <<"Check username/password or api_key/api_secret">>
- )
- end;
- {error, _} ->
- return_unauthorized(?WRONG_USERNAME_OR_PWD, <<"Check username/password">>)
- end;
+ api_key_authorize(Req, Username, Password);
{bearer, Token} ->
case emqx_dashboard_admin:verify_token(Token) of
ok ->
@@ -269,3 +252,20 @@ i18n_file() ->
listeners() ->
emqx_conf:get([dashboard, listeners], []).
+
+api_key_authorize(Req, Key, Secret) ->
+ Path = cowboy_req:path(Req),
+ case emqx_mgmt_auth:authorize(Path, Key, Secret) of
+ ok ->
+ ok;
+ {error, <<"not_allowed">>} ->
+ return_unauthorized(
+ ?BAD_API_KEY_OR_SECRET,
+ <<"Not allowed, Check api_key/api_secret">>
+ );
+ {error, _} ->
+ return_unauthorized(
+ ?BAD_API_KEY_OR_SECRET,
+ <<"Check api_key/api_secret">>
+ )
+ end.
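Since Dashboard username/password basic auth is no longer accepted on the management API, requests have to carry an API key and secret instead. A hypothetical usage sketch (key material copied from the bootstrap-file example further below; endpoint assumed to be the default local HTTP listener, with `inets` started):

%% Sketch only: call the management API with api_key/api_secret over HTTP basic auth.
api_status() ->
    Key = "7e729ae70d23144b",
    Secret = "2QILI9AcQ9BYlVqLDHQNWN2saIjBV4egr1CZneTNKr9CpK",
    Auth = {"Authorization", "Basic " ++ base64:encode_to_string(Key ++ ":" ++ Secret)},
    httpc:request(get, {"http://127.0.0.1:18083/api/v5/status", [Auth]}, [], []).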
diff --git a/apps/emqx_dashboard/src/emqx_dashboard_admin.erl b/apps/emqx_dashboard/src/emqx_dashboard_admin.erl
index 77c77d5b9..e36c2628b 100644
--- a/apps/emqx_dashboard/src/emqx_dashboard_admin.erl
+++ b/apps/emqx_dashboard/src/emqx_dashboard_admin.erl
@@ -51,8 +51,7 @@
-export([
add_default_user/0,
- default_username/0,
- add_bootstrap_users/0
+ default_username/0
]).
-type emqx_admin() :: #?ADMIN{}.
@@ -85,21 +84,6 @@ mnesia(boot) ->
add_default_user() ->
add_default_user(binenv(default_username), binenv(default_password)).
--spec add_bootstrap_users() -> ok | {error, _}.
-add_bootstrap_users() ->
- case emqx:get_config([dashboard, bootstrap_users_file], undefined) of
- undefined ->
- ok;
- File ->
- case mnesia:table_info(?ADMIN, size) of
- 0 ->
- ?SLOG(debug, #{msg => "Add dashboard bootstrap users", file => File}),
- add_bootstrap_users(File);
- _ ->
- ok
- end
- end.
-
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
@@ -311,44 +295,3 @@ add_default_user(Username, Password) ->
[] -> add_user(Username, Password, <<"administrator">>);
_ -> {ok, default_user_exists}
end.
-
-add_bootstrap_users(File) ->
- case file:open(File, [read]) of
- {ok, Dev} ->
- {ok, MP} = re:compile(<<"(\.+):(\.+$)">>, [ungreedy]),
- try
- load_bootstrap_user(Dev, MP)
- catch
- Type:Reason ->
- {error, {Type, Reason}}
- after
- file:close(Dev)
- end;
- {error, Reason} = Error ->
- ?SLOG(error, #{
- msg => "failed to open the dashboard bootstrap users file",
- file => File,
- reason => Reason
- }),
- Error
- end.
-
-load_bootstrap_user(Dev, MP) ->
- case file:read_line(Dev) of
- {ok, Line} ->
- case re:run(Line, MP, [global, {capture, all_but_first, binary}]) of
- {match, [[Username, Password]]} ->
- case add_user(Username, Password, ?BOOTSTRAP_USER_TAG) of
- {ok, _} ->
- load_bootstrap_user(Dev, MP);
- Error ->
- Error
- end;
- _ ->
- load_bootstrap_user(Dev, MP)
- end;
- eof ->
- ok;
- Error ->
- Error
- end.
diff --git a/apps/emqx_dashboard/src/emqx_dashboard_api.erl b/apps/emqx_dashboard/src/emqx_dashboard_api.erl
index 9facac59c..a4322c696 100644
--- a/apps/emqx_dashboard/src/emqx_dashboard_api.erl
+++ b/apps/emqx_dashboard/src/emqx_dashboard_api.erl
@@ -47,7 +47,7 @@
-define(EMPTY(V), (V == undefined orelse V == <<>>)).
--define(WRONG_USERNAME_OR_PWD, 'WRONG_USERNAME_OR_PWD').
+-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
-define(WRONG_TOKEN_OR_USERNAME, 'WRONG_TOKEN_OR_USERNAME').
-define(USER_NOT_FOUND, 'USER_NOT_FOUND').
-define(ERROR_PWD_NOT_MATCH, 'ERROR_PWD_NOT_MATCH').
@@ -164,7 +164,7 @@ schema("/users/:username/change_pwd") ->
}.
response_schema(401) ->
- emqx_dashboard_swagger:error_codes([?WRONG_USERNAME_OR_PWD], ?DESC(login_failed401));
+ emqx_dashboard_swagger:error_codes([?BAD_USERNAME_OR_PWD], ?DESC(login_failed401));
response_schema(404) ->
emqx_dashboard_swagger:error_codes([?USER_NOT_FOUND], ?DESC(users_api404)).
@@ -223,7 +223,7 @@ login(post, #{body := Params}) ->
}};
{error, R} ->
?SLOG(info, #{msg => "Dashboard login failed", username => Username, reason => R}),
- {401, ?WRONG_USERNAME_OR_PWD, <<"Auth failed">>}
+ {401, ?BAD_USERNAME_OR_PWD, <<"Auth failed">>}
end.
logout(_, #{
diff --git a/apps/emqx_dashboard/src/emqx_dashboard_app.erl b/apps/emqx_dashboard/src/emqx_dashboard_app.erl
index 6956f3fc8..2c3f9b8bc 100644
--- a/apps/emqx_dashboard/src/emqx_dashboard_app.erl
+++ b/apps/emqx_dashboard/src/emqx_dashboard_app.erl
@@ -31,13 +31,8 @@ start(_StartType, _StartArgs) ->
case emqx_dashboard:start_listeners() of
ok ->
emqx_dashboard_cli:load(),
- case emqx_dashboard_admin:add_bootstrap_users() of
- ok ->
- {ok, _} = emqx_dashboard_admin:add_default_user(),
- {ok, Sup};
- Error ->
- Error
- end;
+ {ok, _} = emqx_dashboard_admin:add_default_user(),
+ {ok, Sup};
{error, Reason} ->
{error, Reason}
end.
diff --git a/apps/emqx_dashboard/src/emqx_dashboard_schema.erl b/apps/emqx_dashboard/src/emqx_dashboard_schema.erl
index 4605d911d..6742032d5 100644
--- a/apps/emqx_dashboard/src/emqx_dashboard_schema.erl
+++ b/apps/emqx_dashboard/src/emqx_dashboard_schema.erl
@@ -56,7 +56,15 @@ fields("dashboard") ->
{cors, fun cors/1},
{i18n_lang, fun i18n_lang/1},
{bootstrap_users_file,
- ?HOCON(binary(), #{desc => ?DESC(bootstrap_users_file), required => false})}
+ ?HOCON(
+ binary(),
+ #{
+ desc => ?DESC(bootstrap_users_file),
+ required => false,
+ default => <<>>
+ %% deprecated => {since, "5.1.0"}
+ }
+ )}
];
fields("listeners") ->
[
diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl
index 4b7a672bd..0afbf5f1e 100644
--- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl
+++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl
@@ -139,14 +139,20 @@ fields(limit) ->
[{limit, hoconsc:mk(range(1, ?MAX_ROW_LIMIT), Meta)}];
fields(count) ->
Desc = <<
- "Total number of records counted.
"
- "Note: this field is 0
when the queryed table is empty, "
- "or if the query can not be optimized and requires a full table scan."
+ "Total number of records matching the query.
"
+ "Note: this field is present only if the query can be optimized and does "
+ "not require a full table scan."
+ >>,
+ Meta = #{desc => Desc, required => false},
+ [{count, hoconsc:mk(non_neg_integer(), Meta)}];
+fields(hasnext) ->
+ Desc = <<
+ "Flag indicating whether there are more results available on next pages."
>>,
Meta = #{desc => Desc, required => true},
- [{count, hoconsc:mk(non_neg_integer(), Meta)}];
+ [{hasnext, hoconsc:mk(boolean(), Meta)}];
fields(meta) ->
- fields(page) ++ fields(limit) ++ fields(count).
+ fields(page) ++ fields(limit) ++ fields(count) ++ fields(hasnext).
-spec schema_with_example(hocon_schema:type(), term()) -> hocon_schema:field_schema_map().
schema_with_example(Type, Example) ->
@@ -623,7 +629,7 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) ->
{[Schema | Acc], SubRefs ++ RefsAcc}
end,
{[], []},
- Types
+ hoconsc:union_members(Types)
),
{#{<<"oneOf">> => OneOf}, Refs};
hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
@@ -705,9 +711,11 @@ typename_to_spec("service_account_json()", _Mod) ->
typename_to_spec("#{" ++ _, Mod) ->
typename_to_spec("map()", Mod);
typename_to_spec("qos()", _Mod) ->
- #{type => string, enum => [0, 1, 2]};
+ #{type => integer, minimum => 0, maximum => 2, example => 0};
typename_to_spec("{binary(), binary()}", _Mod) ->
#{type => object, example => #{}};
+typename_to_spec("{string(), string()}", _Mod) ->
+ #{type => object, example => #{}};
typename_to_spec("comma_separated_list()", _Mod) ->
#{type => string, example => <<"item1,item2">>};
typename_to_spec("comma_separated_binary()", _Mod) ->
diff --git a/apps/emqx_dashboard/test/emqx_dashboard_SUITE.erl b/apps/emqx_dashboard/test/emqx_dashboard_SUITE.erl
index 934d6055d..23d1b40c1 100644
--- a/apps/emqx_dashboard/test/emqx_dashboard_SUITE.erl
+++ b/apps/emqx_dashboard/test/emqx_dashboard_SUITE.erl
@@ -114,9 +114,9 @@ t_admin_delete_self_failed(_) ->
?assertEqual(1, length(Admins)),
Header = auth_header_(<<"username1">>, <<"password">>),
{error, {_, 400, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header),
- Token = erlang:iolist_to_binary(["Basic ", base64:encode("username1:password")]),
+ Token = ["Basic ", base64:encode("username1:password")],
Header2 = {"Authorization", Token},
- {error, {_, 400, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header2),
+ {error, {_, 401, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header2),
mnesia:clear_table(?ADMIN).
t_rest_api(_Config) ->
diff --git a/apps/emqx_dashboard/test/emqx_dashboard_bad_api_SUITE.erl b/apps/emqx_dashboard/test/emqx_dashboard_bad_api_SUITE.erl
index b7fbf889e..a9b448662 100644
--- a/apps/emqx_dashboard/test/emqx_dashboard_bad_api_SUITE.erl
+++ b/apps/emqx_dashboard/test/emqx_dashboard_bad_api_SUITE.erl
@@ -25,43 +25,24 @@
-define(SERVER, "http://127.0.0.1:18083/api/v5").
+-import(emqx_mgmt_api_test_util, [request/2]).
+
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
mria:start(),
- application:load(emqx_dashboard),
- emqx_common_test_helpers:start_apps([emqx_conf, emqx_dashboard], fun set_special_configs/1),
+ emqx_mgmt_api_test_util:init_suite([emqx_conf]),
Config.
-set_special_configs(emqx_dashboard) ->
- emqx_dashboard_api_test_helpers:set_default_config(),
- ok;
-set_special_configs(_) ->
- ok.
-
end_per_suite(Config) ->
end_suite(),
Config.
end_suite() ->
- application:unload(emqx_management),
- emqx_common_test_helpers:stop_apps([emqx_dashboard]).
+ emqx_mgmt_api_test_util:end_suite([emqx_conf]).
t_bad_api_path(_) ->
Url = ?SERVER ++ "/for/test/some/path/not/exist",
- {error, {"HTTP/1.1", 404, "Not Found"}} = request(Url),
+ {ok, 404, _} = request(get, Url),
ok.
-
-request(Url) ->
- Request = {Url, []},
- case httpc:request(get, Request, [], []) of
- {error, Reason} ->
- {error, Reason};
- {ok, {{"HTTP/1.1", Code, _}, _, Return}} when
- Code >= 200 andalso Code =< 299
- ->
- {ok, emqx_json:decode(Return, [return_maps])};
- {ok, {Reason, _, _}} ->
- {error, Reason}
- end.
diff --git a/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl b/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl
index 6f4a0e0fd..7d4980320 100644
--- a/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl
+++ b/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl
@@ -19,6 +19,8 @@
-compile(nowarn_export_all).
-compile(export_all).
+-import(emqx_dashboard_SUITE, [auth_header_/0]).
+
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("emqx/include/emqx.hrl").
@@ -153,10 +155,6 @@ do_request_api(Method, Request) ->
{error, Reason}
end.
-auth_header_() ->
- Basic = binary_to_list(base64:encode(<<"admin:public">>)),
- {"Authorization", "Basic " ++ Basic}.
-
restart_monitor() ->
OldMonitor = erlang:whereis(emqx_dashboard_monitor),
erlang:exit(OldMonitor, kill),
diff --git a/apps/emqx_dashboard/test/emqx_swagger_parameter_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_parameter_SUITE.erl
index 8a5fe68e7..5d89fb273 100644
--- a/apps/emqx_dashboard/test/emqx_swagger_parameter_SUITE.erl
+++ b/apps/emqx_dashboard/test/emqx_swagger_parameter_SUITE.erl
@@ -112,7 +112,7 @@ t_in_query(_Config) ->
description => <<"QOS">>,
in => query,
name => qos,
- schema => #{enum => [0, 1, 2], type => string}
+ schema => #{minimum => 0, maximum => 2, type => integer, example => 0}
}
],
validate("/test/in/query", Expect),
diff --git a/apps/emqx_exhook/test/emqx_exhook_api_SUITE.erl b/apps/emqx_exhook/test/emqx_exhook_api_SUITE.erl
index 7be940a53..8a4fb7a44 100644
--- a/apps/emqx_exhook/test/emqx_exhook_api_SUITE.erl
+++ b/apps/emqx_exhook/test/emqx_exhook_api_SUITE.erl
@@ -347,13 +347,7 @@ do_request_api(Method, Request) ->
end.
auth_header_() ->
- AppId = <<"admin">>,
- AppSecret = <<"public">>,
- auth_header_(binary_to_list(AppId), binary_to_list(AppSecret)).
-
-auth_header_(User, Pass) ->
- Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),
- {"Authorization", "Basic " ++ Encoded}.
+ emqx_mgmt_api_test_util:auth_header_().
api_path(Parts) ->
?HOST ++ filename:join([?BASE_PATH, ?API_VERSION] ++ Parts).
diff --git a/apps/emqx_gateway/src/emqx_gateway.app.src b/apps/emqx_gateway/src/emqx_gateway.app.src
index 53403a67a..c5dd76f19 100644
--- a/apps/emqx_gateway/src/emqx_gateway.app.src
+++ b/apps/emqx_gateway/src/emqx_gateway.app.src
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_gateway, [
{description, "The Gateway management application"},
- {vsn, "0.1.10"},
+ {vsn, "0.1.11"},
{registered, []},
{mod, {emqx_gateway_app, []}},
{applications, [kernel, stdlib, grpc, emqx, emqx_authn]},
diff --git a/apps/emqx_gateway/src/emqx_gateway_schema.erl b/apps/emqx_gateway/src/emqx_gateway_schema.erl
index e89280f14..804e1f862 100644
--- a/apps/emqx_gateway/src/emqx_gateway_schema.erl
+++ b/apps/emqx_gateway/src/emqx_gateway_schema.erl
@@ -49,12 +49,15 @@
]).
-elvis([{elvis_style, dont_repeat_yourself, disable}]).
--export([namespace/0, roots/0, fields/1, desc/1]).
+-export([namespace/0, roots/0, fields/1, desc/1, tags/0]).
-export([proxy_protocol_opts/0]).
namespace() -> gateway.
+tags() ->
+ [<<"Gateway">>].
+
roots() -> [gateway].
fields(gateway) ->
diff --git a/apps/emqx_gateway/test/emqx_gateway_test_utils.erl b/apps/emqx_gateway/test/emqx_gateway_test_utils.erl
index a6791a36b..deb602bc7 100644
--- a/apps/emqx_gateway/test/emqx_gateway_test_utils.erl
+++ b/apps/emqx_gateway/test/emqx_gateway_test_utils.erl
@@ -106,8 +106,6 @@ assert_fields_exist(Ks, Map) ->
%% http
-define(http_api_host, "http://127.0.0.1:18083/api/v5").
--define(default_user, "admin").
--define(default_pass, "public").
request(delete = Mth, Path) ->
do_request(Mth, req(Path, []));
@@ -176,5 +174,4 @@ url(Path, Qs) ->
lists:concat([?http_api_host, Path, "?", binary_to_list(cow_qs:qs(Qs))]).
auth(Headers) ->
- Token = base64:encode(?default_user ++ ":" ++ ?default_pass),
- [{"Authorization", "Basic " ++ binary_to_list(Token)}] ++ Headers.
+ [emqx_mgmt_api_test_util:auth_header_() | Headers].
diff --git a/apps/emqx_management/i18n/emqx_mgmt_api_key_i18n.conf b/apps/emqx_management/i18n/emqx_mgmt_api_key_i18n.conf
new file mode 100644
index 000000000..eae559660
--- /dev/null
+++ b/apps/emqx_management/i18n/emqx_mgmt_api_key_i18n.conf
@@ -0,0 +1,33 @@
+emqx_mgmt_api_key_schema {
+ api_key {
+ desc {
+ en: """API Key, can be used to request API other than the management API key and the Dashboard user management API"""
+ zh: """API 密钥, 可用于请求除管理 API 密钥及 Dashboard 用户管理 API 的其它接口"""
+ }
+ label {
+ en: "API Key"
+ zh: "API 密钥"
+ }
+ }
+ bootstrap_file {
+ desc {
+ en: """Bootstrap file is used to add an api_key when emqx is launched,
+ the format is:
+ ```
+ 7e729ae70d23144b:2QILI9AcQ9BYlVqLDHQNWN2saIjBV4egr1CZneTNKr9CpK
+ ec3907f865805db0:Ee3taYltUKtoBVD9C3XjQl9C6NXheip8Z9B69BpUv5JxVHL
+ ```
+"""
+ zh: """用于在启动 emqx 时,添加 API 密钥,其格式为:
+ ```
+ 7e729ae70d23144b:2QILI9AcQ9BYlVqLDHQNWN2saIjBV4egr1CZneTNKr9CpK
+ ec3907f865805db0:Ee3taYltUKtoBVD9C3XjQl9C6NXheip8Z9B69BpUv5JxVHL
+ ```
+"""
+ }
+ label {
+ en: "Initialize api_key file."
+ zh: "API 密钥初始化文件"
+ }
+ }
+}
diff --git a/apps/emqx_management/i18n/emqx_mgmt_api_publish_i18n.conf b/apps/emqx_management/i18n/emqx_mgmt_api_publish_i18n.conf
index d845bff4b..4ba7e8dba 100644
--- a/apps/emqx_management/i18n/emqx_mgmt_api_publish_i18n.conf
+++ b/apps/emqx_management/i18n/emqx_mgmt_api_publish_i18n.conf
@@ -63,12 +63,6 @@ result of each individual message in the batch.
zh: "MQTT 消息的 QoS"
}
}
- clientid {
- desc {
- en: "Each message can be published as if it is done on behalf of an MQTT client whos ID can be specified in this field."
- zh: "每个消息都可以带上一个 MQTT 客户端 ID,用于模拟 MQTT 客户端的发布行为。"
- }
- }
payload {
desc {
en: "The MQTT message payload."
diff --git a/apps/emqx_management/src/emqx_management.app.src b/apps/emqx_management/src/emqx_management.app.src
index c0cb05401..ccb53dac4 100644
--- a/apps/emqx_management/src/emqx_management.app.src
+++ b/apps/emqx_management/src/emqx_management.app.src
@@ -2,7 +2,7 @@
{application, emqx_management, [
{description, "EMQX Management API and CLI"},
% strict semver, bump manually!
- {vsn, "5.0.11"},
+ {vsn, "5.0.12"},
{modules, []},
{registered, [emqx_management_sup]},
{applications, [kernel, stdlib, emqx_plugins, minirest, emqx]},
diff --git a/apps/emqx_management/src/emqx_mgmt_api.erl b/apps/emqx_management/src/emqx_mgmt_api.erl
index 893007ebf..e46047521 100644
--- a/apps/emqx_management/src/emqx_mgmt_api.erl
+++ b/apps/emqx_management/src/emqx_mgmt_api.erl
@@ -20,7 +20,6 @@
-elvis([{elvis_style, dont_repeat_yourself, #{min_complexity => 100}}]).
--define(FRESH_SELECT, fresh_select).
-define(LONG_QUERY_TIMEOUT, 50000).
-export([
@@ -174,13 +173,12 @@ do_node_query(
case do_query(Node, QueryState) of
{error, {badrpc, R}} ->
{error, Node, {badrpc, R}};
- {Rows, NQueryState = #{continuation := ?FRESH_SELECT}} ->
- {_, NResultAcc} = accumulate_query_rows(Node, Rows, NQueryState, ResultAcc),
- NResultAcc;
- {Rows, NQueryState} ->
+ {Rows, NQueryState = #{complete := Complete}} ->
case accumulate_query_rows(Node, Rows, NQueryState, ResultAcc) of
{enough, NResultAcc} ->
- NResultAcc;
+ finalize_query(NResultAcc, NQueryState);
+ {_, NResultAcc} when Complete ->
+ finalize_query(NResultAcc, NQueryState);
{more, NResultAcc} ->
do_node_query(Node, NQueryState, NResultAcc)
end
@@ -212,8 +210,8 @@ cluster_query(Tab, QString, QSchema, MsFun, FmtFun) ->
end.
%% @private
-do_cluster_query([], _QueryState, ResultAcc) ->
- ResultAcc;
+do_cluster_query([], QueryState, ResultAcc) ->
+ finalize_query(ResultAcc, mark_complete(QueryState));
do_cluster_query(
[Node | Tail] = Nodes,
QueryState,
@@ -222,31 +220,29 @@ do_cluster_query(
case do_query(Node, QueryState) of
{error, {badrpc, R}} ->
{error, Node, {badrpc, R}};
- {Rows, NQueryState} ->
+ {Rows, NQueryState = #{complete := Complete}} ->
case accumulate_query_rows(Node, Rows, NQueryState, ResultAcc) of
{enough, NResultAcc} ->
- maybe_collect_total_from_tail_nodes(Tail, NQueryState, NResultAcc);
+ FQueryState = maybe_collect_total_from_tail_nodes(Tail, NQueryState),
+ FComplete = Complete andalso Tail =:= [],
+ finalize_query(NResultAcc, mark_complete(FQueryState, FComplete));
+ {more, NResultAcc} when not Complete ->
+ do_cluster_query(Nodes, NQueryState, NResultAcc);
+ {more, NResultAcc} when Tail =/= [] ->
+ do_cluster_query(Tail, reset_query_state(NQueryState), NResultAcc);
{more, NResultAcc} ->
- NextNodes =
- case NQueryState of
- #{continuation := ?FRESH_SELECT} -> Tail;
- _ -> Nodes
- end,
- do_cluster_query(NextNodes, NQueryState, NResultAcc)
+ finalize_query(NResultAcc, NQueryState)
end
end.
-maybe_collect_total_from_tail_nodes([], _QueryState, ResultAcc) ->
- ResultAcc;
-maybe_collect_total_from_tail_nodes(Nodes, QueryState, ResultAcc) ->
- case counting_total_fun(QueryState) of
- false ->
- ResultAcc;
- _Fun ->
- collect_total_from_tail_nodes(Nodes, QueryState, ResultAcc)
- end.
+maybe_collect_total_from_tail_nodes([], QueryState) ->
+ QueryState;
+maybe_collect_total_from_tail_nodes(Nodes, QueryState = #{total := _}) ->
+ collect_total_from_tail_nodes(Nodes, QueryState);
+maybe_collect_total_from_tail_nodes(_Nodes, QueryState) ->
+ QueryState.
-collect_total_from_tail_nodes(Nodes, QueryState, ResultAcc = #{total := TotalAcc}) ->
+collect_total_from_tail_nodes(Nodes, QueryState = #{total := TotalAcc}) ->
    %% XXX: badfun risk? if the FuzzyFun is an anonymous func in local node
case rpc:multicall(Nodes, ?MODULE, apply_total_query, [QueryState], ?LONG_QUERY_TIMEOUT) of
{_, [Node | _]} ->
@@ -257,7 +253,8 @@ collect_total_from_tail_nodes(Nodes, QueryState, ResultAcc = #{total := TotalAcc
[{Node, {badrpc, Reason}} | _] ->
{error, Node, {badrpc, Reason}};
[] ->
- ResultAcc#{total => ResL ++ TotalAcc}
+ NTotalAcc = maps:merge(TotalAcc, maps:from_list(ResL)),
+ QueryState#{total := NTotalAcc}
end
end.
@@ -266,13 +263,14 @@ collect_total_from_tail_nodes(Nodes, QueryState, ResultAcc = #{total := TotalAcc
%%--------------------------------------------------------------------
%% QueryState ::
-%% #{continuation := ets:continuation(),
+%% #{continuation => ets:continuation(),
%% page := pos_integer(),
%% limit := pos_integer(),
-%% total := [{node(), non_neg_integer()}],
+%% total => #{node() => non_neg_integer()},
%% table := atom(),
-%% qs := {Qs, Fuzzy} %% parsed query params
-%% msfun := query_to_match_spec_fun()
+%% qs := {Qs, Fuzzy}, %% parsed query params
+%% msfun := query_to_match_spec_fun(),
+%% complete := boolean()
%% }
init_query_state(Tab, QString, MsFun, _Meta = #{page := Page, limit := Limit}) ->
#{match_spec := Ms, fuzzy_fun := FuzzyFun} = erlang:apply(MsFun, [Tab, QString]),
@@ -285,17 +283,31 @@ init_query_state(Tab, QString, MsFun, _Meta = #{page := Page, limit := Limit}) -
true = is_list(Args),
{type, external} = erlang:fun_info(NamedFun, type)
end,
- #{
+ QueryState = #{
page => Page,
limit => Limit,
table => Tab,
qs => QString,
msfun => MsFun,
- mactch_spec => Ms,
+ match_spec => Ms,
fuzzy_fun => FuzzyFun,
- total => [],
- continuation => ?FRESH_SELECT
- }.
+ complete => false
+ },
+ case counting_total_fun(QueryState) of
+ false ->
+ QueryState;
+ Fun when is_function(Fun) ->
+ QueryState#{total => #{}}
+ end.
+
+reset_query_state(QueryState) ->
+ maps:remove(continuation, mark_complete(QueryState, false)).
+
+mark_complete(QueryState) ->
+ mark_complete(QueryState, true).
+
+mark_complete(QueryState, Complete) ->
+ QueryState#{complete => Complete}.
%% @private This function is exempt from BPAPI
do_query(Node, QueryState) when Node =:= node() ->
@@ -318,47 +330,50 @@ do_select(
Node,
QueryState0 = #{
table := Tab,
- mactch_spec := Ms,
- fuzzy_fun := FuzzyFun,
- continuation := Continuation,
- limit := Limit
+ match_spec := Ms,
+ limit := Limit,
+ complete := false
}
) ->
QueryState = maybe_apply_total_query(Node, QueryState0),
Result =
- case Continuation of
- ?FRESH_SELECT ->
+ case maps:get(continuation, QueryState, undefined) of
+ undefined ->
ets:select(Tab, Ms, Limit);
- _ ->
+ Continuation ->
%% XXX: Repair is necessary because we pass Continuation back
%% and forth through the nodes in the `do_cluster_query`
ets:select(ets:repair_continuation(Continuation, Ms))
end,
case Result of
- '$end_of_table' ->
- {[], QueryState#{continuation => ?FRESH_SELECT}};
+ {Rows, '$end_of_table'} ->
+ NRows = maybe_apply_fuzzy_filter(Rows, QueryState),
+ {NRows, mark_complete(QueryState)};
{Rows, NContinuation} ->
- NRows =
- case FuzzyFun of
- undefined ->
- Rows;
- {FilterFun, Args0} when is_function(FilterFun), is_list(Args0) ->
- lists:filter(
- fun(E) -> erlang:apply(FilterFun, [E | Args0]) end,
- Rows
- )
- end,
- {NRows, QueryState#{continuation => NContinuation}}
+ NRows = maybe_apply_fuzzy_filter(Rows, QueryState),
+ {NRows, QueryState#{continuation => NContinuation}};
+ '$end_of_table' ->
+ {[], mark_complete(QueryState)}
end.
-maybe_apply_total_query(Node, QueryState = #{total := TotalAcc}) ->
- case proplists:get_value(Node, TotalAcc, undefined) of
- undefined ->
- Total = apply_total_query(QueryState),
- QueryState#{total := [{Node, Total} | TotalAcc]};
- _ ->
- QueryState
- end.
+maybe_apply_fuzzy_filter(Rows, #{fuzzy_fun := undefined}) ->
+ Rows;
+maybe_apply_fuzzy_filter(Rows, #{fuzzy_fun := {FilterFun, Args}}) ->
+ lists:filter(
+ fun(E) -> erlang:apply(FilterFun, [E | Args]) end,
+ Rows
+ ).
+
+maybe_apply_total_query(Node, QueryState = #{total := Acc}) ->
+ case Acc of
+ #{Node := _} ->
+ QueryState;
+ #{} ->
+ NodeTotal = apply_total_query(QueryState),
+ QueryState#{total := Acc#{Node => NodeTotal}}
+ end;
+maybe_apply_total_query(_Node, QueryState = #{}) ->
+ QueryState.
apply_total_query(QueryState = #{table := Tab}) ->
case counting_total_fun(QueryState) of
@@ -369,9 +384,7 @@ apply_total_query(QueryState = #{table := Tab}) ->
Fun(Tab)
end.
-counting_total_fun(_QueryState = #{qs := {[], []}}) ->
- fun(Tab) -> ets:info(Tab, size) end;
-counting_total_fun(_QueryState = #{mactch_spec := Ms, fuzzy_fun := undefined}) ->
+counting_total_fun(_QueryState = #{match_spec := Ms, fuzzy_fun := undefined}) ->
%% XXX: Calculating the total number of data that match a certain
%% condition under a large table is very expensive because the
%% entire ETS table needs to be scanned.
@@ -385,20 +398,23 @@ counting_total_fun(_QueryState = #{mactch_spec := Ms, fuzzy_fun := undefined}) -
counting_total_fun(_QueryState = #{fuzzy_fun := FuzzyFun}) when FuzzyFun =/= undefined ->
%% XXX: Calculating the total number for a fuzzy searching is very very expensive
%% so it is not supported now
- false.
+ false;
+counting_total_fun(_QueryState = #{qs := {[], []}}) ->
+ fun(Tab) -> ets:info(Tab, size) end.
%% ResultAcc :: #{count := integer(),
%% cursor := integer(),
%% rows := [{node(), Rows :: list()}],
-%% total := [{node() => integer()}]
+%% overflow := boolean(),
+%% hasnext => boolean()
%% }
init_query_result() ->
- #{cursor => 0, count => 0, rows => [], total => []}.
+ #{cursor => 0, count => 0, rows => [], overflow => false}.
accumulate_query_rows(
Node,
Rows,
- _QueryState = #{page := Page, limit := Limit, total := TotalAcc},
+ _QueryState = #{page := Page, limit := Limit},
ResultAcc = #{cursor := Cursor, count := Count, rows := RowsAcc}
) ->
PageStart = (Page - 1) * Limit + 1,
@@ -406,24 +422,35 @@ accumulate_query_rows(
Len = length(Rows),
case Cursor + Len of
NCursor when NCursor < PageStart ->
- {more, ResultAcc#{cursor => NCursor, total => TotalAcc}};
+ {more, ResultAcc#{cursor => NCursor}};
NCursor when NCursor < PageEnd ->
+ SubRows = lists:nthtail(max(0, PageStart - Cursor - 1), Rows),
{more, ResultAcc#{
cursor => NCursor,
- count => Count + length(Rows),
- total => TotalAcc,
- rows => [{Node, Rows} | RowsAcc]
+ count => Count + length(SubRows),
+ rows => [{Node, SubRows} | RowsAcc]
}};
NCursor when NCursor >= PageEnd ->
SubRows = lists:sublist(Rows, Limit - Count),
{enough, ResultAcc#{
cursor => NCursor,
count => Count + length(SubRows),
- total => TotalAcc,
- rows => [{Node, SubRows} | RowsAcc]
+ rows => [{Node, SubRows} | RowsAcc],
+ % there are more rows than can fit in the page
+ overflow => (Limit - Count) < Len
}}
end.
+finalize_query(Result = #{overflow := Overflow}, QueryState = #{complete := Complete}) ->
+ HasNext = Overflow orelse not Complete,
+ maybe_accumulate_totals(Result#{hasnext => HasNext}, QueryState).
+
+maybe_accumulate_totals(Result, #{total := TotalAcc}) ->
+ QueryTotal = maps:fold(fun(_Node, T, N) -> N + T end, 0, TotalAcc),
+ Result#{total => QueryTotal};
+maybe_accumulate_totals(Result, _QueryState) ->
+ Result.
+
%%--------------------------------------------------------------------
%% Internal Functions
%%--------------------------------------------------------------------
@@ -520,16 +547,22 @@ is_fuzzy_key(<<"match_", _/binary>>) ->
is_fuzzy_key(_) ->
false.
-format_query_result(_FmtFun, _Meta, Error = {error, _Node, _Reason}) ->
+format_query_result(_FmtFun, _MetaIn, Error = {error, _Node, _Reason}) ->
Error;
format_query_result(
- FmtFun, Meta, _ResultAcc = #{total := TotalAcc, rows := RowsAcc}
+ FmtFun, MetaIn, ResultAcc = #{hasnext := HasNext, rows := RowsAcc}
) ->
- Total = lists:foldr(fun({_Node, T}, N) -> N + T end, 0, TotalAcc),
+ Meta =
+ case ResultAcc of
+ #{total := QueryTotal} ->
+                %% The `count` is used in the HTTP API to indicate the total number of
+                %% records that can be read
+ MetaIn#{hasnext => HasNext, count => QueryTotal};
+ #{} ->
+ MetaIn#{hasnext => HasNext}
+ end,
#{
- %% The `count` is used in HTTP API to indicate the total number of
- %% queries that can be read
- meta => Meta#{count => Total},
+ meta => Meta,
data => lists:flatten(
lists:foldl(
fun({Node, Rows}, Acc) ->
@@ -552,7 +585,7 @@ parse_pager_params(Params) ->
Limit = b2i(limit(Params)),
case Page > 0 andalso Limit > 0 of
true ->
- #{page => Page, limit => Limit, count => 0};
+ #{page => Page, limit => Limit};
false ->
false
end.
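As a worked example of the paging arithmetic above (a sketch, assuming page = 2 and limit = 3, so the page window is rows 4..6):

%% Batch 1 of 5 rows: NCursor = 5 < PageEnd = 6, keep rows 4..5    => {more,   count = 2}
%% Batch 2 of 5 rows: NCursor = 10 >= 6, keep 1 more row,
%%                    overflow = (Limit - Count) < Len = 1 < 5     => {enough, count = 3}
%% finalize_query/2 then derives hasnext = overflow orelse not complete = true.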
diff --git a/apps/emqx_management/src/emqx_mgmt_api_app.erl b/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl
similarity index 99%
rename from apps/emqx_management/src/emqx_mgmt_api_app.erl
rename to apps/emqx_management/src/emqx_mgmt_api_api_keys.erl
index d317bea70..c39b11273 100644
--- a/apps/emqx_management/src/emqx_mgmt_api_app.erl
+++ b/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl
@@ -13,7 +13,7 @@
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
--module(emqx_mgmt_api_app).
+-module(emqx_mgmt_api_api_keys).
-behaviour(minirest_api).
diff --git a/apps/emqx_management/src/emqx_mgmt_api_clients.erl b/apps/emqx_management/src/emqx_mgmt_api_clients.erl
index 8eb4b26e9..7c45206fd 100644
--- a/apps/emqx_management/src/emqx_mgmt_api_clients.erl
+++ b/apps/emqx_management/src/emqx_mgmt_api_clients.erl
@@ -549,8 +549,8 @@ fields(keepalive) ->
];
fields(subscribe) ->
[
- {topic, hoconsc:mk(binary(), #{desc => <<"Topic">>})},
- {qos, hoconsc:mk(emqx_schema:qos(), #{desc => <<"QoS">>})},
+ {topic, hoconsc:mk(binary(), #{required => true, desc => <<"Topic">>})},
+ {qos, hoconsc:mk(emqx_schema:qos(), #{default => 0, desc => <<"QoS">>})},
{nl, hoconsc:mk(integer(), #{default => 0, desc => <<"No Local">>})},
{rap, hoconsc:mk(integer(), #{default => 0, desc => <<"Retain as Published">>})},
{rh, hoconsc:mk(integer(), #{default => 0, desc => <<"Retain Handling">>})}
@@ -718,15 +718,18 @@ subscribe(#{clientid := ClientID, topic := Topic} = Sub) ->
end.
subscribe_batch(#{clientid := ClientID, topics := Topics}) ->
- case lookup(#{clientid => ClientID}) of
- {200, _} ->
+    %% We use emqx_channel instead of emqx_channel_info (which emqx_mgmt:lookup_client/2 relies on),
+    %% because the emqx_channel_info table is only populated after the `client.connected` hook
+    %% has returned, so subscribing to topics from within that hook would fail.
+ case ets:lookup(emqx_channel, ClientID) of
+ [] ->
+ {404, ?CLIENT_ID_NOT_FOUND};
+ _ ->
ArgList = [
[ClientID, Topic, maps:with([qos, nl, rap, rh], Sub)]
|| #{topic := Topic} = Sub <- Topics
],
- {200, emqx_mgmt_util:batch_operation(?MODULE, do_subscribe, ArgList)};
- {404, ?CLIENT_ID_NOT_FOUND} ->
- {404, ?CLIENT_ID_NOT_FOUND}
+ {200, emqx_mgmt_util:batch_operation(?MODULE, do_subscribe, ArgList)}
end.
unsubscribe(#{clientid := ClientID, topic := Topic}) ->
diff --git a/apps/emqx_management/src/emqx_mgmt_api_configs.erl b/apps/emqx_management/src/emqx_mgmt_api_configs.erl
index eec5793d0..d9cdf6477 100644
--- a/apps/emqx_management/src/emqx_mgmt_api_configs.erl
+++ b/apps/emqx_management/src/emqx_mgmt_api_configs.erl
@@ -62,7 +62,9 @@
<<"event_message">>,
<<"prometheus">>,
<<"telemetry">>,
- <<"listeners">>
+ <<"listeners">>,
+ <<"license">>,
+ <<"api_key">>
] ++ global_zone_roots()
).
diff --git a/apps/emqx_management/src/emqx_mgmt_api_key_schema.erl b/apps/emqx_management/src/emqx_mgmt_api_key_schema.erl
new file mode 100644
index 000000000..556e4308f
--- /dev/null
+++ b/apps/emqx_management/src/emqx_mgmt_api_key_schema.erl
@@ -0,0 +1,44 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+-module(emqx_mgmt_api_key_schema).
+
+-include_lib("hocon/include/hoconsc.hrl").
+
+-export([
+ roots/0,
+ fields/1,
+ namespace/0,
+ desc/1
+]).
+
+namespace() -> api_key.
+roots() -> ["api_key"].
+
+fields("api_key") ->
+ [
+ {bootstrap_file,
+ ?HOCON(
+ binary(),
+ #{
+ desc => ?DESC(bootstrap_file),
+ required => false,
+ default => <<>>
+ }
+ )}
+ ].
+
+desc("api_key") ->
+ ?DESC(api_key).
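A minimal usage sketch for the new setting, mirroring the test suite further below (the file path is hypothetical; each line of the file holds `<api_key>:<api_secret>`):

%% Point api_key.bootstrap_file at a key file and (re)load it.
_ = emqx:update_config([api_key, bootstrap_file], "etc/bootstrap_api_keys.txt"),
ok = emqx_mgmt_auth:init_bootstrap_file().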
diff --git a/apps/emqx_management/src/emqx_mgmt_api_publish.erl b/apps/emqx_management/src/emqx_mgmt_api_publish.erl
index 672de661b..245b56c1d 100644
--- a/apps/emqx_management/src/emqx_mgmt_api_publish.erl
+++ b/apps/emqx_management/src/emqx_mgmt_api_publish.erl
@@ -104,9 +104,7 @@ fields(message) ->
})},
{clientid,
hoconsc:mk(binary(), #{
- desc => ?DESC(clientid),
- required => false,
- example => <<"api_example_client">>
+ deprecated => {since, "v5.0.14"}
})},
{payload,
hoconsc:mk(binary(), #{
@@ -254,7 +252,6 @@ is_ok_deliver({_NodeOrShare, _MatchedTopic, {error, _}}) -> false.
%% %%%%%% Below error codes are not implemented so far %%%%
%%
%% If HTTP request passes HTTP authentication, it is considered trusted.
-%% In the future, we may choose to check ACL for the provided MQTT Client ID
%% 135 Not authorized 401
%%
%% %%%%%% Below error codes are not applicable %%%%%%%
@@ -326,7 +323,6 @@ make_message(Map) ->
Encoding = maps:get(<<"payload_encoding">>, Map, plain),
case decode_payload(Encoding, maps:get(<<"payload">>, Map)) of
{ok, Payload} ->
- From = maps:get(<<"clientid">>, Map, http_api),
QoS = maps:get(<<"qos">>, Map, 0),
Topic = maps:get(<<"topic">>, Map),
Retain = maps:get(<<"retain">>, Map, false),
@@ -346,7 +342,9 @@ make_message(Map) ->
error:_Reason ->
throw(invalid_topic_name)
end,
- Message = emqx_message:make(From, QoS, Topic, Payload, #{retain => Retain}, Headers),
+ Message = emqx_message:make(
+ http_api, QoS, Topic, Payload, #{retain => Retain}, Headers
+ ),
Size = emqx_message:estimate_size(Message),
(Size > size_limit()) andalso throw(packet_too_large),
{ok, Message};
diff --git a/apps/emqx_management/src/emqx_mgmt_app.erl b/apps/emqx_management/src/emqx_mgmt_app.erl
index 164ac1b36..137f4502c 100644
--- a/apps/emqx_management/src/emqx_mgmt_app.erl
+++ b/apps/emqx_management/src/emqx_mgmt_app.erl
@@ -28,10 +28,15 @@
-include("emqx_mgmt.hrl").
start(_Type, _Args) ->
- {ok, Sup} = emqx_mgmt_sup:start_link(),
ok = mria_rlog:wait_for_shards([?MANAGEMENT_SHARD], infinity),
- emqx_mgmt_cli:load(),
- {ok, Sup}.
+ case emqx_mgmt_auth:init_bootstrap_file() of
+ ok ->
+ {ok, Sup} = emqx_mgmt_sup:start_link(),
+ ok = emqx_mgmt_cli:load(),
+ {ok, Sup};
+ {error, Reason} ->
+ {error, Reason}
+ end.
stop(_State) ->
ok.
diff --git a/apps/emqx_management/src/emqx_mgmt_auth.erl b/apps/emqx_management/src/emqx_mgmt_auth.erl
index 3d97e53bc..6f2a27414 100644
--- a/apps/emqx_management/src/emqx_mgmt_auth.erl
+++ b/apps/emqx_management/src/emqx_mgmt_auth.erl
@@ -15,6 +15,7 @@
%%--------------------------------------------------------------------
-module(emqx_mgmt_auth).
-include_lib("emqx/include/emqx.hrl").
+-include_lib("emqx/include/logger.hrl").
%% API
-export([mnesia/1]).
@@ -25,7 +26,8 @@
read/1,
update/4,
delete/1,
- list/0
+ list/0,
+ init_bootstrap_file/0
]).
-export([authorize/3]).
@@ -34,9 +36,14 @@
-export([
do_update/4,
do_delete/1,
- do_create_app/3
+ do_create_app/3,
+ do_force_create_app/3
]).
+-ifdef(TEST).
+-export([create/5]).
+-endif.
+
-define(APP, emqx_app).
-record(?APP, {
@@ -45,7 +52,7 @@
api_secret_hash = <<>> :: binary() | '_',
enable = true :: boolean() | '_',
desc = <<>> :: binary() | '_',
- expired_at = 0 :: integer() | undefined | '_',
+ expired_at = 0 :: integer() | undefined | infinity | '_',
created_at = 0 :: integer() | '_'
}).
@@ -58,9 +65,19 @@ mnesia(boot) ->
{attributes, record_info(fields, ?APP)}
]).
+-spec init_bootstrap_file() -> ok | {error, _}.
+init_bootstrap_file() ->
+ File = bootstrap_file(),
+ ?SLOG(debug, #{msg => "init_bootstrap_api_keys_from_file", file => File}),
+ init_bootstrap_file(File).
+
create(Name, Enable, ExpiredAt, Desc) ->
- case mnesia:table_info(?APP, size) < 30 of
- true -> create_app(Name, Enable, ExpiredAt, Desc);
+ ApiSecret = generate_api_secret(),
+ create(Name, ApiSecret, Enable, ExpiredAt, Desc).
+
+create(Name, ApiSecret, Enable, ExpiredAt, Desc) ->
+ case mnesia:table_info(?APP, size) < 100 of
+ true -> create_app(Name, ApiSecret, Enable, ExpiredAt, Desc);
false -> {error, "Maximum ApiKey"}
end.
@@ -148,8 +165,7 @@ to_map(#?APP{name = N, api_key = K, enable = E, expired_at = ET, created_at = CT
is_expired(undefined) -> false;
is_expired(ExpiredTime) -> ExpiredTime < erlang:system_time(second).
-create_app(Name, Enable, ExpiredAt, Desc) ->
- ApiSecret = generate_api_secret(),
+create_app(Name, ApiSecret, Enable, ExpiredAt, Desc) ->
App =
#?APP{
name = Name,
@@ -161,14 +177,18 @@ create_app(Name, Enable, ExpiredAt, Desc) ->
api_key = list_to_binary(emqx_misc:gen_id(16))
},
case create_app(App) of
- {error, api_key_already_existed} -> create_app(Name, Enable, ExpiredAt, Desc);
- {ok, Res} -> {ok, Res#{api_secret => ApiSecret}};
- Error -> Error
+ {ok, Res} ->
+ {ok, Res#{api_secret => ApiSecret}};
+ Error ->
+ Error
end.
create_app(App = #?APP{api_key = ApiKey, name = Name}) ->
trans(fun ?MODULE:do_create_app/3, [App, ApiKey, Name]).
+force_create_app(NamePrefix, App = #?APP{api_key = ApiKey}) ->
+ trans(fun ?MODULE:do_force_create_app/3, [App, ApiKey, NamePrefix]).
+
do_create_app(App, ApiKey, Name) ->
case mnesia:read(?APP, Name) of
[_] ->
@@ -183,6 +203,22 @@ do_create_app(App, ApiKey, Name) ->
end
end.
+do_force_create_app(App, ApiKey, NamePrefix) ->
+ case mnesia:match_object(?APP, #?APP{api_key = ApiKey, _ = '_'}, read) of
+ [] ->
+ NewName = generate_unique_name(NamePrefix),
+ ok = mnesia:write(App#?APP{name = NewName});
+ [#?APP{name = Name}] ->
+ ok = mnesia:write(App#?APP{name = Name})
+ end.
+
+generate_unique_name(NamePrefix) ->
+ New = list_to_binary(NamePrefix ++ emqx_misc:gen_id(16)),
+ case mnesia:read(?APP, New) of
+ [] -> New;
+ _ -> generate_unique_name(NamePrefix)
+ end.
+
trans(Fun, Args) ->
case mria:transaction(?COMMON_SHARD, Fun, Args) of
{atomic, Res} -> {ok, Res};
@@ -192,3 +228,84 @@ trans(Fun, Args) ->
generate_api_secret() ->
Random = crypto:strong_rand_bytes(32),
emqx_base62:encode(Random).
+
+bootstrap_file() ->
+ case emqx:get_config([api_key, bootstrap_file], <<>>) of
+ %% For compatible remove until 5.1.0
+ <<>> ->
+ emqx:get_config([dashboard, bootstrap_users_file], <<>>);
+ File ->
+ File
+ end.
+
+init_bootstrap_file(<<>>) ->
+ ok;
+init_bootstrap_file(File) ->
+ case file:open(File, [read, binary]) of
+ {ok, Dev} ->
+ {ok, MP} = re:compile(<<"(\.+):(\.+$)">>, [ungreedy]),
+ init_bootstrap_file(File, Dev, MP);
+ {error, Reason0} ->
+ Reason = emqx_misc:explain_posix(Reason0),
+ ?SLOG(
+ error,
+ #{
+ msg => "failed_to_open_the_bootstrap_file",
+ file => File,
+ reason => Reason
+ }
+ ),
+ {error, Reason}
+ end.
+
+init_bootstrap_file(File, Dev, MP) ->
+ try
+ add_bootstrap_file(File, Dev, MP, 1)
+ catch
+ throw:Error -> {error, Error};
+ Type:Reason:Stacktrace -> {error, {Type, Reason, Stacktrace}}
+ after
+ file:close(Dev)
+ end.
+
+-define(BOOTSTRAP_TAG, <<"Bootstrapped From File">>).
+
+add_bootstrap_file(File, Dev, MP, Line) ->
+ case file:read_line(Dev) of
+ {ok, Bin} ->
+ case re:run(Bin, MP, [global, {capture, all_but_first, binary}]) of
+ {match, [[AppKey, ApiSecret]]} ->
+ App =
+ #?APP{
+ enable = true,
+ expired_at = infinity,
+ desc = ?BOOTSTRAP_TAG,
+ created_at = erlang:system_time(second),
+ api_secret_hash = emqx_dashboard_admin:hash(ApiSecret),
+ api_key = AppKey
+ },
+ case force_create_app("from_bootstrap_file_", App) of
+ {ok, ok} ->
+ add_bootstrap_file(File, Dev, MP, Line + 1);
+ {error, Reason} ->
+ throw(#{file => File, line => Line, content => Bin, reason => Reason})
+ end;
+ _ ->
+ Reason = "invalid_format",
+ ?SLOG(
+ error,
+ #{
+ msg => "failed_to_load_bootstrap_file",
+ file => File,
+ line => Line,
+ content => Bin,
+ reason => Reason
+ }
+ ),
+ throw(#{file => File, line => Line, content => Bin, reason => Reason})
+ end;
+ eof ->
+ ok;
+ {error, Reason} ->
+ throw(#{file => File, line => Line, reason => Reason})
+ end.
diff --git a/apps/emqx_management/test/emqx_mgmt_api_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_SUITE.erl
index a14305d8b..a8bbfa6d9 100644
--- a/apps/emqx_management/test/emqx_mgmt_api_SUITE.erl
+++ b/apps/emqx_management/test/emqx_mgmt_api_SUITE.erl
@@ -88,10 +88,9 @@ t_cluster_query(_Config) ->
%% fuzzy searching can't return total
{200, ClientsNode2} = query_clients(Node2, #{<<"like_username">> => <<"corenode2">>}),
- ?assertMatch(
- #{count := 0},
- maps:get(meta, ClientsNode2)
- ),
+ MetaNode2 = maps:get(meta, ClientsNode2),
+ ?assertNotMatch(#{count := _}, MetaNode2),
+ ?assertMatch(#{hasnext := false}, MetaNode2),
?assertMatch(10, length(maps:get(data, ClientsNode2))),
_ = lists:foreach(fun(C) -> emqtt:disconnect(C) end, ClientLs1),
diff --git a/apps/emqx_management/test/emqx_mgmt_api_app_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_api_keys_SUITE.erl
similarity index 71%
rename from apps/emqx_management/test/emqx_mgmt_api_app_SUITE.erl
rename to apps/emqx_management/test/emqx_mgmt_api_api_keys_SUITE.erl
index a3aaf8f58..241a73dc4 100644
--- a/apps/emqx_management/test/emqx_mgmt_api_app_SUITE.erl
+++ b/apps/emqx_management/test/emqx_mgmt_api_api_keys_SUITE.erl
@@ -13,7 +13,7 @@
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
--module(emqx_mgmt_api_app_SUITE).
+-module(emqx_mgmt_api_api_keys_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
@@ -25,15 +25,62 @@ suite() -> [{timetrap, {minutes, 1}}].
groups() ->
[
{parallel, [parallel], [t_create, t_update, t_delete, t_authorize, t_create_unexpired_app]},
- {sequence, [], [t_create_failed]}
+ {sequence, [], [t_bootstrap_file, t_create_failed]}
].
init_per_suite(Config) ->
- emqx_mgmt_api_test_util:init_suite(),
+ emqx_mgmt_api_test_util:init_suite([emqx_conf]),
Config.
end_per_suite(_) ->
- emqx_mgmt_api_test_util:end_suite().
+ emqx_mgmt_api_test_util:end_suite([emqx_conf]).
+
+t_bootstrap_file(_) ->
+ TestPath = <<"/api/v5/status">>,
+ Bin = <<"test-1:secret-1\ntest-2:secret-2">>,
+ File = "./bootstrap_api_keys.txt",
+ ok = file:write_file(File, Bin),
+ emqx:update_config([api_key, bootstrap_file], File),
+ ok = emqx_mgmt_auth:init_bootstrap_file(),
+ ?assertEqual(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-1">>, <<"secret-1">>)),
+ ?assertEqual(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-2">>, <<"secret-2">>)),
+ ?assertMatch({error, _}, emqx_mgmt_auth:authorize(TestPath, <<"test-2">>, <<"secret-1">>)),
+
+ %% relaunch to check if the table is changed.
+ Bin1 = <<"test-1:new-secret-1\ntest-2:new-secret-2">>,
+ ok = file:write_file(File, Bin1),
+ ok = emqx_mgmt_auth:init_bootstrap_file(),
+ ?assertMatch({error, _}, emqx_mgmt_auth:authorize(TestPath, <<"test-1">>, <<"secret-1">>)),
+ ?assertMatch({error, _}, emqx_mgmt_auth:authorize(TestPath, <<"test-2">>, <<"secret-2">>)),
+ ?assertEqual(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-1">>, <<"new-secret-1">>)),
+ ?assertEqual(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-2">>, <<"new-secret-2">>)),
+
+ %% Compatibility
+ Bin2 = <<"test-3:new-secret-3\ntest-4:new-secret-4">>,
+ ok = file:write_file(File, Bin2),
+ emqx:update_config([api_key, bootstrap_file], <<>>),
+ emqx:update_config([dashboard, bootstrap_users_file], File),
+ ok = emqx_mgmt_auth:init_bootstrap_file(),
+ ?assertMatch(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-1">>, <<"new-secret-1">>)),
+ ?assertMatch(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-2">>, <<"new-secret-2">>)),
+ ?assertEqual(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-3">>, <<"new-secret-3">>)),
+ ?assertEqual(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-4">>, <<"new-secret-4">>)),
+
+ %% not found
+ NotFoundFile = "./bootstrap_apps_not_exist.txt",
+ emqx:update_config([api_key, bootstrap_file], NotFoundFile),
+ ?assertMatch({error, "No such file or directory"}, emqx_mgmt_auth:init_bootstrap_file()),
+
+ %% bad format
+ BadBin = <<"test-1:secret-11\ntest-2 secret-12">>,
+ ok = file:write_file(File, BadBin),
+ emqx:update_config([api_key, bootstrap_file], File),
+ ?assertMatch({error, #{reason := "invalid_format"}}, emqx_mgmt_auth:init_bootstrap_file()),
+ ?assertEqual(ok, emqx_mgmt_auth:authorize(TestPath, <<"test-1">>, <<"secret-11">>)),
+ ?assertMatch({error, _}, emqx_mgmt_auth:authorize(TestPath, <<"test-2">>, <<"secret-12">>)),
+ emqx:update_config([api_key, bootstrap_file], <<>>),
+ emqx:update_config([dashboard, bootstrap_users_file], <<>>),
+ ok.
t_create(_Config) ->
Name = <<"EMQX-API-KEY-1">>,
@@ -69,7 +116,7 @@ t_create_failed(_Config) ->
?assertEqual(BadRequest, create_app(LongName)),
{ok, List} = list_app(),
- CreateNum = 30 - erlang:length(List),
+ CreateNum = 100 - erlang:length(List),
Names = lists:map(
fun(Seq) ->
<<"EMQX-API-FAILED-KEY-", (integer_to_binary(Seq))/binary>>
@@ -178,21 +225,23 @@ t_create_unexpired_app(_Config) ->
ok.
list_app() ->
+ AuthHeader = emqx_dashboard_SUITE:auth_header_(),
Path = emqx_mgmt_api_test_util:api_path(["api_key"]),
- case emqx_mgmt_api_test_util:request_api(get, Path) of
+ case emqx_mgmt_api_test_util:request_api(get, Path, AuthHeader) of
{ok, Apps} -> {ok, emqx_json:decode(Apps, [return_maps])};
Error -> Error
end.
read_app(Name) ->
+ AuthHeader = emqx_dashboard_SUITE:auth_header_(),
Path = emqx_mgmt_api_test_util:api_path(["api_key", Name]),
- case emqx_mgmt_api_test_util:request_api(get, Path) of
+ case emqx_mgmt_api_test_util:request_api(get, Path, AuthHeader) of
{ok, Res} -> {ok, emqx_json:decode(Res, [return_maps])};
Error -> Error
end.
create_app(Name) ->
- AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
+ AuthHeader = emqx_dashboard_SUITE:auth_header_(),
Path = emqx_mgmt_api_test_util:api_path(["api_key"]),
ExpiredAt = to_rfc3339(erlang:system_time(second) + 1000),
App = #{
@@ -207,7 +256,7 @@ create_app(Name) ->
end.
create_unexpired_app(Name, Params) ->
- AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
+ AuthHeader = emqx_dashboard_SUITE:auth_header_(),
Path = emqx_mgmt_api_test_util:api_path(["api_key"]),
App = maps:merge(#{name => Name, desc => <<"Note"/utf8>>, enable => true}, Params),
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, App) of
@@ -216,11 +265,12 @@ create_unexpired_app(Name, Params) ->
end.
delete_app(Name) ->
+ AuthHeader = emqx_dashboard_SUITE:auth_header_(),
DeletePath = emqx_mgmt_api_test_util:api_path(["api_key", Name]),
- emqx_mgmt_api_test_util:request_api(delete, DeletePath).
+ emqx_mgmt_api_test_util:request_api(delete, DeletePath, AuthHeader).
update_app(Name, Change) ->
- AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
+ AuthHeader = emqx_dashboard_SUITE:auth_header_(),
UpdatePath = emqx_mgmt_api_test_util:api_path(["api_key", Name]),
case emqx_mgmt_api_test_util:request_api(put, UpdatePath, "", AuthHeader, Change) of
{ok, Update} -> {ok, emqx_json:decode(Update, [return_maps])};
diff --git a/apps/emqx_management/test/emqx_mgmt_api_listeners_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_listeners_SUITE.erl
index e5c47ac4d..9a8824e7d 100644
--- a/apps/emqx_management/test/emqx_mgmt_api_listeners_SUITE.erl
+++ b/apps/emqx_management/test/emqx_mgmt_api_listeners_SUITE.erl
@@ -32,6 +32,25 @@ end_per_suite(_) ->
emqx_conf:remove([listeners, tcp, new1], #{override_to => local}),
emqx_mgmt_api_test_util:end_suite([emqx_conf]).
+t_max_connection_default(_Config) ->
+ emqx_mgmt_api_test_util:end_suite([emqx_conf]),
+ Etc = filename:join(["etc", "emqx.conf.all"]),
+ ConfFile = emqx_common_test_helpers:app_path(emqx_conf, Etc),
+ Bin = <<"listeners.tcp.max_connection_test {bind = \"0.0.0.0:3883\"}">>,
+ ok = file:write_file(ConfFile, Bin, [append]),
+ emqx_mgmt_api_test_util:init_suite([emqx_conf]),
+ %% Check that infinity is returned as a binary, not an atom.
+ #{<<"listeners">> := Listeners} = emqx_mgmt_api_listeners:do_list_listeners(),
+ Target = lists:filter(
+ fun(#{<<"id">> := Id}) -> Id =:= 'tcp:max_connection_test' end,
+ Listeners
+ ),
+ ?assertMatch([#{<<"max_connections">> := <<"infinity">>}], Target),
+ NewPath = emqx_mgmt_api_test_util:api_path(["listeners", "tcp:max_connection_test"]),
+ ?assertMatch(#{<<"max_connections">> := <<"infinity">>}, request(get, NewPath, [], [])),
+ emqx_conf:remove([listeners, tcp, max_connection_test], #{override_to => cluster}),
+ ok.
+
t_list_listeners(_) ->
Path = emqx_mgmt_api_test_util:api_path(["listeners"]),
Res = request(get, Path, [], []),
@@ -54,12 +73,14 @@ t_list_listeners(_) ->
OriginListener2 = maps:remove(<<"id">>, OriginListener),
NewConf = OriginListener2#{
<<"name">> => <<"new">>,
- <<"bind">> => <<"0.0.0.0:2883">>
+ <<"bind">> => <<"0.0.0.0:2883">>,
+ <<"max_connections">> := <<"infinity">>
},
Create = request(post, Path, [], NewConf),
?assertEqual(lists:sort(maps:keys(OriginListener)), lists:sort(maps:keys(Create))),
Get1 = request(get, NewPath, [], []),
?assertMatch(Create, Get1),
+ ?assertMatch(#{<<"max_connections">> := <<"infinity">>}, Create),
?assert(is_running(NewListenerId)),
%% delete
@@ -130,6 +151,60 @@ t_api_listeners_list_not_ready(_Config) ->
emqx_common_test_helpers:stop_slave(Node2)
end.
+t_clear_certs(_) ->
+ ListenerId = <<"ssl:default">>,
+ NewListenerId = <<"ssl:clear">>,
+
+ OriginPath = emqx_mgmt_api_test_util:api_path(["listeners", ListenerId]),
+ NewPath = emqx_mgmt_api_test_util:api_path(["listeners", NewListenerId]),
+ ConfTempT = request(get, OriginPath, [], []),
+ ConfTemp = ConfTempT#{
+ <<"id">> => NewListenerId,
+ <<"bind">> => <<"0.0.0.0:2883">>
+ },
+
+ %% create, make sure the cert files are created
+ NewConf = emqx_map_lib:deep_put(
+ [<<"ssl_options">>, <<"certfile">>], ConfTemp, cert_file("certfile")
+ ),
+ NewConf2 = emqx_map_lib:deep_put(
+ [<<"ssl_options">>, <<"keyfile">>], NewConf, cert_file("keyfile")
+ ),
+
+ _ = request(post, NewPath, [], NewConf2),
+ ListResult1 = list_pem_dir("ssl", "clear"),
+ ?assertMatch({ok, [_, _]}, ListResult1),
+
+ %% update
+ UpdateConf = emqx_map_lib:deep_put(
+ [<<"ssl_options">>, <<"keyfile">>], NewConf2, cert_file("keyfile2")
+ ),
+ _ = request(put, NewPath, [], UpdateConf),
+ ListResult2 = list_pem_dir("ssl", "clear"),
+
+ %% make sure the old cert file is deleted
+ ?assertMatch({ok, [_, _]}, ListResult2),
+
+ {ok, ResultList1} = ListResult1,
+ {ok, ResultList2} = ListResult2,
+
+ FindKeyFile = fun(List) ->
+ case lists:search(fun(E) -> lists:prefix("key", E) end, List) of
+ {value, Value} ->
+ Value;
+ _ ->
+ ?assert(false, "Can't find keyfile")
+ end
+ end,
+
+ %% check the keyfile has changed
+ ?assertNotEqual(FindKeyFile(ResultList1), FindKeyFile(ResultList2)),
+
+ %% remove, check all cert files are deleted
+ _ = delete(NewPath),
+ ?assertMatch({error, not_dir}, list_pem_dir("ssl", "clear")),
+ ok.
+
get_tcp_listeners(Node) ->
Query = #{query_string => #{<<"type">> => tcp}},
{200, L} = rpc:call(Node, emqx_mgmt_api_listeners, list_listeners, [get, Query]),
@@ -293,3 +368,21 @@ listener_stats(Listener, ExpectedStats) ->
is_running(Id) ->
emqx_listeners:is_running(binary_to_atom(Id)).
+
+list_pem_dir(Type, Name) ->
+ ListenerDir = emqx_listeners:certs_dir(Type, Name),
+ Dir = filename:join([emqx:mutable_certs_dir(), ListenerDir]),
+ case filelib:is_dir(Dir) of
+ true ->
+ file:list_dir(Dir);
+ _ ->
+ {error, not_dir}
+ end.
+
+data_file(Name) ->
+ Dir = code:lib_dir(emqx, test),
+ {ok, Bin} = file:read_file(filename:join([Dir, "data", Name])),
+ Bin.
+
+cert_file(Name) ->
+ data_file(filename:join(["certs", Name])).
diff --git a/apps/emqx_management/test/emqx_mgmt_api_subscription_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_subscription_SUITE.erl
index 965ed0997..2ab213e30 100644
--- a/apps/emqx_management/test/emqx_mgmt_api_subscription_SUITE.erl
+++ b/apps/emqx_management/test/emqx_mgmt_api_subscription_SUITE.erl
@@ -44,9 +44,8 @@ init_per_suite(Config) ->
end_per_suite(_) ->
emqx_mgmt_api_test_util:end_suite().
-t_subscription_api(_) ->
- {ok, Client} = emqtt:start_link(#{username => ?USERNAME, clientid => ?CLIENTID, proto_ver => v5}),
- {ok, _} = emqtt:connect(Client),
+t_subscription_api(Config) ->
+ Client = proplists:get_value(client, Config),
{ok, _, _} = emqtt:subscribe(
Client, [
{?TOPIC1, [{rh, ?TOPIC1RH}, {rap, ?TOPIC1RAP}, {nl, ?TOPIC1NL}, {qos, ?TOPIC1QOS}]}
@@ -84,40 +83,78 @@ t_subscription_api(_) ->
?assertEqual(maps:get(<<"topic">>, Subscriptions2), ?TOPIC2),
?assertEqual(maps:get(<<"clientid">>, Subscriptions2), ?CLIENTID),
- QS = uri_string:compose_query([
+ QS = [
{"clientid", ?CLIENTID},
{"topic", ?TOPIC2_TOPIC_ONLY},
{"node", atom_to_list(node())},
{"qos", "0"},
{"share_group", "test_group"},
{"match_topic", "t/#"}
- ]),
+ ],
Headers = emqx_mgmt_api_test_util:auth_header_(),
- {ok, ResponseTopic2} = emqx_mgmt_api_test_util:request_api(get, Path, QS, Headers),
- DataTopic2 = emqx_json:decode(ResponseTopic2, [return_maps]),
- Meta2 = maps:get(<<"meta">>, DataTopic2),
+ DataTopic2 = #{<<"meta">> := Meta2} = request_json(get, QS, Headers),
?assertEqual(1, maps:get(<<"page">>, Meta2)),
?assertEqual(emqx_mgmt:max_row_limit(), maps:get(<<"limit">>, Meta2)),
?assertEqual(1, maps:get(<<"count">>, Meta2)),
SubscriptionsList2 = maps:get(<<"data">>, DataTopic2),
- ?assertEqual(length(SubscriptionsList2), 1),
+ ?assertEqual(length(SubscriptionsList2), 1).
- MatchQs = uri_string:compose_query([
+t_subscription_fuzzy_search(Config) ->
+ Client = proplists:get_value(client, Config),
+ Topics = [
+ <<"t/foo">>,
+ <<"t/foo/bar">>,
+ <<"t/foo/baz">>,
+ <<"topic/foo/bar">>,
+ <<"topic/foo/baz">>
+ ],
+ _ = [{ok, _, _} = emqtt:subscribe(Client, T) || T <- Topics],
+
+ Headers = emqx_mgmt_api_test_util:auth_header_(),
+ MatchQs = [
{"clientid", ?CLIENTID},
{"node", atom_to_list(node())},
- {"qos", "0"},
{"match_topic", "t/#"}
- ]),
+ ],
- {ok, MatchRes} = emqx_mgmt_api_test_util:request_api(get, Path, MatchQs, Headers),
- MatchData = emqx_json:decode(MatchRes, [return_maps]),
- MatchMeta = maps:get(<<"meta">>, MatchData),
- ?assertEqual(1, maps:get(<<"page">>, MatchMeta)),
- ?assertEqual(emqx_mgmt:max_row_limit(), maps:get(<<"limit">>, MatchMeta)),
- %% count equals 0 in fuzzy searching
- ?assertEqual(0, maps:get(<<"count">>, MatchMeta)),
- MatchSubs = maps:get(<<"data">>, MatchData),
- ?assertEqual(1, length(MatchSubs)),
+ MatchData1 = #{<<"meta">> := MatchMeta1} = request_json(get, MatchQs, Headers),
+ ?assertEqual(1, maps:get(<<"page">>, MatchMeta1)),
+ ?assertEqual(emqx_mgmt:max_row_limit(), maps:get(<<"limit">>, MatchMeta1)),
+ %% count is undefined in fuzzy searching
+ ?assertNot(maps:is_key(<<"count">>, MatchMeta1)),
+ ?assertMatch(3, length(maps:get(<<"data">>, MatchData1))),
+ ?assertEqual(false, maps:get(<<"hasnext">>, MatchMeta1)),
+ LimitMatchQuery = [
+ {"clientid", ?CLIENTID},
+ {"match_topic", "+/+/+"},
+ {"limit", "3"}
+ ],
+
+ MatchData2 = #{<<"meta">> := MatchMeta2} = request_json(get, LimitMatchQuery, Headers),
+ ?assertEqual(#{<<"page">> => 1, <<"limit">> => 3, <<"hasnext">> => true}, MatchMeta2),
+ ?assertEqual(3, length(maps:get(<<"data">>, MatchData2))),
+
+ MatchData2P2 =
+ #{<<"meta">> := MatchMeta2P2} =
+ request_json(get, [{"page", "2"} | LimitMatchQuery], Headers),
+ ?assertEqual(#{<<"page">> => 2, <<"limit">> => 3, <<"hasnext">> => false}, MatchMeta2P2),
+ ?assertEqual(1, length(maps:get(<<"data">>, MatchData2P2))).
+
+request_json(Method, Query, Headers) when is_list(Query) ->
+ Qs = uri_string:compose_query(Query),
+ {ok, MatchRes} = emqx_mgmt_api_test_util:request_api(Method, path(), Qs, Headers),
+ emqx_json:decode(MatchRes, [return_maps]).
+
+path() ->
+ emqx_mgmt_api_test_util:api_path(["subscriptions"]).
+
+init_per_testcase(_TC, Config) ->
+ {ok, Client} = emqtt:start_link(#{username => ?USERNAME, clientid => ?CLIENTID, proto_ver => v5}),
+ {ok, _} = emqtt:connect(Client),
+ [{client, Client} | Config].
+
+end_per_testcase(_TC, Config) ->
+ Client = proplists:get_value(client, Config),
emqtt:disconnect(Client).
diff --git a/apps/emqx_management/test/emqx_mgmt_api_test_util.erl b/apps/emqx_management/test/emqx_mgmt_api_test_util.erl
index 5bb0ba818..82d55bb6a 100644
--- a/apps/emqx_management/test/emqx_mgmt_api_test_util.erl
+++ b/apps/emqx_management/test/emqx_mgmt_api_test_util.erl
@@ -24,14 +24,19 @@ init_suite() ->
init_suite([]).
init_suite(Apps) ->
+ init_suite(Apps, fun set_special_configs/1).
+
+init_suite(Apps, SetConfigs) ->
mria:start(),
application:load(emqx_management),
- emqx_common_test_helpers:start_apps(Apps ++ [emqx_dashboard], fun set_special_configs/1).
+ emqx_common_test_helpers:start_apps(Apps ++ [emqx_dashboard], SetConfigs),
+ emqx_common_test_http:create_default_app().
end_suite() ->
end_suite([]).
end_suite(Apps) ->
+ emqx_common_test_http:delete_default_app(),
application:unload(emqx_management),
emqx_common_test_helpers:stop_apps(Apps ++ [emqx_dashboard]),
emqx_config:delete_override_conf_files(),
@@ -43,8 +48,23 @@ set_special_configs(emqx_dashboard) ->
set_special_configs(_App) ->
ok.
+%% There is no functional difference between 'request' and 'request_api';
+%% 'request' exists only for compatibility with 'emqx_dashboard_api_test_helpers:request'.
+request(Method, Url) ->
+ request(Method, Url, []).
+
+request(Method, Url, Body) ->
+ request_api_with_body(Method, Url, Body).
+
+uri(Parts) ->
+ emqx_dashboard_api_test_helpers:uri(Parts).
+
+%% compatible_mode makes the return value match that of 'emqx_dashboard_api_test_helpers:request'
+request_api_with_body(Method, Url, Body) ->
+ request_api(Method, Url, [], auth_header_(), Body, #{compatible_mode => true}).
+
request_api(Method, Url) ->
- request_api(Method, Url, [], [], [], #{}).
+ request_api(Method, Url, auth_header_()).
request_api(Method, Url, AuthOrHeaders) ->
request_api(Method, Url, [], AuthOrHeaders, [], #{}).
@@ -90,10 +110,20 @@ request_api(Method, Url, QueryParams, AuthOrHeaders, Body, Opts) when
do_request_api(Method, Request, Opts) ->
ReturnAll = maps:get(return_all, Opts, false),
+ CompatibleMode = maps:get(compatible_mode, Opts, false),
+ ReqOpts =
+ case CompatibleMode of
+ true ->
+ [{body_format, binary}];
+ _ ->
+ []
+ end,
ct:pal("Method: ~p, Request: ~p", [Method, Request]),
- case httpc:request(Method, Request, [], []) of
+ case httpc:request(Method, Request, [], ReqOpts) of
{error, socket_closed_remotely} ->
{error, socket_closed_remotely};
+ {ok, {{_, Code, _}, _Headers, Body}} when CompatibleMode ->
+ {ok, Code, Body};
{ok, {{"HTTP/1.1", Code, _} = Reason, Headers, Body}} when
Code >= 200 andalso Code =< 299 andalso ReturnAll
->
@@ -109,10 +139,7 @@ do_request_api(Method, Request, Opts) ->
end.
auth_header_() ->
- Username = <<"admin">>,
- Password = <<"public">>,
- {ok, Token} = emqx_dashboard_admin:sign_token(Username, Password),
- {"Authorization", "Bearer " ++ binary_to_list(Token)}.
+ emqx_common_test_http:default_auth_header().
build_http_header(X) when is_list(X) ->
X;
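Editor's note: as a quick orientation for reviewers, a minimal sketch of how the new `compatible_mode` option is intended to be used from a test. The endpoint path is a placeholder and the 200 status is assumed; this is illustrative only, not part of the patch.

```erlang
%% With compatible_mode => true, do_request_api/3 asks httpc for a binary body and
%% returns {ok, StatusCode, Body} regardless of the status code.
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
Url = emqx_mgmt_api_test_util:api_path(["api_key"]),  %% placeholder endpoint
{ok, 200, Body} =
    emqx_mgmt_api_test_util:request_api(get, Url, [], AuthHeader, [], #{compatible_mode => true}),
true = is_binary(Body).
```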
diff --git a/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl
index 8e8c5b06f..0ba05b280 100644
--- a/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl
+++ b/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl
@@ -30,6 +30,8 @@
-define(API_VERSION, "v5").
-define(BASE_PATH, "api").
+-import(emqx_dashboard_SUITE, [auth_header_/0]).
+
%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------
@@ -330,13 +332,6 @@ t_stream_log(_Config) ->
to_rfc3339(Second) ->
list_to_binary(calendar:system_time_to_rfc3339(Second)).
-auth_header_() ->
- auth_header_("admin", "public").
-
-auth_header_(User, Pass) ->
- Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),
- {"Authorization", "Basic " ++ Encoded}.
-
request_api(Method, Url, Auth) -> do_request_api(Method, {Url, [Auth]}).
request_api(Method, Url, Auth, Body) ->
diff --git a/apps/emqx_modules/test/emqx_delayed_api_SUITE.erl b/apps/emqx_modules/test/emqx_delayed_api_SUITE.erl
index 96cdf7840..ed3cd9292 100644
--- a/apps/emqx_modules/test/emqx_delayed_api_SUITE.erl
+++ b/apps/emqx_modules/test/emqx_delayed_api_SUITE.erl
@@ -26,7 +26,7 @@
<<"max_delayed_messages">> => <<"0">>
}).
--import(emqx_dashboard_api_test_helpers, [request/2, request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/2, request/3, uri/1]).
all() ->
emqx_common_test_helpers:all(?MODULE).
@@ -36,27 +36,21 @@ init_per_suite(Config) ->
raw_with_default => true
}),
- ok = emqx_common_test_helpers:start_apps(
- [emqx_conf, emqx_modules, emqx_dashboard],
- fun set_special_configs/1
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_conf, emqx_modules]
),
emqx_delayed:load(),
Config.
end_per_suite(Config) ->
ok = emqx_delayed:unload(),
- emqx_common_test_helpers:stop_apps([emqx_conf, emqx_dashboard, emqx_modules]),
+ emqx_mgmt_api_test_util:end_suite([emqx_conf, emqx_modules]),
Config.
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(),
Config.
-set_special_configs(emqx_dashboard) ->
- emqx_dashboard_api_test_helpers:set_default_config();
-set_special_configs(_App) ->
- ok.
-
%%------------------------------------------------------------------------------
%% Test Cases
%%------------------------------------------------------------------------------
diff --git a/apps/emqx_modules/test/emqx_rewrite_api_SUITE.erl b/apps/emqx_modules/test/emqx_rewrite_api_SUITE.erl
index 90e90d788..ddb136f1e 100644
--- a/apps/emqx_modules/test/emqx_rewrite_api_SUITE.erl
+++ b/apps/emqx_modules/test/emqx_rewrite_api_SUITE.erl
@@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@@ -37,20 +37,14 @@ init_per_suite(Config) ->
raw_with_default => true
}),
- ok = emqx_common_test_helpers:start_apps(
- [emqx_conf, emqx_modules, emqx_dashboard],
- fun set_special_configs/1
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_conf, emqx_modules]
),
Config.
end_per_suite(_Config) ->
- emqx_common_test_helpers:stop_apps([emqx_conf, emqx_dashboard, emqx_modules]),
- ok.
-
-set_special_configs(emqx_dashboard) ->
- emqx_dashboard_api_test_helpers:set_default_config();
-set_special_configs(_App) ->
+ emqx_mgmt_api_test_util:end_suite([emqx_conf, emqx_modules]),
ok.
%%------------------------------------------------------------------------------
@@ -81,7 +75,7 @@ t_mqtt_topic_rewrite(_) ->
?assertEqual(
Rules,
- jsx:decode(Result)
+ emqx_json:decode(Result, [return_maps])
).
t_mqtt_topic_rewrite_limit(_) ->
diff --git a/apps/emqx_modules/test/emqx_telemetry_api_SUITE.erl b/apps/emqx_modules/test/emqx_telemetry_api_SUITE.erl
index 288a155d9..16f942bc0 100644
--- a/apps/emqx_modules/test/emqx_telemetry_api_SUITE.erl
+++ b/apps/emqx_modules/test/emqx_telemetry_api_SUITE.erl
@@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
--import(emqx_dashboard_api_test_helpers, [request/2, request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/2, request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@@ -33,8 +33,8 @@ init_per_suite(Config) ->
raw_with_default => true
}),
- ok = emqx_common_test_helpers:start_apps(
- [emqx_conf, emqx_authn, emqx_authz, emqx_modules, emqx_dashboard],
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_conf, emqx_authn, emqx_authz, emqx_modules],
fun set_special_configs/1
),
@@ -49,8 +49,8 @@ end_per_suite(_Config) ->
<<"sources">> => []
}
),
- emqx_common_test_helpers:stop_apps([
- emqx_dashboard, emqx_conf, emqx_authn, emqx_authz, emqx_modules
+ emqx_mgmt_api_test_util:end_suite([
+ emqx_conf, emqx_authn, emqx_authz, emqx_modules
]),
ok.
diff --git a/apps/emqx_modules/test/emqx_topic_metrics_api_SUITE.erl b/apps/emqx_modules/test/emqx_topic_metrics_api_SUITE.erl
index 8c23d042c..ea85d1fe9 100644
--- a/apps/emqx_modules/test/emqx_topic_metrics_api_SUITE.erl
+++ b/apps/emqx_modules/test/emqx_topic_metrics_api_SUITE.erl
@@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/2, request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@@ -44,9 +44,8 @@ init_per_suite(Config) ->
raw_with_default => true
}),
- ok = emqx_common_test_helpers:start_apps(
- [emqx_conf, emqx_modules, emqx_dashboard],
- fun set_special_configs/1
+ ok = emqx_mgmt_api_test_util:init_suite(
+ [emqx_conf, emqx_modules]
),
%% When many tests run in an obscure order, it may occur that
@@ -59,15 +58,10 @@ init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
- emqx_common_test_helpers:stop_apps([emqx_conf, emqx_dashboard, emqx_modules]),
+ emqx_mgmt_api_test_util:end_suite([emqx_conf, emqx_modules]),
application:stop(gen_rpc),
ok.
-set_special_configs(emqx_dashboard) ->
- emqx_dashboard_api_test_helpers:set_default_config();
-set_special_configs(_App) ->
- ok.
-
%%------------------------------------------------------------------------------
%% Tests
%%------------------------------------------------------------------------------
@@ -315,6 +309,3 @@ t_badrpc(_) ->
%%------------------------------------------------------------------------------
%% Helpers
%%------------------------------------------------------------------------------
-
-request(Method, Url) ->
- request(Method, Url, []).
diff --git a/apps/emqx_plugin_libs/src/emqx_plugin_libs.app.src b/apps/emqx_plugin_libs/src/emqx_plugin_libs.app.src
index bcdcfe420..3120b8503 100644
--- a/apps/emqx_plugin_libs/src/emqx_plugin_libs.app.src
+++ b/apps/emqx_plugin_libs/src/emqx_plugin_libs.app.src
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_plugin_libs, [
{description, "EMQX Plugin utility libs"},
- {vsn, "4.3.4"},
+ {vsn, "4.3.5"},
{modules, []},
{applications, [kernel, stdlib]},
{env, []}
diff --git a/apps/emqx_plugin_libs/src/emqx_plugin_libs_rule.erl b/apps/emqx_plugin_libs/src/emqx_plugin_libs_rule.erl
index 57bdd16e5..969374309 100644
--- a/apps/emqx_plugin_libs/src/emqx_plugin_libs_rule.erl
+++ b/apps/emqx_plugin_libs/src/emqx_plugin_libs_rule.erl
@@ -63,6 +63,8 @@
can_topic_match_oneof/2
]).
+-export_type([tmpl_token/0]).
+
-compile({no_auto_import, [float/1]}).
-define(EX_PLACE_HOLDER, "(\\$\\{[a-zA-Z0-9\\._]+\\})").
diff --git a/apps/emqx_prometheus/i18n/emqx_prometheus_schema_i18n.conf b/apps/emqx_prometheus/i18n/emqx_prometheus_schema_i18n.conf
index 7f251ff4b..f25e35219 100644
--- a/apps/emqx_prometheus/i18n/emqx_prometheus_schema_i18n.conf
+++ b/apps/emqx_prometheus/i18n/emqx_prometheus_schema_i18n.conf
@@ -24,6 +24,35 @@ emqx_prometheus_schema {
zh: """数据推送间隔"""
}
}
+
+ headers {
+ desc {
+ en: """A list of HTTP Headers when pushing to Push Gateway.
+For example, { Authorization = "some-authz-tokens"}
"""
+ zh: """推送到 Push Gateway 的 HTTP Headers 列表。
+例如, { Authorization = "some-authz-tokens"}
"""
+ }
+ }
+
+ job_name {
+ desc {
+ en: """Job Name that is pushed to the Push Gateway. Available variables:
+- ${name}: Name of EMQX node.
+- ${host}: Host name of EMQX node.
+For example, when the EMQX node name is emqx@127.0.0.1, then the name variable takes value emqx and the host variable takes value 127.0.0.1.
+
+Default value is: ${name}/instance/${name}~${host}
+"""
+ zh: """推送到 Push Gateway 的 Job 名称。可用变量为:
+- ${name}: EMQX 节点的名称。
+- ${host}: EMQX 节点主机名。
+
+例如,当 EMQX 节点名为 emqx@127.0.0.1,则 name 变量的值为 emqx,host 变量的值为 127.0.0.1。
+
+默认值为: ${name}/instance/${name}~${host}
"""
+ }
+ }
+
enable {
desc {
en: """Turn Prometheus data pushing on or off"""
diff --git a/apps/emqx_prometheus/src/emqx_prometheus.app.src b/apps/emqx_prometheus/src/emqx_prometheus.app.src
index d95c89c3b..31f8cbfaf 100644
--- a/apps/emqx_prometheus/src/emqx_prometheus.app.src
+++ b/apps/emqx_prometheus/src/emqx_prometheus.app.src
@@ -2,7 +2,7 @@
{application, emqx_prometheus, [
{description, "Prometheus for EMQX"},
% strict semver, bump manually!
- {vsn, "5.0.3"},
+ {vsn, "5.0.4"},
{modules, []},
{registered, [emqx_prometheus_sup]},
{applications, [kernel, stdlib, prometheus, emqx]},
diff --git a/apps/emqx_prometheus/src/emqx_prometheus.erl b/apps/emqx_prometheus/src/emqx_prometheus.erl
index 5424c4e24..a66f275f8 100644
--- a/apps/emqx_prometheus/src/emqx_prometheus.erl
+++ b/apps/emqx_prometheus/src/emqx_prometheus.erl
@@ -98,8 +98,13 @@ handle_cast(_Msg, State) ->
{noreply, State}.
handle_info({timeout, Timer, ?TIMER_MSG}, State = #{timer := Timer}) ->
- #{interval := Interval, push_gateway_server := Server} = opts(),
- PushRes = push_to_push_gateway(Server),
+ #{
+ interval := Interval,
+ headers := Headers,
+ job_name := JobName,
+ push_gateway_server := Server
+ } = opts(),
+ PushRes = push_to_push_gateway(Server, Headers, JobName),
NewTimer = ensure_timer(Interval),
NewState = maps:update_with(PushRes, fun(C) -> C + 1 end, 1, State#{timer => NewTimer}),
%% Data is too big, hibernate for saving memory and stop system monitor warning.
@@ -107,18 +112,27 @@ handle_info({timeout, Timer, ?TIMER_MSG}, State = #{timer := Timer}) ->
handle_info(_Msg, State) ->
{noreply, State}.
-push_to_push_gateway(Uri) ->
+push_to_push_gateway(Uri, Headers, JobName) when is_list(Headers) ->
[Name, Ip] = string:tokens(atom_to_list(node()), "@"),
- Url = lists:concat([Uri, "/metrics/job/", Name, "/instance/", Name, "~", Ip]),
+ JobName1 = emqx_placeholder:preproc_tmpl(JobName),
+ JobName2 = binary_to_list(
+ emqx_placeholder:proc_tmpl(
+ JobName1,
+ #{<<"name">> => Name, <<"host">> => Ip}
+ )
+ ),
+
+ Url = lists:concat([Uri, "/metrics/job/", JobName2]),
Data = prometheus_text_format:format(),
- case httpc:request(post, {Url, [], "text/plain", Data}, ?HTTP_OPTIONS, []) of
- {ok, {{"HTTP/1.1", 200, "OK"}, _Headers, _Body}} ->
+ case httpc:request(post, {Url, Headers, "text/plain", Data}, ?HTTP_OPTIONS, []) of
+ {ok, {{"HTTP/1.1", 200, _}, _RespHeaders, _RespBody}} ->
ok;
Error ->
?SLOG(error, #{
msg => "post_to_push_gateway_failed",
error => Error,
- url => Url
+ url => Url,
+ headers => Headers
}),
failed
end.
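Editor's note: a minimal sketch of how the configured `job_name` template is expected to expand, assuming a node named `emqx@127.0.0.1` and the default template. It mirrors the placeholder handling in `push_to_push_gateway/3` above and is illustrative only.

```erlang
%% Split node() into name and host, then render the template the same way as the patch does.
[Name, Ip] = string:tokens(atom_to_list('emqx@127.0.0.1'), "@"),
Tokens = emqx_placeholder:preproc_tmpl(<<"${name}/instance/${name}~${host}">>),
JobName = binary_to_list(
    emqx_placeholder:proc_tmpl(Tokens, #{<<"name">> => Name, <<"host">> => Ip})
),
%% The push URL then becomes <push_gateway_server>/metrics/job/emqx/instance/emqx~127.0.0.1
"emqx/instance/emqx~127.0.0.1" = JobName.
```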
diff --git a/apps/emqx_prometheus/src/emqx_prometheus_api.erl b/apps/emqx_prometheus/src/emqx_prometheus_api.erl
index 7466a1fd1..945c6eba9 100644
--- a/apps/emqx_prometheus/src/emqx_prometheus_api.erl
+++ b/apps/emqx_prometheus/src/emqx_prometheus_api.erl
@@ -121,6 +121,8 @@ prometheus_config_example() ->
enable => true,
interval => "15s",
push_gateway_server => <<"http://127.0.0.1:9091">>,
+ headers => #{'header-name' => 'header-value'},
+ job_name => <<"${name}/instance/${name}~${host}">>,
vm_dist_collector => enabled,
mnesia_collector => enabled,
vm_statistics_collector => enabled,
diff --git a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl
index 688c9be58..c13d198a2 100644
--- a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl
+++ b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl
@@ -25,7 +25,8 @@
roots/0,
fields/1,
desc/1,
- translation/1
+ translation/1,
+ convert_headers/1
]).
namespace() -> "prometheus".
@@ -52,6 +53,26 @@ fields("prometheus") ->
desc => ?DESC(interval)
}
)},
+ {headers,
+ ?HOCON(
+ list({string(), string()}),
+ #{
+ default => #{},
+ required => false,
+ converter => fun ?MODULE:convert_headers/1,
+ desc => ?DESC(headers)
+ }
+ )},
+ {job_name,
+ ?HOCON(
+ binary(),
+ #{
+ default => <<"${name}/instance/${name}~${host}">>,
+ required => true,
+ desc => ?DESC(job_name)
+ }
+ )},
+
{enable,
?HOCON(
boolean(),
@@ -126,6 +147,17 @@ fields("prometheus") ->
desc("prometheus") -> ?DESC(prometheus);
desc(_) -> undefined.
+convert_headers(Headers) when is_map(Headers) ->
+ maps:fold(
+ fun(K, V, Acc) ->
+ [{binary_to_list(K), binary_to_list(V)} | Acc]
+ end,
+ [],
+ Headers
+ );
+convert_headers(Headers) when is_list(Headers) ->
+ Headers.
+
%% for CI test, CI don't load the whole emqx_conf_schema.
translation(Name) ->
emqx_conf_schema:translation(Name).
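Editor's note: for clarity, a small illustration of the conversion `convert_headers/1` performs (the header value is an example). HOCON parses `headers` into a map of binaries, while `httpc` expects a list of string pairs.

```erlang
%% Expected behaviour of the converter above (single-header example):
[{"Authorization", "some-authz-tokens"}] =
    emqx_prometheus_schema:convert_headers(#{<<"Authorization">> => <<"some-authz-tokens">>}).
```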
diff --git a/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl b/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl
index b9df1103b..77d9902a2 100644
--- a/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl
+++ b/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl
@@ -27,6 +27,8 @@
"prometheus {\n"
" push_gateway_server = \"http://127.0.0.1:9091\"\n"
" interval = \"1s\"\n"
+ " headers = { Authorization = \"some-authz-tokens\"}\n"
+ " job_name = \"${name}~${host}\"\n"
" enable = true\n"
" vm_dist_collector = enabled\n"
" mnesia_collector = enabled\n"
@@ -85,6 +87,25 @@ t_collector_no_crash_test(_) ->
prometheus_text_format:format(),
ok.
+t_assert_push(_) ->
+ meck:new(httpc, [passthrough]),
+ Self = self(),
+ AssertPush = fun(Method, Req = {Url, Headers, ContentType, _Data}, HttpOpts, Opts) ->
+ ?assertEqual(post, Method),
+ ?assertMatch("http://127.0.0.1:9091/metrics/job/test~127.0.0.1", Url),
+ ?assertEqual([{"Authorization", "some-authz-tokens"}], Headers),
+ ?assertEqual("text/plain", ContentType),
+ Self ! pass,
+ meck:passthrough([Method, Req, HttpOpts, Opts])
+ end,
+ meck:expect(httpc, request, AssertPush),
+ ?assertMatch(ok, emqx_prometheus_sup:start_child(emqx_prometheus)),
+ receive
+ pass -> ok
+ after 2000 ->
+ ct:fail(assert_push_request_failed)
+ end.
+
t_only_for_coverage(_) ->
?assertEqual("5.0.0", emqx_prometheus_proto_v1:introduced_in()),
ok.
diff --git a/apps/emqx_resource/i18n/emqx_resource_schema_i18n.conf b/apps/emqx_resource/i18n/emqx_resource_schema_i18n.conf
index 52756f70d..0b6cbd0a2 100644
--- a/apps/emqx_resource/i18n/emqx_resource_schema_i18n.conf
+++ b/apps/emqx_resource/i18n/emqx_resource_schema_i18n.conf
@@ -36,8 +36,8 @@ For bridges only have ingress direction data flow, it can be set to 0 otherwise
health_check_interval {
desc {
- en: """Health check interval, in milliseconds."""
- zh: """健康检查间隔,单位毫秒。"""
+ en: """Health check interval."""
+ zh: """健康检查间隔。"""
}
label {
en: """Health Check Interval"""
@@ -69,8 +69,8 @@ For bridges only have ingress direction data flow, it can be set to 0 otherwise
auto_restart_interval {
desc {
- en: """The auto restart interval after the resource is disconnected, in milliseconds."""
- zh: """资源断开以后,自动重连的时间间隔,单位毫秒。"""
+ en: """The auto restart interval after the resource is disconnected."""
+ zh: """资源断开以后,自动重连的时间间隔。"""
}
label {
en: """Auto Restart Interval"""
diff --git a/apps/emqx_resource/src/emqx_resource_manager.erl b/apps/emqx_resource/src/emqx_resource_manager.erl
index 821bcbc5c..170dfe162 100644
--- a/apps/emqx_resource/src/emqx_resource_manager.erl
+++ b/apps/emqx_resource/src/emqx_resource_manager.erl
@@ -54,7 +54,7 @@
% State record
-record(data, {
- id, manager_id, group, mod, callback_mode, query_mode, config, opts, status, state, error
+ id, manager_id, group, mod, callback_mode, query_mode, config, opts, status, state, error, pid
}).
-type data() :: #data{}.
@@ -112,7 +112,7 @@ recreate(ResId, ResourceType, NewConfig, Opts) ->
end.
create_and_return_data(MgrId, ResId, Group, ResourceType, Config, Opts) ->
- create(MgrId, ResId, Group, ResourceType, Config, Opts),
+ _ = create(MgrId, ResId, Group, ResourceType, Config, Opts),
{ok, _Group, Data} = lookup(ResId),
{ok, Data}.
@@ -299,17 +299,16 @@ start_link(MgrId, ResId, Group, ResourceType, Config, Opts) ->
state = undefined,
error = undefined
},
- Module = atom_to_binary(?MODULE),
- ProcName = binary_to_atom(<<Module/binary, "_", MgrId/binary>>, utf8),
- gen_statem:start_link({local, ProcName}, ?MODULE, {Data, Opts}, []).
+ gen_statem:start_link(?MODULE, {Data, Opts}, []).
init({Data, Opts}) ->
process_flag(trap_exit, true),
%% init the cache so that lookup/1 will always return something
- insert_cache(Data#data.id, Data#data.group, Data),
- case maps:get(start_after_created, Opts, true) of
- true -> {ok, connecting, Data, {next_event, internal, start_resource}};
- false -> {ok, stopped, Data}
+ DataWithPid = Data#data{pid = self()},
+ insert_cache(DataWithPid#data.id, DataWithPid#data.group, DataWithPid),
+ case maps:get(start_after_created, Opts, ?START_AFTER_CREATED) of
+ true -> {ok, connecting, DataWithPid, {next_event, internal, start_resource}};
+ false -> {ok, stopped, DataWithPid}
end.
terminate(_Reason, _State, Data) ->
@@ -654,10 +653,12 @@ do_wait_for_ready(ResId, Retry) ->
safe_call(ResId, Message, Timeout) ->
try
- Module = atom_to_binary(?MODULE),
- MgrId = get_owner(ResId),
- ProcName = binary_to_existing_atom(<<Module/binary, "_", MgrId/binary>>, utf8),
- gen_statem:call(ProcName, Message, {clean_timeout, Timeout})
+ case read_cache(ResId) of
+ not_found ->
+ {error, not_found};
+ {_, #data{pid = ManagerPid}} ->
+ gen_statem:call(ManagerPid, Message, {clean_timeout, Timeout})
+ end
catch
error:badarg ->
{error, not_found};
diff --git a/apps/emqx_resource/src/emqx_resource_worker.erl b/apps/emqx_resource/src/emqx_resource_worker.erl
index 93bb22551..7840fd474 100644
--- a/apps/emqx_resource/src/emqx_resource_worker.erl
+++ b/apps/emqx_resource/src/emqx_resource_worker.erl
@@ -76,7 +76,7 @@
-type data() :: #{
id => id(),
index => index(),
- name => atom(),
+ inflight_tid => ets:tid(),
batch_size => pos_integer(),
batch_time => timer:time(),
queue => replayq:q(),
@@ -87,7 +87,7 @@
callback_mode() -> [state_functions, state_enter].
start_link(Id, Index, Opts) ->
- gen_statem:start_link({local, name(Id, Index)}, ?MODULE, {Id, Index, Opts}, []).
+ gen_statem:start_link(?MODULE, {Id, Index, Opts}, []).
-spec sync_query(id(), request(), query_opts()) -> Result :: term().
sync_query(Id, Request, Opts) ->
@@ -133,11 +133,11 @@ simple_async_query(Id, Request, ReplyFun) ->
_ = handle_query_result(Id, Result, false, false),
Result.
--spec block(pid() | atom()) -> ok.
+-spec block(pid()) -> ok.
block(ServerRef) ->
gen_statem:cast(ServerRef, block).
--spec resume(pid() | atom()) -> ok.
+-spec resume(pid()) -> ok.
resume(ServerRef) ->
gen_statem:cast(ServerRef, resume).
@@ -145,7 +145,6 @@ resume(ServerRef) ->
init({Id, Index, Opts}) ->
process_flag(trap_exit, true),
true = gproc_pool:connect_worker(Id, {Id, Index}),
- Name = name(Id, Index),
BatchSize = maps:get(batch_size, Opts, ?DEFAULT_BATCH_SIZE),
SegBytes0 = maps:get(queue_seg_bytes, Opts, ?DEFAULT_QUEUE_SEG_SIZE),
TotalBytes = maps:get(max_queue_bytes, Opts, ?DEFAULT_QUEUE_SIZE),
@@ -165,12 +164,12 @@ init({Id, Index, Opts}) ->
emqx_resource_metrics:queuing_set(Id, Index, queue_count(Queue)),
emqx_resource_metrics:inflight_set(Id, Index, 0),
InfltWinSZ = maps:get(async_inflight_window, Opts, ?DEFAULT_INFLIGHT),
- ok = inflight_new(Name, InfltWinSZ, Id, Index),
+ InflightTID = inflight_new(InfltWinSZ, Id, Index),
HCItvl = maps:get(health_check_interval, Opts, ?HEALTHCHECK_INTERVAL),
St = #{
id => Id,
index => Index,
- name => Name,
+ inflight_tid => InflightTID,
batch_size => BatchSize,
batch_time => maps:get(batch_time, Opts, ?DEFAULT_BATCH_TIME),
queue => Queue,
@@ -283,14 +282,14 @@ pick_cast(Id, Key, Query) ->
ok
end).
-do_resume(#{id := Id, name := Name} = Data) ->
- case inflight_get_first(Name) of
+do_resume(#{id := Id, inflight_tid := InflightTID} = Data) ->
+ case inflight_get_first(InflightTID) of
empty ->
retry_queue(Data);
{Ref, FirstQuery} ->
%% We retry msgs in inflight window sync, as if we send them
%% async, they will be appended to the end of inflight window again.
- retry_inflight_sync(Id, Ref, FirstQuery, Name, Data)
+ retry_inflight_sync(Id, Ref, FirstQuery, InflightTID, Data)
end.
retry_queue(
@@ -299,7 +298,7 @@ retry_queue(
id := Id,
index := Index,
batch_size := 1,
- name := Name,
+ inflight_tid := InflightTID,
resume_interval := ResumeT
} = Data0
) ->
@@ -308,7 +307,7 @@ retry_queue(
empty ->
{next_state, running, Data0};
{Q1, QAckRef, [?QUERY(_, Request, HasBeenSent) = Query]} ->
- QueryOpts = #{inflight_name => Name},
+ QueryOpts = #{inflight_name => InflightTID},
Result = call_query(configured, Id, Index, Query, QueryOpts),
Reply = ?REPLY(undefined, Request, HasBeenSent, Result),
case reply_caller(Id, Reply) of
@@ -327,7 +326,7 @@ retry_queue(
id := Id,
index := Index,
batch_size := BatchSize,
- name := Name,
+ inflight_tid := InflightTID,
resume_interval := ResumeT
} = Data0
) ->
@@ -336,7 +335,7 @@ retry_queue(
empty ->
{next_state, running, Data0};
{Q1, QAckRef, Batch0} ->
- QueryOpts = #{inflight_name => Name},
+ QueryOpts = #{inflight_name => InflightTID},
Result = call_query(configured, Id, Index, Batch0, QueryOpts),
%% The caller has been replied with ?RESOURCE_ERROR(blocked, _) before saving into the queue,
%% we now change the 'from' field to 'undefined' so it will not reply the caller again.
@@ -361,7 +360,7 @@ retry_inflight_sync(
Id,
Ref,
QueryOrBatch,
- Name,
+ InflightTID,
#{index := Index, resume_interval := ResumeT} = Data0
) ->
QueryOpts = #{},
@@ -375,7 +374,7 @@ retry_inflight_sync(
{keep_state, Data0, {state_timeout, ResumeT, resume}};
%% Send ok or failed but the resource is working
false ->
- inflight_drop(Name, Ref, Id, Index),
+ inflight_drop(InflightTID, Ref, Id, Index),
do_resume(Data0)
end.
@@ -451,11 +450,11 @@ do_flush(Data0, #{is_batch := false, batch := Batch, ack_ref := QAckRef, new_que
#{
id := Id,
index := Index,
- name := Name
+ inflight_tid := InflightTID
} = Data0,
%% unwrap when not batching (i.e., batch size == 1)
[?QUERY(From, CoreReq, HasBeenSent) = Request] = Batch,
- QueryOpts = #{inflight_name => Name},
+ QueryOpts = #{inflight_name => InflightTID},
Result = call_query(configured, Id, Index, Request, QueryOpts),
IsAsync = is_async(Id),
Data1 = cancel_flush_timer(Data0),
@@ -489,9 +488,9 @@ do_flush(Data0, #{is_batch := true, batch := Batch, ack_ref := QAckRef, new_queu
id := Id,
index := Index,
batch_size := BatchSize,
- name := Name
+ inflight_tid := InflightTID
} = Data0,
- QueryOpts = #{inflight_name => Name},
+ QueryOpts = #{inflight_name => InflightTID},
Result = call_query(configured, Id, Index, Batch, QueryOpts),
IsAsync = is_async(Id),
Data1 = cancel_flush_timer(Data0),
@@ -639,17 +638,17 @@ apply_query_fun(sync, Mod, Id, _Index, ?QUERY(_, Request, _) = _Query, ResSt, _Q
?APPLY_RESOURCE(call_query, Mod:on_query(Id, Request, ResSt), Request);
apply_query_fun(async, Mod, Id, Index, ?QUERY(_, Request, _) = Query, ResSt, QueryOpts) ->
?tp(call_query_async, #{id => Id, mod => Mod, query => Query, res_st => ResSt}),
- Name = maps:get(inflight_name, QueryOpts, undefined),
+ InflightTID = maps:get(inflight_name, QueryOpts, undefined),
?APPLY_RESOURCE(
call_query_async,
- case is_inflight_full(Name) of
+ case is_inflight_full(InflightTID) of
true ->
{async_return, inflight_full};
false ->
ReplyFun = fun ?MODULE:reply_after_query/7,
Ref = make_message_ref(),
- Args = [self(), Id, Index, Name, Ref, Query],
- ok = inflight_append(Name, Ref, Query, Id, Index),
+ Args = [self(), Id, Index, InflightTID, Ref, Query],
+ ok = inflight_append(InflightTID, Ref, Query, Id, Index),
Result = Mod:on_query_async(Id, Request, {ReplyFun, Args}, ResSt),
{async_return, Result}
end,
@@ -661,25 +660,25 @@ apply_query_fun(sync, Mod, Id, _Index, [?QUERY(_, _, _) | _] = Batch, ResSt, _Qu
?APPLY_RESOURCE(call_batch_query, Mod:on_batch_query(Id, Requests, ResSt), Batch);
apply_query_fun(async, Mod, Id, Index, [?QUERY(_, _, _) | _] = Batch, ResSt, QueryOpts) ->
?tp(call_batch_query_async, #{id => Id, mod => Mod, batch => Batch, res_st => ResSt}),
- Name = maps:get(inflight_name, QueryOpts, undefined),
+ InflightTID = maps:get(inflight_name, QueryOpts, undefined),
?APPLY_RESOURCE(
call_batch_query_async,
- case is_inflight_full(Name) of
+ case is_inflight_full(InflightTID) of
true ->
{async_return, inflight_full};
false ->
ReplyFun = fun ?MODULE:batch_reply_after_query/7,
Ref = make_message_ref(),
- ReplyFunAndArgs = {ReplyFun, [self(), Id, Index, Name, Ref, Batch]},
+ ReplyFunAndArgs = {ReplyFun, [self(), Id, Index, InflightTID, Ref, Batch]},
Requests = [Request || ?QUERY(_From, Request, _) <- Batch],
- ok = inflight_append(Name, Ref, Batch, Id, Index),
+ ok = inflight_append(InflightTID, Ref, Batch, Id, Index),
Result = Mod:on_batch_query_async(Id, Requests, ReplyFunAndArgs, ResSt),
{async_return, Result}
end,
Batch
).
-reply_after_query(Pid, Id, Index, Name, Ref, ?QUERY(From, Request, HasBeenSent), Result) ->
+reply_after_query(Pid, Id, Index, InflightTID, Ref, ?QUERY(From, Request, HasBeenSent), Result) ->
%% NOTE: 'inflight' is the count of messages that were sent async
%% but received no ACK, NOT the number of messages queued in the
%% inflight window.
@@ -687,10 +686,10 @@ reply_after_query(Pid, Id, Index, Name, Ref, ?QUERY(From, Request, HasBeenSent),
true ->
?MODULE:block(Pid);
false ->
- drop_inflight_and_resume(Pid, Name, Ref, Id, Index)
+ drop_inflight_and_resume(Pid, InflightTID, Ref, Id, Index)
end.
-batch_reply_after_query(Pid, Id, Index, Name, Ref, Batch, Result) ->
+batch_reply_after_query(Pid, Id, Index, InflightTID, Ref, Batch, Result) ->
%% NOTE: 'inflight' is the count of messages that were sent async
%% but received no ACK, NOT the number of messages queued in the
%% inflight window.
@@ -698,16 +697,16 @@ batch_reply_after_query(Pid, Id, Index, Name, Ref, Batch, Result) ->
true ->
?MODULE:block(Pid);
false ->
- drop_inflight_and_resume(Pid, Name, Ref, Id, Index)
+ drop_inflight_and_resume(Pid, InflightTID, Ref, Id, Index)
end.
-drop_inflight_and_resume(Pid, Name, Ref, Id, Index) ->
- case is_inflight_full(Name) of
+drop_inflight_and_resume(Pid, InflightTID, Ref, Id, Index) ->
+ case is_inflight_full(InflightTID) of
true ->
- inflight_drop(Name, Ref, Id, Index),
+ inflight_drop(InflightTID, Ref, Id, Index),
?MODULE:resume(Pid);
false ->
- inflight_drop(Name, Ref, Id, Index)
+ inflight_drop(InflightTID, Ref, Id, Index)
end.
%%==============================================================================
@@ -757,82 +756,85 @@ get_first_n_from_queue(Q, N) ->
%% the inflight queue for async query
-define(MAX_SIZE_REF, -1).
-define(SIZE_REF, -2).
-inflight_new(Name, InfltWinSZ, Id, Index) ->
- _ = ets:new(Name, [named_table, ordered_set, public, {write_concurrency, true}]),
- inflight_append(Name, ?MAX_SIZE_REF, {max_size, InfltWinSZ}, Id, Index),
+inflight_new(InfltWinSZ, Id, Index) ->
+ TableId = ets:new(
+ emqx_resource_worker_inflight_tab,
+ [ordered_set, public, {write_concurrency, true}]
+ ),
+ inflight_append(TableId, ?MAX_SIZE_REF, {max_size, InfltWinSZ}, Id, Index),
%% we use this counter because we might deal with batches as
%% elements.
- inflight_append(Name, ?SIZE_REF, 0, Id, Index),
- ok.
+ inflight_append(TableId, ?SIZE_REF, 0, Id, Index),
+ TableId.
-inflight_get_first(Name) ->
- case ets:next(Name, ?MAX_SIZE_REF) of
+inflight_get_first(InflightTID) ->
+ case ets:next(InflightTID, ?MAX_SIZE_REF) of
'$end_of_table' ->
empty;
Ref ->
- case ets:lookup(Name, Ref) of
+ case ets:lookup(InflightTID, Ref) of
[Object] ->
Object;
[] ->
%% it might have been dropped
- inflight_get_first(Name)
+ inflight_get_first(InflightTID)
end
end.
is_inflight_full(undefined) ->
false;
-is_inflight_full(Name) ->
- [{_, {max_size, MaxSize}}] = ets:lookup(Name, ?MAX_SIZE_REF),
+is_inflight_full(InflightTID) ->
+ [{_, {max_size, MaxSize}}] = ets:lookup(InflightTID, ?MAX_SIZE_REF),
%% we consider number of batches rather than number of messages
%% because one batch request may hold several messages.
- Size = inflight_num_batches(Name),
+ Size = inflight_num_batches(InflightTID),
Size >= MaxSize.
-inflight_num_batches(Name) ->
+inflight_num_batches(InflightTID) ->
%% Note: we subtract 2 because there're 2 metadata rows that hold
%% the maximum size value and the number of messages.
MetadataRowCount = 2,
- case ets:info(Name, size) of
+ case ets:info(InflightTID, size) of
undefined -> 0;
Size -> max(0, Size - MetadataRowCount)
end.
-inflight_num_msgs(Name) ->
- [{_, Size}] = ets:lookup(Name, ?SIZE_REF),
+inflight_num_msgs(InflightTID) ->
+ [{_, Size}] = ets:lookup(InflightTID, ?SIZE_REF),
Size.
inflight_append(undefined, _Ref, _Query, _Id, _Index) ->
ok;
-inflight_append(Name, Ref, [?QUERY(_, _, _) | _] = Batch0, Id, Index) ->
+inflight_append(InflightTID, Ref, [?QUERY(_, _, _) | _] = Batch0, Id, Index) ->
Batch = mark_as_sent(Batch0),
- ets:insert(Name, {Ref, Batch}),
+ ets:insert(InflightTID, {Ref, Batch}),
BatchSize = length(Batch),
- ets:update_counter(Name, ?SIZE_REF, {2, BatchSize}),
- emqx_resource_metrics:inflight_set(Id, Index, inflight_num_msgs(Name)),
+ ets:update_counter(InflightTID, ?SIZE_REF, {2, BatchSize}),
+ emqx_resource_metrics:inflight_set(Id, Index, inflight_num_msgs(InflightTID)),
ok;
-inflight_append(Name, Ref, ?QUERY(_From, _Req, _HasBeenSent) = Query0, Id, Index) ->
+inflight_append(InflightTID, Ref, ?QUERY(_From, _Req, _HasBeenSent) = Query0, Id, Index) ->
Query = mark_as_sent(Query0),
- ets:insert(Name, {Ref, Query}),
- ets:update_counter(Name, ?SIZE_REF, {2, 1}),
- emqx_resource_metrics:inflight_set(Id, Index, inflight_num_msgs(Name)),
+ ets:insert(InflightTID, {Ref, Query}),
+ ets:update_counter(InflightTID, ?SIZE_REF, {2, 1}),
+ emqx_resource_metrics:inflight_set(Id, Index, inflight_num_msgs(InflightTID)),
ok;
-inflight_append(Name, Ref, Data, _Id, _Index) ->
- ets:insert(Name, {Ref, Data}),
+inflight_append(InflightTID, Ref, Data, _Id, _Index) ->
+ ets:insert(InflightTID, {Ref, Data}),
%% this is a metadata row being inserted; therefore, we don't bump
%% the inflight metric.
ok.
inflight_drop(undefined, _, _Id, _Index) ->
ok;
-inflight_drop(Name, Ref, Id, Index) ->
+inflight_drop(InflightTID, Ref, Id, Index) ->
Count =
- case ets:take(Name, Ref) of
+ case ets:take(InflightTID, Ref) of
[{Ref, ?QUERY(_, _, _)}] -> 1;
[{Ref, [?QUERY(_, _, _) | _] = Batch}] -> length(Batch);
_ -> 0
end,
- Count > 0 andalso ets:update_counter(Name, ?SIZE_REF, {2, -Count, 0, 0}),
- emqx_resource_metrics:inflight_set(Id, Index, inflight_num_msgs(Name)),
+ Count > 0 andalso ets:update_counter(InflightTID, ?SIZE_REF, {2, -Count, 0, 0}),
+ emqx_resource_metrics:inflight_set(Id, Index, inflight_num_msgs(InflightTID)),
ok.
%%==============================================================================
@@ -868,13 +870,6 @@ assert_ok_result(R) ->
queue_count(Q) ->
replayq:count(Q).
--spec name(id(), integer()) -> atom().
-name(Id, Index) ->
- Mod = atom_to_list(?MODULE),
- Id1 = binary_to_list(Id),
- Index1 = integer_to_list(Index),
- list_to_atom(lists:concat([Mod, ":", Id1, ":", Index1])).
-
disk_queue_dir(Id, Index) ->
QDir = binary_to_list(Id) ++ ":" ++ integer_to_list(Index),
filename:join([emqx:data_dir(), "resource_worker", node(), QDir]).
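Editor's note: to make the reviewed data structure easier to follow, a hedged sketch of the inflight table layout implied by this change: an anonymous ETS table whose identifier travels in the worker state and in `QueryOpts` as `inflight_name`, with two metadata rows plus one row per in-flight (batch) request. The window size of 8 and the placeholder queries are assumptions for illustration; the key stands in for `make_message_ref()`.

```erlang
%% Not part of the patch: layout sketch only.
InflightTID = ets:new(
    emqx_resource_worker_inflight_tab,
    [ordered_set, public, {write_concurrency, true}]
),
true = ets:insert(InflightTID, {-1, {max_size, 8}}),  %% ?MAX_SIZE_REF row: window size
true = ets:insert(InflightTID, {-2, 0}),              %% ?SIZE_REF row: in-flight message count
%% One row per async request; a batch of 3 messages occupies one row but bumps the counter by 3.
Ref = erlang:unique_integer([monotonic, positive]),
true = ets:insert(InflightTID, {Ref, [q1, q2, q3]}),
%% Because this is an ordered_set with negative metadata keys, ets:next(InflightTID, -1)
%% skips straight to the oldest request row, as inflight_get_first/1 relies on.
ets:update_counter(InflightTID, -2, {2, 3}).
```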
diff --git a/apps/emqx_resource/test/emqx_resource_SUITE.erl b/apps/emqx_resource/test/emqx_resource_SUITE.erl
index 4bea0a1ee..cdec414c9 100644
--- a/apps/emqx_resource/test/emqx_resource_SUITE.erl
+++ b/apps/emqx_resource/test/emqx_resource_SUITE.erl
@@ -944,7 +944,15 @@ t_create_dry_run_local(_) ->
end,
lists:seq(1, 10)
),
- [] = ets:match(emqx_resource_manager, {{owner, '$1'}, '_'}).
+ case [] =:= ets:match(emqx_resource_manager, {{owner, '$1'}, '_'}) of
+ false ->
+ %% Sleep to remove flakiness in the test case. It takes some time for
+ %% the ETS table to be cleared.
+ timer:sleep(2000),
+ [] = ets:match(emqx_resource_manager, {{owner, '$1'}, '_'});
+ true ->
+ ok
+ end.
create_dry_run_local_succ() ->
case whereis(test_resource) of
diff --git a/apps/emqx_rule_engine/src/emqx_rule_date.erl b/apps/emqx_rule_engine/src/emqx_rule_date.erl
index a41beb20d..aeb5d7a1b 100644
--- a/apps/emqx_rule_engine/src/emqx_rule_date.erl
+++ b/apps/emqx_rule_engine/src/emqx_rule_date.erl
@@ -88,20 +88,20 @@ parse_date(TimeUnit, Offset, FormatString, InputString) ->
calendar:rfc3339_to_system_time(Str, [{unit, TimeUnit}]).
mlist(R) ->
- %% %H Shows hour in 24-hour format [15]
+ %% %H Shows hour in 24-hour format [15]
[
{$H, R#result.hour},
- %% %M Displays minutes [00-59]
+ %% %M Displays minutes [00-59]
{$M, R#result.minute},
- %% %S Displays seconds [00-59]
+ %% %S Displays seconds [00-59]
{$S, R#result.second},
- %% %y Displays year YYYY [2021]
+ %% %y Displays year YYYY [2021]
{$y, R#result.year},
- %% %m Displays the number of the month [01-12]
+ %% %m Displays the number of the month [01-12]
{$m, R#result.month},
- %% %d Displays the number of the month [01-12]
+ %% %d Displays the day of the month [01-31]
{$d, R#result.day},
- %% %Z Displays Time zone
+ %% %Z Displays Time zone
{$Z, R#result.zone}
].
@@ -223,20 +223,20 @@ parse_zone(Input) ->
mlist1() ->
maps:from_list(
- %% %H Shows hour in 24-hour format [15]
+ %% %H Shows hour in 24-hour format [15]
[
{$H, fun(Input) -> parse_int_times(2, Input) end},
- %% %M Displays minutes [00-59]
+ %% %M Displays minutes [00-59]
{$M, fun(Input) -> parse_int_times(2, Input) end},
- %% %S Displays seconds [00-59]
+ %% %S Displays seconds [00-59]
{$S, fun(Input) -> parse_second(Input) end},
- %% %y Displays year YYYY [2021]
+ %% %y Displays year YYYY [2021]
{$y, fun(Input) -> parse_int_times(4, Input) end},
- %% %m Displays the number of the month [01-12]
+ %% %m Displays the number of the month [01-12]
{$m, fun(Input) -> parse_int_times(2, Input) end},
- %% %d Displays the number of the month [01-12]
+ %% %d Displays the day of the month [01-31]
{$d, fun(Input) -> parse_int_times(2, Input) end},
- %% %Z Displays Time zone
+ %% %Z Displays Time zone
{$Z, fun(Input) -> parse_zone(Input) end}
]
).
diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src
index ea0bf6f9e..ee1544223 100644
--- a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src
+++ b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src
@@ -2,7 +2,7 @@
{application, emqx_rule_engine, [
{description, "EMQX Rule Engine"},
% strict semver, bump manually!
- {vsn, "5.0.7"},
+ {vsn, "5.0.8"},
{modules, []},
{registered, [emqx_rule_engine_sup, emqx_rule_engine]},
{applications, [kernel, stdlib, rulesql, getopt]},
diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl
index cbe7dae82..d6913cbc6 100644
--- a/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl
+++ b/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl
@@ -23,6 +23,7 @@
-export([
namespace/0,
+ tags/0,
roots/0,
fields/1,
desc/1,
@@ -33,6 +34,9 @@
namespace() -> rule_engine.
+tags() ->
+ [<<"Rule Engine">>].
+
roots() -> ["rule_engine"].
fields("rule_engine") ->
diff --git a/apps/emqx_slow_subs/test/emqx_slow_subs_api_SUITE.erl b/apps/emqx_slow_subs/test/emqx_slow_subs_api_SUITE.erl
index 5b5ed063f..6b0721e3d 100644
--- a/apps/emqx_slow_subs/test/emqx_slow_subs_api_SUITE.erl
+++ b/apps/emqx_slow_subs/test/emqx_slow_subs_api_SUITE.erl
@@ -203,13 +203,7 @@ do_request_api(Method, Request) ->
end.
auth_header_() ->
- AppId = <<"admin">>,
- AppSecret = <<"public">>,
- auth_header_(binary_to_list(AppId), binary_to_list(AppSecret)).
-
-auth_header_(User, Pass) ->
- Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),
- {"Authorization", "Basic " ++ Encoded}.
+ emqx_mgmt_api_test_util:auth_header_().
api_path(Parts) ->
?HOST ++ filename:join([?BASE_PATH, ?API_VERSION] ++ Parts).
diff --git a/bin/nodetool b/bin/nodetool
index b4f0a0183..9a5d5e069 100755
--- a/bin/nodetool
+++ b/bin/nodetool
@@ -24,12 +24,19 @@ main(Args) ->
["hocon" | Rest] ->
%% forward the call to hocon_cli
hocon_cli:main(Rest);
- ["check_license_key", Key] ->
- check_license(#{key => list_to_binary(Key)});
+ ["check_license_key", Key0] ->
+ Key = cleanup_key(Key0),
+ check_license(#{key => Key});
_ ->
do(Args)
end.
+%% the key is a string (list) representation of a binary, so we need
+%% to remove the leading and trailing angle brackets.
+cleanup_key(Str0) ->
+ Str1 = iolist_to_binary(string:replace(Str0, "<<", "", leading)),
+ iolist_to_binary(string:replace(Str1, ">>", "", trailing)).
+
do(Args) ->
ok = do_with_halt(Args, "mnesia_dir", fun create_mnesia_dir/2),
ok = do_with_halt(Args, "chkconfig", fun("-config", X) -> chkconfig(X) end),
diff --git a/build b/build
index 05a3a33d3..87c719a53 100755
--- a/build
+++ b/build
@@ -135,6 +135,9 @@ assert_no_compile_time_only_deps() {
make_rel() {
./scripts/pre-compile.sh "$PROFILE"
+ # make_elixir_rel always creates rebar.lock
+ # delete it to make git clone + checkout work, because we use shallow clone for rebar deps
+ rm -f rebar.lock
# compile all beams
./rebar3 as "$PROFILE" compile
# generate docs (require beam compiled), generated to etc and priv dirs
diff --git a/changes/refactor-9653.en.md b/changes/refactor-9653.en.md
new file mode 100644
index 000000000..2807f81d5
--- /dev/null
+++ b/changes/refactor-9653.en.md
@@ -0,0 +1 @@
+Make authorization config validation error message more readable.
diff --git a/changes/refactor-9653.zh.md b/changes/refactor-9653.zh.md
new file mode 100644
index 000000000..755fd1683
--- /dev/null
+++ b/changes/refactor-9653.zh.md
@@ -0,0 +1 @@
+改进授权配置检查错误日志的可读性。
diff --git a/changes/v5.0.14-en.md b/changes/v5.0.14-en.md
new file mode 100644
index 000000000..17ae121cb
--- /dev/null
+++ b/changes/v5.0.14-en.md
@@ -0,0 +1,68 @@
+# v5.0.14
+
+## Enhancements
+
+- [#8329](https://github.com/emqx/emqx/pull/8329) The MongoDB library has been upgraded to support MongoDB 5.1+
+
+- [#9593](https://github.com/emqx/emqx/pull/9593) Obfuscated sensitive data in the response when querying `bridges` information by API.
+
+- [#9614](https://github.com/emqx/emqx/pull/9614) Make it possible to configure `host:port` from environment variables without quotes.
+ Prior to this change, when overriding a `host:port` config value from an environment variable, one had to quote it as:
+ `env EMQX_BRIDGES__MQTT__XYZ__SERVER='"localhost:1883"'`.
+ Now it can be set without quotes: `env EMQX_BRIDGES__MQTT__XYZ__SERVER='localhost:1883'`.
+
+- [#9642](https://github.com/emqx/emqx/pull/9642) Deprecates `enable_batch` and `enable_queue` options for bridges/resources. After this change, queuing is always enabled for bridges, and batching is controlled by the `batch_size` option: `batch_size > 1` means batching will be enabled.
+
+- [#9671](https://github.com/emqx/emqx/pull/9671) Implement sliding window average metrics.
+
+- [#9674](https://github.com/emqx/emqx/pull/9674) Made rule engine behavior more consistent with bridge behavior regarding metrics: if a rule engine is disabled, its metrics are now reset.
+
+- [#9675](https://github.com/emqx/emqx/pull/9675) HTTP client library `ehttpc` upgraded from `0.4.2` to `0.4.3`.
+ Library `eredis_cluster` which manages clients to redis clusters upgraded from `0.7.1` to `0.7.5`.
+
+- [#9713](https://github.com/emqx/emqx/pull/9713) Introduce `api_key.bootstrap_file` to initialize API keys at boot time.
+ Deprecate `dashboard.bootstrap_users_file`.
+ Limit the maximum number of API keys to 100 instead of 30.
+
+## Bug fixes
+
+- [#8648](https://github.com/emqx/emqx/pull/8648) When deleting a non-existing bridge the server gave a success response. This has been fixed so that the server instead gives an error response when the user attempts to delete a non-existing bridge.
+
+- [#9637](https://github.com/emqx/emqx/pull/9637) Fix the `expiry_interval` field of the clients HTTP API to be measured in seconds.
+
+- [#9638](https://github.com/emqx/emqx/pull/9638) Fix the problem of data loss and bad match when the MySQL driver is disconnected.
+
+- [#9641](https://github.com/emqx/emqx/pull/9641) Fix an issue where testing the GCP PubSub could leak memory, and an issue where its JWT token would fail to refresh a second time.
+
+- [#9642](https://github.com/emqx/emqx/pull/9642) Fix some issues that could lead to wrong bridge metrics.
+ Fix an issue that could lead to message loss and wrong metrics with the Kafka Producer bridge when Kafka or the connection to it is down.
+ Fix some issues that could lead to the same message being delivered more than once when batching was used for bridges and the batch was retried.
+
+- [#9667](https://github.com/emqx/emqx/pull/9667) Remove the possibility to set `clientid` for the `/publish` and `/publish/bulk` HTTP APIs. This reduces the risk of security confusion.
+
+- [#9687](https://github.com/emqx/emqx/pull/9687) Fix a problem where sending messages to data bridges failed because of incorrect handling of data bridges without a `local_topic` field configured.
+ Before this change, if some bridges had the `local_topic` field configured but others had not, a `function_clause` error would occur when forwarding messages to the data bridges.
+
+- [#9689](https://github.com/emqx/emqx/pull/9689) Fix handling of the HTTP authorization result when a request failure (e.g. the HTTP resource is down) would cause a `function_clause` error.
+
+- [#9703](https://github.com/emqx/emqx/pull/9703) Set the default value of the `qos` field of the HTTP API `/clients/:clientid/subscribe` to 0.
+ Before this fix, the `qos` field had no default value, which led to a `function_clause` error
+ when querying this API.
+
+- [#9705](https://github.com/emqx/emqx/pull/9705) Remove the default value of the Webhook `body` field.
+ Before this fix, the default value of the Webhook `body` field was `${payload}`,
+ but events other than message publishing have no `payload` field among the fields
+ available to a rule, so in those cases the Webhook would send the string "undefined"
+ as the message body to the HTTP service.
+ This fix removes the default value of the `body` field. When the `body` field is
+ not configured, the Webhook now sends all available fields of the current event
+ as a JSON object.
+
+- [#9712](https://github.com/emqx/emqx/pull/9712) Fix a '404 Not Found' error when calling the HTTP API '/clients/:clientid/subscribe/bulk'
+ from plugins and data bridges while handling the 'client.connected' event.
+
+- [#9714](https://github.com/emqx/emqx/pull/9714) Fix the broken Swagger schema of the `/mqtt/auto_subscribe` API, and make sure Swagger always checks that the schema is correct.
+
+- [#9716](https://github.com/emqx/emqx/pull/9716) MQTT bridge config compatibility fix. A config created before v5.0.12 may encounter a compatibility issue after upgrading to v5.0.13.
+
+- [#9717](https://github.com/emqx/emqx/pull/9717) Prior to this fix, if connecting to a bridge server always timed out, it was not possible to change other configs even when the bridge was disabled.
diff --git a/changes/v5.0.14-zh.md b/changes/v5.0.14-zh.md
new file mode 100644
index 000000000..4eb510a43
--- /dev/null
+++ b/changes/v5.0.14-zh.md
@@ -0,0 +1,64 @@
+# v5.0.14
+
+## 增强
+
+- [#8329](https://github.com/emqx/emqx/pull/8329) MongoDB 的驱动现在已经升级到 MongoDB 5.1+ 了。
+
+- [#9593](https://github.com/emqx/emqx/pull/9593) 通过 API 查询 `bridges` 信息时将混淆响应中的敏感数据。
+
+- [#9614](https://github.com/emqx/emqx/pull/9614) 允许环境变量重载 `host:port` 值时不使用引号。
+ 在此修复前,环境变量中使用 `host:port` 这种配置时,用户必须使用引号,例如:
+ `env EMQX_BRIDGES__MQTT__XYZ__SERVER='"localhost:1883"'`。
+ 此修复后,可以不使用引号,例如 `env EMQX_BRIDGES__MQTT__XYZ__SERVER='localhost:1883'`。
+
+- [#9642](https://github.com/emqx/emqx/pull/9642) 废弃了桥接的 `enable_batch` 和 `enable_queue` 配置项 。在这一改变之后,桥接的工作进程总是启用缓存队列,而批处理由 `batch_size` 选项控制:`batch_size > 1` 则意味着启用批处理。
+
+- [#9671](https://github.com/emqx/emqx/pull/9671) 实施滑动窗口平均度量。
+
+- [#9674](https://github.com/emqx/emqx/pull/9674) 使得规则引擎的行为与桥梁的指标行为更加一致:如果一个规则引擎被禁用,其指标现在会被重置。
+
+- [#9675](https://github.com/emqx/emqx/pull/9675) HTTP 客户端库 `ehttpc` 从 `0.4.2` 升级到 `0.4.3`
+ Redis cluster 客户端库 `eredis_cluster` 从 `0.7.1` 升级到 `0.7.5`.
+
+- [#9713](https://github.com/emqx/emqx/pull/9713) 引入 `api_key.bootstrap_file`,用于启动时初始化 API 密钥。
+ 废弃 `dashboard.bootstrap_users_file`。
+ 将 API 密钥的最大数量限制提升为 100(原来为30)。
+
+## 修复
+
+- [#8648](https://github.com/emqx/emqx/pull/8648) 修复了当通过 API 删除一个不存在的桥接时,服务器会返回操作成功的问题,现在将会返回操作失败的信息。
+
+- [#9637](https://github.com/emqx/emqx/pull/9637) 修复 clients HTTP API 下的 expiry_interval 字段的时间单位为秒。
+
+- [#9638](https://github.com/emqx/emqx/pull/9638) 修复 MySQL 驱动断开连接时出现的数据丢失和匹配错误的问题。
+
+- [#9641](https://github.com/emqx/emqx/pull/9641) 修复了测试GCP PubSub可能泄露内存的问题,以及其JWT令牌第二次刷新失败的问题。
+
+- [#9642](https://github.com/emqx/emqx/pull/9642) 修复一些可能导致错误桥接指标的问题。
+ 修复当Kafka或其连接中断时,可能导致Kafka Producer桥的消息丢失和错误指标的问题。
+ 修复一些问题,这些问题可能导致在为桥接使用批处理时,同一消息被多次传递,以及批处理被重试时。
+
+- [#9667](https://github.com/emqx/emqx/pull/9667) 从 HTTP API /publish 和 /publish/bulk 中移除 clientid, 降低安全风险
+
+- [#9687](https://github.com/emqx/emqx/pull/9687) 修复由于某些数据桥接未配置 `local_topic` 字段,导致的所有数据桥接无法发送消息。
+ 在此改动之前,如果有些桥接设置了 `local_topic` 字段而有些没有设置,数据桥接转发消息时会出现 `function_clause` 的错误。
+
+- [#9689](https://github.com/emqx/emqx/pull/9689) 修正当请求失败(如:HTTP资源关闭)会导致`function_clause`错误时对HTTP授权结果的处理。
+
+- [#9703](https://github.com/emqx/emqx/pull/9703) 将 HTTP 接口 `/clients/:clientid/subscribe` 的 `qos` 字段的默认值设置为 0。
+ 在此修复之前,`qos` 字段没有默认值,调用订阅接口的时候将导致 `function_clause` 错误。
+
+- [#9705](https://github.com/emqx/emqx/pull/9705) 删除 Webhook 的默认值。
+ 在此修复之前,Webhook 的 `body` 字段的默认值为 `${payload}`,但规则中除了消息发布之外的其他事件的可用字段中
+ 都没有 `payload` 字段,所以这种情况下 Webhook 将发送消息正文为 "undefined" 的字符串到 HTTP 服务。
+ 此修复移除了 `body` 字段的默认值,当未配置 `body` 字段的时候,Webhook 将以 JSON object 的格式发送
+ 当前事件的全部可用字段。
+
+- [#9712](https://github.com/emqx/emqx/pull/9712) 修复了监听 `client.connected` 事件的插件和数据桥接在调用 `/clients/:clientid/subscribe/bulk`
+ HTTP 接口时报 `404 Not Found` 的问题。
+
+- [#9714](https://github.com/emqx/emqx/pull/9714) 修复 `/mqtt/auto_subscribe` API 错误的 swagger 格式,并且保证 swagger 总是检查格式是否正确。
+
+- [#9716](https://github.com/emqx/emqx/pull/9716) 修复 v5.0.12 之前的 MQTT 桥接配置在 升级到 v5.0.13 后 HTTP API 查询 桥接配置时的一个兼容性问题。
+
+- [#9717](https://github.com/emqx/emqx/pull/9717) 修复已禁用的桥接资源服务器连接超时的情况下不能修改其他配置参数的问题。
diff --git a/changes/v5.0.14/feat-8329.en.md b/changes/v5.0.14/feat-8329.en.md
new file mode 100644
index 000000000..2876e1754
--- /dev/null
+++ b/changes/v5.0.14/feat-8329.en.md
@@ -0,0 +1 @@
+The MongoDB library has been upgraded to support MongoDB 5.1+
diff --git a/changes/v5.0.14/feat-8329.zh.md b/changes/v5.0.14/feat-8329.zh.md
new file mode 100644
index 000000000..31e73086d
--- /dev/null
+++ b/changes/v5.0.14/feat-8329.zh.md
@@ -0,0 +1 @@
+MongoDB 的驱动现在已经升级到 MongoDB 5.1+ 了。
diff --git a/changes/v5.0.14/feat-9671.en.md b/changes/v5.0.14/feat-9671.en.md
new file mode 100644
index 000000000..dd5ed5e4d
--- /dev/null
+++ b/changes/v5.0.14/feat-9671.en.md
@@ -0,0 +1 @@
+Implement sliding window average metrics.
diff --git a/changes/v5.0.14/feat-9671.zh.md b/changes/v5.0.14/feat-9671.zh.md
new file mode 100644
index 000000000..cbcae01fe
--- /dev/null
+++ b/changes/v5.0.14/feat-9671.zh.md
@@ -0,0 +1 @@
+实施滑动窗口平均度量。
diff --git a/changes/v5.0.14/feat-9675.en.md b/changes/v5.0.14/feat-9675.en.md
new file mode 100644
index 000000000..5249e9826
--- /dev/null
+++ b/changes/v5.0.14/feat-9675.en.md
@@ -0,0 +1,2 @@
+HTTP client library `ehttpc` upgraded from `0.4.2` to `0.4.3`.
+Library `eredis_cluster`, which manages clients to Redis clusters, upgraded from `0.7.1` to `0.7.5`.
diff --git a/changes/v5.0.14/feat-9675.zh.md b/changes/v5.0.14/feat-9675.zh.md
new file mode 100644
index 000000000..d14f260ae
--- /dev/null
+++ b/changes/v5.0.14/feat-9675.zh.md
@@ -0,0 +1,2 @@
+HTTP 客户端库 `ehttpc` 从 `0.4.2` 升级到 `0.4.3`
+Redis cluster 客户端库 `eredis_cluster` 从 `0.7.1` 升级到 `0.7.5`.
diff --git a/changes/v5.0.14/feat-9713.en.md b/changes/v5.0.14/feat-9713.en.md
new file mode 100644
index 000000000..e8dbe4c6c
--- /dev/null
+++ b/changes/v5.0.14/feat-9713.en.md
@@ -0,0 +1,3 @@
+Introduce `api_key.bootstrap_file` to initialize API keys at boot time.
+Deprecate `dashboard.bootstrap_users_file`.
+Raise the maximum number of API keys from 30 to 100.
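+
+As a rough illustration of how this might look (the file path and the "one `key:secret`
+pair per line" format are assumptions for this sketch, not taken from this change):
+
+```
+# emqx.conf
+api_key {
+  # each line of the bootstrap file is expected to hold one API key / secret pair
+  bootstrap_file = "/opt/emqx/data/api_keys.bootstrap"
+}
+```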
diff --git a/changes/v5.0.14/feat-9713.zh.md b/changes/v5.0.14/feat-9713.zh.md
new file mode 100644
index 000000000..7535b8bd5
--- /dev/null
+++ b/changes/v5.0.14/feat-9713.zh.md
@@ -0,0 +1,3 @@
+引入 `api_key.bootstrap_file`,用于启动时初始化api密钥。
+废弃 `dashboard.bootstrap_users_file`。
+将 API 密钥的最大数量限制提升为 100(原来为30)。
diff --git a/changes/v5.0.14/fix-8648.en.md b/changes/v5.0.14/fix-8648.en.md
new file mode 100644
index 000000000..ac608a2e1
--- /dev/null
+++ b/changes/v5.0.14/fix-8648.en.md
@@ -0,0 +1 @@
+When deleting a non-existent bridge, the server returned a success response. It now returns an error response instead.
diff --git a/changes/v5.0.14/fix-8648.zh.md b/changes/v5.0.14/fix-8648.zh.md
new file mode 100644
index 000000000..6512c5aa5
--- /dev/null
+++ b/changes/v5.0.14/fix-8648.zh.md
@@ -0,0 +1 @@
+修复了当通过 API 删除一个不存在的桥接时,服务器会返回操作成功的问题,现在将会返回操作失败的信息。
diff --git a/changes/v5.0.14/fix-9667.en.md b/changes/v5.0.14/fix-9667.en.md
new file mode 100644
index 000000000..4b0fe7aef
--- /dev/null
+++ b/changes/v5.0.14/fix-9667.en.md
@@ -0,0 +1 @@
+Remove the possibility to set `clientid` for the `/publish` and `/publish/bulk` HTTP APIs, to reduce the risk of security confusion.
diff --git a/changes/v5.0.14/fix-9667.zh.md b/changes/v5.0.14/fix-9667.zh.md
new file mode 100644
index 000000000..f3952ca14
--- /dev/null
+++ b/changes/v5.0.14/fix-9667.zh.md
@@ -0,0 +1 @@
+从 HTTP API /publish 和 /publish/bulk 中移除 clientid, 降低安全风险
diff --git a/changes/v5.0.14/fix-9689.en.md b/changes/v5.0.14/fix-9689.en.md
new file mode 100644
index 000000000..7582c8bc5
--- /dev/null
+++ b/changes/v5.0.14/fix-9689.en.md
@@ -0,0 +1 @@
+Fix handling of the HTTP authorization result when a request failure (e.g. the HTTP resource is down) would cause a `function_clause` error.
diff --git a/changes/v5.0.14/fix-9689.zh.md b/changes/v5.0.14/fix-9689.zh.md
new file mode 100644
index 000000000..62f4a90fb
--- /dev/null
+++ b/changes/v5.0.14/fix-9689.zh.md
@@ -0,0 +1 @@
+修正当请求失败(如:HTTP资源关闭)会导致`function_clause`错误时对HTTP授权结果的处理。
diff --git a/changes/v5.0.14/fix-9703.en.md b/changes/v5.0.14/fix-9703.en.md
new file mode 100644
index 000000000..4eb91c7d0
--- /dev/null
+++ b/changes/v5.0.14/fix-9703.en.md
@@ -0,0 +1,3 @@
+Set the default value of the `qos` field of the HTTP API `/clients/:clientid/subscribe` to 0.
+Before this fix, the `qos` field had no default value, which led to a `function_clause` error
+when querying this API.
diff --git a/changes/v5.0.14/fix-9703.zh.md b/changes/v5.0.14/fix-9703.zh.md
new file mode 100644
index 000000000..863304a66
--- /dev/null
+++ b/changes/v5.0.14/fix-9703.zh.md
@@ -0,0 +1,2 @@
+将 HTTP 接口 `/clients/:clientid/subscribe` 的 `qos` 字段的默认值设置为 0。
+在此修复之前,`qos` 字段没有默认值,调用订阅接口的时候将导致 `function_clause` 错误。
diff --git a/changes/v5.0.14/fix-9705.en.md b/changes/v5.0.14/fix-9705.en.md
new file mode 100644
index 000000000..479d3d4ea
--- /dev/null
+++ b/changes/v5.0.14/fix-9705.en.md
@@ -0,0 +1,8 @@
+Remove the default value of the Webhook `body` field.
+Before this fix, the default value of the `body` field was `${payload}`, but events other
+than message publishing have no `payload` field among their available fields, so in those
+cases the Webhook would send the string "undefined" as the message body to the HTTP service.
+This fix removes the default value of the `body` field. When the `body` field is not
+configured, the Webhook now sends all available fields of the current event as a JSON object.
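+
+A minimal sketch of the resulting configuration; the bridge name, URL and method are
+illustrative assumptions, the point is only that `body` is omitted:
+
+```
+bridges.webhook.my_hook {
+  url = "http://127.0.0.1:8080/hook"
+  method = "post"
+  # no `body` configured: the full event is sent as a JSON object
+}
+```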
diff --git a/changes/v5.0.14/fix-9705.zh.md b/changes/v5.0.14/fix-9705.zh.md
new file mode 100644
index 000000000..6a57eba05
--- /dev/null
+++ b/changes/v5.0.14/fix-9705.zh.md
@@ -0,0 +1,5 @@
+删除 Webhook 的默认值。
+在此修复之前,Webhook 的 `body` 字段的默认值为 `${payload}`,但规则中除了消息发布之外的其他事件的可用字段中
+都没有 `payload` 字段,所以这种情况下 Webhook 将发送消息正文为 "undefined" 的字符串到 HTTP 服务。
+此修复移除了 `body` 字段的默认值,当未配置 `body` 字段的时候,Webhook 将以 JSON object 的格式发送
+当前事件的全部可用字段。
diff --git a/changes/v5.0.14/fix-9712.en.md b/changes/v5.0.14/fix-9712.en.md
new file mode 100644
index 000000000..e110b03e2
--- /dev/null
+++ b/changes/v5.0.14/fix-9712.en.md
@@ -0,0 +1,2 @@
+Fixed a '404 Not Found' error when the HTTP API '/clients/:clientid/subscribe/bulk'
+was called from plugins and data bridges while handling the 'client.connected' event.
diff --git a/changes/v5.0.14/fix-9712.zh.md b/changes/v5.0.14/fix-9712.zh.md
new file mode 100644
index 000000000..053f6b08c
--- /dev/null
+++ b/changes/v5.0.14/fix-9712.zh.md
@@ -0,0 +1,2 @@
+修复了监听 `client.connected` 事件的插件和数据桥接在调用 `/clients/:clientid/subscribe/bulk`
+HTTP 接口时报 `404 Not Found` 的问题。
diff --git a/changes/v5.0.14/fix-9714.en.md b/changes/v5.0.14/fix-9714.en.md
new file mode 100644
index 000000000..e1a606744
--- /dev/null
+++ b/changes/v5.0.14/fix-9714.en.md
@@ -0,0 +1 @@
+Fix the bad Swagger schema of the `/mqtt/auto_subscribe` API, and make sure Swagger always validates the schema.
diff --git a/changes/v5.0.14/fix-9714.zh.md b/changes/v5.0.14/fix-9714.zh.md
new file mode 100644
index 000000000..cbf38f041
--- /dev/null
+++ b/changes/v5.0.14/fix-9714.zh.md
@@ -0,0 +1 @@
+修复 `/mqtt/auto_subscribe` API 错误的 swagger 格式,并且保证 swagger 总是检查格式是否正确。
diff --git a/changes/v5.0.14/fix-9716.en.md b/changes/v5.0.14/fix-9716.en.md
new file mode 100644
index 000000000..93d4f1823
--- /dev/null
+++ b/changes/v5.0.14/fix-9716.en.md
@@ -0,0 +1 @@
+MQTT bridge config compatibility fix. Configs created before v5.0.12 may encounter a compatibility issue after upgrading to v5.0.13.
diff --git a/changes/v5.0.14/fix-9716.zh.md b/changes/v5.0.14/fix-9716.zh.md
new file mode 100644
index 000000000..f368fe325
--- /dev/null
+++ b/changes/v5.0.14/fix-9716.zh.md
@@ -0,0 +1 @@
+修复 v5.0.12 之前的 MQTT 桥接配置在 升级到 v5.0.13 后 HTTP API 查询 桥接配置时的一个兼容性问题。
diff --git a/changes/v5.0.14/fix-9717.en.md b/changes/v5.0.14/fix-9717.en.md
new file mode 100644
index 000000000..9a3b29157
--- /dev/null
+++ b/changes/v5.0.14/fix-9717.en.md
@@ -0,0 +1 @@
+Prior to this fix, if connecting to a bridge server always timed out, it was not possible to change other configs even when the bridge was disabled.
diff --git a/changes/v5.0.14/fix-9717.zh.md b/changes/v5.0.14/fix-9717.zh.md
new file mode 100644
index 000000000..859d7806f
--- /dev/null
+++ b/changes/v5.0.14/fix-9717.zh.md
@@ -0,0 +1 @@
+修复已禁用的桥接资源服务器连接超时的情况下不能修改其他配置参数的问题。
diff --git a/changes/v5.0.14/fix-9730.en.md b/changes/v5.0.14/fix-9730.en.md
new file mode 100644
index 000000000..f926d6e51
--- /dev/null
+++ b/changes/v5.0.14/fix-9730.en.md
@@ -0,0 +1 @@
+Removed potential atom leaks that could lead to a crash when a large number of resources were created.
diff --git a/changes/v5.0.14/fix-9730.zh.md b/changes/v5.0.14/fix-9730.zh.md
new file mode 100644
index 000000000..e2138d654
--- /dev/null
+++ b/changes/v5.0.14/fix-9730.zh.md
@@ -0,0 +1 @@
+如果创建了大量的资源,可能会导致崩溃的潜在的原子泄漏已经被删除。
diff --git a/changes/v5.0.15/feat-9586.en.md b/changes/v5.0.15/feat-9586.en.md
new file mode 100644
index 000000000..777fb81df
--- /dev/null
+++ b/changes/v5.0.15/feat-9586.en.md
@@ -0,0 +1 @@
+Basic authentication is no longer allowed for API calls; an API key must be used instead.
diff --git a/changes/v5.0.15/feat-9586.zh.md b/changes/v5.0.15/feat-9586.zh.md
new file mode 100644
index 000000000..102266a46
--- /dev/null
+++ b/changes/v5.0.15/feat-9586.zh.md
@@ -0,0 +1 @@
+API 调用不再支持基于 `username:password` 的 `basic` 认证, 现在 API 必须通过 API Key 才能进行调用。
diff --git a/changes/v5.0.15/feat-9722.en.md b/changes/v5.0.15/feat-9722.en.md
new file mode 100644
index 000000000..b86f37b83
--- /dev/null
+++ b/changes/v5.0.15/feat-9722.en.md
@@ -0,0 +1,3 @@
+Add the following configuration options for pushing metrics to Prometheus Push Gateway:
+- `headers`: allows custom HTTP request headers.
+- `job_name`: allows customizing the name of the job pushed to the Push Gateway.
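+
+A minimal sketch of how these options might be used together; the `push_gateway_server`
+key and all values are illustrative assumptions, only `headers` and `job_name` come from
+this change:
+
+```
+prometheus {
+  push_gateway_server = "http://127.0.0.1:9091"
+  headers { Authorization = "Basic YWRtaW46cHVibGlj" }
+  job_name = "emqx-exporter"
+}
+```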
diff --git a/changes/v5.0.15/feat-9722.zh.md b/changes/v5.0.15/feat-9722.zh.md
new file mode 100644
index 000000000..a806cb1de
--- /dev/null
+++ b/changes/v5.0.15/feat-9722.zh.md
@@ -0,0 +1,3 @@
+为 Prometheus 推送到 Push Gateway 新增以下配置项:
+- `headers`:允许自定义 HTTP 请求头。
+- `job_name`:允许自定义推送到 Push Gateway 的 Job 名称。
diff --git a/changes/v5.0.15/feat-9725.en.md b/changes/v5.0.15/feat-9725.en.md
new file mode 100644
index 000000000..832aa6bf9
--- /dev/null
+++ b/changes/v5.0.15/feat-9725.en.md
@@ -0,0 +1,11 @@
+Remove the config `auto_reconnect` from the emqx_authz, emqx_authn and data-bridge components.
+This is because we have another config with a similar function: `resource_opts.auto_restart_interval`.
+
+The functions of these two configs were difficult to distinguish, which led to confusion.
+After this change, `auto_reconnect` is no longer configurable (it is always true), and the
+underlying drivers that support this option automatically reconnect abnormally disconnected
+connections every `2s`.
+
+The config `resource_opts.auto_restart_interval` is still available to the user.
+It is the interval at which EMQX restarts the resource when the connection cannot be
+established for some reason.
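+
+A minimal sketch of where the remaining option lives in a bridge config; the bridge type,
+name and URL are illustrative assumptions:
+
+```
+bridges.webhook.my_hook {
+  url = "http://127.0.0.1:8080/hook"
+  resource_opts {
+    # how long EMQX waits before restarting the resource when it cannot connect
+    auto_restart_interval = "60s"
+  }
+}
+```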
diff --git a/changes/v5.0.15/feat-9725.zh.md b/changes/v5.0.15/feat-9725.zh.md
new file mode 100644
index 000000000..e7a2412d4
--- /dev/null
+++ b/changes/v5.0.15/feat-9725.zh.md
@@ -0,0 +1,8 @@
+从认证、鉴权和数据桥接功能中,删除 `auto_reconnect` 配置项,因为我们还有另一个功能类似的配置项:
+`resource_opts.auto_restart_interval`。
+
+这两个配置项的功能难以区分,会导致困惑。此修改之后,`auto_reconnect` 将不可配置(永远为 true),
+支持此配置的底层驱动将以 `2s` 为周期自动重连异常断开的连接。
+
+而 `resource_opts.auto_restart_interval` 配置项仍然开放给用户配置,它是资源因为某些原因
+无法建立连接的时候,emqx 重新启动该资源的时间间隔。
diff --git a/changes/v5.0.15/fix-8718-en.md b/changes/v5.0.15/fix-8718-en.md
new file mode 100644
index 000000000..6085adecd
--- /dev/null
+++ b/changes/v5.0.15/fix-8718-en.md
@@ -0,0 +1 @@
+Password information has been removed from information log messages for http, ldap, mongo, mqtt, mysql, pgsql and redis.
diff --git a/changes/v5.0.15/fix-8718-zh.md b/changes/v5.0.15/fix-8718-zh.md
new file mode 100644
index 000000000..d8aa81fd1
--- /dev/null
+++ b/changes/v5.0.15/fix-8718-zh.md
@@ -0,0 +1 @@
+密码信息已从http、ldap、mongo、mqtt、mysql、pgsql和redis的信息日志消息中删除。
diff --git a/changes/v5.0.15/fix-9626.en.md b/changes/v5.0.15/fix-9626.en.md
new file mode 100644
index 000000000..cc1c86d3e
--- /dev/null
+++ b/changes/v5.0.15/fix-9626.en.md
@@ -0,0 +1,2 @@
+Return authorization settings with default values.
+The authorization cache is enabled by default, but due to the missing default value in the `GET` response of `/authorization/settings`, it appeared to be disabled in the dashboard.
diff --git a/changes/v5.0.15/fix-9626.zh.md b/changes/v5.0.15/fix-9626.zh.md
new file mode 100644
index 000000000..bc2391f48
--- /dev/null
+++ b/changes/v5.0.15/fix-9626.zh.md
@@ -0,0 +1,3 @@
+为授权设置 API 返回默认值。
+授权缓存默认为开启,但是在此修复前,因为默认值在 `/authorization/settings` 这个 API 的返回值中缺失,
+使得在仪表盘配置页面中看起来是关闭了。
diff --git a/changes/v5.0.15/fix-9726-en.md b/changes/v5.0.15/fix-9726-en.md
new file mode 100644
index 000000000..9aa522690
--- /dev/null
+++ b/changes/v5.0.15/fix-9726-en.md
@@ -0,0 +1 @@
+Client fuzzy search API results were missing information indicating whether more results are available on the next pages; this is now fixed by providing a `hasnext` flag in the response.
diff --git a/changes/v5.0.15/fix-9726-zh.md b/changes/v5.0.15/fix-9726-zh.md
new file mode 100644
index 000000000..3554d2db7
--- /dev/null
+++ b/changes/v5.0.15/fix-9726-zh.md
@@ -0,0 +1 @@
+在此修复前,客户端模糊搜索 API 缺少一些可以用于判断是否可以继续翻页的信息,现在通过在响应中提供 `hasnext` 标志来解决这个问题。
diff --git a/changes/v5.0.15/fix-9748-en.md b/changes/v5.0.15/fix-9748-en.md
new file mode 100644
index 000000000..85f5896b2
--- /dev/null
+++ b/changes/v5.0.15/fix-9748-en.md
@@ -0,0 +1 @@
+Fix an issue where listeners without a `max_connections` config caused the cluster `/listeners` API to return a 500 error.
diff --git a/changes/v5.0.15/fix-9748-zh.md b/changes/v5.0.15/fix-9748-zh.md
new file mode 100644
index 000000000..cab352e79
--- /dev/null
+++ b/changes/v5.0.15/fix-9748-zh.md
@@ -0,0 +1 @@
+监听器不配置 `max_connections` 时会导致集群 `/listeners` 接口返回 500 错误。
diff --git a/changes/v5.0.15/fix-9749-en.md b/changes/v5.0.15/fix-9749-en.md
new file mode 100644
index 000000000..f079385ce
--- /dev/null
+++ b/changes/v5.0.15/fix-9749-en.md
@@ -0,0 +1 @@
+In some cases, search APIs could respond with an incorrect `count` value in the metadata, usually much bigger than expected; this is now fixed.
diff --git a/changes/v5.0.15/fix-9749-zh.md b/changes/v5.0.15/fix-9749-zh.md
new file mode 100644
index 000000000..356cf9475
--- /dev/null
+++ b/changes/v5.0.15/fix-9749-zh.md
@@ -0,0 +1 @@
+在某些情况下,搜索 API 可能会在元数据中响应不正确的 `count` 值,这通常比预期的要大得多,现在已经修复了。
diff --git a/changes/v5.0.15/fix-9751.en.md b/changes/v5.0.15/fix-9751.en.md
new file mode 100644
index 000000000..f45b99129
--- /dev/null
+++ b/changes/v5.0.15/fix-9751.en.md
@@ -0,0 +1 @@
+Fix an issue where obsolete certificate files were not deleted after the listener was updated or deleted.
diff --git a/changes/v5.0.15/fix-9751.zh.md b/changes/v5.0.15/fix-9751.zh.md
new file mode 100644
index 000000000..3908e5c20
--- /dev/null
+++ b/changes/v5.0.15/fix-9751.zh.md
@@ -0,0 +1 @@
+修复在更新或者删除监听器后,过时的证书文件没有被删除的问题。
diff --git a/deploy/charts/emqx/Chart.yaml b/deploy/charts/emqx/Chart.yaml
index 2be2a6324..ae48f9de2 100644
--- a/deploy/charts/emqx/Chart.yaml
+++ b/deploy/charts/emqx/Chart.yaml
@@ -14,8 +14,8 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
-version: 5.0.13
+version: 5.0.14
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application.
-appVersion: 5.0.13
+appVersion: 5.0.14
diff --git a/deploy/docker/Dockerfile b/deploy/docker/Dockerfile
index 4a00c68fb..03533eec4 100644
--- a/deploy/docker/Dockerfile
+++ b/deploy/docker/Dockerfile
@@ -1,6 +1,7 @@
ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0-26:1.13.4-24.3.4.2-1-debian11
ARG RUN_FROM=debian:11-slim
-FROM ${BUILD_FROM} AS builder
+ARG BUILDPLATFORM=linux/amd64
+FROM --platform=$BUILDPLATFORM ${BUILD_FROM} AS builder
COPY . /emqx
diff --git a/deploy/docker/Dockerfile.alpine b/deploy/docker/Dockerfile.alpine
index 0f72be9ab..ebce2f539 100644
--- a/deploy/docker/Dockerfile.alpine
+++ b/deploy/docker/Dockerfile.alpine
@@ -1,6 +1,7 @@
ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0-26:1.13.4-24.3.4.2-1-alpine3.15.1
ARG RUN_FROM=alpine:3.15.1
-FROM ${BUILD_FROM} AS builder
+ARG BUILDPLATFORM=linux/amd64
+FROM --platform=$BUILDPLATFORM ${BUILD_FROM} AS builder
RUN apk add --no-cache \
autoconf \
diff --git a/lib-ee/emqx_ee_bridge/i18n/emqx_ee_bridge_mongodb.conf b/lib-ee/emqx_ee_bridge/i18n/emqx_ee_bridge_mongodb.conf
index f8009f0a4..81ebc1e31 100644
--- a/lib-ee/emqx_ee_bridge/i18n/emqx_ee_bridge_mongodb.conf
+++ b/lib-ee/emqx_ee_bridge/i18n/emqx_ee_bridge_mongodb.conf
@@ -86,4 +86,15 @@ emqx_ee_bridge_mongodb {
zh: "桥接名称"
}
}
+
+ payload_template {
+ desc {
+ en: "The template for formatting the outgoing messages. If undefined, rule engine will use JSON format to serialize all visible inputs, such as clientid, topic, payload etc."
+ zh: "用于格式化写入 MongoDB 的消息模板。 如果未定义,规则引擎会使用 JSON 格式序列化所有的可见输入,例如 clientid, topic, payload 等。"
+ }
+ label: {
+ en: "Payload template"
+ zh: "有效载荷模板"
+ }
+ }
}
diff --git a/lib-ee/emqx_ee_bridge/rebar.config b/lib-ee/emqx_ee_bridge/rebar.config
index 2bd4036e0..6ca554c72 100644
--- a/lib-ee/emqx_ee_bridge/rebar.config
+++ b/lib-ee/emqx_ee_bridge/rebar.config
@@ -1,6 +1,5 @@
{erl_opts, [debug_info]}.
-{deps, [ {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.33.0"}}}
- , {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.7.4"}}}
+{deps, [ {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.7.4"}}}
, {kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.2"}}}
, {brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.0-rc1"}}}
, {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.16.7"}}}
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge.app.src b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge.app.src
index 1563cb8ef..11831ab06 100644
--- a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge.app.src
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge.app.src
@@ -1,6 +1,6 @@
{application, emqx_ee_bridge, [
{description, "EMQX Enterprise data bridges"},
- {vsn, "0.1.2"},
+ {vsn, "0.1.3"},
{registered, []},
{applications, [
kernel,
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge.erl
index d0099db1c..43a26111a 100644
--- a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge.erl
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge.erl
@@ -26,7 +26,9 @@ api_schemas(Method) ->
ref(emqx_ee_bridge_influxdb, Method ++ "_api_v2"),
ref(emqx_ee_bridge_redis, Method ++ "_single"),
ref(emqx_ee_bridge_redis, Method ++ "_sentinel"),
- ref(emqx_ee_bridge_redis, Method ++ "_cluster")
+ ref(emqx_ee_bridge_redis, Method ++ "_cluster"),
+ ref(emqx_ee_bridge_timescale, Method),
+ ref(emqx_ee_bridge_matrix, Method)
].
schema_modules() ->
@@ -38,7 +40,9 @@ schema_modules() ->
emqx_ee_bridge_mongodb,
emqx_ee_bridge_mysql,
emqx_ee_bridge_redis,
- emqx_ee_bridge_pgsql
+ emqx_ee_bridge_pgsql,
+ emqx_ee_bridge_timescale,
+ emqx_ee_bridge_matrix
].
examples(Method) ->
@@ -57,16 +61,18 @@ resource_type(Type) when is_binary(Type) -> resource_type(binary_to_atom(Type, u
resource_type(kafka) -> emqx_bridge_impl_kafka;
resource_type(hstreamdb) -> emqx_ee_connector_hstreamdb;
resource_type(gcp_pubsub) -> emqx_ee_connector_gcp_pubsub;
-resource_type(mongodb_rs) -> emqx_connector_mongo;
-resource_type(mongodb_sharded) -> emqx_connector_mongo;
-resource_type(mongodb_single) -> emqx_connector_mongo;
+resource_type(mongodb_rs) -> emqx_ee_connector_mongodb;
+resource_type(mongodb_sharded) -> emqx_ee_connector_mongodb;
+resource_type(mongodb_single) -> emqx_ee_connector_mongodb;
resource_type(mysql) -> emqx_connector_mysql;
resource_type(influxdb_api_v1) -> emqx_ee_connector_influxdb;
resource_type(influxdb_api_v2) -> emqx_ee_connector_influxdb;
resource_type(redis_single) -> emqx_ee_connector_redis;
resource_type(redis_sentinel) -> emqx_ee_connector_redis;
resource_type(redis_cluster) -> emqx_ee_connector_redis;
-resource_type(pgsql) -> emqx_connector_pgsql.
+resource_type(pgsql) -> emqx_connector_pgsql;
+resource_type(timescale) -> emqx_connector_pgsql;
+resource_type(matrix) -> emqx_connector_pgsql.
fields(bridges) ->
[
@@ -101,16 +107,8 @@ fields(bridges) ->
desc => <<"MySQL Bridge Config">>,
required => false
}
- )},
- {pgsql,
- mk(
- hoconsc:map(name, ref(emqx_ee_bridge_pgsql, "config")),
- #{
- desc => <<"PostgreSQL Bridge Config">>,
- required => false
- }
)}
- ] ++ mongodb_structs() ++ influxdb_structs() ++ redis_structs().
+ ] ++ mongodb_structs() ++ influxdb_structs() ++ redis_structs() ++ pgsql_structs().
mongodb_structs() ->
[
@@ -157,3 +155,20 @@ redis_structs() ->
redis_cluster
]
].
+
+pgsql_structs() ->
+ [
+ {Type,
+ mk(
+ hoconsc:map(name, ref(emqx_ee_bridge_pgsql, "config")),
+ #{
+ desc => <<Name/binary, " Bridge Config">>,
+ required => false
+ }
+ )}
+ || {Type, Name} <- [
+ {pgsql, <<"PostgreSQL">>},
+ {timescale, <<"Timescale">>},
+ {matrix, <<"Matrix">>}
+ ]
+ ].
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_influxdb.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_influxdb.erl
index 62c8b6ab7..b42c27832 100644
--- a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_influxdb.erl
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_influxdb.erl
@@ -196,33 +196,25 @@ to_influx_lines(RawLines) ->
converter_influx_line(Line, AccIn) ->
case string:tokens(str(Line), " ") of
[MeasurementAndTags, Fields, Timestamp] ->
- {Measurement, Tags} = split_measurement_and_tags(MeasurementAndTags),
- [
- #{
- measurement => Measurement,
- tags => kv_pairs(Tags),
- fields => kv_pairs(string:tokens(Fields, ",")),
- timestamp => Timestamp
- }
- | AccIn
- ];
+ append_influx_item(MeasurementAndTags, Fields, Timestamp, AccIn);
[MeasurementAndTags, Fields] ->
- {Measurement, Tags} = split_measurement_and_tags(MeasurementAndTags),
- %% TODO: fix here both here and influxdb driver.
- %% Default value should evaluated by InfluxDB.
- [
- #{
- measurement => Measurement,
- tags => kv_pairs(Tags),
- fields => kv_pairs(string:tokens(Fields, ",")),
- timestamp => "${timestamp}"
- }
- | AccIn
- ];
+ append_influx_item(MeasurementAndTags, Fields, undefined, AccIn);
_ ->
throw("Bad InfluxDB Line Protocol schema")
end.
+append_influx_item(MeasurementAndTags, Fields, Timestamp, Acc) ->
+ {Measurement, Tags} = split_measurement_and_tags(MeasurementAndTags),
+ [
+ #{
+ measurement => Measurement,
+ tags => kv_pairs(Tags),
+ fields => kv_pairs(string:tokens(Fields, ",")),
+ timestamp => Timestamp
+ }
+ | Acc
+ ].
+
split_measurement_and_tags(Subject) ->
case string:tokens(Subject, ",") of
[] ->
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_matrix.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_matrix.erl
new file mode 100644
index 000000000..106fac48a
--- /dev/null
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_matrix.erl
@@ -0,0 +1,42 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_ee_bridge_matrix).
+
+-export([
+ conn_bridge_examples/1
+]).
+
+-export([
+ namespace/0,
+ roots/0,
+ fields/1,
+ desc/1
+]).
+
+%% -------------------------------------------------------------------------------------------------
+%% api
+
+conn_bridge_examples(Method) ->
+ [
+ #{
+ <<"matrix">> => #{
+ summary => <<"Matrix Bridge">>,
+ value => emqx_ee_bridge_pgsql:values(Method, matrix)
+ }
+ }
+ ].
+
+%% -------------------------------------------------------------------------------------------------
+%% Hocon Schema Definitions
+namespace() -> "bridge_matrix".
+
+roots() -> [].
+
+fields("post") ->
+ emqx_ee_bridge_pgsql:fields("post", matrix);
+fields(Method) ->
+ emqx_ee_bridge_pgsql:fields(Method).
+
+desc(_) ->
+ undefined.
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mongodb.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mongodb.erl
index 516c75f65..84db0c214 100644
--- a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mongodb.erl
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mongodb.erl
@@ -37,8 +37,9 @@ roots() ->
fields("config") ->
[
{enable, mk(boolean(), #{desc => ?DESC("enable"), default => true})},
- {collection, mk(binary(), #{desc => ?DESC("collection"), default => <<"mqtt">>})}
- ];
+ {collection, mk(binary(), #{desc => ?DESC("collection"), default => <<"mqtt">>})},
+ {payload_template, mk(binary(), #{required => false, desc => ?DESC("payload_template")})}
+ ] ++ fields("resource_opts");
fields(mongodb_rs) ->
emqx_connector_mongo:fields(rs) ++ fields("config");
fields(mongodb_sharded) ->
@@ -68,7 +69,32 @@ fields("get_sharded") ->
fields("get_single") ->
emqx_bridge_schema:metrics_status_fields() ++
fields(mongodb_single) ++
- type_and_name_fields(mongodb_single).
+ type_and_name_fields(mongodb_single);
+fields("creation_opts") ->
+ lists:map(
+ fun
+ ({query_mode, _FieldSchema}) ->
+ {query_mode,
+ mk(
+ enum([sync, async]),
+ #{
+ desc => ?DESC(emqx_resource_schema, "query_mode"),
+ default => sync
+ }
+ )};
+ (Field) ->
+ Field
+ end,
+ emqx_resource_schema:fields("creation_opts")
+ );
+fields("resource_opts") ->
+ [
+ {resource_opts,
+ mk(
+ ref(?MODULE, "creation_opts"),
+ #{default => #{}, desc => ?DESC(emqx_resource_schema, "resource_opts")}
+ )}
+ ].
conn_bridge_examples(Method) ->
[
@@ -94,6 +120,8 @@ conn_bridge_examples(Method) ->
desc("config") ->
?DESC("desc_config");
+desc("creation_opts") ->
+ ?DESC(emqx_resource_schema, "creation_opts");
desc(mongodb_rs) ->
?DESC(mongodb_rs_conf);
desc(mongodb_sharded) ->
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mysql.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mysql.erl
index bf5d2e140..114459149 100644
--- a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mysql.erl
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mysql.erl
@@ -51,7 +51,6 @@ values(post) ->
pool_size => 8,
username => <<"root">>,
password => <<"">>,
- auto_reconnect => true,
sql => ?DEFAULT_SQL,
local_topic => <<"local/topic/#">>,
resource_opts => #{
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_pgsql.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_pgsql.erl
index 1f9a005c9..be9fc9dc8 100644
--- a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_pgsql.erl
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_pgsql.erl
@@ -11,7 +11,9 @@
-import(hoconsc, [mk/2, enum/1, ref/2]).
-export([
- conn_bridge_examples/1
+ conn_bridge_examples/1,
+ values/2,
+ fields/2
]).
-export([
@@ -34,24 +36,23 @@ conn_bridge_examples(Method) ->
#{
<<"pgsql">> => #{
summary => <<"PostgreSQL Bridge">>,
- value => values(Method)
+ value => values(Method, pgsql)
}
}
].
-values(get) ->
- maps:merge(values(post), ?METRICS_EXAMPLE);
-values(post) ->
+values(get, Type) ->
+ maps:merge(values(post, Type), ?METRICS_EXAMPLE);
+values(post, Type) ->
#{
enable => true,
- type => pgsql,
+ type => Type,
name => <<"foo">>,
server => <<"127.0.0.1:5432">>,
database => <<"mqtt">>,
pool_size => 8,
username => <<"root">>,
password => <<"public">>,
- auto_reconnect => true,
sql => ?DEFAULT_SQL,
local_topic => <<"local/topic/#">>,
resource_opts => #{
@@ -64,8 +65,8 @@ values(post) ->
max_queue_bytes => ?DEFAULT_QUEUE_SIZE
}
};
-values(put) ->
- values(post).
+values(put, Type) ->
+ values(post, Type).
%% -------------------------------------------------------------------------------------------------
%% Hocon Schema Definitions
@@ -96,17 +97,20 @@ fields("config") ->
}
)}
] ++
- emqx_connector_mysql:fields(config) -- emqx_connector_schema_lib:prepare_statement_fields();
+ emqx_connector_pgsql:fields(config) -- emqx_connector_schema_lib:prepare_statement_fields();
fields("creation_opts") ->
Opts = emqx_resource_schema:fields("creation_opts"),
[O || {Field, _} = O <- Opts, not is_hidden_opts(Field)];
fields("post") ->
- [type_field(), name_field() | fields("config")];
+ fields("post", pgsql);
fields("put") ->
fields("config");
fields("get") ->
emqx_bridge_schema:metrics_status_fields() ++ fields("post").
+fields("post", Type) ->
+ [type_field(Type), name_field() | fields("config")].
+
desc("config") ->
?DESC("desc_config");
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
@@ -123,8 +127,8 @@ is_hidden_opts(Field) ->
async_inflight_window
]).
-type_field() ->
- {type, mk(enum([mysql]), #{required => true, desc => ?DESC("desc_type")})}.
+type_field(Type) ->
+ {type, mk(enum([Type]), #{required => true, desc => ?DESC("desc_type")})}.
name_field() ->
{name, mk(binary(), #{required => true, desc => ?DESC("desc_name")})}.
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_redis.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_redis.erl
index 727a6df4b..5c273e050 100644
--- a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_redis.erl
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_redis.erl
@@ -50,19 +50,22 @@ values(Protocol, get) ->
values("single", post) ->
SpecificOpts = #{
server => <<"127.0.0.1:6379">>,
+ redis_type => single,
database => 1
},
values(common, "single", SpecificOpts);
values("sentinel", post) ->
SpecificOpts = #{
servers => [<<"127.0.0.1:26379">>],
+ redis_type => sentinel,
sentinel => <<"mymaster">>,
database => 1
},
values(common, "sentinel", SpecificOpts);
values("cluster", post) ->
SpecificOpts = #{
- servers => [<<"127.0.0.1:6379">>]
+ servers => [<<"127.0.0.1:6379">>],
+ redis_type => cluster
},
values(common, "cluster", SpecificOpts);
values(Protocol, put) ->
@@ -76,7 +79,6 @@ values(common, RedisType, SpecificOpts) ->
local_topic => <<"local/topic/#">>,
pool_size => 8,
password => <<"secret">>,
- auto_reconnect => true,
command_template => [<<"LPUSH">>, <<"MSGS">>, <<"${payload}">>],
resource_opts => #{
batch_size => 1,
diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_timescale.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_timescale.erl
new file mode 100644
index 000000000..20d940462
--- /dev/null
+++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_timescale.erl
@@ -0,0 +1,42 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_ee_bridge_timescale).
+
+-export([
+ conn_bridge_examples/1
+]).
+
+-export([
+ namespace/0,
+ roots/0,
+ fields/1,
+ desc/1
+]).
+
+%% -------------------------------------------------------------------------------------------------
+%% api
+
+conn_bridge_examples(Method) ->
+ [
+ #{
+ <<"timescale">> => #{
+ summary => <<"Timescale Bridge">>,
+ value => emqx_ee_bridge_pgsql:values(Method, timescale)
+ }
+ }
+ ].
+
+%% -------------------------------------------------------------------------------------------------
+%% Hocon Schema Definitions
+namespace() -> "bridge_timescale".
+
+roots() -> [].
+
+fields("post") ->
+ emqx_ee_bridge_pgsql:fields("post", timescale);
+fields(Method) ->
+ emqx_ee_bridge_pgsql:fields(Method).
+
+desc(_) ->
+ undefined.
diff --git a/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_influxdb_SUITE.erl b/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_influxdb_SUITE.erl
index 6331611d0..bb87a9f37 100644
--- a/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_influxdb_SUITE.erl
+++ b/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_influxdb_SUITE.erl
@@ -525,7 +525,6 @@ t_start_ok(Config) ->
SentData = #{
<<"clientid">> => ClientId,
<<"topic">> => atom_to_binary(?FUNCTION_NAME),
- <<"timestamp">> => erlang:system_time(nanosecond),
<<"payload">> => Payload
},
?check_trace(
diff --git a/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_mongodb_SUITE.erl b/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_mongodb_SUITE.erl
index fb8f1fcc3..f81571223 100644
--- a/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_mongodb_SUITE.erl
+++ b/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_mongodb_SUITE.erl
@@ -25,7 +25,8 @@ all() ->
group_tests() ->
[
t_setup_via_config_and_publish,
- t_setup_via_http_api_and_publish
+ t_setup_via_http_api_and_publish,
+ t_payload_template
].
groups() ->
@@ -151,6 +152,9 @@ mongo_config(MongoHost, MongoPort0, rs = Type) ->
" servers = [~p]\n"
" w_mode = safe\n"
" database = mqtt\n"
+ " resource_opts = {\n"
+ " worker_pool_size = 1\n"
+ " }\n"
"}",
[Name, Servers]
),
@@ -167,6 +171,9 @@ mongo_config(MongoHost, MongoPort0, sharded = Type) ->
" servers = [~p]\n"
" w_mode = safe\n"
" database = mqtt\n"
+ " resource_opts = {\n"
+ " worker_pool_size = 1\n"
+ " }\n"
"}",
[Name, Servers]
),
@@ -183,6 +190,9 @@ mongo_config(MongoHost, MongoPort0, single = Type) ->
" server = ~p\n"
" w_mode = safe\n"
" database = mqtt\n"
+ " resource_opts = {\n"
+ " worker_pool_size = 1\n"
+ " }\n"
"}",
[Name, Server]
),
@@ -196,9 +206,14 @@ parse_and_check(ConfigString, Type, Name) ->
Config.
create_bridge(Config) ->
+ create_bridge(Config, _Overrides = #{}).
+
+create_bridge(Config, Overrides) ->
Type = mongo_type_bin(?config(mongo_type, Config)),
Name = ?config(mongo_name, Config),
- MongoConfig = ?config(mongo_config, Config),
+ MongoConfig0 = ?config(mongo_config, Config),
+ MongoConfig = emqx_map_lib:deep_merge(MongoConfig0, Overrides),
+ ct:pal("creating ~p bridge with config:\n ~p", [Type, MongoConfig]),
emqx_bridge:create(Type, Name, MongoConfig).
delete_bridge(Config) ->
@@ -219,7 +234,8 @@ clear_db(Config) ->
Name = ?config(mongo_name, Config),
#{<<"collection">> := Collection} = ?config(mongo_config, Config),
ResourceID = emqx_bridge_resource:resource_id(Type, Name),
- {ok, _, #{state := #{poolname := PoolName}}} = emqx_resource:get_instance(ResourceID),
+ {ok, _, #{state := #{connector_state := #{poolname := PoolName}}}} =
+ emqx_resource:get_instance(ResourceID),
Selector = #{},
{true, _} = ecpool:pick_and_do(
PoolName, {mongo_api, delete, [Collection, Selector]}, no_handover
@@ -275,3 +291,14 @@ t_setup_via_http_api_and_publish(Config) ->
find_all(Config)
),
ok.
+
+t_payload_template(Config) ->
+ {ok, _} = create_bridge(Config, #{<<"payload_template">> => <<"{\"foo\": \"${clientid}\"}">>}),
+ Val = erlang:unique_integer(),
+ ClientId = emqx_guid:to_hexstr(emqx_guid:gen()),
+ ok = send_message(Config, #{key => Val, clientid => ClientId}),
+ ?assertMatch(
+ {ok, [#{<<"foo">> := ClientId}]},
+ find_all(Config)
+ ),
+ ok.
diff --git a/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_pgsql_SUITE.erl b/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_pgsql_SUITE.erl
index c5292a892..c2ff6fa8f 100644
--- a/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_pgsql_SUITE.erl
+++ b/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_pgsql_SUITE.erl
@@ -45,14 +45,20 @@ groups() ->
[
{tcp, [
{group, with_batch},
- {group, without_batch}
+ {group, without_batch},
+ {group, matrix},
+ {group, timescale}
]},
{tls, [
{group, with_batch},
- {group, without_batch}
+ {group, without_batch},
+ {group, matrix},
+ {group, timescale}
]},
{with_batch, TCs -- NonBatchCases},
- {without_batch, TCs}
+ {without_batch, TCs},
+ {matrix, [t_setup_via_config_and_publish, t_setup_via_http_api_and_publish]},
+ {timescale, [t_setup_via_config_and_publish, t_setup_via_http_api_and_publish]}
].
init_per_group(tcp, Config) ->
@@ -83,6 +89,12 @@ init_per_group(with_batch, Config0) ->
init_per_group(without_batch, Config0) ->
Config = [{enable_batch, false} | Config0],
common_init(Config);
+init_per_group(matrix, Config0) ->
+ Config = [{bridge_type, <<"matrix">>}, {enable_batch, true} | Config0],
+ common_init(Config);
+init_per_group(timescale, Config0) ->
+ Config = [{bridge_type, <<"timescale">>}, {enable_batch, true} | Config0],
+ common_init(Config);
init_per_group(_Group, Config) ->
Config.
@@ -122,7 +134,7 @@ end_per_testcase(_Testcase, Config) ->
%%------------------------------------------------------------------------------
common_init(Config0) ->
- BridgeType = <<"pgsql">>,
+ BridgeType = proplists:get_value(bridge_type, Config0, <<"pgsql">>),
Host = ?config(pgsql_host, Config0),
Port = ?config(pgsql_port, Config0),
case emqx_common_test_helpers:is_tcp_server_available(Host, Port) of
diff --git a/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_redis_SUITE.erl b/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_redis_SUITE.erl
index 1f4b52ddc..c3529ddeb 100644
--- a/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_redis_SUITE.erl
+++ b/lib-ee/emqx_ee_bridge/test/emqx_ee_bridge_redis_SUITE.erl
@@ -17,7 +17,8 @@
%%------------------------------------------------------------------------------
-define(REDIS_TOXYPROXY_CONNECT_CONFIG, #{
- <<"server">> => <<"toxiproxy:6379">>
+ <<"server">> => <<"toxiproxy:6379">>,
+ <<"redis_type">> => <<"single">>
}).
-define(COMMON_REDIS_OPTS, #{
@@ -31,7 +32,7 @@
-define(PROXY_HOST, "toxiproxy").
-define(PROXY_PORT, "8474").
-all() -> [{group, redis_types}, {group, rest}].
+all() -> [{group, transport_types}, {group, rest}].
groups() ->
ResourceSpecificTCs = [t_create_delete_bridge],
@@ -47,7 +48,7 @@ groups() ->
],
[
{rest, TCs},
- {redis_types, [
+ {transport_types, [
{group, tcp},
{group, tls}
]},
@@ -63,7 +64,7 @@ groups() ->
init_per_group(Group, Config) when
Group =:= redis_single; Group =:= redis_sentinel; Group =:= redis_cluster
->
- [{redis_type, Group} | Config];
+ [{transport_type, Group} | Config];
init_per_group(Group, Config) when
Group =:= tcp; Group =:= tls
->
@@ -79,6 +80,12 @@ end_per_group(_Group, _Config) ->
ok.
init_per_suite(Config) ->
+ wait_for_ci_redis(redis_checks(), Config).
+
+wait_for_ci_redis(0, _Config) ->
+ throw(no_redis);
+wait_for_ci_redis(Checks, Config) ->
+ timer:sleep(1000),
TestHosts = all_test_hosts(),
case emqx_common_test_helpers:is_all_tcp_servers_available(TestHosts) of
true ->
@@ -96,15 +103,15 @@ init_per_suite(Config) ->
| Config
];
false ->
- assert_ci()
+ wait_for_ci_redis(Checks - 1, Config)
end.
-assert_ci() ->
+redis_checks() ->
case os:getenv("IS_CI") of
"yes" ->
- throw(no_redis);
+ 10;
_ ->
- {skip, no_redis}
+ 1
end.
end_per_suite(_Config) ->
@@ -116,7 +123,7 @@ end_per_suite(_Config) ->
init_per_testcase(_Testcase, Config) ->
ok = delete_all_bridges(),
- case ?config(redis_type, Config) of
+ case ?config(transport_type, Config) of
undefined ->
Config;
RedisType ->
@@ -139,7 +146,7 @@ end_per_testcase(_Testcase, Config) ->
t_create_delete_bridge(Config) ->
Name = <<"mybridge">>,
- Type = ?config(redis_type, Config),
+ Type = ?config(transport_type, Config),
BridgeConfig = ?config(bridge_config, Config),
IsBatch = ?config(is_batch, Config),
?assertMatch(
@@ -425,31 +432,37 @@ redis_connect_configs() ->
#{
redis_single => #{
tcp => #{
- <<"server">> => <<"redis:6379">>
+ <<"server">> => <<"redis:6379">>,
+ <<"redis_type">> => <<"single">>
},
tls => #{
<<"server">> => <<"redis-tls:6380">>,
- <<"ssl">> => redis_connect_ssl_opts(redis_single)
+ <<"ssl">> => redis_connect_ssl_opts(redis_single),
+ <<"redis_type">> => <<"single">>
}
},
redis_sentinel => #{
tcp => #{
<<"servers">> => <<"redis-sentinel:26379">>,
+ <<"redis_type">> => <<"sentinel">>,
<<"sentinel">> => <<"mymaster">>
},
tls => #{
<<"servers">> => <<"redis-sentinel-tls:26380">>,
+ <<"redis_type">> => <<"sentinel">>,
<<"sentinel">> => <<"mymaster">>,
<<"ssl">> => redis_connect_ssl_opts(redis_sentinel)
}
},
redis_cluster => #{
tcp => #{
- <<"servers">> => <<"redis-cluster:7000,redis-cluster:7001,redis-cluster:7002">>
+ <<"servers">> => <<"redis-cluster:7000,redis-cluster:7001,redis-cluster:7002">>,
+ <<"redis_type">> => <<"cluster">>
},
tls => #{
<<"servers">> =>
<<"redis-cluster-tls:8000,redis-cluster-tls:8001,redis-cluster-tls:8002">>,
+ <<"redis_type">> => <<"cluster">>,
<<"ssl">> => redis_connect_ssl_opts(redis_cluster)
}
}
diff --git a/lib-ee/emqx_ee_connector/rebar.config b/lib-ee/emqx_ee_connector/rebar.config
index ab4c88396..262641d44 100644
--- a/lib-ee/emqx_ee_connector/rebar.config
+++ b/lib-ee/emqx_ee_connector/rebar.config
@@ -1,7 +1,7 @@
{erl_opts, [debug_info]}.
{deps, [
{hstreamdb_erl, {git, "https://github.com/hstreamdb/hstreamdb_erl.git", {tag, "0.2.5"}}},
- {influxdb, {git, "https://github.com/emqx/influxdb-client-erl", {tag, "1.1.5"}}},
+ {influxdb, {git, "https://github.com/emqx/influxdb-client-erl", {tag, "1.1.6"}}},
{emqx, {path, "../../apps/emqx"}}
]}.
diff --git a/lib-ee/emqx_ee_connector/src/emqx_ee_connector.app.src b/lib-ee/emqx_ee_connector/src/emqx_ee_connector.app.src
index dfebd75f5..15cafa6a4 100644
--- a/lib-ee/emqx_ee_connector/src/emqx_ee_connector.app.src
+++ b/lib-ee/emqx_ee_connector/src/emqx_ee_connector.app.src
@@ -1,6 +1,6 @@
{application, emqx_ee_connector, [
{description, "EMQX Enterprise connectors"},
- {vsn, "0.1.2"},
+ {vsn, "0.1.3"},
{registered, []},
{applications, [
kernel,
diff --git a/lib-ee/emqx_ee_connector/src/emqx_ee_connector_influxdb.erl b/lib-ee/emqx_ee_connector/src/emqx_ee_connector_influxdb.erl
index 7974bf028..db99c4475 100644
--- a/lib-ee/emqx_ee_connector/src/emqx_ee_connector_influxdb.erl
+++ b/lib-ee/emqx_ee_connector/src/emqx_ee_connector_influxdb.erl
@@ -142,7 +142,11 @@ fields(common) ->
[
{server, server()},
{precision,
- mk(enum([ns, us, ms, s, m, h]), #{
+ %% InfluxDB only supports these 4 precisions:
+ %% See "https://github.com/influxdata/influxdb/blob/
+ %% 6b607288439a991261307518913eb6d4e280e0a7/models/points.go#L487" for
+ %% more information.
+ mk(enum([ns, us, ms, s]), #{
required => false, default => ms, desc => ?DESC("precision")
})}
];
@@ -210,9 +214,7 @@ start_client(InstId, Config) ->
do_start_client(
InstId,
ClientConfig,
- Config = #{
- write_syntax := Lines
- }
+ Config = #{write_syntax := Lines}
) ->
case influxdb:start_client(ClientConfig) of
{ok, Client} ->
@@ -220,7 +222,9 @@ do_start_client(
true ->
State = #{
client => Client,
- write_syntax => to_config(Lines)
+ write_syntax => to_config(
+ Lines, proplists:get_value(precision, ClientConfig)
+ )
},
?SLOG(info, #{
msg => "starting influxdb connector success",
@@ -348,30 +352,33 @@ do_async_query(InstId, Client, Points, ReplyFunAndArgs) ->
%% -------------------------------------------------------------------------------------------------
%% Tags & Fields Config Trans
-to_config(Lines) ->
- to_config(Lines, []).
+to_config(Lines, Precision) ->
+ to_config(Lines, [], Precision).
-to_config([], Acc) ->
+to_config([], Acc, _Precision) ->
lists:reverse(Acc);
-to_config(
- [
- #{
- measurement := Measurement,
- timestamp := Timestamp,
- tags := Tags,
- fields := Fields
- }
- | Rest
- ],
- Acc
-) ->
- Res = #{
- measurement => emqx_plugin_libs_rule:preproc_tmpl(Measurement),
- timestamp => emqx_plugin_libs_rule:preproc_tmpl(Timestamp),
- tags => to_kv_config(Tags),
- fields => to_kv_config(Fields)
+to_config([Item0 | Rest], Acc, Precision) ->
+ Ts = maps:get(timestamp, Item0, undefined),
+ Item = #{
+ measurement => emqx_plugin_libs_rule:preproc_tmpl(maps:get(measurement, Item0)),
+ timestamp => preproc_tmpl_timestamp(Ts, Precision),
+ tags => to_kv_config(maps:get(tags, Item0)),
+ fields => to_kv_config(maps:get(fields, Item0))
},
- to_config(Rest, [Res | Acc]).
+ to_config(Rest, [Item | Acc], Precision).
+
+preproc_tmpl_timestamp(undefined, <<"ns">>) ->
+ erlang:system_time(nanosecond);
+preproc_tmpl_timestamp(undefined, <<"us">>) ->
+ erlang:system_time(microsecond);
+preproc_tmpl_timestamp(undefined, <<"ms">>) ->
+ erlang:system_time(millisecond);
+preproc_tmpl_timestamp(undefined, <<"s">>) ->
+ erlang:system_time(second);
+preproc_tmpl_timestamp(Ts, _) when is_integer(Ts) ->
+ Ts;
+preproc_tmpl_timestamp(Ts, _) when is_binary(Ts); is_list(Ts) ->
+ emqx_plugin_libs_rule:preproc_tmpl(Ts).
to_kv_config(KVfields) ->
maps:fold(fun to_maps_config/3, #{}, proplists:to_map(KVfields)).
@@ -414,7 +421,7 @@ parse_batch_data(InstId, BatchData, SyntaxLines) ->
fields := [{binary(), binary()}],
measurement := binary(),
tags := [{binary(), binary()}],
- timestamp := binary()
+ timestamp := emqx_plugin_libs_rule:tmpl_token() | integer()
}
]) -> {ok, [map()]} | {error, term()}.
data_to_points(Data, SyntaxLines) ->
@@ -430,46 +437,50 @@ lines_to_points(_, [], Points, ErrorPoints) ->
%% ignore trans succeeded points
{error, ErrorPoints}
end;
-lines_to_points(
- Data,
- [
- #{
- measurement := Measurement,
- timestamp := Timestamp,
- tags := Tags,
- fields := Fields
- }
- | Rest
- ],
- ResultPointsAcc,
- ErrorPointsAcc
-) ->
+lines_to_points(Data, [#{timestamp := Ts} = Item | Rest], ResultPointsAcc, ErrorPointsAcc) when
+ is_list(Ts)
+->
TransOptions = #{return => rawlist, var_trans => fun data_filter/1},
- case emqx_plugin_libs_rule:proc_tmpl(Timestamp, Data, TransOptions) of
- [TimestampInt] when is_integer(TimestampInt) ->
- {_, EncodedTags} = maps:fold(fun maps_config_to_data/3, {Data, #{}}, Tags),
- {_, EncodedFields} = maps:fold(fun maps_config_to_data/3, {Data, #{}}, Fields),
- Point = #{
- measurement => emqx_plugin_libs_rule:proc_tmpl(Measurement, Data),
- timestamp => TimestampInt,
- tags => EncodedTags,
- fields => EncodedFields
- },
- case map_size(EncodedFields) =:= 0 of
- true ->
- %% influxdb client doesn't like empty field maps...
- lines_to_points(Data, Rest, ResultPointsAcc, [
- {error, no_fields} | ErrorPointsAcc
- ]);
- false ->
- lines_to_points(Data, Rest, [Point | ResultPointsAcc], ErrorPointsAcc)
- end;
- BadTimestamp ->
+ case emqx_plugin_libs_rule:proc_tmpl(Ts, Data, TransOptions) of
+ [TsInt] when is_integer(TsInt) ->
+ Item1 = Item#{timestamp => TsInt},
+ continue_lines_to_points(Data, Item1, Rest, ResultPointsAcc, ErrorPointsAcc);
+ BadTs ->
lines_to_points(Data, Rest, ResultPointsAcc, [
- {error, {bad_timestamp, BadTimestamp}} | ErrorPointsAcc
+ {error, {bad_timestamp, BadTs}} | ErrorPointsAcc
])
+ end;
+lines_to_points(Data, [#{timestamp := Ts} = Item | Rest], ResultPointsAcc, ErrorPointsAcc) when
+ is_integer(Ts)
+->
+ continue_lines_to_points(Data, Item, Rest, ResultPointsAcc, ErrorPointsAcc).
+
+continue_lines_to_points(Data, Item, Rest, ResultPointsAcc, ErrorPointsAcc) ->
+ case line_to_point(Data, Item) of
+ #{fields := Fields} when map_size(Fields) =:= 0 ->
+ %% influxdb client doesn't like empty field maps...
+ ErrorPointsAcc1 = [{error, no_fields} | ErrorPointsAcc],
+ lines_to_points(Data, Rest, ResultPointsAcc, ErrorPointsAcc1);
+ Point ->
+ lines_to_points(Data, Rest, [Point | ResultPointsAcc], ErrorPointsAcc)
end.
+line_to_point(
+ Data,
+ #{
+ measurement := Measurement,
+ tags := Tags,
+ fields := Fields
+ } = Item
+) ->
+ {_, EncodedTags} = maps:fold(fun maps_config_to_data/3, {Data, #{}}, Tags),
+ {_, EncodedFields} = maps:fold(fun maps_config_to_data/3, {Data, #{}}, Fields),
+ Item#{
+ measurement => emqx_plugin_libs_rule:proc_tmpl(Measurement, Data),
+ tags => EncodedTags,
+ fields => EncodedFields
+ }.
+
maps_config_to_data(K, V, {Data, Res}) ->
KTransOptions = #{return => rawlist, var_trans => fun key_filter/1},
VTransOptions = #{return => rawlist, var_trans => fun data_filter/1},
diff --git a/lib-ee/emqx_ee_connector/src/emqx_ee_connector_mongodb.erl b/lib-ee/emqx_ee_connector/src/emqx_ee_connector_mongodb.erl
new file mode 100644
index 000000000..b1327fef6
--- /dev/null
+++ b/lib-ee/emqx_ee_connector/src/emqx_ee_connector_mongodb.erl
@@ -0,0 +1,78 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_ee_connector_mongodb).
+
+-behaviour(emqx_resource).
+
+-include_lib("emqx_connector/include/emqx_connector_tables.hrl").
+-include_lib("emqx_resource/include/emqx_resource.hrl").
+-include_lib("typerefl/include/types.hrl").
+-include_lib("emqx/include/logger.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
+
+%% `emqx_resource' API
+-export([
+ callback_mode/0,
+ is_buffer_supported/0,
+ on_start/2,
+ on_stop/2,
+ on_query/3,
+ on_get_status/2
+]).
+
+%%========================================================================================
+%% `emqx_resource' API
+%%========================================================================================
+
+callback_mode() -> emqx_connector_mongo:callback_mode().
+
+is_buffer_supported() -> false.
+
+on_start(InstanceId, Config) ->
+ case emqx_connector_mongo:on_start(InstanceId, Config) of
+ {ok, ConnectorState} ->
+ PayloadTemplate0 = maps:get(payload_template, Config, undefined),
+ PayloadTemplate = preprocess_template(PayloadTemplate0),
+ State = #{
+ payload_template => PayloadTemplate,
+ connector_state => ConnectorState
+ },
+ {ok, State};
+ Error ->
+ Error
+ end.
+
+on_stop(InstanceId, _State = #{connector_state := ConnectorState}) ->
+ emqx_connector_mongo:on_stop(InstanceId, ConnectorState).
+
+on_query(InstanceId, {send_message, Message0}, State) ->
+ #{
+ payload_template := PayloadTemplate,
+ connector_state := ConnectorState
+ } = State,
+ Message = render_message(PayloadTemplate, Message0),
+ emqx_connector_mongo:on_query(InstanceId, {send_message, Message}, ConnectorState);
+on_query(InstanceId, Request, _State = #{connector_state := ConnectorState}) ->
+ emqx_connector_mongo:on_query(InstanceId, Request, ConnectorState).
+
+on_get_status(InstanceId, _State = #{connector_state := ConnectorState}) ->
+ emqx_connector_mongo:on_get_status(InstanceId, ConnectorState).
+
+%%========================================================================================
+%% Helper fns
+%%========================================================================================
+
+preprocess_template(undefined = _PayloadTemplate) ->
+ undefined;
+preprocess_template(PayloadTemplate) ->
+ emqx_plugin_libs_rule:preproc_tmpl(PayloadTemplate).
+
+render_message(undefined = _PayloadTemplate, Message) ->
+ Message;
+render_message(PayloadTemplate, Message) ->
+ %% Note: mongo expects a map as a document, so the rendered result
+ %% must be JSON-serializable
+ Rendered = emqx_plugin_libs_rule:proc_tmpl(PayloadTemplate, Message),
+ emqx_json:decode(Rendered, [return_maps]).
diff --git a/lib-ee/emqx_license/i18n/emqx_license_schema_i18n.conf b/lib-ee/emqx_license/i18n/emqx_license_schema_i18n.conf
index d4a91159d..379cb3358 100644
--- a/lib-ee/emqx_license/i18n/emqx_license_schema_i18n.conf
+++ b/lib-ee/emqx_license/i18n/emqx_license_schema_i18n.conf
@@ -3,14 +3,14 @@ emqx_license_schema {
desc {
en: "Defines the EMQX Enterprise license. \n\n"
"\n"
- "The default license has 1000 connections limit, it is "
- "issued on 2023-01-02 and valid for 5 years (1825 days).\n"
+ "The default license has 100 connections limit, it is "
+ "issued on 2023-01-09 and valid for 5 years (1825 days).\n"
"\n"
"EMQX comes with a default trial license. For production use, please \n"
"visit https://www.emqx.com/apply-licenses/emqx to apply."
zh: "EMQX企业许可证。\n"
"EMQX 自带一个默认的试用许可证,"
- "默认试用许可允许最多接入 1000 个连接,签发时间是 2023年1月2日,有效期是 5 年(1825 天)。"
+ "默认试用许可允许最多接入 100 个连接,签发时间是 2023年1月9日,有效期是 5 年(1825 天)。"
"若需要在生产环境部署,\n"
"请访问 https://www.emqx.com/apply-licenses/emqx 来申请。\n"
}
diff --git a/lib-ee/emqx_license/src/emqx_license.app.src b/lib-ee/emqx_license/src/emqx_license.app.src
index 2df1b3797..93ae665d5 100644
--- a/lib-ee/emqx_license/src/emqx_license.app.src
+++ b/lib-ee/emqx_license/src/emqx_license.app.src
@@ -1,6 +1,6 @@
{application, emqx_license, [
{description, "EMQX License"},
- {vsn, "5.0.4"},
+ {vsn, "5.0.5"},
{modules, []},
{registered, [emqx_license_sup]},
{applications, [kernel, stdlib]},
diff --git a/lib-ee/emqx_license/src/emqx_license_schema.erl b/lib-ee/emqx_license/src/emqx_license_schema.erl
index dbac851fe..9d16f697c 100644
--- a/lib-ee/emqx_license/src/emqx_license_schema.erl
+++ b/lib-ee/emqx_license/src/emqx_license_schema.erl
@@ -13,7 +13,7 @@
-behaviour(hocon_schema).
--export([roots/0, fields/1, validations/0, desc/1]).
+-export([roots/0, fields/1, validations/0, desc/1, tags/0]).
-export([
default_license/0,
@@ -31,10 +31,13 @@ roots() ->
)}
].
+tags() ->
+ [<<"License">>].
+
fields(key_license) ->
[
{key, #{
- type => string(),
+ type => binary(),
default => default_license(),
%% so it's not logged
sensitive => true,
@@ -78,11 +81,13 @@ check_license_watermark(Conf) ->
%% @doc The default license key.
%% This default license has 1000 connections limit.
-%% It is issued on 2023-01-02 and valid for 5 years (1825 days)
+%% It is issued on 2023-01-09 and valid for 5 years (1825 days)
%% NOTE: when updating a new key, the schema doc in emqx_license_schema_i18n.conf
%% should be updated accordingly
default_license() ->
- "MjIwMTExCjAKMTAKRXZhbHVhdGlvbgpjb250YWN0QGVtcXguaW8KZ"
- "GVmYXVsdAoyMDIzMDEwMgoxODI1CjEwMDAK.MEQCIGEuYO8KxSh5d"
- "1WanqHG41OOjHEVkU8ChnyoOTry2FFUAiA+vPBAH8yhcGuzMUX1ER"
- "kf6nY+xrVSKxnsx0GivANEXA==".
+ <<
+ "MjIwMTExCjAKMTAKRXZhbHVhdGlvbgpjb250YWN0QGVtcXguaW8KZ"
+ "GVmYXVsdAoyMDIzMDEwOQoxODI1CjEwMAo=.MEUCIG62t8W15g05f"
+ "1cKx3tA3YgJoR0dmyHOPCdbUxBGxgKKAiEAhHKh8dUwhU+OxNEaOn"
+ "8mgRDtiT3R8RZooqy6dEsOmDI="
+ >>.
diff --git a/mix.exs b/mix.exs
index 9c8d6ff59..27d9b43ce 100644
--- a/mix.exs
+++ b/mix.exs
@@ -47,7 +47,7 @@ defmodule EMQXUmbrella.MixProject do
{:lc, github: "emqx/lc", tag: "0.3.2", override: true},
{:redbug, "2.0.8"},
{:typerefl, github: "ieQu1/typerefl", tag: "0.9.1", override: true},
- {:ehttpc, github: "emqx/ehttpc", tag: "0.4.2", override: true},
+ {:ehttpc, github: "emqx/ehttpc", tag: "0.4.3", override: true},
{:gproc, github: "uwiger/gproc", tag: "0.8.0", override: true},
{:jiffy, github: "emqx/jiffy", tag: "1.0.5", override: true},
{:cowboy, github: "emqx/cowboy", tag: "2.9.0", override: true},
@@ -68,8 +68,8 @@ defmodule EMQXUmbrella.MixProject do
# in conflict by emqtt and hocon
{:getopt, "1.0.2", override: true},
{:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "1.0.0", override: true},
- {:hocon, github: "emqx/hocon", tag: "0.33.0", override: true},
- {:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.1", override: true},
+ {:hocon, github: "emqx/hocon", tag: "0.35.0", override: true},
+ {:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.2", override: true},
{:esasl, github: "emqx/esasl", tag: "0.2.0"},
{:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"},
# in conflict by ehttpc and emqtt
@@ -131,7 +131,7 @@ defmodule EMQXUmbrella.MixProject do
defp enterprise_deps(_profile_info = %{edition_type: :enterprise}) do
[
{:hstreamdb_erl, github: "hstreamdb/hstreamdb_erl", tag: "0.2.5"},
- {:influxdb, github: "emqx/influxdb-client-erl", tag: "1.1.4", override: true},
+ {:influxdb, github: "emqx/influxdb-client-erl", tag: "1.1.6", override: true},
{:wolff, github: "kafka4beam/wolff", tag: "1.7.4"},
{:kafka_protocol, github: "kafka4beam/kafka_protocol", tag: "4.1.2", override: true},
{:brod_gssapi, github: "kafka4beam/brod_gssapi", tag: "v0.1.0-rc1"},
diff --git a/rebar.config b/rebar.config
index 3d19d2181..f05624477 100644
--- a/rebar.config
+++ b/rebar.config
@@ -49,7 +49,7 @@
, {gpb, "4.19.5"} %% gpb only used to build, but not for release, pin it here to avoid fetching a wrong version due to rebar plugins scattered in all the deps
, {typerefl, {git, "https://github.com/ieQu1/typerefl", {tag, "0.9.1"}}}
, {gun, {git, "https://github.com/emqx/gun", {tag, "1.3.9"}}}
- , {ehttpc, {git, "https://github.com/emqx/ehttpc", {tag, "0.4.2"}}}
+ , {ehttpc, {git, "https://github.com/emqx/ehttpc", {tag, "0.4.3"}}}
, {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}}
, {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}}
@@ -68,8 +68,8 @@
, {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.3"}}}
, {getopt, "1.0.2"}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.0"}}}
- , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.33.0"}}}
- , {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.1"}}}
+ , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.35.0"}}}
+ , {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}}
, {esasl, {git, "https://github.com/emqx/esasl", {tag, "0.2.0"}}}
, {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}}
, {telemetry, "1.1.0"}
diff --git a/scripts/ct/run.sh b/scripts/ct/run.sh
index 7fb9e00fd..3a7b40317 100755
--- a/scripts/ct/run.sh
+++ b/scripts/ct/run.sh
@@ -11,7 +11,6 @@ help() {
echo
echo "-h|--help: To display this usage info"
echo "--app lib_dir/app_name: For which app to run start docker-compose, and run common tests"
- echo "--suites SUITE1,SUITE2: Comma separated SUITE names to run. e.g. apps/emqx/test/emqx_SUITE.erl"
echo "--console: Start EMQX in console mode but do not run test cases"
echo "--attach: Attach to the Erlang docker container without running any test case"
echo "--stop: Stop running containers for the given app"
diff --git a/scripts/docker-create-push-manifests.sh b/scripts/docker-create-push-manifests.sh
deleted file mode 100755
index db9c01bfb..000000000
--- a/scripts/docker-create-push-manifests.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-##!/usr/bin/env bash
-set -exuo pipefail
-
-img_amd64=$1
-push_latest=${2:-false}
-
-img_arm64=$(echo ${img_amd64} | sed 's/-amd64$/-arm64/g')
-img_name=${img_amd64%-amd64}
-docker pull "$img_amd64"
-docker pull --platform linux/arm64 "$img_arm64"
-img_amd64_digest=$(docker inspect --format='{{index .RepoDigests 0}}' "$img_amd64")
-img_arm64_digest=$(docker inspect --format='{{index .RepoDigests 0}}' "$img_arm64")
-echo "sha256 of amd64 is $img_amd64_digest"
-echo "sha256 of arm64 is $img_arm64_digest"
-docker manifest create "${img_name}" \
- --amend "$img_amd64_digest" \
- --amend "$img_arm64_digest"
-docker manifest push "${img_name}"
-
-# PUSH latest if it is a release build
-if [ "$push_latest" = "true" ]; then
- img_latest=$(echo "$img_arm64" | cut -d: -f 1):latest
- docker manifest create "${img_latest}" \
- --amend "$img_amd64_digest" \
- --amend "$img_arm64_digest"
- docker manifest push "${img_latest}"
-fi