commit 1690a6dcfc
Merge branch 'master' into dev/api-refactor

@@ -20,8 +20,3 @@ indent_size = 4
 # Tab indentation (no size specified)
 [Makefile]
 indent_style = tab
-
-# Matches the exact files either package.json or .travis.yml
-[{.travis.yml}]
-indent_style = space
-indent_size = 2
@@ -1,81 +0,0 @@
-name: 'Docker meta'
-inputs:
-  profile:
-    required: true
-    type: string
-  registry:
-    required: true
-    type: string
-  arch:
-    required: true
-    type: string
-  otp:
-    required: true
-    type: string
-  elixir:
-    required: false
-    type: string
-    default: ''
-  builder_base:
-    required: true
-    type: string
-  owner:
-    required: true
-    type: string
-  docker_tags:
-    required: true
-    type: string
-
-outputs:
-  emqx_name:
-    description: "EMQX name"
-    value: ${{ steps.pre-meta.outputs.emqx_name }}
-  version:
-    description: "docker image version"
-    value: ${{ steps.meta.outputs.version }}
-  tags:
-    description: "docker image tags"
-    value: ${{ steps.meta.outputs.tags }}
-  labels:
-    description: "docker image labels"
-    value: ${{ steps.meta.outputs.labels }}
-
-runs:
-  using: composite
-  steps:
-    - name: prepare for docker/metadata-action
-      id: pre-meta
-      shell: bash
-      run: |
-        emqx_name=${{ inputs.profile }}
-        img_suffix=${{ inputs.arch }}
-        img_labels="org.opencontainers.image.otp.version=${{ inputs.otp }}"
-        if [ -n "${{ inputs.elixir }}" ]; then
-          emqx_name="emqx-elixir"
-          img_suffix="elixir-${{ inputs.arch }}"
-          img_labels="org.opencontainers.image.elixir.version=${{ inputs.elixir }}\n${img_labels}"
-        fi
-        if [ "${{ inputs.profile }}" = "emqx" ]; then
-          img_labels="org.opencontainers.image.edition=Opensource\n${img_labels}"
-        fi
-        if [ "${{ inputs.profile }}" = "emqx-enterprise" ]; then
-          img_labels="org.opencontainers.image.edition=Enterprise\n${img_labels}"
-        fi
-        if [[ "${{ inputs.builder_base }}" =~ "alpine" ]]; then
-          img_suffix="${img_suffix}-alpine"
-        fi
-        echo "emqx_name=${emqx_name}" >> $GITHUB_OUTPUT
-        echo "img_suffix=${img_suffix}" >> $GITHUB_OUTPUT
-        echo "img_labels=${img_labels}" >> $GITHUB_OUTPUT
-        echo "img_name=${{ inputs.registry }}/${{ inputs.owner }}/${{ inputs.profile }}" >> $GITHUB_OUTPUT
-    - uses: docker/metadata-action@v4
-      id: meta
-      with:
-        images:
-          ${{ steps.pre-meta.outputs.img_name }}
-        flavor: |
-          suffix=-${{ steps.pre-meta.outputs.img_suffix }}
-        tags: |
-          type=raw,value=${{ inputs.docker_tags }}
-        labels:
-          ${{ steps.pre-meta.outputs.img_labels }}
@@ -9,15 +9,17 @@ on:
     tags:
       - v*
       - e*
-  release:
-    types:
-      - published
+      - docker-latest-*
   workflow_dispatch:
     inputs:
       branch_or_tag:
         required: false
       profile:
         required: false
+        default: 'emqx'
+      is_latest:
+        required: false
+        default: false

 jobs:
   prepare:
@@ -26,10 +28,11 @@ jobs:
     container: "ghcr.io/emqx/emqx-builder/5.0-26:1.13.4-24.3.4.2-1-ubuntu20.04"

     outputs:
-      BUILD_PROFILE: ${{ steps.get_profile.outputs.BUILD_PROFILE }}
-      IS_DOCKER_LATEST: ${{ steps.get_profile.outputs.IS_DOCKER_LATEST }}
+      PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
+      EDITION: ${{ steps.get_profile.outputs.EDITION }}
+      IS_LATEST: ${{ steps.get_profile.outputs.IS_LATEST }}
       IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }}
-      DOCKER_TAG_VERSION: ${{ steps.get_profile.outputs.DOCKER_TAG_VERSION }}
+      VERSION: ${{ steps.get_profile.outputs.VERSION }}

     steps:
       - uses: actions/checkout@v3
@@ -45,14 +48,14 @@ jobs:
          tag=${{ github.ref }}
          # tag docker-latest-ce or docker-latest-ee
          if git describe --tags --exact --match 'docker-latest-*' 2>/dev/null; then
-            echo 'docker_latest=true due to docker-latest-* tag'
-            docker_latest=true
-          elif [ "${{ github.event_name }}" = "release" ]; then
-            echo 'docker_latest=true due to release'
-            docker_latest=true
+            echo 'is_latest=true due to docker-latest-* tag'
+            is_latest=true
+          elif [ "${{ inputs.is_latest }}" = "true" ]; then
+            echo 'is_latest=true due to manual input from workflow_dispatch'
+            is_latest=true
          else
-            echo 'docker_latest=false'
-            docker_latest=false
+            echo 'is_latest=false'
+            is_latest=false
          fi
          if git describe --tags --match "[v|e]*" --exact; then
            echo "This is an exact git tag, will publish images"
@@ -64,18 +67,20 @@ jobs:
          case $tag in
            refs/tags/v*)
              PROFILE='emqx'
+              EDITION='Opensource'
              ;;
            refs/tags/e*)
              PROFILE=emqx-enterprise
+              EDITION='Enterprise'
              ;;
            *)
              PROFILE=${{ github.event.inputs.profile }}
              case "$PROFILE" in
                emqx)
-                  true
+                  EDITION='Opensource'
                  ;;
                emqx-enterprise)
-                  true
+                  EDITION='Enterprise'
                  ;;
                *)
                  echo "ERROR: Failed to resolve build profile"
@@ -85,14 +90,18 @@ jobs:
              ;;
          esac
          VSN="$(./pkg-vsn.sh "$PROFILE")"
-          echo "Building $PROFILE image with tag $VSN (latest=$docker_latest)"
-          echo "IS_DOCKER_LATEST=$docker_latest" >> $GITHUB_OUTPUT
+          echo "Building emqx/$PROFILE:$VSN image (latest=$is_latest)"
+          echo "Push = $is_exact"
+          echo "IS_LATEST=$is_latest" >> $GITHUB_OUTPUT
          echo "IS_EXACT_TAG=$is_exact" >> $GITHUB_OUTPUT
-          echo "BUILD_PROFILE=$PROFILE" >> $GITHUB_OUTPUT
-          echo "DOCKER_TAG_VERSION=$VSN" >> $GITHUB_OUTPUT
+          echo "PROFILE=$PROFILE" >> $GITHUB_OUTPUT
+          echo "EDITION=$EDITION" >> $GITHUB_OUTPUT
+          echo "VERSION=$VSN" >> $GITHUB_OUTPUT
      - name: get_all_deps
+        env:
+          PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
        run: |
-          make -C source deps-all
+          PROFILE=$PROFILE make -C source deps-$PROFILE
          zip -ryq source.zip source/* source/.[^.]*
      - uses: actions/upload-artifact@v3
        with:
@@ -100,17 +109,17 @@ jobs:
          path: source.zip

  docker:
-    runs-on: ${{ matrix.arch[1] }}
+    runs-on: ubuntu-20.04
    needs: prepare

    strategy:
      fail-fast: false
      matrix:
-        arch:
-          - [amd64, ubuntu-20.04]
-          - [arm64, aws-arm64]
        profile:
-          - ${{ needs.prepare.outputs.BUILD_PROFILE }}
+          - "${{ needs.prepare.outputs.PROFILE }}"
+        flavor:
+          - ''
+          - '-elixir'
        registry:
          - 'docker.io'
          - 'public.ecr.aws'
@@ -128,9 +137,10 @@ jobs:
        exclude: # TODO: publish enterprise to ecr too?
          - registry: 'public.ecr.aws'
            profile: emqx-enterprise
+          - flavor: '-elixir'
+            os: [alpine3.15.1, "alpine:3.15.1", "deploy/docker/Dockerfile.alpine"]

    steps:
-      - uses: AutoModality/action-clean@v1
-        if: matrix.arch[1] == 'aws-arm64'
      - uses: actions/download-artifact@v3
        with:
          name: source
@@ -138,16 +148,17 @@ jobs:
      - name: unzip source code
        run: unzip -q source.zip

+      - uses: docker/setup-qemu-action@v2
      - uses: docker/setup-buildx-action@v2

-      - name: Login for docker.
+      - name: Login to hub.docker.com
        uses: docker/login-action@v2
        if: matrix.registry == 'docker.io'
        with:
          username: ${{ secrets.DOCKER_HUB_USER }}
          password: ${{ secrets.DOCKER_HUB_TOKEN }}

-      - name: Login for AWS ECR
+      - name: Login to AWS ECR
        uses: docker/login-action@v2
        if: matrix.registry == 'public.ecr.aws'
        with:
@@ -156,229 +167,48 @@ jobs:
          password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          ecr: true

-      - uses: ./source/.github/actions/docker-meta
+      - name: prepare for docker/metadata-action
+        id: pre-meta
+        shell: bash
+        run: |
+          extra_labels=
+          img_suffix=
+          flavor="${{ matrix.flavor }}"
+          if [ "${{ matrix.flavor }}" = '-elixir' ]; then
+            img_suffix="-elixir"
+            extra_labels="org.opencontainers.image.elixir.version=${{ matrix.elixir }}"
+          fi
+          if [[ "${{ matrix.os[0] }}" =~ "alpine" ]]; then
+            img_suffix="${img_suffix}-alpine"
+          fi
+
+          echo "img_suffix=$img_suffix" >> $GITHUB_OUTPUT
+          echo "extra_labels=$extra_labels" >> $GITHUB_OUTPUT
+
+      - uses: docker/metadata-action@v4
        id: meta
        with:
-          profile: ${{ matrix.profile }}
-          registry: ${{ matrix.registry }}
-          arch: ${{ matrix.arch[0] }}
-          otp: ${{ matrix.otp }}
-          builder_base: ${{ matrix.os[0] }}
-          owner: ${{ github.repository_owner }}
-          docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
+          images: |
+            ${{ matrix.registry }}/${{ github.repository_owner }}/${{ matrix.profile }}
+          flavor: |
+            suffix=${{ steps.pre-meta.outputs.img_suffix }}
+          tags: |
+            type=raw,value=${{ needs.prepare.outputs.VERSION }}
+            type=raw,value=latest,enable=${{ needs.prepare.outputs.IS_LATEST }}
+          labels: |
+            org.opencontainers.image.otp.version=${{ matrix.otp }}
+            org.opencontainers.image.edition=${{ needs.prepare.outputs.EDITION }}
+            ${{ steps.pre-meta.outputs.extra_labels }}

      - uses: docker/build-push-action@v3
        with:
          push: ${{ needs.prepare.outputs.IS_EXACT_TAG == 'true' || github.repository_owner != 'emqx' }}
          pull: true
          no-cache: true
-          platforms: linux/${{ matrix.arch[0] }}
+          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
-            BUILD_FROM=ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }}
-            RUN_FROM=${{ matrix.os[1] }}
-            EMQX_NAME=${{ steps.meta.outputs.emqx_name }}
+            EMQX_NAME=${{ matrix.profile }}${{ matrix.flavor }}
          file: source/${{ matrix.os[2] }}
          context: source

-      - name: Docker Hub Description
-        if: matrix.registry == 'docker.io'
-        uses: peter-evans/dockerhub-description@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_PASSWORD }}
-          repository: "emqx/${{ needs.prepare.outputs.BUILD_PROFILE }}"
-          readme-filepath: ./source/deploy/docker/README.md
-          short-description: "The most scalable open-source MQTT broker for IoT, IIoT, connected vehicles, and more."
-
-  docker-elixir:
-    runs-on: ${{ matrix.arch[1] }}
-    needs: prepare
-    # do not build elixir images for ee for now
-    if: needs.prepare.outputs.BUILD_PROFILE == 'emqx'
-
-    strategy:
-      fail-fast: false
-      matrix:
-        arch:
-          - [amd64, ubuntu-20.04]
-          - [arm64, aws-arm64]
-        profile:
-          - ${{ needs.prepare.outputs.BUILD_PROFILE }}
-        registry:
-          - 'docker.io'
-        os:
-          - [debian11, "debian:11-slim", "deploy/docker/Dockerfile"]
-        builder:
-          - 5.0-26 # update to latest
-        otp:
-          - 25.1.2-2 # update to latest
-        elixir:
-          - 1.13.4 # update to latest
-
-    steps:
-      - uses: AutoModality/action-clean@v1
-        if: matrix.arch[1] == 'aws-arm64'
-      - uses: actions/download-artifact@v3
-        with:
-          name: source
-          path: .
-      - name: unzip source code
-        run: unzip -q source.zip
-
-      - uses: docker/setup-buildx-action@v2
-
-      - name: Login for docker.
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKER_HUB_USER }}
-          password: ${{ secrets.DOCKER_HUB_TOKEN }}
-
-      - uses: ./source/.github/actions/docker-meta
-        id: meta
-        with:
-          profile: ${{ matrix.profile }}
-          registry: ${{ matrix.registry }}
-          arch: ${{ matrix.arch[0] }}
-          otp: ${{ matrix.otp }}
-          elixir: ${{ matrix.elixir }}
-          builder_base: ${{ matrix.os[0] }}
-          owner: ${{ github.repository_owner }}
-          docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
-
-      - uses: docker/build-push-action@v3
-        with:
-          push: ${{ needs.prepare.outputs.IS_EXACT_TAG == 'true' || github.repository_owner != 'emqx' }}
-          pull: true
-          no-cache: true
-          platforms: linux/${{ matrix.arch[0] }}
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-          build-args: |
-            BUILD_FROM=ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }}
-            RUN_FROM=${{ matrix.os[1] }}
-            EMQX_NAME=${{ steps.meta.outputs.emqx_name }}
-          file: source/${{ matrix.os[2] }}
-          context: source
-
-  docker-push-multi-arch-manifest:
-    # note, we only run on amd64
-    if: needs.prepare.outputs.IS_EXACT_TAG
-    needs:
-      - prepare
-      - docker
-    runs-on: ${{ matrix.arch[1] }}
-    strategy:
-      fail-fast: false
-      matrix:
-        arch:
-          - [amd64, ubuntu-20.04]
-        profile:
-          - ${{ needs.prepare.outputs.BUILD_PROFILE }}
-        os:
-          - [alpine3.15.1, "alpine:3.15.1", "deploy/docker/Dockerfile.alpine"]
-          - [debian11, "debian:11-slim", "deploy/docker/Dockerfile"]
-        # NOTE: only support latest otp version, not a matrix
-        otp:
-          - 24.3.4.2-1 # switch to 25 once ready to release 5.1
-        registry:
-          - 'docker.io'
-          - 'public.ecr.aws'
-        exclude:
-          - registry: 'public.ecr.aws'
-            profile: emqx-enterprise
-
-    steps:
-      - uses: actions/download-artifact@v3
-        with:
-          name: source
-          path: .
-
-      - name: unzip source code
-        run: unzip -q source.zip
-
-      - uses: docker/login-action@v2
-        if: matrix.registry == 'docker.io'
-        with:
-          username: ${{ secrets.DOCKER_HUB_USER }}
-          password: ${{ secrets.DOCKER_HUB_TOKEN }}
-
-      - uses: docker/login-action@v2
-        if: matrix.registry == 'public.ecr.aws'
-        with:
-          registry: public.ecr.aws
-          username: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          ecr: true
-
-      - uses: ./source/.github/actions/docker-meta
-        id: meta
-        with:
-          profile: ${{ matrix.profile }}
-          registry: ${{ matrix.registry }}
-          arch: ${{ matrix.arch[0] }}
-          otp: ${{ matrix.otp }}
-          builder_base: ${{ matrix.os[0] }}
-          owner: ${{ github.repository_owner }}
-          docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
-
-      - name: update manifest for multiarch image
-        working-directory: source
-        run: |
-          is_latest="${{ needs.prepare.outputs.IS_DOCKER_LATEST }}"
-          scripts/docker-create-push-manifests.sh "${{ steps.meta.outputs.tags }}" "$is_latest"
-
-  docker-elixir-push-multi-arch-manifest:
-    # note, we only run on amd64
-    # do not build enterprise elixir images for now
-    if: needs.prepare.outputs.IS_EXACT_TAG == 'true' && needs.prepare.outputs.BUILD_PROFILE == 'emqx'
-    needs:
-      - prepare
-      - docker-elixir
-    runs-on: ${{ matrix.arch[1] }}
-    strategy:
-      fail-fast: false
-      matrix:
-        arch:
-          - [amd64, ubuntu-20.04]
-        profile:
-          - ${{ needs.prepare.outputs.BUILD_PROFILE }}
-        # NOTE: for docker, only support latest otp version, not a matrix
-        otp:
-          - 25.1.2-2 # update to latest
-        elixir:
-          - 1.13.4 # update to latest
-        registry:
-          - 'docker.io'
-
-    steps:
-      - uses: actions/download-artifact@v3
-        with:
-          name: source
-          path: .
-
-      - name: unzip source code
-        run: unzip -q source.zip
-
-      - uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKER_HUB_USER }}
-          password: ${{ secrets.DOCKER_HUB_TOKEN }}
-
-      - uses: ./source/.github/actions/docker-meta
-        id: meta
-        with:
-          profile: ${{ matrix.profile }}
-          registry: ${{ matrix.registry }}
-          arch: ${{ matrix.arch[0] }}
-          otp: ${{ matrix.otp }}
-          elixir: ${{ matrix.elixir }}
-          builder_base: ${{ matrix.os[0] }}
-          owner: ${{ github.repository_owner }}
-          docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
-
-      - name: update manifest for multiarch image
-        working-directory: source
-        run: |
-          scripts/docker-create-push-manifests.sh "${{ steps.meta.outputs.tags }}" false
@@ -201,12 +201,25 @@ jobs:
            echo "waiting emqx started";
            sleep 10;
          done
+      - name: Get Token
+        timeout-minutes: 1
+        run: |
+          kubectl port-forward service/${{ matrix.profile }} 18083:18083 > /dev/null &
+
+          while
+            [ "$(curl --silent -X 'GET' 'http://127.0.0.1:18083/api/v5/status' | tail -n1)" != "emqx is running" ]
+          do
+            echo "waiting emqx"
+            sleep 1
+          done
+
+          echo "TOKEN=$(curl --silent -X 'POST' 'http://127.0.0.1:18083/api/v5/login' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"username": "admin","password": "public"}' | jq -r ".token")" >> $GITHUB_ENV
+
      - name: Check cluster
        timeout-minutes: 10
        run: |
-          kubectl port-forward service/${{ matrix.profile }} 18083:18083 > /dev/null &
          while
-            [ "$(curl --silent --basic -u admin:public -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" != "3" ];
+            [ "$(curl --silent -H "Authorization: Bearer $TOKEN" -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" != "3" ];
          do
            echo "waiting ${{ matrix.profile }} cluster scale"
            sleep 1
@@ -92,7 +92,7 @@ jobs:
      - uses: actions/checkout@v3
        with:
          repository: emqx/emqx-fvt
-          ref: broker-autotest-v3
+          ref: broker-autotest-v5
          path: scripts
      - uses: actions/setup-java@v3
        with:
@@ -191,7 +191,7 @@ jobs:
      - uses: actions/checkout@v3
        with:
          repository: emqx/emqx-fvt
-          ref: broker-autotest-v3
+          ref: broker-autotest-v5
          path: scripts
      - uses: actions/setup-java@v3
        with:
@@ -297,7 +297,7 @@ jobs:
      - uses: actions/checkout@v3
        with:
          repository: emqx/emqx-fvt
-          ref: broker-autotest-v3
+          ref: broker-autotest-v5
          path: scripts
      - uses: actions/setup-java@v3
        with:
@@ -396,7 +396,7 @@ jobs:
      - uses: actions/checkout@v3
        with:
          repository: emqx/emqx-fvt
-          ref: broker-autotest-v3
+          ref: broker-autotest-v5
          path: scripts
      - name: run jwks_server
        timeout-minutes: 10
@@ -496,7 +496,7 @@ jobs:
      - uses: actions/checkout@v3
        with:
          repository: emqx/emqx-fvt
-          ref: broker-autotest-v3
+          ref: broker-autotest-v5
          path: scripts
      - uses: actions/setup-java@v3
        with:
@@ -55,7 +55,7 @@ Must be one of the following:
 - **chore**: Updating grunt tasks etc; no production code change
 - **perf**: A code change that improves performance
 - **test**: Adding missing tests, refactoring tests; no production code change
-- **build**: Changes that affect the CI/CD pipeline or build system or external dependencies (example scopes: travis, jenkins, makefile)
+- **build**: Changes that affect the CI/CD pipeline or build system or external dependencies (example scopes: jenkins, makefile)
 - **ci**: Changes provided by DevOps for CI purposes.
 - **revert**: Reverts a previous commit.
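[Note: as a worked instance of the convention above, the dashboard version bump in the Makefile below would conventionally carry a subject line such as `build(makefile): bump dashboard versions`; the subject is illustrative, not the actual commit title.]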
Makefile
@@ -6,8 +6,8 @@ export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.0-26:1.13.4-24.3.4.2-1
 export EMQX_DEFAULT_RUNNER = debian:11-slim
 export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
 export ELIXIR_VSN ?= $(shell $(CURDIR)/scripts/get-elixir-vsn.sh)
-export EMQX_DASHBOARD_VERSION ?= v1.1.4
-export EMQX_EE_DASHBOARD_VERSION ?= e1.0.1-beta.9
+export EMQX_DASHBOARD_VERSION ?= v1.1.5
+export EMQX_EE_DASHBOARD_VERSION ?= e1.0.1-beta.12
 export EMQX_REL_FORM ?= tgz
 export QUICER_DOWNLOAD_FROM_RELEASE = 1
 ifeq ($(OS),Windows_NT)
@@ -1,7 +1,7 @@
 # EMQX

 [](https://github.com/emqx/emqx/releases)
-[](https://travis-ci.org/emqx/emqx)
+[](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
 [](https://coveralls.io/github/emqx/emqx?branch=master)
 [](https://hub.docker.com/r/emqx/emqx)
 [](https://slack-invite.emqx.io/)
@@ -1,7 +1,7 @@
 # Брокер EMQX

 [](https://github.com/emqx/emqx/releases)
-[](https://travis-ci.org/emqx/emqx)
+[](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
 [](https://coveralls.io/github/emqx/emqx?branch=master)
 [](https://hub.docker.com/r/emqx/emqx)
 [](https://slack-invite.emqx.io/)
@@ -1,7 +1,7 @@
 # EMQX

 [](https://github.com/emqx/emqx/releases)
-[](https://travis-ci.org/emqx/emqx)
+[](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
 [](https://coveralls.io/github/emqx/emqx?branch=master)
 [](https://hub.docker.com/r/emqx/emqx)
 [](https://slack-invite.emqx.io/)
@@ -2050,7 +2050,7 @@ base_listener_enable_authn {
 Set <code>true</code> (default) to enable client authentication on this listener, the authentication
 process goes through the configured authentication chain.
 When set to <code>false</code> to allow any clients with or without authentication information such as username or password to log in.
-When set to <code>quick_deny_anonymous<code>, it behaves like when set to <code>true</code> but clients will be
+When set to <code>quick_deny_anonymous</code>, it behaves like when set to <code>true</code>, but clients will be
 denied immediately without going through any authenticators if <code>username</code> is not provided. This is useful to fence off
 anonymous clients early.
 """
@@ -32,7 +32,7 @@
 %% `apps/emqx/src/bpapi/README.md'

 %% Community edition
--define(EMQX_RELEASE_CE, "5.0.13").
+-define(EMQX_RELEASE_CE, "5.0.14").

 %% Enterprise edition
 -define(EMQX_RELEASE_EE, "5.0.0-beta.6").
@@ -15,10 +15,8 @@
 %%--------------------------------------------------------------------

 %% HTTP API Auth
--define(WRONG_USERNAME_OR_PWD, 'WRONG_USERNAME_OR_PWD').
--define(WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET,
-    'WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET'
-).
+-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
+-define(BAD_API_KEY_OR_SECRET, 'BAD_API_KEY_OR_SECRET').

 %% Bad Request
 -define(BAD_REQUEST, 'BAD_REQUEST').
@@ -57,8 +55,8 @@

 %% All codes
 -define(ERROR_CODES, [
-    {'WRONG_USERNAME_OR_PWD', <<"Wrong username or pwd">>},
-    {'WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET', <<"Wrong username & pwd or key & secret">>},
+    {?BAD_USERNAME_OR_PWD, <<"Bad username or password">>},
+    {?BAD_API_KEY_OR_SECRET, <<"Bad API key or secret">>},
     {'BAD_REQUEST', <<"Request parameters are not legal">>},
     {'NOT_MATCH', <<"Conditions are not matched">>},
     {'ALREADY_EXISTS', <<"Resource already existed">>},
@@ -29,7 +29,7 @@
     {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}},
     {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.13.7"}}},
     {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
-    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.33.0"}}},
+    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.35.0"}}},
     {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
     {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
     {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.0"}}}
@@ -3,7 +3,7 @@
     {id, "emqx"},
     {description, "EMQX Core"},
     % strict semver, bump manually!
-    {vsn, "5.0.14"},
+    {vsn, "5.0.15"},
     {modules, []},
     {registered, []},
     {applications, [
@@ -362,8 +362,8 @@ schema_default(Schema) ->
             [];
         ?LAZY(?ARRAY(_)) ->
             [];
-        ?LAZY(?UNION(Unions)) ->
-            case [A || ?ARRAY(A) <- Unions] of
+        ?LAZY(?UNION(Members)) ->
+            case [A || ?ARRAY(A) <- hoconsc:union_members(Members)] of
                 [_ | _] -> [];
                 _ -> #{}
             end;
@@ -402,7 +402,6 @@ merge_envs(SchemaMod, RawConf) ->
         required => false,
         format => map,
         apply_override_envs => true,
-        remove_env_meta => true,
         check_lazy => true
     },
     hocon_tconf:merge_env_overrides(SchemaMod, RawConf, all, Opts).
@@ -21,7 +21,8 @@
     format_path/1,
     check/2,
     format_error/1,
-    format_error/2
+    format_error/2,
+    make_schema/1
 ]).

 %% @doc Format hocon config field path to dot-separated string in iolist format.
@@ -79,6 +80,9 @@ format_error({_Schema, [#{kind := K} = First | Rest] = All}, Opts) when
 format_error(_Other, _) ->
     false.

+make_schema(Fields) ->
+    #{roots => Fields, fields => #{}}.
+
 %% Ensure iolist()
 iol(B) when is_binary(B) -> B;
 iol(A) when is_atom(A) -> atom_to_binary(A, utf8);
@@ -57,6 +57,10 @@

 -export([format_bind/1]).

+-ifdef(TEST).
+-export([certs_dir/2]).
+-endif.
+
 -define(CONF_KEY_PATH, [listeners, '?', '?']).
 -define(TYPES_STRING, ["tcp", "ssl", "ws", "wss", "quic"]).
@@ -415,6 +419,7 @@ pre_config_update(_Path, _Request, RawConf) ->
 post_config_update([listeners, Type, Name], {create, _Request}, NewConf, undefined, _AppEnvs) ->
     start_listener(Type, Name, NewConf);
 post_config_update([listeners, Type, Name], {update, _Request}, NewConf, OldConf, _AppEnvs) ->
+    try_clear_ssl_files(certs_dir(Type, Name), NewConf, OldConf),
     case NewConf of
         #{enabled := true} -> restart_listener(Type, Name, {OldConf, NewConf});
         _ -> ok
@@ -670,7 +675,7 @@ certs_dir(Type, Name) ->
     iolist_to_binary(filename:join(["listeners", Type, Name])).

 convert_certs(CertsDir, Conf) ->
-    case emqx_tls_lib:ensure_ssl_files(CertsDir, maps:get(<<"ssl_options">>, Conf, undefined)) of
+    case emqx_tls_lib:ensure_ssl_files(CertsDir, get_ssl_options(Conf)) of
         {ok, undefined} ->
             Conf;
         {ok, SSL} ->
@@ -681,7 +686,7 @@ convert_certs(CertsDir, Conf) ->
     end.

 clear_certs(CertsDir, Conf) ->
-    OldSSL = maps:get(<<"ssl_options">>, Conf, undefined),
+    OldSSL = get_ssl_options(Conf),
     emqx_tls_lib:delete_ssl_files(CertsDir, undefined, OldSSL).

 filter_stacktrace({Reason, _Stacktrace}) -> Reason;
@@ -692,3 +697,16 @@ ensure_override_limiter_conf(Conf, #{<<"limiter">> := Limiter}) ->
     Conf#{<<"limiter">> => Limiter};
 ensure_override_limiter_conf(Conf, _) ->
     Conf.
+
+try_clear_ssl_files(CertsDir, NewConf, OldConf) ->
+    NewSSL = get_ssl_options(NewConf),
+    OldSSL = get_ssl_options(OldConf),
+    emqx_tls_lib:delete_ssl_files(CertsDir, NewSSL, OldSSL).
+
+get_ssl_options(Conf) ->
+    case maps:find(ssl_options, Conf) of
+        {ok, SSL} ->
+            SSL;
+        error ->
+            maps:get(<<"ssl_options">>, Conf, undefined)
+    end.
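[Note: the new `get_ssl_options/1` tries the atom key first and falls back to the binary key, so it works on both checked configs (atom keys) and raw configs (binary keys). A minimal sketch of the expected behaviour, with a placeholder options map:]

    SSL = #{verify => verify_none},
    SSL = get_ssl_options(#{ssl_options => SSL}),        %% checked config
    SSL = get_ssl_options(#{<<"ssl_options">> => SSL}),  %% raw config
    undefined = get_ssl_options(#{}).                    %% no SSL configured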
@@ -31,6 +31,7 @@
 -export([
     inc/3,
     inc/4,
+    observe/4,
     get/3,
     get_gauge/3,
     set_gauge/5,
@@ -38,6 +39,8 @@
     get_gauges/2,
     delete_gauges/2,
     get_rate/2,
+    get_slide/2,
+    get_slide/3,
     get_counters/2,
     create_metrics/3,
     create_metrics/4,
@@ -67,7 +70,16 @@
 -define(SAMPLING, 1).
 -endif.

--export_type([metrics/0, handler_name/0, metric_id/0]).
+-export_type([metrics/0, handler_name/0, metric_id/0, metric_spec/0]).
+
+% Default
+-type metric_type() ::
+    %% Simple counter
+    counter
+    %% Sliding window average
+    | slide.
+
+-type metric_spec() :: {metric_type(), atom()}.

 -type rate() :: #{
     current := float(),
@@ -77,6 +89,7 @@
 -type metrics() :: #{
     counters := #{metric_name() => integer()},
     gauges := #{metric_name() => integer()},
+    slides := #{metric_name() => number()},
     rate := #{metric_name() => rate()}
 }.
 -type handler_name() :: atom().
@@ -103,9 +116,22 @@
     last5m_smpl = [] :: list()
 }).

+-record(slide_datapoint, {
+    sum :: non_neg_integer(),
+    samples :: non_neg_integer(),
+    time :: non_neg_integer()
+}).
+
+-record(slide, {
+    %% Total number of samples through the history
+    n_samples = 0 :: non_neg_integer(),
+    datapoints = [] :: [#slide_datapoint{}]
+}).
+
 -record(state, {
     metric_ids = sets:new(),
-    rates :: undefined | #{metric_id() => #rate{}}
+    rates :: #{metric_id() => #{metric_name() => #rate{}}} | undefined,
+    slides = #{} :: #{metric_id() => #{metric_name() => #slide{}}}
 }).

 %%------------------------------------------------------------------------------
@@ -126,14 +152,18 @@ child_spec(ChldName, Name) ->
         modules => [emqx_metrics_worker]
     }.

--spec create_metrics(handler_name(), metric_id(), [metric_name()]) -> ok | {error, term()}.
+-spec create_metrics(handler_name(), metric_id(), [metric_spec() | metric_name()]) ->
+    ok | {error, term()}.
 create_metrics(Name, Id, Metrics) ->
-    create_metrics(Name, Id, Metrics, Metrics).
+    Metrics1 = desugar(Metrics),
+    Counters = filter_counters(Metrics1),
+    create_metrics(Name, Id, Metrics1, Counters).

--spec create_metrics(handler_name(), metric_id(), [metric_name()], [metric_name()]) ->
+-spec create_metrics(handler_name(), metric_id(), [metric_spec() | metric_name()], [atom()]) ->
     ok | {error, term()}.
 create_metrics(Name, Id, Metrics, RateMetrics) ->
-    gen_server:call(Name, {create_metrics, Id, Metrics, RateMetrics}).
+    Metrics1 = desugar(Metrics),
+    gen_server:call(Name, {create_metrics, Id, Metrics1, RateMetrics}).

 -spec clear_metrics(handler_name(), metric_id()) -> ok.
 clear_metrics(Name, Id) ->
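[Note: because `desugar/1` turns bare atoms into `{counter, _}` specs, existing callers that pass plain atom lists keep working unchanged, and counters can be mixed with slides in one call. A sketch with illustrative handler and metric names:]

    ok = emqx_metrics_worker:create_metrics(
        my_metrics_worker,
        <<"rule:1">>,
        [matched, failed, {slide, run_time}]
    ).
    %% equivalent to [{counter, matched}, {counter, failed}, {slide, run_time}]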
@@ -156,7 +186,7 @@ get(Name, Id, Metric) ->
         not_found ->
             0;
         Ref when is_atom(Metric) ->
-            counters:get(Ref, idx_metric(Name, Id, Metric));
+            counters:get(Ref, idx_metric(Name, Id, counter, Metric));
         Ref when is_integer(Metric) ->
             counters:get(Ref, Metric)
     end.
@@ -171,21 +201,37 @@ get_counters(Name, Id) ->
         fun(_Metric, Index) ->
             get(Name, Id, Index)
         end,
-        get_indexes(Name, Id)
+        get_indexes(Name, counter, Id)
     ).

+-spec get_slide(handler_name(), metric_id()) -> map().
+get_slide(Name, Id) ->
+    gen_server:call(Name, {get_slide, Id}).
+
+%% Get the average for a specified sliding window period.
+%%
+%% It will only account for the samples recorded in the past `Window' seconds.
+-spec get_slide(handler_name(), metric_id(), non_neg_integer()) -> number().
+get_slide(Name, Id, Window) ->
+    gen_server:call(Name, {get_slide, Id, Window}).
+
 -spec reset_counters(handler_name(), metric_id()) -> ok.
 reset_counters(Name, Id) ->
-    Indexes = maps:values(get_indexes(Name, Id)),
-    Ref = get_ref(Name, Id),
-    lists:foreach(fun(Idx) -> counters:put(Ref, Idx, 0) end, Indexes).
+    case get_ref(Name, Id) of
+        not_found ->
+            ok;
+        Ref ->
+            #{size := Size} = counters:info(Ref),
+            lists:foreach(fun(Idx) -> counters:put(Ref, Idx, 0) end, lists:seq(1, Size))
+    end.

 -spec get_metrics(handler_name(), metric_id()) -> metrics().
 get_metrics(Name, Id) ->
     #{
         rate => get_rate(Name, Id),
         counters => get_counters(Name, Id),
-        gauges => get_gauges(Name, Id)
+        gauges => get_gauges(Name, Id),
+        slides => get_slide(Name, Id)
     }.

 -spec inc(handler_name(), metric_id(), atom()) -> ok.
@@ -194,7 +240,37 @@ inc(Name, Id, Metric) ->

 -spec inc(handler_name(), metric_id(), metric_name(), integer()) -> ok.
 inc(Name, Id, Metric, Val) ->
-    counters:add(get_ref(Name, Id), idx_metric(Name, Id, Metric), Val).
+    counters:add(get_ref(Name, Id), idx_metric(Name, Id, counter, Metric), Val).
+
+%% Add a sample to the slide.
+%%
+%% Slide is short for "sliding window average" type of metric.
+%%
+%% It allows to monitor an average of some observed values in time,
+%% and it's mainly used for performance analysis. For example, it can
+%% be used to report run time of operations.
+%%
+%% Consider an example:
+%%
+%% ```
+%% emqx_metrics_worker:create_metrics(Name, Id, [{slide, a}]),
+%% emqx_metrics_worker:observe(Name, Id, a, 10),
+%% emqx_metrics_worker:observe(Name, Id, a, 30),
+%% #{a := 20} = emqx_metrics_worker:get_slide(Name, Id, _Window = 1).
+%% '''
+%%
+%% After recording 2 samples, this metric becomes 20 (the average of 10 and 30).
+%%
+%% But after 1 second it becomes 0 again, unless new samples are recorded.
+%%
+-spec observe(handler_name(), metric_id(), atom(), integer()) -> ok.
+observe(Name, Id, Metric, Val) ->
+    #{ref := CRef, slide := Idx} = maps:get(Id, get_pterm(Name)),
+    Index = maps:get(Metric, Idx),
+    %% Update sum:
+    counters:add(CRef, Index, Val),
+    %% Update number of samples:
+    counters:add(CRef, Index + 1, 1).

 -spec set_gauge(handler_name(), metric_id(), worker_id(), metric_name(), integer()) -> ok.
 set_gauge(Name, Id, WorkerId, Metric, Val) ->
@@ -300,9 +376,9 @@ handle_call({get_rate, Id}, _From, State = #state{rates = Rates}) ->
 handle_call(
     {create_metrics, Id, Metrics, RateMetrics},
     _From,
-    State = #state{metric_ids = MIDs, rates = Rates}
+    State = #state{metric_ids = MIDs, rates = Rates, slides = Slides}
 ) ->
-    case RateMetrics -- Metrics of
+    case RateMetrics -- filter_counters(Metrics) of
         [] ->
             RatePerId = maps:from_list([{M, #rate{}} || M <- RateMetrics]),
             Rate1 =
@@ -310,9 +386,11 @@ handle_call(
                     undefined -> #{Id => RatePerId};
                     _ -> Rates#{Id => RatePerId}
                 end,
+            Slides1 = Slides#{Id => create_slides(Metrics)},
             {reply, create_counters(get_self_name(), Id, Metrics), State#state{
                 metric_ids = sets:add_element(Id, MIDs),
-                rates = Rate1
+                rates = Rate1,
+                slides = Slides1
             }};
         _ ->
             {reply, {error, not_super_set_of, {RateMetrics, Metrics}}, State}
@@ -320,7 +398,7 @@ handle_call(
 handle_call(
     {delete_metrics, Id},
     _From,
-    State = #state{metric_ids = MIDs, rates = Rates}
+    State = #state{metric_ids = MIDs, rates = Rates, slides = Slides}
 ) ->
     Name = get_self_name(),
     delete_counters(Name, Id),
|
||||||
case Rates of
|
case Rates of
|
||||||
undefined -> undefined;
|
undefined -> undefined;
|
||||||
_ -> maps:remove(Id, Rates)
|
_ -> maps:remove(Id, Rates)
|
||||||
end
|
end,
|
||||||
|
slides = maps:remove(Id, Slides)
|
||||||
}};
|
}};
|
||||||
handle_call(
|
handle_call(
|
||||||
{reset_metrics, Id},
|
{reset_metrics, Id},
|
||||||
_From,
|
_From,
|
||||||
State = #state{rates = Rates}
|
State = #state{rates = Rates, slides = Slides}
|
||||||
) ->
|
) ->
|
||||||
Name = get_self_name(),
|
delete_gauges(get_self_name(), Id),
|
||||||
delete_gauges(Name, Id),
|
NewRates =
|
||||||
{reply, reset_counters(Name, Id), State#state{
|
|
||||||
rates =
|
|
||||||
case Rates of
|
case Rates of
|
||||||
undefined ->
|
undefined ->
|
||||||
undefined;
|
undefined;
|
||||||
|
@@ -352,8 +429,23 @@ handle_call(
                     maps:get(Id, Rates, #{})
                 ),
                 maps:put(Id, ResetRate, Rates)
-        end
+        end,
+    SlideSpecs = [{slide, I} || I <- maps:keys(maps:get(Id, Slides, #{}))],
+    NewSlides = Slides#{Id => create_slides(SlideSpecs)},
+    {reply, reset_counters(get_self_name(), Id), State#state{
+        rates =
+            NewRates,
+        slides = NewSlides
     }};
+handle_call({get_slide, Id}, _From, State = #state{slides = Slides}) ->
+    SlidesForID = maps:get(Id, Slides, #{}),
+    {reply, maps:map(fun(Metric, Slide) -> do_get_slide(Id, Metric, Slide) end, SlidesForID),
+        State};
+handle_call({get_slide, Id, Window}, _From, State = #state{slides = Slides}) ->
+    SlidesForID = maps:get(Id, Slides, #{}),
+    {reply,
+        maps:map(fun(Metric, Slide) -> do_get_slide(Window, Id, Metric, Slide) end, SlidesForID),
+        State};
 handle_call(_Request, _From, State) ->
     {reply, ok, State}.
@@ -363,7 +455,7 @@ handle_cast(_Msg, State) ->
 handle_info(ticking, State = #state{rates = undefined}) ->
     erlang:send_after(timer:seconds(?SAMPLING), self(), ticking),
     {noreply, State};
-handle_info(ticking, State = #state{rates = Rates0}) ->
+handle_info(ticking, State = #state{rates = Rates0, slides = Slides0}) ->
     Rates =
         maps:map(
             fun(Id, RatesPerID) ->
@@ -376,8 +468,20 @@ handle_info(ticking, State = #state{rates = Rates0}) ->
             end,
             Rates0
         ),
+    Slides =
+        maps:map(
+            fun(Id, SlidesPerID) ->
+                maps:map(
+                    fun(Metric, Slide) ->
+                        update_slide(Id, Metric, Slide)
+                    end,
+                    SlidesPerID
+                )
+            end,
+            Slides0
+        ),
     erlang:send_after(timer:seconds(?SAMPLING), self(), ticking),
-    {noreply, State#state{rates = Rates}};
+    {noreply, State#state{rates = Rates, slides = Slides}};
 handle_info(_Info, State) ->
     {noreply, State}.
@@ -408,17 +512,18 @@ create_counters(_Name, _Id, []) ->
     error({create_counter_error, must_provide_a_list_of_metrics});
 create_counters(Name, Id, Metrics) ->
     %% backup the old counters
-    OlderCounters = maps:with(Metrics, get_counters(Name, Id)),
+    OlderCounters = maps:with(filter_counters(Metrics), get_counters(Name, Id)),
     %% create the new counter
-    Size = length(Metrics),
-    Indexes = maps:from_list(lists:zip(Metrics, lists:seq(1, Size))),
+    {Size, Indexes} = create_metric_indexes(Metrics),
     Counters = get_pterm(Name),
     CntrRef = counters:new(Size, [write_concurrency]),
     persistent_term:put(
         ?CntrRef(Name),
-        Counters#{Id => #{ref => CntrRef, indexes => Indexes}}
+        Counters#{Id => Indexes#{ref => CntrRef}}
     ),
-    %% restore the old counters
+    %% Restore the old counters. Slides are not restored, since they
+    %% are periodically zeroed anyway. We do lose some samples in the
+    %% current interval, but that's acceptable for now.
     lists:foreach(
         fun({Metric, N}) ->
             inc(Name, Id, Metric, N)
@@ -426,6 +531,16 @@ create_counters(Name, Id, Metrics) ->
         maps:to_list(OlderCounters)
     ).

+create_metric_indexes(Metrics) ->
+    create_metric_indexes(Metrics, 1, [], []).
+
+create_metric_indexes([], Size, Counters, Slides) ->
+    {Size, #{counter => maps:from_list(Counters), slide => maps:from_list(Slides)}};
+create_metric_indexes([{counter, Id} | Rest], Index, Counters, Slides) ->
+    create_metric_indexes(Rest, Index + 1, [{Id, Index} | Counters], Slides);
+create_metric_indexes([{slide, Id} | Rest], Index, Counters, Slides) ->
+    create_metric_indexes(Rest, Index + 2, Counters, [{Id, Index} | Slides]).
+
 delete_counters(Name, Id) ->
     persistent_term:put(?CntrRef(Name), maps:remove(Id, get_pterm(Name))).
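[Note: the `Index + 2` in the slide clause is because each slide consumes two adjacent counter slots, the running sum at `Index` and the sample count at `Index + 1` (see `observe/4` above). For illustration:]

    %% create_metric_indexes([{counter, ok}, {slide, rt}, {counter, err}])
    %% -> {5, #{counter => #{ok => 1, err => 4}, slide => #{rt => 2}}}
    %% slide `rt' owns slots 2 (sum) and 3 (sample count); slot 4 is `err'.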
@@ -435,12 +550,12 @@ get_ref(Name, Id) ->
         error -> not_found
     end.

-idx_metric(Name, Id, Metric) ->
-    maps:get(Metric, get_indexes(Name, Id)).
+idx_metric(Name, Id, Type, Metric) ->
+    maps:get(Metric, get_indexes(Name, Type, Id)).

-get_indexes(Name, Id) ->
+get_indexes(Name, Type, Id) ->
     case maps:find(Id, get_pterm(Name)) of
-        {ok, #{indexes := Indexes}} -> Indexes;
+        {ok, #{Type := Indexes}} -> Indexes;
         error -> #{}
     end.
@@ -488,6 +603,53 @@ calculate_rate(CurrVal, #rate{
         tick = Tick + 1
     }.

+do_get_slide(Id, Metric, S = #slide{n_samples = NSamples}) ->
+    #{
+        n_samples => NSamples,
+        current => do_get_slide(2, Id, Metric, S),
+        last5m => do_get_slide(?SECS_5M, Id, Metric, S)
+    }.
+
+do_get_slide(Window, Id, Metric, #slide{datapoints = DP0}) ->
+    Datapoint = get_slide_datapoint(Id, Metric),
+    {N, Sum} = get_slide_window(os:system_time(second) - Window, [Datapoint | DP0], 0, 0),
+    case N > 0 of
+        true -> Sum div N;
+        false -> 0
+    end.
+
+get_slide_window(_StartTime, [], N, S) ->
+    {N, S};
+get_slide_window(StartTime, [#slide_datapoint{time = T} | _], N, S) when T < StartTime ->
+    {N, S};
+get_slide_window(StartTime, [#slide_datapoint{samples = N, sum = S} | Rest], AccN, AccS) ->
+    get_slide_window(StartTime, Rest, AccN + N, AccS + S).
+
+get_slide_datapoint(Id, Metric) ->
+    Name = get_self_name(),
+    CRef = get_ref(Name, Id),
+    Index = idx_metric(Name, Id, slide, Metric),
+    Total = counters:get(CRef, Index),
+    N = counters:get(CRef, Index + 1),
+    #slide_datapoint{
+        sum = Total,
+        samples = N,
+        time = os:system_time(second)
+    }.
+
+update_slide(Id, Metric, Slide0 = #slide{n_samples = NSamples, datapoints = DPs}) ->
+    Datapoint = get_slide_datapoint(Id, Metric),
+    %% Reset counters:
+    Name = get_self_name(),
+    CRef = get_ref(Name, Id),
+    Index = idx_metric(Name, Id, slide, Metric),
+    counters:put(CRef, Index, 0),
+    counters:put(CRef, Index + 1, 0),
+    Slide0#slide{
+        datapoints = [Datapoint | lists:droplast(DPs)],
+        n_samples = Datapoint#slide_datapoint.samples + NSamples
+    }.
+
 format_rates_of_id(RatesPerId) ->
     maps:map(
         fun(_Metric, Rates) ->
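[Note: `do_get_slide/4` walks the datapoints newest-first and stops at the first one older than the window start, returning the integer average `Sum div N`. A worked example, assuming the 1-second `?SAMPLING` default:]

    %% live datapoint:    sum = 30, samples = 1, time = T
    %% stored datapoint:  sum = 10, samples = 1, time = T - 2
    %% do_get_slide(3, ...) -> (30 + 10) div (1 + 1) = 20
    %% do_get_slide(1, ...) -> 30 div 1 = 30   (older point falls outside)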
@ -510,6 +672,27 @@ precision(Float, N) ->
|
||||||
Base = math:pow(10, N),
|
Base = math:pow(10, N),
|
||||||
round(Float * Base) / Base.
|
round(Float * Base) / Base.
|
||||||
|
|
||||||
|
desugar(Metrics) ->
|
||||||
|
lists:map(
|
||||||
|
fun
|
||||||
|
(Atom) when is_atom(Atom) ->
|
||||||
|
{counter, Atom};
|
||||||
|
(Spec = {_, _}) ->
|
||||||
|
Spec
|
||||||
|
end,
|
||||||
|
Metrics
|
||||||
|
).
|
||||||
|
|
||||||
|
filter_counters(Metrics) ->
|
||||||
|
[K || {counter, K} <- Metrics].
|
||||||
|
|
||||||
|
create_slides(Metrics) ->
|
||||||
|
EmptyDatapoints = [
|
||||||
|
#slide_datapoint{sum = 0, samples = 0, time = 0}
|
||||||
|
|| _ <- lists:seq(1, ?SECS_5M div ?SAMPLING)
|
||||||
|
],
|
||||||
|
maps:from_list([{K, #slide{datapoints = EmptyDatapoints}} || {slide, K} <- Metrics]).
|
||||||
|
|
||||||
get_self_name() ->
|
get_self_name() ->
|
||||||
{registered_name, Name} = process_info(self(), registered_name),
|
{registered_name, Name} = process_info(self(), registered_name),
|
||||||
Name.
|
Name.
|
||||||
|
|
|
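Note on the slide functions added above: each `{slide, Id}` metric occupies two counter slots (running sum and sample count), and `get_slide_window/4` folds only datapoints newer than the window start. A minimal standalone sketch of that windowed-average computation, where `#dp{}` is a hypothetical stand-in for the real `#slide_datapoint{}` record:

%% Standalone sketch of the windowed average in do_get_slide/4 above.
%% #dp{} is a hypothetical stand-in for #slide_datapoint{}.
-module(slide_sketch).
-export([avg_in_window/2]).

-record(dp, {sum = 0, samples = 0, time = 0}).

%% Integer average of all samples newer than StartTime (seconds).
avg_in_window(StartTime, Datapoints) ->
    {N, Sum} = window(StartTime, Datapoints, 0, 0),
    case N > 0 of
        true -> Sum div N;
        false -> 0
    end.

%% Datapoints are ordered newest-first, so recursion stops at the
%% first datapoint that falls outside the window.
window(_StartTime, [], N, S) ->
    {N, S};
window(StartTime, [#dp{time = T} | _], N, S) when T < StartTime ->
    {N, S};
window(StartTime, [#dp{samples = N, sum = S} | Rest], AccN, AccS) ->
    window(StartTime, Rest, AccN + N, AccS + S).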
@@ -16,6 +16,8 @@
 
 -module(emqx_packet).
 
+-elvis([{elvis_style, no_spec_with_records, disable}]).
+
 -include("emqx.hrl").
 -include("emqx_mqtt.hrl").
@@ -492,7 +494,7 @@ format_variable(undefined, _, _) ->
 format_variable(Variable, undefined, PayloadEncode) ->
     format_variable(Variable, PayloadEncode);
 format_variable(Variable, Payload, PayloadEncode) ->
-    [format_variable(Variable, PayloadEncode), format_payload(Payload, PayloadEncode)].
+    [format_variable(Variable, PayloadEncode), ",", format_payload(Payload, PayloadEncode)].
 
 format_variable(
     #mqtt_packet_connect{
@@ -111,15 +111,19 @@
     comma_separated_atoms/0
 ]).
 
--export([namespace/0, roots/0, roots/1, fields/1, desc/1]).
+-export([namespace/0, roots/0, roots/1, fields/1, desc/1, tags/0]).
 -export([conf_get/2, conf_get/3, keys/2, filter/1]).
 -export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1]).
+-export([authz_fields/0]).
 -export([sc/2, map/2]).
 
 -elvis([{elvis_style, god_modules, disable}]).
 
 namespace() -> broker.
 
+tags() ->
+    [<<"EMQX">>].
+
 roots() ->
     %% TODO change config importance to a field metadata
     roots(high) ++ roots(medium) ++ roots(low).
@@ -323,31 +327,7 @@ fields("stats") ->
         )}
     ];
 fields("authorization") ->
-    [
-        {"no_match",
-            sc(
-                hoconsc:enum([allow, deny]),
-                #{
-                    default => allow,
-                    required => true,
-                    desc => ?DESC(fields_authorization_no_match)
-                }
-            )},
-        {"deny_action",
-            sc(
-                hoconsc:enum([ignore, disconnect]),
-                #{
-                    default => ignore,
-                    required => true,
-                    desc => ?DESC(fields_authorization_deny_action)
-                }
-            )},
-        {"cache",
-            sc(
-                ref(?MODULE, "cache"),
-                #{}
-            )}
-    ];
+    authz_fields();
 fields("cache") ->
     [
         {"enable",
@@ -1644,7 +1624,7 @@ base_listener(Bind) ->
         sc(
             hoconsc:union([infinity, pos_integer()]),
             #{
-                default => infinity,
+                default => <<"infinity">>,
                 desc => ?DESC(base_listener_max_connections)
             }
         )},
@@ -2088,6 +2068,33 @@ do_default_ciphers(_) ->
     %% otherwise resolve default ciphers list at runtime
     [].
 
+authz_fields() ->
+    [
+        {"no_match",
+            sc(
+                hoconsc:enum([allow, deny]),
+                #{
+                    default => allow,
+                    required => true,
+                    desc => ?DESC(fields_authorization_no_match)
+                }
+            )},
+        {"deny_action",
+            sc(
+                hoconsc:enum([ignore, disconnect]),
+                #{
+                    default => ignore,
+                    required => true,
+                    desc => ?DESC(fields_authorization_deny_action)
+                }
+            )},
+        {"cache",
+            sc(
+                ref(?MODULE, "cache"),
+                #{}
+            )}
+    ].
+
 %% @private return a list of keys in a parent field
 -spec keys(string(), hocon:config()) -> [string()].
 keys(Parent, Conf) ->
@@ -2342,7 +2349,7 @@ authentication(Which) ->
         undefined -> hoconsc:array(typerefl:map());
         Module -> Module:root_type()
     end,
-    %% It is a lazy type because when handing runtime update requests
+    %% It is a lazy type because when handling runtime update requests
     %% the config is not checked by emqx_schema, but by the injected schema
     Type = hoconsc:lazy(Type0),
     #{
@@ -71,24 +71,15 @@
 %%--------------------------------------------------------------------
 
 create_router_tab(disc) ->
-    ok = mria:create_table(?ROUTE_DISC_TAB, [
-        {type, bag},
-        {rlog_shard, ?ROUTE_SHARD},
-        {storage, disc_copies},
-        {record_name, route},
-        {attributes, record_info(fields, route)},
-        {storage_properties, [
-            {ets, [
-                {read_concurrency, true},
-                {write_concurrency, true}
-            ]}
-        ]}
-    ]);
+    create_table(?ROUTE_DISC_TAB, disc_copies);
 create_router_tab(ram) ->
-    ok = mria:create_table(?ROUTE_RAM_TAB, [
+    create_table(?ROUTE_RAM_TAB, ram_copies).
+
+create_table(Tab, Storage) ->
+    ok = mria:create_table(Tab, [
         {type, bag},
         {rlog_shard, ?ROUTE_SHARD},
-        {storage, ram_copies},
+        {storage, Storage},
         {record_name, route},
         {attributes, record_info(fields, route)},
         {storage_properties, [
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID/jCCAeagAwIBAgIJAKTICmq1Lg6dMA0GCSqGSIb3DQEBCwUAMDQxEjAQBgNV
+BAoMCUVNUVggVGVzdDEeMBwGA1UEAwwVQ2VydGlmaWNhdGUgQXV0aG9yaXR5MB4X
+DTIxMTIzMDA4NDExMloXDTQ5MDUxNzA4NDExMlowJTESMBAGA1UECgwJRU1RWCBU
+ZXN0MQ8wDQYDVQQDDAZjbGllbnQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQDzrujfx6XZTH0MWqLO6kNAeHndUZ+OGaURXvxKMPMF5dA40lxNG6cEzzlq
+0Rm61adlv8tF4kRJrs6EnRjEVoMImrdh07vGFdOTYqP01LjiBhErAzyRtSn2X8FT
+Te8ExoCRs3x61SPebGY2hOvFxuO6YDPVOSDvbbxvRgqIlM1ZXC8dOvPSSGZ+P8hV
+56EPayRthfu1FVptnkW9CyZCRI0gg95Hv8RC7bGG+tuWpkN9ZrRvohhgGR1+bDUi
+BNBpncEsSh+UgWaj8KRN8D16H6m/Im6ty467j0at49FvPx5nACL48/ghtYvzgKLc
+uKHtokKUuuzebDK/hQxN3mUSAJStAgMBAAGjIjAgMAsGA1UdDwQEAwIFoDARBglg
+hkgBhvhCAQEEBAMCB4AwDQYJKoZIhvcNAQELBQADggIBAIlVyPhOpkz3MNzQmjX7
+xgJ3vGPK5uK11n/wfjRwe2qXwZbrI2sYLVtTpUgvLDuP0gB73Vwfu7xAMdue6TRm
+CKr9z0lkQsVBtgoqzZCjd4PYLfHm4EhsOMi98OGKU5uOGD4g3yLwQWXHhbYtiZMO
+Jsj0hebYveYJt/BYTd1syGQcIcYCyVExWvSWjidfpAqjT6EF7whdubaFtuF2kaGF
+IO9yn9rWtXB5yK99uCguEmKhx3fAQxomzqweTu3WRvy9axsUH3WAUW9a4DIBSz2+
+ZSJNheFn5GktgggygJUGYqpSZHooUJW0UBs/8vX6AP+8MtINmqOGZUawmNwLWLOq
+wHyVt2YGD5TXjzzsWNSQ4mqXxM6AXniZVZK0yYNjA4ATikX1AtwunyWBR4IjyE/D
+FxYPORdZCOtywRFE1R5KLTUq/C8BNGCkYnoO78DJBO+pT0oagkQGQb0CnmC6C1db
+4lWzA9K0i4B0PyooZA+gp+5FFgaLuX1DkyeaY1J204QhHR1z/Vcyl5dpqR9hqnYP
+t8raLk9ogMDKqKA9iG0wc3CBNckD4sjVWAEeovXhElG55fD21wwhF+AnDCvX8iVK
+cBfKV6z6uxfKjGIxc2I643I5DiIn+V3DnPxYyY74Ln1lWFYmt5JREhAxPu42zq74
+e6+eIMYFszB+5gKgt6pa6ZNI
+-----END CERTIFICATE-----
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEA867o38el2Ux9DFqizupDQHh53VGfjhmlEV78SjDzBeXQONJc
+TRunBM85atEZutWnZb/LReJESa7OhJ0YxFaDCJq3YdO7xhXTk2Kj9NS44gYRKwM8
+kbUp9l/BU03vBMaAkbN8etUj3mxmNoTrxcbjumAz1Tkg7228b0YKiJTNWVwvHTrz
+0khmfj/IVeehD2skbYX7tRVabZ5FvQsmQkSNIIPeR7/EQu2xhvrblqZDfWa0b6IY
+YBkdfmw1IgTQaZ3BLEoflIFmo/CkTfA9eh+pvyJurcuOu49GrePRbz8eZwAi+PP4
+IbWL84Ci3Lih7aJClLrs3mwyv4UMTd5lEgCUrQIDAQABAoIBAQDwEbBgznrIwn8r
+jZt5x/brbAV7Ea/kOcWSgIaCvQifFdJ2OGAwov5/UXwajNgRZe2d4z7qoUhvYuUY
+ZwCAZU6ASpRBr2v9cYFYYURvrqZaHmoJew3P6q/lhl6aqFvC06DUagRHqvXEafyk
+13zEAvZVpfNKrBaTawPKiDFWb2qDDc9D6hC07EuJ/DNeehiHvzHrSZSDVV5Ut7Bw
+YDm33XygheUPAlHfeCnaixzcs3osiVyFEmVjxcIaM0ZS1NgcSaohSpJHMzvEaohX
+e+v9vccraSVlw01AlvFwI2vHYUV8jT6HwglTPKKGOCzK/ace3wPdYSU9qLcqfuHn
+EFhNc3tNAoGBAPugLMgbReJg2gpbIPUkYyoMMAAU7llFU1WvPWwXzo1a9EBjBACw
+WfCZISNtANXR38zIYXzoH547uXi4YPks1Nne3sYuCDpvuX+iz7fIo4zHf1nFmxH7
+eE6GtQr2ubmuuipTc28S0wBMGT1/KybH0e2NKL6GaOkNDmAI0IbEMBrvAoGBAPfr
+Y1QYLhPhan6m5g/5s+bQpKtHfNH9TNkk13HuYu72zNuY3qL2GC7oSadR8vTbRXZg
+KQqfaO0IGRcdkSFTq/AEhSSqr2Ld5nPadMbKvSGrSCc1s8rFH97jRVQY56yhM7ti
+IW4+6cE8ylCMbdYB6wuduK/GIgNpqoF4xs1i2XojAoGACacBUMPLEH4Kny8TupOk
+wi4pgTdMVVxVcAoC3yyincWJbRbfRm99Y79cCBHcYFdmsGJXawU0gUtlN/5KqgRQ
+PfNQtGV7p1I12XGTakdmDrZwai8sXao52TlNpJgGU9siBRGicfZU5cQFi9he/WPY
+57XshDJ/v8DidkigRysrdT0CgYEA5iuO22tblC+KvK1dGOXeZWO+DhrfwuGlcFBp
+CaimB2/w/8vsn2VVTG9yujo2E6hj1CQw1mDrfG0xRim4LTXOgpbfugwRqvuTUmo2
+Ur21XEX2RhjwpEfhcACWxB4fMUG0krrniMA2K6axupi1/KNpQi6bYe3UdFCs8Wld
+QSAOAvsCgYBk/X5PmD44DvndE5FShM2w70YOoMr3Cgl5sdwAFUFE9yDuC14UhVxk
+oxnYxwtVI9uVVirET+LczP9JEvcvxnN/Xg3tH/qm0WlIxmTxyYrFFIK9j0rqeu9z
+blPu56OzNI2VMrR1GbOBLxQINLTIpaacjNJAlr8XOlegdUJsW/Jwqw==
+-----END RSA PRIVATE KEY-----
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAzLiGiSwpxkENtjrzS7pNLblTnWe4HUUFwYyUX0H+3TnvA86X
+EX85yZvFjkzB6lLjUkMY+C6UTVXt+mxeSJbUtSKZhX+2yoF/KYh7SaVjug5FqEqO
+LvMpDZQEhUWF2W9DG6eUgOfDoX2milSDIe10yG2WBkryipHAfE7l1t+i6Rh3on+v
+561LmrbqyBWR/cLp23RN3sHbkf2pb5/ugtU9twdgJr6Lve73rvSeulewL5BzszKD
+BrYqr+PBT5+3ItCc55bTsO7M7CzOIL99BlqdvFH7xT0U1+2BFwLe4/8kwphSqyJE
+C5oOiQBFnFVNXmFQSV+k7rPr80i1IO++HeJ6KQIDAQABAoIBAGWgvPjfuaU3qizq
+uti/FY07USz0zkuJdkANH6LiSjlchzDmn8wJ0pApCjuIE0PV/g9aS8z4opp5q/gD
+UBLM/a8mC/xf2EhTXOMrY7i9p/I3H5FZ4ZehEqIw9sWKK9YzC6dw26HabB2BGOnW
+5nozPSQ6cp2RGzJ7BIkxSZwPzPnVTgy3OAuPOiJytvK+hGLhsNaT+Y9bNDvplVT2
+ZwYTV8GlHZC+4b2wNROILm0O86v96O+Qd8nn3fXjGHbMsAnONBq10bZS16L4fvkH
+5G+W/1PeSXmtZFppdRRDxIW+DWcXK0D48WRliuxcV4eOOxI+a9N2ZJZZiNLQZGwg
+w3A8+mECgYEA8HuJFrlRvdoBe2U/EwUtG74dcyy30L4yEBnN5QscXmEEikhaQCfX
+Wm6EieMcIB/5I5TQmSw0cmBMeZjSXYoFdoI16/X6yMMuATdxpvhOZGdUGXxhAH+x
+xoTUavWZnEqW3fkUU71kT5E2f2i+0zoatFESXHeslJyz85aAYpP92H0CgYEA2e5A
+Yozt5eaA1Gyhd8SeptkEU4xPirNUnVQHStpMWUb1kzTNXrPmNWccQ7JpfpG6DcYl
+zUF6p6mlzY+zkMiyPQjwEJlhiHM2NlL1QS7td0R8ewgsFoyn8WsBI4RejWrEG9td
+EDniuIw+pBFkcWthnTLHwECHdzgquToyTMjrBB0CgYEA28tdGbrZXhcyAZEhHAZA
+Gzog+pKlkpEzeonLKIuGKzCrEKRecIK5jrqyQsCjhS0T7ZRnL4g6i0s+umiV5M5w
+fcc292pEA1h45L3DD6OlKplSQVTv55/OYS4oY3YEJtf5mfm8vWi9lQeY8sxOlQpn
+O+VZTdBHmTC8PGeTAgZXHZUCgYA6Tyv88lYowB7SN2qQgBQu8jvdGtqhcs/99GCr
+H3N0I69LPsKAR0QeH8OJPXBKhDUywESXAaEOwS5yrLNP1tMRz5Vj65YUCzeDG3kx
+gpvY4IMp7ArX0bSRvJ6mYSFnVxy3k174G3TVCfksrtagHioVBGQ7xUg5ltafjrms
+n8l55QKBgQDVzU8tQvBVqY8/1lnw11Vj4fkE/drZHJ5UkdC1eenOfSWhlSLfUJ8j
+ds7vEWpRPPoVuPZYeR1y78cyxKe1GBx6Wa2lF5c7xjmiu0xbRnrxYeLolce9/ntp
+asClqpnHT8/VJYTD7Kqj0fouTTZf0zkig/y+2XERppd8k+pSKjUCPQ==
+-----END RSA PRIVATE KEY-----
@@ -29,6 +29,9 @@
     auth_header/2
 ]).
 
+-define(DEFAULT_APP_ID, <<"default_appid">>).
+-define(DEFAULT_APP_SECRET, <<"default_app_secret">>).
+
 request_api(Method, Url, Auth) ->
     request_api(Method, Url, [], Auth, []).
@@ -74,12 +77,18 @@ auth_header(User, Pass) ->
     {"Authorization", "Basic " ++ Encoded}.
 
 default_auth_header() ->
-    AppId = <<"myappid">>,
-    AppSecret = emqx_mgmt_auth:get_appsecret(AppId),
-    auth_header(erlang:binary_to_list(AppId), erlang:binary_to_list(AppSecret)).
+    {ok, #{api_key := APIKey}} = emqx_mgmt_auth:read(?DEFAULT_APP_ID),
+    auth_header(
+        erlang:binary_to_list(APIKey), erlang:binary_to_list(?DEFAULT_APP_SECRET)
+    ).
 
 create_default_app() ->
-    emqx_mgmt_auth:add_app(<<"myappid">>, <<"test">>).
+    Now = erlang:system_time(second),
+    ExpiredAt = Now + timer:minutes(10),
+    emqx_mgmt_auth:create(
+        ?DEFAULT_APP_ID, ?DEFAULT_APP_SECRET, true, ExpiredAt, <<"default app key for test">>
+    ),
+    ok.
 
 delete_default_app() ->
-    emqx_mgmt_auth:del_app(<<"myappid">>).
+    emqx_mgmt_auth:delete(?DEFAULT_APP_ID).
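The `auth_header/2` helper used above builds a standard HTTP Basic credential. A sketch of the same construction in isolation (the function name here is illustrative):

%% Sketch: building an HTTP Basic auth header pair, as auth_header/2 does.
basic_auth_header(User, Pass) when is_list(User), is_list(Pass) ->
    Encoded = base64:encode_to_string(User ++ ":" ++ Pass),
    {"Authorization", "Basic " ++ Encoded}.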
@@ -46,7 +46,7 @@ end_per_testcase(_, _Config) ->
     ok.
 
 t_get_metrics(_) ->
-    Metrics = [a, b, c],
+    Metrics = [a, b, c, {slide, d}],
     Id = <<"testid">>,
     ok = emqx_metrics_worker:create_metrics(?NAME, Id, Metrics),
     %% all the metrics are set to zero at start
@@ -73,6 +73,8 @@ t_get_metrics(_) ->
     ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id0, inflight, 5),
     ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id1, inflight, 7),
     ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id2, queuing, 9),
+    ok = emqx_metrics_worker:observe(?NAME, Id, d, 10),
+    ok = emqx_metrics_worker:observe(?NAME, Id, d, 30),
     ct:sleep(1500),
     ?LET(
         #{
@@ -89,6 +91,9 @@ t_get_metrics(_) ->
                 a := 1,
                 b := 1,
                 c := 2
+            } = Counters,
+            slides := #{
+                d := #{n_samples := 2, last5m := 20, current := _}
             }
         },
         emqx_metrics_worker:get_metrics(?NAME, Id),
@@ -100,7 +105,8 @@ t_get_metrics(_) ->
             ?assert(MaxB > 0),
             ?assert(MaxC > 0),
             ?assert(Inflight == 12),
-            ?assert(Queuing == 9)
+            ?assert(Queuing == 9),
+            ?assertNot(maps:is_key(d, Counters))
         }
     ),
     ok = emqx_metrics_worker:clear_metrics(?NAME, Id).
@@ -117,6 +123,7 @@ t_clear_metrics(_Config) ->
             c := #{current := 0.0, max := 0.0, last5m := 0.0}
         },
         gauges := #{},
+        slides := #{},
         counters := #{
             a := 0,
             b := 0,
@@ -138,14 +145,15 @@ t_clear_metrics(_Config) ->
        #{
            counters => #{},
            gauges => #{},
-            rate => #{current => 0.0, last5m => 0.0, max => 0.0}
+            rate => #{current => 0.0, last5m => 0.0, max => 0.0},
+            slides => #{}
        },
        emqx_metrics_worker:get_metrics(?NAME, Id)
    ),
    ok.
 
 t_reset_metrics(_) ->
-    Metrics = [a, b, c],
+    Metrics = [a, b, c, {slide, d}],
     Id = <<"testid">>,
     ok = emqx_metrics_worker:create_metrics(?NAME, Id, Metrics),
     %% all the metrics are set to zero at start
@@ -161,6 +169,9 @@ t_reset_metrics(_) ->
             a := 0,
             b := 0,
             c := 0
+            },
+            slides := #{
+                d := #{n_samples := 0, last5m := 0, current := 0}
             }
         },
         emqx_metrics_worker:get_metrics(?NAME, Id)
@@ -172,7 +183,12 @@ t_reset_metrics(_) ->
     ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id0, inflight, 5),
     ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id1, inflight, 7),
     ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id2, queuing, 9),
+    ok = emqx_metrics_worker:observe(?NAME, Id, d, 100),
+    ok = emqx_metrics_worker:observe(?NAME, Id, d, 200),
     ct:sleep(1500),
+    ?assertMatch(
+        #{d := #{n_samples := 2}}, emqx_metrics_worker:get_slide(?NAME, <<"testid">>)
+    ),
     ok = emqx_metrics_worker:reset_metrics(?NAME, Id),
     ?LET(
         #{
@@ -186,6 +202,9 @@ t_reset_metrics(_) ->
             a := 0,
             b := 0,
             c := 0
+            },
+            slides := #{
+                d := #{n_samples := 0, last5m := 0, current := 0}
             }
         },
         emqx_metrics_worker:get_metrics(?NAME, Id),
@@ -202,7 +221,7 @@ t_reset_metrics(_) ->
     ok = emqx_metrics_worker:clear_metrics(?NAME, Id).
 
 t_get_metrics_2(_) ->
-    Metrics = [a, b, c],
+    Metrics = [a, b, c, {slide, d}],
     Id = <<"testid">>,
     ok = emqx_metrics_worker:create_metrics(
         ?NAME,
@@ -153,7 +153,7 @@ ssl_opts_gc_after_handshake_test_rancher_listener_test() ->
             #{
                 kind := validation_error,
                 reason := unknown_fields,
-                unknown := <<"gc_after_handshake">>
+                unknown := "gc_after_handshake"
             }
         ]},
         validate(Sc, #{<<"gc_after_handshake">> => true})
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_authn, [
     {description, "EMQX Authentication"},
-    {vsn, "0.1.11"},
+    {vsn, "0.1.12"},
     {modules, []},
     {registered, [emqx_authn_sup, emqx_authn_registry]},
     {applications, [kernel, stdlib, emqx_resource, emqx_connector, ehttpc, epgsql, mysql, jose]},
@@ -22,6 +22,7 @@
 -export([
     common_fields/0,
     roots/0,
+    tags/0,
     fields/1,
     authenticator_type/0,
     authenticator_type_without_scram/0,
@@ -32,6 +33,9 @@
 
 roots() -> [].
 
+tags() ->
+    [<<"Authentication">>].
+
 common_fields() ->
     [{enable, fun enable/1}].
@@ -25,6 +25,7 @@
 -export([
     namespace/0,
+    tags/0,
     roots/0,
     fields/1,
     desc/1
@@ -105,6 +106,9 @@ mnesia(boot) ->
 
 namespace() -> "authn-scram-builtin_db".
 
+tags() ->
+    [<<"Authentication">>].
+
 roots() -> [?CONF_NS].
 
 fields(?CONF_NS) ->
@@ -26,6 +26,7 @@
 -export([
     namespace/0,
+    tags/0,
     roots/0,
     fields/1,
     desc/1,
@@ -51,6 +52,9 @@
 
 namespace() -> "authn-http".
 
+tags() ->
+    [<<"Authentication">>].
+
 roots() ->
     [
         {?CONF_NS,
@@ -25,6 +25,7 @@
 -export([
     namespace/0,
+    tags/0,
     roots/0,
     fields/1,
     desc/1
@@ -44,6 +45,9 @@
 
 namespace() -> "authn-jwt".
 
+tags() ->
+    [<<"Authentication">>].
+
 roots() ->
     [
         {?CONF_NS,
@@ -26,6 +26,7 @@
 -export([
     namespace/0,
+    tags/0,
     roots/0,
     fields/1,
     desc/1
@@ -107,6 +108,9 @@ mnesia(boot) ->
 
 namespace() -> "authn-builtin_db".
 
+tags() ->
+    [<<"Authentication">>].
+
 roots() -> [?CONF_NS].
 
 fields(?CONF_NS) ->
@@ -25,6 +25,7 @@
 -export([
     namespace/0,
+    tags/0,
     roots/0,
     fields/1,
     desc/1
@@ -44,6 +45,9 @@
 
 namespace() -> "authn-mongodb".
 
+tags() ->
+    [<<"Authentication">>].
+
 roots() ->
     [
         {?CONF_NS,
@@ -27,6 +27,7 @@
 -export([
     namespace/0,
+    tags/0,
     roots/0,
     fields/1,
     desc/1
@@ -46,6 +47,9 @@
 
 namespace() -> "authn-mysql".
 
+tags() ->
+    [<<"Authentication">>].
+
 roots() -> [?CONF_NS].
 
 fields(?CONF_NS) ->
@@ -26,6 +26,7 @@
 -export([
     namespace/0,
+    tags/0,
     roots/0,
     fields/1,
     desc/1
@@ -50,6 +51,9 @@
 
 namespace() -> "authn-postgresql".
 
+tags() ->
+    [<<"Authentication">>].
+
 roots() -> [?CONF_NS].
 
 fields(?CONF_NS) ->
@@ -25,6 +25,7 @@
 -export([
     namespace/0,
+    tags/0,
     roots/0,
     fields/1,
     desc/1
@@ -44,6 +45,9 @@
 
 namespace() -> "authn-redis".
 
+tags() ->
+    [<<"Authentication">>].
+
 roots() ->
     [
         {?CONF_NS,
@@ -18,7 +18,8 @@
 -compile(nowarn_export_all).
 -compile(export_all).
 
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1, multipart_formdata_request/3]).
+-import(emqx_dashboard_api_test_helpers, [multipart_formdata_request/3]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
 
 -include("emqx_authn.hrl").
 -include_lib("eunit/include/eunit.hrl").
@@ -65,9 +66,8 @@ end_per_testcase(_, Config) ->
 init_per_suite(Config) ->
     emqx_config:erase(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY),
     _ = application:load(emqx_conf),
-    ok = emqx_common_test_helpers:start_apps(
-        [emqx_authn, emqx_dashboard],
-        fun set_special_configs/1
+    ok = emqx_mgmt_api_test_util:init_suite(
+        [emqx_authn]
     ),
 
     ?AUTHN:delete_chain(?GLOBAL),
@@ -76,12 +76,7 @@ init_per_suite(Config) ->
     Config.
 
 end_per_suite(_Config) ->
-    emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authn]),
-    ok.
-
-set_special_configs(emqx_dashboard) ->
-    emqx_dashboard_api_test_helpers:set_default_config();
-set_special_configs(_App) ->
+    emqx_mgmt_api_test_util:end_suite([emqx_authn]),
     ok.
 
 %%------------------------------------------------------------------------------
@@ -197,7 +197,7 @@ t_list_users(_) ->
             #{is_superuser := false, user_id := _},
             #{is_superuser := false, user_id := _}
         ],
-        meta := #{page := 1, limit := 2, count := 3}
+        meta := #{page := 1, limit := 2, count := 3, hasnext := true}
     } = emqx_authn_mnesia:list_users(
         #{<<"page">> => 1, <<"limit">> => 2},
         State
@@ -205,7 +205,7 @@ t_list_users(_) ->
 
     #{
         data := [#{is_superuser := false, user_id := _}],
-        meta := #{page := 2, limit := 2, count := 3}
+        meta := #{page := 2, limit := 2, count := 3, hasnext := false}
     } = emqx_authn_mnesia:list_users(
         #{<<"page">> => 2, <<"limit">> => 2},
         State
@@ -213,7 +213,7 @@ t_list_users(_) ->
 
     #{
        data := [#{is_superuser := false, user_id := <<"u3">>}],
-        meta := #{page := 1, limit := 20, count := 0}
+        meta := #{page := 1, limit := 20, hasnext := false}
     } = emqx_authn_mnesia:list_users(
         #{
             <<"page">> => 1,
@@ -300,14 +300,14 @@ t_list_users(_) ->
 
     #{
         data := [?USER_MAP, ?USER_MAP],
-        meta := #{page := 1, limit := 2, count := 3}
+        meta := #{page := 1, limit := 2, count := 3, hasnext := true}
     } = emqx_enhanced_authn_scram_mnesia:list_users(
         #{<<"page">> => 1, <<"limit">> => 2},
         State
     ),
     #{
         data := [?USER_MAP],
-        meta := #{page := 2, limit := 2, count := 3}
+        meta := #{page := 2, limit := 2, count := 3, hasnext := false}
     } = emqx_enhanced_authn_scram_mnesia:list_users(
         #{<<"page">> => 2, <<"limit">> => 2},
         State
@@ -319,7 +319,7 @@ t_list_users(_) ->
                 is_superuser := _
             }
         ],
-        meta := #{page := 1, limit := 3, count := 0}
+        meta := #{page := 1, limit := 3, hasnext := false}
     } = emqx_enhanced_authn_scram_mnesia:list_users(
         #{
             <<"page">> => 1,
@@ -15,7 +15,6 @@ authz:{
     pool_size: 1
     username: root
     password: public
-    auto_reconnect: true
     ssl: {
         enable: true
         cacertfile: "etc/certs/cacert.pem"
@@ -33,7 +32,6 @@ authz:{
     pool_size: 1
     username: root
     password: public
-    auto_reconnect: true
     ssl: {enable: false}
 }
 sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = ${peerhost} or username = ${username} or username = '$all' or clientid = ${clientid}"
@@ -45,7 +43,6 @@ authz:{
     database: 0
     pool_size: 1
     password: public
-    auto_reconnect: true
     ssl: {enable: false}
 }
 cmd: "HGETALL mqtt_authz:${username}"
@@ -1,6 +1,7 @@
 authorization {
     deny_action = ignore
     no_match = allow
+    cache = { enable = true }
     sources = [
         {
             type = file
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_authz, [
     {description, "An OTP application"},
-    {vsn, "0.1.11"},
+    {vsn, "0.1.12"},
     {registered, []},
     {mod, {emqx_authz_app, []}},
     {applications, [
@@ -20,6 +20,7 @@
 -include("emqx_authz.hrl").
 -include_lib("emqx/include/logger.hrl").
 -include_lib("emqx/include/emqx_hooks.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
 
 -ifdef(TEST).
 -compile(export_all).
@@ -358,6 +359,7 @@ authorize_non_superuser(
             emqx_metrics:inc(?METRIC_DENY),
             {stop, #{result => deny, from => AuthzSource}};
         nomatch ->
+            ?tp(authz_non_superuser, #{result => nomatch}),
             ?SLOG(info, #{
                 msg => "authorization_failed_nomatch",
                 username => Username,
@@ -388,6 +390,12 @@ do_authorize(
             nomatch ->
                 emqx_metrics_worker:inc(authz_metrics, Type, nomatch),
                 do_authorize(Client, PubSub, Topic, Tail);
+            %% {matched, allow | deny | ignore}
+            {matched, ignore} ->
+                do_authorize(Client, PubSub, Topic, Tail);
+            ignore ->
+                do_authorize(Client, PubSub, Topic, Tail);
+            %% {matched, allow | deny}
             Matched ->
                 {Matched, Type}
         catch
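The two new `ignore` clauses above make a source that neither allows nor denies fall through to the next source. A compact sketch of that chain semantics (`authorize_with/2` is a hypothetical stand-in for a single-source check):

%% Sketch of the authorizer-chain fallthrough added above: a source may
%% allow, deny, or pass the request on to the next source in the list.
chain([], _Req) ->
    nomatch;
chain([Source | Rest], Req) ->
    case authorize_with(Source, Req) of
        ignore -> chain(Rest, Req);
        {matched, ignore} -> chain(Rest, Req);
        {matched, Decision} -> {Decision, Source}
    end.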
@@ -64,7 +64,7 @@ schema("/authorization/settings") ->
     }.
 
 ref_authz_schema() ->
-    proplists:delete(sources, emqx_conf_schema:fields("authorization")).
+    emqx_schema:authz_fields().
 
 settings(get, _Params) ->
     {200, authorization_settings()};
@@ -83,4 +83,6 @@ settings(put, #{
     {200, authorization_settings()}.
 
 authorization_settings() ->
-    maps:remove(<<"sources">>, emqx:get_raw_config([authorization], #{})).
+    C = maps:remove(<<"sources">>, emqx:get_raw_config([authorization], #{})),
+    Schema = emqx_hocon:make_schema(emqx_schema:authz_fields()),
+    hocon_tconf:make_serializable(Schema, C, #{}).
@@ -449,7 +449,7 @@ is_ok(ResL) ->
 
 get_raw_sources() ->
     RawSources = emqx:get_raw_config([authorization, sources], []),
-    Schema = #{roots => emqx_authz_schema:fields("authorization"), fields => #{}},
+    Schema = emqx_hocon:make_schema(emqx_authz_schema:authz_fields()),
     Conf = #{<<"sources">> => RawSources},
     #{<<"sources">> := Sources} = hocon_tconf:make_serializable(Schema, Conf, #{}),
     merge_default_headers(Sources).
@@ -20,6 +20,7 @@
 -include_lib("emqx/include/emqx.hrl").
 -include_lib("emqx/include/logger.hrl").
 -include_lib("emqx/include/emqx_placeholder.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
 
 -behaviour(emqx_authz).
 
@@ -104,6 +105,7 @@ authorize(
             log_nomtach_msg(Status, Headers, Body),
             nomatch;
         {error, Reason} ->
+            ?tp(authz_http_request_failure, #{error => Reason}),
             ?SLOG(error, #{
                 msg => "http_server_query_failed",
                 resource => ResourceID,
@@ -33,9 +33,11 @@
 -export([
     namespace/0,
     roots/0,
+    tags/0,
     fields/1,
     validations/0,
-    desc/1
+    desc/1,
+    authz_fields/0
 ]).
 
 -export([
@@ -47,14 +49,8 @@
 %% Hocon Schema
 %%--------------------------------------------------------------------
 
-namespace() -> authz.
-
-%% @doc authorization schema is not exported
-%% but directly used by emqx_schema
-roots() -> [].
-
-fields("authorization") ->
-    Types = [
+type_names() ->
+    [
         file,
         http_get,
         http_post,
@@ -67,18 +63,19 @@ fields("authorization") ->
         redis_single,
         redis_sentinel,
         redis_cluster
-    ],
-    Unions = [?R_REF(Type) || Type <- Types],
-    [
-        {sources,
-            ?HOCON(
-                ?ARRAY(?UNION(Unions)),
-                #{
-                    default => [],
-                    desc => ?DESC(sources)
-                }
-            )}
-    ];
+    ].
+
+namespace() -> authz.
+
+tags() ->
+    [<<"Authorization">>].
+
+%% @doc authorization schema is not exported
+%% but directly used by emqx_schema
+roots() -> [].
+
+fields("authorization") ->
+    authz_fields();
 fields(file) ->
     authz_common_fields(file) ++
         [{path, ?HOCON(string(), #{required => true, desc => ?DESC(path)})}];
@@ -408,9 +405,94 @@ common_rate_field() ->
     ].
 
 method(Method) ->
-    ?HOCON(Method, #{default => Method, required => true, desc => ?DESC(method)}).
+    ?HOCON(Method, #{required => true, desc => ?DESC(method)}).
 
 array(Ref) -> array(Ref, Ref).
 
 array(Ref, DescId) ->
     ?HOCON(?ARRAY(?R_REF(Ref)), #{desc => ?DESC(DescId)}).
+
+select_union_member(#{<<"type">> := <<"mongodb">>} = Value) ->
+    MongoType = maps:get(<<"mongo_type">>, Value, undefined),
+    case MongoType of
+        <<"single">> ->
+            ?R_REF(mongo_single);
+        <<"rs">> ->
+            ?R_REF(mongo_rs);
+        <<"sharded">> ->
+            ?R_REF(mongo_sharded);
+        Else ->
+            throw(#{
+                reason => "unknown_mongo_type",
+                expected => "single | rs | sharded",
+                got => Else
+            })
+    end;
+select_union_member(#{<<"type">> := <<"redis">>} = Value) ->
+    RedisType = maps:get(<<"redis_type">>, Value, undefined),
+    case RedisType of
+        <<"single">> ->
+            ?R_REF(redis_single);
+        <<"cluster">> ->
+            ?R_REF(redis_cluster);
+        <<"sentinel">> ->
+            ?R_REF(redis_sentinel);
+        Else ->
+            throw(#{
+                reason => "unknown_redis_type",
+                expected => "single | cluster | sentinel",
+                got => Else
+            })
+    end;
+select_union_member(#{<<"type">> := <<"http">>} = Value) ->
+    RedisType = maps:get(<<"method">>, Value, undefined),
+    case RedisType of
+        <<"get">> ->
+            ?R_REF(http_get);
+        <<"post">> ->
+            ?R_REF(http_post);
+        Else ->
+            throw(#{
+                reason => "unknown_http_method",
+                expected => "get | post",
+                got => Else
+            })
+    end;
+select_union_member(#{<<"type">> := <<"built_in_database">>}) ->
+    ?R_REF(mnesia);
+select_union_member(#{<<"type">> := Type}) ->
+    select_union_member_loop(Type, type_names());
+select_union_member(_) ->
+    throw("missing_type_field").
+
+select_union_member_loop(TypeValue, []) ->
+    throw(#{
+        reason => "unknown_authz_type",
+        got => TypeValue
+    });
+select_union_member_loop(TypeValue, [Type | Types]) ->
+    case TypeValue =:= atom_to_binary(Type) of
+        true ->
+            ?R_REF(Type);
+        false ->
+            select_union_member_loop(TypeValue, Types)
+    end.
+
+authz_fields() ->
+    Types = [?R_REF(Type) || Type <- type_names()],
+    UnionMemberSelector =
+        fun
+            (all_union_members) -> Types;
+            %% must return list
+            ({value, Value}) -> [select_union_member(Value)]
+        end,
+    [
+        {sources,
+            ?HOCON(
+                ?ARRAY(?UNION(UnionMemberSelector)),
+                #{
+                    default => [],
+                    desc => ?DESC(sources)
+                }
+            )}
+    ].
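The `UnionMemberSelector` fun above is how hocon narrows an array-of-union to a single candidate ref per raw value instead of trying every member. A toy selector with the same shape, where `ref_a`/`ref_b` and the type tags are purely illustrative stand-ins for real `?R_REF(...)` schema references:

%% Toy union-member selector mirroring the shape used by authz_fields/0.
%% ref_a/ref_b are hypothetical stand-ins for ?R_REF(...) references.
selector() ->
    fun
        %% hocon may ask for the complete member list
        (all_union_members) ->
            [ref_a, ref_b];
        %% otherwise it passes the raw value; return a one-element list
        ({value, #{<<"type">> := <<"a">>}}) ->
            [ref_a];
        ({value, #{<<"type">> := <<"b">>}}) ->
            [ref_b];
        ({value, _}) ->
            throw("missing_type_field")
    end.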
@@ -18,7 +18,7 @@
 -compile(nowarn_export_all).
 -compile(export_all).
 
--import(emqx_dashboard_api_test_helpers, [request/2, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/2, uri/1]).
 
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
@@ -32,8 +32,8 @@ groups() ->
     [].
 
 init_per_suite(Config) ->
-    ok = emqx_common_test_helpers:start_apps(
-        [emqx_conf, emqx_authz, emqx_dashboard, emqx_management],
+    ok = emqx_mgmt_api_test_util:init_suite(
+        [emqx_conf, emqx_authz],
         fun set_special_configs/1
     ),
     Config.
@@ -47,7 +47,7 @@ end_per_suite(_Config) ->
             <<"sources">> => []
         }
     ),
-    emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf, emqx_management]),
+    emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
     ok.
 
 set_special_configs(emqx_dashboard) ->
@@ -67,12 +67,12 @@ t_clean_cahce(_) ->
     ok = emqtt:publish(C, <<"a/b/c">>, <<"{\"x\":1,\"y\":1}">>, 0),
 
     {ok, 200, Result3} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
-    ?assertEqual(2, length(jsx:decode(Result3))),
+    ?assertEqual(2, length(emqx_json:decode(Result3))),
 
     request(delete, uri(["authorization", "cache"])),
 
     {ok, 200, Result4} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
-    ?assertEqual(0, length(jsx:decode(Result4))),
+    ?assertEqual(0, length(emqx_json:decode(Result4))),
 
     ok.
@@ -22,7 +22,7 @@
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
 
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
 
 all() ->
     emqx_common_test_helpers:all(?MODULE).
@@ -31,8 +31,8 @@ groups() ->
     [].
 
 init_per_suite(Config) ->
-    ok = emqx_common_test_helpers:start_apps(
-        [emqx_conf, emqx_authz, emqx_dashboard],
+    ok = emqx_mgmt_api_test_util:init_suite(
+        [emqx_conf, emqx_authz],
         fun set_special_configs/1
     ),
     Config.
@@ -46,7 +46,7 @@ end_per_suite(_Config) ->
             <<"sources">> => []
         }
     ),
-    emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
+    emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
     ok.
 
 set_special_configs(emqx_dashboard) ->
|
@ -92,7 +92,8 @@ t_api(_) ->
|
||||||
<<"meta">> := #{
|
<<"meta">> := #{
|
||||||
<<"count">> := 1,
|
<<"count">> := 1,
|
||||||
<<"limit">> := 100,
|
<<"limit">> := 100,
|
||||||
<<"page">> := 1
|
<<"page">> := 1,
|
||||||
|
<<"hasnext">> := false
|
||||||
}
|
}
|
||||||
} = jsx:decode(Request1),
|
} = jsx:decode(Request1),
|
||||||
?assertEqual(3, length(Rules1)),
|
?assertEqual(3, length(Rules1)),
|
||||||
|
@ -109,14 +110,17 @@ t_api(_) ->
|
||||||
]),
|
]),
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
|
?assertEqual(
|
||||||
#{
|
#{
|
||||||
<<"data">> := [],
|
<<"data">> => [],
|
||||||
<<"meta">> := #{
|
<<"meta">> => #{
|
||||||
<<"count">> := 0,
|
<<"limit">> => 20,
|
||||||
<<"limit">> := 20,
|
<<"page">> => 1,
|
||||||
<<"page">> := 1
|
<<"hasnext">> => false
|
||||||
}
|
}
|
||||||
} = jsx:decode(Request1_1),
|
},
|
||||||
|
jsx:decode(Request1_1)
|
||||||
|
),
|
||||||
|
|
||||||
{ok, 200, Request2} =
|
{ok, 200, Request2} =
|
||||||
request(
|
request(
|
||||||
|
@ -160,6 +164,14 @@ t_api(_) ->
|
||||||
[]
|
[]
|
||||||
),
|
),
|
||||||
|
|
||||||
|
% ensure that db contain a mix of records
|
||||||
|
{ok, 204, _} =
|
||||||
|
request(
|
||||||
|
post,
|
||||||
|
uri(["authorization", "sources", "built_in_database", "username"]),
|
||||||
|
[?USERNAME_RULES_EXAMPLE]
|
||||||
|
),
|
||||||
|
|
||||||
{ok, 204, _} =
|
{ok, 204, _} =
|
||||||
request(
|
request(
|
||||||
post,
|
post,
|
||||||
|
|
|
@@ -18,7 +18,7 @@
 -compile(nowarn_export_all).
 -compile(export_all).
 
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
 
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
@@ -30,7 +30,7 @@ groups() ->
     [].
 
 init_per_suite(Config) ->
-    ok = emqx_common_test_helpers:start_apps(
+    ok = emqx_mgmt_api_test_util:init_suite(
         [emqx_conf, emqx_authz, emqx_dashboard],
         fun set_special_configs/1
     ),
@@ -46,7 +46,7 @@ end_per_suite(_Config) ->
         }
     ),
     ok = stop_apps([emqx_resource]),
-    emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
+    emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
     ok.
 
 set_special_configs(emqx_dashboard) ->
@@ -18,7 +18,7 @@
 -compile(nowarn_export_all).
 -compile(export_all).
 
--import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
 
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
@@ -115,8 +115,8 @@ init_per_suite(Config) ->
         end
     ),
 
-    ok = emqx_common_test_helpers:start_apps(
-        [emqx_conf, emqx_authz, emqx_dashboard],
+    ok = emqx_mgmt_api_test_util:init_suite(
+        [emqx_conf, emqx_authz],
         fun set_special_configs/1
     ),
     ok = start_apps([emqx_resource]),
@@ -134,7 +134,7 @@ end_per_suite(_Config) ->
     %% resource and connector should be stop first,
     %% or authz_[mysql|pgsql|redis..]_SUITE would be failed
     ok = stop_apps([emqx_resource]),
-    emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
+    emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
     meck:unload(emqx_resource),
     ok.
@@ -23,6 +23,7 @@
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
 -include_lib("emqx/include/emqx_placeholder.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
 
 -define(HTTP_PORT, 33333).
 -define(HTTP_PATH, "/authz/[...]").
@@ -64,7 +65,14 @@ init_per_testcase(_Case, Config) ->
     Config.
 
 end_per_testcase(_Case, _Config) ->
-    ok = emqx_authz_http_test_server:stop().
+    try
+        ok = emqx_authz_http_test_server:stop()
+    catch
+        exit:noproc ->
+            ok
+    end,
+    snabbkaffe:stop(),
+    ok.
 
 %%------------------------------------------------------------------------------
 %% Tests
@@ -148,7 +156,39 @@ t_response_handling(_Config) ->
     ?assertEqual(
         deny,
         emqx_access_control:authorize(ClientInfo, publish, <<"t">>)
-    ).
+    ),
+
+    %% the server cannot be reached; should skip to the next
+    %% authorizer in the chain.
+    ok = emqx_authz_http_test_server:stop(),
+
+    ?check_trace(
+        ?assertEqual(
+            deny,
+            emqx_access_control:authorize(ClientInfo, publish, <<"t">>)
+        ),
+        fun(Trace) ->
+            ?assertMatch(
+                [
+                    #{
+                        ?snk_kind := authz_http_request_failure,
+                        error := {recoverable_error, econnrefused}
+                    }
+                ],
+                ?of_kind(authz_http_request_failure, Trace)
+            ),
+            ?assert(
+                ?strict_causality(
+                    #{?snk_kind := authz_http_request_failure},
+                    #{?snk_kind := authz_non_superuser, result := nomatch},
+                    Trace
+                )
+            ),
+            ok
+        end
+    ),
+
+    ok.
 
 t_query_params(_Config) ->
     ok = setup_handler_and_config(
@@ -0,0 +1,116 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_authz_schema_tests).
+
+-include_lib("eunit/include/eunit.hrl").
+
+bad_authz_type_test() ->
+    Txt = "[{type: foobar}]",
+    ?assertThrow(
+        [
+            #{
+                reason := "unknown_authz_type",
+                got := <<"foobar">>
+            }
+        ],
+        check(Txt)
+    ).
+
+bad_mongodb_type_test() ->
+    Txt = "[{type: mongodb, mongo_type: foobar}]",
+    ?assertThrow(
+        [
+            #{
+                reason := "unknown_mongo_type",
+                got := <<"foobar">>
+            }
+        ],
+        check(Txt)
+    ).
+
+missing_mongodb_type_test() ->
+    Txt = "[{type: mongodb}]",
+    ?assertThrow(
+        [
+            #{
+                reason := "unknown_mongo_type",
+                got := undefined
+            }
+        ],
+        check(Txt)
+    ).
+
+unknown_redis_type_test() ->
+    Txt = "[{type: redis, redis_type: foobar}]",
+    ?assertThrow(
+        [
+            #{
+                reason := "unknown_redis_type",
+                got := <<"foobar">>
+            }
+        ],
+        check(Txt)
+    ).
+
+missing_redis_type_test() ->
+    Txt = "[{type: redis}]",
+    ?assertThrow(
+        [
+            #{
+                reason := "unknown_redis_type",
+                got := undefined
+            }
+        ],
+        check(Txt)
+    ).
+
+unknown_http_method_test() ->
+    Txt = "[{type: http, method: getx}]",
+    ?assertThrow(
+        [
+            #{
+                reason := "unknown_http_method",
+                got := <<"getx">>
+            }
+        ],
+        check(Txt)
+    ).
+
+missing_http_method_test() ->
+    Txt = "[{type: http, methodx: get}]",
+    ?assertThrow(
+        [
+            #{
+                reason := "unknown_http_method",
+                got := undefined
+            }
+        ],
+        check(Txt)
+    ).
+
+check(Txt0) ->
+    Txt = ["sources: ", Txt0],
+    {ok, RawConf} = hocon:binary(Txt),
+    try
+        hocon_tconf:check_plain(schema(), RawConf, #{})
+    catch
+        throw:{_Schema, Errors} ->
+            throw(Errors)
+    end.
+
+schema() ->
+    #{roots => emqx_authz_schema:fields("authorization")}.
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auto_subscribe, [
-    {description, "An OTP application"},
-    {vsn, "0.1.2"},
+    {description, "Auto subscribe Application"},
+    {vsn, "0.1.3"},
     {registered, []},
     {mod, {emqx_auto_subscribe_app, []}},
     {applications, [
@ -51,8 +51,21 @@ max_limit() ->
|
||||||
list() ->
|
list() ->
|
||||||
format(emqx_conf:get([auto_subscribe, topics], [])).
|
format(emqx_conf:get([auto_subscribe, topics], [])).
|
||||||
|
|
||||||
update(Topics) ->
|
update(Topics) when length(Topics) =< ?MAX_AUTO_SUBSCRIBE ->
|
||||||
update_(Topics).
|
case
|
||||||
|
emqx_conf:update(
|
||||||
|
[auto_subscribe, topics],
|
||||||
|
Topics,
|
||||||
|
#{rawconf_with_defaults => true, override_to => cluster}
|
||||||
|
)
|
||||||
|
of
|
||||||
|
{ok, #{raw_config := NewTopics}} ->
|
||||||
|
{ok, NewTopics};
|
||||||
|
{error, Reason} ->
|
||||||
|
{error, Reason}
|
||||||
|
end;
|
||||||
|
update(_Topics) ->
|
||||||
|
{error, quota_exceeded}.
|
||||||
|
|
||||||
post_config_update(_KeyPath, _Req, NewTopics, _OldConf, _AppEnvs) ->
|
post_config_update(_KeyPath, _Req, NewTopics, _OldConf, _AppEnvs) ->
|
||||||
Config = emqx_conf:get([auto_subscribe], #{}),
|
Config = emqx_conf:get([auto_subscribe], #{}),
|
||||||
|
@ -95,22 +108,6 @@ format(Rule = #{topic := Topic}) when is_map(Rule) ->
|
||||||
nl => maps:get(nl, Rule, 0)
|
nl => maps:get(nl, Rule, 0)
|
||||||
}.
|
}.
|
||||||
|
|
||||||
update_(Topics) when length(Topics) =< ?MAX_AUTO_SUBSCRIBE ->
|
|
||||||
case
|
|
||||||
emqx_conf:update(
|
|
||||||
[auto_subscribe, topics],
|
|
||||||
Topics,
|
|
||||||
#{rawconf_with_defaults => true, override_to => cluster}
|
|
||||||
)
|
|
||||||
of
|
|
||||||
{ok, #{raw_config := NewTopics}} ->
|
|
||||||
{ok, NewTopics};
|
|
||||||
{error, Reason} ->
|
|
||||||
{error, Reason}
|
|
||||||
end;
|
|
||||||
update_(_Topics) ->
|
|
||||||
{error, quota_exceeded}.
|
|
||||||
|
|
||||||
update_hook() ->
|
update_hook() ->
|
||||||
update_hook(emqx_conf:get([auto_subscribe], #{})).
|
update_hook(emqx_conf:get([auto_subscribe], #{})).
|
||||||
|
|
||||||
|
|
|
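With the private `update_/1` inlined, the quota guard is now the first clause a caller hits, so an oversized topic list is rejected before `emqx_conf` is ever touched. A hedged usage sketch (topic maps as used elsewhere in this suite; `?MAX_AUTO_SUBSCRIBE` is the module's compile-time limit):

```erlang
%% Sketch: the two observable outcomes of update/1 after this change.
{ok, _RawTopics} = emqx_auto_subscribe:update([#{<<"topic">> => <<"c/1">>}]),
%% A list longer than ?MAX_AUTO_SUBSCRIBE falls through to the second clause:
%% {error, quota_exceeded} = emqx_auto_subscribe:update(TooManyTopics)
```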
@@ -34,7 +34,7 @@
 -include_lib("emqx/include/emqx_placeholder.hrl").
 
 api_spec() ->
-    emqx_dashboard_swagger:spec(?MODULE).
+    emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
 
 paths() ->
     ["/mqtt/auto_subscribe"].

@@ -46,15 +46,15 @@ schema("/mqtt/auto_subscribe") ->
             description => ?DESC(list_auto_subscribe_api),
             tags => [<<"Auto Subscribe">>],
             responses => #{
-                200 => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe")
+                200 => topics()
             }
         },
         put => #{
             description => ?DESC(update_auto_subscribe_api),
             tags => [<<"Auto Subscribe">>],
-            'requestBody' => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe"),
+            'requestBody' => topics(),
             responses => #{
-                200 => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe"),
+                200 => topics(),
                 409 => emqx_dashboard_swagger:error_codes(
                     [?EXCEED_LIMIT],
                     ?DESC(update_auto_subscribe_api_response409)

@@ -63,14 +63,17 @@ schema("/mqtt/auto_subscribe") ->
         }
     }.
 
+topics() ->
+    Fields = emqx_auto_subscribe_schema:fields("auto_subscribe"),
+    {topics, Topics} = lists:keyfind(topics, 1, Fields),
+    Topics.
+
 %%%==============================================================================================
 %% api apply
 auto_subscribe(get, _) ->
     {200, emqx_auto_subscribe:list()};
-auto_subscribe(put, #{body := #{}}) ->
-    {400, #{code => ?BAD_REQUEST, message => <<"Request body required">>}};
-auto_subscribe(put, #{body := Params}) ->
-    case emqx_auto_subscribe:update(Params) of
+auto_subscribe(put, #{body := Topics}) when is_list(Topics) ->
+    case emqx_auto_subscribe:update(Topics) of
         {error, quota_exceeded} ->
             Message = list_to_binary(
                 io_lib:format(

@@ -93,9 +93,8 @@ init_per_suite(Config) ->
         " }"
     >>
     ),
-    emqx_common_test_helpers:start_apps(
-        [emqx_conf, emqx_dashboard, ?APP],
-        fun set_special_configs/1
+    emqx_mgmt_api_test_util:init_suite(
+        [emqx_conf, ?APP]
     ),
     Config.

@@ -111,12 +110,6 @@ end_per_testcase(t_get_basic_usage_info, _Config) ->
 end_per_testcase(_TestCase, _Config) ->
     ok.
 
-set_special_configs(emqx_dashboard) ->
-    emqx_dashboard_api_test_helpers:set_default_config(),
-    ok;
-set_special_configs(_) ->
-    ok.
-
 topic_config(T) ->
     #{
         topic => T,

@@ -132,7 +125,7 @@ end_per_suite(_) ->
     application:unload(?APP),
     meck:unload(emqx_resource),
     meck:unload(emqx_schema),
-    emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_conf, ?APP]).
+    emqx_mgmt_api_test_util:end_suite([emqx_conf, ?APP]).
 
 t_auto_subscribe(_) ->
     emqx_auto_subscribe:update([#{<<"topic">> => Topic} || Topic <- ?TOPICS]),

@@ -151,6 +144,32 @@ t_update(_) ->
     ResponseMap = emqx_json:decode(Response, [return_maps]),
     ?assertEqual(1, erlang:length(ResponseMap)),
 
+    BadBody1 = #{topic => ?TOPIC_S},
+    ?assertMatch(
+        {error, {"HTTP/1.1", 400, "Bad Request"}},
+        emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody1)
+    ),
+    BadBody2 = [#{topic => ?TOPIC_S, qos => 3}],
+    ?assertMatch(
+        {error, {"HTTP/1.1", 400, "Bad Request"}},
+        emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody2)
+    ),
+    BadBody3 = [#{topic => ?TOPIC_S, rh => 10}],
+    ?assertMatch(
+        {error, {"HTTP/1.1", 400, "Bad Request"}},
+        emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody3)
+    ),
+    BadBody4 = [#{topic => ?TOPIC_S, rap => -1}],
+    ?assertMatch(
+        {error, {"HTTP/1.1", 400, "Bad Request"}},
+        emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody4)
+    ),
+    BadBody5 = [#{topic => ?TOPIC_S, nl => -1}],
+    ?assertMatch(
+        {error, {"HTTP/1.1", 400, "Bad Request"}},
+        emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody5)
+    ),
+
     {ok, Client} = emqtt:start_link(#{username => ?CLIENT_USERNAME, clientid => ?CLIENT_ID}),
     {ok, _} = emqtt:connect(Client),
     timer:sleep(100),
@@ -93,11 +93,20 @@ HTTP 请求的标头。<br/>
     desc {
         en: """
 The body of the HTTP request.<br/>
+If not provided, the body will be a JSON object of all the available fields.<br/>
+There, 'all the available fields' means the context of a MQTT message when
+this webhook is triggered by receiving a MQTT message (the `local_topic` is set),
+or the context of the event when this webhook is triggered by a rule (i.e. this
+webhook is used as an action of a rule).<br/>
 Template with variables is allowed.
 """
         zh: """
 HTTP 请求的正文。<br/>
-允许使用带有变量的模板。"""
+如果没有设置该字段,请求正文将是包含所有可用字段的 JSON object。<br/>
+如果该 webhook 是由于收到 MQTT 消息触发的,'所有可用字段' 将是 MQTT 消息的
+上下文信息;如果该 webhook 是由于规则触发的,'所有可用字段' 则为触发事件的上下文信息。<br/>
+允许使用带有变量的模板。
+"""
     }
     label: {
         en: "HTTP Body"
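The fallback this description adds ("a JSON object of all the available fields") is implemented further down in this change set in `emqx_connector_http`: an unset body template makes `process_request_body/2` JSON-encode the whole message context. A hedged sketch of the template path (the placeholder and message values are illustrative only):

```erlang
%% Sketch: how a configured body template is applied per message.
BodyTks = emqx_plugin_libs_rule:preproc_tmpl(<<"{\"payload\": \"${payload}\"}">>),
Body = emqx_plugin_libs_rule:proc_tmpl(BodyTks, #{payload => <<"hello">>}),
%% Body is now <<"{\"payload\": \"hello\"}">>. With no template configured,
%% the connector sends emqx_json:encode(Msg) instead (see process_request_body/2).
```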
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_bridge, [
     {description, "EMQX bridges"},
-    {vsn, "0.1.8"},
+    {vsn, "0.1.9"},
     {registered, []},
     {mod, {emqx_bridge_app, []}},
     {applications, [
|
||||||
responses => #{
|
responses => #{
|
||||||
204 => <<"Bridge deleted">>,
|
204 => <<"Bridge deleted">>,
|
||||||
400 => error_schema(['INVALID_ID'], "Update bridge failed"),
|
400 => error_schema(['INVALID_ID'], "Update bridge failed"),
|
||||||
|
404 => error_schema('NOT_FOUND', "Bridge not found"),
|
||||||
403 => error_schema('FORBIDDEN_REQUEST', "Forbidden operation"),
|
403 => error_schema('FORBIDDEN_REQUEST', "Forbidden operation"),
|
||||||
503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
|
503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
|
||||||
}
|
}
|
||||||
|
@ -452,6 +453,8 @@ schema("/bridges_probe") ->
|
||||||
end,
|
end,
|
||||||
?TRY_PARSE_ID(
|
?TRY_PARSE_ID(
|
||||||
Id,
|
Id,
|
||||||
|
case emqx_bridge:lookup(BridgeType, BridgeName) of
|
||||||
|
{ok, _} ->
|
||||||
case emqx_bridge:check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActs) of
|
case emqx_bridge:check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActs) of
|
||||||
{ok, _} ->
|
{ok, _} ->
|
||||||
204;
|
204;
|
||||||
|
@ -465,6 +468,9 @@ schema("/bridges_probe") ->
|
||||||
{503, error_msg('SERVICE_UNAVAILABLE', <<"request timeout">>)};
|
{503, error_msg('SERVICE_UNAVAILABLE', <<"request timeout">>)};
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
{500, error_msg('INTERNAL_ERROR', Reason)}
|
{500, error_msg('INTERNAL_ERROR', Reason)}
|
||||||
|
end;
|
||||||
|
{error, not_found} ->
|
||||||
|
{404, error_msg('NOT_FOUND', <<"Bridge not found">>)}
|
||||||
end
|
end
|
||||||
).
|
).
|
||||||
|
|
||||||
|
|
|
@ -132,13 +132,14 @@ create(BridgeId, Conf) ->
|
||||||
create(Type, Name, Conf) ->
|
create(Type, Name, Conf) ->
|
||||||
create(Type, Name, Conf, #{}).
|
create(Type, Name, Conf, #{}).
|
||||||
|
|
||||||
create(Type, Name, Conf, Opts) ->
|
create(Type, Name, Conf, Opts0) ->
|
||||||
?SLOG(info, #{
|
?SLOG(info, #{
|
||||||
msg => "create bridge",
|
msg => "create bridge",
|
||||||
type => Type,
|
type => Type,
|
||||||
name => Name,
|
name => Name,
|
||||||
config => Conf
|
config => Conf
|
||||||
}),
|
}),
|
||||||
|
Opts = override_start_after_created(Conf, Opts0),
|
||||||
{ok, _Data} = emqx_resource:create_local(
|
{ok, _Data} = emqx_resource:create_local(
|
||||||
resource_id(Type, Name),
|
resource_id(Type, Name),
|
||||||
<<"emqx_bridge">>,
|
<<"emqx_bridge">>,
|
||||||
|
@ -146,7 +147,7 @@ create(Type, Name, Conf, Opts) ->
|
||||||
parse_confs(bin(Type), Name, Conf),
|
parse_confs(bin(Type), Name, Conf),
|
||||||
Opts
|
Opts
|
||||||
),
|
),
|
||||||
maybe_disable_bridge(Type, Name, Conf).
|
ok.
|
||||||
|
|
||||||
update(BridgeId, {OldConf, Conf}) ->
|
update(BridgeId, {OldConf, Conf}) ->
|
||||||
{BridgeType, BridgeName} = parse_bridge_id(BridgeId),
|
{BridgeType, BridgeName} = parse_bridge_id(BridgeId),
|
||||||
|
@ -155,7 +156,7 @@ update(BridgeId, {OldConf, Conf}) ->
|
||||||
update(Type, Name, {OldConf, Conf}) ->
|
update(Type, Name, {OldConf, Conf}) ->
|
||||||
update(Type, Name, {OldConf, Conf}, #{}).
|
update(Type, Name, {OldConf, Conf}, #{}).
|
||||||
|
|
||||||
update(Type, Name, {OldConf, Conf}, Opts) ->
|
update(Type, Name, {OldConf, Conf}, Opts0) ->
|
||||||
%% TODO: sometimes its not necessary to restart the bridge connection.
|
%% TODO: sometimes its not necessary to restart the bridge connection.
|
||||||
%%
|
%%
|
||||||
%% - if the connection related configs like `servers` is updated, we should restart/start
|
%% - if the connection related configs like `servers` is updated, we should restart/start
|
||||||
|
@ -164,6 +165,7 @@ update(Type, Name, {OldConf, Conf}, Opts) ->
|
||||||
%% the `method` or `headers` of a WebHook is changed, then the bridge can be updated
|
%% the `method` or `headers` of a WebHook is changed, then the bridge can be updated
|
||||||
%% without restarting the bridge.
|
%% without restarting the bridge.
|
||||||
%%
|
%%
|
||||||
|
Opts = override_start_after_created(Conf, Opts0),
|
||||||
case emqx_map_lib:if_only_to_toggle_enable(OldConf, Conf) of
|
case emqx_map_lib:if_only_to_toggle_enable(OldConf, Conf) of
|
||||||
false ->
|
false ->
|
||||||
?SLOG(info, #{
|
?SLOG(info, #{
|
||||||
|
@ -174,10 +176,10 @@ update(Type, Name, {OldConf, Conf}, Opts) ->
|
||||||
}),
|
}),
|
||||||
case recreate(Type, Name, Conf, Opts) of
|
case recreate(Type, Name, Conf, Opts) of
|
||||||
{ok, _} ->
|
{ok, _} ->
|
||||||
maybe_disable_bridge(Type, Name, Conf);
|
ok;
|
||||||
{error, not_found} ->
|
{error, not_found} ->
|
||||||
?SLOG(warning, #{
|
?SLOG(warning, #{
|
||||||
msg => "updating_a_non-exist_bridge_need_create_a_new_one",
|
msg => "updating_a_non_existing_bridge",
|
||||||
type => Type,
|
type => Type,
|
||||||
name => Name,
|
name => Name,
|
||||||
config => Conf
|
config => Conf
|
||||||
|
@ -244,12 +246,6 @@ remove(Type, Name, _Conf, _Opts) ->
|
||||||
{error, Reason} -> {error, Reason}
|
{error, Reason} -> {error, Reason}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
maybe_disable_bridge(Type, Name, Conf) ->
|
|
||||||
case maps:get(enable, Conf, true) of
|
|
||||||
false -> stop(Type, Name);
|
|
||||||
true -> ok
|
|
||||||
end.
|
|
||||||
|
|
||||||
maybe_clear_certs(TmpPath, #{ssl := SslConf} = Conf) ->
|
maybe_clear_certs(TmpPath, #{ssl := SslConf} = Conf) ->
|
||||||
%% don't remove the cert files if they are in use
|
%% don't remove the cert files if they are in use
|
||||||
case is_tmp_path_conf(TmpPath, SslConf) of
|
case is_tmp_path_conf(TmpPath, SslConf) of
|
||||||
|
@ -276,7 +272,6 @@ parse_confs(
|
||||||
#{
|
#{
|
||||||
url := Url,
|
url := Url,
|
||||||
method := Method,
|
method := Method,
|
||||||
body := Body,
|
|
||||||
headers := Headers,
|
headers := Headers,
|
||||||
request_timeout := ReqTimeout,
|
request_timeout := ReqTimeout,
|
||||||
max_retries := Retry
|
max_retries := Retry
|
||||||
|
@ -290,7 +285,7 @@ parse_confs(
|
||||||
#{
|
#{
|
||||||
path => Path,
|
path => Path,
|
||||||
method => Method,
|
method => Method,
|
||||||
body => Body,
|
body => maps:get(body, Conf, undefined),
|
||||||
headers => Headers,
|
headers => Headers,
|
||||||
request_timeout => ReqTimeout,
|
request_timeout => ReqTimeout,
|
||||||
max_retries => Retry
|
max_retries => Retry
|
||||||
|
@ -324,3 +319,8 @@ str(Str) when is_list(Str) -> Str.
|
||||||
bin(Bin) when is_binary(Bin) -> Bin;
|
bin(Bin) when is_binary(Bin) -> Bin;
|
||||||
bin(Str) when is_list(Str) -> list_to_binary(Str);
|
bin(Str) when is_list(Str) -> list_to_binary(Str);
|
||||||
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
|
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
|
||||||
|
|
||||||
|
override_start_after_created(Config, Opts) ->
|
||||||
|
Enabled = maps:get(enable, Config, true),
|
||||||
|
StartAfterCreated = Enabled andalso maps:get(start_after_created, Opts, Enabled),
|
||||||
|
Opts#{start_after_created => StartAfterCreated}.
|
||||||
|
|
|
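`override_start_after_created/2` replaces the old create-then-stop dance of `maybe_disable_bridge/3`: a disabled bridge is simply never started, instead of being started and immediately stopped. A sketch of the resulting option map (the function is module-internal; it is shown unqualified purely for illustration):

```erlang
%% Sketch: enable => false always wins; otherwise the caller's option
%% (defaulting to the enable flag itself) decides.
#{start_after_created := false} =
    override_start_after_created(#{enable => false}, #{}),
#{start_after_created := true} =
    override_start_after_created(#{enable => true}, #{}),
#{start_after_created := false} =
    override_start_after_created(#{enable => true}, #{start_after_created => false}).
```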
@@ -20,7 +20,7 @@
 
 -import(hoconsc, [mk/2, ref/2]).
 
--export([roots/0, fields/1, desc/1, namespace/0]).
+-export([roots/0, fields/1, desc/1, namespace/0, tags/0]).
 
 -export([
     get_response/0,

@@ -104,6 +104,9 @@ metrics_status_fields() ->
 
 namespace() -> "bridge".
 
+tags() ->
+    [<<"Bridge">>].
+
 roots() -> [bridges].
 
 fields(bridges) ->

@@ -122,7 +125,9 @@ fields(bridges) ->
                 #{
                     desc => ?DESC("bridges_mqtt"),
                     required => false,
-                    converter => fun emqx_bridge_mqtt_config:upgrade_pre_ee/1
+                    converter => fun(X, _HoconOpts) ->
+                        emqx_bridge_mqtt_config:upgrade_pre_ee(X)
+                    end
                 }
             )}
     ] ++ ee_fields_bridges();

@@ -115,7 +115,7 @@ request_config() ->
         mk(
             binary(),
             #{
-                default => <<"${payload}">>,
+                default => undefined,
                 desc => ?DESC("config_body")
             }
         )},

@@ -18,7 +18,7 @@
 -compile(nowarn_export_all).
 -compile(export_all).
 
--import(emqx_dashboard_api_test_helpers, [request/4, uri/1]).
+-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
 
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").

@@ -68,9 +68,8 @@ init_per_suite(Config) ->
     %% some testcases (may from other app) already get emqx_connector started
     _ = application:stop(emqx_resource),
     _ = application:stop(emqx_connector),
-    ok = emqx_common_test_helpers:start_apps(
-        [emqx_rule_engine, emqx_bridge, emqx_dashboard],
-        fun set_special_configs/1
+    ok = emqx_mgmt_api_test_util:init_suite(
+        [emqx_rule_engine, emqx_bridge]
     ),
     ok = emqx_common_test_helpers:load_config(
         emqx_rule_engine_schema,

@@ -80,12 +79,7 @@ init_per_suite(Config) ->
     Config.
 
 end_per_suite(_Config) ->
-    emqx_common_test_helpers:stop_apps([emqx_rule_engine, emqx_bridge, emqx_dashboard]),
-    ok.
-
-set_special_configs(emqx_dashboard) ->
-    emqx_dashboard_api_test_helpers:set_default_config(<<"bridge_admin">>);
-set_special_configs(_) ->
+    emqx_mgmt_api_test_util:end_suite([emqx_rule_engine, emqx_bridge]),
     ok.
 
 init_per_testcase(_, Config) ->

@@ -311,6 +305,15 @@ t_http_crud_apis(Config) ->
         },
         jsx:decode(ErrMsg2)
     ),
+    %% Deleting a non-existing bridge should result in an error
+    {ok, 404, ErrMsg3} = request(delete, uri(["bridges", BridgeID]), []),
+    ?assertMatch(
+        #{
+            <<"code">> := _,
+            <<"message">> := <<"Bridge not found">>
+        },
+        jsx:decode(ErrMsg3)
+    ),
     ok.
 
 t_http_bridges_local_topic(Config) ->

@@ -1,6 +1,6 @@
 {application, emqx_conf, [
     {description, "EMQX configuration management"},
-    {vsn, "0.1.9"},
+    {vsn, "0.1.10"},
     {registered, []},
     {mod, {emqx_conf_app, []}},
     {applications, [kernel, stdlib]},

@@ -316,7 +316,7 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) ->
                 {[Schema | Acc], SubRefs ++ RefsAcc}
             end,
             {[], []},
-            Types
+            hoconsc:union_members(Types)
         ),
     {#{<<"oneOf">> => OneOf}, Refs};
 hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
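Folding over `hoconsc:union_members(Types)` instead of `Types` itself is what keeps the swagger generator working now that hocon unions are not always plain lists. A hedged sketch, assuming the hocon API behaves as this call site implies (a plain member list should come back unchanged; `mod_a` and `mod_b` are placeholder module names):

```erlang
%% Sketch only: normalize union members before building the oneOf array.
Types = [hoconsc:ref(mod_a, "field_a"), hoconsc:ref(mod_b, "field_b")],
Members = hoconsc:union_members(Types),
%% each member is then run through hocon_schema_to_spec/2 and collected
%% into #{<<"oneOf">> => OneOf} as in the hunk above.
```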
@@ -38,7 +38,9 @@
     cipher/0
 ]).
 
--export([namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1]).
+-export([
+    namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1, tags/0
+]).
 -export([conf_get/2, conf_get/3, keys/2, filter/1]).
 
 %% Static apps which merge their configs into the merged emqx.conf

@@ -60,12 +62,16 @@
     emqx_exhook_schema,
     emqx_psk_schema,
     emqx_limiter_schema,
-    emqx_slow_subs_schema
+    emqx_slow_subs_schema,
+    emqx_mgmt_api_key_schema
 ]).
 
 %% root config should not have a namespace
 namespace() -> undefined.
 
+tags() ->
+    [<<"EMQX">>].
+
 roots() ->
     PtKey = ?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY,
     case persistent_term:get(PtKey, undefined) of

@@ -941,8 +947,8 @@ fields("log_burst_limit") ->
         )}
     ];
 fields("authorization") ->
-    emqx_schema:fields("authorization") ++
-        emqx_authz_schema:fields("authorization").
+    emqx_schema:authz_fields() ++
+        emqx_authz_schema:authz_fields().
 
 desc("cluster") ->
     ?DESC("desc_cluster");

@@ -14,7 +14,7 @@ An MySQL connector can be used as following:
 ```
 (emqx@127.0.0.1)5> emqx_resource:list_instances_verbose().
 [#{config =>
-      #{auto_reconnect => true,cacertfile => [],certfile => [],
+      #{cacertfile => [],certfile => [],
         database => "mqtt",keyfile => [],password => "public",
         pool_size => 1,
         server => {{127,0,0,1},3306},
@@ -0,0 +1,37 @@
+emqx_connector_ldap {
+
+    bind_dn {
+        desc {
+            en: """LDAP's Binding Distinguished Name (DN)"""
+            zh: """LDAP 绑定的 DN 的值"""
+        }
+        label: {
+            en: "Bind DN"
+            zh: "Bind DN"
+        }
+    }
+
+    port {
+        desc {
+            en: """LDAP Port"""
+            zh: """LDAP 端口"""
+        }
+        label: {
+            en: "Port"
+            zh: "端口"
+        }
+    }
+
+
+    timeout {
+        desc {
+            en: """LDAP's query timeout"""
+            zh: """LDAP 查询超时时间"""
+        }
+        label: {
+            en: "timeout"
+            zh: "超时时间"
+        }
+    }
+
+}

@@ -2,34 +2,34 @@ emqx_connector_mongo {
 
     single_mongo_type {
         desc {
-            en: "Standalone instance."
-            zh: "Standalone模式。"
+            en: "Standalone instance. Must be set to 'single' when MongoDB server is running in standalone mode."
+            zh: "Standalone 模式。当 MongoDB 服务运行在 standalone 模式下,该配置必须设置为 'single'。 "
         }
         label: {
             en: "Standalone instance"
-            zh: "Standalone模式"
+            zh: "Standalone 模式"
        }
    }
 
     rs_mongo_type {
         desc {
-            en: "Replica set."
-            zh: "Replica set模式。"
+            en: "Replica set. Must be set to 'rs' when MongoDB server is running in 'replica set' mode."
+            zh: "Replica set模式。当 MongoDB 服务运行在 replica-set 模式下,该配置必须设置为 'rs'。"
        }
        label: {
            en: "Replica set"
-            zh: "Replica set模式"
+            zh: "Replica set 模式"
        }
    }
 
     sharded_mongo_type {
         desc {
-            en: "Sharded cluster."
-            zh: "Sharded cluster模式。"
+            en: "Sharded cluster. Must be set to 'sharded' when MongoDB server is running in 'sharded' mode."
+            zh: "Sharded cluster模式。当 MongoDB 服务运行在 sharded 模式下,该配置必须设置为 'sharded'。"
        }
        label: {
            en: "Sharded cluster"
-            zh: "Sharded cluster模式"
+            zh: "Sharded cluster 模式"
        }
    }
 

@@ -2,8 +2,8 @@ emqx_connector_redis {
 
     single {
         desc {
-            en: "Single mode"
-            zh: "单机模式。"
+            en: "Single mode. Must be set to 'single' when Redis server is running in single mode."
+            zh: "单机模式。当 Redis 服务运行在单机模式下,该配置必须设置为 'single'。"
        }
        label: {
            en: "Single Mode"

@@ -13,8 +13,8 @@ emqx_connector_redis {
 
     cluster {
         desc {
-            en: "Cluster mode"
-            zh: "集群模式。"
+            en: "Cluster mode. Must be set to 'cluster' when Redis server is running in clustered mode."
+            zh: "集群模式。当 Redis 服务运行在集群模式下,该配置必须设置为 'cluster'。"
        }
        label: {
            en: "Cluster Mode"

@@ -24,8 +24,8 @@ emqx_connector_redis {
 
     sentinel {
         desc {
-            en: "Sentinel mode"
-            zh: "哨兵模式。"
+            en: "Sentinel mode. Must be set to 'sentinel' when Redis server is running in sentinel mode."
+            zh: "哨兵模式。当 Redis 服务运行在哨兵模式下,该配置必须设置为 'sentinel'。"
        }
        label: {
            en: "Sentinel Mode"

@@ -68,12 +68,12 @@ emqx_connector_schema_lib {
 
     auto_reconnect {
         desc {
-            en: "Enable automatic reconnect to the database."
-            zh: "自动重连数据库。"
+            en: "Deprecated. Enable automatic reconnect to the database."
+            zh: "已弃用。自动重连数据库。"
        }
        label: {
-            en: "Auto Reconnect Database"
-            zh: "自动重连数据库"
+            en: "Deprecated. Auto Reconnect Database"
+            zh: "已弃用。自动重连数据库"
        }
    }
 

@@ -24,6 +24,8 @@
 -define(REDIS_DEFAULT_PORT, 6379).
 -define(PGSQL_DEFAULT_PORT, 5432).
 
+-define(AUTO_RECONNECT_INTERVAL, 2).
+
 -define(SERVERS_DESC,
     "A Node list for Cluster to connect to. The nodes should be separated with commas, such as: `Node[,Node].`<br/>"
     "For each Node should be: "

@@ -12,9 +12,9 @@
     {mysql, {git, "https://github.com/emqx/mysql-otp", {tag, "1.7.1"}}},
     {epgsql, {git, "https://github.com/emqx/epgsql", {tag, "4.7-emqx.2"}}},
     %% NOTE: mind poolboy version when updating mongodb-erlang version
-    {mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.13"}}},
+    {mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.19"}}},
     %% NOTE: mind poolboy version when updating eredis_cluster version
-    {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.7.1"}}},
+    {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.7.5"}}},
     %% mongodb-erlang uses a special fork https://github.com/comtihon/poolboy.git
     %% (which has overflow_ttl feature added).
     %% However, it references `{branch, "master}` (commit 9c06a9a on 2021-04-07).

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_connector, [
     {description, "EMQX Data Integration Connectors"},
-    {vsn, "0.1.11"},
+    {vsn, "0.1.12"},
     {registered, []},
     {mod, {emqx_connector_app, []}},
     {applications, [
@@ -209,7 +209,7 @@ on_start(
     ?SLOG(info, #{
         msg => "starting_http_connector",
         connector => InstId,
-        config => Config
+        config => emqx_misc:redact(Config)
     }),
     {Transport, TransportOpts} =
         case Scheme of

@@ -431,14 +431,13 @@ preprocess_request(
     #{
         method := Method,
         path := Path,
-        body := Body,
         headers := Headers
     } = Req
 ) ->
     #{
         method => emqx_plugin_libs_rule:preproc_tmpl(bin(Method)),
         path => emqx_plugin_libs_rule:preproc_tmpl(Path),
-        body => emqx_plugin_libs_rule:preproc_tmpl(Body),
+        body => maybe_preproc_tmpl(body, Req),
         headers => preproc_headers(Headers),
         request_timeout => maps:get(request_timeout, Req, 30000),
         max_retries => maps:get(max_retries, Req, 2)

@@ -469,6 +468,12 @@ preproc_headers(Headers) when is_list(Headers) ->
         Headers
     ).
 
+maybe_preproc_tmpl(Key, Conf) ->
+    case maps:get(Key, Conf, undefined) of
+        undefined -> undefined;
+        Val -> emqx_plugin_libs_rule:preproc_tmpl(Val)
+    end.
+
 process_request(
     #{
         method := MethodTks,

@@ -487,7 +492,7 @@ process_request(
         request_timeout => ReqTimeout
     }.
 
-process_request_body([], Msg) ->
+process_request_body(undefined, Msg) ->
     emqx_json:encode(Msg);
 process_request_body(BodyTks, Msg) ->
     emqx_plugin_libs_rule:proc_tmpl(BodyTks, Msg).
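`emqx_misc:redact/1` now scrubs the config before it reaches the info log; the same one-line change repeats for the ldap, mongodb, mqtt, mysql, postgresql and redis connectors below. A hedged sketch of the intent (the exact mask value is an assumption, not shown in this diff):

```erlang
%% Sketch: sensitive keys such as password should come back masked,
%% while non-sensitive keys pass through unchanged.
Config = #{base_url => <<"http://127.0.0.1:8080">>, password => <<"public">>},
#{password := Masked} = emqx_misc:redact(Config),
true = (Masked =/= <<"public">>).
```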
@@ -59,14 +59,13 @@ on_start(
         bind_password := BindPassword,
         timeout := Timeout,
         pool_size := PoolSize,
-        auto_reconnect := AutoReconn,
         ssl := SSL
     } = Config
 ) ->
     ?SLOG(info, #{
         msg => "starting_ldap_connector",
         connector => InstId,
-        config => Config
+        config => emqx_misc:redact(Config)
     }),
     Servers = emqx_schema:parse_servers(Servers0, ?LDAP_HOST_OPTIONS),
     SslOpts =

@@ -86,11 +85,11 @@ on_start(
         {bind_password, BindPassword},
         {timeout, Timeout},
         {pool_size, PoolSize},
-        {auto_reconnect, reconn_interval(AutoReconn)}
+        {auto_reconnect, ?AUTO_RECONNECT_INTERVAL}
     ],
     PoolName = emqx_plugin_libs_pool:pool_name(InstId),
     case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts ++ SslOpts) of
-        ok -> {ok, #{poolname => PoolName, auto_reconnect => AutoReconn}};
+        ok -> {ok, #{poolname => PoolName}};
         {error, Reason} -> {error, Reason}
     end.

@@ -129,9 +128,6 @@ on_query(InstId, {search, Base, Filter, Attributes}, #{poolname := PoolName} = S
 
 on_get_status(_InstId, _State) -> connected.
 
-reconn_interval(true) -> 15;
-reconn_interval(false) -> false.
-
 search(Conn, Base, Filter, Attributes) ->
     eldap2:search(Conn, [
         {base, Base},

@@ -68,7 +68,6 @@ fields(single) ->
         {mongo_type, #{
             type => single,
             default => single,
-            required => true,
             desc => ?DESC("single_mongo_type")
         }},
         {server, server()},

@@ -79,7 +78,6 @@ fields(rs) ->
         {mongo_type, #{
             type => rs,
             default => rs,
-            required => true,
             desc => ?DESC("rs_mongo_type")
         }},
         {servers, servers()},

@@ -92,7 +90,6 @@ fields(sharded) ->
         {mongo_type, #{
             type => sharded,
             default => sharded,
-            required => true,
             desc => ?DESC("sharded_mongo_type")
         }},
         {servers, servers()},

@@ -158,7 +155,7 @@ on_start(
             rs -> "starting_mongodb_replica_set_connector";
             sharded -> "starting_mongodb_sharded_connector"
         end,
-    ?SLOG(info, #{msg => Msg, connector => InstId, config => Config}),
+    ?SLOG(info, #{msg => Msg, connector => InstId, config => emqx_misc:redact(Config)}),
     NConfig = #{hosts := Hosts} = maybe_resolve_srv_and_txt_records(Config),
     SslOpts =
         case maps:get(enable, SSL) of

@@ -15,6 +15,8 @@
 %%--------------------------------------------------------------------
 -module(emqx_connector_mqtt).
 
+-include("emqx_connector.hrl").
+
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
 -include_lib("emqx/include/logger.hrl").

@@ -147,7 +149,7 @@ on_start(InstId, Conf) ->
     ?SLOG(info, #{
         msg => "starting_mqtt_connector",
         connector => InstanceId,
-        config => Conf
+        config => emqx_misc:redact(Conf)
     }),
     BasicConf = basic_config(Conf),
     BridgeConf = BasicConf#{

@@ -198,12 +200,10 @@ on_query_async(
     ?TRACE("QUERY", "async_send_msg_to_remote_node", #{message => Msg, connector => InstanceId}),
     emqx_connector_mqtt_worker:send_to_remote_async(InstanceId, Msg, {ReplyFun, Args}).
 
-on_get_status(_InstId, #{name := InstanceId, bridge_conf := Conf}) ->
-    AutoReconn = maps:get(auto_reconnect, Conf, true),
+on_get_status(_InstId, #{name := InstanceId}) ->
     case emqx_connector_mqtt_worker:status(InstanceId) of
         connected -> connected;
-        _ when AutoReconn == true -> connecting;
-        _ when AutoReconn == false -> disconnected
+        _ -> connecting
     end.
 
 ensure_mqtt_worker_started(InstanceId, BridgeConf) ->

@@ -236,7 +236,6 @@ make_forward_confs(FrowardConf) ->
 basic_config(
     #{
         server := Server,
-        reconnect_interval := ReconnIntv,
         proto_ver := ProtoVer,
         bridge_mode := BridgeMode,
         clean_start := CleanStart,

@@ -252,7 +251,7 @@ basic_config(
         %% 30s
         connect_timeout => 30,
         auto_reconnect => true,
-        reconnect_interval => ReconnIntv,
+        reconnect_interval => ?AUTO_RECONNECT_INTERVAL,
         proto_ver => ProtoVer,
         %% Opening bridge_mode will form a non-standard mqtt connection message.
         %% A load balancing server (such as haproxy) is often set up before the emqx broker server.

@@ -52,7 +52,6 @@
 -type state() ::
     #{
         poolname := atom(),
-        auto_reconnect := boolean(),
         prepare_statement := prepares(),
         params_tokens := params_tokens(),
         batch_inserts := sqls(),

@@ -95,7 +94,6 @@ on_start(
         server := Server,
         database := DB,
         username := Username,
-        auto_reconnect := AutoReconn,
         pool_size := PoolSize,
         ssl := SSL
     } = Config

@@ -104,7 +102,7 @@ on_start(
     ?SLOG(info, #{
         msg => "starting_mysql_connector",
         connector => InstId,
-        config => Config
+        config => emqx_misc:redact(Config)
     }),
     SslOpts =
         case maps:get(enable, SSL) of

@@ -120,13 +118,13 @@ on_start(
             {port, Port},
             {user, Username},
             {database, DB},
-            {auto_reconnect, reconn_interval(AutoReconn)},
+            {auto_reconnect, ?AUTO_RECONNECT_INTERVAL},
             {pool_size, PoolSize}
         ]
     ],
     PoolName = emqx_plugin_libs_pool:pool_name(InstId),
     Prepares = parse_prepare_sql(Config),
-    State = maps:merge(#{poolname => PoolName, auto_reconnect => AutoReconn}, Prepares),
+    State = maps:merge(#{poolname => PoolName}, Prepares),
     case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Options ++ SslOpts) of
         ok ->
             {ok, init_prepare(State)};

@@ -211,7 +209,7 @@ mysql_function(prepared_query) ->
 mysql_function(_) ->
     mysql_function(prepared_query).
 
-on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State) ->
+on_get_status(_InstId, #{poolname := Pool} = State) ->
     case emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1) of
         true ->
             case do_check_prepares(State) of

@@ -222,10 +220,10 @@ on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State
                     {connected, NState};
                 {error, _Reason} ->
                     %% do not log error, it is logged in prepare_sql_to_conn
-                    conn_status(AutoReconn)
+                    connecting
             end;
         false ->
-            conn_status(AutoReconn)
+            connecting
     end.
 
 do_get_status(Conn) ->

@@ -244,11 +242,6 @@ do_check_prepares(State = #{poolname := PoolName, prepare_statement := {error, P
     end.
 
 %% ===================================================================
-conn_status(_AutoReconn = true) -> connecting;
-conn_status(_AutoReconn = false) -> disconnected.
-
-reconn_interval(true) -> 15;
-reconn_interval(false) -> false.
-
 connect(Options) ->
     mysql:start_link(Options).

@@ -56,7 +56,6 @@
 -type state() ::
     #{
         poolname := atom(),
-        auto_reconnect := boolean(),
         prepare_sql := prepares(),
         params_tokens := params_tokens(),
         prepare_statement := epgsql:statement()

@@ -87,8 +86,6 @@ on_start(
         server := Server,
         database := DB,
         username := User,
-        password := Password,
-        auto_reconnect := AutoReconn,
         pool_size := PoolSize,
         ssl := SSL
     } = Config

@@ -97,7 +94,7 @@ on_start(
     ?SLOG(info, #{
         msg => "starting_postgresql_connector",
         connector => InstId,
-        config => Config
+        config => emqx_misc:redact(Config)
     }),
     SslOpts =
         case maps:get(enable, SSL) of

@@ -113,14 +110,14 @@ on_start(
         {host, Host},
         {port, Port},
         {username, User},
-        {password, emqx_secret:wrap(Password)},
+        {password, emqx_secret:wrap(maps:get(password, Config, ""))},
         {database, DB},
-        {auto_reconnect, reconn_interval(AutoReconn)},
+        {auto_reconnect, ?AUTO_RECONNECT_INTERVAL},
         {pool_size, PoolSize}
     ],
     PoolName = emqx_plugin_libs_pool:pool_name(InstId),
     Prepares = parse_prepare_sql(Config),
-    InitState = #{poolname => PoolName, auto_reconnect => AutoReconn, prepare_statement => #{}},
+    InitState = #{poolname => PoolName, prepare_statement => #{}},
     State = maps:merge(InitState, Prepares),
     case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Options ++ SslOpts) of
         ok ->

@@ -247,7 +244,7 @@ on_sql_query(InstId, PoolName, Type, NameOrSQL, Data) ->
     end,
     Result.
 
-on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State) ->
+on_get_status(_InstId, #{poolname := Pool} = State) ->
     case emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1) of
         true ->
             case do_check_prepares(State) of

@@ -258,10 +255,10 @@ on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State
                     {connected, NState};
                 false ->
                     %% do not log error, it is logged in prepare_sql_to_conn
-                    conn_status(AutoReconn)
+                    connecting
             end;
         false ->
-            conn_status(AutoReconn)
+            connecting
     end.
 
 do_get_status(Conn) ->

@@ -280,11 +277,6 @@ do_check_prepares(State = #{poolname := PoolName, prepare_sql := {error, Prepare
     end.
 
 %% ===================================================================
-conn_status(_AutoReconn = true) -> connecting;
-conn_status(_AutoReconn = false) -> disconnected.
-
-reconn_interval(true) -> 15;
-reconn_interval(false) -> false.
-
 connect(Opts) ->
     Host = proplists:get_value(host, Opts),

@@ -64,7 +64,7 @@ fields(single) ->
         {redis_type, #{
             type => single,
             default => single,
-            required => true,
+            required => false,
             desc => ?DESC("single")
         }}
     ] ++

@@ -76,7 +76,7 @@ fields(cluster) ->
         {redis_type, #{
             type => cluster,
             default => cluster,
-            required => true,
+            required => false,
             desc => ?DESC("cluster")
         }}
     ] ++

@@ -88,7 +88,7 @@ fields(sentinel) ->
         {redis_type, #{
             type => sentinel,
             default => sentinel,
-            required => true,
+            required => false,
             desc => ?DESC("sentinel")
         }},
         {sentinel, #{

@@ -117,14 +117,13 @@ on_start(
     #{
         redis_type := Type,
         pool_size := PoolSize,
-        auto_reconnect := AutoReconn,
         ssl := SSL
     } = Config
 ) ->
     ?SLOG(info, #{
         msg => "starting_redis_connector",
         connector => InstId,
-        config => Config
+        config => emqx_misc:redact(Config)
     }),
     ConfKey =
         case Type of

@@ -142,7 +141,7 @@ on_start(
         [
             {pool_size, PoolSize},
             {password, maps:get(password, Config, "")},
-            {auto_reconnect, reconn_interval(AutoReconn)}
+            {auto_reconnect, ?AUTO_RECONNECT_INTERVAL}
         ] ++ Database ++ Servers,
     Options =
         case maps:get(enable, SSL) of

@@ -155,7 +154,7 @@ on_start(
             [{ssl, false}]
         end ++ [{sentinel, maps:get(sentinel, Config, undefined)}],
     PoolName = emqx_plugin_libs_pool:pool_name(InstId),
-    State = #{poolname => PoolName, type => Type, auto_reconnect => AutoReconn},
+    State = #{poolname => PoolName, type => Type},
     case Type of
         cluster ->
             case eredis_cluster:start_pool(PoolName, Opts ++ [{options, Options}]) of

@@ -229,18 +228,18 @@ eredis_cluster_workers_exist_and_are_connected(Workers) ->
         Workers
     ).
 
-on_get_status(_InstId, #{type := cluster, poolname := PoolName, auto_reconnect := AutoReconn}) ->
+on_get_status(_InstId, #{type := cluster, poolname := PoolName}) ->
     case eredis_cluster:pool_exists(PoolName) of
         true ->
             Workers = extract_eredis_cluster_workers(PoolName),
             Health = eredis_cluster_workers_exist_and_are_connected(Workers),
-            status_result(Health, AutoReconn);
+            status_result(Health);
         false ->
             disconnected
     end;
-on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn}) ->
+on_get_status(_InstId, #{poolname := Pool}) ->
     Health = emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1),
-    status_result(Health, AutoReconn).
+    status_result(Health).
 
 do_get_status(Conn) ->
     case eredis:q(Conn, ["PING"]) of

@@ -248,12 +247,8 @@ do_get_status(Conn) ->
         _ -> false
     end.
 
-status_result(_Status = true, _AutoReconn) -> connected;
-status_result(_Status = false, _AutoReconn = true) -> connecting;
-status_result(_Status = false, _AutoReconn = false) -> disconnected.
+status_result(_Status = true) -> connected;
+status_result(_Status = false) -> connecting.
 
-reconn_interval(true) -> 15;
-reconn_interval(false) -> false.
-
 do_cmd(PoolName, cluster, {cmd, Command}) ->
     eredis_cluster:q(PoolName, Command);
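With `auto_reconnect` gone from the connector state, the redis health check collapses to two results: a failed ping maps to `connecting` (retry is always on, at the fixed `?AUTO_RECONNECT_INTERVAL`), and `disconnected` is reserved for a cluster pool that no longer exists. In sketch form (module-internal function, shown unqualified):

```erlang
%% Sketch of the simplified mapping.
connected = status_result(true),
connecting = status_result(false).
%% disconnected is now only returned when eredis_cluster:pool_exists/1
%% says the pool itself is gone.
```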
@@ -106,4 +106,5 @@ password(_) -> undefined.
 auto_reconnect(type) -> boolean();
 auto_reconnect(desc) -> ?DESC("auto_reconnect");
 auto_reconnect(default) -> true;
+auto_reconnect(deprecated) -> {since, "v5.0.15"};
 auto_reconnect(_) -> undefined.
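Marking the field `deprecated` keeps old configs parseable while flagging the knob in generated docs. A sketch of what a caller sees, assuming the field function is exported as is usual for schema modules (`validator` below is just an example key for the catch-all clause):

```erlang
%% Sketch: the field self-reports its deprecation metadata.
{since, "v5.0.15"} = emqx_connector_schema_lib:auto_reconnect(deprecated),
true = emqx_connector_schema_lib:auto_reconnect(default),
undefined = emqx_connector_schema_lib:auto_reconnect(validator).
```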
@ -75,9 +75,9 @@ wait_for_redis(Checks) ->
|
||||||
wait_for_redis(Checks - 1)
|
wait_for_redis(Checks - 1)
|
||||||
end.
|
end.
|
||||||
|
|
||||||
% %%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
% %% Testcases
|
%% Testcases
|
||||||
% %%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
t_single_lifecycle(_Config) ->
|
t_single_lifecycle(_Config) ->
|
||||||
perform_lifecycle_check(
|
perform_lifecycle_check(
|
||||||
|
|
|
@ -199,23 +199,12 @@ its own from which a browser should permit loading resources."""
|
||||||
}
|
}
|
||||||
bootstrap_users_file {
|
bootstrap_users_file {
|
||||||
desc {
|
desc {
|
||||||
en: "Initialize users file."
|
en: "Deprecated, use api_key.bootstrap_file"
|
||||||
zh: "初始化用户文件"
|
zh: "已废弃,请使用 api_key.bootstrap_file"
|
||||||
}
|
}
|
||||||
label {
|
label {
|
||||||
en: """Is used to add an administrative user to Dashboard when emqx is first launched,
|
en: """Deprecated"""
|
||||||
the format is:
|
zh: """已废弃"""
|
||||||
```
|
|
||||||
username1:password1
|
|
||||||
username2:password2
|
|
||||||
```
|
|
||||||
"""
|
|
||||||
zh: """用于在首次启动 emqx 时,为 Dashboard 添加管理用户,其格式为:
|
|
||||||
```
|
|
||||||
username1:password1
|
|
||||||
username2:password2
|
|
||||||
```
|
|
||||||
"""
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
{application, emqx_dashboard, [
|
{application, emqx_dashboard, [
|
||||||
{description, "EMQX Web Dashboard"},
|
{description, "EMQX Web Dashboard"},
|
||||||
% strict semver, bump manually!
|
% strict semver, bump manually!
|
||||||
{vsn, "5.0.11"},
|
{vsn, "5.0.12"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, [emqx_dashboard_sup]},
|
{registered, [emqx_dashboard_sup]},
|
||||||
{applications, [kernel, stdlib, mnesia, minirest, emqx]},
|
{applications, [kernel, stdlib, mnesia, minirest, emqx]},
|
||||||
|
|
|
@ -65,8 +65,12 @@ start_listeners(Listeners) ->
|
||||||
components => #{
|
components => #{
|
||||||
schemas => #{},
|
schemas => #{},
|
||||||
'securitySchemes' => #{
|
'securitySchemes' => #{
|
||||||
'basicAuth' => #{type => http, scheme => basic},
|
'basicAuth' => #{
|
||||||
'bearerAuth' => #{type => http, scheme => bearer}
|
type => http,
|
||||||
|
scheme => basic,
|
||||||
|
description =>
|
||||||
|
<<"Authorize with [API Keys](https://www.emqx.io/docs/en/v5.0/admin/api.html#api-keys)">>
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -215,28 +219,7 @@ listener_name(Protocol) ->
|
||||||
authorize(Req) ->
|
authorize(Req) ->
|
||||||
case cowboy_req:parse_header(<<"authorization">>, Req) of
|
case cowboy_req:parse_header(<<"authorization">>, Req) of
|
||||||
{basic, Username, Password} ->
|
{basic, Username, Password} ->
|
||||||
case emqx_dashboard_admin:check(Username, Password) of
|
api_key_authorize(Req, Username, Password);
|
||||||
ok ->
|
|
||||||
ok;
|
|
||||||
{error, <<"username_not_found">>} ->
|
|
||||||
Path = cowboy_req:path(Req),
|
|
||||||
case emqx_mgmt_auth:authorize(Path, Username, Password) of
|
|
||||||
ok ->
|
|
||||||
ok;
|
|
||||||
{error, <<"not_allowed">>} ->
|
|
||||||
return_unauthorized(
|
|
||||||
?WRONG_USERNAME_OR_PWD,
|
|
||||||
<<"Check username/password">>
|
|
||||||
);
|
|
||||||
{error, _} ->
|
|
||||||
return_unauthorized(
|
|
||||||
?WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET,
|
|
||||||
<<"Check username/password or api_key/api_secret">>
|
|
||||||
)
|
|
||||||
end;
|
|
||||||
{error, _} ->
|
|
||||||
return_unauthorized(?WRONG_USERNAME_OR_PWD, <<"Check username/password">>)
|
|
||||||
end;
|
|
||||||
{bearer, Token} ->
|
{bearer, Token} ->
|
||||||
case emqx_dashboard_admin:verify_token(Token) of
|
case emqx_dashboard_admin:verify_token(Token) of
|
||||||
ok ->
|
ok ->
|
||||||
|
@@ -269,3 +252,20 @@ i18n_file() ->

 listeners() ->
     emqx_conf:get([dashboard, listeners], []).
+
+api_key_authorize(Req, Key, Secret) ->
+    Path = cowboy_req:path(Req),
+    case emqx_mgmt_auth:authorize(Path, Key, Secret) of
+        ok ->
+            ok;
+        {error, <<"not_allowed">>} ->
+            return_unauthorized(
+                ?BAD_API_KEY_OR_SECRET,
+                <<"Not allowed, Check api_key/api_secret">>
+            );
+        {error, _} ->
+            return_unauthorized(
+                ?BAD_API_KEY_OR_SECRET,
+                <<"Check api_key/api_secret">>
+            )
+    end.

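Note: after this change, HTTP basic credentials sent to the management API are treated as an API key/secret pair (checked by emqx_mgmt_auth:authorize/3) rather than a Dashboard username/password. As a hedged, self-contained sketch of what a client now sends, the header can be built like this (api_auth_header and the key/secret values are hypothetical, mirroring the helper style used in the test suites below):

```erlang
%% Sketch: build the basic-auth Authorization header from an API
%% key/secret pair (plain strings), base64-encoding "Key:Secret".
api_auth_header(Key, Secret) ->
    Encoded = base64:encode_to_string(lists:append([Key, ":", Secret])),
    {"Authorization", "Basic " ++ Encoded}.
```

For example, api_auth_header("my-key", "my-secret") yields {"Authorization", "Basic bXkta2V5Om15LXNlY3JldA=="} (values hypothetical).
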
@@ -51,8 +51,7 @@
 -export([
     add_default_user/0,
-    default_username/0,
-    add_bootstrap_users/0
+    default_username/0
 ]).

 -type emqx_admin() :: #?ADMIN{}.

@@ -85,21 +84,6 @@ mnesia(boot) ->
 add_default_user() ->
     add_default_user(binenv(default_username), binenv(default_password)).

--spec add_bootstrap_users() -> ok | {error, _}.
-add_bootstrap_users() ->
-    case emqx:get_config([dashboard, bootstrap_users_file], undefined) of
-        undefined ->
-            ok;
-        File ->
-            case mnesia:table_info(?ADMIN, size) of
-                0 ->
-                    ?SLOG(debug, #{msg => "Add dashboard bootstrap users", file => File}),
-                    add_bootstrap_users(File);
-                _ ->
-                    ok
-            end
-    end.
-
 %%--------------------------------------------------------------------
 %% API
 %%--------------------------------------------------------------------

@@ -311,44 +295,3 @@ add_default_user(Username, Password) ->
         [] -> add_user(Username, Password, <<"administrator">>);
         _ -> {ok, default_user_exists}
     end.
-
-add_bootstrap_users(File) ->
-    case file:open(File, [read]) of
-        {ok, Dev} ->
-            {ok, MP} = re:compile(<<"(\.+):(\.+$)">>, [ungreedy]),
-            try
-                load_bootstrap_user(Dev, MP)
-            catch
-                Type:Reason ->
-                    {error, {Type, Reason}}
-            after
-                file:close(Dev)
-            end;
-        {error, Reason} = Error ->
-            ?SLOG(error, #{
-                msg => "failed to open the dashboard bootstrap users file",
-                file => File,
-                reason => Reason
-            }),
-            Error
-    end.
-
-load_bootstrap_user(Dev, MP) ->
-    case file:read_line(Dev) of
-        {ok, Line} ->
-            case re:run(Line, MP, [global, {capture, all_but_first, binary}]) of
-                {match, [[Username, Password]]} ->
-                    case add_user(Username, Password, ?BOOTSTRAP_USER_TAG) of
-                        {ok, _} ->
-                            load_bootstrap_user(Dev, MP);
-                        Error ->
-                            Error
-                    end;
-                _ ->
-                    load_bootstrap_user(Dev, MP)
-            end;
-        eof ->
-            ok;
-        Error ->
-            Error
-    end.

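For reference, the deleted loader matched each `username:password` line against a compiled regex. A simpler way to split one such line is shown below, purely as a hedged stdlib sketch (parse_bootstrap_line is a hypothetical name, not part of the codebase):

```erlang
%% Sketch: split a "username:password" line on the first colon only,
%% so passwords containing ':' survive; string:trim/1 drops the
%% trailing newline that file:read_line/1 leaves in place.
parse_bootstrap_line(Line) ->
    case string:split(string:trim(Line), ":") of
        [User, Pass] -> {ok, User, Pass};
        _ -> {error, bad_format}
    end.
```

string:split/2 accepts both lists and binaries, so the same sketch works whether the file is read in list or binary mode.
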
@@ -47,7 +47,7 @@
 -define(EMPTY(V), (V == undefined orelse V == <<>>)).

--define(WRONG_USERNAME_OR_PWD, 'WRONG_USERNAME_OR_PWD').
+-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
 -define(WRONG_TOKEN_OR_USERNAME, 'WRONG_TOKEN_OR_USERNAME').
 -define(USER_NOT_FOUND, 'USER_NOT_FOUND').
 -define(ERROR_PWD_NOT_MATCH, 'ERROR_PWD_NOT_MATCH').

@@ -164,7 +164,7 @@ schema("/users/:username/change_pwd") ->
     }.

 response_schema(401) ->
-    emqx_dashboard_swagger:error_codes([?WRONG_USERNAME_OR_PWD], ?DESC(login_failed401));
+    emqx_dashboard_swagger:error_codes([?BAD_USERNAME_OR_PWD], ?DESC(login_failed401));
 response_schema(404) ->
     emqx_dashboard_swagger:error_codes([?USER_NOT_FOUND], ?DESC(users_api404)).

@@ -223,7 +223,7 @@ login(post, #{body := Params}) ->
             }};
         {error, R} ->
             ?SLOG(info, #{msg => "Dashboard login failed", username => Username, reason => R}),
-            {401, ?WRONG_USERNAME_OR_PWD, <<"Auth failed">>}
+            {401, ?BAD_USERNAME_OR_PWD, <<"Auth failed">>}
     end.

 logout(_, #{

@@ -31,13 +31,8 @@ start(_StartType, _StartArgs) ->
     case emqx_dashboard:start_listeners() of
         ok ->
             emqx_dashboard_cli:load(),
-            case emqx_dashboard_admin:add_bootstrap_users() of
-                ok ->
-                    {ok, _} = emqx_dashboard_admin:add_default_user(),
-                    {ok, Sup};
-                Error ->
-                    Error
-            end;
+            {ok, _} = emqx_dashboard_admin:add_default_user(),
+            {ok, Sup};
         {error, Reason} ->
             {error, Reason}
     end.

@@ -56,7 +56,15 @@ fields("dashboard") ->
         {cors, fun cors/1},
         {i18n_lang, fun i18n_lang/1},
         {bootstrap_users_file,
-            ?HOCON(binary(), #{desc => ?DESC(bootstrap_users_file), required => false})}
+            ?HOCON(
+                binary(),
+                #{
+                    desc => ?DESC(bootstrap_users_file),
+                    required => false,
+                    default => <<>>
+                    %% deprecated => {since, "5.1.0"}
+                }
+            )}
     ];
 fields("listeners") ->
     [

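With a default of <<>>, the field always resolves to a binary, so callers can match on the empty binary instead of 'undefined'. A hedged sketch of the read side under that assumption (maybe_load_bootstrap_users and load_users are hypothetical placeholders; emqx:get_config/2 is used the same way in the code removed above):

```erlang
%% Sketch: reading the config after this change; the default means an
%% unset field comes back as <<>> rather than 'undefined'.
maybe_load_bootstrap_users() ->
    case emqx:get_config([dashboard, bootstrap_users_file], <<>>) of
        <<>> -> ok;                  %% nothing configured
        File -> load_users(File)     %% hypothetical loader
    end.
```
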
@@ -139,14 +139,20 @@ fields(limit) ->
     [{limit, hoconsc:mk(range(1, ?MAX_ROW_LIMIT), Meta)}];
 fields(count) ->
     Desc = <<
-        "Total number of records counted.<br/>"
-        "Note: this field is <code>0</code> when the queryed table is empty, "
-        "or if the query can not be optimized and requires a full table scan."
+        "Total number of records matching the query.<br/>"
+        "Note: this field is present only if the query can be optimized and does "
+        "not require a full table scan."
+    >>,
+    Meta = #{desc => Desc, required => false},
+    [{count, hoconsc:mk(non_neg_integer(), Meta)}];
+fields(hasnext) ->
+    Desc = <<
+        "Flag indicating whether there are more results available on next pages."
     >>,
     Meta = #{desc => Desc, required => true},
-    [{count, hoconsc:mk(non_neg_integer(), Meta)}];
+    [{hasnext, hoconsc:mk(boolean(), Meta)}];
 fields(meta) ->
-    fields(page) ++ fields(limit) ++ fields(count).
+    fields(page) ++ fields(limit) ++ fields(count) ++ fields(hasnext).

 -spec schema_with_example(hocon_schema:type(), term()) -> hocon_schema:field_schema_map().
 schema_with_example(Type, Example) ->

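Taken together, fields(meta) now documents a pagination meta object with an optional count and a mandatory hasnext flag. An illustrative value follows (the numbers are hypothetical; count may be absent entirely when the query falls back to a full table scan):

```erlang
%% Illustrative meta map for a paginated response after this change.
Meta = #{page => 2, limit => 100, count => 230, hasnext => true}.
```
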
@@ -623,7 +629,7 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) ->
             {[Schema | Acc], SubRefs ++ RefsAcc}
         end,
         {[], []},
-        Types
+        hoconsc:union_members(Types)
     ),
     {#{<<"oneOf">> => OneOf}, Refs};
 hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->

@@ -705,9 +711,11 @@ typename_to_spec("service_account_json()", _Mod) ->
 typename_to_spec("#{" ++ _, Mod) ->
     typename_to_spec("map()", Mod);
 typename_to_spec("qos()", _Mod) ->
-    #{type => string, enum => [0, 1, 2]};
+    #{type => integer, minimum => 0, maximum => 2, example => 0};
 typename_to_spec("{binary(), binary()}", _Mod) ->
     #{type => object, example => #{}};
+typename_to_spec("{string(), string()}", _Mod) ->
+    #{type => object, example => #{}};
 typename_to_spec("comma_separated_list()", _Mod) ->
     #{type => string, example => <<"item1,item2">>};
 typename_to_spec("comma_separated_binary()", _Mod) ->

@@ -114,9 +114,9 @@ t_admin_delete_self_failed(_) ->
     ?assertEqual(1, length(Admins)),
     Header = auth_header_(<<"username1">>, <<"password">>),
     {error, {_, 400, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header),
-    Token = erlang:iolist_to_binary(["Basic ", base64:encode("username1:password")]),
+    Token = ["Basic ", base64:encode("username1:password")],
     Header2 = {"Authorization", Token},
-    {error, {_, 400, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header2),
+    {error, {_, 401, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header2),
     mnesia:clear_table(?ADMIN).

 t_rest_api(_Config) ->

@@ -25,43 +25,24 @@

 -define(SERVER, "http://127.0.0.1:18083/api/v5").

+-import(emqx_mgmt_api_test_util, [request/2]).
+
 all() ->
     emqx_common_test_helpers:all(?MODULE).

 init_per_suite(Config) ->
     mria:start(),
-    application:load(emqx_dashboard),
-    emqx_common_test_helpers:start_apps([emqx_conf, emqx_dashboard], fun set_special_configs/1),
+    emqx_mgmt_api_test_util:init_suite([emqx_conf]),
     Config.

-set_special_configs(emqx_dashboard) ->
-    emqx_dashboard_api_test_helpers:set_default_config(),
-    ok;
-set_special_configs(_) ->
-    ok.
-
 end_per_suite(Config) ->
     end_suite(),
     Config.

 end_suite() ->
-    application:unload(emqx_management),
-    emqx_common_test_helpers:stop_apps([emqx_dashboard]).
+    emqx_mgmt_api_test_util:end_suite([emqx_conf]).

 t_bad_api_path(_) ->
     Url = ?SERVER ++ "/for/test/some/path/not/exist",
-    {error, {"HTTP/1.1", 404, "Not Found"}} = request(Url),
+    {ok, 404, _} = request(get, Url),
     ok.
-
-request(Url) ->
-    Request = {Url, []},
-    case httpc:request(get, Request, [], []) of
-        {error, Reason} ->
-            {error, Reason};
-        {ok, {{"HTTP/1.1", Code, _}, _, Return}} when
-            Code >= 200 andalso Code =< 299
-        ->
-            {ok, emqx_json:decode(Return, [return_maps])};
-        {ok, {Reason, _, _}} ->
-            {error, Reason}
-    end.

@@ -19,6 +19,8 @@
 -compile(nowarn_export_all).
 -compile(export_all).

+-import(emqx_dashboard_SUITE, [auth_header_/0]).
+
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
 -include_lib("emqx/include/emqx.hrl").

@@ -153,10 +155,6 @@ do_request_api(Method, Request) ->
             {error, Reason}
     end.

-auth_header_() ->
-    Basic = binary_to_list(base64:encode(<<"admin:public">>)),
-    {"Authorization", "Basic " ++ Basic}.
-
 restart_monitor() ->
     OldMonitor = erlang:whereis(emqx_dashboard_monitor),
     erlang:exit(OldMonitor, kill),

@@ -112,7 +112,7 @@ t_in_query(_Config) ->
             description => <<"QOS">>,
             in => query,
             name => qos,
-            schema => #{enum => [0, 1, 2], type => string}
+            schema => #{minimum => 0, maximum => 2, type => integer, example => 0}
         }
     ],
     validate("/test/in/query", Expect),

@@ -347,13 +347,7 @@ do_request_api(Method, Request) ->
     end.

 auth_header_() ->
-    AppId = <<"admin">>,
-    AppSecret = <<"public">>,
-    auth_header_(binary_to_list(AppId), binary_to_list(AppSecret)).
-
-auth_header_(User, Pass) ->
-    Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),
-    {"Authorization", "Basic " ++ Encoded}.
+    emqx_mgmt_api_test_util:auth_header_().

 api_path(Parts) ->
     ?HOST ++ filename:join([?BASE_PATH, ?API_VERSION] ++ Parts).

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_gateway, [
     {description, "The Gateway management application"},
-    {vsn, "0.1.10"},
+    {vsn, "0.1.11"},
     {registered, []},
     {mod, {emqx_gateway_app, []}},
     {applications, [kernel, stdlib, grpc, emqx, emqx_authn]},

Some files were not shown because too many files have changed in this diff.