Merge branch 'master' into dev/api-refactor

This commit is contained in:
Stefan Strigler 2023-01-13 15:34:13 +01:00 committed by GitHub
commit 1690a6dcfc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
220 changed files with 2733 additions and 1396 deletions

View File

@ -20,8 +20,3 @@ indent_size = 4
# Tab indentation (no size specified)
[Makefile]
indent_style = tab
# Matches the exact file .travis.yml
[{.travis.yml}]
indent_style = space
indent_size = 2

View File

@ -1,81 +0,0 @@
name: 'Docker meta'
inputs:
profile:
required: true
type: string
registry:
required: true
type: string
arch:
required: true
type: string
otp:
required: true
type: string
elixir:
required: false
type: string
default: ''
builder_base:
required: true
type: string
owner:
required: true
type: string
docker_tags:
required: true
type: string
outputs:
emqx_name:
description: "EMQX name"
value: ${{ steps.pre-meta.outputs.emqx_name }}
version:
description: "docker image version"
value: ${{ steps.meta.outputs.version }}
tags:
description: "docker image tags"
value: ${{ steps.meta.outputs.tags }}
labels:
description: "docker image labels"
value: ${{ steps.meta.outputs.labels }}
runs:
using: composite
steps:
- name: prepare for docker/metadata-action
id: pre-meta
shell: bash
run: |
emqx_name=${{ inputs.profile }}
img_suffix=${{ inputs.arch }}
img_labels="org.opencontainers.image.otp.version=${{ inputs.otp }}"
if [ -n "${{ inputs.elixir }}" ]; then
emqx_name="emqx-elixir"
img_suffix="elixir-${{ inputs.arch }}"
img_labels="org.opencontainers.image.elixir.version=${{ inputs.elixir }}\n${img_labels}"
fi
if [ "${{ inputs.profile }}" = "emqx" ]; then
img_labels="org.opencontainers.image.edition=Opensource\n${img_labels}"
fi
if [ "${{ inputs.profile }}" = "emqx-enterprise" ]; then
img_labels="org.opencontainers.image.edition=Enterprise\n${img_labels}"
fi
if [[ "${{ inputs.builder_base }}" =~ "alpine" ]]; then
img_suffix="${img_suffix}-alpine"
fi
echo "emqx_name=${emqx_name}" >> $GITHUB_OUTPUT
echo "img_suffix=${img_suffix}" >> $GITHUB_OUTPUT
echo "img_labels=${img_labels}" >> $GITHUB_OUTPUT
echo "img_name=${{ inputs.registry }}/${{ inputs.owner }}/${{ inputs.profile }}" >> $GITHUB_OUTPUT
- uses: docker/metadata-action@v4
id: meta
with:
images:
${{ steps.pre-meta.outputs.img_name }}
flavor: |
suffix=-${{ steps.pre-meta.outputs.img_suffix }}
tags: |
type=raw,value=${{ inputs.docker_tags }}
labels:
${{ steps.pre-meta.outputs.img_labels }}

View File

@ -9,15 +9,17 @@ on:
tags:
- v*
- e*
release:
types:
- published
- docker-latest-*
workflow_dispatch:
inputs:
branch_or_tag:
required: false
profile:
required: false
default: 'emqx'
is_latest:
required: false
default: false
jobs:
prepare:
@ -26,10 +28,11 @@ jobs:
container: "ghcr.io/emqx/emqx-builder/5.0-26:1.13.4-24.3.4.2-1-ubuntu20.04"
outputs:
BUILD_PROFILE: ${{ steps.get_profile.outputs.BUILD_PROFILE }}
IS_DOCKER_LATEST: ${{ steps.get_profile.outputs.IS_DOCKER_LATEST }}
PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
EDITION: ${{ steps.get_profile.outputs.EDITION }}
IS_LATEST: ${{ steps.get_profile.outputs.IS_LATEST }}
IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }}
DOCKER_TAG_VERSION: ${{ steps.get_profile.outputs.DOCKER_TAG_VERSION }}
VERSION: ${{ steps.get_profile.outputs.VERSION }}
steps:
- uses: actions/checkout@v3
@ -45,14 +48,14 @@ jobs:
tag=${{ github.ref }}
# tag docker-latest-ce or docker-latest-ee
if git describe --tags --exact --match 'docker-latest-*' 2>/dev/null; then
echo 'docker_latest=true due to docker-latest-* tag'
docker_latest=true
elif [ "${{ github.event_name }}" = "release" ]; then
echo 'docker_latest=true due to release'
docker_latest=true
echo 'is_latest=true due to docker-latest-* tag'
is_latest=true
elif [ "${{ inputs.is_latest }}" = "true" ]; then
echo 'is_latest=true due to manual input from workflow_dispatch'
is_latest=true
else
echo 'docker_latest=false'
docker_latest=false
echo 'is_latest=false'
is_latest=false
fi
if git describe --tags --match "[v|e]*" --exact; then
echo "This is an exact git tag, will publish images"
@ -64,18 +67,20 @@ jobs:
case $tag in
refs/tags/v*)
PROFILE='emqx'
EDITION='Opensource'
;;
refs/tags/e*)
PROFILE=emqx-enterprise
EDITION='Enterprise'
;;
*)
PROFILE=${{ github.event.inputs.profile }}
case "$PROFILE" in
emqx)
true
EDITION='Opensource'
;;
emqx-enterprise)
true
EDITION='Enterprise'
;;
*)
echo "ERROR: Failed to resolve build profile"
@ -85,14 +90,18 @@ jobs:
;;
esac
VSN="$(./pkg-vsn.sh "$PROFILE")"
echo "Building $PROFILE image with tag $VSN (latest=$docker_latest)"
echo "IS_DOCKER_LATEST=$docker_latest" >> $GITHUB_OUTPUT
echo "Building emqx/$PROFILE:$VSN image (latest=$is_latest)"
echo "Push = $is_exact"
echo "IS_LATEST=$is_latest" >> $GITHUB_OUTPUT
echo "IS_EXACT_TAG=$is_exact" >> $GITHUB_OUTPUT
echo "BUILD_PROFILE=$PROFILE" >> $GITHUB_OUTPUT
echo "DOCKER_TAG_VERSION=$VSN" >> $GITHUB_OUTPUT
echo "PROFILE=$PROFILE" >> $GITHUB_OUTPUT
echo "EDITION=$EDITION" >> $GITHUB_OUTPUT
echo "VERSION=$VSN" >> $GITHUB_OUTPUT
- name: get_all_deps
env:
PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
run: |
make -C source deps-all
PROFILE=$PROFILE make -C source deps-$PROFILE
zip -ryq source.zip source/* source/.[^.]*
- uses: actions/upload-artifact@v3
with:
@ -100,17 +109,17 @@ jobs:
path: source.zip
docker:
runs-on: ${{ matrix.arch[1] }}
runs-on: ubuntu-20.04
needs: prepare
strategy:
fail-fast: false
matrix:
arch:
- [amd64, ubuntu-20.04]
- [arm64, aws-arm64]
profile:
- ${{ needs.prepare.outputs.BUILD_PROFILE }}
- "${{ needs.prepare.outputs.PROFILE }}"
flavor:
- ''
- '-elixir'
registry:
- 'docker.io'
- 'public.ecr.aws'
@ -128,9 +137,10 @@ jobs:
exclude: # TODO: publish enterprise to ecr too?
- registry: 'public.ecr.aws'
profile: emqx-enterprise
- flavor: '-elixir'
os: [alpine3.15.1, "alpine:3.15.1", "deploy/docker/Dockerfile.alpine"]
steps:
- uses: AutoModality/action-clean@v1
if: matrix.arch[1] == 'aws-arm64'
- uses: actions/download-artifact@v3
with:
name: source
@ -138,16 +148,17 @@ jobs:
- name: unzip source code
run: unzip -q source.zip
- uses: docker/setup-qemu-action@v2
- uses: docker/setup-buildx-action@v2
- name: Login for docker.
- name: Login to hub.docker.com
uses: docker/login-action@v2
if: matrix.registry == 'docker.io'
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: Login for AWS ECR
- name: Login to AWS ECR
uses: docker/login-action@v2
if: matrix.registry == 'public.ecr.aws'
with:
@ -156,229 +167,48 @@ jobs:
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ecr: true
- uses: ./source/.github/actions/docker-meta
- name: prepare for docker/metadata-action
id: pre-meta
shell: bash
run: |
extra_labels=
img_suffix=
flavor="${{ matrix.flavor }}"
if [ "${{ matrix.flavor }}" = '-elixir' ]; then
img_suffix="-elixir"
extra_labels="org.opencontainers.image.elixir.version=${{ matrix.elixir }}"
fi
if [[ "${{ matrix.os[0] }}" =~ "alpine" ]]; then
img_suffix="${img_suffix}-alpine"
fi
echo "img_suffix=$img_suffix" >> $GITHUB_OUTPUT
echo "extra_labels=$extra_labels" >> $GITHUB_OUTPUT
- uses: docker/metadata-action@v4
id: meta
with:
profile: ${{ matrix.profile }}
registry: ${{ matrix.registry }}
arch: ${{ matrix.arch[0] }}
otp: ${{ matrix.otp }}
builder_base: ${{ matrix.os[0] }}
owner: ${{ github.repository_owner }}
docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
images: |
${{ matrix.registry }}/${{ github.repository_owner }}/${{ matrix.profile }}
flavor: |
suffix=${{ steps.pre-meta.outputs.img_suffix }}
tags: |
type=raw,value=${{ needs.prepare.outputs.VERSION }}
type=raw,value=latest,enable=${{ needs.prepare.outputs.IS_LATEST }}
labels: |
org.opencontainers.image.otp.version=${{ matrix.otp }}
org.opencontainers.image.edition=${{ needs.prepare.outputs.EDITION }}
${{ steps.pre-meta.outputs.extra_labels }}
- uses: docker/build-push-action@v3
with:
push: ${{ needs.prepare.outputs.IS_EXACT_TAG == 'true' || github.repository_owner != 'emqx' }}
pull: true
no-cache: true
platforms: linux/${{ matrix.arch[0] }}
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
BUILD_FROM=ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }}
RUN_FROM=${{ matrix.os[1] }}
EMQX_NAME=${{ steps.meta.outputs.emqx_name }}
EMQX_NAME=${{ matrix.profile }}${{ matrix.flavor }}
file: source/${{ matrix.os[2] }}
context: source
- name: Docker Hub Description
if: matrix.registry == 'docker.io'
uses: peter-evans/dockerhub-description@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
repository: "emqx/${{ needs.prepare.outputs.BUILD_PROFILE }}"
readme-filepath: ./source/deploy/docker/README.md
short-description: "The most scalable open-source MQTT broker for IoT, IIoT, connected vehicles, and more."
docker-elixir:
runs-on: ${{ matrix.arch[1] }}
needs: prepare
# do not build elixir images for ee for now
if: needs.prepare.outputs.BUILD_PROFILE == 'emqx'
strategy:
fail-fast: false
matrix:
arch:
- [amd64, ubuntu-20.04]
- [arm64, aws-arm64]
profile:
- ${{ needs.prepare.outputs.BUILD_PROFILE }}
registry:
- 'docker.io'
os:
- [debian11, "debian:11-slim", "deploy/docker/Dockerfile"]
builder:
- 5.0-26 # update to latest
otp:
- 25.1.2-2 # update to latest
elixir:
- 1.13.4 # update to latest
steps:
- uses: AutoModality/action-clean@v1
if: matrix.arch[1] == 'aws-arm64'
- uses: actions/download-artifact@v3
with:
name: source
path: .
- name: unzip source code
run: unzip -q source.zip
- uses: docker/setup-buildx-action@v2
- name: Login for docker.
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- uses: ./source/.github/actions/docker-meta
id: meta
with:
profile: ${{ matrix.profile }}
registry: ${{ matrix.registry }}
arch: ${{ matrix.arch[0] }}
otp: ${{ matrix.otp }}
elixir: ${{ matrix.elixir }}
builder_base: ${{ matrix.os[0] }}
owner: ${{ github.repository_owner }}
docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
- uses: docker/build-push-action@v3
with:
push: ${{ needs.prepare.outputs.IS_EXACT_TAG == 'true' || github.repository_owner != 'emqx' }}
pull: true
no-cache: true
platforms: linux/${{ matrix.arch[0] }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
BUILD_FROM=ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }}
RUN_FROM=${{ matrix.os[1] }}
EMQX_NAME=${{ steps.meta.outputs.emqx_name }}
file: source/${{ matrix.os[2] }}
context: source
docker-push-multi-arch-manifest:
# note, we only run on amd64
if: needs.prepare.outputs.IS_EXACT_TAG
needs:
- prepare
- docker
runs-on: ${{ matrix.arch[1] }}
strategy:
fail-fast: false
matrix:
arch:
- [amd64, ubuntu-20.04]
profile:
- ${{ needs.prepare.outputs.BUILD_PROFILE }}
os:
- [alpine3.15.1, "alpine:3.15.1", "deploy/docker/Dockerfile.alpine"]
- [debian11, "debian:11-slim", "deploy/docker/Dockerfile"]
# NOTE: only the latest OTP version is supported, not a matrix
otp:
- 24.3.4.2-1 # switch to 25 once ready to release 5.1
registry:
- 'docker.io'
- 'public.ecr.aws'
exclude:
- registry: 'public.ecr.aws'
profile: emqx-enterprise
steps:
- uses: actions/download-artifact@v3
with:
name: source
path: .
- name: unzip source code
run: unzip -q source.zip
- uses: docker/login-action@v2
if: matrix.registry == 'docker.io'
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- uses: docker/login-action@v2
if: matrix.registry == 'public.ecr.aws'
with:
registry: public.ecr.aws
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ecr: true
- uses: ./source/.github/actions/docker-meta
id: meta
with:
profile: ${{ matrix.profile }}
registry: ${{ matrix.registry }}
arch: ${{ matrix.arch[0] }}
otp: ${{ matrix.otp }}
builder_base: ${{ matrix.os[0] }}
owner: ${{ github.repository_owner }}
docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
- name: update manifest for multiarch image
working-directory: source
run: |
is_latest="${{ needs.prepare.outputs.IS_DOCKER_LATEST }}"
scripts/docker-create-push-manifests.sh "${{ steps.meta.outputs.tags }}" "$is_latest"
docker-elixir-push-multi-arch-manifest:
# note, we only run on amd64
# do not build enterprise elixir images for now
if: needs.prepare.outputs.IS_EXACT_TAG == 'true' && needs.prepare.outputs.BUILD_PROFILE == 'emqx'
needs:
- prepare
- docker-elixir
runs-on: ${{ matrix.arch[1] }}
strategy:
fail-fast: false
matrix:
arch:
- [amd64, ubuntu-20.04]
profile:
- ${{ needs.prepare.outputs.BUILD_PROFILE }}
# NOTE: for docker, only support latest otp version, not a matrix
otp:
- 25.1.2-2 # update to latest
elixir:
- 1.13.4 # update to latest
registry:
- 'docker.io'
steps:
- uses: actions/download-artifact@v3
with:
name: source
path: .
- name: unzip source code
run: unzip -q source.zip
- uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- uses: ./source/.github/actions/docker-meta
id: meta
with:
profile: ${{ matrix.profile }}
registry: ${{ matrix.registry }}
arch: ${{ matrix.arch[0] }}
otp: ${{ matrix.otp }}
elixir: ${{ matrix.elixir }}
builder_base: ${{ matrix.os[0] }}
owner: ${{ github.repository_owner }}
docker_tags: ${{ needs.prepare.outputs.DOCKER_TAG_VERSION }}
- name: update manifest for multiarch image
working-directory: source
run: |
scripts/docker-create-push-manifests.sh "${{ steps.meta.outputs.tags }}" false

View File

@ -201,12 +201,25 @@ jobs:
echo "waiting emqx started";
sleep 10;
done
- name: Get Token
timeout-minutes: 1
run: |
kubectl port-forward service/${{ matrix.profile }} 18083:18083 > /dev/null &
while
[ "$(curl --silent -X 'GET' 'http://127.0.0.1:18083/api/v5/status' | tail -n1)" != "emqx is running" ]
do
echo "waiting emqx"
sleep 1
done
echo "TOKEN=$(curl --silent -X 'POST' 'http://127.0.0.1:18083/api/v5/login' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"username": "admin","password": "public"}' | jq -r ".token")" >> $GITHUB_ENV
- name: Check cluster
timeout-minutes: 10
run: |
kubectl port-forward service/${{ matrix.profile }} 18083:18083 > /dev/null &
while
[ "$(curl --silent --basic -u admin:public -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" != "3" ];
[ "$(curl --silent -H "Authorization: Bearer $TOKEN" -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" != "3" ];
do
echo "waiting ${{ matrix.profile }} cluster scale"
sleep 1

View File

@ -92,7 +92,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
ref: broker-autotest-v3
ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@v3
with:
@ -191,7 +191,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
ref: broker-autotest-v3
ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@v3
with:
@ -297,7 +297,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
ref: broker-autotest-v3
ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@v3
with:
@ -396,7 +396,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
ref: broker-autotest-v3
ref: broker-autotest-v5
path: scripts
- name: run jwks_server
timeout-minutes: 10
@ -496,7 +496,7 @@ jobs:
- uses: actions/checkout@v3
with:
repository: emqx/emqx-fvt
ref: broker-autotest-v3
ref: broker-autotest-v5
path: scripts
- uses: actions/setup-java@v3
with:

View File

@ -55,7 +55,7 @@ Must be one of the following:
- **chore**: Updating grunt tasks etc; no production code change
- **perf**: A code change that improves performance
- **test**: Adding missing tests, refactoring tests; no production code change
- **build**: Changes that affect the CI/CD pipeline or build system or external dependencies (example scopes: travis, jenkins, makefile)
- **build**: Changes that affect the CI/CD pipeline or build system or external dependencies (example scopes: jenkins, makefile)
- **ci**: Changes provided by DevOps for CI purposes.
- **revert**: Reverts a previous commit.

View File

@ -6,8 +6,8 @@ export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.0-26:1.13.4-24.3.4.2-1
export EMQX_DEFAULT_RUNNER = debian:11-slim
export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
export ELIXIR_VSN ?= $(shell $(CURDIR)/scripts/get-elixir-vsn.sh)
export EMQX_DASHBOARD_VERSION ?= v1.1.4
export EMQX_EE_DASHBOARD_VERSION ?= e1.0.1-beta.9
export EMQX_DASHBOARD_VERSION ?= v1.1.5
export EMQX_EE_DASHBOARD_VERSION ?= e1.0.1-beta.12
export EMQX_REL_FORM ?= tgz
export QUICER_DOWNLOAD_FROM_RELEASE = 1
ifeq ($(OS),Windows_NT)

View File

@ -1,7 +1,7 @@
# EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://img.shields.io/travis/emqx/emqx?label=Build)](https://travis-ci.org/emqx/emqx)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)

View File

@ -1,7 +1,7 @@
# Брокер EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://img.shields.io/travis/emqx/emqx?label=Build)](https://travis-ci.org/emqx/emqx)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)

View File

@ -1,7 +1,7 @@
# EMQX
[![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases)
[![Build Status](https://img.shields.io/travis/emqx/emqx?label=Build)](https://travis-ci.org/emqx/emqx)
[![Build Status](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml)
[![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master)
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx)
[![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/)

View File

@ -2050,7 +2050,7 @@ base_listener_enable_authn {
Set <code>true</code> (default) to enable client authentication on this listener, the authentication
process goes through the configured authentication chain.
When set to <code>false</code>, any client, with or without authentication information such as username or password, is allowed to log in.
When set to <code>quick_deny_anonymous<code>, it behaves like when set to <code>true</code> but clients will be
When set to <code>quick_deny_anonymous</code>, it behaves like when set to <code>true</code>, but clients will be
denied immediately without going through any authenticators if <code>username</code> is not provided. This is useful to fence off
anonymous clients early.
"""

View File

@ -32,7 +32,7 @@
%% `apps/emqx/src/bpapi/README.md'
%% Community edition
-define(EMQX_RELEASE_CE, "5.0.13").
-define(EMQX_RELEASE_CE, "5.0.14").
%% Enterprise edition
-define(EMQX_RELEASE_EE, "5.0.0-beta.6").

View File

@ -15,10 +15,8 @@
%%--------------------------------------------------------------------
%% HTTP API Auth
-define(WRONG_USERNAME_OR_PWD, 'WRONG_USERNAME_OR_PWD').
-define(WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET,
'WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET'
).
-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
-define(BAD_API_KEY_OR_SECRET, 'BAD_API_KEY_OR_SECRET').
%% Bad Request
-define(BAD_REQUEST, 'BAD_REQUEST').
@ -57,8 +55,8 @@
%% All codes
-define(ERROR_CODES, [
{'WRONG_USERNAME_OR_PWD', <<"Wrong username or pwd">>},
{'WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET', <<"Wrong username & pwd or key & secret">>},
{?BAD_USERNAME_OR_PWD, <<"Bad username or password">>},
{?BAD_API_KEY_OR_SECRET, <<"Bad API key or secret">>},
{'BAD_REQUEST', <<"Request parameters are not legal">>},
{'NOT_MATCH', <<"Conditions are not matched">>},
{'ALREADY_EXISTS', <<"Resource already existed">>},

View File

@ -29,7 +29,7 @@
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.13.7"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.33.0"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.35.0"}}},
{pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
{recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.0"}}}

View File

@ -3,7 +3,7 @@
{id, "emqx"},
{description, "EMQX Core"},
% strict semver, bump manually!
{vsn, "5.0.14"},
{vsn, "5.0.15"},
{modules, []},
{registered, []},
{applications, [

View File

@ -362,8 +362,8 @@ schema_default(Schema) ->
[];
?LAZY(?ARRAY(_)) ->
[];
?LAZY(?UNION(Unions)) ->
case [A || ?ARRAY(A) <- Unions] of
?LAZY(?UNION(Members)) ->
case [A || ?ARRAY(A) <- hoconsc:union_members(Members)] of
[_ | _] -> [];
_ -> #{}
end;
@ -402,7 +402,6 @@ merge_envs(SchemaMod, RawConf) ->
required => false,
format => map,
apply_override_envs => true,
remove_env_meta => true,
check_lazy => true
},
hocon_tconf:merge_env_overrides(SchemaMod, RawConf, all, Opts).

View File

@ -21,7 +21,8 @@
format_path/1,
check/2,
format_error/1,
format_error/2
format_error/2,
make_schema/1
]).
%% @doc Format hocon config field path to dot-separated string in iolist format.
@ -79,6 +80,9 @@ format_error({_Schema, [#{kind := K} = First | Rest] = All}, Opts) when
format_error(_Other, _) ->
false.
%% @doc Wrap a list of schema fields into a minimal schema map:
%% the given fields become the schema roots, and there are no named
%% sub-field groups.  Useful for ad-hoc checks against a field list
%% without defining a full schema module.
make_schema(Fields) ->
    #{roots => Fields, fields => #{}}.
%% Ensure iolist()
iol(B) when is_binary(B) -> B;
iol(A) when is_atom(A) -> atom_to_binary(A, utf8);

View File

@ -57,6 +57,10 @@
-export([format_bind/1]).
-ifdef(TEST).
-export([certs_dir/2]).
-endif.
-define(CONF_KEY_PATH, [listeners, '?', '?']).
-define(TYPES_STRING, ["tcp", "ssl", "ws", "wss", "quic"]).
@ -415,6 +419,7 @@ pre_config_update(_Path, _Request, RawConf) ->
post_config_update([listeners, Type, Name], {create, _Request}, NewConf, undefined, _AppEnvs) ->
start_listener(Type, Name, NewConf);
post_config_update([listeners, Type, Name], {update, _Request}, NewConf, OldConf, _AppEnvs) ->
try_clear_ssl_files(certs_dir(Type, Name), NewConf, OldConf),
case NewConf of
#{enabled := true} -> restart_listener(Type, Name, {OldConf, NewConf});
_ -> ok
@ -670,7 +675,7 @@ certs_dir(Type, Name) ->
iolist_to_binary(filename:join(["listeners", Type, Name])).
convert_certs(CertsDir, Conf) ->
case emqx_tls_lib:ensure_ssl_files(CertsDir, maps:get(<<"ssl_options">>, Conf, undefined)) of
case emqx_tls_lib:ensure_ssl_files(CertsDir, get_ssl_options(Conf)) of
{ok, undefined} ->
Conf;
{ok, SSL} ->
@ -681,7 +686,7 @@ convert_certs(CertsDir, Conf) ->
end.
clear_certs(CertsDir, Conf) ->
OldSSL = maps:get(<<"ssl_options">>, Conf, undefined),
OldSSL = get_ssl_options(Conf),
emqx_tls_lib:delete_ssl_files(CertsDir, undefined, OldSSL).
filter_stacktrace({Reason, _Stacktrace}) -> Reason;
@ -692,3 +697,16 @@ ensure_override_limiter_conf(Conf, #{<<"limiter">> := Limiter}) ->
Conf#{<<"limiter">> => Limiter};
ensure_override_limiter_conf(Conf, _) ->
Conf.
%% Compare the SSL options of the old and new listener configs and ask
%% emqx_tls_lib to delete certificate files under CertsDir.
%% NOTE(review): presumably only files referenced by OldConf but no
%% longer referenced by NewConf are removed — confirm against
%% emqx_tls_lib:delete_ssl_files/3.
try_clear_ssl_files(CertsDir, NewConf, OldConf) ->
    NewSSL = get_ssl_options(NewConf),
    OldSSL = get_ssl_options(OldConf),
    emqx_tls_lib:delete_ssl_files(CertsDir, NewSSL, OldSSL).
%% Fetch the SSL options from a listener config map, tolerating both
%% key styles: the atom key `ssl_options' (checked config) is tried
%% first, then the binary key `<<"ssl_options">>' (raw config).
%% Returns `undefined' when neither key is present.
get_ssl_options(Conf) ->
    case maps:find(ssl_options, Conf) of
        {ok, SSL} ->
            SSL;
        error ->
            maps:get(<<"ssl_options">>, Conf, undefined)
    end.

View File

@ -31,6 +31,7 @@
-export([
inc/3,
inc/4,
observe/4,
get/3,
get_gauge/3,
set_gauge/5,
@ -38,6 +39,8 @@
get_gauges/2,
delete_gauges/2,
get_rate/2,
get_slide/2,
get_slide/3,
get_counters/2,
create_metrics/3,
create_metrics/4,
@ -67,7 +70,16 @@
-define(SAMPLING, 1).
-endif.
-export_type([metrics/0, handler_name/0, metric_id/0]).
-export_type([metrics/0, handler_name/0, metric_id/0, metric_spec/0]).
% Default
-type metric_type() ::
%% Simple counter
counter
%% Sliding window average
| slide.
-type metric_spec() :: {metric_type(), atom()}.
-type rate() :: #{
current := float(),
@ -77,6 +89,7 @@
-type metrics() :: #{
counters := #{metric_name() => integer()},
gauges := #{metric_name() => integer()},
slides := #{metric_name() => number()},
rate := #{metric_name() => rate()}
}.
-type handler_name() :: atom().
@ -103,9 +116,22 @@
last5m_smpl = [] :: list()
}).
-record(slide_datapoint, {
sum :: non_neg_integer(),
samples :: non_neg_integer(),
time :: non_neg_integer()
}).
-record(slide, {
%% Total number of samples through the history
n_samples = 0 :: non_neg_integer(),
datapoints = [] :: [#slide_datapoint{}]
}).
-record(state, {
metric_ids = sets:new(),
rates :: undefined | #{metric_id() => #rate{}}
rates :: #{metric_id() => #{metric_name() => #rate{}}} | undefined,
slides = #{} :: #{metric_id() => #{metric_name() => #slide{}}}
}).
%%------------------------------------------------------------------------------
@ -126,14 +152,18 @@ child_spec(ChldName, Name) ->
modules => [emqx_metrics_worker]
}.
-spec create_metrics(handler_name(), metric_id(), [metric_name()]) -> ok | {error, term()}.
-spec create_metrics(handler_name(), metric_id(), [metric_spec() | metric_name()]) ->
ok | {error, term()}.
create_metrics(Name, Id, Metrics) ->
create_metrics(Name, Id, Metrics, Metrics).
Metrics1 = desugar(Metrics),
Counters = filter_counters(Metrics1),
create_metrics(Name, Id, Metrics1, Counters).
-spec create_metrics(handler_name(), metric_id(), [metric_name()], [metric_name()]) ->
-spec create_metrics(handler_name(), metric_id(), [metric_spec() | metric_name()], [atom()]) ->
ok | {error, term()}.
create_metrics(Name, Id, Metrics, RateMetrics) ->
gen_server:call(Name, {create_metrics, Id, Metrics, RateMetrics}).
Metrics1 = desugar(Metrics),
gen_server:call(Name, {create_metrics, Id, Metrics1, RateMetrics}).
-spec clear_metrics(handler_name(), metric_id()) -> ok.
clear_metrics(Name, Id) ->
@ -156,7 +186,7 @@ get(Name, Id, Metric) ->
not_found ->
0;
Ref when is_atom(Metric) ->
counters:get(Ref, idx_metric(Name, Id, Metric));
counters:get(Ref, idx_metric(Name, Id, counter, Metric));
Ref when is_integer(Metric) ->
counters:get(Ref, Metric)
end.
@ -171,21 +201,37 @@ get_counters(Name, Id) ->
fun(_Metric, Index) ->
get(Name, Id, Index)
end,
get_indexes(Name, Id)
get_indexes(Name, counter, Id)
).
-spec get_slide(handler_name(), metric_id()) -> map().
get_slide(Name, Id) ->
gen_server:call(Name, {get_slide, Id}).
%% Get the average for a specified sliding window period.
%%
%% It will only account for the samples recorded in the past `Window' seconds.
-spec get_slide(handler_name(), metric_id(), non_neg_integer()) -> number().
get_slide(Name, Id, Window) ->
gen_server:call(Name, {get_slide, Id, Window}).
-spec reset_counters(handler_name(), metric_id()) -> ok.
reset_counters(Name, Id) ->
Indexes = maps:values(get_indexes(Name, Id)),
Ref = get_ref(Name, Id),
lists:foreach(fun(Idx) -> counters:put(Ref, Idx, 0) end, Indexes).
case get_ref(Name, Id) of
not_found ->
ok;
Ref ->
#{size := Size} = counters:info(Ref),
lists:foreach(fun(Idx) -> counters:put(Ref, Idx, 0) end, lists:seq(1, Size))
end.
-spec get_metrics(handler_name(), metric_id()) -> metrics().
get_metrics(Name, Id) ->
#{
rate => get_rate(Name, Id),
counters => get_counters(Name, Id),
gauges => get_gauges(Name, Id)
gauges => get_gauges(Name, Id),
slides => get_slide(Name, Id)
}.
-spec inc(handler_name(), metric_id(), atom()) -> ok.
@ -194,7 +240,37 @@ inc(Name, Id, Metric) ->
-spec inc(handler_name(), metric_id(), metric_name(), integer()) -> ok.
inc(Name, Id, Metric, Val) ->
counters:add(get_ref(Name, Id), idx_metric(Name, Id, Metric), Val).
counters:add(get_ref(Name, Id), idx_metric(Name, Id, counter, Metric), Val).
%% Add a sample to the slide.
%%
%% Slide is short for "sliding window average" type of metric.
%%
%% It allows to monitor an average of some observed values in time,
%% and it's mainly used for performance analysis. For example, it can
%% be used to report run time of operations.
%%
%% Consider an example:
%%
%% ```
%% emqx_metrics_worker:create_metrics(Name, Id, [{slide, a}]),
%% emqx_metrics_worker:observe(Name, Id, a, 10),
%% emqx_metrics_worker:observe(Name, Id, a, 30),
%% #{a := 20} = emqx_metrics_worker:get_slide(Name, Id, _Window = 1).
%% '''
%%
%% After recording 2 samples, this metric becomes 20 (the average of 10 and 30).
%%
%% But after 1 second it becomes 0 again, unless new samples are recorded.
%%
-spec observe(handler_name(), metric_id(), atom(), integer()) -> ok.
observe(Name, Id, Metric, Val) ->
#{ref := CRef, slide := Idx} = maps:get(Id, get_pterm(Name)),
Index = maps:get(Metric, Idx),
%% Update sum:
counters:add(CRef, Index, Val),
%% Update number of samples:
counters:add(CRef, Index + 1, 1).
-spec set_gauge(handler_name(), metric_id(), worker_id(), metric_name(), integer()) -> ok.
set_gauge(Name, Id, WorkerId, Metric, Val) ->
@ -300,9 +376,9 @@ handle_call({get_rate, Id}, _From, State = #state{rates = Rates}) ->
handle_call(
{create_metrics, Id, Metrics, RateMetrics},
_From,
State = #state{metric_ids = MIDs, rates = Rates}
State = #state{metric_ids = MIDs, rates = Rates, slides = Slides}
) ->
case RateMetrics -- Metrics of
case RateMetrics -- filter_counters(Metrics) of
[] ->
RatePerId = maps:from_list([{M, #rate{}} || M <- RateMetrics]),
Rate1 =
@ -310,9 +386,11 @@ handle_call(
undefined -> #{Id => RatePerId};
_ -> Rates#{Id => RatePerId}
end,
Slides1 = Slides#{Id => create_slides(Metrics)},
{reply, create_counters(get_self_name(), Id, Metrics), State#state{
metric_ids = sets:add_element(Id, MIDs),
rates = Rate1
rates = Rate1,
slides = Slides1
}};
_ ->
{reply, {error, not_super_set_of, {RateMetrics, Metrics}}, State}
@ -320,7 +398,7 @@ handle_call(
handle_call(
{delete_metrics, Id},
_From,
State = #state{metric_ids = MIDs, rates = Rates}
State = #state{metric_ids = MIDs, rates = Rates, slides = Slides}
) ->
Name = get_self_name(),
delete_counters(Name, Id),
@ -331,17 +409,16 @@ handle_call(
case Rates of
undefined -> undefined;
_ -> maps:remove(Id, Rates)
end
end,
slides = maps:remove(Id, Slides)
}};
handle_call(
{reset_metrics, Id},
_From,
State = #state{rates = Rates}
State = #state{rates = Rates, slides = Slides}
) ->
Name = get_self_name(),
delete_gauges(Name, Id),
{reply, reset_counters(Name, Id), State#state{
rates =
delete_gauges(get_self_name(), Id),
NewRates =
case Rates of
undefined ->
undefined;
@ -352,8 +429,23 @@ handle_call(
maps:get(Id, Rates, #{})
),
maps:put(Id, ResetRate, Rates)
end
end,
SlideSpecs = [{slide, I} || I <- maps:keys(maps:get(Id, Slides, #{}))],
NewSlides = Slides#{Id => create_slides(SlideSpecs)},
{reply, reset_counters(get_self_name(), Id), State#state{
rates =
NewRates,
slides = NewSlides
}};
handle_call({get_slide, Id}, _From, State = #state{slides = Slides}) ->
SlidesForID = maps:get(Id, Slides, #{}),
{reply, maps:map(fun(Metric, Slide) -> do_get_slide(Id, Metric, Slide) end, SlidesForID),
State};
handle_call({get_slide, Id, Window}, _From, State = #state{slides = Slides}) ->
SlidesForID = maps:get(Id, Slides, #{}),
{reply,
maps:map(fun(Metric, Slide) -> do_get_slide(Window, Id, Metric, Slide) end, SlidesForID),
State};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
@ -363,7 +455,7 @@ handle_cast(_Msg, State) ->
handle_info(ticking, State = #state{rates = undefined}) ->
erlang:send_after(timer:seconds(?SAMPLING), self(), ticking),
{noreply, State};
handle_info(ticking, State = #state{rates = Rates0}) ->
handle_info(ticking, State = #state{rates = Rates0, slides = Slides0}) ->
Rates =
maps:map(
fun(Id, RatesPerID) ->
@ -376,8 +468,20 @@ handle_info(ticking, State = #state{rates = Rates0}) ->
end,
Rates0
),
Slides =
maps:map(
fun(Id, SlidesPerID) ->
maps:map(
fun(Metric, Slide) ->
update_slide(Id, Metric, Slide)
end,
SlidesPerID
)
end,
Slides0
),
erlang:send_after(timer:seconds(?SAMPLING), self(), ticking),
{noreply, State#state{rates = Rates}};
{noreply, State#state{rates = Rates, slides = Slides}};
handle_info(_Info, State) ->
{noreply, State}.
@ -408,17 +512,18 @@ create_counters(_Name, _Id, []) ->
error({create_counter_error, must_provide_a_list_of_metrics});
create_counters(Name, Id, Metrics) ->
%% backup the old counters
OlderCounters = maps:with(Metrics, get_counters(Name, Id)),
OlderCounters = maps:with(filter_counters(Metrics), get_counters(Name, Id)),
%% create the new counter
Size = length(Metrics),
Indexes = maps:from_list(lists:zip(Metrics, lists:seq(1, Size))),
{Size, Indexes} = create_metric_indexes(Metrics),
Counters = get_pterm(Name),
CntrRef = counters:new(Size, [write_concurrency]),
persistent_term:put(
?CntrRef(Name),
Counters#{Id => #{ref => CntrRef, indexes => Indexes}}
Counters#{Id => Indexes#{ref => CntrRef}}
),
%% restore the old counters
%% Restore the old counters. Slides are not restored, since they
%% are periodically zeroed anyway. We do lose some samples in the
%% current interval, but that's acceptable for now.
lists:foreach(
fun({Metric, N}) ->
inc(Name, Id, Metric, N)
@ -426,6 +531,16 @@ create_counters(Name, Id, Metrics) ->
maps:to_list(OlderCounters)
).
%% Assign counter-array slots to a desugared metric spec list.
%% A counter occupies one slot; a slide occupies two consecutive slots
%% (running sum at Index, sample count at Index + 1 — see
%% get_slide_datapoint/2 for the reader side).
%% Returns {Size, #{counter => #{Name => Index}, slide => #{Name => Index}}},
%% where Size is one past the last slot handed out.
create_metric_indexes(Metrics) ->
    {NextIndex, CounterPairs, SlidePairs} =
        lists:foldl(
            fun
                ({counter, Name}, {I, Cs, Ss}) ->
                    {I + 1, [{Name, I} | Cs], Ss};
                ({slide, Name}, {I, Cs, Ss}) ->
                    {I + 2, Cs, [{Name, I} | Ss]}
            end,
            {1, [], []},
            Metrics
        ),
    {NextIndex, #{
        counter => maps:from_list(CounterPairs),
        slide => maps:from_list(SlidePairs)
    }}.
%% Drop all counter bookkeeping for Id from this worker's persistent
%% term, so later lookups via get_pterm/1 no longer see it.
delete_counters(Name, Id) ->
    persistent_term:put(?CntrRef(Name), maps:remove(Id, get_pterm(Name))).
@ -435,12 +550,12 @@ get_ref(Name, Id) ->
error -> not_found
end.
idx_metric(Name, Id, Metric) ->
maps:get(Metric, get_indexes(Name, Id)).
idx_metric(Name, Id, Type, Metric) ->
maps:get(Metric, get_indexes(Name, Type, Id)).
get_indexes(Name, Id) ->
get_indexes(Name, Type, Id) ->
case maps:find(Id, get_pterm(Name)) of
{ok, #{indexes := Indexes}} -> Indexes;
{ok, #{Type := Indexes}} -> Indexes;
error -> #{}
end.
@ -488,6 +603,53 @@ calculate_rate(CurrVal, #rate{
tick = Tick + 1
}.
%% Summarize a slide metric: total sample count plus integer averages
%% over the most recent 2 seconds ("current") and over the last 5
%% minutes ("last5m").
do_get_slide(Id, Metric, S = #slide{n_samples = NSamples}) ->
    #{
        n_samples => NSamples,
        %% A 2-second window approximates the value of the current interval.
        current => do_get_slide(2, Id, Metric, S),
        last5m => do_get_slide(?SECS_5M, Id, Metric, S)
    }.

%% Integer average of the samples observed within the last Window
%% seconds. The live (not yet rotated) counters are snapshotted via
%% get_slide_datapoint/2 and prepended so the newest data is included.
%% Returns 0 when no samples fall inside the window.
do_get_slide(Window, Id, Metric, #slide{datapoints = DP0}) ->
    Datapoint = get_slide_datapoint(Id, Metric),
    {N, Sum} = get_slide_window(os:system_time(second) - Window, [Datapoint | DP0], 0, 0),
    case N > 0 of
        true -> Sum div N;
        false -> 0
    end.
%% Accumulate {SampleCount, Sum} over every datapoint taken at or after
%% StartTime. The datapoint list is ordered newest-first, so the scan
%% stops at the first datapoint older than StartTime instead of
%% traversing the whole history.
get_slide_window(_StartTime, [], AccSamples, AccSum) ->
    {AccSamples, AccSum};
get_slide_window(StartTime, [#slide_datapoint{time = Time} | _Older], AccSamples, AccSum) when
    Time < StartTime
->
    {AccSamples, AccSum};
get_slide_window(StartTime, [#slide_datapoint{samples = Samples, sum = Sum} | Older], AccSamples, AccSum) ->
    get_slide_window(StartTime, Older, AccSamples + Samples, AccSum + Sum).
%% Snapshot the live slide counters for {Id, Metric} into a datapoint.
%% A slide occupies two adjacent counter slots: the running sum at
%% Index and the number of samples at Index + 1 (the layout created by
%% create_metric_indexes/1).
get_slide_datapoint(Id, Metric) ->
    Name = get_self_name(),
    CRef = get_ref(Name, Id),
    Index = idx_metric(Name, Id, slide, Metric),
    Total = counters:get(CRef, Index),
    N = counters:get(CRef, Index + 1),
    #slide_datapoint{
        sum = Total,
        samples = N,
        time = os:system_time(second)
    }.
%% Rotate the slide for {Id, Metric}: snapshot the live counters into a
%% datapoint, zero the counters for the next interval, and push the
%% datapoint onto the fixed-length history, dropping the oldest entry.
%% n_samples keeps a running total of all samples observed so far.
update_slide(Id, Metric, Slide0 = #slide{n_samples = NSamples, datapoints = DPs}) ->
    %% Snapshot before zeroing so no samples are lost in between.
    Datapoint = get_slide_datapoint(Id, Metric),
    %% Reset counters:
    Name = get_self_name(),
    CRef = get_ref(Name, Id),
    Index = idx_metric(Name, Id, slide, Metric),
    counters:put(CRef, Index, 0),
    counters:put(CRef, Index + 1, 0),
    Slide0#slide{
        %% Prepend + droplast keeps the history length constant
        %% (create_slides/1 pre-fills it, so it is never empty).
        datapoints = [Datapoint | lists:droplast(DPs)],
        n_samples = Datapoint#slide_datapoint.samples + NSamples
    }.
format_rates_of_id(RatesPerId) ->
maps:map(
fun(_Metric, Rates) ->
@ -510,6 +672,27 @@ precision(Float, N) ->
Base = math:pow(10, N),
round(Float * Base) / Base.
%% Normalize a metric spec list: a bare atom is shorthand for a counter
%% metric, while an explicit {Type, Name} 2-tuple passes through
%% unchanged.
desugar(Metrics) ->
    [desugar_metric(Spec) || Spec <- Metrics].

%% Normalize a single metric spec (see desugar/1).
desugar_metric(Atom) when is_atom(Atom) ->
    {counter, Atom};
desugar_metric(Spec = {_, _}) ->
    Spec.
%% Names of all counter-type metrics in a desugared spec list,
%% preserving their original order.
filter_counters(Metrics) ->
    lists:filtermap(
        fun
            ({counter, Name}) -> {true, Name};
            (_) -> false
        end,
        Metrics
    ).
%% Build the initial slide state for every {slide, Name} spec in
%% Metrics. Each slide is pre-filled with enough zeroed datapoints to
%% cover the 5-minute window at the sampling interval, so rotation
%% (update_slide/3) can always drop the oldest entry.
create_slides(Metrics) ->
    EmptyDatapoints = [
        #slide_datapoint{sum = 0, samples = 0, time = 0}
     || _ <- lists:seq(1, ?SECS_5M div ?SAMPLING)
    ],
    maps:from_list([{K, #slide{datapoints = EmptyDatapoints}} || {slide, K} <- Metrics]).
%% The registered name of the calling process. Fails with a badmatch
%% if the process is not registered.
get_self_name() ->
    {registered_name, RegisteredName} = erlang:process_info(self(), registered_name),
    RegisteredName.

View File

@ -16,6 +16,8 @@
-module(emqx_packet).
-elvis([{elvis_style, no_spec_with_records, disable}]).
-include("emqx.hrl").
-include("emqx_mqtt.hrl").
@ -492,7 +494,7 @@ format_variable(undefined, _, _) ->
format_variable(Variable, undefined, PayloadEncode) ->
format_variable(Variable, PayloadEncode);
format_variable(Variable, Payload, PayloadEncode) ->
[format_variable(Variable, PayloadEncode), format_payload(Payload, PayloadEncode)].
[format_variable(Variable, PayloadEncode), ",", format_payload(Payload, PayloadEncode)].
format_variable(
#mqtt_packet_connect{

View File

@ -111,15 +111,19 @@
comma_separated_atoms/0
]).
-export([namespace/0, roots/0, roots/1, fields/1, desc/1]).
-export([namespace/0, roots/0, roots/1, fields/1, desc/1, tags/0]).
-export([conf_get/2, conf_get/3, keys/2, filter/1]).
-export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1]).
-export([authz_fields/0]).
-export([sc/2, map/2]).
-elvis([{elvis_style, god_modules, disable}]).
namespace() -> broker.
tags() ->
[<<"EMQX">>].
roots() ->
%% TODO change config importance to a field metadata
roots(high) ++ roots(medium) ++ roots(low).
@ -323,31 +327,7 @@ fields("stats") ->
)}
];
fields("authorization") ->
[
{"no_match",
sc(
hoconsc:enum([allow, deny]),
#{
default => allow,
required => true,
desc => ?DESC(fields_authorization_no_match)
}
)},
{"deny_action",
sc(
hoconsc:enum([ignore, disconnect]),
#{
default => ignore,
required => true,
desc => ?DESC(fields_authorization_deny_action)
}
)},
{"cache",
sc(
ref(?MODULE, "cache"),
#{}
)}
];
authz_fields();
fields("cache") ->
[
{"enable",
@ -1644,7 +1624,7 @@ base_listener(Bind) ->
sc(
hoconsc:union([infinity, pos_integer()]),
#{
default => infinity,
default => <<"infinity">>,
desc => ?DESC(base_listener_max_connections)
}
)},
@ -2088,6 +2068,33 @@ do_default_ciphers(_) ->
%% otherwise resolve default ciphers list at runtime
[].
%% Shared top-level authorization settings (also exported for the authz
%% settings API, which builds its schema from these fields).
authz_fields() ->
    [
        {"no_match",
            sc(
                hoconsc:enum([allow, deny]),
                #{
                    %% Verdict applied when no source matches the request.
                    default => allow,
                    required => true,
                    desc => ?DESC(fields_authorization_no_match)
                }
            )},
        {"deny_action",
            sc(
                hoconsc:enum([ignore, disconnect]),
                #{
                    %% What to do with the client after a deny verdict.
                    default => ignore,
                    required => true,
                    desc => ?DESC(fields_authorization_deny_action)
                }
            )},
        {"cache",
            sc(
                ref(?MODULE, "cache"),
                #{}
            )}
    ].
%% @private return a list of keys in a parent field
-spec keys(string(), hocon:config()) -> [string()].
keys(Parent, Conf) ->
@ -2342,7 +2349,7 @@ authentication(Which) ->
undefined -> hoconsc:array(typerefl:map());
Module -> Module:root_type()
end,
%% It is a lazy type because when handing runtime update requests
%% It is a lazy type because when handling runtime update requests
%% the config is not checked by emqx_schema, but by the injected schema
Type = hoconsc:lazy(Type0),
#{

View File

@ -71,24 +71,15 @@
%%--------------------------------------------------------------------
create_router_tab(disc) ->
ok = mria:create_table(?ROUTE_DISC_TAB, [
{type, bag},
{rlog_shard, ?ROUTE_SHARD},
{storage, disc_copies},
{record_name, route},
{attributes, record_info(fields, route)},
{storage_properties, [
{ets, [
{read_concurrency, true},
{write_concurrency, true}
]}
]}
]);
create_table(?ROUTE_DISC_TAB, disc_copies);
create_router_tab(ram) ->
ok = mria:create_table(?ROUTE_RAM_TAB, [
create_table(?ROUTE_RAM_TAB, ram_copies).
create_table(Tab, Storage) ->
ok = mria:create_table(Tab, [
{type, bag},
{rlog_shard, ?ROUTE_SHARD},
{storage, ram_copies},
{storage, Storage},
{record_name, route},
{attributes, record_info(fields, route)},
{storage_properties, [

View File

@ -0,0 +1,24 @@
-----BEGIN CERTIFICATE-----
MIID/jCCAeagAwIBAgIJAKTICmq1Lg6dMA0GCSqGSIb3DQEBCwUAMDQxEjAQBgNV
BAoMCUVNUVggVGVzdDEeMBwGA1UEAwwVQ2VydGlmaWNhdGUgQXV0aG9yaXR5MB4X
DTIxMTIzMDA4NDExMloXDTQ5MDUxNzA4NDExMlowJTESMBAGA1UECgwJRU1RWCBU
ZXN0MQ8wDQYDVQQDDAZjbGllbnQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
AoIBAQDzrujfx6XZTH0MWqLO6kNAeHndUZ+OGaURXvxKMPMF5dA40lxNG6cEzzlq
0Rm61adlv8tF4kRJrs6EnRjEVoMImrdh07vGFdOTYqP01LjiBhErAzyRtSn2X8FT
Te8ExoCRs3x61SPebGY2hOvFxuO6YDPVOSDvbbxvRgqIlM1ZXC8dOvPSSGZ+P8hV
56EPayRthfu1FVptnkW9CyZCRI0gg95Hv8RC7bGG+tuWpkN9ZrRvohhgGR1+bDUi
BNBpncEsSh+UgWaj8KRN8D16H6m/Im6ty467j0at49FvPx5nACL48/ghtYvzgKLc
uKHtokKUuuzebDK/hQxN3mUSAJStAgMBAAGjIjAgMAsGA1UdDwQEAwIFoDARBglg
hkgBhvhCAQEEBAMCB4AwDQYJKoZIhvcNAQELBQADggIBAIlVyPhOpkz3MNzQmjX7
xgJ3vGPK5uK11n/wfjRwe2qXwZbrI2sYLVtTpUgvLDuP0gB73Vwfu7xAMdue6TRm
CKr9z0lkQsVBtgoqzZCjd4PYLfHm4EhsOMi98OGKU5uOGD4g3yLwQWXHhbYtiZMO
Jsj0hebYveYJt/BYTd1syGQcIcYCyVExWvSWjidfpAqjT6EF7whdubaFtuF2kaGF
IO9yn9rWtXB5yK99uCguEmKhx3fAQxomzqweTu3WRvy9axsUH3WAUW9a4DIBSz2+
ZSJNheFn5GktgggygJUGYqpSZHooUJW0UBs/8vX6AP+8MtINmqOGZUawmNwLWLOq
wHyVt2YGD5TXjzzsWNSQ4mqXxM6AXniZVZK0yYNjA4ATikX1AtwunyWBR4IjyE/D
FxYPORdZCOtywRFE1R5KLTUq/C8BNGCkYnoO78DJBO+pT0oagkQGQb0CnmC6C1db
4lWzA9K0i4B0PyooZA+gp+5FFgaLuX1DkyeaY1J204QhHR1z/Vcyl5dpqR9hqnYP
t8raLk9ogMDKqKA9iG0wc3CBNckD4sjVWAEeovXhElG55fD21wwhF+AnDCvX8iVK
cBfKV6z6uxfKjGIxc2I643I5DiIn+V3DnPxYyY74Ln1lWFYmt5JREhAxPu42zq74
e6+eIMYFszB+5gKgt6pa6ZNI
-----END CERTIFICATE-----

View File

@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEA867o38el2Ux9DFqizupDQHh53VGfjhmlEV78SjDzBeXQONJc
TRunBM85atEZutWnZb/LReJESa7OhJ0YxFaDCJq3YdO7xhXTk2Kj9NS44gYRKwM8
kbUp9l/BU03vBMaAkbN8etUj3mxmNoTrxcbjumAz1Tkg7228b0YKiJTNWVwvHTrz
0khmfj/IVeehD2skbYX7tRVabZ5FvQsmQkSNIIPeR7/EQu2xhvrblqZDfWa0b6IY
YBkdfmw1IgTQaZ3BLEoflIFmo/CkTfA9eh+pvyJurcuOu49GrePRbz8eZwAi+PP4
IbWL84Ci3Lih7aJClLrs3mwyv4UMTd5lEgCUrQIDAQABAoIBAQDwEbBgznrIwn8r
jZt5x/brbAV7Ea/kOcWSgIaCvQifFdJ2OGAwov5/UXwajNgRZe2d4z7qoUhvYuUY
ZwCAZU6ASpRBr2v9cYFYYURvrqZaHmoJew3P6q/lhl6aqFvC06DUagRHqvXEafyk
13zEAvZVpfNKrBaTawPKiDFWb2qDDc9D6hC07EuJ/DNeehiHvzHrSZSDVV5Ut7Bw
YDm33XygheUPAlHfeCnaixzcs3osiVyFEmVjxcIaM0ZS1NgcSaohSpJHMzvEaohX
e+v9vccraSVlw01AlvFwI2vHYUV8jT6HwglTPKKGOCzK/ace3wPdYSU9qLcqfuHn
EFhNc3tNAoGBAPugLMgbReJg2gpbIPUkYyoMMAAU7llFU1WvPWwXzo1a9EBjBACw
WfCZISNtANXR38zIYXzoH547uXi4YPks1Nne3sYuCDpvuX+iz7fIo4zHf1nFmxH7
eE6GtQr2ubmuuipTc28S0wBMGT1/KybH0e2NKL6GaOkNDmAI0IbEMBrvAoGBAPfr
Y1QYLhPhan6m5g/5s+bQpKtHfNH9TNkk13HuYu72zNuY3qL2GC7oSadR8vTbRXZg
KQqfaO0IGRcdkSFTq/AEhSSqr2Ld5nPadMbKvSGrSCc1s8rFH97jRVQY56yhM7ti
IW4+6cE8ylCMbdYB6wuduK/GIgNpqoF4xs1i2XojAoGACacBUMPLEH4Kny8TupOk
wi4pgTdMVVxVcAoC3yyincWJbRbfRm99Y79cCBHcYFdmsGJXawU0gUtlN/5KqgRQ
PfNQtGV7p1I12XGTakdmDrZwai8sXao52TlNpJgGU9siBRGicfZU5cQFi9he/WPY
57XshDJ/v8DidkigRysrdT0CgYEA5iuO22tblC+KvK1dGOXeZWO+DhrfwuGlcFBp
CaimB2/w/8vsn2VVTG9yujo2E6hj1CQw1mDrfG0xRim4LTXOgpbfugwRqvuTUmo2
Ur21XEX2RhjwpEfhcACWxB4fMUG0krrniMA2K6axupi1/KNpQi6bYe3UdFCs8Wld
QSAOAvsCgYBk/X5PmD44DvndE5FShM2w70YOoMr3Cgl5sdwAFUFE9yDuC14UhVxk
oxnYxwtVI9uVVirET+LczP9JEvcvxnN/Xg3tH/qm0WlIxmTxyYrFFIK9j0rqeu9z
blPu56OzNI2VMrR1GbOBLxQINLTIpaacjNJAlr8XOlegdUJsW/Jwqw==
-----END RSA PRIVATE KEY-----

View File

@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAzLiGiSwpxkENtjrzS7pNLblTnWe4HUUFwYyUX0H+3TnvA86X
EX85yZvFjkzB6lLjUkMY+C6UTVXt+mxeSJbUtSKZhX+2yoF/KYh7SaVjug5FqEqO
LvMpDZQEhUWF2W9DG6eUgOfDoX2milSDIe10yG2WBkryipHAfE7l1t+i6Rh3on+v
561LmrbqyBWR/cLp23RN3sHbkf2pb5/ugtU9twdgJr6Lve73rvSeulewL5BzszKD
BrYqr+PBT5+3ItCc55bTsO7M7CzOIL99BlqdvFH7xT0U1+2BFwLe4/8kwphSqyJE
C5oOiQBFnFVNXmFQSV+k7rPr80i1IO++HeJ6KQIDAQABAoIBAGWgvPjfuaU3qizq
uti/FY07USz0zkuJdkANH6LiSjlchzDmn8wJ0pApCjuIE0PV/g9aS8z4opp5q/gD
UBLM/a8mC/xf2EhTXOMrY7i9p/I3H5FZ4ZehEqIw9sWKK9YzC6dw26HabB2BGOnW
5nozPSQ6cp2RGzJ7BIkxSZwPzPnVTgy3OAuPOiJytvK+hGLhsNaT+Y9bNDvplVT2
ZwYTV8GlHZC+4b2wNROILm0O86v96O+Qd8nn3fXjGHbMsAnONBq10bZS16L4fvkH
5G+W/1PeSXmtZFppdRRDxIW+DWcXK0D48WRliuxcV4eOOxI+a9N2ZJZZiNLQZGwg
w3A8+mECgYEA8HuJFrlRvdoBe2U/EwUtG74dcyy30L4yEBnN5QscXmEEikhaQCfX
Wm6EieMcIB/5I5TQmSw0cmBMeZjSXYoFdoI16/X6yMMuATdxpvhOZGdUGXxhAH+x
xoTUavWZnEqW3fkUU71kT5E2f2i+0zoatFESXHeslJyz85aAYpP92H0CgYEA2e5A
Yozt5eaA1Gyhd8SeptkEU4xPirNUnVQHStpMWUb1kzTNXrPmNWccQ7JpfpG6DcYl
zUF6p6mlzY+zkMiyPQjwEJlhiHM2NlL1QS7td0R8ewgsFoyn8WsBI4RejWrEG9td
EDniuIw+pBFkcWthnTLHwECHdzgquToyTMjrBB0CgYEA28tdGbrZXhcyAZEhHAZA
Gzog+pKlkpEzeonLKIuGKzCrEKRecIK5jrqyQsCjhS0T7ZRnL4g6i0s+umiV5M5w
fcc292pEA1h45L3DD6OlKplSQVTv55/OYS4oY3YEJtf5mfm8vWi9lQeY8sxOlQpn
O+VZTdBHmTC8PGeTAgZXHZUCgYA6Tyv88lYowB7SN2qQgBQu8jvdGtqhcs/99GCr
H3N0I69LPsKAR0QeH8OJPXBKhDUywESXAaEOwS5yrLNP1tMRz5Vj65YUCzeDG3kx
gpvY4IMp7ArX0bSRvJ6mYSFnVxy3k174G3TVCfksrtagHioVBGQ7xUg5ltafjrms
n8l55QKBgQDVzU8tQvBVqY8/1lnw11Vj4fkE/drZHJ5UkdC1eenOfSWhlSLfUJ8j
ds7vEWpRPPoVuPZYeR1y78cyxKe1GBx6Wa2lF5c7xjmiu0xbRnrxYeLolce9/ntp
asClqpnHT8/VJYTD7Kqj0fouTTZf0zkig/y+2XERppd8k+pSKjUCPQ==
-----END RSA PRIVATE KEY-----

View File

@ -29,6 +29,9 @@
auth_header/2
]).
-define(DEFAULT_APP_ID, <<"default_appid">>).
-define(DEFAULT_APP_SECRET, <<"default_app_secret">>).
request_api(Method, Url, Auth) ->
request_api(Method, Url, [], Auth, []).
@ -74,12 +77,18 @@ auth_header(User, Pass) ->
{"Authorization", "Basic " ++ Encoded}.
default_auth_header() ->
AppId = <<"myappid">>,
AppSecret = emqx_mgmt_auth:get_appsecret(AppId),
auth_header(erlang:binary_to_list(AppId), erlang:binary_to_list(AppSecret)).
{ok, #{api_key := APIKey}} = emqx_mgmt_auth:read(?DEFAULT_APP_ID),
auth_header(
erlang:binary_to_list(APIKey), erlang:binary_to_list(?DEFAULT_APP_SECRET)
).
create_default_app() ->
emqx_mgmt_auth:add_app(<<"myappid">>, <<"test">>).
Now = erlang:system_time(second),
ExpiredAt = Now + timer:minutes(10),
emqx_mgmt_auth:create(
?DEFAULT_APP_ID, ?DEFAULT_APP_SECRET, true, ExpiredAt, <<"default app key for test">>
),
ok.
delete_default_app() ->
emqx_mgmt_auth:del_app(<<"myappid">>).
emqx_mgmt_auth:delete(?DEFAULT_APP_ID).

View File

@ -46,7 +46,7 @@ end_per_testcase(_, _Config) ->
ok.
t_get_metrics(_) ->
Metrics = [a, b, c],
Metrics = [a, b, c, {slide, d}],
Id = <<"testid">>,
ok = emqx_metrics_worker:create_metrics(?NAME, Id, Metrics),
%% all the metrics are set to zero at start
@ -73,6 +73,8 @@ t_get_metrics(_) ->
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id0, inflight, 5),
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id1, inflight, 7),
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id2, queuing, 9),
ok = emqx_metrics_worker:observe(?NAME, Id, d, 10),
ok = emqx_metrics_worker:observe(?NAME, Id, d, 30),
ct:sleep(1500),
?LET(
#{
@ -89,6 +91,9 @@ t_get_metrics(_) ->
a := 1,
b := 1,
c := 2
} = Counters,
slides := #{
d := #{n_samples := 2, last5m := 20, current := _}
}
},
emqx_metrics_worker:get_metrics(?NAME, Id),
@ -100,7 +105,8 @@ t_get_metrics(_) ->
?assert(MaxB > 0),
?assert(MaxC > 0),
?assert(Inflight == 12),
?assert(Queuing == 9)
?assert(Queuing == 9),
?assertNot(maps:is_key(d, Counters))
}
),
ok = emqx_metrics_worker:clear_metrics(?NAME, Id).
@ -117,6 +123,7 @@ t_clear_metrics(_Config) ->
c := #{current := 0.0, max := 0.0, last5m := 0.0}
},
gauges := #{},
slides := #{},
counters := #{
a := 0,
b := 0,
@ -138,14 +145,15 @@ t_clear_metrics(_Config) ->
#{
counters => #{},
gauges => #{},
rate => #{current => 0.0, last5m => 0.0, max => 0.0}
rate => #{current => 0.0, last5m => 0.0, max => 0.0},
slides => #{}
},
emqx_metrics_worker:get_metrics(?NAME, Id)
),
ok.
t_reset_metrics(_) ->
Metrics = [a, b, c],
Metrics = [a, b, c, {slide, d}],
Id = <<"testid">>,
ok = emqx_metrics_worker:create_metrics(?NAME, Id, Metrics),
%% all the metrics are set to zero at start
@ -161,6 +169,9 @@ t_reset_metrics(_) ->
a := 0,
b := 0,
c := 0
},
slides := #{
d := #{n_samples := 0, last5m := 0, current := 0}
}
},
emqx_metrics_worker:get_metrics(?NAME, Id)
@ -172,7 +183,12 @@ t_reset_metrics(_) ->
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id0, inflight, 5),
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id1, inflight, 7),
ok = emqx_metrics_worker:set_gauge(?NAME, Id, worker_id2, queuing, 9),
ok = emqx_metrics_worker:observe(?NAME, Id, d, 100),
ok = emqx_metrics_worker:observe(?NAME, Id, d, 200),
ct:sleep(1500),
?assertMatch(
#{d := #{n_samples := 2}}, emqx_metrics_worker:get_slide(?NAME, <<"testid">>)
),
ok = emqx_metrics_worker:reset_metrics(?NAME, Id),
?LET(
#{
@ -186,6 +202,9 @@ t_reset_metrics(_) ->
a := 0,
b := 0,
c := 0
},
slides := #{
d := #{n_samples := 0, last5m := 0, current := 0}
}
},
emqx_metrics_worker:get_metrics(?NAME, Id),
@ -202,7 +221,7 @@ t_reset_metrics(_) ->
ok = emqx_metrics_worker:clear_metrics(?NAME, Id).
t_get_metrics_2(_) ->
Metrics = [a, b, c],
Metrics = [a, b, c, {slide, d}],
Id = <<"testid">>,
ok = emqx_metrics_worker:create_metrics(
?NAME,

View File

@ -153,7 +153,7 @@ ssl_opts_gc_after_handshake_test_rancher_listener_test() ->
#{
kind := validation_error,
reason := unknown_fields,
unknown := <<"gc_after_handshake">>
unknown := "gc_after_handshake"
}
]},
validate(Sc, #{<<"gc_after_handshake">> => true})

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_authn, [
{description, "EMQX Authentication"},
{vsn, "0.1.11"},
{vsn, "0.1.12"},
{modules, []},
{registered, [emqx_authn_sup, emqx_authn_registry]},
{applications, [kernel, stdlib, emqx_resource, emqx_connector, ehttpc, epgsql, mysql, jose]},

View File

@ -22,6 +22,7 @@
-export([
common_fields/0,
roots/0,
tags/0,
fields/1,
authenticator_type/0,
authenticator_type_without_scram/0,
@ -32,6 +33,9 @@
roots() -> [].
tags() ->
[<<"Authentication">>].
common_fields() ->
[{enable, fun enable/1}].

View File

@ -25,6 +25,7 @@
-export([
namespace/0,
tags/0,
roots/0,
fields/1,
desc/1
@ -105,6 +106,9 @@ mnesia(boot) ->
namespace() -> "authn-scram-builtin_db".
tags() ->
[<<"Authentication">>].
roots() -> [?CONF_NS].
fields(?CONF_NS) ->

View File

@ -26,6 +26,7 @@
-export([
namespace/0,
tags/0,
roots/0,
fields/1,
desc/1,
@ -51,6 +52,9 @@
namespace() -> "authn-http".
tags() ->
[<<"Authentication">>].
roots() ->
[
{?CONF_NS,

View File

@ -25,6 +25,7 @@
-export([
namespace/0,
tags/0,
roots/0,
fields/1,
desc/1
@ -44,6 +45,9 @@
namespace() -> "authn-jwt".
tags() ->
[<<"Authentication">>].
roots() ->
[
{?CONF_NS,

View File

@ -26,6 +26,7 @@
-export([
namespace/0,
tags/0,
roots/0,
fields/1,
desc/1
@ -107,6 +108,9 @@ mnesia(boot) ->
namespace() -> "authn-builtin_db".
tags() ->
[<<"Authentication">>].
roots() -> [?CONF_NS].
fields(?CONF_NS) ->

View File

@ -25,6 +25,7 @@
-export([
namespace/0,
tags/0,
roots/0,
fields/1,
desc/1
@ -44,6 +45,9 @@
namespace() -> "authn-mongodb".
tags() ->
[<<"Authentication">>].
roots() ->
[
{?CONF_NS,

View File

@ -27,6 +27,7 @@
-export([
namespace/0,
tags/0,
roots/0,
fields/1,
desc/1
@ -46,6 +47,9 @@
namespace() -> "authn-mysql".
tags() ->
[<<"Authentication">>].
roots() -> [?CONF_NS].
fields(?CONF_NS) ->

View File

@ -26,6 +26,7 @@
-export([
namespace/0,
tags/0,
roots/0,
fields/1,
desc/1
@ -50,6 +51,9 @@
namespace() -> "authn-postgresql".
tags() ->
[<<"Authentication">>].
roots() -> [?CONF_NS].
fields(?CONF_NS) ->

View File

@ -25,6 +25,7 @@
-export([
namespace/0,
tags/0,
roots/0,
fields/1,
desc/1
@ -44,6 +45,9 @@
namespace() -> "authn-redis".
tags() ->
[<<"Authentication">>].
roots() ->
[
{?CONF_NS,

View File

@ -18,7 +18,8 @@
-compile(nowarn_export_all).
-compile(export_all).
-import(emqx_dashboard_api_test_helpers, [request/3, uri/1, multipart_formdata_request/3]).
-import(emqx_dashboard_api_test_helpers, [multipart_formdata_request/3]).
-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include("emqx_authn.hrl").
-include_lib("eunit/include/eunit.hrl").
@ -65,9 +66,8 @@ end_per_testcase(_, Config) ->
init_per_suite(Config) ->
emqx_config:erase(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY),
_ = application:load(emqx_conf),
ok = emqx_common_test_helpers:start_apps(
[emqx_authn, emqx_dashboard],
fun set_special_configs/1
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_authn]
),
?AUTHN:delete_chain(?GLOBAL),
@ -76,12 +76,7 @@ init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authn]),
ok.
set_special_configs(emqx_dashboard) ->
emqx_dashboard_api_test_helpers:set_default_config();
set_special_configs(_App) ->
emqx_mgmt_api_test_util:end_suite([emqx_authn]),
ok.
%%------------------------------------------------------------------------------

View File

@ -197,7 +197,7 @@ t_list_users(_) ->
#{is_superuser := false, user_id := _},
#{is_superuser := false, user_id := _}
],
meta := #{page := 1, limit := 2, count := 3}
meta := #{page := 1, limit := 2, count := 3, hasnext := true}
} = emqx_authn_mnesia:list_users(
#{<<"page">> => 1, <<"limit">> => 2},
State
@ -205,7 +205,7 @@ t_list_users(_) ->
#{
data := [#{is_superuser := false, user_id := _}],
meta := #{page := 2, limit := 2, count := 3}
meta := #{page := 2, limit := 2, count := 3, hasnext := false}
} = emqx_authn_mnesia:list_users(
#{<<"page">> => 2, <<"limit">> => 2},
State
@ -213,7 +213,7 @@ t_list_users(_) ->
#{
data := [#{is_superuser := false, user_id := <<"u3">>}],
meta := #{page := 1, limit := 20, count := 0}
meta := #{page := 1, limit := 20, hasnext := false}
} = emqx_authn_mnesia:list_users(
#{
<<"page">> => 1,

View File

@ -300,14 +300,14 @@ t_list_users(_) ->
#{
data := [?USER_MAP, ?USER_MAP],
meta := #{page := 1, limit := 2, count := 3}
meta := #{page := 1, limit := 2, count := 3, hasnext := true}
} = emqx_enhanced_authn_scram_mnesia:list_users(
#{<<"page">> => 1, <<"limit">> => 2},
State
),
#{
data := [?USER_MAP],
meta := #{page := 2, limit := 2, count := 3}
meta := #{page := 2, limit := 2, count := 3, hasnext := false}
} = emqx_enhanced_authn_scram_mnesia:list_users(
#{<<"page">> => 2, <<"limit">> => 2},
State
@ -319,7 +319,7 @@ t_list_users(_) ->
is_superuser := _
}
],
meta := #{page := 1, limit := 3, count := 0}
meta := #{page := 1, limit := 3, hasnext := false}
} = emqx_enhanced_authn_scram_mnesia:list_users(
#{
<<"page">> => 1,

View File

@ -15,7 +15,6 @@ authz:{
pool_size: 1
username: root
password: public
auto_reconnect: true
ssl: {
enable: true
cacertfile: "etc/certs/cacert.pem"
@ -33,7 +32,6 @@ authz:{
pool_size: 1
username: root
password: public
auto_reconnect: true
ssl: {enable: false}
}
sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = ${peerhost} or username = ${username} or username = '$all' or clientid = ${clientid}"
@ -45,7 +43,6 @@ authz:{
database: 0
pool_size: 1
password: public
auto_reconnect: true
ssl: {enable: false}
}
cmd: "HGETALL mqtt_authz:${username}"

View File

@ -1,6 +1,7 @@
authorization {
deny_action = ignore
no_match = allow
cache = { enable = true }
sources = [
{
type = file

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_authz, [
{description, "An OTP application"},
{vsn, "0.1.11"},
{vsn, "0.1.12"},
{registered, []},
{mod, {emqx_authz_app, []}},
{applications, [

View File

@ -20,6 +20,7 @@
-include("emqx_authz.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_hooks.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-ifdef(TEST).
-compile(export_all).
@ -358,6 +359,7 @@ authorize_non_superuser(
emqx_metrics:inc(?METRIC_DENY),
{stop, #{result => deny, from => AuthzSource}};
nomatch ->
?tp(authz_non_superuser, #{result => nomatch}),
?SLOG(info, #{
msg => "authorization_failed_nomatch",
username => Username,
@ -388,6 +390,12 @@ do_authorize(
nomatch ->
emqx_metrics_worker:inc(authz_metrics, Type, nomatch),
do_authorize(Client, PubSub, Topic, Tail);
%% {matched, allow | deny | ignore}
{matched, ignore} ->
do_authorize(Client, PubSub, Topic, Tail);
ignore ->
do_authorize(Client, PubSub, Topic, Tail);
%% {matched, allow | deny}
Matched ->
{Matched, Type}
catch

View File

@ -64,7 +64,7 @@ schema("/authorization/settings") ->
}.
ref_authz_schema() ->
proplists:delete(sources, emqx_conf_schema:fields("authorization")).
emqx_schema:authz_fields().
settings(get, _Params) ->
{200, authorization_settings()};
@ -83,4 +83,6 @@ settings(put, #{
{200, authorization_settings()}.
authorization_settings() ->
maps:remove(<<"sources">>, emqx:get_raw_config([authorization], #{})).
C = maps:remove(<<"sources">>, emqx:get_raw_config([authorization], #{})),
Schema = emqx_hocon:make_schema(emqx_schema:authz_fields()),
hocon_tconf:make_serializable(Schema, C, #{}).

View File

@ -449,7 +449,7 @@ is_ok(ResL) ->
get_raw_sources() ->
RawSources = emqx:get_raw_config([authorization, sources], []),
Schema = #{roots => emqx_authz_schema:fields("authorization"), fields => #{}},
Schema = emqx_hocon:make_schema(emqx_authz_schema:authz_fields()),
Conf = #{<<"sources">> => RawSources},
#{<<"sources">> := Sources} = hocon_tconf:make_serializable(Schema, Conf, #{}),
merge_default_headers(Sources).

View File

@ -20,6 +20,7 @@
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_placeholder.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-behaviour(emqx_authz).
@ -104,6 +105,7 @@ authorize(
log_nomtach_msg(Status, Headers, Body),
nomatch;
{error, Reason} ->
?tp(authz_http_request_failure, #{error => Reason}),
?SLOG(error, #{
msg => "http_server_query_failed",
resource => ResourceID,

View File

@ -33,9 +33,11 @@
-export([
namespace/0,
roots/0,
tags/0,
fields/1,
validations/0,
desc/1
desc/1,
authz_fields/0
]).
-export([
@ -47,14 +49,8 @@
%% Hocon Schema
%%--------------------------------------------------------------------
namespace() -> authz.
%% @doc authorization schema is not exported
%% but directly used by emqx_schema
roots() -> [].
fields("authorization") ->
Types = [
type_names() ->
[
file,
http_get,
http_post,
@ -67,18 +63,19 @@ fields("authorization") ->
redis_single,
redis_sentinel,
redis_cluster
],
Unions = [?R_REF(Type) || Type <- Types],
[
{sources,
?HOCON(
?ARRAY(?UNION(Unions)),
#{
default => [],
desc => ?DESC(sources)
}
)}
];
].
namespace() -> authz.
tags() ->
[<<"Authorization">>].
%% @doc authorization schema is not exported
%% but directly used by emqx_schema
roots() -> [].
fields("authorization") ->
authz_fields();
fields(file) ->
authz_common_fields(file) ++
[{path, ?HOCON(string(), #{required => true, desc => ?DESC(path)})}];
@ -408,9 +405,94 @@ common_rate_field() ->
].
method(Method) ->
?HOCON(Method, #{default => Method, required => true, desc => ?DESC(method)}).
?HOCON(Method, #{required => true, desc => ?DESC(method)}).
%% Array-of-ref schema helper; the description id defaults to the ref name.
array(Ref) -> array(Ref, Ref).

array(Ref, DescId) ->
    ?HOCON(?ARRAY(?R_REF(Ref)), #{desc => ?DESC(DescId)}).
%% Resolve a raw authz source config to the schema ref of its concrete
%% variant. mongodb, redis and http configs carry an extra discriminator
%% field that selects among several refs; every other type maps onto one
%% of type_names(). Throws a descriptive term when the config cannot be
%% recognized.
select_union_member(#{<<"type">> := <<"mongodb">>} = Value) ->
    case maps:get(<<"mongo_type">>, Value, undefined) of
        <<"single">> ->
            ?R_REF(mongo_single);
        <<"rs">> ->
            ?R_REF(mongo_rs);
        <<"sharded">> ->
            ?R_REF(mongo_sharded);
        Unknown ->
            throw(#{
                reason => "unknown_mongo_type",
                expected => "single | rs | sharded",
                got => Unknown
            })
    end;
select_union_member(#{<<"type">> := <<"redis">>} = Value) ->
    case maps:get(<<"redis_type">>, Value, undefined) of
        <<"single">> ->
            ?R_REF(redis_single);
        <<"cluster">> ->
            ?R_REF(redis_cluster);
        <<"sentinel">> ->
            ?R_REF(redis_sentinel);
        Unknown ->
            throw(#{
                reason => "unknown_redis_type",
                expected => "single | cluster | sentinel",
                got => Unknown
            })
    end;
select_union_member(#{<<"type">> := <<"http">>} = Value) ->
    case maps:get(<<"method">>, Value, undefined) of
        <<"get">> ->
            ?R_REF(http_get);
        <<"post">> ->
            ?R_REF(http_post);
        Unknown ->
            throw(#{
                reason => "unknown_http_method",
                expected => "get | post",
                got => Unknown
            })
    end;
select_union_member(#{<<"type">> := <<"built_in_database">>}) ->
    ?R_REF(mnesia);
select_union_member(#{<<"type">> := Type}) ->
    select_union_member_loop(Type, type_names());
select_union_member(_) ->
    throw("missing_type_field").
%% Linear scan over the known source type atoms; return the ref whose
%% name matches the binary TypeValue, or throw when none does.
select_union_member_loop(TypeValue, []) ->
    throw(#{
        reason => "unknown_authz_type",
        got => TypeValue
    });
select_union_member_loop(TypeValue, [Candidate | Rest]) ->
    %% TypeValue is already bound, so the case pattern is an equality check.
    case atom_to_binary(Candidate) of
        TypeValue ->
            ?R_REF(Candidate);
        _ ->
            select_union_member_loop(TypeValue, Rest)
    end.
%% The "sources" field of the authorization config: an array whose
%% element schema is resolved per entry by select_union_member/1.
authz_fields() ->
    Refs = [?R_REF(Type) || Type <- type_names()],
    Selector =
        fun
            %% hocon asks for every possible member when enumerating the union
            (all_union_members) ->
                Refs;
            %% ...and for the single matching member (as a list) when
            %% checking a concrete value
            ({value, Value}) ->
                [select_union_member(Value)]
        end,
    [
        {sources,
            ?HOCON(
                ?ARRAY(?UNION(Selector)),
                #{
                    default => [],
                    desc => ?DESC(sources)
                }
            )}
    ].

View File

@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
-import(emqx_dashboard_api_test_helpers, [request/2, uri/1]).
-import(emqx_mgmt_api_test_util, [request/2, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@ -32,8 +32,8 @@ groups() ->
[].
init_per_suite(Config) ->
ok = emqx_common_test_helpers:start_apps(
[emqx_conf, emqx_authz, emqx_dashboard, emqx_management],
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_conf, emqx_authz],
fun set_special_configs/1
),
Config.
@ -47,7 +47,7 @@ end_per_suite(_Config) ->
<<"sources">> => []
}
),
emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf, emqx_management]),
emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
ok.
set_special_configs(emqx_dashboard) ->
@ -67,12 +67,12 @@ t_clean_cahce(_) ->
ok = emqtt:publish(C, <<"a/b/c">>, <<"{\"x\":1,\"y\":1}">>, 0),
{ok, 200, Result3} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
?assertEqual(2, length(jsx:decode(Result3))),
?assertEqual(2, length(emqx_json:decode(Result3))),
request(delete, uri(["authorization", "cache"])),
{ok, 200, Result4} = request(get, uri(["clients", "emqx0", "authorization", "cache"])),
?assertEqual(0, length(jsx:decode(Result4))),
?assertEqual(0, length(emqx_json:decode(Result4))),
ok.

View File

@ -22,7 +22,7 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
all() ->
emqx_common_test_helpers:all(?MODULE).
@ -31,8 +31,8 @@ groups() ->
[].
init_per_suite(Config) ->
ok = emqx_common_test_helpers:start_apps(
[emqx_conf, emqx_authz, emqx_dashboard],
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_conf, emqx_authz],
fun set_special_configs/1
),
Config.
@ -46,7 +46,7 @@ end_per_suite(_Config) ->
<<"sources">> => []
}
),
emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
ok.
set_special_configs(emqx_dashboard) ->
@ -92,7 +92,8 @@ t_api(_) ->
<<"meta">> := #{
<<"count">> := 1,
<<"limit">> := 100,
<<"page">> := 1
<<"page">> := 1,
<<"hasnext">> := false
}
} = jsx:decode(Request1),
?assertEqual(3, length(Rules1)),
@ -109,14 +110,17 @@ t_api(_) ->
]),
[]
),
?assertEqual(
#{
<<"data">> := [],
<<"meta">> := #{
<<"count">> := 0,
<<"limit">> := 20,
<<"page">> := 1
<<"data">> => [],
<<"meta">> => #{
<<"limit">> => 20,
<<"page">> => 1,
<<"hasnext">> => false
}
} = jsx:decode(Request1_1),
},
jsx:decode(Request1_1)
),
{ok, 200, Request2} =
request(
@ -160,6 +164,14 @@ t_api(_) ->
[]
),
% ensure that db contain a mix of records
{ok, 204, _} =
request(
post,
uri(["authorization", "sources", "built_in_database", "username"]),
[?USERNAME_RULES_EXAMPLE]
),
{ok, 204, _} =
request(
post,

View File

@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
-import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@ -30,7 +30,7 @@ groups() ->
[].
init_per_suite(Config) ->
ok = emqx_common_test_helpers:start_apps(
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_conf, emqx_authz, emqx_dashboard],
fun set_special_configs/1
),
@ -46,7 +46,7 @@ end_per_suite(_Config) ->
}
),
ok = stop_apps([emqx_resource]),
emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
ok.
set_special_configs(emqx_dashboard) ->

View File

@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
-import(emqx_dashboard_api_test_helpers, [request/3, uri/1]).
-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@ -115,8 +115,8 @@ init_per_suite(Config) ->
end
),
ok = emqx_common_test_helpers:start_apps(
[emqx_conf, emqx_authz, emqx_dashboard],
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_conf, emqx_authz],
fun set_special_configs/1
),
ok = start_apps([emqx_resource]),
@ -134,7 +134,7 @@ end_per_suite(_Config) ->
%% resource and connector should be stop first,
%% or authz_[mysql|pgsql|redis..]_SUITE would be failed
ok = stop_apps([emqx_resource]),
emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]),
emqx_mgmt_api_test_util:end_suite([emqx_authz, emqx_conf]),
meck:unload(emqx_resource),
ok.

View File

@ -23,6 +23,7 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("emqx/include/emqx_placeholder.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-define(HTTP_PORT, 33333).
-define(HTTP_PATH, "/authz/[...]").
@ -64,7 +65,14 @@ init_per_testcase(_Case, Config) ->
Config.
end_per_testcase(_Case, _Config) ->
ok = emqx_authz_http_test_server:stop().
try
ok = emqx_authz_http_test_server:stop()
catch
exit:noproc ->
ok
end,
snabbkaffe:stop(),
ok.
%%------------------------------------------------------------------------------
%% Tests
@ -148,7 +156,39 @@ t_response_handling(_Config) ->
?assertEqual(
deny,
emqx_access_control:authorize(ClientInfo, publish, <<"t">>)
).
),
%% the server cannot be reached; should skip to the next
%% authorizer in the chain.
ok = emqx_authz_http_test_server:stop(),
?check_trace(
?assertEqual(
deny,
emqx_access_control:authorize(ClientInfo, publish, <<"t">>)
),
fun(Trace) ->
?assertMatch(
[
#{
?snk_kind := authz_http_request_failure,
error := {recoverable_error, econnrefused}
}
],
?of_kind(authz_http_request_failure, Trace)
),
?assert(
?strict_causality(
#{?snk_kind := authz_http_request_failure},
#{?snk_kind := authz_non_superuser, result := nomatch},
Trace
)
),
ok
end
),
ok.
t_query_params(_Config) ->
ok = setup_handler_and_config(

View File

@ -0,0 +1,116 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_schema_tests).
-include_lib("eunit/include/eunit.hrl").
%% The schema must reject a source whose `type` is not a known
%% authorizer type, reporting the offending value.
bad_authz_type_test() ->
    ?assertThrow(
        [#{reason := "unknown_authz_type", got := <<"foobar">>}],
        check("[{type: foobar}]")
    ).
%% A mongodb source with an unrecognized `mongo_type` must be rejected.
bad_mongodb_type_test() ->
    ?assertThrow(
        [#{reason := "unknown_mongo_type", got := <<"foobar">>}],
        check("[{type: mongodb, mongo_type: foobar}]")
    ).
%% A mongodb source with no `mongo_type` at all is reported the same
%% way, with `got := undefined`.
missing_mongodb_type_test() ->
    ?assertThrow(
        [#{reason := "unknown_mongo_type", got := undefined}],
        check("[{type: mongodb}]")
    ).
%% A redis source with an unrecognized `redis_type` must be rejected.
unknown_redis_type_test() ->
    ?assertThrow(
        [#{reason := "unknown_redis_type", got := <<"foobar">>}],
        check("[{type: redis, redis_type: foobar}]")
    ).
%% A redis source with no `redis_type` is reported with `got := undefined`.
missing_redis_type_test() ->
    ?assertThrow(
        [#{reason := "unknown_redis_type", got := undefined}],
        check("[{type: redis}]")
    ).
%% An http source with an invalid `method` value must be rejected.
unknown_http_method_test() ->
    ?assertThrow(
        [#{reason := "unknown_http_method", got := <<"getx">>}],
        check("[{type: http, method: getx}]")
    ).
%% An http source whose `method` key is absent (misspelled here as
%% `methodx`) is reported with `got := undefined`.
missing_http_method_test() ->
    ?assertThrow(
        [#{reason := "unknown_http_method", got := undefined}],
        check("[{type: http, methodx: get}]")
    ).
%% Wrap the given sources list in a minimal `sources: ...` config,
%% parse it with HOCON and validate it against the authz schema.
%% On validation failure only the error list is re-thrown, so the
%% tests above can match on it directly.
check(SourcesTxt) ->
    {ok, RawConf} = hocon:binary(["sources: ", SourcesTxt]),
    try
        hocon_tconf:check_plain(schema(), RawConf, #{})
    catch
        throw:{_Schema, Errors} ->
            throw(Errors)
    end.
%% Minimal schema map containing only the authorization root fields,
%% so `hocon_tconf:check_plain/3` is scoped to the authz config.
schema() ->
    #{roots => emqx_authz_schema:fields("authorization")}.

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_auto_subscribe, [
{description, "An OTP application"},
{vsn, "0.1.2"},
{description, "Auto subscribe Application"},
{vsn, "0.1.3"},
{registered, []},
{mod, {emqx_auto_subscribe_app, []}},
{applications, [

View File

@ -51,8 +51,21 @@ max_limit() ->
list() ->
format(emqx_conf:get([auto_subscribe, topics], [])).
update(Topics) ->
update_(Topics).
%% Replace the auto-subscribe topic list in the cluster-wide config.
%% Returns {ok, NewTopics} with the raw (defaults-filled) config on
%% success, or {error, Reason} if the config update fails.
%% Lists longer than ?MAX_AUTO_SUBSCRIBE are refused with
%% {error, quota_exceeded}.
update(Topics) when length(Topics) =< ?MAX_AUTO_SUBSCRIBE ->
    case
        emqx_conf:update(
            [auto_subscribe, topics],
            Topics,
            #{rawconf_with_defaults => true, override_to => cluster}
        )
    of
        {ok, #{raw_config := NewTopics}} ->
            {ok, NewTopics};
        {error, Reason} ->
            {error, Reason}
    end;
update(_Topics) ->
    {error, quota_exceeded}.
post_config_update(_KeyPath, _Req, NewTopics, _OldConf, _AppEnvs) ->
Config = emqx_conf:get([auto_subscribe], #{}),
@ -95,22 +108,6 @@ format(Rule = #{topic := Topic}) when is_map(Rule) ->
nl => maps:get(nl, Rule, 0)
}.
update_(Topics) when length(Topics) =< ?MAX_AUTO_SUBSCRIBE ->
case
emqx_conf:update(
[auto_subscribe, topics],
Topics,
#{rawconf_with_defaults => true, override_to => cluster}
)
of
{ok, #{raw_config := NewTopics}} ->
{ok, NewTopics};
{error, Reason} ->
{error, Reason}
end;
update_(_Topics) ->
{error, quota_exceeded}.
update_hook() ->
update_hook(emqx_conf:get([auto_subscribe], #{})).

View File

@ -34,7 +34,7 @@
-include_lib("emqx/include/emqx_placeholder.hrl").
api_spec() ->
emqx_dashboard_swagger:spec(?MODULE).
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
paths() ->
["/mqtt/auto_subscribe"].
@ -46,15 +46,15 @@ schema("/mqtt/auto_subscribe") ->
description => ?DESC(list_auto_subscribe_api),
tags => [<<"Auto Subscribe">>],
responses => #{
200 => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe")
200 => topics()
}
},
put => #{
description => ?DESC(update_auto_subscribe_api),
tags => [<<"Auto Subscribe">>],
'requestBody' => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe"),
'requestBody' => topics(),
responses => #{
200 => hoconsc:ref(emqx_auto_subscribe_schema, "auto_subscribe"),
200 => topics(),
409 => emqx_dashboard_swagger:error_codes(
[?EXCEED_LIMIT],
?DESC(update_auto_subscribe_api_response409)
@ -63,14 +63,17 @@ schema("/mqtt/auto_subscribe") ->
}
}.
%% Extract the schema of the `topics` field from the auto-subscribe
%% config schema, so the same definition is reused as the request body
%% and response schema of this API (with check_schema => true).
topics() ->
    Fields = emqx_auto_subscribe_schema:fields("auto_subscribe"),
    {topics, Topics} = lists:keyfind(topics, 1, Fields),
    Topics.
%%%==============================================================================================
%% api apply
auto_subscribe(get, _) ->
{200, emqx_auto_subscribe:list()};
auto_subscribe(put, #{body := #{}}) ->
{400, #{code => ?BAD_REQUEST, message => <<"Request body required">>}};
auto_subscribe(put, #{body := Params}) ->
case emqx_auto_subscribe:update(Params) of
auto_subscribe(put, #{body := Topics}) when is_list(Topics) ->
case emqx_auto_subscribe:update(Topics) of
{error, quota_exceeded} ->
Message = list_to_binary(
io_lib:format(

View File

@ -93,9 +93,8 @@ init_per_suite(Config) ->
" }"
>>
),
emqx_common_test_helpers:start_apps(
[emqx_conf, emqx_dashboard, ?APP],
fun set_special_configs/1
emqx_mgmt_api_test_util:init_suite(
[emqx_conf, ?APP]
),
Config.
@ -111,12 +110,6 @@ end_per_testcase(t_get_basic_usage_info, _Config) ->
end_per_testcase(_TestCase, _Config) ->
ok.
set_special_configs(emqx_dashboard) ->
emqx_dashboard_api_test_helpers:set_default_config(),
ok;
set_special_configs(_) ->
ok.
topic_config(T) ->
#{
topic => T,
@ -132,7 +125,7 @@ end_per_suite(_) ->
application:unload(?APP),
meck:unload(emqx_resource),
meck:unload(emqx_schema),
emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_conf, ?APP]).
emqx_mgmt_api_test_util:end_suite([emqx_conf, ?APP]).
t_auto_subscribe(_) ->
emqx_auto_subscribe:update([#{<<"topic">> => Topic} || Topic <- ?TOPICS]),
@ -151,6 +144,32 @@ t_update(_) ->
ResponseMap = emqx_json:decode(Response, [return_maps]),
?assertEqual(1, erlang:length(ResponseMap)),
BadBody1 = #{topic => ?TOPIC_S},
?assertMatch(
{error, {"HTTP/1.1", 400, "Bad Request"}},
emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody1)
),
BadBody2 = [#{topic => ?TOPIC_S, qos => 3}],
?assertMatch(
{error, {"HTTP/1.1", 400, "Bad Request"}},
emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody2)
),
BadBody3 = [#{topic => ?TOPIC_S, rh => 10}],
?assertMatch(
{error, {"HTTP/1.1", 400, "Bad Request"}},
emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody3)
),
BadBody4 = [#{topic => ?TOPIC_S, rap => -1}],
?assertMatch(
{error, {"HTTP/1.1", 400, "Bad Request"}},
emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody4)
),
BadBody5 = [#{topic => ?TOPIC_S, nl => -1}],
?assertMatch(
{error, {"HTTP/1.1", 400, "Bad Request"}},
emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, BadBody5)
),
{ok, Client} = emqtt:start_link(#{username => ?CLIENT_USERNAME, clientid => ?CLIENT_ID}),
{ok, _} = emqtt:connect(Client),
timer:sleep(100),

View File

@ -93,11 +93,20 @@ HTTP 请求的标头。<br/>
desc {
en: """
The body of the HTTP request.<br/>
If not provided, the body will be a JSON object of all the available fields.<br/>
There, 'all the available fields' means the context of a MQTT message when
this webhook is triggered by receiving a MQTT message (the `local_topic` is set),
or the context of the event when this webhook is triggered by a rule (i.e. this
webhook is used as an action of a rule).<br/>
Template with variables is allowed.
"""
zh: """
HTTP 请求的正文。<br/>
允许使用带有变量的模板。"""
如果没有设置该字段,请求正文将是包含所有可用字段的 JSON object。<br/>
如果该 webhook 是由于收到 MQTT 消息触发的,'所有可用字段' 将是 MQTT 消息的
上下文信息;如果该 webhook 是由于规则触发的,'所有可用字段' 则为触发事件的上下文信息。<br/>
允许使用带有变量的模板。
"""
}
label: {
en: "HTTP Body"

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_bridge, [
{description, "EMQX bridges"},
{vsn, "0.1.8"},
{vsn, "0.1.9"},
{registered, []},
{mod, {emqx_bridge_app, []}},
{applications, [

View File

@ -330,6 +330,7 @@ schema("/bridges/:id") ->
responses => #{
204 => <<"Bridge deleted">>,
400 => error_schema(['INVALID_ID'], "Update bridge failed"),
404 => error_schema('NOT_FOUND', "Bridge not found"),
403 => error_schema('FORBIDDEN_REQUEST', "Forbidden operation"),
503 => error_schema('SERVICE_UNAVAILABLE', "Service unavailable")
}
@ -452,6 +453,8 @@ schema("/bridges_probe") ->
end,
?TRY_PARSE_ID(
Id,
case emqx_bridge:lookup(BridgeType, BridgeName) of
{ok, _} ->
case emqx_bridge:check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActs) of
{ok, _} ->
204;
@ -465,6 +468,9 @@ schema("/bridges_probe") ->
{503, error_msg('SERVICE_UNAVAILABLE', <<"request timeout">>)};
{error, Reason} ->
{500, error_msg('INTERNAL_ERROR', Reason)}
end;
{error, not_found} ->
{404, error_msg('NOT_FOUND', <<"Bridge not found">>)}
end
).

View File

@ -132,13 +132,14 @@ create(BridgeId, Conf) ->
create(Type, Name, Conf) ->
create(Type, Name, Conf, #{}).
create(Type, Name, Conf, Opts) ->
create(Type, Name, Conf, Opts0) ->
?SLOG(info, #{
msg => "create bridge",
type => Type,
name => Name,
config => Conf
}),
Opts = override_start_after_created(Conf, Opts0),
{ok, _Data} = emqx_resource:create_local(
resource_id(Type, Name),
<<"emqx_bridge">>,
@ -146,7 +147,7 @@ create(Type, Name, Conf, Opts) ->
parse_confs(bin(Type), Name, Conf),
Opts
),
maybe_disable_bridge(Type, Name, Conf).
ok.
update(BridgeId, {OldConf, Conf}) ->
{BridgeType, BridgeName} = parse_bridge_id(BridgeId),
@ -155,7 +156,7 @@ update(BridgeId, {OldConf, Conf}) ->
update(Type, Name, {OldConf, Conf}) ->
update(Type, Name, {OldConf, Conf}, #{}).
update(Type, Name, {OldConf, Conf}, Opts) ->
update(Type, Name, {OldConf, Conf}, Opts0) ->
%% TODO: sometimes its not necessary to restart the bridge connection.
%%
%% - if the connection related configs like `servers` is updated, we should restart/start
@ -164,6 +165,7 @@ update(Type, Name, {OldConf, Conf}, Opts) ->
%% the `method` or `headers` of a WebHook is changed, then the bridge can be updated
%% without restarting the bridge.
%%
Opts = override_start_after_created(Conf, Opts0),
case emqx_map_lib:if_only_to_toggle_enable(OldConf, Conf) of
false ->
?SLOG(info, #{
@ -174,10 +176,10 @@ update(Type, Name, {OldConf, Conf}, Opts) ->
}),
case recreate(Type, Name, Conf, Opts) of
{ok, _} ->
maybe_disable_bridge(Type, Name, Conf);
ok;
{error, not_found} ->
?SLOG(warning, #{
msg => "updating_a_non-exist_bridge_need_create_a_new_one",
msg => "updating_a_non_existing_bridge",
type => Type,
name => Name,
config => Conf
@ -244,12 +246,6 @@ remove(Type, Name, _Conf, _Opts) ->
{error, Reason} -> {error, Reason}
end.
maybe_disable_bridge(Type, Name, Conf) ->
case maps:get(enable, Conf, true) of
false -> stop(Type, Name);
true -> ok
end.
maybe_clear_certs(TmpPath, #{ssl := SslConf} = Conf) ->
%% don't remove the cert files if they are in use
case is_tmp_path_conf(TmpPath, SslConf) of
@ -276,7 +272,6 @@ parse_confs(
#{
url := Url,
method := Method,
body := Body,
headers := Headers,
request_timeout := ReqTimeout,
max_retries := Retry
@ -290,7 +285,7 @@ parse_confs(
#{
path => Path,
method => Method,
body => Body,
body => maps:get(body, Conf, undefined),
headers => Headers,
request_timeout => ReqTimeout,
max_retries => Retry
@ -324,3 +319,8 @@ str(Str) when is_list(Str) -> Str.
bin(Bin) when is_binary(Bin) -> Bin;
bin(Str) when is_list(Str) -> list_to_binary(Str);
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
%% Force `start_after_created` to false for disabled bridges: a bridge
%% with `enable => false` must never be started right after creation.
%% Otherwise the caller-provided option is honored, defaulting to the
%% bridge's own enable flag (true when `enable` is absent).
override_start_after_created(Config, Opts) ->
    IsEnabled = maps:get(enable, Config, true),
    ShouldStart = IsEnabled andalso maps:get(start_after_created, Opts, IsEnabled),
    Opts#{start_after_created => ShouldStart}.

View File

@ -20,7 +20,7 @@
-import(hoconsc, [mk/2, ref/2]).
-export([roots/0, fields/1, desc/1, namespace/0]).
-export([roots/0, fields/1, desc/1, namespace/0, tags/0]).
-export([
get_response/0,
@ -104,6 +104,9 @@ metrics_status_fields() ->
namespace() -> "bridge".
tags() ->
[<<"Bridge">>].
roots() -> [bridges].
fields(bridges) ->
@ -122,7 +125,9 @@ fields(bridges) ->
#{
desc => ?DESC("bridges_mqtt"),
required => false,
converter => fun emqx_bridge_mqtt_config:upgrade_pre_ee/1
converter => fun(X, _HoconOpts) ->
emqx_bridge_mqtt_config:upgrade_pre_ee(X)
end
}
)}
] ++ ee_fields_bridges();

View File

@ -115,7 +115,7 @@ request_config() ->
mk(
binary(),
#{
default => <<"${payload}">>,
default => undefined,
desc => ?DESC("config_body")
}
)},

View File

@ -18,7 +18,7 @@
-compile(nowarn_export_all).
-compile(export_all).
-import(emqx_dashboard_api_test_helpers, [request/4, uri/1]).
-import(emqx_mgmt_api_test_util, [request/3, uri/1]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@ -68,9 +68,8 @@ init_per_suite(Config) ->
%% some testcases (may from other app) already get emqx_connector started
_ = application:stop(emqx_resource),
_ = application:stop(emqx_connector),
ok = emqx_common_test_helpers:start_apps(
[emqx_rule_engine, emqx_bridge, emqx_dashboard],
fun set_special_configs/1
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_rule_engine, emqx_bridge]
),
ok = emqx_common_test_helpers:load_config(
emqx_rule_engine_schema,
@ -80,12 +79,7 @@ init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([emqx_rule_engine, emqx_bridge, emqx_dashboard]),
ok.
set_special_configs(emqx_dashboard) ->
emqx_dashboard_api_test_helpers:set_default_config(<<"bridge_admin">>);
set_special_configs(_) ->
emqx_mgmt_api_test_util:end_suite([emqx_rule_engine, emqx_bridge]),
ok.
init_per_testcase(_, Config) ->
@ -311,6 +305,15 @@ t_http_crud_apis(Config) ->
},
jsx:decode(ErrMsg2)
),
%% Deleting a non-existing bridge should result in an error
{ok, 404, ErrMsg3} = request(delete, uri(["bridges", BridgeID]), []),
?assertMatch(
#{
<<"code">> := _,
<<"message">> := <<"Bridge not found">>
},
jsx:decode(ErrMsg3)
),
ok.
t_http_bridges_local_topic(Config) ->

View File

@ -1,6 +1,6 @@
{application, emqx_conf, [
{description, "EMQX configuration management"},
{vsn, "0.1.9"},
{vsn, "0.1.10"},
{registered, []},
{mod, {emqx_conf_app, []}},
{applications, [kernel, stdlib]},

View File

@ -316,7 +316,7 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) ->
{[Schema | Acc], SubRefs ++ RefsAcc}
end,
{[], []},
Types
hoconsc:union_members(Types)
),
{#{<<"oneOf">> => OneOf}, Refs};
hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->

View File

@ -38,7 +38,9 @@
cipher/0
]).
-export([namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1]).
-export([
namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1, tags/0
]).
-export([conf_get/2, conf_get/3, keys/2, filter/1]).
%% Static apps which merge their configs into the merged emqx.conf
@ -60,12 +62,16 @@
emqx_exhook_schema,
emqx_psk_schema,
emqx_limiter_schema,
emqx_slow_subs_schema
emqx_slow_subs_schema,
emqx_mgmt_api_key_schema
]).
%% root config should not have a namespace
namespace() -> undefined.
tags() ->
[<<"EMQX">>].
roots() ->
PtKey = ?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY,
case persistent_term:get(PtKey, undefined) of
@ -941,8 +947,8 @@ fields("log_burst_limit") ->
)}
];
fields("authorization") ->
emqx_schema:fields("authorization") ++
emqx_authz_schema:fields("authorization").
emqx_schema:authz_fields() ++
emqx_authz_schema:authz_fields().
desc("cluster") ->
?DESC("desc_cluster");

View File

@ -14,7 +14,7 @@ An MySQL connector can be used as following:
```
(emqx@127.0.0.1)5> emqx_resource:list_instances_verbose().
[#{config =>
#{auto_reconnect => true,cacertfile => [],certfile => [],
#{cacertfile => [],certfile => [],
database => "mqtt",keyfile => [],password => "public",
pool_size => 1,
server => {{127,0,0,1},3306},

View File

@ -0,0 +1,37 @@
emqx_connector_ldap {
bind_dn {
desc {
en: """LDAP's Binding Distinguished Name (DN)"""
zh: """LDAP 绑定的 DN 的值"""
}
label: {
en: "Bind DN"
zh: "Bind DN"
}
}
port {
desc {
en: """LDAP Port"""
zh: """LDAP 端口"""
}
label: {
en: "Port"
zh: "端口"
}
}
timeout {
desc {
en: """LDAP's query timeout"""
zh: """LDAP 查询超时时间"""
}
label: {
en: "timeout"
zh: "超时时间"
}
}
}

View File

@ -2,34 +2,34 @@ emqx_connector_mongo {
single_mongo_type {
desc {
en: "Standalone instance."
zh: "Standalone模式。"
en: "Standalone instance. Must be set to 'single' when MongoDB server is running in standalone mode."
zh: "Standalone 模式。当 MongoDB 服务运行在 standalone 模式下,该配置必须设置为 'single'。 "
}
label: {
en: "Standalone instance"
zh: "Standalone模式"
zh: "Standalone 模式"
}
}
rs_mongo_type {
desc {
en: "Replica set."
zh: "Replica set模式。"
en: "Replica set. Must be set to 'rs' when MongoDB server is running in 'replica set' mode."
zh: "Replica set模式。当 MongoDB 服务运行在 replica-set 模式下,该配置必须设置为 'rs'。"
}
label: {
en: "Replica set"
zh: "Replica set模式"
zh: "Replica set 模式"
}
}
sharded_mongo_type {
desc {
en: "Sharded cluster."
zh: "Sharded cluster模式。"
en: "Sharded cluster. Must be set to 'sharded' when MongoDB server is running in 'sharded' mode."
zh: "Sharded cluster模式。当 MongoDB 服务运行在 sharded 模式下,该配置必须设置为 'sharded'。"
}
label: {
en: "Sharded cluster"
zh: "Sharded cluster模式"
zh: "Sharded cluster 模式"
}
}

View File

@ -2,8 +2,8 @@ emqx_connector_redis {
single {
desc {
en: "Single mode"
zh: "单机模式。"
en: "Single mode. Must be set to 'single' when Redis server is running in single mode."
zh: "单机模式。当 Redis 服务运行在单机模式下,该配置必须设置为 'single'。"
}
label: {
en: "Single Mode"
@ -13,8 +13,8 @@ emqx_connector_redis {
cluster {
desc {
en: "Cluster mode"
zh: "集群模式。"
en: "Cluster mode. Must be set to 'cluster' when Redis server is running in clustered mode."
zh: "集群模式。当 Redis 服务运行在集群模式下,该配置必须设置为 'cluster'。"
}
label: {
en: "Cluster Mode"
@ -24,8 +24,8 @@ emqx_connector_redis {
sentinel {
desc {
en: "Sentinel mode"
zh: "哨兵模式。"
en: "Sentinel mode. Must be set to 'sentinel' when Redis server is running in sentinel mode."
zh: "哨兵模式。当 Redis 服务运行在哨兵模式下,该配置必须设置为 'sentinel'。"
}
label: {
en: "Sentinel Mode"

View File

@ -68,12 +68,12 @@ emqx_connector_schema_lib {
auto_reconnect {
desc {
en: "Enable automatic reconnect to the database."
zh: "自动重连数据库。"
en: "Deprecated. Enable automatic reconnect to the database."
zh: "已弃用。自动重连数据库。"
}
label: {
en: "Auto Reconnect Database"
zh: "自动重连数据库"
en: "Deprecated. Auto Reconnect Database"
zh: "已弃用。自动重连数据库"
}
}

View File

@ -24,6 +24,8 @@
-define(REDIS_DEFAULT_PORT, 6379).
-define(PGSQL_DEFAULT_PORT, 5432).
-define(AUTO_RECONNECT_INTERVAL, 2).
-define(SERVERS_DESC,
"A Node list for Cluster to connect to. The nodes should be separated with commas, such as: `Node[,Node].`<br/>"
"For each Node should be: "

View File

@ -12,9 +12,9 @@
{mysql, {git, "https://github.com/emqx/mysql-otp", {tag, "1.7.1"}}},
{epgsql, {git, "https://github.com/emqx/epgsql", {tag, "4.7-emqx.2"}}},
%% NOTE: mind poolboy version when updating mongodb-erlang version
{mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.13"}}},
{mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.19"}}},
%% NOTE: mind poolboy version when updating eredis_cluster version
{eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.7.1"}}},
{eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.7.5"}}},
%% mongodb-erlang uses a special fork https://github.com/comtihon/poolboy.git
%% (which has overflow_ttl feature added).
%% However, it references `{branch, "master}` (commit 9c06a9a on 2021-04-07).

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_connector, [
{description, "EMQX Data Integration Connectors"},
{vsn, "0.1.11"},
{vsn, "0.1.12"},
{registered, []},
{mod, {emqx_connector_app, []}},
{applications, [

View File

@ -209,7 +209,7 @@ on_start(
?SLOG(info, #{
msg => "starting_http_connector",
connector => InstId,
config => Config
config => emqx_misc:redact(Config)
}),
{Transport, TransportOpts} =
case Scheme of
@ -431,14 +431,13 @@ preprocess_request(
#{
method := Method,
path := Path,
body := Body,
headers := Headers
} = Req
) ->
#{
method => emqx_plugin_libs_rule:preproc_tmpl(bin(Method)),
path => emqx_plugin_libs_rule:preproc_tmpl(Path),
body => emqx_plugin_libs_rule:preproc_tmpl(Body),
body => maybe_preproc_tmpl(body, Req),
headers => preproc_headers(Headers),
request_timeout => maps:get(request_timeout, Req, 30000),
max_retries => maps:get(max_retries, Req, 2)
@ -469,6 +468,12 @@ preproc_headers(Headers) when is_list(Headers) ->
Headers
).
%% Preprocess the value under Key as a template; an absent key or an
%% explicit `undefined` value passes through as `undefined`.
maybe_preproc_tmpl(Key, Conf) ->
    Raw = maps:get(Key, Conf, undefined),
    case Raw of
        undefined -> undefined;
        Tmpl -> emqx_plugin_libs_rule:preproc_tmpl(Tmpl)
    end.
process_request(
#{
method := MethodTks,
@ -487,7 +492,7 @@ process_request(
request_timeout => ReqTimeout
}.
process_request_body([], Msg) ->
process_request_body(undefined, Msg) ->
emqx_json:encode(Msg);
process_request_body(BodyTks, Msg) ->
emqx_plugin_libs_rule:proc_tmpl(BodyTks, Msg).

View File

@ -59,14 +59,13 @@ on_start(
bind_password := BindPassword,
timeout := Timeout,
pool_size := PoolSize,
auto_reconnect := AutoReconn,
ssl := SSL
} = Config
) ->
?SLOG(info, #{
msg => "starting_ldap_connector",
connector => InstId,
config => Config
config => emqx_misc:redact(Config)
}),
Servers = emqx_schema:parse_servers(Servers0, ?LDAP_HOST_OPTIONS),
SslOpts =
@ -86,11 +85,11 @@ on_start(
{bind_password, BindPassword},
{timeout, Timeout},
{pool_size, PoolSize},
{auto_reconnect, reconn_interval(AutoReconn)}
{auto_reconnect, ?AUTO_RECONNECT_INTERVAL}
],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts ++ SslOpts) of
ok -> {ok, #{poolname => PoolName, auto_reconnect => AutoReconn}};
ok -> {ok, #{poolname => PoolName}};
{error, Reason} -> {error, Reason}
end.
@ -129,9 +128,6 @@ on_query(InstId, {search, Base, Filter, Attributes}, #{poolname := PoolName} = S
on_get_status(_InstId, _State) -> connected.
reconn_interval(true) -> 15;
reconn_interval(false) -> false.
search(Conn, Base, Filter, Attributes) ->
eldap2:search(Conn, [
{base, Base},

View File

@ -68,7 +68,6 @@ fields(single) ->
{mongo_type, #{
type => single,
default => single,
required => true,
desc => ?DESC("single_mongo_type")
}},
{server, server()},
@ -79,7 +78,6 @@ fields(rs) ->
{mongo_type, #{
type => rs,
default => rs,
required => true,
desc => ?DESC("rs_mongo_type")
}},
{servers, servers()},
@ -92,7 +90,6 @@ fields(sharded) ->
{mongo_type, #{
type => sharded,
default => sharded,
required => true,
desc => ?DESC("sharded_mongo_type")
}},
{servers, servers()},
@ -158,7 +155,7 @@ on_start(
rs -> "starting_mongodb_replica_set_connector";
sharded -> "starting_mongodb_sharded_connector"
end,
?SLOG(info, #{msg => Msg, connector => InstId, config => Config}),
?SLOG(info, #{msg => Msg, connector => InstId, config => emqx_misc:redact(Config)}),
NConfig = #{hosts := Hosts} = maybe_resolve_srv_and_txt_records(Config),
SslOpts =
case maps:get(enable, SSL) of

View File

@ -15,6 +15,8 @@
%%--------------------------------------------------------------------
-module(emqx_connector_mqtt).
-include("emqx_connector.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-include_lib("emqx/include/logger.hrl").
@ -147,7 +149,7 @@ on_start(InstId, Conf) ->
?SLOG(info, #{
msg => "starting_mqtt_connector",
connector => InstanceId,
config => Conf
config => emqx_misc:redact(Conf)
}),
BasicConf = basic_config(Conf),
BridgeConf = BasicConf#{
@ -198,12 +200,10 @@ on_query_async(
?TRACE("QUERY", "async_send_msg_to_remote_node", #{message => Msg, connector => InstanceId}),
emqx_connector_mqtt_worker:send_to_remote_async(InstanceId, Msg, {ReplyFun, Args}).
on_get_status(_InstId, #{name := InstanceId, bridge_conf := Conf}) ->
AutoReconn = maps:get(auto_reconnect, Conf, true),
on_get_status(_InstId, #{name := InstanceId}) ->
case emqx_connector_mqtt_worker:status(InstanceId) of
connected -> connected;
_ when AutoReconn == true -> connecting;
_ when AutoReconn == false -> disconnected
_ -> connecting
end.
ensure_mqtt_worker_started(InstanceId, BridgeConf) ->
@ -236,7 +236,6 @@ make_forward_confs(FrowardConf) ->
basic_config(
#{
server := Server,
reconnect_interval := ReconnIntv,
proto_ver := ProtoVer,
bridge_mode := BridgeMode,
clean_start := CleanStart,
@ -252,7 +251,7 @@ basic_config(
%% 30s
connect_timeout => 30,
auto_reconnect => true,
reconnect_interval => ReconnIntv,
reconnect_interval => ?AUTO_RECONNECT_INTERVAL,
proto_ver => ProtoVer,
%% Opening bridge_mode will form a non-standard mqtt connection message.
%% A load balancing server (such as haproxy) is often set up before the emqx broker server.

View File

@ -52,7 +52,6 @@
-type state() ::
#{
poolname := atom(),
auto_reconnect := boolean(),
prepare_statement := prepares(),
params_tokens := params_tokens(),
batch_inserts := sqls(),
@ -95,7 +94,6 @@ on_start(
server := Server,
database := DB,
username := Username,
auto_reconnect := AutoReconn,
pool_size := PoolSize,
ssl := SSL
} = Config
@ -104,7 +102,7 @@ on_start(
?SLOG(info, #{
msg => "starting_mysql_connector",
connector => InstId,
config => Config
config => emqx_misc:redact(Config)
}),
SslOpts =
case maps:get(enable, SSL) of
@ -120,13 +118,13 @@ on_start(
{port, Port},
{user, Username},
{database, DB},
{auto_reconnect, reconn_interval(AutoReconn)},
{auto_reconnect, ?AUTO_RECONNECT_INTERVAL},
{pool_size, PoolSize}
]
],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
Prepares = parse_prepare_sql(Config),
State = maps:merge(#{poolname => PoolName, auto_reconnect => AutoReconn}, Prepares),
State = maps:merge(#{poolname => PoolName}, Prepares),
case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Options ++ SslOpts) of
ok ->
{ok, init_prepare(State)};
@ -211,7 +209,7 @@ mysql_function(prepared_query) ->
mysql_function(_) ->
mysql_function(prepared_query).
on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State) ->
on_get_status(_InstId, #{poolname := Pool} = State) ->
case emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1) of
true ->
case do_check_prepares(State) of
@ -222,10 +220,10 @@ on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State
{connected, NState};
{error, _Reason} ->
%% do not log error, it is logged in prepare_sql_to_conn
conn_status(AutoReconn)
connecting
end;
false ->
conn_status(AutoReconn)
connecting
end.
do_get_status(Conn) ->
@ -244,11 +242,6 @@ do_check_prepares(State = #{poolname := PoolName, prepare_statement := {error, P
end.
%% ===================================================================
conn_status(_AutoReconn = true) -> connecting;
conn_status(_AutoReconn = false) -> disconnected.
reconn_interval(true) -> 15;
reconn_interval(false) -> false.
connect(Options) ->
mysql:start_link(Options).

View File

@ -56,7 +56,6 @@
-type state() ::
#{
poolname := atom(),
auto_reconnect := boolean(),
prepare_sql := prepares(),
params_tokens := params_tokens(),
prepare_statement := epgsql:statement()
@ -87,8 +86,6 @@ on_start(
server := Server,
database := DB,
username := User,
password := Password,
auto_reconnect := AutoReconn,
pool_size := PoolSize,
ssl := SSL
} = Config
@ -97,7 +94,7 @@ on_start(
?SLOG(info, #{
msg => "starting_postgresql_connector",
connector => InstId,
config => Config
config => emqx_misc:redact(Config)
}),
SslOpts =
case maps:get(enable, SSL) of
@ -113,14 +110,14 @@ on_start(
{host, Host},
{port, Port},
{username, User},
{password, emqx_secret:wrap(Password)},
{password, emqx_secret:wrap(maps:get(password, Config, ""))},
{database, DB},
{auto_reconnect, reconn_interval(AutoReconn)},
{auto_reconnect, ?AUTO_RECONNECT_INTERVAL},
{pool_size, PoolSize}
],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
Prepares = parse_prepare_sql(Config),
InitState = #{poolname => PoolName, auto_reconnect => AutoReconn, prepare_statement => #{}},
InitState = #{poolname => PoolName, prepare_statement => #{}},
State = maps:merge(InitState, Prepares),
case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Options ++ SslOpts) of
ok ->
@ -247,7 +244,7 @@ on_sql_query(InstId, PoolName, Type, NameOrSQL, Data) ->
end,
Result.
on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State) ->
on_get_status(_InstId, #{poolname := Pool} = State) ->
case emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1) of
true ->
case do_check_prepares(State) of
@ -258,10 +255,10 @@ on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn} = State
{connected, NState};
false ->
%% do not log error, it is logged in prepare_sql_to_conn
conn_status(AutoReconn)
connecting
end;
false ->
conn_status(AutoReconn)
connecting
end.
do_get_status(Conn) ->
@ -280,11 +277,6 @@ do_check_prepares(State = #{poolname := PoolName, prepare_sql := {error, Prepare
end.
%% ===================================================================
conn_status(_AutoReconn = true) -> connecting;
conn_status(_AutoReconn = false) -> disconnected.
reconn_interval(true) -> 15;
reconn_interval(false) -> false.
connect(Opts) ->
Host = proplists:get_value(host, Opts),

View File

@ -64,7 +64,7 @@ fields(single) ->
{redis_type, #{
type => single,
default => single,
required => true,
required => false,
desc => ?DESC("single")
}}
] ++
@ -76,7 +76,7 @@ fields(cluster) ->
{redis_type, #{
type => cluster,
default => cluster,
required => true,
required => false,
desc => ?DESC("cluster")
}}
] ++
@ -88,7 +88,7 @@ fields(sentinel) ->
{redis_type, #{
type => sentinel,
default => sentinel,
required => true,
required => false,
desc => ?DESC("sentinel")
}},
{sentinel, #{
@ -117,14 +117,13 @@ on_start(
#{
redis_type := Type,
pool_size := PoolSize,
auto_reconnect := AutoReconn,
ssl := SSL
} = Config
) ->
?SLOG(info, #{
msg => "starting_redis_connector",
connector => InstId,
config => Config
config => emqx_misc:redact(Config)
}),
ConfKey =
case Type of
@ -142,7 +141,7 @@ on_start(
[
{pool_size, PoolSize},
{password, maps:get(password, Config, "")},
{auto_reconnect, reconn_interval(AutoReconn)}
{auto_reconnect, ?AUTO_RECONNECT_INTERVAL}
] ++ Database ++ Servers,
Options =
case maps:get(enable, SSL) of
@ -155,7 +154,7 @@ on_start(
[{ssl, false}]
end ++ [{sentinel, maps:get(sentinel, Config, undefined)}],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
State = #{poolname => PoolName, type => Type, auto_reconnect => AutoReconn},
State = #{poolname => PoolName, type => Type},
case Type of
cluster ->
case eredis_cluster:start_pool(PoolName, Opts ++ [{options, Options}]) of
@ -229,18 +228,18 @@ eredis_cluster_workers_exist_and_are_connected(Workers) ->
Workers
).
on_get_status(_InstId, #{type := cluster, poolname := PoolName, auto_reconnect := AutoReconn}) ->
on_get_status(_InstId, #{type := cluster, poolname := PoolName}) ->
case eredis_cluster:pool_exists(PoolName) of
true ->
Workers = extract_eredis_cluster_workers(PoolName),
Health = eredis_cluster_workers_exist_and_are_connected(Workers),
status_result(Health, AutoReconn);
status_result(Health);
false ->
disconnected
end;
on_get_status(_InstId, #{poolname := Pool, auto_reconnect := AutoReconn}) ->
on_get_status(_InstId, #{poolname := Pool}) ->
Health = emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1),
status_result(Health, AutoReconn).
status_result(Health).
do_get_status(Conn) ->
case eredis:q(Conn, ["PING"]) of
@ -248,12 +247,8 @@ do_get_status(Conn) ->
_ -> false
end.
%% Combine pool health with the auto_reconnect flag into a connector
%% status: a healthy pool is 'connected'; an unhealthy one keeps
%% retrying ('connecting') only while auto-reconnect is enabled,
%% otherwise it is reported 'disconnected'.
status_result(true, _AutoReconn) ->
    connected;
status_result(false, AutoReconn) ->
    case AutoReconn of
        true -> connecting;
        false -> disconnected
    end.
%% eredis auto_reconnect option: retry every 15 seconds when the flag
%% is set, 'false' disables reconnection attempts.
reconn_interval(Enable) when is_boolean(Enable) ->
    if
        Enable -> 15;
        true -> false
    end.
%% Map pool health to a connector status. An unhealthy pool reports
%% 'connecting' so the resource manager keeps health-checking it.
status_result(Healthy) ->
    case Healthy of
        true -> connected;
        false -> connecting
    end.
do_cmd(PoolName, cluster, {cmd, Command}) ->
eredis_cluster:q(PoolName, Command);

View File

@ -106,4 +106,5 @@ password(_) -> undefined.
%% hocon schema accessor for the `auto_reconnect` connector option.
%% Deprecated since v5.0.15: connectors in this file now use a fixed
%% reconnect interval (?AUTO_RECONNECT_INTERVAL), so the flag is
%% presumably ignored at runtime — confirm against the connector code.
auto_reconnect(type) -> boolean();
auto_reconnect(desc) -> ?DESC("auto_reconnect");
auto_reconnect(default) -> true;
auto_reconnect(deprecated) -> {since, "v5.0.15"};
auto_reconnect(_) -> undefined.

View File

@ -75,9 +75,9 @@ wait_for_redis(Checks) ->
wait_for_redis(Checks - 1)
end.
% %%------------------------------------------------------------------------------
% %% Testcases
% %%------------------------------------------------------------------------------
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
t_single_lifecycle(_Config) ->
perform_lifecycle_check(

View File

@ -199,23 +199,12 @@ its own from which a browser should permit loading resources."""
}
bootstrap_users_file {
desc {
en: "Initialize users file."
zh: "初始化用户文件"
en: "Deprecated, use api_key.bootstrap_file"
zh: "已废弃,请使用 api_key.bootstrap_file"
}
label {
en: """Is used to add an administrative user to Dashboard when emqx is first launched,
the format is:
```
username1:password1
username2:password2
```
"""
zh: """用于在首次启动 emqx 时,为 Dashboard 添加管理用户,其格式为:
```
username1:password1
username2:password2
```
"""
en: """Deprecated"""
zh: """已废弃"""
}
}
}

View File

@ -2,7 +2,7 @@
{application, emqx_dashboard, [
{description, "EMQX Web Dashboard"},
% strict semver, bump manually!
{vsn, "5.0.11"},
{vsn, "5.0.12"},
{modules, []},
{registered, [emqx_dashboard_sup]},
{applications, [kernel, stdlib, mnesia, minirest, emqx]},

View File

@ -65,8 +65,12 @@ start_listeners(Listeners) ->
components => #{
schemas => #{},
'securitySchemes' => #{
'basicAuth' => #{type => http, scheme => basic},
'bearerAuth' => #{type => http, scheme => bearer}
'basicAuth' => #{
type => http,
scheme => basic,
description =>
<<"Authorize with [API Keys](https://www.emqx.io/docs/en/v5.0/admin/api.html#api-keys)">>
}
}
}
},
@ -215,28 +219,7 @@ listener_name(Protocol) ->
authorize(Req) ->
case cowboy_req:parse_header(<<"authorization">>, Req) of
{basic, Username, Password} ->
case emqx_dashboard_admin:check(Username, Password) of
ok ->
ok;
{error, <<"username_not_found">>} ->
Path = cowboy_req:path(Req),
case emqx_mgmt_auth:authorize(Path, Username, Password) of
ok ->
ok;
{error, <<"not_allowed">>} ->
return_unauthorized(
?WRONG_USERNAME_OR_PWD,
<<"Check username/password">>
);
{error, _} ->
return_unauthorized(
?WRONG_USERNAME_OR_PWD_OR_API_KEY_OR_API_SECRET,
<<"Check username/password or api_key/api_secret">>
)
end;
{error, _} ->
return_unauthorized(?WRONG_USERNAME_OR_PWD, <<"Check username/password">>)
end;
api_key_authorize(Req, Username, Password);
{bearer, Token} ->
case emqx_dashboard_admin:verify_token(Token) of
ok ->
@ -269,3 +252,20 @@ i18n_file() ->
listeners() ->
emqx_conf:get([dashboard, listeners], []).
%% Authorize an HTTP request using an API key/secret pair against the
%% request path. Every failure maps to the same 401 error code
%% (?BAD_API_KEY_OR_SECRET); only the human-readable hint distinguishes
%% "key known but not allowed" from other failures.
api_key_authorize(Req, Key, Secret) ->
    Path = cowboy_req:path(Req),
    case emqx_mgmt_auth:authorize(Path, Key, Secret) of
        ok ->
            ok;
        {error, Reason} ->
            Hint =
                case Reason of
                    <<"not_allowed">> -> <<"Not allowed, Check api_key/api_secret">>;
                    _ -> <<"Check api_key/api_secret">>
                end,
            return_unauthorized(?BAD_API_KEY_OR_SECRET, Hint)
    end.

View File

@ -51,8 +51,7 @@
-export([
add_default_user/0,
default_username/0,
add_bootstrap_users/0
default_username/0
]).
-type emqx_admin() :: #?ADMIN{}.
@ -85,21 +84,6 @@ mnesia(boot) ->
%% Seed the default dashboard admin account using the configured
%% default_username/default_password values (read via binenv/1).
add_default_user() ->
    add_default_user(binenv(default_username), binenv(default_password)).
-spec add_bootstrap_users() -> ok | {error, _}.
add_bootstrap_users() ->
case emqx:get_config([dashboard, bootstrap_users_file], undefined) of
undefined ->
ok;
File ->
case mnesia:table_info(?ADMIN, size) of
0 ->
?SLOG(debug, #{msg => "Add dashboard bootstrap users", file => File}),
add_bootstrap_users(File);
_ ->
ok
end
end.
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
@ -311,44 +295,3 @@ add_default_user(Username, Password) ->
[] -> add_user(Username, Password, <<"administrator">>);
_ -> {ok, default_user_exists}
end.
%% Load dashboard admin users from a bootstrap file, one per line in
%% "username:password" form, and add each via load_bootstrap_user/2.
%% Returns ok on success; {error, Reason} if the file cannot be opened
%% or loading fails part-way.
add_bootstrap_users(File) ->
    case file:open(File, [read]) of
        {ok, Dev} ->
            %% NOTE(review): in an Erlang literal "\." is just ".", so this
            %% compiles to the regex (.+):(.+$); with [ungreedy] the first
            %% ':' splits the line — presumably intentional, confirm.
            {ok, MP} = re:compile(<<"(\.+):(\.+$)">>, [ungreedy]),
            try
                load_bootstrap_user(Dev, MP)
            catch
                %% Convert any crash while reading/adding users into an
                %% {error, _} return for the caller.
                Type:Reason ->
                    {error, {Type, Reason}}
            after
                %% Always release the file handle, even on crash.
                file:close(Dev)
            end;
        {error, Reason} = Error ->
            ?SLOG(error, #{
                msg => "failed to open the dashboard bootstrap users file",
                file => File,
                reason => Reason
            }),
            Error
    end.
%% Recursively read the bootstrap file line by line, adding each
%% "username:password" match as a dashboard user (tag
%% ?BOOTSTRAP_USER_TAG). Lines that do not match the compiled pattern
%% MP are skipped silently. Stops at the first error from add_user/3 or
%% file:read_line/1 and returns it; returns ok at eof.
load_bootstrap_user(Dev, MP) ->
    case file:read_line(Dev) of
        {ok, Line} ->
            case re:run(Line, MP, [global, {capture, all_but_first, binary}]) of
                {match, [[Username, Password]]} ->
                    case add_user(Username, Password, ?BOOTSTRAP_USER_TAG) of
                        {ok, _} ->
                            load_bootstrap_user(Dev, MP);
                        Error ->
                            Error
                    end;
                _ ->
                    %% Malformed line: ignore it and continue with the next.
                    load_bootstrap_user(Dev, MP)
            end;
        eof ->
            ok;
        Error ->
            Error
    end.

View File

@ -47,7 +47,7 @@
-define(EMPTY(V), (V == undefined orelse V == <<>>)).
-define(WRONG_USERNAME_OR_PWD, 'WRONG_USERNAME_OR_PWD').
-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
-define(WRONG_TOKEN_OR_USERNAME, 'WRONG_TOKEN_OR_USERNAME').
-define(USER_NOT_FOUND, 'USER_NOT_FOUND').
-define(ERROR_PWD_NOT_MATCH, 'ERROR_PWD_NOT_MATCH').
@ -164,7 +164,7 @@ schema("/users/:username/change_pwd") ->
}.
response_schema(401) ->
emqx_dashboard_swagger:error_codes([?WRONG_USERNAME_OR_PWD], ?DESC(login_failed401));
emqx_dashboard_swagger:error_codes([?BAD_USERNAME_OR_PWD], ?DESC(login_failed401));
response_schema(404) ->
emqx_dashboard_swagger:error_codes([?USER_NOT_FOUND], ?DESC(users_api404)).
@ -223,7 +223,7 @@ login(post, #{body := Params}) ->
}};
{error, R} ->
?SLOG(info, #{msg => "Dashboard login failed", username => Username, reason => R}),
{401, ?WRONG_USERNAME_OR_PWD, <<"Auth failed">>}
{401, ?BAD_USERNAME_OR_PWD, <<"Auth failed">>}
end.
logout(_, #{

View File

@ -31,13 +31,8 @@ start(_StartType, _StartArgs) ->
case emqx_dashboard:start_listeners() of
ok ->
emqx_dashboard_cli:load(),
case emqx_dashboard_admin:add_bootstrap_users() of
ok ->
{ok, _} = emqx_dashboard_admin:add_default_user(),
{ok, Sup};
Error ->
Error
end;
{error, Reason} ->
{error, Reason}
end.

View File

@ -56,7 +56,15 @@ fields("dashboard") ->
{cors, fun cors/1},
{i18n_lang, fun i18n_lang/1},
{bootstrap_users_file,
?HOCON(binary(), #{desc => ?DESC(bootstrap_users_file), required => false})}
?HOCON(
binary(),
#{
desc => ?DESC(bootstrap_users_file),
required => false,
default => <<>>
%% deprecated => {since, "5.1.0"}
}
)}
];
fields("listeners") ->
[

View File

@ -139,14 +139,20 @@ fields(limit) ->
[{limit, hoconsc:mk(range(1, ?MAX_ROW_LIMIT), Meta)}];
fields(count) ->
Desc = <<
"Total number of records counted.<br/>"
"Note: this field is <code>0</code> when the queryed table is empty, "
"or if the query can not be optimized and requires a full table scan."
"Total number of records matching the query.<br/>"
"Note: this field is present only if the query can be optimized and does "
"not require a full table scan."
>>,
Meta = #{desc => Desc, required => false},
[{count, hoconsc:mk(non_neg_integer(), Meta)}];
fields(hasnext) ->
Desc = <<
"Flag indicating whether there are more results available on next pages."
>>,
Meta = #{desc => Desc, required => true},
[{count, hoconsc:mk(non_neg_integer(), Meta)}];
[{hasnext, hoconsc:mk(boolean(), Meta)}];
fields(meta) ->
fields(page) ++ fields(limit) ++ fields(count).
fields(page) ++ fields(limit) ++ fields(count) ++ fields(hasnext).
-spec schema_with_example(hocon_schema:type(), term()) -> hocon_schema:field_schema_map().
schema_with_example(Type, Example) ->
@ -623,7 +629,7 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) ->
{[Schema | Acc], SubRefs ++ RefsAcc}
end,
{[], []},
Types
hoconsc:union_members(Types)
),
{#{<<"oneOf">> => OneOf}, Refs};
hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
@ -705,9 +711,11 @@ typename_to_spec("service_account_json()", _Mod) ->
typename_to_spec("#{" ++ _, Mod) ->
typename_to_spec("map()", Mod);
typename_to_spec("qos()", _Mod) ->
#{type => string, enum => [0, 1, 2]};
#{type => integer, minimum => 0, maximum => 2, example => 0};
typename_to_spec("{binary(), binary()}", _Mod) ->
#{type => object, example => #{}};
typename_to_spec("{string(), string()}", _Mod) ->
#{type => object, example => #{}};
typename_to_spec("comma_separated_list()", _Mod) ->
#{type => string, example => <<"item1,item2">>};
typename_to_spec("comma_separated_binary()", _Mod) ->

View File

@ -114,9 +114,9 @@ t_admin_delete_self_failed(_) ->
?assertEqual(1, length(Admins)),
Header = auth_header_(<<"username1">>, <<"password">>),
{error, {_, 400, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header),
Token = erlang:iolist_to_binary(["Basic ", base64:encode("username1:password")]),
Token = ["Basic ", base64:encode("username1:password")],
Header2 = {"Authorization", Token},
{error, {_, 400, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header2),
{error, {_, 401, _}} = request_dashboard(delete, api_path(["users", "username1"]), Header2),
mnesia:clear_table(?ADMIN).
t_rest_api(_Config) ->

View File

@ -25,43 +25,24 @@
-define(SERVER, "http://127.0.0.1:18083/api/v5").
-import(emqx_mgmt_api_test_util, [request/2]).
%% Common Test callback: testcase list for this suite, delegated to
%% emqx_common_test_helpers (presumably discovers the t_* functions).
all() ->
    emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
mria:start(),
application:load(emqx_dashboard),
emqx_common_test_helpers:start_apps([emqx_conf, emqx_dashboard], fun set_special_configs/1),
emqx_mgmt_api_test_util:init_suite([emqx_conf]),
Config.
%% Per-application configuration hook for suite setup: install the
%% default dashboard configuration before emqx_dashboard starts;
%% no-op for every other application.
set_special_configs(emqx_dashboard) ->
    emqx_dashboard_api_test_helpers:set_default_config(),
    ok;
set_special_configs(_) ->
    ok.
end_per_suite(Config) ->
end_suite(),
Config.
end_suite() ->
application:unload(emqx_management),
emqx_common_test_helpers:stop_apps([emqx_dashboard]).
emqx_mgmt_api_test_util:end_suite([emqx_conf]).
t_bad_api_path(_) ->
Url = ?SERVER ++ "/for/test/some/path/not/exist",
{error, {"HTTP/1.1", 404, "Not Found"}} = request(Url),
{ok, 404, _} = request(get, Url),
ok.
%% Issue a plain GET to Url and decode the JSON body of a 2xx response.
%% Returns {ok, DecodedBody} on 2xx, {error, StatusLine} on any other
%% HTTP status, and {error, Reason} on a transport-level failure.
request(Url) ->
    case httpc:request(get, {Url, []}, [], []) of
        {ok, {{"HTTP/1.1", Code, _}, _Headers, Body}} when
            Code >= 200 andalso Code =< 299
        ->
            {ok, emqx_json:decode(Body, [return_maps])};
        {ok, {StatusLine, _Headers, _Body}} ->
            {error, StatusLine};
        {error, Reason} ->
            {error, Reason}
    end.

View File

@ -19,6 +19,8 @@
-compile(nowarn_export_all).
-compile(export_all).
-import(emqx_dashboard_SUITE, [auth_header_/0]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("emqx/include/emqx.hrl").
@ -153,10 +155,6 @@ do_request_api(Method, Request) ->
{error, Reason}
end.
%% HTTP Basic Authorization header for the default dashboard
%% admin credentials ("admin"/"public").
auth_header_() ->
    {"Authorization", "Basic " ++ base64:encode_to_string("admin:public")}.
restart_monitor() ->
OldMonitor = erlang:whereis(emqx_dashboard_monitor),
erlang:exit(OldMonitor, kill),

View File

@ -112,7 +112,7 @@ t_in_query(_Config) ->
description => <<"QOS">>,
in => query,
name => qos,
schema => #{enum => [0, 1, 2], type => string}
schema => #{minimum => 0, maximum => 2, type => integer, example => 0}
}
],
validate("/test/in/query", Expect),

View File

@ -347,13 +347,7 @@ do_request_api(Method, Request) ->
end.
auth_header_() ->
AppId = <<"admin">>,
AppSecret = <<"public">>,
auth_header_(binary_to_list(AppId), binary_to_list(AppSecret)).
%% Build an HTTP Basic Authorization header from User/Pass strings.
auth_header_(User, Pass) ->
    Plain = User ++ ":" ++ Pass,
    {"Authorization", "Basic " ++ base64:encode_to_string(Plain)}.
emqx_mgmt_api_test_util:auth_header_().
api_path(Parts) ->
?HOST ++ filename:join([?BASE_PATH, ?API_VERSION] ++ Parts).

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_gateway, [
{description, "The Gateway management application"},
{vsn, "0.1.10"},
{vsn, "0.1.11"},
{registered, []},
{mod, {emqx_gateway_app, []}},
{applications, [kernel, stdlib, grpc, emqx, emqx_authn]},

Some files were not shown because too many files have changed in this diff Show More