Merge branch 'master' into EMQX-871-872
This commit is contained in:
commit
69673613d6
|
@ -36,9 +36,9 @@ emqx_test(){
|
|||
"zip")
|
||||
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.zip)
|
||||
unzip -q "${PACKAGE_PATH}/${packagename}"
|
||||
export EMQX_ZONE__EXTERNAL__SERVER_KEEPALIVE=60 \
|
||||
export EMQX_ZONES__DEFAULT__MQTT__SERVER_KEEPALIVE=60 \
|
||||
EMQX_MQTT__MAX_TOPIC_ALIAS=10
|
||||
[[ $(arch) == *arm* || $(arch) == aarch64 ]] && export EMQX_ZONES__DEFAULT__LISTENERS__MQTT_QUIC__ENABLED=false
|
||||
[[ $(arch) == *arm* || $(arch) == aarch64 ]] && export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
|
||||
# sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins
|
||||
|
||||
echo "running ${packagename} start"
|
||||
|
@ -48,7 +48,7 @@ emqx_test(){
|
|||
exit 1
|
||||
fi
|
||||
IDLE_TIME=0
|
||||
while ! curl http://localhost:8081/api/v5/status >/dev/null 2>&1; do
|
||||
while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
|
||||
if [ $IDLE_TIME -gt 10 ]
|
||||
then
|
||||
echo "emqx running error"
|
||||
|
@ -91,6 +91,12 @@ emqx_test(){
|
|||
;;
|
||||
"rpm")
|
||||
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.rpm)
|
||||
|
||||
if [[ "${ARCH}" == "amd64" && $(rpm -E '%{rhel}') == 7 ]] ;
|
||||
then
|
||||
# EMQX OTP requires openssl11 to have TLS1.3 support
|
||||
yum install -y openssl11;
|
||||
fi
|
||||
rpm -ivh "${PACKAGE_PATH}/${packagename}"
|
||||
if ! rpm -q emqx | grep -q emqx; then
|
||||
echo "package install error"
|
||||
|
@ -119,15 +125,14 @@ run_test(){
|
|||
if [ -f "$emqx_env_vars" ];
|
||||
then
|
||||
tee -a "$emqx_env_vars" <<EOF
|
||||
export EMQX_ZONE__EXTERNAL__SERVER_KEEPALIVE=60
|
||||
export EMQX_ZONES__DEFAULT__MQTT__SERVER_KEEPALIVE=60
|
||||
export EMQX_MQTT__MAX_TOPIC_ALIAS=10
|
||||
export EMQX_LOG__CONSOLE_HANDLER__LEVEL=debug
|
||||
export EMQX_LOG__FILE_HANDLERS__EMQX_LOG__LEVEL=debug
|
||||
export EMQX_LOG__PRIMARY_LEVEL=debug
|
||||
export EMQX_LOG__FILE_HANDLERS__DEFAULT__LEVEL=debug
|
||||
EOF
|
||||
## for ARM, due to CI env issue, skip start of quic listener for the moment
|
||||
[[ $(arch) == *arm* || $(arch) == aarch64 ]] && tee -a "$emqx_env_vars" <<EOF
|
||||
export EMQX_ZONES__DEFAULT__LISTENERS__MQTT_QUIC__ENABLED=false
|
||||
export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
|
||||
EOF
|
||||
else
|
||||
echo "Error: cannot locate emqx_vars"
|
||||
|
@ -140,7 +145,7 @@ EOF
|
|||
exit 1
|
||||
fi
|
||||
IDLE_TIME=0
|
||||
while ! curl http://localhost:8081/api/v5/status >/dev/null 2>&1; do
|
||||
while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
|
||||
if [ $IDLE_TIME -gt 10 ]
|
||||
then
|
||||
echo "emqx running error"
|
||||
|
@ -169,7 +174,7 @@ EOF
|
|||
exit 1
|
||||
fi
|
||||
IDLE_TIME=0
|
||||
while ! curl http://localhost:8081/api/v5/status >/dev/null 2>&1; do
|
||||
while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
|
||||
if [ $IDLE_TIME -gt 10 ]
|
||||
then
|
||||
echo "emqx service error"
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
EMQX_NAME=emqx
|
||||
EMQX_CLUSTER__DISCOVERY_STRATEGY=static
|
||||
EMQX_CLUSTER__STATIC__SEEDS="[emqx@node1.emqx.io, emqx@node2.emqx.io]"
|
||||
EMQX_ZONES__DEFAULT__LISTENERS__MQTT_TCP__PROXY_PROTOCOL=true
|
||||
EMQX_ZONES__DEFAULT__LISTENERS__MQTT_WS__PROXY_PROTOCOL=true
|
||||
EMQX_LISTENERS__TCP__DEFAULT__PROXY_PROTOCOL=true
|
||||
EMQX_LISTENERS__WS__DEFAULT__PROXY_PROTOCOL=true
|
||||
EMQX_LOG__CONSOLE_HANDLER__ENABLE=true
|
||||
EMQX_LOG__CONSOLE_HANDLER__LEVEL=debug
|
||||
EMQX_LOG__PRIMARY_LEVEL=debug
|
||||
|
|
|
@ -33,7 +33,7 @@ defaults
|
|||
frontend emqx_mgmt
|
||||
mode tcp
|
||||
option tcplog
|
||||
bind *:8081
|
||||
bind *:18083
|
||||
default_backend emqx_mgmt_back
|
||||
|
||||
frontend emqx_dashboard
|
||||
|
@ -45,8 +45,8 @@ frontend emqx_dashboard
|
|||
backend emqx_mgmt_back
|
||||
mode http
|
||||
# balance static-rr
|
||||
server emqx-1 node1.emqx.io:8081
|
||||
server emqx-2 node2.emqx.io:8081
|
||||
server emqx-1 node1.emqx.io:18083
|
||||
server emqx-2 node2.emqx.io:18083
|
||||
|
||||
backend emqx_dashboard_back
|
||||
mode http
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
name: Bug Report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: Support
|
||||
labels: "Support, needs-triage"
|
||||
|
||||
---
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
name: Feature Request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: Feature
|
||||
labels: "Feature, needs-triage"
|
||||
|
||||
---
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
name: Support Needed
|
||||
about: Asking a question about usages, docs or anything you're insterested in
|
||||
title: ''
|
||||
labels: Support
|
||||
labels: "Support, needs-triage"
|
||||
|
||||
---
|
||||
|
||||
|
|
|
@ -20,8 +20,8 @@ jobs:
|
|||
container: ${{ matrix.container }}
|
||||
|
||||
outputs:
|
||||
profiles: ${{ steps.set_profile.outputs.profiles}}
|
||||
old_vsns: ${{ steps.set_profile.outputs.old_vsns}}
|
||||
profiles: ${{ steps.set_profile.outputs.profiles }}
|
||||
old_vsns: ${{ steps.set_profile.outputs.old_vsns }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
@ -44,6 +44,11 @@ jobs:
|
|||
echo "::set-output name=old_vsns::$old_vsns"
|
||||
echo "::set-output name=profiles::[\"emqx\", \"emqx-edge\"]"
|
||||
fi
|
||||
- name: get otp version
|
||||
id: get_otp_version
|
||||
run: |
|
||||
otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)"
|
||||
echo "::set-output name=otp::$otp"
|
||||
- name: set get token
|
||||
if: endsWith(github.repository, 'enterprise')
|
||||
run: |
|
||||
|
@ -54,12 +59,13 @@ jobs:
|
|||
run: |
|
||||
make ensure-rebar3
|
||||
./rebar3 as default get-deps
|
||||
rm -rf rebar.lock
|
||||
- name: gen zip file
|
||||
run: zip -ryq source.zip source/* source/.[^.]*
|
||||
run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]*
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: source
|
||||
path: source.zip
|
||||
name: source-${{ steps.get_otp_version.outputs.otp }}
|
||||
path: source-${{ steps.get_otp_version.outputs.otp }}.zip
|
||||
|
||||
windows:
|
||||
runs-on: windows-2019
|
||||
|
@ -77,19 +83,21 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: source
|
||||
name: source-23.2.7.2-emqx-2
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: Expand-Archive -Path source.zip -DestinationPath ./
|
||||
run: Expand-Archive -Path source-23.2.7.2-emqx-2.zip -DestinationPath ./
|
||||
- uses: ilammy/msvc-dev-cmd@v1
|
||||
- uses: gleam-lang/setup-erlang@v1.1.0
|
||||
- uses: gleam-lang/setup-erlang@v1.1.2
|
||||
id: install_erlang
|
||||
## gleam-lang/setup-erlang does not yet support the installation of otp24 on windows
|
||||
with:
|
||||
otp-version: 24.0.5
|
||||
otp-version: 23.2
|
||||
- name: build
|
||||
env:
|
||||
PYTHON: python
|
||||
DIAGNOSTIC: 1
|
||||
working-directory: source
|
||||
run: |
|
||||
$env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH"
|
||||
|
||||
|
@ -101,9 +109,9 @@ jobs:
|
|||
else {
|
||||
$pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip"
|
||||
}
|
||||
cd source
|
||||
## We do not build/release bcrypt for windows package
|
||||
## We do not build/release bcrypt and quic for windows package
|
||||
Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/
|
||||
Remove-Item -Recurse -Force -Path _build/default/lib/quicer/
|
||||
if (Test-Path rebar.lock) {
|
||||
Remove-Item -Force -Path rebar.lock
|
||||
}
|
||||
|
@ -118,8 +126,8 @@ jobs:
|
|||
Get-FileHash -Path "_packages/${{ matrix.profile }}/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/${{ matrix.profile }}/$pkg_name.sha256
|
||||
- name: run emqx
|
||||
timeout-minutes: 1
|
||||
working-directory: source
|
||||
run: |
|
||||
cd source
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start
|
||||
Start-Sleep -s 5
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop
|
||||
|
@ -128,7 +136,7 @@ jobs:
|
|||
- uses: actions/upload-artifact@v1
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
name: ${{ matrix.profile }}
|
||||
name: ${{ matrix.profile }}-23.2.7.2-emqx-2
|
||||
path: source/_packages/${{ matrix.profile }}/.
|
||||
|
||||
mac:
|
||||
|
@ -140,7 +148,7 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
|
||||
erl_otp:
|
||||
otp:
|
||||
- 24.0.5-emqx-1
|
||||
exclude:
|
||||
- profile: emqx-edge
|
||||
|
@ -148,10 +156,10 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: source
|
||||
name: source-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
run: unzip -q source-${{ matrix.otp }}.zip
|
||||
- name: prepare
|
||||
run: |
|
||||
brew update
|
||||
|
@ -162,7 +170,7 @@ jobs:
|
|||
id: cache
|
||||
with:
|
||||
path: ~/.kerl
|
||||
key: erl${{ matrix.erl_otp }}-macos10.15
|
||||
key: erl${{ matrix.otp }}-macos10.15
|
||||
- name: build erlang
|
||||
if: steps.cache.outputs.cache-hit != 'true'
|
||||
timeout-minutes: 60
|
||||
|
@ -171,25 +179,25 @@ jobs:
|
|||
OTP_GITHUB_URL: https://github.com/emqx/otp
|
||||
run: |
|
||||
kerl update releases
|
||||
kerl build ${{ matrix.erl_otp }}
|
||||
kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }}
|
||||
kerl build ${{ matrix.otp }}
|
||||
kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
|
||||
- name: build
|
||||
working-directory: source
|
||||
run: |
|
||||
. $HOME/.kerl/${{ matrix.erl_otp }}/activate
|
||||
cd source
|
||||
. $HOME/.kerl/${{ matrix.otp }}/activate
|
||||
make ensure-rebar3
|
||||
sudo cp rebar3 /usr/local/bin/rebar3
|
||||
make ${{ matrix.profile }}-zip
|
||||
- name: test
|
||||
working-directory: source
|
||||
run: |
|
||||
cd source
|
||||
pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip)
|
||||
unzip -q _packages/${{ matrix.profile }}/$pkg_name
|
||||
# gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
|
||||
./emqx/bin/emqx start || cat emqx/log/erlang.log.1
|
||||
ready='no'
|
||||
for i in {1..10}; do
|
||||
if curl -fs 127.0.0.1:8081/api/v5/status > /dev/null; then
|
||||
if curl -fs 127.0.0.1:18083/api/v5/status > /dev/null; then
|
||||
ready='yes'
|
||||
break
|
||||
fi
|
||||
|
@ -207,7 +215,7 @@ jobs:
|
|||
- uses: actions/upload-artifact@v1
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
name: ${{ matrix.profile }}
|
||||
name: ${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: source/_packages/${{ matrix.profile }}/.
|
||||
|
||||
linux:
|
||||
|
@ -219,12 +227,6 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
|
||||
erl_otp:
|
||||
- 23.2.7.2-emqx-2
|
||||
- 24.0.5-emqx-1
|
||||
arch:
|
||||
- amd64
|
||||
- arm64
|
||||
os:
|
||||
- ubuntu20.04
|
||||
- ubuntu18.04
|
||||
|
@ -237,6 +239,12 @@ jobs:
|
|||
- centos6
|
||||
- raspbian10
|
||||
# - raspbian9
|
||||
arch:
|
||||
- amd64
|
||||
- arm64
|
||||
otp:
|
||||
- 23.2.7.2-emqx-2
|
||||
- 24.0.5-emqx-1
|
||||
exclude:
|
||||
- os: centos6
|
||||
arch: arm64
|
||||
|
@ -265,10 +273,10 @@ jobs:
|
|||
platforms: all
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: source
|
||||
name: source-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
run: unzip -q source-${{ matrix.otp }}.zip
|
||||
- name: downloads old emqx zip packages
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
|
@ -298,7 +306,7 @@ jobs:
|
|||
done
|
||||
- name: build emqx packages
|
||||
env:
|
||||
ERL_OTP: erl${{ matrix.erl_otp }}
|
||||
ERL_OTP: erl${{ matrix.otp }}
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
ARCH: ${{ matrix.arch }}
|
||||
SYSTEM: ${{ matrix.os }}
|
||||
|
@ -327,7 +335,7 @@ jobs:
|
|||
- uses: actions/upload-artifact@v1
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
with:
|
||||
name: ${{ matrix.profile }}
|
||||
name: ${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: source/_packages/${{ matrix.profile }}/.
|
||||
|
||||
docker:
|
||||
|
@ -338,67 +346,74 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
|
||||
erl_otp:
|
||||
otp:
|
||||
- 24.0.5-emqx-1
|
||||
|
||||
steps:
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: source
|
||||
name: source-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
run: unzip -q source-${{ matrix.otp }}.zip
|
||||
- name: get version
|
||||
id: version
|
||||
working-directory: source
|
||||
run: echo "::set-output name=version::$(./pkg-vsn.sh)"
|
||||
- uses: docker/setup-buildx-action@v1
|
||||
- uses: docker/setup-qemu-action@v1
|
||||
with:
|
||||
image: tonistiigi/binfmt:latest
|
||||
platforms: all
|
||||
- name: build emqx docker image
|
||||
- uses: docker/build-push-action@v2
|
||||
if: github.event_name != 'release'
|
||||
env:
|
||||
ERL_OTP: erl${{ matrix.erl_otp }}
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
working-directory: source
|
||||
run: |
|
||||
PKG_VSN="$(./pkg-vsn.sh)"
|
||||
docker buildx build --no-cache \
|
||||
--platform=linux/amd64,linux/arm64 \
|
||||
--build-arg PKG_VSN=$PKG_VSN \
|
||||
--build-arg BUILD_FROM=emqx/build-env:$ERL_OTP-alpine \
|
||||
--build-arg RUN_FROM=alpine:3.14 \
|
||||
--build-arg EMQX_NAME=$PROFILE \
|
||||
--tag emqx/$PROFILE:$PKG_VSN \
|
||||
-f deploy/docker/Dockerfile .
|
||||
with:
|
||||
push: false
|
||||
pull: true
|
||||
no-cache: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
tags: emqx/${{ matrix.profile }}:${{ steps.version.outputs.version }}
|
||||
build-args: |
|
||||
PKG_VSN=${{ steps.version.outputs.version }}
|
||||
BUILD_FROM=emqx/build-env:erl${{ matrix.otp }}-alpine
|
||||
RUN_FROM=alpine:3.14
|
||||
EMQX_NAME=${{ matrix.profile }}
|
||||
file: source/deploy/docker/Dockerfile
|
||||
context: source
|
||||
- uses: docker/login-action@v1
|
||||
if: github.event_name == 'release'
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_USER }}
|
||||
password: ${{ secrets.DOCKER_HUB_TOKEN }}
|
||||
- name: build emqx docker image
|
||||
- uses: docker/build-push-action@v2
|
||||
if: github.event_name == 'release'
|
||||
env:
|
||||
ERL_OTP: erl${{ matrix.erl_otp }}
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
working-directory: source
|
||||
run: |
|
||||
PKG_VSN="$(./pkg-vsn.sh)"
|
||||
docker buildx build --no-cache \
|
||||
--platform=linux/amd64,linux/arm64 \
|
||||
--build-arg PKG_VSN=$PKG_VSN \
|
||||
--build-arg BUILD_FROM=emqx/build-env:$ERL_OTP-alpine \
|
||||
--build-arg RUN_FROM=alpine:3.14 \
|
||||
--build-arg EMQX_NAME=$PROFILE \
|
||||
--tag emqx/$PROFILE:$PKG_VSN \
|
||||
-f deploy/docker/Dockerfile \
|
||||
--push .
|
||||
with:
|
||||
push: true
|
||||
pull: true
|
||||
no-cache: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
tags: emqx/${{ matrix.profile }}:${{ steps.version.outputs.version }}
|
||||
build-args: |
|
||||
PKG_VSN=${{ steps.version.outputs.version }}
|
||||
BUILD_FROM=emqx/build-env:erl${{ matrix.otp }}-alpine
|
||||
RUN_FROM=alpine:3.14
|
||||
EMQX_NAME=${{ matrix.profile }}
|
||||
file: source/deploy/docker/Dockerfile
|
||||
context: source
|
||||
|
||||
delete-artifact:
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
strategy:
|
||||
matrix:
|
||||
otp:
|
||||
- 23.2.7.2-emqx-2
|
||||
- 24.0.5-emqx-1
|
||||
needs: [prepare, mac, linux, docker]
|
||||
steps:
|
||||
- uses: geekyeggo/delete-artifact@v1
|
||||
with:
|
||||
name: source
|
||||
name: source-${{ matrix.otp }}
|
||||
|
||||
upload:
|
||||
runs-on: ubuntu-20.04
|
||||
|
@ -410,6 +425,8 @@ jobs:
|
|||
strategy:
|
||||
matrix:
|
||||
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
|
||||
otp:
|
||||
- 24.0.5-emqx-1
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
@ -420,7 +437,7 @@ jobs:
|
|||
echo 'EOF' >> $GITHUB_ENV
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: ${{ matrix.profile }}
|
||||
name: ${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: ./_packages/${{ matrix.profile }}
|
||||
- name: install dos2unix
|
||||
run: sudo apt-get update && sudo apt install -y dos2unix
|
||||
|
|
|
@ -113,7 +113,7 @@ jobs:
|
|||
./emqx/bin/emqx start || cat emqx/log/erlang.log.1
|
||||
ready='no'
|
||||
for i in {1..10}; do
|
||||
if curl -fs 127.0.0.1:8081/api/v5/status > /dev/null; then
|
||||
if curl -fs 127.0.0.1:18083/api/v5/status > /dev/null; then
|
||||
ready='yes'
|
||||
break
|
||||
fi
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
name: Sync to enterprise
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 */6 * * *'
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- main-v*
|
||||
|
||||
jobs:
|
||||
|
|
|
@ -0,0 +1,102 @@
|
|||
name: API Test Suite
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- e*
|
||||
- v*
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
container: "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: zip emqx-broker
|
||||
if: endsWith(github.repository, 'emqx')
|
||||
run: |
|
||||
make emqx-zip
|
||||
- name: zip emqx-broker
|
||||
if: endsWith(github.repository, 'enterprise')
|
||||
run: |
|
||||
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
||||
git config --global credential.helper store
|
||||
make emqx-ee-zip
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: emqx-broker
|
||||
path: _packages/**/*.zip
|
||||
api-test:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
script_name:
|
||||
- api_metrics
|
||||
- api_subscriptions
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: emqx/emqx-fvt
|
||||
path: .
|
||||
- uses: actions/setup-java@v1
|
||||
with:
|
||||
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||
architecture: x64 # (x64 or x86) - defaults to x64
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: emqx-broker
|
||||
path: .
|
||||
- name: start emqx-broker
|
||||
env:
|
||||
EMQX_LISTENERS__WSS__DEFAULT__BIND: "0.0.0.0:8085"
|
||||
run: |
|
||||
unzip ./emqx/*.zip
|
||||
./emqx/bin/emqx start
|
||||
- name: install jmeter
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
JMETER_VERSION: 5.3
|
||||
run: |
|
||||
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz https://downloads.apache.org/jmeter/binaries/apache-jmeter-$JMETER_VERSION.tgz
|
||||
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
|
||||
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||
- name: run ${{ matrix.script_name }}
|
||||
run: |
|
||||
/opt/jmeter/bin/jmeter.sh \
|
||||
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||
-t .ci/api-test-suite/${{ matrix.script_name }}.jmx \
|
||||
-Demqx_ip="127.0.0.1" \
|
||||
-l jmeter_logs/${{ matrix.script_name }}.jtl \
|
||||
-j jmeter_logs/logs/${{ matrix.script_name }}.log
|
||||
- name: check test logs
|
||||
run: |
|
||||
if cat jmeter_logs/${{ matrix.script_name }}.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
|
||||
grep -A 5 -B 3 '<failure>true</failure>' jmeter_logs/${{ matrix.script_name }}.jtl > jmeter_logs/${{ matrix.script_name }}_err_api.txt
|
||||
echo "check logs failed"
|
||||
exit 1
|
||||
fi
|
||||
- uses: actions/upload-artifact@v1
|
||||
if: failure()
|
||||
with:
|
||||
name: jmeter_logs
|
||||
path: ./jmeter_logs
|
||||
- uses: actions/upload-artifact@v1
|
||||
if: failure()
|
||||
with:
|
||||
name: jmeter_logs
|
||||
path: emqx/log
|
||||
delete-package:
|
||||
runs-on: ubuntu-20.04
|
||||
needs: api-test
|
||||
if: always()
|
||||
steps:
|
||||
- uses: geekyeggo/delete-artifact@v1
|
||||
with:
|
||||
name: emqx-broker
|
|
@ -8,300 +8,186 @@ on:
|
|||
pull_request:
|
||||
|
||||
jobs:
|
||||
docker_test:
|
||||
runs-on: ubuntu-20.04
|
||||
prepare:
|
||||
strategy:
|
||||
matrix:
|
||||
container:
|
||||
- "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
|
||||
- "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- uses: gleam-lang/setup-erlang@v1.1.2
|
||||
id: install_erlang
|
||||
with:
|
||||
otp-version: 24.0.5
|
||||
- name: prepare
|
||||
run: |
|
||||
if make emqx-ee --dry-run > /dev/null 2>&1; then
|
||||
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
||||
git config --global credential.helper store
|
||||
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
|
||||
make deps-emqx-ee
|
||||
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
|
||||
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
|
||||
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
|
||||
else
|
||||
echo "PROFILE=emqx" >> $GITHUB_ENV
|
||||
echo "TARGET=emqx/emqx" >> $GITHUB_ENV
|
||||
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
|
||||
fi
|
||||
- name: make emqx image
|
||||
run: make $PROFILE-docker
|
||||
- name: run emqx
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
set -e -u -x
|
||||
echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_" >> .ci/docker-compose-file/conf.cluster.env
|
||||
echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s" >> .ci/docker-compose-file/conf.cluster.env
|
||||
echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10" >> .ci/docker-compose-file/conf.cluster.env
|
||||
docker-compose \
|
||||
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||
-f .ci/docker-compose-file/docker-compose-python.yaml \
|
||||
up -d
|
||||
while ! docker exec -i node1.emqx.io bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1; do
|
||||
echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx";
|
||||
sleep 5;
|
||||
done
|
||||
# - name: verify EMQX_LOADED_PLUGINS override working
|
||||
# run: |
|
||||
# expected="{emqx_sn, true}."
|
||||
# output=$(docker exec -i node1.emqx.io bash -c "cat data/loaded_plugins" | tail -n1)
|
||||
# if [ "$expected" != "$output" ]; then
|
||||
# exit 1
|
||||
# fi
|
||||
- name: make paho tests
|
||||
run: |
|
||||
if ! docker exec -i python /scripts/pytest.sh; then
|
||||
echo "DUMP_CONTAINER_LOGS_BGN"
|
||||
docker logs haproxy
|
||||
docker logs node1.emqx.io
|
||||
docker logs node2.emqx.io
|
||||
echo "DUMP_CONTAINER_LOGS_END"
|
||||
exit 1
|
||||
fi
|
||||
runs-on: ubuntu-20.04
|
||||
container: ${{ matrix.container }}
|
||||
|
||||
helm_test:
|
||||
runs-on: ubuntu-20.04
|
||||
outputs:
|
||||
profile: ${{ steps.profile.outputs.profile }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- uses: gleam-lang/setup-erlang@v1.1.2
|
||||
id: install_erlang
|
||||
with:
|
||||
otp-version: 24.0.5
|
||||
- name: prepare
|
||||
run: |
|
||||
if make emqx-ee --dry-run > /dev/null 2>&1; then
|
||||
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
||||
git config --global credential.helper store
|
||||
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
|
||||
make deps-emqx-ee
|
||||
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
|
||||
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
|
||||
else
|
||||
echo "TARGET=emqx/emqx" >> $GITHUB_ENV
|
||||
echo "PROFILE=emqx" >> $GITHUB_ENV
|
||||
fi
|
||||
- name: make emqx image
|
||||
run: make $PROFILE-docker
|
||||
- name: install k3s
|
||||
env:
|
||||
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||
run: |
|
||||
sudo sh -c "echo \"127.0.0.1 $(hostname)\" >> /etc/hosts"
|
||||
curl -sfL https://get.k3s.io | sh -
|
||||
sudo chmod 644 /etc/rancher/k3s/k3s.yaml
|
||||
kubectl cluster-info
|
||||
- name: install helm
|
||||
env:
|
||||
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||
run: |
|
||||
curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3
|
||||
sudo chmod 700 get_helm.sh
|
||||
sudo ./get_helm.sh
|
||||
helm version
|
||||
- name: run emqx on chart
|
||||
env:
|
||||
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
version=$(./pkg-vsn.sh)
|
||||
sudo docker save ${TARGET}:$version -o emqx.tar.gz
|
||||
sudo k3s ctr image import emqx.tar.gz
|
||||
steps:
|
||||
- name: get otp version
|
||||
id: get_otp_version
|
||||
run: |
|
||||
otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)"
|
||||
echo "::set-output name=otp::$otp"
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
path: source
|
||||
fetch-depth: 0
|
||||
- name: set profile
|
||||
id: profile
|
||||
shell: bash
|
||||
working-directory: source
|
||||
run: |
|
||||
vsn="$(./pkg-vsn.sh)"
|
||||
if make emqx-ee --dry-run > /dev/null 2>&1; then
|
||||
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
||||
git config --global credential.helper store
|
||||
echo "::set-output name=profile::emqx-ee"
|
||||
else
|
||||
echo "::set-output name=profile::emqx"
|
||||
fi
|
||||
- name: get deps
|
||||
working-directory: source
|
||||
run: |
|
||||
make ensure-rebar3
|
||||
./rebar3 as default get-deps
|
||||
rm -rf rebar.lock
|
||||
- name: gen zip file
|
||||
run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]*
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: source-${{ steps.get_otp_version.outputs.otp }}
|
||||
path: source-${{ steps.get_otp_version.outputs.otp }}.zip
|
||||
|
||||
sed -i -r "s/^appVersion: .*$/appVersion: \"${version}\"/g" deploy/charts/emqx/Chart.yaml
|
||||
sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml
|
||||
docker_test:
|
||||
runs-on: ubuntu-20.04
|
||||
needs: prepare
|
||||
|
||||
helm install emqx \
|
||||
--set image.repository=${TARGET} \
|
||||
--set image.pullPolicy=Never \
|
||||
--set emqxAclConfig="" \
|
||||
--set image.pullPolicy=Never \
|
||||
--set emqxConfig.EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s \
|
||||
--set emqxConfig.EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10 \
|
||||
deploy/charts/emqx \
|
||||
--debug
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
otp:
|
||||
- 23.2.7.2-emqx-2
|
||||
- 24.0.5-emqx-1
|
||||
|
||||
while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \
|
||||
!= "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do
|
||||
echo "==============================";
|
||||
kubectl get pods;
|
||||
echo "==============================";
|
||||
echo "waiting emqx started";
|
||||
sleep 10;
|
||||
done
|
||||
- name: get emqx-0 pods log
|
||||
if: failure()
|
||||
env:
|
||||
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||
run: |
|
||||
kubectl describe pods emqx-0
|
||||
kubectl logs emqx-0
|
||||
- name: get emqx-1 pods log
|
||||
if: failure()
|
||||
env:
|
||||
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||
run: |
|
||||
kubectl describe pods emqx-1
|
||||
kubectl logs emqx-1
|
||||
- name: get emqx-2 pods log
|
||||
if: failure()
|
||||
env:
|
||||
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||
run: |
|
||||
kubectl describe pods emqx-2
|
||||
kubectl logs emqx-2
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: emqx/paho.mqtt.testing
|
||||
ref: develop-4.0
|
||||
path: paho.mqtt.testing
|
||||
- name: install pytest
|
||||
run: |
|
||||
pip install pytest
|
||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||
- name: run paho test
|
||||
env:
|
||||
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
|
||||
run: |
|
||||
emqx_svc=$(kubectl get svc --namespace default emqx -o jsonpath="{.spec.clusterIP}")
|
||||
emqx1=$(kubectl get pods emqx-1 -o jsonpath='{.status.podIP}')
|
||||
emqx2=$(kubectl get pods emqx-2 -o jsonpath='{.status.podIP}')
|
||||
steps:
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: source-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source-${{ matrix.otp }}.zip
|
||||
- name: make docker image
|
||||
working-directory: source
|
||||
env:
|
||||
OTP: ${{ matrix.otp }}
|
||||
run: |
|
||||
make ${{ needs.prepare.outputs.profile }}-docker
|
||||
echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV
|
||||
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
|
||||
- name: run emqx
|
||||
timeout-minutes: 5
|
||||
working-directory: source
|
||||
run: |
|
||||
set -e -u -x
|
||||
echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_" >> .ci/docker-compose-file/conf.cluster.env
|
||||
echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s" >> .ci/docker-compose-file/conf.cluster.env
|
||||
echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10" >> .ci/docker-compose-file/conf.cluster.env
|
||||
docker-compose \
|
||||
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||
-f .ci/docker-compose-file/docker-compose-python.yaml \
|
||||
up -d
|
||||
while ! docker exec -i node1.emqx.io bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1; do
|
||||
echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx";
|
||||
sleep 5;
|
||||
done
|
||||
- name: make paho tests
|
||||
run: |
|
||||
if ! docker exec -i python /scripts/pytest.sh; then
|
||||
echo "DUMP_CONTAINER_LOGS_BGN"
|
||||
docker logs haproxy
|
||||
docker logs node1.emqx.io
|
||||
docker logs node2.emqx.io
|
||||
echo "DUMP_CONTAINER_LOGS_END"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
pytest -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host $emqx_svc
|
||||
RESULT=$?
|
||||
pytest -v paho.mqtt.testing/interoperability/test_cluster --host1 $emqx1 --host2 $emqx2
|
||||
RESULT=$((RESULT + $?))
|
||||
if [ 0 -ne $RESULT ]; then
|
||||
kubectl logs emqx-1
|
||||
kubectl logs emqx-2
|
||||
fi
|
||||
exit $RESULT
|
||||
helm_test:
|
||||
runs-on: ubuntu-20.04
|
||||
needs: prepare
|
||||
|
||||
relup_test:
|
||||
strategy:
|
||||
matrix:
|
||||
container:
|
||||
- "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
|
||||
- "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
otp:
|
||||
- 23.2.7.2-emqx-2
|
||||
- 24.0.5-emqx-1
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
container: ${{ matrix.container }}
|
||||
steps:
|
||||
- uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: source-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source-${{ matrix.otp }}.zip
|
||||
- name: make docker image
|
||||
working-directory: source
|
||||
env:
|
||||
OTP: ${{ matrix.otp }}
|
||||
run: |
|
||||
make ${{ needs.prepare.outputs.profile }}-docker
|
||||
echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV
|
||||
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
|
||||
- run: minikube start
|
||||
- name: run emqx on chart
|
||||
timeout-minutes: 5
|
||||
working-directory: source
|
||||
run: |
|
||||
minikube image load $TARGET:$EMQX_TAG
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
steps:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.8'
|
||||
architecture: 'x64'
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: emqx/paho.mqtt.testing
|
||||
ref: develop-4.0
|
||||
path: paho.mqtt.testing
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: terry-xiaoyu/one_more_emqx
|
||||
ref: master
|
||||
path: one_more_emqx
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: emqx/emqtt-bench
|
||||
ref: master
|
||||
path: emqtt-bench
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: hawk/lux
|
||||
ref: lux-2.6
|
||||
path: lux
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: ${{ github.repository }}
|
||||
path: emqx
|
||||
fetch-depth: 0
|
||||
- name: prepare
|
||||
run: |
|
||||
if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then
|
||||
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
||||
git config --global credential.helper store
|
||||
echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token
|
||||
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
|
||||
else
|
||||
echo "PROFILE=emqx" >> $GITHUB_ENV
|
||||
fi
|
||||
- name: get version
|
||||
run: |
|
||||
set -e -x -u
|
||||
cd emqx
|
||||
if [ $PROFILE = "emqx" ];then
|
||||
broker="emqx-ce"
|
||||
edition='opensource'
|
||||
else
|
||||
broker="emqx-ee"
|
||||
edition='enterprise'
|
||||
fi
|
||||
echo "BROKER=$broker" >> $GITHUB_ENV
|
||||
sed -i -r "s/^appVersion: .*$/appVersion: \"$EMQX_TAG\"/g" deploy/charts/emqx/Chart.yaml
|
||||
|
||||
vsn="$(./pkg-vsn.sh)"
|
||||
echo "VSN=$vsn" >> $GITHUB_ENV
|
||||
helm install emqx \
|
||||
--set image.repository=$TARGET \
|
||||
--set image.pullPolicy=Never \
|
||||
--set emqxAclConfig="" \
|
||||
--set image.pullPolicy=Never \
|
||||
--set emqxConfig.EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s \
|
||||
--set emqxConfig.EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10 \
|
||||
deploy/charts/emqx \
|
||||
--debug
|
||||
|
||||
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
|
||||
if [ $PROFILE = "emqx" ]; then
|
||||
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
|
||||
else
|
||||
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
|
||||
fi
|
||||
echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV
|
||||
- name: download emqx
|
||||
run: |
|
||||
set -e -x -u
|
||||
mkdir -p emqx/_upgrade_base
|
||||
cd emqx/_upgrade_base
|
||||
old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
|
||||
for old_vsn in ${old_vsns[@]}; do
|
||||
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip
|
||||
done
|
||||
- name: build emqx
|
||||
run: make -C emqx ${PROFILE}-zip
|
||||
- name: build emqtt-bench
|
||||
run: make -C emqtt-bench
|
||||
- name: build lux
|
||||
run: |
|
||||
set -e -u -x
|
||||
cd lux
|
||||
autoconf
|
||||
./configure
|
||||
make
|
||||
make install
|
||||
- name: run relup test
|
||||
timeout-minutes: 20
|
||||
run: |
|
||||
set -e -x -u
|
||||
if [ -n "$OLD_VSNS" ]; then
|
||||
mkdir -p packages
|
||||
cp emqx/_packages/${PROFILE}/*.zip packages
|
||||
cp emqx/_upgrade_base/*.zip packages
|
||||
lux \
|
||||
--case_timeout infinity \
|
||||
--var PROFILE=$PROFILE \
|
||||
--var PACKAGE_PATH=$(pwd)/packages \
|
||||
--var BENCH_PATH=$(pwd)/emqtt-bench \
|
||||
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
|
||||
--var VSN="$VSN" \
|
||||
--var OLD_VSNS="$OLD_VSNS" \
|
||||
emqx/.ci/fvt_tests/relup.lux
|
||||
fi
|
||||
- uses: actions/upload-artifact@v1
|
||||
if: failure()
|
||||
with:
|
||||
name: lux_logs
|
||||
path: lux_logs
|
||||
while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \
|
||||
!= "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do
|
||||
echo "==============================";
|
||||
kubectl get pods;
|
||||
echo "==============================";
|
||||
echo "waiting emqx started";
|
||||
sleep 10;
|
||||
done
|
||||
- name: get emqx-0 pods log
|
||||
if: failure()
|
||||
run: |
|
||||
kubectl describe pods emqx-0
|
||||
kubectl logs emqx-0
|
||||
- name: get emqx-1 pods log
|
||||
if: failure()
|
||||
run: |
|
||||
kubectl describe pods emqx-1
|
||||
kubectl logs emqx-1
|
||||
- name: get emqx-2 pods log
|
||||
if: failure()
|
||||
run: |
|
||||
kubectl describe pods emqx-2
|
||||
kubectl logs emqx-2
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: emqx/paho.mqtt.testing
|
||||
ref: develop-4.0
|
||||
path: paho.mqtt.testing
|
||||
- name: install pytest
|
||||
run: |
|
||||
pip install pytest
|
||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||
- name: run paho test
|
||||
run: |
|
||||
kubectl port-forward service/emqx 1883:1883 > /dev/null &
|
||||
pytest -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "127.0.0.1"
|
||||
|
|
|
@ -0,0 +1,130 @@
|
|||
name: Release Upgrade Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
- e*
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
relup_test:
|
||||
strategy:
|
||||
matrix:
|
||||
container:
|
||||
- "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
|
||||
- "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
|
||||
|
||||
runs-on: ubuntu-20.04
|
||||
container: ${{ matrix.container }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
steps:
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.8'
|
||||
architecture: 'x64'
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: emqx/paho.mqtt.testing
|
||||
ref: develop-4.0
|
||||
path: paho.mqtt.testing
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: terry-xiaoyu/one_more_emqx
|
||||
ref: master
|
||||
path: one_more_emqx
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: emqx/emqtt-bench
|
||||
ref: master
|
||||
path: emqtt-bench
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: hawk/lux
|
||||
ref: lux-2.6
|
||||
path: lux
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
repository: ${{ github.repository }}
|
||||
path: emqx
|
||||
fetch-depth: 0
|
||||
- name: prepare
|
||||
run: |
|
||||
if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then
|
||||
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
|
||||
git config --global credential.helper store
|
||||
echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token
|
||||
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
|
||||
else
|
||||
echo "PROFILE=emqx" >> $GITHUB_ENV
|
||||
fi
|
||||
- name: get version
|
||||
run: |
|
||||
set -e -x -u
|
||||
cd emqx
|
||||
if [ $PROFILE = "emqx" ];then
|
||||
broker="emqx-ce"
|
||||
edition='opensource'
|
||||
else
|
||||
broker="emqx-ee"
|
||||
edition='enterprise'
|
||||
fi
|
||||
echo "BROKER=$broker" >> $GITHUB_ENV
|
||||
|
||||
vsn="$(./pkg-vsn.sh)"
|
||||
echo "VSN=$vsn" >> $GITHUB_ENV
|
||||
|
||||
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
|
||||
if [ $PROFILE = "emqx" ]; then
|
||||
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
|
||||
else
|
||||
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
|
||||
fi
|
||||
echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV
|
||||
- name: download emqx
|
||||
run: |
|
||||
set -e -x -u
|
||||
mkdir -p emqx/_upgrade_base
|
||||
cd emqx/_upgrade_base
|
||||
old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
|
||||
for old_vsn in ${old_vsns[@]}; do
|
||||
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip
|
||||
done
|
||||
- name: build emqx
|
||||
run: make -C emqx ${PROFILE}-zip
|
||||
- name: build emqtt-bench
|
||||
run: make -C emqtt-bench
|
||||
- name: build lux
|
||||
run: |
|
||||
set -e -u -x
|
||||
cd lux
|
||||
autoconf
|
||||
./configure
|
||||
make
|
||||
make install
|
||||
- name: run relup test
|
||||
timeout-minutes: 20
|
||||
run: |
|
||||
set -e -x -u
|
||||
if [ -n "$OLD_VSNS" ]; then
|
||||
mkdir -p packages
|
||||
cp emqx/_packages/${PROFILE}/*.zip packages
|
||||
cp emqx/_upgrade_base/*.zip packages
|
||||
lux \
|
||||
--case_timeout infinity \
|
||||
--var PROFILE=$PROFILE \
|
||||
--var PACKAGE_PATH=$(pwd)/packages \
|
||||
--var BENCH_PATH=$(pwd)/emqtt-bench \
|
||||
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
|
||||
--var VSN="$VSN" \
|
||||
--var OLD_VSNS="$OLD_VSNS" \
|
||||
emqx/.ci/fvt_tests/relup.lux
|
||||
fi
|
||||
- uses: actions/upload-artifact@v1
|
||||
if: failure()
|
||||
with:
|
||||
name: lux_logs
|
||||
path: lux_logs
|
|
@ -98,19 +98,19 @@ jobs:
|
|||
- name: run cover
|
||||
run: |
|
||||
printenv > .env
|
||||
docker exec -i ${{ matrix.otp_release }} bash -c "make cover"
|
||||
docker exec --env-file .env -i ${{ matrix.otp_release }} bash -c "make coveralls"
|
||||
docker exec -i ${{ matrix.otp_release }} bash -c "DIAGNOSTIC=1 make cover"
|
||||
docker exec --env-file .env -i ${{ matrix.otp_release }} bash -c "DIAGNOSTIC=1 make coveralls"
|
||||
- name: cat rebar.crashdump
|
||||
if: failure()
|
||||
run: if [ -f 'rebar3.crashdump' ];then cat 'rebar3.crashdump'; fi
|
||||
- uses: actions/upload-artifact@v1
|
||||
if: failure()
|
||||
with:
|
||||
name: logs
|
||||
name: logs_${{ matrix.otp_release }}
|
||||
path: _build/test/logs
|
||||
- uses: actions/upload-artifact@v1
|
||||
with:
|
||||
name: cover
|
||||
name: cover_${{ matrix.otp_release }}
|
||||
path: _build/test/cover
|
||||
|
||||
finish:
|
||||
|
|
2
Makefile
2
Makefile
|
@ -5,7 +5,7 @@ BUILD = $(CURDIR)/build
|
|||
SCRIPTS = $(CURDIR)/scripts
|
||||
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
|
||||
export EMQX_DESC ?= EMQ X
|
||||
export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.4
|
||||
export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.11
|
||||
ifeq ($(OS),Windows_NT)
|
||||
export REBAR_COLOR=none
|
||||
endif
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
[](https://travis-ci.org/emqx/emqx)
|
||||
[](https://coveralls.io/github/emqx/emqx)
|
||||
[](https://hub.docker.com/r/emqx/emqx)
|
||||
[](https://slack-invite.emqx.io)
|
||||
[](https://slack-invite.emqx.io/)
|
||||
[](https://twitter.com/EMQTech)
|
||||
[](https://askemq.com)
|
||||
[](https://www.youtube.com/channel/UCir_r04HIsLjf2qqyZ4A8Cg)
|
||||
|
@ -90,7 +90,7 @@ make eunit ct
|
|||
### 执行部分应用的 common tests
|
||||
|
||||
```bash
|
||||
make apps/emqx_bridge_mqtt-ct
|
||||
make apps/emqx_retainer-ct
|
||||
```
|
||||
|
||||
### 静态分析(Dialyzer)
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
[](https://travis-ci.org/emqx/emqx)
|
||||
[](https://coveralls.io/github/emqx/emqx)
|
||||
[](https://hub.docker.com/r/emqx/emqx)
|
||||
[](https://slack-invite.emqx.io)
|
||||
[](https://slack-invite.emqx.io/)
|
||||
[](https://twitter.com/EMQTech)
|
||||
[](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
|
||||
|
||||
|
@ -84,7 +84,7 @@ make eunit ct
|
|||
### common test の一部を実行する
|
||||
|
||||
```bash
|
||||
make apps/emqx_bridge_mqtt-ct
|
||||
make apps/emqx_retainer-ct
|
||||
```
|
||||
|
||||
### Dialyzer
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
[](https://travis-ci.org/emqx/emqx)
|
||||
[](https://coveralls.io/github/emqx/emqx?branch=master)
|
||||
[](https://hub.docker.com/r/emqx/emqx)
|
||||
[](https://slack-invite.emqx.io)
|
||||
[](https://slack-invite.emqx.io/)
|
||||
[](https://twitter.com/EMQTech)
|
||||
[](https://github.com/emqx/emqx/discussions)
|
||||
[](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
|
||||
|
@ -93,7 +93,7 @@ make eunit ct
|
|||
Пример:
|
||||
|
||||
```bash
|
||||
make apps/emqx_bridge_mqtt-ct
|
||||
make apps/emqx_retainer-ct
|
||||
```
|
||||
|
||||
### Dialyzer
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
[](https://travis-ci.org/emqx/emqx)
|
||||
[](https://coveralls.io/github/emqx/emqx?branch=master)
|
||||
[](https://hub.docker.com/r/emqx/emqx)
|
||||
[](https://slack-invite.emqx.io)
|
||||
[](https://slack-invite.emqx.io/)
|
||||
[](https://twitter.com/EMQTech)
|
||||
[](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
|
||||
|
||||
|
@ -92,7 +92,7 @@ make eunit ct
|
|||
Examples
|
||||
|
||||
```bash
|
||||
make apps/emqx_bridge_mqtt-ct
|
||||
make apps/emqx_retainer-ct
|
||||
```
|
||||
|
||||
### Dialyzer
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -26,6 +26,7 @@
|
|||
-define(COMMON_SHARD, emqx_common_shard).
|
||||
-define(SHARED_SUB_SHARD, emqx_shared_sub_shard).
|
||||
-define(MOD_DELAYED_SHARD, emqx_delayed_shard).
|
||||
-define(CM_SHARD, emqx_cm_shard).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Banner
|
||||
|
@ -125,8 +126,7 @@
|
|||
-record(banned, {
|
||||
who :: {clientid, binary()}
|
||||
| {peerhost, inet:ip_address()}
|
||||
| {username, binary()}
|
||||
| {ip_address, inet:ip_address()},
|
||||
| {username, binary()},
|
||||
by :: binary(),
|
||||
reason :: binary(),
|
||||
at :: integer(),
|
||||
|
@ -134,3 +134,19 @@
|
|||
}).
|
||||
|
||||
-endif.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Authentication
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-record(authenticator,
|
||||
{ id :: binary()
|
||||
, provider :: module()
|
||||
, enable :: boolean()
|
||||
, state :: map()
|
||||
}).
|
||||
|
||||
-record(chain,
|
||||
{ name :: atom()
|
||||
, authenticators :: [#authenticator{}]
|
||||
}).
|
|
@ -29,7 +29,7 @@
|
|||
|
||||
-ifndef(EMQX_ENTERPRISE).
|
||||
|
||||
-define(EMQX_RELEASE, {opensource, "5.0-alpha.3"}).
|
||||
-define(EMQX_RELEASE, {opensource, "5.0-alpha.5"}).
|
||||
|
||||
-else.
|
||||
|
||||
|
|
|
@ -13,9 +13,9 @@
|
|||
, {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
|
||||
, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.2"}}}
|
||||
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.2"}}}
|
||||
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.10.4"}}}
|
||||
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.10.8"}}}
|
||||
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
|
||||
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.11.0"}}}
|
||||
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.15.0"}}}
|
||||
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
|
||||
, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
|
||||
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.14.1"}}}
|
||||
|
@ -28,8 +28,8 @@
|
|||
[{deps,
|
||||
[ meck
|
||||
, {bbmustache,"1.10.0"}
|
||||
, {emqx_ct_helpers, {git,"https://github.com/emqx/emqx-ct-helpers", {branch,"hocon"}}}
|
||||
, {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.4.2"}}}
|
||||
, {emqx_ct_helpers, {git,"https://github.com/emqx/emqx-ct-helpers.git", {tag,"2.1.0"}}}
|
||||
, {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.4.3"}}}
|
||||
]},
|
||||
{extra_src_dirs, [{"test",[recursive]}]}
|
||||
]}
|
||||
|
|
|
@ -18,7 +18,7 @@ IsQuicSupp = fun() ->
|
|||
end,
|
||||
|
||||
Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {branch, "0.6.0"}}},
|
||||
Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {branch, "0.0.7"}}},
|
||||
Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {branch, "0.0.8"}}},
|
||||
|
||||
ExtraDeps = fun(C) ->
|
||||
{deps, Deps0} = lists:keyfind(deps, 1, C),
|
||||
|
|
|
@ -55,6 +55,18 @@
|
|||
-export([ set_debug_secret/1
|
||||
]).
|
||||
|
||||
%% Configs APIs
|
||||
-export([ get_config/1
|
||||
, get_config/2
|
||||
, get_raw_config/1
|
||||
, get_raw_config/2
|
||||
, update_config/2
|
||||
, update_config/3
|
||||
, remove_config/1
|
||||
, remove_config/2
|
||||
, reset_config/2
|
||||
]).
|
||||
|
||||
-define(APP, ?MODULE).
|
||||
|
||||
%% @hidden Path to the file which has debug_info encryption secret in it.
|
||||
|
@ -184,3 +196,53 @@ run_hook(HookPoint, Args) ->
|
|||
-spec(run_fold_hook(emqx_hooks:hookpoint(), list(any()), any()) -> any()).
|
||||
run_fold_hook(HookPoint, Args, Acc) ->
|
||||
emqx_hooks:run_fold(HookPoint, Args, Acc).
|
||||
|
||||
-spec get_config(emqx_map_lib:config_key_path()) -> term().
|
||||
get_config(KeyPath) ->
|
||||
emqx_config:get(KeyPath).
|
||||
|
||||
-spec get_config(emqx_map_lib:config_key_path(), term()) -> term().
|
||||
get_config(KeyPath, Default) ->
|
||||
emqx_config:get(KeyPath, Default).
|
||||
|
||||
-spec get_raw_config(emqx_map_lib:config_key_path()) -> term().
|
||||
get_raw_config(KeyPath) ->
|
||||
emqx_config:get_raw(KeyPath).
|
||||
|
||||
-spec get_raw_config(emqx_map_lib:config_key_path(), term()) -> term().
|
||||
get_raw_config(KeyPath, Default) ->
|
||||
emqx_config:get_raw(KeyPath, Default).
|
||||
|
||||
-spec update_config(emqx_map_lib:config_key_path(), emqx_config:update_request()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
update_config(KeyPath, UpdateReq) ->
|
||||
update_config(KeyPath, UpdateReq, #{}).
|
||||
|
||||
-spec update_config(emqx_map_lib:config_key_path(), emqx_config:update_request(),
|
||||
emqx_config:update_opts()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
update_config([RootName | _] = KeyPath, UpdateReq, Opts) ->
|
||||
emqx_config_handler:update_config(emqx_config:get_schema_mod(RootName), KeyPath,
|
||||
{{update, UpdateReq}, Opts}).
|
||||
|
||||
-spec remove_config(emqx_map_lib:config_key_path()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
remove_config(KeyPath) ->
|
||||
remove_config(KeyPath, #{}).
|
||||
|
||||
-spec remove_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
remove_config([RootName | _] = KeyPath, Opts) ->
|
||||
emqx_config_handler:update_config(emqx_config:get_schema_mod(RootName),
|
||||
KeyPath, {remove, Opts}).
|
||||
|
||||
-spec reset_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
reset_config([RootName | _] = KeyPath, Opts) ->
|
||||
case emqx_config:get_default_value(KeyPath) of
|
||||
{ok, Default} ->
|
||||
emqx_config_handler:update_config(emqx_config:get_schema_mod(RootName), KeyPath,
|
||||
{{update, Default}, Opts});
|
||||
{error, _} = Error ->
|
||||
Error
|
||||
end.
|
||||
|
|
|
@ -27,30 +27,36 @@
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
-spec(authenticate(emqx_types:clientinfo()) ->
|
||||
ok | {ok, binary()} | {continue, map()} | {continue, binary(), map()} | {error, term()}).
|
||||
{ok, map()} | {ok, map(), binary()} | {continue, map()} | {continue, binary(), map()} | {error, term()}).
|
||||
authenticate(Credential) ->
|
||||
run_hooks('client.authenticate', [Credential], ok).
|
||||
case run_hooks('client.authenticate', [Credential], {ok, #{is_superuser => false}}) of
|
||||
ok ->
|
||||
{ok, #{is_superuser => false}};
|
||||
Other ->
|
||||
Other
|
||||
end.
|
||||
|
||||
%% @doc Check Authorization
|
||||
-spec authorize(emqx_types:clientinfo(), emqx_types:pubsub(), emqx_types:topic())
|
||||
-> allow | deny.
|
||||
authorize(ClientInfo = #{zone := Zone}, PubSub, Topic) ->
|
||||
case emqx_authz_cache:is_enabled(Zone) of
|
||||
authorize(ClientInfo, PubSub, Topic) ->
|
||||
case emqx_authz_cache:is_enabled() of
|
||||
true -> check_authorization_cache(ClientInfo, PubSub, Topic);
|
||||
false -> do_authorize(ClientInfo, PubSub, Topic)
|
||||
end.
|
||||
|
||||
check_authorization_cache(ClientInfo = #{zone := Zone}, PubSub, Topic) ->
|
||||
case emqx_authz_cache:get_authz_cache(Zone, PubSub, Topic) of
|
||||
check_authorization_cache(ClientInfo, PubSub, Topic) ->
|
||||
case emqx_authz_cache:get_authz_cache(PubSub, Topic) of
|
||||
not_found ->
|
||||
AuthzResult = do_authorize(ClientInfo, PubSub, Topic),
|
||||
emqx_authz_cache:put_authz_cache(Zone, PubSub, Topic, AuthzResult),
|
||||
emqx_authz_cache:put_authz_cache(PubSub, Topic, AuthzResult),
|
||||
AuthzResult;
|
||||
AuthzResult -> AuthzResult
|
||||
end.
|
||||
|
||||
do_authorize(ClientInfo, PubSub, Topic) ->
|
||||
case run_hooks('client.authorize', [ClientInfo, PubSub, Topic], allow) of
|
||||
NoMatch = emqx:get_config([authorization, no_match], allow),
|
||||
case run_hooks('client.authorize', [ClientInfo, PubSub, Topic], NoMatch) of
|
||||
allow -> allow;
|
||||
_Other -> deny
|
||||
end.
|
||||
|
|
|
@ -28,7 +28,7 @@
|
|||
-boot_mnesia({mnesia, [boot]}).
|
||||
-copy_mnesia({mnesia, [copy]}).
|
||||
|
||||
-export([pre_config_update/2]).
|
||||
-export([post_config_update/4]).
|
||||
|
||||
-export([ start_link/0
|
||||
, stop/0
|
||||
|
@ -85,9 +85,6 @@
|
|||
|
||||
-define(DEACTIVATED_ALARM, emqx_deactivated_alarm).
|
||||
|
||||
-rlog_shard({?COMMON_SHARD, ?ACTIVATED_ALARM}).
|
||||
-rlog_shard({?COMMON_SHARD, ?DEACTIVATED_ALARM}).
|
||||
|
||||
-ifdef(TEST).
|
||||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
|
@ -151,14 +148,9 @@ get_alarms(activated) ->
|
|||
get_alarms(deactivated) ->
|
||||
gen_server:call(?MODULE, {get_alarms, deactivated}).
|
||||
|
||||
pre_config_update(#{<<"validity_period">> := Period0} = NewConf, OldConf) ->
|
||||
?MODULE ! {update_timer, hocon_postprocess:duration(Period0)},
|
||||
merge(OldConf, NewConf);
|
||||
pre_config_update(NewConf, OldConf) ->
|
||||
merge(OldConf, NewConf).
|
||||
|
||||
merge(undefined, New) -> New;
|
||||
merge(Old, New) -> maps:merge(Old, New).
|
||||
post_config_update(_, #{validity_period := Period0}, _OldConf, _AppEnv) ->
|
||||
?MODULE ! {update_timer, Period0},
|
||||
ok.
|
||||
|
||||
format(#activated_alarm{name = Name, message = Message, activate_at = At, details = Details}) ->
|
||||
Now = erlang:system_time(microsecond),
|
||||
|
@ -166,7 +158,8 @@ format(#activated_alarm{name = Name, message = Message, activate_at = At, detail
|
|||
node => node(),
|
||||
name => Name,
|
||||
message => Message,
|
||||
duration => Now - At,
|
||||
duration => (Now - At) div 1000, %% to millisecond
|
||||
activate_at => to_rfc3339(At),
|
||||
details => Details
|
||||
};
|
||||
format(#deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details,
|
||||
|
@ -176,18 +169,23 @@ format(#deactivated_alarm{name = Name, message = Message, activate_at = At, deta
|
|||
name => Name,
|
||||
message => Message,
|
||||
duration => DAt - At,
|
||||
activate_at => to_rfc3339(At),
|
||||
deactivate_at => to_rfc3339(DAt),
|
||||
details => Details
|
||||
};
|
||||
format(_) ->
|
||||
{error, unknow_alarm}.
|
||||
|
||||
to_rfc3339(Timestamp) ->
|
||||
list_to_binary(calendar:system_time_to_rfc3339(Timestamp div 1000, [{unit, millisecond}])).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% gen_server callbacks
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
init([]) ->
|
||||
deactivate_all_alarms(),
|
||||
emqx_config_handler:add_handler([alarm], ?MODULE),
|
||||
ok = emqx_config_handler:add_handler([alarm], ?MODULE),
|
||||
{ok, #state{timer = ensure_timer(undefined, get_validity_period())}}.
|
||||
|
||||
%% suppress dialyzer warning due to dirty read/write race condition.
|
||||
|
@ -204,7 +202,7 @@ handle_call({activate_alarm, Name, Details}, _From, State) ->
|
|||
message = normalize_message(Name, Details),
|
||||
activate_at = erlang:system_time(microsecond)},
|
||||
ekka_mnesia:dirty_write(?ACTIVATED_ALARM, Alarm),
|
||||
do_actions(activate, Alarm, emqx_config:get([alarm, actions])),
|
||||
do_actions(activate, Alarm, emqx:get_config([alarm, actions])),
|
||||
{reply, ok, State}
|
||||
end;
|
||||
|
||||
|
@ -263,6 +261,7 @@ handle_info(Info, State) ->
|
|||
{noreply, State}.
|
||||
|
||||
terminate(_Reason, _State) ->
|
||||
ok = emqx_config_handler:remove_handler([alarm]),
|
||||
ok.
|
||||
|
||||
code_change(_OldVsn, State, _Extra) ->
|
||||
|
@ -273,11 +272,11 @@ code_change(_OldVsn, State, _Extra) ->
|
|||
%%------------------------------------------------------------------------------
|
||||
|
||||
get_validity_period() ->
|
||||
emqx_config:get([alarm, validity_period]).
|
||||
emqx:get_config([alarm, validity_period]).
|
||||
|
||||
deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name,
|
||||
details = Details0, message = Msg0}) ->
|
||||
SizeLimit = emqx_config:get([alarm, size_limit]),
|
||||
SizeLimit = emqx:get_config([alarm, size_limit]),
|
||||
case SizeLimit > 0 andalso (mnesia:table_info(?DEACTIVATED_ALARM, size) >= SizeLimit) of
|
||||
true ->
|
||||
case mnesia:dirty_first(?DEACTIVATED_ALARM) of
|
||||
|
@ -294,7 +293,7 @@ deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name
|
|||
erlang:system_time(microsecond)),
|
||||
ekka_mnesia:dirty_write(?DEACTIVATED_ALARM, HistoryAlarm),
|
||||
ekka_mnesia:dirty_delete(?ACTIVATED_ALARM, Name),
|
||||
do_actions(deactivate, DeActAlarm, emqx_config:get([alarm, actions])).
|
||||
do_actions(deactivate, DeActAlarm, emqx:get_config([alarm, actions])).
|
||||
|
||||
make_deactivated_alarm(ActivateAt, Name, Details, Message, DeActivateAt) ->
|
||||
#deactivated_alarm{
|
||||
|
|
|
@ -0,0 +1,735 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_authentication).
|
||||
|
||||
-behaviour(gen_server).
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_config_handler).
|
||||
|
||||
-include("emqx.hrl").
|
||||
-include("logger.hrl").
|
||||
|
||||
-export([ roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ pre_config_update/2
|
||||
, post_config_update/4
|
||||
]).
|
||||
|
||||
-export([ authenticate/2
|
||||
]).
|
||||
|
||||
-export([ initialize_authentication/2 ]).
|
||||
|
||||
-export([ start_link/0
|
||||
, stop/0
|
||||
]).
|
||||
|
||||
-export([ add_provider/2
|
||||
, remove_provider/1
|
||||
, create_chain/1
|
||||
, delete_chain/1
|
||||
, lookup_chain/1
|
||||
, list_chains/0
|
||||
, create_authenticator/2
|
||||
, delete_authenticator/2
|
||||
, update_authenticator/3
|
||||
, lookup_authenticator/2
|
||||
, list_authenticators/1
|
||||
, move_authenticator/3
|
||||
]).
|
||||
|
||||
-export([ import_users/3
|
||||
, add_user/3
|
||||
, delete_user/3
|
||||
, update_user/4
|
||||
, lookup_user/3
|
||||
, list_users/2
|
||||
]).
|
||||
|
||||
-export([ generate_id/1 ]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([ init/1
|
||||
, handle_call/3
|
||||
, handle_cast/2
|
||||
, handle_info/2
|
||||
, terminate/2
|
||||
, code_change/3
|
||||
]).
|
||||
|
||||
-define(CHAINS_TAB, emqx_authn_chains).
|
||||
|
||||
-define(VER_1, <<"1">>).
|
||||
-define(VER_2, <<"2">>).
|
||||
|
||||
-type config() :: #{atom() => term()}.
|
||||
-type state() :: #{atom() => term()}.
|
||||
-type extra() :: #{is_superuser := boolean(),
|
||||
atom() => term()}.
|
||||
-type user_info() :: #{user_id := binary(),
|
||||
atom() => term()}.
|
||||
|
||||
-callback refs() -> [{ref, Module, Name}] when Module::module(), Name::atom().
|
||||
|
||||
-callback create(Config)
|
||||
-> {ok, State}
|
||||
| {error, term()}
|
||||
when Config::config(), State::state().
|
||||
|
||||
-callback update(Config, State)
|
||||
-> {ok, NewState}
|
||||
| {error, term()}
|
||||
when Config::config(), State::state(), NewState::state().
|
||||
|
||||
-callback authenticate(Credential, State)
|
||||
-> ignore
|
||||
| {ok, Extra}
|
||||
| {ok, Extra, AuthData}
|
||||
| {continue, AuthCache}
|
||||
| {continue, AuthData, AuthCache}
|
||||
| {error, term()}
|
||||
when Credential::map(), State::state(), Extra::extra(), AuthData::binary(), AuthCache::map().
|
||||
|
||||
-callback destroy(State)
|
||||
-> ok
|
||||
when State::state().
|
||||
|
||||
-callback import_users(Filename, State)
|
||||
-> ok
|
||||
| {error, term()}
|
||||
when Filename::binary(), State::state().
|
||||
|
||||
-callback add_user(UserInfo, State)
|
||||
-> {ok, User}
|
||||
| {error, term()}
|
||||
when UserInfo::user_info(), State::state(), User::user_info().
|
||||
|
||||
-callback delete_user(UserID, State)
|
||||
-> ok
|
||||
| {error, term()}
|
||||
when UserID::binary(), State::state().
|
||||
|
||||
-callback update_user(UserID, UserInfo, State)
|
||||
-> {ok, User}
|
||||
| {error, term()}
|
||||
when UserID::binary, UserInfo::map(), State::state(), User::user_info().
|
||||
|
||||
-callback list_users(State)
|
||||
-> {ok, Users}
|
||||
when State::state(), Users::[user_info()].
|
||||
|
||||
-optional_callbacks([ import_users/2
|
||||
, add_user/2
|
||||
, delete_user/2
|
||||
, update_user/3
|
||||
, list_users/1
|
||||
]).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
roots() -> [{authentication, fun authentication/1}].
|
||||
|
||||
fields(_) -> [].
|
||||
|
||||
authentication(type) ->
|
||||
{ok, Refs} = get_refs(),
|
||||
hoconsc:union([hoconsc:array(hoconsc:union(Refs)) | Refs]);
|
||||
authentication(default) -> [];
|
||||
authentication(_) -> undefined.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Callbacks of config handler
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
pre_config_update(UpdateReq, OldConfig) ->
|
||||
case do_pre_config_update(UpdateReq, to_list(OldConfig)) of
|
||||
{error, Reason} -> {error, Reason};
|
||||
{ok, NewConfig} -> {ok, may_to_map(NewConfig)}
|
||||
end.
|
||||
|
||||
do_pre_config_update({create_authenticator, _ChainName, Config}, OldConfig) ->
|
||||
{ok, OldConfig ++ [Config]};
|
||||
do_pre_config_update({delete_authenticator, _ChainName, AuthenticatorID}, OldConfig) ->
|
||||
NewConfig = lists:filter(fun(OldConfig0) ->
|
||||
AuthenticatorID =/= generate_id(OldConfig0)
|
||||
end, OldConfig),
|
||||
{ok, NewConfig};
|
||||
do_pre_config_update({update_authenticator, _ChainName, AuthenticatorID, Config}, OldConfig) ->
|
||||
NewConfig = lists:map(fun(OldConfig0) ->
|
||||
case AuthenticatorID =:= generate_id(OldConfig0) of
|
||||
true -> maps:merge(OldConfig0, Config);
|
||||
false -> OldConfig0
|
||||
end
|
||||
end, OldConfig),
|
||||
{ok, NewConfig};
|
||||
do_pre_config_update({move_authenticator, _ChainName, AuthenticatorID, Position}, OldConfig) ->
|
||||
case split_by_id(AuthenticatorID, OldConfig) of
|
||||
{error, Reason} -> {error, Reason};
|
||||
{ok, Part1, [Found | Part2]} ->
|
||||
case Position of
|
||||
<<"top">> ->
|
||||
{ok, [Found | Part1] ++ Part2};
|
||||
<<"bottom">> ->
|
||||
{ok, Part1 ++ Part2 ++ [Found]};
|
||||
<<"before:", Before/binary>> ->
|
||||
case split_by_id(Before, Part1 ++ Part2) of
|
||||
{error, Reason} ->
|
||||
{error, Reason};
|
||||
{ok, NPart1, [NFound | NPart2]} ->
|
||||
{ok, NPart1 ++ [Found, NFound | NPart2]}
|
||||
end;
|
||||
_ ->
|
||||
{error, {invalid_parameter, position}}
|
||||
end
|
||||
end.
|
||||
|
||||
%% @doc Config-handler hook, invoked after the new config is persisted.
%% Schema-checks the new raw list before applying it to the registry.
post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) ->
    CheckedConfig = check_config(to_list(NewConfig)),
    do_post_config_update(UpdateReq, CheckedConfig, OldConfig, AppEnvs).
|
||||
|
||||
%% Apply an already-persisted config change to the running registry.
do_post_config_update({create_authenticator, ChainName, Config}, _NewConfig, _OldConfig, _AppEnvs) ->
    NConfig = check_config(Config),
    %% Chain may already exist; create_chain's error reply is discarded
    %% on purpose.
    _ = create_chain(ChainName),
    create_authenticator(ChainName, NConfig);

do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, _OldConfig, _AppEnvs) ->
    delete_authenticator(ChainName, AuthenticatorID);

do_post_config_update({update_authenticator, ChainName, AuthenticatorID, _Config}, NewConfig, _OldConfig, _AppEnvs) ->
    %% Badmatch-crashes unless the updated entry appears exactly once in
    %% NewConfig; pre_config_update guarantees it is present.
    [Config] = lists:filter(fun(NewConfig0) ->
                                AuthenticatorID =:= generate_id(NewConfig0)
                            end, NewConfig),
    NConfig = check_config(Config),
    update_authenticator(ChainName, AuthenticatorID, NConfig);

do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}, _NewConfig, _OldConfig, _AppEnvs) ->
    %% Position was validated in do_pre_config_update, hence no
    %% catch-all branch here.
    NPosition = case Position of
                    <<"top">> -> top;
                    <<"bottom">> -> bottom;
                    <<"before:", Before/binary>> ->
                        {before, Before}
                end,
    move_authenticator(ChainName, AuthenticatorID, NPosition).
|
||||
|
||||
%% @doc Validate a raw authenticator config list against this module's
%% hocon schema, returning the checked (atom-keyed) list.
check_config(Config) ->
    Opts = #{nullable => true, atom_key => true},
    Wrapped = #{<<"authentication">> => Config},
    #{authentication := Checked} =
        hocon_schema:check_plain(emqx_authentication, Wrapped, Opts),
    Checked.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Authenticate
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
%% @doc 'client.authenticate' hook callback. Prefers the listener's own
%% chain; falls back to the protocol's global chain; ignores when
%% neither has any authenticators.
authenticate(#{listener := Listener, protocol := Protocol} = Credential, _AuthResult) ->
    case authenticators_of(Listener) of
        {ok, Authenticators} ->
            do_authenticate(Authenticators, Credential);
        none ->
            case authenticators_of(global_chain(Protocol)) of
                {ok, Authenticators} -> do_authenticate(Authenticators, Credential);
                none -> ignore
            end
    end.

%% Look up a chain's non-empty authenticator list, or 'none'.
authenticators_of(ChainName) ->
    case ets:lookup(?CHAINS_TAB, ChainName) of
        [#chain{authenticators = [_ | _] = Authenticators}] -> {ok, Authenticators};
        _ -> none
    end.
|
||||
|
||||
%% Run the credential through each authenticator in order until one
%% returns a verdict; an exhausted chain denies access.
do_authenticate([], _Credential) ->
    {stop, {error, not_authorized}};
do_authenticate([#authenticator{provider = Provider, state = State} | Rest], Credential) ->
    case Provider:authenticate(Credential, State) of
        ignore ->
            do_authenticate(Rest, Credential);
        Verdict ->
            %% Possible verdicts from the provider:
            %%   {ok, Extra} | {ok, Extra, AuthData}
            %%   {continue, AuthCache} | {continue, AuthData, AuthCache}
            %%   {error, Reason}
            {stop, Verdict}
    end.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
%% @doc Bootstrap a chain from config: create the chain, then create one
%% authenticator per checked config entry, logging (not aborting) on
%% individual failures.
initialize_authentication(_ChainName, []) ->
    ok;
initialize_authentication(ChainName, AuthenticatorsConfig) ->
    _ = create_chain(ChainName),
    CheckedConfig = check_config(to_list(AuthenticatorsConfig)),
    lists:foreach(
        fun(AuthenticatorConfig) ->
            case create_authenticator(ChainName, AuthenticatorConfig) of
                {ok, _} ->
                    ok;
                {error, Reason} ->
                    ?LOG(error, "Failed to create authenticator '~s': ~p",
                         [generate_id(AuthenticatorConfig), Reason])
            end
        end, CheckedConfig).
|
||||
|
||||
%% @doc Start the authentication registry as a locally registered singleton.
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
|
||||
|
||||
%% @doc Stop the registry server.
stop() ->
    gen_server:stop(?MODULE).
|
||||
|
||||
%% @doc Collect hocon schema refs from all registered providers.
get_refs() ->
    gen_server:call(?MODULE, get_refs).
|
||||
|
||||
%% @doc Register a provider module for an authentication type.
add_provider(AuthNType, Provider) ->
    gen_server:call(?MODULE, {add_provider, AuthNType, Provider}).
|
||||
|
||||
%% @doc Deregister the provider for an authentication type.
remove_provider(AuthNType) ->
    gen_server:call(?MODULE, {remove_provider, AuthNType}).
|
||||
|
||||
%% @doc Create an empty authentication chain; errors if it already exists.
create_chain(Name) ->
    gen_server:call(?MODULE, {create_chain, Name}).
|
||||
|
||||
%% @doc Delete a chain, destroying all of its authenticators.
delete_chain(Name) ->
    gen_server:call(?MODULE, {delete_chain, Name}).
|
||||
|
||||
%% @doc Look up a chain by name; returns its serialized form.
lookup_chain(Name) ->
    gen_server:call(?MODULE, {lookup_chain, Name}).
|
||||
|
||||
%% @doc List every chain (serialized). Reads the ETS table directly,
%% bypassing the gen_server.
list_chains() ->
    {ok, [serialize_chain(Chain) || Chain <- ets:tab2list(?CHAINS_TAB)]}.
|
||||
|
||||
%% @doc Append a new authenticator (built from Config) to the chain.
create_authenticator(ChainName, Config) ->
    gen_server:call(?MODULE, {create_authenticator, ChainName, Config}).
|
||||
|
||||
%% @doc Remove an authenticator from the chain and destroy its state.
delete_authenticator(ChainName, AuthenticatorID) ->
    gen_server:call(?MODULE, {delete_authenticator, ChainName, AuthenticatorID}).
|
||||
|
||||
%% @doc Update an authenticator's config in place (same mechanism/backend).
update_authenticator(ChainName, AuthenticatorID, Config) ->
    gen_server:call(?MODULE, {update_authenticator, ChainName, AuthenticatorID, Config}).
|
||||
|
||||
%% @doc Look up an authenticator in a chain by ID (direct ETS read).
%% Returns {error, {not_found, ...}} for a missing chain or ID.
lookup_authenticator(ChainName, AuthenticatorID) ->
    case ets:lookup(?CHAINS_TAB, ChainName) of
        [#chain{authenticators = Authenticators}] ->
            case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
                false ->
                    {error, {not_found, {authenticator, AuthenticatorID}}};
                Authenticator ->
                    {ok, serialize_authenticator(Authenticator)}
            end;
        [] ->
            {error, {not_found, {chain, ChainName}}}
    end.
|
||||
|
||||
%% @doc List a chain's authenticators (serialized; direct ETS read).
list_authenticators(ChainName) ->
    case ets:lookup(?CHAINS_TAB, ChainName) of
        [#chain{authenticators = Authenticators}] ->
            {ok, serialize_authenticators(Authenticators)};
        [] ->
            {error, {not_found, {chain, ChainName}}}
    end.
|
||||
|
||||
%% @doc Reposition an authenticator (top | bottom | {before, ID}).
move_authenticator(ChainName, AuthenticatorID, Position) ->
    gen_server:call(?MODULE, {move_authenticator, ChainName, AuthenticatorID, Position}).
|
||||
|
||||
%% @doc Bulk-import users into an authenticator from a file.
import_users(ChainName, AuthenticatorID, Filename) ->
    gen_server:call(?MODULE, {import_users, ChainName, AuthenticatorID, Filename}).
|
||||
|
||||
%% @doc Add a single user to an authenticator's user store.
add_user(ChainName, AuthenticatorID, UserInfo) ->
    gen_server:call(?MODULE, {add_user, ChainName, AuthenticatorID, UserInfo}).
|
||||
|
||||
%% @doc Delete a user from an authenticator's user store.
delete_user(ChainName, AuthenticatorID, UserID) ->
    gen_server:call(?MODULE, {delete_user, ChainName, AuthenticatorID, UserID}).
|
||||
|
||||
%% @doc Update a user's info in an authenticator's user store.
update_user(ChainName, AuthenticatorID, UserID, NewUserInfo) ->
    gen_server:call(?MODULE, {update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}).
|
||||
|
||||
%% @doc Look up a user in an authenticator's user store.
lookup_user(ChainName, AuthenticatorID, UserID) ->
    gen_server:call(?MODULE, {lookup_user, ChainName, AuthenticatorID, UserID}).
|
||||
|
||||
%% @doc List all users of an authenticator.
%% TODO: Support pagination
list_users(ChainName, AuthenticatorID) ->
    gen_server:call(?MODULE, {list_users, ChainName, AuthenticatorID}).
|
||||
|
||||
%% @doc Derive an authenticator's unique ID — <<"Mechanism:Backend">>,
%% or just the mechanism when there is no backend — from either a
%% checked (atom-keyed) or raw (binary-keyed) config map.
generate_id(#{mechanism := Mechanism, backend := Backend}) ->
    <<(atom_to_binary(Mechanism))/binary, ":", (atom_to_binary(Backend))/binary>>;
generate_id(#{mechanism := Mechanism}) ->
    atom_to_binary(Mechanism);
generate_id(#{<<"mechanism">> := Mechanism, <<"backend">> := Backend}) ->
    <<Mechanism/binary, ":", Backend/binary>>;
generate_id(#{<<"mechanism">> := Mechanism}) ->
    Mechanism.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% gen_server callbacks
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
%% gen_server init: create the public chains ETS table (keyed by chain
%% name, tuned for concurrent reads) and register as the config handler
%% for both the global and per-listener authentication paths.
init(_Opts) ->
    TableOpts = [named_table, set, public,
                 {keypos, #chain.name},
                 {read_concurrency, true}],
    _ = ets:new(?CHAINS_TAB, TableOpts),
    ok = emqx_config_handler:add_handler([authentication], ?MODULE),
    ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE),
    {ok, #{hooked => false, providers => #{}}}.
|
||||
|
||||
%% gen_server call dispatcher. Provider registry changes live in the
%% server state; chain/authenticator operations mutate the ?CHAINS_TAB
%% ETS table, serialized through this process.

%% Register a provider module under its authentication type.
handle_call({add_provider, AuthNType, Provider}, _From, #{providers := Providers} = State) ->
    reply(ok, State#{providers := Providers#{AuthNType => Provider}});

handle_call({remove_provider, AuthNType}, _From, #{providers := Providers} = State) ->
    reply(ok, State#{providers := maps:remove(AuthNType, Providers)});

%% Concatenate every registered provider's schema refs.
handle_call(get_refs, _From, #{providers := Providers} = State) ->
    Refs = lists:foldl(fun({_, Provider}, Acc) ->
                           Acc ++ Provider:refs()
                       end, [], maps:to_list(Providers)),
    reply({ok, Refs}, State);

handle_call({create_chain, Name}, _From, State) ->
    case ets:member(?CHAINS_TAB, Name) of
        true ->
            reply({error, {already_exists, {chain, Name}}}, State);
        false ->
            Chain = #chain{name = Name,
                           authenticators = []},
            true = ets:insert(?CHAINS_TAB, Chain),
            reply({ok, serialize_chain(Chain)}, State)
    end;

%% Deleting a chain destroys each authenticator's provider state first;
%% may_unhook removes the authenticate hook when no chain is populated.
handle_call({delete_chain, Name}, _From, State) ->
    case ets:lookup(?CHAINS_TAB, Name) of
        [] ->
            reply({error, {not_found, {chain, Name}}}, State);
        [#chain{authenticators = Authenticators}] ->
            _ = [do_delete_authenticator(Authenticator) || Authenticator <- Authenticators],
            true = ets:delete(?CHAINS_TAB, Name),
            reply(ok, may_unhook(State))
    end;

handle_call({lookup_chain, Name}, _From, State) ->
    case ets:lookup(?CHAINS_TAB, Name) of
        [] ->
            reply({error, {not_found, {chain, Name}}}, State);
        [Chain] ->
            reply({ok, serialize_chain(Chain)}, State)
    end;

%% Append a new authenticator; IDs are unique per chain. may_hook
%% installs the 'client.authenticate' hook on the first authenticator.
handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Providers} = State) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators} = Chain) ->
            AuthenticatorID = generate_id(Config),
            case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of
                true ->
                    {error, {already_exists, {authenticator, AuthenticatorID}}};
                false ->
                    case do_create_authenticator(ChainName, AuthenticatorID, Config, Providers) of
                        {ok, Authenticator} ->
                            NAuthenticators = Authenticators ++ [Authenticator],
                            true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
                            {ok, serialize_authenticator(Authenticator)};
                        {error, Reason} ->
                            {error, Reason}
                    end
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, may_hook(State));

handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators} = Chain) ->
            case lists:keytake(AuthenticatorID, #authenticator.id, Authenticators) of
                false ->
                    {error, {not_found, {authenticator, AuthenticatorID}}};
                {value, Authenticator, NAuthenticators} ->
                    _ = do_delete_authenticator(Authenticator),
                    true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
                    ok
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, may_unhook(State));

%% Update preserves identity: the new config must generate the same ID
%% (i.e. mechanism/backend cannot change).
handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, State) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators} = Chain) ->
            case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
                false ->
                    {error, {not_found, {authenticator, AuthenticatorID}}};
                #authenticator{provider = Provider,
                               state = #{version := Version} = ST} = Authenticator ->
                    case AuthenticatorID =:= generate_id(Config) of
                        true ->
                            Unique = unique(ChainName, AuthenticatorID, Version),
                            case Provider:update(Config#{'_unique' => Unique}, ST) of
                                {ok, NewST} ->
                                    NewAuthenticator = Authenticator#authenticator{state = switch_version(NewST)},
                                    NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
                                    true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NewAuthenticators}),
                                    {ok, serialize_authenticator(NewAuthenticator)};
                                {error, Reason} ->
                                    {error, Reason}
                            end;
                        false ->
                            %% NOTE(review): atom is misspelled ("alloed");
                            %% kept as-is since API callers may match on it.
                            {error, mechanism_or_backend_change_is_not_alloed}
                    end
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, State);

handle_call({move_authenticator, ChainName, AuthenticatorID, Position}, _From, State) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators} = Chain) ->
            case do_move_authenticator(AuthenticatorID, Authenticators, Position) of
                {ok, NAuthenticators} ->
                    true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
                    ok;
                {error, Reason} ->
                    {error, Reason}
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, State);

%% User-management operations are delegated to the provider module,
%% when it exports the corresponding callback (see call_authenticator/4).
handle_call({import_users, ChainName, AuthenticatorID, Filename}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, import_users, [Filename]),
    reply(Reply, State);

handle_call({add_user, ChainName, AuthenticatorID, UserInfo}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, add_user, [UserInfo]),
    reply(Reply, State);

handle_call({delete_user, ChainName, AuthenticatorID, UserID}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, delete_user, [UserID]),
    reply(Reply, State);

handle_call({update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, update_user, [UserID, NewUserInfo]),
    reply(Reply, State);

handle_call({lookup_user, ChainName, AuthenticatorID, UserID}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, lookup_user, [UserID]),
    reply(Reply, State);

handle_call({list_users, ChainName, AuthenticatorID}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, list_users, []),
    reply(Reply, State);

%% Catch-all: log and ignore unknown calls.
handle_call(Req, _From, State) ->
    ?LOG(error, "Unexpected call: ~p", [Req]),
    {reply, ignored, State}.
|
||||
|
||||
%% @doc gen_server cast handler. No casts are part of this module's
%% protocol; anything arriving here is logged and dropped.
handle_cast(Req, State) ->
    %% Fix: message previously read "Unexpected case" — this is the
    %% cast handler, so log it as a cast.
    ?LOG(error, "Unexpected cast: ~p", [Req]),
    {noreply, State}.
|
||||
|
||||
%% @doc No info messages are expected; log and ignore.
handle_info(Info, State) ->
    ?LOG(error, "Unexpected info: ~p", [Info]),
    {noreply, State}.
|
||||
|
||||
%% @doc Deregister both config-handler paths installed in init/1.
terminate(_Reason, _State) ->
    emqx_config_handler:remove_handler([authentication]),
    emqx_config_handler:remove_handler([listeners, '?', '?', authentication]),
    ok.
|
||||
|
||||
%% @doc No state migration needed across code upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
|
||||
|
||||
%% Shorthand for a gen_server {reply, _, _} return tuple.
reply(Reply, State) ->
    {reply, Reply, State}.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
%% Split a config list at the first entry whose generated ID equals ID:
%% {ok, Prefix, [Match | Suffix]}, or not_found when absent.
split_by_id(ID, AuthenticatorsConfig) ->
    NotMatching = fun(Config) -> ID =/= generate_id(Config) end,
    case lists:splitwith(NotMatching, AuthenticatorsConfig) of
        {_, []} ->
            {error, {not_found, {authenticator, ID}}};
        {Prefix, Suffix} ->
            {ok, Prefix, Suffix}
    end.
|
||||
|
||||
%% Map a protocol name to its global (listener-independent) chain name.
%% Unrecognized protocols fall into 'unknown:global'.
global_chain(Protocol) ->
    Chains = #{mqtt      => 'mqtt:global',
               'mqtt-sn' => 'mqtt-sn:global',
               coap      => 'coap:global',
               lwm2m     => 'lwm2m:global',
               stomp     => 'stomp:global'},
    maps:get(Protocol, Chains, 'unknown:global').
|
||||
|
||||
%% Install the 'client.authenticate' hook once at least one chain has
%% authenticators; no-op when already hooked.
may_hook(#{hooked := false} = State) ->
    NonEmpty = fun(#chain{authenticators = []}) -> false;
                  (_) -> true
               end,
    case lists:any(NonEmpty, ets:tab2list(?CHAINS_TAB)) of
        true ->
            _ = emqx:hook('client.authenticate', {emqx_authentication, authenticate, []}),
            State#{hooked => true};
        false ->
            State
    end;
may_hook(State) ->
    State.
|
||||
|
||||
%% Remove the 'client.authenticate' hook once every chain is empty;
%% no-op when not hooked.
may_unhook(#{hooked := true} = State) ->
    Empty = fun(#chain{authenticators = []}) -> true;
               (_) -> false
            end,
    case lists:all(Empty, ets:tab2list(?CHAINS_TAB)) of
        true ->
            _ = emqx:unhook('client.authenticate', {emqx_authentication, authenticate, []}),
            State#{hooked => false};
        false ->
            State
    end;
may_unhook(State) ->
    State.
|
||||
|
||||
%% Instantiate provider state for a new authenticator. Fails with
%% no_available_provider when no provider module is registered for the
%% config's {mechanism, backend} type.
do_create_authenticator(ChainName, AuthenticatorID, #{enable := Enable} = Config, Providers) ->
    case maps:get(authn_type(Config), Providers, undefined) of
        undefined ->
            {error, no_available_provider};
        Provider ->
            Unique = unique(ChainName, AuthenticatorID, ?VER_1),
            case Provider:create(Config#{'_unique' => Unique}) of
                {ok, State} ->
                    {ok, #authenticator{id = AuthenticatorID,
                                        provider = Provider,
                                        enable = Enable,
                                        state = switch_version(State)}};
                {error, _} = Error ->
                    Error
            end
    end.
|
||||
|
||||
%% Tear down an authenticator's provider state; the provider's return
%% value is intentionally ignored (best-effort cleanup).
do_delete_authenticator(#authenticator{provider = Provider, state = State}) ->
    _ = Provider:destroy(State),
    ok.
|
||||
|
||||
%% Replace the element with the given id in an authenticator list.
replace_authenticator(ID, Authenticator, Authenticators) ->
    lists:keyreplace(ID, #authenticator.id, Authenticators, Authenticator).
|
||||
|
||||
%% Reposition an authenticator within the in-memory list.
do_move_authenticator(ID, Authenticators, Position) ->
    case lists:keytake(ID, #authenticator.id, Authenticators) of
        false ->
            {error, {not_found, {authenticator, ID}}};
        {value, Moved, Remaining} ->
            reposition(Moved, Remaining, Position)
    end.

%% Place the removed element according to the requested position.
reposition(Moved, Remaining, top) ->
    {ok, [Moved | Remaining]};
reposition(Moved, Remaining, bottom) ->
    {ok, Remaining ++ [Moved]};
reposition(Moved, Remaining, {before, AnchorID}) ->
    insert(Moved, Remaining, AnchorID, []).
|
||||
|
||||
%% Insert Authenticator immediately before the element whose id matches
%% AnchorID, accumulating the scanned prefix in reverse order.
insert(_, [], AnchorID, _Acc) ->
    {error, {not_found, {authenticator, AnchorID}}};
insert(Authenticator, [#authenticator{id = AnchorID} | _] = Rest, AnchorID, Acc) ->
    {ok, lists:reverse(Acc) ++ [Authenticator | Rest]};
insert(Authenticator, [Head | Tail], AnchorID, Acc) ->
    insert(Authenticator, Tail, AnchorID, [Head | Acc]).
|
||||
|
||||
%% Fetch the chain record and apply UpdateFun to it; the fun performs
%% any ETS writes itself and returns the reply term.
update_chain(ChainName, UpdateFun) ->
    case ets:lookup(?CHAINS_TAB, ChainName) of
        [Chain] -> UpdateFun(Chain);
        [] -> {error, {not_found, {chain, ChainName}}}
    end.
|
||||
|
||||
%% Invoke Func on the authenticator's provider module with the provider
%% state appended to Args, if the provider exports that callback.
call_authenticator(ChainName, AuthenticatorID, Func, Args) ->
    update_chain(ChainName,
        fun(#chain{authenticators = Authenticators}) ->
            case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
                false ->
                    {error, {not_found, {authenticator, AuthenticatorID}}};
                #authenticator{provider = Provider, state = State} ->
                    case erlang:function_exported(Provider, Func, length(Args) + 1) of
                        true ->
                            erlang:apply(Provider, Func, Args ++ [State]);
                        false ->
                            {error, unsupported_feature}
                    end
            end
        end).
|
||||
|
||||
%% Chain record -> external map representation.
serialize_chain(#chain{name = Name, authenticators = Authenticators}) ->
    #{name => Name,
      authenticators => serialize_authenticators(Authenticators)}.
|
||||
|
||||
%% Serialize each authenticator record in the list.
serialize_authenticators(Authenticators) ->
    lists:map(fun serialize_authenticator/1, Authenticators).
|
||||
|
||||
%% Authenticator record -> external map representation.
serialize_authenticator(#authenticator{id = ID,
                                       provider = Provider,
                                       enable = Enable,
                                       state = State}) ->
    #{id => ID, provider => Provider, enable => Enable, state => State}.
|
||||
|
||||
%% Build the globally unique tag "<Chain>/<AuthenticatorID>:<Version>"
%% handed to providers as '_unique'.
unique(ChainName, AuthenticatorID, Version) ->
    iolist_to_binary([atom_to_binary(ChainName), "/", AuthenticatorID, ":", Version]).
|
||||
|
||||
%% Toggle the provider-state version tag between ?VER_1 and ?VER_2
%% (used by unique/3 so updated state gets a fresh tag); untagged
%% state starts at ?VER_1.
switch_version(#{version := ?VER_1} = State) -> State#{version := ?VER_2};
switch_version(#{version := ?VER_2} = State) -> State#{version := ?VER_1};
switch_version(State) -> State#{version => ?VER_1}.
|
||||
|
||||
%% Provider-registry key for a checked config: {Mechanism, Backend},
%% or just Mechanism when there is no backend.
authn_type(#{mechanism := Mechanism, backend := Backend}) -> {Mechanism, Backend};
authn_type(#{mechanism := Mechanism}) -> Mechanism.
|
||||
|
||||
%% A single-element authenticator list is stored back in the config as
%% the bare map; anything else is kept unchanged.
may_to_map([Single]) -> Single;
may_to_map(Other) -> Other.
|
||||
|
||||
%% Normalize a config value to a list of maps: undefined and the empty
%% map become []; a non-empty map is wrapped; lists pass through.
to_list(undefined) -> [];
to_list(M) when is_map(M), map_size(M) =:= 0 -> [];
to_list(M) when is_map(M) -> [M];
to_list(L) when is_list(L) -> L.
|
|
@ -18,15 +18,15 @@
|
|||
|
||||
-include("emqx.hrl").
|
||||
|
||||
-export([ list_authz_cache/1
|
||||
, get_authz_cache/3
|
||||
, put_authz_cache/4
|
||||
, cleanup_authz_cache/1
|
||||
-export([ list_authz_cache/0
|
||||
, get_authz_cache/2
|
||||
, put_authz_cache/3
|
||||
, cleanup_authz_cache/0
|
||||
, empty_authz_cache/0
|
||||
, dump_authz_cache/0
|
||||
, get_cache_max_size/1
|
||||
, get_cache_ttl/1
|
||||
, is_enabled/1
|
||||
, get_cache_max_size/0
|
||||
, get_cache_ttl/0
|
||||
, is_enabled/0
|
||||
, drain_cache/0
|
||||
]).
|
||||
|
||||
|
@ -50,45 +50,45 @@ cache_k(PubSub, Topic)-> {PubSub, Topic}.
|
|||
cache_v(AuthzResult)-> {AuthzResult, time_now()}.
|
||||
drain_k() -> {?MODULE, drain_timestamp}.
|
||||
|
||||
-spec(is_enabled(atom()) -> boolean()).
|
||||
is_enabled(Zone) ->
|
||||
emqx_config:get_zone_conf(Zone, [authorization, cache, enable]).
|
||||
-spec(is_enabled() -> boolean()).
|
||||
is_enabled() ->
|
||||
emqx:get_config([authorization, cache, enable], false).
|
||||
|
||||
-spec(get_cache_max_size(atom()) -> integer()).
|
||||
get_cache_max_size(Zone) ->
|
||||
emqx_config:get_zone_conf(Zone, [authorization, cache, max_size]).
|
||||
-spec(get_cache_max_size() -> integer()).
|
||||
get_cache_max_size() ->
|
||||
emqx:get_config([authorization, cache, max_size]).
|
||||
|
||||
-spec(get_cache_ttl(atom()) -> integer()).
|
||||
get_cache_ttl(Zone) ->
|
||||
emqx_config:get_zone_conf(Zone, [authorization, cache, ttl]).
|
||||
-spec(get_cache_ttl() -> integer()).
|
||||
get_cache_ttl() ->
|
||||
emqx:get_config([authorization, cache, ttl]).
|
||||
|
||||
-spec(list_authz_cache(atom()) -> [authz_cache_entry()]).
|
||||
list_authz_cache(Zone) ->
|
||||
cleanup_authz_cache(Zone),
|
||||
-spec(list_authz_cache() -> [authz_cache_entry()]).
|
||||
list_authz_cache() ->
|
||||
cleanup_authz_cache(),
|
||||
map_authz_cache(fun(Cache) -> Cache end).
|
||||
|
||||
%% We'll cleanup the cache before replacing an expired authz.
|
||||
-spec get_authz_cache(atom(), emqx_types:pubsub(), emqx_topic:topic()) ->
|
||||
-spec get_authz_cache(emqx_types:pubsub(), emqx_topic:topic()) ->
|
||||
authz_result() | not_found.
|
||||
get_authz_cache(Zone, PubSub, Topic) ->
|
||||
get_authz_cache(PubSub, Topic) ->
|
||||
case erlang:get(cache_k(PubSub, Topic)) of
|
||||
undefined -> not_found;
|
||||
{AuthzResult, CachedAt} ->
|
||||
if_expired(get_cache_ttl(Zone), CachedAt,
|
||||
if_expired(get_cache_ttl(), CachedAt,
|
||||
fun(false) ->
|
||||
AuthzResult;
|
||||
(true) ->
|
||||
cleanup_authz_cache(Zone),
|
||||
cleanup_authz_cache(),
|
||||
not_found
|
||||
end)
|
||||
end.
|
||||
|
||||
%% If the cache get full, and also the latest one
|
||||
%% is expired, then delete all the cache entries
|
||||
-spec put_authz_cache(atom(), emqx_types:pubsub(), emqx_topic:topic(), authz_result())
|
||||
-spec put_authz_cache(emqx_types:pubsub(), emqx_topic:topic(), authz_result())
|
||||
-> ok.
|
||||
put_authz_cache(Zone, PubSub, Topic, AuthzResult) ->
|
||||
MaxSize = get_cache_max_size(Zone), true = (MaxSize =/= 0),
|
||||
put_authz_cache(PubSub, Topic, AuthzResult) ->
|
||||
MaxSize = get_cache_max_size(), true = (MaxSize =/= 0),
|
||||
Size = get_cache_size(),
|
||||
case Size < MaxSize of
|
||||
true ->
|
||||
|
@ -96,7 +96,7 @@ put_authz_cache(Zone, PubSub, Topic, AuthzResult) ->
|
|||
false ->
|
||||
NewestK = get_newest_key(),
|
||||
{_AuthzResult, CachedAt} = erlang:get(NewestK),
|
||||
if_expired(get_cache_ttl(Zone), CachedAt,
|
||||
if_expired(get_cache_ttl(), CachedAt,
|
||||
fun(true) ->
|
||||
% all cache expired, cleanup first
|
||||
empty_authz_cache(),
|
||||
|
@ -123,10 +123,10 @@ evict_authz_cache() ->
|
|||
decr_cache_size().
|
||||
|
||||
%% cleanup all the expired cache entries
|
||||
-spec(cleanup_authz_cache(atom()) -> ok).
|
||||
cleanup_authz_cache(Zone) ->
|
||||
-spec(cleanup_authz_cache() -> ok).
|
||||
cleanup_authz_cache() ->
|
||||
keys_queue_set(
|
||||
cleanup_authz(get_cache_ttl(Zone), keys_queue_get())).
|
||||
cleanup_authz(get_cache_ttl(), keys_queue_get())).
|
||||
|
||||
get_oldest_key() ->
|
||||
keys_queue_pick(queue_front()).
|
||||
|
@ -143,8 +143,8 @@ dump_authz_cache() ->
|
|||
map_authz_cache(fun(Cache) -> Cache end).
|
||||
|
||||
map_authz_cache(Fun) ->
|
||||
[Fun(R) || R = {{SubPub, _T}, _Authz} <- get(), SubPub =:= publish
|
||||
orelse SubPub =:= subscribe].
|
||||
[Fun(R) || R = {{SubPub, _T}, _Authz} <- erlang:get(),
|
||||
SubPub =:= publish orelse SubPub =:= subscribe].
|
||||
foreach_authz_cache(Fun) ->
|
||||
_ = map_authz_cache(Fun),
|
||||
ok.
|
||||
|
|
|
@ -33,8 +33,11 @@
|
|||
|
||||
-export([ check/1
|
||||
, create/1
|
||||
, look_up/1
|
||||
, delete/1
|
||||
, info/1
|
||||
, format/1
|
||||
, parse/1
|
||||
]).
|
||||
|
||||
%% gen_server callbacks
|
||||
|
@ -50,8 +53,6 @@
|
|||
|
||||
-define(BANNED_TAB, ?MODULE).
|
||||
|
||||
-rlog_shard({?COMMON_SHARD, ?BANNED_TAB}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Mnesia bootstrap
|
||||
%%--------------------------------------------------------------------
|
||||
|
@ -59,6 +60,7 @@
|
|||
mnesia(boot) ->
|
||||
ok = ekka_mnesia:create_table(?BANNED_TAB, [
|
||||
{type, set},
|
||||
{rlog_shard, ?COMMON_SHARD},
|
||||
{disc_copies, [node()]},
|
||||
{record_name, banned},
|
||||
{attributes, record_info(fields, banned)},
|
||||
|
@ -91,7 +93,63 @@ do_check(Who) when is_tuple(Who) ->
|
|||
Until > erlang:system_time(second)
|
||||
end.
|
||||
|
||||
-spec(create(emqx_types:banned()) -> ok).
|
||||
format(#banned{who = Who0,
|
||||
by = By,
|
||||
reason = Reason,
|
||||
at = At,
|
||||
until = Until}) ->
|
||||
{As, Who} = maybe_format_host(Who0),
|
||||
#{
|
||||
as => As,
|
||||
who => Who,
|
||||
by => By,
|
||||
reason => Reason,
|
||||
at => to_rfc3339(At),
|
||||
until => to_rfc3339(Until)
|
||||
}.
|
||||
|
||||
parse(Params) ->
|
||||
Who = pares_who(Params),
|
||||
By = maps:get(<<"by">>, Params, <<"mgmt_api">>),
|
||||
Reason = maps:get(<<"reason">>, Params, <<"">>),
|
||||
At = pares_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)),
|
||||
Until = pares_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60),
|
||||
#banned{
|
||||
who = Who,
|
||||
by = By,
|
||||
reason = Reason,
|
||||
at = At,
|
||||
until = Until
|
||||
}.
|
||||
|
||||
pares_who(#{as := As, who := Who}) ->
|
||||
pares_who(#{<<"as">> => As, <<"who">> => Who});
|
||||
pares_who(#{<<"as">> := <<"peerhost">>, <<"who">> := Peerhost0}) ->
|
||||
{ok, Peerhost} = inet:parse_address(binary_to_list(Peerhost0)),
|
||||
{peerhost, Peerhost};
|
||||
pares_who(#{<<"as">> := As, <<"who">> := Who}) ->
|
||||
{binary_to_atom(As, utf8), Who}.
|
||||
|
||||
pares_time(undefined, Default) ->
|
||||
Default;
|
||||
pares_time(Rfc3339, _Default) ->
|
||||
to_timestamp(Rfc3339).
|
||||
|
||||
maybe_format_host({peerhost, Host}) ->
|
||||
AddrBinary = list_to_binary(inet:ntoa(Host)),
|
||||
{peerhost, AddrBinary};
|
||||
maybe_format_host({As, Who}) ->
|
||||
{As, Who}.
|
||||
|
||||
to_rfc3339(Timestamp) ->
|
||||
list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, second}])).
|
||||
|
||||
to_timestamp(Rfc3339) when is_binary(Rfc3339) ->
|
||||
to_timestamp(binary_to_list(Rfc3339));
|
||||
to_timestamp(Rfc3339) ->
|
||||
calendar:rfc3339_to_system_time(Rfc3339, [{unit, second}]).
|
||||
|
||||
-spec(create(emqx_types:banned() | map()) -> ok).
|
||||
create(#{who := Who,
|
||||
by := By,
|
||||
reason := Reason,
|
||||
|
@ -105,9 +163,16 @@ create(#{who := Who,
|
|||
create(Banned) when is_record(Banned, banned) ->
|
||||
ekka_mnesia:dirty_write(?BANNED_TAB, Banned).
|
||||
|
||||
look_up(Who) when is_map(Who) ->
|
||||
look_up(pares_who(Who));
|
||||
look_up(Who) ->
|
||||
mnesia:dirty_read(?BANNED_TAB, Who).
|
||||
|
||||
-spec(delete({clientid, emqx_types:clientid()}
|
||||
| {username, emqx_types:username()}
|
||||
| {peerhost, emqx_types:peerhost()}) -> ok).
|
||||
delete(Who) when is_map(Who)->
|
||||
delete(pares_who(Who));
|
||||
delete(Who) ->
|
||||
ekka_mnesia:dirty_delete(?BANNED_TAB, Who).
|
||||
|
||||
|
|
|
@ -242,7 +242,7 @@ route(Routes, Delivery) ->
|
|||
do_route({To, Node}, Delivery) when Node =:= node() ->
|
||||
{Node, To, dispatch(To, Delivery)};
|
||||
do_route({To, Node}, Delivery) when is_atom(Node) ->
|
||||
{Node, To, forward(Node, To, Delivery, emqx_config:get([rpc, mode]))};
|
||||
{Node, To, forward(Node, To, Delivery, emqx:get_config([rpc, mode]))};
|
||||
do_route({To, Group}, Delivery) when is_tuple(Group); is_binary(Group) ->
|
||||
{share, To, emqx_shared_sub:dispatch(Group, To, Delivery)}.
|
||||
|
||||
|
|
|
@ -43,6 +43,14 @@ init([]) ->
|
|||
type => worker,
|
||||
modules => [emqx_shared_sub]},
|
||||
|
||||
%% Authentication
|
||||
AuthN = #{id => authn,
|
||||
start => {emqx_authentication, start_link, []},
|
||||
restart => permanent,
|
||||
shutdown => 2000,
|
||||
type => worker,
|
||||
modules => [emqx_authentication]},
|
||||
|
||||
%% Broker helper
|
||||
Helper = #{id => helper,
|
||||
start => {emqx_broker_helper, start_link, []},
|
||||
|
@ -51,5 +59,5 @@ init([]) ->
|
|||
type => worker,
|
||||
modules => [emqx_broker_helper]},
|
||||
|
||||
{ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, Helper]}}.
|
||||
{ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, AuthN, Helper]}}.
|
||||
|
||||
|
|
|
@ -99,7 +99,7 @@
|
|||
|
||||
-type(channel() :: #channel{}).
|
||||
|
||||
-type(opts() :: #{zone := atom(), listener := atom(), atom() => term()}).
|
||||
-type(opts() :: #{zone := atom(), listener := {Type::atom(), Name::atom()}, atom() => term()}).
|
||||
|
||||
-type(conn_state() :: idle | connecting | connected | reauthenticating | disconnected).
|
||||
|
||||
|
@ -202,18 +202,19 @@ caps(#channel{clientinfo = #{zone := Zone}}) ->
|
|||
|
||||
-spec(init(emqx_types:conninfo(), opts()) -> channel()).
|
||||
init(ConnInfo = #{peername := {PeerHost, _Port},
|
||||
sockname := {_Host, SockPort}}, #{zone := Zone, listener := Listener}) ->
|
||||
sockname := {_Host, SockPort}},
|
||||
#{zone := Zone, listener := {Type, Listener}}) ->
|
||||
Peercert = maps:get(peercert, ConnInfo, undefined),
|
||||
Protocol = maps:get(protocol, ConnInfo, mqtt),
|
||||
MountPoint = case get_mqtt_conf(Zone, mountpoint) of
|
||||
MountPoint = case emqx_config:get_listener_conf(Type, Listener, [mountpoint]) of
|
||||
<<>> -> undefined;
|
||||
MP -> MP
|
||||
end,
|
||||
QuotaPolicy = emqx_config:get_listener_conf(Zone, Listener,[rate_limit, quota], []),
|
||||
QuotaPolicy = emqx_config:get_zone_conf(Zone, [quota], #{}),
|
||||
ClientInfo = set_peercert_infos(
|
||||
Peercert,
|
||||
#{zone => Zone,
|
||||
listener => Listener,
|
||||
listener => emqx_listeners:listener_id(Type, Listener),
|
||||
protocol => Protocol,
|
||||
peerhost => PeerHost,
|
||||
sockport => SockPort,
|
||||
|
@ -222,7 +223,7 @@ init(ConnInfo = #{peername := {PeerHost, _Port},
|
|||
mountpoint => MountPoint,
|
||||
is_bridge => false,
|
||||
is_superuser => false
|
||||
}, Zone, Listener),
|
||||
}, Zone),
|
||||
{NClientInfo, NConnInfo} = take_ws_cookie(ClientInfo, ConnInfo),
|
||||
#channel{conninfo = NConnInfo,
|
||||
clientinfo = NClientInfo,
|
||||
|
@ -243,12 +244,12 @@ quota_policy(RawPolicy) ->
|
|||
erlang:trunc(hocon_postprocess:duration(StrWind) / 1000)}}
|
||||
|| {Name, [StrCount, StrWind]} <- maps:to_list(RawPolicy)].
|
||||
|
||||
set_peercert_infos(NoSSL, ClientInfo, _, _)
|
||||
set_peercert_infos(NoSSL, ClientInfo, _)
|
||||
when NoSSL =:= nossl;
|
||||
NoSSL =:= undefined ->
|
||||
ClientInfo#{username => undefined};
|
||||
|
||||
set_peercert_infos(Peercert, ClientInfo, Zone, _Listener) ->
|
||||
set_peercert_infos(Peercert, ClientInfo, Zone) ->
|
||||
{DN, CN} = {esockd_peercert:subject(Peercert),
|
||||
esockd_peercert:common_name(Peercert)},
|
||||
PeercetAs = fun(Key) ->
|
||||
|
@ -425,7 +426,7 @@ handle_in(?PUBCOMP_PACKET(PacketId, _ReasonCode), Channel = #channel{session = S
|
|||
end;
|
||||
|
||||
handle_in(Packet = ?SUBSCRIBE_PACKET(PacketId, Properties, TopicFilters),
|
||||
Channel = #channel{clientinfo = ClientInfo = #{zone := Zone}}) ->
|
||||
Channel = #channel{clientinfo = ClientInfo}) ->
|
||||
case emqx_packet:check(Packet) of
|
||||
ok ->
|
||||
TopicFilters0 = parse_topic_filters(TopicFilters),
|
||||
|
@ -434,7 +435,7 @@ handle_in(Packet = ?SUBSCRIBE_PACKET(PacketId, Properties, TopicFilters),
|
|||
HasAuthzDeny = lists:any(fun({_TopicFilter, ReasonCode}) ->
|
||||
ReasonCode =:= ?RC_NOT_AUTHORIZED
|
||||
end, TupleTopicFilters0),
|
||||
DenyAction = emqx_config:get_zone_conf(Zone, [authorization, deny_action]),
|
||||
DenyAction = emqx:get_config([authorization, deny_action], ignore),
|
||||
case DenyAction =:= disconnect andalso HasAuthzDeny of
|
||||
true -> handle_out(disconnect, ?RC_NOT_AUTHORIZED, Channel);
|
||||
false ->
|
||||
|
@ -536,8 +537,7 @@ process_connect(AckProps, Channel = #channel{conninfo = ConnInfo,
|
|||
%% Process Publish
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId),
|
||||
Channel = #channel{clientinfo = #{zone := Zone}}) ->
|
||||
process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) ->
|
||||
case pipeline([fun check_quota_exceeded/2,
|
||||
fun process_alias/2,
|
||||
fun check_pub_alias/2,
|
||||
|
@ -550,7 +550,7 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId),
|
|||
{error, Rc = ?RC_NOT_AUTHORIZED, NChannel} ->
|
||||
?LOG(warning, "Cannot publish message to ~s due to ~s.",
|
||||
[Topic, emqx_reason_codes:text(Rc)]),
|
||||
case emqx_config:get_zone_conf(Zone, [authorization, deny_action]) of
|
||||
case emqx:get_config([authorization, deny_action], ignore) of
|
||||
ignore ->
|
||||
case QoS of
|
||||
?QOS_0 -> {ok, NChannel};
|
||||
|
@ -955,9 +955,8 @@ handle_call({takeover, 'end'}, Channel = #channel{session = Session,
|
|||
AllPendings = lists:append(Delivers, Pendings),
|
||||
disconnect_and_shutdown(takeovered, AllPendings, Channel);
|
||||
|
||||
handle_call(list_authz_cache, #channel{clientinfo = #{zone := Zone}}
|
||||
= Channel) ->
|
||||
{reply, emqx_authz_cache:list_authz_cache(Zone), Channel};
|
||||
handle_call(list_authz_cache, Channel) ->
|
||||
{reply, emqx_authz_cache:list_authz_cache(), Channel};
|
||||
|
||||
handle_call({quota, Policy}, Channel) ->
|
||||
Zone = info(zone, Channel),
|
||||
|
@ -1299,14 +1298,17 @@ authenticate(?AUTH_PACKET(_, #{'Authentication-Method' := AuthMethod} = Properti
|
|||
{error, ?RC_BAD_AUTHENTICATION_METHOD}
|
||||
end.
|
||||
|
||||
do_authenticate(#{auth_method := AuthMethod} = Credential, Channel) ->
|
||||
do_authenticate(#{auth_method := AuthMethod} = Credential, #channel{clientinfo = ClientInfo} = Channel) ->
|
||||
Properties = #{'Authentication-Method' => AuthMethod},
|
||||
case emqx_access_control:authenticate(Credential) of
|
||||
ok ->
|
||||
{ok, Properties, Channel#channel{auth_cache = #{}}};
|
||||
{ok, AuthData} ->
|
||||
{ok, Result} ->
|
||||
{ok, Properties,
|
||||
Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
|
||||
auth_cache = #{}}};
|
||||
{ok, Result, AuthData} ->
|
||||
{ok, Properties#{'Authentication-Data' => AuthData},
|
||||
Channel#channel{auth_cache = #{}}};
|
||||
Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
|
||||
auth_cache = #{}}};
|
||||
{continue, AuthCache} ->
|
||||
{continue, Properties, Channel#channel{auth_cache = AuthCache}};
|
||||
{continue, AuthData, AuthCache} ->
|
||||
|
@ -1316,10 +1318,10 @@ do_authenticate(#{auth_method := AuthMethod} = Credential, Channel) ->
|
|||
{error, emqx_reason_codes:connack_error(Reason)}
|
||||
end;
|
||||
|
||||
do_authenticate(Credential, Channel) ->
|
||||
do_authenticate(Credential, #channel{clientinfo = ClientInfo} = Channel) ->
|
||||
case emqx_access_control:authenticate(Credential) of
|
||||
ok ->
|
||||
{ok, #{}, Channel};
|
||||
{ok, #{is_superuser := IsSuperuser}} ->
|
||||
{ok, #{}, Channel#channel{clientinfo = ClientInfo#{is_superuser => IsSuperuser}}};
|
||||
{error, Reason} ->
|
||||
{error, emqx_reason_codes:connack_error(Reason)}
|
||||
end.
|
||||
|
@ -1417,9 +1419,7 @@ check_pub_alias(_Packet, _Channel) -> ok.
|
|||
|
||||
check_pub_authz(#mqtt_packet{variable = #mqtt_packet_publish{topic_name = Topic}},
|
||||
#channel{clientinfo = ClientInfo}) ->
|
||||
case is_authz_enabled(ClientInfo) andalso
|
||||
emqx_access_control:authorize(ClientInfo, publish, Topic) of
|
||||
false -> ok;
|
||||
case emqx_access_control:authorize(ClientInfo, publish, Topic) of
|
||||
allow -> ok;
|
||||
deny -> {error, ?RC_NOT_AUTHORIZED}
|
||||
end.
|
||||
|
@ -1440,8 +1440,10 @@ check_pub_caps(#mqtt_packet{header = #mqtt_packet_header{qos = QoS,
|
|||
check_sub_authzs(TopicFilters, Channel) ->
|
||||
check_sub_authzs(TopicFilters, Channel, []).
|
||||
|
||||
check_sub_authzs([ TopicFilter = {Topic, _} | More] , Channel, Acc) ->
|
||||
case check_sub_authz(Topic, Channel) of
|
||||
check_sub_authzs([ TopicFilter = {Topic, _} | More],
|
||||
Channel = #channel{clientinfo = ClientInfo},
|
||||
Acc) ->
|
||||
case emqx_access_control:authorize(ClientInfo, subscribe, Topic) of
|
||||
allow ->
|
||||
check_sub_authzs(More, Channel, [ {TopicFilter, 0} | Acc]);
|
||||
deny ->
|
||||
|
@ -1450,13 +1452,6 @@ check_sub_authzs([ TopicFilter = {Topic, _} | More] , Channel, Acc) ->
|
|||
check_sub_authzs([], _Channel, Acc) ->
|
||||
lists:reverse(Acc).
|
||||
|
||||
check_sub_authz(TopicFilter, #channel{clientinfo = ClientInfo}) ->
|
||||
case is_authz_enabled(ClientInfo) andalso
|
||||
emqx_access_control:authorize(ClientInfo, subscribe, TopicFilter) of
|
||||
false -> allow;
|
||||
Result -> Result
|
||||
end.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Check Sub Caps
|
||||
|
||||
|
@ -1618,11 +1613,6 @@ maybe_shutdown(Reason, Channel = #channel{conninfo = ConnInfo}) ->
|
|||
_ -> shutdown(Reason, Channel)
|
||||
end.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Is Authorization enabled?
|
||||
is_authz_enabled(#{zone := Zone, is_superuser := IsSuperuser}) ->
|
||||
(not IsSuperuser) andalso emqx_config:get_zone_conf(Zone, [authorization, enable]).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Parse Topic Filters
|
||||
|
||||
|
|
|
@ -62,5 +62,5 @@ unlock(ClientId) ->
|
|||
|
||||
-spec(strategy() -> local | leader | quorum | all).
|
||||
strategy() ->
|
||||
emqx_config:get([broker, session_locking_strategy]).
|
||||
emqx:get_config([broker, session_locking_strategy]).
|
||||
|
||||
|
|
|
@ -47,10 +47,6 @@
|
|||
-define(TAB, emqx_channel_registry).
|
||||
-define(LOCK, {?MODULE, cleanup_down}).
|
||||
|
||||
-define(CM_SHARD, emqx_cm_shard).
|
||||
|
||||
-rlog_shard({?CM_SHARD, ?TAB}).
|
||||
|
||||
-record(channel, {chid, pid}).
|
||||
|
||||
%% @doc Start the global channel registry.
|
||||
|
@ -65,7 +61,7 @@ start_link() ->
|
|||
%% @doc Is the global registry enabled?
|
||||
-spec(is_enabled() -> boolean()).
|
||||
is_enabled() ->
|
||||
emqx_config:get([broker, enable_session_registry]).
|
||||
emqx:get_config([broker, enable_session_registry]).
|
||||
|
||||
%% @doc Register a global channel.
|
||||
-spec(register_channel(emqx_types:clientid()
|
||||
|
@ -106,6 +102,7 @@ record(ClientId, ChanPid) ->
|
|||
init([]) ->
|
||||
ok = ekka_mnesia:create_table(?TAB, [
|
||||
{type, bag},
|
||||
{rlog_shard, ?CM_SHARD},
|
||||
{ram_copies, [node()]},
|
||||
{record_name, channel},
|
||||
{attributes, record_info(fields, channel)},
|
||||
|
|
|
@ -22,49 +22,38 @@
|
|||
|
||||
-export([init/1]).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% API
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
start_link() ->
|
||||
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Supervisor callbacks
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
init([]) ->
|
||||
Banned = #{id => banned,
|
||||
start => {emqx_banned, start_link, []},
|
||||
restart => permanent,
|
||||
shutdown => 1000,
|
||||
type => worker,
|
||||
modules => [emqx_banned]},
|
||||
Flapping = #{id => flapping,
|
||||
start => {emqx_flapping, start_link, []},
|
||||
restart => permanent,
|
||||
shutdown => 1000,
|
||||
type => worker,
|
||||
modules => [emqx_flapping]},
|
||||
%% Channel locker
|
||||
Locker = #{id => locker,
|
||||
start => {emqx_cm_locker, start_link, []},
|
||||
restart => permanent,
|
||||
shutdown => 5000,
|
||||
type => worker,
|
||||
modules => [emqx_cm_locker]
|
||||
},
|
||||
%% Channel registry
|
||||
Registry = #{id => registry,
|
||||
start => {emqx_cm_registry, start_link, []},
|
||||
restart => permanent,
|
||||
shutdown => 5000,
|
||||
type => worker,
|
||||
modules => [emqx_cm_registry]
|
||||
},
|
||||
%% Channel Manager
|
||||
Manager = #{id => manager,
|
||||
start => {emqx_cm, start_link, []},
|
||||
restart => permanent,
|
||||
shutdown => 5000,
|
||||
type => worker,
|
||||
modules => [emqx_cm]
|
||||
},
|
||||
SupFlags = #{strategy => one_for_one,
|
||||
intensity => 100,
|
||||
period => 10
|
||||
},
|
||||
Banned = child_spec(emqx_banned, 1000, worker),
|
||||
Flapping = child_spec(emqx_flapping, 1000, worker),
|
||||
Locker = child_spec(emqx_cm_locker, 5000, worker),
|
||||
Registry = child_spec(emqx_cm_registry, 5000, worker),
|
||||
Manager = child_spec(emqx_cm, 5000, worker),
|
||||
{ok, {SupFlags, [Banned, Flapping, Locker, Registry, Manager]}}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
child_spec(Mod, Shutdown, Type) ->
|
||||
#{id => Mod,
|
||||
start => {Mod, start_link, []},
|
||||
restart => permanent,
|
||||
shutdown => Shutdown,
|
||||
type => Type,
|
||||
modules => [Mod]
|
||||
}.
|
||||
|
|
|
@ -20,14 +20,20 @@
|
|||
-export([ init_load/2
|
||||
, read_override_conf/0
|
||||
, check_config/2
|
||||
, fill_defaults/1
|
||||
, fill_defaults/2
|
||||
, save_configs/4
|
||||
, save_to_app_env/1
|
||||
, save_to_config_map/2
|
||||
, save_to_override_conf/1
|
||||
]).
|
||||
|
||||
-export([get_root/1,
|
||||
get_root_raw/1]).
|
||||
-export([ get_root/1
|
||||
, get_root_raw/1
|
||||
]).
|
||||
|
||||
-export([ get_default_value/1
|
||||
]).
|
||||
|
||||
-export([ get/1
|
||||
, get/2
|
||||
|
@ -37,10 +43,21 @@
|
|||
, put/2
|
||||
]).
|
||||
|
||||
-export([ get_raw/1
|
||||
, get_raw/2
|
||||
, put_raw/1
|
||||
, put_raw/2
|
||||
]).
|
||||
|
||||
-export([ save_schema_mod_and_names/1
|
||||
, get_schema_mod/0
|
||||
, get_schema_mod/1
|
||||
, get_root_names/0
|
||||
]).
|
||||
|
||||
-export([ get_zone_conf/2
|
||||
, get_zone_conf/3
|
||||
, put_zone_conf/3
|
||||
, find_zone_conf/2
|
||||
]).
|
||||
|
||||
-export([ get_listener_conf/3
|
||||
|
@ -49,23 +66,12 @@
|
|||
, find_listener_conf/3
|
||||
]).
|
||||
|
||||
-export([ update/2
|
||||
, update/3
|
||||
, remove/1
|
||||
, remove/2
|
||||
]).
|
||||
|
||||
-export([ get_raw/1
|
||||
, get_raw/2
|
||||
, put_raw/1
|
||||
, put_raw/2
|
||||
]).
|
||||
|
||||
-define(CONF, conf).
|
||||
-define(RAW_CONF, raw_conf).
|
||||
-define(PERSIS_SCHEMA_MODS, {?MODULE, schema_mods}).
|
||||
-define(PERSIS_KEY(TYPE, ROOT), {?MODULE, TYPE, ROOT}).
|
||||
-define(ZONE_CONF_PATH(ZONE, PATH), [zones, ZONE | PATH]).
|
||||
-define(LISTENER_CONF_PATH(ZONE, LISTENER, PATH), [zones, ZONE, listeners, LISTENER | PATH]).
|
||||
-define(LISTENER_CONF_PATH(TYPE, LISTENER, PATH), [listeners, TYPE, LISTENER | PATH]).
|
||||
|
||||
-define(ATOM_CONF_PATH(PATH, EXP, EXP_ON_FAIL),
|
||||
try [atom(Key) || Key <- PATH] of
|
||||
|
@ -74,12 +80,35 @@
|
|||
error:badarg -> EXP_ON_FAIL
|
||||
end).
|
||||
|
||||
-export_type([update_request/0, raw_config/0, config/0]).
|
||||
-export_type([update_request/0, raw_config/0, config/0, app_envs/0,
|
||||
update_opts/0, update_cmd/0, update_args/0,
|
||||
update_error/0, update_result/0]).
|
||||
|
||||
-type update_request() :: term().
|
||||
-type update_cmd() :: {update, update_request()} | remove.
|
||||
-type update_opts() :: #{
|
||||
%% rawconf_with_defaults:
|
||||
%% fill the default values into the `raw_config` field of the return value
|
||||
%% defaults to `false`
|
||||
rawconf_with_defaults => boolean(),
|
||||
%% persistent:
|
||||
%% save the updated config to the emqx_override.conf file
|
||||
%% defaults to `true`
|
||||
persistent => boolean()
|
||||
}.
|
||||
-type update_args() :: {update_cmd(), Opts :: update_opts()}.
|
||||
-type update_stage() :: pre_config_update | post_config_update.
|
||||
-type update_error() :: {update_stage(), module(), term()} | {save_configs, term()} | term().
|
||||
-type update_result() :: #{
|
||||
config => emqx_config:config(),
|
||||
raw_config => emqx_config:raw_config(),
|
||||
post_config_update => #{module() => any()}
|
||||
}.
|
||||
|
||||
%% raw_config() is the config that is NOT parsed and tranlated by hocon schema
|
||||
-type raw_config() :: #{binary() => term()} | undefined.
|
||||
-type raw_config() :: #{binary() => term()} | list() | undefined.
|
||||
%% config() is the config that is parsed and tranlated by hocon schema
|
||||
-type config() :: #{atom() => term()} | undefined.
|
||||
-type config() :: #{atom() => term()} | list() | undefined.
|
||||
-type app_envs() :: [proplists:property()].
|
||||
|
||||
%% @doc For the given path, get root value enclosed in a single-key map.
|
||||
|
@ -127,63 +156,66 @@ find_raw(KeyPath) ->
|
|||
|
||||
-spec get_zone_conf(atom(), emqx_map_lib:config_key_path()) -> term().
|
||||
get_zone_conf(Zone, KeyPath) ->
|
||||
?MODULE:get(?ZONE_CONF_PATH(Zone, KeyPath)).
|
||||
case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
|
||||
{not_found, _, _} -> %% not found in zones, try to find the global config
|
||||
?MODULE:get(KeyPath);
|
||||
{ok, Value} -> Value
|
||||
end.
|
||||
|
||||
-spec get_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> term().
|
||||
get_zone_conf(Zone, KeyPath, Default) ->
|
||||
?MODULE:get(?ZONE_CONF_PATH(Zone, KeyPath), Default).
|
||||
case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
|
||||
{not_found, _, _} -> %% not found in zones, try to find the global config
|
||||
?MODULE:get(KeyPath, Default);
|
||||
{ok, Value} -> Value
|
||||
end.
|
||||
|
||||
-spec put_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
put_zone_conf(Zone, KeyPath, Conf) ->
|
||||
?MODULE:put(?ZONE_CONF_PATH(Zone, KeyPath), Conf).
|
||||
|
||||
-spec find_zone_conf(atom(), emqx_map_lib:config_key_path()) ->
|
||||
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
|
||||
find_zone_conf(Zone, KeyPath) ->
|
||||
find(?ZONE_CONF_PATH(Zone, KeyPath)).
|
||||
|
||||
-spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) -> term().
|
||||
get_listener_conf(Zone, Listener, KeyPath) ->
|
||||
?MODULE:get(?LISTENER_CONF_PATH(Zone, Listener, KeyPath)).
|
||||
get_listener_conf(Type, Listener, KeyPath) ->
|
||||
?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
|
||||
|
||||
-spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> term().
|
||||
get_listener_conf(Zone, Listener, KeyPath, Default) ->
|
||||
?MODULE:get(?LISTENER_CONF_PATH(Zone, Listener, KeyPath), Default).
|
||||
get_listener_conf(Type, Listener, KeyPath, Default) ->
|
||||
?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Default).
|
||||
|
||||
-spec put_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
put_listener_conf(Zone, Listener, KeyPath, Conf) ->
|
||||
?MODULE:put(?LISTENER_CONF_PATH(Zone, Listener, KeyPath), Conf).
|
||||
put_listener_conf(Type, Listener, KeyPath, Conf) ->
|
||||
?MODULE:put(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Conf).
|
||||
|
||||
-spec find_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) ->
|
||||
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
|
||||
find_listener_conf(Zone, Listener, KeyPath) ->
|
||||
find(?LISTENER_CONF_PATH(Zone, Listener, KeyPath)).
|
||||
find_listener_conf(Type, Listener, KeyPath) ->
|
||||
find(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
|
||||
|
||||
-spec put(map()) -> ok.
|
||||
put(Config) ->
|
||||
maps:fold(fun(RootName, RootValue, _) ->
|
||||
?MODULE:put([RootName], RootValue)
|
||||
end, [], Config).
|
||||
?MODULE:put([RootName], RootValue)
|
||||
end, ok, Config).
|
||||
|
||||
-spec put(emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
put(KeyPath, Config) -> do_put(?CONF, KeyPath, Config).
|
||||
|
||||
-spec update(emqx_map_lib:config_key_path(), update_request()) ->
|
||||
ok | {error, term()}.
|
||||
update(KeyPath, UpdateReq) ->
|
||||
update(emqx_schema, KeyPath, UpdateReq).
|
||||
|
||||
-spec update(module(), emqx_map_lib:config_key_path(), update_request()) ->
|
||||
ok | {error, term()}.
|
||||
update(SchemaModule, KeyPath, UpdateReq) ->
|
||||
emqx_config_handler:update_config(SchemaModule, KeyPath, {update, UpdateReq}).
|
||||
|
||||
-spec remove(emqx_map_lib:config_key_path()) -> ok | {error, term()}.
|
||||
remove(KeyPath) ->
|
||||
remove(emqx_schema, KeyPath).
|
||||
|
||||
remove(SchemaModule, KeyPath) ->
|
||||
emqx_config_handler:update_config(SchemaModule, KeyPath, remove).
|
||||
-spec get_default_value(emqx_map_lib:config_key_path()) -> {ok, term()} | {error, term()}.
|
||||
get_default_value([RootName | _] = KeyPath) ->
|
||||
BinKeyPath = [bin(Key) || Key <- KeyPath],
|
||||
case find_raw([RootName]) of
|
||||
{ok, RawConf} ->
|
||||
RawConf1 = emqx_map_lib:deep_remove(BinKeyPath, #{bin(RootName) => RawConf}),
|
||||
try fill_defaults(get_schema_mod(RootName), RawConf1) of FullConf ->
|
||||
case emqx_map_lib:deep_find(BinKeyPath, FullConf) of
|
||||
{not_found, _, _} -> {error, no_default_value};
|
||||
{ok, Val} -> {ok, Val}
|
||||
end
|
||||
catch error : Reason -> {error, Reason}
|
||||
end;
|
||||
{not_found, _, _} ->
|
||||
{error, {rootname_not_found, RootName}}
|
||||
end.
|
||||
|
||||
-spec get_raw(emqx_map_lib:config_key_path()) -> term().
|
||||
get_raw(KeyPath) -> do_get(?RAW_CONF, KeyPath).
|
||||
|
@ -194,8 +226,8 @@ get_raw(KeyPath, Default) -> do_get(?RAW_CONF, KeyPath, Default).
|
|||
-spec put_raw(map()) -> ok.
|
||||
put_raw(Config) ->
|
||||
maps:fold(fun(RootName, RootV, _) ->
|
||||
?MODULE:put_raw([RootName], RootV)
|
||||
end, [], hocon_schema:get_value([], Config)).
|
||||
?MODULE:put_raw([RootName], RootV)
|
||||
end, ok, hocon_schema:get_value([], Config)).
|
||||
|
||||
-spec put_raw(emqx_map_lib:config_key_path(), term()) -> ok.
|
||||
put_raw(KeyPath, Config) -> do_put(?RAW_CONF, KeyPath, Config).
|
||||
|
@ -208,47 +240,93 @@ put_raw(KeyPath, Config) -> do_put(?RAW_CONF, KeyPath, Config).
|
|||
%% NOTE: The order of the files is significant, configs from files orderd
|
||||
%% in the rear of the list overrides prior values.
|
||||
-spec init_load(module(), [string()] | binary() | hocon:config()) -> ok.
|
||||
init_load(SchemaModule, Conf) when is_list(Conf) orelse is_binary(Conf) ->
|
||||
ParseOptions = #{format => richmap},
|
||||
init_load(SchemaMod, Conf) when is_list(Conf) orelse is_binary(Conf) ->
|
||||
ParseOptions = #{format => map},
|
||||
Parser = case is_binary(Conf) of
|
||||
true -> fun hocon:binary/2;
|
||||
false -> fun hocon:files/2
|
||||
end,
|
||||
case Parser(Conf, ParseOptions) of
|
||||
{ok, RawRichConf} ->
|
||||
init_load(SchemaModule, RawRichConf);
|
||||
init_load(SchemaMod, RawRichConf);
|
||||
{error, Reason} ->
|
||||
logger:error(#{msg => failed_to_load_hocon_conf,
|
||||
reason => Reason
|
||||
}),
|
||||
error(failed_to_load_hocon_conf)
|
||||
end;
|
||||
init_load(SchemaModule, RawRichConf) when is_map(RawRichConf) ->
|
||||
%% check with richmap for line numbers in error reports (future enhancement)
|
||||
Opts = #{return_plain => true,
|
||||
nullable => true
|
||||
},
|
||||
%% this call throws exception in case of check failure
|
||||
{_AppEnvs, CheckedConf} = hocon_schema:map_translate(SchemaModule, RawRichConf, Opts),
|
||||
ok = save_to_config_map(emqx_map_lib:unsafe_atom_key_map(CheckedConf),
|
||||
hocon_schema:richmap_to_map(RawRichConf)).
|
||||
init_load(SchemaMod, RawConf0) when is_map(RawConf0) ->
|
||||
ok = save_schema_mod_and_names(SchemaMod),
|
||||
%% override part of the input conf using emqx_override.conf
|
||||
RawConf = merge_with_override_conf(RawConf0),
|
||||
%% check and save configs
|
||||
{_AppEnvs, CheckedConf} = check_config(SchemaMod, RawConf),
|
||||
ok = save_to_config_map(maps:with(get_atom_root_names(), CheckedConf),
|
||||
maps:with(get_root_names(), RawConf)).
|
||||
|
||||
merge_with_override_conf(RawConf) ->
|
||||
maps:merge(RawConf, maps:with(maps:keys(RawConf), read_override_conf())).
|
||||
|
||||
-spec check_config(module(), raw_config()) -> {AppEnvs, CheckedConf}
|
||||
when AppEnvs :: app_envs(), CheckedConf :: config().
|
||||
check_config(SchemaModule, RawConf) ->
|
||||
check_config(SchemaMod, RawConf) ->
|
||||
Opts = #{return_plain => true,
|
||||
nullable => true,
|
||||
format => map
|
||||
},
|
||||
{AppEnvs, CheckedConf} =
|
||||
hocon_schema:map_translate(SchemaModule, RawConf, Opts),
|
||||
hocon_schema:map_translate(SchemaMod, RawConf, Opts),
|
||||
Conf = maps:with(maps:keys(RawConf), CheckedConf),
|
||||
{AppEnvs, emqx_map_lib:unsafe_atom_key_map(Conf)}.
|
||||
|
||||
-spec fill_defaults(raw_config()) -> map().
|
||||
fill_defaults(RawConf) ->
|
||||
RootNames = get_root_names(),
|
||||
maps:fold(fun(Key, Conf, Acc) ->
|
||||
SubMap = #{Key => Conf},
|
||||
WithDefaults = case lists:member(Key, RootNames) of
|
||||
true -> fill_defaults(get_schema_mod(Key), SubMap);
|
||||
false -> SubMap
|
||||
end,
|
||||
maps:merge(Acc, WithDefaults)
|
||||
end, #{}, RawConf).
|
||||
|
||||
-spec fill_defaults(module(), raw_config()) -> map().
|
||||
fill_defaults(SchemaMod, RawConf) ->
|
||||
hocon_schema:check_plain(SchemaMod, RawConf,
|
||||
#{nullable => true, no_conversion => true}, root_names_from_conf(RawConf)).
|
||||
|
||||
-spec read_override_conf() -> raw_config().
|
||||
read_override_conf() ->
|
||||
load_hocon_file(emqx_override_conf_name(), map).
|
||||
|
||||
-spec save_schema_mod_and_names(module()) -> ok.
|
||||
save_schema_mod_and_names(SchemaMod) ->
|
||||
RootNames = hocon_schema:root_names(SchemaMod),
|
||||
OldMods = get_schema_mod(),
|
||||
OldNames = get_root_names(),
|
||||
%% map from root name to schema module name
|
||||
NewMods = maps:from_list([{Name, SchemaMod} || Name <- RootNames]),
|
||||
persistent_term:put(?PERSIS_SCHEMA_MODS, #{
|
||||
mods => maps:merge(OldMods, NewMods),
|
||||
names => lists:usort(OldNames ++ RootNames)
|
||||
}).
|
||||
|
||||
-spec get_schema_mod() -> #{binary() => atom()}.
|
||||
get_schema_mod() ->
|
||||
maps:get(mods, persistent_term:get(?PERSIS_SCHEMA_MODS, #{mods => #{}})).
|
||||
|
||||
-spec get_schema_mod(atom() | binary()) -> module().
|
||||
get_schema_mod(RootName) ->
|
||||
maps:get(bin(RootName), get_schema_mod()).
|
||||
|
||||
-spec get_root_names() -> [binary()].
|
||||
get_root_names() ->
|
||||
maps:get(names, persistent_term:get(?PERSIS_SCHEMA_MODS, #{names => []})).
|
||||
|
||||
get_atom_root_names() ->
|
||||
[atom(N) || N <- get_root_names()].
|
||||
|
||||
-spec save_configs(app_envs(), config(), raw_config(), raw_config()) -> ok | {error, term()}.
|
||||
save_configs(_AppEnvs, Conf, RawConf, OverrideConf) ->
|
||||
%% We may need also support hot config update for the apps that use application envs.
|
||||
|
@ -270,14 +348,19 @@ save_to_config_map(Conf, RawConf) ->
|
|||
?MODULE:put_raw(RawConf).
|
||||
|
||||
-spec save_to_override_conf(raw_config()) -> ok | {error, term()}.
|
||||
save_to_override_conf(undefined) ->
|
||||
ok;
|
||||
save_to_override_conf(RawConf) ->
|
||||
FileName = emqx_override_conf_name(),
|
||||
ok = filelib:ensure_dir(FileName),
|
||||
case file:write_file(FileName, jsx:prettify(jsx:encode(RawConf))) of
|
||||
ok -> ok;
|
||||
{error, Reason} ->
|
||||
logger:error("write to ~s failed, ~p", [FileName, Reason]),
|
||||
{error, Reason}
|
||||
case emqx_override_conf_name() of
|
||||
undefined -> ok;
|
||||
FileName ->
|
||||
ok = filelib:ensure_dir(FileName),
|
||||
case file:write_file(FileName, jsx:prettify(jsx:encode(RawConf))) of
|
||||
ok -> ok;
|
||||
{error, Reason} ->
|
||||
logger:error("write to ~s failed, ~p", [FileName, Reason]),
|
||||
{error, Reason}
|
||||
end
|
||||
end.
|
||||
|
||||
load_hocon_file(FileName, LoadType) ->
|
||||
|
@ -289,7 +372,7 @@ load_hocon_file(FileName, LoadType) ->
|
|||
end.
|
||||
|
||||
emqx_override_conf_name() ->
|
||||
application:get_env(emqx, override_conf_file, "emqx_override.conf").
|
||||
application:get_env(emqx, override_conf_file, undefined).
|
||||
|
||||
do_get(Type, KeyPath) ->
|
||||
Ref = make_ref(),
|
||||
|
@ -336,12 +419,19 @@ do_deep_put(?CONF, KeyPath, Map, Value) ->
|
|||
do_deep_put(?RAW_CONF, KeyPath, Map, Value) ->
|
||||
emqx_map_lib:deep_put([bin(Key) || Key <- KeyPath], Map, Value).
|
||||
|
||||
root_names_from_conf(RawConf) ->
|
||||
Keys = maps:keys(RawConf),
|
||||
[Name || Name <- get_root_names(), lists:member(Name, Keys)].
|
||||
|
||||
atom(Bin) when is_binary(Bin) ->
|
||||
binary_to_existing_atom(Bin, latin1);
|
||||
atom(Str) when is_list(Str) ->
|
||||
list_to_existing_atom(Str);
|
||||
atom(Atom) when is_atom(Atom) ->
|
||||
Atom.
|
||||
|
||||
bin(Bin) when is_binary(Bin) -> Bin;
|
||||
bin(Str) when is_list(Str) -> list_to_binary(Str);
|
||||
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
|
||||
|
||||
conf_key(?CONF, RootName) ->
|
||||
|
|
|
@ -23,7 +23,9 @@
|
|||
|
||||
%% API functions
|
||||
-export([ start_link/0
|
||||
, stop/0
|
||||
, add_handler/2
|
||||
, remove_handler/1
|
||||
, update_config/3
|
||||
, merge_to_old_config/2
|
||||
]).
|
||||
|
@ -37,20 +39,28 @@
|
|||
code_change/3]).
|
||||
|
||||
-define(MOD, {mod}).
|
||||
-define(WKEY, '?').
|
||||
|
||||
-define(ATOM_CONF_PATH(PATH, EXP, EXP_ON_FAIL),
|
||||
try [safe_atom(Key) || Key <- PATH] of
|
||||
AtomKeyPath -> EXP
|
||||
catch
|
||||
error:badarg -> EXP_ON_FAIL
|
||||
end).
|
||||
|
||||
-type handler_name() :: module().
|
||||
-type handlers() :: #{emqx_config:config_key() => handlers(), ?MOD => handler_name()}.
|
||||
-type update_args() :: {update, emqx_config:update_request()} | remove.
|
||||
|
||||
-optional_callbacks([ pre_config_update/2
|
||||
, post_config_update/3
|
||||
, post_config_update/4
|
||||
]).
|
||||
|
||||
-callback pre_config_update(emqx_config:update_request(), emqx_config:raw_config()) ->
|
||||
emqx_config:update_request().
|
||||
{ok, emqx_config:update_request()} | {error, term()}.
|
||||
|
||||
-callback post_config_update(emqx_config:update_request(), emqx_config:config(),
|
||||
emqx_config:config()) -> any().
|
||||
emqx_config:config(), emqx_config:app_envs()) ->
|
||||
ok | {ok, Result::any()} | {error, Reason::term()}.
|
||||
|
||||
-type state() :: #{
|
||||
handlers := handlers(),
|
||||
|
@ -60,14 +70,22 @@
|
|||
start_link() ->
|
||||
gen_server:start_link({local, ?MODULE}, ?MODULE, {}, []).
|
||||
|
||||
-spec update_config(module(), emqx_config:config_key_path(), update_args()) ->
|
||||
ok | {error, term()}.
|
||||
stop() ->
|
||||
gen_server:stop(?MODULE).
|
||||
|
||||
-spec update_config(module(), emqx_config:config_key_path(), emqx_config:update_args()) ->
|
||||
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
|
||||
update_config(SchemaModule, ConfKeyPath, UpdateArgs) ->
|
||||
gen_server:call(?MODULE, {change_config, SchemaModule, ConfKeyPath, UpdateArgs}).
|
||||
?ATOM_CONF_PATH(ConfKeyPath, gen_server:call(?MODULE, {change_config, SchemaModule,
|
||||
AtomKeyPath, UpdateArgs}), {error, ConfKeyPath}).
|
||||
|
||||
-spec add_handler(emqx_config:config_key_path(), handler_name()) -> ok.
|
||||
add_handler(ConfKeyPath, HandlerName) ->
|
||||
gen_server:call(?MODULE, {add_child, ConfKeyPath, HandlerName}).
|
||||
gen_server:call(?MODULE, {add_handler, ConfKeyPath, HandlerName}).
|
||||
|
||||
-spec remove_handler(emqx_config:config_key_path()) -> ok.
|
||||
remove_handler(ConfKeyPath) ->
|
||||
gen_server:call(?MODULE, {remove_handler, ConfKeyPath}).
|
||||
|
||||
%%============================================================================
|
||||
|
||||
|
@ -75,26 +93,34 @@ add_handler(ConfKeyPath, HandlerName) ->
|
|||
init(_) ->
|
||||
{ok, #{handlers => #{?MOD => ?MODULE}}}.
|
||||
|
||||
handle_call({add_child, ConfKeyPath, HandlerName}, _From,
|
||||
handle_call({add_handler, ConfKeyPath, HandlerName}, _From, State = #{handlers := Handlers}) ->
|
||||
case deep_put_handler(ConfKeyPath, Handlers, HandlerName) of
|
||||
{ok, NewHandlers} ->
|
||||
{reply, ok, State#{handlers => NewHandlers}};
|
||||
Error ->
|
||||
{reply, Error, State}
|
||||
end;
|
||||
|
||||
handle_call({remove_handler, ConfKeyPath}, _From,
|
||||
State = #{handlers := Handlers}) ->
|
||||
{reply, ok, State#{handlers =>
|
||||
emqx_map_lib:deep_put(ConfKeyPath, Handlers, #{?MOD => HandlerName})}};
|
||||
emqx_map_lib:deep_remove(ConfKeyPath ++ [?MOD], Handlers)}};
|
||||
|
||||
handle_call({change_config, SchemaModule, ConfKeyPath, UpdateArgs}, _From,
|
||||
#{handlers := Handlers} = State) ->
|
||||
OldConf = emqx_config:get_root(ConfKeyPath),
|
||||
OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
|
||||
Result = try
|
||||
{NewRawConf, OverrideConf} = process_upadate_request(ConfKeyPath, OldRawConf,
|
||||
Handlers, UpdateArgs),
|
||||
{AppEnvs, CheckedConf} = emqx_config:check_config(SchemaModule, NewRawConf),
|
||||
_ = do_post_config_update(ConfKeyPath, Handlers, OldConf, CheckedConf, UpdateArgs),
|
||||
emqx_config:save_configs(AppEnvs, CheckedConf, NewRawConf, OverrideConf)
|
||||
Reply = try
|
||||
case process_update_request(ConfKeyPath, Handlers, UpdateArgs) of
|
||||
{ok, NewRawConf, OverrideConf} ->
|
||||
check_and_save_configs(SchemaModule, ConfKeyPath, Handlers, NewRawConf,
|
||||
OverrideConf, UpdateArgs);
|
||||
{error, Result} ->
|
||||
{error, Result}
|
||||
end
|
||||
catch Error:Reason:ST ->
|
||||
?LOG(error, "change_config failed: ~p", [{Error, Reason, ST}]),
|
||||
{error, Reason}
|
||||
end,
|
||||
{reply, Result, State};
|
||||
{reply, Reply, State};
|
||||
|
||||
handle_call(_Request, _From, State) ->
|
||||
Reply = ok,
|
||||
|
@ -112,32 +138,93 @@ terminate(_Reason, _State) ->
|
|||
code_change(_OldVsn, State, _Extra) ->
|
||||
{ok, State}.
|
||||
|
||||
process_upadate_request(ConfKeyPath, OldRawConf, _Handlers, remove) ->
|
||||
deep_put_handler([], Handlers, Mod) when is_map(Handlers) ->
|
||||
{ok, Handlers#{?MOD => Mod}};
|
||||
deep_put_handler([], _Handlers, Mod) ->
|
||||
{ok, #{?MOD => Mod}};
|
||||
deep_put_handler([?WKEY | KeyPath], Handlers, Mod) ->
|
||||
deep_put_handler2(?WKEY, KeyPath, Handlers, Mod);
|
||||
deep_put_handler([Key | KeyPath], Handlers, Mod) ->
|
||||
case maps:find(?WKEY, Handlers) of
|
||||
error ->
|
||||
deep_put_handler2(Key, KeyPath, Handlers, Mod);
|
||||
{ok, _SubHandlers} ->
|
||||
{error, {cannot_override_a_wildcard_path, [?WKEY | KeyPath]}}
|
||||
end.
|
||||
|
||||
deep_put_handler2(Key, KeyPath, Handlers, Mod) ->
|
||||
SubHandlers = maps:get(Key, Handlers, #{}),
|
||||
case deep_put_handler(KeyPath, SubHandlers, Mod) of
|
||||
{ok, SubHandlers1} ->
|
||||
{ok, Handlers#{Key => SubHandlers1}};
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
process_update_request(ConfKeyPath, _Handlers, {remove, Opts}) ->
|
||||
OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
|
||||
BinKeyPath = bin_path(ConfKeyPath),
|
||||
NewRawConf = emqx_map_lib:deep_remove(BinKeyPath, OldRawConf),
|
||||
OverrideConf = emqx_map_lib:deep_remove(BinKeyPath, emqx_config:read_override_conf()),
|
||||
{NewRawConf, OverrideConf};
|
||||
process_upadate_request(ConfKeyPath, OldRawConf, Handlers, {update, UpdateReq}) ->
|
||||
NewRawConf = do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq),
|
||||
OverrideConf = update_override_config(NewRawConf),
|
||||
{NewRawConf, OverrideConf}.
|
||||
OverrideConf = remove_from_override_config(BinKeyPath, Opts),
|
||||
{ok, NewRawConf, OverrideConf};
|
||||
process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
|
||||
OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
|
||||
case do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) of
|
||||
{ok, NewRawConf} ->
|
||||
OverrideConf = update_override_config(NewRawConf, Opts),
|
||||
{ok, NewRawConf, OverrideConf};
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
do_update_config([], Handlers, OldRawConf, UpdateReq) ->
|
||||
call_pre_config_update(Handlers, OldRawConf, UpdateReq);
|
||||
do_update_config([ConfKey | ConfKeyPath], Handlers, OldRawConf, UpdateReq) ->
|
||||
SubOldRawConf = get_sub_config(bin(ConfKey), OldRawConf),
|
||||
SubHandlers = maps:get(ConfKey, Handlers, #{}),
|
||||
NewUpdateReq = do_update_config(ConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq),
|
||||
call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq}).
|
||||
SubHandlers = get_sub_handlers(ConfKey, Handlers),
|
||||
case do_update_config(ConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq) of
|
||||
{ok, NewUpdateReq} ->
|
||||
call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq});
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
do_post_config_update([], Handlers, OldConf, NewConf, UpdateArgs) ->
|
||||
call_post_config_update(Handlers, OldConf, NewConf, up_req(UpdateArgs));
|
||||
do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, UpdateArgs) ->
|
||||
check_and_save_configs(SchemaModule, ConfKeyPath, Handlers, NewRawConf, OverrideConf,
|
||||
UpdateArgs) ->
|
||||
OldConf = emqx_config:get_root(ConfKeyPath),
|
||||
FullRawConf = with_full_raw_confs(NewRawConf),
|
||||
{AppEnvs, CheckedConf} = emqx_config:check_config(SchemaModule, FullRawConf),
|
||||
NewConf = maps:with(maps:keys(OldConf), CheckedConf),
|
||||
case do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, #{}) of
|
||||
{ok, Result0} ->
|
||||
case save_configs(ConfKeyPath, AppEnvs, NewConf, NewRawConf, OverrideConf,
|
||||
UpdateArgs) of
|
||||
{ok, Result1} ->
|
||||
{ok, Result1#{post_config_update => Result0}};
|
||||
Error -> Error
|
||||
end;
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
do_post_config_update([], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) ->
|
||||
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs), Result);
|
||||
do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs,
|
||||
Result) ->
|
||||
SubOldConf = get_sub_config(ConfKey, OldConf),
|
||||
SubNewConf = get_sub_config(ConfKey, NewConf),
|
||||
SubHandlers = maps:get(ConfKey, Handlers, #{}),
|
||||
_ = do_post_config_update(ConfKeyPath, SubHandlers, SubOldConf, SubNewConf, UpdateArgs),
|
||||
call_post_config_update(Handlers, OldConf, NewConf, up_req(UpdateArgs)).
|
||||
SubHandlers = get_sub_handlers(ConfKey, Handlers),
|
||||
case do_post_config_update(ConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs,
|
||||
UpdateArgs, Result) of
|
||||
{ok, Result1} ->
|
||||
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs),
|
||||
Result1);
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
get_sub_handlers(ConfKey, Handlers) ->
|
||||
case maps:find(ConfKey, Handlers) of
|
||||
error -> maps:get(?WKEY, Handlers, #{});
|
||||
{ok, SubHandlers} -> SubHandlers
|
||||
end.
|
||||
|
||||
get_sub_config(ConfKey, Conf) when is_map(Conf) ->
|
||||
maps:get(ConfKey, Conf, undefined);
|
||||
|
@ -147,15 +234,30 @@ get_sub_config(_, _Conf) -> %% the Conf is a primitive
|
|||
call_pre_config_update(Handlers, OldRawConf, UpdateReq) ->
|
||||
HandlerName = maps:get(?MOD, Handlers, undefined),
|
||||
case erlang:function_exported(HandlerName, pre_config_update, 2) of
|
||||
true -> HandlerName:pre_config_update(UpdateReq, OldRawConf);
|
||||
true ->
|
||||
case HandlerName:pre_config_update(UpdateReq, OldRawConf) of
|
||||
{ok, NewUpdateReq} -> {ok, NewUpdateReq};
|
||||
{error, Reason} -> {error, {pre_config_update, HandlerName, Reason}}
|
||||
end;
|
||||
false -> merge_to_old_config(UpdateReq, OldRawConf)
|
||||
end.
|
||||
|
||||
call_post_config_update(Handlers, OldConf, NewConf, UpdateReq) ->
|
||||
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, UpdateReq, Result) ->
|
||||
HandlerName = maps:get(?MOD, Handlers, undefined),
|
||||
case erlang:function_exported(HandlerName, post_config_update, 3) of
|
||||
true -> HandlerName:post_config_update(UpdateReq, NewConf, OldConf);
|
||||
false -> ok
|
||||
case erlang:function_exported(HandlerName, post_config_update, 4) of
|
||||
true ->
|
||||
case HandlerName:post_config_update(UpdateReq, NewConf, OldConf, AppEnvs) of
|
||||
ok -> {ok, Result};
|
||||
{ok, Result1} -> {ok, Result#{HandlerName => Result1}};
|
||||
{error, Reason} -> {error, {post_config_update, HandlerName, Reason}}
|
||||
end;
|
||||
false -> {ok, Result}
|
||||
end.
|
||||
|
||||
save_configs(ConfKeyPath, AppEnvs, CheckedConf, NewRawConf, OverrideConf, UpdateArgs) ->
|
||||
case emqx_config:save_configs(AppEnvs, CheckedConf, NewRawConf, OverrideConf) of
|
||||
ok -> {ok, return_change_result(ConfKeyPath, UpdateArgs)};
|
||||
{error, Reason} -> {error, {save_configs, Reason}}
|
||||
end.
|
||||
|
||||
%% The default callback of config handlers
|
||||
|
@ -164,18 +266,48 @@ call_post_config_update(Handlers, OldConf, NewConf, UpdateReq) ->
|
|||
%% 2. either the old or the new config is not of map type
|
||||
%% the behaviour is merging the new the config to the old config if they are maps.
|
||||
merge_to_old_config(UpdateReq, RawConf) when is_map(UpdateReq), is_map(RawConf) ->
|
||||
maps:merge(RawConf, UpdateReq);
|
||||
{ok, maps:merge(RawConf, UpdateReq)};
|
||||
merge_to_old_config(UpdateReq, _RawConf) ->
|
||||
UpdateReq.
|
||||
{ok, UpdateReq}.
|
||||
|
||||
update_override_config(RawConf) ->
|
||||
remove_from_override_config(_BinKeyPath, #{persistent := false}) ->
|
||||
undefined;
|
||||
remove_from_override_config(BinKeyPath, _Opts) ->
|
||||
OldConf = emqx_config:read_override_conf(),
|
||||
emqx_map_lib:deep_remove(BinKeyPath, OldConf).
|
||||
|
||||
update_override_config(_RawConf, #{persistent := false}) ->
|
||||
undefined;
|
||||
update_override_config(RawConf, _Opts) ->
|
||||
OldConf = emqx_config:read_override_conf(),
|
||||
maps:merge(OldConf, RawConf).
|
||||
|
||||
up_req(remove) -> '$remove';
|
||||
up_req({update, Req}) -> Req.
|
||||
up_req({remove, _Opts}) -> '$remove';
|
||||
up_req({{update, Req}, _Opts}) -> Req.
|
||||
|
||||
return_change_result(ConfKeyPath, {{update, _Req}, Opts}) ->
|
||||
#{config => emqx_config:get(ConfKeyPath),
|
||||
raw_config => return_rawconf(ConfKeyPath, Opts)};
|
||||
return_change_result(_ConfKeyPath, {remove, _Opts}) ->
|
||||
#{}.
|
||||
|
||||
return_rawconf(ConfKeyPath, #{rawconf_with_defaults := true}) ->
|
||||
FullRawConf = emqx_config:fill_defaults(emqx_config:get_raw([])),
|
||||
emqx_map_lib:deep_get(bin_path(ConfKeyPath), FullRawConf);
|
||||
return_rawconf(ConfKeyPath, _) ->
|
||||
emqx_config:get_raw(ConfKeyPath).
|
||||
|
||||
with_full_raw_confs(PartialConf) ->
|
||||
maps:merge(emqx_config:get_raw([]), PartialConf).
|
||||
|
||||
bin_path(ConfKeyPath) -> [bin(Key) || Key <- ConfKeyPath].
|
||||
|
||||
bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
|
||||
bin(B) when is_binary(B) -> B.
|
||||
|
||||
safe_atom(Bin) when is_binary(Bin) ->
|
||||
binary_to_existing_atom(Bin, latin1);
|
||||
safe_atom(Str) when is_list(Str) ->
|
||||
list_to_existing_atom(Str);
|
||||
safe_atom(Atom) when is_atom(Atom) ->
|
||||
Atom.
|
|
@ -102,8 +102,8 @@
|
|||
idle_timer :: maybe(reference()),
|
||||
%% Zone name
|
||||
zone :: atom(),
|
||||
%% Listener Name
|
||||
listener :: atom()
|
||||
%% Listener Type and Name
|
||||
listener :: {Type::atom(), Name::atom()}
|
||||
}).
|
||||
|
||||
-type(state() :: #state{}).
|
||||
|
@ -135,7 +135,9 @@
|
|||
, system_code_change/4
|
||||
]}).
|
||||
|
||||
-spec(start_link(esockd:transport(), esockd:socket(), emqx_channel:opts())
|
||||
-spec(start_link(esockd:transport(),
|
||||
esockd:socket() | {pid(), quicer:connection_handler()},
|
||||
emqx_channel:opts())
|
||||
-> {ok, pid()}).
|
||||
start_link(Transport, Socket, Options) ->
|
||||
Args = [self(), Transport, Socket, Options],
|
||||
|
@ -463,15 +465,15 @@ handle_msg({Passive, _Sock}, State)
|
|||
NState1 = check_oom(run_gc(InStats, NState)),
|
||||
handle_info(activate_socket, NState1);
|
||||
|
||||
handle_msg(Deliver = {deliver, _Topic, _Msg}, #state{zone = Zone,
|
||||
listener = Listener} = State) ->
|
||||
ActiveN = get_active_n(Zone, Listener),
|
||||
handle_msg(Deliver = {deliver, _Topic, _Msg}, #state{
|
||||
listener = {Type, Listener}} = State) ->
|
||||
ActiveN = get_active_n(Type, Listener),
|
||||
Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)],
|
||||
with_channel(handle_deliver, [Delivers], State);
|
||||
|
||||
%% Something sent
|
||||
handle_msg({inet_reply, _Sock, ok}, State = #state{zone = Zone, listener = Listener}) ->
|
||||
case emqx_pd:get_counter(outgoing_pubs) > get_active_n(Zone, Listener) of
|
||||
handle_msg({inet_reply, _Sock, ok}, State = #state{listener = {Type, Listener}}) ->
|
||||
case emqx_pd:get_counter(outgoing_pubs) > get_active_n(Type, Listener) of
|
||||
true ->
|
||||
Pubs = emqx_pd:reset_counter(outgoing_pubs),
|
||||
Bytes = emqx_pd:reset_counter(outgoing_bytes),
|
||||
|
@ -820,8 +822,8 @@ activate_socket(State = #state{sockstate = closed}) ->
|
|||
activate_socket(State = #state{sockstate = blocked}) ->
|
||||
{ok, State};
|
||||
activate_socket(State = #state{transport = Transport, socket = Socket,
|
||||
zone = Zone, listener = Listener}) ->
|
||||
ActiveN = get_active_n(Zone, Listener),
|
||||
listener = {Type, Listener}}) ->
|
||||
ActiveN = get_active_n(Type, Listener),
|
||||
case Transport:setopts(Socket, [{active, ActiveN}]) of
|
||||
ok -> {ok, State#state{sockstate = running}};
|
||||
Error -> Error
|
||||
|
@ -904,8 +906,6 @@ get_state(Pid) ->
|
|||
maps:from_list(lists:zip(record_info(fields, state),
|
||||
tl(tuple_to_list(State)))).
|
||||
|
||||
get_active_n(Zone, Listener) ->
|
||||
case emqx_config:get([zones, Zone, listeners, Listener, type]) of
|
||||
quic -> 100;
|
||||
_ -> emqx_config:get_listener_conf(Zone, Listener, [tcp, active_n])
|
||||
end.
|
||||
get_active_n(quic, _Listener) -> 100;
|
||||
get_active_n(Type, Listener) ->
|
||||
emqx_config:get_listener_conf(Type, Listener, [tcp, active_n]).
|
||||
|
|
|
@ -160,4 +160,4 @@ start_timer(Zone) ->
|
|||
start_timers() ->
|
||||
lists:foreach(fun({Zone, _ZoneConf}) ->
|
||||
start_timer(Zone)
|
||||
end, maps:to_list(emqx_config:get([zones], #{}))).
|
||||
end, maps:to_list(emqx:get_config([zones], #{}))).
|
||||
|
|
|
@ -100,14 +100,10 @@ parse(<<Type:4, Dup:1, QoS:2, Retain:1, Rest/binary>>,
|
|||
StrictMode andalso validate_header(Type, Dup, QoS, Retain),
|
||||
Header = #mqtt_packet_header{type = Type,
|
||||
dup = bool(Dup),
|
||||
qos = QoS,
|
||||
qos = fixqos(Type, QoS),
|
||||
retain = bool(Retain)
|
||||
},
|
||||
Header1 = case fixqos(Type, QoS) of
|
||||
QoS -> Header;
|
||||
FixedQoS -> Header#mqtt_packet_header{qos = FixedQoS}
|
||||
end,
|
||||
parse_remaining_len(Rest, Header1, Options);
|
||||
parse_remaining_len(Rest, Header, Options);
|
||||
|
||||
parse(Bin, {{len, #{hdr := Header,
|
||||
len := {Multiplier, Length}}
|
||||
|
|
|
@ -85,7 +85,7 @@ code_change(_OldVsn, State, _Extra) ->
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
ensure_timer(State) ->
|
||||
case emqx_config:get([node, global_gc_interval]) of
|
||||
case emqx:get_config([node, global_gc_interval]) of
|
||||
undefined -> State;
|
||||
Interval -> TRef = emqx_misc:start_timer(Interval, run),
|
||||
State#{timer := TRef}
|
||||
|
|
|
@ -34,6 +34,7 @@ init([]) ->
|
|||
, child_spec(emqx_stats, worker)
|
||||
, child_spec(emqx_metrics, worker)
|
||||
, child_spec(emqx_ctl, worker)
|
||||
, child_spec(emqx_logger, worker)
|
||||
]}}.
|
||||
|
||||
child_spec(M, Type) ->
|
||||
|
|
|
@ -26,6 +26,8 @@
|
|||
, restart/0
|
||||
, stop/0
|
||||
, is_running/1
|
||||
, current_conns/2
|
||||
, max_conns/2
|
||||
]).
|
||||
|
||||
-export([ start_listener/1
|
||||
|
@ -37,35 +39,39 @@
|
|||
, has_enabled_listener_conf_by_type/1
|
||||
]).
|
||||
|
||||
-export([ listener_id/2
|
||||
, parse_listener_id/1
|
||||
]).
|
||||
|
||||
-export([post_config_update/4]).
|
||||
|
||||
-define(CONF_KEY_PATH, [listeners]).
|
||||
|
||||
%% @doc List configured listeners.
|
||||
-spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]).
|
||||
list() ->
|
||||
[{listener_id(ZoneName, LName), LConf} || {ZoneName, LName, LConf} <- do_list()].
|
||||
[{listener_id(Type, LName), LConf} || {Type, LName, LConf} <- do_list()].
|
||||
|
||||
do_list() ->
|
||||
Zones = maps:to_list(emqx_config:get([zones], #{})),
|
||||
lists:append([list(ZoneName, ZoneConf) || {ZoneName, ZoneConf} <- Zones]).
|
||||
Listeners = maps:to_list(emqx:get_config([listeners], #{})),
|
||||
lists:append([list(Type, maps:to_list(Conf)) || {Type, Conf} <- Listeners]).
|
||||
|
||||
list(ZoneName, ZoneConf) ->
|
||||
Listeners = maps:to_list(maps:get(listeners, ZoneConf, #{})),
|
||||
[
|
||||
begin
|
||||
Conf = merge_zone_and_listener_confs(ZoneConf, LConf),
|
||||
Running = is_running(listener_id(ZoneName, LName), Conf),
|
||||
{ZoneName , LName, maps:put(running, Running, Conf)}
|
||||
end
|
||||
|| {LName, LConf} <- Listeners, is_map(LConf)].
|
||||
list(Type, Conf) ->
|
||||
[begin
|
||||
Running = is_running(Type, listener_id(Type, LName), LConf),
|
||||
{Type, LName, maps:put(running, Running, LConf)}
|
||||
end || {LName, LConf} <- Conf, is_map(LConf)].
|
||||
|
||||
-spec is_running(ListenerId :: atom()) -> boolean() | {error, no_found}.
|
||||
is_running(ListenerId) ->
|
||||
case lists:filtermap(fun({_Zone, Id, #{running := IsRunning}}) ->
|
||||
case lists:filtermap(fun({_Type, Id, #{running := IsRunning}}) ->
|
||||
Id =:= ListenerId andalso {true, IsRunning}
|
||||
end, do_list()) of
|
||||
[IsRunning] -> IsRunning;
|
||||
[] -> {error, not_found}
|
||||
end.
|
||||
|
||||
is_running(ListenerId, #{type := tcp, bind := ListenOn})->
|
||||
is_running(Type, ListenerId, #{bind := ListenOn}) when Type =:= tcp; Type =:= ssl ->
|
||||
try esockd:listener({ListenerId, ListenOn}) of
|
||||
Pid when is_pid(Pid)->
|
||||
true
|
||||
|
@ -73,7 +79,7 @@ is_running(ListenerId, #{type := tcp, bind := ListenOn})->
|
|||
false
|
||||
end;
|
||||
|
||||
is_running(ListenerId, #{type := ws})->
|
||||
is_running(Type, ListenerId, _Conf) when Type =:= ws; Type =:= wss ->
|
||||
try
|
||||
Info = ranch:info(ListenerId),
|
||||
proplists:get_value(status, Info) =:= running
|
||||
|
@ -81,13 +87,38 @@ is_running(ListenerId, #{type := ws})->
|
|||
false
|
||||
end;
|
||||
|
||||
is_running(_ListenerId, #{type := quic})->
|
||||
%% TODO: quic support
|
||||
is_running(quic, _ListenerId, _Conf)->
|
||||
%% TODO: quic support
|
||||
{error, no_found}.
|
||||
|
||||
current_conns(ID, ListenOn) ->
|
||||
{Type, Name} = parse_listener_id(ID),
|
||||
current_conns(Type, Name, ListenOn).
|
||||
|
||||
current_conns(Type, Name, ListenOn) when Type == tcl; Type == ssl ->
|
||||
esockd:get_current_connections({listener_id(Type, Name), ListenOn});
|
||||
current_conns(Type, Name, _ListenOn) when Type =:= ws; Type =:= wss ->
|
||||
proplists:get_value(all_connections, ranch:info(listener_id(Type, Name)));
|
||||
current_conns(_, _, _) ->
|
||||
{error, not_support}.
|
||||
|
||||
max_conns(ID, ListenOn) ->
|
||||
{Type, Name} = parse_listener_id(ID),
|
||||
max_conns(Type, Name, ListenOn).
|
||||
|
||||
max_conns(Type, Name, ListenOn) when Type == tcl; Type == ssl ->
|
||||
esockd:get_max_connections({listener_id(Type, Name), ListenOn});
|
||||
max_conns(Type, Name, _ListenOn) when Type =:= ws; Type =:= wss ->
|
||||
proplists:get_value(max_connections, ranch:info(listener_id(Type, Name)));
|
||||
max_conns(_, _, _) ->
|
||||
{error, not_support}.
|
||||
|
||||
%% @doc Start all listeners.
|
||||
-spec(start() -> ok).
|
||||
start() ->
|
||||
%% The ?MODULE:start/0 will be called by emqx_app when emqx get started,
|
||||
%% so we install the config handler here.
|
||||
ok = emqx_config_handler:add_handler(?CONF_KEY_PATH, ?MODULE),
|
||||
foreach_listeners(fun start_listener/3).
|
||||
|
||||
-spec start_listener(atom()) -> ok | {error, term()}.
|
||||
|
@ -95,23 +126,76 @@ start_listener(ListenerId) ->
|
|||
apply_on_listener(ListenerId, fun start_listener/3).
|
||||
|
||||
-spec start_listener(atom(), atom(), map()) -> ok | {error, term()}.
|
||||
start_listener(ZoneName, ListenerName, #{type := Type, bind := Bind} = Conf) ->
|
||||
case do_start_listener(ZoneName, ListenerName, Conf) of
|
||||
start_listener(Type, ListenerName, #{bind := Bind} = Conf) ->
|
||||
case do_start_listener(Type, ListenerName, Conf) of
|
||||
{ok, {skipped, Reason}} when Reason =:= listener_disabled;
|
||||
Reason =:= quic_app_missing ->
|
||||
console_print("- Skip - starting ~s listener ~s on ~s ~n due to ~p",
|
||||
[Type, listener_id(ZoneName, ListenerName), format(Bind), Reason]);
|
||||
console_print("- Skip - starting listener ~s on ~s ~n due to ~p",
|
||||
[listener_id(Type, ListenerName), format_addr(Bind), Reason]);
|
||||
{ok, _} ->
|
||||
console_print("Start ~s listener ~s on ~s successfully.~n",
|
||||
[Type, listener_id(ZoneName, ListenerName), format(Bind)]);
|
||||
console_print("Listener ~s on ~s started.~n",
|
||||
[listener_id(Type, ListenerName), format_addr(Bind)]);
|
||||
{error, {already_started, Pid}} ->
|
||||
{error, {already_started, Pid}};
|
||||
{error, Reason} ->
|
||||
?ELOG("Failed to start ~s listener ~s on ~s: ~0p~n",
|
||||
[Type, listener_id(ZoneName, ListenerName), format(Bind), Reason]),
|
||||
?ELOG("Failed to start listener ~s on ~s: ~0p~n",
|
||||
[listener_id(Type, ListenerName), format_addr(Bind), Reason]),
|
||||
error(Reason)
|
||||
end.
|
||||
|
||||
%% @doc Restart all listeners
|
||||
-spec(restart() -> ok).
|
||||
restart() ->
|
||||
foreach_listeners(fun restart_listener/3).
|
||||
|
||||
-spec(restart_listener(atom()) -> ok | {error, term()}).
|
||||
restart_listener(ListenerId) ->
|
||||
apply_on_listener(ListenerId, fun restart_listener/3).
|
||||
|
||||
-spec(restart_listener(atom(), atom(), map()) -> ok | {error, term()}).
|
||||
restart_listener(Type, ListenerName, {OldConf, NewConf}) ->
|
||||
restart_listener(Type, ListenerName, OldConf, NewConf);
|
||||
restart_listener(Type, ListenerName, Conf) ->
|
||||
restart_listener(Type, ListenerName, Conf, Conf).
|
||||
|
||||
restart_listener(Type, ListenerName, OldConf, NewConf) ->
|
||||
case stop_listener(Type, ListenerName, OldConf) of
|
||||
ok -> start_listener(Type, ListenerName, NewConf);
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
%% @doc Stop all listeners.
|
||||
-spec(stop() -> ok).
|
||||
stop() ->
|
||||
%% The ?MODULE:stop/0 will be called by emqx_app when emqx is going to shutdown,
|
||||
%% so we uninstall the config handler here.
|
||||
_ = emqx_config_handler:remove_handler(?CONF_KEY_PATH),
|
||||
foreach_listeners(fun stop_listener/3).
|
||||
|
||||
-spec(stop_listener(atom()) -> ok | {error, term()}).
|
||||
stop_listener(ListenerId) ->
|
||||
apply_on_listener(ListenerId, fun stop_listener/3).
|
||||
|
||||
stop_listener(Type, ListenerName, #{bind := Bind} = Conf) ->
|
||||
case do_stop_listener(Type, ListenerName, Conf) of
|
||||
ok ->
|
||||
console_print("Listener ~s on ~s stopped.~n",
|
||||
[listener_id(Type, ListenerName), format_addr(Bind)]),
|
||||
ok;
|
||||
{error, Reason} ->
|
||||
?ELOG("Failed to stop listener ~s on ~s: ~0p~n",
|
||||
[listener_id(Type, ListenerName), format_addr(Bind), Reason]),
|
||||
{error, Reason}
|
||||
end.
|
||||
|
||||
-spec(do_stop_listener(atom(), atom(), map()) -> ok | {error, term()}).
|
||||
do_stop_listener(Type, ListenerName, #{bind := ListenOn}) when Type == tcp; Type == ssl ->
|
||||
esockd:close(listener_id(Type, ListenerName), ListenOn);
|
||||
do_stop_listener(Type, ListenerName, _Conf) when Type == ws; Type == wss ->
|
||||
cowboy:stop_listener(listener_id(Type, ListenerName));
|
||||
do_stop_listener(quic, ListenerName, _Conf) ->
|
||||
quicer:stop_listener(listener_id(quic, ListenerName)).
|
||||
|
||||
-ifndef(TEST).
|
||||
console_print(Fmt, Args) -> ?ULOG(Fmt, Args).
|
||||
-else.
|
||||
|
@ -121,79 +205,108 @@ console_print(_Fmt, _Args) -> ok.
|
|||
%% Start MQTT/TCP listener
|
||||
-spec(do_start_listener(atom(), atom(), map())
|
||||
-> {ok, pid() | {skipped, atom()}} | {error, term()}).
|
||||
do_start_listener(_ZoneName, _ListenerName, #{enabled := false}) ->
|
||||
do_start_listener(_Type, _ListenerName, #{enabled := false}) ->
|
||||
{ok, {skipped, listener_disabled}};
|
||||
do_start_listener(ZoneName, ListenerName, #{type := tcp, bind := ListenOn} = Opts) ->
|
||||
esockd:open(listener_id(ZoneName, ListenerName), ListenOn, merge_default(esockd_opts(Opts)),
|
||||
do_start_listener(Type, ListenerName, #{bind := ListenOn} = Opts)
|
||||
when Type == tcp; Type == ssl ->
|
||||
esockd:open(listener_id(Type, ListenerName), ListenOn, merge_default(esockd_opts(Type, Opts)),
|
||||
{emqx_connection, start_link,
|
||||
[#{zone => ZoneName, listener => ListenerName}]});
|
||||
[#{listener => {Type, ListenerName},
|
||||
zone => zone(Opts)}]});
|
||||
|
||||
%% Start MQTT/WS listener
|
||||
do_start_listener(ZoneName, ListenerName, #{type := ws, bind := ListenOn} = Opts) ->
|
||||
Id = listener_id(ZoneName, ListenerName),
|
||||
RanchOpts = ranch_opts(ListenOn, Opts),
|
||||
WsOpts = ws_opts(ZoneName, ListenerName, Opts),
|
||||
case is_ssl(Opts) of
|
||||
false ->
|
||||
cowboy:start_clear(Id, RanchOpts, WsOpts);
|
||||
true ->
|
||||
cowboy:start_tls(Id, RanchOpts, WsOpts)
|
||||
do_start_listener(Type, ListenerName, #{bind := ListenOn} = Opts)
|
||||
when Type == ws; Type == wss ->
|
||||
Id = listener_id(Type, ListenerName),
|
||||
RanchOpts = ranch_opts(Type, ListenOn, Opts),
|
||||
WsOpts = ws_opts(Type, ListenerName, Opts),
|
||||
case Type of
|
||||
ws -> cowboy:start_clear(Id, RanchOpts, WsOpts);
|
||||
wss -> cowboy:start_tls(Id, RanchOpts, WsOpts)
|
||||
end;
|
||||
|
||||
%% Start MQTT/QUIC listener
|
||||
do_start_listener(ZoneName, ListenerName, #{type := quic, bind := ListenOn} = Opts) ->
|
||||
do_start_listener(quic, ListenerName, #{bind := ListenOn} = Opts) ->
|
||||
case [ A || {quicer, _, _} = A<-application:which_applications() ] of
|
||||
[_] ->
|
||||
%% @fixme unsure why we need reopen lib and reopen config.
|
||||
quicer_nif:open_lib(),
|
||||
quicer_nif:reg_open(),
|
||||
DefAcceptors = erlang:system_info(schedulers_online) * 8,
|
||||
ListenOpts = [ {cert, maps:get(certfile, Opts)}
|
||||
, {key, maps:get(keyfile, Opts)}
|
||||
, {alpn, ["mqtt"]}
|
||||
, {conn_acceptors, maps:get(acceptors, Opts, DefAcceptors)}
|
||||
, {idle_timeout_ms, emqx_config:get_zone_conf(ZoneName, [mqtt, idle_timeout])}
|
||||
, {conn_acceptors, lists:max([DefAcceptors, maps:get(acceptors, Opts, 0)])}
|
||||
, {idle_timeout_ms, lists:max([
|
||||
emqx_config:get_zone_conf(zone(Opts), [mqtt, idle_timeout]) * 3
|
||||
, timer:seconds(maps:get(idle_timeout, Opts))]
|
||||
)}
|
||||
],
|
||||
ConnectionOpts = #{conn_callback => emqx_quic_connection
|
||||
ConnectionOpts = #{ conn_callback => emqx_quic_connection
|
||||
, peer_unidi_stream_count => 1
|
||||
, peer_bidi_stream_count => 10
|
||||
, zone => ZoneName
|
||||
, listener => ListenerName
|
||||
, zone => zone(Opts)
|
||||
, listener => {quic, ListenerName}
|
||||
},
|
||||
StreamOpts = [],
|
||||
quicer:start_listener(listener_id(ZoneName, ListenerName),
|
||||
StreamOpts = [{stream_callback, emqx_quic_stream}],
|
||||
quicer:start_listener(listener_id(quic, ListenerName),
|
||||
port(ListenOn), {ListenOpts, ConnectionOpts, StreamOpts});
|
||||
[] ->
|
||||
{ok, {skipped, quic_app_missing}}
|
||||
end.
|
||||
|
||||
esockd_opts(Opts0) ->
|
||||
delete_authentication(Type, ListenerName, _Conf) ->
|
||||
emqx_authentication:delete_chain(atom_to_binary(listener_id(Type, ListenerName))).
|
||||
|
||||
%% Update the listeners at runtime
|
||||
post_config_update(_Req, NewListeners, OldListeners, _AppEnvs) ->
|
||||
#{added := Added, removed := Removed, changed := Updated}
|
||||
= diff_listeners(NewListeners, OldListeners),
|
||||
perform_listener_changes(fun stop_listener/3, Removed),
|
||||
perform_listener_changes(fun delete_authentication/3, Removed),
|
||||
perform_listener_changes(fun start_listener/3, Added),
|
||||
perform_listener_changes(fun restart_listener/3, Updated).
|
||||
|
||||
perform_listener_changes(Action, MapConfs) ->
|
||||
lists:foreach(fun
|
||||
({Id, Conf}) ->
|
||||
{Type, Name} = parse_listener_id(Id),
|
||||
Action(Type, Name, Conf)
|
||||
end, maps:to_list(MapConfs)).
|
||||
|
||||
diff_listeners(NewListeners, OldListeners) ->
|
||||
emqx_map_lib:diff_maps(flatten_listeners(NewListeners), flatten_listeners(OldListeners)).
|
||||
|
||||
flatten_listeners(Conf0) ->
|
||||
maps:from_list(
|
||||
lists:append([do_flatten_listeners(Type, Conf)
|
||||
|| {Type, Conf} <- maps:to_list(Conf0)])).
|
||||
|
||||
do_flatten_listeners(Type, Conf0) ->
|
||||
[{listener_id(Type, Name), maps:remove(authentication, Conf)} || {Name, Conf} <- maps:to_list(Conf0)].
|
||||
|
||||
esockd_opts(Type, Opts0) ->
|
||||
Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0),
|
||||
Opts2 = case emqx_map_lib:deep_get([rate_limit, max_conn_rate], Opts0) of
|
||||
Opts2 = case emqx_config:get_zone_conf(zone(Opts0), [rate_limit, max_conn_rate]) of
|
||||
infinity -> Opts1;
|
||||
Rate -> Opts1#{max_conn_rate => Rate}
|
||||
end,
|
||||
Opts3 = Opts2#{access_rules => esockd_access_rules(maps:get(access_rules, Opts0, []))},
|
||||
maps:to_list(case is_ssl(Opts0) of
|
||||
false ->
|
||||
Opts3#{tcp_options => tcp_opts(Opts0)};
|
||||
true ->
|
||||
Opts3#{ssl_options => ssl_opts(Opts0), tcp_options => tcp_opts(Opts0)}
|
||||
maps:to_list(case Type of
|
||||
tcp -> Opts3#{tcp_options => tcp_opts(Opts0)};
|
||||
ssl -> Opts3#{ssl_options => ssl_opts(Opts0), tcp_options => tcp_opts(Opts0)}
|
||||
end).
|
||||
|
||||
ws_opts(ZoneName, ListenerName, Opts) ->
|
||||
ws_opts(Type, ListenerName, Opts) ->
|
||||
WsPaths = [{maps:get(mqtt_path, Opts, "/mqtt"), emqx_ws_connection,
|
||||
#{zone => ZoneName, listener => ListenerName}}],
|
||||
#{zone => zone(Opts), listener => {Type, ListenerName}}}],
|
||||
Dispatch = cowboy_router:compile([{'_', WsPaths}]),
|
||||
ProxyProto = maps:get(proxy_protocol, Opts, false),
|
||||
#{env => #{dispatch => Dispatch}, proxy_header => ProxyProto}.
|
||||
|
||||
ranch_opts(ListenOn, Opts) ->
|
||||
ranch_opts(Type, ListenOn, Opts) ->
|
||||
NumAcceptors = maps:get(acceptors, Opts, 4),
|
||||
MaxConnections = maps:get(max_connections, Opts, 1024),
|
||||
SocketOpts = case is_ssl(Opts) of
|
||||
true -> tcp_opts(Opts) ++ proplists:delete(handshake_timeout, ssl_opts(Opts));
|
||||
false -> tcp_opts(Opts)
|
||||
SocketOpts = case Type of
|
||||
wss -> tcp_opts(Opts) ++ proplists:delete(handshake_timeout, ssl_opts(Opts));
|
||||
ws -> tcp_opts(Opts)
|
||||
end,
|
||||
#{num_acceptors => NumAcceptors,
|
||||
max_connections => MaxConnections,
|
||||
|
@ -217,39 +330,6 @@ esockd_access_rules(StrRules) ->
|
|||
end,
|
||||
[Access(R) || R <- StrRules].
|
||||
|
||||
%% @doc Restart all listeners
|
||||
-spec(restart() -> ok).
|
||||
restart() ->
|
||||
foreach_listeners(fun restart_listener/3).
|
||||
|
||||
-spec(restart_listener(atom()) -> ok | {error, term()}).
|
||||
restart_listener(ListenerId) ->
|
||||
apply_on_listener(ListenerId, fun restart_listener/3).
|
||||
|
||||
-spec(restart_listener(atom(), atom(), map()) -> ok | {error, term()}).
|
||||
restart_listener(ZoneName, ListenerName, Conf) ->
|
||||
case stop_listener(ZoneName, ListenerName, Conf) of
|
||||
ok -> start_listener(ZoneName, ListenerName, Conf);
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
%% @doc Stop all listeners.
|
||||
-spec(stop() -> ok).
|
||||
stop() ->
|
||||
foreach_listeners(fun stop_listener/3).
|
||||
|
||||
-spec(stop_listener(atom()) -> ok | {error, term()}).
|
||||
stop_listener(ListenerId) ->
|
||||
apply_on_listener(ListenerId, fun stop_listener/3).
|
||||
|
||||
-spec(stop_listener(atom(), atom(), map()) -> ok | {error, term()}).
|
||||
stop_listener(ZoneName, ListenerName, #{type := tcp, bind := ListenOn}) ->
|
||||
esockd:close(listener_id(ZoneName, ListenerName), ListenOn);
|
||||
stop_listener(ZoneName, ListenerName, #{type := ws}) ->
|
||||
cowboy:stop_listener(listener_id(ZoneName, ListenerName));
|
||||
stop_listener(ZoneName, ListenerName, #{type := quic}) ->
|
||||
quicer:stop_listener(listener_id(ZoneName, ListenerName)).
|
||||
|
||||
merge_default(Options) ->
|
||||
case lists:keytake(tcp_options, 1, Options) of
|
||||
{value, {tcp_options, TcpOpts}, Options1} ->
|
||||
|
@ -258,24 +338,27 @@ merge_default(Options) ->
|
|||
[{tcp_options, ?MQTT_SOCKOPTS} | Options]
|
||||
end.
|
||||
|
||||
format(Port) when is_integer(Port) ->
|
||||
format_addr(Port) when is_integer(Port) ->
|
||||
io_lib:format("0.0.0.0:~w", [Port]);
|
||||
format({Addr, Port}) when is_list(Addr) ->
|
||||
format_addr({Addr, Port}) when is_list(Addr) ->
|
||||
io_lib:format("~s:~w", [Addr, Port]);
|
||||
format({Addr, Port}) when is_tuple(Addr) ->
|
||||
format_addr({Addr, Port}) when is_tuple(Addr) ->
|
||||
io_lib:format("~s:~w", [inet:ntoa(Addr), Port]).
|
||||
|
||||
listener_id(ZoneName, ListenerName) ->
|
||||
list_to_atom(lists:append([atom_to_list(ZoneName), ":", atom_to_list(ListenerName)])).
|
||||
listener_id(Type, ListenerName) ->
|
||||
list_to_atom(lists:append([str(Type), ":", str(ListenerName)])).
|
||||
|
||||
decode_listener_id(Id) ->
|
||||
parse_listener_id(Id) ->
|
||||
try
|
||||
[Zone, Listen] = string:split(atom_to_list(Id), ":", leading),
|
||||
{list_to_existing_atom(Zone), list_to_existing_atom(Listen)}
|
||||
[Type, Name] = string:split(str(Id), ":", leading),
|
||||
{list_to_existing_atom(Type), list_to_atom(Name)}
|
||||
catch
|
||||
_ : _ -> error({invalid_listener_id, Id})
|
||||
end.
|
||||
|
||||
zone(Opts) ->
|
||||
maps:get(zone, Opts, undefined).
|
||||
|
||||
ssl_opts(Opts) ->
|
||||
maps:to_list(
|
||||
emqx_tls_lib:drop_tls13_for_old_otp(
|
||||
|
@ -287,32 +370,28 @@ tcp_opts(Opts) ->
|
|||
maps:without([active_n],
|
||||
maps:get(tcp, Opts, #{}))).
|
||||
|
||||
is_ssl(Opts) ->
|
||||
emqx_map_lib:deep_get([ssl, enable], Opts, false).
|
||||
|
||||
foreach_listeners(Do) ->
|
||||
lists:foreach(
|
||||
fun({ZoneName, LName, LConf}) ->
|
||||
Do(ZoneName, LName, LConf)
|
||||
fun({Type, LName, LConf}) ->
|
||||
Do(Type, LName, LConf)
|
||||
end, do_list()).
|
||||
|
||||
has_enabled_listener_conf_by_type(Type) ->
|
||||
lists:any(
|
||||
fun({_Zone, _LName, LConf}) when is_map(LConf) ->
|
||||
Type =:= maps:get(type, LConf) andalso
|
||||
maps:get(enabled, LConf, true)
|
||||
fun({Type0, _LName, LConf}) when is_map(LConf) ->
|
||||
Type =:= Type0 andalso maps:get(enabled, LConf, true)
|
||||
end, do_list()).
|
||||
|
||||
%% merge the configs in zone and listeners in a manner that
|
||||
%% all config entries in the listener are prior to the ones in the zone.
|
||||
merge_zone_and_listener_confs(ZoneConf, ListenerConf) ->
|
||||
ConfsInZonesOnly = [listeners, overall_max_connections],
|
||||
BaseConf = maps:without(ConfsInZonesOnly, ZoneConf),
|
||||
emqx_map_lib:deep_merge(BaseConf, ListenerConf).
|
||||
|
||||
apply_on_listener(ListenerId, Do) ->
|
||||
{ZoneName, ListenerName} = decode_listener_id(ListenerId),
|
||||
case emqx_config:find_listener_conf(ZoneName, ListenerName, []) of
|
||||
{not_found, _, _} -> error({listener_config_not_found, ZoneName, ListenerName});
|
||||
{ok, Conf} -> Do(ZoneName, ListenerName, Conf)
|
||||
{Type, ListenerName} = parse_listener_id(ListenerId),
|
||||
case emqx_config:find_listener_conf(Type, ListenerName, []) of
|
||||
{not_found, _, _} -> error({listener_config_not_found, Type, ListenerName});
|
||||
{ok, Conf} -> Do(Type, ListenerName, Conf)
|
||||
end.
|
||||
|
||||
str(A) when is_atom(A) ->
|
||||
atom_to_list(A);
|
||||
str(B) when is_binary(B) ->
|
||||
binary_to_list(B);
|
||||
str(S) when is_list(S) ->
|
||||
S.
|
||||
|
|
|
@ -18,6 +18,19 @@
|
|||
|
||||
-compile({no_auto_import, [error/1]}).
|
||||
|
||||
-behaviour(gen_server).
|
||||
-behaviour(emqx_config_handler).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([ start_link/0
|
||||
, init/1
|
||||
, handle_call/3
|
||||
, handle_cast/2
|
||||
, handle_info/2
|
||||
, terminate/2
|
||||
, code_change/3
|
||||
]).
|
||||
|
||||
%% Logs
|
||||
-export([ debug/1
|
||||
, debug/2
|
||||
|
@ -47,6 +60,7 @@
|
|||
]).
|
||||
|
||||
-export([ get_primary_log_level/0
|
||||
, tune_primary_log_level/0
|
||||
, get_log_handlers/0
|
||||
, get_log_handlers/1
|
||||
, get_log_handler/1
|
||||
|
@ -56,6 +70,8 @@
|
|||
, stop_log_handler/1
|
||||
]).
|
||||
|
||||
-export([post_config_update/4]).
|
||||
|
||||
-type(peername_str() :: list()).
|
||||
-type(logger_dst() :: file:filename() | console | unknown).
|
||||
-type(logger_handler_info() :: #{
|
||||
|
@ -66,6 +82,49 @@
|
|||
}).
|
||||
|
||||
-define(stopped_handlers, {?MODULE, stopped_handlers}).
|
||||
-define(CONF_PATH, [log]).
|
||||
|
||||
start_link() ->
|
||||
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% gen_server callbacks
|
||||
%%--------------------------------------------------------------------
|
||||
init([]) ->
|
||||
ok = emqx_config_handler:add_handler(?CONF_PATH, ?MODULE),
|
||||
{ok, #{}}.
|
||||
|
||||
handle_call({update_config, AppEnvs}, _From, State) ->
|
||||
OldEnvs = application:get_env(kernel, logger, []),
|
||||
NewEnvs = proplists:get_value(logger, proplists:get_value(kernel, AppEnvs, []), []),
|
||||
ok = application:set_env(kernel, logger, NewEnvs),
|
||||
_ = [logger:remove_handler(HandlerId) || {handler, HandlerId, _Mod, _Conf} <- OldEnvs],
|
||||
_ = [logger:add_handler(HandlerId, Mod, Conf) || {handler, HandlerId, Mod, Conf} <- NewEnvs],
|
||||
ok = tune_primary_log_level(),
|
||||
{reply, ok, State};
|
||||
|
||||
handle_call(_Req, _From, State) ->
|
||||
{reply, ignored, State}.
|
||||
|
||||
handle_cast(_Msg, State) ->
|
||||
{noreply, State}.
|
||||
|
||||
handle_info(_Info, State) ->
|
||||
{noreply, State}.
|
||||
|
||||
terminate(_Reason, _State) ->
|
||||
ok = emqx_config_handler:remove_handler(?CONF_PATH),
|
||||
ok.
|
||||
|
||||
code_change(_OldVsn, State, _Extra) ->
|
||||
{ok, State}.
|
||||
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% emqx_config_handler callbacks
|
||||
%%--------------------------------------------------------------------
|
||||
post_config_update(_Req, _NewConf, _OldConf, AppEnvs) ->
|
||||
gen_server:call(?MODULE, {update_config, AppEnvs}, 5000).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% APIs
|
||||
|
@ -159,6 +218,16 @@ get_primary_log_level() ->
|
|||
#{level := Level} = logger:get_primary_config(),
|
||||
Level.
|
||||
|
||||
-spec tune_primary_log_level() -> ok.
|
||||
tune_primary_log_level() ->
|
||||
LowestLevel = lists:foldl(fun(#{level := Level}, OldLevel) ->
|
||||
case logger:compare_levels(Level, OldLevel) of
|
||||
lt -> Level;
|
||||
_ -> OldLevel
|
||||
end
|
||||
end, get_primary_log_level(), get_log_handlers()),
|
||||
set_primary_log_level(LowestLevel).
|
||||
|
||||
-spec(set_primary_log_level(logger:level()) -> ok | {error, term()}).
|
||||
set_primary_log_level(Level) ->
|
||||
logger:set_primary_config(level, Level).
|
||||
|
|
|
@ -23,17 +23,17 @@ check_config(X) -> logger_formatter:check_config(X).
|
|||
|
||||
format(#{msg := {report, Report}, meta := Meta} = Event, Config) when is_map(Report) ->
|
||||
logger_formatter:format(Event#{msg := {report, enrich(Report, Meta)}}, Config);
|
||||
format(#{msg := {Fmt, Args}, meta := Meta} = Event, Config) when is_list(Fmt) ->
|
||||
{NewFmt, NewArgs} = enrich_fmt(Fmt, Args, Meta),
|
||||
logger_formatter:format(Event#{msg := {NewFmt, NewArgs}}, Config).
|
||||
format(#{msg := Msg, meta := Meta} = Event, Config) ->
|
||||
NewMsg = enrich_fmt(Msg, Meta),
|
||||
logger_formatter:format(Event#{msg := NewMsg}, Config).
|
||||
|
||||
enrich(Report, #{mfa := Mfa, line := Line}) ->
|
||||
Report#{mfa => mfa(Mfa), line => Line};
|
||||
enrich(Report, _) -> Report.
|
||||
|
||||
enrich_fmt(Fmt, Args, #{mfa := Mfa, line := Line}) ->
|
||||
enrich_fmt({Fmt, Args}, #{mfa := Mfa, line := Line}) when is_list(Fmt) ->
|
||||
{Fmt ++ " mfa: ~s line: ~w", Args ++ [mfa(Mfa), Line]};
|
||||
enrich_fmt(Fmt, Args, _) ->
|
||||
{Fmt, Args}.
|
||||
enrich_fmt(Msg, _) ->
|
||||
Msg.
|
||||
|
||||
mfa({M, F, A}) -> atom_to_list(M) ++ ":" ++ atom_to_list(F) ++ "/" ++ integer_to_list(A).
|
||||
|
|
|
@ -23,11 +23,17 @@
|
|||
, deep_merge/2
|
||||
, safe_atom_key_map/1
|
||||
, unsafe_atom_key_map/1
|
||||
, jsonable_map/1
|
||||
, jsonable_map/2
|
||||
, binary_string/1
|
||||
, deep_convert/3
|
||||
, diff_maps/2
|
||||
]).
|
||||
|
||||
-export_type([config_key/0, config_key_path/0]).
|
||||
-type config_key() :: atom() | binary().
|
||||
-type config_key_path() :: [config_key()].
|
||||
-type convert_fun() :: fun((...) -> {K1::any(), V1::any()} | drop).
|
||||
|
||||
%%-----------------------------------------------------------------
|
||||
-spec deep_get(config_key_path(), map()) -> term().
|
||||
|
@ -59,13 +65,11 @@ deep_find(_KeyPath, Data) ->
|
|||
{not_found, _KeyPath, Data}.
|
||||
|
||||
-spec deep_put(config_key_path(), map(), term()) -> map().
|
||||
deep_put([], Map, Config) when is_map(Map) ->
|
||||
Config;
|
||||
deep_put([], _Map, Config) -> %% not map, replace it
|
||||
Config;
|
||||
deep_put([Key | KeyPath], Map, Config) ->
|
||||
SubMap = deep_put(KeyPath, maps:get(Key, Map, #{}), Config),
|
||||
Map#{Key => SubMap}.
|
||||
deep_put([], _Map, Data) ->
|
||||
Data;
|
||||
deep_put([Key | KeyPath], Map, Data) ->
|
||||
SubMap = maps:get(Key, Map, #{}),
|
||||
Map#{Key => deep_put(KeyPath, SubMap, Data)}.
|
||||
|
||||
-spec deep_remove(config_key_path(), map()) -> map().
|
||||
deep_remove([], Map) ->
|
||||
|
@ -97,21 +101,72 @@ deep_merge(BaseMap, NewMap) ->
|
|||
end, #{}, BaseMap),
|
||||
maps:merge(MergedBase, maps:with(NewKeys, NewMap)).
|
||||
|
||||
-spec deep_convert(map(), convert_fun(), Args::list()) -> map().
|
||||
deep_convert(Map, ConvFun, Args) when is_map(Map) ->
|
||||
maps:fold(fun(K, V, Acc) ->
|
||||
case apply(ConvFun, [K, deep_convert(V, ConvFun, Args) | Args]) of
|
||||
drop -> Acc;
|
||||
{K1, V1} -> Acc#{K1 => V1}
|
||||
end
|
||||
end, #{}, Map);
|
||||
deep_convert(ListV, ConvFun, Args) when is_list(ListV) ->
|
||||
[deep_convert(V, ConvFun, Args) || V <- ListV];
|
||||
deep_convert(Val, _, _Args) -> Val.
|
||||
|
||||
-spec unsafe_atom_key_map(#{binary() | atom() => any()}) -> #{atom() => any()}.
|
||||
unsafe_atom_key_map(Map) ->
|
||||
covert_keys_to_atom(Map, fun(K) -> binary_to_atom(K, utf8) end).
|
||||
|
||||
-spec safe_atom_key_map(#{binary() | atom() => any()}) -> #{atom() => any()}.
|
||||
safe_atom_key_map(Map) ->
|
||||
covert_keys_to_atom(Map, fun(K) -> binary_to_existing_atom(K, utf8) end).
|
||||
|
||||
-spec jsonable_map(map() | list()) -> map() | list().
|
||||
jsonable_map(Map) ->
|
||||
jsonable_map(Map, fun(K, V) -> {K, V} end).
|
||||
|
||||
jsonable_map(Map, JsonableFun) ->
|
||||
deep_convert(Map, fun binary_string_kv/3, [JsonableFun]).
|
||||
|
||||
-spec diff_maps(map(), map()) ->
|
||||
#{added := map(), identical := map(), removed := map(),
|
||||
changed := #{any() => {OldValue::any(), NewValue::any()}}}.
|
||||
diff_maps(NewMap, OldMap) ->
|
||||
InitR = #{identical => #{}, changed => #{}, removed => #{}},
|
||||
{Result, RemInNew} =
|
||||
lists:foldl(fun({OldK, OldV}, {Result0 = #{identical := I, changed := U, removed := D},
|
||||
RemNewMap}) ->
|
||||
Result1 = case maps:find(OldK, NewMap) of
|
||||
error ->
|
||||
Result0#{removed => D#{OldK => OldV}};
|
||||
{ok, NewV} when NewV == OldV ->
|
||||
Result0#{identical => I#{OldK => OldV}};
|
||||
{ok, NewV} ->
|
||||
Result0#{changed => U#{OldK => {OldV, NewV}}}
|
||||
end,
|
||||
{Result1, maps:remove(OldK, RemNewMap)}
|
||||
end, {InitR, NewMap}, maps:to_list(OldMap)),
|
||||
Result#{added => RemInNew}.
|
||||
|
||||
|
||||
binary_string_kv(K, V, JsonableFun) ->
|
||||
case JsonableFun(K, V) of
|
||||
drop -> drop;
|
||||
{K1, V1} -> {binary_string(K1), binary_string(V1)}
|
||||
end.
|
||||
|
||||
binary_string([]) -> [];
|
||||
binary_string(Val) when is_list(Val) ->
|
||||
case io_lib:printable_unicode_list(Val) of
|
||||
true -> unicode:characters_to_binary(Val);
|
||||
false -> [binary_string(V) || V <- Val]
|
||||
end;
|
||||
binary_string(Val) ->
|
||||
Val.
|
||||
|
||||
%%---------------------------------------------------------------------------
|
||||
covert_keys_to_atom(BinKeyMap, Conv) when is_map(BinKeyMap) ->
|
||||
maps:fold(
|
||||
fun(K, V, Acc) when is_binary(K) ->
|
||||
Acc#{Conv(K) => covert_keys_to_atom(V, Conv)};
|
||||
(K, V, Acc) when is_atom(K) ->
|
||||
%% richmap keys
|
||||
Acc#{K => covert_keys_to_atom(V, Conv)}
|
||||
end, #{}, BinKeyMap);
|
||||
covert_keys_to_atom(ListV, Conv) when is_list(ListV) ->
|
||||
[covert_keys_to_atom(V, Conv) || V <- ListV];
|
||||
covert_keys_to_atom(Val, _) -> Val.
|
||||
covert_keys_to_atom(BinKeyMap, Conv) ->
|
||||
deep_convert(BinKeyMap, fun
|
||||
(K, V) when is_atom(K) -> {K, V};
|
||||
(K, V) when is_binary(K) -> {Conv(K), V}
|
||||
end, []).
|
||||
|
|
|
@ -22,8 +22,6 @@
|
|||
-include("logger.hrl").
|
||||
-include("types.hrl").
|
||||
-include("emqx_mqtt.hrl").
|
||||
-include("emqx.hrl").
|
||||
|
||||
|
||||
-export([ start_link/0
|
||||
, stop/0
|
||||
|
|
|
@ -76,7 +76,7 @@ set_procmem_high_watermark(Float) ->
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
init([]) ->
|
||||
Opts = emqx_config:get([sysmon, os]),
|
||||
Opts = emqx:get_config([sysmon, os]),
|
||||
set_mem_check_interval(maps:get(mem_check_interval, Opts)),
|
||||
set_sysmem_high_watermark(maps:get(sysmem_high_watermark, Opts)),
|
||||
set_procmem_high_watermark(maps:get(procmem_high_watermark, Opts)),
|
||||
|
@ -91,8 +91,8 @@ handle_cast(Msg, State) ->
|
|||
{noreply, State}.
|
||||
|
||||
handle_info({timeout, _Timer, check}, State) ->
|
||||
CPUHighWatermark = emqx_config:get([sysmon, os, cpu_high_watermark]) * 100,
|
||||
CPULowWatermark = emqx_config:get([sysmon, os, cpu_low_watermark]) * 100,
|
||||
CPUHighWatermark = emqx:get_config([sysmon, os, cpu_high_watermark]) * 100,
|
||||
CPULowWatermark = emqx:get_config([sysmon, os, cpu_low_watermark]) * 100,
|
||||
_ = case emqx_vm:cpu_util() of %% TODO: should be improved?
|
||||
0 -> ok;
|
||||
Busy when Busy >= CPUHighWatermark ->
|
||||
|
@ -123,7 +123,7 @@ code_change(_OldVsn, State, _Extra) ->
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
start_check_timer() ->
|
||||
Interval = emqx_config:get([sysmon, os, cpu_check_interval]),
|
||||
Interval = emqx:get_config([sysmon, os, cpu_check_interval]),
|
||||
case erlang:system_info(system_architecture) of
|
||||
"x86_64-pc-linux-musl" -> ok;
|
||||
_ -> emqx_misc:start_timer(Interval, check)
|
||||
|
|
|
@ -43,7 +43,7 @@
|
|||
%% @doc Load all plugins when the broker started.
|
||||
-spec(load() -> ok | ignore | {error, term()}).
|
||||
load() ->
|
||||
ok = load_ext_plugins(emqx_config:get([plugins, expand_plugins_dir], undefined)).
|
||||
ok = load_ext_plugins(emqx:get_config([plugins, expand_plugins_dir], undefined)).
|
||||
|
||||
%% @doc Load a Plugin
|
||||
-spec(load(atom()) -> ok | {error, term()}).
|
||||
|
|
|
@ -17,8 +17,41 @@
|
|||
-module(emqx_quic_connection).
|
||||
|
||||
%% Callbacks
|
||||
-export([ new_conn/2
|
||||
-export([ init/1
|
||||
, new_conn/2
|
||||
, connected/2
|
||||
, shutdown/2
|
||||
]).
|
||||
|
||||
new_conn(Conn, {_L, COpts, _S}) when is_map(COpts) ->
|
||||
emqx_connection:start_link(emqx_quic_stream, Conn, COpts).
|
||||
-type cb_state() :: map() | proplists:proplist().
|
||||
|
||||
|
||||
-spec init(cb_state()) -> cb_state().
|
||||
init(ConnOpts) when is_list(ConnOpts) ->
|
||||
init(maps:from_list(ConnOpts));
|
||||
init(ConnOpts) when is_map(ConnOpts) ->
|
||||
ConnOpts.
|
||||
|
||||
-spec new_conn(quicer:connection_handler(), cb_state()) -> {ok, cb_state()} | {error, any()}.
|
||||
new_conn(Conn, S) ->
|
||||
process_flag(trap_exit, true),
|
||||
{ok, Pid} = emqx_connection:start_link(emqx_quic_stream, {self(), Conn}, S),
|
||||
receive
|
||||
{Pid, stream_acceptor_ready} ->
|
||||
ok = quicer:async_handshake(Conn),
|
||||
{ok, S};
|
||||
{'EXIT', Pid, _Reason} ->
|
||||
{error, stream_accept_error}
|
||||
end.
|
||||
|
||||
-spec connected(quicer:connection_handler(), cb_state()) -> {ok, cb_state()} | {error, any()}.
|
||||
connected(Conn, #{slow_start := false} = S) ->
|
||||
{ok, _Pid} = emqx_connection:start_link(emqx_quic_stream, Conn, S),
|
||||
{ok, S};
|
||||
connected(_Conn, S) ->
|
||||
{ok, S}.
|
||||
|
||||
-spec shutdown(quicer:connection_handler(), cb_state()) -> {ok, cb_state()} | {error, any()}.
|
||||
shutdown(Conn, S) ->
|
||||
quicer:async_close_connection(Conn),
|
||||
{ok, S}.
|
||||
|
|
|
@ -31,8 +31,16 @@
|
|||
, peercert/1
|
||||
]).
|
||||
|
||||
wait(Conn) ->
|
||||
quicer:accept_stream(Conn, []).
|
||||
wait({ConnOwner, Conn}) ->
|
||||
{ok, Conn} = quicer:async_accept_stream(Conn, []),
|
||||
ConnOwner ! {self(), stream_acceptor_ready},
|
||||
receive
|
||||
%% from msquic
|
||||
{quic, new_stream, Stream} ->
|
||||
{ok, Stream};
|
||||
{'EXIT', ConnOwner, _Reason} ->
|
||||
{error, enotconn}
|
||||
end.
|
||||
|
||||
type(_) ->
|
||||
quic.
|
||||
|
@ -44,6 +52,7 @@ sockname(S) ->
|
|||
quicer:sockname(S).
|
||||
|
||||
peercert(_S) ->
|
||||
%% @todo but unsupported by msquic
|
||||
nossl.
|
||||
|
||||
getstat(Socket, Stats) ->
|
||||
|
@ -88,5 +97,8 @@ ensure_ok_or_exit(Fun, Args = [Sock|_]) when is_atom(Fun), is_list(Args) ->
|
|||
async_send(Stream, Data, Options) when is_list(Data) ->
|
||||
async_send(Stream, iolist_to_binary(Data), Options);
|
||||
async_send(Stream, Data, _Options) when is_binary(Data) ->
|
||||
{ok, _Len} = quicer:send(Stream, Data),
|
||||
ok.
|
||||
case quicer:send(Stream, Data) of
|
||||
{ok, _Len} -> ok;
|
||||
Other ->
|
||||
Other
|
||||
end.
|
||||
|
|
|
@ -68,7 +68,6 @@
|
|||
-type(dest() :: node() | {group(), node()}).
|
||||
|
||||
-define(ROUTE_TAB, emqx_route).
|
||||
-rlog_shard({?ROUTE_SHARD, ?ROUTE_TAB}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Mnesia bootstrap
|
||||
|
@ -77,6 +76,7 @@
|
|||
mnesia(boot) ->
|
||||
ok = ekka_mnesia:create_table(?ROUTE_TAB, [
|
||||
{type, bag},
|
||||
{rlog_shard, ?ROUTE_SHARD},
|
||||
{ram_copies, [node()]},
|
||||
{record_name, route},
|
||||
{attributes, record_info(fields, route)},
|
||||
|
@ -250,7 +250,7 @@ delete_trie_route(Route = #route{topic = Topic}) ->
|
|||
%% @private
|
||||
-spec(maybe_trans(function(), list(any())) -> ok | {error, term()}).
|
||||
maybe_trans(Fun, Args) ->
|
||||
case emqx_config:get([broker, perf, route_lock_type]) of
|
||||
case emqx:get_config([broker, perf, route_lock_type]) of
|
||||
key ->
|
||||
trans(Fun, Args);
|
||||
global ->
|
||||
|
|
|
@ -52,8 +52,6 @@
|
|||
-define(ROUTING_NODE, emqx_routing_node).
|
||||
-define(LOCK, {?MODULE, cleanup_routes}).
|
||||
|
||||
-rlog_shard({?ROUTE_SHARD, ?ROUTING_NODE}).
|
||||
|
||||
-dialyzer({nowarn_function, [cleanup_routes/1]}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
|
@ -63,6 +61,7 @@
|
|||
mnesia(boot) ->
|
||||
ok = ekka_mnesia:create_table(?ROUTING_NODE, [
|
||||
{type, set},
|
||||
{rlog_shard, ?ROUTE_SHARD},
|
||||
{ram_copies, [node()]},
|
||||
{record_name, routing_node},
|
||||
{attributes, record_info(fields, routing_node)},
|
||||
|
|
|
@ -72,4 +72,4 @@ filter_result(Delivery) ->
|
|||
Delivery.
|
||||
|
||||
max_client_num() ->
|
||||
emqx_config:get([rpc, tcp_client_num], ?DefaultClientNum).
|
||||
emqx:get_config([rpc, tcp_client_num], ?DefaultClientNum).
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -76,8 +76,6 @@
|
|||
-define(NACK(Reason), {shared_sub_nack, Reason}).
|
||||
-define(NO_ACK, no_ack).
|
||||
|
||||
-rlog_shard({?SHARED_SUB_SHARD, ?TAB}).
|
||||
|
||||
-record(state, {pmon}).
|
||||
|
||||
-record(emqx_shared_subscription, {group, topic, subpid}).
|
||||
|
@ -89,6 +87,7 @@
|
|||
mnesia(boot) ->
|
||||
ok = ekka_mnesia:create_table(?TAB, [
|
||||
{type, bag},
|
||||
{rlog_shard, ?SHARED_SUB_SHARD},
|
||||
{ram_copies, [node()]},
|
||||
{record_name, emqx_shared_subscription},
|
||||
{attributes, record_info(fields, emqx_shared_subscription)}]);
|
||||
|
@ -136,11 +135,11 @@ dispatch(Group, Topic, Delivery = #delivery{message = Msg}, FailedSubs) ->
|
|||
|
||||
-spec(strategy() -> strategy()).
|
||||
strategy() ->
|
||||
emqx_config:get([broker, shared_subscription_strategy]).
|
||||
emqx:get_config([broker, shared_subscription_strategy]).
|
||||
|
||||
-spec(ack_enabled() -> boolean()).
|
||||
ack_enabled() ->
|
||||
emqx_config:get([broker, shared_dispatch_ack_enabled]).
|
||||
emqx:get_config([broker, shared_dispatch_ack_enabled]).
|
||||
|
||||
do_dispatch(SubPid, Topic, Msg, _Type) when SubPid =:= self() ->
|
||||
%% Deadlock otherwise
|
||||
|
|
|
@ -102,10 +102,10 @@ datetime() ->
|
|||
"~4..0w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w", [Y, M, D, H, MM, S])).
|
||||
|
||||
sys_interval() ->
|
||||
emqx_config:get([broker, sys_msg_interval]).
|
||||
emqx:get_config([broker, sys_msg_interval]).
|
||||
|
||||
sys_heatbeat_interval() ->
|
||||
emqx_config:get([broker, sys_heartbeat_interval]).
|
||||
emqx:get_config([broker, sys_heartbeat_interval]).
|
||||
|
||||
%% @doc Get sys info
|
||||
-spec(info() -> list(tuple())).
|
||||
|
|
|
@ -60,7 +60,7 @@ start_timer(State) ->
|
|||
State#{timer := emqx_misc:start_timer(timer:seconds(2), reset)}.
|
||||
|
||||
sysm_opts() ->
|
||||
sysm_opts(maps:to_list(emqx_config:get([sysmon, vm])), []).
|
||||
sysm_opts(maps:to_list(emqx:get_config([sysmon, vm])), []).
|
||||
sysm_opts([], Acc) ->
|
||||
Acc;
|
||||
sysm_opts([{_, disabled}|Opts], Acc) ->
|
||||
|
|
|
@ -50,8 +50,6 @@
|
|||
, count = 0 :: non_neg_integer()
|
||||
}).
|
||||
|
||||
-rlog_shard({?ROUTE_SHARD, ?TRIE}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Mnesia bootstrap
|
||||
%%--------------------------------------------------------------------
|
||||
|
@ -64,6 +62,7 @@ mnesia(boot) ->
|
|||
{write_concurrency, true}
|
||||
]}],
|
||||
ok = ekka_mnesia:create_table(?TRIE, [
|
||||
{rlog_shard, ?ROUTE_SHARD},
|
||||
{ram_copies, [node()]},
|
||||
{record_name, ?TRIE},
|
||||
{attributes, record_info(fields, ?TRIE)},
|
||||
|
@ -270,7 +269,7 @@ match_compact([Word | Words], Prefix, IsWildcard, Acc0) ->
|
|||
lookup_topic(MlTopic).
|
||||
|
||||
is_compact() ->
|
||||
emqx_config:get([broker, perf, trie_compaction], true).
|
||||
emqx:get_config([broker, perf, trie_compaction], true).
|
||||
|
||||
set_compact(Bool) ->
|
||||
emqx_config:put([broker, perf, trie_compaction], Bool).
|
||||
|
|
|
@ -57,8 +57,8 @@ handle_cast(Msg, State) ->
|
|||
{noreply, State}.
|
||||
|
||||
handle_info({timeout, _Timer, check}, State) ->
|
||||
ProcHighWatermark = emqx_config:get([sysmon, vm, process_high_watermark]),
|
||||
ProcLowWatermark = emqx_config:get([sysmon, vm, process_low_watermark]),
|
||||
ProcHighWatermark = emqx:get_config([sysmon, vm, process_high_watermark]),
|
||||
ProcLowWatermark = emqx:get_config([sysmon, vm, process_low_watermark]),
|
||||
ProcessCount = erlang:system_info(process_count),
|
||||
case ProcessCount / erlang:system_info(process_limit) of
|
||||
Percent when Percent >= ProcHighWatermark ->
|
||||
|
@ -89,5 +89,5 @@ code_change(_OldVsn, State, _Extra) ->
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
start_check_timer() ->
|
||||
Interval = emqx_config:get([sysmon, vm, process_check_interval]),
|
||||
Interval = emqx:get_config([sysmon, vm, process_check_interval]),
|
||||
emqx_misc:start_timer(Interval, check).
|
||||
|
|
|
@ -85,8 +85,8 @@
|
|||
idle_timer :: maybe(reference()),
|
||||
%% Zone name
|
||||
zone :: atom(),
|
||||
%% Listener Name
|
||||
listener :: atom()
|
||||
%% Listener Type and Name
|
||||
listener :: {Type::atom(), Name::atom()}
|
||||
}).
|
||||
|
||||
-type(state() :: #state{}).
|
||||
|
@ -173,12 +173,12 @@ call(WsPid, Req, Timeout) when is_pid(WsPid) ->
|
|||
%% WebSocket callbacks
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
init(Req, #{zone := Zone, listener := Listener} = Opts) ->
|
||||
init(Req, #{listener := {Type, Listener}} = Opts) ->
|
||||
%% WS Transport Idle Timeout
|
||||
WsOpts = #{compress => get_ws_opts(Zone, Listener, compress),
|
||||
deflate_opts => get_ws_opts(Zone, Listener, deflate_opts),
|
||||
max_frame_size => get_ws_opts(Zone, Listener, max_frame_size),
|
||||
idle_timeout => get_ws_opts(Zone, Listener, idle_timeout)
|
||||
WsOpts = #{compress => get_ws_opts(Type, Listener, compress),
|
||||
deflate_opts => get_ws_opts(Type, Listener, deflate_opts),
|
||||
max_frame_size => get_ws_opts(Type, Listener, max_frame_size),
|
||||
idle_timeout => get_ws_opts(Type, Listener, idle_timeout)
|
||||
},
|
||||
case check_origin_header(Req, Opts) of
|
||||
{error, Message} ->
|
||||
|
@ -187,17 +187,17 @@ init(Req, #{zone := Zone, listener := Listener} = Opts) ->
|
|||
ok -> parse_sec_websocket_protocol(Req, Opts, WsOpts)
|
||||
end.
|
||||
|
||||
parse_sec_websocket_protocol(Req, #{zone := Zone, listener := Listener} = Opts, WsOpts) ->
|
||||
parse_sec_websocket_protocol(Req, #{listener := {Type, Listener}} = Opts, WsOpts) ->
|
||||
case cowboy_req:parse_header(<<"sec-websocket-protocol">>, Req) of
|
||||
undefined ->
|
||||
case get_ws_opts(Zone, Listener, fail_if_no_subprotocol) of
|
||||
case get_ws_opts(Type, Listener, fail_if_no_subprotocol) of
|
||||
true ->
|
||||
{ok, cowboy_req:reply(400, Req), WsOpts};
|
||||
false ->
|
||||
{cowboy_websocket, Req, [Req, Opts], WsOpts}
|
||||
end;
|
||||
Subprotocols ->
|
||||
SupportedSubprotocols = get_ws_opts(Zone, Listener, supported_subprotocols),
|
||||
SupportedSubprotocols = get_ws_opts(Type, Listener, supported_subprotocols),
|
||||
NSupportedSubprotocols = [list_to_binary(Subprotocol)
|
||||
|| Subprotocol <- SupportedSubprotocols],
|
||||
case pick_subprotocol(Subprotocols, NSupportedSubprotocols) of
|
||||
|
@ -221,29 +221,29 @@ pick_subprotocol([Subprotocol | Rest], SupportedSubprotocols) ->
|
|||
pick_subprotocol(Rest, SupportedSubprotocols)
|
||||
end.
|
||||
|
||||
parse_header_fun_origin(Req, #{zone := Zone, listener := Listener}) ->
|
||||
parse_header_fun_origin(Req, #{listener := {Type, Listener}}) ->
|
||||
case cowboy_req:header(<<"origin">>, Req) of
|
||||
undefined ->
|
||||
case get_ws_opts(Zone, Listener, allow_origin_absence) of
|
||||
case get_ws_opts(Type, Listener, allow_origin_absence) of
|
||||
true -> ok;
|
||||
false -> {error, origin_header_cannot_be_absent}
|
||||
end;
|
||||
Value ->
|
||||
case lists:member(Value, get_ws_opts(Zone, Listener, check_origins)) of
|
||||
case lists:member(Value, get_ws_opts(Type, Listener, check_origins)) of
|
||||
true -> ok;
|
||||
false -> {origin_not_allowed, Value}
|
||||
end
|
||||
end.
|
||||
|
||||
check_origin_header(Req, #{zone := Zone, listener := Listener} = Opts) ->
|
||||
case get_ws_opts(Zone, Listener, check_origin_enable) of
|
||||
check_origin_header(Req, #{listener := {Type, Listener}} = Opts) ->
|
||||
case get_ws_opts(Type, Listener, check_origin_enable) of
|
||||
true -> parse_header_fun_origin(Req, Opts);
|
||||
false -> ok
|
||||
end.
|
||||
|
||||
websocket_init([Req, #{zone := Zone, listener := Listener} = Opts]) ->
|
||||
websocket_init([Req, #{zone := Zone, listener := {Type, Listener}} = Opts]) ->
|
||||
{Peername, Peercert} =
|
||||
case emqx_config:get_listener_conf(Zone, Listener, [proxy_protocol]) andalso
|
||||
case emqx_config:get_listener_conf(Type, Listener, [proxy_protocol]) andalso
|
||||
maps:get(proxy_header, Req) of
|
||||
#{src_address := SrcAddr, src_port := SrcPort, ssl := SSL} ->
|
||||
SourceName = {SrcAddr, SrcPort},
|
||||
|
@ -278,7 +278,7 @@ websocket_init([Req, #{zone := Zone, listener := Listener} = Opts]) ->
|
|||
conn_mod => ?MODULE
|
||||
},
|
||||
Limiter = emqx_limiter:init(Zone, undefined, undefined, []),
|
||||
MQTTPiggyback = get_ws_opts(Zone, Listener, mqtt_piggyback),
|
||||
MQTTPiggyback = get_ws_opts(Type, Listener, mqtt_piggyback),
|
||||
FrameOpts = #{
|
||||
strict_mode => emqx_config:get_zone_conf(Zone, [mqtt, strict_mode]),
|
||||
max_size => emqx_config:get_zone_conf(Zone, [mqtt, max_packet_size])
|
||||
|
@ -317,7 +317,7 @@ websocket_init([Req, #{zone := Zone, listener := Listener} = Opts]) ->
|
|||
idle_timeout = IdleTimeout,
|
||||
idle_timer = IdleTimer,
|
||||
zone = Zone,
|
||||
listener = Listener
|
||||
listener = {Type, Listener}
|
||||
}, hibernate}.
|
||||
|
||||
websocket_handle({binary, Data}, State) when is_list(Data) ->
|
||||
|
@ -370,8 +370,8 @@ websocket_info({check_gc, Stats}, State) ->
|
|||
return(check_oom(run_gc(Stats, State)));
|
||||
|
||||
websocket_info(Deliver = {deliver, _Topic, _Msg},
|
||||
State = #state{zone = Zone, listener = Listener}) ->
|
||||
ActiveN = emqx_config:get_listener_conf(Zone, Listener, [tcp, active_n]),
|
||||
State = #state{listener = {Type, Listener}}) ->
|
||||
ActiveN = get_active_n(Type, Listener),
|
||||
Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)],
|
||||
with_channel(handle_deliver, [Delivers], State);
|
||||
|
||||
|
@ -558,12 +558,12 @@ parse_incoming(Data, State = #state{parse_state = ParseState}) ->
|
|||
%% Handle incoming packet
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
handle_incoming(Packet, State = #state{zone = Zone, listener = Listener})
|
||||
handle_incoming(Packet, State = #state{listener = {Type, Listener}})
|
||||
when is_record(Packet, mqtt_packet) ->
|
||||
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
|
||||
ok = inc_incoming_stats(Packet),
|
||||
NState = case emqx_pd:get_counter(incoming_pubs) >
|
||||
emqx_config:get_listener_conf(Zone, Listener, [tcp, active_n]) of
|
||||
get_active_n(Type, Listener) of
|
||||
true -> postpone({cast, rate_limit}, State);
|
||||
false -> State
|
||||
end,
|
||||
|
@ -595,12 +595,12 @@ with_channel(Fun, Args, State = #state{channel = Channel}) ->
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
handle_outgoing(Packets, State = #state{mqtt_piggyback = MQTTPiggyback,
|
||||
zone = Zone, listener = Listener}) ->
|
||||
listener = {Type, Listener}}) ->
|
||||
IoData = lists:map(serialize_and_inc_stats_fun(State), Packets),
|
||||
Oct = iolist_size(IoData),
|
||||
ok = inc_sent_stats(length(Packets), Oct),
|
||||
NState = case emqx_pd:get_counter(outgoing_pubs) >
|
||||
emqx_config:get_listener_conf(Zone, Listener, [tcp, active_n]) of
|
||||
get_active_n(Type, Listener) of
|
||||
true ->
|
||||
Stats = #{cnt => emqx_pd:reset_counter(outgoing_pubs),
|
||||
oct => emqx_pd:reset_counter(outgoing_bytes)
|
||||
|
@ -749,10 +749,10 @@ classify([Event|More], Packets, Cmds, Events) ->
|
|||
|
||||
trigger(Event) -> erlang:send(self(), Event).
|
||||
|
||||
get_peer(Req, #{zone := Zone, listener := Listener}) ->
|
||||
get_peer(Req, #{listener := {Type, Listener}}) ->
|
||||
{PeerAddr, PeerPort} = cowboy_req:peer(Req),
|
||||
AddrHeader = cowboy_req:header(
|
||||
get_ws_opts(Zone, Listener, proxy_address_header), Req, <<>>),
|
||||
get_ws_opts(Type, Listener, proxy_address_header), Req, <<>>),
|
||||
ClientAddr = case string:tokens(binary_to_list(AddrHeader), ", ") of
|
||||
[] ->
|
||||
undefined;
|
||||
|
@ -766,7 +766,7 @@ get_peer(Req, #{zone := Zone, listener := Listener}) ->
|
|||
PeerAddr
|
||||
end,
|
||||
PortHeader = cowboy_req:header(
|
||||
get_ws_opts(Zone, Listener, proxy_port_header), Req, <<>>),
|
||||
get_ws_opts(Type, Listener, proxy_port_header), Req, <<>>),
|
||||
ClientPort = case string:tokens(binary_to_list(PortHeader), ", ") of
|
||||
[] ->
|
||||
undefined;
|
||||
|
@ -787,5 +787,8 @@ set_field(Name, Value, State) ->
|
|||
Pos = emqx_misc:index_of(Name, record_info(fields, state)),
|
||||
setelement(Pos+1, State, Value).
|
||||
|
||||
get_ws_opts(Zone, Listener, Key) ->
|
||||
emqx_config:get_listener_conf(Zone, Listener, [websocket, Key]).
|
||||
get_ws_opts(Type, Listener, Key) ->
|
||||
emqx_config:get_listener_conf(Type, Listener, [websocket, Key]).
|
||||
|
||||
get_active_n(Type, Listener) ->
|
||||
emqx_config:get_listener_conf(Type, Listener, [tcp, active_n]).
|
|
@ -14,25 +14,21 @@
|
|||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_stub_conn).
|
||||
-module(emqx_zone_schema).
|
||||
|
||||
-export([ start/1
|
||||
, send/2
|
||||
, stop/1
|
||||
]).
|
||||
-export([namespace/0, roots/0, fields/1]).
|
||||
|
||||
-type ack_ref() :: emqx_bridge_worker:ack_ref().
|
||||
-type batch() :: emqx_bridge_worker:batch().
|
||||
namespace() -> zone.
|
||||
|
||||
start(#{client_pid := Pid} = Cfg) ->
|
||||
Pid ! {self(), ?MODULE, ready},
|
||||
{ok, Cfg}.
|
||||
roots() -> [].
|
||||
|
||||
stop(_) -> ok.
|
||||
%% zone schemas are clones from the same name from root level
|
||||
%% only not allowed to have default values.
|
||||
fields(Name) ->
|
||||
[{N, no_default(Sc)} || {N, Sc} <- emqx_schema:fields(Name)].
|
||||
|
||||
%% @doc Callback for `emqx_bridge_connect' behaviour
|
||||
-spec send(_, batch()) -> {ok, ack_ref()} | {error, any()}.
|
||||
send(#{client_pid := Pid}, Batch) ->
|
||||
Ref = make_ref(),
|
||||
Pid ! {stub_message, self(), Ref, Batch},
|
||||
{ok, Ref}.
|
||||
%% no default values for zone settings
|
||||
no_default(Sc) ->
|
||||
fun(default) -> undefined;
|
||||
(Other) -> hocon_schema:field_schema(Sc, Other)
|
||||
end.
|
|
@ -33,7 +33,7 @@ end_per_suite(_Config) ->
|
|||
emqx_ct_helpers:stop_apps([]).
|
||||
|
||||
t_authenticate(_) ->
|
||||
?assertMatch(ok, emqx_access_control:authenticate(clientinfo())).
|
||||
?assertMatch({ok, _}, emqx_access_control:authenticate(clientinfo())).
|
||||
|
||||
t_authorize(_) ->
|
||||
Publish = ?PUBLISH_PACKET(?QOS_0, <<"t">>, 1, <<"payload">>),
|
||||
|
@ -46,7 +46,7 @@ t_authorize(_) ->
|
|||
clientinfo() -> clientinfo(#{}).
|
||||
clientinfo(InitProps) ->
|
||||
maps:merge(#{zone => default,
|
||||
listener => mqtt_tcp,
|
||||
listener => {tcp, default},
|
||||
protocol => mqtt,
|
||||
peerhost => {127,0,0,1},
|
||||
clientid => <<"clientid">>,
|
||||
|
|
|
@ -28,14 +28,14 @@ all() -> emqx_ct:all(?MODULE).
|
|||
init_per_testcase(t_size_limit, Config) ->
|
||||
emqx_ct_helpers:boot_modules(all),
|
||||
emqx_ct_helpers:start_apps([]),
|
||||
emqx_config:update([alarm], #{
|
||||
{ok, _} = emqx:update_config([alarm], #{
|
||||
<<"size_limit">> => 2
|
||||
}),
|
||||
Config;
|
||||
init_per_testcase(t_validity_period, Config) ->
|
||||
emqx_ct_helpers:boot_modules(all),
|
||||
emqx_ct_helpers:start_apps([]),
|
||||
emqx_config:update([alarm], #{
|
||||
{ok, _} = emqx:update_config([alarm], #{
|
||||
<<"validity_period">> => <<"1s">>
|
||||
}),
|
||||
Config;
|
||||
|
|
|
@ -0,0 +1,238 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_authentication_SUITE).
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-export([ fields/1 ]).
|
||||
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
]).
|
||||
|
||||
-define(AUTHN, emqx_authentication).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
fields(type1) ->
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, ['built-in-database']}}
|
||||
, {enable, fun enable/1}
|
||||
];
|
||||
|
||||
fields(type2) ->
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, ['mysql']}}
|
||||
, {enable, fun enable/1}
|
||||
].
|
||||
|
||||
enable(type) -> boolean();
|
||||
enable(default) -> true;
|
||||
enable(_) -> undefined.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Callbacks
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, type1)
|
||||
, hoconsc:ref(?MODULE, type2)
|
||||
].
|
||||
|
||||
create(_Config) ->
|
||||
{ok, #{mark => 1}}.
|
||||
|
||||
update(_Config, _State) ->
|
||||
{ok, #{mark => 2}}.
|
||||
|
||||
authenticate(#{username := <<"good">>}, _State) ->
|
||||
{ok, #{is_superuser => true}};
|
||||
authenticate(#{username := _}, _State) ->
|
||||
{error, bad_username_or_password}.
|
||||
|
||||
destroy(_State) ->
|
||||
ok.
|
||||
|
||||
all() ->
|
||||
emqx_ct:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
application:set_env(ekka, strict_mode, true),
|
||||
emqx_ct_helpers:start_apps([]),
|
||||
Config.
|
||||
|
||||
end_per_suite(_) ->
|
||||
emqx_ct_helpers:stop_apps([]),
|
||||
ok.
|
||||
|
||||
t_chain(_) ->
|
||||
% CRUD of authentication chain
|
||||
ChainName = 'test',
|
||||
?assertMatch({ok, []}, ?AUTHN:list_chains()),
|
||||
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:create_chain(ChainName)),
|
||||
?assertEqual({error, {already_exists, {chain, ChainName}}}, ?AUTHN:create_chain(ChainName)),
|
||||
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:lookup_chain(ChainName)),
|
||||
?assertMatch({ok, [#{name := ChainName}]}, ?AUTHN:list_chains()),
|
||||
?assertEqual(ok, ?AUTHN:delete_chain(ChainName)),
|
||||
?assertMatch({error, {not_found, {chain, ChainName}}}, ?AUTHN:lookup_chain(ChainName)),
|
||||
ok.
|
||||
|
||||
t_authenticator(_) ->
|
||||
ChainName = 'test',
|
||||
AuthenticatorConfig1 = #{mechanism => 'password-based',
|
||||
backend => 'built-in-database',
|
||||
enable => true},
|
||||
|
||||
% Create an authenticator when the authentication chain does not exist
|
||||
?assertEqual({error, {not_found, {chain, ChainName}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
?AUTHN:create_chain(ChainName),
|
||||
% Create an authenticator when the provider does not exist
|
||||
?assertEqual({error, no_available_provider}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
|
||||
AuthNType1 = {'password-based', 'built-in-database'},
|
||||
?AUTHN:add_provider(AuthNType1, ?MODULE),
|
||||
ID1 = <<"password-based:built-in-database">>,
|
||||
|
||||
% CRUD of authencaticator
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
?assertMatch({ok, #{id := ID1}}, ?AUTHN:lookup_authenticator(ChainName, ID1)),
|
||||
?assertMatch({ok, [#{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
?assertEqual({error, {already_exists, {authenticator, ID1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
|
||||
?assertEqual(ok, ?AUTHN:delete_authenticator(ChainName, ID1)),
|
||||
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
|
||||
?assertMatch({ok, []}, ?AUTHN:list_authenticators(ChainName)),
|
||||
|
||||
% Multiple authenticators exist at the same time
|
||||
AuthNType2 = {'password-based', mysql},
|
||||
?AUTHN:add_provider(AuthNType2, ?MODULE),
|
||||
ID2 = <<"password-based:mysql">>,
|
||||
AuthenticatorConfig2 = #{mechanism => 'password-based',
|
||||
backend => mysql,
|
||||
enable => true},
|
||||
?assertMatch({ok, #{id := ID1}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
?assertMatch({ok, #{id := ID2}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)),
|
||||
|
||||
% Move authenticator
|
||||
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, top)),
|
||||
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, bottom)),
|
||||
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, {before, ID1})),
|
||||
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
|
||||
?AUTHN:delete_chain(ChainName),
|
||||
?AUTHN:remove_provider(AuthNType1),
|
||||
?AUTHN:remove_provider(AuthNType2),
|
||||
ok.
|
||||
|
||||
t_authenticate(_) ->
|
||||
ListenerID = 'tcp:default',
|
||||
ClientInfo = #{zone => default,
|
||||
listener => ListenerID,
|
||||
protocol => mqtt,
|
||||
username => <<"good">>,
|
||||
password => <<"any">>},
|
||||
?assertEqual({ok, #{is_superuser => false}}, emqx_access_control:authenticate(ClientInfo)),
|
||||
|
||||
AuthNType = {'password-based', 'built-in-database'},
|
||||
?AUTHN:add_provider(AuthNType, ?MODULE),
|
||||
|
||||
AuthenticatorConfig = #{mechanism => 'password-based',
|
||||
backend => 'built-in-database',
|
||||
enable => true},
|
||||
?AUTHN:create_chain(ListenerID),
|
||||
?assertMatch({ok, _}, ?AUTHN:create_authenticator(ListenerID, AuthenticatorConfig)),
|
||||
?assertEqual({ok, #{is_superuser => true}}, emqx_access_control:authenticate(ClientInfo)),
|
||||
?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>})),
|
||||
|
||||
?AUTHN:delete_chain(ListenerID),
|
||||
?AUTHN:remove_provider(AuthNType),
|
||||
ok.
|
||||
|
||||
t_update_config(_) ->
|
||||
emqx_config_handler:add_handler([authentication], emqx_authentication),
|
||||
|
||||
AuthNType1 = {'password-based', 'built-in-database'},
|
||||
AuthNType2 = {'password-based', mysql},
|
||||
?AUTHN:add_provider(AuthNType1, ?MODULE),
|
||||
?AUTHN:add_provider(AuthNType2, ?MODULE),
|
||||
|
||||
Global = 'mqtt:global',
|
||||
AuthenticatorConfig1 = #{mechanism => 'password-based',
|
||||
backend => 'built-in-database',
|
||||
enable => true},
|
||||
AuthenticatorConfig2 = #{mechanism => 'password-based',
|
||||
backend => mysql,
|
||||
enable => true},
|
||||
ID1 = <<"password-based:built-in-database">>,
|
||||
ID2 = <<"password-based:mysql">>,
|
||||
|
||||
?assertMatch({ok, []}, ?AUTHN:list_chains()),
|
||||
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
|
||||
|
||||
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})),
|
||||
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID2)),
|
||||
|
||||
?assertMatch({ok, _}, update_config([authentication], {update_authenticator, Global, ID1, #{}})),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
|
||||
|
||||
?assertMatch({ok, _}, update_config([authentication], {move_authenticator, Global, ID2, <<"top">>})),
|
||||
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(Global)),
|
||||
|
||||
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})),
|
||||
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
|
||||
|
||||
ListenerID = 'tcp:default',
|
||||
ConfKeyPath = [listeners, tcp, default, authentication],
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig1})),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
|
||||
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig2})),
|
||||
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID2)),
|
||||
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {update_authenticator, ListenerID, ID1, #{}})),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
|
||||
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, <<"top">>})),
|
||||
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ListenerID)),
|
||||
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})),
|
||||
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
|
||||
|
||||
?AUTHN:delete_chain(Global),
|
||||
?AUTHN:remove_provider(AuthNType1),
|
||||
?AUTHN:remove_provider(AuthNType2),
|
||||
ok.
|
||||
|
||||
update_config(Path, ConfigRequest) ->
|
||||
emqx:update_config(Path, ConfigRequest, #{rawconf_with_defaults => true}).
|
|
@ -26,7 +26,6 @@ all() -> emqx_ct:all(?MODULE).
|
|||
init_per_suite(Config) ->
|
||||
emqx_ct_helpers:boot_modules(all),
|
||||
emqx_ct_helpers:start_apps([]),
|
||||
toggle_authz(true),
|
||||
Config.
|
||||
|
||||
end_per_suite(_Config) ->
|
||||
|
@ -78,6 +77,3 @@ t_drain_authz_cache(_) ->
|
|||
{ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0),
|
||||
?assert(length(gen_server:call(ClientPid, list_authz_cache)) > 0),
|
||||
emqtt:stop(Client).
|
||||
|
||||
toggle_authz(Bool) when is_boolean(Bool) ->
|
||||
emqx_config:put_zone_conf(default, [authorization, enable], Bool).
|
||||
|
|
|
@ -27,149 +27,112 @@
|
|||
all() ->
|
||||
emqx_ct:all(?MODULE).
|
||||
|
||||
force_gc_conf() ->
|
||||
#{bytes => 16777216,count => 16000,enable => true}.
|
||||
|
||||
force_shutdown_conf() ->
|
||||
#{enable => true,max_heap_size => 4194304, max_message_queue_len => 1000}.
|
||||
|
||||
rate_limit_conf() ->
|
||||
#{conn_bytes_in => ["100KB","10s"],
|
||||
conn_messages_in => ["100","10s"],
|
||||
max_conn_rate => 1000,
|
||||
quota =>
|
||||
#{conn_messages_routing => infinity,
|
||||
overall_messages_routing => infinity}}.
|
||||
|
||||
rpc_conf() ->
|
||||
#{async_batch_size => 256,authentication_timeout => 5000,
|
||||
call_receive_timeout => 15000,connect_timeout => 5000,
|
||||
mode => async,port_discovery => stateless,
|
||||
send_timeout => 5000,socket_buffer => 1048576,
|
||||
socket_keepalive_count => 9,socket_keepalive_idle => 900,
|
||||
socket_keepalive_interval => 75,socket_recbuf => 1048576,
|
||||
socket_sndbuf => 1048576,tcp_client_num => 1,
|
||||
tcp_server_port => 5369}.
|
||||
|
||||
mqtt_conf() ->
|
||||
#{await_rel_timeout => 300000,
|
||||
idle_timeout => 15000,
|
||||
ignore_loop_deliver => false,
|
||||
keepalive_backoff => 0.75,
|
||||
max_awaiting_rel => 100,
|
||||
max_clientid_len => 65535,
|
||||
max_inflight => 32,
|
||||
max_mqueue_len => 1000,
|
||||
max_packet_size => 1048576,
|
||||
max_qos_allowed => 2,
|
||||
max_subscriptions => infinity,
|
||||
max_topic_alias => 65535,
|
||||
max_topic_levels => 65535,
|
||||
mountpoint => <<>>,
|
||||
mqueue_default_priority => lowest,
|
||||
mqueue_priorities => #{},
|
||||
mqueue_store_qos0 => true,
|
||||
peer_cert_as_clientid => disabled,
|
||||
peer_cert_as_username => disabled,
|
||||
response_information => [],
|
||||
retain_available => true,
|
||||
retry_interval => 30000,
|
||||
server_keepalive => disabled,
|
||||
session_expiry_interval => 7200000,
|
||||
shared_subscription => true,
|
||||
strict_mode => false,
|
||||
upgrade_qos => false,
|
||||
use_username_as_clientid => false,
|
||||
wildcard_subscription => true}.
|
||||
#{await_rel_timeout => 300000,idle_timeout => 15000,
|
||||
ignore_loop_deliver => false,keepalive_backoff => 0.75,
|
||||
max_awaiting_rel => 100,max_clientid_len => 65535,
|
||||
max_inflight => 32,max_mqueue_len => 1000,
|
||||
max_packet_size => 1048576,max_qos_allowed => 2,
|
||||
max_subscriptions => infinity,max_topic_alias => 65535,
|
||||
max_topic_levels => 65535,mqueue_default_priority => lowest,
|
||||
mqueue_priorities => disabled,mqueue_store_qos0 => true,
|
||||
peer_cert_as_clientid => disabled,
|
||||
peer_cert_as_username => disabled,
|
||||
response_information => [],retain_available => true,
|
||||
retry_interval => 30000,server_keepalive => disabled,
|
||||
session_expiry_interval => 7200000,
|
||||
shared_subscription => true,strict_mode => false,
|
||||
upgrade_qos => false,use_username_as_clientid => false,
|
||||
wildcard_subscription => true}.
|
||||
|
||||
|
||||
listener_mqtt_tcp_conf() ->
|
||||
#{acceptors => 16,
|
||||
access_rules => ["allow all"],
|
||||
bind => {{0,0,0,0},1883},
|
||||
max_connections => 1024000,
|
||||
proxy_protocol => false,
|
||||
proxy_protocol_timeout => 3000,
|
||||
rate_limit =>
|
||||
#{conn_bytes_in =>
|
||||
["100KB","10s"],
|
||||
conn_messages_in =>
|
||||
["100","10s"],
|
||||
max_conn_rate => 1000,
|
||||
quota =>
|
||||
#{conn_messages_routing => infinity,
|
||||
overall_messages_routing => infinity}},
|
||||
tcp =>
|
||||
#{active_n => 100,
|
||||
backlog => 1024,
|
||||
buffer => 4096,
|
||||
high_watermark => 1048576,
|
||||
send_timeout => 15000,
|
||||
send_timeout_close =>
|
||||
true},
|
||||
type => tcp}.
|
||||
zone => default,
|
||||
access_rules => ["allow all"],
|
||||
bind => {{0,0,0,0},1883},
|
||||
max_connections => 1024000,mountpoint => <<>>,
|
||||
proxy_protocol => false,proxy_protocol_timeout => 3000,
|
||||
tcp => #{
|
||||
active_n => 100,backlog => 1024,buffer => 4096,
|
||||
high_watermark => 1048576,nodelay => false,
|
||||
reuseaddr => true,send_timeout => 15000,
|
||||
send_timeout_close => true}}.
|
||||
|
||||
listener_mqtt_ws_conf() ->
|
||||
#{acceptors => 16,
|
||||
access_rules => ["allow all"],
|
||||
bind => {{0,0,0,0},8083},
|
||||
max_connections => 1024000,
|
||||
proxy_protocol => false,
|
||||
proxy_protocol_timeout => 3000,
|
||||
rate_limit =>
|
||||
#{conn_bytes_in =>
|
||||
["100KB","10s"],
|
||||
conn_messages_in =>
|
||||
["100","10s"],
|
||||
max_conn_rate => 1000,
|
||||
quota =>
|
||||
#{conn_messages_routing => infinity,
|
||||
overall_messages_routing => infinity}},
|
||||
tcp =>
|
||||
#{active_n => 100,
|
||||
backlog => 1024,
|
||||
buffer => 4096,
|
||||
high_watermark => 1048576,
|
||||
send_timeout => 15000,
|
||||
send_timeout_close =>
|
||||
true},
|
||||
type => ws,
|
||||
websocket =>
|
||||
#{allow_origin_absence =>
|
||||
true,
|
||||
check_origin_enable =>
|
||||
false,
|
||||
check_origins => [],
|
||||
compress => false,
|
||||
deflate_opts =>
|
||||
#{client_max_window_bits =>
|
||||
15,
|
||||
mem_level => 8,
|
||||
server_max_window_bits =>
|
||||
15},
|
||||
fail_if_no_subprotocol =>
|
||||
true,
|
||||
idle_timeout => 86400000,
|
||||
max_frame_size => infinity,
|
||||
mqtt_path => "/mqtt",
|
||||
mqtt_piggyback => multiple,
|
||||
proxy_address_header =>
|
||||
"x-forwarded-for",
|
||||
proxy_port_header =>
|
||||
"x-forwarded-port",
|
||||
supported_subprotocols =>
|
||||
["mqtt","mqtt-v3",
|
||||
"mqtt-v3.1.1",
|
||||
"mqtt-v5"]}}.
|
||||
zone => default,
|
||||
access_rules => ["allow all"],
|
||||
bind => {{0,0,0,0},8083},
|
||||
max_connections => 1024000,mountpoint => <<>>,
|
||||
proxy_protocol => false,proxy_protocol_timeout => 3000,
|
||||
tcp =>
|
||||
#{active_n => 100,backlog => 1024,buffer => 4096,
|
||||
high_watermark => 1048576,nodelay => false,
|
||||
reuseaddr => true,send_timeout => 15000,
|
||||
send_timeout_close => true},
|
||||
websocket =>
|
||||
#{allow_origin_absence => true,check_origin_enable => false,
|
||||
check_origins => [],compress => false,
|
||||
deflate_opts =>
|
||||
#{client_max_window_bits => 15,mem_level => 8,
|
||||
server_max_window_bits => 15},
|
||||
fail_if_no_subprotocol => true,idle_timeout => 86400000,
|
||||
max_frame_size => infinity,mqtt_path => "/mqtt",
|
||||
mqtt_piggyback => multiple,
|
||||
proxy_address_header => "x-forwarded-for",
|
||||
proxy_port_header => "x-forwarded-port",
|
||||
supported_subprotocols =>
|
||||
["mqtt","mqtt-v3","mqtt-v3.1.1","mqtt-v5"]}}.
|
||||
|
||||
default_zone_conf() ->
|
||||
#{zones =>
|
||||
#{default =>
|
||||
#{ authorization => #{
|
||||
cache => #{enable => true,max_size => 32, ttl => 60000},
|
||||
deny_action => ignore,
|
||||
enable => false
|
||||
},
|
||||
auth => #{enable => false},
|
||||
overall_max_connections => infinity,
|
||||
stats => #{enable => true},
|
||||
conn_congestion =>
|
||||
#{enable_alarm => true, min_alarm_sustain_duration => 60000},
|
||||
flapping_detect =>
|
||||
#{ban_time => 300000,enable => false,
|
||||
max_count => 15,window_time => 60000},
|
||||
force_gc =>
|
||||
#{bytes => 16777216,count => 16000,
|
||||
enable => true},
|
||||
force_shutdown =>
|
||||
#{enable => true,
|
||||
max_heap_size => 4194304,
|
||||
max_message_queue_len => 1000},
|
||||
mqtt => mqtt_conf(),
|
||||
listeners =>
|
||||
#{mqtt_tcp => listener_mqtt_tcp_conf(),
|
||||
mqtt_ws => listener_mqtt_ws_conf()}
|
||||
}
|
||||
}
|
||||
listeners_conf() ->
|
||||
#{tcp => #{default => listener_mqtt_tcp_conf()},
|
||||
ws => #{default => listener_mqtt_ws_conf()}
|
||||
}.
|
||||
|
||||
set_default_zone_conf() ->
|
||||
emqx_config:put(default_zone_conf()).
|
||||
stats_conf() ->
|
||||
#{enable => true}.
|
||||
|
||||
zone_conf() ->
|
||||
#{}.
|
||||
|
||||
basic_conf() ->
|
||||
#{rate_limit => rate_limit_conf(),
|
||||
force_gc => force_gc_conf(),
|
||||
force_shutdown => force_shutdown_conf(),
|
||||
mqtt => mqtt_conf(),
|
||||
rpc => rpc_conf(),
|
||||
stats => stats_conf(),
|
||||
listeners => listeners_conf(),
|
||||
zones => zone_conf()
|
||||
}.
|
||||
|
||||
set_test_listenser_confs() ->
|
||||
emqx_config:put(basic_conf()).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% CT Callbacks
|
||||
|
@ -181,7 +144,7 @@ init_per_suite(Config) ->
|
|||
%% Access Control Meck
|
||||
ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]),
|
||||
ok = meck:expect(emqx_access_control, authenticate,
|
||||
fun(_) -> ok end),
|
||||
fun(_) -> {ok, #{is_superuser => false}} end),
|
||||
ok = meck:expect(emqx_access_control, authorize, fun(_, _, _) -> allow end),
|
||||
%% Broker Meck
|
||||
ok = meck:new(emqx_broker, [passthrough, no_history, no_link]),
|
||||
|
@ -211,7 +174,7 @@ end_per_suite(_Config) ->
|
|||
]).
|
||||
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
set_default_zone_conf(),
|
||||
set_test_listenser_confs(),
|
||||
Config.
|
||||
|
||||
end_per_testcase(_TestCase, Config) ->
|
||||
|
@ -917,7 +880,7 @@ t_ws_cookie_init(_) ->
|
|||
conn_mod => emqx_ws_connection,
|
||||
ws_cookie => WsCookie
|
||||
},
|
||||
Channel = emqx_channel:init(ConnInfo, #{zone => default, listener => mqtt_tcp}),
|
||||
Channel = emqx_channel:init(ConnInfo, #{zone => default, listener => {tcp, default}}),
|
||||
?assertMatch(#{ws_cookie := WsCookie}, emqx_channel:info(clientinfo, Channel)).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
|
@ -942,7 +905,7 @@ channel(InitFields) ->
|
|||
maps:fold(fun(Field, Value, Channel) ->
|
||||
emqx_channel:set_field(Field, Value, Channel)
|
||||
end,
|
||||
emqx_channel:init(ConnInfo, #{zone => default, listener => mqtt_tcp}),
|
||||
emqx_channel:init(ConnInfo, #{zone => default, listener => {tcp, default}}),
|
||||
maps:merge(#{clientinfo => clientinfo(),
|
||||
session => session(),
|
||||
conn_state => connected
|
||||
|
@ -951,7 +914,7 @@ channel(InitFields) ->
|
|||
clientinfo() -> clientinfo(#{}).
|
||||
clientinfo(InitProps) ->
|
||||
maps:merge(#{zone => default,
|
||||
listener => mqtt_tcp,
|
||||
listener => {tcp, default},
|
||||
protocol => mqtt,
|
||||
peerhost => {127,0,0,1},
|
||||
clientid => <<"clientid">>,
|
||||
|
|
|
@ -79,8 +79,8 @@ groups() ->
|
|||
init_per_suite(Config) ->
|
||||
emqx_ct_helpers:boot_modules(all),
|
||||
emqx_ct_helpers:start_apps([]),
|
||||
emqx_config:put_listener_conf(default, mqtt_ssl, [ssl, verify], verify_peer),
|
||||
emqx_listeners:restart_listener('default:mqtt_ssl'),
|
||||
emqx_config:put_listener_conf(ssl, default, [ssl, verify], verify_peer),
|
||||
emqx_listeners:restart_listener('ssl:default'),
|
||||
Config.
|
||||
|
||||
end_per_suite(_Config) ->
|
||||
|
@ -114,8 +114,8 @@ t_cm(_) ->
|
|||
emqx_config:put_zone_conf(default, [mqtt, idle_timeout], 15000).
|
||||
|
||||
t_cm_registry(_) ->
|
||||
Info = supervisor:which_children(emqx_cm_sup),
|
||||
{_, Pid, _, _} = lists:keyfind(registry, 1, Info),
|
||||
Children = supervisor:which_children(emqx_cm_sup),
|
||||
{_, Pid, _, _} = lists:keyfind(emqx_cm_registry, 1, Children),
|
||||
ignored = gen_server:call(Pid, <<"Unexpected call">>),
|
||||
gen_server:cast(Pid, <<"Unexpected cast">>),
|
||||
Pid ! <<"Unexpected info">>.
|
||||
|
|
|
@ -89,7 +89,7 @@ t_open_session(_) ->
|
|||
ok = meck:expect(emqx_connection, call, fun(_, _) -> ok end),
|
||||
ok = meck:expect(emqx_connection, call, fun(_, _, _) -> ok end),
|
||||
|
||||
ClientInfo = #{zone => default, listener => mqtt_tcp,
|
||||
ClientInfo = #{zone => default, listener => {tcp, default},
|
||||
clientid => <<"clientid">>,
|
||||
username => <<"username">>,
|
||||
peerhost => {127,0,0,1}},
|
||||
|
@ -114,7 +114,7 @@ rand_client_id() ->
|
|||
|
||||
t_open_session_race_condition(_) ->
|
||||
ClientId = rand_client_id(),
|
||||
ClientInfo = #{zone => default, listener => mqtt_tcp,
|
||||
ClientInfo = #{zone => default, listener => {tcp, default},
|
||||
clientid => ClientId,
|
||||
username => <<"username">>,
|
||||
peerhost => {127,0,0,1}},
|
||||
|
|
|
@ -0,0 +1,50 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_config_SUITE).
|
||||
|
||||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
|
||||
all() -> emqx_ct:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
emqx_ct_helpers:boot_modules(all),
|
||||
emqx_ct_helpers:start_apps([]),
|
||||
Config.
|
||||
|
||||
end_per_suite(_Config) ->
|
||||
emqx_ct_helpers:stop_apps([]).
|
||||
|
||||
t_fill_default_values(_) ->
|
||||
Conf = #{
|
||||
<<"broker">> => #{
|
||||
<<"perf">> => #{},
|
||||
<<"route_batch_clean">> => false}
|
||||
},
|
||||
?assertMatch(#{<<"broker">> :=
|
||||
#{<<"enable_session_registry">> := true,
|
||||
<<"perf">> :=
|
||||
#{<<"route_lock_type">> := key,
|
||||
<<"trie_compaction">> := true},
|
||||
<<"route_batch_clean">> := false,
|
||||
<<"session_locking_strategy">> := quorum,
|
||||
<<"shared_dispatch_ack_enabled">> := false,
|
||||
<<"shared_subscription_strategy">> := round_robin,
|
||||
<<"sys_heartbeat_interval">> := "30s",
|
||||
<<"sys_msg_interval">> := "1m"}},
|
||||
emqx_config:fill_defaults(Conf)).
|
|
@ -57,7 +57,7 @@ init_per_suite(Config) ->
|
|||
ok = meck:expect(emqx_alarm, deactivate, fun(_) -> ok end),
|
||||
ok = meck:expect(emqx_alarm, deactivate, fun(_, _) -> ok end),
|
||||
|
||||
emqx_channel_SUITE:set_default_zone_conf(),
|
||||
emqx_channel_SUITE:set_test_listenser_confs(),
|
||||
Config.
|
||||
|
||||
end_per_suite(_Config) ->
|
||||
|
@ -219,9 +219,9 @@ t_handle_msg_deliver(_) ->
|
|||
|
||||
t_handle_msg_inet_reply(_) ->
|
||||
ok = meck:expect(emqx_pd, get_counter, fun(_) -> 10 end),
|
||||
emqx_config:put_listener_conf(default, mqtt_tcp, [tcp, active_n], 0),
|
||||
emqx_config:put_listener_conf(tcp, default, [tcp, active_n], 0),
|
||||
?assertMatch({ok, _St}, handle_msg({inet_reply, for_testing, ok}, st())),
|
||||
emqx_config:put_listener_conf(default, mqtt_tcp, [tcp, active_n], 100),
|
||||
emqx_config:put_listener_conf(tcp, default, [tcp, active_n], 100),
|
||||
?assertEqual(ok, handle_msg({inet_reply, for_testing, ok}, st())),
|
||||
?assertMatch({stop, {shutdown, for_testing}, _St},
|
||||
handle_msg({inet_reply, for_testing, {error, for_testing}}, st())).
|
||||
|
@ -456,7 +456,7 @@ with_conn(TestFun, Opts) when is_map(Opts) ->
|
|||
TrapExit = maps:get(trap_exit, Opts, false),
|
||||
process_flag(trap_exit, TrapExit),
|
||||
{ok, CPid} = emqx_connection:start_link(emqx_transport, sock,
|
||||
maps:merge(Opts, #{zone => default, listener => mqtt_tcp})),
|
||||
maps:merge(Opts, #{zone => default, listener => {tcp, default}})),
|
||||
TestFun(CPid),
|
||||
TrapExit orelse emqx_connection:stop(CPid),
|
||||
ok.
|
||||
|
@ -479,7 +479,7 @@ st(InitFields) when is_map(InitFields) ->
|
|||
st(InitFields, #{}).
|
||||
st(InitFields, ChannelFields) when is_map(InitFields) ->
|
||||
St = emqx_connection:init_state(emqx_transport, sock, #{zone => default,
|
||||
listener => mqtt_tcp}),
|
||||
listener => {tcp, default}}),
|
||||
maps:fold(fun(N, V, S) -> emqx_connection:set_field(N, V, S) end,
|
||||
emqx_connection:set_field(channel, channel(ChannelFields), St),
|
||||
InitFields
|
||||
|
@ -500,7 +500,7 @@ channel(InitFields) ->
|
|||
expiry_interval => 0
|
||||
},
|
||||
ClientInfo = #{zone => default,
|
||||
listener => mqtt_tcp,
|
||||
listener => {tcp, default},
|
||||
protocol => mqtt,
|
||||
peerhost => {127,0,0,1},
|
||||
clientid => <<"clientid">>,
|
||||
|
@ -513,7 +513,7 @@ channel(InitFields) ->
|
|||
maps:fold(fun(Field, Value, Channel) ->
|
||||
emqx_channel:set_field(Field, Value, Channel)
|
||||
end,
|
||||
emqx_channel:init(ConnInfo, #{zone => default, listener => mqtt_tcp}),
|
||||
emqx_channel:init(ConnInfo, #{zone => default, listener => {tcp, default}}),
|
||||
maps:merge(#{clientinfo => ClientInfo,
|
||||
session => Session,
|
||||
conn_state => connected
|
||||
|
|
|
@ -40,7 +40,7 @@ end_per_suite(_Config) ->
|
|||
|
||||
t_detect_check(_) ->
|
||||
ClientInfo = #{zone => default,
|
||||
listener => mqtt_tcp,
|
||||
listener => {tcp, default},
|
||||
clientid => <<"client007">>,
|
||||
peerhost => {127,0,0,1}
|
||||
},
|
||||
|
@ -55,8 +55,8 @@ t_detect_check(_) ->
|
|||
true = emqx_banned:check(ClientInfo),
|
||||
timer:sleep(3000),
|
||||
false = emqx_banned:check(ClientInfo),
|
||||
Childrens = supervisor:which_children(emqx_cm_sup),
|
||||
{flapping, Pid, _, _} = lists:keyfind(flapping, 1, Childrens),
|
||||
Children = supervisor:which_children(emqx_cm_sup),
|
||||
{emqx_flapping, Pid, _, _} = lists:keyfind(emqx_flapping, 1, Children),
|
||||
gen_server:call(Pid, unexpected_msg),
|
||||
gen_server:cast(Pid, unexpected_msg),
|
||||
Pid ! test,
|
||||
|
@ -64,7 +64,7 @@ t_detect_check(_) ->
|
|||
|
||||
t_expired_detecting(_) ->
|
||||
ClientInfo = #{zone => default,
|
||||
listener => mqtt_tcp,
|
||||
listener => {tcp, default},
|
||||
clientid => <<"client008">>,
|
||||
peerhost => {127,0,0,1}},
|
||||
false = emqx_flapping:detect(ClientInfo),
|
||||
|
@ -72,4 +72,4 @@ t_expired_detecting(_) ->
|
|||
(_) -> false end, ets:tab2list(emqx_flapping))),
|
||||
timer:sleep(200),
|
||||
?assertEqual(true, lists:all(fun({flapping, <<"client008">>, _, _, _}) -> false;
|
||||
(_) -> true end, ets:tab2list(emqx_flapping))).
|
||||
(_) -> true end, ets:tab2list(emqx_flapping))).
|
||||
|
|
|
@ -37,6 +37,14 @@ end_per_suite(_Config) ->
|
|||
application:stop(esockd),
|
||||
application:stop(cowboy).
|
||||
|
||||
init_per_testcase(_, Config) ->
|
||||
{ok, _} = emqx_config_handler:start_link(),
|
||||
Config.
|
||||
|
||||
end_per_testcase(_, _Config) ->
|
||||
_ = emqx_config_handler:stop(),
|
||||
ok.
|
||||
|
||||
t_start_stop_listeners(_) ->
|
||||
ok = emqx_listeners:start(),
|
||||
?assertException(error, _, emqx_listeners:start_listener({ws,{"127.0.0.1", 8083}, []})),
|
||||
|
|
|
@ -25,7 +25,7 @@
|
|||
all() -> emqx_ct:all(?MODULE).
|
||||
|
||||
t_check_pub(_) ->
|
||||
OldConf = emqx_config:get([zones]),
|
||||
OldConf = emqx:get_config([zones]),
|
||||
emqx_config:put_zone_conf(default, [mqtt, max_qos_allowed], ?QOS_1),
|
||||
emqx_config:put_zone_conf(default, [mqtt, retain_available], false),
|
||||
timer:sleep(50),
|
||||
|
@ -39,7 +39,7 @@ t_check_pub(_) ->
|
|||
emqx_config:put([zones], OldConf).
|
||||
|
||||
t_check_sub(_) ->
|
||||
OldConf = emqx_config:get([zones]),
|
||||
OldConf = emqx:get_config([zones]),
|
||||
SubOpts = #{rh => 0,
|
||||
rap => 0,
|
||||
nl => 0,
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
emqx_hocon_plugin: {
|
||||
name: test
|
||||
emqx_hocon_plugin {
|
||||
name = test
|
||||
}
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
|
||||
{profiles,
|
||||
[{test, [
|
||||
{deps, [ {emqx_ct_helper, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "v1.1.4"}}}
|
||||
{deps, [{emqx_ct_helpers, {git,"https://github.com/emqx/emqx-ct-helpers.git", {branch,"hocon"}}}
|
||||
]}
|
||||
]}
|
||||
]}.
|
||||
|
|
|
@ -2,11 +2,11 @@
|
|||
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-export([structs/0, fields/1]).
|
||||
-export([roots/0, fields/1]).
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
|
||||
structs() -> ["emqx_hocon_plugin"].
|
||||
roots() -> ["emqx_hocon_plugin"].
|
||||
|
||||
fields("emqx_hocon_plugin") ->
|
||||
[{name, fun name/1}].
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
|
||||
{profiles,
|
||||
[{test, [
|
||||
{deps, [ {emqx_ct_helper, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "v1.1.4"}}}
|
||||
{deps, [{emqx_ct_helpers, {git,"https://github.com/emqx/emqx-ct-helpers.git", {branch,"hocon"}}}
|
||||
]}
|
||||
]}
|
||||
]}.
|
||||
|
|
|
@ -29,7 +29,7 @@ all() -> emqx_ct:all(?MODULE).
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
init_per_suite(Config) ->
|
||||
emqx_channel_SUITE:set_default_zone_conf(),
|
||||
emqx_channel_SUITE:set_test_listenser_confs(),
|
||||
ok = meck:new([emqx_hooks, emqx_metrics, emqx_broker],
|
||||
[passthrough, no_history, no_link]),
|
||||
ok = meck:expect(emqx_metrics, inc, fun(_) -> ok end),
|
||||
|
|
|
@ -48,7 +48,7 @@ init_per_testcase(TestCase, Config) when
|
|||
TestCase =/= t_ws_pingreq_before_connected,
|
||||
TestCase =/= t_ws_non_check_origin
|
||||
->
|
||||
emqx_channel_SUITE:set_default_zone_conf(),
|
||||
emqx_channel_SUITE:set_test_listenser_confs(),
|
||||
%% Mock cowboy_req
|
||||
ok = meck:new(cowboy_req, [passthrough, no_history, no_link]),
|
||||
ok = meck:expect(cowboy_req, header, fun(_, _, _) -> <<>> end),
|
||||
|
@ -119,7 +119,7 @@ t_info(_) ->
|
|||
} = SockInfo.
|
||||
|
||||
set_ws_opts(Key, Val) ->
|
||||
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, Key], Val).
|
||||
emqx_config:put_listener_conf(ws, default, [websocket, Key], Val).
|
||||
|
||||
t_header(_) ->
|
||||
ok = meck:expect(cowboy_req, header,
|
||||
|
@ -127,7 +127,7 @@ t_header(_) ->
|
|||
(<<"x-forwarded-port">>, _, _) -> <<"1000">> end),
|
||||
set_ws_opts(proxy_address_header, <<"x-forwarded-for">>),
|
||||
set_ws_opts(proxy_port_header, <<"x-forwarded-port">>),
|
||||
{ok, St, _} = ?ws_conn:websocket_init([req, #{zone => default, listener => mqtt_ws}]),
|
||||
{ok, St, _} = ?ws_conn:websocket_init([req, #{zone => default, listener => {ws, default}}]),
|
||||
WsPid = spawn(fun() ->
|
||||
receive {call, From, info} ->
|
||||
gen_server:reply(From, ?ws_conn:info(St))
|
||||
|
@ -222,8 +222,8 @@ t_ws_sub_protocols_mqtt_equivalents(_) ->
|
|||
start_ws_client(#{protocols => [<<"not-mqtt">>]})).
|
||||
|
||||
t_ws_check_origin(_) ->
|
||||
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, check_origin_enable], true),
|
||||
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, check_origins],
|
||||
emqx_config:put_listener_conf(ws, default, [websocket, check_origin_enable], true),
|
||||
emqx_config:put_listener_conf(ws, default, [websocket, check_origins],
|
||||
[<<"http://localhost:18083">>]),
|
||||
{ok, _} = application:ensure_all_started(gun),
|
||||
?assertMatch({gun_upgrade, _},
|
||||
|
@ -234,8 +234,8 @@ t_ws_check_origin(_) ->
|
|||
headers => [{<<"origin">>, <<"http://localhost:18080">>}]})).
|
||||
|
||||
t_ws_non_check_origin(_) ->
|
||||
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, check_origin_enable], false),
|
||||
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, check_origins], []),
|
||||
emqx_config:put_listener_conf(ws, default, [websocket, check_origin_enable], false),
|
||||
emqx_config:put_listener_conf(ws, default, [websocket, check_origins], []),
|
||||
{ok, _} = application:ensure_all_started(gun),
|
||||
?assertMatch({gun_upgrade, _},
|
||||
start_ws_client(#{protocols => [<<"mqtt">>],
|
||||
|
@ -245,7 +245,7 @@ t_ws_non_check_origin(_) ->
|
|||
headers => [{<<"origin">>, <<"http://localhost:18080">>}]})).
|
||||
|
||||
t_init(_) ->
|
||||
Opts = #{listener => mqtt_ws, zone => default},
|
||||
Opts = #{listener => {ws, default}, zone => default},
|
||||
ok = meck:expect(cowboy_req, parse_header, fun(_, req) -> undefined end),
|
||||
ok = meck:expect(cowboy_req, reply, fun(_, Req) -> Req end),
|
||||
{ok, req, _} = ?ws_conn:init(req, Opts),
|
||||
|
@ -438,7 +438,7 @@ t_shutdown(_) ->
|
|||
|
||||
st() -> st(#{}).
|
||||
st(InitFields) when is_map(InitFields) ->
|
||||
{ok, St, _} = ?ws_conn:websocket_init([req, #{zone => default, listener => mqtt_ws}]),
|
||||
{ok, St, _} = ?ws_conn:websocket_init([req, #{zone => default, listener => {ws, default}}]),
|
||||
maps:fold(fun(N, V, S) -> ?ws_conn:set_field(N, V, S) end,
|
||||
?ws_conn:set_field(channel, channel(), St),
|
||||
InitFields
|
||||
|
@ -459,7 +459,7 @@ channel(InitFields) ->
|
|||
expiry_interval => 0
|
||||
},
|
||||
ClientInfo = #{zone => default,
|
||||
listener => mqtt_ws,
|
||||
listener => {ws, default},
|
||||
protocol => mqtt,
|
||||
peerhost => {127,0,0,1},
|
||||
clientid => <<"clientid">>,
|
||||
|
@ -472,7 +472,7 @@ channel(InitFields) ->
|
|||
maps:fold(fun(Field, Value, Channel) ->
|
||||
emqx_channel:set_field(Field, Value, Channel)
|
||||
end,
|
||||
emqx_channel:init(ConnInfo, #{zone => default, listener => mqtt_ws}),
|
||||
emqx_channel:init(ConnInfo, #{zone => default, listener => {ws, default}}),
|
||||
maps:merge(#{clientinfo => ClientInfo,
|
||||
session => Session,
|
||||
conn_state => connected
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
user_id,password_hash,salt
|
||||
myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235
|
||||
myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139
|
||||
user_id,password_hash,salt,is_superuser
|
||||
myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235,true
|
||||
myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139,false
|
||||
|
|
|
|
@ -2,11 +2,13 @@
|
|||
{
|
||||
"user_id":"myuser1",
|
||||
"password_hash":"c5e46903df45e5dc096dc74657610dbee8deaacae656df88a1788f1847390242",
|
||||
"salt": "e378187547bf2d6f0545a3f441aa4d8a"
|
||||
"salt": "e378187547bf2d6f0545a3f441aa4d8a",
|
||||
"is_superuser": true
|
||||
},
|
||||
{
|
||||
"user_id":"myuser2",
|
||||
"password_hash":"f4d17f300b11e522fd33f497c11b126ef1ea5149c74d2220f9a16dc876d4567b",
|
||||
"salt": "6d3f9bd5b54d94b98adbcfe10b6d181f"
|
||||
"salt": "6d3f9bd5b54d94b98adbcfe10b6d181f",
|
||||
"is_superuser": false
|
||||
}
|
||||
]
|
||||
|
|
|
@ -1,37 +1,6 @@
|
|||
authentication: {
|
||||
enable: false
|
||||
authenticators: [
|
||||
# {
|
||||
# name: "authenticator1"
|
||||
# mechanism: password-based
|
||||
# server_type: built-in-database
|
||||
# user_id_type: clientid
|
||||
# },
|
||||
# {
|
||||
# name: "authenticator2"
|
||||
# mechanism: password-based
|
||||
# server_type: mongodb
|
||||
# server: "127.0.0.1:27017"
|
||||
# database: mqtt
|
||||
# collection: users
|
||||
# selector: {
|
||||
# username: "${mqtt-username}"
|
||||
# }
|
||||
# password_hash_field: password_hash
|
||||
# salt_field: salt
|
||||
# password_hash_algorithm: sha256
|
||||
# salt_position: prefix
|
||||
# },
|
||||
# {
|
||||
# name: "authenticator 3"
|
||||
# mechanism: password-based
|
||||
# server_type: redis
|
||||
# server: "127.0.0.1:6379"
|
||||
# password: "public"
|
||||
# database: 0
|
||||
# query: "HMGET ${mqtt-username} password_hash salt"
|
||||
# password_hash_algorithm: sha256
|
||||
# salt_position: prefix
|
||||
# }
|
||||
]
|
||||
}
|
||||
# authentication: {
|
||||
# mechanism: password-based
|
||||
# backend: built-in-database
|
||||
# user_id_type: clientid
|
||||
# }
|
||||
|
||||
|
|
|
@ -15,25 +15,11 @@
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(APP, emqx_authn).
|
||||
-define(CHAIN, <<"mqtt">>).
|
||||
|
||||
-define(VER_1, <<"1">>).
|
||||
-define(VER_2, <<"2">>).
|
||||
-define(AUTHN, emqx_authentication).
|
||||
|
||||
-define(GLOBAL, 'mqtt:global').
|
||||
|
||||
-define(RE_PLACEHOLDER, "\\$\\{[a-z0-9\\-]+\\}").
|
||||
|
||||
-record(authenticator,
|
||||
{ id :: binary()
|
||||
, name :: binary()
|
||||
, provider :: module()
|
||||
, config :: map()
|
||||
, state :: map()
|
||||
}).
|
||||
|
||||
-record(chain,
|
||||
{ id :: binary()
|
||||
, authenticators :: [{binary(), binary(), #authenticator{}}]
|
||||
, created_at :: integer()
|
||||
}).
|
||||
|
||||
-define(AUTH_SHARD, emqx_authn_shard).
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
{deps, [
|
||||
{jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.1"}}}
|
||||
]}.
|
||||
{deps, []}.
|
||||
|
||||
{edoc_opts, [{preprocess, true}]}.
|
||||
{erl_opts, [warn_unused_vars,
|
||||
|
|
|
@ -15,427 +15,3 @@
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_authn).
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
|
||||
-export([ enable/0
|
||||
, disable/0
|
||||
, is_enabled/0
|
||||
]).
|
||||
|
||||
-export([authenticate/2]).
|
||||
|
||||
-export([ create_chain/1
|
||||
, delete_chain/1
|
||||
, lookup_chain/1
|
||||
, list_chains/0
|
||||
, create_authenticator/2
|
||||
, delete_authenticator/2
|
||||
, update_authenticator/3
|
||||
, update_or_create_authenticator/3
|
||||
, lookup_authenticator/2
|
||||
, list_authenticators/1
|
||||
, move_authenticator_to_the_nth/3
|
||||
]).
|
||||
|
||||
-export([ import_users/3
|
||||
, add_user/3
|
||||
, delete_user/3
|
||||
, update_user/4
|
||||
, lookup_user/3
|
||||
, list_users/2
|
||||
]).
|
||||
|
||||
-export([mnesia/1]).
|
||||
|
||||
-boot_mnesia({mnesia, [boot]}).
|
||||
-copy_mnesia({mnesia, [copy]}).
|
||||
|
||||
-define(CHAIN_TAB, emqx_authn_chain).
|
||||
|
||||
-rlog_shard({?AUTH_SHARD, ?CHAIN_TAB}).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Mnesia bootstrap
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
%% @doc Create or replicate tables.
|
||||
-spec(mnesia(boot) -> ok).
|
||||
mnesia(boot) ->
|
||||
%% Optimize storage
|
||||
StoreProps = [{ets, [{read_concurrency, true}]}],
|
||||
%% Chain table
|
||||
ok = ekka_mnesia:create_table(?CHAIN_TAB, [
|
||||
{ram_copies, [node()]},
|
||||
{record_name, chain},
|
||||
{local_content, true},
|
||||
{attributes, record_info(fields, chain)},
|
||||
{storage_properties, StoreProps}]);
|
||||
|
||||
mnesia(copy) ->
|
||||
ok = ekka_mnesia:copy_table(?CHAIN_TAB, ram_copies).
|
||||
|
||||
enable() ->
|
||||
case emqx:hook('client.authenticate', {?MODULE, authenticate, []}) of
|
||||
ok -> ok;
|
||||
{error, already_exists} -> ok
|
||||
end.
|
||||
|
||||
disable() ->
|
||||
emqx:unhook('client.authenticate', {?MODULE, authenticate, []}),
|
||||
ok.
|
||||
|
||||
is_enabled() ->
|
||||
Callbacks = emqx_hooks:lookup('client.authenticate'),
|
||||
lists:any(fun({callback, {?MODULE, authenticate, []}, _, _}) ->
|
||||
true;
|
||||
(_) ->
|
||||
false
|
||||
end, Callbacks).
|
||||
|
||||
authenticate(Credential, _AuthResult) ->
|
||||
case mnesia:dirty_read(?CHAIN_TAB, ?CHAIN) of
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
do_authenticate(Authenticators, Credential);
|
||||
[] ->
|
||||
{stop, {error, not_authorized}}
|
||||
end.
|
||||
|
||||
do_authenticate([], _) ->
|
||||
{stop, {error, not_authorized}};
|
||||
do_authenticate([{_, _, #authenticator{provider = Provider, state = State}} | More], Credential) ->
|
||||
case Provider:authenticate(Credential, State) of
|
||||
ignore ->
|
||||
do_authenticate(More, Credential);
|
||||
Result ->
|
||||
%% ok
|
||||
%% {ok, AuthData}
|
||||
%% {continue, AuthCache}
|
||||
%% {continue, AuthData, AuthCache}
|
||||
%% {error, Reason}
|
||||
{stop, Result}
|
||||
end.
|
||||
|
||||
create_chain(#{id := ID}) ->
|
||||
trans(
|
||||
fun() ->
|
||||
case mnesia:read(?CHAIN_TAB, ID, write) of
|
||||
[] ->
|
||||
Chain = #chain{id = ID,
|
||||
authenticators = [],
|
||||
created_at = erlang:system_time(millisecond)},
|
||||
mnesia:write(?CHAIN_TAB, Chain, write),
|
||||
{ok, serialize_chain(Chain)};
|
||||
[_ | _] ->
|
||||
{error, {already_exists, {chain, ID}}}
|
||||
end
|
||||
end).
|
||||
|
||||
delete_chain(ID) ->
|
||||
trans(
|
||||
fun() ->
|
||||
case mnesia:read(?CHAIN_TAB, ID, write) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ID}}};
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
_ = [do_delete_authenticator(Authenticator) || {_, _, Authenticator} <- Authenticators],
|
||||
mnesia:delete(?CHAIN_TAB, ID, write)
|
||||
end
|
||||
end).
|
||||
|
||||
lookup_chain(ID) ->
|
||||
case mnesia:dirty_read(?CHAIN_TAB, ID) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ID}}};
|
||||
[Chain] ->
|
||||
{ok, serialize_chain(Chain)}
|
||||
end.
|
||||
|
||||
list_chains() ->
|
||||
Chains = ets:tab2list(?CHAIN_TAB),
|
||||
{ok, [serialize_chain(Chain) || Chain <- Chains]}.
|
||||
|
||||
create_authenticator(ChainID, #{name := Name} = Config) ->
|
||||
UpdateFun =
|
||||
fun(Chain = #chain{authenticators = Authenticators}) ->
|
||||
case lists:keymember(Name, 2, Authenticators) of
|
||||
true ->
|
||||
{error, name_has_be_used};
|
||||
false ->
|
||||
AlreadyExist = fun(ID) ->
|
||||
lists:keymember(ID, 1, Authenticators)
|
||||
end,
|
||||
AuthenticatorID = gen_id(AlreadyExist),
|
||||
case do_create_authenticator(ChainID, AuthenticatorID, Config) of
|
||||
{ok, Authenticator} ->
|
||||
NAuthenticators = Authenticators ++ [{AuthenticatorID, Name, Authenticator}],
|
||||
ok = mnesia:write(?CHAIN_TAB, Chain#chain{authenticators = NAuthenticators}, write),
|
||||
{ok, serialize_authenticator(Authenticator)};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end
|
||||
end
|
||||
end,
|
||||
update_chain(ChainID, UpdateFun).
|
||||
|
||||
delete_authenticator(ChainID, AuthenticatorID) ->
|
||||
UpdateFun = fun(Chain = #chain{authenticators = Authenticators}) ->
|
||||
case lists:keytake(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}};
|
||||
{value, {_, _, Authenticator}, NAuthenticators} ->
|
||||
_ = do_delete_authenticator(Authenticator),
|
||||
NChain = Chain#chain{authenticators = NAuthenticators},
|
||||
mnesia:write(?CHAIN_TAB, NChain, write)
|
||||
end
|
||||
end,
|
||||
update_chain(ChainID, UpdateFun).
|
||||
|
||||
update_authenticator(ChainID, AuthenticatorID, Config) ->
|
||||
do_update_authenticator(ChainID, AuthenticatorID, Config, false).
|
||||
|
||||
update_or_create_authenticator(ChainID, AuthenticatorID, Config) ->
|
||||
do_update_authenticator(ChainID, AuthenticatorID, Config, true).
|
||||
|
||||
do_update_authenticator(ChainID, AuthenticatorID, #{name := NewName} = Config, CreateWhenNotFound) ->
|
||||
UpdateFun = fun(Chain = #chain{authenticators = Authenticators}) ->
|
||||
case lists:keytake(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
case CreateWhenNotFound of
|
||||
true ->
|
||||
case lists:keymember(NewName, 2, Authenticators) of
|
||||
true ->
|
||||
{error, name_has_be_used};
|
||||
false ->
|
||||
case do_create_authenticator(ChainID, AuthenticatorID, Config) of
|
||||
{ok, Authenticator} ->
|
||||
NAuthenticators = Authenticators ++ [{AuthenticatorID, NewName, Authenticator}],
|
||||
ok = mnesia:write(?CHAIN_TAB, Chain#chain{authenticators = NAuthenticators}, write),
|
||||
{ok, serialize_authenticator(Authenticator)};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end
|
||||
end;
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}}
|
||||
end;
|
||||
{value,
|
||||
{_, _, #authenticator{provider = Provider,
|
||||
state = #{version := Version} = State} = Authenticator},
|
||||
Others} ->
|
||||
case lists:keymember(NewName, 2, Others) of
|
||||
true ->
|
||||
{error, name_has_be_used};
|
||||
false ->
|
||||
case (NewProvider = authenticator_provider(Config)) =:= Provider of
|
||||
true ->
|
||||
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", Version/binary>>,
|
||||
case Provider:update(Config#{'_unique' => Unique}, State) of
|
||||
{ok, NewState} ->
|
||||
NewAuthenticator = Authenticator#authenticator{name = NewName,
|
||||
config = Config,
|
||||
state = switch_version(NewState)},
|
||||
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
|
||||
ok = mnesia:write(?CHAIN_TAB, Chain#chain{authenticators = NewAuthenticators}, write),
|
||||
{ok, serialize_authenticator(NewAuthenticator)};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
false ->
|
||||
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", Version/binary>>,
|
||||
case NewProvider:create(Config#{'_unique' => Unique}) of
|
||||
{ok, NewState} ->
|
||||
NewAuthenticator = Authenticator#authenticator{name = NewName,
|
||||
provider = NewProvider,
|
||||
config = Config,
|
||||
state = switch_version(NewState)},
|
||||
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
|
||||
ok = mnesia:write(?CHAIN_TAB, Chain#chain{authenticators = NewAuthenticators}, write),
|
||||
_ = Provider:destroy(State),
|
||||
{ok, serialize_authenticator(NewAuthenticator)};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end,
|
||||
update_chain(ChainID, UpdateFun).
|
||||
|
||||
lookup_authenticator(ChainID, AuthenticatorID) ->
|
||||
case mnesia:dirty_read(?CHAIN_TAB, ChainID) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ChainID}}};
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
case lists:keyfind(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}};
|
||||
{_, _, Authenticator} ->
|
||||
{ok, serialize_authenticator(Authenticator)}
|
||||
end
|
||||
end.
|
||||
|
||||
list_authenticators(ChainID) ->
|
||||
case mnesia:dirty_read(?CHAIN_TAB, ChainID) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ChainID}}};
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
{ok, serialize_authenticators(Authenticators)}
|
||||
end.
|
||||
|
||||
move_authenticator_to_the_nth(ChainID, AuthenticatorID, N) ->
|
||||
UpdateFun = fun(Chain = #chain{authenticators = Authenticators}) ->
|
||||
case move_authenticator_to_the_nth_(AuthenticatorID, Authenticators, N) of
|
||||
{ok, NAuthenticators} ->
|
||||
NChain = Chain#chain{authenticators = NAuthenticators},
|
||||
mnesia:write(?CHAIN_TAB, NChain, write);
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end
|
||||
end,
|
||||
update_chain(ChainID, UpdateFun).
|
||||
|
||||
import_users(ChainID, AuthenticatorID, Filename) ->
|
||||
call_authenticator(ChainID, AuthenticatorID, import_users, [Filename]).
|
||||
|
||||
add_user(ChainID, AuthenticatorID, UserInfo) ->
|
||||
call_authenticator(ChainID, AuthenticatorID, add_user, [UserInfo]).
|
||||
|
||||
delete_user(ChainID, AuthenticatorID, UserID) ->
|
||||
call_authenticator(ChainID, AuthenticatorID, delete_user, [UserID]).
|
||||
|
||||
update_user(ChainID, AuthenticatorID, UserID, NewUserInfo) ->
|
||||
call_authenticator(ChainID, AuthenticatorID, update_user, [UserID, NewUserInfo]).
|
||||
|
||||
lookup_user(ChainID, AuthenticatorID, UserID) ->
|
||||
call_authenticator(ChainID, AuthenticatorID, lookup_user, [UserID]).
|
||||
|
||||
list_users(ChainID, AuthenticatorID) ->
|
||||
call_authenticator(ChainID, AuthenticatorID, list_users, []).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'built-in-database'}) ->
|
||||
emqx_authn_mnesia;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'mysql'}) ->
|
||||
emqx_authn_mysql;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'pgsql'}) ->
|
||||
emqx_authn_pgsql;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'mongodb'}) ->
|
||||
emqx_authn_mongodb;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'redis'}) ->
|
||||
emqx_authn_redis;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'http-server'}) ->
|
||||
emqx_authn_http;
|
||||
authenticator_provider(#{mechanism := jwt}) ->
|
||||
emqx_authn_jwt;
|
||||
authenticator_provider(#{mechanism := scram, server_type := 'built-in-database'}) ->
|
||||
emqx_enhanced_authn_scram_mnesia.
|
||||
|
||||
gen_id(AlreadyExist) ->
|
||||
ID = list_to_binary(emqx_rule_id:gen()),
|
||||
case AlreadyExist(ID) of
|
||||
true -> gen_id(AlreadyExist);
|
||||
false -> ID
|
||||
end.
|
||||
|
||||
switch_version(State = #{version := ?VER_1}) ->
|
||||
State#{version := ?VER_2};
|
||||
switch_version(State = #{version := ?VER_2}) ->
|
||||
State#{version := ?VER_1};
|
||||
switch_version(State) ->
|
||||
State#{version => ?VER_1}.
|
||||
|
||||
do_create_authenticator(ChainID, AuthenticatorID, #{name := Name} = Config) ->
|
||||
Provider = authenticator_provider(Config),
|
||||
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", ?VER_1/binary>>,
|
||||
case Provider:create(Config#{'_unique' => Unique}) of
|
||||
{ok, State} ->
|
||||
Authenticator = #authenticator{id = AuthenticatorID,
|
||||
name = Name,
|
||||
provider = Provider,
|
||||
config = Config,
|
||||
state = switch_version(State)},
|
||||
{ok, Authenticator};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end.
|
||||
|
||||
do_delete_authenticator(#authenticator{provider = Provider, state = State}) ->
|
||||
_ = Provider:destroy(State),
|
||||
ok.
|
||||
|
||||
replace_authenticator(ID, #authenticator{name = Name} = Authenticator, Authenticators) ->
|
||||
lists:keyreplace(ID, 1, Authenticators, {ID, Name, Authenticator}).
|
||||
|
||||
move_authenticator_to_the_nth_(AuthenticatorID, Authenticators, N)
|
||||
when N =< length(Authenticators) andalso N > 0 ->
|
||||
move_authenticator_to_the_nth_(AuthenticatorID, Authenticators, N, []);
|
||||
move_authenticator_to_the_nth_(_, _, _) ->
|
||||
{error, out_of_range}.
|
||||
|
||||
move_authenticator_to_the_nth_(AuthenticatorID, [], _, _) ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}};
|
||||
move_authenticator_to_the_nth_(AuthenticatorID, [{AuthenticatorID, _, _} = Authenticator | More], N, Passed)
|
||||
when N =< length(Passed) ->
|
||||
{L1, L2} = lists:split(N - 1, lists:reverse(Passed)),
|
||||
{ok, L1 ++ [Authenticator] ++ L2 ++ More};
|
||||
move_authenticator_to_the_nth_(AuthenticatorID, [{AuthenticatorID, _, _} = Authenticator | More], N, Passed) ->
|
||||
{L1, L2} = lists:split(N - length(Passed) - 1, More),
|
||||
{ok, lists:reverse(Passed) ++ L1 ++ [Authenticator] ++ L2};
|
||||
move_authenticator_to_the_nth_(AuthenticatorID, [Authenticator | More], N, Passed) ->
|
||||
move_authenticator_to_the_nth_(AuthenticatorID, More, N, [Authenticator | Passed]).
|
||||
|
||||
update_chain(ChainID, UpdateFun) ->
|
||||
trans(
|
||||
fun() ->
|
||||
case mnesia:read(?CHAIN_TAB, ChainID, write) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ChainID}}};
|
||||
[Chain] ->
|
||||
UpdateFun(Chain)
|
||||
end
|
||||
end).
|
||||
|
||||
call_authenticator(ChainID, AuthenticatorID, Func, Args) ->
|
||||
case mnesia:dirty_read(?CHAIN_TAB, ChainID) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ChainID}}};
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
case lists:keyfind(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}};
|
||||
{_, _, #authenticator{provider = Provider, state = State}} ->
|
||||
case erlang:function_exported(Provider, Func, length(Args) + 1) of
|
||||
true ->
|
||||
erlang:apply(Provider, Func, Args ++ [State]);
|
||||
false ->
|
||||
{error, unsupported_feature}
|
||||
end
|
||||
end
|
||||
end.
|
||||
|
||||
serialize_chain(#chain{id = ID,
|
||||
authenticators = Authenticators,
|
||||
created_at = CreatedAt}) ->
|
||||
#{id => ID,
|
||||
authenticators => serialize_authenticators(Authenticators),
|
||||
created_at => CreatedAt}.
|
||||
|
||||
serialize_authenticators(Authenticators) ->
|
||||
[serialize_authenticator(Authenticator) || {_, _, Authenticator} <- Authenticators].
|
||||
|
||||
serialize_authenticator(#authenticator{id = ID,
|
||||
config = Config}) ->
|
||||
Config#{id => ID}.
|
||||
|
||||
trans(Fun) ->
|
||||
trans(Fun, []).
|
||||
|
||||
trans(Fun, Args) ->
|
||||
case ekka_mnesia:transaction(?AUTH_SHARD, Fun, Args) of
|
||||
{atomic, Res} -> Res;
|
||||
{aborted, Reason} -> {error, Reason}
|
||||
end.
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -17,7 +17,6 @@
|
|||
-module(emqx_authn_app).
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
|
||||
-behaviour(application).
|
||||
|
||||
|
@ -26,32 +25,45 @@
|
|||
, stop/1
|
||||
]).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
start(_StartType, _StartArgs) ->
|
||||
{ok, Sup} = emqx_authn_sup:start_link(),
|
||||
ok = ekka_rlog:wait_for_shards([?AUTH_SHARD], infinity),
|
||||
initialize(),
|
||||
{ok, Sup} = emqx_authn_sup:start_link(),
|
||||
ok = add_providers(),
|
||||
ok = initialize(),
|
||||
{ok, Sup}.
|
||||
|
||||
stop(_State) ->
|
||||
ok = remove_providers(),
|
||||
ok.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
add_providers() ->
|
||||
_ = [?AUTHN:add_provider(AuthNType, Provider) || {AuthNType, Provider} <- providers()], ok.
|
||||
|
||||
remove_providers() ->
|
||||
_ = [?AUTHN:remove_provider(AuthNType) || {AuthNType, _} <- providers()], ok.
|
||||
|
||||
initialize() ->
|
||||
AuthNConfig = emqx_config:get([authentication], #{enable => false,
|
||||
authenticators => []}),
|
||||
initialize(AuthNConfig).
|
||||
|
||||
initialize(#{enable := Enable, authenticators := AuthenticatorsConfig}) ->
|
||||
{ok, _} = emqx_authn:create_chain(#{id => ?CHAIN}),
|
||||
initialize_authenticators(AuthenticatorsConfig),
|
||||
Enable =:= true andalso emqx_authn:enable(),
|
||||
?AUTHN:initialize_authentication(?GLOBAL, emqx:get_raw_config([authentication], [])),
|
||||
lists:foreach(fun({ListenerID, ListenerConfig}) ->
|
||||
?AUTHN:initialize_authentication(ListenerID, maps:get(authentication, ListenerConfig, []))
|
||||
end, emqx_listeners:list()),
|
||||
ok.
|
||||
|
||||
initialize_authenticators([]) ->
|
||||
ok;
|
||||
initialize_authenticators([#{name := Name} = AuthenticatorConfig | More]) ->
|
||||
case emqx_authn:create_authenticator(?CHAIN, AuthenticatorConfig) of
|
||||
{ok, _} ->
|
||||
initialize_authenticators(More);
|
||||
{error, Reason} ->
|
||||
?LOG(error, "Failed to create authenticator '~s': ~p", [Name, Reason])
|
||||
end.
|
||||
providers() ->
|
||||
[ {{'password-based', 'built-in-database'}, emqx_authn_mnesia}
|
||||
, {{'password-based', mysql}, emqx_authn_mysql}
|
||||
, {{'password-based', posgresql}, emqx_authn_pgsql}
|
||||
, {{'password-based', mongodb}, emqx_authn_mongodb}
|
||||
, {{'password-based', redis}, emqx_authn_redis}
|
||||
, {{'password-based', 'http-server'}, emqx_authn_http}
|
||||
, {jwt, emqx_authn_jwt}
|
||||
, {{scram, 'built-in-database'}, emqx_enhanced_authn_scram_mnesia}
|
||||
].
|
|
@ -16,53 +16,15 @@
|
|||
|
||||
-module(emqx_authn_schema).
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
|
||||
-export([ structs/0
|
||||
, fields/1
|
||||
-export([ common_fields/0
|
||||
]).
|
||||
|
||||
-export([ authenticator_name/1
|
||||
]).
|
||||
|
||||
%% Export it for emqx_gateway_schema module
|
||||
-export([ authenticators/1
|
||||
]).
|
||||
|
||||
structs() -> [ "authentication" ].
|
||||
|
||||
fields("authentication") ->
|
||||
[ {enable, fun enable/1}
|
||||
, {authenticators, fun authenticators/1}
|
||||
common_fields() ->
|
||||
[ {enable, fun enable/1}
|
||||
].
|
||||
|
||||
authenticator_name(type) -> binary();
|
||||
authenticator_name(nullable) -> false;
|
||||
authenticator_name(_) -> undefined.
|
||||
|
||||
enable(type) -> boolean();
|
||||
enable(default) -> false;
|
||||
enable(default) -> true;
|
||||
enable(_) -> undefined.
|
||||
|
||||
authenticators(type) ->
|
||||
hoconsc:array({union, [ hoconsc:ref(emqx_authn_mnesia, config)
|
||||
, hoconsc:ref(emqx_authn_mysql, config)
|
||||
, hoconsc:ref(emqx_authn_pgsql, config)
|
||||
, hoconsc:ref(emqx_authn_mongodb, standalone)
|
||||
, hoconsc:ref(emqx_authn_mongodb, 'replica-set')
|
||||
, hoconsc:ref(emqx_authn_mongodb, 'sharded-cluster')
|
||||
, hoconsc:ref(emqx_authn_redis, standalone)
|
||||
, hoconsc:ref(emqx_authn_redis, cluster)
|
||||
, hoconsc:ref(emqx_authn_redis, sentinel)
|
||||
, hoconsc:ref(emqx_authn_http, get)
|
||||
, hoconsc:ref(emqx_authn_http, post)
|
||||
, hoconsc:ref(emqx_authn_jwt, 'hmac-based')
|
||||
, hoconsc:ref(emqx_authn_jwt, 'public-key')
|
||||
, hoconsc:ref(emqx_authn_jwt, 'jwks')
|
||||
, hoconsc:ref(emqx_enhanced_authn_scram_mnesia, config)
|
||||
]});
|
||||
authenticators(default) -> [];
|
||||
authenticators(_) -> undefined.
|
||||
|
|
|
@ -26,4 +26,5 @@ start_link() ->
|
|||
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
|
||||
|
||||
init([]) ->
|
||||
{ok, {{one_for_one, 10, 10}, []}}.
|
||||
ChildSpecs = [],
|
||||
{ok, {{one_for_one, 10, 10}, ChildSpecs}}.
|
||||
|
|
|
@ -17,16 +17,18 @@
|
|||
-module(emqx_enhanced_authn_scram_mnesia).
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
-include_lib("esasl/include/esasl_scram.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ structs/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -46,7 +48,13 @@
|
|||
-boot_mnesia({mnesia, [boot]}).
|
||||
-copy_mnesia({mnesia, [copy]}).
|
||||
|
||||
-rlog_shard({?AUTH_SHARD, ?TAB}).
|
||||
-record(user_info,
|
||||
{ user_id
|
||||
, stored_key
|
||||
, server_key
|
||||
, salt
|
||||
, is_superuser
|
||||
}).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Mnesia bootstrap
|
||||
|
@ -56,9 +64,10 @@
|
|||
-spec(mnesia(boot | copy) -> ok).
|
||||
mnesia(boot) ->
|
||||
ok = ekka_mnesia:create_table(?TAB, [
|
||||
{rlog_shard, ?AUTH_SHARD},
|
||||
{disc_copies, [node()]},
|
||||
{record_name, scram_user_credentail},
|
||||
{attributes, record_info(fields, scram_user_credentail)},
|
||||
{record_name, user_info},
|
||||
{attributes, record_info(fields, user_info)},
|
||||
{storage_properties, [{ets, [{read_concurrency, true}]}]}]);
|
||||
|
||||
mnesia(copy) ->
|
||||
|
@ -68,19 +77,16 @@ mnesia(copy) ->
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
structs() -> [config].
|
||||
namespace() -> "authn:scram:builtin-db".
|
||||
|
||||
roots() -> [config].
|
||||
|
||||
fields(config) ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, [scram]}}
|
||||
, {server_type, fun server_type/1}
|
||||
[ {mechanism, {enum, [scram]}}
|
||||
, {backend, {enum, ['built-in-database']}}
|
||||
, {algorithm, fun algorithm/1}
|
||||
, {iteration_count, fun iteration_count/1}
|
||||
].
|
||||
|
||||
server_type(type) -> hoconsc:enum(['built-in-database']);
|
||||
server_type(default) -> 'built-in-database';
|
||||
server_type(_) -> undefined.
|
||||
] ++ emqx_authn_schema:common_fields().
|
||||
|
||||
algorithm(type) -> hoconsc:enum([sha256, sha512]);
|
||||
algorithm(default) -> sha256;
|
||||
|
@ -94,6 +100,9 @@ iteration_count(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[hoconsc:ref(?MODULE, config)].
|
||||
|
||||
create(#{ algorithm := Algorithm
|
||||
, iteration_count := IterationCount
|
||||
, '_unique' := Unique
|
||||
|
@ -105,7 +114,7 @@ create(#{ algorithm := Algorithm
|
|||
|
||||
update(Config, #{user_group := Unique}) ->
|
||||
create(Config#{'_unique' => Unique}).
|
||||
|
||||
|
||||
authenticate(#{auth_method := AuthMethod,
|
||||
auth_data := AuthData,
|
||||
auth_cache := AuthCache}, State) ->
|
||||
|
@ -126,20 +135,21 @@ authenticate(_Credential, _State) ->
|
|||
destroy(#{user_group := UserGroup}) ->
|
||||
trans(
|
||||
fun() ->
|
||||
MatchSpec = [{{scram_user_credentail, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}],
|
||||
ok = lists:foreach(fun(UserCredential) ->
|
||||
mnesia:delete_object(?TAB, UserCredential, write)
|
||||
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_', '_'}, [], ['$_']}],
|
||||
ok = lists:foreach(fun(UserInfo) ->
|
||||
mnesia:delete_object(?TAB, UserInfo, write)
|
||||
end, mnesia:select(?TAB, MatchSpec, write))
|
||||
end).
|
||||
|
||||
add_user(#{user_id := UserID,
|
||||
password := Password}, #{user_group := UserGroup} = State) ->
|
||||
password := Password} = UserInfo, #{user_group := UserGroup} = State) ->
|
||||
trans(
|
||||
fun() ->
|
||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||
[] ->
|
||||
add_user(UserID, Password, State),
|
||||
{ok, #{user_id => UserID}};
|
||||
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
||||
add_user(UserID, Password, IsSuperuser, State),
|
||||
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
|
||||
[_] ->
|
||||
{error, already_exist}
|
||||
end
|
||||
|
@ -156,31 +166,41 @@ delete_user(UserID, #{user_group := UserGroup}) ->
|
|||
end
|
||||
end).
|
||||
|
||||
update_user(UserID, #{password := Password},
|
||||
update_user(UserID, User,
|
||||
#{user_group := UserGroup} = State) ->
|
||||
trans(
|
||||
fun() ->
|
||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||
[] ->
|
||||
{error, not_found};
|
||||
[_] ->
|
||||
add_user(UserID, Password, State),
|
||||
{ok, #{user_id => UserID}}
|
||||
[#user_info{is_superuser = IsSuperuser} = UserInfo] ->
|
||||
UserInfo1 = UserInfo#user_info{is_superuser = maps:get(is_superuser, User, IsSuperuser)},
|
||||
UserInfo2 = case maps:get(password, User, undefined) of
|
||||
undefined ->
|
||||
UserInfo1;
|
||||
Password ->
|
||||
{StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State),
|
||||
UserInfo1#user_info{stored_key = StoredKey,
|
||||
server_key = ServerKey,
|
||||
salt = Salt}
|
||||
end,
|
||||
mnesia:write(?TAB, UserInfo2, write),
|
||||
{ok, serialize_user_info(UserInfo2)}
|
||||
end
|
||||
end).
|
||||
|
||||
lookup_user(UserID, #{user_group := UserGroup}) ->
|
||||
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
||||
[#scram_user_credentail{user_id = {_, UserID}}] ->
|
||||
{ok, #{user_id => UserID}};
|
||||
[UserInfo] ->
|
||||
{ok, serialize_user_info(UserInfo)};
|
||||
[] ->
|
||||
{error, not_found}
|
||||
end.
|
||||
|
||||
%% TODO: Support Pagination
|
||||
list_users(#{user_group := UserGroup}) ->
|
||||
Users = [#{user_id => UserID} ||
|
||||
#scram_user_credentail{user_id = {UserGroup0, UserID}} <- ets:tab2list(?TAB), UserGroup0 =:= UserGroup],
|
||||
Users = [serialize_user_info(UserInfo) ||
|
||||
#user_info{user_id = {UserGroup0, _}} = UserInfo <- ets:tab2list(?TAB), UserGroup0 =:= UserGroup],
|
||||
{ok, Users}.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
|
@ -195,13 +215,13 @@ ensure_auth_method(_, _) ->
|
|||
false.
|
||||
|
||||
check_client_first_message(Bin, _Cache, #{iteration_count := IterationCount} = State) ->
|
||||
LookupFun = fun(Username) ->
|
||||
lookup_user2(Username, State)
|
||||
RetrieveFun = fun(Username) ->
|
||||
retrieve(Username, State)
|
||||
end,
|
||||
case esasl_scram:check_client_first_message(
|
||||
Bin,
|
||||
#{iteration_count => IterationCount,
|
||||
lookup => LookupFun}
|
||||
retrieve => RetrieveFun}
|
||||
) of
|
||||
{cotinue, ServerFirstMessage, Cache} ->
|
||||
{cotinue, ServerFirstMessage, Cache};
|
||||
|
@ -209,25 +229,36 @@ check_client_first_message(Bin, _Cache, #{iteration_count := IterationCount} = S
|
|||
{error, not_authorized}
|
||||
end.
|
||||
|
||||
check_client_final_message(Bin, Cache, #{algorithm := Alg}) ->
|
||||
check_client_final_message(Bin, #{is_superuser := IsSuperuser} = Cache, #{algorithm := Alg}) ->
|
||||
case esasl_scram:check_client_final_message(
|
||||
Bin,
|
||||
Cache#{algorithm => Alg}
|
||||
) of
|
||||
{ok, ServerFinalMessage} ->
|
||||
{ok, ServerFinalMessage};
|
||||
{ok, #{is_superuser => IsSuperuser}, ServerFinalMessage};
|
||||
{error, _Reason} ->
|
||||
{error, not_authorized}
|
||||
end.
|
||||
|
||||
add_user(UserID, Password, State) ->
|
||||
UserCredential = esasl_scram:generate_user_credential(UserID, Password, State),
|
||||
mnesia:write(?TAB, UserCredential, write).
|
||||
add_user(UserID, Password, IsSuperuser, State) ->
|
||||
{StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State),
|
||||
UserInfo = #user_info{user_id = UserID,
|
||||
stored_key = StoredKey,
|
||||
server_key = ServerKey,
|
||||
salt = Salt,
|
||||
is_superuser = IsSuperuser},
|
||||
mnesia:write(?TAB, UserInfo, write).
|
||||
|
||||
lookup_user2(UserID, #{user_group := UserGroup}) ->
|
||||
retrieve(UserID, #{user_group := UserGroup}) ->
|
||||
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
||||
[#scram_user_credentail{} = UserCredential] ->
|
||||
{ok, UserCredential};
|
||||
[#user_info{stored_key = StoredKey,
|
||||
server_key = ServerKey,
|
||||
salt = Salt,
|
||||
is_superuser = IsSuperuser}] ->
|
||||
{ok, #{stored_key => StoredKey,
|
||||
server_key => ServerKey,
|
||||
salt => Salt,
|
||||
is_superuser => IsSuperuser}};
|
||||
[] ->
|
||||
{error, not_found}
|
||||
end.
|
||||
|
@ -241,3 +272,6 @@ trans(Fun, Args) ->
|
|||
{atomic, Res} -> Res;
|
||||
{aborted, Reason} -> {error, Reason}
|
||||
end.
|
||||
|
||||
serialize_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
|
||||
#{user_id => UserID, is_superuser => IsSuperuser}.
|
||||
|
|
|
@ -21,13 +21,16 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ structs/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
, validations/0
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -37,13 +40,13 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
structs() -> [""].
|
||||
namespace() -> "authn:password-based:http-server".
|
||||
|
||||
fields("") ->
|
||||
[ {config, {union, [ hoconsc:t(get)
|
||||
, hoconsc:t(post)
|
||||
roots() ->
|
||||
[ {config, {union, [ hoconsc:ref(?MODULE, get)
|
||||
, hoconsc:ref(?MODULE, post)
|
||||
]}}
|
||||
];
|
||||
].
|
||||
|
||||
fields(get) ->
|
||||
[ {method, #{type => get,
|
||||
|
@ -58,15 +61,15 @@ fields(post) ->
|
|||
] ++ common_fields().
|
||||
|
||||
common_fields() ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, ['http-server']}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, ['http-server']}}
|
||||
, {url, fun url/1}
|
||||
, {form_data, fun form_data/1}
|
||||
, {body, fun body/1}
|
||||
, {request_timeout, fun request_timeout/1}
|
||||
] ++ maps:to_list(maps:without([ base_url
|
||||
, pool_type],
|
||||
maps:from_list(emqx_connector_http:fields(config)))).
|
||||
] ++ emqx_authn_schema:common_fields()
|
||||
++ maps:to_list(maps:without([ base_url
|
||||
, pool_type],
|
||||
maps:from_list(emqx_connector_http:fields(config)))).
|
||||
|
||||
validations() ->
|
||||
[ {check_ssl_opts, fun check_ssl_opts/1}
|
||||
|
@ -89,16 +92,15 @@ headers(_) -> undefined.
|
|||
headers_no_content_type(type) -> map();
|
||||
headers_no_content_type(converter) ->
|
||||
fun(Headers) ->
|
||||
maps:merge(default_headers_no_content_type(), transform_header_name(Headers))
|
||||
maps:merge(default_headers_no_content_type(), transform_header_name(Headers))
|
||||
end;
|
||||
headers_no_content_type(default) -> default_headers_no_content_type();
|
||||
headers_no_content_type(_) -> undefined.
|
||||
|
||||
%% TODO: Using map()
|
||||
form_data(type) -> map();
|
||||
form_data(nullable) -> false;
|
||||
form_data(validate) -> [fun check_form_data/1];
|
||||
form_data(_) -> undefined.
|
||||
body(type) -> map();
|
||||
body(nullable) -> false;
|
||||
body(validate) -> [fun check_body/1];
|
||||
body(_) -> undefined.
|
||||
|
||||
request_timeout(type) -> non_neg_integer();
|
||||
request_timeout(default) -> 5000;
|
||||
|
@ -108,10 +110,15 @@ request_timeout(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, get)
|
||||
, hoconsc:ref(?MODULE, post)
|
||||
].
|
||||
|
||||
create(#{ method := Method
|
||||
, url := URL
|
||||
, headers := Headers
|
||||
, form_data := FormData
|
||||
, body := Body
|
||||
, request_timeout := RequestTimeout
|
||||
, '_unique' := Unique
|
||||
} = Config) ->
|
||||
|
@ -120,8 +127,8 @@ create(#{ method := Method
|
|||
State = #{ method => Method
|
||||
, path => Path
|
||||
, base_query => cow_qs:parse_qs(list_to_binary(Query))
|
||||
, headers => normalize_headers(Headers)
|
||||
, form_data => maps:to_list(FormData)
|
||||
, headers => maps:to_list(Headers)
|
||||
, body => maps:to_list(Body)
|
||||
, request_timeout => RequestTimeout
|
||||
, '_unique' => Unique
|
||||
},
|
||||
|
@ -129,9 +136,9 @@ create(#{ method := Method
|
|||
emqx_connector_http,
|
||||
Config#{base_url => maps:remove(query, URIMap),
|
||||
pool_type => random}) of
|
||||
{ok, _} ->
|
||||
{ok, already_created} ->
|
||||
{ok, State};
|
||||
{error, already_created} ->
|
||||
{ok, _} ->
|
||||
{ok, State};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
|
@ -154,15 +161,16 @@ authenticate(Credential, #{'_unique' := Unique,
|
|||
try
|
||||
Request = generate_request(Credential, State),
|
||||
case emqx_resource:query(Unique, {Method, Request, RequestTimeout}) of
|
||||
{ok, 204, _Headers} -> ok;
|
||||
{ok, 204, _Headers} -> {ok, #{is_superuser => false}};
|
||||
{ok, 200, Headers, Body} ->
|
||||
ContentType = proplists:get_value(<<"content-type">>, Headers, <<"application/json">>),
|
||||
case safely_parse_body(ContentType, Body) of
|
||||
{ok, _NBody} ->
|
||||
{ok, NBody} ->
|
||||
%% TODO: Return by user property
|
||||
ok;
|
||||
{ok, #{is_superuser => maps:get(<<"is_superuser">>, NBody, false),
|
||||
user_property => NBody}};
|
||||
{error, _Reason} ->
|
||||
ok
|
||||
{ok, #{is_superuser => false}}
|
||||
end;
|
||||
{error, _Reason} ->
|
||||
ignore
|
||||
|
@ -187,10 +195,10 @@ check_url(URL) ->
|
|||
{error, _} -> false
|
||||
end.
|
||||
|
||||
check_form_data(FormData) ->
|
||||
check_body(Body) ->
|
||||
lists:any(fun({_, V}) ->
|
||||
not is_binary(V)
|
||||
end, maps:to_list(FormData)).
|
||||
end, maps:to_list(Body)).
|
||||
|
||||
default_headers() ->
|
||||
maps:put(<<"content-type">>,
|
||||
|
@ -230,24 +238,21 @@ parse_url(URL) ->
|
|||
URIMap
|
||||
end.
|
||||
|
||||
normalize_headers(Headers) ->
|
||||
[{atom_to_binary(K), V} || {K, V} <- maps:to_list(Headers)].
|
||||
|
||||
generate_request(Credential, #{method := Method,
|
||||
path := Path,
|
||||
base_query := BaseQuery,
|
||||
headers := Headers,
|
||||
form_data := FormData0}) ->
|
||||
FormData = replace_placeholders(FormData0, Credential),
|
||||
body := Body0}) ->
|
||||
Body = replace_placeholders(Body0, Credential),
|
||||
case Method of
|
||||
get ->
|
||||
NPath = append_query(Path, BaseQuery ++ FormData),
|
||||
NPath = append_query(Path, BaseQuery ++ Body),
|
||||
{NPath, Headers};
|
||||
post ->
|
||||
NPath = append_query(Path, BaseQuery),
|
||||
ContentType = proplists:get_value(<<"content-type">>, Headers),
|
||||
Body = serialize_body(ContentType, FormData),
|
||||
{NPath, Headers, Body}
|
||||
NBody = serialize_body(ContentType, Body),
|
||||
{NPath, Headers, NBody}
|
||||
end.
|
||||
|
||||
replace_placeholders(KVs, Credential) ->
|
||||
|
@ -277,10 +282,10 @@ qs([], Acc) ->
|
|||
qs([{K, V} | More], Acc) ->
|
||||
qs(More, [["&", emqx_http_lib:uri_encode(K), "=", emqx_http_lib:uri_encode(V)] | Acc]).
|
||||
|
||||
serialize_body(<<"application/json">>, FormData) ->
|
||||
emqx_json:encode(FormData);
|
||||
serialize_body(<<"application/x-www-form-urlencoded">>, FormData) ->
|
||||
qs(FormData).
|
||||
serialize_body(<<"application/json">>, Body) ->
|
||||
emqx_json:encode(Body);
|
||||
serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
|
||||
qs(Body).
|
||||
|
||||
safely_parse_body(ContentType, Body) ->
|
||||
try parse_body(ContentType, Body) of
|
||||
|
@ -291,8 +296,8 @@ safely_parse_body(ContentType, Body) ->
|
|||
end.
|
||||
|
||||
parse_body(<<"application/json">>, Body) ->
|
||||
{ok, emqx_json:decode(Body)};
|
||||
{ok, emqx_json:decode(Body, [return_maps])};
|
||||
parse_body(<<"application/x-www-form-urlencoded">>, Body) ->
|
||||
{ok, cow_qs:parse_qs(Body)};
|
||||
{ok, maps:from_list(cow_qs:parse_qs(Body))};
|
||||
parse_body(ContentType, _) ->
|
||||
{error, {unsupported_content_type, ContentType}}.
|
||||
{error, {unsupported_content_type, ContentType}}.
|
||||
|
|
|
@ -19,12 +19,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ structs/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -34,14 +37,14 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
structs() -> [""].
|
||||
namespace() -> "authn:jwt".
|
||||
|
||||
fields("") ->
|
||||
[ {config, {union, [ hoconsc:t('hmac-based')
|
||||
, hoconsc:t('public-key')
|
||||
, hoconsc:t('jwks')
|
||||
roots() ->
|
||||
[ {config, {union, [ hoconsc:mk('hmac-based')
|
||||
, hoconsc:mk('public-key')
|
||||
, hoconsc:mk('jwks')
|
||||
]}}
|
||||
];
|
||||
].
|
||||
|
||||
fields('hmac-based') ->
|
||||
[ {use_jwks, {enum, [false]}}
|
||||
|
@ -80,12 +83,11 @@ fields(ssl_disable) ->
|
|||
[ {enable, #{type => false}} ].
|
||||
|
||||
common_fields() ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, [jwt]}}
|
||||
[ {mechanism, {enum, [jwt]}}
|
||||
, {verify_claims, fun verify_claims/1}
|
||||
].
|
||||
] ++ emqx_authn_schema:common_fields().
|
||||
|
||||
secret(type) -> string();
|
||||
secret(type) -> binary();
|
||||
secret(_) -> undefined.
|
||||
|
||||
secret_base64_encoded(type) -> boolean();
|
||||
|
@ -132,6 +134,12 @@ verify_claims(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, 'hmac-based')
|
||||
, hoconsc:ref(?MODULE, 'public-key')
|
||||
, hoconsc:ref(?MODULE, 'jwks')
|
||||
].
|
||||
|
||||
create(#{verify_claims := VerifyClaims} = Config) ->
|
||||
create2(Config#{verify_claims => handle_verify_claims(VerifyClaims)}).
|
||||
|
||||
|
@ -169,7 +177,7 @@ authenticate(Credential = #{password := JWT}, #{jwk := JWK,
|
|||
end,
|
||||
VerifyClaims = replace_placeholder(VerifyClaims0, Credential),
|
||||
case verify(JWT, JWKs, VerifyClaims) of
|
||||
ok -> ok;
|
||||
{ok, Extra} -> {ok, Extra};
|
||||
{error, invalid_signature} -> ignore;
|
||||
{error, {claims, _}} -> {error, bad_username_or_password}
|
||||
end.
|
||||
|
@ -239,7 +247,12 @@ verify(JWS, [JWK | More], VerifyClaims) ->
|
|||
try jose_jws:verify(JWK, JWS) of
|
||||
{true, Payload, _JWS} ->
|
||||
Claims = emqx_json:decode(Payload, [return_maps]),
|
||||
verify_claims(Claims, VerifyClaims);
|
||||
case verify_claims(Claims, VerifyClaims) of
|
||||
ok ->
|
||||
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Claims, false)}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
{false, _, _} ->
|
||||
verify(JWS, More, VerifyClaims)
|
||||
catch
|
||||
|
|
|
@ -20,10 +20,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ structs/0, fields/1 ]).
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -46,6 +51,7 @@
|
|||
{ user_id :: {user_group(), user_id()}
|
||||
, password_hash :: binary()
|
||||
, salt :: binary()
|
||||
, is_superuser :: boolean()
|
||||
}).
|
||||
|
||||
-reflect_type([ user_id_type/0 ]).
|
||||
|
@ -57,7 +63,6 @@
|
|||
|
||||
-define(TAB, ?MODULE).
|
||||
|
||||
-rlog_shard({?AUTH_SHARD, ?TAB}).
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Mnesia bootstrap
|
||||
%%------------------------------------------------------------------------------
|
||||
|
@ -66,6 +71,7 @@
|
|||
-spec(mnesia(boot | copy) -> ok).
|
||||
mnesia(boot) ->
|
||||
ok = ekka_mnesia:create_table(?TAB, [
|
||||
{rlog_shard, ?AUTH_SHARD},
|
||||
{disc_copies, [node()]},
|
||||
{record_name, user_info},
|
||||
{attributes, record_info(fields, user_info)},
|
||||
|
@ -78,15 +84,16 @@ mnesia(copy) ->
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
structs() -> [config].
|
||||
namespace() -> "authn:password-based:builtin-db".
|
||||
|
||||
roots() -> [config].
|
||||
|
||||
fields(config) ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, ['built-in-database']}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, ['built-in-database']}}
|
||||
, {user_id_type, fun user_id_type/1}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
];
|
||||
] ++ emqx_authn_schema:common_fields();
|
||||
|
||||
fields(bcrypt) ->
|
||||
[ {name, {enum, [bcrypt]}}
|
||||
|
@ -101,7 +108,8 @@ user_id_type(type) -> user_id_type();
|
|||
user_id_type(default) -> username;
|
||||
user_id_type(_) -> undefined.
|
||||
|
||||
password_hash_algorithm(type) -> {union, [hoconsc:ref(bcrypt), hoconsc:ref(other_algorithms)]};
|
||||
password_hash_algorithm(type) -> hoconsc:union([hoconsc:ref(?MODULE, bcrypt),
|
||||
hoconsc:ref(?MODULE, other_algorithms)]);
|
||||
password_hash_algorithm(default) -> #{<<"name">> => sha256};
|
||||
password_hash_algorithm(_) -> undefined.
|
||||
|
||||
|
@ -113,6 +121,9 @@ salt_rounds(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[hoconsc:ref(?MODULE, config)].
|
||||
|
||||
create(#{ user_id_type := Type
|
||||
, password_hash_algorithm := #{name := bcrypt,
|
||||
salt_rounds := SaltRounds}
|
||||
|
@ -147,13 +158,13 @@ authenticate(#{password := Password} = Credential,
|
|||
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
||||
[] ->
|
||||
ignore;
|
||||
[#user_info{password_hash = PasswordHash, salt = Salt0}] ->
|
||||
[#user_info{password_hash = PasswordHash, salt = Salt0, is_superuser = IsSuperuser}] ->
|
||||
Salt = case Algorithm of
|
||||
bcrypt -> PasswordHash;
|
||||
_ -> Salt0
|
||||
end,
|
||||
case PasswordHash =:= hash(Algorithm, Password, Salt) of
|
||||
true -> ok;
|
||||
true -> {ok, #{is_superuser => IsSuperuser}};
|
||||
false -> {error, bad_username_or_password}
|
||||
end
|
||||
end.
|
||||
|
@ -161,7 +172,7 @@ authenticate(#{password := Password} = Credential,
|
|||
destroy(#{user_group := UserGroup}) ->
|
||||
trans(
|
||||
fun() ->
|
||||
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_'}, [], ['$_']}],
|
||||
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}],
|
||||
ok = lists:foreach(fun delete_user2/1, mnesia:select(?TAB, MatchSpec, write))
|
||||
end).
|
||||
|
||||
|
@ -179,14 +190,16 @@ import_users(Filename0, State) ->
|
|||
end.
|
||||
|
||||
add_user(#{user_id := UserID,
|
||||
password := Password},
|
||||
password := Password} = UserInfo,
|
||||
#{user_group := UserGroup} = State) ->
|
||||
trans(
|
||||
fun() ->
|
||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||
[] ->
|
||||
add(UserID, Password, State),
|
||||
{ok, #{user_id => UserID}};
|
||||
{PasswordHash, Salt} = hash(Password, State),
|
||||
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
|
||||
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
|
||||
[_] ->
|
||||
{error, already_exist}
|
||||
end
|
||||
|
@ -203,29 +216,38 @@ delete_user(UserID, #{user_group := UserGroup}) ->
|
|||
end
|
||||
end).
|
||||
|
||||
update_user(UserID, #{password := Password},
|
||||
update_user(UserID, UserInfo,
|
||||
#{user_group := UserGroup} = State) ->
|
||||
trans(
|
||||
fun() ->
|
||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||
[] ->
|
||||
{error, not_found};
|
||||
[_] ->
|
||||
add(UserID, Password, State),
|
||||
{ok, #{user_id => UserID}}
|
||||
[#user_info{ password_hash = PasswordHash
|
||||
, salt = Salt
|
||||
, is_superuser = IsSuperuser}] ->
|
||||
NSuperuser = maps:get(is_superuser, UserInfo, IsSuperuser),
|
||||
{NPasswordHash, NSalt} = case maps:get(password, UserInfo, undefined) of
|
||||
undefined ->
|
||||
{PasswordHash, Salt};
|
||||
Password ->
|
||||
hash(Password, State)
|
||||
end,
|
||||
insert_user(UserGroup, UserID, NPasswordHash, NSalt, NSuperuser),
|
||||
{ok, #{user_id => UserID, is_superuser => NSuperuser}}
|
||||
end
|
||||
end).
|
||||
|
||||
lookup_user(UserID, #{user_group := UserGroup}) ->
|
||||
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
||||
[#user_info{user_id = {_, UserID}}] ->
|
||||
{ok, #{user_id => UserID}};
|
||||
[UserInfo] ->
|
||||
{ok, serialize_user_info(UserInfo)};
|
||||
[] ->
|
||||
{error, not_found}
|
||||
end.
|
||||
|
||||
list_users(#{user_group := UserGroup}) ->
|
||||
Users = [#{user_id => UserID} || #user_info{user_id = {UserGroup0, UserID}} <- ets:tab2list(?TAB), UserGroup0 =:= UserGroup],
|
||||
Users = [serialize_user_info(UserInfo) || #user_info{user_id = {UserGroup0, _}} = UserInfo <- ets:tab2list(?TAB), UserGroup0 =:= UserGroup],
|
||||
{ok, Users}.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
|
@ -268,7 +290,8 @@ import(UserGroup, [#{<<"user_id">> := UserID,
|
|||
<<"password_hash">> := PasswordHash} = UserInfo | More])
|
||||
when is_binary(UserID) andalso is_binary(PasswordHash) ->
|
||||
Salt = maps:get(<<"salt">>, UserInfo, <<>>),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt),
|
||||
IsSuperuser = maps:get(<<"is_superuser">>, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
|
||||
import(UserGroup, More);
|
||||
import(_UserGroup, [_ | _More]) ->
|
||||
{error, bad_format}.
|
||||
|
@ -282,7 +305,8 @@ import(UserGroup, File, Seq) ->
|
|||
{ok, #{user_id := UserID,
|
||||
password_hash := PasswordHash} = UserInfo} ->
|
||||
Salt = maps:get(salt, UserInfo, <<>>),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt),
|
||||
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
|
||||
import(UserGroup, File, Seq);
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
|
@ -307,8 +331,6 @@ get_csv_header(File) ->
|
|||
get_user_info_by_seq(Fields, Seq) ->
|
||||
get_user_info_by_seq(Fields, Seq, #{}).
|
||||
|
||||
get_user_info_by_seq([], [], #{user_id := _, password_hash := _, salt := _} = Acc) ->
|
||||
{ok, Acc};
|
||||
get_user_info_by_seq([], [], #{user_id := _, password_hash := _} = Acc) ->
|
||||
{ok, Acc};
|
||||
get_user_info_by_seq(_, [], _) ->
|
||||
|
@ -319,19 +341,13 @@ get_user_info_by_seq([PasswordHash | More1], [<<"password_hash">> | More2], Acc)
|
|||
get_user_info_by_seq(More1, More2, Acc#{password_hash => PasswordHash});
|
||||
get_user_info_by_seq([Salt | More1], [<<"salt">> | More2], Acc) ->
|
||||
get_user_info_by_seq(More1, More2, Acc#{salt => Salt});
|
||||
get_user_info_by_seq([<<"true">> | More1], [<<"is_superuser">> | More2], Acc) ->
|
||||
get_user_info_by_seq(More1, More2, Acc#{is_superuser => true});
|
||||
get_user_info_by_seq([<<"false">> | More1], [<<"is_superuser">> | More2], Acc) ->
|
||||
get_user_info_by_seq(More1, More2, Acc#{is_superuser => false});
|
||||
get_user_info_by_seq(_, _, _) ->
|
||||
{error, bad_format}.
|
||||
|
||||
-compile({inline, [add/3]}).
|
||||
add(UserID, Password, #{user_group := UserGroup,
|
||||
password_hash_algorithm := Algorithm} = State) ->
|
||||
Salt = gen_salt(State),
|
||||
PasswordHash = hash(Algorithm, Password, Salt),
|
||||
case Algorithm of
|
||||
bcrypt -> insert_user(UserGroup, UserID, PasswordHash);
|
||||
_ -> insert_user(UserGroup, UserID, PasswordHash, Salt)
|
||||
end.
|
||||
|
||||
gen_salt(#{password_hash_algorithm := plain}) ->
|
||||
<<>>;
|
||||
gen_salt(#{password_hash_algorithm := bcrypt,
|
||||
|
@ -347,13 +363,16 @@ hash(bcrypt, Password, Salt) ->
|
|||
hash(Algorithm, Password, Salt) ->
|
||||
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>).
|
||||
|
||||
insert_user(UserGroup, UserID, PasswordHash) ->
|
||||
insert_user(UserGroup, UserID, PasswordHash, <<>>).
|
||||
hash(Password, #{password_hash_algorithm := Algorithm} = State) ->
|
||||
Salt = gen_salt(State),
|
||||
PasswordHash = hash(Algorithm, Password, Salt),
|
||||
{PasswordHash, Salt}.
|
||||
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt) ->
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) ->
|
||||
UserInfo = #user_info{user_id = {UserGroup, UserID},
|
||||
password_hash = PasswordHash,
|
||||
salt = Salt},
|
||||
salt = Salt,
|
||||
is_superuser = IsSuperuser},
|
||||
mnesia:write(?TAB, UserInfo, write).
|
||||
|
||||
delete_user2(UserInfo) ->
|
||||
|
@ -376,8 +395,10 @@ trans(Fun, Args) ->
|
|||
{aborted, Reason} -> {error, Reason}
|
||||
end.
|
||||
|
||||
|
||||
to_binary(B) when is_binary(B) ->
|
||||
B;
|
||||
to_binary(L) when is_list(L) ->
|
||||
iolist_to_binary(L).
|
||||
|
||||
serialize_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
|
||||
#{user_id => UserID, is_superuser => IsSuperuser}.
|
||||
|
|
|
@ -21,12 +21,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ structs/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -36,14 +39,14 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
structs() -> [""].
|
||||
namespace() -> "authn:password-based:mongodb".
|
||||
|
||||
fields("") ->
|
||||
[ {config, {union, [ hoconsc:t(standalone)
|
||||
, hoconsc:t('replica-set')
|
||||
, hoconsc:t('sharded-cluster')
|
||||
roots() ->
|
||||
[ {config, {union, [ hoconsc:mk(standalone)
|
||||
, hoconsc:mk('replica-set')
|
||||
, hoconsc:mk('sharded-cluster')
|
||||
]}}
|
||||
];
|
||||
].
|
||||
|
||||
fields(standalone) ->
|
||||
common_fields() ++ emqx_connector_mongo:fields(single);
|
||||
|
@ -55,16 +58,16 @@ fields('sharded-cluster') ->
|
|||
common_fields() ++ emqx_connector_mongo:fields(sharded).
|
||||
|
||||
common_fields() ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, [mongodb]}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, [mongodb]}}
|
||||
, {collection, fun collection/1}
|
||||
, {selector, fun selector/1}
|
||||
, {password_hash_field, fun password_hash_field/1}
|
||||
, {salt_field, fun salt_field/1}
|
||||
, {is_superuser_field, fun is_superuser_field/1}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
, {salt_position, fun salt_position/1}
|
||||
].
|
||||
] ++ emqx_authn_schema:common_fields().
|
||||
|
||||
collection(type) -> binary();
|
||||
collection(nullable) -> false;
|
||||
|
@ -82,6 +85,10 @@ salt_field(type) -> binary();
|
|||
salt_field(nullable) -> true;
|
||||
salt_field(_) -> undefined.
|
||||
|
||||
is_superuser_field(type) -> binary();
|
||||
is_superuser_field(nullable) -> true;
|
||||
is_superuser_field(_) -> undefined.
|
||||
|
||||
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
|
||||
password_hash_algorithm(default) -> sha256;
|
||||
password_hash_algorithm(_) -> undefined.
|
||||
|
@ -94,6 +101,12 @@ salt_position(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, standalone)
|
||||
, hoconsc:ref(?MODULE, 'replica-set')
|
||||
, hoconsc:ref(?MODULE, 'sharded-cluster')
|
||||
].
|
||||
|
||||
create(#{ selector := Selector
|
||||
, '_unique' := Unique
|
||||
} = Config) ->
|
||||
|
@ -101,14 +114,15 @@ create(#{ selector := Selector
|
|||
State = maps:with([ collection
|
||||
, password_hash_field
|
||||
, salt_field
|
||||
, is_superuser_field
|
||||
, password_hash_algorithm
|
||||
, salt_position
|
||||
, '_unique'], Config),
|
||||
NState = State#{selector => NSelector},
|
||||
case emqx_resource:create_local(Unique, emqx_connector_mongo, Config) of
|
||||
{ok, _} ->
|
||||
{ok, already_created} ->
|
||||
{ok, NState};
|
||||
{error, already_created} ->
|
||||
{ok, _} ->
|
||||
{ok, NState};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
|
@ -140,7 +154,8 @@ authenticate(#{password := Password} = Credential,
|
|||
ignore;
|
||||
Doc ->
|
||||
case check_password(Password, Doc, State) of
|
||||
ok -> ok;
|
||||
ok ->
|
||||
{ok, #{is_superuser => is_superuser(Doc, State)}};
|
||||
{error, {cannot_find_password_hash_field, PasswordHashField}} ->
|
||||
?LOG(error, "['~s'] Can't find password hash field: ~s", [Unique, PasswordHashField]),
|
||||
{error, bad_username_or_password};
|
||||
|
@ -221,6 +236,11 @@ check_password(Password,
|
|||
end
|
||||
end.
|
||||
|
||||
is_superuser(Doc, #{is_superuser_field := IsSuperuserField}) ->
|
||||
maps:get(IsSuperuserField, Doc, false);
|
||||
is_superuser(_, _) ->
|
||||
false.
|
||||
|
||||
hash(Algorithm, Password, Salt, prefix) ->
|
||||
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
|
||||
hash(Algorithm, Password, Salt, suffix) ->
|
||||
|
|
|
@ -21,12 +21,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ structs/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -36,17 +39,19 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
structs() -> [config].
|
||||
namespace() -> "authn:password-based:mysql".
|
||||
|
||||
roots() -> [config].
|
||||
|
||||
fields(config) ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, [mysql]}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, [mysql]}}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
, {salt_position, fun salt_position/1}
|
||||
, {query, fun query/1}
|
||||
, {query_timeout, fun query_timeout/1}
|
||||
] ++ emqx_connector_schema_lib:relational_db_fields()
|
||||
] ++ emqx_authn_schema:common_fields()
|
||||
++ emqx_connector_schema_lib:relational_db_fields()
|
||||
++ emqx_connector_schema_lib:ssl_fields().
|
||||
|
||||
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
|
||||
|
@ -69,6 +74,9 @@ query_timeout(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[hoconsc:ref(?MODULE, config)].
|
||||
|
||||
create(#{ password_hash_algorithm := Algorithm
|
||||
, salt_position := SaltPosition
|
||||
, query := Query0
|
||||
|
@ -83,9 +91,9 @@ create(#{ password_hash_algorithm := Algorithm
|
|||
query_timeout => QueryTimeout,
|
||||
'_unique' => Unique},
|
||||
case emqx_resource:create_local(Unique, emqx_connector_mysql, Config) of
|
||||
{ok, _} ->
|
||||
{ok, already_created} ->
|
||||
{ok, State};
|
||||
{error, already_created} ->
|
||||
{ok, _} ->
|
||||
{ok, State};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
|
@ -112,22 +120,26 @@ authenticate(#{password := Password} = Credential,
|
|||
case emqx_resource:query(Unique, {sql, Query, Params, Timeout}) of
|
||||
{ok, _Columns, []} -> ignore;
|
||||
{ok, Columns, Rows} ->
|
||||
%% TODO: Support superuser
|
||||
Selected = maps:from_list(lists:zip(Columns, Rows)),
|
||||
check_password(Password, Selected, State);
|
||||
case check_password(Password, Selected, State) of
|
||||
ok ->
|
||||
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
{error, _Reason} ->
|
||||
ignore
|
||||
end
|
||||
catch
|
||||
error:Reason ->
|
||||
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Reason]),
|
||||
error:Error ->
|
||||
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Error]),
|
||||
ignore
|
||||
end.
|
||||
|
||||
destroy(#{'_unique' := Unique}) ->
|
||||
_ = emqx_resource:remove_local(Unique),
|
||||
ok.
|
||||
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%------------------------------------------------------------------------------
|
||||
|
@ -135,17 +147,17 @@ destroy(#{'_unique' := Unique}) ->
|
|||
check_password(undefined, _Selected, _State) ->
|
||||
{error, bad_username_or_password};
|
||||
check_password(Password,
|
||||
#{password_hash := Hash},
|
||||
#{<<"password_hash">> := Hash},
|
||||
#{password_hash_algorithm := bcrypt}) ->
|
||||
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
|
||||
true -> ok;
|
||||
false -> {error, bad_username_or_password}
|
||||
end;
|
||||
check_password(Password,
|
||||
#{password_hash := Hash} = Selected,
|
||||
#{<<"password_hash">> := Hash} = Selected,
|
||||
#{password_hash_algorithm := Algorithm,
|
||||
salt_position := SaltPosition}) ->
|
||||
Salt = maps:get(salt, Selected, <<>>),
|
||||
Salt = maps:get(<<"salt">>, Selected, <<>>),
|
||||
case Hash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of
|
||||
true -> ok;
|
||||
false -> {error, bad_username_or_password}
|
||||
|
|
|
@ -1,58 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_authn_other_schema).
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
|
||||
-export([ structs/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
structs() -> [ "filename", "position", "user_info", "new_user_info"].
|
||||
|
||||
fields("filename") ->
|
||||
[ {filename, fun filename/1} ];
|
||||
fields("position") ->
|
||||
[ {position, fun position/1} ];
|
||||
fields("user_info") ->
|
||||
[ {user_id, fun user_id/1}
|
||||
, {password, fun password/1}
|
||||
];
|
||||
fields("new_user_info") ->
|
||||
[ {password, fun password/1}
|
||||
].
|
||||
|
||||
filename(type) -> string();
|
||||
filename(nullable) -> false;
|
||||
filename(_) -> undefined.
|
||||
|
||||
position(type) -> integer();
|
||||
position(validate) -> [fun (Position) -> Position > 0 end];
|
||||
position(nullable) -> false;
|
||||
position(_) -> undefined.
|
||||
|
||||
user_id(type) -> binary();
|
||||
user_id(nullable) -> false;
|
||||
user_id(_) -> undefined.
|
||||
|
||||
password(type) -> binary();
|
||||
password(nullable) -> false;
|
||||
password(_) -> undefined.
|
||||
|
|
@ -18,13 +18,19 @@
|
|||
|
||||
-include("emqx_authn.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
-include_lib("epgsql/include/epgsql.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ structs/0, fields/1 ]).
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -34,16 +40,18 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
structs() -> [config].
|
||||
namespace() -> "authn:password-based:postgresql".
|
||||
|
||||
roots() -> [config].
|
||||
|
||||
fields(config) ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, [pgsql]}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, [postgresql]}}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
, {salt_position, {enum, [prefix, suffix]}}
|
||||
, {query, fun query/1}
|
||||
] ++ emqx_connector_schema_lib:relational_db_fields()
|
||||
] ++ emqx_authn_schema:common_fields()
|
||||
++ emqx_connector_schema_lib:relational_db_fields()
|
||||
++ emqx_connector_schema_lib:ssl_fields().
|
||||
|
||||
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
|
||||
|
@ -58,6 +66,9 @@ query(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[hoconsc:ref(?MODULE, config)].
|
||||
|
||||
create(#{ query := Query0
|
||||
, password_hash_algorithm := Algorithm
|
||||
, salt_position := SaltPosition
|
||||
|
@ -70,9 +81,9 @@ create(#{ query := Query0
|
|||
salt_position => SaltPosition,
|
||||
'_unique' => Unique},
|
||||
case emqx_resource:create_local(Unique, emqx_connector_pgsql, Config) of
|
||||
{ok, _} ->
|
||||
{ok, already_created} ->
|
||||
{ok, State};
|
||||
{error, already_created} ->
|
||||
{ok, _} ->
|
||||
{ok, State};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
|
@ -98,22 +109,27 @@ authenticate(#{password := Password} = Credential,
|
|||
case emqx_resource:query(Unique, {sql, Query, Params}) of
|
||||
{ok, _Columns, []} -> ignore;
|
||||
{ok, Columns, Rows} ->
|
||||
%% TODO: Support superuser
|
||||
Selected = maps:from_list(lists:zip(Columns, Rows)),
|
||||
check_password(Password, Selected, State);
|
||||
NColumns = [Name || #column{name = Name} <- Columns],
|
||||
Selected = maps:from_list(lists:zip(NColumns, Rows)),
|
||||
case check_password(Password, Selected, State) of
|
||||
ok ->
|
||||
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
{error, _Reason} ->
|
||||
ignore
|
||||
end
|
||||
catch
|
||||
error:Reason ->
|
||||
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Reason]),
|
||||
error:Error ->
|
||||
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Error]),
|
||||
ignore
|
||||
end.
|
||||
|
||||
destroy(#{'_unique' := Unique}) ->
|
||||
_ = emqx_resource:remove_local(Unique),
|
||||
ok.
|
||||
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%------------------------------------------------------------------------------
|
||||
|
@ -121,17 +137,17 @@ destroy(#{'_unique' := Unique}) ->
|
|||
check_password(undefined, _Selected, _State) ->
|
||||
{error, bad_username_or_password};
|
||||
check_password(Password,
|
||||
#{password_hash := Hash},
|
||||
#{<<"password_hash">> := Hash},
|
||||
#{password_hash_algorithm := bcrypt}) ->
|
||||
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
|
||||
true -> ok;
|
||||
false -> {error, bad_username_or_password}
|
||||
end;
|
||||
check_password(Password,
|
||||
#{password_hash := Hash} = Selected,
|
||||
#{<<"password_hash">> := Hash} = Selected,
|
||||
#{password_hash_algorithm := Algorithm,
|
||||
salt_position := SaltPosition}) ->
|
||||
Salt = maps:get(salt, Selected, <<>>),
|
||||
Salt = maps:get(<<"salt">>, Selected, <<>>),
|
||||
case Hash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of
|
||||
true -> ok;
|
||||
false -> {error, bad_username_or_password}
|
||||
|
|
|
@ -21,12 +21,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ structs/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -36,14 +39,14 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
structs() -> [""].
|
||||
namespace() -> "authn:password-based:redis".
|
||||
|
||||
fields("") ->
|
||||
[ {config, {union, [ hoconsc:t(standalone)
|
||||
, hoconsc:t(cluster)
|
||||
, hoconsc:t(sentinel)
|
||||
roots() ->
|
||||
[ {config, {union, [ hoconsc:mk(standalone)
|
||||
, hoconsc:mk(cluster)
|
||||
, hoconsc:mk(sentinel)
|
||||
]}}
|
||||
];
|
||||
].
|
||||
|
||||
fields(standalone) ->
|
||||
common_fields() ++ emqx_connector_redis:fields(single);
|
||||
|
@ -55,13 +58,12 @@ fields(sentinel) ->
|
|||
common_fields() ++ emqx_connector_redis:fields(sentinel).
|
||||
|
||||
common_fields() ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, [redis]}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, [redis]}}
|
||||
, {query, fun query/1}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
, {salt_position, fun salt_position/1}
|
||||
].
|
||||
] ++ emqx_authn_schema:common_fields().
|
||||
|
||||
query(type) -> string();
|
||||
query(nullable) -> false;
|
||||
|
@ -79,6 +81,12 @@ salt_position(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, standalone)
|
||||
, hoconsc:ref(?MODULE, cluster)
|
||||
, hoconsc:ref(?MODULE, sentinel)
|
||||
].
|
||||
|
||||
create(#{ query := Query
|
||||
, '_unique' := Unique
|
||||
} = Config) ->
|
||||
|
@ -89,9 +97,9 @@ create(#{ query := Query
|
|||
, '_unique'], Config),
|
||||
NState = State#{query => NQuery},
|
||||
case emqx_resource:create_local(Unique, emqx_connector_redis, Config) of
|
||||
{ok, _} ->
|
||||
{ok, already_created} ->
|
||||
{ok, NState};
|
||||
{error, already_created} ->
|
||||
{ok, _} ->
|
||||
{ok, NState};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
|
@ -124,7 +132,13 @@ authenticate(#{password := Password} = Credential,
|
|||
NKey = binary_to_list(iolist_to_binary(replace_placeholders(Key, Credential))),
|
||||
case emqx_resource:query(Unique, {cmd, [Command, NKey | Fields]}) of
|
||||
{ok, Values} ->
|
||||
check_password(Password, merge(Fields, Values), State);
|
||||
Selected = merge(Fields, Values),
|
||||
case check_password(Password, Selected, State) of
|
||||
ok ->
|
||||
{ok, #{is_superuser => maps:get("is_superuser", Selected, false)}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
{error, Reason} ->
|
||||
?LOG(error, "['~s'] Query failed: ~p", [Unique, Reason]),
|
||||
ignore
|
||||
|
@ -166,11 +180,11 @@ check_fields(["password_hash" | More], false) ->
|
|||
check_fields(More, true);
|
||||
check_fields(["salt" | More], HasPassHash) ->
|
||||
check_fields(More, HasPassHash);
|
||||
% check_fields(["is_superuser" | More], HasPassHash) ->
|
||||
% check_fields(More, HasPassHash);
|
||||
check_fields(["is_superuser" | More], HasPassHash) ->
|
||||
check_fields(More, HasPassHash);
|
||||
check_fields([Field | _], _) ->
|
||||
error({unsupported_field, Field}).
|
||||
|
||||
|
||||
parse_key(Key) ->
|
||||
Tokens = re:split(Key, "(" ++ ?RE_PLACEHOLDER ++ ")", [{return, binary}, group, trim]),
|
||||
parse_key(Tokens, []).
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
user_id,password_hash,salt
|
||||
myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235
|
||||
myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139
|
||||
user_id,password_hash,salt,is_superuser
|
||||
myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235,true
|
||||
myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139,false
|
||||
|
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue