Merge branch 'master' into EMQX-871-872

commit 69673613d6
Author: x1001100011
Date:   2021-09-09 23:38:38 -07:00

415 changed files with 43239 additions and 17389 deletions

View File

@@ -36,9 +36,9 @@ emqx_test(){
         "zip")
             packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.zip)
             unzip -q "${PACKAGE_PATH}/${packagename}"
-            export EMQX_ZONE__EXTERNAL__SERVER_KEEPALIVE=60 \
+            export EMQX_ZONES__DEFAULT__MQTT__SERVER_KEEPALIVE=60 \
                    EMQX_MQTT__MAX_TOPIC_ALIAS=10
-            [[ $(arch) == *arm* || $(arch) == aarch64 ]] && export EMQX_ZONES__DEFAULT__LISTENERS__MQTT_QUIC__ENABLED=false
+            [[ $(arch) == *arm* || $(arch) == aarch64 ]] && export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
             # sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins
             echo "running ${packagename} start"
@@ -48,7 +48,7 @@ emqx_test(){
                 exit 1
             fi
             IDLE_TIME=0
-            while ! curl http://localhost:8081/api/v5/status >/dev/null 2>&1; do
+            while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
                 if [ $IDLE_TIME -gt 10 ]
                 then
                     echo "emqx running error"
@@ -91,6 +91,12 @@ emqx_test(){
             ;;
         "rpm")
             packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.rpm)
+            if [[ "${ARCH}" == "amd64" && $(rpm -E '%{rhel}') == 7 ]];
+            then
+                # EMQX OTP requires openssl11 to have TLS1.3 support
+                yum install -y openssl11;
+            fi
             rpm -ivh "${PACKAGE_PATH}/${packagename}"
             if ! rpm -q emqx | grep -q emqx; then
                 echo "package install error"
@@ -119,15 +125,14 @@ run_test(){
     if [ -f "$emqx_env_vars" ];
     then
         tee -a "$emqx_env_vars" <<EOF
-export EMQX_ZONE__EXTERNAL__SERVER_KEEPALIVE=60
+export EMQX_ZONES__DEFAULT__MQTT__SERVER_KEEPALIVE=60
 export EMQX_MQTT__MAX_TOPIC_ALIAS=10
 export EMQX_LOG__CONSOLE_HANDLER__LEVEL=debug
-export EMQX_LOG__FILE_HANDLERS__EMQX_LOG__LEVEL=debug
-export EMQX_LOG__PRIMARY_LEVEL=debug
+export EMQX_LOG__FILE_HANDLERS__DEFAULT__LEVEL=debug
 EOF
         ## for ARM, due to CI env issue, skip start of quic listener for the moment
         [[ $(arch) == *arm* || $(arch) == aarch64 ]] && tee -a "$emqx_env_vars" <<EOF
-export EMQX_ZONES__DEFAULT__LISTENERS__MQTT_QUIC__ENABLED=false
+export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
 EOF
     else
         echo "Error: cannot locate emqx_vars"
@@ -140,7 +145,7 @@ EOF
         exit 1
     fi
     IDLE_TIME=0
-    while ! curl http://localhost:8081/api/v5/status >/dev/null 2>&1; do
+    while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
         if [ $IDLE_TIME -gt 10 ]
         then
             echo "emqx running error"
@@ -169,7 +174,7 @@ EOF
         exit 1
     fi
     IDLE_TIME=0
-    while ! curl http://localhost:8081/api/v5/status >/dev/null 2>&1; do
+    while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
         if [ $IDLE_TIME -gt 10 ]
         then
             echo "emqx service error"

View File

@@ -1,8 +1,7 @@
 EMQX_NAME=emqx
 EMQX_CLUSTER__DISCOVERY_STRATEGY=static
 EMQX_CLUSTER__STATIC__SEEDS="[emqx@node1.emqx.io, emqx@node2.emqx.io]"
-EMQX_ZONES__DEFAULT__LISTENERS__MQTT_TCP__PROXY_PROTOCOL=true
-EMQX_ZONES__DEFAULT__LISTENERS__MQTT_WS__PROXY_PROTOCOL=true
+EMQX_LISTENERS__TCP__DEFAULT__PROXY_PROTOCOL=true
+EMQX_LISTENERS__WS__DEFAULT__PROXY_PROTOCOL=true
 EMQX_LOG__CONSOLE_HANDLER__ENABLE=true
 EMQX_LOG__CONSOLE_HANDLER__LEVEL=debug
-EMQX_LOG__PRIMARY_LEVEL=debug

View File

@@ -33,7 +33,7 @@ defaults
 frontend emqx_mgmt
     mode tcp
     option tcplog
-    bind *:8081
+    bind *:18083
     default_backend emqx_mgmt_back

 frontend emqx_dashboard
@@ -45,8 +45,8 @@ frontend emqx_dashboard
 backend emqx_mgmt_back
     mode http
     # balance static-rr
-    server emqx-1 node1.emqx.io:8081
-    server emqx-2 node2.emqx.io:8081
+    server emqx-1 node1.emqx.io:18083
+    server emqx-2 node2.emqx.io:18083

 backend emqx_dashboard_back
     mode http
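With the management API now proxied on 18083 instead of 8081, a quick smoke test of the frontend above (assuming HAProxy and at least one backend node are running locally; the endpoint path matches the status checks used elsewhere in this diff):

    # exits non-zero until a backend answers the status endpoint
    curl -fs http://localhost:18083/api/v5/status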

View File

@@ -2,7 +2,7 @@
 name: Bug Report
 about: Create a report to help us improve
 title: ''
-labels: Support
+labels: "Support, needs-triage"
 ---

View File

@@ -2,7 +2,7 @@
 name: Feature Request
 about: Suggest an idea for this project
 title: ''
-labels: Feature
+labels: "Feature, needs-triage"
 ---

View File

@@ -2,7 +2,7 @@
 name: Support Needed
 about: Asking a question about usages, docs or anything you're insterested in
 title: ''
-labels: Support
+labels: "Support, needs-triage"
 ---

View File

@@ -20,8 +20,8 @@ jobs:
     container: ${{ matrix.container }}
     outputs:
-      profiles: ${{ steps.set_profile.outputs.profiles}}
-      old_vsns: ${{ steps.set_profile.outputs.old_vsns}}
+      profiles: ${{ steps.set_profile.outputs.profiles }}
+      old_vsns: ${{ steps.set_profile.outputs.old_vsns }}
     steps:
     - uses: actions/checkout@v2
@@ -44,6 +44,11 @@ jobs:
           echo "::set-output name=old_vsns::$old_vsns"
           echo "::set-output name=profiles::[\"emqx\", \"emqx-edge\"]"
         fi
+    - name: get otp version
+      id: get_otp_version
+      run: |
+        otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)"
+        echo "::set-output name=otp::$otp"
     - name: set get token
       if: endsWith(github.repository, 'enterprise')
       run: |
@@ -54,12 +59,13 @@ jobs:
       run: |
         make ensure-rebar3
         ./rebar3 as default get-deps
+        rm -rf rebar.lock
     - name: gen zip file
-      run: zip -ryq source.zip source/* source/.[^.]*
+      run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]*
     - uses: actions/upload-artifact@v2
       with:
-        name: source
-        path: source.zip
+        name: source-${{ steps.get_otp_version.outputs.otp }}
+        path: source-${{ steps.get_otp_version.outputs.otp }}.zip

   windows:
     runs-on: windows-2019
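Condensed, the new "get otp version" step and the renamed zip/upload steps above derive the artifact suffix from the build container's OTP release, so later jobs download, e.g., source-23.2.7.2-emqx-2.zip or source-24.0.5-emqx-1.zip. A sketch of the same two commands run by hand (paths assume the checked-out source directory used in this workflow):

    otp="$(erl -noshell -eval '{ok, V} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(V), halt().')"
    zip -ryq "source-${otp}.zip" source/* source/.[^.]*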
@ -77,19 +83,21 @@ jobs:
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: source name: source-23.2.7.2-emqx-2
path: . path: .
- name: unzip source code - name: unzip source code
run: Expand-Archive -Path source.zip -DestinationPath ./ run: Expand-Archive -Path source-23.2.7.2-emqx-2.zip -DestinationPath ./
- uses: ilammy/msvc-dev-cmd@v1 - uses: ilammy/msvc-dev-cmd@v1
- uses: gleam-lang/setup-erlang@v1.1.0 - uses: gleam-lang/setup-erlang@v1.1.2
id: install_erlang id: install_erlang
## gleam-lang/setup-erlang does not yet support the installation of otp24 on windows
with: with:
otp-version: 24.0.5 otp-version: 23.2
- name: build - name: build
env: env:
PYTHON: python PYTHON: python
DIAGNOSTIC: 1 DIAGNOSTIC: 1
working-directory: source
run: | run: |
$env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH" $env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH"
@ -101,9 +109,9 @@ jobs:
else { else {
$pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip" $pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip"
} }
cd source ## We do not build/release bcrypt and quic for windows package
## We do not build/release bcrypt for windows package
Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/ Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/
Remove-Item -Recurse -Force -Path _build/default/lib/quicer/
if (Test-Path rebar.lock) { if (Test-Path rebar.lock) {
Remove-Item -Force -Path rebar.lock Remove-Item -Force -Path rebar.lock
} }
@ -118,8 +126,8 @@ jobs:
Get-FileHash -Path "_packages/${{ matrix.profile }}/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/${{ matrix.profile }}/$pkg_name.sha256 Get-FileHash -Path "_packages/${{ matrix.profile }}/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/${{ matrix.profile }}/$pkg_name.sha256
- name: run emqx - name: run emqx
timeout-minutes: 1 timeout-minutes: 1
working-directory: source
run: | run: |
cd source
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start
Start-Sleep -s 5 Start-Sleep -s 5
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop
@ -128,7 +136,7 @@ jobs:
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/') if: startsWith(github.ref, 'refs/tags/')
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-23.2.7.2-emqx-2
path: source/_packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
mac: mac:
@ -140,7 +148,7 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
erl_otp: otp:
- 24.0.5-emqx-1 - 24.0.5-emqx-1
exclude: exclude:
- profile: emqx-edge - profile: emqx-edge
@ -148,10 +156,10 @@ jobs:
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: source name: source-${{ matrix.otp }}
path: . path: .
- name: unzip source code - name: unzip source code
run: unzip -q source.zip run: unzip -q source-${{ matrix.otp }}.zip
- name: prepare - name: prepare
run: | run: |
brew update brew update
@ -162,7 +170,7 @@ jobs:
id: cache id: cache
with: with:
path: ~/.kerl path: ~/.kerl
key: erl${{ matrix.erl_otp }}-macos10.15 key: erl${{ matrix.otp }}-macos10.15
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60 timeout-minutes: 60
@ -171,25 +179,25 @@ jobs:
OTP_GITHUB_URL: https://github.com/emqx/otp OTP_GITHUB_URL: https://github.com/emqx/otp
run: | run: |
kerl update releases kerl update releases
kerl build ${{ matrix.erl_otp }} kerl build ${{ matrix.otp }}
kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }} kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build - name: build
working-directory: source
run: | run: |
. $HOME/.kerl/${{ matrix.erl_otp }}/activate . $HOME/.kerl/${{ matrix.otp }}/activate
cd source
make ensure-rebar3 make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3 sudo cp rebar3 /usr/local/bin/rebar3
make ${{ matrix.profile }}-zip make ${{ matrix.profile }}-zip
- name: test - name: test
working-directory: source
run: | run: |
cd source
pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip) pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip)
unzip -q _packages/${{ matrix.profile }}/$pkg_name unzip -q _packages/${{ matrix.profile }}/$pkg_name
# gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins # gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no' ready='no'
for i in {1..10}; do for i in {1..10}; do
if curl -fs 127.0.0.1:8081/api/v5/status > /dev/null; then if curl -fs 127.0.0.1:18083/api/v5/status > /dev/null; then
ready='yes' ready='yes'
break break
fi fi
@ -207,7 +215,7 @@ jobs:
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/') if: startsWith(github.ref, 'refs/tags/')
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: source/_packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
linux: linux:
@ -219,12 +227,6 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
erl_otp:
- 23.2.7.2-emqx-2
- 24.0.5-emqx-1
arch:
- amd64
- arm64
os: os:
- ubuntu20.04 - ubuntu20.04
- ubuntu18.04 - ubuntu18.04
@ -237,6 +239,12 @@ jobs:
- centos6 - centos6
- raspbian10 - raspbian10
# - raspbian9 # - raspbian9
arch:
- amd64
- arm64
otp:
- 23.2.7.2-emqx-2
- 24.0.5-emqx-1
exclude: exclude:
- os: centos6 - os: centos6
arch: arm64 arch: arm64
@ -265,10 +273,10 @@ jobs:
platforms: all platforms: all
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: source name: source-${{ matrix.otp }}
path: . path: .
- name: unzip source code - name: unzip source code
run: unzip -q source.zip run: unzip -q source-${{ matrix.otp }}.zip
- name: downloads old emqx zip packages - name: downloads old emqx zip packages
env: env:
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
@ -298,7 +306,7 @@ jobs:
done done
- name: build emqx packages - name: build emqx packages
env: env:
ERL_OTP: erl${{ matrix.erl_otp }} ERL_OTP: erl${{ matrix.otp }}
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }} ARCH: ${{ matrix.arch }}
SYSTEM: ${{ matrix.os }} SYSTEM: ${{ matrix.os }}
@ -327,7 +335,7 @@ jobs:
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/') if: startsWith(github.ref, 'refs/tags/')
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: source/_packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
docker: docker:
@ -338,67 +346,74 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
erl_otp: otp:
- 24.0.5-emqx-1 - 24.0.5-emqx-1
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: source name: source-${{ matrix.otp }}
path: . path: .
- name: unzip source code - name: unzip source code
run: unzip -q source.zip run: unzip -q source-${{ matrix.otp }}.zip
- name: get version
id: version
working-directory: source
run: echo "::set-output name=version::$(./pkg-vsn.sh)"
- uses: docker/setup-buildx-action@v1 - uses: docker/setup-buildx-action@v1
- uses: docker/setup-qemu-action@v1 - uses: docker/setup-qemu-action@v1
with: with:
image: tonistiigi/binfmt:latest image: tonistiigi/binfmt:latest
platforms: all platforms: all
- name: build emqx docker image - uses: docker/build-push-action@v2
if: github.event_name != 'release' if: github.event_name != 'release'
env: with:
ERL_OTP: erl${{ matrix.erl_otp }} push: false
PROFILE: ${{ matrix.profile }} pull: true
working-directory: source no-cache: true
run: | platforms: linux/amd64,linux/arm64
PKG_VSN="$(./pkg-vsn.sh)" tags: emqx/${{ matrix.profile }}:${{ steps.version.outputs.version }}
docker buildx build --no-cache \ build-args: |
--platform=linux/amd64,linux/arm64 \ PKG_VSN=${{ steps.version.outputs.version }}
--build-arg PKG_VSN=$PKG_VSN \ BUILD_FROM=emqx/build-env:erl${{ matrix.otp }}-alpine
--build-arg BUILD_FROM=emqx/build-env:$ERL_OTP-alpine \ RUN_FROM=alpine:3.14
--build-arg RUN_FROM=alpine:3.14 \ EMQX_NAME=${{ matrix.profile }}
--build-arg EMQX_NAME=$PROFILE \ file: source/deploy/docker/Dockerfile
--tag emqx/$PROFILE:$PKG_VSN \ context: source
-f deploy/docker/Dockerfile .
- uses: docker/login-action@v1 - uses: docker/login-action@v1
if: github.event_name == 'release' if: github.event_name == 'release'
with: with:
username: ${{ secrets.DOCKER_HUB_USER }} username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }} password: ${{ secrets.DOCKER_HUB_TOKEN }}
- name: build emqx docker image - uses: docker/build-push-action@v2
if: github.event_name == 'release' if: github.event_name == 'release'
env: with:
ERL_OTP: erl${{ matrix.erl_otp }} push: true
PROFILE: ${{ matrix.profile }} pull: true
working-directory: source no-cache: true
run: | platforms: linux/amd64,linux/arm64
PKG_VSN="$(./pkg-vsn.sh)" tags: emqx/${{ matrix.profile }}:${{ steps.version.outputs.version }}
docker buildx build --no-cache \ build-args: |
--platform=linux/amd64,linux/arm64 \ PKG_VSN=${{ steps.version.outputs.version }}
--build-arg PKG_VSN=$PKG_VSN \ BUILD_FROM=emqx/build-env:erl${{ matrix.otp }}-alpine
--build-arg BUILD_FROM=emqx/build-env:$ERL_OTP-alpine \ RUN_FROM=alpine:3.14
--build-arg RUN_FROM=alpine:3.14 \ EMQX_NAME=${{ matrix.profile }}
--build-arg EMQX_NAME=$PROFILE \ file: source/deploy/docker/Dockerfile
--tag emqx/$PROFILE:$PKG_VSN \ context: source
-f deploy/docker/Dockerfile \
--push .
delete-artifact: delete-artifact:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
strategy:
matrix:
otp:
- 23.2.7.2-emqx-2
- 24.0.5-emqx-1
needs: [prepare, mac, linux, docker] needs: [prepare, mac, linux, docker]
steps: steps:
- uses: geekyeggo/delete-artifact@v1 - uses: geekyeggo/delete-artifact@v1
with: with:
name: source name: source-${{ matrix.otp }}
upload: upload:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
@ -410,6 +425,8 @@ jobs:
strategy: strategy:
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 24.0.5-emqx-1
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -420,7 +437,7 @@ jobs:
echo 'EOF' >> $GITHUB_ENV echo 'EOF' >> $GITHUB_ENV
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: ./_packages/${{ matrix.profile }} path: ./_packages/${{ matrix.profile }}
- name: install dos2unix - name: install dos2unix
run: sudo apt-get update && sudo apt install -y dos2unix run: sudo apt-get update && sudo apt install -y dos2unix

View File

@@ -113,7 +113,7 @@ jobs:
         ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
         ready='no'
         for i in {1..10}; do
-          if curl -fs 127.0.0.1:8081/api/v5/status > /dev/null; then
+          if curl -fs 127.0.0.1:18083/api/v5/status > /dev/null; then
             ready='yes'
             break
           fi

View File

@ -1,9 +1,10 @@
name: Sync to enterprise name: Sync to enterprise
on: on:
schedule:
- cron: '0 */6 * * *'
push: push:
branches: branches:
- master
- main-v* - main-v*
jobs: jobs:

.github/workflows/run_api_tests.yaml (new file, 102 lines)
View File

@@ -0,0 +1,102 @@
name: API Test Suite
on:
push:
tags:
- e*
- v*
pull_request:
jobs:
build:
runs-on: ubuntu-latest
container: "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
steps:
- uses: actions/checkout@v2
- name: zip emqx-broker
if: endsWith(github.repository, 'emqx')
run: |
make emqx-zip
- name: zip emqx-broker
if: endsWith(github.repository, 'enterprise')
run: |
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
make emqx-ee-zip
- uses: actions/upload-artifact@v2
with:
name: emqx-broker
path: _packages/**/*.zip
api-test:
needs: build
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
script_name:
- api_metrics
- api_subscriptions
steps:
- uses: actions/checkout@v2
with:
repository: emqx/emqx-fvt
path: .
- uses: actions/setup-java@v1
with:
java-version: '8.0.282' # The JDK version to make available on the path.
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
architecture: x64 # (x64 or x86) - defaults to x64
- uses: actions/download-artifact@v2
with:
name: emqx-broker
path: .
- name: start emqx-broker
env:
EMQX_LISTENERS__WSS__DEFAULT__BIND: "0.0.0.0:8085"
run: |
unzip ./emqx/*.zip
./emqx/bin/emqx start
- name: install jmeter
timeout-minutes: 10
env:
JMETER_VERSION: 5.3
run: |
wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz https://downloads.apache.org/jmeter/binaries/apache-jmeter-$JMETER_VERSION.tgz
cd /tmp && tar -xvf apache-jmeter.tgz
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
- name: run ${{ matrix.script_name }}
run: |
/opt/jmeter/bin/jmeter.sh \
-Jjmeter.save.saveservice.output_format=xml -n \
-t .ci/api-test-suite/${{ matrix.script_name }}.jmx \
-Demqx_ip="127.0.0.1" \
-l jmeter_logs/${{ matrix.script_name }}.jtl \
-j jmeter_logs/logs/${{ matrix.script_name }}.log
- name: check test logs
run: |
if cat jmeter_logs/${{ matrix.script_name }}.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
grep -A 5 -B 3 '<failure>true</failure>' jmeter_logs/${{ matrix.script_name }}.jtl > jmeter_logs/${{ matrix.script_name }}_err_api.txt
echo "check logs failed"
exit 1
fi
- uses: actions/upload-artifact@v1
if: failure()
with:
name: jmeter_logs
path: ./jmeter_logs
- uses: actions/upload-artifact@v1
if: failure()
with:
name: jmeter_logs
path: emqx/log
delete-package:
runs-on: ubuntu-20.04
needs: api-test
if: always()
steps:
- uses: geekyeggo/delete-artifact@v1
with:
name: emqx-broker

View File

@ -8,300 +8,186 @@ on:
pull_request: pull_request:
jobs: jobs:
docker_test: prepare:
runs-on: ubuntu-20.04 strategy:
matrix:
container:
- "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
- "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
steps: runs-on: ubuntu-20.04
- uses: actions/checkout@v1 container: ${{ matrix.container }}
- uses: gleam-lang/setup-erlang@v1.1.2
id: install_erlang
with:
otp-version: 24.0.5
- name: prepare
run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
make deps-emqx-ee
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
else
echo "PROFILE=emqx" >> $GITHUB_ENV
echo "TARGET=emqx/emqx" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
fi
- name: make emqx image
run: make $PROFILE-docker
- name: run emqx
timeout-minutes: 5
run: |
set -e -u -x
echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_" >> .ci/docker-compose-file/conf.cluster.env
echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s" >> .ci/docker-compose-file/conf.cluster.env
echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10" >> .ci/docker-compose-file/conf.cluster.env
docker-compose \
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
-f .ci/docker-compose-file/docker-compose-python.yaml \
up -d
while ! docker exec -i node1.emqx.io bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1; do
echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx";
sleep 5;
done
# - name: verify EMQX_LOADED_PLUGINS override working
# run: |
# expected="{emqx_sn, true}."
# output=$(docker exec -i node1.emqx.io bash -c "cat data/loaded_plugins" | tail -n1)
# if [ "$expected" != "$output" ]; then
# exit 1
# fi
- name: make paho tests
run: |
if ! docker exec -i python /scripts/pytest.sh; then
echo "DUMP_CONTAINER_LOGS_BGN"
docker logs haproxy
docker logs node1.emqx.io
docker logs node2.emqx.io
echo "DUMP_CONTAINER_LOGS_END"
exit 1
fi
helm_test: outputs:
runs-on: ubuntu-20.04 profile: ${{ steps.profile.outputs.profile }}
steps: steps:
- uses: actions/checkout@v1 - name: get otp version
- uses: gleam-lang/setup-erlang@v1.1.2 id: get_otp_version
id: install_erlang run: |
with: otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)"
otp-version: 24.0.5 echo "::set-output name=otp::$otp"
- name: prepare - uses: actions/checkout@v2
run: | with:
if make emqx-ee --dry-run > /dev/null 2>&1; then path: source
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials fetch-depth: 0
git config --global credential.helper store - name: set profile
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token id: profile
make deps-emqx-ee shell: bash
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV working-directory: source
echo "PROFILE=emqx-ee" >> $GITHUB_ENV run: |
else vsn="$(./pkg-vsn.sh)"
echo "TARGET=emqx/emqx" >> $GITHUB_ENV if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "PROFILE=emqx" >> $GITHUB_ENV echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
fi git config --global credential.helper store
- name: make emqx image echo "::set-output name=profile::emqx-ee"
run: make $PROFILE-docker else
- name: install k3s echo "::set-output name=profile::emqx"
env: fi
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" - name: get deps
run: | working-directory: source
sudo sh -c "echo \"127.0.0.1 $(hostname)\" >> /etc/hosts" run: |
curl -sfL https://get.k3s.io | sh - make ensure-rebar3
sudo chmod 644 /etc/rancher/k3s/k3s.yaml ./rebar3 as default get-deps
kubectl cluster-info rm -rf rebar.lock
- name: install helm - name: gen zip file
env: run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]*
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" - uses: actions/upload-artifact@v2
run: | with:
curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 name: source-${{ steps.get_otp_version.outputs.otp }}
sudo chmod 700 get_helm.sh path: source-${{ steps.get_otp_version.outputs.otp }}.zip
sudo ./get_helm.sh
helm version
- name: run emqx on chart
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 5
run: |
version=$(./pkg-vsn.sh)
sudo docker save ${TARGET}:$version -o emqx.tar.gz
sudo k3s ctr image import emqx.tar.gz
sed -i -r "s/^appVersion: .*$/appVersion: \"${version}\"/g" deploy/charts/emqx/Chart.yaml docker_test:
sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml runs-on: ubuntu-20.04
needs: prepare
helm install emqx \ strategy:
--set image.repository=${TARGET} \ fail-fast: false
--set image.pullPolicy=Never \ matrix:
--set emqxAclConfig="" \ otp:
--set image.pullPolicy=Never \ - 23.2.7.2-emqx-2
--set emqxConfig.EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s \ - 24.0.5-emqx-1
--set emqxConfig.EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10 \
deploy/charts/emqx \
--debug
while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \ steps:
!= "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do - uses: actions/download-artifact@v2
echo "=============================="; with:
kubectl get pods; name: source-${{ matrix.otp }}
echo "=============================="; path: .
echo "waiting emqx started"; - name: unzip source code
sleep 10; run: unzip -q source-${{ matrix.otp }}.zip
done - name: make docker image
- name: get emqx-0 pods log working-directory: source
if: failure() env:
env: OTP: ${{ matrix.otp }}
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" run: |
run: | make ${{ needs.prepare.outputs.profile }}-docker
kubectl describe pods emqx-0 echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV
kubectl logs emqx-0 echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
- name: get emqx-1 pods log - name: run emqx
if: failure() timeout-minutes: 5
env: working-directory: source
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" run: |
run: | set -e -u -x
kubectl describe pods emqx-1 echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_" >> .ci/docker-compose-file/conf.cluster.env
kubectl logs emqx-1 echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s" >> .ci/docker-compose-file/conf.cluster.env
- name: get emqx-2 pods log echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10" >> .ci/docker-compose-file/conf.cluster.env
if: failure() docker-compose \
env: -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" -f .ci/docker-compose-file/docker-compose-python.yaml \
run: | up -d
kubectl describe pods emqx-2 while ! docker exec -i node1.emqx.io bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1; do
kubectl logs emqx-2 echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx";
- uses: actions/checkout@v2 sleep 5;
with: done
repository: emqx/paho.mqtt.testing - name: make paho tests
ref: develop-4.0 run: |
path: paho.mqtt.testing if ! docker exec -i python /scripts/pytest.sh; then
- name: install pytest echo "DUMP_CONTAINER_LOGS_BGN"
run: | docker logs haproxy
pip install pytest docker logs node1.emqx.io
echo "$HOME/.local/bin" >> $GITHUB_PATH docker logs node2.emqx.io
- name: run paho test echo "DUMP_CONTAINER_LOGS_END"
env: exit 1
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" fi
run: |
emqx_svc=$(kubectl get svc --namespace default emqx -o jsonpath="{.spec.clusterIP}")
emqx1=$(kubectl get pods emqx-1 -o jsonpath='{.status.podIP}')
emqx2=$(kubectl get pods emqx-2 -o jsonpath='{.status.podIP}')
pytest -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host $emqx_svc helm_test:
RESULT=$? runs-on: ubuntu-20.04
pytest -v paho.mqtt.testing/interoperability/test_cluster --host1 $emqx1 --host2 $emqx2 needs: prepare
RESULT=$((RESULT + $?))
if [ 0 -ne $RESULT ]; then
kubectl logs emqx-1
kubectl logs emqx-2
fi
exit $RESULT
relup_test: strategy:
strategy: fail-fast: false
matrix: matrix:
container: otp:
- "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04" - 23.2.7.2-emqx-2
- "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04" - 24.0.5-emqx-1
runs-on: ubuntu-20.04 steps:
container: ${{ matrix.container }} - uses: actions/download-artifact@v2
with:
name: source-${{ matrix.otp }}
path: .
- name: unzip source code
run: unzip -q source-${{ matrix.otp }}.zip
- name: make docker image
working-directory: source
env:
OTP: ${{ matrix.otp }}
run: |
make ${{ needs.prepare.outputs.profile }}-docker
echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
- run: minikube start
- name: run emqx on chart
timeout-minutes: 5
working-directory: source
run: |
minikube image load $TARGET:$EMQX_TAG
defaults: sed -i -r "s/^appVersion: .*$/appVersion: \"$EMQX_TAG\"/g" deploy/charts/emqx/Chart.yaml
run:
shell: bash
steps:
- uses: actions/setup-python@v2
with:
python-version: '3.8'
architecture: 'x64'
- uses: actions/checkout@v2
with:
repository: emqx/paho.mqtt.testing
ref: develop-4.0
path: paho.mqtt.testing
- uses: actions/checkout@v2
with:
repository: terry-xiaoyu/one_more_emqx
ref: master
path: one_more_emqx
- uses: actions/checkout@v2
with:
repository: emqx/emqtt-bench
ref: master
path: emqtt-bench
- uses: actions/checkout@v2
with:
repository: hawk/lux
ref: lux-2.6
path: lux
- uses: actions/checkout@v2
with:
repository: ${{ github.repository }}
path: emqx
fetch-depth: 0
- name: prepare
run: |
if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
else
echo "PROFILE=emqx" >> $GITHUB_ENV
fi
- name: get version
run: |
set -e -x -u
cd emqx
if [ $PROFILE = "emqx" ];then
broker="emqx-ce"
edition='opensource'
else
broker="emqx-ee"
edition='enterprise'
fi
echo "BROKER=$broker" >> $GITHUB_ENV
vsn="$(./pkg-vsn.sh)" helm install emqx \
echo "VSN=$vsn" >> $GITHUB_ENV --set image.repository=$TARGET \
--set image.pullPolicy=Never \
--set emqxAclConfig="" \
--set image.pullPolicy=Never \
--set emqxConfig.EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s \
--set emqxConfig.EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10 \
deploy/charts/emqx \
--debug
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')" while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \
if [ $PROFILE = "emqx" ]; then != "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")" echo "==============================";
else kubectl get pods;
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")" echo "==============================";
fi echo "waiting emqx started";
echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV sleep 10;
- name: download emqx done
run: | - name: get emqx-0 pods log
set -e -x -u if: failure()
mkdir -p emqx/_upgrade_base run: |
cd emqx/_upgrade_base kubectl describe pods emqx-0
old_vsns=($(echo $OLD_VSNS | tr ' ' ' ')) kubectl logs emqx-0
for old_vsn in ${old_vsns[@]}; do - name: get emqx-1 pods log
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip if: failure()
done run: |
- name: build emqx kubectl describe pods emqx-1
run: make -C emqx ${PROFILE}-zip kubectl logs emqx-1
- name: build emqtt-bench - name: get emqx-2 pods log
run: make -C emqtt-bench if: failure()
- name: build lux run: |
run: | kubectl describe pods emqx-2
set -e -u -x kubectl logs emqx-2
cd lux - uses: actions/checkout@v2
autoconf with:
./configure repository: emqx/paho.mqtt.testing
make ref: develop-4.0
make install path: paho.mqtt.testing
- name: run relup test - name: install pytest
timeout-minutes: 20 run: |
run: | pip install pytest
set -e -x -u echo "$HOME/.local/bin" >> $GITHUB_PATH
if [ -n "$OLD_VSNS" ]; then - name: run paho test
mkdir -p packages run: |
cp emqx/_packages/${PROFILE}/*.zip packages kubectl port-forward service/emqx 1883:1883 > /dev/null &
cp emqx/_upgrade_base/*.zip packages pytest -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "127.0.0.1"
lux \
--case_timeout infinity \
--var PROFILE=$PROFILE \
--var PACKAGE_PATH=$(pwd)/packages \
--var BENCH_PATH=$(pwd)/emqtt-bench \
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
--var VSN="$VSN" \
--var OLD_VSNS="$OLD_VSNS" \
emqx/.ci/fvt_tests/relup.lux
fi
- uses: actions/upload-artifact@v1
if: failure()
with:
name: lux_logs
path: lux_logs

.github/workflows/run_relup_tests.yaml (new file, 130 lines)
View File

@@ -0,0 +1,130 @@
name: Release Upgrade Tests
on:
push:
tags:
- v*
- e*
pull_request:
jobs:
relup_test:
strategy:
matrix:
container:
- "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
- "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
runs-on: ubuntu-20.04
container: ${{ matrix.container }}
defaults:
run:
shell: bash
steps:
- uses: actions/setup-python@v2
with:
python-version: '3.8'
architecture: 'x64'
- uses: actions/checkout@v2
with:
repository: emqx/paho.mqtt.testing
ref: develop-4.0
path: paho.mqtt.testing
- uses: actions/checkout@v2
with:
repository: terry-xiaoyu/one_more_emqx
ref: master
path: one_more_emqx
- uses: actions/checkout@v2
with:
repository: emqx/emqtt-bench
ref: master
path: emqtt-bench
- uses: actions/checkout@v2
with:
repository: hawk/lux
ref: lux-2.6
path: lux
- uses: actions/checkout@v2
with:
repository: ${{ github.repository }}
path: emqx
fetch-depth: 0
- name: prepare
run: |
if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
else
echo "PROFILE=emqx" >> $GITHUB_ENV
fi
- name: get version
run: |
set -e -x -u
cd emqx
if [ $PROFILE = "emqx" ];then
broker="emqx-ce"
edition='opensource'
else
broker="emqx-ee"
edition='enterprise'
fi
echo "BROKER=$broker" >> $GITHUB_ENV
vsn="$(./pkg-vsn.sh)"
echo "VSN=$vsn" >> $GITHUB_ENV
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
if [ $PROFILE = "emqx" ]; then
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
else
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
fi
echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV
- name: download emqx
run: |
set -e -x -u
mkdir -p emqx/_upgrade_base
cd emqx/_upgrade_base
old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
for old_vsn in ${old_vsns[@]}; do
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip
done
- name: build emqx
run: make -C emqx ${PROFILE}-zip
- name: build emqtt-bench
run: make -C emqtt-bench
- name: build lux
run: |
set -e -u -x
cd lux
autoconf
./configure
make
make install
- name: run relup test
timeout-minutes: 20
run: |
set -e -x -u
if [ -n "$OLD_VSNS" ]; then
mkdir -p packages
cp emqx/_packages/${PROFILE}/*.zip packages
cp emqx/_upgrade_base/*.zip packages
lux \
--case_timeout infinity \
--var PROFILE=$PROFILE \
--var PACKAGE_PATH=$(pwd)/packages \
--var BENCH_PATH=$(pwd)/emqtt-bench \
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
--var VSN="$VSN" \
--var OLD_VSNS="$OLD_VSNS" \
emqx/.ci/fvt_tests/relup.lux
fi
- uses: actions/upload-artifact@v1
if: failure()
with:
name: lux_logs
path: lux_logs

View File

@@ -98,19 +98,19 @@ jobs:
     - name: run cover
       run: |
         printenv > .env
-        docker exec -i ${{ matrix.otp_release }} bash -c "make cover"
-        docker exec --env-file .env -i ${{ matrix.otp_release }} bash -c "make coveralls"
+        docker exec -i ${{ matrix.otp_release }} bash -c "DIAGNOSTIC=1 make cover"
+        docker exec --env-file .env -i ${{ matrix.otp_release }} bash -c "DIAGNOSTIC=1 make coveralls"
     - name: cat rebar.crashdump
       if: failure()
       run: if [ -f 'rebar3.crashdump' ];then cat 'rebar3.crashdump'; fi
     - uses: actions/upload-artifact@v1
       if: failure()
       with:
-        name: logs
+        name: logs_${{ matrix.otp_release }}
         path: _build/test/logs
     - uses: actions/upload-artifact@v1
       with:
-        name: cover
+        name: cover_${{ matrix.otp_release }}
         path: _build/test/cover

   finish:

View File

@@ -5,7 +5,7 @@ BUILD = $(CURDIR)/build
 SCRIPTS = $(CURDIR)/scripts
 export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
 export EMQX_DESC ?= EMQ X
-export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.4
+export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.11
 ifeq ($(OS),Windows_NT)
     export REBAR_COLOR=none
 endif

View File

@@ -4,7 +4,7 @@
 [![Build Status](https://travis-ci.org/emqx/emqx.svg)](https://travis-ci.org/emqx/emqx)
 [![Coverage Status](https://coveralls.io/repos/github/emqx/emqx/badge.svg)](https://coveralls.io/github/emqx/emqx)
 [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx)](https://hub.docker.com/r/emqx/emqx)
-[![Slack Invite](<https://slack-invite.emqx.io/badge.svg>)](https://slack-invite.emqx.io)
+[![Slack](https://img.shields.io/badge/Slack-EMQ%20X-39AE85?logo=slack)](https://slack-invite.emqx.io/)
 [![Twitter](https://img.shields.io/badge/Twitter-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
 [![Community](https://img.shields.io/badge/Community-EMQ%20X-yellow)](https://askemq.com)
 [![YouTube](https://img.shields.io/badge/Subscribe-EMQ%20中文-FF0000?logo=youtube)](https://www.youtube.com/channel/UCir_r04HIsLjf2qqyZ4A8Cg)
@@ -90,7 +90,7 @@ make eunit ct
 ### 执行部分应用的 common tests

 ```bash
-make apps/emqx_bridge_mqtt-ct
+make apps/emqx_retainer-ct
 ```

 ### 静态分析(Dialyzer)

View File

@@ -4,7 +4,7 @@
 [![Build Status](https://travis-ci.org/emqx/emqx.svg)](https://travis-ci.org/emqx/emqx)
 [![Coverage Status](https://coveralls.io/repos/github/emqx/emqx/badge.svg)](https://coveralls.io/github/emqx/emqx)
 [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx)](https://hub.docker.com/r/emqx/emqx)
-[![Slack Invite](<https://slack-invite.emqx.io/badge.svg>)](https://slack-invite.emqx.io)
+[![Slack](https://img.shields.io/badge/Slack-EMQ%20X-39AE85?logo=slack)](https://slack-invite.emqx.io/)
 [![Twitter](https://img.shields.io/badge/Twitter-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
 [![YouTube](https://img.shields.io/badge/Subscribe-EMQ-FF0000?logo=youtube)](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
@@ -84,7 +84,7 @@ make eunit ct
 ### common test の一部を実行する

 ```bash
-make apps/emqx_bridge_mqtt-ct
+make apps/emqx_retainer-ct
 ```

 ### Dialyzer

View File

@@ -4,7 +4,7 @@
 [![Build Status](https://travis-ci.org/emqx/emqx.svg)](https://travis-ci.org/emqx/emqx)
 [![Coverage Status](https://coveralls.io/repos/github/emqx/emqx/badge.svg?branch=master)](https://coveralls.io/github/emqx/emqx?branch=master)
 [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx)](https://hub.docker.com/r/emqx/emqx)
-[![Slack Invite](<https://slack-invite.emqx.io/badge.svg>)](https://slack-invite.emqx.io)
+[![Slack](https://img.shields.io/badge/Slack-EMQ%20X-39AE85?logo=slack)](https://slack-invite.emqx.io/)
 [![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
 [![Community](https://img.shields.io/badge/Community-EMQ%20X-yellow?logo=github)](https://github.com/emqx/emqx/discussions)
 [![YouTube](https://img.shields.io/badge/Subscribe-EMQ-FF0000?logo=youtube)](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
@@ -93,7 +93,7 @@ make eunit ct
 Пример:

 ```bash
-make apps/emqx_bridge_mqtt-ct
+make apps/emqx_retainer-ct
 ```

 ### Dialyzer

View File

@@ -4,7 +4,7 @@
 [![Build Status](https://travis-ci.org/emqx/emqx.svg)](https://travis-ci.org/emqx/emqx)
 [![Coverage Status](https://coveralls.io/repos/github/emqx/emqx/badge.svg?branch=master)](https://coveralls.io/github/emqx/emqx?branch=master)
 [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx)](https://hub.docker.com/r/emqx/emqx)
-[![Slack Invite](<https://slack-invite.emqx.io/badge.svg>)](https://slack-invite.emqx.io)
+[![Slack](https://img.shields.io/badge/Slack-EMQ%20X-39AE85?logo=slack)](https://slack-invite.emqx.io/)
 [![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
 [![YouTube](https://img.shields.io/badge/Subscribe-EMQ-FF0000?logo=youtube)](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
@@ -92,7 +92,7 @@ make eunit ct
 Examples

 ```bash
-make apps/emqx_bridge_mqtt-ct
+make apps/emqx_retainer-ct
 ```

 ### Dialyzer

File diff suppressed because it is too large.

View File

@@ -26,6 +26,7 @@
 -define(COMMON_SHARD, emqx_common_shard).
 -define(SHARED_SUB_SHARD, emqx_shared_sub_shard).
 -define(MOD_DELAYED_SHARD, emqx_delayed_shard).
+-define(CM_SHARD, emqx_cm_shard).

 %%--------------------------------------------------------------------
 %% Banner
@@ -125,8 +126,7 @@
 -record(banned, {
           who :: {clientid, binary()}
              | {peerhost, inet:ip_address()}
-             | {username, binary()}
-             | {ip_address, inet:ip_address()},
+             | {username, binary()},
           by :: binary(),
           reason :: binary(),
           at :: integer(),
@@ -134,3 +134,19 @@
         }).
 -endif.
%%--------------------------------------------------------------------
%% Authentication
%%--------------------------------------------------------------------
-record(authenticator,
{ id :: binary()
, provider :: module()
, enable :: boolean()
, state :: map()
}).
-record(chain,
{ name :: atom()
, authenticators :: [#authenticator{}]
}).

View File

@@ -29,7 +29,7 @@
 -ifndef(EMQX_ENTERPRISE).

--define(EMQX_RELEASE, {opensource, "5.0-alpha.3"}).
+-define(EMQX_RELEASE, {opensource, "5.0-alpha.5"}).

 -else.

View File

@@ -13,9 +13,9 @@
     , {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
     , {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.2"}}}
     , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.2"}}}
-    , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.10.4"}}}
+    , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.10.8"}}}
     , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
-    , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.11.0"}}}
+    , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.15.0"}}}
     , {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
     , {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
     , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.14.1"}}}
@@ -28,8 +28,8 @@
 [{deps,
    [ meck
    , {bbmustache,"1.10.0"}
-   , {emqx_ct_helpers, {git,"https://github.com/emqx/emqx-ct-helpers", {branch,"hocon"}}}
-   , {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.4.2"}}}
+   , {emqx_ct_helpers, {git,"https://github.com/emqx/emqx-ct-helpers.git", {tag,"2.1.0"}}}
+   , {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.4.3"}}}
    ]},
    {extra_src_dirs, [{"test",[recursive]}]}
 ]}

View File

@@ -18,7 +18,7 @@ IsQuicSupp = fun() ->
 end,

 Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {branch, "0.6.0"}}},
-Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {branch, "0.0.7"}}},
+Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {branch, "0.0.8"}}},

 ExtraDeps = fun(C) ->
     {deps, Deps0} = lists:keyfind(deps, 1, C),

View File

@@ -55,6 +55,18 @@
 -export([ set_debug_secret/1
         ]).
%% Configs APIs
-export([ get_config/1
, get_config/2
, get_raw_config/1
, get_raw_config/2
, update_config/2
, update_config/3
, remove_config/1
, remove_config/2
, reset_config/2
]).
 -define(APP, ?MODULE).

 %% @hidden Path to the file which has debug_info encryption secret in it.

@@ -184,3 +196,53 @@ run_hook(HookPoint, Args) ->
 -spec(run_fold_hook(emqx_hooks:hookpoint(), list(any()), any()) -> any()).
 run_fold_hook(HookPoint, Args, Acc) ->
     emqx_hooks:run_fold(HookPoint, Args, Acc).
-spec get_config(emqx_map_lib:config_key_path()) -> term().
get_config(KeyPath) ->
emqx_config:get(KeyPath).
-spec get_config(emqx_map_lib:config_key_path(), term()) -> term().
get_config(KeyPath, Default) ->
emqx_config:get(KeyPath, Default).
-spec get_raw_config(emqx_map_lib:config_key_path()) -> term().
get_raw_config(KeyPath) ->
emqx_config:get_raw(KeyPath).
-spec get_raw_config(emqx_map_lib:config_key_path(), term()) -> term().
get_raw_config(KeyPath, Default) ->
emqx_config:get_raw(KeyPath, Default).
-spec update_config(emqx_map_lib:config_key_path(), emqx_config:update_request()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(KeyPath, UpdateReq) ->
update_config(KeyPath, UpdateReq, #{}).
-spec update_config(emqx_map_lib:config_key_path(), emqx_config:update_request(),
emqx_config:update_opts()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config([RootName | _] = KeyPath, UpdateReq, Opts) ->
emqx_config_handler:update_config(emqx_config:get_schema_mod(RootName), KeyPath,
{{update, UpdateReq}, Opts}).
-spec remove_config(emqx_map_lib:config_key_path()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
remove_config(KeyPath) ->
remove_config(KeyPath, #{}).
-spec remove_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
remove_config([RootName | _] = KeyPath, Opts) ->
emqx_config_handler:update_config(emqx_config:get_schema_mod(RootName),
KeyPath, {remove, Opts}).
-spec reset_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
reset_config([RootName | _] = KeyPath, Opts) ->
case emqx_config:get_default_value(KeyPath) of
{ok, Default} ->
emqx_config_handler:update_config(emqx_config:get_schema_mod(RootName), KeyPath,
{{update, Default}, Opts});
{error, _} = Error ->
Error
end.
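The exports above make the emqx module the public entry point for configuration reads and updates. A hedged sketch of exercising the new API against a running node via the emqx eval console helper used elsewhere in this diff (the [alarm, validity_period] key is taken from emqx_alarm.erl below; adjust the release path to your install):

    # read a config value through the new emqx:get_config/1 wrapper
    ./emqx/bin/emqx eval "emqx:get_config([alarm, validity_period])."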

View File

@@ -27,30 +27,36 @@
 %%--------------------------------------------------------------------
 -spec(authenticate(emqx_types:clientinfo()) ->
-    ok | {ok, binary()} | {continue, map()} | {continue, binary(), map()} | {error, term()}).
+    {ok, map()} | {ok, map(), binary()} | {continue, map()} | {continue, binary(), map()} | {error, term()}).
 authenticate(Credential) ->
-    run_hooks('client.authenticate', [Credential], ok).
+    case run_hooks('client.authenticate', [Credential], {ok, #{is_superuser => false}}) of
+        ok ->
+            {ok, #{is_superuser => false}};
+        Other ->
+            Other
+    end.

 %% @doc Check Authorization
 -spec authorize(emqx_types:clientinfo(), emqx_types:pubsub(), emqx_types:topic())
       -> allow | deny.
-authorize(ClientInfo = #{zone := Zone}, PubSub, Topic) ->
-    case emqx_authz_cache:is_enabled(Zone) of
+authorize(ClientInfo, PubSub, Topic) ->
+    case emqx_authz_cache:is_enabled() of
         true -> check_authorization_cache(ClientInfo, PubSub, Topic);
         false -> do_authorize(ClientInfo, PubSub, Topic)
     end.

-check_authorization_cache(ClientInfo = #{zone := Zone}, PubSub, Topic) ->
-    case emqx_authz_cache:get_authz_cache(Zone, PubSub, Topic) of
+check_authorization_cache(ClientInfo, PubSub, Topic) ->
+    case emqx_authz_cache:get_authz_cache(PubSub, Topic) of
         not_found ->
             AuthzResult = do_authorize(ClientInfo, PubSub, Topic),
-            emqx_authz_cache:put_authz_cache(Zone, PubSub, Topic, AuthzResult),
+            emqx_authz_cache:put_authz_cache(PubSub, Topic, AuthzResult),
             AuthzResult;
         AuthzResult -> AuthzResult
     end.

 do_authorize(ClientInfo, PubSub, Topic) ->
-    case run_hooks('client.authorize', [ClientInfo, PubSub, Topic], allow) of
+    NoMatch = emqx:get_config([authorization, no_match], allow),
+    case run_hooks('client.authorize', [ClientInfo, PubSub, Topic], NoMatch) of
         allow -> allow;
         _Other -> deny
     end.
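Because the fallback verdict of do_authorize/3 is now read from configuration ([authorization, no_match], defaulting to allow), deployments can flip it without code changes; an assumed example using the same environment-override convention as the CI scripts above:

    # deny any publish/subscribe that no authorizer explicitly matched
    export EMQX_AUTHORIZATION__NO_MATCH=deny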

View File

@ -28,7 +28,7 @@
-boot_mnesia({mnesia, [boot]}). -boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}). -copy_mnesia({mnesia, [copy]}).
-export([pre_config_update/2]). -export([post_config_update/4]).
-export([ start_link/0 -export([ start_link/0
, stop/0 , stop/0
@ -85,9 +85,6 @@
-define(DEACTIVATED_ALARM, emqx_deactivated_alarm). -define(DEACTIVATED_ALARM, emqx_deactivated_alarm).
-rlog_shard({?COMMON_SHARD, ?ACTIVATED_ALARM}).
-rlog_shard({?COMMON_SHARD, ?DEACTIVATED_ALARM}).
-ifdef(TEST). -ifdef(TEST).
-compile(export_all). -compile(export_all).
-compile(nowarn_export_all). -compile(nowarn_export_all).
@ -151,14 +148,9 @@ get_alarms(activated) ->
get_alarms(deactivated) -> get_alarms(deactivated) ->
gen_server:call(?MODULE, {get_alarms, deactivated}). gen_server:call(?MODULE, {get_alarms, deactivated}).
pre_config_update(#{<<"validity_period">> := Period0} = NewConf, OldConf) -> post_config_update(_, #{validity_period := Period0}, _OldConf, _AppEnv) ->
?MODULE ! {update_timer, hocon_postprocess:duration(Period0)}, ?MODULE ! {update_timer, Period0},
merge(OldConf, NewConf); ok.
pre_config_update(NewConf, OldConf) ->
merge(OldConf, NewConf).
merge(undefined, New) -> New;
merge(Old, New) -> maps:merge(Old, New).
format(#activated_alarm{name = Name, message = Message, activate_at = At, details = Details}) -> format(#activated_alarm{name = Name, message = Message, activate_at = At, details = Details}) ->
Now = erlang:system_time(microsecond), Now = erlang:system_time(microsecond),
@ -166,7 +158,8 @@ format(#activated_alarm{name = Name, message = Message, activate_at = At, detail
node => node(), node => node(),
name => Name, name => Name,
message => Message, message => Message,
duration => Now - At, duration => (Now - At) div 1000, %% to millisecond
activate_at => to_rfc3339(At),
details => Details details => Details
}; };
format(#deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details, format(#deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details,
@ -176,18 +169,23 @@ format(#deactivated_alarm{name = Name, message = Message, activate_at = At, deta
name => Name, name => Name,
message => Message, message => Message,
duration => DAt - At, duration => DAt - At,
activate_at => to_rfc3339(At),
deactivate_at => to_rfc3339(DAt),
details => Details details => Details
}; };
format(_) -> format(_) ->
{error, unknow_alarm}. {error, unknow_alarm}.
to_rfc3339(Timestamp) ->
list_to_binary(calendar:system_time_to_rfc3339(Timestamp div 1000, [{unit, millisecond}])).
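Alarm timestamps are kept in microseconds; to_rfc3339/1 truncates to milliseconds before rendering, matching the duration conversion above. A quick sketch of the conversion (values illustrative):

At = erlang:system_time(microsecond),
Rfc3339 = list_to_binary(
            calendar:system_time_to_rfc3339(At div 1000, [{unit, millisecond}])).
%% e.g. <<"2021-09-09T23:38:38.123+00:00">>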
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% gen_server callbacks %% gen_server callbacks
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init([]) -> init([]) ->
deactivate_all_alarms(), deactivate_all_alarms(),
emqx_config_handler:add_handler([alarm], ?MODULE), ok = emqx_config_handler:add_handler([alarm], ?MODULE),
{ok, #state{timer = ensure_timer(undefined, get_validity_period())}}. {ok, #state{timer = ensure_timer(undefined, get_validity_period())}}.
%% suppress dialyzer warning due to dirty read/write race condition. %% suppress dialyzer warning due to dirty read/write race condition.
@ -204,7 +202,7 @@ handle_call({activate_alarm, Name, Details}, _From, State) ->
message = normalize_message(Name, Details), message = normalize_message(Name, Details),
activate_at = erlang:system_time(microsecond)}, activate_at = erlang:system_time(microsecond)},
ekka_mnesia:dirty_write(?ACTIVATED_ALARM, Alarm), ekka_mnesia:dirty_write(?ACTIVATED_ALARM, Alarm),
do_actions(activate, Alarm, emqx_config:get([alarm, actions])), do_actions(activate, Alarm, emqx:get_config([alarm, actions])),
{reply, ok, State} {reply, ok, State}
end; end;
@ -263,6 +261,7 @@ handle_info(Info, State) ->
{noreply, State}. {noreply, State}.
terminate(_Reason, _State) -> terminate(_Reason, _State) ->
ok = emqx_config_handler:remove_handler([alarm]),
ok. ok.
code_change(_OldVsn, State, _Extra) -> code_change(_OldVsn, State, _Extra) ->
@ -273,11 +272,11 @@ code_change(_OldVsn, State, _Extra) ->
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
get_validity_period() -> get_validity_period() ->
emqx_config:get([alarm, validity_period]). emqx:get_config([alarm, validity_period]).
deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name, deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name,
details = Details0, message = Msg0}) -> details = Details0, message = Msg0}) ->
SizeLimit = emqx_config:get([alarm, size_limit]), SizeLimit = emqx:get_config([alarm, size_limit]),
case SizeLimit > 0 andalso (mnesia:table_info(?DEACTIVATED_ALARM, size) >= SizeLimit) of case SizeLimit > 0 andalso (mnesia:table_info(?DEACTIVATED_ALARM, size) >= SizeLimit) of
true -> true ->
case mnesia:dirty_first(?DEACTIVATED_ALARM) of case mnesia:dirty_first(?DEACTIVATED_ALARM) of
@ -294,7 +293,7 @@ deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name
erlang:system_time(microsecond)), erlang:system_time(microsecond)),
ekka_mnesia:dirty_write(?DEACTIVATED_ALARM, HistoryAlarm), ekka_mnesia:dirty_write(?DEACTIVATED_ALARM, HistoryAlarm),
ekka_mnesia:dirty_delete(?ACTIVATED_ALARM, Name), ekka_mnesia:dirty_delete(?ACTIVATED_ALARM, Name),
do_actions(deactivate, DeActAlarm, emqx_config:get([alarm, actions])). do_actions(deactivate, DeActAlarm, emqx:get_config([alarm, actions])).
make_deactivated_alarm(ActivateAt, Name, Details, Message, DeActivateAt) -> make_deactivated_alarm(ActivateAt, Name, Details, Message, DeActivateAt) ->
#deactivated_alarm{ #deactivated_alarm{

View File

@ -0,0 +1,735 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authentication).
-behaviour(gen_server).
-behaviour(hocon_schema).
-behaviour(emqx_config_handler).
-include("emqx.hrl").
-include("logger.hrl").
-export([ roots/0
, fields/1
]).
-export([ pre_config_update/2
, post_config_update/4
]).
-export([ authenticate/2
]).
-export([ initialize_authentication/2 ]).
-export([ start_link/0
, stop/0
]).
-export([ add_provider/2
, remove_provider/1
, create_chain/1
, delete_chain/1
, lookup_chain/1
, list_chains/0
, create_authenticator/2
, delete_authenticator/2
, update_authenticator/3
, lookup_authenticator/2
, list_authenticators/1
, move_authenticator/3
]).
-export([ import_users/3
, add_user/3
, delete_user/3
, update_user/4
, lookup_user/3
, list_users/2
]).
-export([ generate_id/1 ]).
%% gen_server callbacks
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-define(CHAINS_TAB, emqx_authn_chains).
-define(VER_1, <<"1">>).
-define(VER_2, <<"2">>).
-type config() :: #{atom() => term()}.
-type state() :: #{atom() => term()}.
-type extra() :: #{is_superuser := boolean(),
atom() => term()}.
-type user_info() :: #{user_id := binary(),
atom() => term()}.
-callback refs() -> [{ref, Module, Name}] when Module::module(), Name::atom().
-callback create(Config)
-> {ok, State}
| {error, term()}
when Config::config(), State::state().
-callback update(Config, State)
-> {ok, NewState}
| {error, term()}
when Config::config(), State::state(), NewState::state().
-callback authenticate(Credential, State)
-> ignore
| {ok, Extra}
| {ok, Extra, AuthData}
| {continue, AuthCache}
| {continue, AuthData, AuthCache}
| {error, term()}
when Credential::map(), State::state(), Extra::extra(), AuthData::binary(), AuthCache::map().
-callback destroy(State)
-> ok
when State::state().
-callback import_users(Filename, State)
-> ok
| {error, term()}
when Filename::binary(), State::state().
-callback add_user(UserInfo, State)
-> {ok, User}
| {error, term()}
when UserInfo::user_info(), State::state(), User::user_info().
-callback delete_user(UserID, State)
-> ok
| {error, term()}
when UserID::binary(), State::state().
-callback update_user(UserID, UserInfo, State)
-> {ok, User}
| {error, term()}
when UserID::binary(), UserInfo::map(), State::state(), User::user_info().
-callback list_users(State)
-> {ok, Users}
when State::state(), Users::[user_info()].
-optional_callbacks([ import_users/2
, add_user/2
, delete_user/2
, update_user/3
, list_users/1
]).
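A minimal provider sketch implementing only the mandatory callbacks of this behaviour (module name, ref name, and the credential shape are illustrative; a real provider would also describe its config in a hocon schema):

-module(my_dummy_authn).   %% hypothetical provider, for illustration only
-behaviour(emqx_authentication).

-export([refs/0, create/1, update/2, authenticate/2, destroy/1]).

refs() ->
    [{ref, ?MODULE, config}].

create(Config) ->
    {ok, #{config => Config}}.

update(Config, _State) ->
    {ok, #{config => Config}}.

%% Grant superuser to a hard-coded username (illustrative); defer to the
%% next authenticator in the chain otherwise.
authenticate(#{username := <<"superuser">>}, _State) ->
    {ok, #{is_superuser => true}};
authenticate(_Credential, _State) ->
    ignore.

destroy(_State) ->
    ok.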
%%------------------------------------------------------------------------------
%% Hocon Schema
%%------------------------------------------------------------------------------
roots() -> [{authentication, fun authentication/1}].
fields(_) -> [].
authentication(type) ->
{ok, Refs} = get_refs(),
hoconsc:union([hoconsc:array(hoconsc:union(Refs)) | Refs]);
authentication(default) -> [];
authentication(_) -> undefined.
%%------------------------------------------------------------------------------
%% Callbacks of config handler
%%------------------------------------------------------------------------------
pre_config_update(UpdateReq, OldConfig) ->
case do_pre_config_update(UpdateReq, to_list(OldConfig)) of
{error, Reason} -> {error, Reason};
{ok, NewConfig} -> {ok, may_to_map(NewConfig)}
end.
do_pre_config_update({create_authenticator, _ChainName, Config}, OldConfig) ->
{ok, OldConfig ++ [Config]};
do_pre_config_update({delete_authenticator, _ChainName, AuthenticatorID}, OldConfig) ->
NewConfig = lists:filter(fun(OldConfig0) ->
AuthenticatorID =/= generate_id(OldConfig0)
end, OldConfig),
{ok, NewConfig};
do_pre_config_update({update_authenticator, _ChainName, AuthenticatorID, Config}, OldConfig) ->
NewConfig = lists:map(fun(OldConfig0) ->
case AuthenticatorID =:= generate_id(OldConfig0) of
true -> maps:merge(OldConfig0, Config);
false -> OldConfig0
end
end, OldConfig),
{ok, NewConfig};
do_pre_config_update({move_authenticator, _ChainName, AuthenticatorID, Position}, OldConfig) ->
case split_by_id(AuthenticatorID, OldConfig) of
{error, Reason} -> {error, Reason};
{ok, Part1, [Found | Part2]} ->
case Position of
<<"top">> ->
{ok, [Found | Part1] ++ Part2};
<<"bottom">> ->
{ok, Part1 ++ Part2 ++ [Found]};
<<"before:", Before/binary>> ->
case split_by_id(Before, Part1 ++ Part2) of
{error, Reason} ->
{error, Reason};
{ok, NPart1, [NFound | NPart2]} ->
{ok, NPart1 ++ [Found, NFound | NPart2]}
end;
_ ->
{error, {invalid_parameter, position}}
end
end.
post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) ->
do_post_config_update(UpdateReq, check_config(to_list(NewConfig)), OldConfig, AppEnvs).
do_post_config_update({create_authenticator, ChainName, Config}, _NewConfig, _OldConfig, _AppEnvs) ->
NConfig = check_config(Config),
_ = create_chain(ChainName),
create_authenticator(ChainName, NConfig);
do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, _OldConfig, _AppEnvs) ->
delete_authenticator(ChainName, AuthenticatorID);
do_post_config_update({update_authenticator, ChainName, AuthenticatorID, _Config}, NewConfig, _OldConfig, _AppEnvs) ->
[Config] = lists:filter(fun(NewConfig0) ->
AuthenticatorID =:= generate_id(NewConfig0)
end, NewConfig),
NConfig = check_config(Config),
update_authenticator(ChainName, AuthenticatorID, NConfig);
do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}, _NewConfig, _OldConfig, _AppEnvs) ->
NPosition = case Position of
<<"top">> -> top;
<<"bottom">> -> bottom;
<<"before:", Before/binary>> ->
{before, Before}
end,
move_authenticator(ChainName, AuthenticatorID, NPosition).
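For example (chain name and authenticator IDs are illustrative), a config update carrying the binary position <<"before:jwt">> ends up as the internal form passed to move_authenticator/3:

ok = emqx_authentication:move_authenticator('mqtt:global',
                                            <<"password-based:built-in-database">>,
                                            {before, <<"jwt">>}).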
check_config(Config) ->
#{authentication := CheckedConfig} = hocon_schema:check_plain(emqx_authentication,
#{<<"authentication">> => Config}, #{nullable => true, atom_key => true}),
CheckedConfig.
%%------------------------------------------------------------------------------
%% Authenticate
%%------------------------------------------------------------------------------
authenticate(#{listener := Listener, protocol := Protocol} = Credential, _AuthResult) ->
case ets:lookup(?CHAINS_TAB, Listener) of
[#chain{authenticators = Authenticators}] when Authenticators =/= [] ->
do_authenticate(Authenticators, Credential);
_ ->
case ets:lookup(?CHAINS_TAB, global_chain(Protocol)) of
[#chain{authenticators = Authenticators}] when Authenticators =/= [] ->
do_authenticate(Authenticators, Credential);
_ ->
ignore
end
end.
do_authenticate([], _) ->
{stop, {error, not_authorized}};
do_authenticate([#authenticator{provider = Provider, state = State} | More], Credential) ->
case Provider:authenticate(Credential, State) of
ignore ->
do_authenticate(More, Credential);
Result ->
%% {ok, Extra}
%% {ok, Extra, AuthData}
%% {continue, AuthCache}
%% {continue, AuthData, AuthCache}
%% {error, Reason}
{stop, Result}
end.
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
initialize_authentication(_, []) ->
ok;
initialize_authentication(ChainName, AuthenticatorsConfig) ->
_ = create_chain(ChainName),
CheckedConfig = check_config(to_list(AuthenticatorsConfig)),
lists:foreach(fun(AuthenticatorConfig) ->
case create_authenticator(ChainName, AuthenticatorConfig) of
{ok, _} ->
ok;
{error, Reason} ->
?LOG(error, "Failed to create authenticator '~s': ~p", [generate_id(AuthenticatorConfig), Reason])
end
end, CheckedConfig).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
stop() ->
gen_server:stop(?MODULE).
get_refs() ->
gen_server:call(?MODULE, get_refs).
add_provider(AuthNType, Provider) ->
gen_server:call(?MODULE, {add_provider, AuthNType, Provider}).
remove_provider(AuthNType) ->
gen_server:call(?MODULE, {remove_provider, AuthNType}).
create_chain(Name) ->
gen_server:call(?MODULE, {create_chain, Name}).
delete_chain(Name) ->
gen_server:call(?MODULE, {delete_chain, Name}).
lookup_chain(Name) ->
gen_server:call(?MODULE, {lookup_chain, Name}).
list_chains() ->
Chains = ets:tab2list(?CHAINS_TAB),
{ok, [serialize_chain(Chain) || Chain <- Chains]}.
create_authenticator(ChainName, Config) ->
gen_server:call(?MODULE, {create_authenticator, ChainName, Config}).
delete_authenticator(ChainName, AuthenticatorID) ->
gen_server:call(?MODULE, {delete_authenticator, ChainName, AuthenticatorID}).
update_authenticator(ChainName, AuthenticatorID, Config) ->
gen_server:call(?MODULE, {update_authenticator, ChainName, AuthenticatorID, Config}).
lookup_authenticator(ChainName, AuthenticatorID) ->
case ets:lookup(?CHAINS_TAB, ChainName) of
[] ->
{error, {not_found, {chain, ChainName}}};
[#chain{authenticators = Authenticators}] ->
case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
Authenticator ->
{ok, serialize_authenticator(Authenticator)}
end
end.
list_authenticators(ChainName) ->
case ets:lookup(?CHAINS_TAB, ChainName) of
[] ->
{error, {not_found, {chain, ChainName}}};
[#chain{authenticators = Authenticators}] ->
{ok, serialize_authenticators(Authenticators)}
end.
move_authenticator(ChainName, AuthenticatorID, Position) ->
gen_server:call(?MODULE, {move_authenticator, ChainName, AuthenticatorID, Position}).
import_users(ChainName, AuthenticatorID, Filename) ->
gen_server:call(?MODULE, {import_users, ChainName, AuthenticatorID, Filename}).
add_user(ChainName, AuthenticatorID, UserInfo) ->
gen_server:call(?MODULE, {add_user, ChainName, AuthenticatorID, UserInfo}).
delete_user(ChainName, AuthenticatorID, UserID) ->
gen_server:call(?MODULE, {delete_user, ChainName, AuthenticatorID, UserID}).
update_user(ChainName, AuthenticatorID, UserID, NewUserInfo) ->
gen_server:call(?MODULE, {update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}).
lookup_user(ChainName, AuthenticatorID, UserID) ->
gen_server:call(?MODULE, {lookup_user, ChainName, AuthenticatorID, UserID}).
%% TODO: Support pagination
list_users(ChainName, AuthenticatorID) ->
gen_server:call(?MODULE, {list_users, ChainName, AuthenticatorID}).
generate_id(#{mechanism := Mechanism0, backend := Backend0}) ->
Mechanism = atom_to_binary(Mechanism0),
Backend = atom_to_binary(Backend0),
<<Mechanism/binary, ":", Backend/binary>>;
generate_id(#{mechanism := Mechanism}) ->
atom_to_binary(Mechanism);
generate_id(#{<<"mechanism">> := Mechanism, <<"backend">> := Backend}) ->
<<Mechanism/binary, ":", Backend/binary>>;
generate_id(#{<<"mechanism">> := Mechanism}) ->
Mechanism.
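For instance (mechanism and backend names are illustrative):

<<"password-based:http">> = emqx_authentication:generate_id(
                              #{mechanism => 'password-based', backend => http}),
<<"jwt">> = emqx_authentication:generate_id(#{mechanism => jwt}).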
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------
init(_Opts) ->
_ = ets:new(?CHAINS_TAB, [ named_table, set, public
, {keypos, #chain.name}
, {read_concurrency, true}]),
ok = emqx_config_handler:add_handler([authentication], ?MODULE),
ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE),
{ok, #{hooked => false, providers => #{}}}.
handle_call({add_provider, AuthNType, Provider}, _From, #{providers := Providers} = State) ->
reply(ok, State#{providers := Providers#{AuthNType => Provider}});
handle_call({remove_provider, AuthNType}, _From, #{providers := Providers} = State) ->
reply(ok, State#{providers := maps:remove(AuthNType, Providers)});
handle_call(get_refs, _From, #{providers := Providers} = State) ->
Refs = lists:foldl(fun({_, Provider}, Acc) ->
Acc ++ Provider:refs()
end, [], maps:to_list(Providers)),
reply({ok, Refs}, State);
handle_call({create_chain, Name}, _From, State) ->
case ets:member(?CHAINS_TAB, Name) of
true ->
reply({error, {already_exists, {chain, Name}}}, State);
false ->
Chain = #chain{name = Name,
authenticators = []},
true = ets:insert(?CHAINS_TAB, Chain),
reply({ok, serialize_chain(Chain)}, State)
end;
handle_call({delete_chain, Name}, _From, State) ->
case ets:lookup(?CHAINS_TAB, Name) of
[] ->
reply({error, {not_found, {chain, Name}}}, State);
[#chain{authenticators = Authenticators}] ->
_ = [do_delete_authenticator(Authenticator) || Authenticator <- Authenticators],
true = ets:delete(?CHAINS_TAB, Name),
reply(ok, may_unhook(State))
end;
handle_call({lookup_chain, Name}, _From, State) ->
case ets:lookup(?CHAINS_TAB, Name) of
[] ->
reply({error, {not_found, {chain, Name}}}, State);
[Chain] ->
reply({ok, serialize_chain(Chain)}, State)
end;
handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Providers} = State) ->
UpdateFun =
fun(#chain{authenticators = Authenticators} = Chain) ->
AuthenticatorID = generate_id(Config),
case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of
true ->
{error, {already_exists, {authenticator, AuthenticatorID}}};
false ->
case do_create_authenticator(ChainName, AuthenticatorID, Config, Providers) of
{ok, Authenticator} ->
NAuthenticators = Authenticators ++ [Authenticator],
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
{ok, serialize_authenticator(Authenticator)};
{error, Reason} ->
{error, Reason}
end
end
end,
Reply = update_chain(ChainName, UpdateFun),
reply(Reply, may_hook(State));
handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) ->
UpdateFun =
fun(#chain{authenticators = Authenticators} = Chain) ->
case lists:keytake(AuthenticatorID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
{value, Authenticator, NAuthenticators} ->
_ = do_delete_authenticator(Authenticator),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
ok
end
end,
Reply = update_chain(ChainName, UpdateFun),
reply(Reply, may_unhook(State));
handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, State) ->
UpdateFun =
fun(#chain{authenticators = Authenticators} = Chain) ->
case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
#authenticator{provider = Provider,
state = #{version := Version} = ST} = Authenticator ->
case AuthenticatorID =:= generate_id(Config) of
true ->
Unique = unique(ChainName, AuthenticatorID, Version),
case Provider:update(Config#{'_unique' => Unique}, ST) of
{ok, NewST} ->
NewAuthenticator = Authenticator#authenticator{state = switch_version(NewST)},
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NewAuthenticators}),
{ok, serialize_authenticator(NewAuthenticator)};
{error, Reason} ->
{error, Reason}
end;
false ->
{error, mechanism_or_backend_change_is_not_allowed}
end
end
end,
Reply = update_chain(ChainName, UpdateFun),
reply(Reply, State);
handle_call({move_authenticator, ChainName, AuthenticatorID, Position}, _From, State) ->
UpdateFun =
fun(#chain{authenticators = Authenticators} = Chain) ->
case do_move_authenticator(AuthenticatorID, Authenticators, Position) of
{ok, NAuthenticators} ->
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
ok;
{error, Reason} ->
{error, Reason}
end
end,
Reply = update_chain(ChainName, UpdateFun),
reply(Reply, State);
handle_call({import_users, ChainName, AuthenticatorID, Filename}, _From, State) ->
Reply = call_authenticator(ChainName, AuthenticatorID, import_users, [Filename]),
reply(Reply, State);
handle_call({add_user, ChainName, AuthenticatorID, UserInfo}, _From, State) ->
Reply = call_authenticator(ChainName, AuthenticatorID, add_user, [UserInfo]),
reply(Reply, State);
handle_call({delete_user, ChainName, AuthenticatorID, UserID}, _From, State) ->
Reply = call_authenticator(ChainName, AuthenticatorID, delete_user, [UserID]),
reply(Reply, State);
handle_call({update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}, _From, State) ->
Reply = call_authenticator(ChainName, AuthenticatorID, update_user, [UserID, NewUserInfo]),
reply(Reply, State);
handle_call({lookup_user, ChainName, AuthenticatorID, UserID}, _From, State) ->
Reply = call_authenticator(ChainName, AuthenticatorID, lookup_user, [UserID]),
reply(Reply, State);
handle_call({list_users, ChainName, AuthenticatorID}, _From, State) ->
Reply = call_authenticator(ChainName, AuthenticatorID, list_users, []),
reply(Reply, State);
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ignored, State}.
handle_cast(Req, State) ->
?LOG(error, "Unexpected case: ~p", [Req]),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, _State) ->
emqx_config_handler:remove_handler([authentication]),
emqx_config_handler:remove_handler([listeners, '?', '?', authentication]),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
reply(Reply, State) ->
{reply, Reply, State}.
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
split_by_id(ID, AuthenticatorsConfig) ->
case lists:foldl(
fun(C, {P1, P2, F0}) ->
F = case ID =:= generate_id(C) of
true -> true;
false -> F0
end,
case F of
false -> {[C | P1], P2, F};
true -> {P1, [C | P2], F}
end
end, {[], [], false}, AuthenticatorsConfig) of
{_, _, false} ->
{error, {not_found, {authenticator, ID}}};
{Part1, Part2, true} ->
{ok, lists:reverse(Part1), lists:reverse(Part2)}
end.
global_chain(mqtt) ->
'mqtt:global';
global_chain('mqtt-sn') ->
'mqtt-sn:global';
global_chain(coap) ->
'coap:global';
global_chain(lwm2m) ->
'lwm2m:global';
global_chain(stomp) ->
'stomp:global';
global_chain(_) ->
'unknown:global'.
may_hook(#{hooked := false} = State) ->
case lists:any(fun(#chain{authenticators = []}) -> false;
(_) -> true
end, ets:tab2list(?CHAINS_TAB)) of
true ->
_ = emqx:hook('client.authenticate', {emqx_authentication, authenticate, []}),
State#{hooked => true};
false ->
State
end;
may_hook(State) ->
State.
may_unhook(#{hooked := true} = State) ->
case lists:all(fun(#chain{authenticators = []}) -> true;
(_) -> false
end, ets:tab2list(?CHAINS_TAB)) of
true ->
_ = emqx:unhook('client.authenticate', {emqx_authentication, authenticate, []}),
State#{hooked => false};
false ->
State
end;
may_unhook(State) ->
State.
do_create_authenticator(ChainName, AuthenticatorID, #{enable := Enable} = Config, Providers) ->
case maps:get(authn_type(Config), Providers, undefined) of
undefined ->
{error, no_available_provider};
Provider ->
Unique = unique(ChainName, AuthenticatorID, ?VER_1),
case Provider:create(Config#{'_unique' => Unique}) of
{ok, State} ->
Authenticator = #authenticator{id = AuthenticatorID,
provider = Provider,
enable = Enable,
state = switch_version(State)},
{ok, Authenticator};
{error, Reason} ->
{error, Reason}
end
end.
do_delete_authenticator(#authenticator{provider = Provider, state = State}) ->
_ = Provider:destroy(State),
ok.
replace_authenticator(ID, Authenticator, Authenticators) ->
lists:keyreplace(ID, #authenticator.id, Authenticators, Authenticator).
do_move_authenticator(ID, Authenticators, Position) ->
case lists:keytake(ID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, ID}}};
{value, Authenticator, NAuthenticators} ->
case Position of
top ->
{ok, [Authenticator | NAuthenticators]};
bottom ->
{ok, NAuthenticators ++ [Authenticator]};
{before, ID0} ->
insert(Authenticator, NAuthenticators, ID0, [])
end
end.
insert(_, [], ID, _) ->
{error, {not_found, {authenticator, ID}}};
insert(Authenticator, [#authenticator{id = ID} | _] = Authenticators, ID, Acc) ->
{ok, lists:reverse(Acc) ++ [Authenticator | Authenticators]};
insert(Authenticator, [Authenticator0 | More], ID, Acc) ->
insert(Authenticator, More, ID, [Authenticator0 | Acc]).
update_chain(ChainName, UpdateFun) ->
case ets:lookup(?CHAINS_TAB, ChainName) of
[] ->
{error, {not_found, {chain, ChainName}}};
[Chain] ->
UpdateFun(Chain)
end.
call_authenticator(ChainName, AuthenticatorID, Func, Args) ->
UpdateFun =
fun(#chain{authenticators = Authenticators}) ->
case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
#authenticator{provider = Provider, state = State} ->
case erlang:function_exported(Provider, Func, length(Args) + 1) of
true ->
erlang:apply(Provider, Func, Args ++ [State]);
false ->
{error, unsupported_feature}
end
end
end,
update_chain(ChainName, UpdateFun).
serialize_chain(#chain{name = Name,
authenticators = Authenticators}) ->
#{ name => Name
, authenticators => serialize_authenticators(Authenticators)
}.
serialize_authenticators(Authenticators) ->
[serialize_authenticator(Authenticator) || Authenticator <- Authenticators].
serialize_authenticator(#authenticator{id = ID,
provider = Provider,
enable = Enable,
state = State}) ->
#{ id => ID
, provider => Provider
, enable => Enable
, state => State
}.
unique(ChainName, AuthenticatorID, Version) ->
NChainName = atom_to_binary(ChainName),
<<NChainName/binary, "/", AuthenticatorID/binary, ":", Version/binary>>.
switch_version(State = #{version := ?VER_1}) ->
State#{version := ?VER_2};
switch_version(State = #{version := ?VER_2}) ->
State#{version := ?VER_1};
switch_version(State) ->
State#{version => ?VER_1}.
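So, for a chain 'mqtt:global' and an authenticator <<"jwt">> (names illustrative), the '_unique' value handed to the provider is <<"mqtt:global/jwt:1">> on creation, and switch_version/1 flips the state's version suffix between "1" and "2" on each successful update:

<<"mqtt:global/jwt:1">> = unique('mqtt:global', <<"jwt">>, ?VER_1),
#{version := ?VER_2} = switch_version(#{version => ?VER_1}).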
authn_type(#{mechanism := Mechanism, backend := Backend}) ->
{Mechanism, Backend};
authn_type(#{mechanism := Mechanism}) ->
Mechanism.
may_to_map([L]) ->
L;
may_to_map(L) ->
L.
to_list(undefined) ->
[];
to_list(M) when M =:= #{} ->
[];
to_list(M) when is_map(M) ->
[M];
to_list(L) when is_list(L) ->
L.

View File

@ -18,15 +18,15 @@
-include("emqx.hrl"). -include("emqx.hrl").
-export([ list_authz_cache/1 -export([ list_authz_cache/0
, get_authz_cache/3 , get_authz_cache/2
, put_authz_cache/4 , put_authz_cache/3
, cleanup_authz_cache/1 , cleanup_authz_cache/0
, empty_authz_cache/0 , empty_authz_cache/0
, dump_authz_cache/0 , dump_authz_cache/0
, get_cache_max_size/1 , get_cache_max_size/0
, get_cache_ttl/1 , get_cache_ttl/0
, is_enabled/1 , is_enabled/0
, drain_cache/0 , drain_cache/0
]). ]).
@ -50,45 +50,45 @@ cache_k(PubSub, Topic)-> {PubSub, Topic}.
cache_v(AuthzResult)-> {AuthzResult, time_now()}. cache_v(AuthzResult)-> {AuthzResult, time_now()}.
drain_k() -> {?MODULE, drain_timestamp}. drain_k() -> {?MODULE, drain_timestamp}.
-spec(is_enabled(atom()) -> boolean()). -spec(is_enabled() -> boolean()).
is_enabled(Zone) -> is_enabled() ->
emqx_config:get_zone_conf(Zone, [authorization, cache, enable]). emqx:get_config([authorization, cache, enable], false).
-spec(get_cache_max_size(atom()) -> integer()). -spec(get_cache_max_size() -> integer()).
get_cache_max_size(Zone) -> get_cache_max_size() ->
emqx_config:get_zone_conf(Zone, [authorization, cache, max_size]). emqx:get_config([authorization, cache, max_size]).
-spec(get_cache_ttl(atom()) -> integer()). -spec(get_cache_ttl() -> integer()).
get_cache_ttl(Zone) -> get_cache_ttl() ->
emqx_config:get_zone_conf(Zone, [authorization, cache, ttl]). emqx:get_config([authorization, cache, ttl]).
-spec(list_authz_cache(atom()) -> [authz_cache_entry()]). -spec(list_authz_cache() -> [authz_cache_entry()]).
list_authz_cache(Zone) -> list_authz_cache() ->
cleanup_authz_cache(Zone), cleanup_authz_cache(),
map_authz_cache(fun(Cache) -> Cache end). map_authz_cache(fun(Cache) -> Cache end).
%% We'll cleanup the cache before replacing an expired authz. %% We'll cleanup the cache before replacing an expired authz.
-spec get_authz_cache(atom(), emqx_types:pubsub(), emqx_topic:topic()) -> -spec get_authz_cache(emqx_types:pubsub(), emqx_topic:topic()) ->
authz_result() | not_found. authz_result() | not_found.
get_authz_cache(Zone, PubSub, Topic) -> get_authz_cache(PubSub, Topic) ->
case erlang:get(cache_k(PubSub, Topic)) of case erlang:get(cache_k(PubSub, Topic)) of
undefined -> not_found; undefined -> not_found;
{AuthzResult, CachedAt} -> {AuthzResult, CachedAt} ->
if_expired(get_cache_ttl(Zone), CachedAt, if_expired(get_cache_ttl(), CachedAt,
fun(false) -> fun(false) ->
AuthzResult; AuthzResult;
(true) -> (true) ->
cleanup_authz_cache(Zone), cleanup_authz_cache(),
not_found not_found
end) end)
end. end.
%% If the cache gets full, and also the latest one %% If the cache gets full, and also the latest one
%% is expired, then delete all the cache entries %% is expired, then delete all the cache entries
-spec put_authz_cache(atom(), emqx_types:pubsub(), emqx_topic:topic(), authz_result()) -spec put_authz_cache(emqx_types:pubsub(), emqx_topic:topic(), authz_result())
-> ok. -> ok.
put_authz_cache(Zone, PubSub, Topic, AuthzResult) -> put_authz_cache(PubSub, Topic, AuthzResult) ->
MaxSize = get_cache_max_size(Zone), true = (MaxSize =/= 0), MaxSize = get_cache_max_size(), true = (MaxSize =/= 0),
Size = get_cache_size(), Size = get_cache_size(),
case Size < MaxSize of case Size < MaxSize of
true -> true ->
@ -96,7 +96,7 @@ put_authz_cache(Zone, PubSub, Topic, AuthzResult) ->
false -> false ->
NewestK = get_newest_key(), NewestK = get_newest_key(),
{_AuthzResult, CachedAt} = erlang:get(NewestK), {_AuthzResult, CachedAt} = erlang:get(NewestK),
if_expired(get_cache_ttl(Zone), CachedAt, if_expired(get_cache_ttl(), CachedAt,
fun(true) -> fun(true) ->
% all cache expired, cleanup first % all cache expired, cleanup first
empty_authz_cache(), empty_authz_cache(),
@ -123,10 +123,10 @@ evict_authz_cache() ->
decr_cache_size(). decr_cache_size().
%% cleanup all the expired cache entries %% cleanup all the expired cache entries
-spec(cleanup_authz_cache(atom()) -> ok). -spec(cleanup_authz_cache() -> ok).
cleanup_authz_cache(Zone) -> cleanup_authz_cache() ->
keys_queue_set( keys_queue_set(
cleanup_authz(get_cache_ttl(Zone), keys_queue_get())). cleanup_authz(get_cache_ttl(), keys_queue_get())).
get_oldest_key() -> get_oldest_key() ->
keys_queue_pick(queue_front()). keys_queue_pick(queue_front()).
@ -143,8 +143,8 @@ dump_authz_cache() ->
map_authz_cache(fun(Cache) -> Cache end). map_authz_cache(fun(Cache) -> Cache end).
map_authz_cache(Fun) -> map_authz_cache(Fun) ->
[Fun(R) || R = {{SubPub, _T}, _Authz} <- get(), SubPub =:= publish [Fun(R) || R = {{SubPub, _T}, _Authz} <- erlang:get(),
orelse SubPub =:= subscribe]. SubPub =:= publish orelse SubPub =:= subscribe].
foreach_authz_cache(Fun) -> foreach_authz_cache(Fun) ->
_ = map_authz_cache(Fun), _ = map_authz_cache(Fun),
ok. ok.

View File

@ -33,8 +33,11 @@
-export([ check/1 -export([ check/1
, create/1 , create/1
, look_up/1
, delete/1 , delete/1
, info/1 , info/1
, format/1
, parse/1
]). ]).
%% gen_server callbacks %% gen_server callbacks
@ -50,8 +53,6 @@
-define(BANNED_TAB, ?MODULE). -define(BANNED_TAB, ?MODULE).
-rlog_shard({?COMMON_SHARD, ?BANNED_TAB}).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Mnesia bootstrap %% Mnesia bootstrap
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -59,6 +60,7 @@
mnesia(boot) -> mnesia(boot) ->
ok = ekka_mnesia:create_table(?BANNED_TAB, [ ok = ekka_mnesia:create_table(?BANNED_TAB, [
{type, set}, {type, set},
{rlog_shard, ?COMMON_SHARD},
{disc_copies, [node()]}, {disc_copies, [node()]},
{record_name, banned}, {record_name, banned},
{attributes, record_info(fields, banned)}, {attributes, record_info(fields, banned)},
@ -91,7 +93,63 @@ do_check(Who) when is_tuple(Who) ->
Until > erlang:system_time(second) Until > erlang:system_time(second)
end. end.
-spec(create(emqx_types:banned()) -> ok). format(#banned{who = Who0,
by = By,
reason = Reason,
at = At,
until = Until}) ->
{As, Who} = maybe_format_host(Who0),
#{
as => As,
who => Who,
by => By,
reason => Reason,
at => to_rfc3339(At),
until => to_rfc3339(Until)
}.
parse(Params) ->
Who = pares_who(Params),
By = maps:get(<<"by">>, Params, <<"mgmt_api">>),
Reason = maps:get(<<"reason">>, Params, <<"">>),
At = pares_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)),
Until = pares_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60),
#banned{
who = Who,
by = By,
reason = Reason,
at = At,
until = Until
}.
pares_who(#{as := As, who := Who}) ->
pares_who(#{<<"as">> => As, <<"who">> => Who});
pares_who(#{<<"as">> := <<"peerhost">>, <<"who">> := Peerhost0}) ->
{ok, Peerhost} = inet:parse_address(binary_to_list(Peerhost0)),
{peerhost, Peerhost};
pares_who(#{<<"as">> := As, <<"who">> := Who}) ->
{binary_to_atom(As, utf8), Who}.
pares_time(undefined, Default) ->
Default;
pares_time(Rfc3339, _Default) ->
to_timestamp(Rfc3339).
maybe_format_host({peerhost, Host}) ->
AddrBinary = list_to_binary(inet:ntoa(Host)),
{peerhost, AddrBinary};
maybe_format_host({As, Who}) ->
{As, Who}.
to_rfc3339(Timestamp) ->
list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, second}])).
to_timestamp(Rfc3339) when is_binary(Rfc3339) ->
to_timestamp(binary_to_list(Rfc3339));
to_timestamp(Rfc3339) ->
calendar:rfc3339_to_system_time(Rfc3339, [{unit, second}]).
-spec(create(emqx_types:banned() | map()) -> ok).
create(#{who := Who, create(#{who := Who,
by := By, by := By,
reason := Reason, reason := Reason,
@ -105,9 +163,16 @@ create(#{who := Who,
create(Banned) when is_record(Banned, banned) -> create(Banned) when is_record(Banned, banned) ->
ekka_mnesia:dirty_write(?BANNED_TAB, Banned). ekka_mnesia:dirty_write(?BANNED_TAB, Banned).
look_up(Who) when is_map(Who) ->
look_up(pares_who(Who));
look_up(Who) ->
mnesia:dirty_read(?BANNED_TAB, Who).
-spec(delete({clientid, emqx_types:clientid()} -spec(delete({clientid, emqx_types:clientid()}
| {username, emqx_types:username()} | {username, emqx_types:username()}
| {peerhost, emqx_types:peerhost()}) -> ok). | {peerhost, emqx_types:peerhost()}) -> ok).
delete(Who) when is_map(Who)->
delete(pares_who(Who));
delete(Who) -> delete(Who) ->
ekka_mnesia:dirty_delete(?BANNED_TAB, Who). ekka_mnesia:dirty_delete(?BANNED_TAB, Who).
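A usage sketch of the new map-based helpers (field values are illustrative); parse/1 fills by, at and until with defaults when they are omitted:

Banned = emqx_banned:parse(#{<<"as">>     => <<"clientid">>,
                             <<"who">>    => <<"bad-client">>,
                             <<"reason">> => <<"flapping">>}),
ok  = emqx_banned:create(Banned),
[_] = emqx_banned:look_up(#{<<"as">> => <<"clientid">>, <<"who">> => <<"bad-client">>}),
ok  = emqx_banned:delete(#{<<"as">> => <<"clientid">>, <<"who">> => <<"bad-client">>}).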

View File

@ -242,7 +242,7 @@ route(Routes, Delivery) ->
do_route({To, Node}, Delivery) when Node =:= node() -> do_route({To, Node}, Delivery) when Node =:= node() ->
{Node, To, dispatch(To, Delivery)}; {Node, To, dispatch(To, Delivery)};
do_route({To, Node}, Delivery) when is_atom(Node) -> do_route({To, Node}, Delivery) when is_atom(Node) ->
{Node, To, forward(Node, To, Delivery, emqx_config:get([rpc, mode]))}; {Node, To, forward(Node, To, Delivery, emqx:get_config([rpc, mode]))};
do_route({To, Group}, Delivery) when is_tuple(Group); is_binary(Group) -> do_route({To, Group}, Delivery) when is_tuple(Group); is_binary(Group) ->
{share, To, emqx_shared_sub:dispatch(Group, To, Delivery)}. {share, To, emqx_shared_sub:dispatch(Group, To, Delivery)}.

View File

@ -43,6 +43,14 @@ init([]) ->
type => worker, type => worker,
modules => [emqx_shared_sub]}, modules => [emqx_shared_sub]},
%% Authentication
AuthN = #{id => authn,
start => {emqx_authentication, start_link, []},
restart => permanent,
shutdown => 2000,
type => worker,
modules => [emqx_authentication]},
%% Broker helper %% Broker helper
Helper = #{id => helper, Helper = #{id => helper,
start => {emqx_broker_helper, start_link, []}, start => {emqx_broker_helper, start_link, []},
@ -51,5 +59,5 @@ init([]) ->
type => worker, type => worker,
modules => [emqx_broker_helper]}, modules => [emqx_broker_helper]},
{ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, Helper]}}. {ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, AuthN, Helper]}}.

View File

@ -99,7 +99,7 @@
-type(channel() :: #channel{}). -type(channel() :: #channel{}).
-type(opts() :: #{zone := atom(), listener := atom(), atom() => term()}). -type(opts() :: #{zone := atom(), listener := {Type::atom(), Name::atom()}, atom() => term()}).
-type(conn_state() :: idle | connecting | connected | reauthenticating | disconnected). -type(conn_state() :: idle | connecting | connected | reauthenticating | disconnected).
@ -202,18 +202,19 @@ caps(#channel{clientinfo = #{zone := Zone}}) ->
-spec(init(emqx_types:conninfo(), opts()) -> channel()). -spec(init(emqx_types:conninfo(), opts()) -> channel()).
init(ConnInfo = #{peername := {PeerHost, _Port}, init(ConnInfo = #{peername := {PeerHost, _Port},
sockname := {_Host, SockPort}}, #{zone := Zone, listener := Listener}) -> sockname := {_Host, SockPort}},
#{zone := Zone, listener := {Type, Listener}}) ->
Peercert = maps:get(peercert, ConnInfo, undefined), Peercert = maps:get(peercert, ConnInfo, undefined),
Protocol = maps:get(protocol, ConnInfo, mqtt), Protocol = maps:get(protocol, ConnInfo, mqtt),
MountPoint = case get_mqtt_conf(Zone, mountpoint) of MountPoint = case emqx_config:get_listener_conf(Type, Listener, [mountpoint]) of
<<>> -> undefined; <<>> -> undefined;
MP -> MP MP -> MP
end, end,
QuotaPolicy = emqx_config:get_listener_conf(Zone, Listener,[rate_limit, quota], []), QuotaPolicy = emqx_config:get_zone_conf(Zone, [quota], #{}),
ClientInfo = set_peercert_infos( ClientInfo = set_peercert_infos(
Peercert, Peercert,
#{zone => Zone, #{zone => Zone,
listener => Listener, listener => emqx_listeners:listener_id(Type, Listener),
protocol => Protocol, protocol => Protocol,
peerhost => PeerHost, peerhost => PeerHost,
sockport => SockPort, sockport => SockPort,
@ -222,7 +223,7 @@ init(ConnInfo = #{peername := {PeerHost, _Port},
mountpoint => MountPoint, mountpoint => MountPoint,
is_bridge => false, is_bridge => false,
is_superuser => false is_superuser => false
}, Zone, Listener), }, Zone),
{NClientInfo, NConnInfo} = take_ws_cookie(ClientInfo, ConnInfo), {NClientInfo, NConnInfo} = take_ws_cookie(ClientInfo, ConnInfo),
#channel{conninfo = NConnInfo, #channel{conninfo = NConnInfo,
clientinfo = NClientInfo, clientinfo = NClientInfo,
@ -243,12 +244,12 @@ quota_policy(RawPolicy) ->
erlang:trunc(hocon_postprocess:duration(StrWind) / 1000)}} erlang:trunc(hocon_postprocess:duration(StrWind) / 1000)}}
|| {Name, [StrCount, StrWind]} <- maps:to_list(RawPolicy)]. || {Name, [StrCount, StrWind]} <- maps:to_list(RawPolicy)].
set_peercert_infos(NoSSL, ClientInfo, _, _) set_peercert_infos(NoSSL, ClientInfo, _)
when NoSSL =:= nossl; when NoSSL =:= nossl;
NoSSL =:= undefined -> NoSSL =:= undefined ->
ClientInfo#{username => undefined}; ClientInfo#{username => undefined};
set_peercert_infos(Peercert, ClientInfo, Zone, _Listener) -> set_peercert_infos(Peercert, ClientInfo, Zone) ->
{DN, CN} = {esockd_peercert:subject(Peercert), {DN, CN} = {esockd_peercert:subject(Peercert),
esockd_peercert:common_name(Peercert)}, esockd_peercert:common_name(Peercert)},
PeercetAs = fun(Key) -> PeercetAs = fun(Key) ->
@ -425,7 +426,7 @@ handle_in(?PUBCOMP_PACKET(PacketId, _ReasonCode), Channel = #channel{session = S
end; end;
handle_in(Packet = ?SUBSCRIBE_PACKET(PacketId, Properties, TopicFilters), handle_in(Packet = ?SUBSCRIBE_PACKET(PacketId, Properties, TopicFilters),
Channel = #channel{clientinfo = ClientInfo = #{zone := Zone}}) -> Channel = #channel{clientinfo = ClientInfo}) ->
case emqx_packet:check(Packet) of case emqx_packet:check(Packet) of
ok -> ok ->
TopicFilters0 = parse_topic_filters(TopicFilters), TopicFilters0 = parse_topic_filters(TopicFilters),
@ -434,7 +435,7 @@ handle_in(Packet = ?SUBSCRIBE_PACKET(PacketId, Properties, TopicFilters),
HasAuthzDeny = lists:any(fun({_TopicFilter, ReasonCode}) -> HasAuthzDeny = lists:any(fun({_TopicFilter, ReasonCode}) ->
ReasonCode =:= ?RC_NOT_AUTHORIZED ReasonCode =:= ?RC_NOT_AUTHORIZED
end, TupleTopicFilters0), end, TupleTopicFilters0),
DenyAction = emqx_config:get_zone_conf(Zone, [authorization, deny_action]), DenyAction = emqx:get_config([authorization, deny_action], ignore),
case DenyAction =:= disconnect andalso HasAuthzDeny of case DenyAction =:= disconnect andalso HasAuthzDeny of
true -> handle_out(disconnect, ?RC_NOT_AUTHORIZED, Channel); true -> handle_out(disconnect, ?RC_NOT_AUTHORIZED, Channel);
false -> false ->
@ -536,8 +537,7 @@ process_connect(AckProps, Channel = #channel{conninfo = ConnInfo,
%% Process Publish %% Process Publish
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) ->
Channel = #channel{clientinfo = #{zone := Zone}}) ->
case pipeline([fun check_quota_exceeded/2, case pipeline([fun check_quota_exceeded/2,
fun process_alias/2, fun process_alias/2,
fun check_pub_alias/2, fun check_pub_alias/2,
@ -550,7 +550,7 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId),
{error, Rc = ?RC_NOT_AUTHORIZED, NChannel} -> {error, Rc = ?RC_NOT_AUTHORIZED, NChannel} ->
?LOG(warning, "Cannot publish message to ~s due to ~s.", ?LOG(warning, "Cannot publish message to ~s due to ~s.",
[Topic, emqx_reason_codes:text(Rc)]), [Topic, emqx_reason_codes:text(Rc)]),
case emqx_config:get_zone_conf(Zone, [authorization, deny_action]) of case emqx:get_config([authorization, deny_action], ignore) of
ignore -> ignore ->
case QoS of case QoS of
?QOS_0 -> {ok, NChannel}; ?QOS_0 -> {ok, NChannel};
@ -955,9 +955,8 @@ handle_call({takeover, 'end'}, Channel = #channel{session = Session,
AllPendings = lists:append(Delivers, Pendings), AllPendings = lists:append(Delivers, Pendings),
disconnect_and_shutdown(takeovered, AllPendings, Channel); disconnect_and_shutdown(takeovered, AllPendings, Channel);
handle_call(list_authz_cache, #channel{clientinfo = #{zone := Zone}} handle_call(list_authz_cache, Channel) ->
= Channel) -> {reply, emqx_authz_cache:list_authz_cache(), Channel};
{reply, emqx_authz_cache:list_authz_cache(Zone), Channel};
handle_call({quota, Policy}, Channel) -> handle_call({quota, Policy}, Channel) ->
Zone = info(zone, Channel), Zone = info(zone, Channel),
@ -1299,14 +1298,17 @@ authenticate(?AUTH_PACKET(_, #{'Authentication-Method' := AuthMethod} = Properti
{error, ?RC_BAD_AUTHENTICATION_METHOD} {error, ?RC_BAD_AUTHENTICATION_METHOD}
end. end.
do_authenticate(#{auth_method := AuthMethod} = Credential, Channel) -> do_authenticate(#{auth_method := AuthMethod} = Credential, #channel{clientinfo = ClientInfo} = Channel) ->
Properties = #{'Authentication-Method' => AuthMethod}, Properties = #{'Authentication-Method' => AuthMethod},
case emqx_access_control:authenticate(Credential) of case emqx_access_control:authenticate(Credential) of
ok -> {ok, Result} ->
{ok, Properties, Channel#channel{auth_cache = #{}}}; {ok, Properties,
{ok, AuthData} -> Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
auth_cache = #{}}};
{ok, Result, AuthData} ->
{ok, Properties#{'Authentication-Data' => AuthData}, {ok, Properties#{'Authentication-Data' => AuthData},
Channel#channel{auth_cache = #{}}}; Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
auth_cache = #{}}};
{continue, AuthCache} -> {continue, AuthCache} ->
{continue, Properties, Channel#channel{auth_cache = AuthCache}}; {continue, Properties, Channel#channel{auth_cache = AuthCache}};
{continue, AuthData, AuthCache} -> {continue, AuthData, AuthCache} ->
@ -1316,10 +1318,10 @@ do_authenticate(#{auth_method := AuthMethod} = Credential, Channel) ->
{error, emqx_reason_codes:connack_error(Reason)} {error, emqx_reason_codes:connack_error(Reason)}
end; end;
do_authenticate(Credential, Channel) -> do_authenticate(Credential, #channel{clientinfo = ClientInfo} = Channel) ->
case emqx_access_control:authenticate(Credential) of case emqx_access_control:authenticate(Credential) of
ok -> {ok, #{is_superuser := IsSuperuser}} ->
{ok, #{}, Channel}; {ok, #{}, Channel#channel{clientinfo = ClientInfo#{is_superuser => IsSuperuser}}};
{error, Reason} -> {error, Reason} ->
{error, emqx_reason_codes:connack_error(Reason)} {error, emqx_reason_codes:connack_error(Reason)}
end. end.
@ -1417,9 +1419,7 @@ check_pub_alias(_Packet, _Channel) -> ok.
check_pub_authz(#mqtt_packet{variable = #mqtt_packet_publish{topic_name = Topic}}, check_pub_authz(#mqtt_packet{variable = #mqtt_packet_publish{topic_name = Topic}},
#channel{clientinfo = ClientInfo}) -> #channel{clientinfo = ClientInfo}) ->
case is_authz_enabled(ClientInfo) andalso case emqx_access_control:authorize(ClientInfo, publish, Topic) of
emqx_access_control:authorize(ClientInfo, publish, Topic) of
false -> ok;
allow -> ok; allow -> ok;
deny -> {error, ?RC_NOT_AUTHORIZED} deny -> {error, ?RC_NOT_AUTHORIZED}
end. end.
@ -1440,8 +1440,10 @@ check_pub_caps(#mqtt_packet{header = #mqtt_packet_header{qos = QoS,
check_sub_authzs(TopicFilters, Channel) -> check_sub_authzs(TopicFilters, Channel) ->
check_sub_authzs(TopicFilters, Channel, []). check_sub_authzs(TopicFilters, Channel, []).
check_sub_authzs([ TopicFilter = {Topic, _} | More] , Channel, Acc) -> check_sub_authzs([ TopicFilter = {Topic, _} | More],
case check_sub_authz(Topic, Channel) of Channel = #channel{clientinfo = ClientInfo},
Acc) ->
case emqx_access_control:authorize(ClientInfo, subscribe, Topic) of
allow -> allow ->
check_sub_authzs(More, Channel, [ {TopicFilter, 0} | Acc]); check_sub_authzs(More, Channel, [ {TopicFilter, 0} | Acc]);
deny -> deny ->
@ -1450,13 +1452,6 @@ check_sub_authzs([ TopicFilter = {Topic, _} | More] , Channel, Acc) ->
check_sub_authzs([], _Channel, Acc) -> check_sub_authzs([], _Channel, Acc) ->
lists:reverse(Acc). lists:reverse(Acc).
check_sub_authz(TopicFilter, #channel{clientinfo = ClientInfo}) ->
case is_authz_enabled(ClientInfo) andalso
emqx_access_control:authorize(ClientInfo, subscribe, TopicFilter) of
false -> allow;
Result -> Result
end.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Check Sub Caps %% Check Sub Caps
@ -1618,11 +1613,6 @@ maybe_shutdown(Reason, Channel = #channel{conninfo = ConnInfo}) ->
_ -> shutdown(Reason, Channel) _ -> shutdown(Reason, Channel)
end. end.
%%--------------------------------------------------------------------
%% Is Authorization enabled?
is_authz_enabled(#{zone := Zone, is_superuser := IsSuperuser}) ->
(not IsSuperuser) andalso emqx_config:get_zone_conf(Zone, [authorization, enable]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Parse Topic Filters %% Parse Topic Filters

View File

@ -62,5 +62,5 @@ unlock(ClientId) ->
-spec(strategy() -> local | leader | quorum | all). -spec(strategy() -> local | leader | quorum | all).
strategy() -> strategy() ->
emqx_config:get([broker, session_locking_strategy]). emqx:get_config([broker, session_locking_strategy]).

View File

@ -47,10 +47,6 @@
-define(TAB, emqx_channel_registry). -define(TAB, emqx_channel_registry).
-define(LOCK, {?MODULE, cleanup_down}). -define(LOCK, {?MODULE, cleanup_down}).
-define(CM_SHARD, emqx_cm_shard).
-rlog_shard({?CM_SHARD, ?TAB}).
-record(channel, {chid, pid}). -record(channel, {chid, pid}).
%% @doc Start the global channel registry. %% @doc Start the global channel registry.
@ -65,7 +61,7 @@ start_link() ->
%% @doc Is the global registry enabled? %% @doc Is the global registry enabled?
-spec(is_enabled() -> boolean()). -spec(is_enabled() -> boolean()).
is_enabled() -> is_enabled() ->
emqx_config:get([broker, enable_session_registry]). emqx:get_config([broker, enable_session_registry]).
%% @doc Register a global channel. %% @doc Register a global channel.
-spec(register_channel(emqx_types:clientid() -spec(register_channel(emqx_types:clientid()
@ -106,6 +102,7 @@ record(ClientId, ChanPid) ->
init([]) -> init([]) ->
ok = ekka_mnesia:create_table(?TAB, [ ok = ekka_mnesia:create_table(?TAB, [
{type, bag}, {type, bag},
{rlog_shard, ?CM_SHARD},
{ram_copies, [node()]}, {ram_copies, [node()]},
{record_name, channel}, {record_name, channel},
{attributes, record_info(fields, channel)}, {attributes, record_info(fields, channel)},

View File

@ -22,49 +22,38 @@
-export([init/1]). -export([init/1]).
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
start_link() -> start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []). supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%%--------------------------------------------------------------------
%% Supervisor callbacks
%%--------------------------------------------------------------------
init([]) -> init([]) ->
Banned = #{id => banned,
start => {emqx_banned, start_link, []},
restart => permanent,
shutdown => 1000,
type => worker,
modules => [emqx_banned]},
Flapping = #{id => flapping,
start => {emqx_flapping, start_link, []},
restart => permanent,
shutdown => 1000,
type => worker,
modules => [emqx_flapping]},
%% Channel locker
Locker = #{id => locker,
start => {emqx_cm_locker, start_link, []},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [emqx_cm_locker]
},
%% Channel registry
Registry = #{id => registry,
start => {emqx_cm_registry, start_link, []},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [emqx_cm_registry]
},
%% Channel Manager
Manager = #{id => manager,
start => {emqx_cm, start_link, []},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [emqx_cm]
},
SupFlags = #{strategy => one_for_one, SupFlags = #{strategy => one_for_one,
intensity => 100, intensity => 100,
period => 10 period => 10
}, },
Banned = child_spec(emqx_banned, 1000, worker),
Flapping = child_spec(emqx_flapping, 1000, worker),
Locker = child_spec(emqx_cm_locker, 5000, worker),
Registry = child_spec(emqx_cm_registry, 5000, worker),
Manager = child_spec(emqx_cm, 5000, worker),
{ok, {SupFlags, [Banned, Flapping, Locker, Registry, Manager]}}. {ok, {SupFlags, [Banned, Flapping, Locker, Registry, Manager]}}.
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
child_spec(Mod, Shutdown, Type) ->
#{id => Mod,
start => {Mod, start_link, []},
restart => permanent,
shutdown => Shutdown,
type => Type,
modules => [Mod]
}.

View File

@ -20,14 +20,20 @@
-export([ init_load/2 -export([ init_load/2
, read_override_conf/0 , read_override_conf/0
, check_config/2 , check_config/2
, fill_defaults/1
, fill_defaults/2
, save_configs/4 , save_configs/4
, save_to_app_env/1 , save_to_app_env/1
, save_to_config_map/2 , save_to_config_map/2
, save_to_override_conf/1 , save_to_override_conf/1
]). ]).
-export([get_root/1, -export([ get_root/1
get_root_raw/1]). , get_root_raw/1
]).
-export([ get_default_value/1
]).
-export([ get/1 -export([ get/1
, get/2 , get/2
@ -37,10 +43,21 @@
, put/2 , put/2
]). ]).
-export([ get_raw/1
, get_raw/2
, put_raw/1
, put_raw/2
]).
-export([ save_schema_mod_and_names/1
, get_schema_mod/0
, get_schema_mod/1
, get_root_names/0
]).
-export([ get_zone_conf/2 -export([ get_zone_conf/2
, get_zone_conf/3 , get_zone_conf/3
, put_zone_conf/3 , put_zone_conf/3
, find_zone_conf/2
]). ]).
-export([ get_listener_conf/3 -export([ get_listener_conf/3
@ -49,23 +66,12 @@
, find_listener_conf/3 , find_listener_conf/3
]). ]).
-export([ update/2
, update/3
, remove/1
, remove/2
]).
-export([ get_raw/1
, get_raw/2
, put_raw/1
, put_raw/2
]).
-define(CONF, conf). -define(CONF, conf).
-define(RAW_CONF, raw_conf). -define(RAW_CONF, raw_conf).
-define(PERSIS_SCHEMA_MODS, {?MODULE, schema_mods}).
-define(PERSIS_KEY(TYPE, ROOT), {?MODULE, TYPE, ROOT}). -define(PERSIS_KEY(TYPE, ROOT), {?MODULE, TYPE, ROOT}).
-define(ZONE_CONF_PATH(ZONE, PATH), [zones, ZONE | PATH]). -define(ZONE_CONF_PATH(ZONE, PATH), [zones, ZONE | PATH]).
-define(LISTENER_CONF_PATH(ZONE, LISTENER, PATH), [zones, ZONE, listeners, LISTENER | PATH]). -define(LISTENER_CONF_PATH(TYPE, LISTENER, PATH), [listeners, TYPE, LISTENER | PATH]).
-define(ATOM_CONF_PATH(PATH, EXP, EXP_ON_FAIL), -define(ATOM_CONF_PATH(PATH, EXP, EXP_ON_FAIL),
try [atom(Key) || Key <- PATH] of try [atom(Key) || Key <- PATH] of
@ -74,12 +80,35 @@
error:badarg -> EXP_ON_FAIL error:badarg -> EXP_ON_FAIL
end). end).
-export_type([update_request/0, raw_config/0, config/0]). -export_type([update_request/0, raw_config/0, config/0, app_envs/0,
update_opts/0, update_cmd/0, update_args/0,
update_error/0, update_result/0]).
-type update_request() :: term(). -type update_request() :: term().
-type update_cmd() :: {update, update_request()} | remove.
-type update_opts() :: #{
%% rawconf_with_defaults:
%% fill the default values into the `raw_config` field of the return value
%% defaults to `false`
rawconf_with_defaults => boolean(),
%% persistent:
%% save the updated config to the emqx_override.conf file
%% defaults to `true`
persistent => boolean()
}.
-type update_args() :: {update_cmd(), Opts :: update_opts()}.
-type update_stage() :: pre_config_update | post_config_update.
-type update_error() :: {update_stage(), module(), term()} | {save_configs, term()} | term().
-type update_result() :: #{
config => emqx_config:config(),
raw_config => emqx_config:raw_config(),
post_config_update => #{module() => any()}
}.
%% raw_config() is the config that is NOT parsed and translated by hocon schema %% raw_config() is the config that is NOT parsed and translated by hocon schema
-type raw_config() :: #{binary() => term()} | undefined. -type raw_config() :: #{binary() => term()} | list() | undefined.
%% config() is the config that is parsed and translated by hocon schema %% config() is the config that is parsed and translated by hocon schema
-type config() :: #{atom() => term()} | undefined. -type config() :: #{atom() => term()} | list() | undefined.
-type app_envs() :: [proplists:property()]. -type app_envs() :: [proplists:property()].
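For example, an update_args() value asking for defaults to be filled into the returned raw config while skipping persistence to emqx_override.conf (the new value itself is illustrative):

Opts = #{rawconf_with_defaults => true, persistent => false},
UpdateArgs = {{update, #{<<"enable">> => false}}, Opts}.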
%% @doc For the given path, get root value enclosed in a single-key map. %% @doc For the given path, get root value enclosed in a single-key map.
@ -127,63 +156,66 @@ find_raw(KeyPath) ->
-spec get_zone_conf(atom(), emqx_map_lib:config_key_path()) -> term(). -spec get_zone_conf(atom(), emqx_map_lib:config_key_path()) -> term().
get_zone_conf(Zone, KeyPath) -> get_zone_conf(Zone, KeyPath) ->
?MODULE:get(?ZONE_CONF_PATH(Zone, KeyPath)). case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
{not_found, _, _} -> %% not found in zones, try to find the global config
?MODULE:get(KeyPath);
{ok, Value} -> Value
end.
-spec get_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> term(). -spec get_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> term().
get_zone_conf(Zone, KeyPath, Default) -> get_zone_conf(Zone, KeyPath, Default) ->
?MODULE:get(?ZONE_CONF_PATH(Zone, KeyPath), Default). case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
{not_found, _, _} -> %% not found in zones, try to find the global config
?MODULE:get(KeyPath, Default);
{ok, Value} -> Value
end.
-spec put_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> ok. -spec put_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> ok.
put_zone_conf(Zone, KeyPath, Conf) -> put_zone_conf(Zone, KeyPath, Conf) ->
?MODULE:put(?ZONE_CONF_PATH(Zone, KeyPath), Conf). ?MODULE:put(?ZONE_CONF_PATH(Zone, KeyPath), Conf).
-spec find_zone_conf(atom(), emqx_map_lib:config_key_path()) ->
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
find_zone_conf(Zone, KeyPath) ->
find(?ZONE_CONF_PATH(Zone, KeyPath)).
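With this fallback a zone only needs to override what differs from the global config; a lookup in a zone that does not set the key (zone name and key path are illustrative) falls through to the global value:

Global = emqx_config:get([mqtt, max_topic_alias]),
Global = emqx_config:get_zone_conf(my_zone, [mqtt, max_topic_alias]).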
-spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) -> term(). -spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) -> term().
get_listener_conf(Zone, Listener, KeyPath) -> get_listener_conf(Type, Listener, KeyPath) ->
?MODULE:get(?LISTENER_CONF_PATH(Zone, Listener, KeyPath)). ?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
-spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> term(). -spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> term().
get_listener_conf(Zone, Listener, KeyPath, Default) -> get_listener_conf(Type, Listener, KeyPath, Default) ->
?MODULE:get(?LISTENER_CONF_PATH(Zone, Listener, KeyPath), Default). ?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Default).
-spec put_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> ok. -spec put_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> ok.
put_listener_conf(Zone, Listener, KeyPath, Conf) -> put_listener_conf(Type, Listener, KeyPath, Conf) ->
?MODULE:put(?LISTENER_CONF_PATH(Zone, Listener, KeyPath), Conf). ?MODULE:put(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Conf).
-spec find_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) -> -spec find_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) ->
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}. {ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
find_listener_conf(Zone, Listener, KeyPath) -> find_listener_conf(Type, Listener, KeyPath) ->
find(?LISTENER_CONF_PATH(Zone, Listener, KeyPath)). find(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
-spec put(map()) -> ok. -spec put(map()) -> ok.
put(Config) -> put(Config) ->
maps:fold(fun(RootName, RootValue, _) -> maps:fold(fun(RootName, RootValue, _) ->
?MODULE:put([RootName], RootValue) ?MODULE:put([RootName], RootValue)
end, [], Config). end, ok, Config).
-spec put(emqx_map_lib:config_key_path(), term()) -> ok. -spec put(emqx_map_lib:config_key_path(), term()) -> ok.
put(KeyPath, Config) -> do_put(?CONF, KeyPath, Config). put(KeyPath, Config) -> do_put(?CONF, KeyPath, Config).
-spec update(emqx_map_lib:config_key_path(), update_request()) -> -spec get_default_value(emqx_map_lib:config_key_path()) -> {ok, term()} | {error, term()}.
ok | {error, term()}. get_default_value([RootName | _] = KeyPath) ->
update(KeyPath, UpdateReq) -> BinKeyPath = [bin(Key) || Key <- KeyPath],
update(emqx_schema, KeyPath, UpdateReq). case find_raw([RootName]) of
{ok, RawConf} ->
-spec update(module(), emqx_map_lib:config_key_path(), update_request()) -> RawConf1 = emqx_map_lib:deep_remove(BinKeyPath, #{bin(RootName) => RawConf}),
ok | {error, term()}. try fill_defaults(get_schema_mod(RootName), RawConf1) of FullConf ->
update(SchemaModule, KeyPath, UpdateReq) -> case emqx_map_lib:deep_find(BinKeyPath, FullConf) of
emqx_config_handler:update_config(SchemaModule, KeyPath, {update, UpdateReq}). {not_found, _, _} -> {error, no_default_value};
{ok, Val} -> {ok, Val}
-spec remove(emqx_map_lib:config_key_path()) -> ok | {error, term()}. end
remove(KeyPath) -> catch error : Reason -> {error, Reason}
remove(emqx_schema, KeyPath). end;
{not_found, _, _} ->
remove(SchemaModule, KeyPath) -> {error, {rootname_not_found, RootName}}
emqx_config_handler:update_config(SchemaModule, KeyPath, remove). end.
-spec get_raw(emqx_map_lib:config_key_path()) -> term(). -spec get_raw(emqx_map_lib:config_key_path()) -> term().
get_raw(KeyPath) -> do_get(?RAW_CONF, KeyPath). get_raw(KeyPath) -> do_get(?RAW_CONF, KeyPath).
@ -194,8 +226,8 @@ get_raw(KeyPath, Default) -> do_get(?RAW_CONF, KeyPath, Default).
-spec put_raw(map()) -> ok. -spec put_raw(map()) -> ok.
put_raw(Config) -> put_raw(Config) ->
maps:fold(fun(RootName, RootV, _) -> maps:fold(fun(RootName, RootV, _) ->
?MODULE:put_raw([RootName], RootV) ?MODULE:put_raw([RootName], RootV)
end, [], hocon_schema:get_value([], Config)). end, ok, hocon_schema:get_value([], Config)).
-spec put_raw(emqx_map_lib:config_key_path(), term()) -> ok. -spec put_raw(emqx_map_lib:config_key_path(), term()) -> ok.
put_raw(KeyPath, Config) -> do_put(?RAW_CONF, KeyPath, Config). put_raw(KeyPath, Config) -> do_put(?RAW_CONF, KeyPath, Config).
@ -208,47 +240,93 @@ put_raw(KeyPath, Config) -> do_put(?RAW_CONF, KeyPath, Config).
%% NOTE: The order of the files is significant, configs from files ordered %% NOTE: The order of the files is significant, configs from files ordered
%% in the rear of the list override prior values. %% in the rear of the list override prior values.
-spec init_load(module(), [string()] | binary() | hocon:config()) -> ok. -spec init_load(module(), [string()] | binary() | hocon:config()) -> ok.
init_load(SchemaModule, Conf) when is_list(Conf) orelse is_binary(Conf) -> init_load(SchemaMod, Conf) when is_list(Conf) orelse is_binary(Conf) ->
ParseOptions = #{format => richmap}, ParseOptions = #{format => map},
Parser = case is_binary(Conf) of Parser = case is_binary(Conf) of
true -> fun hocon:binary/2; true -> fun hocon:binary/2;
false -> fun hocon:files/2 false -> fun hocon:files/2
end, end,
case Parser(Conf, ParseOptions) of case Parser(Conf, ParseOptions) of
{ok, RawRichConf} -> {ok, RawRichConf} ->
init_load(SchemaModule, RawRichConf); init_load(SchemaMod, RawRichConf);
{error, Reason} -> {error, Reason} ->
logger:error(#{msg => failed_to_load_hocon_conf, logger:error(#{msg => failed_to_load_hocon_conf,
reason => Reason reason => Reason
}), }),
error(failed_to_load_hocon_conf) error(failed_to_load_hocon_conf)
end; end;
init_load(SchemaModule, RawRichConf) when is_map(RawRichConf) -> init_load(SchemaMod, RawConf0) when is_map(RawConf0) ->
%% check with richmap for line numbers in error reports (future enhancement) ok = save_schema_mod_and_names(SchemaMod),
Opts = #{return_plain => true, %% override part of the input conf using emqx_override.conf
nullable => true RawConf = merge_with_override_conf(RawConf0),
}, %% check and save configs
%% this call throws exception in case of check failure {_AppEnvs, CheckedConf} = check_config(SchemaMod, RawConf),
{_AppEnvs, CheckedConf} = hocon_schema:map_translate(SchemaModule, RawRichConf, Opts), ok = save_to_config_map(maps:with(get_atom_root_names(), CheckedConf),
ok = save_to_config_map(emqx_map_lib:unsafe_atom_key_map(CheckedConf), maps:with(get_root_names(), RawConf)).
hocon_schema:richmap_to_map(RawRichConf)).
merge_with_override_conf(RawConf) ->
maps:merge(RawConf, maps:with(maps:keys(RawConf), read_override_conf())).
-spec check_config(module(), raw_config()) -> {AppEnvs, CheckedConf} -spec check_config(module(), raw_config()) -> {AppEnvs, CheckedConf}
when AppEnvs :: app_envs(), CheckedConf :: config(). when AppEnvs :: app_envs(), CheckedConf :: config().
check_config(SchemaModule, RawConf) -> check_config(SchemaMod, RawConf) ->
Opts = #{return_plain => true, Opts = #{return_plain => true,
nullable => true, nullable => true,
format => map format => map
}, },
{AppEnvs, CheckedConf} = {AppEnvs, CheckedConf} =
hocon_schema:map_translate(SchemaModule, RawConf, Opts), hocon_schema:map_translate(SchemaMod, RawConf, Opts),
Conf = maps:with(maps:keys(RawConf), CheckedConf), Conf = maps:with(maps:keys(RawConf), CheckedConf),
{AppEnvs, emqx_map_lib:unsafe_atom_key_map(Conf)}. {AppEnvs, emqx_map_lib:unsafe_atom_key_map(Conf)}.
-spec fill_defaults(raw_config()) -> map().
fill_defaults(RawConf) ->
RootNames = get_root_names(),
maps:fold(fun(Key, Conf, Acc) ->
SubMap = #{Key => Conf},
WithDefaults = case lists:member(Key, RootNames) of
true -> fill_defaults(get_schema_mod(Key), SubMap);
false -> SubMap
end,
maps:merge(Acc, WithDefaults)
end, #{}, RawConf).
-spec fill_defaults(module(), raw_config()) -> map().
fill_defaults(SchemaMod, RawConf) ->
hocon_schema:check_plain(SchemaMod, RawConf,
#{nullable => true, no_conversion => true}, root_names_from_conf(RawConf)).
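A sketch of fill_defaults/1, assuming init_load/2 has already registered the schema modules and using an invented partial raw config:

    PartialRaw = #{<<"mqtt">> => #{<<"max_topic_alias">> => 10}},
    FullRaw = emqx_config:fill_defaults(PartialRaw),
    %% The explicitly given value survives; missing sibling fields of the mqtt
    %% root are filled in from the schema defaults.
    #{<<"mqtt">> := #{<<"max_topic_alias">> := 10}} = FullRaw.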
-spec read_override_conf() -> raw_config(). -spec read_override_conf() -> raw_config().
read_override_conf() -> read_override_conf() ->
load_hocon_file(emqx_override_conf_name(), map). load_hocon_file(emqx_override_conf_name(), map).
-spec save_schema_mod_and_names(module()) -> ok.
save_schema_mod_and_names(SchemaMod) ->
RootNames = hocon_schema:root_names(SchemaMod),
OldMods = get_schema_mod(),
OldNames = get_root_names(),
%% map from root name to schema module name
NewMods = maps:from_list([{Name, SchemaMod} || Name <- RootNames]),
persistent_term:put(?PERSIS_SCHEMA_MODS, #{
mods => maps:merge(OldMods, NewMods),
names => lists:usort(OldNames ++ RootNames)
}).
-spec get_schema_mod() -> #{binary() => atom()}.
get_schema_mod() ->
maps:get(mods, persistent_term:get(?PERSIS_SCHEMA_MODS, #{mods => #{}})).
-spec get_schema_mod(atom() | binary()) -> module().
get_schema_mod(RootName) ->
maps:get(bin(RootName), get_schema_mod()).
-spec get_root_names() -> [binary()].
get_root_names() ->
maps:get(names, persistent_term:get(?PERSIS_SCHEMA_MODS, #{names => []})).
get_atom_root_names() ->
[atom(N) || N <- get_root_names()].
-spec save_configs(app_envs(), config(), raw_config(), raw_config()) -> ok | {error, term()}. -spec save_configs(app_envs(), config(), raw_config(), raw_config()) -> ok | {error, term()}.
save_configs(_AppEnvs, Conf, RawConf, OverrideConf) -> save_configs(_AppEnvs, Conf, RawConf, OverrideConf) ->
%% We may also need to support hot config update for the apps that use application envs. %% We may also need to support hot config update for the apps that use application envs.
@ -270,14 +348,19 @@ save_to_config_map(Conf, RawConf) ->
?MODULE:put_raw(RawConf). ?MODULE:put_raw(RawConf).
-spec save_to_override_conf(raw_config()) -> ok | {error, term()}. -spec save_to_override_conf(raw_config()) -> ok | {error, term()}.
save_to_override_conf(undefined) ->
ok;
save_to_override_conf(RawConf) -> save_to_override_conf(RawConf) ->
FileName = emqx_override_conf_name(), case emqx_override_conf_name() of
ok = filelib:ensure_dir(FileName), undefined -> ok;
case file:write_file(FileName, jsx:prettify(jsx:encode(RawConf))) of FileName ->
ok -> ok; ok = filelib:ensure_dir(FileName),
{error, Reason} -> case file:write_file(FileName, jsx:prettify(jsx:encode(RawConf))) of
logger:error("write to ~s failed, ~p", [FileName, Reason]), ok -> ok;
{error, Reason} {error, Reason} ->
logger:error("write to ~s failed, ~p", [FileName, Reason]),
{error, Reason}
end
end. end.
load_hocon_file(FileName, LoadType) -> load_hocon_file(FileName, LoadType) ->
@ -289,7 +372,7 @@ load_hocon_file(FileName, LoadType) ->
end. end.
emqx_override_conf_name() -> emqx_override_conf_name() ->
application:get_env(emqx, override_conf_file, "emqx_override.conf"). application:get_env(emqx, override_conf_file, undefined).
do_get(Type, KeyPath) -> do_get(Type, KeyPath) ->
Ref = make_ref(), Ref = make_ref(),
@ -336,12 +419,19 @@ do_deep_put(?CONF, KeyPath, Map, Value) ->
do_deep_put(?RAW_CONF, KeyPath, Map, Value) -> do_deep_put(?RAW_CONF, KeyPath, Map, Value) ->
emqx_map_lib:deep_put([bin(Key) || Key <- KeyPath], Map, Value). emqx_map_lib:deep_put([bin(Key) || Key <- KeyPath], Map, Value).
root_names_from_conf(RawConf) ->
Keys = maps:keys(RawConf),
[Name || Name <- get_root_names(), lists:member(Name, Keys)].
atom(Bin) when is_binary(Bin) -> atom(Bin) when is_binary(Bin) ->
binary_to_existing_atom(Bin, latin1); binary_to_existing_atom(Bin, latin1);
atom(Str) when is_list(Str) ->
list_to_existing_atom(Str);
atom(Atom) when is_atom(Atom) -> atom(Atom) when is_atom(Atom) ->
Atom. Atom.
bin(Bin) when is_binary(Bin) -> Bin; bin(Bin) when is_binary(Bin) -> Bin;
bin(Str) when is_list(Str) -> list_to_binary(Str);
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8). bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
conf_key(?CONF, RootName) -> conf_key(?CONF, RootName) ->
View File
@ -23,7 +23,9 @@
%% API functions %% API functions
-export([ start_link/0 -export([ start_link/0
, stop/0
, add_handler/2 , add_handler/2
, remove_handler/1
, update_config/3 , update_config/3
, merge_to_old_config/2 , merge_to_old_config/2
]). ]).
@ -37,20 +39,28 @@
code_change/3]). code_change/3]).
-define(MOD, {mod}). -define(MOD, {mod}).
-define(WKEY, '?').
-define(ATOM_CONF_PATH(PATH, EXP, EXP_ON_FAIL),
try [safe_atom(Key) || Key <- PATH] of
AtomKeyPath -> EXP
catch
error:badarg -> EXP_ON_FAIL
end).
-type handler_name() :: module(). -type handler_name() :: module().
-type handlers() :: #{emqx_config:config_key() => handlers(), ?MOD => handler_name()}. -type handlers() :: #{emqx_config:config_key() => handlers(), ?MOD => handler_name()}.
-type update_args() :: {update, emqx_config:update_request()} | remove.
-optional_callbacks([ pre_config_update/2 -optional_callbacks([ pre_config_update/2
, post_config_update/3 , post_config_update/4
]). ]).
-callback pre_config_update(emqx_config:update_request(), emqx_config:raw_config()) -> -callback pre_config_update(emqx_config:update_request(), emqx_config:raw_config()) ->
emqx_config:update_request(). {ok, emqx_config:update_request()} | {error, term()}.
-callback post_config_update(emqx_config:update_request(), emqx_config:config(), -callback post_config_update(emqx_config:update_request(), emqx_config:config(),
emqx_config:config()) -> any(). emqx_config:config(), emqx_config:app_envs()) ->
ok | {ok, Result::any()} | {error, Reason::term()}.
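To make the new callback contract concrete, a minimal handler sketch; the module name, the matched key and the returned result are illustrative assumptions, not an existing EMQX handler:

    -module(my_conf_handler).
    -behaviour(emqx_config_handler).

    -export([pre_config_update/2, post_config_update/4]).

    %% Validate or rewrite the raw update request before it is schema-checked.
    pre_config_update(#{<<"enable">> := Enable} = Req, _OldRawConf) when is_boolean(Enable) ->
        {ok, Req};
    pre_config_update(_Req, _OldRawConf) ->
        {error, invalid_update_request}.

    %% Apply side effects with the checked config; {ok, Result} is collected
    %% under this module's name in the post_config_update map of update_result().
    post_config_update(_Req, _NewConf, _OldConf, _AppEnvs) ->
        {ok, applied}.

A module like this would be registered with emqx_config_handler:add_handler/2 for the config path it owns.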
-type state() :: #{ -type state() :: #{
handlers := handlers(), handlers := handlers(),
@ -60,14 +70,22 @@
start_link() -> start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, {}, []). gen_server:start_link({local, ?MODULE}, ?MODULE, {}, []).
-spec update_config(module(), emqx_config:config_key_path(), update_args()) -> stop() ->
ok | {error, term()}. gen_server:stop(?MODULE).
-spec update_config(module(), emqx_config:config_key_path(), emqx_config:update_args()) ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(SchemaModule, ConfKeyPath, UpdateArgs) -> update_config(SchemaModule, ConfKeyPath, UpdateArgs) ->
gen_server:call(?MODULE, {change_config, SchemaModule, ConfKeyPath, UpdateArgs}). ?ATOM_CONF_PATH(ConfKeyPath, gen_server:call(?MODULE, {change_config, SchemaModule,
AtomKeyPath, UpdateArgs}), {error, ConfKeyPath}).
-spec add_handler(emqx_config:config_key_path(), handler_name()) -> ok. -spec add_handler(emqx_config:config_key_path(), handler_name()) -> ok.
add_handler(ConfKeyPath, HandlerName) -> add_handler(ConfKeyPath, HandlerName) ->
gen_server:call(?MODULE, {add_child, ConfKeyPath, HandlerName}). gen_server:call(?MODULE, {add_handler, ConfKeyPath, HandlerName}).
-spec remove_handler(emqx_config:config_key_path()) -> ok.
remove_handler(ConfKeyPath) ->
gen_server:call(?MODULE, {remove_handler, ConfKeyPath}).
%%============================================================================ %%============================================================================
@ -75,26 +93,34 @@ add_handler(ConfKeyPath, HandlerName) ->
init(_) -> init(_) ->
{ok, #{handlers => #{?MOD => ?MODULE}}}. {ok, #{handlers => #{?MOD => ?MODULE}}}.
handle_call({add_child, ConfKeyPath, HandlerName}, _From, handle_call({add_handler, ConfKeyPath, HandlerName}, _From, State = #{handlers := Handlers}) ->
case deep_put_handler(ConfKeyPath, Handlers, HandlerName) of
{ok, NewHandlers} ->
{reply, ok, State#{handlers => NewHandlers}};
Error ->
{reply, Error, State}
end;
handle_call({remove_handler, ConfKeyPath}, _From,
State = #{handlers := Handlers}) -> State = #{handlers := Handlers}) ->
{reply, ok, State#{handlers => {reply, ok, State#{handlers =>
emqx_map_lib:deep_put(ConfKeyPath, Handlers, #{?MOD => HandlerName})}}; emqx_map_lib:deep_remove(ConfKeyPath ++ [?MOD], Handlers)}};
handle_call({change_config, SchemaModule, ConfKeyPath, UpdateArgs}, _From, handle_call({change_config, SchemaModule, ConfKeyPath, UpdateArgs}, _From,
#{handlers := Handlers} = State) -> #{handlers := Handlers} = State) ->
OldConf = emqx_config:get_root(ConfKeyPath), Reply = try
OldRawConf = emqx_config:get_root_raw(ConfKeyPath), case process_update_request(ConfKeyPath, Handlers, UpdateArgs) of
Result = try {ok, NewRawConf, OverrideConf} ->
{NewRawConf, OverrideConf} = process_upadate_request(ConfKeyPath, OldRawConf, check_and_save_configs(SchemaModule, ConfKeyPath, Handlers, NewRawConf,
Handlers, UpdateArgs), OverrideConf, UpdateArgs);
{AppEnvs, CheckedConf} = emqx_config:check_config(SchemaModule, NewRawConf), {error, Result} ->
_ = do_post_config_update(ConfKeyPath, Handlers, OldConf, CheckedConf, UpdateArgs), {error, Result}
emqx_config:save_configs(AppEnvs, CheckedConf, NewRawConf, OverrideConf) end
catch Error:Reason:ST -> catch Error:Reason:ST ->
?LOG(error, "change_config failed: ~p", [{Error, Reason, ST}]), ?LOG(error, "change_config failed: ~p", [{Error, Reason, ST}]),
{error, Reason} {error, Reason}
end, end,
{reply, Result, State}; {reply, Reply, State};
handle_call(_Request, _From, State) -> handle_call(_Request, _From, State) ->
Reply = ok, Reply = ok,
@ -112,32 +138,93 @@ terminate(_Reason, _State) ->
code_change(_OldVsn, State, _Extra) -> code_change(_OldVsn, State, _Extra) ->
{ok, State}. {ok, State}.
process_upadate_request(ConfKeyPath, OldRawConf, _Handlers, remove) -> deep_put_handler([], Handlers, Mod) when is_map(Handlers) ->
{ok, Handlers#{?MOD => Mod}};
deep_put_handler([], _Handlers, Mod) ->
{ok, #{?MOD => Mod}};
deep_put_handler([?WKEY | KeyPath], Handlers, Mod) ->
deep_put_handler2(?WKEY, KeyPath, Handlers, Mod);
deep_put_handler([Key | KeyPath], Handlers, Mod) ->
case maps:find(?WKEY, Handlers) of
error ->
deep_put_handler2(Key, KeyPath, Handlers, Mod);
{ok, _SubHandlers} ->
{error, {cannot_override_a_wildcard_path, [?WKEY | KeyPath]}}
end.
deep_put_handler2(Key, KeyPath, Handlers, Mod) ->
SubHandlers = maps:get(Key, Handlers, #{}),
case deep_put_handler(KeyPath, SubHandlers, Mod) of
{ok, SubHandlers1} ->
{ok, Handlers#{Key => SubHandlers1}};
Error ->
Error
end.
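A sketch of the wildcard ('?') path segments handled above; the paths and handler modules are assumptions, and the emqx_config_handler process must be running:

    %% One handler for every listener of every type:
    ok = emqx_config_handler:add_handler([listeners, '?', '?'], listeners_handler),
    %% A more specific path under an existing wildcard is rejected:
    {error, {cannot_override_a_wildcard_path, _}} =
        emqx_config_handler:add_handler([listeners, tcp, default], tcp_default_handler).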
process_update_request(ConfKeyPath, _Handlers, {remove, Opts}) ->
OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
BinKeyPath = bin_path(ConfKeyPath), BinKeyPath = bin_path(ConfKeyPath),
NewRawConf = emqx_map_lib:deep_remove(BinKeyPath, OldRawConf), NewRawConf = emqx_map_lib:deep_remove(BinKeyPath, OldRawConf),
OverrideConf = emqx_map_lib:deep_remove(BinKeyPath, emqx_config:read_override_conf()), OverrideConf = remove_from_override_config(BinKeyPath, Opts),
{NewRawConf, OverrideConf}; {ok, NewRawConf, OverrideConf};
process_upadate_request(ConfKeyPath, OldRawConf, Handlers, {update, UpdateReq}) -> process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
NewRawConf = do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq), OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
OverrideConf = update_override_config(NewRawConf), case do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) of
{NewRawConf, OverrideConf}. {ok, NewRawConf} ->
OverrideConf = update_override_config(NewRawConf, Opts),
{ok, NewRawConf, OverrideConf};
Error -> Error
end.
do_update_config([], Handlers, OldRawConf, UpdateReq) -> do_update_config([], Handlers, OldRawConf, UpdateReq) ->
call_pre_config_update(Handlers, OldRawConf, UpdateReq); call_pre_config_update(Handlers, OldRawConf, UpdateReq);
do_update_config([ConfKey | ConfKeyPath], Handlers, OldRawConf, UpdateReq) -> do_update_config([ConfKey | ConfKeyPath], Handlers, OldRawConf, UpdateReq) ->
SubOldRawConf = get_sub_config(bin(ConfKey), OldRawConf), SubOldRawConf = get_sub_config(bin(ConfKey), OldRawConf),
SubHandlers = maps:get(ConfKey, Handlers, #{}), SubHandlers = get_sub_handlers(ConfKey, Handlers),
NewUpdateReq = do_update_config(ConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq), case do_update_config(ConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq) of
call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq}). {ok, NewUpdateReq} ->
call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq});
Error ->
Error
end.
do_post_config_update([], Handlers, OldConf, NewConf, UpdateArgs) -> check_and_save_configs(SchemaModule, ConfKeyPath, Handlers, NewRawConf, OverrideConf,
call_post_config_update(Handlers, OldConf, NewConf, up_req(UpdateArgs)); UpdateArgs) ->
do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, UpdateArgs) -> OldConf = emqx_config:get_root(ConfKeyPath),
FullRawConf = with_full_raw_confs(NewRawConf),
{AppEnvs, CheckedConf} = emqx_config:check_config(SchemaModule, FullRawConf),
NewConf = maps:with(maps:keys(OldConf), CheckedConf),
case do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, #{}) of
{ok, Result0} ->
case save_configs(ConfKeyPath, AppEnvs, NewConf, NewRawConf, OverrideConf,
UpdateArgs) of
{ok, Result1} ->
{ok, Result1#{post_config_update => Result0}};
Error -> Error
end;
Error -> Error
end.
do_post_config_update([], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs), Result);
do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs,
Result) ->
SubOldConf = get_sub_config(ConfKey, OldConf), SubOldConf = get_sub_config(ConfKey, OldConf),
SubNewConf = get_sub_config(ConfKey, NewConf), SubNewConf = get_sub_config(ConfKey, NewConf),
SubHandlers = maps:get(ConfKey, Handlers, #{}), SubHandlers = get_sub_handlers(ConfKey, Handlers),
_ = do_post_config_update(ConfKeyPath, SubHandlers, SubOldConf, SubNewConf, UpdateArgs), case do_post_config_update(ConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs,
call_post_config_update(Handlers, OldConf, NewConf, up_req(UpdateArgs)). UpdateArgs, Result) of
{ok, Result1} ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs),
Result1);
Error -> Error
end.
get_sub_handlers(ConfKey, Handlers) ->
case maps:find(ConfKey, Handlers) of
error -> maps:get(?WKEY, Handlers, #{});
{ok, SubHandlers} -> SubHandlers
end.
get_sub_config(ConfKey, Conf) when is_map(Conf) -> get_sub_config(ConfKey, Conf) when is_map(Conf) ->
maps:get(ConfKey, Conf, undefined); maps:get(ConfKey, Conf, undefined);
@ -147,15 +234,30 @@ get_sub_config(_, _Conf) -> %% the Conf is a primitive
call_pre_config_update(Handlers, OldRawConf, UpdateReq) -> call_pre_config_update(Handlers, OldRawConf, UpdateReq) ->
HandlerName = maps:get(?MOD, Handlers, undefined), HandlerName = maps:get(?MOD, Handlers, undefined),
case erlang:function_exported(HandlerName, pre_config_update, 2) of case erlang:function_exported(HandlerName, pre_config_update, 2) of
true -> HandlerName:pre_config_update(UpdateReq, OldRawConf); true ->
case HandlerName:pre_config_update(UpdateReq, OldRawConf) of
{ok, NewUpdateReq} -> {ok, NewUpdateReq};
{error, Reason} -> {error, {pre_config_update, HandlerName, Reason}}
end;
false -> merge_to_old_config(UpdateReq, OldRawConf) false -> merge_to_old_config(UpdateReq, OldRawConf)
end. end.
call_post_config_update(Handlers, OldConf, NewConf, UpdateReq) -> call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, UpdateReq, Result) ->
HandlerName = maps:get(?MOD, Handlers, undefined), HandlerName = maps:get(?MOD, Handlers, undefined),
case erlang:function_exported(HandlerName, post_config_update, 3) of case erlang:function_exported(HandlerName, post_config_update, 4) of
true -> HandlerName:post_config_update(UpdateReq, NewConf, OldConf); true ->
false -> ok case HandlerName:post_config_update(UpdateReq, NewConf, OldConf, AppEnvs) of
ok -> {ok, Result};
{ok, Result1} -> {ok, Result#{HandlerName => Result1}};
{error, Reason} -> {error, {post_config_update, HandlerName, Reason}}
end;
false -> {ok, Result}
end.
save_configs(ConfKeyPath, AppEnvs, CheckedConf, NewRawConf, OverrideConf, UpdateArgs) ->
case emqx_config:save_configs(AppEnvs, CheckedConf, NewRawConf, OverrideConf) of
ok -> {ok, return_change_result(ConfKeyPath, UpdateArgs)};
{error, Reason} -> {error, {save_configs, Reason}}
end. end.
%% The default callback of config handlers %% The default callback of config handlers
@ -164,18 +266,48 @@ call_post_config_update(Handlers, OldConf, NewConf, UpdateReq) ->
%% 2. either the old or the new config is not of map type %% 2. either the old or the new config is not of map type
%% the behaviour is to merge the new config into the old config if they are maps. %% the behaviour is to merge the new config into the old config if they are maps.
merge_to_old_config(UpdateReq, RawConf) when is_map(UpdateReq), is_map(RawConf) -> merge_to_old_config(UpdateReq, RawConf) when is_map(UpdateReq), is_map(RawConf) ->
maps:merge(RawConf, UpdateReq); {ok, maps:merge(RawConf, UpdateReq)};
merge_to_old_config(UpdateReq, _RawConf) -> merge_to_old_config(UpdateReq, _RawConf) ->
UpdateReq. {ok, UpdateReq}.
update_override_config(RawConf) -> remove_from_override_config(_BinKeyPath, #{persistent := false}) ->
undefined;
remove_from_override_config(BinKeyPath, _Opts) ->
OldConf = emqx_config:read_override_conf(),
emqx_map_lib:deep_remove(BinKeyPath, OldConf).
update_override_config(_RawConf, #{persistent := false}) ->
undefined;
update_override_config(RawConf, _Opts) ->
OldConf = emqx_config:read_override_conf(), OldConf = emqx_config:read_override_conf(),
maps:merge(OldConf, RawConf). maps:merge(OldConf, RawConf).
up_req(remove) -> '$remove'; up_req({remove, _Opts}) -> '$remove';
up_req({update, Req}) -> Req. up_req({{update, Req}, _Opts}) -> Req.
return_change_result(ConfKeyPath, {{update, _Req}, Opts}) ->
#{config => emqx_config:get(ConfKeyPath),
raw_config => return_rawconf(ConfKeyPath, Opts)};
return_change_result(_ConfKeyPath, {remove, _Opts}) ->
#{}.
return_rawconf(ConfKeyPath, #{rawconf_with_defaults := true}) ->
FullRawConf = emqx_config:fill_defaults(emqx_config:get_raw([])),
emqx_map_lib:deep_get(bin_path(ConfKeyPath), FullRawConf);
return_rawconf(ConfKeyPath, _) ->
emqx_config:get_raw(ConfKeyPath).
with_full_raw_confs(PartialConf) ->
maps:merge(emqx_config:get_raw([]), PartialConf).
bin_path(ConfKeyPath) -> [bin(Key) || Key <- ConfKeyPath]. bin_path(ConfKeyPath) -> [bin(Key) || Key <- ConfKeyPath].
bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
bin(B) when is_binary(B) -> B. bin(B) when is_binary(B) -> B.
safe_atom(Bin) when is_binary(Bin) ->
binary_to_existing_atom(Bin, latin1);
safe_atom(Str) when is_list(Str) ->
list_to_existing_atom(Str);
safe_atom(Atom) when is_atom(Atom) ->
Atom.
View File
@ -102,8 +102,8 @@
idle_timer :: maybe(reference()), idle_timer :: maybe(reference()),
%% Zone name %% Zone name
zone :: atom(), zone :: atom(),
%% Listener Name %% Listener Type and Name
listener :: atom() listener :: {Type::atom(), Name::atom()}
}). }).
-type(state() :: #state{}). -type(state() :: #state{}).
@ -135,7 +135,9 @@
, system_code_change/4 , system_code_change/4
]}). ]}).
-spec(start_link(esockd:transport(), esockd:socket(), emqx_channel:opts()) -spec(start_link(esockd:transport(),
esockd:socket() | {pid(), quicer:connection_handler()},
emqx_channel:opts())
-> {ok, pid()}). -> {ok, pid()}).
start_link(Transport, Socket, Options) -> start_link(Transport, Socket, Options) ->
Args = [self(), Transport, Socket, Options], Args = [self(), Transport, Socket, Options],
@ -463,15 +465,15 @@ handle_msg({Passive, _Sock}, State)
NState1 = check_oom(run_gc(InStats, NState)), NState1 = check_oom(run_gc(InStats, NState)),
handle_info(activate_socket, NState1); handle_info(activate_socket, NState1);
handle_msg(Deliver = {deliver, _Topic, _Msg}, #state{zone = Zone, handle_msg(Deliver = {deliver, _Topic, _Msg}, #state{
listener = Listener} = State) -> listener = {Type, Listener}} = State) ->
ActiveN = get_active_n(Zone, Listener), ActiveN = get_active_n(Type, Listener),
Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)], Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State); with_channel(handle_deliver, [Delivers], State);
%% Something sent %% Something sent
handle_msg({inet_reply, _Sock, ok}, State = #state{zone = Zone, listener = Listener}) -> handle_msg({inet_reply, _Sock, ok}, State = #state{listener = {Type, Listener}}) ->
case emqx_pd:get_counter(outgoing_pubs) > get_active_n(Zone, Listener) of case emqx_pd:get_counter(outgoing_pubs) > get_active_n(Type, Listener) of
true -> true ->
Pubs = emqx_pd:reset_counter(outgoing_pubs), Pubs = emqx_pd:reset_counter(outgoing_pubs),
Bytes = emqx_pd:reset_counter(outgoing_bytes), Bytes = emqx_pd:reset_counter(outgoing_bytes),
@ -820,8 +822,8 @@ activate_socket(State = #state{sockstate = closed}) ->
activate_socket(State = #state{sockstate = blocked}) -> activate_socket(State = #state{sockstate = blocked}) ->
{ok, State}; {ok, State};
activate_socket(State = #state{transport = Transport, socket = Socket, activate_socket(State = #state{transport = Transport, socket = Socket,
zone = Zone, listener = Listener}) -> listener = {Type, Listener}}) ->
ActiveN = get_active_n(Zone, Listener), ActiveN = get_active_n(Type, Listener),
case Transport:setopts(Socket, [{active, ActiveN}]) of case Transport:setopts(Socket, [{active, ActiveN}]) of
ok -> {ok, State#state{sockstate = running}}; ok -> {ok, State#state{sockstate = running}};
Error -> Error Error -> Error
@ -904,8 +906,6 @@ get_state(Pid) ->
maps:from_list(lists:zip(record_info(fields, state), maps:from_list(lists:zip(record_info(fields, state),
tl(tuple_to_list(State)))). tl(tuple_to_list(State)))).
get_active_n(Zone, Listener) -> get_active_n(quic, _Listener) -> 100;
case emqx_config:get([zones, Zone, listeners, Listener, type]) of get_active_n(Type, Listener) ->
quic -> 100; emqx_config:get_listener_conf(Type, Listener, [tcp, active_n]).
_ -> emqx_config:get_listener_conf(Zone, Listener, [tcp, active_n])
end.
View File
@ -160,4 +160,4 @@ start_timer(Zone) ->
start_timers() -> start_timers() ->
lists:foreach(fun({Zone, _ZoneConf}) -> lists:foreach(fun({Zone, _ZoneConf}) ->
start_timer(Zone) start_timer(Zone)
end, maps:to_list(emqx_config:get([zones], #{}))). end, maps:to_list(emqx:get_config([zones], #{}))).
View File
@ -100,14 +100,10 @@ parse(<<Type:4, Dup:1, QoS:2, Retain:1, Rest/binary>>,
StrictMode andalso validate_header(Type, Dup, QoS, Retain), StrictMode andalso validate_header(Type, Dup, QoS, Retain),
Header = #mqtt_packet_header{type = Type, Header = #mqtt_packet_header{type = Type,
dup = bool(Dup), dup = bool(Dup),
qos = QoS, qos = fixqos(Type, QoS),
retain = bool(Retain) retain = bool(Retain)
}, },
Header1 = case fixqos(Type, QoS) of parse_remaining_len(Rest, Header, Options);
QoS -> Header;
FixedQoS -> Header#mqtt_packet_header{qos = FixedQoS}
end,
parse_remaining_len(Rest, Header1, Options);
parse(Bin, {{len, #{hdr := Header, parse(Bin, {{len, #{hdr := Header,
len := {Multiplier, Length}} len := {Multiplier, Length}}
View File
@ -85,7 +85,7 @@ code_change(_OldVsn, State, _Extra) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
ensure_timer(State) -> ensure_timer(State) ->
case emqx_config:get([node, global_gc_interval]) of case emqx:get_config([node, global_gc_interval]) of
undefined -> State; undefined -> State;
Interval -> TRef = emqx_misc:start_timer(Interval, run), Interval -> TRef = emqx_misc:start_timer(Interval, run),
State#{timer := TRef} State#{timer := TRef}
View File
@ -34,6 +34,7 @@ init([]) ->
, child_spec(emqx_stats, worker) , child_spec(emqx_stats, worker)
, child_spec(emqx_metrics, worker) , child_spec(emqx_metrics, worker)
, child_spec(emqx_ctl, worker) , child_spec(emqx_ctl, worker)
, child_spec(emqx_logger, worker)
]}}. ]}}.
child_spec(M, Type) -> child_spec(M, Type) ->
View File
@ -26,6 +26,8 @@
, restart/0 , restart/0
, stop/0 , stop/0
, is_running/1 , is_running/1
, current_conns/2
, max_conns/2
]). ]).
-export([ start_listener/1 -export([ start_listener/1
@ -37,35 +39,39 @@
, has_enabled_listener_conf_by_type/1 , has_enabled_listener_conf_by_type/1
]). ]).
-export([ listener_id/2
, parse_listener_id/1
]).
-export([post_config_update/4]).
-define(CONF_KEY_PATH, [listeners]).
%% @doc List configured listeners. %% @doc List configured listeners.
-spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]). -spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]).
list() -> list() ->
[{listener_id(ZoneName, LName), LConf} || {ZoneName, LName, LConf} <- do_list()]. [{listener_id(Type, LName), LConf} || {Type, LName, LConf} <- do_list()].
do_list() -> do_list() ->
Zones = maps:to_list(emqx_config:get([zones], #{})), Listeners = maps:to_list(emqx:get_config([listeners], #{})),
lists:append([list(ZoneName, ZoneConf) || {ZoneName, ZoneConf} <- Zones]). lists:append([list(Type, maps:to_list(Conf)) || {Type, Conf} <- Listeners]).
list(ZoneName, ZoneConf) -> list(Type, Conf) ->
Listeners = maps:to_list(maps:get(listeners, ZoneConf, #{})), [begin
[ Running = is_running(Type, listener_id(Type, LName), LConf),
begin {Type, LName, maps:put(running, Running, LConf)}
Conf = merge_zone_and_listener_confs(ZoneConf, LConf), end || {LName, LConf} <- Conf, is_map(LConf)].
Running = is_running(listener_id(ZoneName, LName), Conf),
{ZoneName , LName, maps:put(running, Running, Conf)}
end
|| {LName, LConf} <- Listeners, is_map(LConf)].
-spec is_running(ListenerId :: atom()) -> boolean() | {error, no_found}. -spec is_running(ListenerId :: atom()) -> boolean() | {error, no_found}.
is_running(ListenerId) -> is_running(ListenerId) ->
case lists:filtermap(fun({_Zone, Id, #{running := IsRunning}}) -> case lists:filtermap(fun({_Type, Id, #{running := IsRunning}}) ->
Id =:= ListenerId andalso {true, IsRunning} Id =:= ListenerId andalso {true, IsRunning}
end, do_list()) of end, do_list()) of
[IsRunning] -> IsRunning; [IsRunning] -> IsRunning;
[] -> {error, not_found} [] -> {error, not_found}
end. end.
is_running(ListenerId, #{type := tcp, bind := ListenOn})-> is_running(Type, ListenerId, #{bind := ListenOn}) when Type =:= tcp; Type =:= ssl ->
try esockd:listener({ListenerId, ListenOn}) of try esockd:listener({ListenerId, ListenOn}) of
Pid when is_pid(Pid)-> Pid when is_pid(Pid)->
true true
@ -73,7 +79,7 @@ is_running(ListenerId, #{type := tcp, bind := ListenOn})->
false false
end; end;
is_running(ListenerId, #{type := ws})-> is_running(Type, ListenerId, _Conf) when Type =:= ws; Type =:= wss ->
try try
Info = ranch:info(ListenerId), Info = ranch:info(ListenerId),
proplists:get_value(status, Info) =:= running proplists:get_value(status, Info) =:= running
@ -81,13 +87,38 @@ is_running(ListenerId, #{type := ws})->
false false
end; end;
is_running(_ListenerId, #{type := quic})-> is_running(quic, _ListenerId, _Conf)->
%% TODO: quic support %% TODO: quic support
{error, no_found}. {error, no_found}.
current_conns(ID, ListenOn) ->
{Type, Name} = parse_listener_id(ID),
current_conns(Type, Name, ListenOn).
current_conns(Type, Name, ListenOn) when Type == tcp; Type == ssl ->
esockd:get_current_connections({listener_id(Type, Name), ListenOn});
current_conns(Type, Name, _ListenOn) when Type =:= ws; Type =:= wss ->
proplists:get_value(all_connections, ranch:info(listener_id(Type, Name)));
current_conns(_, _, _) ->
{error, not_support}.
max_conns(ID, ListenOn) ->
{Type, Name} = parse_listener_id(ID),
max_conns(Type, Name, ListenOn).
max_conns(Type, Name, ListenOn) when Type == tcp; Type == ssl ->
esockd:get_max_connections({listener_id(Type, Name), ListenOn});
max_conns(Type, Name, _ListenOn) when Type =:= ws; Type =:= wss ->
proplists:get_value(max_connections, ranch:info(listener_id(Type, Name)));
max_conns(_, _, _) ->
{error, not_support}.
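A usage sketch for the new connection-count helpers; the 'tcp:default' listener id and its bind address are assumptions and must match a running listener:

    %% ws/wss listeners are looked up via ranch:info/1 instead and ignore ListenOn.
    ListenOn = {{0, 0, 0, 0}, 1883},
    Current  = emqx_listeners:current_conns('tcp:default', ListenOn),
    Max      = emqx_listeners:max_conns('tcp:default', ListenOn).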
%% @doc Start all listeners. %% @doc Start all listeners.
-spec(start() -> ok). -spec(start() -> ok).
start() -> start() ->
%% The ?MODULE:start/0 will be called by emqx_app when emqx is started,
%% so we install the config handler here.
ok = emqx_config_handler:add_handler(?CONF_KEY_PATH, ?MODULE),
foreach_listeners(fun start_listener/3). foreach_listeners(fun start_listener/3).
-spec start_listener(atom()) -> ok | {error, term()}. -spec start_listener(atom()) -> ok | {error, term()}.
@ -95,23 +126,76 @@ start_listener(ListenerId) ->
apply_on_listener(ListenerId, fun start_listener/3). apply_on_listener(ListenerId, fun start_listener/3).
-spec start_listener(atom(), atom(), map()) -> ok | {error, term()}. -spec start_listener(atom(), atom(), map()) -> ok | {error, term()}.
start_listener(ZoneName, ListenerName, #{type := Type, bind := Bind} = Conf) -> start_listener(Type, ListenerName, #{bind := Bind} = Conf) ->
case do_start_listener(ZoneName, ListenerName, Conf) of case do_start_listener(Type, ListenerName, Conf) of
{ok, {skipped, Reason}} when Reason =:= listener_disabled; {ok, {skipped, Reason}} when Reason =:= listener_disabled;
Reason =:= quic_app_missing -> Reason =:= quic_app_missing ->
console_print("- Skip - starting ~s listener ~s on ~s ~n due to ~p", console_print("- Skip - starting listener ~s on ~s ~n due to ~p",
[Type, listener_id(ZoneName, ListenerName), format(Bind), Reason]); [listener_id(Type, ListenerName), format_addr(Bind), Reason]);
{ok, _} -> {ok, _} ->
console_print("Start ~s listener ~s on ~s successfully.~n", console_print("Listener ~s on ~s started.~n",
[Type, listener_id(ZoneName, ListenerName), format(Bind)]); [listener_id(Type, ListenerName), format_addr(Bind)]);
{error, {already_started, Pid}} -> {error, {already_started, Pid}} ->
{error, {already_started, Pid}}; {error, {already_started, Pid}};
{error, Reason} -> {error, Reason} ->
?ELOG("Failed to start ~s listener ~s on ~s: ~0p~n", ?ELOG("Failed to start listener ~s on ~s: ~0p~n",
[Type, listener_id(ZoneName, ListenerName), format(Bind), Reason]), [listener_id(Type, ListenerName), format_addr(Bind), Reason]),
error(Reason) error(Reason)
end. end.
%% @doc Restart all listeners
-spec(restart() -> ok).
restart() ->
foreach_listeners(fun restart_listener/3).
-spec(restart_listener(atom()) -> ok | {error, term()}).
restart_listener(ListenerId) ->
apply_on_listener(ListenerId, fun restart_listener/3).
-spec(restart_listener(atom(), atom(), map()) -> ok | {error, term()}).
restart_listener(Type, ListenerName, {OldConf, NewConf}) ->
restart_listener(Type, ListenerName, OldConf, NewConf);
restart_listener(Type, ListenerName, Conf) ->
restart_listener(Type, ListenerName, Conf, Conf).
restart_listener(Type, ListenerName, OldConf, NewConf) ->
case stop_listener(Type, ListenerName, OldConf) of
ok -> start_listener(Type, ListenerName, NewConf);
Error -> Error
end.
%% @doc Stop all listeners.
-spec(stop() -> ok).
stop() ->
%% The ?MODULE:stop/0 will be called by emqx_app when emqx is shutting down,
%% so we uninstall the config handler here.
_ = emqx_config_handler:remove_handler(?CONF_KEY_PATH),
foreach_listeners(fun stop_listener/3).
-spec(stop_listener(atom()) -> ok | {error, term()}).
stop_listener(ListenerId) ->
apply_on_listener(ListenerId, fun stop_listener/3).
stop_listener(Type, ListenerName, #{bind := Bind} = Conf) ->
case do_stop_listener(Type, ListenerName, Conf) of
ok ->
console_print("Listener ~s on ~s stopped.~n",
[listener_id(Type, ListenerName), format_addr(Bind)]),
ok;
{error, Reason} ->
?ELOG("Failed to stop listener ~s on ~s: ~0p~n",
[listener_id(Type, ListenerName), format_addr(Bind), Reason]),
{error, Reason}
end.
-spec(do_stop_listener(atom(), atom(), map()) -> ok | {error, term()}).
do_stop_listener(Type, ListenerName, #{bind := ListenOn}) when Type == tcp; Type == ssl ->
esockd:close(listener_id(Type, ListenerName), ListenOn);
do_stop_listener(Type, ListenerName, _Conf) when Type == ws; Type == wss ->
cowboy:stop_listener(listener_id(Type, ListenerName));
do_stop_listener(quic, ListenerName, _Conf) ->
quicer:stop_listener(listener_id(quic, ListenerName)).
-ifndef(TEST). -ifndef(TEST).
console_print(Fmt, Args) -> ?ULOG(Fmt, Args). console_print(Fmt, Args) -> ?ULOG(Fmt, Args).
-else. -else.
@ -121,79 +205,108 @@ console_print(_Fmt, _Args) -> ok.
%% Start MQTT/TCP listener %% Start MQTT/TCP listener
-spec(do_start_listener(atom(), atom(), map()) -spec(do_start_listener(atom(), atom(), map())
-> {ok, pid() | {skipped, atom()}} | {error, term()}). -> {ok, pid() | {skipped, atom()}} | {error, term()}).
do_start_listener(_ZoneName, _ListenerName, #{enabled := false}) -> do_start_listener(_Type, _ListenerName, #{enabled := false}) ->
{ok, {skipped, listener_disabled}}; {ok, {skipped, listener_disabled}};
do_start_listener(ZoneName, ListenerName, #{type := tcp, bind := ListenOn} = Opts) -> do_start_listener(Type, ListenerName, #{bind := ListenOn} = Opts)
esockd:open(listener_id(ZoneName, ListenerName), ListenOn, merge_default(esockd_opts(Opts)), when Type == tcp; Type == ssl ->
esockd:open(listener_id(Type, ListenerName), ListenOn, merge_default(esockd_opts(Type, Opts)),
{emqx_connection, start_link, {emqx_connection, start_link,
[#{zone => ZoneName, listener => ListenerName}]}); [#{listener => {Type, ListenerName},
zone => zone(Opts)}]});
%% Start MQTT/WS listener %% Start MQTT/WS listener
do_start_listener(ZoneName, ListenerName, #{type := ws, bind := ListenOn} = Opts) -> do_start_listener(Type, ListenerName, #{bind := ListenOn} = Opts)
Id = listener_id(ZoneName, ListenerName), when Type == ws; Type == wss ->
RanchOpts = ranch_opts(ListenOn, Opts), Id = listener_id(Type, ListenerName),
WsOpts = ws_opts(ZoneName, ListenerName, Opts), RanchOpts = ranch_opts(Type, ListenOn, Opts),
case is_ssl(Opts) of WsOpts = ws_opts(Type, ListenerName, Opts),
false -> case Type of
cowboy:start_clear(Id, RanchOpts, WsOpts); ws -> cowboy:start_clear(Id, RanchOpts, WsOpts);
true -> wss -> cowboy:start_tls(Id, RanchOpts, WsOpts)
cowboy:start_tls(Id, RanchOpts, WsOpts)
end; end;
%% Start MQTT/QUIC listener %% Start MQTT/QUIC listener
do_start_listener(ZoneName, ListenerName, #{type := quic, bind := ListenOn} = Opts) -> do_start_listener(quic, ListenerName, #{bind := ListenOn} = Opts) ->
case [ A || {quicer, _, _} = A<-application:which_applications() ] of case [ A || {quicer, _, _} = A<-application:which_applications() ] of
[_] -> [_] ->
%% @fixme unsure why we need to reopen the lib and reopen the config.
quicer_nif:open_lib(),
quicer_nif:reg_open(),
DefAcceptors = erlang:system_info(schedulers_online) * 8, DefAcceptors = erlang:system_info(schedulers_online) * 8,
ListenOpts = [ {cert, maps:get(certfile, Opts)} ListenOpts = [ {cert, maps:get(certfile, Opts)}
, {key, maps:get(keyfile, Opts)} , {key, maps:get(keyfile, Opts)}
, {alpn, ["mqtt"]} , {alpn, ["mqtt"]}
, {conn_acceptors, maps:get(acceptors, Opts, DefAcceptors)} , {conn_acceptors, lists:max([DefAcceptors, maps:get(acceptors, Opts, 0)])}
, {idle_timeout_ms, emqx_config:get_zone_conf(ZoneName, [mqtt, idle_timeout])} , {idle_timeout_ms, lists:max([
emqx_config:get_zone_conf(zone(Opts), [mqtt, idle_timeout]) * 3
, timer:seconds(maps:get(idle_timeout, Opts))]
)}
], ],
ConnectionOpts = #{conn_callback => emqx_quic_connection ConnectionOpts = #{ conn_callback => emqx_quic_connection
, peer_unidi_stream_count => 1 , peer_unidi_stream_count => 1
, peer_bidi_stream_count => 10 , peer_bidi_stream_count => 10
, zone => ZoneName , zone => zone(Opts)
, listener => ListenerName , listener => {quic, ListenerName}
}, },
StreamOpts = [], StreamOpts = [{stream_callback, emqx_quic_stream}],
quicer:start_listener(listener_id(ZoneName, ListenerName), quicer:start_listener(listener_id(quic, ListenerName),
port(ListenOn), {ListenOpts, ConnectionOpts, StreamOpts}); port(ListenOn), {ListenOpts, ConnectionOpts, StreamOpts});
[] -> [] ->
{ok, {skipped, quic_app_missing}} {ok, {skipped, quic_app_missing}}
end. end.
esockd_opts(Opts0) -> delete_authentication(Type, ListenerName, _Conf) ->
emqx_authentication:delete_chain(atom_to_binary(listener_id(Type, ListenerName))).
%% Update the listeners at runtime
post_config_update(_Req, NewListeners, OldListeners, _AppEnvs) ->
#{added := Added, removed := Removed, changed := Updated}
= diff_listeners(NewListeners, OldListeners),
perform_listener_changes(fun stop_listener/3, Removed),
perform_listener_changes(fun delete_authentication/3, Removed),
perform_listener_changes(fun start_listener/3, Added),
perform_listener_changes(fun restart_listener/3, Updated).
perform_listener_changes(Action, MapConfs) ->
lists:foreach(fun
({Id, Conf}) ->
{Type, Name} = parse_listener_id(Id),
Action(Type, Name, Conf)
end, maps:to_list(MapConfs)).
diff_listeners(NewListeners, OldListeners) ->
emqx_map_lib:diff_maps(flatten_listeners(NewListeners), flatten_listeners(OldListeners)).
flatten_listeners(Conf0) ->
maps:from_list(
lists:append([do_flatten_listeners(Type, Conf)
|| {Type, Conf} <- maps:to_list(Conf0)])).
do_flatten_listeners(Type, Conf0) ->
[{listener_id(Type, Name), maps:remove(authentication, Conf)} || {Name, Conf} <- maps:to_list(Conf0)].
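The update flow above reduces to a diff over flattened listener ids: removed ids are stopped (and their authentication chains deleted), added ids are started, and changed ids are restarted with {OldConf, NewConf}. A sketch of the diff step using the exported emqx_map_lib:diff_maps/2, with invented ids and conf maps:

    OldFlat = #{'tcp:default' => #{bind => 1883}, 'ssl:default' => #{bind => 8883}},
    NewFlat = #{'tcp:default' => #{bind => 11883}, 'ws:default' => #{bind => 8083}},
    #{added     := #{'ws:default'  := _},
      removed   := #{'ssl:default' := _},
      changed   := #{'tcp:default' := {#{bind := 1883}, #{bind := 11883}}},
      identical := #{}} = emqx_map_lib:diff_maps(NewFlat, OldFlat).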
esockd_opts(Type, Opts0) ->
Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0), Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0),
Opts2 = case emqx_map_lib:deep_get([rate_limit, max_conn_rate], Opts0) of Opts2 = case emqx_config:get_zone_conf(zone(Opts0), [rate_limit, max_conn_rate]) of
infinity -> Opts1; infinity -> Opts1;
Rate -> Opts1#{max_conn_rate => Rate} Rate -> Opts1#{max_conn_rate => Rate}
end, end,
Opts3 = Opts2#{access_rules => esockd_access_rules(maps:get(access_rules, Opts0, []))}, Opts3 = Opts2#{access_rules => esockd_access_rules(maps:get(access_rules, Opts0, []))},
maps:to_list(case is_ssl(Opts0) of maps:to_list(case Type of
false -> tcp -> Opts3#{tcp_options => tcp_opts(Opts0)};
Opts3#{tcp_options => tcp_opts(Opts0)}; ssl -> Opts3#{ssl_options => ssl_opts(Opts0), tcp_options => tcp_opts(Opts0)}
true ->
Opts3#{ssl_options => ssl_opts(Opts0), tcp_options => tcp_opts(Opts0)}
end). end).
ws_opts(ZoneName, ListenerName, Opts) -> ws_opts(Type, ListenerName, Opts) ->
WsPaths = [{maps:get(mqtt_path, Opts, "/mqtt"), emqx_ws_connection, WsPaths = [{maps:get(mqtt_path, Opts, "/mqtt"), emqx_ws_connection,
#{zone => ZoneName, listener => ListenerName}}], #{zone => zone(Opts), listener => {Type, ListenerName}}}],
Dispatch = cowboy_router:compile([{'_', WsPaths}]), Dispatch = cowboy_router:compile([{'_', WsPaths}]),
ProxyProto = maps:get(proxy_protocol, Opts, false), ProxyProto = maps:get(proxy_protocol, Opts, false),
#{env => #{dispatch => Dispatch}, proxy_header => ProxyProto}. #{env => #{dispatch => Dispatch}, proxy_header => ProxyProto}.
ranch_opts(ListenOn, Opts) -> ranch_opts(Type, ListenOn, Opts) ->
NumAcceptors = maps:get(acceptors, Opts, 4), NumAcceptors = maps:get(acceptors, Opts, 4),
MaxConnections = maps:get(max_connections, Opts, 1024), MaxConnections = maps:get(max_connections, Opts, 1024),
SocketOpts = case is_ssl(Opts) of SocketOpts = case Type of
true -> tcp_opts(Opts) ++ proplists:delete(handshake_timeout, ssl_opts(Opts)); wss -> tcp_opts(Opts) ++ proplists:delete(handshake_timeout, ssl_opts(Opts));
false -> tcp_opts(Opts) ws -> tcp_opts(Opts)
end, end,
#{num_acceptors => NumAcceptors, #{num_acceptors => NumAcceptors,
max_connections => MaxConnections, max_connections => MaxConnections,
@ -217,39 +330,6 @@ esockd_access_rules(StrRules) ->
end, end,
[Access(R) || R <- StrRules]. [Access(R) || R <- StrRules].
%% @doc Restart all listeners
-spec(restart() -> ok).
restart() ->
foreach_listeners(fun restart_listener/3).
-spec(restart_listener(atom()) -> ok | {error, term()}).
restart_listener(ListenerId) ->
apply_on_listener(ListenerId, fun restart_listener/3).
-spec(restart_listener(atom(), atom(), map()) -> ok | {error, term()}).
restart_listener(ZoneName, ListenerName, Conf) ->
case stop_listener(ZoneName, ListenerName, Conf) of
ok -> start_listener(ZoneName, ListenerName, Conf);
Error -> Error
end.
%% @doc Stop all listeners.
-spec(stop() -> ok).
stop() ->
foreach_listeners(fun stop_listener/3).
-spec(stop_listener(atom()) -> ok | {error, term()}).
stop_listener(ListenerId) ->
apply_on_listener(ListenerId, fun stop_listener/3).
-spec(stop_listener(atom(), atom(), map()) -> ok | {error, term()}).
stop_listener(ZoneName, ListenerName, #{type := tcp, bind := ListenOn}) ->
esockd:close(listener_id(ZoneName, ListenerName), ListenOn);
stop_listener(ZoneName, ListenerName, #{type := ws}) ->
cowboy:stop_listener(listener_id(ZoneName, ListenerName));
stop_listener(ZoneName, ListenerName, #{type := quic}) ->
quicer:stop_listener(listener_id(ZoneName, ListenerName)).
merge_default(Options) -> merge_default(Options) ->
case lists:keytake(tcp_options, 1, Options) of case lists:keytake(tcp_options, 1, Options) of
{value, {tcp_options, TcpOpts}, Options1} -> {value, {tcp_options, TcpOpts}, Options1} ->
@ -258,24 +338,27 @@ merge_default(Options) ->
[{tcp_options, ?MQTT_SOCKOPTS} | Options] [{tcp_options, ?MQTT_SOCKOPTS} | Options]
end. end.
format(Port) when is_integer(Port) -> format_addr(Port) when is_integer(Port) ->
io_lib:format("0.0.0.0:~w", [Port]); io_lib:format("0.0.0.0:~w", [Port]);
format({Addr, Port}) when is_list(Addr) -> format_addr({Addr, Port}) when is_list(Addr) ->
io_lib:format("~s:~w", [Addr, Port]); io_lib:format("~s:~w", [Addr, Port]);
format({Addr, Port}) when is_tuple(Addr) -> format_addr({Addr, Port}) when is_tuple(Addr) ->
io_lib:format("~s:~w", [inet:ntoa(Addr), Port]). io_lib:format("~s:~w", [inet:ntoa(Addr), Port]).
listener_id(ZoneName, ListenerName) -> listener_id(Type, ListenerName) ->
list_to_atom(lists:append([atom_to_list(ZoneName), ":", atom_to_list(ListenerName)])). list_to_atom(lists:append([str(Type), ":", str(ListenerName)])).
decode_listener_id(Id) -> parse_listener_id(Id) ->
try try
[Zone, Listen] = string:split(atom_to_list(Id), ":", leading), [Type, Name] = string:split(str(Id), ":", leading),
{list_to_existing_atom(Zone), list_to_existing_atom(Listen)} {list_to_existing_atom(Type), list_to_atom(Name)}
catch catch
_ : _ -> error({invalid_listener_id, Id}) _ : _ -> error({invalid_listener_id, Id})
end. end.
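Listener ids are plain "type:name" atoms, so the two helpers above are inverses for known types; the default name is an assumption:

    'ssl:default' = emqx_listeners:listener_id(ssl, default),
    {ssl, default} = emqx_listeners:parse_listener_id('ssl:default').
    %% The type part must be an existing atom, so parse_listener_id/1 raises
    %% {invalid_listener_id, Id} for an unknown type such as 'nope:default'.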
zone(Opts) ->
maps:get(zone, Opts, undefined).
ssl_opts(Opts) -> ssl_opts(Opts) ->
maps:to_list( maps:to_list(
emqx_tls_lib:drop_tls13_for_old_otp( emqx_tls_lib:drop_tls13_for_old_otp(
@ -287,32 +370,28 @@ tcp_opts(Opts) ->
maps:without([active_n], maps:without([active_n],
maps:get(tcp, Opts, #{}))). maps:get(tcp, Opts, #{}))).
is_ssl(Opts) ->
emqx_map_lib:deep_get([ssl, enable], Opts, false).
foreach_listeners(Do) -> foreach_listeners(Do) ->
lists:foreach( lists:foreach(
fun({ZoneName, LName, LConf}) -> fun({Type, LName, LConf}) ->
Do(ZoneName, LName, LConf) Do(Type, LName, LConf)
end, do_list()). end, do_list()).
has_enabled_listener_conf_by_type(Type) -> has_enabled_listener_conf_by_type(Type) ->
lists:any( lists:any(
fun({_Zone, _LName, LConf}) when is_map(LConf) -> fun({Type0, _LName, LConf}) when is_map(LConf) ->
Type =:= maps:get(type, LConf) andalso Type =:= Type0 andalso maps:get(enabled, LConf, true)
maps:get(enabled, LConf, true)
end, do_list()). end, do_list()).
%% merge the configs in zone and listeners in a manner that
%% all config entries in the listener are prior to the ones in the zone.
merge_zone_and_listener_confs(ZoneConf, ListenerConf) ->
ConfsInZonesOnly = [listeners, overall_max_connections],
BaseConf = maps:without(ConfsInZonesOnly, ZoneConf),
emqx_map_lib:deep_merge(BaseConf, ListenerConf).
apply_on_listener(ListenerId, Do) -> apply_on_listener(ListenerId, Do) ->
{ZoneName, ListenerName} = decode_listener_id(ListenerId), {Type, ListenerName} = parse_listener_id(ListenerId),
case emqx_config:find_listener_conf(ZoneName, ListenerName, []) of case emqx_config:find_listener_conf(Type, ListenerName, []) of
{not_found, _, _} -> error({listener_config_not_found, ZoneName, ListenerName}); {not_found, _, _} -> error({listener_config_not_found, Type, ListenerName});
{ok, Conf} -> Do(ZoneName, ListenerName, Conf) {ok, Conf} -> Do(Type, ListenerName, Conf)
end. end.
str(A) when is_atom(A) ->
atom_to_list(A);
str(B) when is_binary(B) ->
binary_to_list(B);
str(S) when is_list(S) ->
S.
View File
@ -18,6 +18,19 @@
-compile({no_auto_import, [error/1]}). -compile({no_auto_import, [error/1]}).
-behaviour(gen_server).
-behaviour(emqx_config_handler).
%% gen_server callbacks
-export([ start_link/0
, init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
%% Logs %% Logs
-export([ debug/1 -export([ debug/1
, debug/2 , debug/2
@ -47,6 +60,7 @@
]). ]).
-export([ get_primary_log_level/0 -export([ get_primary_log_level/0
, tune_primary_log_level/0
, get_log_handlers/0 , get_log_handlers/0
, get_log_handlers/1 , get_log_handlers/1
, get_log_handler/1 , get_log_handler/1
@ -56,6 +70,8 @@
, stop_log_handler/1 , stop_log_handler/1
]). ]).
-export([post_config_update/4]).
-type(peername_str() :: list()). -type(peername_str() :: list()).
-type(logger_dst() :: file:filename() | console | unknown). -type(logger_dst() :: file:filename() | console | unknown).
-type(logger_handler_info() :: #{ -type(logger_handler_info() :: #{
@ -66,6 +82,49 @@
}). }).
-define(stopped_handlers, {?MODULE, stopped_handlers}). -define(stopped_handlers, {?MODULE, stopped_handlers}).
-define(CONF_PATH, [log]).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------
init([]) ->
ok = emqx_config_handler:add_handler(?CONF_PATH, ?MODULE),
{ok, #{}}.
handle_call({update_config, AppEnvs}, _From, State) ->
OldEnvs = application:get_env(kernel, logger, []),
NewEnvs = proplists:get_value(logger, proplists:get_value(kernel, AppEnvs, []), []),
ok = application:set_env(kernel, logger, NewEnvs),
_ = [logger:remove_handler(HandlerId) || {handler, HandlerId, _Mod, _Conf} <- OldEnvs],
_ = [logger:add_handler(HandlerId, Mod, Conf) || {handler, HandlerId, Mod, Conf} <- NewEnvs],
ok = tune_primary_log_level(),
{reply, ok, State};
handle_call(_Req, _From, State) ->
{reply, ignored, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok = emqx_config_handler:remove_handler(?CONF_PATH),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%--------------------------------------------------------------------
%% emqx_config_handler callbacks
%%--------------------------------------------------------------------
post_config_update(_Req, _NewConf, _OldConf, AppEnvs) ->
gen_server:call(?MODULE, {update_config, AppEnvs}, 5000).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% APIs %% APIs
@ -159,6 +218,16 @@ get_primary_log_level() ->
#{level := Level} = logger:get_primary_config(), #{level := Level} = logger:get_primary_config(),
Level. Level.
-spec tune_primary_log_level() -> ok.
tune_primary_log_level() ->
LowestLevel = lists:foldl(fun(#{level := Level}, OldLevel) ->
case logger:compare_levels(Level, OldLevel) of
lt -> Level;
_ -> OldLevel
end
end, get_primary_log_level(), get_log_handlers()),
set_primary_log_level(LowestLevel).
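tune_primary_log_level/0 lowers the primary level to the most verbose level found among the installed handlers, so a per-handler debug setting actually produces events. A small check, over whatever handlers the node happens to have:

    ok = emqx_logger:tune_primary_log_level(),
    Primary = emqx_logger:get_primary_log_level(),
    %% After tuning, the primary level is never stricter than any handler level.
    true = lists:all(fun(#{level := L}) -> logger:compare_levels(Primary, L) =/= gt end,
                     emqx_logger:get_log_handlers()).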
-spec(set_primary_log_level(logger:level()) -> ok | {error, term()}). -spec(set_primary_log_level(logger:level()) -> ok | {error, term()}).
set_primary_log_level(Level) -> set_primary_log_level(Level) ->
logger:set_primary_config(level, Level). logger:set_primary_config(level, Level).
View File
@ -23,17 +23,17 @@ check_config(X) -> logger_formatter:check_config(X).
format(#{msg := {report, Report}, meta := Meta} = Event, Config) when is_map(Report) -> format(#{msg := {report, Report}, meta := Meta} = Event, Config) when is_map(Report) ->
logger_formatter:format(Event#{msg := {report, enrich(Report, Meta)}}, Config); logger_formatter:format(Event#{msg := {report, enrich(Report, Meta)}}, Config);
format(#{msg := {Fmt, Args}, meta := Meta} = Event, Config) when is_list(Fmt) -> format(#{msg := Msg, meta := Meta} = Event, Config) ->
{NewFmt, NewArgs} = enrich_fmt(Fmt, Args, Meta), NewMsg = enrich_fmt(Msg, Meta),
logger_formatter:format(Event#{msg := {NewFmt, NewArgs}}, Config). logger_formatter:format(Event#{msg := NewMsg}, Config).
enrich(Report, #{mfa := Mfa, line := Line}) -> enrich(Report, #{mfa := Mfa, line := Line}) ->
Report#{mfa => mfa(Mfa), line => Line}; Report#{mfa => mfa(Mfa), line => Line};
enrich(Report, _) -> Report. enrich(Report, _) -> Report.
enrich_fmt(Fmt, Args, #{mfa := Mfa, line := Line}) -> enrich_fmt({Fmt, Args}, #{mfa := Mfa, line := Line}) when is_list(Fmt) ->
{Fmt ++ " mfa: ~s line: ~w", Args ++ [mfa(Mfa), Line]}; {Fmt ++ " mfa: ~s line: ~w", Args ++ [mfa(Mfa), Line]};
enrich_fmt(Fmt, Args, _) -> enrich_fmt(Msg, _) ->
{Fmt, Args}. Msg.
mfa({M, F, A}) -> atom_to_list(M) ++ ":" ++ atom_to_list(F) ++ "/" ++ integer_to_list(A). mfa({M, F, A}) -> atom_to_list(M) ++ ":" ++ atom_to_list(F) ++ "/" ++ integer_to_list(A).
View File
@ -23,11 +23,17 @@
, deep_merge/2 , deep_merge/2
, safe_atom_key_map/1 , safe_atom_key_map/1
, unsafe_atom_key_map/1 , unsafe_atom_key_map/1
, jsonable_map/1
, jsonable_map/2
, binary_string/1
, deep_convert/3
, diff_maps/2
]). ]).
-export_type([config_key/0, config_key_path/0]). -export_type([config_key/0, config_key_path/0]).
-type config_key() :: atom() | binary(). -type config_key() :: atom() | binary().
-type config_key_path() :: [config_key()]. -type config_key_path() :: [config_key()].
-type convert_fun() :: fun((...) -> {K1::any(), V1::any()} | drop).
%%----------------------------------------------------------------- %%-----------------------------------------------------------------
-spec deep_get(config_key_path(), map()) -> term(). -spec deep_get(config_key_path(), map()) -> term().
@ -59,13 +65,11 @@ deep_find(_KeyPath, Data) ->
{not_found, _KeyPath, Data}. {not_found, _KeyPath, Data}.
-spec deep_put(config_key_path(), map(), term()) -> map(). -spec deep_put(config_key_path(), map(), term()) -> map().
deep_put([], Map, Config) when is_map(Map) -> deep_put([], _Map, Data) ->
Config; Data;
deep_put([], _Map, Config) -> %% not map, replace it deep_put([Key | KeyPath], Map, Data) ->
Config; SubMap = maps:get(Key, Map, #{}),
deep_put([Key | KeyPath], Map, Config) -> Map#{Key => deep_put(KeyPath, SubMap, Data)}.
SubMap = deep_put(KeyPath, maps:get(Key, Map, #{}), Config),
Map#{Key => SubMap}.
-spec deep_remove(config_key_path(), map()) -> map(). -spec deep_remove(config_key_path(), map()) -> map().
deep_remove([], Map) -> deep_remove([], Map) ->
@ -97,21 +101,72 @@ deep_merge(BaseMap, NewMap) ->
end, #{}, BaseMap), end, #{}, BaseMap),
maps:merge(MergedBase, maps:with(NewKeys, NewMap)). maps:merge(MergedBase, maps:with(NewKeys, NewMap)).
-spec deep_convert(map(), convert_fun(), Args::list()) -> map().
deep_convert(Map, ConvFun, Args) when is_map(Map) ->
maps:fold(fun(K, V, Acc) ->
case apply(ConvFun, [K, deep_convert(V, ConvFun, Args) | Args]) of
drop -> Acc;
{K1, V1} -> Acc#{K1 => V1}
end
end, #{}, Map);
deep_convert(ListV, ConvFun, Args) when is_list(ListV) ->
[deep_convert(V, ConvFun, Args) || V <- ListV];
deep_convert(Val, _, _Args) -> Val.
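%% Illustrative sketch of the convert_fun() contract assumed above: return
%% {NewKey, NewValue} to keep a pair, or the atom drop to discard it.
%%   deep_convert(#{<<"keep">> => 1, <<"skip">> => 2},
%%                fun(<<"skip">>, _V) -> drop;
%%                   (K, V) -> {K, V}
%%                end, []).
%%   %%=> #{<<"keep">> => 1}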
-spec unsafe_atom_key_map(#{binary() | atom() => any()}) -> #{atom() => any()}.
unsafe_atom_key_map(Map) -> unsafe_atom_key_map(Map) ->
covert_keys_to_atom(Map, fun(K) -> binary_to_atom(K, utf8) end). covert_keys_to_atom(Map, fun(K) -> binary_to_atom(K, utf8) end).
-spec safe_atom_key_map(#{binary() | atom() => any()}) -> #{atom() => any()}.
safe_atom_key_map(Map) -> safe_atom_key_map(Map) ->
covert_keys_to_atom(Map, fun(K) -> binary_to_existing_atom(K, utf8) end). covert_keys_to_atom(Map, fun(K) -> binary_to_existing_atom(K, utf8) end).
-spec jsonable_map(map() | list()) -> map() | list().
jsonable_map(Map) ->
jsonable_map(Map, fun(K, V) -> {K, V} end).
jsonable_map(Map, JsonableFun) ->
deep_convert(Map, fun binary_string_kv/3, [JsonableFun]).
-spec diff_maps(map(), map()) ->
#{added := map(), identical := map(), removed := map(),
changed := #{any() => {OldValue::any(), NewValue::any()}}}.
diff_maps(NewMap, OldMap) ->
InitR = #{identical => #{}, changed => #{}, removed => #{}},
{Result, RemInNew} =
lists:foldl(fun({OldK, OldV}, {Result0 = #{identical := I, changed := U, removed := D},
RemNewMap}) ->
Result1 = case maps:find(OldK, NewMap) of
error ->
Result0#{removed => D#{OldK => OldV}};
{ok, NewV} when NewV == OldV ->
Result0#{identical => I#{OldK => OldV}};
{ok, NewV} ->
Result0#{changed => U#{OldK => {OldV, NewV}}}
end,
{Result1, maps:remove(OldK, RemNewMap)}
end, {InitR, NewMap}, maps:to_list(OldMap)),
Result#{added => RemInNew}.
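%% Illustrative usage (the enclosing module is assumed to be emqx_map_lib):
%%   diff_maps(#{a => 1, b => 2, d => 4},   %% new
%%             #{a => 1, b => 3, c => 0}).  %% old
%%   %%=> #{added => #{d => 4}, identical => #{a => 1},
%%   %%     changed => #{b => {3, 2}}, removed => #{c => 0}}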
binary_string_kv(K, V, JsonableFun) ->
case JsonableFun(K, V) of
drop -> drop;
{K1, V1} -> {binary_string(K1), binary_string(V1)}
end.
binary_string([]) -> [];
binary_string(Val) when is_list(Val) ->
case io_lib:printable_unicode_list(Val) of
true -> unicode:characters_to_binary(Val);
false -> [binary_string(V) || V <- Val]
end;
binary_string(Val) ->
Val.
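%% Illustrative examples: printable strings become binaries, other lists are
%% converted element by element, and non-list terms pass through unchanged.
%%   binary_string("emqx").      %%=> <<"emqx">>
%%   binary_string(["a", "b"]).  %%=> [<<"a">>, <<"b">>]
%%   binary_string(10).          %%=> 10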
%%--------------------------------------------------------------------------- %%---------------------------------------------------------------------------
covert_keys_to_atom(BinKeyMap, Conv) when is_map(BinKeyMap) -> covert_keys_to_atom(BinKeyMap, Conv) ->
maps:fold( deep_convert(BinKeyMap, fun
fun(K, V, Acc) when is_binary(K) -> (K, V) when is_atom(K) -> {K, V};
Acc#{Conv(K) => covert_keys_to_atom(V, Conv)}; (K, V) when is_binary(K) -> {Conv(K), V}
(K, V, Acc) when is_atom(K) -> end, []).
%% richmap keys
Acc#{K => covert_keys_to_atom(V, Conv)}
end, #{}, BinKeyMap);
covert_keys_to_atom(ListV, Conv) when is_list(ListV) ->
[covert_keys_to_atom(V, Conv) || V <- ListV];
covert_keys_to_atom(Val, _) -> Val.
View File
@ -22,8 +22,6 @@
-include("logger.hrl"). -include("logger.hrl").
-include("types.hrl"). -include("types.hrl").
-include("emqx_mqtt.hrl"). -include("emqx_mqtt.hrl").
-include("emqx.hrl").
-export([ start_link/0 -export([ start_link/0
, stop/0 , stop/0
View File
@ -76,7 +76,7 @@ set_procmem_high_watermark(Float) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init([]) -> init([]) ->
Opts = emqx_config:get([sysmon, os]), Opts = emqx:get_config([sysmon, os]),
set_mem_check_interval(maps:get(mem_check_interval, Opts)), set_mem_check_interval(maps:get(mem_check_interval, Opts)),
set_sysmem_high_watermark(maps:get(sysmem_high_watermark, Opts)), set_sysmem_high_watermark(maps:get(sysmem_high_watermark, Opts)),
set_procmem_high_watermark(maps:get(procmem_high_watermark, Opts)), set_procmem_high_watermark(maps:get(procmem_high_watermark, Opts)),
@ -91,8 +91,8 @@ handle_cast(Msg, State) ->
{noreply, State}. {noreply, State}.
handle_info({timeout, _Timer, check}, State) -> handle_info({timeout, _Timer, check}, State) ->
CPUHighWatermark = emqx_config:get([sysmon, os, cpu_high_watermark]) * 100, CPUHighWatermark = emqx:get_config([sysmon, os, cpu_high_watermark]) * 100,
CPULowWatermark = emqx_config:get([sysmon, os, cpu_low_watermark]) * 100, CPULowWatermark = emqx:get_config([sysmon, os, cpu_low_watermark]) * 100,
_ = case emqx_vm:cpu_util() of %% TODO: should be improved? _ = case emqx_vm:cpu_util() of %% TODO: should be improved?
0 -> ok; 0 -> ok;
Busy when Busy >= CPUHighWatermark -> Busy when Busy >= CPUHighWatermark ->
@ -123,7 +123,7 @@ code_change(_OldVsn, State, _Extra) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
start_check_timer() -> start_check_timer() ->
Interval = emqx_config:get([sysmon, os, cpu_check_interval]), Interval = emqx:get_config([sysmon, os, cpu_check_interval]),
case erlang:system_info(system_architecture) of case erlang:system_info(system_architecture) of
"x86_64-pc-linux-musl" -> ok; "x86_64-pc-linux-musl" -> ok;
_ -> emqx_misc:start_timer(Interval, check) _ -> emqx_misc:start_timer(Interval, check)
View File
@ -43,7 +43,7 @@
%% @doc Load all plugins when the broker started. %% @doc Load all plugins when the broker started.
-spec(load() -> ok | ignore | {error, term()}). -spec(load() -> ok | ignore | {error, term()}).
load() -> load() ->
ok = load_ext_plugins(emqx_config:get([plugins, expand_plugins_dir], undefined)). ok = load_ext_plugins(emqx:get_config([plugins, expand_plugins_dir], undefined)).
%% @doc Load a Plugin %% @doc Load a Plugin
-spec(load(atom()) -> ok | {error, term()}). -spec(load(atom()) -> ok | {error, term()}).
View File
@ -17,8 +17,41 @@
-module(emqx_quic_connection). -module(emqx_quic_connection).
%% Callbacks %% Callbacks
-export([ new_conn/2 -export([ init/1
, new_conn/2
, connected/2
, shutdown/2
]). ]).
new_conn(Conn, {_L, COpts, _S}) when is_map(COpts) -> -type cb_state() :: map() | proplists:proplist().
emqx_connection:start_link(emqx_quic_stream, Conn, COpts).
-spec init(cb_state()) -> cb_state().
init(ConnOpts) when is_list(ConnOpts) ->
init(maps:from_list(ConnOpts));
init(ConnOpts) when is_map(ConnOpts) ->
ConnOpts.
-spec new_conn(quicer:connection_handler(), cb_state()) -> {ok, cb_state()} | {error, any()}.
new_conn(Conn, S) ->
process_flag(trap_exit, true),
{ok, Pid} = emqx_connection:start_link(emqx_quic_stream, {self(), Conn}, S),
receive
{Pid, stream_acceptor_ready} ->
ok = quicer:async_handshake(Conn),
{ok, S};
{'EXIT', Pid, _Reason} ->
{error, stream_accept_error}
end.
-spec connected(quicer:connection_handler(), cb_state()) -> {ok, cb_state()} | {error, any()}.
connected(Conn, #{slow_start := false} = S) ->
{ok, _Pid} = emqx_connection:start_link(emqx_quic_stream, Conn, S),
{ok, S};
connected(_Conn, S) ->
{ok, S}.
-spec shutdown(quicer:connection_handler(), cb_state()) -> {ok, cb_state()} | {error, any()}.
shutdown(Conn, S) ->
quicer:async_close_connection(Conn),
{ok, S}.
View File
@ -31,8 +31,16 @@
, peercert/1 , peercert/1
]). ]).
wait(Conn) -> wait({ConnOwner, Conn}) ->
quicer:accept_stream(Conn, []). {ok, Conn} = quicer:async_accept_stream(Conn, []),
ConnOwner ! {self(), stream_acceptor_ready},
receive
%% from msquic
{quic, new_stream, Stream} ->
{ok, Stream};
{'EXIT', ConnOwner, _Reason} ->
{error, enotconn}
end.
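%% Sketch of the accept/handshake ordering implied by new_conn/2 above and this
%% wait/1 (editorial summary, not part of the original change):
%%   1. emqx_quic_connection:new_conn/2 links an emqx_connection owning {Owner, Conn};
%%   2. wait/1 posts an async stream accept and signals {Pid, stream_acceptor_ready};
%%   3. only then does the owner call quicer:async_handshake/1, so the first stream
%%      cannot arrive before an acceptor is waiting for it.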
type(_) -> type(_) ->
quic. quic.
@ -44,6 +52,7 @@ sockname(S) ->
quicer:sockname(S). quicer:sockname(S).
peercert(_S) -> peercert(_S) ->
%% @todo: currently unsupported by msquic
nossl. nossl.
getstat(Socket, Stats) -> getstat(Socket, Stats) ->
@ -88,5 +97,8 @@ ensure_ok_or_exit(Fun, Args = [Sock|_]) when is_atom(Fun), is_list(Args) ->
async_send(Stream, Data, Options) when is_list(Data) -> async_send(Stream, Data, Options) when is_list(Data) ->
async_send(Stream, iolist_to_binary(Data), Options); async_send(Stream, iolist_to_binary(Data), Options);
async_send(Stream, Data, _Options) when is_binary(Data) -> async_send(Stream, Data, _Options) when is_binary(Data) ->
{ok, _Len} = quicer:send(Stream, Data), case quicer:send(Stream, Data) of
ok. {ok, _Len} -> ok;
Other ->
Other
end.
View File
@ -68,7 +68,6 @@
-type(dest() :: node() | {group(), node()}). -type(dest() :: node() | {group(), node()}).
-define(ROUTE_TAB, emqx_route). -define(ROUTE_TAB, emqx_route).
-rlog_shard({?ROUTE_SHARD, ?ROUTE_TAB}).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Mnesia bootstrap %% Mnesia bootstrap
@ -77,6 +76,7 @@
mnesia(boot) -> mnesia(boot) ->
ok = ekka_mnesia:create_table(?ROUTE_TAB, [ ok = ekka_mnesia:create_table(?ROUTE_TAB, [
{type, bag}, {type, bag},
{rlog_shard, ?ROUTE_SHARD},
{ram_copies, [node()]}, {ram_copies, [node()]},
{record_name, route}, {record_name, route},
{attributes, record_info(fields, route)}, {attributes, record_info(fields, route)},
@ -250,7 +250,7 @@ delete_trie_route(Route = #route{topic = Topic}) ->
%% @private %% @private
-spec(maybe_trans(function(), list(any())) -> ok | {error, term()}). -spec(maybe_trans(function(), list(any())) -> ok | {error, term()}).
maybe_trans(Fun, Args) -> maybe_trans(Fun, Args) ->
case emqx_config:get([broker, perf, route_lock_type]) of case emqx:get_config([broker, perf, route_lock_type]) of
key -> key ->
trans(Fun, Args); trans(Fun, Args);
global -> global ->
View File
@ -52,8 +52,6 @@
-define(ROUTING_NODE, emqx_routing_node). -define(ROUTING_NODE, emqx_routing_node).
-define(LOCK, {?MODULE, cleanup_routes}). -define(LOCK, {?MODULE, cleanup_routes}).
-rlog_shard({?ROUTE_SHARD, ?ROUTING_NODE}).
-dialyzer({nowarn_function, [cleanup_routes/1]}). -dialyzer({nowarn_function, [cleanup_routes/1]}).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -63,6 +61,7 @@
mnesia(boot) -> mnesia(boot) ->
ok = ekka_mnesia:create_table(?ROUTING_NODE, [ ok = ekka_mnesia:create_table(?ROUTING_NODE, [
{type, set}, {type, set},
{rlog_shard, ?ROUTE_SHARD},
{ram_copies, [node()]}, {ram_copies, [node()]},
{record_name, routing_node}, {record_name, routing_node},
{attributes, record_info(fields, routing_node)}, {attributes, record_info(fields, routing_node)},
View File
@ -72,4 +72,4 @@ filter_result(Delivery) ->
Delivery. Delivery.
max_client_num() -> max_client_num() ->
emqx_config:get([rpc, tcp_client_num], ?DefaultClientNum). emqx:get_config([rpc, tcp_client_num], ?DefaultClientNum).
File diff suppressed because it is too large

View File
@ -76,8 +76,6 @@
-define(NACK(Reason), {shared_sub_nack, Reason}). -define(NACK(Reason), {shared_sub_nack, Reason}).
-define(NO_ACK, no_ack). -define(NO_ACK, no_ack).
-rlog_shard({?SHARED_SUB_SHARD, ?TAB}).
-record(state, {pmon}). -record(state, {pmon}).
-record(emqx_shared_subscription, {group, topic, subpid}). -record(emqx_shared_subscription, {group, topic, subpid}).
@ -89,6 +87,7 @@
mnesia(boot) -> mnesia(boot) ->
ok = ekka_mnesia:create_table(?TAB, [ ok = ekka_mnesia:create_table(?TAB, [
{type, bag}, {type, bag},
{rlog_shard, ?SHARED_SUB_SHARD},
{ram_copies, [node()]}, {ram_copies, [node()]},
{record_name, emqx_shared_subscription}, {record_name, emqx_shared_subscription},
{attributes, record_info(fields, emqx_shared_subscription)}]); {attributes, record_info(fields, emqx_shared_subscription)}]);
@ -136,11 +135,11 @@ dispatch(Group, Topic, Delivery = #delivery{message = Msg}, FailedSubs) ->
-spec(strategy() -> strategy()). -spec(strategy() -> strategy()).
strategy() -> strategy() ->
emqx_config:get([broker, shared_subscription_strategy]). emqx:get_config([broker, shared_subscription_strategy]).
-spec(ack_enabled() -> boolean()). -spec(ack_enabled() -> boolean()).
ack_enabled() -> ack_enabled() ->
emqx_config:get([broker, shared_dispatch_ack_enabled]). emqx:get_config([broker, shared_dispatch_ack_enabled]).
do_dispatch(SubPid, Topic, Msg, _Type) when SubPid =:= self() -> do_dispatch(SubPid, Topic, Msg, _Type) when SubPid =:= self() ->
%% Deadlock otherwise %% Deadlock otherwise
View File
@ -102,10 +102,10 @@ datetime() ->
"~4..0w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w", [Y, M, D, H, MM, S])). "~4..0w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w", [Y, M, D, H, MM, S])).
sys_interval() -> sys_interval() ->
emqx_config:get([broker, sys_msg_interval]). emqx:get_config([broker, sys_msg_interval]).
sys_heatbeat_interval() -> sys_heatbeat_interval() ->
emqx_config:get([broker, sys_heartbeat_interval]). emqx:get_config([broker, sys_heartbeat_interval]).
%% @doc Get sys info %% @doc Get sys info
-spec(info() -> list(tuple())). -spec(info() -> list(tuple())).
View File
@ -60,7 +60,7 @@ start_timer(State) ->
State#{timer := emqx_misc:start_timer(timer:seconds(2), reset)}. State#{timer := emqx_misc:start_timer(timer:seconds(2), reset)}.
sysm_opts() -> sysm_opts() ->
sysm_opts(maps:to_list(emqx_config:get([sysmon, vm])), []). sysm_opts(maps:to_list(emqx:get_config([sysmon, vm])), []).
sysm_opts([], Acc) -> sysm_opts([], Acc) ->
Acc; Acc;
sysm_opts([{_, disabled}|Opts], Acc) -> sysm_opts([{_, disabled}|Opts], Acc) ->
View File
@ -50,8 +50,6 @@
, count = 0 :: non_neg_integer() , count = 0 :: non_neg_integer()
}). }).
-rlog_shard({?ROUTE_SHARD, ?TRIE}).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Mnesia bootstrap %% Mnesia bootstrap
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -64,6 +62,7 @@ mnesia(boot) ->
{write_concurrency, true} {write_concurrency, true}
]}], ]}],
ok = ekka_mnesia:create_table(?TRIE, [ ok = ekka_mnesia:create_table(?TRIE, [
{rlog_shard, ?ROUTE_SHARD},
{ram_copies, [node()]}, {ram_copies, [node()]},
{record_name, ?TRIE}, {record_name, ?TRIE},
{attributes, record_info(fields, ?TRIE)}, {attributes, record_info(fields, ?TRIE)},
@ -270,7 +269,7 @@ match_compact([Word | Words], Prefix, IsWildcard, Acc0) ->
lookup_topic(MlTopic). lookup_topic(MlTopic).
is_compact() -> is_compact() ->
emqx_config:get([broker, perf, trie_compaction], true). emqx:get_config([broker, perf, trie_compaction], true).
set_compact(Bool) -> set_compact(Bool) ->
emqx_config:put([broker, perf, trie_compaction], Bool). emqx_config:put([broker, perf, trie_compaction], Bool).
View File
@ -57,8 +57,8 @@ handle_cast(Msg, State) ->
{noreply, State}. {noreply, State}.
handle_info({timeout, _Timer, check}, State) -> handle_info({timeout, _Timer, check}, State) ->
ProcHighWatermark = emqx_config:get([sysmon, vm, process_high_watermark]), ProcHighWatermark = emqx:get_config([sysmon, vm, process_high_watermark]),
ProcLowWatermark = emqx_config:get([sysmon, vm, process_low_watermark]), ProcLowWatermark = emqx:get_config([sysmon, vm, process_low_watermark]),
ProcessCount = erlang:system_info(process_count), ProcessCount = erlang:system_info(process_count),
case ProcessCount / erlang:system_info(process_limit) of case ProcessCount / erlang:system_info(process_limit) of
Percent when Percent >= ProcHighWatermark -> Percent when Percent >= ProcHighWatermark ->
@ -89,5 +89,5 @@ code_change(_OldVsn, State, _Extra) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
start_check_timer() -> start_check_timer() ->
Interval = emqx_config:get([sysmon, vm, process_check_interval]), Interval = emqx:get_config([sysmon, vm, process_check_interval]),
emqx_misc:start_timer(Interval, check). emqx_misc:start_timer(Interval, check).
View File
@ -85,8 +85,8 @@
idle_timer :: maybe(reference()), idle_timer :: maybe(reference()),
%% Zone name %% Zone name
zone :: atom(), zone :: atom(),
%% Listener Name %% Listener Type and Name
listener :: atom() listener :: {Type::atom(), Name::atom()}
}). }).
-type(state() :: #state{}). -type(state() :: #state{}).
@ -173,12 +173,12 @@ call(WsPid, Req, Timeout) when is_pid(WsPid) ->
%% WebSocket callbacks %% WebSocket callbacks
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init(Req, #{zone := Zone, listener := Listener} = Opts) -> init(Req, #{listener := {Type, Listener}} = Opts) ->
%% WS Transport Idle Timeout %% WS Transport Idle Timeout
WsOpts = #{compress => get_ws_opts(Zone, Listener, compress), WsOpts = #{compress => get_ws_opts(Type, Listener, compress),
deflate_opts => get_ws_opts(Zone, Listener, deflate_opts), deflate_opts => get_ws_opts(Type, Listener, deflate_opts),
max_frame_size => get_ws_opts(Zone, Listener, max_frame_size), max_frame_size => get_ws_opts(Type, Listener, max_frame_size),
idle_timeout => get_ws_opts(Zone, Listener, idle_timeout) idle_timeout => get_ws_opts(Type, Listener, idle_timeout)
}, },
case check_origin_header(Req, Opts) of case check_origin_header(Req, Opts) of
{error, Message} -> {error, Message} ->
@ -187,17 +187,17 @@ init(Req, #{zone := Zone, listener := Listener} = Opts) ->
ok -> parse_sec_websocket_protocol(Req, Opts, WsOpts) ok -> parse_sec_websocket_protocol(Req, Opts, WsOpts)
end. end.
parse_sec_websocket_protocol(Req, #{zone := Zone, listener := Listener} = Opts, WsOpts) -> parse_sec_websocket_protocol(Req, #{listener := {Type, Listener}} = Opts, WsOpts) ->
case cowboy_req:parse_header(<<"sec-websocket-protocol">>, Req) of case cowboy_req:parse_header(<<"sec-websocket-protocol">>, Req) of
undefined -> undefined ->
case get_ws_opts(Zone, Listener, fail_if_no_subprotocol) of case get_ws_opts(Type, Listener, fail_if_no_subprotocol) of
true -> true ->
{ok, cowboy_req:reply(400, Req), WsOpts}; {ok, cowboy_req:reply(400, Req), WsOpts};
false -> false ->
{cowboy_websocket, Req, [Req, Opts], WsOpts} {cowboy_websocket, Req, [Req, Opts], WsOpts}
end; end;
Subprotocols -> Subprotocols ->
SupportedSubprotocols = get_ws_opts(Zone, Listener, supported_subprotocols), SupportedSubprotocols = get_ws_opts(Type, Listener, supported_subprotocols),
NSupportedSubprotocols = [list_to_binary(Subprotocol) NSupportedSubprotocols = [list_to_binary(Subprotocol)
|| Subprotocol <- SupportedSubprotocols], || Subprotocol <- SupportedSubprotocols],
case pick_subprotocol(Subprotocols, NSupportedSubprotocols) of case pick_subprotocol(Subprotocols, NSupportedSubprotocols) of
@ -221,29 +221,29 @@ pick_subprotocol([Subprotocol | Rest], SupportedSubprotocols) ->
pick_subprotocol(Rest, SupportedSubprotocols) pick_subprotocol(Rest, SupportedSubprotocols)
end. end.
parse_header_fun_origin(Req, #{zone := Zone, listener := Listener}) -> parse_header_fun_origin(Req, #{listener := {Type, Listener}}) ->
case cowboy_req:header(<<"origin">>, Req) of case cowboy_req:header(<<"origin">>, Req) of
undefined -> undefined ->
case get_ws_opts(Zone, Listener, allow_origin_absence) of case get_ws_opts(Type, Listener, allow_origin_absence) of
true -> ok; true -> ok;
false -> {error, origin_header_cannot_be_absent} false -> {error, origin_header_cannot_be_absent}
end; end;
Value -> Value ->
case lists:member(Value, get_ws_opts(Zone, Listener, check_origins)) of case lists:member(Value, get_ws_opts(Type, Listener, check_origins)) of
true -> ok; true -> ok;
false -> {origin_not_allowed, Value} false -> {origin_not_allowed, Value}
end end
end. end.
check_origin_header(Req, #{zone := Zone, listener := Listener} = Opts) -> check_origin_header(Req, #{listener := {Type, Listener}} = Opts) ->
case get_ws_opts(Zone, Listener, check_origin_enable) of case get_ws_opts(Type, Listener, check_origin_enable) of
true -> parse_header_fun_origin(Req, Opts); true -> parse_header_fun_origin(Req, Opts);
false -> ok false -> ok
end. end.
websocket_init([Req, #{zone := Zone, listener := Listener} = Opts]) -> websocket_init([Req, #{zone := Zone, listener := {Type, Listener}} = Opts]) ->
{Peername, Peercert} = {Peername, Peercert} =
case emqx_config:get_listener_conf(Zone, Listener, [proxy_protocol]) andalso case emqx_config:get_listener_conf(Type, Listener, [proxy_protocol]) andalso
maps:get(proxy_header, Req) of maps:get(proxy_header, Req) of
#{src_address := SrcAddr, src_port := SrcPort, ssl := SSL} -> #{src_address := SrcAddr, src_port := SrcPort, ssl := SSL} ->
SourceName = {SrcAddr, SrcPort}, SourceName = {SrcAddr, SrcPort},
@ -278,7 +278,7 @@ websocket_init([Req, #{zone := Zone, listener := Listener} = Opts]) ->
conn_mod => ?MODULE conn_mod => ?MODULE
}, },
Limiter = emqx_limiter:init(Zone, undefined, undefined, []), Limiter = emqx_limiter:init(Zone, undefined, undefined, []),
MQTTPiggyback = get_ws_opts(Zone, Listener, mqtt_piggyback), MQTTPiggyback = get_ws_opts(Type, Listener, mqtt_piggyback),
FrameOpts = #{ FrameOpts = #{
strict_mode => emqx_config:get_zone_conf(Zone, [mqtt, strict_mode]), strict_mode => emqx_config:get_zone_conf(Zone, [mqtt, strict_mode]),
max_size => emqx_config:get_zone_conf(Zone, [mqtt, max_packet_size]) max_size => emqx_config:get_zone_conf(Zone, [mqtt, max_packet_size])
@ -317,7 +317,7 @@ websocket_init([Req, #{zone := Zone, listener := Listener} = Opts]) ->
idle_timeout = IdleTimeout, idle_timeout = IdleTimeout,
idle_timer = IdleTimer, idle_timer = IdleTimer,
zone = Zone, zone = Zone,
listener = Listener listener = {Type, Listener}
}, hibernate}. }, hibernate}.
websocket_handle({binary, Data}, State) when is_list(Data) -> websocket_handle({binary, Data}, State) when is_list(Data) ->
@ -370,8 +370,8 @@ websocket_info({check_gc, Stats}, State) ->
return(check_oom(run_gc(Stats, State))); return(check_oom(run_gc(Stats, State)));
websocket_info(Deliver = {deliver, _Topic, _Msg}, websocket_info(Deliver = {deliver, _Topic, _Msg},
State = #state{zone = Zone, listener = Listener}) -> State = #state{listener = {Type, Listener}}) ->
ActiveN = emqx_config:get_listener_conf(Zone, Listener, [tcp, active_n]), ActiveN = get_active_n(Type, Listener),
Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)], Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State); with_channel(handle_deliver, [Delivers], State);
@ -558,12 +558,12 @@ parse_incoming(Data, State = #state{parse_state = ParseState}) ->
%% Handle incoming packet %% Handle incoming packet
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
handle_incoming(Packet, State = #state{zone = Zone, listener = Listener}) handle_incoming(Packet, State = #state{listener = {Type, Listener}})
when is_record(Packet, mqtt_packet) -> when is_record(Packet, mqtt_packet) ->
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]), ?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
ok = inc_incoming_stats(Packet), ok = inc_incoming_stats(Packet),
NState = case emqx_pd:get_counter(incoming_pubs) > NState = case emqx_pd:get_counter(incoming_pubs) >
emqx_config:get_listener_conf(Zone, Listener, [tcp, active_n]) of get_active_n(Type, Listener) of
true -> postpone({cast, rate_limit}, State); true -> postpone({cast, rate_limit}, State);
false -> State false -> State
end, end,
@ -595,12 +595,12 @@ with_channel(Fun, Args, State = #state{channel = Channel}) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
handle_outgoing(Packets, State = #state{mqtt_piggyback = MQTTPiggyback, handle_outgoing(Packets, State = #state{mqtt_piggyback = MQTTPiggyback,
zone = Zone, listener = Listener}) -> listener = {Type, Listener}}) ->
IoData = lists:map(serialize_and_inc_stats_fun(State), Packets), IoData = lists:map(serialize_and_inc_stats_fun(State), Packets),
Oct = iolist_size(IoData), Oct = iolist_size(IoData),
ok = inc_sent_stats(length(Packets), Oct), ok = inc_sent_stats(length(Packets), Oct),
NState = case emqx_pd:get_counter(outgoing_pubs) > NState = case emqx_pd:get_counter(outgoing_pubs) >
emqx_config:get_listener_conf(Zone, Listener, [tcp, active_n]) of get_active_n(Type, Listener) of
true -> true ->
Stats = #{cnt => emqx_pd:reset_counter(outgoing_pubs), Stats = #{cnt => emqx_pd:reset_counter(outgoing_pubs),
oct => emqx_pd:reset_counter(outgoing_bytes) oct => emqx_pd:reset_counter(outgoing_bytes)
@ -749,10 +749,10 @@ classify([Event|More], Packets, Cmds, Events) ->
trigger(Event) -> erlang:send(self(), Event). trigger(Event) -> erlang:send(self(), Event).
get_peer(Req, #{zone := Zone, listener := Listener}) -> get_peer(Req, #{listener := {Type, Listener}}) ->
{PeerAddr, PeerPort} = cowboy_req:peer(Req), {PeerAddr, PeerPort} = cowboy_req:peer(Req),
AddrHeader = cowboy_req:header( AddrHeader = cowboy_req:header(
get_ws_opts(Zone, Listener, proxy_address_header), Req, <<>>), get_ws_opts(Type, Listener, proxy_address_header), Req, <<>>),
ClientAddr = case string:tokens(binary_to_list(AddrHeader), ", ") of ClientAddr = case string:tokens(binary_to_list(AddrHeader), ", ") of
[] -> [] ->
undefined; undefined;
@ -766,7 +766,7 @@ get_peer(Req, #{zone := Zone, listener := Listener}) ->
PeerAddr PeerAddr
end, end,
PortHeader = cowboy_req:header( PortHeader = cowboy_req:header(
get_ws_opts(Zone, Listener, proxy_port_header), Req, <<>>), get_ws_opts(Type, Listener, proxy_port_header), Req, <<>>),
ClientPort = case string:tokens(binary_to_list(PortHeader), ", ") of ClientPort = case string:tokens(binary_to_list(PortHeader), ", ") of
[] -> [] ->
undefined; undefined;
@ -787,5 +787,8 @@ set_field(Name, Value, State) ->
Pos = emqx_misc:index_of(Name, record_info(fields, state)), Pos = emqx_misc:index_of(Name, record_info(fields, state)),
setelement(Pos+1, State, Value). setelement(Pos+1, State, Value).
get_ws_opts(Zone, Listener, Key) -> get_ws_opts(Type, Listener, Key) ->
emqx_config:get_listener_conf(Zone, Listener, [websocket, Key]). emqx_config:get_listener_conf(Type, Listener, [websocket, Key]).
get_active_n(Type, Listener) ->
emqx_config:get_listener_conf(Type, Listener, [tcp, active_n]).
View File
@ -14,25 +14,21 @@
%% limitations under the License. %% limitations under the License.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-module(emqx_bridge_stub_conn). -module(emqx_zone_schema).
-export([ start/1 -export([namespace/0, roots/0, fields/1]).
, send/2
, stop/1
]).
-type ack_ref() :: emqx_bridge_worker:ack_ref(). namespace() -> zone.
-type batch() :: emqx_bridge_worker:batch().
start(#{client_pid := Pid} = Cfg) -> roots() -> [].
Pid ! {self(), ?MODULE, ready},
{ok, Cfg}.
stop(_) -> ok. %% zone schemas are clones of the root-level fields of the same name,
%% except that they are not allowed to have default values.
fields(Name) ->
[{N, no_default(Sc)} || {N, Sc} <- emqx_schema:fields(Name)].
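%% Illustrative check (fun-style schema assumed; other attributes are delegated
%% to hocon_schema:field_schema/2 by the wrapper below):
%%   Wrapped = no_default(fun(default) -> true; (_) -> undefined end),
%%   Wrapped(default). %%=> undefined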
%% @doc Callback for `emqx_bridge_connect' behaviour %% no default values for zone settings
-spec send(_, batch()) -> {ok, ack_ref()} | {error, any()}. no_default(Sc) ->
send(#{client_pid := Pid}, Batch) -> fun(default) -> undefined;
Ref = make_ref(), (Other) -> hocon_schema:field_schema(Sc, Other)
Pid ! {stub_message, self(), Ref, Batch}, end.
{ok, Ref}.
View File
@ -33,7 +33,7 @@ end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps([]). emqx_ct_helpers:stop_apps([]).
t_authenticate(_) -> t_authenticate(_) ->
?assertMatch(ok, emqx_access_control:authenticate(clientinfo())). ?assertMatch({ok, _}, emqx_access_control:authenticate(clientinfo())).
t_authorize(_) -> t_authorize(_) ->
Publish = ?PUBLISH_PACKET(?QOS_0, <<"t">>, 1, <<"payload">>), Publish = ?PUBLISH_PACKET(?QOS_0, <<"t">>, 1, <<"payload">>),
@ -46,7 +46,7 @@ t_authorize(_) ->
clientinfo() -> clientinfo(#{}). clientinfo() -> clientinfo(#{}).
clientinfo(InitProps) -> clientinfo(InitProps) ->
maps:merge(#{zone => default, maps:merge(#{zone => default,
listener => mqtt_tcp, listener => {tcp, default},
protocol => mqtt, protocol => mqtt,
peerhost => {127,0,0,1}, peerhost => {127,0,0,1},
clientid => <<"clientid">>, clientid => <<"clientid">>,
View File
@ -28,14 +28,14 @@ all() -> emqx_ct:all(?MODULE).
init_per_testcase(t_size_limit, Config) -> init_per_testcase(t_size_limit, Config) ->
emqx_ct_helpers:boot_modules(all), emqx_ct_helpers:boot_modules(all),
emqx_ct_helpers:start_apps([]), emqx_ct_helpers:start_apps([]),
emqx_config:update([alarm], #{ {ok, _} = emqx:update_config([alarm], #{
<<"size_limit">> => 2 <<"size_limit">> => 2
}), }),
Config; Config;
init_per_testcase(t_validity_period, Config) -> init_per_testcase(t_validity_period, Config) ->
emqx_ct_helpers:boot_modules(all), emqx_ct_helpers:boot_modules(all),
emqx_ct_helpers:start_apps([]), emqx_ct_helpers:start_apps([]),
emqx_config:update([alarm], #{ {ok, _} = emqx:update_config([alarm], #{
<<"validity_period">> => <<"1s">> <<"validity_period">> => <<"1s">>
}), }),
Config; Config;
View File
@ -0,0 +1,238 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authentication_SUITE).
-behaviour(hocon_schema).
-behaviour(emqx_authentication).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("typerefl/include/types.hrl").
-export([ fields/1 ]).
-export([ refs/0
, create/1
, update/2
, authenticate/2
, destroy/1
]).
-define(AUTHN, emqx_authentication).
%%------------------------------------------------------------------------------
%% Hocon Schema
%%------------------------------------------------------------------------------
fields(type1) ->
[ {mechanism, {enum, ['password-based']}}
, {backend, {enum, ['built-in-database']}}
, {enable, fun enable/1}
];
fields(type2) ->
[ {mechanism, {enum, ['password-based']}}
, {backend, {enum, ['mysql']}}
, {enable, fun enable/1}
].
enable(type) -> boolean();
enable(default) -> true;
enable(_) -> undefined.
%%------------------------------------------------------------------------------
%% Callbacks
%%------------------------------------------------------------------------------
refs() ->
[ hoconsc:ref(?MODULE, type1)
, hoconsc:ref(?MODULE, type2)
].
create(_Config) ->
{ok, #{mark => 1}}.
update(_Config, _State) ->
{ok, #{mark => 2}}.
authenticate(#{username := <<"good">>}, _State) ->
{ok, #{is_superuser => true}};
authenticate(#{username := _}, _State) ->
{error, bad_username_or_password}.
destroy(_State) ->
ok.
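%% Editorial note: this suite doubles as a mock provider for the authentication
%% chain tests below; create/1 and update/2 tag their state with mark => 1 and
%% mark => 2 so the cases can assert which callback ran, and authenticate/2
%% treats the username <<"good">> as a superuser.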
all() ->
emqx_ct:all(?MODULE).
init_per_suite(Config) ->
application:set_env(ekka, strict_mode, true),
emqx_ct_helpers:start_apps([]),
Config.
end_per_suite(_) ->
emqx_ct_helpers:stop_apps([]),
ok.
t_chain(_) ->
% CRUD of authentication chain
ChainName = 'test',
?assertMatch({ok, []}, ?AUTHN:list_chains()),
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:create_chain(ChainName)),
?assertEqual({error, {already_exists, {chain, ChainName}}}, ?AUTHN:create_chain(ChainName)),
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:lookup_chain(ChainName)),
?assertMatch({ok, [#{name := ChainName}]}, ?AUTHN:list_chains()),
?assertEqual(ok, ?AUTHN:delete_chain(ChainName)),
?assertMatch({error, {not_found, {chain, ChainName}}}, ?AUTHN:lookup_chain(ChainName)),
ok.
t_authenticator(_) ->
ChainName = 'test',
AuthenticatorConfig1 = #{mechanism => 'password-based',
backend => 'built-in-database',
enable => true},
% Create an authenticator when the authentication chain does not exist
?assertEqual({error, {not_found, {chain, ChainName}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?AUTHN:create_chain(ChainName),
% Create an authenticator when the provider does not exist
?assertEqual({error, no_available_provider}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
AuthNType1 = {'password-based', 'built-in-database'},
?AUTHN:add_provider(AuthNType1, ?MODULE),
ID1 = <<"password-based:built-in-database">>,
% CRUD of authenticators
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID1}}, ?AUTHN:lookup_authenticator(ChainName, ID1)),
?assertMatch({ok, [#{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual({error, {already_exists, {authenticator, ID1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertEqual(ok, ?AUTHN:delete_authenticator(ChainName, ID1)),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertMatch({ok, []}, ?AUTHN:list_authenticators(ChainName)),
% Multiple authenticators exist at the same time
AuthNType2 = {'password-based', mysql},
?AUTHN:add_provider(AuthNType2, ?MODULE),
ID2 = <<"password-based:mysql">>,
AuthenticatorConfig2 = #{mechanism => 'password-based',
backend => mysql,
enable => true},
?assertMatch({ok, #{id := ID1}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID2}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)),
% Move authenticator
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, top)),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, bottom)),
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, {before, ID1})),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
?AUTHN:delete_chain(ChainName),
?AUTHN:remove_provider(AuthNType1),
?AUTHN:remove_provider(AuthNType2),
ok.
t_authenticate(_) ->
ListenerID = 'tcp:default',
ClientInfo = #{zone => default,
listener => ListenerID,
protocol => mqtt,
username => <<"good">>,
password => <<"any">>},
?assertEqual({ok, #{is_superuser => false}}, emqx_access_control:authenticate(ClientInfo)),
AuthNType = {'password-based', 'built-in-database'},
?AUTHN:add_provider(AuthNType, ?MODULE),
AuthenticatorConfig = #{mechanism => 'password-based',
backend => 'built-in-database',
enable => true},
?AUTHN:create_chain(ListenerID),
?assertMatch({ok, _}, ?AUTHN:create_authenticator(ListenerID, AuthenticatorConfig)),
?assertEqual({ok, #{is_superuser => true}}, emqx_access_control:authenticate(ClientInfo)),
?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>})),
?AUTHN:delete_chain(ListenerID),
?AUTHN:remove_provider(AuthNType),
ok.
t_update_config(_) ->
emqx_config_handler:add_handler([authentication], emqx_authentication),
AuthNType1 = {'password-based', 'built-in-database'},
AuthNType2 = {'password-based', mysql},
?AUTHN:add_provider(AuthNType1, ?MODULE),
?AUTHN:add_provider(AuthNType2, ?MODULE),
Global = 'mqtt:global',
AuthenticatorConfig1 = #{mechanism => 'password-based',
backend => 'built-in-database',
enable => true},
AuthenticatorConfig2 = #{mechanism => 'password-based',
backend => mysql,
enable => true},
ID1 = <<"password-based:built-in-database">>,
ID2 = <<"password-based:mysql">>,
?assertMatch({ok, []}, ?AUTHN:list_chains()),
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})),
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID2)),
?assertMatch({ok, _}, update_config([authentication], {update_authenticator, Global, ID1, #{}})),
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch({ok, _}, update_config([authentication], {move_authenticator, Global, ID2, <<"top">>})),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(Global)),
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
ListenerID = 'tcp:default',
ConfKeyPath = [listeners, tcp, default, authentication],
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig1})),
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig2})),
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID2)),
?assertMatch({ok, _}, update_config(ConfKeyPath, {update_authenticator, ListenerID, ID1, #{}})),
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch({ok, _}, update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, <<"top">>})),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ListenerID)),
?assertMatch({ok, _}, update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
?AUTHN:delete_chain(Global),
?AUTHN:remove_provider(AuthNType1),
?AUTHN:remove_provider(AuthNType2),
ok.
update_config(Path, ConfigRequest) ->
emqx:update_config(Path, ConfigRequest, #{rawconf_with_defaults => true}).
View File
@ -26,7 +26,6 @@ all() -> emqx_ct:all(?MODULE).
init_per_suite(Config) -> init_per_suite(Config) ->
emqx_ct_helpers:boot_modules(all), emqx_ct_helpers:boot_modules(all),
emqx_ct_helpers:start_apps([]), emqx_ct_helpers:start_apps([]),
toggle_authz(true),
Config. Config.
end_per_suite(_Config) -> end_per_suite(_Config) ->
@ -78,6 +77,3 @@ t_drain_authz_cache(_) ->
{ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0), {ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0),
?assert(length(gen_server:call(ClientPid, list_authz_cache)) > 0), ?assert(length(gen_server:call(ClientPid, list_authz_cache)) > 0),
emqtt:stop(Client). emqtt:stop(Client).
toggle_authz(Bool) when is_boolean(Bool) ->
emqx_config:put_zone_conf(default, [authorization, enable], Bool).
View File
@ -27,149 +27,112 @@
all() -> all() ->
emqx_ct:all(?MODULE). emqx_ct:all(?MODULE).
force_gc_conf() ->
#{bytes => 16777216,count => 16000,enable => true}.
force_shutdown_conf() ->
#{enable => true,max_heap_size => 4194304, max_message_queue_len => 1000}.
rate_limit_conf() ->
#{conn_bytes_in => ["100KB","10s"],
conn_messages_in => ["100","10s"],
max_conn_rate => 1000,
quota =>
#{conn_messages_routing => infinity,
overall_messages_routing => infinity}}.
rpc_conf() ->
#{async_batch_size => 256,authentication_timeout => 5000,
call_receive_timeout => 15000,connect_timeout => 5000,
mode => async,port_discovery => stateless,
send_timeout => 5000,socket_buffer => 1048576,
socket_keepalive_count => 9,socket_keepalive_idle => 900,
socket_keepalive_interval => 75,socket_recbuf => 1048576,
socket_sndbuf => 1048576,tcp_client_num => 1,
tcp_server_port => 5369}.
mqtt_conf() -> mqtt_conf() ->
#{await_rel_timeout => 300000, #{await_rel_timeout => 300000,idle_timeout => 15000,
idle_timeout => 15000, ignore_loop_deliver => false,keepalive_backoff => 0.75,
ignore_loop_deliver => false, max_awaiting_rel => 100,max_clientid_len => 65535,
keepalive_backoff => 0.75, max_inflight => 32,max_mqueue_len => 1000,
max_awaiting_rel => 100, max_packet_size => 1048576,max_qos_allowed => 2,
max_clientid_len => 65535, max_subscriptions => infinity,max_topic_alias => 65535,
max_inflight => 32, max_topic_levels => 65535,mqueue_default_priority => lowest,
max_mqueue_len => 1000, mqueue_priorities => disabled,mqueue_store_qos0 => true,
max_packet_size => 1048576, peer_cert_as_clientid => disabled,
max_qos_allowed => 2, peer_cert_as_username => disabled,
max_subscriptions => infinity, response_information => [],retain_available => true,
max_topic_alias => 65535, retry_interval => 30000,server_keepalive => disabled,
max_topic_levels => 65535, session_expiry_interval => 7200000,
mountpoint => <<>>, shared_subscription => true,strict_mode => false,
mqueue_default_priority => lowest, upgrade_qos => false,use_username_as_clientid => false,
mqueue_priorities => #{}, wildcard_subscription => true}.
mqueue_store_qos0 => true,
peer_cert_as_clientid => disabled,
peer_cert_as_username => disabled,
response_information => [],
retain_available => true,
retry_interval => 30000,
server_keepalive => disabled,
session_expiry_interval => 7200000,
shared_subscription => true,
strict_mode => false,
upgrade_qos => false,
use_username_as_clientid => false,
wildcard_subscription => true}.
listener_mqtt_tcp_conf() -> listener_mqtt_tcp_conf() ->
#{acceptors => 16, #{acceptors => 16,
access_rules => ["allow all"], zone => default,
bind => {{0,0,0,0},1883}, access_rules => ["allow all"],
max_connections => 1024000, bind => {{0,0,0,0},1883},
proxy_protocol => false, max_connections => 1024000,mountpoint => <<>>,
proxy_protocol_timeout => 3000, proxy_protocol => false,proxy_protocol_timeout => 3000,
rate_limit => tcp => #{
#{conn_bytes_in => active_n => 100,backlog => 1024,buffer => 4096,
["100KB","10s"], high_watermark => 1048576,nodelay => false,
conn_messages_in => reuseaddr => true,send_timeout => 15000,
["100","10s"], send_timeout_close => true}}.
max_conn_rate => 1000,
quota =>
#{conn_messages_routing => infinity,
overall_messages_routing => infinity}},
tcp =>
#{active_n => 100,
backlog => 1024,
buffer => 4096,
high_watermark => 1048576,
send_timeout => 15000,
send_timeout_close =>
true},
type => tcp}.
listener_mqtt_ws_conf() -> listener_mqtt_ws_conf() ->
#{acceptors => 16, #{acceptors => 16,
access_rules => ["allow all"], zone => default,
bind => {{0,0,0,0},8083}, access_rules => ["allow all"],
max_connections => 1024000, bind => {{0,0,0,0},8083},
proxy_protocol => false, max_connections => 1024000,mountpoint => <<>>,
proxy_protocol_timeout => 3000, proxy_protocol => false,proxy_protocol_timeout => 3000,
rate_limit => tcp =>
#{conn_bytes_in => #{active_n => 100,backlog => 1024,buffer => 4096,
["100KB","10s"], high_watermark => 1048576,nodelay => false,
conn_messages_in => reuseaddr => true,send_timeout => 15000,
["100","10s"], send_timeout_close => true},
max_conn_rate => 1000, websocket =>
quota => #{allow_origin_absence => true,check_origin_enable => false,
#{conn_messages_routing => infinity, check_origins => [],compress => false,
overall_messages_routing => infinity}}, deflate_opts =>
tcp => #{client_max_window_bits => 15,mem_level => 8,
#{active_n => 100, server_max_window_bits => 15},
backlog => 1024, fail_if_no_subprotocol => true,idle_timeout => 86400000,
buffer => 4096, max_frame_size => infinity,mqtt_path => "/mqtt",
high_watermark => 1048576, mqtt_piggyback => multiple,
send_timeout => 15000, proxy_address_header => "x-forwarded-for",
send_timeout_close => proxy_port_header => "x-forwarded-port",
true}, supported_subprotocols =>
type => ws, ["mqtt","mqtt-v3","mqtt-v3.1.1","mqtt-v5"]}}.
websocket =>
#{allow_origin_absence =>
true,
check_origin_enable =>
false,
check_origins => [],
compress => false,
deflate_opts =>
#{client_max_window_bits =>
15,
mem_level => 8,
server_max_window_bits =>
15},
fail_if_no_subprotocol =>
true,
idle_timeout => 86400000,
max_frame_size => infinity,
mqtt_path => "/mqtt",
mqtt_piggyback => multiple,
proxy_address_header =>
"x-forwarded-for",
proxy_port_header =>
"x-forwarded-port",
supported_subprotocols =>
["mqtt","mqtt-v3",
"mqtt-v3.1.1",
"mqtt-v5"]}}.
default_zone_conf() -> listeners_conf() ->
#{zones => #{tcp => #{default => listener_mqtt_tcp_conf()},
#{default => ws => #{default => listener_mqtt_ws_conf()}
#{ authorization => #{
cache => #{enable => true,max_size => 32, ttl => 60000},
deny_action => ignore,
enable => false
},
auth => #{enable => false},
overall_max_connections => infinity,
stats => #{enable => true},
conn_congestion =>
#{enable_alarm => true, min_alarm_sustain_duration => 60000},
flapping_detect =>
#{ban_time => 300000,enable => false,
max_count => 15,window_time => 60000},
force_gc =>
#{bytes => 16777216,count => 16000,
enable => true},
force_shutdown =>
#{enable => true,
max_heap_size => 4194304,
max_message_queue_len => 1000},
mqtt => mqtt_conf(),
listeners =>
#{mqtt_tcp => listener_mqtt_tcp_conf(),
mqtt_ws => listener_mqtt_ws_conf()}
}
}
}. }.
set_default_zone_conf() -> stats_conf() ->
emqx_config:put(default_zone_conf()). #{enable => true}.
zone_conf() ->
#{}.
basic_conf() ->
#{rate_limit => rate_limit_conf(),
force_gc => force_gc_conf(),
force_shutdown => force_shutdown_conf(),
mqtt => mqtt_conf(),
rpc => rpc_conf(),
stats => stats_conf(),
listeners => listeners_conf(),
zones => zone_conf()
}.
set_test_listenser_confs() ->
emqx_config:put(basic_conf()).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% CT Callbacks %% CT Callbacks
@ -181,7 +144,7 @@ init_per_suite(Config) ->
%% Access Control Meck %% Access Control Meck
ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]), ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_access_control, authenticate, ok = meck:expect(emqx_access_control, authenticate,
fun(_) -> ok end), fun(_) -> {ok, #{is_superuser => false}} end),
ok = meck:expect(emqx_access_control, authorize, fun(_, _, _) -> allow end), ok = meck:expect(emqx_access_control, authorize, fun(_, _, _) -> allow end),
%% Broker Meck %% Broker Meck
ok = meck:new(emqx_broker, [passthrough, no_history, no_link]), ok = meck:new(emqx_broker, [passthrough, no_history, no_link]),
@ -211,7 +174,7 @@ end_per_suite(_Config) ->
]). ]).
init_per_testcase(_TestCase, Config) -> init_per_testcase(_TestCase, Config) ->
set_default_zone_conf(), set_test_listenser_confs(),
Config. Config.
end_per_testcase(_TestCase, Config) -> end_per_testcase(_TestCase, Config) ->
@ -917,7 +880,7 @@ t_ws_cookie_init(_) ->
conn_mod => emqx_ws_connection, conn_mod => emqx_ws_connection,
ws_cookie => WsCookie ws_cookie => WsCookie
}, },
Channel = emqx_channel:init(ConnInfo, #{zone => default, listener => mqtt_tcp}), Channel = emqx_channel:init(ConnInfo, #{zone => default, listener => {tcp, default}}),
?assertMatch(#{ws_cookie := WsCookie}, emqx_channel:info(clientinfo, Channel)). ?assertMatch(#{ws_cookie := WsCookie}, emqx_channel:info(clientinfo, Channel)).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -942,7 +905,7 @@ channel(InitFields) ->
maps:fold(fun(Field, Value, Channel) -> maps:fold(fun(Field, Value, Channel) ->
emqx_channel:set_field(Field, Value, Channel) emqx_channel:set_field(Field, Value, Channel)
end, end,
emqx_channel:init(ConnInfo, #{zone => default, listener => mqtt_tcp}), emqx_channel:init(ConnInfo, #{zone => default, listener => {tcp, default}}),
maps:merge(#{clientinfo => clientinfo(), maps:merge(#{clientinfo => clientinfo(),
session => session(), session => session(),
conn_state => connected conn_state => connected
@ -951,7 +914,7 @@ channel(InitFields) ->
clientinfo() -> clientinfo(#{}). clientinfo() -> clientinfo(#{}).
clientinfo(InitProps) -> clientinfo(InitProps) ->
maps:merge(#{zone => default, maps:merge(#{zone => default,
listener => mqtt_tcp, listener => {tcp, default},
protocol => mqtt, protocol => mqtt,
peerhost => {127,0,0,1}, peerhost => {127,0,0,1},
clientid => <<"clientid">>, clientid => <<"clientid">>,
View File
@ -79,8 +79,8 @@ groups() ->
init_per_suite(Config) -> init_per_suite(Config) ->
emqx_ct_helpers:boot_modules(all), emqx_ct_helpers:boot_modules(all),
emqx_ct_helpers:start_apps([]), emqx_ct_helpers:start_apps([]),
emqx_config:put_listener_conf(default, mqtt_ssl, [ssl, verify], verify_peer), emqx_config:put_listener_conf(ssl, default, [ssl, verify], verify_peer),
emqx_listeners:restart_listener('default:mqtt_ssl'), emqx_listeners:restart_listener('ssl:default'),
Config. Config.
end_per_suite(_Config) -> end_per_suite(_Config) ->
@ -114,8 +114,8 @@ t_cm(_) ->
emqx_config:put_zone_conf(default, [mqtt, idle_timeout], 15000). emqx_config:put_zone_conf(default, [mqtt, idle_timeout], 15000).
t_cm_registry(_) -> t_cm_registry(_) ->
Info = supervisor:which_children(emqx_cm_sup), Children = supervisor:which_children(emqx_cm_sup),
{_, Pid, _, _} = lists:keyfind(registry, 1, Info), {_, Pid, _, _} = lists:keyfind(emqx_cm_registry, 1, Children),
ignored = gen_server:call(Pid, <<"Unexpected call">>), ignored = gen_server:call(Pid, <<"Unexpected call">>),
gen_server:cast(Pid, <<"Unexpected cast">>), gen_server:cast(Pid, <<"Unexpected cast">>),
Pid ! <<"Unexpected info">>. Pid ! <<"Unexpected info">>.
View File
@ -89,7 +89,7 @@ t_open_session(_) ->
ok = meck:expect(emqx_connection, call, fun(_, _) -> ok end), ok = meck:expect(emqx_connection, call, fun(_, _) -> ok end),
ok = meck:expect(emqx_connection, call, fun(_, _, _) -> ok end), ok = meck:expect(emqx_connection, call, fun(_, _, _) -> ok end),
ClientInfo = #{zone => default, listener => mqtt_tcp, ClientInfo = #{zone => default, listener => {tcp, default},
clientid => <<"clientid">>, clientid => <<"clientid">>,
username => <<"username">>, username => <<"username">>,
peerhost => {127,0,0,1}}, peerhost => {127,0,0,1}},
@ -114,7 +114,7 @@ rand_client_id() ->
t_open_session_race_condition(_) -> t_open_session_race_condition(_) ->
ClientId = rand_client_id(), ClientId = rand_client_id(),
ClientInfo = #{zone => default, listener => mqtt_tcp, ClientInfo = #{zone => default, listener => {tcp, default},
clientid => ClientId, clientid => ClientId,
username => <<"username">>, username => <<"username">>,
peerhost => {127,0,0,1}}, peerhost => {127,0,0,1}},
View File
@ -0,0 +1,50 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_config_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
all() -> emqx_ct:all(?MODULE).
init_per_suite(Config) ->
emqx_ct_helpers:boot_modules(all),
emqx_ct_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps([]).
t_fill_default_values(_) ->
Conf = #{
<<"broker">> => #{
<<"perf">> => #{},
<<"route_batch_clean">> => false}
},
?assertMatch(#{<<"broker">> :=
#{<<"enable_session_registry">> := true,
<<"perf">> :=
#{<<"route_lock_type">> := key,
<<"trie_compaction">> := true},
<<"route_batch_clean">> := false,
<<"session_locking_strategy">> := quorum,
<<"shared_dispatch_ack_enabled">> := false,
<<"shared_subscription_strategy">> := round_robin,
<<"sys_heartbeat_interval">> := "30s",
<<"sys_msg_interval">> := "1m"}},
emqx_config:fill_defaults(Conf)).
View File
@ -57,7 +57,7 @@ init_per_suite(Config) ->
ok = meck:expect(emqx_alarm, deactivate, fun(_) -> ok end), ok = meck:expect(emqx_alarm, deactivate, fun(_) -> ok end),
ok = meck:expect(emqx_alarm, deactivate, fun(_, _) -> ok end), ok = meck:expect(emqx_alarm, deactivate, fun(_, _) -> ok end),
emqx_channel_SUITE:set_default_zone_conf(), emqx_channel_SUITE:set_test_listenser_confs(),
Config. Config.
end_per_suite(_Config) -> end_per_suite(_Config) ->
@ -219,9 +219,9 @@ t_handle_msg_deliver(_) ->
t_handle_msg_inet_reply(_) -> t_handle_msg_inet_reply(_) ->
ok = meck:expect(emqx_pd, get_counter, fun(_) -> 10 end), ok = meck:expect(emqx_pd, get_counter, fun(_) -> 10 end),
emqx_config:put_listener_conf(default, mqtt_tcp, [tcp, active_n], 0), emqx_config:put_listener_conf(tcp, default, [tcp, active_n], 0),
?assertMatch({ok, _St}, handle_msg({inet_reply, for_testing, ok}, st())), ?assertMatch({ok, _St}, handle_msg({inet_reply, for_testing, ok}, st())),
emqx_config:put_listener_conf(default, mqtt_tcp, [tcp, active_n], 100), emqx_config:put_listener_conf(tcp, default, [tcp, active_n], 100),
?assertEqual(ok, handle_msg({inet_reply, for_testing, ok}, st())), ?assertEqual(ok, handle_msg({inet_reply, for_testing, ok}, st())),
?assertMatch({stop, {shutdown, for_testing}, _St}, ?assertMatch({stop, {shutdown, for_testing}, _St},
handle_msg({inet_reply, for_testing, {error, for_testing}}, st())). handle_msg({inet_reply, for_testing, {error, for_testing}}, st())).
@ -456,7 +456,7 @@ with_conn(TestFun, Opts) when is_map(Opts) ->
TrapExit = maps:get(trap_exit, Opts, false), TrapExit = maps:get(trap_exit, Opts, false),
process_flag(trap_exit, TrapExit), process_flag(trap_exit, TrapExit),
{ok, CPid} = emqx_connection:start_link(emqx_transport, sock, {ok, CPid} = emqx_connection:start_link(emqx_transport, sock,
maps:merge(Opts, #{zone => default, listener => mqtt_tcp})), maps:merge(Opts, #{zone => default, listener => {tcp, default}})),
TestFun(CPid), TestFun(CPid),
TrapExit orelse emqx_connection:stop(CPid), TrapExit orelse emqx_connection:stop(CPid),
ok. ok.
@ -479,7 +479,7 @@ st(InitFields) when is_map(InitFields) ->
st(InitFields, #{}). st(InitFields, #{}).
st(InitFields, ChannelFields) when is_map(InitFields) -> st(InitFields, ChannelFields) when is_map(InitFields) ->
St = emqx_connection:init_state(emqx_transport, sock, #{zone => default, St = emqx_connection:init_state(emqx_transport, sock, #{zone => default,
listener => mqtt_tcp}), listener => {tcp, default}}),
maps:fold(fun(N, V, S) -> emqx_connection:set_field(N, V, S) end, maps:fold(fun(N, V, S) -> emqx_connection:set_field(N, V, S) end,
emqx_connection:set_field(channel, channel(ChannelFields), St), emqx_connection:set_field(channel, channel(ChannelFields), St),
InitFields InitFields
@ -500,7 +500,7 @@ channel(InitFields) ->
expiry_interval => 0 expiry_interval => 0
}, },
ClientInfo = #{zone => default, ClientInfo = #{zone => default,
listener => mqtt_tcp, listener => {tcp, default},
protocol => mqtt, protocol => mqtt,
peerhost => {127,0,0,1}, peerhost => {127,0,0,1},
clientid => <<"clientid">>, clientid => <<"clientid">>,
@ -513,7 +513,7 @@ channel(InitFields) ->
maps:fold(fun(Field, Value, Channel) -> maps:fold(fun(Field, Value, Channel) ->
emqx_channel:set_field(Field, Value, Channel) emqx_channel:set_field(Field, Value, Channel)
end, end,
emqx_channel:init(ConnInfo, #{zone => default, listener => mqtt_tcp}), emqx_channel:init(ConnInfo, #{zone => default, listener => {tcp, default}}),
maps:merge(#{clientinfo => ClientInfo, maps:merge(#{clientinfo => ClientInfo,
session => Session, session => Session,
conn_state => connected conn_state => connected
View File
@ -40,7 +40,7 @@ end_per_suite(_Config) ->
t_detect_check(_) -> t_detect_check(_) ->
ClientInfo = #{zone => default, ClientInfo = #{zone => default,
listener => mqtt_tcp, listener => {tcp, default},
clientid => <<"client007">>, clientid => <<"client007">>,
peerhost => {127,0,0,1} peerhost => {127,0,0,1}
}, },
@ -55,8 +55,8 @@ t_detect_check(_) ->
true = emqx_banned:check(ClientInfo), true = emqx_banned:check(ClientInfo),
timer:sleep(3000), timer:sleep(3000),
false = emqx_banned:check(ClientInfo), false = emqx_banned:check(ClientInfo),
Childrens = supervisor:which_children(emqx_cm_sup), Children = supervisor:which_children(emqx_cm_sup),
{flapping, Pid, _, _} = lists:keyfind(flapping, 1, Childrens), {emqx_flapping, Pid, _, _} = lists:keyfind(emqx_flapping, 1, Children),
gen_server:call(Pid, unexpected_msg), gen_server:call(Pid, unexpected_msg),
gen_server:cast(Pid, unexpected_msg), gen_server:cast(Pid, unexpected_msg),
Pid ! test, Pid ! test,
@ -64,7 +64,7 @@ t_detect_check(_) ->
t_expired_detecting(_) -> t_expired_detecting(_) ->
ClientInfo = #{zone => default, ClientInfo = #{zone => default,
listener => mqtt_tcp, listener => {tcp, default},
clientid => <<"client008">>, clientid => <<"client008">>,
peerhost => {127,0,0,1}}, peerhost => {127,0,0,1}},
false = emqx_flapping:detect(ClientInfo), false = emqx_flapping:detect(ClientInfo),
@ -72,4 +72,4 @@ t_expired_detecting(_) ->
(_) -> false end, ets:tab2list(emqx_flapping))), (_) -> false end, ets:tab2list(emqx_flapping))),
timer:sleep(200), timer:sleep(200),
?assertEqual(true, lists:all(fun({flapping, <<"client008">>, _, _, _}) -> false; ?assertEqual(true, lists:all(fun({flapping, <<"client008">>, _, _, _}) -> false;
(_) -> true end, ets:tab2list(emqx_flapping))). (_) -> true end, ets:tab2list(emqx_flapping))).

View File

@ -37,6 +37,14 @@ end_per_suite(_Config) ->
application:stop(esockd), application:stop(esockd),
application:stop(cowboy). application:stop(cowboy).
init_per_testcase(_, Config) ->
{ok, _} = emqx_config_handler:start_link(),
Config.
end_per_testcase(_, _Config) ->
_ = emqx_config_handler:stop(),
ok.
t_start_stop_listeners(_) -> t_start_stop_listeners(_) ->
ok = emqx_listeners:start(), ok = emqx_listeners:start(),
?assertException(error, _, emqx_listeners:start_listener({ws,{"127.0.0.1", 8083}, []})), ?assertException(error, _, emqx_listeners:start_listener({ws,{"127.0.0.1", 8083}, []})),

View File

@ -25,7 +25,7 @@
all() -> emqx_ct:all(?MODULE). all() -> emqx_ct:all(?MODULE).
t_check_pub(_) -> t_check_pub(_) ->
OldConf = emqx_config:get([zones]), OldConf = emqx:get_config([zones]),
emqx_config:put_zone_conf(default, [mqtt, max_qos_allowed], ?QOS_1), emqx_config:put_zone_conf(default, [mqtt, max_qos_allowed], ?QOS_1),
emqx_config:put_zone_conf(default, [mqtt, retain_available], false), emqx_config:put_zone_conf(default, [mqtt, retain_available], false),
timer:sleep(50), timer:sleep(50),
@ -39,7 +39,7 @@ t_check_pub(_) ->
emqx_config:put([zones], OldConf). emqx_config:put([zones], OldConf).
t_check_sub(_) -> t_check_sub(_) ->
OldConf = emqx_config:get([zones]), OldConf = emqx:get_config([zones]),
SubOpts = #{rh => 0, SubOpts = #{rh => 0,
rap => 0, rap => 0,
nl => 0, nl => 0,

View File

@ -1,3 +1,3 @@
emqx_hocon_plugin: { emqx_hocon_plugin {
name: test name = test
} }

View File

@ -17,7 +17,7 @@
{profiles, {profiles,
[{test, [ [{test, [
{deps, [ {emqx_ct_helper, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "v1.1.4"}}} {deps, [{emqx_ct_helpers, {git,"https://github.com/emqx/emqx-ct-helpers.git", {branch,"hocon"}}}
]} ]}
]} ]}
]}. ]}.

View File

@ -2,11 +2,11 @@
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-export([structs/0, fields/1]). -export([roots/0, fields/1]).
-behaviour(hocon_schema). -behaviour(hocon_schema).
structs() -> ["emqx_hocon_plugin"]. roots() -> ["emqx_hocon_plugin"].
fields("emqx_hocon_plugin") -> fields("emqx_hocon_plugin") ->
[{name, fun name/1}]. [{name, fun name/1}].
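For context, the hocon_schema callback rename (structs/0 to roots/0) means a complete plugin schema in the new style looks roughly like the sketch below; the module name and the name/1 typing clauses are assumptions, only the roots/0 and fields/1 parts are taken from the diff above.
    -module(emqx_hocon_plugin_schema).   %% hypothetical module name
    -include_lib("typerefl/include/types.hrl").
    -behaviour(hocon_schema).
    -export([roots/0, fields/1]).

    roots() -> ["emqx_hocon_plugin"].

    fields("emqx_hocon_plugin") ->
        [{name, fun name/1}].

    %% assumed field typing, not shown in the diff
    name(type) -> binary();
    name(_) -> undefined.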

View File

@ -17,7 +17,7 @@
{profiles, {profiles,
[{test, [ [{test, [
{deps, [ {emqx_ct_helper, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "v1.1.4"}}} {deps, [{emqx_ct_helpers, {git,"https://github.com/emqx/emqx-ct-helpers.git", {branch,"hocon"}}}
]} ]}
]} ]}
]}. ]}.

View File

@ -29,7 +29,7 @@ all() -> emqx_ct:all(?MODULE).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init_per_suite(Config) -> init_per_suite(Config) ->
emqx_channel_SUITE:set_default_zone_conf(), emqx_channel_SUITE:set_test_listenser_confs(),
ok = meck:new([emqx_hooks, emqx_metrics, emqx_broker], ok = meck:new([emqx_hooks, emqx_metrics, emqx_broker],
[passthrough, no_history, no_link]), [passthrough, no_history, no_link]),
ok = meck:expect(emqx_metrics, inc, fun(_) -> ok end), ok = meck:expect(emqx_metrics, inc, fun(_) -> ok end),

View File

@ -48,7 +48,7 @@ init_per_testcase(TestCase, Config) when
TestCase =/= t_ws_pingreq_before_connected, TestCase =/= t_ws_pingreq_before_connected,
TestCase =/= t_ws_non_check_origin TestCase =/= t_ws_non_check_origin
-> ->
emqx_channel_SUITE:set_default_zone_conf(), emqx_channel_SUITE:set_test_listenser_confs(),
%% Mock cowboy_req %% Mock cowboy_req
ok = meck:new(cowboy_req, [passthrough, no_history, no_link]), ok = meck:new(cowboy_req, [passthrough, no_history, no_link]),
ok = meck:expect(cowboy_req, header, fun(_, _, _) -> <<>> end), ok = meck:expect(cowboy_req, header, fun(_, _, _) -> <<>> end),
@ -119,7 +119,7 @@ t_info(_) ->
} = SockInfo. } = SockInfo.
set_ws_opts(Key, Val) -> set_ws_opts(Key, Val) ->
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, Key], Val). emqx_config:put_listener_conf(ws, default, [websocket, Key], Val).
t_header(_) -> t_header(_) ->
ok = meck:expect(cowboy_req, header, ok = meck:expect(cowboy_req, header,
@ -127,7 +127,7 @@ t_header(_) ->
(<<"x-forwarded-port">>, _, _) -> <<"1000">> end), (<<"x-forwarded-port">>, _, _) -> <<"1000">> end),
set_ws_opts(proxy_address_header, <<"x-forwarded-for">>), set_ws_opts(proxy_address_header, <<"x-forwarded-for">>),
set_ws_opts(proxy_port_header, <<"x-forwarded-port">>), set_ws_opts(proxy_port_header, <<"x-forwarded-port">>),
{ok, St, _} = ?ws_conn:websocket_init([req, #{zone => default, listener => mqtt_ws}]), {ok, St, _} = ?ws_conn:websocket_init([req, #{zone => default, listener => {ws, default}}]),
WsPid = spawn(fun() -> WsPid = spawn(fun() ->
receive {call, From, info} -> receive {call, From, info} ->
gen_server:reply(From, ?ws_conn:info(St)) gen_server:reply(From, ?ws_conn:info(St))
@ -222,8 +222,8 @@ t_ws_sub_protocols_mqtt_equivalents(_) ->
start_ws_client(#{protocols => [<<"not-mqtt">>]})). start_ws_client(#{protocols => [<<"not-mqtt">>]})).
t_ws_check_origin(_) -> t_ws_check_origin(_) ->
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, check_origin_enable], true), emqx_config:put_listener_conf(ws, default, [websocket, check_origin_enable], true),
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, check_origins], emqx_config:put_listener_conf(ws, default, [websocket, check_origins],
[<<"http://localhost:18083">>]), [<<"http://localhost:18083">>]),
{ok, _} = application:ensure_all_started(gun), {ok, _} = application:ensure_all_started(gun),
?assertMatch({gun_upgrade, _}, ?assertMatch({gun_upgrade, _},
@ -234,8 +234,8 @@ t_ws_check_origin(_) ->
headers => [{<<"origin">>, <<"http://localhost:18080">>}]})). headers => [{<<"origin">>, <<"http://localhost:18080">>}]})).
t_ws_non_check_origin(_) -> t_ws_non_check_origin(_) ->
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, check_origin_enable], false), emqx_config:put_listener_conf(ws, default, [websocket, check_origin_enable], false),
emqx_config:put_listener_conf(default, mqtt_ws, [websocket, check_origins], []), emqx_config:put_listener_conf(ws, default, [websocket, check_origins], []),
{ok, _} = application:ensure_all_started(gun), {ok, _} = application:ensure_all_started(gun),
?assertMatch({gun_upgrade, _}, ?assertMatch({gun_upgrade, _},
start_ws_client(#{protocols => [<<"mqtt">>], start_ws_client(#{protocols => [<<"mqtt">>],
@ -245,7 +245,7 @@ t_ws_non_check_origin(_) ->
headers => [{<<"origin">>, <<"http://localhost:18080">>}]})). headers => [{<<"origin">>, <<"http://localhost:18080">>}]})).
t_init(_) -> t_init(_) ->
Opts = #{listener => mqtt_ws, zone => default}, Opts = #{listener => {ws, default}, zone => default},
ok = meck:expect(cowboy_req, parse_header, fun(_, req) -> undefined end), ok = meck:expect(cowboy_req, parse_header, fun(_, req) -> undefined end),
ok = meck:expect(cowboy_req, reply, fun(_, Req) -> Req end), ok = meck:expect(cowboy_req, reply, fun(_, Req) -> Req end),
{ok, req, _} = ?ws_conn:init(req, Opts), {ok, req, _} = ?ws_conn:init(req, Opts),
@ -438,7 +438,7 @@ t_shutdown(_) ->
st() -> st(#{}). st() -> st(#{}).
st(InitFields) when is_map(InitFields) -> st(InitFields) when is_map(InitFields) ->
{ok, St, _} = ?ws_conn:websocket_init([req, #{zone => default, listener => mqtt_ws}]), {ok, St, _} = ?ws_conn:websocket_init([req, #{zone => default, listener => {ws, default}}]),
maps:fold(fun(N, V, S) -> ?ws_conn:set_field(N, V, S) end, maps:fold(fun(N, V, S) -> ?ws_conn:set_field(N, V, S) end,
?ws_conn:set_field(channel, channel(), St), ?ws_conn:set_field(channel, channel(), St),
InitFields InitFields
@ -459,7 +459,7 @@ channel(InitFields) ->
expiry_interval => 0 expiry_interval => 0
}, },
ClientInfo = #{zone => default, ClientInfo = #{zone => default,
listener => mqtt_ws, listener => {ws, default},
protocol => mqtt, protocol => mqtt,
peerhost => {127,0,0,1}, peerhost => {127,0,0,1},
clientid => <<"clientid">>, clientid => <<"clientid">>,
@ -472,7 +472,7 @@ channel(InitFields) ->
maps:fold(fun(Field, Value, Channel) -> maps:fold(fun(Field, Value, Channel) ->
emqx_channel:set_field(Field, Value, Channel) emqx_channel:set_field(Field, Value, Channel)
end, end,
emqx_channel:init(ConnInfo, #{zone => default, listener => mqtt_ws}), emqx_channel:init(ConnInfo, #{zone => default, listener => {ws, default}}),
maps:merge(#{clientinfo => ClientInfo, maps:merge(#{clientinfo => ClientInfo,
session => Session, session => Session,
conn_state => connected conn_state => connected

View File

@ -1,3 +1,3 @@
user_id,password_hash,salt user_id,password_hash,salt,is_superuser
myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235 myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235,true
myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139 myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139,false

View File

@ -2,11 +2,13 @@
{ {
"user_id":"myuser1", "user_id":"myuser1",
"password_hash":"c5e46903df45e5dc096dc74657610dbee8deaacae656df88a1788f1847390242", "password_hash":"c5e46903df45e5dc096dc74657610dbee8deaacae656df88a1788f1847390242",
"salt": "e378187547bf2d6f0545a3f441aa4d8a" "salt": "e378187547bf2d6f0545a3f441aa4d8a",
"is_superuser": true
}, },
{ {
"user_id":"myuser2", "user_id":"myuser2",
"password_hash":"f4d17f300b11e522fd33f497c11b126ef1ea5149c74d2220f9a16dc876d4567b", "password_hash":"f4d17f300b11e522fd33f497c11b126ef1ea5149c74d2220f9a16dc876d4567b",
"salt": "6d3f9bd5b54d94b98adbcfe10b6d181f" "salt": "6d3f9bd5b54d94b98adbcfe10b6d181f",
"is_superuser": false
} }
] ]
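Both fixture formats above now carry an is_superuser column/field. A hedged usage sketch of feeding such a file into the built-in-database provider, assuming it exposes import_users/2 as in the emqx_authn_mnesia hunks further down; the file path and the State value (obtained from the provider's create/1) are placeholders:
    %% State is the provider state returned by emqx_authn_mnesia:create/1
    _ = emqx_authn_mnesia:import_users("path/to/user-credentials.json", State).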

View File

@ -1,37 +1,6 @@
authentication: { # authentication: {
enable: false # mechanism: password-based
authenticators: [ # backend: built-in-database
# { # user_id_type: clientid
# name: "authenticator1" # }
# mechanism: password-based
# server_type: built-in-database
# user_id_type: clientid
# },
# {
# name: "authenticator2"
# mechanism: password-based
# server_type: mongodb
# server: "127.0.0.1:27017"
# database: mqtt
# collection: users
# selector: {
# username: "${mqtt-username}"
# }
# password_hash_field: password_hash
# salt_field: salt
# password_hash_algorithm: sha256
# salt_position: prefix
# },
# {
# name: "authenticator 3"
# mechanism: password-based
# server_type: redis
# server: "127.0.0.1:6379"
# password: "public"
# database: 0
# query: "HMGET ${mqtt-username} password_hash salt"
# password_hash_algorithm: sha256
# salt_position: prefix
# }
]
}

View File

@ -15,25 +15,11 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-define(APP, emqx_authn). -define(APP, emqx_authn).
-define(CHAIN, <<"mqtt">>).
-define(VER_1, <<"1">>). -define(AUTHN, emqx_authentication).
-define(VER_2, <<"2">>).
-define(GLOBAL, 'mqtt:global').
-define(RE_PLACEHOLDER, "\\$\\{[a-z0-9\\-]+\\}"). -define(RE_PLACEHOLDER, "\\$\\{[a-z0-9\\-]+\\}").
-record(authenticator,
{ id :: binary()
, name :: binary()
, provider :: module()
, config :: map()
, state :: map()
}).
-record(chain,
{ id :: binary()
, authenticators :: [{binary(), binary(), #authenticator{}}]
, created_at :: integer()
}).
-define(AUTH_SHARD, emqx_authn_shard). -define(AUTH_SHARD, emqx_authn_shard).

View File

@ -1,6 +1,4 @@
{deps, [ {deps, []}.
{jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.1"}}}
]}.
{edoc_opts, [{preprocess, true}]}. {edoc_opts, [{preprocess, true}]}.
{erl_opts, [warn_unused_vars, {erl_opts, [warn_unused_vars,

View File

@ -15,427 +15,3 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-module(emqx_authn). -module(emqx_authn).
-include("emqx_authn.hrl").
-export([ enable/0
, disable/0
, is_enabled/0
]).
-export([authenticate/2]).
-export([ create_chain/1
, delete_chain/1
, lookup_chain/1
, list_chains/0
, create_authenticator/2
, delete_authenticator/2
, update_authenticator/3
, update_or_create_authenticator/3
, lookup_authenticator/2
, list_authenticators/1
, move_authenticator_to_the_nth/3
]).
-export([ import_users/3
, add_user/3
, delete_user/3
, update_user/4
, lookup_user/3
, list_users/2
]).
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}).
-define(CHAIN_TAB, emqx_authn_chain).
-rlog_shard({?AUTH_SHARD, ?CHAIN_TAB}).
%%------------------------------------------------------------------------------
%% Mnesia bootstrap
%%------------------------------------------------------------------------------
%% @doc Create or replicate tables.
-spec(mnesia(boot) -> ok).
mnesia(boot) ->
%% Optimize storage
StoreProps = [{ets, [{read_concurrency, true}]}],
%% Chain table
ok = ekka_mnesia:create_table(?CHAIN_TAB, [
{ram_copies, [node()]},
{record_name, chain},
{local_content, true},
{attributes, record_info(fields, chain)},
{storage_properties, StoreProps}]);
mnesia(copy) ->
ok = ekka_mnesia:copy_table(?CHAIN_TAB, ram_copies).
enable() ->
case emqx:hook('client.authenticate', {?MODULE, authenticate, []}) of
ok -> ok;
{error, already_exists} -> ok
end.
disable() ->
emqx:unhook('client.authenticate', {?MODULE, authenticate, []}),
ok.
is_enabled() ->
Callbacks = emqx_hooks:lookup('client.authenticate'),
lists:any(fun({callback, {?MODULE, authenticate, []}, _, _}) ->
true;
(_) ->
false
end, Callbacks).
authenticate(Credential, _AuthResult) ->
case mnesia:dirty_read(?CHAIN_TAB, ?CHAIN) of
[#chain{authenticators = Authenticators}] ->
do_authenticate(Authenticators, Credential);
[] ->
{stop, {error, not_authorized}}
end.
do_authenticate([], _) ->
{stop, {error, not_authorized}};
do_authenticate([{_, _, #authenticator{provider = Provider, state = State}} | More], Credential) ->
case Provider:authenticate(Credential, State) of
ignore ->
do_authenticate(More, Credential);
Result ->
%% ok
%% {ok, AuthData}
%% {continue, AuthCache}
%% {continue, AuthData, AuthCache}
%% {error, Reason}
{stop, Result}
end.
create_chain(#{id := ID}) ->
trans(
fun() ->
case mnesia:read(?CHAIN_TAB, ID, write) of
[] ->
Chain = #chain{id = ID,
authenticators = [],
created_at = erlang:system_time(millisecond)},
mnesia:write(?CHAIN_TAB, Chain, write),
{ok, serialize_chain(Chain)};
[_ | _] ->
{error, {already_exists, {chain, ID}}}
end
end).
delete_chain(ID) ->
trans(
fun() ->
case mnesia:read(?CHAIN_TAB, ID, write) of
[] ->
{error, {not_found, {chain, ID}}};
[#chain{authenticators = Authenticators}] ->
_ = [do_delete_authenticator(Authenticator) || {_, _, Authenticator} <- Authenticators],
mnesia:delete(?CHAIN_TAB, ID, write)
end
end).
lookup_chain(ID) ->
case mnesia:dirty_read(?CHAIN_TAB, ID) of
[] ->
{error, {not_found, {chain, ID}}};
[Chain] ->
{ok, serialize_chain(Chain)}
end.
list_chains() ->
Chains = ets:tab2list(?CHAIN_TAB),
{ok, [serialize_chain(Chain) || Chain <- Chains]}.
create_authenticator(ChainID, #{name := Name} = Config) ->
UpdateFun =
fun(Chain = #chain{authenticators = Authenticators}) ->
case lists:keymember(Name, 2, Authenticators) of
true ->
{error, name_has_be_used};
false ->
AlreadyExist = fun(ID) ->
lists:keymember(ID, 1, Authenticators)
end,
AuthenticatorID = gen_id(AlreadyExist),
case do_create_authenticator(ChainID, AuthenticatorID, Config) of
{ok, Authenticator} ->
NAuthenticators = Authenticators ++ [{AuthenticatorID, Name, Authenticator}],
ok = mnesia:write(?CHAIN_TAB, Chain#chain{authenticators = NAuthenticators}, write),
{ok, serialize_authenticator(Authenticator)};
{error, Reason} ->
{error, Reason}
end
end
end,
update_chain(ChainID, UpdateFun).
delete_authenticator(ChainID, AuthenticatorID) ->
UpdateFun = fun(Chain = #chain{authenticators = Authenticators}) ->
case lists:keytake(AuthenticatorID, 1, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
{value, {_, _, Authenticator}, NAuthenticators} ->
_ = do_delete_authenticator(Authenticator),
NChain = Chain#chain{authenticators = NAuthenticators},
mnesia:write(?CHAIN_TAB, NChain, write)
end
end,
update_chain(ChainID, UpdateFun).
update_authenticator(ChainID, AuthenticatorID, Config) ->
do_update_authenticator(ChainID, AuthenticatorID, Config, false).
update_or_create_authenticator(ChainID, AuthenticatorID, Config) ->
do_update_authenticator(ChainID, AuthenticatorID, Config, true).
do_update_authenticator(ChainID, AuthenticatorID, #{name := NewName} = Config, CreateWhenNotFound) ->
UpdateFun = fun(Chain = #chain{authenticators = Authenticators}) ->
case lists:keytake(AuthenticatorID, 1, Authenticators) of
false ->
case CreateWhenNotFound of
true ->
case lists:keymember(NewName, 2, Authenticators) of
true ->
{error, name_has_be_used};
false ->
case do_create_authenticator(ChainID, AuthenticatorID, Config) of
{ok, Authenticator} ->
NAuthenticators = Authenticators ++ [{AuthenticatorID, NewName, Authenticator}],
ok = mnesia:write(?CHAIN_TAB, Chain#chain{authenticators = NAuthenticators}, write),
{ok, serialize_authenticator(Authenticator)};
{error, Reason} ->
{error, Reason}
end
end;
false ->
{error, {not_found, {authenticator, AuthenticatorID}}}
end;
{value,
{_, _, #authenticator{provider = Provider,
state = #{version := Version} = State} = Authenticator},
Others} ->
case lists:keymember(NewName, 2, Others) of
true ->
{error, name_has_be_used};
false ->
case (NewProvider = authenticator_provider(Config)) =:= Provider of
true ->
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", Version/binary>>,
case Provider:update(Config#{'_unique' => Unique}, State) of
{ok, NewState} ->
NewAuthenticator = Authenticator#authenticator{name = NewName,
config = Config,
state = switch_version(NewState)},
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
ok = mnesia:write(?CHAIN_TAB, Chain#chain{authenticators = NewAuthenticators}, write),
{ok, serialize_authenticator(NewAuthenticator)};
{error, Reason} ->
{error, Reason}
end;
false ->
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", Version/binary>>,
case NewProvider:create(Config#{'_unique' => Unique}) of
{ok, NewState} ->
NewAuthenticator = Authenticator#authenticator{name = NewName,
provider = NewProvider,
config = Config,
state = switch_version(NewState)},
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
ok = mnesia:write(?CHAIN_TAB, Chain#chain{authenticators = NewAuthenticators}, write),
_ = Provider:destroy(State),
{ok, serialize_authenticator(NewAuthenticator)};
{error, Reason} ->
{error, Reason}
end
end
end
end
end,
update_chain(ChainID, UpdateFun).
lookup_authenticator(ChainID, AuthenticatorID) ->
case mnesia:dirty_read(?CHAIN_TAB, ChainID) of
[] ->
{error, {not_found, {chain, ChainID}}};
[#chain{authenticators = Authenticators}] ->
case lists:keyfind(AuthenticatorID, 1, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
{_, _, Authenticator} ->
{ok, serialize_authenticator(Authenticator)}
end
end.
list_authenticators(ChainID) ->
case mnesia:dirty_read(?CHAIN_TAB, ChainID) of
[] ->
{error, {not_found, {chain, ChainID}}};
[#chain{authenticators = Authenticators}] ->
{ok, serialize_authenticators(Authenticators)}
end.
move_authenticator_to_the_nth(ChainID, AuthenticatorID, N) ->
UpdateFun = fun(Chain = #chain{authenticators = Authenticators}) ->
case move_authenticator_to_the_nth_(AuthenticatorID, Authenticators, N) of
{ok, NAuthenticators} ->
NChain = Chain#chain{authenticators = NAuthenticators},
mnesia:write(?CHAIN_TAB, NChain, write);
{error, Reason} ->
{error, Reason}
end
end,
update_chain(ChainID, UpdateFun).
import_users(ChainID, AuthenticatorID, Filename) ->
call_authenticator(ChainID, AuthenticatorID, import_users, [Filename]).
add_user(ChainID, AuthenticatorID, UserInfo) ->
call_authenticator(ChainID, AuthenticatorID, add_user, [UserInfo]).
delete_user(ChainID, AuthenticatorID, UserID) ->
call_authenticator(ChainID, AuthenticatorID, delete_user, [UserID]).
update_user(ChainID, AuthenticatorID, UserID, NewUserInfo) ->
call_authenticator(ChainID, AuthenticatorID, update_user, [UserID, NewUserInfo]).
lookup_user(ChainID, AuthenticatorID, UserID) ->
call_authenticator(ChainID, AuthenticatorID, lookup_user, [UserID]).
list_users(ChainID, AuthenticatorID) ->
call_authenticator(ChainID, AuthenticatorID, list_users, []).
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
authenticator_provider(#{mechanism := 'password-based', server_type := 'built-in-database'}) ->
emqx_authn_mnesia;
authenticator_provider(#{mechanism := 'password-based', server_type := 'mysql'}) ->
emqx_authn_mysql;
authenticator_provider(#{mechanism := 'password-based', server_type := 'pgsql'}) ->
emqx_authn_pgsql;
authenticator_provider(#{mechanism := 'password-based', server_type := 'mongodb'}) ->
emqx_authn_mongodb;
authenticator_provider(#{mechanism := 'password-based', server_type := 'redis'}) ->
emqx_authn_redis;
authenticator_provider(#{mechanism := 'password-based', server_type := 'http-server'}) ->
emqx_authn_http;
authenticator_provider(#{mechanism := jwt}) ->
emqx_authn_jwt;
authenticator_provider(#{mechanism := scram, server_type := 'built-in-database'}) ->
emqx_enhanced_authn_scram_mnesia.
gen_id(AlreadyExist) ->
ID = list_to_binary(emqx_rule_id:gen()),
case AlreadyExist(ID) of
true -> gen_id(AlreadyExist);
false -> ID
end.
switch_version(State = #{version := ?VER_1}) ->
State#{version := ?VER_2};
switch_version(State = #{version := ?VER_2}) ->
State#{version := ?VER_1};
switch_version(State) ->
State#{version => ?VER_1}.
do_create_authenticator(ChainID, AuthenticatorID, #{name := Name} = Config) ->
Provider = authenticator_provider(Config),
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", ?VER_1/binary>>,
case Provider:create(Config#{'_unique' => Unique}) of
{ok, State} ->
Authenticator = #authenticator{id = AuthenticatorID,
name = Name,
provider = Provider,
config = Config,
state = switch_version(State)},
{ok, Authenticator};
{error, Reason} ->
{error, Reason}
end.
do_delete_authenticator(#authenticator{provider = Provider, state = State}) ->
_ = Provider:destroy(State),
ok.
replace_authenticator(ID, #authenticator{name = Name} = Authenticator, Authenticators) ->
lists:keyreplace(ID, 1, Authenticators, {ID, Name, Authenticator}).
move_authenticator_to_the_nth_(AuthenticatorID, Authenticators, N)
when N =< length(Authenticators) andalso N > 0 ->
move_authenticator_to_the_nth_(AuthenticatorID, Authenticators, N, []);
move_authenticator_to_the_nth_(_, _, _) ->
{error, out_of_range}.
move_authenticator_to_the_nth_(AuthenticatorID, [], _, _) ->
{error, {not_found, {authenticator, AuthenticatorID}}};
move_authenticator_to_the_nth_(AuthenticatorID, [{AuthenticatorID, _, _} = Authenticator | More], N, Passed)
when N =< length(Passed) ->
{L1, L2} = lists:split(N - 1, lists:reverse(Passed)),
{ok, L1 ++ [Authenticator] ++ L2 ++ More};
move_authenticator_to_the_nth_(AuthenticatorID, [{AuthenticatorID, _, _} = Authenticator | More], N, Passed) ->
{L1, L2} = lists:split(N - length(Passed) - 1, More),
{ok, lists:reverse(Passed) ++ L1 ++ [Authenticator] ++ L2};
move_authenticator_to_the_nth_(AuthenticatorID, [Authenticator | More], N, Passed) ->
move_authenticator_to_the_nth_(AuthenticatorID, More, N, [Authenticator | Passed]).
update_chain(ChainID, UpdateFun) ->
trans(
fun() ->
case mnesia:read(?CHAIN_TAB, ChainID, write) of
[] ->
{error, {not_found, {chain, ChainID}}};
[Chain] ->
UpdateFun(Chain)
end
end).
call_authenticator(ChainID, AuthenticatorID, Func, Args) ->
case mnesia:dirty_read(?CHAIN_TAB, ChainID) of
[] ->
{error, {not_found, {chain, ChainID}}};
[#chain{authenticators = Authenticators}] ->
case lists:keyfind(AuthenticatorID, 1, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
{_, _, #authenticator{provider = Provider, state = State}} ->
case erlang:function_exported(Provider, Func, length(Args) + 1) of
true ->
erlang:apply(Provider, Func, Args ++ [State]);
false ->
{error, unsupported_feature}
end
end
end.
serialize_chain(#chain{id = ID,
authenticators = Authenticators,
created_at = CreatedAt}) ->
#{id => ID,
authenticators => serialize_authenticators(Authenticators),
created_at => CreatedAt}.
serialize_authenticators(Authenticators) ->
[serialize_authenticator(Authenticator) || {_, _, Authenticator} <- Authenticators].
serialize_authenticator(#authenticator{id = ID,
config = Config}) ->
Config#{id => ID}.
trans(Fun) ->
trans(Fun, []).
trans(Fun, Args) ->
case ekka_mnesia:transaction(?AUTH_SHARD, Fun, Args) of
{atomic, Res} -> Res;
{aborted, Reason} -> {error, Reason}
end.

File diff suppressed because it is too large

View File

@ -17,7 +17,6 @@
-module(emqx_authn_app). -module(emqx_authn_app).
-include("emqx_authn.hrl"). -include("emqx_authn.hrl").
-include_lib("emqx/include/logger.hrl").
-behaviour(application). -behaviour(application).
@ -26,32 +25,45 @@
, stop/1 , stop/1
]). ]).
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
start(_StartType, _StartArgs) -> start(_StartType, _StartArgs) ->
{ok, Sup} = emqx_authn_sup:start_link(),
ok = ekka_rlog:wait_for_shards([?AUTH_SHARD], infinity), ok = ekka_rlog:wait_for_shards([?AUTH_SHARD], infinity),
initialize(), {ok, Sup} = emqx_authn_sup:start_link(),
ok = add_providers(),
ok = initialize(),
{ok, Sup}. {ok, Sup}.
stop(_State) -> stop(_State) ->
ok = remove_providers(),
ok. ok.
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
add_providers() ->
_ = [?AUTHN:add_provider(AuthNType, Provider) || {AuthNType, Provider} <- providers()], ok.
remove_providers() ->
_ = [?AUTHN:remove_provider(AuthNType) || {AuthNType, _} <- providers()], ok.
initialize() -> initialize() ->
AuthNConfig = emqx_config:get([authentication], #{enable => false, ?AUTHN:initialize_authentication(?GLOBAL, emqx:get_raw_config([authentication], [])),
authenticators => []}), lists:foreach(fun({ListenerID, ListenerConfig}) ->
initialize(AuthNConfig). ?AUTHN:initialize_authentication(ListenerID, maps:get(authentication, ListenerConfig, []))
end, emqx_listeners:list()),
initialize(#{enable := Enable, authenticators := AuthenticatorsConfig}) ->
{ok, _} = emqx_authn:create_chain(#{id => ?CHAIN}),
initialize_authenticators(AuthenticatorsConfig),
Enable =:= true andalso emqx_authn:enable(),
ok. ok.
initialize_authenticators([]) -> providers() ->
ok; [ {{'password-based', 'built-in-database'}, emqx_authn_mnesia}
initialize_authenticators([#{name := Name} = AuthenticatorConfig | More]) -> , {{'password-based', mysql}, emqx_authn_mysql}
case emqx_authn:create_authenticator(?CHAIN, AuthenticatorConfig) of , {{'password-based', posgresql}, emqx_authn_pgsql}
{ok, _} -> , {{'password-based', mongodb}, emqx_authn_mongodb}
initialize_authenticators(More); , {{'password-based', redis}, emqx_authn_redis}
{error, Reason} -> , {{'password-based', 'http-server'}, emqx_authn_http}
?LOG(error, "Failed to create authenticator '~s': ~p", [Name, Reason]) , {jwt, emqx_authn_jwt}
end. , {{scram, 'built-in-database'}, emqx_enhanced_authn_scram_mnesia}
].
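A hedged sketch of what the add_providers/remove_providers helpers above do for a single entry (?AUTHN expands to emqx_authentication per the header change earlier in this diff):
    %% register one authenticator provider under its {Mechanism, Backend} type
    _ = emqx_authentication:add_provider({'password-based', 'built-in-database'},
                                         emqx_authn_mnesia),
    %% ... and the inverse on application stop
    _ = emqx_authentication:remove_provider({'password-based', 'built-in-database'}).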

View File

@ -16,53 +16,15 @@
-module(emqx_authn_schema). -module(emqx_authn_schema).
-include("emqx_authn.hrl").
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -export([ common_fields/0
-export([ structs/0
, fields/1
]). ]).
-export([ authenticator_name/1 common_fields() ->
]). [ {enable, fun enable/1}
%% Export it for emqx_gateway_schema module
-export([ authenticators/1
]).
structs() -> [ "authentication" ].
fields("authentication") ->
[ {enable, fun enable/1}
, {authenticators, fun authenticators/1}
]. ].
authenticator_name(type) -> binary();
authenticator_name(nullable) -> false;
authenticator_name(_) -> undefined.
enable(type) -> boolean(); enable(type) -> boolean();
enable(default) -> false; enable(default) -> true;
enable(_) -> undefined. enable(_) -> undefined.
authenticators(type) ->
hoconsc:array({union, [ hoconsc:ref(emqx_authn_mnesia, config)
, hoconsc:ref(emqx_authn_mysql, config)
, hoconsc:ref(emqx_authn_pgsql, config)
, hoconsc:ref(emqx_authn_mongodb, standalone)
, hoconsc:ref(emqx_authn_mongodb, 'replica-set')
, hoconsc:ref(emqx_authn_mongodb, 'sharded-cluster')
, hoconsc:ref(emqx_authn_redis, standalone)
, hoconsc:ref(emqx_authn_redis, cluster)
, hoconsc:ref(emqx_authn_redis, sentinel)
, hoconsc:ref(emqx_authn_http, get)
, hoconsc:ref(emqx_authn_http, post)
, hoconsc:ref(emqx_authn_jwt, 'hmac-based')
, hoconsc:ref(emqx_authn_jwt, 'public-key')
, hoconsc:ref(emqx_authn_jwt, 'jwks')
, hoconsc:ref(emqx_enhanced_authn_scram_mnesia, config)
]});
authenticators(default) -> [];
authenticators(_) -> undefined.

View File

@ -26,4 +26,5 @@ start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []). supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) -> init([]) ->
{ok, {{one_for_one, 10, 10}, []}}. ChildSpecs = [],
{ok, {{one_for_one, 10, 10}, ChildSpecs}}.

View File

@ -17,16 +17,18 @@
-module(emqx_enhanced_authn_scram_mnesia). -module(emqx_enhanced_authn_scram_mnesia).
-include("emqx_authn.hrl"). -include("emqx_authn.hrl").
-include_lib("esasl/include/esasl_scram.hrl").
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
-behaviour(emqx_authentication).
-export([ structs/0 -export([ namespace/0
, roots/0
, fields/1 , fields/1
]). ]).
-export([ create/1 -export([ refs/0
, create/1
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -46,7 +48,13 @@
-boot_mnesia({mnesia, [boot]}). -boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}). -copy_mnesia({mnesia, [copy]}).
-rlog_shard({?AUTH_SHARD, ?TAB}). -record(user_info,
{ user_id
, stored_key
, server_key
, salt
, is_superuser
}).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Mnesia bootstrap %% Mnesia bootstrap
@ -56,9 +64,10 @@
-spec(mnesia(boot | copy) -> ok). -spec(mnesia(boot | copy) -> ok).
mnesia(boot) -> mnesia(boot) ->
ok = ekka_mnesia:create_table(?TAB, [ ok = ekka_mnesia:create_table(?TAB, [
{rlog_shard, ?AUTH_SHARD},
{disc_copies, [node()]}, {disc_copies, [node()]},
{record_name, scram_user_credentail}, {record_name, user_info},
{attributes, record_info(fields, scram_user_credentail)}, {attributes, record_info(fields, user_info)},
{storage_properties, [{ets, [{read_concurrency, true}]}]}]); {storage_properties, [{ets, [{read_concurrency, true}]}]}]);
mnesia(copy) -> mnesia(copy) ->
@ -68,19 +77,16 @@ mnesia(copy) ->
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
structs() -> [config]. namespace() -> "authn:scram:builtin-db".
roots() -> [config].
fields(config) -> fields(config) ->
[ {name, fun emqx_authn_schema:authenticator_name/1} [ {mechanism, {enum, [scram]}}
, {mechanism, {enum, [scram]}} , {backend, {enum, ['built-in-database']}}
, {server_type, fun server_type/1}
, {algorithm, fun algorithm/1} , {algorithm, fun algorithm/1}
, {iteration_count, fun iteration_count/1} , {iteration_count, fun iteration_count/1}
]. ] ++ emqx_authn_schema:common_fields().
server_type(type) -> hoconsc:enum(['built-in-database']);
server_type(default) -> 'built-in-database';
server_type(_) -> undefined.
algorithm(type) -> hoconsc:enum([sha256, sha512]); algorithm(type) -> hoconsc:enum([sha256, sha512]);
algorithm(default) -> sha256; algorithm(default) -> sha256;
@ -94,6 +100,9 @@ iteration_count(_) -> undefined.
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() ->
[hoconsc:ref(?MODULE, config)].
create(#{ algorithm := Algorithm create(#{ algorithm := Algorithm
, iteration_count := IterationCount , iteration_count := IterationCount
, '_unique' := Unique , '_unique' := Unique
@ -105,7 +114,7 @@ create(#{ algorithm := Algorithm
update(Config, #{user_group := Unique}) -> update(Config, #{user_group := Unique}) ->
create(Config#{'_unique' => Unique}). create(Config#{'_unique' => Unique}).
authenticate(#{auth_method := AuthMethod, authenticate(#{auth_method := AuthMethod,
auth_data := AuthData, auth_data := AuthData,
auth_cache := AuthCache}, State) -> auth_cache := AuthCache}, State) ->
@ -126,20 +135,21 @@ authenticate(_Credential, _State) ->
destroy(#{user_group := UserGroup}) -> destroy(#{user_group := UserGroup}) ->
trans( trans(
fun() -> fun() ->
MatchSpec = [{{scram_user_credentail, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}], MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_', '_'}, [], ['$_']}],
ok = lists:foreach(fun(UserCredential) -> ok = lists:foreach(fun(UserInfo) ->
mnesia:delete_object(?TAB, UserCredential, write) mnesia:delete_object(?TAB, UserInfo, write)
end, mnesia:select(?TAB, MatchSpec, write)) end, mnesia:select(?TAB, MatchSpec, write))
end). end).
add_user(#{user_id := UserID, add_user(#{user_id := UserID,
password := Password}, #{user_group := UserGroup} = State) -> password := Password} = UserInfo, #{user_group := UserGroup} = State) ->
trans( trans(
fun() -> fun() ->
case mnesia:read(?TAB, {UserGroup, UserID}, write) of case mnesia:read(?TAB, {UserGroup, UserID}, write) of
[] -> [] ->
add_user(UserID, Password, State), IsSuperuser = maps:get(is_superuser, UserInfo, false),
{ok, #{user_id => UserID}}; add_user(UserID, Password, IsSuperuser, State),
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
[_] -> [_] ->
{error, already_exist} {error, already_exist}
end end
@ -156,31 +166,41 @@ delete_user(UserID, #{user_group := UserGroup}) ->
end end
end). end).
update_user(UserID, #{password := Password}, update_user(UserID, User,
#{user_group := UserGroup} = State) -> #{user_group := UserGroup} = State) ->
trans( trans(
fun() -> fun() ->
case mnesia:read(?TAB, {UserGroup, UserID}, write) of case mnesia:read(?TAB, {UserGroup, UserID}, write) of
[] -> [] ->
{error, not_found}; {error, not_found};
[_] -> [#user_info{is_superuser = IsSuperuser} = UserInfo] ->
add_user(UserID, Password, State), UserInfo1 = UserInfo#user_info{is_superuser = maps:get(is_superuser, User, IsSuperuser)},
{ok, #{user_id => UserID}} UserInfo2 = case maps:get(password, User, undefined) of
undefined ->
UserInfo1;
Password ->
{StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State),
UserInfo1#user_info{stored_key = StoredKey,
server_key = ServerKey,
salt = Salt}
end,
mnesia:write(?TAB, UserInfo2, write),
{ok, serialize_user_info(UserInfo2)}
end end
end). end).
lookup_user(UserID, #{user_group := UserGroup}) -> lookup_user(UserID, #{user_group := UserGroup}) ->
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
[#scram_user_credentail{user_id = {_, UserID}}] -> [UserInfo] ->
{ok, #{user_id => UserID}}; {ok, serialize_user_info(UserInfo)};
[] -> [] ->
{error, not_found} {error, not_found}
end. end.
%% TODO: Support Pagination %% TODO: Support Pagination
list_users(#{user_group := UserGroup}) -> list_users(#{user_group := UserGroup}) ->
Users = [#{user_id => UserID} || Users = [serialize_user_info(UserInfo) ||
#scram_user_credentail{user_id = {UserGroup0, UserID}} <- ets:tab2list(?TAB), UserGroup0 =:= UserGroup], #user_info{user_id = {UserGroup0, _}} = UserInfo <- ets:tab2list(?TAB), UserGroup0 =:= UserGroup],
{ok, Users}. {ok, Users}.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -195,13 +215,13 @@ ensure_auth_method(_, _) ->
false. false.
check_client_first_message(Bin, _Cache, #{iteration_count := IterationCount} = State) -> check_client_first_message(Bin, _Cache, #{iteration_count := IterationCount} = State) ->
LookupFun = fun(Username) -> RetrieveFun = fun(Username) ->
lookup_user2(Username, State) retrieve(Username, State)
end, end,
case esasl_scram:check_client_first_message( case esasl_scram:check_client_first_message(
Bin, Bin,
#{iteration_count => IterationCount, #{iteration_count => IterationCount,
lookup => LookupFun} retrieve => RetrieveFun}
) of ) of
{cotinue, ServerFirstMessage, Cache} -> {cotinue, ServerFirstMessage, Cache} ->
{cotinue, ServerFirstMessage, Cache}; {cotinue, ServerFirstMessage, Cache};
@ -209,25 +229,36 @@ check_client_first_message(Bin, _Cache, #{iteration_count := IterationCount} = S
{error, not_authorized} {error, not_authorized}
end. end.
check_client_final_message(Bin, Cache, #{algorithm := Alg}) -> check_client_final_message(Bin, #{is_superuser := IsSuperuser} = Cache, #{algorithm := Alg}) ->
case esasl_scram:check_client_final_message( case esasl_scram:check_client_final_message(
Bin, Bin,
Cache#{algorithm => Alg} Cache#{algorithm => Alg}
) of ) of
{ok, ServerFinalMessage} -> {ok, ServerFinalMessage} ->
{ok, ServerFinalMessage}; {ok, #{is_superuser => IsSuperuser}, ServerFinalMessage};
{error, _Reason} -> {error, _Reason} ->
{error, not_authorized} {error, not_authorized}
end. end.
add_user(UserID, Password, State) -> add_user(UserID, Password, IsSuperuser, State) ->
UserCredential = esasl_scram:generate_user_credential(UserID, Password, State), {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State),
mnesia:write(?TAB, UserCredential, write). UserInfo = #user_info{user_id = UserID,
stored_key = StoredKey,
server_key = ServerKey,
salt = Salt,
is_superuser = IsSuperuser},
mnesia:write(?TAB, UserInfo, write).
lookup_user2(UserID, #{user_group := UserGroup}) -> retrieve(UserID, #{user_group := UserGroup}) ->
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
[#scram_user_credentail{} = UserCredential] -> [#user_info{stored_key = StoredKey,
{ok, UserCredential}; server_key = ServerKey,
salt = Salt,
is_superuser = IsSuperuser}] ->
{ok, #{stored_key => StoredKey,
server_key => ServerKey,
salt => Salt,
is_superuser => IsSuperuser}};
[] -> [] ->
{error, not_found} {error, not_found}
end. end.
@ -241,3 +272,6 @@ trans(Fun, Args) ->
{atomic, Res} -> Res; {atomic, Res} -> Res;
{aborted, Reason} -> {error, Reason} {aborted, Reason} -> {error, Reason}
end. end.
serialize_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
#{user_id => UserID, is_superuser => IsSuperuser}.
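A hedged usage sketch of the reworked SCRAM user store above; State stands for whatever create/1 returned, and the credentials are made up for illustration:
    {ok, #{user_id := <<"u1">>, is_superuser := true}} =
        emqx_enhanced_authn_scram_mnesia:add_user(
            #{user_id => <<"u1">>, password => <<"secret">>, is_superuser => true},
            State),
    {ok, #{user_id := <<"u1">>, is_superuser := true}} =
        emqx_enhanced_authn_scram_mnesia:lookup_user(<<"u1">>, State).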

View File

@ -21,13 +21,16 @@
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
-behaviour(emqx_authentication).
-export([ structs/0 -export([ namespace/0
, roots/0
, fields/1 , fields/1
, validations/0 , validations/0
]). ]).
-export([ create/1 -export([ refs/0
, create/1
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -37,13 +40,13 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
structs() -> [""]. namespace() -> "authn:password-based:http-server".
fields("") -> roots() ->
[ {config, {union, [ hoconsc:t(get) [ {config, {union, [ hoconsc:ref(?MODULE, get)
, hoconsc:t(post) , hoconsc:ref(?MODULE, post)
]}} ]}}
]; ].
fields(get) -> fields(get) ->
[ {method, #{type => get, [ {method, #{type => get,
@ -58,15 +61,15 @@ fields(post) ->
] ++ common_fields(). ] ++ common_fields().
common_fields() -> common_fields() ->
[ {name, fun emqx_authn_schema:authenticator_name/1} [ {mechanism, {enum, ['password-based']}}
, {mechanism, {enum, ['password-based']}} , {backend, {enum, ['http-server']}}
, {server_type, {enum, ['http-server']}}
, {url, fun url/1} , {url, fun url/1}
, {form_data, fun form_data/1} , {body, fun body/1}
, {request_timeout, fun request_timeout/1} , {request_timeout, fun request_timeout/1}
] ++ maps:to_list(maps:without([ base_url ] ++ emqx_authn_schema:common_fields()
, pool_type], ++ maps:to_list(maps:without([ base_url
maps:from_list(emqx_connector_http:fields(config)))). , pool_type],
maps:from_list(emqx_connector_http:fields(config)))).
validations() -> validations() ->
[ {check_ssl_opts, fun check_ssl_opts/1} [ {check_ssl_opts, fun check_ssl_opts/1}
@ -89,16 +92,15 @@ headers(_) -> undefined.
headers_no_content_type(type) -> map(); headers_no_content_type(type) -> map();
headers_no_content_type(converter) -> headers_no_content_type(converter) ->
fun(Headers) -> fun(Headers) ->
maps:merge(default_headers_no_content_type(), transform_header_name(Headers)) maps:merge(default_headers_no_content_type(), transform_header_name(Headers))
end; end;
headers_no_content_type(default) -> default_headers_no_content_type(); headers_no_content_type(default) -> default_headers_no_content_type();
headers_no_content_type(_) -> undefined. headers_no_content_type(_) -> undefined.
%% TODO: Using map() body(type) -> map();
form_data(type) -> map(); body(nullable) -> false;
form_data(nullable) -> false; body(validate) -> [fun check_body/1];
form_data(validate) -> [fun check_form_data/1]; body(_) -> undefined.
form_data(_) -> undefined.
request_timeout(type) -> non_neg_integer(); request_timeout(type) -> non_neg_integer();
request_timeout(default) -> 5000; request_timeout(default) -> 5000;
@ -108,10 +110,15 @@ request_timeout(_) -> undefined.
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() ->
[ hoconsc:ref(?MODULE, get)
, hoconsc:ref(?MODULE, post)
].
create(#{ method := Method create(#{ method := Method
, url := URL , url := URL
, headers := Headers , headers := Headers
, form_data := FormData , body := Body
, request_timeout := RequestTimeout , request_timeout := RequestTimeout
, '_unique' := Unique , '_unique' := Unique
} = Config) -> } = Config) ->
@ -120,8 +127,8 @@ create(#{ method := Method
State = #{ method => Method State = #{ method => Method
, path => Path , path => Path
, base_query => cow_qs:parse_qs(list_to_binary(Query)) , base_query => cow_qs:parse_qs(list_to_binary(Query))
, headers => normalize_headers(Headers) , headers => maps:to_list(Headers)
, form_data => maps:to_list(FormData) , body => maps:to_list(Body)
, request_timeout => RequestTimeout , request_timeout => RequestTimeout
, '_unique' => Unique , '_unique' => Unique
}, },
@ -129,9 +136,9 @@ create(#{ method := Method
emqx_connector_http, emqx_connector_http,
Config#{base_url => maps:remove(query, URIMap), Config#{base_url => maps:remove(query, URIMap),
pool_type => random}) of pool_type => random}) of
{ok, _} -> {ok, already_created} ->
{ok, State}; {ok, State};
{error, already_created} -> {ok, _} ->
{ok, State}; {ok, State};
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
@ -154,15 +161,16 @@ authenticate(Credential, #{'_unique' := Unique,
try try
Request = generate_request(Credential, State), Request = generate_request(Credential, State),
case emqx_resource:query(Unique, {Method, Request, RequestTimeout}) of case emqx_resource:query(Unique, {Method, Request, RequestTimeout}) of
{ok, 204, _Headers} -> ok; {ok, 204, _Headers} -> {ok, #{is_superuser => false}};
{ok, 200, Headers, Body} -> {ok, 200, Headers, Body} ->
ContentType = proplists:get_value(<<"content-type">>, Headers, <<"application/json">>), ContentType = proplists:get_value(<<"content-type">>, Headers, <<"application/json">>),
case safely_parse_body(ContentType, Body) of case safely_parse_body(ContentType, Body) of
{ok, _NBody} -> {ok, NBody} ->
%% TODO: Return by user property %% TODO: Return by user property
ok; {ok, #{is_superuser => maps:get(<<"is_superuser">>, NBody, false),
user_property => NBody}};
{error, _Reason} -> {error, _Reason} ->
ok {ok, #{is_superuser => false}}
end; end;
{error, _Reason} -> {error, _Reason} ->
ignore ignore
@ -187,10 +195,10 @@ check_url(URL) ->
{error, _} -> false {error, _} -> false
end. end.
check_form_data(FormData) -> check_body(Body) ->
lists:any(fun({_, V}) -> lists:any(fun({_, V}) ->
not is_binary(V) not is_binary(V)
end, maps:to_list(FormData)). end, maps:to_list(Body)).
default_headers() -> default_headers() ->
maps:put(<<"content-type">>, maps:put(<<"content-type">>,
@ -230,24 +238,21 @@ parse_url(URL) ->
URIMap URIMap
end. end.
normalize_headers(Headers) ->
[{atom_to_binary(K), V} || {K, V} <- maps:to_list(Headers)].
generate_request(Credential, #{method := Method, generate_request(Credential, #{method := Method,
path := Path, path := Path,
base_query := BaseQuery, base_query := BaseQuery,
headers := Headers, headers := Headers,
form_data := FormData0}) -> body := Body0}) ->
FormData = replace_placeholders(FormData0, Credential), Body = replace_placeholders(Body0, Credential),
case Method of case Method of
get -> get ->
NPath = append_query(Path, BaseQuery ++ FormData), NPath = append_query(Path, BaseQuery ++ Body),
{NPath, Headers}; {NPath, Headers};
post -> post ->
NPath = append_query(Path, BaseQuery), NPath = append_query(Path, BaseQuery),
ContentType = proplists:get_value(<<"content-type">>, Headers), ContentType = proplists:get_value(<<"content-type">>, Headers),
Body = serialize_body(ContentType, FormData), NBody = serialize_body(ContentType, Body),
{NPath, Headers, Body} {NPath, Headers, NBody}
end. end.
replace_placeholders(KVs, Credential) -> replace_placeholders(KVs, Credential) ->
@ -277,10 +282,10 @@ qs([], Acc) ->
qs([{K, V} | More], Acc) -> qs([{K, V} | More], Acc) ->
qs(More, [["&", emqx_http_lib:uri_encode(K), "=", emqx_http_lib:uri_encode(V)] | Acc]). qs(More, [["&", emqx_http_lib:uri_encode(K), "=", emqx_http_lib:uri_encode(V)] | Acc]).
serialize_body(<<"application/json">>, FormData) -> serialize_body(<<"application/json">>, Body) ->
emqx_json:encode(FormData); emqx_json:encode(Body);
serialize_body(<<"application/x-www-form-urlencoded">>, FormData) -> serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
qs(FormData). qs(Body).
safely_parse_body(ContentType, Body) -> safely_parse_body(ContentType, Body) ->
try parse_body(ContentType, Body) of try parse_body(ContentType, Body) of
@ -291,8 +296,8 @@ safely_parse_body(ContentType, Body) ->
end. end.
parse_body(<<"application/json">>, Body) -> parse_body(<<"application/json">>, Body) ->
{ok, emqx_json:decode(Body)}; {ok, emqx_json:decode(Body, [return_maps])};
parse_body(<<"application/x-www-form-urlencoded">>, Body) -> parse_body(<<"application/x-www-form-urlencoded">>, Body) ->
{ok, cow_qs:parse_qs(Body)}; {ok, maps:from_list(cow_qs:parse_qs(Body))};
parse_body(ContentType, _) -> parse_body(ContentType, _) ->
{error, {unsupported_content_type, ContentType}}. {error, {unsupported_content_type, ContentType}}.
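For reviewers, a hedged sketch of the renamed body field above: a map whose binary values may contain ${...} placeholders (the convention shown in the commented config examples earlier) that replace_placeholders/2 fills from the client credential before the request is built; the field names below are illustrative only.
    Body = #{<<"username">> => <<"${mqtt-username}">>,
             <<"password">> => <<"${mqtt-password}">>}.
    %% for POST, serialize_body/2 encodes this map as JSON or
    %% x-www-form-urlencoded depending on the content-type header;
    %% for GET, generate_request/2 appends it to the query string.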

View File

@ -19,12 +19,15 @@
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
-behaviour(emqx_authentication).
-export([ structs/0 -export([ namespace/0
, roots/0
, fields/1 , fields/1
]). ]).
-export([ create/1 -export([ refs/0
, create/1
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -34,14 +37,14 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
structs() -> [""]. namespace() -> "authn:jwt".
fields("") -> roots() ->
[ {config, {union, [ hoconsc:t('hmac-based') [ {config, {union, [ hoconsc:mk('hmac-based')
, hoconsc:t('public-key') , hoconsc:mk('public-key')
, hoconsc:t('jwks') , hoconsc:mk('jwks')
]}} ]}}
]; ].
fields('hmac-based') -> fields('hmac-based') ->
[ {use_jwks, {enum, [false]}} [ {use_jwks, {enum, [false]}}
@ -80,12 +83,11 @@ fields(ssl_disable) ->
[ {enable, #{type => false}} ]. [ {enable, #{type => false}} ].
common_fields() -> common_fields() ->
[ {name, fun emqx_authn_schema:authenticator_name/1} [ {mechanism, {enum, [jwt]}}
, {mechanism, {enum, [jwt]}}
, {verify_claims, fun verify_claims/1} , {verify_claims, fun verify_claims/1}
]. ] ++ emqx_authn_schema:common_fields().
secret(type) -> string(); secret(type) -> binary();
secret(_) -> undefined. secret(_) -> undefined.
secret_base64_encoded(type) -> boolean(); secret_base64_encoded(type) -> boolean();
@ -132,6 +134,12 @@ verify_claims(_) -> undefined.
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() ->
[ hoconsc:ref(?MODULE, 'hmac-based')
, hoconsc:ref(?MODULE, 'public-key')
, hoconsc:ref(?MODULE, 'jwks')
].
create(#{verify_claims := VerifyClaims} = Config) -> create(#{verify_claims := VerifyClaims} = Config) ->
create2(Config#{verify_claims => handle_verify_claims(VerifyClaims)}). create2(Config#{verify_claims => handle_verify_claims(VerifyClaims)}).
@ -169,7 +177,7 @@ authenticate(Credential = #{password := JWT}, #{jwk := JWK,
end, end,
VerifyClaims = replace_placeholder(VerifyClaims0, Credential), VerifyClaims = replace_placeholder(VerifyClaims0, Credential),
case verify(JWT, JWKs, VerifyClaims) of case verify(JWT, JWKs, VerifyClaims) of
ok -> ok; {ok, Extra} -> {ok, Extra};
{error, invalid_signature} -> ignore; {error, invalid_signature} -> ignore;
{error, {claims, _}} -> {error, bad_username_or_password} {error, {claims, _}} -> {error, bad_username_or_password}
end. end.
@ -239,7 +247,12 @@ verify(JWS, [JWK | More], VerifyClaims) ->
try jose_jws:verify(JWK, JWS) of try jose_jws:verify(JWK, JWS) of
{true, Payload, _JWS} -> {true, Payload, _JWS} ->
Claims = emqx_json:decode(Payload, [return_maps]), Claims = emqx_json:decode(Payload, [return_maps]),
verify_claims(Claims, VerifyClaims); case verify_claims(Claims, VerifyClaims) of
ok ->
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Claims, false)}};
{error, Reason} ->
{error, Reason}
end;
{false, _, _} -> {false, _, _} ->
verify(JWS, More, VerifyClaims) verify(JWS, More, VerifyClaims)
catch catch

View File

@ -20,10 +20,15 @@
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
-behaviour(emqx_authentication).
-export([ structs/0, fields/1 ]). -export([ namespace/0
, roots/0
, fields/1
]).
-export([ create/1 -export([ refs/0
, create/1
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -46,6 +51,7 @@
{ user_id :: {user_group(), user_id()} { user_id :: {user_group(), user_id()}
, password_hash :: binary() , password_hash :: binary()
, salt :: binary() , salt :: binary()
, is_superuser :: boolean()
}). }).
-reflect_type([ user_id_type/0 ]). -reflect_type([ user_id_type/0 ]).
@ -57,7 +63,6 @@
-define(TAB, ?MODULE). -define(TAB, ?MODULE).
-rlog_shard({?AUTH_SHARD, ?TAB}).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Mnesia bootstrap %% Mnesia bootstrap
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -66,6 +71,7 @@
-spec(mnesia(boot | copy) -> ok). -spec(mnesia(boot | copy) -> ok).
mnesia(boot) -> mnesia(boot) ->
ok = ekka_mnesia:create_table(?TAB, [ ok = ekka_mnesia:create_table(?TAB, [
{rlog_shard, ?AUTH_SHARD},
{disc_copies, [node()]}, {disc_copies, [node()]},
{record_name, user_info}, {record_name, user_info},
{attributes, record_info(fields, user_info)}, {attributes, record_info(fields, user_info)},
@ -78,15 +84,16 @@ mnesia(copy) ->
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
structs() -> [config]. namespace() -> "authn:password-based:builtin-db".
roots() -> [config].
fields(config) -> fields(config) ->
[ {name, fun emqx_authn_schema:authenticator_name/1} [ {mechanism, {enum, ['password-based']}}
, {mechanism, {enum, ['password-based']}} , {backend, {enum, ['built-in-database']}}
, {server_type, {enum, ['built-in-database']}}
, {user_id_type, fun user_id_type/1} , {user_id_type, fun user_id_type/1}
, {password_hash_algorithm, fun password_hash_algorithm/1} , {password_hash_algorithm, fun password_hash_algorithm/1}
]; ] ++ emqx_authn_schema:common_fields();
fields(bcrypt) -> fields(bcrypt) ->
[ {name, {enum, [bcrypt]}} [ {name, {enum, [bcrypt]}}
@ -101,7 +108,8 @@ user_id_type(type) -> user_id_type();
user_id_type(default) -> username; user_id_type(default) -> username;
user_id_type(_) -> undefined. user_id_type(_) -> undefined.
password_hash_algorithm(type) -> {union, [hoconsc:ref(bcrypt), hoconsc:ref(other_algorithms)]}; password_hash_algorithm(type) -> hoconsc:union([hoconsc:ref(?MODULE, bcrypt),
hoconsc:ref(?MODULE, other_algorithms)]);
password_hash_algorithm(default) -> #{<<"name">> => sha256}; password_hash_algorithm(default) -> #{<<"name">> => sha256};
password_hash_algorithm(_) -> undefined. password_hash_algorithm(_) -> undefined.
@ -113,6 +121,9 @@ salt_rounds(_) -> undefined.
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() ->
[hoconsc:ref(?MODULE, config)].
create(#{ user_id_type := Type create(#{ user_id_type := Type
, password_hash_algorithm := #{name := bcrypt, , password_hash_algorithm := #{name := bcrypt,
salt_rounds := SaltRounds} salt_rounds := SaltRounds}
@ -147,13 +158,13 @@ authenticate(#{password := Password} = Credential,
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
[] -> [] ->
ignore; ignore;
[#user_info{password_hash = PasswordHash, salt = Salt0}] -> [#user_info{password_hash = PasswordHash, salt = Salt0, is_superuser = IsSuperuser}] ->
Salt = case Algorithm of Salt = case Algorithm of
bcrypt -> PasswordHash; bcrypt -> PasswordHash;
_ -> Salt0 _ -> Salt0
end, end,
case PasswordHash =:= hash(Algorithm, Password, Salt) of case PasswordHash =:= hash(Algorithm, Password, Salt) of
true -> ok; true -> {ok, #{is_superuser => IsSuperuser}};
false -> {error, bad_username_or_password} false -> {error, bad_username_or_password}
end end
end. end.
@ -161,7 +172,7 @@ authenticate(#{password := Password} = Credential,
destroy(#{user_group := UserGroup}) -> destroy(#{user_group := UserGroup}) ->
trans( trans(
fun() -> fun() ->
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_'}, [], ['$_']}], MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}],
ok = lists:foreach(fun delete_user2/1, mnesia:select(?TAB, MatchSpec, write)) ok = lists:foreach(fun delete_user2/1, mnesia:select(?TAB, MatchSpec, write))
end). end).
@ -179,14 +190,16 @@ import_users(Filename0, State) ->
end. end.
add_user(#{user_id := UserID, add_user(#{user_id := UserID,
password := Password}, password := Password} = UserInfo,
#{user_group := UserGroup} = State) -> #{user_group := UserGroup} = State) ->
trans( trans(
fun() -> fun() ->
case mnesia:read(?TAB, {UserGroup, UserID}, write) of case mnesia:read(?TAB, {UserGroup, UserID}, write) of
[] -> [] ->
add(UserID, Password, State), {PasswordHash, Salt} = hash(Password, State),
{ok, #{user_id => UserID}}; IsSuperuser = maps:get(is_superuser, UserInfo, false),
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
[_] -> [_] ->
{error, already_exist} {error, already_exist}
end end
@ -203,29 +216,38 @@ delete_user(UserID, #{user_group := UserGroup}) ->
end end
end). end).
update_user(UserID, #{password := Password}, update_user(UserID, UserInfo,
#{user_group := UserGroup} = State) -> #{user_group := UserGroup} = State) ->
trans( trans(
fun() -> fun() ->
case mnesia:read(?TAB, {UserGroup, UserID}, write) of case mnesia:read(?TAB, {UserGroup, UserID}, write) of
[] -> [] ->
{error, not_found}; {error, not_found};
[_] -> [#user_info{ password_hash = PasswordHash
add(UserID, Password, State), , salt = Salt
{ok, #{user_id => UserID}} , is_superuser = IsSuperuser}] ->
NSuperuser = maps:get(is_superuser, UserInfo, IsSuperuser),
{NPasswordHash, NSalt} = case maps:get(password, UserInfo, undefined) of
undefined ->
{PasswordHash, Salt};
Password ->
hash(Password, State)
end,
insert_user(UserGroup, UserID, NPasswordHash, NSalt, NSuperuser),
{ok, #{user_id => UserID, is_superuser => NSuperuser}}
end end
end). end).
lookup_user(UserID, #{user_group := UserGroup}) -> lookup_user(UserID, #{user_group := UserGroup}) ->
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
[#user_info{user_id = {_, UserID}}] -> [UserInfo] ->
{ok, #{user_id => UserID}}; {ok, serialize_user_info(UserInfo)};
[] -> [] ->
{error, not_found} {error, not_found}
end. end.
list_users(#{user_group := UserGroup}) -> list_users(#{user_group := UserGroup}) ->
Users = [#{user_id => UserID} || #user_info{user_id = {UserGroup0, UserID}} <- ets:tab2list(?TAB), UserGroup0 =:= UserGroup], Users = [serialize_user_info(UserInfo) || #user_info{user_id = {UserGroup0, _}} = UserInfo <- ets:tab2list(?TAB), UserGroup0 =:= UserGroup],
{ok, Users}. {ok, Users}.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -268,7 +290,8 @@ import(UserGroup, [#{<<"user_id">> := UserID,
<<"password_hash">> := PasswordHash} = UserInfo | More]) <<"password_hash">> := PasswordHash} = UserInfo | More])
when is_binary(UserID) andalso is_binary(PasswordHash) -> when is_binary(UserID) andalso is_binary(PasswordHash) ->
Salt = maps:get(<<"salt">>, UserInfo, <<>>), Salt = maps:get(<<"salt">>, UserInfo, <<>>),
insert_user(UserGroup, UserID, PasswordHash, Salt), IsSuperuser = maps:get(<<"is_superuser">>, UserInfo, false),
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
import(UserGroup, More); import(UserGroup, More);
import(_UserGroup, [_ | _More]) -> import(_UserGroup, [_ | _More]) ->
{error, bad_format}. {error, bad_format}.
@ -282,7 +305,8 @@ import(UserGroup, File, Seq) ->
{ok, #{user_id := UserID, {ok, #{user_id := UserID,
password_hash := PasswordHash} = UserInfo} -> password_hash := PasswordHash} = UserInfo} ->
Salt = maps:get(salt, UserInfo, <<>>), Salt = maps:get(salt, UserInfo, <<>>),
insert_user(UserGroup, UserID, PasswordHash, Salt), IsSuperuser = maps:get(is_superuser, UserInfo, false),
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
import(UserGroup, File, Seq); import(UserGroup, File, Seq);
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
@ -307,8 +331,6 @@ get_csv_header(File) ->
get_user_info_by_seq(Fields, Seq) -> get_user_info_by_seq(Fields, Seq) ->
get_user_info_by_seq(Fields, Seq, #{}). get_user_info_by_seq(Fields, Seq, #{}).
get_user_info_by_seq([], [], #{user_id := _, password_hash := _, salt := _} = Acc) ->
{ok, Acc};
get_user_info_by_seq([], [], #{user_id := _, password_hash := _} = Acc) -> get_user_info_by_seq([], [], #{user_id := _, password_hash := _} = Acc) ->
{ok, Acc}; {ok, Acc};
get_user_info_by_seq(_, [], _) -> get_user_info_by_seq(_, [], _) ->
@ -319,19 +341,13 @@ get_user_info_by_seq([PasswordHash | More1], [<<"password_hash">> | More2], Acc)
get_user_info_by_seq(More1, More2, Acc#{password_hash => PasswordHash}); get_user_info_by_seq(More1, More2, Acc#{password_hash => PasswordHash});
get_user_info_by_seq([Salt | More1], [<<"salt">> | More2], Acc) -> get_user_info_by_seq([Salt | More1], [<<"salt">> | More2], Acc) ->
get_user_info_by_seq(More1, More2, Acc#{salt => Salt}); get_user_info_by_seq(More1, More2, Acc#{salt => Salt});
get_user_info_by_seq([<<"true">> | More1], [<<"is_superuser">> | More2], Acc) ->
get_user_info_by_seq(More1, More2, Acc#{is_superuser => true});
get_user_info_by_seq([<<"false">> | More1], [<<"is_superuser">> | More2], Acc) ->
get_user_info_by_seq(More1, More2, Acc#{is_superuser => false});
get_user_info_by_seq(_, _, _) -> get_user_info_by_seq(_, _, _) ->
{error, bad_format}. {error, bad_format}.
-compile({inline, [add/3]}).
add(UserID, Password, #{user_group := UserGroup,
password_hash_algorithm := Algorithm} = State) ->
Salt = gen_salt(State),
PasswordHash = hash(Algorithm, Password, Salt),
case Algorithm of
bcrypt -> insert_user(UserGroup, UserID, PasswordHash);
_ -> insert_user(UserGroup, UserID, PasswordHash, Salt)
end.
gen_salt(#{password_hash_algorithm := plain}) -> gen_salt(#{password_hash_algorithm := plain}) ->
<<>>; <<>>;
gen_salt(#{password_hash_algorithm := bcrypt, gen_salt(#{password_hash_algorithm := bcrypt,
@ -347,13 +363,16 @@ hash(bcrypt, Password, Salt) ->
hash(Algorithm, Password, Salt) -> hash(Algorithm, Password, Salt) ->
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>). emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>).
insert_user(UserGroup, UserID, PasswordHash) -> hash(Password, #{password_hash_algorithm := Algorithm} = State) ->
insert_user(UserGroup, UserID, PasswordHash, <<>>). Salt = gen_salt(State),
PasswordHash = hash(Algorithm, Password, Salt),
{PasswordHash, Salt}.
insert_user(UserGroup, UserID, PasswordHash, Salt) -> insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) ->
UserInfo = #user_info{user_id = {UserGroup, UserID}, UserInfo = #user_info{user_id = {UserGroup, UserID},
password_hash = PasswordHash, password_hash = PasswordHash,
salt = Salt}, salt = Salt,
is_superuser = IsSuperuser},
mnesia:write(?TAB, UserInfo, write). mnesia:write(?TAB, UserInfo, write).
delete_user2(UserInfo) -> delete_user2(UserInfo) ->
@ -376,8 +395,10 @@ trans(Fun, Args) ->
{aborted, Reason} -> {error, Reason} {aborted, Reason} -> {error, Reason}
end. end.
to_binary(B) when is_binary(B) -> to_binary(B) when is_binary(B) ->
B; B;
to_binary(L) when is_list(L) -> to_binary(L) when is_list(L) ->
iolist_to_binary(L). iolist_to_binary(L).
serialize_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
#{user_id => UserID, is_superuser => IsSuperuser}.
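With the extra record field, the whole user-management API of the built-in database accepts and reports is_superuser. A rough usage sketch against the exported functions above, with invented credentials and assuming State is the map built by create/1:

    %% Illustrative calls; values are made up.
    {ok, #{user_id := <<"myuser">>, is_superuser := true}} =
        add_user(#{ user_id      => <<"myuser">>
                  , password     => <<"secret">>
                  , is_superuser => true}, State),
    {ok, #{user_id := <<"myuser">>, is_superuser := true}} =
        lookup_user(<<"myuser">>, State),
    %% Omitting is_superuser in an update keeps the stored value.
    {ok, #{user_id := <<"myuser">>, is_superuser := true}} =
        update_user(<<"myuser">>, #{password => <<"newsecret">>}, State).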
View File
@ -21,12 +21,15 @@
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
-behaviour(emqx_authentication).
-export([ structs/0 -export([ namespace/0
, roots/0
, fields/1 , fields/1
]). ]).
-export([ create/1 -export([ refs/0
, create/1
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -36,14 +39,14 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
structs() -> [""]. namespace() -> "authn:password-based:mongodb".
fields("") -> roots() ->
[ {config, {union, [ hoconsc:t(standalone) [ {config, {union, [ hoconsc:mk(standalone)
, hoconsc:t('replica-set') , hoconsc:mk('replica-set')
, hoconsc:t('sharded-cluster') , hoconsc:mk('sharded-cluster')
]}} ]}}
]; ].
fields(standalone) -> fields(standalone) ->
common_fields() ++ emqx_connector_mongo:fields(single); common_fields() ++ emqx_connector_mongo:fields(single);
@ -55,16 +58,16 @@ fields('sharded-cluster') ->
common_fields() ++ emqx_connector_mongo:fields(sharded). common_fields() ++ emqx_connector_mongo:fields(sharded).
common_fields() -> common_fields() ->
[ {name, fun emqx_authn_schema:authenticator_name/1} [ {mechanism, {enum, ['password-based']}}
, {mechanism, {enum, ['password-based']}} , {backend, {enum, [mongodb]}}
, {server_type, {enum, [mongodb]}}
, {collection, fun collection/1} , {collection, fun collection/1}
, {selector, fun selector/1} , {selector, fun selector/1}
, {password_hash_field, fun password_hash_field/1} , {password_hash_field, fun password_hash_field/1}
, {salt_field, fun salt_field/1} , {salt_field, fun salt_field/1}
, {is_superuser_field, fun is_superuser_field/1}
, {password_hash_algorithm, fun password_hash_algorithm/1} , {password_hash_algorithm, fun password_hash_algorithm/1}
, {salt_position, fun salt_position/1} , {salt_position, fun salt_position/1}
]. ] ++ emqx_authn_schema:common_fields().
collection(type) -> binary(); collection(type) -> binary();
collection(nullable) -> false; collection(nullable) -> false;
@ -82,6 +85,10 @@ salt_field(type) -> binary();
salt_field(nullable) -> true; salt_field(nullable) -> true;
salt_field(_) -> undefined. salt_field(_) -> undefined.
is_superuser_field(type) -> binary();
is_superuser_field(nullable) -> true;
is_superuser_field(_) -> undefined.
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]}; password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
password_hash_algorithm(default) -> sha256; password_hash_algorithm(default) -> sha256;
password_hash_algorithm(_) -> undefined. password_hash_algorithm(_) -> undefined.
@ -94,6 +101,12 @@ salt_position(_) -> undefined.
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() ->
[ hoconsc:ref(?MODULE, standalone)
, hoconsc:ref(?MODULE, 'replica-set')
, hoconsc:ref(?MODULE, 'sharded-cluster')
].
create(#{ selector := Selector create(#{ selector := Selector
, '_unique' := Unique , '_unique' := Unique
} = Config) -> } = Config) ->
@ -101,14 +114,15 @@ create(#{ selector := Selector
State = maps:with([ collection State = maps:with([ collection
, password_hash_field , password_hash_field
, salt_field , salt_field
, is_superuser_field
, password_hash_algorithm , password_hash_algorithm
, salt_position , salt_position
, '_unique'], Config), , '_unique'], Config),
NState = State#{selector => NSelector}, NState = State#{selector => NSelector},
case emqx_resource:create_local(Unique, emqx_connector_mongo, Config) of case emqx_resource:create_local(Unique, emqx_connector_mongo, Config) of
{ok, _} -> {ok, already_created} ->
{ok, NState}; {ok, NState};
{error, already_created} -> {ok, _} ->
{ok, NState}; {ok, NState};
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
@ -140,7 +154,8 @@ authenticate(#{password := Password} = Credential,
ignore; ignore;
Doc -> Doc ->
case check_password(Password, Doc, State) of case check_password(Password, Doc, State) of
ok -> ok; ok ->
{ok, #{is_superuser => is_superuser(Doc, State)}};
{error, {cannot_find_password_hash_field, PasswordHashField}} -> {error, {cannot_find_password_hash_field, PasswordHashField}} ->
?LOG(error, "['~s'] Can't find password hash field: ~s", [Unique, PasswordHashField]), ?LOG(error, "['~s'] Can't find password hash field: ~s", [Unique, PasswordHashField]),
{error, bad_username_or_password}; {error, bad_username_or_password};
@ -221,6 +236,11 @@ check_password(Password,
end end
end. end.
is_superuser(Doc, #{is_superuser_field := IsSuperuserField}) ->
maps:get(IsSuperuserField, Doc, false);
is_superuser(_, _) ->
false.
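The MongoDB backend reads the flag from a configurable document field (the optional is_superuser_field above) and defaults to false when the field is missing or not configured; create/1 also adopts the convention where an already-existing resource is reported as {ok, already_created}. A small illustration of the lookup with made-up documents:

    %% is_superuser/2 as defined above, exercised with invented data.
    true  = is_superuser(#{<<"is_superuser">> => true},
                         #{is_superuser_field => <<"is_superuser">>}),
    false = is_superuser(#{<<"password_hash">> => <<"cafe">>},
                         #{is_superuser_field => <<"is_superuser">>}),
    false = is_superuser(#{<<"is_superuser">> => true}, #{}).   %% field not configured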
hash(Algorithm, Password, Salt, prefix) -> hash(Algorithm, Password, Salt, prefix) ->
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>); emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
hash(Algorithm, Password, Salt, suffix) -> hash(Algorithm, Password, Salt, suffix) ->
View File
@ -21,12 +21,15 @@
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
-behaviour(emqx_authentication).
-export([ structs/0 -export([ namespace/0
, roots/0
, fields/1 , fields/1
]). ]).
-export([ create/1 -export([ refs/0
, create/1
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -36,17 +39,19 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
structs() -> [config]. namespace() -> "authn:password-based:mysql".
roots() -> [config].
fields(config) -> fields(config) ->
[ {name, fun emqx_authn_schema:authenticator_name/1} [ {mechanism, {enum, ['password-based']}}
, {mechanism, {enum, ['password-based']}} , {backend, {enum, [mysql]}}
, {server_type, {enum, [mysql]}}
, {password_hash_algorithm, fun password_hash_algorithm/1} , {password_hash_algorithm, fun password_hash_algorithm/1}
, {salt_position, fun salt_position/1} , {salt_position, fun salt_position/1}
, {query, fun query/1} , {query, fun query/1}
, {query_timeout, fun query_timeout/1} , {query_timeout, fun query_timeout/1}
] ++ emqx_connector_schema_lib:relational_db_fields() ] ++ emqx_authn_schema:common_fields()
++ emqx_connector_schema_lib:relational_db_fields()
++ emqx_connector_schema_lib:ssl_fields(). ++ emqx_connector_schema_lib:ssl_fields().
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]}; password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
@ -69,6 +74,9 @@ query_timeout(_) -> undefined.
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() ->
[hoconsc:ref(?MODULE, config)].
create(#{ password_hash_algorithm := Algorithm create(#{ password_hash_algorithm := Algorithm
, salt_position := SaltPosition , salt_position := SaltPosition
, query := Query0 , query := Query0
@ -83,9 +91,9 @@ create(#{ password_hash_algorithm := Algorithm
query_timeout => QueryTimeout, query_timeout => QueryTimeout,
'_unique' => Unique}, '_unique' => Unique},
case emqx_resource:create_local(Unique, emqx_connector_mysql, Config) of case emqx_resource:create_local(Unique, emqx_connector_mysql, Config) of
{ok, _} -> {ok, already_created} ->
{ok, State}; {ok, State};
{error, already_created} -> {ok, _} ->
{ok, State}; {ok, State};
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
@ -112,22 +120,26 @@ authenticate(#{password := Password} = Credential,
case emqx_resource:query(Unique, {sql, Query, Params, Timeout}) of case emqx_resource:query(Unique, {sql, Query, Params, Timeout}) of
{ok, _Columns, []} -> ignore; {ok, _Columns, []} -> ignore;
{ok, Columns, Rows} -> {ok, Columns, Rows} ->
%% TODO: Support superuser
Selected = maps:from_list(lists:zip(Columns, Rows)), Selected = maps:from_list(lists:zip(Columns, Rows)),
check_password(Password, Selected, State); case check_password(Password, Selected, State) of
ok ->
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}};
{error, Reason} ->
{error, Reason}
end;
{error, _Reason} -> {error, _Reason} ->
ignore ignore
end end
catch catch
error:Reason -> error:Error ->
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Reason]), ?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Error]),
ignore ignore
end. end.
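The MySQL connector returns rows keyed by binary column names, so the selected map and the check_password/3 clauses further down now use <<"password_hash">>, <<"salt">> and <<"is_superuser">> rather than atom keys, and create/1 follows the same {ok, already_created} convention. A hedged sketch of how one result row becomes the map that is inspected; the values are invented:

    %% The real column list comes from the configured SQL query.
    Columns  = [<<"password_hash">>, <<"salt">>, <<"is_superuser">>],
    Row      = [<<"a1b2c3">>, <<"f00d">>, true],
    Selected = maps:from_list(lists:zip(Columns, Row)),
    #{<<"password_hash">> := _Hash, <<"salt">> := _Salt} = Selected,
    IsSuperuser = maps:get(<<"is_superuser">>, Selected, false).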
destroy(#{'_unique' := Unique}) -> destroy(#{'_unique' := Unique}) ->
_ = emqx_resource:remove_local(Unique), _ = emqx_resource:remove_local(Unique),
ok. ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Internal functions %% Internal functions
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -135,17 +147,17 @@ destroy(#{'_unique' := Unique}) ->
check_password(undefined, _Selected, _State) -> check_password(undefined, _Selected, _State) ->
{error, bad_username_or_password}; {error, bad_username_or_password};
check_password(Password, check_password(Password,
#{password_hash := Hash}, #{<<"password_hash">> := Hash},
#{password_hash_algorithm := bcrypt}) -> #{password_hash_algorithm := bcrypt}) ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
true -> ok; true -> ok;
false -> {error, bad_username_or_password} false -> {error, bad_username_or_password}
end; end;
check_password(Password, check_password(Password,
#{password_hash := Hash} = Selected, #{<<"password_hash">> := Hash} = Selected,
#{password_hash_algorithm := Algorithm, #{password_hash_algorithm := Algorithm,
salt_position := SaltPosition}) -> salt_position := SaltPosition}) ->
Salt = maps:get(salt, Selected, <<>>), Salt = maps:get(<<"salt">>, Selected, <<>>),
case Hash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of case Hash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of
true -> ok; true -> ok;
false -> {error, bad_username_or_password} false -> {error, bad_username_or_password}
View File
@ -1,58 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authn_other_schema).
-include("emqx_authn.hrl").
-include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema).
-export([ structs/0
, fields/1
]).
structs() -> [ "filename", "position", "user_info", "new_user_info"].
fields("filename") ->
[ {filename, fun filename/1} ];
fields("position") ->
[ {position, fun position/1} ];
fields("user_info") ->
[ {user_id, fun user_id/1}
, {password, fun password/1}
];
fields("new_user_info") ->
[ {password, fun password/1}
].
filename(type) -> string();
filename(nullable) -> false;
filename(_) -> undefined.
position(type) -> integer();
position(validate) -> [fun (Position) -> Position > 0 end];
position(nullable) -> false;
position(_) -> undefined.
user_id(type) -> binary();
user_id(nullable) -> false;
user_id(_) -> undefined.
password(type) -> binary();
password(nullable) -> false;
password(_) -> undefined.
View File
@ -18,13 +18,19 @@
-include("emqx_authn.hrl"). -include("emqx_authn.hrl").
-include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/logger.hrl").
-include_lib("epgsql/include/epgsql.hrl").
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
-behaviour(emqx_authentication).
-export([ structs/0, fields/1 ]). -export([ namespace/0
, roots/0
, fields/1
]).
-export([ create/1 -export([ refs/0
, create/1
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -34,16 +40,18 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
structs() -> [config]. namespace() -> "authn:password-based:postgresql".
roots() -> [config].
fields(config) -> fields(config) ->
[ {name, fun emqx_authn_schema:authenticator_name/1} [ {mechanism, {enum, ['password-based']}}
, {mechanism, {enum, ['password-based']}} , {backend, {enum, [postgresql]}}
, {server_type, {enum, [pgsql]}}
, {password_hash_algorithm, fun password_hash_algorithm/1} , {password_hash_algorithm, fun password_hash_algorithm/1}
, {salt_position, {enum, [prefix, suffix]}} , {salt_position, {enum, [prefix, suffix]}}
, {query, fun query/1} , {query, fun query/1}
] ++ emqx_connector_schema_lib:relational_db_fields() ] ++ emqx_authn_schema:common_fields()
++ emqx_connector_schema_lib:relational_db_fields()
++ emqx_connector_schema_lib:ssl_fields(). ++ emqx_connector_schema_lib:ssl_fields().
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]}; password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
@ -58,6 +66,9 @@ query(_) -> undefined.
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() ->
[hoconsc:ref(?MODULE, config)].
create(#{ query := Query0 create(#{ query := Query0
, password_hash_algorithm := Algorithm , password_hash_algorithm := Algorithm
, salt_position := SaltPosition , salt_position := SaltPosition
@ -70,9 +81,9 @@ create(#{ query := Query0
salt_position => SaltPosition, salt_position => SaltPosition,
'_unique' => Unique}, '_unique' => Unique},
case emqx_resource:create_local(Unique, emqx_connector_pgsql, Config) of case emqx_resource:create_local(Unique, emqx_connector_pgsql, Config) of
{ok, _} -> {ok, already_created} ->
{ok, State}; {ok, State};
{error, already_created} -> {ok, _} ->
{ok, State}; {ok, State};
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
@ -98,22 +109,27 @@ authenticate(#{password := Password} = Credential,
case emqx_resource:query(Unique, {sql, Query, Params}) of case emqx_resource:query(Unique, {sql, Query, Params}) of
{ok, _Columns, []} -> ignore; {ok, _Columns, []} -> ignore;
{ok, Columns, Rows} -> {ok, Columns, Rows} ->
%% TODO: Support superuser NColumns = [Name || #column{name = Name} <- Columns],
Selected = maps:from_list(lists:zip(Columns, Rows)), Selected = maps:from_list(lists:zip(NColumns, Rows)),
check_password(Password, Selected, State); case check_password(Password, Selected, State) of
ok ->
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}};
{error, Reason} ->
{error, Reason}
end;
{error, _Reason} -> {error, _Reason} ->
ignore ignore
end end
catch catch
error:Reason -> error:Error ->
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Reason]), ?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Error]),
ignore ignore
end. end.
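epgsql describes result columns with #column{} records, which is why the epgsql.hrl include is added; the names are unwrapped before the row is zipped into a map, and superuser handling then mirrors the MySQL backend. A short sketch of the unwrapping with invented data:

    %% #column{} comes from epgsql/include/epgsql.hrl; values are made up.
    Columns  = [#column{name = <<"password_hash">>}, #column{name = <<"is_superuser">>}],
    Row      = [<<"a1b2c3">>, false],
    NColumns = [Name || #column{name = Name} <- Columns],
    Selected = maps:from_list(lists:zip(NColumns, Row)),
    false    = maps:get(<<"is_superuser">>, Selected, false).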
destroy(#{'_unique' := Unique}) -> destroy(#{'_unique' := Unique}) ->
_ = emqx_resource:remove_local(Unique), _ = emqx_resource:remove_local(Unique),
ok. ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Internal functions %% Internal functions
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -121,17 +137,17 @@ destroy(#{'_unique' := Unique}) ->
check_password(undefined, _Selected, _State) -> check_password(undefined, _Selected, _State) ->
{error, bad_username_or_password}; {error, bad_username_or_password};
check_password(Password, check_password(Password,
#{password_hash := Hash}, #{<<"password_hash">> := Hash},
#{password_hash_algorithm := bcrypt}) -> #{password_hash_algorithm := bcrypt}) ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
true -> ok; true -> ok;
false -> {error, bad_username_or_password} false -> {error, bad_username_or_password}
end; end;
check_password(Password, check_password(Password,
#{password_hash := Hash} = Selected, #{<<"password_hash">> := Hash} = Selected,
#{password_hash_algorithm := Algorithm, #{password_hash_algorithm := Algorithm,
salt_position := SaltPosition}) -> salt_position := SaltPosition}) ->
Salt = maps:get(salt, Selected, <<>>), Salt = maps:get(<<"salt">>, Selected, <<>>),
case Hash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of case Hash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of
true -> ok; true -> ok;
false -> {error, bad_username_or_password} false -> {error, bad_username_or_password}
View File
@ -21,12 +21,15 @@
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
-behaviour(emqx_authentication).
-export([ structs/0 -export([ namespace/0
, roots/0
, fields/1 , fields/1
]). ]).
-export([ create/1 -export([ refs/0
, create/1
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -36,14 +39,14 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
structs() -> [""]. namespace() -> "authn:password-based:redis".
fields("") -> roots() ->
[ {config, {union, [ hoconsc:t(standalone) [ {config, {union, [ hoconsc:mk(standalone)
, hoconsc:t(cluster) , hoconsc:mk(cluster)
, hoconsc:t(sentinel) , hoconsc:mk(sentinel)
]}} ]}}
]; ].
fields(standalone) -> fields(standalone) ->
common_fields() ++ emqx_connector_redis:fields(single); common_fields() ++ emqx_connector_redis:fields(single);
@ -55,13 +58,12 @@ fields(sentinel) ->
common_fields() ++ emqx_connector_redis:fields(sentinel). common_fields() ++ emqx_connector_redis:fields(sentinel).
common_fields() -> common_fields() ->
[ {name, fun emqx_authn_schema:authenticator_name/1} [ {mechanism, {enum, ['password-based']}}
, {mechanism, {enum, ['password-based']}} , {backend, {enum, [redis]}}
, {server_type, {enum, [redis]}}
, {query, fun query/1} , {query, fun query/1}
, {password_hash_algorithm, fun password_hash_algorithm/1} , {password_hash_algorithm, fun password_hash_algorithm/1}
, {salt_position, fun salt_position/1} , {salt_position, fun salt_position/1}
]. ] ++ emqx_authn_schema:common_fields().
query(type) -> string(); query(type) -> string();
query(nullable) -> false; query(nullable) -> false;
@ -79,6 +81,12 @@ salt_position(_) -> undefined.
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() ->
[ hoconsc:ref(?MODULE, standalone)
, hoconsc:ref(?MODULE, cluster)
, hoconsc:ref(?MODULE, sentinel)
].
create(#{ query := Query create(#{ query := Query
, '_unique' := Unique , '_unique' := Unique
} = Config) -> } = Config) ->
@ -89,9 +97,9 @@ create(#{ query := Query
, '_unique'], Config), , '_unique'], Config),
NState = State#{query => NQuery}, NState = State#{query => NQuery},
case emqx_resource:create_local(Unique, emqx_connector_redis, Config) of case emqx_resource:create_local(Unique, emqx_connector_redis, Config) of
{ok, _} -> {ok, already_created} ->
{ok, NState}; {ok, NState};
{error, already_created} -> {ok, _} ->
{ok, NState}; {ok, NState};
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
@ -124,7 +132,13 @@ authenticate(#{password := Password} = Credential,
NKey = binary_to_list(iolist_to_binary(replace_placeholders(Key, Credential))), NKey = binary_to_list(iolist_to_binary(replace_placeholders(Key, Credential))),
case emqx_resource:query(Unique, {cmd, [Command, NKey | Fields]}) of case emqx_resource:query(Unique, {cmd, [Command, NKey | Fields]}) of
{ok, Values} -> {ok, Values} ->
check_password(Password, merge(Fields, Values), State); Selected = merge(Fields, Values),
case check_password(Password, Selected, State) of
ok ->
{ok, #{is_superuser => maps:get("is_superuser", Selected, false)}};
{error, Reason} ->
{error, Reason}
end;
{error, Reason} -> {error, Reason} ->
?LOG(error, "['~s'] Query failed: ~p", [Unique, Reason]), ?LOG(error, "['~s'] Query failed: ~p", [Unique, Reason]),
ignore ignore
@ -166,11 +180,11 @@ check_fields(["password_hash" | More], false) ->
check_fields(More, true); check_fields(More, true);
check_fields(["salt" | More], HasPassHash) -> check_fields(["salt" | More], HasPassHash) ->
check_fields(More, HasPassHash); check_fields(More, HasPassHash);
% check_fields(["is_superuser" | More], HasPassHash) -> check_fields(["is_superuser" | More], HasPassHash) ->
% check_fields(More, HasPassHash); check_fields(More, HasPassHash);
check_fields([Field | _], _) -> check_fields([Field | _], _) ->
error({unsupported_field, Field}). error({unsupported_field, Field}).
parse_key(Key) -> parse_key(Key) ->
Tokens = re:split(Key, "(" ++ ?RE_PLACEHOLDER ++ ")", [{return, binary}, group, trim]), Tokens = re:split(Key, "(" ++ ?RE_PLACEHOLDER ++ ")", [{return, binary}, group, trim]),
parse_key(Tokens, []). parse_key(Tokens, []).
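For Redis the configured query is a command such as HMGET, and its field list may now include is_superuser because the previously commented-out check_fields/2 clause is restored; the value merged from the reply is then read with its string key. A hedged usage sketch; the key, fields and resource id are illustrative:

    %% merge/2 is the helper in this module that pairs the requested fields with the reply.
    Fields = ["password_hash", "salt", "is_superuser"],
    {ok, Values} = emqx_resource:query(Unique, {cmd, ["HMGET", "mqtt_user:myuser" | Fields]}),
    Selected = merge(Fields, Values),
    IsSuperuser = maps:get("is_superuser", Selected, false).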
View File
@ -1,3 +1,3 @@
user_id,password_hash,salt user_id,password_hash,salt,is_superuser
myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235 myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235,true
myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139 myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139,false
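The bundled CSV fixture gains an is_superuser column; the importer shown earlier (get_user_info_by_seq/3) maps the literal strings true and false onto booleans, while the salt column remains optional. Assuming the header above, the myuser3 row parses to roughly this map before insert_user/5 is called:

    %% Values copied from the fixture; the map shape follows the import code above.
    #{ user_id       => <<"myuser3">>
     , password_hash => <<"b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75">>
     , salt          => <<"de1024f462fb83910fd13151bd4bd235">>
     , is_superuser  => true
     }.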
Some files were not shown because too many files have changed in this diff.