Merge remote-tracking branch 'upstream/master' into 1115-sync-master-to-r53
commit 65ba381fd8

@@ -0,0 +1,3 @@
+.git/*
+*/.git/*
+*/.github/*
|
@@ -137,9 +137,10 @@ jobs:
 ENABLE_COVER_COMPILE: 1
 run: |
 make ensure-rebar3
-make ${PROFILE}
-make test-compile
-zip -ryq $PROFILE.zip .
+make ${PROFILE}-compile test-compile
+echo "PROFILE=${PROFILE}" | tee -a .env
+echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env
+zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
 - uses: actions/upload-artifact@v3
 with:
 name: ${{ matrix.profile }}
|
@@ -146,7 +146,9 @@ jobs:
 ENABLE_COVER_COMPILE: 1
 run: |
 make $PROFILE
-zip -ryq $PROFILE.zip .
+echo "PROFILE=${PROFILE}" | tee -a .env
+echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env
+zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip .
 - uses: actions/upload-artifact@v3
 with:
 name: ${{ matrix.profile }}
|
@@ -68,6 +68,9 @@ on:
 type: string
 default: '5.2-3'
 
+permissions:
+contents: read
+
 jobs:
 docker:
 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
|
@@ -20,6 +20,9 @@ on:
 required: true
 type: string
 
+permissions:
+contents: read
+
 jobs:
 docker:
 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
|
@@ -7,6 +7,9 @@ on:
 required: true
 type: string
 
+permissions:
+contents: read
+
 jobs:
 check_deps_integrity:
 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
|
@@ -8,6 +8,9 @@ on:
 ref:
 required: false
 
+permissions:
+contents: read
+
 jobs:
 analyze:
 name: Analyze
|
@@ -15,7 +18,6 @@ jobs:
 timeout-minutes: 360
 permissions:
 actions: read
-contents: read
 security-events: write
 container:
 image: ghcr.io/emqx/emqx-builder/5.2-3:1.14.5-25.3.2-2-ubuntu22.04
|
@@ -7,6 +7,12 @@ on:
 # run hourly
 - cron: "0 * * * *"
 workflow_dispatch:
+inputs:
+ref:
+required: false
+
+permissions:
+contents: read
 
 jobs:
 rerun-failed-jobs:
|
@@ -17,10 +23,16 @@ jobs:
 actions: write
 steps:
 - uses: actions/checkout@v3
+with:
+ref: ${{ github.event.inputs.ref || 'master' }}
 - name: run script
 shell: bash
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 run: |
-python3 scripts/rerun-failed-checks.py
+gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/emqx/emqx/actions/runs > runs.json
+for id in $(jq -r '.workflow_runs[] | select((."conclusion" != "success") and .run_attempt < 3) | .id' runs.json); do
+echo "rerun https://github.com/emqx/emqx/actions/runs/$id"
+gh api --method POST /repos/emqx/emqx/actions/runs/$id/rerun-failed-jobs
+done
|
@@ -19,6 +19,9 @@ env:
 TF_VAR_prometheus_remote_write_url: ${{ secrets.TF_EMQX_PERF_TEST_PROMETHEUS_REMOTE_WRITE_URL }}
 SLACK_WEBHOOK_URL: ${{ secrets.TF_EMQX_PERF_TEST_SLACK_URL }}
 
+permissions:
+contents: read
+
 jobs:
 prepare:
 runs-on: ubuntu-latest
|
@@ -13,9 +13,14 @@ on:
 required: true
 default: false
 
+permissions:
+contents: read
+
 jobs:
 upload:
 runs-on: ubuntu-22.04
+permissions:
+packages: write
 strategy:
 fail-fast: false
 steps:
|
@@ -11,12 +11,13 @@ on:
 required: true
 type: string
 
+permissions:
+contents: read
+
 jobs:
 run_conf_tests:
 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
 container: ${{ inputs.builder }}
-env:
-PROFILE: ${{ matrix.profile }}
 strategy:
 fail-fast: false
 matrix:
|
@@ -31,6 +32,8 @@ jobs:
 run: |
 unzip -o -q ${{ matrix.profile }}.zip
 git config --global --add safe.directory "$GITHUB_WORKSPACE"
+- run: cat .env | tee -a $GITHUB_ENV
+- run: make ${{ matrix.profile }}
 - run: ./scripts/test/check-example-configs.sh
 - run: ./scripts/conf-test/run.sh
 - name: print erlang log
|
@@ -14,6 +14,9 @@ on:
 required: true
 type: string
 
+permissions:
+contents: read
+
 jobs:
 basic-tests:
 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
|
@@ -23,6 +23,9 @@ on:
 env:
 IS_CI: "yes"
 
+permissions:
+contents: read
+
 jobs:
 run_emqx_app_tests:
 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
|
@@ -14,6 +14,9 @@ on:
 required: true
 type: string
 
+permissions:
+contents: read
+
 jobs:
 helm_test:
 runs-on: ${{ github.repository_owner == 'emqx' && fromJSON('["self-hosted","ephemeral","linux","x64"]') || 'ubuntu-22.04' }}
|
@@ -11,6 +11,9 @@ on:
 required: true
 type: string
 
+permissions:
+contents: read
+
 jobs:
 relup_test_plan:
 runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
|
@@ -0,0 +1,51 @@
+name: Scorecard supply-chain security
+on:
+# For Branch-Protection check. Only the default branch is supported. See
+# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
+branch_protection_rule:
+# To guarantee Maintained check is occasionally updated. See
+# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
+schedule:
+- cron: '25 21 * * 6'
+push:
+branches: [ "master" ]
+workflow_dispatch:
+
+permissions: read-all
+
+jobs:
+analysis:
+name: Scorecard analysis
+runs-on: ubuntu-latest
+permissions:
+security-events: write
+id-token: write
+
+steps:
+- name: "Checkout code"
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
+with:
+persist-credentials: false
+
+- name: "Run analysis"
+uses: ossf/scorecard-action@483ef80eb98fb506c348f7d62e28055e49fe2398 # v2.3.0
+with:
+results_file: results.sarif
+results_format: sarif
+# - Publish results to OpenSSF REST API for easy access by consumers
+# - Allows the repository to include the Scorecard badge.
+# - See https://github.com/ossf/scorecard-action#publishing-results.
+publish_results: true
+
+- name: "Upload artifact"
+uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
+with:
+name: SARIF file
+path: results.sarif
+retention-days: 5
+
+# Upload the results to GitHub's code scanning dashboard.
+- name: "Upload to code-scanning"
+uses: github/codeql-action/upload-sarif@8e0b1c74b1d5a0077b04d064c76ee714d3da7637 # v2.22.1
+with:
+sarif_file: results.sarif
|
@@ -7,6 +7,9 @@ concurrency:
 on:
 workflow_call:
 
+permissions:
+contents: read
+
 jobs:
 spellcheck:
 strategy:
|
@@ -8,6 +8,9 @@ on:
 - cron: "0 * * * *"
 workflow_dispatch:
 
+permissions:
+contents: read
+
 jobs:
 stale:
 if: github.repository_owner == 'emqx'
|
@@ -17,6 +17,9 @@ on:
 env:
 IS_CI: "yes"
 
+permissions:
+contents: read
+
 jobs:
 static_checks:
 runs-on: ${{ github.repository_owner == 'emqx' && fromJSON('["self-hosted","ephemeral","linux","x64"]') || 'ubuntu-22.04' }}
|
@@ -37,10 +40,9 @@ jobs:
 - uses: actions/cache@v3
 with:
 path: "emqx_dialyzer_${{ matrix.otp }}_plt"
-key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*', 'lib-ee/*/rebar.*') }}
+key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }}
 restore-keys: |
 rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-
+- run: cat .env | tee -a $GITHUB_ENV
 - name: run static checks
-env:
-PROFILE: ${{ matrix.profile }}
 run: make static_checks
|
Makefile (4 lines changed)
|
@@ -315,8 +315,10 @@ $(foreach tt,$(ALL_ELIXIR_TGZS),$(eval $(call gen-elixir-tgz-target,$(tt))))
 
 .PHONY: fmt
 fmt: $(REBAR)
-@$(SCRIPTS)/erlfmt -w '{apps,lib-ee}/*/{src,include,priv,test,integration_test}/**/*.{erl,hrl,app.src,eterm}'
+@$(SCRIPTS)/erlfmt -w 'apps/*/{src,include,priv,test,integration_test}/**/*.{erl,hrl,app.src,eterm}'
 @$(SCRIPTS)/erlfmt -w 'rebar.config.erl'
+@$(SCRIPTS)/erlfmt -w '$(SCRIPTS)/**/*.escript'
+@$(SCRIPTS)/erlfmt -w 'bin/**/*.escript'
 @mix format
 
 .PHONY: clean-test-cluster-config
|
@@ -14,9 +14,4 @@
 %% limitations under the License.
 %%--------------------------------------------------------------------
 
--ifndef(EMQX_BPAPI_HRL).
--define(EMQX_BPAPI_HRL, true).
-
--compile({parse_transform, emqx_bpapi_trans}).
-
--endif.
+-include_lib("emqx_utils/include/bpapi.hrl").
|
@@ -55,29 +55,7 @@
 
 -record(subscription, {topic, subid, subopts}).
 
-%% See 'Application Message' in MQTT Version 5.0
--record(message, {
-%% Global unique message ID
-id :: binary(),
-%% Message QoS
-qos = 0,
-%% Message from
-from :: atom() | binary(),
-%% Message flags
-flags = #{} :: emqx_types:flags(),
-%% Message headers. May contain any metadata. e.g. the
-%% protocol version number, username, peerhost or
-%% the PUBLISH properties (MQTT 5.0).
-headers = #{} :: emqx_types:headers(),
-%% Topic that the message is published to
-topic :: emqx_types:topic(),
-%% Message Payload
-payload :: emqx_types:payload(),
-%% Timestamp (Unit: millisecond)
-timestamp :: integer(),
-%% not used so far, for future extension
-extra = [] :: term()
-}).
+-include_lib("emqx_utils/include/emqx_message.hrl").
 
 -record(delivery, {
 %% Sender of the delivery
|
@@ -19,67 +19,79 @@
 
 -define(PH_VAR_THIS, <<"$_THIS_">>).
 
--define(PH(Type), <<"${", Type/binary, "}">>).
+-define(PH(Var), <<"${" Var "}">>).
 
 %% action: publish/subscribe
--define(PH_ACTION, <<"${action}">>).
+-define(VAR_ACTION, "action").
+-define(PH_ACTION, ?PH(?VAR_ACTION)).
 
 %% cert
--define(PH_CERT_SUBJECT, <<"${cert_subject}">>).
--define(PH_CERT_CN_NAME, <<"${cert_common_name}">>).
+-define(VAR_CERT_SUBJECT, "cert_subject").
+-define(VAR_CERT_CN_NAME, "cert_common_name").
+-define(PH_CERT_SUBJECT, ?PH(?VAR_CERT_SUBJECT)).
+-define(PH_CERT_CN_NAME, ?PH(?VAR_CERT_CN_NAME)).
 
 %% MQTT
--define(PH_PASSWORD, <<"${password}">>).
--define(PH_CLIENTID, <<"${clientid}">>).
--define(PH_FROM_CLIENTID, <<"${from_clientid}">>).
--define(PH_USERNAME, <<"${username}">>).
--define(PH_FROM_USERNAME, <<"${from_username}">>).
--define(PH_TOPIC, <<"${topic}">>).
+-define(VAR_PASSWORD, "password").
+-define(VAR_CLIENTID, "clientid").
+-define(VAR_USERNAME, "username").
+-define(VAR_TOPIC, "topic").
+-define(PH_PASSWORD, ?PH(?VAR_PASSWORD)).
+-define(PH_CLIENTID, ?PH(?VAR_CLIENTID)).
+-define(PH_FROM_CLIENTID, ?PH("from_clientid")).
+-define(PH_USERNAME, ?PH(?VAR_USERNAME)).
+-define(PH_FROM_USERNAME, ?PH("from_username")).
+-define(PH_TOPIC, ?PH(?VAR_TOPIC)).
 %% MQTT payload
--define(PH_PAYLOAD, <<"${payload}">>).
+-define(PH_PAYLOAD, ?PH("payload")).
 %% client IPAddress
--define(PH_PEERHOST, <<"${peerhost}">>).
+-define(VAR_PEERHOST, "peerhost").
+-define(PH_PEERHOST, ?PH(?VAR_PEERHOST)).
 %% ip & port
--define(PH_HOST, <<"${host}">>).
--define(PH_PORT, <<"${port}">>).
+-define(PH_HOST, ?PH("host")).
+-define(PH_PORT, ?PH("port")).
 %% Enumeration of message QoS 0,1,2
--define(PH_QOS, <<"${qos}">>).
--define(PH_FLAGS, <<"${flags}">>).
+-define(VAR_QOS, "qos").
+-define(PH_QOS, ?PH(?VAR_QOS)).
+-define(PH_FLAGS, ?PH("flags")).
 %% Additional data related to process within the MQTT message
--define(PH_HEADERS, <<"${headers}">>).
+-define(PH_HEADERS, ?PH("headers")).
 %% protocol name
--define(PH_PROTONAME, <<"${proto_name}">>).
+-define(VAR_PROTONAME, "proto_name").
+-define(PH_PROTONAME, ?PH(?VAR_PROTONAME)).
 %% protocol version
--define(PH_PROTOVER, <<"${proto_ver}">>).
+-define(PH_PROTOVER, ?PH("proto_ver")).
 %% MQTT keepalive interval
--define(PH_KEEPALIVE, <<"${keepalive}">>).
+-define(PH_KEEPALIVE, ?PH("keepalive")).
 %% MQTT clean_start
--define(PH_CLEAR_START, <<"${clean_start}">>).
+-define(PH_CLEAR_START, ?PH("clean_start")).
 %% MQTT Session Expiration time
--define(PH_EXPIRY_INTERVAL, <<"${expiry_interval}">>).
+-define(PH_EXPIRY_INTERVAL, ?PH("expiry_interval")).
 
 %% Time when PUBLISH message reaches Broker (ms)
--define(PH_PUBLISH_RECEIVED_AT, <<"${publish_received_at}">>).
+-define(PH_PUBLISH_RECEIVED_AT, ?PH("publish_received_at")).
 %% Mountpoint for bridging messages
--define(PH_MOUNTPOINT, <<"${mountpoint}">>).
+-define(VAR_MOUNTPOINT, "mountpoint").
+-define(PH_MOUNTPOINT, ?PH(?VAR_MOUNTPOINT)).
 %% IPAddress and Port of terminal
--define(PH_PEERNAME, <<"${peername}">>).
+-define(PH_PEERNAME, ?PH("peername")).
 %% IPAddress and Port listened by emqx
--define(PH_SOCKNAME, <<"${sockname}">>).
+-define(PH_SOCKNAME, ?PH("sockname")).
 %% whether it is MQTT bridge connection
--define(PH_IS_BRIDGE, <<"${is_bridge}">>).
+-define(PH_IS_BRIDGE, ?PH("is_bridge")).
 %% Terminal connection completion time (s)
--define(PH_CONNECTED_AT, <<"${connected_at}">>).
+-define(PH_CONNECTED_AT, ?PH("connected_at")).
 %% Event trigger time(millisecond)
--define(PH_TIMESTAMP, <<"${timestamp}">>).
+-define(PH_TIMESTAMP, ?PH("timestamp")).
 %% Terminal disconnection completion time (s)
--define(PH_DISCONNECTED_AT, <<"${disconnected_at}">>).
+-define(PH_DISCONNECTED_AT, ?PH("disconnected_at")).
 
--define(PH_NODE, <<"${node}">>).
--define(PH_REASON, <<"${reason}">>).
+-define(PH_NODE, ?PH("node")).
+-define(PH_REASON, ?PH("reason")).
 
--define(PH_ENDPOINT_NAME, <<"${endpoint_name}">>).
--define(PH_RETAIN, <<"${retain}">>).
+-define(PH_ENDPOINT_NAME, ?PH("endpoint_name")).
+-define(VAR_RETAIN, "retain").
+-define(PH_RETAIN, ?PH(?VAR_RETAIN)).
 
 %% sync change these place holder with binary def.
 -define(PH_S_ACTION, "${action}").
|
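A minimal sketch of how the reworked ?PH/1 macro behaves, assuming Erlang's parse-time concatenation of adjacent string literals; the macro and variable names are the ones introduced in the hunk above, and the check function is illustrative only:

-define(PH(Var), <<"${" Var "}">>).
-define(VAR_CLIENTID, "clientid").
-define(PH_CLIENTID, ?PH(?VAR_CLIENTID)).

%% ?PH_CLIENTID expands to <<"${" "clientid" "}">>, which the compiler
%% folds into <<"${clientid}">>, the same binary the old literal defined,
%% while ?VAR_CLIENTID remains available as a plain string for callers
%% that only need the variable name.
clientid_placeholder_is_unchanged() ->
    <<"${clientid}">> = ?PH_CLIENTID,
    ok.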
@@ -1,7 +1,7 @@
 %%--------------------------------------------------------------------
 %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
 %%--------------------------------------------------------------------
--module(emqx_ds_SUITE).
+-module(emqx_persistent_session_ds_SUITE).
 
 -compile(export_all).
 -compile(nowarn_export_all).
|
@@ -11,10 +11,11 @@
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").
 -include_lib("emqx/include/emqx_mqtt.hrl").
 
+-include_lib("emqx/src/emqx_persistent_session_ds.hrl").
+
 -define(DEFAULT_KEYSPACE, default).
 -define(DS_SHARD_ID, <<"local">>).
 -define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}).
--define(ITERATOR_REF_TAB, emqx_ds_iterator_ref).
 
 -import(emqx_common_test_helpers, [on_exit/1]).
 
|
@@ -91,9 +92,6 @@ get_mqtt_port(Node, Type) ->
 {_IP, Port} = erpc:call(Node, emqx_config, get, [[listeners, Type, default, bind]]),
 Port.
 
-get_all_iterator_refs(Node) ->
-erpc:call(Node, mnesia, dirty_all_keys, [?ITERATOR_REF_TAB]).
-
 get_all_iterator_ids(Node) ->
 Fn = fun(K, _V, Acc) -> [K | Acc] end,
 erpc:call(Node, fun() ->
|
@@ -122,10 +120,40 @@ start_client(Opts0 = #{}) ->
 properties => #{'Session-Expiry-Interval' => 300}
 },
 Opts = maps:to_list(emqx_utils_maps:deep_merge(Defaults, Opts0)),
+ct:pal("starting client with opts:\n ~p", [Opts]),
 {ok, Client} = emqtt:start_link(Opts),
 on_exit(fun() -> catch emqtt:stop(Client) end),
 Client.
 
+restart_node(Node, NodeSpec) ->
+?tp(will_restart_node, #{}),
+?tp(notice, "restarting node", #{node => Node}),
+true = monitor_node(Node, true),
+ok = erpc:call(Node, init, restart, []),
+receive
+{nodedown, Node} ->
+ok
+after 10_000 ->
+ct:fail("node ~p didn't stop", [Node])
+end,
+?tp(notice, "waiting for nodeup", #{node => Node}),
+wait_nodeup(Node),
+wait_gen_rpc_down(NodeSpec),
+?tp(notice, "restarting apps", #{node => Node}),
+Apps = maps:get(apps, NodeSpec),
+ok = erpc:call(Node, emqx_cth_suite, load_apps, [Apps]),
+_ = erpc:call(Node, emqx_cth_suite, start_apps, [Apps, NodeSpec]),
+%% have to re-inject this so that we may stop the node succesfully at the
+%% end....
+ok = emqx_cth_cluster:set_node_opts(Node, NodeSpec),
+ok = snabbkaffe:forward_trace(Node),
+?tp(notice, "node restarted", #{node => Node}),
+?tp(restarted_node, #{}),
+ok.
+
+is_persistent_connect_opts(#{properties := #{'Session-Expiry-Interval' := EI}}) ->
+EI > 0.
+
 %%------------------------------------------------------------------------------
 %% Testcases
 %%------------------------------------------------------------------------------
|
@@ -143,24 +171,14 @@ t_non_persistent_session_subscription(_Config) ->
 {ok, _} = emqtt:connect(Client),
 ?tp(notice, "subscribing", #{}),
 {ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client, SubTopicFilter, qos2),
-IteratorRefs = get_all_iterator_refs(node()),
-IteratorIds = get_all_iterator_ids(node()),
 
 ok = emqtt:stop(Client),
 
-#{
-iterator_refs => IteratorRefs,
-iterator_ids => IteratorIds
-}
+ok
 end,
-fun(Res, Trace) ->
+fun(Trace) ->
 ct:pal("trace:\n ~p", [Trace]),
-#{
-iterator_refs := IteratorRefs,
-iterator_ids := IteratorIds
-} = Res,
-?assertEqual([], IteratorRefs),
-?assertEqual({ok, []}, IteratorIds),
+?assertEqual([], ?of_kind(ds_session_subscription_added, Trace)),
 ok
 end
 ),
|
@@ -175,7 +193,7 @@ t_session_subscription_idempotency(Config) ->
 ?check_trace(
 begin
 ?force_ordering(
-#{?snk_kind := persistent_session_ds_iterator_added},
+#{?snk_kind := persistent_session_ds_subscription_added},
 _NEvents0 = 1,
 #{?snk_kind := will_restart_node},
 _Guard0 = true
|
@@ -187,32 +205,7 @@ t_session_subscription_idempotency(Config) ->
 _Guard1 = true
 ),
 
-spawn_link(fun() ->
-?tp(will_restart_node, #{}),
-?tp(notice, "restarting node", #{node => Node1}),
-true = monitor_node(Node1, true),
-ok = erpc:call(Node1, init, restart, []),
-receive
-{nodedown, Node1} ->
-ok
-after 10_000 ->
-ct:fail("node ~p didn't stop", [Node1])
-end,
-?tp(notice, "waiting for nodeup", #{node => Node1}),
-wait_nodeup(Node1),
-wait_gen_rpc_down(Node1Spec),
-?tp(notice, "restarting apps", #{node => Node1}),
-Apps = maps:get(apps, Node1Spec),
-ok = erpc:call(Node1, emqx_cth_suite, load_apps, [Apps]),
-_ = erpc:call(Node1, emqx_cth_suite, start_apps, [Apps, Node1Spec]),
-%% have to re-inject this so that we may stop the node succesfully at the
-%% end....
-ok = emqx_cth_cluster:set_node_opts(Node1, Node1Spec),
-ok = snabbkaffe:forward_trace(Node1),
-?tp(notice, "node restarted", #{node => Node1}),
-?tp(restarted_node, #{}),
-ok
-end),
+spawn_link(fun() -> restart_node(Node1, Node1Spec) end),
 
 ?tp(notice, "starting 1", #{}),
 Client0 = start_client(#{port => Port, clientid => ClientId}),
|
@@ -223,7 +216,7 @@ t_session_subscription_idempotency(Config) ->
 receive
 {'EXIT', {shutdown, _}} ->
 ok
-after 0 -> ok
+after 100 -> ok
 end,
 process_flag(trap_exit, false),
 
|
@@ -240,10 +233,7 @@ t_session_subscription_idempotency(Config) ->
 end,
 fun(Trace) ->
 ct:pal("trace:\n ~p", [Trace]),
-%% Exactly one iterator should have been opened.
 SubTopicFilterWords = emqx_topic:words(SubTopicFilter),
-?assertEqual([{ClientId, SubTopicFilterWords}], get_all_iterator_refs(Node1)),
-?assertMatch({ok, [_]}, get_all_iterator_ids(Node1)),
 ?assertMatch(
 {ok, #{}, #{SubTopicFilterWords := #{}}},
 erpc:call(Node1, emqx_persistent_session_ds, session_open, [ClientId])
|
@@ -262,7 +252,10 @@ t_session_unsubscription_idempotency(Config) ->
 ?check_trace(
 begin
 ?force_ordering(
-#{?snk_kind := persistent_session_ds_close_iterators, ?snk_span := {complete, _}},
+#{
+?snk_kind := persistent_session_ds_subscription_delete,
+?snk_span := {complete, _}
+},
 _NEvents0 = 1,
 #{?snk_kind := will_restart_node},
 _Guard0 = true
|
@@ -270,36 +263,11 @@ t_session_unsubscription_idempotency(Config) ->
 ?force_ordering(
 #{?snk_kind := restarted_node},
 _NEvents1 = 1,
-#{?snk_kind := persistent_session_ds_iterator_delete, ?snk_span := start},
+#{?snk_kind := persistent_session_ds_subscription_route_delete, ?snk_span := start},
 _Guard1 = true
 ),
 
-spawn_link(fun() ->
-?tp(will_restart_node, #{}),
-?tp(notice, "restarting node", #{node => Node1}),
-true = monitor_node(Node1, true),
-ok = erpc:call(Node1, init, restart, []),
-receive
-{nodedown, Node1} ->
-ok
-after 10_000 ->
-ct:fail("node ~p didn't stop", [Node1])
-end,
-?tp(notice, "waiting for nodeup", #{node => Node1}),
-wait_nodeup(Node1),
-wait_gen_rpc_down(Node1Spec),
-?tp(notice, "restarting apps", #{node => Node1}),
-Apps = maps:get(apps, Node1Spec),
-ok = erpc:call(Node1, emqx_cth_suite, load_apps, [Apps]),
-_ = erpc:call(Node1, emqx_cth_suite, start_apps, [Apps, Node1Spec]),
-%% have to re-inject this so that we may stop the node succesfully at the
-%% end....
-ok = emqx_cth_cluster:set_node_opts(Node1, Node1Spec),
-ok = snabbkaffe:forward_trace(Node1),
-?tp(notice, "node restarted", #{node => Node1}),
-?tp(restarted_node, #{}),
-ok
-end),
+spawn_link(fun() -> restart_node(Node1, Node1Spec) end),
 
 ?tp(notice, "starting 1", #{}),
 Client0 = start_client(#{port => Port, clientid => ClientId}),
|
@@ -312,7 +280,7 @@ t_session_unsubscription_idempotency(Config) ->
 receive
 {'EXIT', {shutdown, _}} ->
 ok
-after 0 -> ok
+after 100 -> ok
 end,
 process_flag(trap_exit, false),
 
|
@@ -327,7 +295,7 @@ t_session_unsubscription_idempotency(Config) ->
 ?wait_async_action(
 emqtt:unsubscribe(Client1, SubTopicFilter),
 #{
-?snk_kind := persistent_session_ds_iterator_delete,
+?snk_kind := persistent_session_ds_subscription_route_delete,
 ?snk_span := {complete, _}
 },
 15_000
|
@@ -339,9 +307,101 @@ t_session_unsubscription_idempotency(Config) ->
 end,
 fun(Trace) ->
 ct:pal("trace:\n ~p", [Trace]),
-%% No iterators remaining
-?assertEqual([], get_all_iterator_refs(Node1)),
-?assertEqual({ok, []}, get_all_iterator_ids(Node1)),
+?assertMatch(
+{ok, #{}, Subs = #{}} when map_size(Subs) =:= 0,
+erpc:call(Node1, emqx_persistent_session_ds, session_open, [ClientId])
+),
+ok
+end
+),
+ok.
+
+t_session_discard_persistent_to_non_persistent(_Config) ->
+ClientId = atom_to_binary(?FUNCTION_NAME),
+Params = #{
+client_id => ClientId,
+reconnect_opts =>
+#{
+clean_start => true,
+%% we set it to zero so that a new session is not created.
+properties => #{'Session-Expiry-Interval' => 0},
+proto_ver => v5
+}
+},
+do_t_session_discard(Params).
+
+t_session_discard_persistent_to_persistent(_Config) ->
+ClientId = atom_to_binary(?FUNCTION_NAME),
+Params = #{
+client_id => ClientId,
+reconnect_opts =>
+#{
+clean_start => true,
+properties => #{'Session-Expiry-Interval' => 30},
+proto_ver => v5
+}
+},
+do_t_session_discard(Params).
+
+do_t_session_discard(Params) ->
+#{
+client_id := ClientId,
+reconnect_opts := ReconnectOpts0
+} = Params,
+ReconnectOpts = ReconnectOpts0#{clientid => ClientId},
+SubTopicFilter = <<"t/+">>,
+?check_trace(
+begin
+?tp(notice, "starting", #{}),
+Client0 = start_client(#{
+clientid => ClientId,
+clean_start => false,
+properties => #{'Session-Expiry-Interval' => 30},
+proto_ver => v5
+}),
+{ok, _} = emqtt:connect(Client0),
+?tp(notice, "subscribing", #{}),
+{ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client0, SubTopicFilter, qos2),
+%% Store some matching messages so that streams and iterators are created.
+ok = emqtt:publish(Client0, <<"t/1">>, <<"1">>),
+ok = emqtt:publish(Client0, <<"t/2">>, <<"2">>),
+?retry(
+_Sleep0 = 100,
+_Attempts0 = 50,
+true = map_size(emqx_persistent_session_ds:list_all_streams()) > 0
+),
+?retry(
+_Sleep0 = 100,
+_Attempts0 = 50,
+true = map_size(emqx_persistent_session_ds:list_all_iterators()) > 0
+),
+ok = emqtt:stop(Client0),
+?tp(notice, "disconnected", #{}),
+
+?tp(notice, "reconnecting", #{}),
+%% we still have iterators and streams
+?assert(map_size(emqx_persistent_session_ds:list_all_streams()) > 0),
+?assert(map_size(emqx_persistent_session_ds:list_all_iterators()) > 0),
+Client1 = start_client(ReconnectOpts),
+{ok, _} = emqtt:connect(Client1),
+?assertEqual([], emqtt:subscriptions(Client1)),
+case is_persistent_connect_opts(ReconnectOpts) of
+true ->
+?assertMatch(#{ClientId := _}, emqx_persistent_session_ds:list_all_sessions());
+false ->
+?assertEqual(#{}, emqx_persistent_session_ds:list_all_sessions())
+end,
+?assertEqual(#{}, emqx_persistent_session_ds:list_all_subscriptions()),
+?assertEqual([], emqx_persistent_session_ds_router:topics()),
+?assertEqual(#{}, emqx_persistent_session_ds:list_all_streams()),
+?assertEqual(#{}, emqx_persistent_session_ds:list_all_iterators()),
+ok = emqtt:stop(Client1),
+?tp(notice, "disconnected", #{}),
+
+ok
+end,
+fun(Trace) ->
+ct:pal("trace:\n ~p", [Trace]),
 ok
 end
 ),
|
@@ -14,10 +14,11 @@
 {emqx_conf,1}.
 {emqx_conf,2}.
 {emqx_conf,3}.
-{emqx_connector, 1}.
+{emqx_connector,1}.
 {emqx_dashboard,1}.
 {emqx_delayed,1}.
 {emqx_delayed,2}.
+{emqx_ds,1}.
 {emqx_eviction_agent,1}.
 {emqx_eviction_agent,2}.
 {emqx_exhook,1}.
|
@@ -30,7 +30,7 @@
 {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.7"}}},
 {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.16"}}},
 {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.2.1"}}},
-{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.19"}}},
+{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.0"}}},
 {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},
 {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
 {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
|
@@ -2,7 +2,7 @@
 {application, emqx, [
 {id, "emqx"},
 {description, "EMQX Core"},
-{vsn, "5.1.13"},
+{vsn, "5.1.14"},
 {modules, []},
 {registered, []},
 {applications, [
|
@@ -258,21 +258,21 @@ set_chan_stats(ClientId, ChanPid, Stats) ->
 end.
 
 %% @doc Open a session.
--spec open_session(boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) ->
+-spec open_session(_CleanStart :: boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) ->
 {ok, #{
 session := emqx_session:t(),
 present := boolean(),
 replay => _ReplayContext
 }}
 | {error, Reason :: term()}.
-open_session(true, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
+open_session(_CleanStart = true, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
 Self = self(),
 emqx_cm_locker:trans(ClientId, fun(_) ->
 ok = discard_session(ClientId),
 ok = emqx_session:destroy(ClientInfo, ConnInfo),
 create_register_session(ClientInfo, ConnInfo, Self)
 end);
-open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
+open_session(_CleanStart = false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
 Self = self(),
 emqx_cm_locker:trans(ClientId, fun(_) ->
 case emqx_session:open(ClientInfo, ConnInfo) of
|
@@ -66,8 +66,9 @@
 
 %% - Callbacks with greater priority values will be run before
 %% the ones with lower priority values. e.g. A Callback with
 %% priority = 2 precedes the callback with priority = 1.
-%% - The execution order is the adding order of callbacks if they have
-%% equal priority values.
+%% - If the priorities of the hooks are equal then their execution
+%% order is determined by the lexicographic of hook function
+%% names.
 
 -type hookpoint() :: atom() | binary().
 -type action() :: {module(), atom(), [term()] | undefined}.
|
@@ -33,7 +33,8 @@
 desc/1,
 types/0,
 short_paths/0,
-short_paths_fields/1
+short_paths_fields/0,
+rate_type/0
 ]).
 
 -define(KILOBYTE, 1024).
@@ -103,11 +104,11 @@ roots() ->
 ].
 
 fields(limiter) ->
-short_paths_fields(?MODULE, ?IMPORTANCE_HIDDEN) ++
+short_paths_fields(?IMPORTANCE_HIDDEN) ++
 [
 {Type,
 ?HOCON(?R_REF(node_opts), #{
-desc => ?DESC(Type),
+desc => deprecated_desc(Type),
 importance => ?IMPORTANCE_HIDDEN,
 required => {false, recursively},
 aliases => alias_of_type(Type)
@@ -120,7 +121,7 @@ fields(limiter) ->
 ?HOCON(
 ?R_REF(client_fields),
 #{
-desc => ?DESC(client),
+desc => deprecated_desc(client),
 importance => ?IMPORTANCE_HIDDEN,
 required => {false, recursively},
 deprecated => {since, "5.0.25"}
@@ -129,10 +130,10 @@ fields(limiter) ->
 ];
 fields(node_opts) ->
 [
-{rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => <<"infinity">>})},
+{rate, ?HOCON(rate_type(), #{desc => deprecated_desc(rate), default => <<"infinity">>})},
 {burst,
-?HOCON(burst_rate(), #{
-desc => ?DESC(burst),
+?HOCON(burst_rate_type(), #{
+desc => deprecated_desc(burst),
 default => <<"0">>
 })}
 ];
@@ -142,11 +143,12 @@ fields(bucket_opts) ->
 fields_of_bucket(<<"infinity">>);
 fields(client_opts) ->
 [
-{rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => ?DESC(rate)})},
+{rate, ?HOCON(rate_type(), #{default => <<"infinity">>, desc => deprecated_desc(rate)})},
 {initial,
 ?HOCON(initial(), #{
 default => <<"0">>,
-desc => ?DESC(initial),
+desc => deprecated_desc(initial),
 importance => ?IMPORTANCE_HIDDEN
 })},
 %% low_watermark add for emqx_channel and emqx_session
@@ -157,14 +159,14 @@ fields(client_opts) ->
 ?HOCON(
 initial(),
 #{
-desc => ?DESC(low_watermark),
+desc => deprecated_desc(low_watermark),
 default => <<"0">>,
 importance => ?IMPORTANCE_HIDDEN
 }
 )},
 {burst,
-?HOCON(burst(), #{
-desc => ?DESC(burst),
+?HOCON(burst_type(), #{
+desc => deprecated_desc(burst),
 default => <<"0">>,
 importance => ?IMPORTANCE_HIDDEN,
 aliases => [capacity]
@@ -173,7 +175,7 @@ fields(client_opts) ->
 ?HOCON(
 boolean(),
 #{
-desc => ?DESC(divisible),
+desc => deprecated_desc(divisible),
 default => true,
 importance => ?IMPORTANCE_HIDDEN
 }
@@ -182,7 +184,7 @@ fields(client_opts) ->
 ?HOCON(
 emqx_schema:timeout_duration(),
 #{
-desc => ?DESC(max_retry_time),
+desc => deprecated_desc(max_retry_time),
 default => <<"1h">>,
 importance => ?IMPORTANCE_HIDDEN
 }
@@ -191,7 +193,7 @@ fields(client_opts) ->
 ?HOCON(
 failure_strategy(),
 #{
-desc => ?DESC(failure_strategy),
+desc => deprecated_desc(failure_strategy),
 default => force,
 importance => ?IMPORTANCE_HIDDEN
 }
@@ -204,14 +206,14 @@ fields(listener_client_fields) ->
 fields(Type) ->
 simple_bucket_field(Type).
 
-short_paths_fields(DesModule) ->
-short_paths_fields(DesModule, ?DEFAULT_IMPORTANCE).
+short_paths_fields() ->
+short_paths_fields(?DEFAULT_IMPORTANCE).
 
-short_paths_fields(DesModule, Importance) ->
+short_paths_fields(Importance) ->
 [
 {Name,
-?HOCON(rate(), #{
-desc => ?DESC(DesModule, Name),
+?HOCON(rate_type(), #{
+desc => ?DESC(Name),
 required => false,
 importance => Importance,
 example => Example
@@ -381,7 +383,7 @@ simple_bucket_field(Type) when is_atom(Type) ->
 ?HOCON(
 ?R_REF(?MODULE, client_opts),
 #{
-desc => ?DESC(client),
+desc => deprecated_desc(client),
 required => {false, recursively},
 importance => importance_of_type(Type),
 aliases => alias_of_type(Type)
@@ -394,7 +396,7 @@ composite_bucket_fields(Types, ClientRef) ->
 [
 {Type,
 ?HOCON(?R_REF(?MODULE, bucket_opts), #{
-desc => ?DESC(?MODULE, Type),
+desc => deprecated_desc(Type),
 required => {false, recursively},
 importance => importance_of_type(Type),
 aliases => alias_of_type(Type)
@@ -406,7 +408,7 @@ composite_bucket_fields(Types, ClientRef) ->
 ?HOCON(
 ?R_REF(?MODULE, ClientRef),
 #{
-desc => ?DESC(client),
+desc => deprecated_desc(client),
 required => {false, recursively}
 }
 )}
@@ -414,10 +416,10 @@ composite_bucket_fields(Types, ClientRef) ->
 
 fields_of_bucket(Default) ->
 [
-{rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => Default})},
+{rate, ?HOCON(rate_type(), #{desc => deprecated_desc(rate), default => Default})},
 {burst,
 ?HOCON(burst(), #{
-desc => ?DESC(burst),
+desc => deprecated_desc(burst),
 default => <<"0">>,
 importance => ?IMPORTANCE_HIDDEN,
 aliases => [capacity]
@@ -425,7 +427,7 @@ fields_of_bucket(Default) ->
 {initial,
 ?HOCON(initial(), #{
 default => <<"0">>,
-desc => ?DESC(initial),
+desc => deprecated_desc(initial),
 importance => ?IMPORTANCE_HIDDEN
 })}
 ].
@@ -434,7 +436,7 @@ client_fields(Types) ->
 [
 {Type,
 ?HOCON(?R_REF(client_opts), #{
-desc => ?DESC(Type),
+desc => deprecated_desc(Type),
 required => false,
 importance => importance_of_type(Type),
 aliases => alias_of_type(Type)
@@ -457,3 +459,15 @@ alias_of_type(bytes) ->
 [bytes_in];
 alias_of_type(_) ->
 [].
+
+deprecated_desc(_Field) ->
+<<"Deprecated since v5.0.25">>.
+
+rate_type() ->
+typerefl:alias("string", rate()).
+
+burst_type() ->
+typerefl:alias("string", burst()).
+
+burst_rate_type() ->
+typerefl:alias("string", burst_rate()).
|
@@ -66,7 +66,8 @@
 
 -export([
 is_expired/1,
-update_expiry/1
+update_expiry/1,
+timestamp_now/0
 ]).
 
 -export([
@@ -113,14 +114,13 @@ make(From, Topic, Payload) ->
 emqx_types:payload()
 ) -> emqx_types:message().
 make(From, QoS, Topic, Payload) when ?QOS_0 =< QoS, QoS =< ?QOS_2 ->
-Now = erlang:system_time(millisecond),
 #message{
 id = emqx_guid:gen(),
 qos = QoS,
 from = From,
 topic = Topic,
 payload = Payload,
-timestamp = Now
+timestamp = timestamp_now()
 }.
 
 -spec make(
@@ -137,7 +137,6 @@ make(From, QoS, Topic, Payload, Flags, Headers) when
 is_map(Flags),
 is_map(Headers)
 ->
-Now = erlang:system_time(millisecond),
 #message{
 id = emqx_guid:gen(),
 qos = QoS,
@@ -146,7 +145,7 @@ make(From, QoS, Topic, Payload, Flags, Headers) when
 headers = Headers,
 topic = Topic,
 payload = Payload,
-timestamp = Now
+timestamp = timestamp_now()
 }.
 
 -spec make(
@@ -164,7 +163,6 @@ make(MsgId, From, QoS, Topic, Payload, Flags, Headers) when
 is_map(Flags),
 is_map(Headers)
 ->
-Now = erlang:system_time(millisecond),
 #message{
 id = MsgId,
 qos = QoS,
@@ -173,7 +171,7 @@ make(MsgId, From, QoS, Topic, Payload, Flags, Headers) when
 headers = Headers,
 topic = Topic,
 payload = Payload,
-timestamp = Now
+timestamp = timestamp_now()
 }.
 
 %% optimistic esitmation of a message size after serialization
@@ -403,6 +401,11 @@ from_map(#{
 extra = Extra
 }.
 
+%% @doc Get current timestamp in milliseconds.
+-spec timestamp_now() -> integer().
+timestamp_now() ->
+erlang:system_time(millisecond).
+
 %% MilliSeconds
 elapsed(Since) ->
-max(0, erlang:system_time(millisecond) - Since).
+max(0, timestamp_now() - Since).
|
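A small usage sketch for the helpers above, assuming timestamp_now/0 is exported as the hunk's export-list change shows and elapsed/1 is callable the same way:

%% elapsed/1 is max(0, timestamp_now() - Since), so a timestamp taken
%% 25 ms in the past yields at least 25.
Since = emqx_message:timestamp_now() - 25,
true = emqx_message:elapsed(Since) >= 25.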
@@ -83,7 +83,7 @@ do_check_pass({_SimpleHash, _Salt, _SaltPosition} = HashParams, PasswordHash, Pa
 compare_secure(Hash, PasswordHash).
 
 -spec hash(hash_params(), password()) -> password_hash().
-hash({pbkdf2, MacFun, Salt, Iterations, DKLength}, Password) ->
+hash({pbkdf2, MacFun, Salt, Iterations, DKLength}, Password) when Iterations > 0 ->
 case pbkdf2(MacFun, Password, Salt, Iterations, DKLength) of
 {ok, HashPasswd} ->
 hex(HashPasswd);
|
@@ -23,16 +23,12 @@
 
 %% Message persistence
 -export([
-persist/1,
-serialize/1,
-deserialize/1
+persist/1
 ]).
 
-%% FIXME
--define(DS_SHARD_ID, <<"local">>).
--define(DEFAULT_KEYSPACE, default).
--define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}).
+-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message).
 
+%% FIXME
 -define(WHEN_ENABLED(DO),
 case is_store_enabled() of
 true -> DO;
@@ -44,18 +40,10 @@
 
 init() ->
 ?WHEN_ENABLED(begin
-ok = emqx_ds:ensure_shard(
-?DS_SHARD,
-#{
-dir => filename:join([
-emqx:data_dir(),
-ds,
-messages,
-?DEFAULT_KEYSPACE,
-?DS_SHARD_ID
-])
-}
-),
+ok = emqx_ds:open_db(?PERSISTENT_MESSAGE_DB, #{
+backend => builtin,
+storage => {emqx_ds_storage_bitfield_lts, #{}}
+}),
 ok = emqx_persistent_session_ds_router:init_tables(),
 ok = emqx_persistent_session_ds:create_tables(),
 ok
@@ -82,19 +70,11 @@ persist(Msg) ->
 needs_persistence(Msg) ->
 not (emqx_message:get_flag(dup, Msg) orelse emqx_message:is_sys(Msg)).
 
+-spec store_message(emqx_types:message()) -> emqx_ds:store_batch_result().
 store_message(Msg) ->
-ID = emqx_message:id(Msg),
-Timestamp = emqx_guid:timestamp(ID),
-Topic = emqx_topic:words(emqx_message:topic(Msg)),
-emqx_ds_storage_layer:store(?DS_SHARD, ID, Timestamp, Topic, serialize(Msg)).
+emqx_ds:store_batch(?PERSISTENT_MESSAGE_DB, [Msg]).
 
 has_subscribers(#message{topic = Topic}) ->
 emqx_persistent_session_ds_router:has_any_route(Topic).
 
 %%
-
-serialize(Msg) ->
-term_to_binary(emqx_message:to_map(Msg)).
-
-deserialize(Bin) ->
-emqx_message:from_map(binary_to_term(Bin)).
|
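The rewritten module hands persistence over to the emqx_ds durable-storage API: init/0 opens the ?PERSISTENT_MESSAGE_DB database with the builtin backend, and store_message/1 writes whole #message{} records with emqx_ds:store_batch/2 instead of serializing them by hand. A rough sketch of the matching read path, using only the emqx_ds calls that appear elsewhere in this change set (topic filter, start time and batch size are illustrative):

%% Sketch only: fetch everything stored under a topic filter since StartTime.
read_back_example() ->
    TopicFilter = emqx_topic:words(<<"t/#">>),
    StartTime = 0,
    lists:flatmap(
        fun({_Rank, Stream}) ->
            {ok, It} = emqx_ds:make_iterator(?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartTime),
            {ok, _NextIt, Messages} = emqx_ds:next(?PERSISTENT_MESSAGE_DB, It, 100),
            Messages
        end,
        emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartTime)
    ).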
@@ -0,0 +1,314 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+%% @doc This module implements the routines for replaying streams of
+%% messages.
+-module(emqx_persistent_message_ds_replayer).
+
+%% API:
+-export([new/0, next_packet_id/1, replay/2, commit_offset/3, poll/3, n_inflight/1]).
+
+%% internal exports:
+-export([]).
+
+-export_type([inflight/0]).
+
+-include_lib("emqx/include/logger.hrl").
+-include("emqx_persistent_session_ds.hrl").
+
+-ifdef(TEST).
+-include_lib("proper/include/proper.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-endif.
+
+%%================================================================================
+%% Type declarations
+%%================================================================================
+
+%% Note: sequence numbers are monotonic; they don't wrap around:
+-type seqno() :: non_neg_integer().
+
+-record(range, {
+    stream :: emqx_ds:stream(),
+    first :: seqno(),
+    last :: seqno(),
+    iterator_next :: emqx_ds:iterator() | undefined
+}).
+
+-type range() :: #range{}.
+
+-record(inflight, {
+    next_seqno = 0 :: seqno(),
+    acked_seqno = 0 :: seqno(),
+    offset_ranges = [] :: [range()]
+}).
+
+-opaque inflight() :: #inflight{}.
+
+%%================================================================================
+%% API functions
+%%================================================================================
+
+-spec new() -> inflight().
+new() ->
+    #inflight{}.
+
+-spec next_packet_id(inflight()) -> {emqx_types:packet_id(), inflight()}.
+next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqNo}) ->
+    Inflight = Inflight0#inflight{next_seqno = LastSeqNo + 1},
+    case LastSeqNo rem 16#10000 of
+        0 ->
+            %% We skip sequence numbers that lead to PacketId = 0 to
+            %% simplify math. Note: it leads to occasional gaps in the
+            %% sequence numbers.
+            next_packet_id(Inflight);
+        PacketId ->
+            {PacketId, Inflight}
+    end.
+
+-spec n_inflight(inflight()) -> non_neg_integer().
+n_inflight(#inflight{next_seqno = NextSeqNo, acked_seqno = AckedSeqno}) ->
+    %% NOTE: this function assumes that gaps in the sequence ID occur
+    %% _only_ when the packet ID wraps:
+    case AckedSeqno >= ((NextSeqNo bsr 16) bsl 16) of
+        true ->
+            NextSeqNo - AckedSeqno;
+        false ->
+            NextSeqNo - AckedSeqno - 1
+    end.
+
+-spec replay(emqx_persistent_session_ds:id(), inflight()) ->
+    emqx_session:replies().
+replay(_SessionId, _Inflight = #inflight{offset_ranges = _Ranges}) ->
+    [].
+
+-spec commit_offset(emqx_persistent_session_ds:id(), emqx_types:packet_id(), inflight()) ->
+    {_IsValidOffset :: boolean(), inflight()}.
+commit_offset(
+    SessionId,
+    PacketId,
+    Inflight0 = #inflight{
+        acked_seqno = AckedSeqno0, next_seqno = NextSeqNo, offset_ranges = Ranges0
+    }
+) ->
+    AckedSeqno =
+        case packet_id_to_seqno(NextSeqNo, PacketId) of
+            N when N > AckedSeqno0; AckedSeqno0 =:= 0 ->
+                N;
+            OutOfRange ->
+                ?SLOG(warning, #{
+                    msg => "out-of-order_ack",
+                    prev_seqno => AckedSeqno0,
+                    acked_seqno => OutOfRange,
+                    next_seqno => NextSeqNo,
+                    packet_id => PacketId
+                }),
+                AckedSeqno0
+        end,
+    Ranges = lists:filter(
+        fun(#range{stream = Stream, last = LastSeqno, iterator_next = ItNext}) ->
+            case LastSeqno =< AckedSeqno of
+                true ->
+                    %% This range has been fully
+                    %% acked. Remove it and replace saved
+                    %% iterator with the trailing iterator.
+                    update_iterator(SessionId, Stream, ItNext),
+                    false;
+                false ->
+                    %% This range still has unacked
+                    %% messages:
+                    true
+            end
+        end,
+        Ranges0
+    ),
+    Inflight = Inflight0#inflight{acked_seqno = AckedSeqno, offset_ranges = Ranges},
+    {true, Inflight}.
+
+-spec poll(emqx_persistent_session_ds:id(), inflight(), pos_integer()) ->
+    {emqx_session:replies(), inflight()}.
+poll(SessionId, Inflight0, WindowSize) when WindowSize > 0, WindowSize < 16#7fff ->
+    #inflight{next_seqno = NextSeqNo0, acked_seqno = AckedSeqno} =
+        Inflight0,
+    FetchThreshold = max(1, WindowSize div 2),
+    FreeSpace = AckedSeqno + WindowSize - NextSeqNo0,
+    case FreeSpace >= FetchThreshold of
+        false ->
+            %% TODO: this branch is meant to avoid fetching data from
+            %% the DB in chunks that are too small. However, this
+            %% logic is not exactly good for the latency. Can the
+            %% client get stuck even?
+            {[], Inflight0};
+        true ->
+            Streams = shuffle(get_streams(SessionId)),
+            fetch(SessionId, Inflight0, Streams, FreeSpace, [])
+    end.
+
+%%================================================================================
+%% Internal exports
+%%================================================================================
+
+%%================================================================================
+%% Internal functions
+%%================================================================================
+
+fetch(_SessionId, Inflight, _Streams = [], _N, Acc) ->
+    {lists:reverse(Acc), Inflight};
+fetch(_SessionId, Inflight, _Streams, 0, Acc) ->
+    {lists:reverse(Acc), Inflight};
+fetch(SessionId, Inflight0, [Stream | Streams], N, Publishes0) ->
+    #inflight{next_seqno = FirstSeqNo, offset_ranges = Ranges0} = Inflight0,
+    ItBegin = get_last_iterator(SessionId, Stream, Ranges0),
+    {ok, ItEnd, Messages} = emqx_ds:next(?PERSISTENT_MESSAGE_DB, ItBegin, N),
+    {NMessages, Publishes, Inflight1} =
+        lists:foldl(
+            fun(Msg, {N0, PubAcc0, InflightAcc0}) ->
+                {PacketId, InflightAcc} = next_packet_id(InflightAcc0),
+                PubAcc = [{PacketId, Msg} | PubAcc0],
+                {N0 + 1, PubAcc, InflightAcc}
+            end,
+            {0, Publishes0, Inflight0},
+            Messages
+        ),
+    #inflight{next_seqno = LastSeqNo} = Inflight1,
+    case NMessages > 0 of
+        true ->
+            Range = #range{
+                first = FirstSeqNo,
+                last = LastSeqNo - 1,
+                stream = Stream,
+                iterator_next = ItEnd
+            },
+            Inflight = Inflight1#inflight{offset_ranges = Ranges0 ++ [Range]},
+            fetch(SessionId, Inflight, Streams, N - NMessages, Publishes);
+        false ->
+            fetch(SessionId, Inflight1, Streams, N, Publishes)
+    end.
+
+-spec update_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream(), emqx_ds:iterator()) -> ok.
+update_iterator(DSSessionId, Stream, Iterator) ->
+    %% Workaround: we convert `Stream' to a binary before attempting to store it in
+    %% mnesia(rocksdb) because of a bug in `mnesia_rocksdb' when trying to do
+    %% `mnesia:dirty_all_keys' later.
+    StreamBin = term_to_binary(Stream),
+    mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {DSSessionId, StreamBin}, iter = Iterator}).
+
+get_last_iterator(SessionId, Stream, Ranges) ->
+    case lists:keyfind(Stream, #range.stream, lists:reverse(Ranges)) of
+        false ->
+            get_iterator(SessionId, Stream);
+        #range{iterator_next = Next} ->
+            Next
+    end.
+
+-spec get_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream()) -> emqx_ds:iterator().
+get_iterator(DSSessionId, Stream) ->
+    %% See comment in `update_iterator'.
+    StreamBin = term_to_binary(Stream),
+    Id = {DSSessionId, StreamBin},
+    [#ds_iter{iter = It}] = mnesia:dirty_read(?SESSION_ITER_TAB, Id),
+    It.
+
+-spec get_streams(emqx_persistent_session_ds:id()) -> [emqx_ds:stream()].
+get_streams(SessionId) ->
+    lists:map(
+        fun(#ds_stream{stream = Stream}) ->
+            Stream
+        end,
+        mnesia:dirty_read(?SESSION_STREAM_TAB, SessionId)
+    ).
+
+%% Reconstruct session counter by adding most significant bits from
+%% the current counter to the packet id.
+-spec packet_id_to_seqno(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer().
+packet_id_to_seqno(NextSeqNo, PacketId) ->
+    Epoch = NextSeqNo bsr 16,
+    case packet_id_to_seqno_(Epoch, PacketId) of
+        N when N =< NextSeqNo ->
+            N;
+        _ ->
+            packet_id_to_seqno_(Epoch - 1, PacketId)
+    end.
+
+-spec packet_id_to_seqno_(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer().
+packet_id_to_seqno_(Epoch, PacketId) ->
+    (Epoch bsl 16) + PacketId.
+
+-spec shuffle([A]) -> [A].
+shuffle(L0) ->
+    L1 = lists:map(
+        fun(A) ->
+            {rand:uniform(), A}
+        end,
+        L0
+    ),
+    L2 = lists:sort(L1),
+    {_, L} = lists:unzip(L2),
+    L.
+
+-ifdef(TEST).
+
+%% This test only tests boundary conditions (to make sure property-based test didn't skip them):
+packet_id_to_seqno_test() ->
+    %% Packet ID = 1; first epoch:
+    ?assertEqual(1, packet_id_to_seqno(1, 1)),
+    ?assertEqual(1, packet_id_to_seqno(10, 1)),
+    ?assertEqual(1, packet_id_to_seqno(1 bsl 16 - 1, 1)),
+    ?assertEqual(1, packet_id_to_seqno(1 bsl 16, 1)),
+    %% Packet ID = 1; second and 3rd epochs:
+    ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(1 bsl 16 + 1, 1)),
+    ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(2 bsl 16, 1)),
+    ?assertEqual(2 bsl 16 + 1, packet_id_to_seqno(2 bsl 16 + 1, 1)),
+    %% Packet ID = 16#ffff:
+    PID = 1 bsl 16 - 1,
+    ?assertEqual(PID, packet_id_to_seqno(PID, PID)),
+    ?assertEqual(PID, packet_id_to_seqno(1 bsl 16, PID)),
+    ?assertEqual(1 bsl 16 + PID, packet_id_to_seqno(2 bsl 16, PID)),
+    ok.
+
+packet_id_to_seqno_test_() ->
+    Opts = [{numtests, 1000}, {to_file, user}],
+    {timeout, 30, fun() -> ?assert(proper:quickcheck(packet_id_to_seqno_prop(), Opts)) end}.
+
+packet_id_to_seqno_prop() ->
+    ?FORALL(
+        NextSeqNo,
+        next_seqno_gen(),
+        ?FORALL(
+            SeqNo,
+            seqno_gen(NextSeqNo),
+            begin
+                PacketId = SeqNo rem 16#10000,
+                ?assertEqual(SeqNo, packet_id_to_seqno(NextSeqNo, PacketId)),
+                true
+            end
+        )
+    ).
+
+next_seqno_gen() ->
+    ?LET(
+        {Epoch, Offset},
+        {non_neg_integer(), non_neg_integer()},
+        Epoch bsl 16 + Offset
+    ).
+
+seqno_gen(NextSeqNo) ->
+    WindowSize = 1 bsl 16 - 1,
+    Min = max(0, NextSeqNo - WindowSize),
+    Max = max(0, NextSeqNo - 1),
+    range(Min, Max).
+
+-endif.
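The replayer derives MQTT packet IDs from a monotonically growing sequence number (PacketId = SeqNo rem 16#10000, skipping values that would map to 0) and reconstructs the sequence number on acknowledgement by prepending the current epoch (NextSeqNo bsr 16), falling back to the previous epoch when the candidate would overshoot NextSeqNo. A worked example in the style of the boundary test above (illustrative only; packet_id_to_seqno/2 is internal to the module, so such a case would live inside its TEST block):

packet_id_to_seqno_worked_example_test() ->
    %% NextSeqNo = 65560 lies in epoch 1 (65560 bsr 16), so packet id 10
    %% reconstructs to (1 bsl 16) + 10 = 65546, which is =< NextSeqNo:
    ?assertEqual(65546, packet_id_to_seqno(65560, 10)),
    %% NextSeqNo = 65540: packet id 65000 would give 130536 in epoch 1,
    %% overshooting NextSeqNo, so the previous epoch is used instead:
    ?assertEqual(65000, packet_id_to_seqno(65540, 65000)).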
@@ -16,11 +16,16 @@

 -module(emqx_persistent_session_ds).

+-behaviour(emqx_session).
+
 -include("emqx.hrl").
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").
+-include_lib("stdlib/include/ms_transform.hrl").

 -include("emqx_mqtt.hrl").

+-include("emqx_persistent_session_ds.hrl").
+
 %% Session API
 -export([
     create/3,
@@ -50,7 +55,7 @@
 -export([
     deliver/3,
     replay/3,
-    % handle_timeout/3,
+    handle_timeout/3,
     disconnect/1,
     terminate/2
 ]).
@@ -58,33 +63,33 @@
 %% session table operations
 -export([create_tables/0]).

--ifdef(TEST).
--export([session_open/1]).
--endif.
-
-%% RPC
--export([
-    ensure_iterator_closed_on_all_shards/1,
-    ensure_all_iterators_closed/1
-]).
+%% Remove me later (satisfy checks for an unused BPAPI)
 -export([
     do_open_iterator/3,
     do_ensure_iterator_closed/1,
     do_ensure_all_iterators_closed/1
 ]).

-%% FIXME
--define(DS_SHARD_ID, <<"local">>).
--define(DEFAULT_KEYSPACE, default).
--define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}).
+-ifdef(TEST).
+-export([
+    session_open/1,
+    list_all_sessions/0,
+    list_all_subscriptions/0,
+    list_all_streams/0,
+    list_all_iterators/0
+]).
+-endif.

 %% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be
 %% an atom, in theory (?).
 -type id() :: binary().
--type iterator() :: emqx_ds:iterator().
--type iterator_id() :: emqx_ds:iterator_id().
 -type topic_filter() :: emqx_ds:topic_filter().
--type iterators() :: #{topic_filter() => iterator()}.
+-type subscription_id() :: {id(), topic_filter()}.
+-type subscription() :: #{
+    start_time := emqx_ds:time(),
+    propts := map(),
+    extra := map()
+}.
 -type session() :: #{
     %% Client ID
     id := id(),
@@ -93,7 +98,11 @@
     %% When the session should expire
     expires_at := timestamp() | never,
     %% Client’s Subscriptions.
-    iterators := #{topic() => iterator()},
+    iterators := #{topic() => subscription()},
+    %% Inflight messages
+    inflight := emqx_persistent_message_ds_replayer:inflight(),
+    %% Receive maximum
+    receive_maximum := pos_integer(),
     %%
     props := map()
 }.
@@ -104,19 +113,28 @@
 -type conninfo() :: emqx_session:conninfo().
 -type replies() :: emqx_session:replies().

+-define(STATS_KEYS, [
+    subscriptions_cnt,
+    subscriptions_max,
+    inflight_cnt,
+    inflight_max,
+    next_pkt_id
+]).
+
 -export_type([id/0]).

 %%

 -spec create(clientinfo(), conninfo(), emqx_session:conf()) ->
     session().
-create(#{clientid := ClientID}, _ConnInfo, Conf) ->
+create(#{clientid := ClientID}, ConnInfo, Conf) ->
     % TODO: expiration
-    ensure_session(ClientID, Conf).
+    ensure_timers(),
+    ensure_session(ClientID, ConnInfo, Conf).

 -spec open(clientinfo(), conninfo()) ->
     {_IsPresent :: true, session(), []} | false.
-open(#{clientid := ClientID}, _ConnInfo) ->
+open(#{clientid := ClientID} = _ClientInfo, ConnInfo) ->
     %% NOTE
     %% The fact that we need to concern about discarding all live channels here
     %% is essentially a consequence of the in-memory session design, where we
@@ -125,29 +143,33 @@ open(#{clientid := ClientID}, _ConnInfo) ->
     %% space, and move this call back into `emqx_cm` where it belongs.
     ok = emqx_cm:discard_session(ClientID),
     case open_session(ClientID) of
-        Session = #{} ->
+        Session0 = #{} ->
+            ensure_timers(),
+            ReceiveMaximum = receive_maximum(ConnInfo),
+            Session = Session0#{receive_maximum => ReceiveMaximum},
             {true, Session, []};
         false ->
             false
     end.

-ensure_session(ClientID, Conf) ->
+ensure_session(ClientID, ConnInfo, Conf) ->
     {ok, Session, #{}} = session_ensure_new(ClientID, Conf),
-    Session#{iterators => #{}}.
+    ReceiveMaximum = receive_maximum(ConnInfo),
+    Session#{iterators => #{}, receive_maximum => ReceiveMaximum}.

 open_session(ClientID) ->
     case session_open(ClientID) of
-        {ok, Session, Iterators} ->
-            Session#{iterators => prep_iterators(Iterators)};
+        {ok, Session, Subscriptions} ->
+            Session#{iterators => prep_subscriptions(Subscriptions)};
         false ->
             false
     end.

-prep_iterators(Iterators) ->
+prep_subscriptions(Subscriptions) ->
     maps:fold(
-        fun(Topic, Iterator, Acc) -> Acc#{emqx_topic:join(Topic) => Iterator} end,
+        fun(Topic, Subscription, Acc) -> Acc#{emqx_topic:join(Topic) => Subscription} end,
         #{},
-        Iterators
+        Subscriptions
     ).

 -spec destroy(session() | clientinfo()) -> ok.
@@ -157,7 +179,6 @@ destroy(#{clientid := ClientID}) ->
     destroy_session(ClientID).

 destroy_session(ClientID) ->
-    _ = ensure_all_iterators_closed(ClientID),
     session_drop(ClientID).

 %%--------------------------------------------------------------------
@@ -184,10 +205,10 @@ info(upgrade_qos, #{props := Conf}) ->
     maps:get(upgrade_qos, Conf);
 % info(inflight, #sessmem{inflight = Inflight}) ->
 %     Inflight;
-% info(inflight_cnt, #sessmem{inflight = Inflight}) ->
-%     emqx_inflight:size(Inflight);
-% info(inflight_max, #sessmem{inflight = Inflight}) ->
-%     emqx_inflight:max_size(Inflight);
+info(inflight_cnt, #{inflight := Inflight}) ->
+    emqx_persistent_message_ds_replayer:n_inflight(Inflight);
+info(inflight_max, #{receive_maximum := ReceiveMaximum}) ->
+    ReceiveMaximum;
 info(retry_interval, #{props := Conf}) ->
     maps:get(retry_interval, Conf);
 % info(mqueue, #sessmem{mqueue = MQueue}) ->
@@ -198,8 +219,9 @@ info(retry_interval, #{props := Conf}) ->
 %     emqx_mqueue:max_len(MQueue);
 % info(mqueue_dropped, #sessmem{mqueue = MQueue}) ->
 %     emqx_mqueue:dropped(MQueue);
-info(next_pkt_id, #{}) ->
-    _PacketId = 'TODO';
+info(next_pkt_id, #{inflight := Inflight}) ->
+    {PacketId, _} = emqx_persistent_message_ds_replayer:next_packet_id(Inflight),
+    PacketId;
 % info(awaiting_rel, #sessmem{awaiting_rel = AwaitingRel}) ->
 %     AwaitingRel;
 % info(awaiting_rel_cnt, #sessmem{awaiting_rel = AwaitingRel}) ->
@@ -211,8 +233,7 @@ info(await_rel_timeout, #{props := Conf}) ->

 -spec stats(session()) -> emqx_types:stats().
 stats(Session) ->
-    % TODO: stub
-    info([], Session).
+    info(?STATS_KEYS, Session).

 %%--------------------------------------------------------------------
 %% Client -> Broker: SUBSCRIBE / UNSUBSCRIBE
@@ -245,7 +266,7 @@ unsubscribe(
 ) when is_map_key(TopicFilter, Iters) ->
     Iterator = maps:get(TopicFilter, Iters),
     SubOpts = maps:get(props, Iterator),
-    ok = del_subscription(TopicFilter, Iterator, ID),
+    ok = del_subscription(TopicFilter, ID),
     {ok, Session#{iterators := maps:remove(TopicFilter, Iters)}, SubOpts};
 unsubscribe(
     _TopicFilter,
@@ -271,19 +292,29 @@ get_subscription(TopicFilter, #{iterators := Iters}) ->
     {ok, emqx_types:publish_result(), replies(), session()}
     | {error, emqx_types:reason_code()}.
 publish(_PacketId, Msg, Session) ->
-    % TODO: stub
-    {ok, emqx_broker:publish(Msg), [], Session}.
+    %% TODO:
+    Result = emqx_broker:publish(Msg),
+    {ok, Result, [], Session}.

 %%--------------------------------------------------------------------
 %% Client -> Broker: PUBACK
 %%--------------------------------------------------------------------

+%% FIXME: parts of the commit offset function are mocked
+-dialyzer({nowarn_function, puback/3}).
+
 -spec puback(clientinfo(), emqx_types:packet_id(), session()) ->
     {ok, emqx_types:message(), replies(), session()}
     | {error, emqx_types:reason_code()}.
-puback(_ClientInfo, _PacketId, _Session = #{}) ->
-    % TODO: stub
-    {error, ?RC_PACKET_IDENTIFIER_NOT_FOUND}.
+puback(_ClientInfo, PacketId, Session = #{id := Id, inflight := Inflight0}) ->
+    case emqx_persistent_message_ds_replayer:commit_offset(Id, PacketId, Inflight0) of
+        {true, Inflight} ->
+            %% TODO
+            Msg = #message{},
+            {ok, Msg, [], Session#{inflight => Inflight}};
+        {false, _} ->
+            {error, ?RC_PACKET_IDENTIFIER_NOT_FOUND}
+    end.

 %%--------------------------------------------------------------------
 %% Client -> Broker: PUBREC
@@ -320,10 +351,33 @@ pubcomp(_ClientInfo, _PacketId, _Session = #{}) ->
 %%--------------------------------------------------------------------

 -spec deliver(clientinfo(), [emqx_types:deliver()], session()) ->
-    no_return().
-deliver(_ClientInfo, _Delivers, _Session = #{}) ->
-    % TODO: ensure it's unreachable somehow
-    error(unexpected).
+    {ok, replies(), session()}.
+deliver(_ClientInfo, _Delivers, Session) ->
+    %% TODO: QoS0 and system messages end up here.
+    {ok, [], Session}.
+
+-spec handle_timeout(clientinfo(), _Timeout, session()) ->
+    {ok, replies(), session()} | {ok, replies(), timeout(), session()}.
+handle_timeout(
+    _ClientInfo,
+    pull,
+    Session = #{id := Id, inflight := Inflight0, receive_maximum := ReceiveMaximum}
+) ->
+    {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, ReceiveMaximum),
+    %% TODO: make these values configurable:
+    Timeout =
+        case Publishes of
+            [] ->
+                100;
+            [_ | _] ->
+                0
+        end,
+    ensure_timer(pull, Timeout),
+    {ok, Publishes, Session#{inflight => Inflight}};
+handle_timeout(_ClientInfo, get_streams, Session = #{id := Id}) ->
+    renew_streams(Id),
+    ensure_timer(get_streams),
+    {ok, [], Session}.

 -spec replay(clientinfo(), [], session()) ->
     {ok, replies(), session()}.
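The two handle_timeout/3 clauses above set up the session's polling cadence: each `pull` tick asks the replayer for up to receive_maximum publishes and re-arms itself (a 100 ms back-off when idle, immediately when data was delivered), while the `get_streams` tick refreshes the stream table via renew_streams/1. A simplified sketch of one pull iteration as seen from the caller's side (hypothetical helper; the client-info argument is ignored by the clause above):

%% Sketch only: drive one `pull' tick and report what it produced.
pull_once_example(ClientInfo, Session0) ->
    {ok, Publishes, Session} =
        emqx_persistent_session_ds:handle_timeout(ClientInfo, pull, Session0),
    case Publishes of
        [] -> io:format("idle; next pull re-armed with a 100 ms back-off~n");
        _ -> io:format("delivered ~p publishes; pull re-armed immediately~n", [length(Publishes)])
    end,
    Session.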
@ -344,151 +398,69 @@ terminate(_Reason, _Session = #{}) ->
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
-spec add_subscription(topic(), emqx_types:subopts(), id()) ->
|
-spec add_subscription(topic(), emqx_types:subopts(), id()) ->
|
||||||
emqx_ds:iterator().
|
subscription().
|
||||||
add_subscription(TopicFilterBin, SubOpts, DSSessionID) ->
|
add_subscription(TopicFilterBin, SubOpts, DSSessionID) ->
|
||||||
% N.B.: we chose to update the router before adding the subscription to the
|
%% N.B.: we chose to update the router before adding the subscription to the
|
||||||
% session/iterator table. The reasoning for this is as follows:
|
%% session/iterator table. The reasoning for this is as follows:
|
||||||
%
|
%%
|
||||||
% Messages matching this topic filter should start to be persisted as soon as
|
%% Messages matching this topic filter should start to be persisted as soon as
|
||||||
% possible to avoid missing messages. If this is the first such persistent
|
%% possible to avoid missing messages. If this is the first such persistent
|
||||||
% session subscription, it's important to do so early on.
|
%% session subscription, it's important to do so early on.
|
||||||
%
|
%%
|
||||||
% This could, in turn, lead to some inconsistency: if such a route gets
|
%% This could, in turn, lead to some inconsistency: if such a route gets
|
||||||
% created but the session/iterator data fails to be updated accordingly, we
|
%% created but the session/iterator data fails to be updated accordingly, we
|
||||||
% have a dangling route. To remove such dangling routes, we may have a
|
%% have a dangling route. To remove such dangling routes, we may have a
|
||||||
% periodic GC process that removes routes that do not have a matching
|
%% periodic GC process that removes routes that do not have a matching
|
||||||
% persistent subscription. Also, route operations use dirty mnesia
|
%% persistent subscription. Also, route operations use dirty mnesia
|
||||||
% operations, which inherently have room for inconsistencies.
|
%% operations, which inherently have room for inconsistencies.
|
||||||
%
|
%%
|
||||||
% In practice, we use the iterator reference table as a source of truth,
|
%% In practice, we use the iterator reference table as a source of truth,
|
||||||
% since it is guarded by a transaction context: we consider a subscription
|
%% since it is guarded by a transaction context: we consider a subscription
|
||||||
% operation to be successful if it ended up changing this table. Both router
|
%% operation to be successful if it ended up changing this table. Both router
|
||||||
% and iterator information can be reconstructed from this table, if needed.
|
%% and iterator information can be reconstructed from this table, if needed.
|
||||||
ok = emqx_persistent_session_ds_router:do_add_route(TopicFilterBin, DSSessionID),
|
ok = emqx_persistent_session_ds_router:do_add_route(TopicFilterBin, DSSessionID),
|
||||||
TopicFilter = emqx_topic:words(TopicFilterBin),
|
TopicFilter = emqx_topic:words(TopicFilterBin),
|
||||||
{ok, Iterator, IsNew} = session_add_iterator(
|
{ok, DSSubExt, IsNew} = session_add_subscription(
|
||||||
DSSessionID, TopicFilter, SubOpts
|
DSSessionID, TopicFilter, SubOpts
|
||||||
),
|
),
|
||||||
Ctx = #{iterator => Iterator, is_new => IsNew},
|
?tp(persistent_session_ds_subscription_added, #{sub => DSSubExt, is_new => IsNew}),
|
||||||
?tp(persistent_session_ds_iterator_added, Ctx),
|
%% we'll list streams and open iterators when implementing message replay.
|
||||||
?tp_span(
|
DSSubExt.
|
||||||
persistent_session_ds_open_iterators,
|
|
||||||
Ctx,
|
|
||||||
ok = open_iterator_on_all_shards(TopicFilter, Iterator)
|
|
||||||
),
|
|
||||||
Iterator.
|
|
||||||
|
|
||||||
-spec update_subscription(topic(), iterator(), emqx_types:subopts(), id()) ->
|
-spec update_subscription(topic(), subscription(), emqx_types:subopts(), id()) ->
|
||||||
iterator().
|
subscription().
|
||||||
update_subscription(TopicFilterBin, Iterator, SubOpts, DSSessionID) ->
|
update_subscription(TopicFilterBin, DSSubExt, SubOpts, DSSessionID) ->
|
||||||
TopicFilter = emqx_topic:words(TopicFilterBin),
|
TopicFilter = emqx_topic:words(TopicFilterBin),
|
||||||
{ok, NIterator, false} = session_add_iterator(
|
{ok, NDSSubExt, false} = session_add_subscription(
|
||||||
DSSessionID, TopicFilter, SubOpts
|
DSSessionID, TopicFilter, SubOpts
|
||||||
),
|
),
|
||||||
ok = ?tp(persistent_session_ds_iterator_updated, #{iterator => Iterator}),
|
ok = ?tp(persistent_session_ds_iterator_updated, #{sub => DSSubExt}),
|
||||||
NIterator.
|
NDSSubExt.
|
||||||
|
|
||||||
-spec open_iterator_on_all_shards(emqx_types:words(), emqx_ds:iterator()) -> ok.
|
-spec del_subscription(topic(), id()) ->
|
||||||
open_iterator_on_all_shards(TopicFilter, Iterator) ->
|
|
||||||
?tp(persistent_session_ds_will_open_iterators, #{iterator => Iterator}),
|
|
||||||
%% Note: currently, shards map 1:1 to nodes, but this will change in the future.
|
|
||||||
Nodes = emqx:running_nodes(),
|
|
||||||
Results = emqx_persistent_session_ds_proto_v1:open_iterator(
|
|
||||||
Nodes,
|
|
||||||
TopicFilter,
|
|
||||||
maps:get(start_time, Iterator),
|
|
||||||
maps:get(id, Iterator)
|
|
||||||
),
|
|
||||||
%% TODO
|
|
||||||
%% 1. Handle errors.
|
|
||||||
%% 2. Iterator handles are rocksdb resources, it's doubtful they survive RPC.
|
|
||||||
%% Even if they do, we throw them away here anyway. All in all, we probably should
|
|
||||||
%% hold each of them in a process on the respective node.
|
|
||||||
true = lists:all(fun(Res) -> element(1, Res) =:= ok end, Results),
|
|
||||||
ok.
|
ok.
|
||||||
|
del_subscription(TopicFilterBin, DSSessionId) ->
|
||||||
%% RPC target.
|
|
||||||
-spec do_open_iterator(emqx_types:words(), emqx_ds:time(), emqx_ds:iterator_id()) ->
|
|
||||||
{ok, emqx_ds_storage_layer:iterator()} | {error, _Reason}.
|
|
||||||
do_open_iterator(TopicFilter, StartMS, IteratorID) ->
|
|
||||||
Replay = {TopicFilter, StartMS},
|
|
||||||
emqx_ds_storage_layer:ensure_iterator(?DS_SHARD, IteratorID, Replay).
|
|
||||||
|
|
||||||
-spec del_subscription(topic(), iterator(), id()) ->
|
|
||||||
ok.
|
|
||||||
del_subscription(TopicFilterBin, #{id := IteratorID}, DSSessionID) ->
|
|
||||||
% N.B.: see comments in `?MODULE:add_subscription' for a discussion about the
|
|
||||||
% order of operations here.
|
|
||||||
TopicFilter = emqx_topic:words(TopicFilterBin),
|
TopicFilter = emqx_topic:words(TopicFilterBin),
|
||||||
Ctx = #{iterator_id => IteratorID},
|
|
||||||
?tp_span(
|
?tp_span(
|
||||||
persistent_session_ds_close_iterators,
|
persistent_session_ds_subscription_delete,
|
||||||
Ctx,
|
#{session_id => DSSessionId},
|
||||||
ok = ensure_iterator_closed_on_all_shards(IteratorID)
|
ok = session_del_subscription(DSSessionId, TopicFilter)
|
||||||
),
|
),
|
||||||
?tp_span(
|
?tp_span(
|
||||||
persistent_session_ds_iterator_delete,
|
persistent_session_ds_subscription_route_delete,
|
||||||
Ctx,
|
#{session_id => DSSessionId},
|
||||||
session_del_iterator(DSSessionID, TopicFilter)
|
ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionId)
|
||||||
),
|
).
|
||||||
ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionID).
|
|
||||||
|
|
||||||
-spec ensure_iterator_closed_on_all_shards(emqx_ds:iterator_id()) -> ok.
|
|
||||||
ensure_iterator_closed_on_all_shards(IteratorID) ->
|
|
||||||
%% Note: currently, shards map 1:1 to nodes, but this will change in the future.
|
|
||||||
Nodes = emqx:running_nodes(),
|
|
||||||
Results = emqx_persistent_session_ds_proto_v1:close_iterator(Nodes, IteratorID),
|
|
||||||
%% TODO: handle errors
|
|
||||||
true = lists:all(fun(Res) -> Res =:= {ok, ok} end, Results),
|
|
||||||
ok.
|
|
||||||
|
|
||||||
%% RPC target.
|
|
||||||
-spec do_ensure_iterator_closed(emqx_ds:iterator_id()) -> ok.
|
|
||||||
do_ensure_iterator_closed(IteratorID) ->
|
|
||||||
ok = emqx_ds_storage_layer:discard_iterator(?DS_SHARD, IteratorID),
|
|
||||||
ok.
|
|
||||||
|
|
||||||
-spec ensure_all_iterators_closed(id()) -> ok.
|
|
||||||
ensure_all_iterators_closed(DSSessionID) ->
|
|
||||||
%% Note: currently, shards map 1:1 to nodes, but this will change in the future.
|
|
||||||
Nodes = emqx:running_nodes(),
|
|
||||||
Results = emqx_persistent_session_ds_proto_v1:close_all_iterators(Nodes, DSSessionID),
|
|
||||||
%% TODO: handle errors
|
|
||||||
true = lists:all(fun(Res) -> Res =:= {ok, ok} end, Results),
|
|
||||||
ok.
|
|
||||||
|
|
||||||
%% RPC target.
|
|
||||||
-spec do_ensure_all_iterators_closed(id()) -> ok.
|
|
||||||
do_ensure_all_iterators_closed(DSSessionID) ->
|
|
||||||
ok = emqx_ds_storage_layer:discard_iterator_prefix(?DS_SHARD, DSSessionID),
|
|
||||||
ok.
|
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Session tables operations
|
%% Session tables operations
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
-define(SESSION_TAB, emqx_ds_session).
|
|
||||||
-define(ITERATOR_REF_TAB, emqx_ds_iterator_ref).
|
|
||||||
-define(DS_MRIA_SHARD, emqx_ds_shard).
|
|
||||||
|
|
||||||
-record(session, {
|
|
||||||
%% same as clientid
|
|
||||||
id :: id(),
|
|
||||||
%% creation time
|
|
||||||
created_at :: _Millisecond :: non_neg_integer(),
|
|
||||||
expires_at = never :: _Millisecond :: non_neg_integer() | never,
|
|
||||||
%% for future usage
|
|
||||||
props = #{} :: map()
|
|
||||||
}).
|
|
||||||
|
|
||||||
-record(iterator_ref, {
|
|
||||||
ref_id :: {id(), emqx_ds:topic_filter()},
|
|
||||||
it_id :: emqx_ds:iterator_id(),
|
|
||||||
start_time :: emqx_ds:time(),
|
|
||||||
props = #{} :: map()
|
|
||||||
}).
|
|
||||||
|
|
||||||
create_tables() ->
|
create_tables() ->
|
||||||
|
ok = emqx_ds:open_db(?PERSISTENT_MESSAGE_DB, #{
|
||||||
|
backend => builtin,
|
||||||
|
storage => {emqx_ds_storage_bitfield_lts, #{}}
|
||||||
|
}),
|
||||||
ok = mria:create_table(
|
ok = mria:create_table(
|
||||||
?SESSION_TAB,
|
?SESSION_TAB,
|
||||||
[
|
[
|
||||||
|
@ -500,15 +472,38 @@ create_tables() ->
|
||||||
]
|
]
|
||||||
),
|
),
|
||||||
ok = mria:create_table(
|
ok = mria:create_table(
|
||||||
?ITERATOR_REF_TAB,
|
?SESSION_SUBSCRIPTIONS_TAB,
|
||||||
[
|
[
|
||||||
{rlog_shard, ?DS_MRIA_SHARD},
|
{rlog_shard, ?DS_MRIA_SHARD},
|
||||||
{type, ordered_set},
|
{type, ordered_set},
|
||||||
{storage, storage()},
|
{storage, storage()},
|
||||||
{record_name, iterator_ref},
|
{record_name, ds_sub},
|
||||||
{attributes, record_info(fields, iterator_ref)}
|
{attributes, record_info(fields, ds_sub)}
|
||||||
]
|
]
|
||||||
),
|
),
|
||||||
|
ok = mria:create_table(
|
||||||
|
?SESSION_STREAM_TAB,
|
||||||
|
[
|
||||||
|
{rlog_shard, ?DS_MRIA_SHARD},
|
||||||
|
{type, bag},
|
||||||
|
{storage, storage()},
|
||||||
|
{record_name, ds_stream},
|
||||||
|
{attributes, record_info(fields, ds_stream)}
|
||||||
|
]
|
||||||
|
),
|
||||||
|
ok = mria:create_table(
|
||||||
|
?SESSION_ITER_TAB,
|
||||||
|
[
|
||||||
|
{rlog_shard, ?DS_MRIA_SHARD},
|
||||||
|
{type, set},
|
||||||
|
{storage, storage()},
|
||||||
|
{record_name, ds_iter},
|
||||||
|
{attributes, record_info(fields, ds_iter)}
|
||||||
|
]
|
||||||
|
),
|
||||||
|
ok = mria:wait_for_tables([
|
||||||
|
?SESSION_TAB, ?SESSION_SUBSCRIPTIONS_TAB, ?SESSION_STREAM_TAB, ?SESSION_ITER_TAB
|
||||||
|
]),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
-dialyzer({nowarn_function, storage/0}).
|
-dialyzer({nowarn_function, storage/0}).
|
||||||
|
@ -524,31 +519,29 @@ storage() ->
|
||||||
%% @doc Called when a client connects. This function looks up a
|
%% @doc Called when a client connects. This function looks up a
|
||||||
%% session or returns `false` if previous one couldn't be found.
|
%% session or returns `false` if previous one couldn't be found.
|
||||||
%%
|
%%
|
||||||
%% This function also spawns replay agents for each iterator.
|
|
||||||
%%
|
|
||||||
%% Note: session API doesn't handle session takeovers, it's the job of
|
%% Note: session API doesn't handle session takeovers, it's the job of
|
||||||
%% the broker.
|
%% the broker.
|
||||||
-spec session_open(id()) ->
|
-spec session_open(id()) ->
|
||||||
{ok, session(), iterators()} | false.
|
{ok, session(), #{topic() => subscription()}} | false.
|
||||||
session_open(SessionId) ->
|
session_open(SessionId) ->
|
||||||
transaction(fun() ->
|
transaction(fun() ->
|
||||||
case mnesia:read(?SESSION_TAB, SessionId, write) of
|
case mnesia:read(?SESSION_TAB, SessionId, write) of
|
||||||
[Record = #session{}] ->
|
[Record = #session{}] ->
|
||||||
Session = export_record(Record),
|
Session = export_session(Record),
|
||||||
IteratorRefs = session_read_iterators(SessionId),
|
DSSubs = session_read_subscriptions(SessionId),
|
||||||
Iterators = export_iterators(IteratorRefs),
|
Subscriptions = export_subscriptions(DSSubs),
|
||||||
{ok, Session, Iterators};
|
{ok, Session, Subscriptions};
|
||||||
[] ->
|
[] ->
|
||||||
false
|
false
|
||||||
end
|
end
|
||||||
end).
|
end).
|
||||||
|
|
||||||
-spec session_ensure_new(id(), _Props :: map()) ->
|
-spec session_ensure_new(id(), _Props :: map()) ->
|
||||||
{ok, session(), iterators()}.
|
{ok, session(), #{topic() => subscription()}}.
|
||||||
session_ensure_new(SessionId, Props) ->
|
session_ensure_new(SessionId, Props) ->
|
||||||
transaction(fun() ->
|
transaction(fun() ->
|
||||||
ok = session_drop_iterators(SessionId),
|
ok = session_drop_subscriptions(SessionId),
|
||||||
Session = export_record(session_create(SessionId, Props)),
|
Session = export_session(session_create(SessionId, Props)),
|
||||||
{ok, Session, #{}}
|
{ok, Session, #{}}
|
||||||
end).
|
end).
|
||||||
|
|
||||||
|
@ -557,7 +550,8 @@ session_create(SessionId, Props) ->
|
||||||
id = SessionId,
|
id = SessionId,
|
||||||
created_at = erlang:system_time(millisecond),
|
created_at = erlang:system_time(millisecond),
|
||||||
expires_at = never,
|
expires_at = never,
|
||||||
props = Props
|
props = Props,
|
||||||
|
inflight = emqx_persistent_message_ds_replayer:new()
|
||||||
},
|
},
|
||||||
ok = mnesia:write(?SESSION_TAB, Session, write),
|
ok = mnesia:write(?SESSION_TAB, Session, write),
|
||||||
Session.
|
Session.
|
||||||
|
@ -567,81 +561,194 @@ session_create(SessionId, Props) ->
|
||||||
-spec session_drop(id()) -> ok.
|
-spec session_drop(id()) -> ok.
|
||||||
session_drop(DSSessionId) ->
|
session_drop(DSSessionId) ->
|
||||||
transaction(fun() ->
|
transaction(fun() ->
|
||||||
%% TODO: ensure all iterators from this clientid are closed?
|
ok = session_drop_subscriptions(DSSessionId),
|
||||||
ok = session_drop_iterators(DSSessionId),
|
ok = session_drop_iterators(DSSessionId),
|
||||||
|
ok = session_drop_streams(DSSessionId),
|
||||||
ok = mnesia:delete(?SESSION_TAB, DSSessionId, write)
|
ok = mnesia:delete(?SESSION_TAB, DSSessionId, write)
|
||||||
end).
|
end).
|
||||||
|
|
||||||
session_drop_iterators(DSSessionId) ->
|
-spec session_drop_subscriptions(id()) -> ok.
|
||||||
IteratorRefs = session_read_iterators(DSSessionId),
|
session_drop_subscriptions(DSSessionId) ->
|
||||||
ok = lists:foreach(fun session_del_iterator/1, IteratorRefs).
|
Subscriptions = session_read_subscriptions(DSSessionId),
|
||||||
|
lists:foreach(
|
||||||
|
fun(#ds_sub{id = DSSubId} = DSSub) ->
|
||||||
|
TopicFilter = subscription_id_to_topic_filter(DSSubId),
|
||||||
|
TopicFilterBin = emqx_topic:join(TopicFilter),
|
||||||
|
ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionId),
|
||||||
|
ok = session_del_subscription(DSSub)
|
||||||
|
end,
|
||||||
|
Subscriptions
|
||||||
|
).
|
||||||
|
|
||||||
%% @doc Called when a client subscribes to a topic. Idempotent.
|
%% @doc Called when a client subscribes to a topic. Idempotent.
|
||||||
-spec session_add_iterator(id(), topic_filter(), _Props :: map()) ->
|
-spec session_add_subscription(id(), topic_filter(), _Props :: map()) ->
|
||||||
{ok, iterator(), _IsNew :: boolean()}.
|
{ok, subscription(), _IsNew :: boolean()}.
|
||||||
session_add_iterator(DSSessionId, TopicFilter, Props) ->
|
session_add_subscription(DSSessionId, TopicFilter, Props) ->
|
||||||
IteratorRefId = {DSSessionId, TopicFilter},
|
DSSubId = {DSSessionId, TopicFilter},
|
||||||
transaction(fun() ->
|
transaction(fun() ->
|
||||||
case mnesia:read(?ITERATOR_REF_TAB, IteratorRefId, write) of
|
case mnesia:read(?SESSION_SUBSCRIPTIONS_TAB, DSSubId, write) of
|
||||||
[] ->
|
[] ->
|
||||||
IteratorRef = session_insert_iterator(DSSessionId, TopicFilter, Props),
|
DSSub = session_insert_subscription(DSSessionId, TopicFilter, Props),
|
||||||
Iterator = export_record(IteratorRef),
|
DSSubExt = export_subscription(DSSub),
|
||||||
?tp(
|
?tp(
|
||||||
ds_session_subscription_added,
|
ds_session_subscription_added,
|
||||||
#{iterator => Iterator, session_id => DSSessionId}
|
#{sub => DSSubExt, session_id => DSSessionId}
|
||||||
),
|
),
|
||||||
{ok, Iterator, _IsNew = true};
|
{ok, DSSubExt, _IsNew = true};
|
||||||
[#iterator_ref{} = IteratorRef] ->
|
[#ds_sub{} = DSSub] ->
|
||||||
NIteratorRef = session_update_iterator(IteratorRef, Props),
|
NDSSub = session_update_subscription(DSSub, Props),
|
||||||
NIterator = export_record(NIteratorRef),
|
NDSSubExt = export_subscription(NDSSub),
|
||||||
?tp(
|
?tp(
|
||||||
ds_session_subscription_present,
|
ds_session_subscription_present,
|
||||||
#{iterator => NIterator, session_id => DSSessionId}
|
#{sub => NDSSubExt, session_id => DSSessionId}
|
||||||
),
|
),
|
||||||
{ok, NIterator, _IsNew = false}
|
{ok, NDSSubExt, _IsNew = false}
|
||||||
end
|
end
|
||||||
end).
|
end).
|
||||||
|
|
||||||
session_insert_iterator(DSSessionId, TopicFilter, Props) ->
|
-spec session_insert_subscription(id(), topic_filter(), map()) -> ds_sub().
|
||||||
{IteratorId, StartMS} = new_iterator_id(DSSessionId),
|
session_insert_subscription(DSSessionId, TopicFilter, Props) ->
|
||||||
IteratorRef = #iterator_ref{
|
{DSSubId, StartMS} = new_subscription_id(DSSessionId, TopicFilter),
|
||||||
ref_id = {DSSessionId, TopicFilter},
|
DSSub = #ds_sub{
|
||||||
it_id = IteratorId,
|
id = DSSubId,
|
||||||
start_time = StartMS,
|
start_time = StartMS,
|
||||||
props = Props
|
props = Props,
|
||||||
|
extra = #{}
|
||||||
},
|
},
|
||||||
ok = mnesia:write(?ITERATOR_REF_TAB, IteratorRef, write),
|
ok = mnesia:write(?SESSION_SUBSCRIPTIONS_TAB, DSSub, write),
|
||||||
IteratorRef.
|
DSSub.
|
||||||
|
|
||||||
session_update_iterator(IteratorRef, Props) ->
|
-spec session_update_subscription(ds_sub(), map()) -> ds_sub().
|
||||||
NIteratorRef = IteratorRef#iterator_ref{props = Props},
|
session_update_subscription(DSSub, Props) ->
|
||||||
ok = mnesia:write(?ITERATOR_REF_TAB, NIteratorRef, write),
|
NDSSub = DSSub#ds_sub{props = Props},
|
||||||
NIteratorRef.
|
ok = mnesia:write(?SESSION_SUBSCRIPTIONS_TAB, NDSSub, write),
|
||||||
|
NDSSub.
|
||||||
|
|
||||||
%% @doc Called when a client unsubscribes from a topic.
|
session_del_subscription(DSSessionId, TopicFilter) ->
|
||||||
-spec session_del_iterator(id(), topic_filter()) -> ok.
|
DSSubId = {DSSessionId, TopicFilter},
|
||||||
session_del_iterator(DSSessionId, TopicFilter) ->
|
|
||||||
IteratorRefId = {DSSessionId, TopicFilter},
|
|
||||||
transaction(fun() ->
|
transaction(fun() ->
|
||||||
mnesia:delete(?ITERATOR_REF_TAB, IteratorRefId, write)
|
mnesia:delete(?SESSION_SUBSCRIPTIONS_TAB, DSSubId, write)
|
||||||
end).
|
end).
|
||||||
|
|
||||||
session_del_iterator(#iterator_ref{ref_id = IteratorRefId}) ->
|
session_del_subscription(#ds_sub{id = DSSubId}) ->
|
||||||
mnesia:delete(?ITERATOR_REF_TAB, IteratorRefId, write).
|
mnesia:delete(?SESSION_SUBSCRIPTIONS_TAB, DSSubId, write).
|
||||||
|
|
||||||
session_read_iterators(DSSessionId) ->
|
session_read_subscriptions(DSSessionId) ->
|
||||||
% NOTE: somewhat convoluted way to trick dialyzer
|
MS = ets:fun2ms(
|
||||||
Pat = erlang:make_tuple(record_info(size, iterator_ref), '_', [
|
fun(Sub = #ds_sub{id = {Sess, _}}) when Sess =:= DSSessionId ->
|
||||||
{1, iterator_ref},
|
Sub
|
||||||
{#iterator_ref.ref_id, {DSSessionId, '_'}}
|
end
|
||||||
]),
|
),
|
||||||
mnesia:match_object(?ITERATOR_REF_TAB, Pat, read).
|
mnesia:select(?SESSION_SUBSCRIPTIONS_TAB, MS, read).
|
||||||
|
|
||||||
-spec new_iterator_id(id()) -> {iterator_id(), emqx_ds:time()}.
|
-spec new_subscription_id(id(), topic_filter()) -> {subscription_id(), integer()}.
|
||||||
new_iterator_id(DSSessionId) ->
|
new_subscription_id(DSSessionId, TopicFilter) ->
|
||||||
NowMS = erlang:system_time(microsecond),
|
%% Note: here we use _milliseconds_ to match with the timestamp
|
||||||
IteratorId = <<DSSessionId/binary, (emqx_guid:gen())/binary>>,
|
%% field of `#message' record.
|
||||||
{IteratorId, NowMS}.
|
NowMS = erlang:system_time(millisecond),
|
||||||
|
DSSubId = {DSSessionId, TopicFilter},
|
||||||
|
{DSSubId, NowMS}.
|
||||||
|
|
||||||
|
-spec subscription_id_to_topic_filter(subscription_id()) -> topic_filter().
|
||||||
|
subscription_id_to_topic_filter({_DSSessionId, TopicFilter}) ->
|
||||||
|
TopicFilter.
|
||||||
|
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% RPC targets (v1)
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
%% RPC target.
|
||||||
|
-spec do_open_iterator(emqx_types:words(), emqx_ds:time(), emqx_ds:iterator_id()) ->
|
||||||
|
{ok, emqx_ds_storage_layer:iterator()} | {error, _Reason}.
|
||||||
|
do_open_iterator(_TopicFilter, _StartMS, _IteratorID) ->
|
||||||
|
{error, not_implemented}.
|
||||||
|
|
||||||
|
%% RPC target.
|
||||||
|
-spec do_ensure_iterator_closed(emqx_ds:iterator_id()) -> ok.
|
||||||
|
do_ensure_iterator_closed(_IteratorID) ->
|
||||||
|
ok.
|
||||||
|
|
||||||
|
%% RPC target.
|
||||||
|
-spec do_ensure_all_iterators_closed(id()) -> ok.
|
||||||
|
do_ensure_all_iterators_closed(_DSSessionID) ->
|
||||||
|
ok.
|
||||||
|
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Reading batches
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
-spec renew_streams(id()) -> ok.
|
||||||
|
renew_streams(DSSessionId) ->
|
||||||
|
Subscriptions = ro_transaction(fun() -> session_read_subscriptions(DSSessionId) end),
|
||||||
|
ExistingStreams = ro_transaction(fun() -> mnesia:read(?SESSION_STREAM_TAB, DSSessionId) end),
|
||||||
|
lists:foreach(
|
||||||
|
fun(#ds_sub{id = {_, TopicFilter}, start_time = StartTime}) ->
|
||||||
|
renew_streams(DSSessionId, ExistingStreams, TopicFilter, StartTime)
|
||||||
|
end,
|
||||||
|
Subscriptions
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec renew_streams(id(), [ds_stream()], emqx_ds:topic_filter(), emqx_ds:time()) -> ok.
|
||||||
|
renew_streams(DSSessionId, ExistingStreams, TopicFilter, StartTime) ->
|
||||||
|
AllStreams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartTime),
|
||||||
|
transaction(
|
||||||
|
fun() ->
|
||||||
|
lists:foreach(
|
||||||
|
fun({Rank, Stream}) ->
|
||||||
|
Rec = #ds_stream{
|
||||||
|
session = DSSessionId,
|
||||||
|
topic_filter = TopicFilter,
|
||||||
|
stream = Stream,
|
||||||
|
rank = Rank
|
||||||
|
},
|
||||||
|
case lists:member(Rec, ExistingStreams) of
|
||||||
|
true ->
|
||||||
|
ok;
|
||||||
|
false ->
|
||||||
|
mnesia:write(?SESSION_STREAM_TAB, Rec, write),
|
||||||
|
{ok, Iterator} = emqx_ds:make_iterator(
|
||||||
|
?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartTime
|
||||||
|
),
|
||||||
|
%% Workaround: we convert `Stream' to a binary before
|
||||||
|
%% attempting to store it in mnesia(rocksdb) because of a bug
|
||||||
|
%% in `mnesia_rocksdb' when trying to do
|
||||||
|
%% `mnesia:dirty_all_keys' later.
|
||||||
|
StreamBin = term_to_binary(Stream),
|
||||||
|
IterRec = #ds_iter{id = {DSSessionId, StreamBin}, iter = Iterator},
|
||||||
|
mnesia:write(?SESSION_ITER_TAB, IterRec, write)
|
||||||
|
end
|
||||||
|
end,
|
||||||
|
AllStreams
|
||||||
|
)
|
||||||
|
end
|
||||||
|
).
|
||||||
|
|
||||||
|
%% must be called inside a transaction
|
||||||
|
-spec session_drop_streams(id()) -> ok.
|
||||||
|
session_drop_streams(DSSessionId) ->
|
||||||
|
MS = ets:fun2ms(
|
||||||
|
fun(#ds_stream{session = DSSessionId0}) when DSSessionId0 =:= DSSessionId ->
|
||||||
|
DSSessionId0
|
||||||
|
end
|
||||||
|
),
|
||||||
|
StreamIDs = mnesia:select(?SESSION_STREAM_TAB, MS, write),
|
||||||
|
lists:foreach(fun(Key) -> mnesia:delete(?SESSION_STREAM_TAB, Key, write) end, StreamIDs).
|
||||||
|
|
||||||
|
%% must be called inside a transaction
|
||||||
|
-spec session_drop_iterators(id()) -> ok.
|
||||||
|
session_drop_iterators(DSSessionId) ->
|
||||||
|
MS = ets:fun2ms(
|
||||||
|
fun(#ds_iter{id = {DSSessionId0, StreamBin}}) when DSSessionId0 =:= DSSessionId ->
|
||||||
|
StreamBin
|
||||||
|
end
|
||||||
|
),
|
||||||
|
+    StreamBins = mnesia:select(?SESSION_ITER_TAB, MS, write),
+    lists:foreach(
+        fun(StreamBin) ->
+            mnesia:delete(?SESSION_ITER_TAB, {DSSessionId, StreamBin}, write)
+        end,
+        StreamBins
+    ).

 %%--------------------------------------------------------------------------------

@@ -649,23 +756,110 @@ transaction(Fun) ->
     {atomic, Res} = mria:transaction(?DS_MRIA_SHARD, Fun),
     Res.

+ro_transaction(Fun) ->
+    {atomic, Res} = mria:ro_transaction(?DS_MRIA_SHARD, Fun),
+    Res.

 %%--------------------------------------------------------------------------------

-export_iterators(IteratorRefs) ->
+export_subscriptions(DSSubs) ->
     lists:foldl(
-        fun(IteratorRef = #iterator_ref{ref_id = {_DSSessionId, TopicFilter}}, Acc) ->
-            Acc#{TopicFilter => export_record(IteratorRef)}
+        fun(DSSub = #ds_sub{id = {_DSSessionId, TopicFilter}}, Acc) ->
+            Acc#{TopicFilter => export_subscription(DSSub)}
         end,
         #{},
-        IteratorRefs
+        DSSubs
     ).

-export_record(#session{} = Record) ->
-    export_record(Record, #session.id, [id, created_at, expires_at, props], #{});
-export_record(#iterator_ref{} = Record) ->
-    export_record(Record, #iterator_ref.it_id, [id, start_time, props], #{}).
+export_session(#session{} = Record) ->
+    export_record(Record, #session.id, [id, created_at, expires_at, inflight, props], #{}).
+export_subscription(#ds_sub{} = Record) ->
+    export_record(Record, #ds_sub.start_time, [start_time, props, extra], #{}).

 export_record(Record, I, [Field | Rest], Acc) ->
     export_record(Record, I + 1, Rest, Acc#{Field => element(I, Record)});
 export_record(_, _, [], Acc) ->
     Acc.

+%% TODO: find a more reliable way to perform actions that have side
+%% effects. Add `CBM:init' callback to the session behavior?
+ensure_timers() ->
+    ensure_timer(pull),
+    ensure_timer(get_streams).
+
+-spec ensure_timer(pull | get_streams) -> ok.
+ensure_timer(Type) ->
+    ensure_timer(Type, 100).
+
+-spec ensure_timer(pull | get_streams, non_neg_integer()) -> ok.
+ensure_timer(Type, Timeout) ->
+    _ = emqx_utils:start_timer(Timeout, {emqx_session, Type}),
+    ok.
+
+-spec receive_maximum(conninfo()) -> pos_integer().
+receive_maximum(ConnInfo) ->
+    %% Note: the default value should be always set by the channel
+    %% with respect to the zone configuration, but the type spec
+    %% indicates that it's optional.
+    maps:get(receive_maximum, ConnInfo, 65_535).
+
+-ifdef(TEST).
+list_all_sessions() ->
+    DSSessionIds = mnesia:dirty_all_keys(?SESSION_TAB),
+    Sessions = lists:map(
+        fun(SessionID) ->
+            {ok, Session, Subscriptions} = session_open(SessionID),
+            {SessionID, #{session => Session, subscriptions => Subscriptions}}
+        end,
+        DSSessionIds
+    ),
+    maps:from_list(Sessions).
+
+list_all_subscriptions() ->
+    DSSubIds = mnesia:dirty_all_keys(?SESSION_SUBSCRIPTIONS_TAB),
+    Subscriptions = lists:map(
+        fun(DSSubId) ->
+            [DSSub] = mnesia:dirty_read(?SESSION_SUBSCRIPTIONS_TAB, DSSubId),
+            {DSSubId, export_subscription(DSSub)}
+        end,
+        DSSubIds
+    ),
+    maps:from_list(Subscriptions).
+
+list_all_streams() ->
+    DSStreamIds = mnesia:dirty_all_keys(?SESSION_STREAM_TAB),
+    DSStreams = lists:map(
+        fun(DSStreamId) ->
+            Records = mnesia:dirty_read(?SESSION_STREAM_TAB, DSStreamId),
+            ExtDSStreams =
+                lists:map(
+                    fun(Record) ->
+                        export_record(
+                            Record,
+                            #ds_stream.session,
+                            [session, topic_filter, stream, rank],
+                            #{}
+                        )
+                    end,
+                    Records
+                ),
+            {DSStreamId, ExtDSStreams}
+        end,
+        DSStreamIds
+    ),
+    maps:from_list(DSStreams).
+
+list_all_iterators() ->
+    DSIterIds = mnesia:dirty_all_keys(?SESSION_ITER_TAB),
+    DSIters = lists:map(
+        fun(DSIterId) ->
+            [Record] = mnesia:dirty_read(?SESSION_ITER_TAB, DSIterId),
+            {DSIterId, export_record(Record, #ds_iter.id, [id, iter], #{})}
+        end,
+        DSIterIds
+    ),
+    maps:from_list(DSIters).
+
+%% ifdef(TEST)
+-endif.
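%% Illustrative sketch, not part of the diff above: how export_record/4 walks a
%% record positionally into a map. The #example{} record and its values are
%% hypothetical, defined only for this sketch; real callers pass #session{},
%% #ds_sub{} or #ds_stream{} records.
-record(example, {id, props = #{}}).

export_record_sketch() ->
    Rec = #example{id = <<"client1">>},
    %% Starts at field index #example.id (= 2) and zips the remaining field names,
    %% yielding #{id => <<"client1">>, props => #{}}.
    export_record(Rec, #example.id, [id, props], #{}).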
@@ -0,0 +1,60 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+-ifndef(EMQX_PERSISTENT_SESSION_DS_HRL_HRL).
+-define(EMQX_PERSISTENT_SESSION_DS_HRL_HRL, true).
+
+-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message).
+
+-define(SESSION_TAB, emqx_ds_session).
+-define(SESSION_SUBSCRIPTIONS_TAB, emqx_ds_session_subscriptions).
+-define(SESSION_STREAM_TAB, emqx_ds_stream_tab).
+-define(SESSION_ITER_TAB, emqx_ds_iter_tab).
+-define(DS_MRIA_SHARD, emqx_ds_session_shard).
+
+-record(ds_sub, {
+    id :: emqx_persistent_session_ds:subscription_id(),
+    start_time :: emqx_ds:time(),
+    props = #{} :: map(),
+    extra = #{} :: map()
+}).
+-type ds_sub() :: #ds_sub{}.
+
+-record(ds_stream, {
+    session :: emqx_persistent_session_ds:id(),
+    topic_filter :: emqx_ds:topic_filter(),
+    stream :: emqx_ds:stream(),
+    rank :: emqx_ds:stream_rank()
+}).
+-type ds_stream() :: #ds_stream{}.
+-type ds_stream_bin() :: binary().
+
+-record(ds_iter, {
+    id :: {emqx_persistent_session_ds:id(), ds_stream_bin()},
+    iter :: emqx_ds:iterator()
+}).
+
+-record(session, {
+    %% same as clientid
+    id :: emqx_persistent_session_ds:id(),
+    %% creation time
+    created_at :: _Millisecond :: non_neg_integer(),
+    expires_at = never :: _Millisecond :: non_neg_integer() | never,
+    inflight :: emqx_persistent_message_ds_replayer:inflight(),
+    %% for future usage
+    props = #{} :: map()
+}).
+
+-endif.
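%% Illustrative sketch, not part of the diff above: a #ds_sub{} as stored in
%% ?SESSION_SUBSCRIPTIONS_TAB is keyed by a {SessionId, TopicFilter} pair, the
%% same shape matched by export_subscriptions/1 earlier. All values here are
%% made up for the example.
ds_sub_sketch() ->
    #ds_sub{
        id = {<<"example-clientid">>, [<<"t">>, '+']},
        start_time = erlang:system_time(millisecond),
        props = #{},
        extra = #{}
    }.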
@@ -47,11 +47,9 @@
 -type bytesize() :: integer().
 -type wordsize() :: bytesize().
 -type percent() :: float().
--type file() :: string().
--type comma_separated_list() :: list().
+-type comma_separated_list() :: list(string()).
 -type comma_separated_binary() :: [binary()].
 -type comma_separated_atoms() :: [atom()].
--type bar_separated_list() :: list().
 -type ip_port() :: tuple() | integer().
 -type cipher() :: map().
 -type port_number() :: 1..65535.

@@ -75,7 +73,6 @@
 -typerefl_from_string({percent/0, emqx_schema, to_percent}).
 -typerefl_from_string({comma_separated_list/0, emqx_schema, to_comma_separated_list}).
 -typerefl_from_string({comma_separated_binary/0, emqx_schema, to_comma_separated_binary}).
--typerefl_from_string({bar_separated_list/0, emqx_schema, to_bar_separated_list}).
 -typerefl_from_string({ip_port/0, emqx_schema, to_ip_port}).
 -typerefl_from_string({cipher/0, emqx_schema, to_erl_cipher_suite}).
 -typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}).

@@ -118,7 +115,6 @@
     to_percent/1,
     to_comma_separated_list/1,
     to_comma_separated_binary/1,
-    to_bar_separated_list/1,
     to_ip_port/1,
     to_erl_cipher_suite/1,
     to_comma_separated_atoms/1,

@@ -154,10 +150,8 @@
     bytesize/0,
     wordsize/0,
     percent/0,
-    file/0,
     comma_separated_list/0,
     comma_separated_binary/0,
-    bar_separated_list/0,
     ip_port/0,
     cipher/0,
     comma_separated_atoms/0,

@@ -1849,7 +1843,7 @@ base_listener(Bind) ->
                 default => true
             }
         )}
-    ] ++ emqx_limiter_schema:short_paths_fields(?MODULE).
+    ] ++ emqx_limiter_schema:short_paths_fields().

 desc("persistent_session_store") ->
     "Settings for message persistence.";

@@ -2564,9 +2558,6 @@ to_json_binary(Str) ->
             Error
     end.

-to_bar_separated_list(Str) ->
-    {ok, string:tokens(Str, "| ")}.

 %% @doc support the following format:
 %% - 127.0.0.1:1883
 %% - ::1:1883

@@ -3316,7 +3307,7 @@ get_tombstone_map_value_type(Schema) ->
     %% hoconsc:map_value_type(Schema)
     ?MAP(_Name, Union) = hocon_schema:field_schema(Schema, type),
     %% TODO: violation of abstraction, fix hoconsc:union_members/1
-    ?UNION(Members) = Union,
+    ?UNION(Members, _) = Union,
     Tombstone = tombstone(),
     [Type, Tombstone] = hoconsc:union_members(Members),
     Type.
|
||||||
t().
|
t().
|
||||||
-callback open(clientinfo(), conninfo()) ->
|
-callback open(clientinfo(), conninfo()) ->
|
||||||
{_IsPresent :: true, t(), _ReplayContext} | false.
|
{_IsPresent :: true, t(), _ReplayContext} | false.
|
||||||
|
-callback destroy(t() | clientinfo()) -> ok.
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Create a Session
|
%% Create a Session
|
||||||
|
@ -247,7 +248,14 @@ get_mqtt_conf(Zone, Key) ->
|
||||||
|
|
||||||
-spec destroy(clientinfo(), conninfo()) -> ok.
|
-spec destroy(clientinfo(), conninfo()) -> ok.
|
||||||
destroy(ClientInfo, ConnInfo) ->
|
destroy(ClientInfo, ConnInfo) ->
|
||||||
(choose_impl_mod(ConnInfo)):destroy(ClientInfo).
|
%% When destroying/discarding a session, the current `ClientInfo' might suggest an
|
||||||
|
%% implementation which does not correspond to the one previously used by this client.
|
||||||
|
%% An example of this is a client that first connects with `Session-Expiry-Interval' >
|
||||||
|
%% 0, and later reconnects with `Session-Expiry-Interval' = 0 and `clean_start' =
|
||||||
|
%% true. So we may simply destroy sessions from all implementations, since the key
|
||||||
|
%% (ClientID) is the same.
|
||||||
|
Mods = choose_impl_candidates(ConnInfo),
|
||||||
|
lists:foreach(fun(Mod) -> Mod:destroy(ClientInfo) end, Mods).
|
||||||
|
|
||||||
-spec destroy(t()) -> ok.
|
-spec destroy(t()) -> ok.
|
||||||
destroy(Session) ->
|
destroy(Session) ->
|
||||||
|
|
|
@@ -44,6 +44,8 @@
 %% State is stored in-memory in the process heap.
 -module(emqx_session_mem).

+-behaviour(emqx_session).
+
 -include("emqx.hrl").
 -include("emqx_mqtt.hrl").
 -include("emqx_session_mem.hrl").
@@ -20,6 +20,7 @@

 -export([
     introduced_in/0,
+    deprecated_since/0,

     open_iterator/4,
     close_iterator/2,

@@ -31,9 +32,11 @@
 -define(TIMEOUT, 30_000).

 introduced_in() ->
-    %% FIXME
     "5.3.0".

+deprecated_since() ->
+    "5.4.0".
+
 -spec open_iterator(
     [node()],
     emqx_types:words(),
@@ -244,19 +244,28 @@ get_param_types(Signatures, {M, F, A}) ->
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

 dump() ->
-    case
-        {
-            filelib:wildcard(project_root_dir() ++ "/*_plt"),
-            filelib:wildcard(project_root_dir() ++ "/_build/check/lib")
-        }
-    of
+    RootDir = project_root_dir(),
+    TryRelDir = RootDir ++ "/_build/check/lib",
+    case {filelib:wildcard(RootDir ++ "/*_plt"), filelib:wildcard(TryRelDir)} of
         {[PLT | _], [RelDir | _]} ->
             dump(#{
                 plt => PLT,
                 reldir => RelDir
             });
-        _ ->
-            error("failed to guess run options")
+        {[], _} ->
+            logger:error(
+                "No usable PLT files found in \"~s\", abort ~n"
+                "Try running `rebar3 as check dialyzer` at least once first",
+                [RootDir]
+            ),
+            error(run_failed);
+        {_, []} ->
+            logger:error(
+                "No built applications found in \"~s\", abort ~n"
+                "Try running `rebar3 as check compile` at least once first",
+                [TryRelDir]
+            ),
+            error(run_failed)
     end.

 %% Collect the local BPAPI modules to a dump file

@@ -411,10 +420,19 @@ setnok() ->
     put(bpapi_ok, false).

 dumps_dir() ->
-    filename:join(project_root_dir(), "apps/emqx/test/emqx_static_checks_data").
+    filename:join(emqx_app_dir(), "test/emqx_static_checks_data").

-project_root_dir() ->
-    string:trim(os:cmd("git rev-parse --show-toplevel")).

 versions_file() ->
-    filename:join(project_root_dir(), "apps/emqx/priv/bpapi.versions").
+    filename:join(emqx_app_dir(), "priv/bpapi.versions").
+
+emqx_app_dir() ->
+    Info = ?MODULE:module_info(compile),
+    case proplists:get_value(source, Info) of
+        Source when is_list(Source) ->
+            filename:dirname(filename:dirname(Source));
+        undefined ->
+            "apps/emqx"
+    end.
+
+project_root_dir() ->
+    filename:dirname(filename:dirname(emqx_app_dir())).
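%% Illustrative sketch, not part of the diff above: emqx_app_dir/0 relies on the
%% compiler-recorded source path, so two filename:dirname/1 calls strip
%% "src/<module>.erl" and leave the application root. The path below is made up.
app_dir_sketch() ->
    Source = "/repo/apps/emqx/src/emqx_bpapi_static_checks.erl",
    "/repo/apps/emqx" = filename:dirname(filename:dirname(Source)),
    ok.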
@@ -306,7 +306,7 @@ test_stepdown_session(Action, Reason) ->
     ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo),
     ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo),
     ok = emqx_cm:register_channel(ClientId, Pid2, ConnInfo),
-    ?assertEqual([Pid1, Pid2], lists:sort(emqx_cm:lookup_channels(ClientId))),
+    ?assertEqual(lists:sort([Pid1, Pid2]), lists:sort(emqx_cm:lookup_channels(ClientId))),
     case Reason of
         noproc ->
             exit(Pid1, kill),
@@ -26,9 +26,7 @@

 -import(emqx_common_test_helpers, [on_exit/1]).

--define(DEFAULT_KEYSPACE, default).
--define(DS_SHARD_ID, <<"local">>).
--define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}).
+-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message).

 all() ->
     emqx_common_test_helpers:all(?MODULE).

@@ -48,6 +46,7 @@ init_per_testcase(t_session_subscription_iterators = TestCase, Config) ->
     Nodes = emqx_cth_cluster:start(Cluster, #{work_dir => emqx_cth_suite:work_dir(TestCase, Config)}),
     [{nodes, Nodes} | Config];
 init_per_testcase(TestCase, Config) ->
+    ok = emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB),
     Apps = emqx_cth_suite:start(
         app_specs(),
         #{work_dir => emqx_cth_suite:work_dir(TestCase, Config)}

@@ -58,10 +57,11 @@ end_per_testcase(t_session_subscription_iterators, Config) ->
     Nodes = ?config(nodes, Config),
     emqx_common_test_helpers:call_janitor(60_000),
     ok = emqx_cth_cluster:stop(Nodes),
-    ok;
+    end_per_testcase(common, Config);
 end_per_testcase(_TestCase, Config) ->
-    Apps = ?config(apps, Config),
+    Apps = proplists:get_value(apps, Config, []),
     emqx_common_test_helpers:call_janitor(60_000),
+    clear_db(),
     emqx_cth_suite:stop(Apps),
     ok.

@@ -95,14 +95,15 @@ t_messages_persisted(_Config) ->
     Results = [emqtt:publish(CP, Topic, Payload, 1) || {Topic, Payload} <- Messages],

     ct:pal("Results = ~p", [Results]),
+    timer:sleep(2000),

-    Persisted = consume(?DS_SHARD, {['#'], 0}),
+    Persisted = consume(['#'], 0),

     ct:pal("Persisted = ~p", [Persisted]),

     ?assertEqual(
-        [M1, M2, M5, M7, M9, M10],
-        [{emqx_message:topic(M), emqx_message:payload(M)} || M <- Persisted]
+        lists:sort([M1, M2, M5, M7, M9, M10]),
+        lists:sort([{emqx_message:topic(M), emqx_message:payload(M)} || M <- Persisted])
     ),

     ok.

@@ -139,23 +140,25 @@ t_messages_persisted_2(_Config) ->
     {ok, #{reason_code := ?RC_NO_MATCHING_SUBSCRIBERS}} =
         emqtt:publish(CP, T(<<"client/2/topic">>), <<"8">>, 1),

-    Persisted = consume(?DS_SHARD, {['#'], 0}),
+    timer:sleep(2000),
+
+    Persisted = consume(['#'], 0),

     ct:pal("Persisted = ~p", [Persisted]),

     ?assertEqual(
-        [
+        lists:sort([
             {T(<<"client/1/topic">>), <<"4">>},
             {T(<<"client/2/topic">>), <<"5">>}
-        ],
-        [{emqx_message:topic(M), emqx_message:payload(M)} || M <- Persisted]
+        ]),
+        lists:sort([{emqx_message:topic(M), emqx_message:payload(M)} || M <- Persisted])
     ),

     ok.

 %% TODO: test quic and ws too
 t_session_subscription_iterators(Config) ->
-    [Node1, Node2] = ?config(nodes, Config),
+    [Node1, _Node2] = ?config(nodes, Config),
     Port = get_mqtt_port(Node1, tcp),
     Topic = <<"t/topic">>,
     SubTopicFilter = <<"t/+">>,

@@ -202,11 +205,8 @@ t_session_subscription_iterators(Config) ->
                 messages => [Message1, Message2, Message3, Message4]
             }
         end,
-        fun(Results, Trace) ->
+        fun(Trace) ->
             ct:pal("trace:\n ~p", [Trace]),
-            #{
-                messages := [_Message1, Message2, Message3 | _]
-            } = Results,
             case ?of_kind(ds_session_subscription_added, Trace) of
                 [] ->
                     %% Since `emqx_durable_storage' is a dependency of `emqx', it gets

@@ -228,17 +228,6 @@ t_session_subscription_iterators(Config) ->
             ),
             ok
         end,
-            ?assertMatch({ok, [_]}, get_all_iterator_ids(Node1)),
-            {ok, [IteratorId]} = get_all_iterator_ids(Node1),
-            ?assertMatch({ok, [IteratorId]}, get_all_iterator_ids(Node2)),
-            ReplayMessages1 = erpc:call(Node1, fun() -> consume(?DS_SHARD, IteratorId) end),
-            ExpectedMessages = [Message2, Message3],
-            %% Note: it is expected that this will break after replayers are in place.
-            %% They might have consumed all the messages by this time.
-            ?assertEqual(ExpectedMessages, ReplayMessages1),
-            %% Different DS shard
-            ReplayMessages2 = erpc:call(Node2, fun() -> consume(?DS_SHARD, IteratorId) end),
-            ?assertEqual([], ReplayMessages2),
             ok
         end
     ),

@@ -263,33 +252,26 @@ connect(Opts0 = #{}) ->
     {ok, _} = emqtt:connect(Client),
     Client.

-consume(Shard, Replay = {_TopicFiler, _StartMS}) ->
-    {ok, It} = emqx_ds_storage_layer:make_iterator(Shard, Replay),
-    consume(It);
-consume(Shard, IteratorId) when is_binary(IteratorId) ->
-    {ok, It} = emqx_ds_storage_layer:restore_iterator(Shard, IteratorId),
-    consume(It).
+consume(TopicFilter, StartMS) ->
+    Streams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartMS),
+    lists:flatmap(
+        fun({_Rank, Stream}) ->
+            {ok, It} = emqx_ds:make_iterator(?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartMS),
+            consume(It)
+        end,
+        Streams
+    ).

 consume(It) ->
-    case emqx_ds_storage_layer:next(It) of
-        {value, Msg, NIt} ->
-            [emqx_persistent_message:deserialize(Msg) | consume(NIt)];
-        none ->
+    case emqx_ds:next(?PERSISTENT_MESSAGE_DB, It, 100) of
+        {ok, _NIt, _Msgs = []} ->
+            [];
+        {ok, NIt, Msgs} ->
+            Msgs ++ consume(NIt);
+        {ok, end_of_stream} ->
             []
     end.

-delete_all_messages() ->
-    Persisted = consume(?DS_SHARD, {['#'], 0}),
-    lists:foreach(
-        fun(Msg) ->
-            GUID = emqx_message:id(Msg),
-            Topic = emqx_topic:words(emqx_message:topic(Msg)),
-            Timestamp = emqx_guid:timestamp(GUID),
-            ok = emqx_ds_storage_layer:delete(?DS_SHARD, GUID, Timestamp, Topic)
-        end,
-        Persisted
-    ).

 receive_messages(Count) ->
     receive_messages(Count, []).

@@ -306,13 +288,6 @@ receive_messages(Count, Msgs) ->
 publish(Node, Message) ->
     erpc:call(Node, emqx, publish, [Message]).

-get_iterator_ids(Node, ClientId) ->
-    Channel = erpc:call(Node, fun() ->
-        [ConnPid] = emqx_cm:lookup_channels(ClientId),
-        sys:get_state(ConnPid)
-    end),
-    emqx_connection:info({channel, {session, iterators}}, Channel).

 app_specs() ->
     [
         emqx_durable_storage,

@@ -330,5 +305,6 @@ get_mqtt_port(Node, Type) ->
     {_IP, Port} = erpc:call(Node, emqx_config, get, [[listeners, Type, default, bind]]),
     Port.

-get_all_iterator_ids(Node) ->
-    erpc:call(Node, emqx_ds_storage_layer, list_iterator_prefix, [?DS_SHARD, <<>>]).
+clear_db() ->
+    ok = emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB),
+    ok.
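%% Illustrative sketch, not part of the diff above: the reworked consume/2 helper
%% drains every stream matching a topic filter from the given start time, e.g.
%% everything persisted since the epoch (mirroring the consume(['#'], 0) calls in
%% the test cases above).
consume_all_sketch() ->
    consume(['#'], 0).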
@@ -24,6 +24,8 @@
 -compile(export_all).
 -compile(nowarn_export_all).

+-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message).
+
 %%--------------------------------------------------------------------
 %% SUITE boilerplate
 %%--------------------------------------------------------------------

@@ -131,6 +133,7 @@ get_listener_port(Type, Name) ->
 end_per_group(Group, Config) when Group == tcp; Group == ws; Group == quic ->
     ok = emqx_cth_suite:stop(?config(group_apps, Config));
 end_per_group(_, _Config) ->
+    catch emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB),
     ok.

 init_per_testcase(TestCase, Config) ->

@@ -188,7 +191,7 @@ receive_messages(Count, Msgs) ->
             receive_messages(Count - 1, [Msg | Msgs]);
         _Other ->
             receive_messages(Count, Msgs)
-    after 5000 ->
+    after 15000 ->
         Msgs
     end.

@@ -227,11 +230,11 @@ wait_for_cm_unregister(ClientId, N) ->
     end.

 publish(Topic, Payloads) ->
-    publish(Topic, Payloads, false).
+    publish(Topic, Payloads, false, 2).

-publish(Topic, Payloads, WaitForUnregister) ->
+publish(Topic, Payloads, WaitForUnregister, QoS) ->
     Fun = fun(Client, Payload) ->
-        {ok, _} = emqtt:publish(Client, Topic, Payload, 2)
+        {ok, _} = emqtt:publish(Client, Topic, Payload, QoS)
     end,
     do_publish(Payloads, Fun, WaitForUnregister).

@@ -510,6 +513,48 @@ t_process_dies_session_expires(Config) ->

     emqtt:disconnect(Client2).

+t_publish_while_client_is_gone_qos1(Config) ->
+    %% A persistent session should receive messages in its
+    %% subscription even if the process owning the session dies.
+    ConnFun = ?config(conn_fun, Config),
+    Topic = ?config(topic, Config),
+    STopic = ?config(stopic, Config),
+    Payload1 = <<"hello1">>,
+    Payload2 = <<"hello2">>,
+    ClientId = ?config(client_id, Config),
+    {ok, Client1} = emqtt:start_link([
+        {proto_ver, v5},
+        {clientid, ClientId},
+        {properties, #{'Session-Expiry-Interval' => 30}},
+        {clean_start, true}
+        | Config
+    ]),
+    {ok, _} = emqtt:ConnFun(Client1),
+    {ok, _, [1]} = emqtt:subscribe(Client1, STopic, qos1),
+
+    ok = emqtt:disconnect(Client1),
+    maybe_kill_connection_process(ClientId, Config),
+
+    ok = publish(Topic, [Payload1, Payload2], false, 1),
+
+    {ok, Client2} = emqtt:start_link([
+        {proto_ver, v5},
+        {clientid, ClientId},
+        {properties, #{'Session-Expiry-Interval' => 30}},
+        {clean_start, false}
+        | Config
+    ]),
+    {ok, _} = emqtt:ConnFun(Client2),
+    Msgs = receive_messages(2),
+    ?assertMatch([_, _], Msgs),
+    [Msg2, Msg1] = Msgs,
+    ?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)),
+    ?assertEqual({ok, 1}, maps:find(qos, Msg1)),
+    ?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg2)),
+    ?assertEqual({ok, 1}, maps:find(qos, Msg2)),
+
+    ok = emqtt:disconnect(Client2).
+
 t_publish_while_client_is_gone(init, Config) -> skip_ds_tc(Config);
 t_publish_while_client_is_gone('end', _Config) -> ok.
 t_publish_while_client_is_gone(Config) ->

@@ -554,6 +599,7 @@ t_publish_while_client_is_gone(Config) ->

     ok = emqtt:disconnect(Client2).

+%% TODO: don't skip after QoS2 support is added to DS.
 t_clean_start_drops_subscriptions(init, Config) -> skip_ds_tc(Config);
 t_clean_start_drops_subscriptions('end', _Config) -> ok.
 t_clean_start_drops_subscriptions(Config) ->
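%% Illustrative sketch, not part of the diff above: the extended publish/4 helper
%% lets QoS 1 test cases publish without waiting for CM unregistration, as the new
%% t_publish_while_client_is_gone_qos1 case does. Topic and payloads are made up.
publish_qos1_sketch() ->
    ok = publish(<<"t/topic">>, [<<"hello1">>, <<"hello2">>], false, 1).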
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth, [
     {description, "EMQX Authentication and authorization"},
-    {vsn, "0.1.27"},
+    {vsn, "0.1.28"},
     {modules, []},
     {registered, [emqx_auth_sup]},
     {applications, [
@@ -147,7 +147,7 @@ schema("/authentication") ->
             description => ?DESC(authentication_get),
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_example(
-                    hoconsc:array(emqx_authn_schema:authenticator_type()),
+                    hoconsc:array(authenticator_type(config)),
                     authenticator_array_example()
                 )
             }

@@ -156,12 +156,12 @@ schema("/authentication") ->
             tags => ?API_TAGS_GLOBAL,
             description => ?DESC(authentication_post),
             'requestBody' => emqx_dashboard_swagger:schema_with_examples(
-                emqx_authn_schema:authenticator_type(),
+                authenticator_type(api_write),
                 authenticator_examples()
             ),
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_examples(
-                    emqx_authn_schema:authenticator_type(),
+                    authenticator_type(config),
                     authenticator_examples()
                 ),
                 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),

@@ -178,7 +178,7 @@ schema("/authentication/:id") ->
             parameters => [param_auth_id()],
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_examples(
-                    emqx_authn_schema:authenticator_type(),
+                    authenticator_type(config),
                     authenticator_examples()
                 ),
                 404 => error_codes([?NOT_FOUND], <<"Not Found">>)

@@ -189,7 +189,7 @@ schema("/authentication/:id") ->
             description => ?DESC(authentication_id_put),
             parameters => [param_auth_id()],
             'requestBody' => emqx_dashboard_swagger:schema_with_examples(
-                emqx_authn_schema:authenticator_type(),
+                authenticator_type(api_write),
                 authenticator_examples()
             ),
             responses => #{

@@ -236,7 +236,7 @@ schema("/listeners/:listener_id/authentication") ->
             parameters => [param_listener_id()],
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_example(
-                    hoconsc:array(emqx_authn_schema:authenticator_type()),
+                    hoconsc:array(authenticator_type(config)),
                     authenticator_array_example()
                 )
             }

@@ -247,12 +247,12 @@ schema("/listeners/:listener_id/authentication") ->
             description => ?DESC(listeners_listener_id_authentication_post),
             parameters => [param_listener_id()],
             'requestBody' => emqx_dashboard_swagger:schema_with_examples(
-                emqx_authn_schema:authenticator_type(),
+                authenticator_type(api_write),
                 authenticator_examples()
             ),
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_examples(
-                    emqx_authn_schema:authenticator_type(),
+                    authenticator_type(config),
                     authenticator_examples()
                 ),
                 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),

@@ -270,7 +270,7 @@ schema("/listeners/:listener_id/authentication/:id") ->
             parameters => [param_listener_id(), param_auth_id()],
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_examples(
-                    emqx_authn_schema:authenticator_type(),
+                    authenticator_type(config),
                     authenticator_examples()
                 ),
                 404 => error_codes([?NOT_FOUND], <<"Not Found">>)

@@ -282,7 +282,7 @@ schema("/listeners/:listener_id/authentication/:id") ->
             description => ?DESC(listeners_listener_id_authentication_id_put),
             parameters => [param_listener_id(), param_auth_id()],
             'requestBody' => emqx_dashboard_swagger:schema_with_examples(
-                emqx_authn_schema:authenticator_type(),
+                authenticator_type(api_write),
                 authenticator_examples()
             ),
             responses => #{

@@ -1278,6 +1278,9 @@ paginated_list_type(Type) ->
         {meta, ref(emqx_dashboard_swagger, meta)}
     ].

+authenticator_type(Kind) ->
+    emqx_authn_schema:authenticator_type(Kind).
+
 authenticator_array_example() ->
     [Config || #{value := Config} <- maps:values(authenticator_examples())].
@@ -53,7 +53,8 @@

 -export([
     type_ro/1,
-    type_rw/1
+    type_rw/1,
+    type_rw_api/1
 ]).

 -export([

@@ -67,21 +68,17 @@
 -define(SALT_ROUNDS_MAX, 10).

 namespace() -> "authn-hash".
-roots() -> [pbkdf2, bcrypt, bcrypt_rw, simple].
+roots() -> [pbkdf2, bcrypt, bcrypt_rw, bcrypt_rw_api, simple].

 fields(bcrypt_rw) ->
     fields(bcrypt) ++
         [
-            {salt_rounds,
-                sc(
-                    range(?SALT_ROUNDS_MIN, ?SALT_ROUNDS_MAX),
-                    #{
-                        default => ?SALT_ROUNDS_MAX,
-                        example => ?SALT_ROUNDS_MAX,
-                        desc => "Work factor for BCRYPT password generation.",
-                        converter => fun salt_rounds_converter/2
-                    }
-                )}
+            {salt_rounds, fun bcrypt_salt_rounds/1}
+        ];
+fields(bcrypt_rw_api) ->
+    fields(bcrypt) ++
+        [
+            {salt_rounds, fun bcrypt_salt_rounds_api/1}
         ];
 fields(bcrypt) ->
     [{name, sc(bcrypt, #{required => true, desc => "BCRYPT password hashing."})}];

@@ -95,7 +92,7 @@ fields(pbkdf2) ->
             )},
         {iterations,
             sc(
-                integer(),
+                pos_integer(),
                 #{required => true, desc => "Iteration count for PBKDF2 hashing algorithm."}
             )},
         {dk_length, fun dk_length/1}

@@ -110,6 +107,15 @@ fields(simple) ->
         {salt_position, fun salt_position/1}
     ].

+bcrypt_salt_rounds(converter) -> fun salt_rounds_converter/2;
+bcrypt_salt_rounds(Option) -> bcrypt_salt_rounds_api(Option).
+
+bcrypt_salt_rounds_api(type) -> range(?SALT_ROUNDS_MIN, ?SALT_ROUNDS_MAX);
+bcrypt_salt_rounds_api(default) -> ?SALT_ROUNDS_MAX;
+bcrypt_salt_rounds_api(example) -> ?SALT_ROUNDS_MAX;
+bcrypt_salt_rounds_api(desc) -> "Work factor for BCRYPT password generation.";
+bcrypt_salt_rounds_api(_) -> undefined.
+
 salt_rounds_converter(undefined, _) ->
     undefined;
 salt_rounds_converter(I, _) when is_integer(I) ->

@@ -119,6 +125,8 @@ salt_rounds_converter(X, _) ->

 desc(bcrypt_rw) ->
     "Settings for bcrypt password hashing algorithm (for DB backends with write capability).";
+desc(bcrypt_rw_api) ->
+    desc(bcrypt_rw);
 desc(bcrypt) ->
     "Settings for bcrypt password hashing algorithm.";
 desc(pbkdf2) ->

@@ -143,14 +151,20 @@ dk_length(desc) ->
 dk_length(_) ->
     undefined.

-%% for simple_authn/emqx_authn_mnesia
+%% for emqx_authn_mnesia
 type_rw(type) ->
     hoconsc:union(rw_refs());
-type_rw(default) ->
-    #{<<"name">> => sha256, <<"salt_position">> => prefix};
 type_rw(desc) ->
     "Options for password hash creation and verification.";
-type_rw(_) ->
+type_rw(Option) ->
+    type_ro(Option).
+
+%% for emqx_authn_mnesia API
+type_rw_api(type) ->
+    hoconsc:union(api_refs());
+type_rw_api(desc) ->
+    "Options for password hash creation and verification through API.";
+type_rw_api(_) ->
     undefined.

 %% for other authn resources

@@ -242,31 +256,41 @@ check_password(#{name := Other, salt_position := SaltPosition}, Salt, PasswordHa
 %%------------------------------------------------------------------------------

 rw_refs() ->
-    All = [
-        hoconsc:ref(?MODULE, bcrypt_rw),
-        hoconsc:ref(?MODULE, pbkdf2),
-        hoconsc:ref(?MODULE, simple)
-    ],
-    fun
-        (all_union_members) -> All;
-        ({value, #{<<"name">> := <<"bcrypt">>}}) -> [hoconsc:ref(?MODULE, bcrypt_rw)];
-        ({value, #{<<"name">> := <<"pbkdf2">>}}) -> [hoconsc:ref(?MODULE, pbkdf2)];
-        ({value, #{<<"name">> := _}}) -> [hoconsc:ref(?MODULE, simple)];
-        ({value, _}) -> throw(#{reason => "algorithm_name_missing"})
-    end.
+    union_selector(rw).

 ro_refs() ->
-    All = [
-        hoconsc:ref(?MODULE, bcrypt),
-        hoconsc:ref(?MODULE, pbkdf2),
-        hoconsc:ref(?MODULE, simple)
-    ],
+    union_selector(ro).
+
+api_refs() ->
+    union_selector(api).
+
+sc(Type, Meta) -> hoconsc:mk(Type, Meta).
+
+union_selector(Kind) ->
     fun
-        (all_union_members) -> All;
-        ({value, #{<<"name">> := <<"bcrypt">>}}) -> [hoconsc:ref(?MODULE, bcrypt)];
-        ({value, #{<<"name">> := <<"pbkdf2">>}}) -> [hoconsc:ref(?MODULE, pbkdf2)];
-        ({value, #{<<"name">> := _}}) -> [hoconsc:ref(?MODULE, simple)];
+        (all_union_members) -> refs(Kind);
+        ({value, #{<<"name">> := <<"bcrypt">>}}) -> [bcrypt_ref(Kind)];
+        ({value, #{<<"name">> := <<"pbkdf2">>}}) -> [pbkdf2_ref(Kind)];
+        ({value, #{<<"name">> := _}}) -> [simple_ref(Kind)];
         ({value, _}) -> throw(#{reason => "algorithm_name_missing"})
     end.

-sc(Type, Meta) -> hoconsc:mk(Type, Meta).
+refs(Kind) ->
+    [
+        bcrypt_ref(Kind),
+        pbkdf2_ref(Kind),
+        simple_ref(Kind)
+    ].
+
+pbkdf2_ref(_) ->
+    hoconsc:ref(?MODULE, pbkdf2).
+
+bcrypt_ref(rw) ->
+    hoconsc:ref(?MODULE, bcrypt_rw);
+bcrypt_ref(api) ->
+    hoconsc:ref(?MODULE, bcrypt_rw_api);
+bcrypt_ref(_) ->
+    hoconsc:ref(?MODULE, bcrypt).
+
+simple_ref(_) ->
+    hoconsc:ref(?MODULE, simple).
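%% Illustrative sketch, not part of the diff above: how the Kind argument picks a
%% bcrypt schema variant; `rw' selects the writable config schema, `api' the
%% API-facing one (whose salt_rounds field has no converter), and anything else
%% the read-only bcrypt schema.
bcrypt_ref_sketch() ->
    [bcrypt_ref(rw), bcrypt_ref(api), bcrypt_ref(ro)].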
@@ -34,26 +34,50 @@
     tags/0,
     fields/1,
     authenticator_type/0,
+    authenticator_type/1,
     authenticator_type_without/1,
+    authenticator_type_without/2,
     mechanism/1,
-    backend/1
+    backend/1,
+    namespace/0
 ]).

 -export([
     global_auth_fields/0
 ]).

+-export_type([shema_kind/0]).
+
 -define(AUTHN_MODS_PT_KEY, {?MODULE, authn_schema_mods}).
+-define(DEFAULT_SCHEMA_KIND, config).

 %%--------------------------------------------------------------------
 %% Authn Source Schema Behaviour
 %%--------------------------------------------------------------------

 -type schema_ref() :: ?R_REF(module(), hocon_schema:name()).
+-type shema_kind() ::
+    %% api_write: schema for mutating API request validation
+    api_write
+    %% config: schema for config validation
+    | config.
+-callback namespace() -> string().
 -callback refs() -> [schema_ref()].
--callback select_union_member(emqx_config:raw_config()) -> schema_ref() | undefined | no_return().
+-callback refs(shema_kind()) -> [schema_ref()].
+-callback select_union_member(emqx_config:raw_config()) -> [schema_ref()] | undefined | no_return().
+-callback select_union_member(shema_kind(), emqx_config:raw_config()) ->
+    [schema_ref()] | undefined | no_return().
 -callback fields(hocon_schema:name()) -> [hocon_schema:field()].

+-optional_callbacks([
+    select_union_member/1,
+    select_union_member/2,
+    refs/0,
+    refs/1
+]).
+
+namespace() -> "authn".
+
 roots() -> [].

 injected_fields(AuthnSchemaMods) ->

@@ -67,45 +91,63 @@ tags() ->
     [<<"Authentication">>].

 authenticator_type() ->
-    hoconsc:union(union_member_selector(provider_schema_mods())).
+    authenticator_type(?DEFAULT_SCHEMA_KIND).
+
+authenticator_type(Kind) ->
+    hoconsc:union(union_member_selector(Kind, provider_schema_mods())).

 authenticator_type_without(ProviderSchemaMods) ->
+    authenticator_type_without(?DEFAULT_SCHEMA_KIND, ProviderSchemaMods).
+
+authenticator_type_without(Kind, ProviderSchemaMods) ->
     hoconsc:union(
-        union_member_selector(provider_schema_mods() -- ProviderSchemaMods)
+        union_member_selector(Kind, provider_schema_mods() -- ProviderSchemaMods)
     ).

-union_member_selector(Mods) ->
-    AllTypes = config_refs(Mods),
+union_member_selector(Kind, Mods) ->
+    AllTypes = config_refs(Kind, Mods),
     fun
         (all_union_members) -> AllTypes;
-        ({value, Value}) -> select_union_member(Value, Mods)
+        ({value, Value}) -> select_union_member(Kind, Value, Mods)
     end.

-select_union_member(#{<<"mechanism">> := Mechanism, <<"backend">> := Backend}, []) ->
+select_union_member(_Kind, #{<<"mechanism">> := Mechanism, <<"backend">> := Backend}, []) ->
     throw(#{
         reason => "unsupported_mechanism",
         mechanism => Mechanism,
         backend => Backend
     });
-select_union_member(#{<<"mechanism">> := Mechanism}, []) ->
+select_union_member(_Kind, #{<<"mechanism">> := Mechanism}, []) ->
     throw(#{
         reason => "unsupported_mechanism",
         mechanism => Mechanism
     });
-select_union_member(#{<<"mechanism">> := _} = Value, [Mod | Mods]) ->
-    case Mod:select_union_member(Value) of
+select_union_member(Kind, #{<<"mechanism">> := _} = Value, [Mod | Mods]) ->
+    case mod_select_union_member(Kind, Value, Mod) of
         undefined ->
-            select_union_member(Value, Mods);
+            select_union_member(Kind, Value, Mods);
         Member ->
             Member
     end;
-select_union_member(#{} = _Value, _Mods) ->
+select_union_member(_Kind, #{} = _Value, _Mods) ->
     throw(#{reason => "missing_mechanism_field"});
-select_union_member(Value, _Mods) ->
+select_union_member(_Kind, Value, _Mods) ->
     throw(#{reason => "not_a_struct", value => Value}).

-config_refs(Mods) ->
-    lists:append([Mod:refs() || Mod <- Mods]).
+mod_select_union_member(Kind, Value, Mod) ->
+    emqx_utils:call_first_defined([
+        {Mod, select_union_member, [Kind, Value]},
+        {Mod, select_union_member, [Value]}
+    ]).
+
+config_refs(Kind, Mods) ->
+    lists:append([mod_refs(Kind, Mod) || Mod <- Mods]).
+
+mod_refs(Kind, Mod) ->
+    emqx_utils:call_first_defined([
+        {Mod, refs, [Kind]},
+        {Mod, refs, []}
+    ]).

 root_type() ->
     hoconsc:array(authenticator_type()).
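%% Illustrative sketch, not part of the diff above: a provider module that only
%% exports refs/0 keeps working, because mod_refs/2 falls back through
%% emqx_utils:call_first_defined/1. `my_provider' is a hypothetical module name
%% used only for this sketch.
mod_refs_sketch(Kind) ->
    %% Tries my_provider:refs(Kind) first, then my_provider:refs().
    mod_refs(Kind, my_provider).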
@@ -18,6 +18,7 @@

 -include_lib("emqx/include/emqx_placeholder.hrl").
 -include_lib("emqx_authn.hrl").
+-include_lib("snabbkaffe/include/trace.hrl").

 -export([
     create_resource/3,

@@ -44,13 +45,13 @@
     default_headers_no_content_type/0
 ]).

--define(AUTHN_PLACEHOLDERS, [
-    ?PH_USERNAME,
-    ?PH_CLIENTID,
-    ?PH_PASSWORD,
-    ?PH_PEERHOST,
-    ?PH_CERT_SUBJECT,
-    ?PH_CERT_CN_NAME
+-define(ALLOWED_VARS, [
+    ?VAR_USERNAME,
+    ?VAR_CLIENTID,
+    ?VAR_PASSWORD,
+    ?VAR_PEERHOST,
+    ?VAR_CERT_SUBJECT,
+    ?VAR_CERT_CN_NAME
 ]).

 -define(DEFAULT_RESOURCE_OPTS, #{

@@ -107,48 +108,96 @@ check_password_from_selected_map(Algorithm, Selected, Password) ->
     end.

 parse_deep(Template) ->
-    emqx_placeholder:preproc_tmpl_deep(Template, #{placeholders => ?AUTHN_PLACEHOLDERS}).
+    Result = emqx_template:parse_deep(Template),
+    handle_disallowed_placeholders(Result, {deep, Template}).

 parse_str(Template) ->
-    emqx_placeholder:preproc_tmpl(Template, #{placeholders => ?AUTHN_PLACEHOLDERS}).
+    Result = emqx_template:parse(Template),
+    handle_disallowed_placeholders(Result, {string, Template}).

 parse_sql(Template, ReplaceWith) ->
-    emqx_placeholder:preproc_sql(
+    {Statement, Result} = emqx_template_sql:parse_prepstmt(
         Template,
-        #{
-            replace_with => ReplaceWith,
-            placeholders => ?AUTHN_PLACEHOLDERS,
-            strip_double_quote => true
-        }
-    ).
+        #{parameters => ReplaceWith, strip_double_quote => true}
+    ),
+    {Statement, handle_disallowed_placeholders(Result, {string, Template})}.
+
+handle_disallowed_placeholders(Template, Source) ->
+    case emqx_template:validate(?ALLOWED_VARS, Template) of
+        ok ->
+            Template;
+        {error, Disallowed} ->
+            ?tp(warning, "authn_template_invalid", #{
+                template => Source,
+                reason => Disallowed,
+                allowed => #{placeholders => ?ALLOWED_VARS},
+                notice =>
+                    "Disallowed placeholders will be rendered as is."
+                    " However, consider using `${$}` escaping for literal `$` where"
+                    " needed to avoid unexpected results."
+            }),
+            Result = prerender_disallowed_placeholders(Template),
+            case Source of
+                {string, _} ->
+                    emqx_template:parse(Result);
+                {deep, _} ->
+                    emqx_template:parse_deep(Result)
+            end
+    end.
+
+prerender_disallowed_placeholders(Template) ->
+    {Result, _} = emqx_template:render(Template, #{}, #{
+        var_trans => fun(Name, _) ->
+            % NOTE
+            % Rendering disallowed placeholders in escaped form, which will then
+            % parse as a literal string.
+            case lists:member(Name, ?ALLOWED_VARS) of
+                true -> "${" ++ Name ++ "}";
+                false -> "${$}{" ++ Name ++ "}"
+            end
+        end
+    }),
+    Result.

 render_deep(Template, Credential) ->
-    emqx_placeholder:proc_tmpl_deep(
+    % NOTE
+    % Ignoring errors here, undefined bindings will be replaced with empty string.
+    {Term, _Errors} = emqx_template:render(
         Template,
         mapping_credential(Credential),
-        #{return => full_binary, var_trans => fun handle_var/2}
-    ).
+        #{var_trans => fun to_string/2}
+    ),
+    Term.

 render_str(Template, Credential) ->
-    emqx_placeholder:proc_tmpl(
+    % NOTE
+    % Ignoring errors here, undefined bindings will be replaced with empty string.
+    {String, _Errors} = emqx_template:render(
         Template,
         mapping_credential(Credential),
-        #{return => full_binary, var_trans => fun handle_var/2}
-    ).
+        #{var_trans => fun to_string/2}
+    ),
+    unicode:characters_to_binary(String).

 render_urlencoded_str(Template, Credential) ->
-    emqx_placeholder:proc_tmpl(
+    % NOTE
+    % Ignoring errors here, undefined bindings will be replaced with empty string.
+    {String, _Errors} = emqx_template:render(
         Template,
         mapping_credential(Credential),
-        #{return => full_binary, var_trans => fun urlencode_var/2}
-    ).
+        #{var_trans => fun to_urlencoded_string/2}
+    ),
+    unicode:characters_to_binary(String).

 render_sql_params(ParamList, Credential) ->
-    emqx_placeholder:proc_tmpl(
+    % NOTE
+    % Ignoring errors here, undefined bindings will be replaced with empty string.
+    {Row, _Errors} = emqx_template:render(
         ParamList,
         mapping_credential(Credential),
-        #{return => rawlist, var_trans => fun handle_sql_var/2}
-    ).
+        #{var_trans => fun to_sql_valaue/2}
+    ),
+    Row.

 is_superuser(#{<<"is_superuser">> := Value}) ->
     #{is_superuser => to_bool(Value)};

@@ -269,22 +318,24 @@ without_password(Credential, [Name | Rest]) ->
         without_password(Credential, Rest)
     end.

-urlencode_var(Var, Value) ->
-    emqx_http_lib:uri_encode(handle_var(Var, Value)).
+to_urlencoded_string(Name, Value) ->
+    emqx_http_lib:uri_encode(to_string(Name, Value)).

-handle_var(_Name, undefined) ->
-    <<>>;
-handle_var([<<"peerhost">>], PeerHost) ->
-    emqx_placeholder:bin(inet:ntoa(PeerHost));
-handle_var(_, Value) ->
-    emqx_placeholder:bin(Value).
+to_string(Name, Value) ->
+    emqx_template:to_string(render_var(Name, Value)).

-handle_sql_var(_Name, undefined) ->
-    <<>>;
-handle_sql_var([<<"peerhost">>], PeerHost) ->
-    emqx_placeholder:bin(inet:ntoa(PeerHost));
-handle_sql_var(_, Value) ->
-    emqx_placeholder:sql_data(Value).
+to_sql_valaue(Name, Value) ->
+    emqx_utils_sql:to_sql_value(render_var(Name, Value)).
+
+render_var(_, undefined) ->
+    % NOTE
+    % Any allowed but undefined binding will be replaced with empty string, even when
+    % rendering SQL values.
+    <<>>;
+render_var(?VAR_PEERHOST, Value) ->
+    inet:ntoa(Value);
+render_var(_Name, Value) ->
+    Value.

 mapping_credential(C = #{cn := CN, dn := DN}) ->
     C#{cert_common_name => CN, cert_subject => DN};
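%% Illustrative sketch, not part of the diff above: what the fallback path does
%% with a template referencing a variable outside ?ALLOWED_VARS. The template
%% string is made up; ${username} stays a live placeholder, while ${not_allowed}
%% is re-escaped so that it later renders as the literal text "${not_allowed}".
disallowed_placeholder_sketch() ->
    Raw = <<"user=${username} raw=${not_allowed}">>,
    Parsed = emqx_template:parse(Raw),
    handle_disallowed_placeholders(Parsed, {string, Raw}).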
@@ -511,7 +511,10 @@ do_authorize(_Client, _PubSub, _Topic, []) ->
 do_authorize(Client, PubSub, Topic, [#{enable := false} | Rest]) ->
     do_authorize(Client, PubSub, Topic, Rest);
 do_authorize(
-    Client,
+    #{
+        username := Username,
+        peerhost := IpAddress
+    } = Client,
     PubSub,
     Topic,
     [Connector = #{type := Type} | Tail]

@@ -521,11 +524,32 @@ do_authorize(
     try Module:authorize(Client, PubSub, Topic, Connector) of
         nomatch ->
             emqx_metrics_worker:inc(authz_metrics, Type, nomatch),
+            ?TRACE("AUTHZ", "authorization_module_nomatch", #{
+                module => Module,
+                username => Username,
+                ipaddr => IpAddress,
+                topic => Topic,
+                pub_sub => PubSub
+            }),
             do_authorize(Client, PubSub, Topic, Tail);
         %% {matched, allow | deny | ignore}
         {matched, ignore} ->
+            ?TRACE("AUTHZ", "authorization_module_match_ignore", #{
+                module => Module,
+                username => Username,
+                ipaddr => IpAddress,
+                topic => Topic,
+                pub_sub => PubSub
+            }),
             do_authorize(Client, PubSub, Topic, Tail);
         ignore ->
+            ?TRACE("AUTHZ", "authorization_module_ignore", #{
+                module => Module,
+                username => Username,
+                ipaddr => IpAddress,
+                topic => Topic,
+                pub_sub => PubSub
+            }),
             do_authorize(Client, PubSub, Topic, Tail);
         %% {matched, allow | deny}
         Matched ->
@ -49,6 +49,8 @@
|
||||||
aggregate_metrics/1
|
aggregate_metrics/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
-export([with_source/2]).
|
||||||
|
|
||||||
-define(TAGS, [<<"Authorization">>]).
|
-define(TAGS, [<<"Authorization">>]).
|
||||||
|
|
||||||
api_spec() ->
|
api_spec() ->
|
||||||

@@ -183,19 +183,14 @@ compile_topic(<<"eq ", Topic/binary>>) ->
 compile_topic({eq, Topic}) ->
     {eq, emqx_topic:words(bin(Topic))};
 compile_topic(Topic) ->
-    TopicBin = bin(Topic),
-    case
-        emqx_placeholder:preproc_tmpl(
-            TopicBin,
-            #{placeholders => [?PH_USERNAME, ?PH_CLIENTID]}
-        )
-    of
-        [{str, _}] -> emqx_topic:words(TopicBin);
-        Tokens -> {pattern, Tokens}
+    Template = emqx_authz_utils:parse_str(Topic, [?VAR_USERNAME, ?VAR_CLIENTID]),
+    case emqx_template:is_const(Template) of
+        true -> emqx_topic:words(bin(Topic));
+        false -> {pattern, Template}
     end.

 bin(L) when is_list(L) ->
-    list_to_binary(L);
+    unicode:characters_to_binary(L);
 bin(B) when is_binary(B) ->
     B.

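Not part of the diff: roughly how the two branches behave. The exact emqx_template:t() term is an internal detail (the updated rule tests elsewhere in this diff also only assertMatch on it), so this is an illustration only.

    %% compile_topic(<<"t/1">>)            %=> [<<"t">>, <<"1">>]   -- constant topic, plain words
    %% compile_topic(<<"t/${clientid}">>)  %=> {pattern, Template}  -- rendered per client at match time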
|
@ -307,7 +302,7 @@ match_who(_, _) ->
|
||||||
match_topics(_ClientInfo, _Topic, []) ->
|
match_topics(_ClientInfo, _Topic, []) ->
|
||||||
false;
|
false;
|
||||||
match_topics(ClientInfo, Topic, [{pattern, PatternFilter} | Filters]) ->
|
match_topics(ClientInfo, Topic, [{pattern, PatternFilter} | Filters]) ->
|
||||||
TopicFilter = emqx_placeholder:proc_tmpl(PatternFilter, ClientInfo),
|
TopicFilter = bin(emqx_template:render_strict(PatternFilter, ClientInfo)),
|
||||||
match_topic(emqx_topic:words(Topic), emqx_topic:words(TopicFilter)) orelse
|
match_topic(emqx_topic:words(Topic), emqx_topic:words(TopicFilter)) orelse
|
||||||
match_topics(ClientInfo, Topic, Filters);
|
match_topics(ClientInfo, Topic, Filters);
|
||||||
match_topics(ClientInfo, Topic, [TopicFilter | Filters]) ->
|
match_topics(ClientInfo, Topic, [TopicFilter | Filters]) ->
|
||||||
|
|
|
@ -136,7 +136,7 @@ authz_fields() ->
|
||||||
[
|
[
|
||||||
{sources,
|
{sources,
|
||||||
?HOCON(
|
?HOCON(
|
||||||
?ARRAY(?UNION(UnionMemberSelector)),
|
?ARRAY(hoconsc:union(UnionMemberSelector)),
|
||||||
#{
|
#{
|
||||||
default => [default_authz()],
|
default => [default_authz()],
|
||||||
desc => ?DESC(sources),
|
desc => ?DESC(sources),
|
||||||
|
@ -153,7 +153,7 @@ api_authz_fields() ->
|
||||||
[{sources, ?HOCON(?ARRAY(api_source_type()), #{desc => ?DESC(sources)})}].
|
[{sources, ?HOCON(?ARRAY(api_source_type()), #{desc => ?DESC(sources)})}].
|
||||||
|
|
||||||
api_source_type() ->
|
api_source_type() ->
|
||||||
?UNION(api_authz_refs()).
|
hoconsc:union(api_authz_refs()).
|
||||||
|
|
||||||
api_authz_refs() ->
|
api_authz_refs() ->
|
||||||
lists:concat([api_source_refs(Mod) || Mod <- source_schema_mods()]).
|
lists:concat([api_source_refs(Mod) || Mod <- source_schema_mods()]).
|
||||||
|
|
|
@ -16,7 +16,9 @@
|
||||||
|
|
||||||
-module(emqx_authz_utils).
|
-module(emqx_authz_utils).
|
||||||
|
|
||||||
|
-include_lib("emqx/include/emqx_placeholder.hrl").
|
||||||
-include_lib("emqx_authz.hrl").
|
-include_lib("emqx_authz.hrl").
|
||||||
|
-include_lib("snabbkaffe/include/trace.hrl").
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
cleanup_resources/0,
|
cleanup_resources/0,
|
||||||
|
@ -108,48 +110,97 @@ update_config(Path, ConfigRequest) ->
|
||||||
}).
|
}).
|
||||||
|
|
||||||
 parse_deep(Template, PlaceHolders) ->
-    emqx_placeholder:preproc_tmpl_deep(Template, #{placeholders => PlaceHolders}).
+    Result = emqx_template:parse_deep(Template),
+    handle_disallowed_placeholders(Result, {deep, Template}, PlaceHolders).

 parse_str(Template, PlaceHolders) ->
-    emqx_placeholder:preproc_tmpl(Template, #{placeholders => PlaceHolders}).
+    Result = emqx_template:parse(Template),
+    handle_disallowed_placeholders(Result, {string, Template}, PlaceHolders).

 parse_sql(Template, ReplaceWith, PlaceHolders) ->
-    emqx_placeholder:preproc_sql(
+    {Statement, Result} = emqx_template_sql:parse_prepstmt(
         Template,
-        #{
-            replace_with => ReplaceWith,
-            placeholders => PlaceHolders,
-            strip_double_quote => true
-        }
-    ).
+        #{parameters => ReplaceWith, strip_double_quote => true}
+    ),
+    FResult = handle_disallowed_placeholders(Result, {string, Template}, PlaceHolders),
+    {Statement, FResult}.
+
+handle_disallowed_placeholders(Template, Source, Allowed) ->
+    case emqx_template:validate(Allowed, Template) of
+        ok ->
+            Template;
+        {error, Disallowed} ->
+            ?tp(warning, "authz_template_invalid", #{
+                template => Source,
+                reason => Disallowed,
+                allowed => #{placeholders => Allowed},
+                notice =>
+                    "Disallowed placeholders will be rendered as is."
+                    " However, consider using `${$}` escaping for literal `$` where"
+                    " needed to avoid unexpected results."
+            }),
+            Result = prerender_disallowed_placeholders(Template, Allowed),
+            case Source of
+                {string, _} ->
+                    emqx_template:parse(Result);
+                {deep, _} ->
+                    emqx_template:parse_deep(Result)
+            end
+    end.
+
+prerender_disallowed_placeholders(Template, Allowed) ->
+    {Result, _} = emqx_template:render(Template, #{}, #{
+        var_trans => fun(Name, _) ->
+            % NOTE
+            % Rendering disallowed placeholders in escaped form, which will then
+            % parse as a literal string.
+            case lists:member(Name, Allowed) of
+                true -> "${" ++ Name ++ "}";
+                false -> "${$}{" ++ Name ++ "}"
+            end
+        end
+    }),
+    Result.

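Not part of the diff: a sketch of the escaping round trip performed above, assuming "username" is the only allowed variable.

    %% input template:        "u=${username} x=${foo}"
    %% after prerender:       "u=${username} x=${$}{foo}"
    %% re-parsed and rendered with #{username => <<"alice">>}:
    %%                        <<"u=alice x=${foo}">>  -- the disallowed placeholder survives literally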
render_deep(Template, Values) ->
|
render_deep(Template, Values) ->
|
||||||
emqx_placeholder:proc_tmpl_deep(
|
% NOTE
|
||||||
|
% Ignoring errors here, undefined bindings will be replaced with empty string.
|
||||||
|
{Term, _Errors} = emqx_template:render(
|
||||||
Template,
|
Template,
|
||||||
client_vars(Values),
|
client_vars(Values),
|
||||||
#{return => full_binary, var_trans => fun handle_var/2}
|
#{var_trans => fun to_string/2}
|
||||||
).
|
),
|
||||||
|
Term.
|
||||||
|
|
||||||
render_str(Template, Values) ->
|
render_str(Template, Values) ->
|
||||||
emqx_placeholder:proc_tmpl(
|
% NOTE
|
||||||
|
% Ignoring errors here, undefined bindings will be replaced with empty string.
|
||||||
|
{String, _Errors} = emqx_template:render(
|
||||||
Template,
|
Template,
|
||||||
client_vars(Values),
|
client_vars(Values),
|
||||||
#{return => full_binary, var_trans => fun handle_var/2}
|
#{var_trans => fun to_string/2}
|
||||||
).
|
),
|
||||||
|
unicode:characters_to_binary(String).
|
||||||
|
|
||||||
render_urlencoded_str(Template, Values) ->
|
render_urlencoded_str(Template, Values) ->
|
||||||
emqx_placeholder:proc_tmpl(
|
% NOTE
|
||||||
|
% Ignoring errors here, undefined bindings will be replaced with empty string.
|
||||||
|
{String, _Errors} = emqx_template:render(
|
||||||
Template,
|
Template,
|
||||||
client_vars(Values),
|
client_vars(Values),
|
||||||
#{return => full_binary, var_trans => fun urlencode_var/2}
|
#{var_trans => fun to_urlencoded_string/2}
|
||||||
).
|
),
|
||||||
|
unicode:characters_to_binary(String).
|
||||||
|
|
||||||
render_sql_params(ParamList, Values) ->
|
render_sql_params(ParamList, Values) ->
|
||||||
emqx_placeholder:proc_tmpl(
|
% NOTE
|
||||||
|
% Ignoring errors here, undefined bindings will be replaced with empty string.
|
||||||
|
{Row, _Errors} = emqx_template:render(
|
||||||
ParamList,
|
ParamList,
|
||||||
client_vars(Values),
|
client_vars(Values),
|
||||||
#{return => rawlist, var_trans => fun handle_sql_var/2}
|
#{var_trans => fun to_sql_value/2}
|
||||||
).
|
),
|
||||||
|
Row.
|
||||||
|
|
||||||
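Not part of the diff: a sketch of the URL-path rendering above from a caller's perspective, with a hypothetical binding; emqx_http_lib:uri_encode/1 percent-encodes the rendered variable values, not the literal parts of the template.

    PathTemplate = emqx_authz_utils:parse_str(<<"/authz/${username}">>, [?VAR_USERNAME]),
    Path = emqx_authz_utils:render_urlencoded_str(PathTemplate, #{username => <<"us er">>}),
    %% Path = <<"/authz/us%20er">>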
-spec parse_http_resp_body(binary(), binary()) -> allow | deny | ignore | error.
|
-spec parse_http_resp_body(binary(), binary()) -> allow | deny | ignore | error.
|
||||||
parse_http_resp_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) ->
|
parse_http_resp_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) ->
|
||||||
|
@ -215,22 +266,24 @@ convert_client_var({dn, DN}) -> {cert_subject, DN};
|
||||||
convert_client_var({protocol, Proto}) -> {proto_name, Proto};
|
convert_client_var({protocol, Proto}) -> {proto_name, Proto};
|
||||||
convert_client_var(Other) -> Other.
|
convert_client_var(Other) -> Other.
|
||||||
|
|
||||||
urlencode_var(Var, Value) ->
|
to_urlencoded_string(Name, Value) ->
|
||||||
emqx_http_lib:uri_encode(handle_var(Var, Value)).
|
emqx_http_lib:uri_encode(to_string(Name, Value)).
|
||||||
|
|
||||||
handle_var(_Name, undefined) ->
|
to_string(Name, Value) ->
|
||||||
<<>>;
|
emqx_template:to_string(render_var(Name, Value)).
|
||||||
handle_var([<<"peerhost">>], IpAddr) ->
|
|
||||||
inet_parse:ntoa(IpAddr);
|
|
||||||
handle_var(_Name, Value) ->
|
|
||||||
emqx_placeholder:bin(Value).
|
|
||||||
|
|
||||||
handle_sql_var(_Name, undefined) ->
|
to_sql_value(Name, Value) ->
|
||||||
|
emqx_utils_sql:to_sql_value(render_var(Name, Value)).
|
||||||
|
|
||||||
|
render_var(_, undefined) ->
|
||||||
|
% NOTE
|
||||||
|
% Any allowed but undefined binding will be replaced with empty string, even when
|
||||||
|
% rendering SQL values.
|
||||||
<<>>;
|
<<>>;
|
||||||
handle_sql_var([<<"peerhost">>], IpAddr) ->
|
render_var(?VAR_PEERHOST, Value) ->
|
||||||
inet_parse:ntoa(IpAddr);
|
inet:ntoa(Value);
|
||||||
handle_sql_var(_Name, Value) ->
|
render_var(_Name, Value) ->
|
||||||
emqx_placeholder:sql_data(Value).
|
Value.
|
||||||
|
|
||||||
bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
|
bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
|
||||||
bin(L) when is_list(L) -> list_to_binary(L);
|
bin(L) when is_list(L) -> list_to_binary(L);
|
||||||
|
|
|
@ -63,14 +63,16 @@ end_per_testcase(_, Config) ->
|
||||||
init_per_suite(Config) ->
|
init_per_suite(Config) ->
|
||||||
Apps = emqx_cth_suite:start(
|
Apps = emqx_cth_suite:start(
|
||||||
[
|
[
|
||||||
emqx,
|
|
||||||
emqx_conf,
|
emqx_conf,
|
||||||
|
emqx,
|
||||||
emqx_auth,
|
emqx_auth,
|
||||||
|
%% to load schema
|
||||||
|
{emqx_auth_mnesia, #{start => false}},
|
||||||
emqx_management,
|
emqx_management,
|
||||||
{emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}
|
{emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}
|
||||||
],
|
],
|
||||||
#{
|
#{
|
||||||
work_dir => ?config(priv_dir, Config)
|
work_dir => filename:join(?config(priv_dir, Config), ?MODULE)
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
_ = emqx_common_test_http:create_default_app(),
|
_ = emqx_common_test_http:create_default_app(),
|
||||||
|
@ -535,6 +537,36 @@ ignore_switch_to_global_chain(_) ->
|
||||||
),
|
),
|
||||||
ok = emqtt:disconnect(Client4).
|
ok = emqtt:disconnect(Client4).
|
||||||
|
|
||||||
|
t_bcrypt_validation(_Config) ->
|
||||||
|
BaseConf = #{
|
||||||
|
mechanism => <<"password_based">>,
|
||||||
|
backend => <<"built_in_database">>,
|
||||||
|
user_id_type => <<"username">>
|
||||||
|
},
|
||||||
|
BcryptValid = #{
|
||||||
|
name => <<"bcrypt">>,
|
||||||
|
salt_rounds => 10
|
||||||
|
},
|
||||||
|
BcryptInvalid = #{
|
||||||
|
name => <<"bcrypt">>,
|
||||||
|
salt_rounds => 15
|
||||||
|
},
|
||||||
|
|
||||||
|
ConfValid = BaseConf#{password_hash_algorithm => BcryptValid},
|
||||||
|
ConfInvalid = BaseConf#{password_hash_algorithm => BcryptInvalid},
|
||||||
|
|
||||||
|
{ok, 400, _} = request(
|
||||||
|
post,
|
||||||
|
uri([?CONF_NS]),
|
||||||
|
ConfInvalid
|
||||||
|
),
|
||||||
|
|
||||||
|
{ok, 200, _} = request(
|
||||||
|
post,
|
||||||
|
uri([?CONF_NS]),
|
||||||
|
ConfValid
|
||||||
|
).
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
%% Helpers
|
%% Helpers
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
|
@ -16,7 +16,6 @@
|
||||||
|
|
||||||
-module(emqx_authn_chains_SUITE).
|
-module(emqx_authn_chains_SUITE).
|
||||||
|
|
||||||
-behaviour(hocon_schema).
|
|
||||||
-behaviour(emqx_authn_provider).
|
-behaviour(emqx_authn_provider).
|
||||||
|
|
||||||
-compile(export_all).
|
-compile(export_all).
|
||||||
|
|
|
@ -185,3 +185,29 @@ hash_examples() ->
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
t_pbkdf2_schema(_Config) ->
|
||||||
|
Config = fun(Iterations) ->
|
||||||
|
#{
|
||||||
|
<<"pbkdf2">> => #{
|
||||||
|
<<"name">> => <<"pbkdf2">>,
|
||||||
|
<<"mac_fun">> => <<"sha">>,
|
||||||
|
<<"iterations">> => Iterations
|
||||||
|
}
|
||||||
|
}
|
||||||
|
end,
|
||||||
|
|
||||||
|
?assertException(
|
||||||
|
throw,
|
||||||
|
{emqx_authn_password_hashing, _},
|
||||||
|
hocon_tconf:check_plain(emqx_authn_password_hashing, Config(0), #{}, [pbkdf2])
|
||||||
|
),
|
||||||
|
?assertException(
|
||||||
|
throw,
|
||||||
|
{emqx_authn_password_hashing, _},
|
||||||
|
hocon_tconf:check_plain(emqx_authn_password_hashing, Config(-1), #{}, [pbkdf2])
|
||||||
|
),
|
||||||
|
?assertMatch(
|
||||||
|
#{<<"pbkdf2">> := _},
|
||||||
|
hocon_tconf:check_plain(emqx_authn_password_hashing, Config(1), #{}, [pbkdf2])
|
||||||
|
).
|
||||||
|
|
|
@ -54,7 +54,7 @@ t_check_schema(_Config) ->
|
||||||
?assertThrow(
|
?assertThrow(
|
||||||
#{
|
#{
|
||||||
path := "authentication.1.password_hash_algorithm.name",
|
path := "authentication.1.password_hash_algorithm.name",
|
||||||
matched_type := "builtin_db/authn-hash:simple",
|
matched_type := "authn:builtin_db/authn-hash:simple",
|
||||||
reason := unable_to_convert_to_enum_symbol
|
reason := unable_to_convert_to_enum_symbol
|
||||||
},
|
},
|
||||||
Check(ConfigNotOk)
|
Check(ConfigNotOk)
|
||||||
|
@ -73,7 +73,7 @@ t_check_schema(_Config) ->
|
||||||
#{
|
#{
|
||||||
path := "authentication.1.password_hash_algorithm",
|
path := "authentication.1.password_hash_algorithm",
|
||||||
reason := "algorithm_name_missing",
|
reason := "algorithm_name_missing",
|
||||||
matched_type := "builtin_db"
|
matched_type := "authn:builtin_db"
|
||||||
},
|
},
|
||||||
Check(ConfigMissingAlgoName)
|
Check(ConfigMissingAlgoName)
|
||||||
).
|
).
|
||||||
|
|
|
@ -22,6 +22,7 @@
|
||||||
-define(ERR(Reason), {error, Reason}).
|
-define(ERR(Reason), {error, Reason}).
|
||||||
|
|
||||||
union_member_selector_mongo_test_() ->
|
union_member_selector_mongo_test_() ->
|
||||||
|
ok = ensure_schema_load(),
|
||||||
[
|
[
|
||||||
{"unknown", fun() ->
|
{"unknown", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
|
@ -31,25 +32,26 @@ union_member_selector_mongo_test_() ->
|
||||||
end},
|
end},
|
||||||
{"single", fun() ->
|
{"single", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "mongo_single"}),
|
?ERR(#{matched_type := "authn:mongo_single"}),
|
||||||
check("{mechanism = password_based, backend = mongodb, mongo_type = single}")
|
check("{mechanism = password_based, backend = mongodb, mongo_type = single}")
|
||||||
)
|
)
|
||||||
end},
|
end},
|
||||||
{"replica-set", fun() ->
|
{"replica-set", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "mongo_rs"}),
|
?ERR(#{matched_type := "authn:mongo_rs"}),
|
||||||
check("{mechanism = password_based, backend = mongodb, mongo_type = rs}")
|
check("{mechanism = password_based, backend = mongodb, mongo_type = rs}")
|
||||||
)
|
)
|
||||||
end},
|
end},
|
||||||
{"sharded", fun() ->
|
{"sharded", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "mongo_sharded"}),
|
?ERR(#{matched_type := "authn:mongo_sharded"}),
|
||||||
check("{mechanism = password_based, backend = mongodb, mongo_type = sharded}")
|
check("{mechanism = password_based, backend = mongodb, mongo_type = sharded}")
|
||||||
)
|
)
|
||||||
end}
|
end}
|
||||||
].
|
].
|
||||||
|
|
||||||
union_member_selector_jwt_test_() ->
|
union_member_selector_jwt_test_() ->
|
||||||
|
ok = ensure_schema_load(),
|
||||||
[
|
[
|
||||||
{"unknown", fun() ->
|
{"unknown", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
|
@ -59,25 +61,26 @@ union_member_selector_jwt_test_() ->
|
||||||
end},
|
end},
|
||||||
{"jwks", fun() ->
|
{"jwks", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "jwt_jwks"}),
|
?ERR(#{matched_type := "authn:jwt_jwks"}),
|
||||||
check("{mechanism = jwt, use_jwks = true}")
|
check("{mechanism = jwt, use_jwks = true}")
|
||||||
)
|
)
|
||||||
end},
|
end},
|
||||||
{"publick-key", fun() ->
|
{"publick-key", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "jwt_public_key"}),
|
?ERR(#{matched_type := "authn:jwt_public_key"}),
|
||||||
check("{mechanism = jwt, use_jwks = false, public_key = 1}")
|
check("{mechanism = jwt, use_jwks = false, public_key = 1}")
|
||||||
)
|
)
|
||||||
end},
|
end},
|
||||||
{"hmac-based", fun() ->
|
{"hmac-based", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "jwt_hmac"}),
|
?ERR(#{matched_type := "authn:jwt_hmac"}),
|
||||||
check("{mechanism = jwt, use_jwks = false}")
|
check("{mechanism = jwt, use_jwks = false}")
|
||||||
)
|
)
|
||||||
end}
|
end}
|
||||||
].
|
].
|
||||||
|
|
||||||
union_member_selector_redis_test_() ->
|
union_member_selector_redis_test_() ->
|
||||||
|
ok = ensure_schema_load(),
|
||||||
[
|
[
|
||||||
{"unknown", fun() ->
|
{"unknown", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
|
@ -87,25 +90,26 @@ union_member_selector_redis_test_() ->
|
||||||
end},
|
end},
|
||||||
{"single", fun() ->
|
{"single", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "redis_single"}),
|
?ERR(#{matched_type := "authn:redis_single"}),
|
||||||
check("{mechanism = password_based, backend = redis, redis_type = single}")
|
check("{mechanism = password_based, backend = redis, redis_type = single}")
|
||||||
)
|
)
|
||||||
end},
|
end},
|
||||||
{"cluster", fun() ->
|
{"cluster", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "redis_cluster"}),
|
?ERR(#{matched_type := "authn:redis_cluster"}),
|
||||||
check("{mechanism = password_based, backend = redis, redis_type = cluster}")
|
check("{mechanism = password_based, backend = redis, redis_type = cluster}")
|
||||||
)
|
)
|
||||||
end},
|
end},
|
||||||
{"sentinel", fun() ->
|
{"sentinel", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "redis_sentinel"}),
|
?ERR(#{matched_type := "authn:redis_sentinel"}),
|
||||||
check("{mechanism = password_based, backend = redis, redis_type = sentinel}")
|
check("{mechanism = password_based, backend = redis, redis_type = sentinel}")
|
||||||
)
|
)
|
||||||
end}
|
end}
|
||||||
].
|
].
|
||||||
|
|
||||||
union_member_selector_http_test_() ->
|
union_member_selector_http_test_() ->
|
||||||
|
ok = ensure_schema_load(),
|
||||||
[
|
[
|
||||||
{"unknown", fun() ->
|
{"unknown", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
|
@ -115,13 +119,13 @@ union_member_selector_http_test_() ->
|
||||||
end},
|
end},
|
||||||
{"get", fun() ->
|
{"get", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "http_get"}),
|
?ERR(#{matched_type := "authn:http_get"}),
|
||||||
check("{mechanism = password_based, backend = http, method = get}")
|
check("{mechanism = password_based, backend = http, method = get}")
|
||||||
)
|
)
|
||||||
end},
|
end},
|
||||||
{"post", fun() ->
|
{"post", fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
?ERR(#{matched_type := "http_post"}),
|
?ERR(#{matched_type := "authn:http_post"}),
|
||||||
check("{mechanism = password_based, backend = http, method = post}")
|
check("{mechanism = password_based, backend = http, method = post}")
|
||||||
)
|
)
|
||||||
end}
|
end}
|
||||||
|
@ -132,3 +136,7 @@ check(HoconConf) ->
|
||||||
#{roots => emqx_authn_schema:global_auth_fields()},
|
#{roots => emqx_authn_schema:global_auth_fields()},
|
||||||
["authentication= ", HoconConf]
|
["authentication= ", HoconConf]
|
||||||
).
|
).
|
||||||
|
|
||||||
|
ensure_schema_load() ->
|
||||||
|
_ = emqx_conf_schema:roots(),
|
||||||
|
ok.
|
||||||
|
|
|
@ -70,6 +70,7 @@ init_per_testcase(TestCase, Config) when
|
||||||
{ok, _} = emqx:update_config([authorization, deny_action], disconnect),
|
{ok, _} = emqx:update_config([authorization, deny_action], disconnect),
|
||||||
Config;
|
Config;
|
||||||
init_per_testcase(_TestCase, Config) ->
|
init_per_testcase(_TestCase, Config) ->
|
||||||
|
_ = file:delete(emqx_authz_file:acl_conf_file()),
|
||||||
{ok, _} = emqx_authz:update(?CMD_REPLACE, []),
|
{ok, _} = emqx_authz:update(?CMD_REPLACE, []),
|
||||||
Config.
|
Config.
|
||||||
|
|
||||||
|
|
|
@ -67,6 +67,10 @@ set_special_configs(_App) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_compile(_) ->
|
t_compile(_) ->
|
||||||
|
% NOTE
|
||||||
|
% Some of the following testcase are relying on the internal representation of
|
||||||
|
% `emqx_template:t()`. If the internal representation is changed, these testcases
|
||||||
|
% may fail.
|
||||||
?assertEqual({deny, all, all, [['#']]}, emqx_authz_rule:compile({deny, all})),
|
?assertEqual({deny, all, all, [['#']]}, emqx_authz_rule:compile({deny, all})),
|
||||||
|
|
||||||
?assertEqual(
|
?assertEqual(
|
||||||
|
@ -74,13 +78,13 @@ t_compile(_) ->
|
||||||
emqx_authz_rule:compile({allow, {ipaddr, "127.0.0.1"}, all, [{eq, "#"}, {eq, "+"}]})
|
emqx_authz_rule:compile({allow, {ipaddr, "127.0.0.1"}, all, [{eq, "#"}, {eq, "+"}]})
|
||||||
),
|
),
|
||||||
|
|
||||||
?assertEqual(
|
?assertMatch(
|
||||||
{allow,
|
{allow,
|
||||||
{ipaddrs, [
|
{ipaddrs, [
|
||||||
{{127, 0, 0, 1}, {127, 0, 0, 1}, 32},
|
{{127, 0, 0, 1}, {127, 0, 0, 1}, 32},
|
||||||
{{192, 168, 1, 0}, {192, 168, 1, 255}, 24}
|
{{192, 168, 1, 0}, {192, 168, 1, 255}, 24}
|
||||||
]},
|
]},
|
||||||
subscribe, [{pattern, [{var, [<<"clientid">>]}]}]},
|
subscribe, [{pattern, [{var, "clientid", [_]}]}]},
|
||||||
emqx_authz_rule:compile(
|
emqx_authz_rule:compile(
|
||||||
{allow, {ipaddrs, ["127.0.0.1", "192.168.1.0/24"]}, subscribe, [?PH_S_CLIENTID]}
|
{allow, {ipaddrs, ["127.0.0.1", "192.168.1.0/24"]}, subscribe, [?PH_S_CLIENTID]}
|
||||||
)
|
)
|
||||||
|
@ -102,7 +106,7 @@ t_compile(_) ->
|
||||||
{clientid, {re_pattern, _, _, _, _}}
|
{clientid, {re_pattern, _, _, _, _}}
|
||||||
]},
|
]},
|
||||||
publish, [
|
publish, [
|
||||||
{pattern, [{var, [<<"username">>]}]}, {pattern, [{var, [<<"clientid">>]}]}
|
{pattern, [{var, "username", [_]}]}, {pattern, [{var, "clientid", [_]}]}
|
||||||
]},
|
]},
|
||||||
emqx_authz_rule:compile(
|
emqx_authz_rule:compile(
|
||||||
{allow,
|
{allow,
|
||||||
|
@ -114,9 +118,9 @@ t_compile(_) ->
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
|
|
||||||
?assertEqual(
|
?assertMatch(
|
||||||
{allow, {username, {eq, <<"test">>}}, publish, [
|
{allow, {username, {eq, <<"test">>}}, publish, [
|
||||||
{pattern, [{str, <<"t/foo">>}, {var, [<<"username">>]}, {str, <<"boo">>}]}
|
{pattern, [<<"t/foo">>, {var, "username", [_]}, <<"boo">>]}
|
||||||
]},
|
]},
|
||||||
emqx_authz_rule:compile({allow, {username, "test"}, publish, ["t/foo${username}boo"]})
|
emqx_authz_rule:compile({allow, {username, "test"}, publish, ["t/foo${username}boo"]})
|
||||||
),
|
),
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_auth_http, [
|
{application, emqx_auth_http, [
|
||||||
{description, "EMQX External HTTP API Authentication and Authorization"},
|
{description, "EMQX External HTTP API Authentication and Authorization"},
|
||||||
{vsn, "0.1.0"},
|
{vsn, "0.1.1"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_auth_http_app, []}},
|
{mod, {emqx_auth_http_app, []}},
|
||||||
{applications, [
|
{applications, [
|
||||||
|
|
|
@ -16,10 +16,6 @@
|
||||||
|
|
||||||
-module(emqx_authn_http_schema).
|
-module(emqx_authn_http_schema).
|
||||||
|
|
||||||
-include("emqx_auth_http.hrl").
|
|
||||||
-include_lib("emqx_auth/include/emqx_authn.hrl").
|
|
||||||
-include_lib("hocon/include/hoconsc.hrl").
|
|
||||||
|
|
||||||
-behaviour(emqx_authn_schema).
|
-behaviour(emqx_authn_schema).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -27,9 +23,14 @@
|
||||||
validations/0,
|
validations/0,
|
||||||
desc/1,
|
desc/1,
|
||||||
refs/0,
|
refs/0,
|
||||||
select_union_member/1
|
select_union_member/1,
|
||||||
|
namespace/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
-include("emqx_auth_http.hrl").
|
||||||
|
-include_lib("emqx_auth/include/emqx_authn.hrl").
|
||||||
|
-include_lib("hocon/include/hoconsc.hrl").
|
||||||
|
|
||||||
-define(NOT_EMPTY(MSG), emqx_resource_validator:not_empty(MSG)).
|
-define(NOT_EMPTY(MSG), emqx_resource_validator:not_empty(MSG)).
|
||||||
-define(THROW_VALIDATION_ERROR(ERROR, MESSAGE),
|
-define(THROW_VALIDATION_ERROR(ERROR, MESSAGE),
|
||||||
throw(#{
|
throw(#{
|
||||||
|
@ -38,6 +39,8 @@
|
||||||
})
|
})
|
||||||
).
|
).
|
||||||
|
|
||||||
|
namespace() -> "authn".
|
||||||
|
|
||||||
refs() ->
|
refs() ->
|
||||||
[?R_REF(http_get), ?R_REF(http_post)].
|
[?R_REF(http_get), ?R_REF(http_post)].
|
||||||
|
|
||||||
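Not part of the diff: the practical effect of the new namespace/0 callback is visible in the updated union-selector test expectations earlier in this diff; the namespace now prefixes the matched_type reported on schema errors.

    %% before: #{matched_type := "http_get"}
    %% after:  #{matched_type := "authn:http_get"}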
|
@ -97,7 +100,7 @@ common_fields() ->
|
||||||
{backend, emqx_authn_schema:backend(?AUTHN_BACKEND)},
|
{backend, emqx_authn_schema:backend(?AUTHN_BACKEND)},
|
||||||
{url, fun url/1},
|
{url, fun url/1},
|
||||||
{body,
|
{body,
|
||||||
hoconsc:mk(map([{fuzzy, term(), binary()}]), #{
|
hoconsc:mk(typerefl:alias("map", map([{fuzzy, term(), binary()}])), #{
|
||||||
required => false, desc => ?DESC(body)
|
required => false, desc => ?DESC(body)
|
||||||
})},
|
})},
|
||||||
{request_timeout, fun request_timeout/1}
|
{request_timeout, fun request_timeout/1}
|
||||||
|
|
|
@ -38,21 +38,21 @@
|
||||||
-compile(nowarn_export_all).
|
-compile(nowarn_export_all).
|
||||||
-endif.
|
-endif.
|
||||||
|
|
||||||
-define(PLACEHOLDERS, [
|
-define(ALLOWED_VARS, [
|
||||||
?PH_USERNAME,
|
?VAR_USERNAME,
|
||||||
?PH_CLIENTID,
|
?VAR_CLIENTID,
|
||||||
?PH_PEERHOST,
|
?VAR_PEERHOST,
|
||||||
?PH_PROTONAME,
|
?VAR_PROTONAME,
|
||||||
?PH_MOUNTPOINT,
|
?VAR_MOUNTPOINT,
|
||||||
?PH_TOPIC,
|
?VAR_TOPIC,
|
||||||
?PH_ACTION,
|
?VAR_ACTION,
|
||||||
?PH_CERT_SUBJECT,
|
?VAR_CERT_SUBJECT,
|
||||||
?PH_CERT_CN_NAME
|
?VAR_CERT_CN_NAME
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-define(PLACEHOLDERS_FOR_RICH_ACTIONS, [
|
-define(ALLOWED_VARS_RICH_ACTIONS, [
|
||||||
?PH_QOS,
|
?VAR_QOS,
|
||||||
?PH_RETAIN
|
?VAR_RETAIN
|
||||||
]).
|
]).
|
||||||
|
|
||||||
description() ->
|
description() ->
|
||||||
|
@ -157,14 +157,14 @@ parse_config(
|
||||||
method => Method,
|
method => Method,
|
||||||
base_url => BaseUrl,
|
base_url => BaseUrl,
|
||||||
headers => Headers,
|
headers => Headers,
|
||||||
base_path_templete => emqx_authz_utils:parse_str(Path, placeholders()),
|
base_path_templete => emqx_authz_utils:parse_str(Path, allowed_vars()),
|
||||||
base_query_template => emqx_authz_utils:parse_deep(
|
base_query_template => emqx_authz_utils:parse_deep(
|
||||||
cow_qs:parse_qs(to_bin(Query)),
|
cow_qs:parse_qs(to_bin(Query)),
|
||||||
placeholders()
|
allowed_vars()
|
||||||
),
|
),
|
||||||
body_template => emqx_authz_utils:parse_deep(
|
body_template => emqx_authz_utils:parse_deep(
|
||||||
maps:to_list(maps:get(body, Conf, #{})),
|
maps:to_list(maps:get(body, Conf, #{})),
|
||||||
placeholders()
|
allowed_vars()
|
||||||
),
|
),
|
||||||
request_timeout => ReqTimeout,
|
request_timeout => ReqTimeout,
|
||||||
%% pool_type default value `random`
|
%% pool_type default value `random`
|
||||||
|
@ -260,10 +260,10 @@ to_bin(B) when is_binary(B) -> B;
|
||||||
to_bin(L) when is_list(L) -> list_to_binary(L);
|
to_bin(L) when is_list(L) -> list_to_binary(L);
|
||||||
to_bin(X) -> X.
|
to_bin(X) -> X.
|
||||||
|
|
||||||
placeholders() ->
|
allowed_vars() ->
|
||||||
placeholders(emqx_authz:feature_available(rich_actions)).
|
allowed_vars(emqx_authz:feature_available(rich_actions)).
|
||||||
|
|
||||||
placeholders(true) ->
|
allowed_vars(true) ->
|
||||||
?PLACEHOLDERS ++ ?PLACEHOLDERS_FOR_RICH_ACTIONS;
|
?ALLOWED_VARS ++ ?ALLOWED_VARS_RICH_ACTIONS;
|
||||||
placeholders(false) ->
|
allowed_vars(false) ->
|
||||||
?PLACEHOLDERS.
|
?ALLOWED_VARS.
|
||||||
|
|
|
@ -26,7 +26,8 @@
|
||||||
fields/1,
|
fields/1,
|
||||||
desc/1,
|
desc/1,
|
||||||
source_refs/0,
|
source_refs/0,
|
||||||
select_union_member/1
|
select_union_member/1,
|
||||||
|
namespace/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -38,6 +39,8 @@
|
||||||
|
|
||||||
-import(emqx_schema, [mk_duration/2]).
|
-import(emqx_schema, [mk_duration/2]).
|
||||||
|
|
||||||
|
namespace() -> "authz".
|
||||||
|
|
||||||
type() -> ?AUTHZ_TYPE.
|
type() -> ?AUTHZ_TYPE.
|
||||||
|
|
||||||
source_refs() ->
|
source_refs() ->
|
||||||
|
@ -96,7 +99,7 @@ http_common_fields() ->
|
||||||
mk_duration("Request timeout", #{
|
mk_duration("Request timeout", #{
|
||||||
required => false, default => <<"30s">>, desc => ?DESC(request_timeout)
|
required => false, default => <<"30s">>, desc => ?DESC(request_timeout)
|
||||||
})},
|
})},
|
||||||
{body, ?HOCON(map(), #{required => false, desc => ?DESC(body)})}
|
{body, ?HOCON(hoconsc:map(name, binary()), #{required => false, desc => ?DESC(body)})}
|
||||||
] ++
|
] ++
|
||||||
lists:keydelete(
|
lists:keydelete(
|
||||||
pool_type,
|
pool_type,
|
||||||
|
@ -105,7 +108,7 @@ http_common_fields() ->
|
||||||
).
|
).
|
||||||
|
|
||||||
headers(type) ->
|
headers(type) ->
|
||||||
list({binary(), binary()});
|
typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]);
|
||||||
headers(desc) ->
|
headers(desc) ->
|
||||||
?DESC(?FUNCTION_NAME);
|
?DESC(?FUNCTION_NAME);
|
||||||
headers(converter) ->
|
headers(converter) ->
|
||||||
|
@ -118,7 +121,7 @@ headers(_) ->
|
||||||
undefined.
|
undefined.
|
||||||
|
|
||||||
headers_no_content_type(type) ->
|
headers_no_content_type(type) ->
|
||||||
list({binary(), binary()});
|
typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]);
|
||||||
headers_no_content_type(desc) ->
|
headers_no_content_type(desc) ->
|
||||||
?DESC(?FUNCTION_NAME);
|
?DESC(?FUNCTION_NAME);
|
||||||
headers_no_content_type(converter) ->
|
headers_no_content_type(converter) ->
|
||||||
|
|
|
@ -27,7 +27,7 @@
|
||||||
-define(PATH, [?CONF_NS_ATOM]).
|
-define(PATH, [?CONF_NS_ATOM]).
|
||||||
|
|
||||||
-define(HTTP_PORT, 32333).
|
-define(HTTP_PORT, 32333).
|
||||||
-define(HTTP_PATH, "/auth").
|
-define(HTTP_PATH, "/auth/[...]").
|
||||||
-define(CREDENTIALS, #{
|
-define(CREDENTIALS, #{
|
||||||
clientid => <<"clienta">>,
|
clientid => <<"clienta">>,
|
||||||
username => <<"plain">>,
|
username => <<"plain">>,
|
||||||
|
@ -146,8 +146,12 @@ t_authenticate(_Config) ->
|
||||||
test_user_auth(#{
|
test_user_auth(#{
|
||||||
handler := Handler,
|
handler := Handler,
|
||||||
config_params := SpecificConfgParams,
|
config_params := SpecificConfgParams,
|
||||||
result := Result
|
result := Expect
|
||||||
}) ->
|
}) ->
|
||||||
|
Result = perform_user_auth(SpecificConfgParams, Handler, ?CREDENTIALS),
|
||||||
|
?assertEqual(Expect, Result).
|
||||||
|
|
||||||
|
perform_user_auth(SpecificConfgParams, Handler, Credentials) ->
|
||||||
AuthConfig = maps:merge(raw_http_auth_config(), SpecificConfgParams),
|
AuthConfig = maps:merge(raw_http_auth_config(), SpecificConfgParams),
|
||||||
|
|
||||||
{ok, _} = emqx:update_config(
|
{ok, _} = emqx:update_config(
|
||||||
|
@ -157,21 +161,21 @@ test_user_auth(#{
|
||||||
|
|
||||||
ok = emqx_authn_http_test_server:set_handler(Handler),
|
ok = emqx_authn_http_test_server:set_handler(Handler),
|
||||||
|
|
||||||
?assertEqual(Result, emqx_access_control:authenticate(?CREDENTIALS)),
|
Result = emqx_access_control:authenticate(Credentials),
|
||||||
|
|
||||||
emqx_authn_test_lib:delete_authenticators(
|
emqx_authn_test_lib:delete_authenticators(
|
||||||
[authentication],
|
[authentication],
|
||||||
?GLOBAL
|
?GLOBAL
|
||||||
).
|
),
|
||||||
|
|
||||||
|
Result.
|
||||||
|
|
||||||
t_authenticate_path_placeholders(_Config) ->
|
t_authenticate_path_placeholders(_Config) ->
|
||||||
ok = emqx_authn_http_test_server:stop(),
|
|
||||||
{ok, _} = emqx_authn_http_test_server:start_link(?HTTP_PORT, <<"/[...]">>),
|
|
||||||
ok = emqx_authn_http_test_server:set_handler(
|
ok = emqx_authn_http_test_server:set_handler(
|
||||||
fun(Req0, State) ->
|
fun(Req0, State) ->
|
||||||
Req =
|
Req =
|
||||||
case cowboy_req:path(Req0) of
|
case cowboy_req:path(Req0) of
|
||||||
<<"/my/p%20ath//us%20er/auth//">> ->
|
<<"/auth/p%20ath//us%20er/auth//">> ->
|
||||||
cowboy_req:reply(
|
cowboy_req:reply(
|
||||||
200,
|
200,
|
||||||
#{<<"content-type">> => <<"application/json">>},
|
#{<<"content-type">> => <<"application/json">>},
|
||||||
|
@ -193,7 +197,7 @@ t_authenticate_path_placeholders(_Config) ->
|
||||||
AuthConfig = maps:merge(
|
AuthConfig = maps:merge(
|
||||||
raw_http_auth_config(),
|
raw_http_auth_config(),
|
||||||
#{
|
#{
|
||||||
<<"url">> => <<"http://127.0.0.1:32333/my/p%20ath//${username}/auth//">>,
|
<<"url">> => <<"http://127.0.0.1:32333/auth/p%20ath//${username}/auth//">>,
|
||||||
<<"body">> => #{}
|
<<"body">> => #{}
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
|
@ -255,6 +259,39 @@ t_no_value_for_placeholder(_Config) ->
|
||||||
?GLOBAL
|
?GLOBAL
|
||||||
).
|
).
|
||||||
|
|
||||||
|
t_disallowed_placeholders_preserved(_Config) ->
|
||||||
|
Config = #{
|
||||||
|
<<"method">> => <<"post">>,
|
||||||
|
<<"headers">> => #{<<"content-type">> => <<"application/json">>},
|
||||||
|
<<"body">> => #{
|
||||||
|
<<"username">> => ?PH_USERNAME,
|
||||||
|
<<"password">> => ?PH_PASSWORD,
|
||||||
|
<<"this">> => <<"${whatisthis}">>
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Handler = fun(Req0, State) ->
|
||||||
|
{ok, Body, Req1} = cowboy_req:read_body(Req0),
|
||||||
|
#{
|
||||||
|
<<"username">> := <<"plain">>,
|
||||||
|
<<"password">> := <<"plain">>,
|
||||||
|
<<"this">> := <<"${whatisthis}">>
|
||||||
|
} = emqx_utils_json:decode(Body),
|
||||||
|
Req = cowboy_req:reply(
|
||||||
|
200,
|
||||||
|
#{<<"content-type">> => <<"application/json">>},
|
||||||
|
emqx_utils_json:encode(#{result => allow, is_superuser => false}),
|
||||||
|
Req1
|
||||||
|
),
|
||||||
|
{ok, Req, State}
|
||||||
|
end,
|
||||||
|
?assertMatch({ok, _}, perform_user_auth(Config, Handler, ?CREDENTIALS)),
|
||||||
|
|
||||||
|
% NOTE: disallowed placeholder left intact, which makes the URL invalid
|
||||||
|
ConfigUrl = Config#{
|
||||||
|
<<"url">> => <<"http://127.0.0.1:32333/auth/${whatisthis}">>
|
||||||
|
},
|
||||||
|
?assertMatch({error, _}, perform_user_auth(ConfigUrl, Handler, ?CREDENTIALS)).
|
||||||
|
|
||||||
t_destroy(_Config) ->
|
t_destroy(_Config) ->
|
||||||
AuthConfig = raw_http_auth_config(),
|
AuthConfig = raw_http_auth_config(),
|
||||||
|
|
||||||
|
|
|
@ -494,6 +494,67 @@ t_no_value_for_placeholder(_Config) ->
|
||||||
emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>)
|
emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>)
|
||||||
).
|
).
|
||||||
|
|
||||||
|
t_disallowed_placeholders_preserved(_Config) ->
|
||||||
|
ok = setup_handler_and_config(
|
||||||
|
fun(Req0, State) ->
|
||||||
|
{ok, Body, Req1} = cowboy_req:read_body(Req0),
|
||||||
|
?assertMatch(
|
||||||
|
#{
|
||||||
|
<<"cname">> := <<>>,
|
||||||
|
<<"usertypo">> := <<"${usertypo}">>
|
||||||
|
},
|
||||||
|
emqx_utils_json:decode(Body)
|
||||||
|
),
|
||||||
|
{ok, ?AUTHZ_HTTP_RESP(allow, Req1), State}
|
||||||
|
end,
|
||||||
|
#{
|
||||||
|
<<"method">> => <<"post">>,
|
||||||
|
<<"body">> => #{
|
||||||
|
<<"cname">> => ?PH_CERT_CN_NAME,
|
||||||
|
<<"usertypo">> => <<"${usertypo}">>
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
|
||||||
|
ClientInfo = #{
|
||||||
|
clientid => <<"client id">>,
|
||||||
|
username => <<"user name">>,
|
||||||
|
peerhost => {127, 0, 0, 1},
|
||||||
|
protocol => <<"MQTT">>,
|
||||||
|
zone => default,
|
||||||
|
listener => {tcp, default}
|
||||||
|
},
|
||||||
|
|
||||||
|
?assertEqual(
|
||||||
|
allow,
|
||||||
|
emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>)
|
||||||
|
).
|
||||||
|
|
||||||
|
t_disallowed_placeholders_path(_Config) ->
|
||||||
|
ok = setup_handler_and_config(
|
||||||
|
fun(Req, State) ->
|
||||||
|
{ok, ?AUTHZ_HTTP_RESP(allow, Req), State}
|
||||||
|
end,
|
||||||
|
#{
|
||||||
|
<<"url">> => <<"http://127.0.0.1:33333/authz/use%20rs/${typo}">>
|
||||||
|
}
|
||||||
|
),
|
||||||
|
|
||||||
|
ClientInfo = #{
|
||||||
|
clientid => <<"client id">>,
|
||||||
|
username => <<"user name">>,
|
||||||
|
peerhost => {127, 0, 0, 1},
|
||||||
|
protocol => <<"MQTT">>,
|
||||||
|
zone => default,
|
||||||
|
listener => {tcp, default}
|
||||||
|
},
|
||||||
|
|
||||||
|
% NOTE: disallowed placeholder left intact, which makes the URL invalid
|
||||||
|
?assertEqual(
|
||||||
|
deny,
|
||||||
|
emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>)
|
||||||
|
).
|
||||||
|
|
||||||
t_create_replace(_Config) ->
|
t_create_replace(_Config) ->
|
||||||
ClientInfo = #{
|
ClientInfo = #{
|
||||||
clientid => <<"clientid">>,
|
clientid => <<"clientid">>,
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_auth_jwt, [
|
{application, emqx_auth_jwt, [
|
||||||
{description, "EMQX JWT Authentication and Authorization"},
|
{description, "EMQX JWT Authentication and Authorization"},
|
||||||
{vsn, "0.1.0"},
|
{vsn, "0.1.1"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_auth_jwt_app, []}},
|
{mod, {emqx_auth_jwt_app, []}},
|
||||||
{applications, [
|
{applications, [
|
||||||
|
|
|
@ -16,18 +16,21 @@
|
||||||
|
|
||||||
-module(emqx_authn_jwt_schema).
|
-module(emqx_authn_jwt_schema).
|
||||||
|
|
||||||
-include("emqx_auth_jwt.hrl").
|
|
||||||
-include_lib("hocon/include/hoconsc.hrl").
|
|
||||||
|
|
||||||
-behaviour(emqx_authn_schema).
|
-behaviour(emqx_authn_schema).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
namespace/0,
|
||||||
fields/1,
|
fields/1,
|
||||||
desc/1,
|
desc/1,
|
||||||
refs/0,
|
refs/0,
|
||||||
select_union_member/1
|
select_union_member/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
-include("emqx_auth_jwt.hrl").
|
||||||
|
-include_lib("hocon/include/hoconsc.hrl").
|
||||||
|
|
||||||
|
namespace() -> "authn".
|
||||||
|
|
||||||
refs() ->
|
refs() ->
|
||||||
[
|
[
|
||||||
?R_REF(jwt_hmac),
|
?R_REF(jwt_hmac),
|
||||||
|
@ -149,7 +152,8 @@ refresh_interval(validator) -> [fun(I) -> I > 0 end];
|
||||||
refresh_interval(_) -> undefined.
|
refresh_interval(_) -> undefined.
|
||||||
|
|
||||||
verify_claims(type) ->
|
verify_claims(type) ->
|
||||||
list();
|
%% user input is a map, converted to a list of {binary(), binary()}
|
||||||
|
typerefl:alias("map", list());
|
||||||
verify_claims(desc) ->
|
verify_claims(desc) ->
|
||||||
?DESC(?FUNCTION_NAME);
|
?DESC(?FUNCTION_NAME);
|
||||||
verify_claims(default) ->
|
verify_claims(default) ->
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_auth_ldap, [
|
{application, emqx_auth_ldap, [
|
||||||
{description, "EMQX LDAP Authentication and Authorization"},
|
{description, "EMQX LDAP Authentication and Authorization"},
|
||||||
{vsn, "0.1.0"},
|
{vsn, "0.1.1"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_auth_ldap_app, []}},
|
{mod, {emqx_auth_ldap_app, []}},
|
||||||
{applications, [
|
{applications, [
|
||||||
|
|
|
@ -1,5 +1,17 @@
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
-module(emqx_authn_ldap).
|
-module(emqx_authn_ldap).
|
||||||
|
|
|
@ -1,5 +1,17 @@
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
-module(emqx_authn_ldap_bind).
|
-module(emqx_authn_ldap_bind).
|
||||||
|
|
|
@ -16,18 +16,21 @@
|
||||||
|
|
||||||
-module(emqx_authn_ldap_bind_schema).
|
-module(emqx_authn_ldap_bind_schema).
|
||||||
|
|
||||||
-include("emqx_auth_ldap.hrl").
|
|
||||||
-include_lib("hocon/include/hoconsc.hrl").
|
|
||||||
|
|
||||||
-behaviour(emqx_authn_schema).
|
-behaviour(emqx_authn_schema).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
fields/1,
|
fields/1,
|
||||||
desc/1,
|
desc/1,
|
||||||
refs/0,
|
refs/0,
|
||||||
select_union_member/1
|
select_union_member/1,
|
||||||
|
namespace/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
-include("emqx_auth_ldap.hrl").
|
||||||
|
-include_lib("hocon/include/hoconsc.hrl").
|
||||||
|
|
||||||
|
namespace() -> "authn".
|
||||||
|
|
||||||
refs() ->
|
refs() ->
|
||||||
[?R_REF(ldap_bind)].
|
[?R_REF(ldap_bind)].
|
||||||
|
|
||||||
|
|
|
@ -16,18 +16,21 @@
|
||||||
|
|
||||||
-module(emqx_authn_ldap_schema).
|
-module(emqx_authn_ldap_schema).
|
||||||
|
|
||||||
-include("emqx_auth_ldap.hrl").
|
|
||||||
-include_lib("hocon/include/hoconsc.hrl").
|
|
||||||
|
|
||||||
-behaviour(emqx_authn_schema).
|
-behaviour(emqx_authn_schema).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
namespace/0,
|
||||||
fields/1,
|
fields/1,
|
||||||
desc/1,
|
desc/1,
|
||||||
refs/0,
|
refs/0,
|
||||||
select_union_member/1
|
select_union_member/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
-include("emqx_auth_ldap.hrl").
|
||||||
|
-include_lib("hocon/include/hoconsc.hrl").
|
||||||
|
|
||||||
|
namespace() -> "authn".
|
||||||
|
|
||||||
refs() ->
|
refs() ->
|
||||||
[?R_REF(ldap)].
|
[?R_REF(ldap)].
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
%%
|
%%
|
||||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
%% you may not use this file except in compliance with the License.
|
%% you may not use this file except in compliance with the License.
|
||||||
|
@ -13,6 +13,18 @@
|
||||||
%% See the License for the specific language governing permissions and
|
%% See the License for the specific language governing permissions and
|
||||||
%% limitations under the License.
|
%% limitations under the License.
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
-module(emqx_authz_ldap).
|
-module(emqx_authz_ldap).
|
||||||
|
|
||||||
|
|
|
@ -26,9 +26,12 @@
|
||||||
fields/1,
|
fields/1,
|
||||||
desc/1,
|
desc/1,
|
||||||
source_refs/0,
|
source_refs/0,
|
||||||
select_union_member/1
|
select_union_member/1,
|
||||||
|
namespace/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
namespace() -> "authz".
|
||||||
|
|
||||||
type() -> ?AUTHZ_TYPE.
|
type() -> ?AUTHZ_TYPE.
|
||||||
|
|
||||||
fields(ldap) ->
|
fields(ldap) ->
|
||||||
|
|
|
@ -1,5 +1,17 @@
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
-module(emqx_authn_ldap_SUITE).
|
-module(emqx_authn_ldap_SUITE).
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,17 @@
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
-module(emqx_authn_ldap_bind_SUITE).
|
-module(emqx_authn_ldap_bind_SUITE).
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,17 @@
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
-module(emqx_authz_ldap_SUITE).
|
-module(emqx_authz_ldap_SUITE).
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_auth_mnesia, [
|
{application, emqx_auth_mnesia, [
|
||||||
{description, "EMQX Buitl-in Database Authentication and Authorization"},
|
{description, "EMQX Buitl-in Database Authentication and Authorization"},
|
||||||
{vsn, "0.1.0"},
|
{vsn, "0.1.1"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_auth_mnesia_app, []}},
|
{mod, {emqx_auth_mnesia_app, []}},
|
||||||
{applications, [
|
{applications, [
|
||||||
|
|
|
@ -50,7 +50,7 @@
|
||||||
%% Internal exports (RPC)
|
%% Internal exports (RPC)
|
||||||
-export([
|
-export([
|
||||||
do_destroy/1,
|
do_destroy/1,
|
||||||
do_add_user/2,
|
do_add_user/1,
|
||||||
do_delete_user/2,
|
do_delete_user/2,
|
||||||
do_update_user/3,
|
do_update_user/3,
|
||||||
import/2,
|
import/2,
|
||||||
|
@ -187,24 +187,22 @@ import_users({Filename0, FileData}, State) ->
|
||||||
{error, {unsupported_file_format, Extension}}
|
{error, {unsupported_file_format, Extension}}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
add_user(UserInfo, State) ->
|
add_user(
|
||||||
trans(fun ?MODULE:do_add_user/2, [UserInfo, State]).
|
UserInfo,
|
||||||
|
State
|
||||||
|
) ->
|
||||||
|
UserInfoRecord = user_info_record(UserInfo, State),
|
||||||
|
trans(fun ?MODULE:do_add_user/1, [UserInfoRecord]).
|
||||||
|
|
||||||
do_add_user(
|
do_add_user(
|
||||||
#{
|
#user_info{
|
||||||
user_id := UserID,
|
user_id = {_UserGroup, UserID} = DBUserID,
|
||||||
password := Password
|
is_superuser = IsSuperuser
|
||||||
} = UserInfo,
|
} = UserInfoRecord
|
||||||
#{
|
|
||||||
user_group := UserGroup,
|
|
||||||
password_hash_algorithm := Algorithm
|
|
||||||
}
|
|
||||||
) ->
|
) ->
|
||||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
case mnesia:read(?TAB, DBUserID, write) of
|
||||||
[] ->
|
[] ->
|
||||||
-            {PasswordHash, Salt} = emqx_authn_password_hashing:hash(Algorithm, Password),
-            IsSuperuser = maps:get(is_superuser, UserInfo, false),
-            insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
+            insert_user(UserInfoRecord),
             {ok, #{user_id => UserID, is_superuser => IsSuperuser}};
         [_] ->
             {error, already_exist}
@@ -222,38 +220,30 @@ do_delete_user(UserID, #{user_group := UserGroup}) ->
     end.
 
 update_user(UserID, UserInfo, State) ->
-    trans(fun ?MODULE:do_update_user/3, [UserID, UserInfo, State]).
+    FieldsToUpdate = fields_to_update(
+        UserInfo,
+        [
+            hash_and_salt,
+            is_superuser
+        ],
+        State
+    ),
+    trans(fun ?MODULE:do_update_user/3, [UserID, FieldsToUpdate, State]).
 
 do_update_user(
     UserID,
-    UserInfo,
+    FieldsToUpdate,
     #{
-        user_group := UserGroup,
-        password_hash_algorithm := Algorithm
+        user_group := UserGroup
     }
 ) ->
     case mnesia:read(?TAB, {UserGroup, UserID}, write) of
         [] ->
             {error, not_found};
-        [
-            #user_info{
-                password_hash = PasswordHash,
-                salt = Salt,
-                is_superuser = IsSuperuser
-            }
-        ] ->
-            NSuperuser = maps:get(is_superuser, UserInfo, IsSuperuser),
-            {NPasswordHash, NSalt} =
-                case UserInfo of
-                    #{password := Password} ->
-                        emqx_authn_password_hashing:hash(
-                            Algorithm, Password
-                        );
-                    #{} ->
-                        {PasswordHash, Salt}
-                end,
-            insert_user(UserGroup, UserID, NPasswordHash, NSalt, NSuperuser),
-            {ok, #{user_id => UserID, is_superuser => NSuperuser}}
+        [#user_info{} = UserInfoRecord] ->
+            NUserInfoRecord = update_user_record(UserInfoRecord, FieldsToUpdate),
+            insert_user(NUserInfoRecord),
+            {ok, #{user_id => UserID, is_superuser => NUserInfoRecord#user_info.is_superuser}}
     end.
 
 lookup_user(UserID, #{user_group := UserGroup}) ->
@@ -391,13 +381,59 @@ get_user_info_by_seq(_, _, _) ->
     {error, bad_format}.
 
 insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) ->
-    UserInfo = #user_info{
+    UserInfoRecord = user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
+    insert_user(UserInfoRecord).
+
+insert_user(#user_info{} = UserInfoRecord) ->
+    mnesia:write(?TAB, UserInfoRecord, write).
+
+user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) ->
+    #user_info{
         user_id = {UserGroup, UserID},
         password_hash = PasswordHash,
         salt = Salt,
         is_superuser = IsSuperuser
-    },
-    mnesia:write(?TAB, UserInfo, write).
+    }.
+
+user_info_record(
+    #{
+        user_id := UserID,
+        password := Password
+    } = UserInfo,
+    #{
+        password_hash_algorithm := Algorithm,
+        user_group := UserGroup
+    } = _State
+) ->
+    IsSuperuser = maps:get(is_superuser, UserInfo, false),
+    {PasswordHash, Salt} = emqx_authn_password_hashing:hash(Algorithm, Password),
+    user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser).
+
+fields_to_update(
+    #{password := Password} = UserInfo,
+    [hash_and_salt | Rest],
+    #{password_hash_algorithm := Algorithm} = State
+) ->
+    [
+        {hash_and_salt,
+            emqx_authn_password_hashing:hash(
+                Algorithm, Password
+            )}
+        | fields_to_update(UserInfo, Rest, State)
+    ];
+fields_to_update(#{is_superuser := IsSuperuser} = UserInfo, [is_superuser | Rest], State) ->
+    [{is_superuser, IsSuperuser} | fields_to_update(UserInfo, Rest, State)];
+fields_to_update(UserInfo, [_ | Rest], State) ->
+    fields_to_update(UserInfo, Rest, State);
+fields_to_update(_UserInfo, [], _State) ->
+    [].
+
+update_user_record(UserInfoRecord, []) ->
+    UserInfoRecord;
+update_user_record(UserInfoRecord, [{hash_and_salt, {PasswordHash, Salt}} | Rest]) ->
+    update_user_record(UserInfoRecord#user_info{password_hash = PasswordHash, salt = Salt}, Rest);
+update_user_record(UserInfoRecord, [{is_superuser, IsSuperuser} | Rest]) ->
+    update_user_record(UserInfoRecord#user_info{is_superuser = IsSuperuser}, Rest).
 
 %% TODO: Support other type
 get_user_identity(#{username := Username}, username) ->
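The hunks above replace the ad-hoc update logic in emqx_authn_mnesia with two small helpers: fields_to_update/3 turns the caller-supplied update map into an explicit list of {Field, Value} changes, and update_user_record/2 folds that list over the stored #user_info{} record, so do_update_user/3 no longer needs the hashing configuration at all. Below is a minimal, standalone sketch of the same update-list pattern; the record, the stand-in hashing call, and all names are illustrative and are not the module's own code.

%% Standalone sketch of the "normalize to an update list, then fold it over
%% the stored record" pattern introduced by the diff above.
-module(update_list_sketch).
-export([demo/0]).

-record(user, {password_hash, salt, is_superuser = false}).

%% Turn a map of requested changes into an explicit list of field updates.
fields_to_update(#{password := Pwd} = Req, [hash_and_salt | Rest]) ->
    %% erlang:phash2/1 stands in for a real password-hashing call here.
    [{hash_and_salt, {erlang:phash2(Pwd), <<"salt">>}} | fields_to_update(Req, Rest)];
fields_to_update(#{is_superuser := Super} = Req, [is_superuser | Rest]) ->
    [{is_superuser, Super} | fields_to_update(Req, Rest)];
fields_to_update(Req, [_ | Rest]) ->
    fields_to_update(Req, Rest);
fields_to_update(_Req, []) ->
    [].

%% Apply the precomputed updates to the stored record, one field at a time.
apply_updates(User, []) -> User;
apply_updates(User, [{hash_and_salt, {Hash, Salt}} | Rest]) ->
    apply_updates(User#user{password_hash = Hash, salt = Salt}, Rest);
apply_updates(User, [{is_superuser, Super} | Rest]) ->
    apply_updates(User#user{is_superuser = Super}, Rest).

demo() ->
    Stored = #user{password_hash = old, salt = old, is_superuser = false},
    Updates = fields_to_update(#{is_superuser => true}, [hash_and_salt, is_superuser]),
    apply_updates(Stored, Updates).

The benefit mirrored from the diff: all hashing happens up front, outside the mnesia transaction, and the transaction only applies already-computed field values.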
@@ -24,27 +24,33 @@
 -export([
     fields/1,
     desc/1,
-    refs/0,
-    select_union_member/1
+    refs/1,
+    select_union_member/2,
+    namespace/0
 ]).
 
-refs() ->
+namespace() -> "authn".
+
+refs(api_write) ->
+    [?R_REF(builtin_db_api)];
+refs(_) ->
     [?R_REF(builtin_db)].
 
-select_union_member(#{
+select_union_member(Kind, #{
     <<"mechanism">> := ?AUTHN_MECHANISM_SIMPLE_BIN, <<"backend">> := ?AUTHN_BACKEND_BIN
 }) ->
-    refs();
-select_union_member(_) ->
+    refs(Kind);
+select_union_member(_Kind, _Value) ->
     undefined.
 
 fields(builtin_db) ->
     [
-        {mechanism, emqx_authn_schema:mechanism(?AUTHN_MECHANISM_SIMPLE)},
-        {backend, emqx_authn_schema:backend(?AUTHN_BACKEND)},
-        {user_id_type, fun user_id_type/1},
         {password_hash_algorithm, fun emqx_authn_password_hashing:type_rw/1}
-    ] ++ emqx_authn_schema:common_fields().
+    ] ++ common_fields();
+fields(builtin_db_api) ->
+    [
+        {password_hash_algorithm, fun emqx_authn_password_hashing:type_rw_api/1}
+    ] ++ common_fields().
 
 desc(builtin_db) ->
     ?DESC(builtin_db);
@@ -56,3 +62,10 @@ user_id_type(desc) -> ?DESC(?FUNCTION_NAME);
 user_id_type(default) -> <<"username">>;
 user_id_type(required) -> true;
 user_id_type(_) -> undefined.
+
+common_fields() ->
+    [
+        {mechanism, emqx_authn_schema:mechanism(?AUTHN_MECHANISM_SIMPLE)},
+        {backend, emqx_authn_schema:backend(?AUTHN_BACKEND)},
+        {user_id_type, fun user_id_type/1}
+    ] ++ emqx_authn_schema:common_fields().
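The schema change above widens refs/0 and select_union_member/1 by a Kind argument, so the same mechanism/backend pair resolves to builtin_db_api (carrying the API-write variant of password_hash_algorithm) when the value comes from an API write, and to builtin_db otherwise. A standalone sketch of that kind-based dispatch, using plain atoms and binaries rather than the real hocon schema references:

%% Standalone sketch of selecting a different union member per "kind".
-module(refs_kind_sketch).
-export([demo/0]).

refs(api_write) -> [builtin_db_api];
refs(_) -> [builtin_db].

select_union_member(Kind, #{
    <<"mechanism">> := <<"password_based">>, <<"backend">> := <<"built_in_database">>
}) ->
    refs(Kind);
select_union_member(_Kind, _Value) ->
    undefined.

demo() ->
    Value = #{<<"mechanism">> => <<"password_based">>, <<"backend">> => <<"built_in_database">>},
    [builtin_db_api] = select_union_member(api_write, Value),
    [builtin_db] = select_union_member(config, Value),
    undefined = select_union_member(api_write, #{<<"mechanism">> => <<"jwt">>}),
    ok.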
@@ -51,7 +51,7 @@
 %% Internal exports (RPC)
 -export([
     do_destroy/1,
-    do_add_user/2,
+    do_add_user/1,
     do_delete_user/2,
     do_update_user/3
 ]).
@@ -157,19 +157,15 @@ do_destroy(UserGroup) ->
     ).
 
 add_user(UserInfo, State) ->
-    trans(fun ?MODULE:do_add_user/2, [UserInfo, State]).
+    UserInfoRecord = user_info_record(UserInfo, State),
+    trans(fun ?MODULE:do_add_user/1, [UserInfoRecord]).
 
 do_add_user(
-    #{
-        user_id := UserID,
-        password := Password
-    } = UserInfo,
-    #{user_group := UserGroup} = State
+    #user_info{user_id = {UserID, _} = DBUserID, is_superuser = IsSuperuser} = UserInfoRecord
 ) ->
-    case mnesia:read(?TAB, {UserGroup, UserID}, write) of
+    case mnesia:read(?TAB, DBUserID, write) of
         [] ->
-            IsSuperuser = maps:get(is_superuser, UserInfo, false),
-            add_user(UserGroup, UserID, Password, IsSuperuser, State),
+            mnesia:write(?TAB, UserInfoRecord, write),
             {ok, #{user_id => UserID, is_superuser => IsSuperuser}};
         [_] ->
             {error, already_exist}
@@ -187,36 +183,28 @@ do_delete_user(UserID, #{user_group := UserGroup}) ->
     end.
 
 update_user(UserID, User, State) ->
-    trans(fun ?MODULE:do_update_user/3, [UserID, User, State]).
+    FieldsToUpdate = fields_to_update(
+        User,
+        [
+            keys_and_salt,
+            is_superuser
+        ],
+        State
+    ),
+    trans(fun ?MODULE:do_update_user/3, [UserID, FieldsToUpdate, State]).
 
 do_update_user(
     UserID,
-    User,
-    #{user_group := UserGroup} = State
+    FieldsToUpdate,
+    #{user_group := UserGroup} = _State
 ) ->
     case mnesia:read(?TAB, {UserGroup, UserID}, write) of
         [] ->
             {error, not_found};
-        [#user_info{is_superuser = IsSuperuser} = UserInfo] ->
-            UserInfo1 = UserInfo#user_info{
-                is_superuser = maps:get(is_superuser, User, IsSuperuser)
-            },
-            UserInfo2 =
-                case maps:get(password, User, undefined) of
-                    undefined ->
-                        UserInfo1;
-                    Password ->
-                        {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(
-                            Password, State
-                        ),
-                        UserInfo1#user_info{
-                            stored_key = StoredKey,
-                            server_key = ServerKey,
-                            salt = Salt
-                        }
-                end,
-            mnesia:write(?TAB, UserInfo2, write),
-            {ok, format_user_info(UserInfo2)}
+        [#user_info{} = UserInfo0] ->
+            UserInfo1 = update_user_record(UserInfo0, FieldsToUpdate),
+            mnesia:write(?TAB, UserInfo1, write),
+            {ok, format_user_info(UserInfo1)}
     end.
 
 lookup_user(UserID, #{user_group := UserGroup}) ->
@@ -315,19 +303,56 @@ check_client_final_message(Bin, #{is_superuser := IsSuperuser} = Cache, #{algori
             {error, not_authorized}
     end.
 
-add_user(UserGroup, UserID, Password, IsSuperuser, State) ->
-    {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State),
-    write_user(UserGroup, UserID, StoredKey, ServerKey, Salt, IsSuperuser).
+user_info_record(
+    #{
+        user_id := UserID,
+        password := Password
+    } = UserInfo,
+    #{user_group := UserGroup} = State
+) ->
+    IsSuperuser = maps:get(is_superuser, UserInfo, false),
+    user_info_record(UserGroup, UserID, Password, IsSuperuser, State).
 
-write_user(UserGroup, UserID, StoredKey, ServerKey, Salt, IsSuperuser) ->
-    UserInfo = #user_info{
+user_info_record(UserGroup, UserID, Password, IsSuperuser, State) ->
+    {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State),
+    #user_info{
         user_id = {UserGroup, UserID},
         stored_key = StoredKey,
         server_key = ServerKey,
         salt = Salt,
         is_superuser = IsSuperuser
-    },
-    mnesia:write(?TAB, UserInfo, write).
+    }.
+
+fields_to_update(
+    #{password := Password} = UserInfo,
+    [keys_and_salt | Rest],
+    State
+) ->
+    {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State),
+    [
+        {keys_and_salt, {StoredKey, ServerKey, Salt}}
+        | fields_to_update(UserInfo, Rest, State)
+    ];
+fields_to_update(#{is_superuser := IsSuperuser} = UserInfo, [is_superuser | Rest], State) ->
+    [{is_superuser, IsSuperuser} | fields_to_update(UserInfo, Rest, State)];
+fields_to_update(UserInfo, [_ | Rest], State) ->
+    fields_to_update(UserInfo, Rest, State);
+fields_to_update(_UserInfo, [], _State) ->
+    [].
+
+update_user_record(UserInfoRecord, []) ->
+    UserInfoRecord;
+update_user_record(UserInfoRecord, [{keys_and_salt, {StoredKey, ServerKey, Salt}} | Rest]) ->
+    update_user_record(
+        UserInfoRecord#user_info{
+            stored_key = StoredKey,
+            server_key = ServerKey,
+            salt = Salt
+        },
+        Rest
+    );
+update_user_record(UserInfoRecord, [{is_superuser, IsSuperuser} | Rest]) ->
+    update_user_record(UserInfoRecord#user_info{is_superuser = IsSuperuser}, Rest).
 
 retrieve(UserID, #{user_group := UserGroup}) ->
     case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
@@ -22,12 +22,15 @@
 -behaviour(emqx_authn_schema).
 
 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).
 
+namespace() -> "authn".
+
 refs() ->
     [?R_REF(scram)].
 
@@ -18,6 +18,7 @@
 
 -behaviour(minirest_api).
 
+-include("emqx_auth_mnesia.hrl").
 -include_lib("emqx_auth/include/emqx_authz.hrl").
 -include_lib("emqx/include/logger.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
@@ -55,6 +56,9 @@
     format_result/1
 ]).
 
+%% minirest filter callback
+-export([is_configured_authz_source/2]).
+
 -define(BAD_REQUEST, 'BAD_REQUEST').
 -define(NOT_FOUND, 'NOT_FOUND').
 -define(ALREADY_EXISTS, 'ALREADY_EXISTS').
@@ -85,6 +89,7 @@ paths() ->
 schema("/authorization/sources/built_in_database/rules/users") ->
     #{
         'operationId' => users,
+        filter => fun ?MODULE:is_configured_authz_source/2,
         get =>
             #{
                 tags => [<<"authorization">>],
@@ -131,6 +136,7 @@ schema("/authorization/sources/built_in_database/rules/users") ->
 schema("/authorization/sources/built_in_database/rules/clients") ->
     #{
         'operationId' => clients,
+        filter => fun ?MODULE:is_configured_authz_source/2,
         get =>
             #{
                 tags => [<<"authorization">>],
@@ -177,6 +183,7 @@ schema("/authorization/sources/built_in_database/rules/clients") ->
 schema("/authorization/sources/built_in_database/rules/users/:username") ->
     #{
         'operationId' => user,
+        filter => fun ?MODULE:is_configured_authz_source/2,
         get =>
             #{
                 tags => [<<"authorization">>],
@@ -230,6 +237,7 @@ schema("/authorization/sources/built_in_database/rules/users/:username") ->
 schema("/authorization/sources/built_in_database/rules/clients/:clientid") ->
     #{
         'operationId' => client,
+        filter => fun ?MODULE:is_configured_authz_source/2,
         get =>
             #{
                 tags => [<<"authorization">>],
@@ -283,6 +291,7 @@ schema("/authorization/sources/built_in_database/rules/clients/:clientid") ->
 schema("/authorization/sources/built_in_database/rules/all") ->
     #{
         'operationId' => all,
+        filter => fun ?MODULE:is_configured_authz_source/2,
         get =>
             #{
                 tags => [<<"authorization">>],
@@ -317,6 +326,7 @@ schema("/authorization/sources/built_in_database/rules/all") ->
 schema("/authorization/sources/built_in_database/rules") ->
     #{
         'operationId' => rules,
+        filter => fun ?MODULE:is_configured_authz_source/2,
         delete =>
             #{
                 tags => [<<"authorization">>],
@@ -426,6 +436,14 @@ fields(rules) ->
 %% HTTP API
 %%--------------------------------------------------------------------
 
+is_configured_authz_source(Params, _Meta) ->
+    emqx_authz_api_sources:with_source(
+        ?AUTHZ_TYPE_BIN,
+        fun(_Source) ->
+            {ok, Params}
+        end
+    ).
+
 users(get, #{query_string := QueryString}) ->
     case
         emqx_mgmt_api:node_query(
@@ -440,7 +458,9 @@ users(get, #{query_string := QueryString}) ->
         {error, page_limit_invalid} ->
             {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}};
         {error, Node, Error} ->
-            Message = list_to_binary(io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error])),
+            Message = list_to_binary(
+                io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error])
+            ),
             {500, #{code => <<"NODE_DOWN">>, message => Message}};
         Result ->
             {200, Result}
@@ -476,7 +496,9 @@ clients(get, #{query_string := QueryString}) ->
         {error, page_limit_invalid} ->
             {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}};
         {error, Node, Error} ->
-            Message = list_to_binary(io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error])),
+            Message = list_to_binary(
+                io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error])
+            ),
             {500, #{code => <<"NODE_DOWN">>, message => Message}};
         Result ->
             {200, Result}
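Each schema/1 clause above now installs is_configured_authz_source/2 as a minirest filter, so every built_in_database rules endpoint first checks, via emqx_authz_api_sources:with_source/2, that the source is actually configured; returning {ok, Params} lets the request continue, while a missing source presumably short-circuits with an error response (the API test suite further down expects 404 after the source is deleted). A standalone sketch of the same guard-filter idea, with an invented precondition rather than minirest's real callback contract:

%% Standalone sketch: a request is let through only when a precondition holds.
-module(filter_sketch).
-export([demo/0]).

check_precondition(true, Params) ->
    %% precondition satisfied: hand the (possibly enriched) params onward
    {ok, Params};
check_precondition(false, _Params) ->
    %% precondition failed: short-circuit with an error response
    {404, #{code => <<"NOT_FOUND">>, message => <<"source not configured">>}}.

demo() ->
    Params = #{bindings => #{username => <<"user1">>}},
    {ok, Params} = check_precondition(true, Params),
    {404, _} = check_precondition(false, Params),
    ok.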
@@ -95,7 +95,9 @@ create(Source) -> Source.
 
 update(Source) -> Source.
 
-destroy(_Source) -> ok.
+destroy(_Source) ->
+    {atomic, ok} = mria:clear_table(?ACL_TABLE),
+    ok.
 
 authorize(
     #{
@@ -26,9 +26,12 @@
     fields/1,
     desc/1,
     source_refs/0,
-    select_union_member/1
+    select_union_member/1,
+    namespace/0
 ]).
 
+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.
 
 fields(builtin_db) ->
@@ -314,6 +314,74 @@ t_update_user(_) ->
 
     {ok, #{is_superuser := true}} = emqx_authn_scram_mnesia:lookup_user(<<"u">>, State).
 
+t_update_user_keys(_Config) ->
+    Algorithm = sha512,
+    Username = <<"u">>,
+    Password = <<"p">>,
+
+    init_auth(Username, <<"badpass">>, Algorithm),
+
+    {ok, [#{state := State}]} = emqx_authn_chains:list_authenticators(?GLOBAL),
+
+    emqx_authn_scram_mnesia:update_user(
+        Username,
+        #{password => Password},
+        State
+    ),
+
+    ok = emqx_config:put([mqtt, idle_timeout], 500),
+
+    {ok, Pid} = emqx_authn_mqtt_test_client:start_link("127.0.0.1", 1883),
+
+    ClientFirstMessage = esasl_scram:client_first_message(Username),
+
+    ConnectPacket = ?CONNECT_PACKET(
+        #mqtt_packet_connect{
+            proto_ver = ?MQTT_PROTO_V5,
+            properties = #{
+                'Authentication-Method' => <<"SCRAM-SHA-512">>,
+                'Authentication-Data' => ClientFirstMessage
+            }
+        }
+    ),
+
+    ok = emqx_authn_mqtt_test_client:send(Pid, ConnectPacket),
+
+    ?AUTH_PACKET(
+        ?RC_CONTINUE_AUTHENTICATION,
+        #{'Authentication-Data' := ServerFirstMessage}
+    ) = receive_packet(),
+
+    {continue, ClientFinalMessage, ClientCache} =
+        esasl_scram:check_server_first_message(
+            ServerFirstMessage,
+            #{
+                client_first_message => ClientFirstMessage,
+                password => Password,
+                algorithm => Algorithm
+            }
+        ),
+
+    AuthContinuePacket = ?AUTH_PACKET(
+        ?RC_CONTINUE_AUTHENTICATION,
+        #{
+            'Authentication-Method' => <<"SCRAM-SHA-512">>,
+            'Authentication-Data' => ClientFinalMessage
+        }
+    ),
+
+    ok = emqx_authn_mqtt_test_client:send(Pid, AuthContinuePacket),
+
+    ?CONNACK_PACKET(
+        ?RC_SUCCESS,
+        _,
+        #{'Authentication-Data' := ServerFinalMessage}
+    ) = receive_packet(),
+
+    ok = esasl_scram:check_server_final_message(
+        ServerFinalMessage, ClientCache#{algorithm => Algorithm}
+    ).
+
 t_list_users(_) ->
     Config = config(),
     {ok, State} = emqx_authn_scram_mnesia:create(<<"id">>, Config),
@@ -331,4 +331,163 @@ t_api(_) ->
         []
     ),
     ?assertEqual(0, emqx_authz_mnesia:record_count()),
+
+    Examples = make_examples(emqx_authz_api_mnesia),
+    ?assertEqual(
+        14,
+        length(Examples)
+    ),
+
+    Fixtures1 = fun() ->
+        {ok, _, _} =
+            request(
+                delete,
+                uri(["authorization", "sources", "built_in_database", "rules", "all"]),
+                []
+            ),
+        {ok, _, _} =
+            request(
+                delete,
+                uri(["authorization", "sources", "built_in_database", "rules", "users"]),
+                []
+            ),
+        {ok, _, _} =
+            request(
+                delete,
+                uri(["authorization", "sources", "built_in_database", "rules", "clients"]),
+                []
+            )
+    end,
+    run_examples(Examples, Fixtures1),
+
+    Fixtures2 = fun() ->
+        %% disable/remove built_in_database
+        {ok, 204, _} =
+            request(
+                delete,
+                uri(["authorization", "sources", "built_in_database"]),
+                []
+            )
+    end,
+
+    run_examples(404, Examples, Fixtures2),
+
     ok.
+
+%% test helpers
+-define(REPLACEMENTS, #{
+    ":clientid" => <<"client1">>,
+    ":username" => <<"user1">>
+}).
+
+run_examples(Examples) ->
+    %% assume all ok
+    run_examples(
+        fun
+            ({ok, Code, _}) when
+                Code >= 200,
+                Code =< 299
+            ->
+                true;
+            (_Res) ->
+                ct:pal("check failed: ~p", [_Res]),
+                false
+        end,
+        Examples
+    ).
+
+run_examples(Examples, Fixtures) when is_function(Fixtures) ->
+    Fixtures(),
+    run_examples(Examples);
+run_examples(Check, Examples) when is_function(Check) ->
+    lists:foreach(
+        fun({Path, Op, Body} = _Req) ->
+            ct:pal("req: ~p", [_Req]),
+            ?assert(
+                Check(
+                    request(Op, uri(Path), Body)
+                )
+            )
+        end,
+        Examples
+    );
+run_examples(Code, Examples) when is_number(Code) ->
+    run_examples(
+        fun
+            ({ok, ResCode, _}) when Code =:= ResCode -> true;
+            (_Res) ->
+                ct:pal("check failed: ~p", [_Res]),
+                false
+        end,
+        Examples
+    ).
+
+run_examples(CodeOrCheck, Examples, Fixtures) when is_function(Fixtures) ->
+    Fixtures(),
+    run_examples(CodeOrCheck, Examples).
+
+make_examples(ApiMod) ->
+    make_examples(ApiMod, ?REPLACEMENTS).
+
+-spec make_examples(Mod :: atom()) -> [{Path :: list(), [{Op :: atom(), Body :: term()}]}].
+make_examples(ApiMod, Replacements) ->
+    Paths = ApiMod:paths(),
+    lists:flatten(
+        lists:map(
+            fun(Path) ->
+                Schema = ApiMod:schema(Path),
+                lists:map(
+                    fun({Op, OpSchema}) ->
+                        Body =
+                            case maps:get('requestBody', OpSchema, undefined) of
+                                undefined ->
+                                    [];
+                                HoconWithExamples ->
+                                    maps:get(
+                                        value,
+                                        hd(
+                                            maps:values(
+                                                maps:get(
+                                                    <<"examples">>,
+                                                    maps:get(examples, HoconWithExamples)
+                                                )
+                                            )
+                                        )
+                                    )
+                            end,
+                        {replace_parts(to_parts(Path), Replacements), Op, Body}
+                    end,
+                    lists:sort(
+                        fun op_sort/2, maps:to_list(maps:with([get, put, post, delete], Schema))
+                    )
+                )
+            end,
+            Paths
+        )
+    ).
+
+op_sort({post, _}, {_, _}) ->
+    true;
+op_sort({put, _}, {_, _}) ->
+    true;
+op_sort({get, _}, {delete, _}) ->
+    true;
+op_sort(_, _) ->
+    false.
+
+to_parts(Path) ->
+    string:tokens(Path, "/").
+
+replace_parts(Parts, Replacements) ->
+    lists:map(
+        fun(Part) ->
+            %% that's the fun part
+            case maps:is_key(Part, Replacements) of
+                true ->
+                    maps:get(Part, Replacements);
+                false ->
+                    Part
+            end
+        end,
+        Parts
+    ).
@@ -221,6 +221,35 @@ t_normalize_rules(_Config) ->
         )
     ).
 
+t_destroy(_Config) ->
+    ClientInfo = emqx_authz_test_lib:base_client_info(),
+
+    ok = emqx_authz_mnesia:store_rules(
+        {username, <<"username">>},
+        [#{<<"permission">> => <<"allow">>, <<"action">> => <<"publish">>, <<"topic">> => <<"t">>}]
+    ),
+
+    ?assertEqual(
+        allow,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>)
+    ),
+
+    ok = emqx_authz_test_lib:reset_authorizers(),
+
+    ?assertEqual(
+        deny,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>)
+    ),
+
+    ok = setup_config(),
+
+    %% After destroy, the rules should be empty
+
+    ?assertEqual(
+        deny,
+        emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>)
+    ).
+
 %%------------------------------------------------------------------------------
 %% Helpers
 %%------------------------------------------------------------------------------
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mongodb, [
     {description, "EMQX MongoDB Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_mongodb_app, []}},
     {applications, [
@@ -16,18 +16,21 @@
 
 -module(emqx_authn_mongodb_schema).
 
--include("emqx_auth_mongodb.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -behaviour(emqx_authn_schema).
 
 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).
 
+-include("emqx_auth_mongodb.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+namespace() -> "authn".
+
 refs() ->
     [
         ?R_REF(mongo_single),
@@ -35,12 +35,12 @@
 -compile(nowarn_export_all).
 -endif.
 
--define(PLACEHOLDERS, [
-    ?PH_USERNAME,
-    ?PH_CLIENTID,
-    ?PH_PEERHOST,
-    ?PH_CERT_CN_NAME,
-    ?PH_CERT_SUBJECT
+-define(ALLOWED_VARS, [
+    ?VAR_USERNAME,
+    ?VAR_CLIENTID,
+    ?VAR_PEERHOST,
+    ?VAR_CERT_CN_NAME,
+    ?VAR_CERT_SUBJECT
 ]).
 
 description() ->
@@ -49,11 +49,11 @@ description() ->
 create(#{filter := Filter} = Source) ->
     ResourceId = emqx_authz_utils:make_resource_id(?MODULE),
     {ok, _Data} = emqx_authz_utils:create_resource(ResourceId, emqx_mongodb, Source),
-    FilterTemp = emqx_authz_utils:parse_deep(Filter, ?PLACEHOLDERS),
+    FilterTemp = emqx_authz_utils:parse_deep(Filter, ?ALLOWED_VARS),
     Source#{annotations => #{id => ResourceId}, filter_template => FilterTemp}.
 
 update(#{filter := Filter} = Source) ->
-    FilterTemp = emqx_authz_utils:parse_deep(Filter, ?PLACEHOLDERS),
+    FilterTemp = emqx_authz_utils:parse_deep(Filter, ?ALLOWED_VARS),
     case emqx_authz_utils:update_resource(emqx_mongodb, Source) of
         {error, Reason} ->
             error({load_config_error, Reason});
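The ?PLACEHOLDERS to ?ALLOWED_VARS rename above (with the ?PH_* macros becoming ?VAR_*) passes the MongoDB authorizer an allow-list of variable names to emqx_authz_utils:parse_deep/2; the MySQL authorizer below gets the same treatment for its SQL template. A rough, standalone sketch of rendering a template against such an allow-list; the data shapes here are invented and far simpler than the real template engine:

%% Standalone sketch: substitute only variables that appear on an allow-list.
-module(allowed_vars_sketch).
-export([demo/0]).

-define(ALLOWED_VARS, [<<"username">>, <<"clientid">>]).

render(Template, Bindings) ->
    lists:map(
        fun
            ({var, Name}) ->
                case lists:member(Name, ?ALLOWED_VARS) of
                    true -> maps:get(Name, Bindings, <<"undefined">>);
                    %% variables outside the allow-list are never substituted
                    false -> <<"undefined">>
                end;
            (Literal) when is_binary(Literal) ->
                Literal
        end,
        Template
    ).

demo() ->
    Template = [<<"SELECT acl FROM rules WHERE who = '">>, {var, <<"username">>}, <<"'">>],
    render(Template, #{<<"username">> => <<"user1">>, <<"peerhost">> => <<"127.0.0.1">>}).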
@@ -16,17 +16,20 @@
 
 -module(emqx_authz_mongodb_schema).
 
--include("emqx_auth_mongodb.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -export([
     type/0,
     fields/1,
     desc/1,
     source_refs/0,
-    select_union_member/1
+    select_union_member/1,
+    namespace/0
 ]).
 
+-include("emqx_auth_mongodb.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.
 
 source_refs() ->
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mysql, [
     {description, "EMQX MySQL Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_mysql_app, []}},
     {applications, [
@@ -16,18 +16,21 @@
 
 -module(emqx_authn_mysql_schema).
 
--include("emqx_auth_mysql.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -behaviour(emqx_authn_schema).
 
 -export([
+    namespace/0,
    fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).
 
+-include("emqx_auth_mysql.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+namespace() -> "authn".
+
 refs() ->
     [?R_REF(mysql)].
 
@@ -37,26 +37,26 @@
 -compile(nowarn_export_all).
 -endif.
 
--define(PLACEHOLDERS, [
-    ?PH_USERNAME,
-    ?PH_CLIENTID,
-    ?PH_PEERHOST,
-    ?PH_CERT_CN_NAME,
-    ?PH_CERT_SUBJECT
+-define(ALLOWED_VARS, [
+    ?VAR_USERNAME,
+    ?VAR_CLIENTID,
+    ?VAR_PEERHOST,
+    ?VAR_CERT_CN_NAME,
+    ?VAR_CERT_SUBJECT
 ]).
 
 description() ->
     "AuthZ with Mysql".
 
 create(#{query := SQL} = Source0) ->
-    {PrepareSQL, TmplToken} = emqx_authz_utils:parse_sql(SQL, '?', ?PLACEHOLDERS),
+    {PrepareSQL, TmplToken} = emqx_authz_utils:parse_sql(SQL, '?', ?ALLOWED_VARS),
     ResourceId = emqx_authz_utils:make_resource_id(?MODULE),
     Source = Source0#{prepare_statement => #{?PREPARE_KEY => PrepareSQL}},
     {ok, _Data} = emqx_authz_utils:create_resource(ResourceId, emqx_mysql, Source),
     Source#{annotations => #{id => ResourceId, tmpl_token => TmplToken}}.
 
 update(#{query := SQL} = Source0) ->
-    {PrepareSQL, TmplToken} = emqx_authz_utils:parse_sql(SQL, '?', ?PLACEHOLDERS),
+    {PrepareSQL, TmplToken} = emqx_authz_utils:parse_sql(SQL, '?', ?ALLOWED_VARS),
     Source = Source0#{prepare_statement => #{?PREPARE_KEY => PrepareSQL}},
     case emqx_authz_utils:update_resource(emqx_mysql, Source) of
         {error, Reason} ->
@@ -22,6 +22,7 @@
 -behaviour(emqx_authz_schema).
 
 -export([
+    namespace/0,
     type/0,
     fields/1,
     desc/1,
@@ -29,6 +30,8 @@
     select_union_member/1
 ]).
 
+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.
 
 fields(mysql) ->
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_postgresql, [
     {description, "EMQX PostgreSQL Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_postgresql_app, []}},
     {applications, [
@@ -9,7 +9,7 @@
         stdlib,
         emqx,
         emqx_auth,
-        emqx_connector
+        emqx_postgresql
     ]},
     {env, []},
     {modules, []},
@@ -46,14 +46,14 @@ create(Config0) ->
     {Config, State} = parse_config(Config0, ResourceId),
     {ok, _Data} = emqx_authn_utils:create_resource(
         ResourceId,
-        emqx_connector_pgsql,
+        emqx_postgresql,
         Config
     ),
     {ok, State#{resource_id => ResourceId}}.
 
 update(Config0, #{resource_id := ResourceId} = _State) ->
     {Config, NState} = parse_config(Config0, ResourceId),
-    case emqx_authn_utils:update_resource(emqx_connector_pgsql, Config, ResourceId) of
+    case emqx_authn_utils:update_resource(emqx_postgresql, Config, ResourceId) of
         {error, Reason} ->
             error({load_config_error, Reason});
         {ok, _} ->
Some files were not shown because too many files have changed in this diff.