Merge branch 'master' into EMQX-871-872
commit df1cbb0bab

@@ -1,3 +1,4 @@
%% -*- mode: erlang -*-
{application, http_server,
 [{description, "An OTP application"},
  {vsn, "0.1.0"},

Binary file not shown (image, 123 KiB).

@@ -0,0 +1,70 @@
@startuml

new: tag "#needs-triage"

new_waiting: untag '#needs-triage'
new_waiting: tag '#triage/wait'
new_waiting --> accepted: pass 2nd review

accepted: auto tag '#triage/accepted'
accepted: untag '#needs-triage'
accepted: issue is ready to be worked on
accepted: in backlog, needs planning

assigned: update type tag (#support | #bug | #feature)
assigned: tag '#triage/accepted'
assigned: untag '#new, new_waiting'
assigned: update assignee
assigned: update priority

InProgress: update with link to the PR
InProgress: update release tag
InProgress: patch testing with issue reporter

needs_information: tag '#triage/needs-information', notify reporter

stale: untag '#triage/wait'
stale: tag '#stale' and notify reporter

closed: close GitHub issue
closed: converted to discussion

[*] --> new: created

new --> accepted: pass 1st review
new --> closed: if the issue is a topic\nfor discussion (not a bug or support request)
new --> new_waiting: lack of info

new_waiting --> stale: 7 days no updates
stale ---> closed: 14 days no updates
stale ---> new_waiting: updated info
closed --> [*]

accepted -down--> assigned: priority review

accepted --> needs_information: needs more information\nto proceed
needs_information --> accepted: updates
assigned --> InProgress: in sprint run\nor\nstart to work on
InProgress --> closed: issue is solved
InProgress ---> InProgress: more info is required from issue reporter
needs_information -----> stale: no updates\nafter 14 days

note left of new_waiting
  next review: 5 days
end note

note right of accepted
  using priority tags
  - #priority/critical-urgent
  - #priority/important-soon
  - #priority/important-longterm
  - #priority/backlog
  - #priority/awaiting-more-evidence
  using area tags
  - #area/lb
  - #area/acl
  - #area/config
  ...
end note

@enduml
@@ -2,14 +2,23 @@
name: Bug Report
about: Create a report to help us improve
title: ''
labels: "Support, needs-triage"
labels: needs-triage

---

<!-- Please use this template while reporting a bug and provide as much info as possible. Thanks! -->
<!-- Please describe the issue in English. -->
<!-- Note: lack of information will delay the handling of the issue.
See our GitHub issue handling flow here:
https://github.com/emqx/emqx/blob/master/.github/ISSUE_TEMPLATE/assets/issue-handling.png
-->

**Environment**:
### Subject of the issue
Describe your issue here.

### Your environment

For EMQ X 4.3 or newer, please provide the log archive generated by the [node_dump tool](https://github.com/emqx/emqx/blob/master/bin/node_dump).

- EMQ X version (e.g. `emqx_ctl status`):
- Hardware configuration (e.g. `lscpu`):

@@ -18,8 +27,13 @@ labels: "Support, needs-triage"
- Erlang/OTP version (in case you build emqx from source code):
- Others:

**What happened and what you expected to happen**:
### Steps to reproduce
Tell us how to reproduce this issue.

**How to reproduce it (as minimally and precisely as possible)**:
### Expected behaviour
Tell us what should happen.

**Anything else we need to know?**:
### Actual behaviour
Tell us what happens instead.

A missing log file can delay the handling of the issue.
@@ -7,6 +7,11 @@ labels: "Feature, needs-triage"
---

<!-- Please only use this template for submitting enhancement requests -->
<!-- Note: lack of information will delay the handling of the issue.
See our GitHub issue handling flow here:
https://github.com/emqx/emqx/blob/master/.github/ISSUE_TEMPLATE/assets/issue-handling.png
-->

**What would you like to be added/modified**:
@@ -6,4 +6,29 @@ labels: "Support, needs-triage"

---

**Please describe your problem in detail; if necessary, you can upload the log file as an attachment**:
<!-- Note: lack of information will delay the handling of the issue.
See our GitHub issue handling flow here:
https://github.com/emqx/emqx/blob/master/.github/ISSUE_TEMPLATE/assets/issue-handling.png
-->

### Subject of the support request
Describe your issue here.

Error/warning printout, if any.

### Your environment

For EMQ X 4.3 or newer, please provide the log archive generated by the [node_dump tool](https://github.com/emqx/emqx/blob/master/bin/node_dump).

Otherwise, please provide the info below:
- EMQ X version (e.g. `emqx_ctl status`):
- If a cluster (e.g. 3 x 4-core/16 GB):
- Hardware configuration (e.g. `lscpu`):
- OS (e.g. `cat /etc/os-release`):
- Kernel (e.g. `uname -a`):
- Erlang/OTP version (in case you build emqx from source code):
- Others:

### Log file

A missing log file can delay the handling of the issue.
@@ -12,12 +12,12 @@ jobs:
  prepare:
    strategy:
      matrix:
        container:
        - "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
        - "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
        otp:
        - "23.2.7.2-emqx-2"
        - "24.0.5-emqx-1"

    runs-on: ubuntu-20.04
    container: ${{ matrix.container }}
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-ubuntu20.04"

    outputs:
      profiles: ${{ steps.set_profile.outputs.profiles }}
@@ -306,7 +306,7 @@ jobs:
          done
      - name: build emqx packages
        env:
          ERL_OTP: erl${{ matrix.otp }}
          OTP: ${{ matrix.otp }}
          PROFILE: ${{ matrix.profile }}
          ARCH: ${{ matrix.arch }}
          SYSTEM: ${{ matrix.os }}
@@ -316,7 +316,7 @@ jobs:
            -v $(pwd):/emqx \
            --workdir /emqx \
            --platform linux/$ARCH \
            emqx/build-env:$ERL_OTP-$SYSTEM \
            ghcr.io/emqx/emqx-builder-helper/5.0:$OTP-$SYSTEM \
            bash -euc "make $PROFILE-zip || cat rebar3.crashdump; \
                       make $PROFILE-pkg || cat rebar3.crashdump; \
                       EMQX_NAME=$PROFILE && .ci/build_packages/tests.sh"
@@ -375,7 +375,7 @@ jobs:
          tags: emqx/${{ matrix.profile }}:${{ steps.version.outputs.version }}
          build-args: |
            PKG_VSN=${{ steps.version.outputs.version }}
            BUILD_FROM=emqx/build-env:erl${{ matrix.otp }}-alpine
            BUILD_FROM=ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-alpine3.14
            RUN_FROM=alpine:3.14
            EMQX_NAME=${{ matrix.profile }}
          file: source/deploy/docker/Dockerfile
@@ -395,7 +395,7 @@ jobs:
          tags: emqx/${{ matrix.profile }}:${{ steps.version.outputs.version }}
          build-args: |
            PKG_VSN=${{ steps.version.outputs.version }}
            BUILD_FROM=emqx/build-env:erl${{ matrix.otp }}-alpine
            BUILD_FROM=ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-alpine3.14
            RUN_FROM=alpine:3.14
            EMQX_NAME=${{ matrix.profile }}
          file: source/deploy/docker/Dockerfile
@@ -14,14 +14,13 @@ jobs:

    strategy:
      matrix:
        erl_otp:
        - erl24.0.5-emqx-1

        otp:
        - 24.0.5-emqx-1
        os:
        - ubuntu20.04
        - centos7

    container: emqx/build-env:${{ matrix.erl_otp }}-${{ matrix.os }}
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-${{ matrix.os }}"

    steps:
    - uses: actions/checkout@v1
@@ -58,7 +57,7 @@ jobs:

    strategy:
      matrix:
        erl_otp:
        otp:
        - 24.0.5-emqx-1

    steps:
@@ -83,7 +82,7 @@ jobs:
        id: cache
        with:
          path: ~/.kerl
          key: erl${{ matrix.erl_otp }}-macos10.15
          key: erl${{ matrix.otp }}-macos10.15
      - name: build erlang
        if: steps.cache.outputs.cache-hit != 'true'
        timeout-minutes: 60
@@ -92,11 +91,11 @@ jobs:
          OTP_GITHUB_URL: https://github.com/emqx/otp
        run: |
          kerl update releases
          kerl build ${{ matrix.erl_otp }}
          kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }}
          kerl build ${{ matrix.otp }}
          kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
      - name: build
        run: |
          . $HOME/.kerl/${{ matrix.erl_otp }}/activate
          . $HOME/.kerl/${{ matrix.otp }}/activate
          make ensure-rebar3
          sudo cp rebar3 /usr/local/bin/rebar3
          make ${EMQX_NAME}-zip
@@ -4,13 +4,8 @@ on: [pull_request]

jobs:
  check_deps_integrity:
    strategy:
      matrix:
        container:
        - "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"

    runs-on: ubuntu-20.04
    container: ${{ matrix.container }}
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:24.0.5-emqx-1-ubuntu20.04"

    steps:
    - uses: actions/checkout@v2
@@ -2,15 +2,16 @@ name: API Test Suite

on:
  push:
    tags:
      - e*
      - v*
    tags:
      - e*
      - v*
  pull_request:

jobs:
  build:
    runs-on: ubuntu-latest
    container: "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:23.2.7.2-emqx-2-ubuntu20.04"

    steps:
    - uses: actions/checkout@v2
    - name: zip emqx-broker
@@ -27,6 +28,7 @@ jobs:
        with:
          name: emqx-broker
          path: _packages/**/*.zip

  api-test:
    needs: build
    runs-on: ubuntu-latest
@@ -39,6 +41,10 @@ jobs:
        - api_clients
        - api_routes
        - api_publish
        - api_user
        - api_login
        - api_banned
        - api_alarms
    steps:
    - uses: actions/checkout@v2
      with:
@@ -90,11 +96,7 @@ jobs:
        with:
          name: jmeter_logs
          path: ./jmeter_logs
      - uses: actions/upload-artifact@v1
        if: failure()
        with:
          name: jmeter_logs
          path: emqx/log

  delete-package:
    runs-on: ubuntu-20.04
    needs: api-test
@@ -8,31 +8,30 @@ on:
  pull_request:

jobs:
  check_all:
    strategy:
      matrix:
        container:
        - "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
        - "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
  check_all:
    strategy:
      matrix:
        otp:
        - "23.2.7.2-emqx-2"
        - "24.0.5-emqx-1"

    runs-on: ubuntu-20.04
    container: ${{ matrix.container }}

    steps:
    - uses: actions/checkout@v2
    - name: run
      run: |
        make ensure-rebar3
        cp rebar3 apps/emqx/
        cd apps/emqx
        ./rebar3 xref
        ./rebar3 dialyzer
        ./rebar3 eunit -v
        ./rebar3 ct -v
        ./rebar3 proper -d test/props
    - uses: actions/upload-artifact@v1
      if: failure()
      with:
        name: logs
        path: apps/emqx/_build/test/logs
    runs-on: ubuntu-20.04
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-ubuntu20.04"

    steps:
    - uses: actions/checkout@v2
    - name: run
      run: |
        make ensure-rebar3
        cp rebar3 apps/emqx/
        cd apps/emqx
        ./rebar3 xref
        ./rebar3 dialyzer
        ./rebar3 eunit -v
        ./rebar3 ct -v
        ./rebar3 proper -d test/props
    - uses: actions/upload-artifact@v1
      if: failure()
      with:
        name: logs
        path: apps/emqx/_build/test/logs
@@ -11,12 +11,12 @@ jobs:
  prepare:
    strategy:
      matrix:
        container:
        - "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
        - "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
        otp:
        - "23.2.7.2-emqx-2"
        - "24.0.5-emqx-1"

    runs-on: ubuntu-20.04
    container: ${{ matrix.container }}
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-ubuntu20.04"

    outputs:
      profile: ${{ steps.profile.outputs.profile }}
@@ -11,12 +11,12 @@ jobs:
  relup_test:
    strategy:
      matrix:
        container:
        - "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
        - "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
        otp:
        - "23.2.7.2-emqx-2"
        - "24.0.5-emqx-1"

    runs-on: ubuntu-20.04
    container: ${{ matrix.container }}
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-ubuntu20.04"

    defaults:
      run:
@@ -11,12 +11,12 @@ jobs:
  run_static_analysis:
    strategy:
      matrix:
        container:
        - "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
        - "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
        otp:
        - "23.2.7.2-emqx-2"
        - "24.0.5-emqx-1"

    runs-on: ubuntu-20.04
    container: ${{ matrix.container }}
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-ubuntu20.04"

    steps:
    - uses: actions/checkout@v2
@@ -34,12 +34,12 @@ jobs:
  run_proper_test:
    strategy:
      matrix:
        container:
        - "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
        - "emqx/build-env:erl24.0.5-emqx-1-ubuntu20.04"
        otp:
        - "23.2.7.2-emqx-2"
        - "24.0.5-emqx-1"

    runs-on: ubuntu-20.04
    container: ${{ matrix.container }}
    container: "ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-ubuntu20.04"

    steps:
    - uses: actions/checkout@v2
@@ -49,7 +49,7 @@ docker run -d --name emqx -p 1883:1883 -p 8081:8081 -p 8083:8083 -p 8883:8883 -p
git clone https://github.com/emqx/emqx.git
cd emqx
make
_build/emqx/rel/emqx/bin console
_build/emqx/rel/emqx/bin/emqx console
```

For versions earlier than 4.3, build from another repository:
@@ -50,7 +50,7 @@ docker run -d --name emqx -p 1883:1883 -p 8081:8081 -p 8083:8083 -p 8883:8883 -p
git clone https://github.com/emqx/emqx.git
cd emqx
make
_build/emqx/rel/emqx/bin console
_build/emqx/rel/emqx/bin/emqx console
```

Earlier releases can be built from another repository:
@@ -49,7 +49,7 @@ For 4.3 and later versions.
git clone https://github.com/emqx/emqx.git
cd emqx
make
_build/emqx/rel/emqx/bin console
_build/emqx/rel/emqx/bin/emqx console
```

For earlier versions, the release has to be built from another repository.
@@ -1325,6 +1325,16 @@ example_common_ssl_options {
  ## Default: true
  ssl.secure_renegotiate = true

  ## In protocols that support client-initiated renegotiation,
  ## the resource cost of such an operation is higher for the server than for the client.
  ## This can act as a vector for denial-of-service attacks.
  ## The SSL application already takes measures to counteract such attempts,
  ## but client-initiated renegotiation can be strictly disabled by setting this option to false.
  ## The default value is true. Note that disabling renegotiation can result in
  ## long-lived connections becoming unusable due to limits on
  ## the number of messages the underlying cipher suite can encipher.
  ssl.client_renegotiation = true

  ## An important security setting: it forces the cipher suite to be chosen based
  ## on the server-specified order instead of the client-specified order,
  ## hence enforcing the (usually more properly configured) security
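As an aside on the two renegotiation flags above: they correspond directly to the Erlang/OTP `ssl` socket options of the same names. A minimal, hypothetical sketch (the certificate paths are illustrative placeholders, not taken from this change):

%% Sketch: an SSL listener that requires secure renegotiation and
%% strictly disables client-initiated renegotiation.
%% Certificate paths below are placeholders for illustration only.
{ok, _LSock} = ssl:listen(8883,
    [ {certfile, "etc/certs/cert.pem"}
    , {keyfile,  "etc/certs/key.pem"}
    , {secure_renegotiate, true}
    , {client_renegotiation, false}
    ]).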
@@ -10,12 +10,13 @@
%% `git_subdir` dependency in other projects.
{deps,
  [ {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}}
  , {typerefl, {git, "https://github.com/k32/typerefl", {tag, "0.8.4"}}}
  , {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
  , {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.2"}}}
  , {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.3"}}}
  , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.2"}}}
  , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.10.8"}}}
  , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
  , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.17.0"}}}
  , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.19.0"}}}
  , {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
  , {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
  , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.14.1"}}}
@@ -1,3 +1,4 @@
%% -*- mode: erlang -*-
{application, emqx,
 [{id, "emqx"},
  {description, "EMQ X Core"},
@@ -119,17 +119,17 @@ is_running(Node) ->
%% PubSub API
%%--------------------------------------------------------------------

-spec(subscribe(emqx_topic:topic() | string()) -> ok).
-spec(subscribe(emqx_types:topic() | string()) -> ok).
subscribe(Topic) ->
    emqx_broker:subscribe(iolist_to_binary(Topic)).

-spec(subscribe(emqx_topic:topic() | string(), emqx_types:subid() | emqx_types:subopts()) -> ok).
-spec(subscribe(emqx_types:topic() | string(), emqx_types:subid() | emqx_types:subopts()) -> ok).
subscribe(Topic, SubId) when is_atom(SubId); is_binary(SubId) ->
    emqx_broker:subscribe(iolist_to_binary(Topic), SubId);
subscribe(Topic, SubOpts) when is_map(SubOpts) ->
    emqx_broker:subscribe(iolist_to_binary(Topic), SubOpts).

-spec(subscribe(emqx_topic:topic() | string(),
-spec(subscribe(emqx_types:topic() | string(),
                emqx_types:subid() | pid(), emqx_types:subopts()) -> ok).
subscribe(Topic, SubId, SubOpts) when (is_atom(SubId) orelse is_binary(SubId)), is_map(SubOpts) ->
    emqx_broker:subscribe(iolist_to_binary(Topic), SubId, SubOpts).
@@ -138,7 +138,7 @@ subscribe(Topic, SubId, SubOpts) when (is_atom(SubId) orelse is_binary(SubId)),
publish(Msg) ->
    emqx_broker:publish(Msg).

-spec(unsubscribe(emqx_topic:topic() | string()) -> ok).
-spec(unsubscribe(emqx_types:topic() | string()) -> ok).
unsubscribe(Topic) ->
    emqx_broker:unsubscribe(iolist_to_binary(Topic)).
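For orientation, a small usage sketch of the PubSub API above (assumes a running broker; per the specs, topics may be binaries or strings):

%% Usage sketch (assumes a running broker).
ok = emqx:subscribe(<<"t/1">>, #{qos => 1}),
_  = emqx:publish(emqx_message:make(<<"t/1">>, <<"hello">>)),
ok = emqx:unsubscribe("t/1").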
@@ -146,18 +146,18 @@ unsubscribe(Topic) ->
%% PubSub management API
%%--------------------------------------------------------------------

-spec(topics() -> list(emqx_topic:topic())).
-spec(topics() -> list(emqx_types:topic())).
topics() -> emqx_router:topics().

-spec(subscribers(emqx_topic:topic() | string()) -> [pid()]).
-spec(subscribers(emqx_types:topic() | string()) -> [pid()]).
subscribers(Topic) ->
    emqx_broker:subscribers(iolist_to_binary(Topic)).

-spec(subscriptions(pid()) -> [{emqx_topic:topic(), emqx_types:subopts()}]).
-spec(subscriptions(pid()) -> [{emqx_types:topic(), emqx_types:subopts()}]).
subscriptions(SubPid) when is_pid(SubPid) ->
    emqx_broker:subscriptions(SubPid).

-spec(subscribed(pid() | emqx_types:subid(), emqx_topic:topic() | string()) -> boolean()).
-spec(subscribed(pid() | emqx_types:subid(), emqx_types:topic() | string()) -> boolean()).
subscribed(SubPid, Topic) when is_pid(SubPid) ->
    emqx_broker:subscribed(SubPid, iolist_to_binary(Topic));
subscribed(SubId, Topic) when is_atom(SubId); is_binary(SubId) ->
@@ -408,6 +408,8 @@ normalize_message(high_cpu_usage, #{usage := Usage}) ->
    list_to_binary(io_lib:format("~s cpu usage", [Usage]));
normalize_message(too_many_processes, #{usage := Usage}) ->
    list_to_binary(io_lib:format("~s process usage", [Usage]));
normalize_message(cluster_rpc_apply_failed, #{tnx_id := TnxId}) ->
    list_to_binary(io_lib:format("cluster_rpc_apply_failed:~w", [TnxId]));
normalize_message(partition, #{occurred := Node}) ->
    list_to_binary(io_lib:format("Partition occurs at node ~s", [Node]));
normalize_message(<<"resource", _/binary>>, #{type := Type, id := ID}) ->
@@ -40,8 +40,10 @@
        , stop/0
        ]).

-export([ add_provider/2
        , remove_provider/1
-export([ register_provider/2
        , register_providers/1
        , deregister_provider/1
        , deregister_providers/1
        , create_chain/1
        , delete_chain/1
        , lookup_chain/1
@@ -73,6 +75,11 @@
        , code_change/3
        ]).

-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.

-define(CHAINS_TAB, emqx_authn_chains).

-define(VER_1, <<"1">>).
@@ -192,21 +199,39 @@ pre_config_update(UpdateReq, OldConfig) ->
        {ok, NewConfig} -> {ok, may_to_map(NewConfig)}
    end.

do_pre_config_update({create_authenticator, _ChainName, Config}, OldConfig) ->
    {ok, OldConfig ++ [Config]};
do_pre_config_update({create_authenticator, ChainName, Config}, OldConfig) ->
    try
        CertsDir = certs_dir([to_bin(ChainName), generate_id(Config)]),
        NConfig = convert_certs(CertsDir, Config),
        {ok, OldConfig ++ [NConfig]}
    catch
        error:{save_cert_to_file, _} = Reason ->
            {error, Reason};
        error:{missing_parameter, _} = Reason ->
            {error, Reason}
    end;
do_pre_config_update({delete_authenticator, _ChainName, AuthenticatorID}, OldConfig) ->
    NewConfig = lists:filter(fun(OldConfig0) ->
                                 AuthenticatorID =/= generate_id(OldConfig0)
                             end, OldConfig),
    {ok, NewConfig};
do_pre_config_update({update_authenticator, _ChainName, AuthenticatorID, Config}, OldConfig) ->
    NewConfig = lists:map(fun(OldConfig0) ->
                              case AuthenticatorID =:= generate_id(OldConfig0) of
                                  true -> maps:merge(OldConfig0, Config);
                                  false -> OldConfig0
                              end
                          end, OldConfig),
    {ok, NewConfig};
do_pre_config_update({update_authenticator, ChainName, AuthenticatorID, Config}, OldConfig) ->
    try
        CertsDir = certs_dir([to_bin(ChainName), AuthenticatorID]),
        NewConfig = lists:map(
                        fun(OldConfig0) ->
                            case AuthenticatorID =:= generate_id(OldConfig0) of
                                true -> convert_certs(CertsDir, Config, OldConfig0);
                                false -> OldConfig0
                            end
                        end, OldConfig),
        {ok, NewConfig}
    catch
        error:{save_cert_to_file, _} = Reason ->
            {error, Reason};
        error:{missing_parameter, _} = Reason ->
            {error, Reason}
    end;
do_pre_config_update({move_authenticator, _ChainName, AuthenticatorID, Position}, OldConfig) ->
    case split_by_id(AuthenticatorID, OldConfig) of
        {error, Reason} -> {error, Reason};
@@ -236,13 +261,18 @@ do_post_config_update({create_authenticator, ChainName, Config}, _NewConfig, _Ol
    _ = create_chain(ChainName),
    create_authenticator(ChainName, NConfig);

do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, _OldConfig, _AppEnvs) ->
    delete_authenticator(ChainName, AuthenticatorID);
do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, OldConfig, _AppEnvs) ->
    case delete_authenticator(ChainName, AuthenticatorID) of
        ok ->
            [Config] = [Config0 || Config0 <- to_list(OldConfig), AuthenticatorID == generate_id(Config0)],
            CertsDir = certs_dir([to_bin(ChainName), AuthenticatorID]),
            clear_certs(CertsDir, Config),
            ok;
        {error, Reason} ->
            {error, Reason}
    end;

do_post_config_update({update_authenticator, ChainName, AuthenticatorID, _Config}, NewConfig, _OldConfig, _AppEnvs) ->
    [Config] = lists:filter(fun(NewConfig0) ->
                                AuthenticatorID =:= generate_id(NewConfig0)
                            end, NewConfig),
do_post_config_update({update_authenticator, ChainName, AuthenticatorID, Config}, _NewConfig, _OldConfig, _AppEnvs) ->
    NConfig = check_config(Config),
    update_authenticator(ChainName, AuthenticatorID, NConfig);
@@ -250,8 +280,8 @@ do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}
    move_authenticator(ChainName, AuthenticatorID, Position).

check_config(Config) ->
    #{authentication := CheckedConfig} = hocon_schema:check_plain(emqx_authentication,
        #{<<"authentication">> => Config}, #{nullable => true, atom_key => true}),
    #{authentication := CheckedConfig} =
        hocon_schema:check_plain(?MODULE, #{<<"authentication">> => Config}, #{atom_key => true}),
    CheckedConfig.

%%------------------------------------------------------------------------------
@@ -315,27 +345,41 @@ stop() ->

-spec get_refs() -> {ok, Refs} when Refs :: [{authn_type(), module()}].
get_refs() ->
    gen_server:call(?MODULE, get_refs).
    call(get_refs).

-spec add_provider(authn_type(), module()) -> ok.
add_provider(AuthNType, Provider) ->
    gen_server:call(?MODULE, {add_provider, AuthNType, Provider}).
%% @doc Register authentication providers.
%% A provider is a tuple of the `AuthNType' and the module which implements
%% the authenticator callbacks.
%% For example, ``[{{'password-based', redis}, emqx_authn_redis}]''
%% NOTE: A provider registered later may override an earlier one if they
%% happen to clash on the same `AuthNType'.
-spec register_providers([{authn_type(), module()}]) -> ok.
register_providers(Providers) ->
    call({register_providers, Providers}).

-spec remove_provider(authn_type()) -> ok.
remove_provider(AuthNType) ->
    gen_server:call(?MODULE, {remove_provider, AuthNType}).
-spec register_provider(authn_type(), module()) -> ok.
register_provider(AuthNType, Provider) ->
    register_providers([{AuthNType, Provider}]).

-spec deregister_providers([authn_type()]) -> ok.
deregister_providers(AuthNTypes) when is_list(AuthNTypes) ->
    call({deregister_providers, AuthNTypes}).

-spec deregister_provider(authn_type()) -> ok.
deregister_provider(AuthNType) ->
    deregister_providers([AuthNType]).

-spec create_chain(chain_name()) -> {ok, chain()} | {error, term()}.
create_chain(Name) ->
    gen_server:call(?MODULE, {create_chain, Name}).
    call({create_chain, Name}).

-spec delete_chain(chain_name()) -> ok | {error, term()}.
delete_chain(Name) ->
    gen_server:call(?MODULE, {delete_chain, Name}).
    call({delete_chain, Name}).

-spec lookup_chain(chain_name()) -> {ok, chain()} | {error, term()}.
lookup_chain(Name) ->
    gen_server:call(?MODULE, {lookup_chain, Name}).
    call({lookup_chain, Name}).

-spec list_chains() -> {ok, [chain()]}.
list_chains() ->
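To make the shape of the new registration API concrete, a hedged usage sketch (the provider pair is the example from the doc comment above):

%% Sketch: batch registration fails as a whole if any AuthNType
%% clashes with one that is already registered.
ok = emqx_authentication:register_providers(
       [{{'password-based', redis}, emqx_authn_redis}]),
ok = emqx_authentication:deregister_provider({'password-based', redis}).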
@@ -344,15 +388,15 @@ list_chains() ->

-spec create_authenticator(chain_name(), config()) -> {ok, authenticator()} | {error, term()}.
create_authenticator(ChainName, Config) ->
    gen_server:call(?MODULE, {create_authenticator, ChainName, Config}).
    call({create_authenticator, ChainName, Config}).

-spec delete_authenticator(chain_name(), authenticator_id()) -> ok | {error, term()}.
delete_authenticator(ChainName, AuthenticatorID) ->
    gen_server:call(?MODULE, {delete_authenticator, ChainName, AuthenticatorID}).
    call({delete_authenticator, ChainName, AuthenticatorID}).

-spec update_authenticator(chain_name(), authenticator_id(), config()) -> {ok, authenticator()} | {error, term()}.
update_authenticator(ChainName, AuthenticatorID, Config) ->
    gen_server:call(?MODULE, {update_authenticator, ChainName, AuthenticatorID, Config}).
    call({update_authenticator, ChainName, AuthenticatorID, Config}).

-spec lookup_authenticator(chain_name(), authenticator_id()) -> {ok, authenticator()} | {error, term()}.
lookup_authenticator(ChainName, AuthenticatorID) ->
@@ -379,44 +423,46 @@ list_authenticators(ChainName) ->

-spec move_authenticator(chain_name(), authenticator_id(), position()) -> ok | {error, term()}.
move_authenticator(ChainName, AuthenticatorID, Position) ->
    gen_server:call(?MODULE, {move_authenticator, ChainName, AuthenticatorID, Position}).
    call({move_authenticator, ChainName, AuthenticatorID, Position}).

-spec import_users(chain_name(), authenticator_id(), binary()) -> ok | {error, term()}.
import_users(ChainName, AuthenticatorID, Filename) ->
    gen_server:call(?MODULE, {import_users, ChainName, AuthenticatorID, Filename}).
    call({import_users, ChainName, AuthenticatorID, Filename}).

-spec add_user(chain_name(), authenticator_id(), user_info()) -> {ok, user_info()} | {error, term()}.
add_user(ChainName, AuthenticatorID, UserInfo) ->
    gen_server:call(?MODULE, {add_user, ChainName, AuthenticatorID, UserInfo}).
    call({add_user, ChainName, AuthenticatorID, UserInfo}).

-spec delete_user(chain_name(), authenticator_id(), binary()) -> ok | {error, term()}.
delete_user(ChainName, AuthenticatorID, UserID) ->
    gen_server:call(?MODULE, {delete_user, ChainName, AuthenticatorID, UserID}).
    call({delete_user, ChainName, AuthenticatorID, UserID}).

-spec update_user(chain_name(), authenticator_id(), binary(), map()) -> {ok, user_info()} | {error, term()}.
update_user(ChainName, AuthenticatorID, UserID, NewUserInfo) ->
    gen_server:call(?MODULE, {update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}).
    call({update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}).

-spec lookup_user(chain_name(), authenticator_id(), binary()) -> {ok, user_info()} | {error, term()}.
lookup_user(ChainName, AuthenticatorID, UserID) ->
    gen_server:call(?MODULE, {lookup_user, ChainName, AuthenticatorID, UserID}).
    call({lookup_user, ChainName, AuthenticatorID, UserID}).

%% TODO: Support pagination
-spec list_users(chain_name(), authenticator_id()) -> {ok, [user_info()]} | {error, term()}.
list_users(ChainName, AuthenticatorID) ->
    gen_server:call(?MODULE, {list_users, ChainName, AuthenticatorID}).
    call({list_users, ChainName, AuthenticatorID}).

-spec generate_id(config()) -> authenticator_id().
generate_id(#{mechanism := Mechanism0, backend := Backend0}) ->
    Mechanism = atom_to_binary(Mechanism0),
    Backend = atom_to_binary(Backend0),
    Mechanism = to_bin(Mechanism0),
    Backend = to_bin(Backend0),
    <<Mechanism/binary, ":", Backend/binary>>;
generate_id(#{mechanism := Mechanism}) ->
    atom_to_binary(Mechanism);
    to_bin(Mechanism);
generate_id(#{<<"mechanism">> := Mechanism, <<"backend">> := Backend}) ->
    <<Mechanism/binary, ":", Backend/binary>>;
generate_id(#{<<"mechanism">> := Mechanism}) ->
    Mechanism.
    Mechanism;
generate_id(_) ->
    error({missing_parameter, mechanism}).
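A quick worked example of `generate_id/1` per the clauses above:

%% The authenticator ID is derived from mechanism and backend:
<<"password-based:redis">> =
    emqx_authentication:generate_id(#{mechanism => 'password-based', backend => redis}),
<<"jwt">> = emqx_authentication:generate_id(#{mechanism => jwt}).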

%%--------------------------------------------------------------------
%% gen_server callbacks
@@ -430,11 +476,20 @@ init(_Opts) ->
    ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE),
    {ok, #{hooked => false, providers => #{}}}.

handle_call({add_provider, AuthNType, Provider}, _From, #{providers := Providers} = State) ->
    reply(ok, State#{providers := Providers#{AuthNType => Provider}});
handle_call({register_providers, Providers}, _From,
            #{providers := Reg0} = State) ->
    case lists:filter(fun({T, _}) -> maps:is_key(T, Reg0) end, Providers) of
        [] ->
            Reg = lists:foldl(fun({AuthNType, Module}, Pin) ->
                                  Pin#{AuthNType => Module}
                              end, Reg0, Providers),
            reply(ok, State#{providers := Reg});
        Clashes ->
            reply({error, {authentication_type_clash, Clashes}}, State)
    end;

handle_call({remove_provider, AuthNType}, _From, #{providers := Providers} = State) ->
    reply(ok, State#{providers := maps:remove(AuthNType, Providers)});
handle_call({deregister_providers, AuthNTypes}, _From, #{providers := Providers} = State) ->
    reply(ok, State#{providers := maps:without(AuthNTypes, Providers)});

handle_call(get_refs, _From, #{providers := Providers} = State) ->
    Refs = lists:foldl(fun({_, Provider}, Acc) ->
@@ -460,7 +515,7 @@ handle_call({delete_chain, Name}, _From, State) ->
        [#chain{authenticators = Authenticators}] ->
            _ = [do_delete_authenticator(Authenticator) || Authenticator <- Authenticators],
            true = ets:delete(?CHAINS_TAB, Name),
            reply(ok, may_unhook(State))
            reply(ok, maybe_unhook(State))
    end;

handle_call({lookup_chain, Name}, _From, State) ->
@@ -490,7 +545,7 @@ handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Pro
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, may_hook(State));
    reply(Reply, maybe_hook(State));

handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) ->
    UpdateFun =
@@ -505,7 +560,7 @@ handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) ->
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, may_unhook(State));
    reply(Reply, maybe_unhook(State));

handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, State) ->
    UpdateFun =
@@ -600,6 +655,106 @@ reply(Reply, State) ->
%% Internal functions
%%------------------------------------------------------------------------------

certs_dir(Dirs) when is_list(Dirs) ->
    to_bin(filename:join([emqx:get_config([node, data_dir]), "certs/authn"] ++ Dirs)).

convert_certs(CertsDir, Config) ->
    case maps:get(<<"ssl">>, Config, undefined) of
        undefined ->
            Config;
        SSLOpts ->
            NSSLOPts = lists:foldl(fun(K, Acc) ->
                           case maps:get(K, Acc, undefined) of
                               undefined -> Acc;
                               PemBin ->
                                   CertFile = generate_filename(CertsDir, K),
                                   ok = save_cert_to_file(CertFile, PemBin),
                                   Acc#{K => CertFile}
                           end
                       end, SSLOpts, [<<"certfile">>, <<"keyfile">>, <<"cacertfile">>]),
            Config#{<<"ssl">> => NSSLOPts}
    end.

convert_certs(CertsDir, NewConfig, OldConfig) ->
    case maps:get(<<"ssl">>, NewConfig, undefined) of
        undefined ->
            NewConfig;
        NewSSLOpts ->
            OldSSLOpts = maps:get(<<"ssl">>, OldConfig, #{}),
            Diff = diff_certs(NewSSLOpts, OldSSLOpts),
            NSSLOpts = lists:foldl(fun({identical, K}, Acc) ->
                               Acc#{K => maps:get(K, OldSSLOpts)};
                           ({_, K}, Acc) ->
                               CertFile = generate_filename(CertsDir, K),
                               ok = save_cert_to_file(CertFile, maps:get(K, NewSSLOpts)),
                               Acc#{K => CertFile}
                       end, NewSSLOpts, Diff),
            NewConfig#{<<"ssl">> => NSSLOpts}
    end.

clear_certs(CertsDir, Config) ->
    case maps:get(<<"ssl">>, Config, undefined) of
        undefined ->
            ok;
        SSLOpts ->
            lists:foreach(
                fun({_, Filename}) ->
                    _ = file:delete(filename:join([CertsDir, Filename]))
                end,
                maps:to_list(maps:with([<<"certfile">>, <<"keyfile">>, <<"cacertfile">>], SSLOpts)))
    end.

save_cert_to_file(Filename, PemBin) ->
    case public_key:pem_decode(PemBin) =/= [] of
        true ->
            case filelib:ensure_dir(Filename) of
                ok ->
                    case file:write_file(Filename, PemBin) of
                        ok -> ok;
                        {error, Reason} -> error({save_cert_to_file, {write_file, Reason}})
                    end;
                {error, Reason} ->
                    error({save_cert_to_file, {ensure_dir, Reason}})
            end;
        false ->
            error({save_cert_to_file, invalid_certificate})
    end.

generate_filename(CertsDir, Key) ->
    Prefix = case Key of
                 <<"keyfile">> -> "key-";
                 <<"certfile">> -> "cert-";
                 <<"cacertfile">> -> "cacert-"
             end,
    to_bin(filename:join([CertsDir, Prefix ++ emqx_misc:gen_id() ++ ".pem"])).

diff_certs(NewSSLOpts, OldSSLOpts) ->
    Keys = [<<"cacertfile">>, <<"certfile">>, <<"keyfile">>],
    CertPems = maps:with(Keys, NewSSLOpts),
    CertFiles = maps:with(Keys, OldSSLOpts),
    Diff = lists:foldl(fun({K, CertFile}, Acc) ->
                           case maps:find(K, CertPems) of
                               error -> Acc;
                               {ok, PemBin1} ->
                                   {ok, PemBin2} = file:read_file(CertFile),
                                   case diff_cert(PemBin1, PemBin2) of
                                       true ->
                                           [{changed, K} | Acc];
                                       false ->
                                           [{identical, K} | Acc]
                                   end
                           end
                       end,
                       [], maps:to_list(CertFiles)),
    Added = [{added, K} || K <- maps:keys(maps:without(maps:keys(CertFiles), CertPems))],
    Diff ++ Added.

diff_cert(Pem1, Pem2) ->
    cal_md5_for_cert(Pem1) =/= cal_md5_for_cert(Pem2).

cal_md5_for_cert(Pem) ->
    crypto:hash(md5, term_to_binary(public_key:pem_decode(Pem))).

split_by_id(ID, AuthenticatorsConfig) ->
    case lists:foldl(
            fun(C, {P1, P2, F0}) ->
@@ -631,30 +786,30 @@ global_chain(stomp) ->
global_chain(_) ->
    'unknown:global'.

may_hook(#{hooked := false} = State) ->
maybe_hook(#{hooked := false} = State) ->
    case lists:any(fun(#chain{authenticators = []}) -> false;
                      (_) -> true
                   end, ets:tab2list(?CHAINS_TAB)) of
        true ->
            _ = emqx:hook('client.authenticate', {emqx_authentication, authenticate, []}),
            _ = emqx:hook('client.authenticate', {?MODULE, authenticate, []}),
            State#{hooked => true};
        false ->
            State
    end;
may_hook(State) ->
maybe_hook(State) ->
    State.

may_unhook(#{hooked := true} = State) ->
maybe_unhook(#{hooked := true} = State) ->
    case lists:all(fun(#chain{authenticators = []}) -> true;
                      (_) -> false
                   end, ets:tab2list(?CHAINS_TAB)) of
        true ->
            _ = emqx:unhook('client.authenticate', {emqx_authentication, authenticate, []}),
            _ = emqx:unhook('client.authenticate', {?MODULE, authenticate, []}),
            State#{hooked => false};
        false ->
            State
    end;
may_unhook(State) ->
maybe_unhook(State) ->
    State.

do_create_authenticator(ChainName, AuthenticatorID, #{enable := Enable} = Config, Providers) ->
@@ -678,7 +833,7 @@ do_create_authenticator(ChainName, AuthenticatorID, #{enable := Enable} = Config
do_delete_authenticator(#authenticator{provider = Provider, state = State}) ->
    _ = Provider:destroy(State),
    ok.

replace_authenticator(ID, Authenticator, Authenticators) ->
    lists:keyreplace(ID, #authenticator.id, Authenticators, Authenticator).
@@ -777,3 +932,9 @@ to_list(M) when is_map(M) ->
    [M];
to_list(L) when is_list(L) ->
    L.

to_bin(B) when is_binary(B) -> B;
to_bin(L) when is_list(L) -> list_to_binary(L);
to_bin(A) when is_atom(A) -> atom_to_binary(A).

call(Call) -> gen_server:call(?MODULE, Call, infinity).
@@ -1,5 +1,5 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%% Copyright (c) 2017-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -14,44 +14,35 @@
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_plugin_libs_id).
-module(emqx_authentication_sup).

-export([gen/0, gen/1]).
-behaviour(supervisor).

-define(SHORT, 8).
-export([start_link/0]).

-export([init/1]).

%%--------------------------------------------------------------------
%% APIs
%% API
%%--------------------------------------------------------------------
-spec(gen() -> list()).
gen() ->
    gen(?SHORT).

-spec(gen(integer()) -> list()).
gen(Len) ->
    BitLen = Len * 4,
    <<R:BitLen>> = crypto:strong_rand_bytes(Len div 2),
    int_to_hex(R, Len).
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).

%%------------------------------------------------------------------------------
%% Internal Functions
%%------------------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Supervisor callbacks
%%--------------------------------------------------------------------

int_to_hex(I, N) when is_integer(I), I >= 0 ->
    int_to_hex([], I, 1, N).
init([]) ->
    SupFlags = #{strategy => one_for_one,
                 intensity => 100,
                 period => 10},

int_to_hex(L, I, Count, N)
  when I < 16 ->
    pad([int_to_hex(I) | L], N - Count);
int_to_hex(L, I, Count, N) ->
    int_to_hex([int_to_hex(I rem 16) | L], I div 16, Count + 1, N).
    AuthN = #{id => emqx_authentication,
              start => {emqx_authentication, start_link, []},
              restart => permanent,
              shutdown => 1000,
              type => worker,
              modules => [emqx_authentication]},

int_to_hex(I) when 0 =< I, I =< 9 ->
    I + $0;
int_to_hex(I) when 10 =< I, I =< 15 ->
    (I - 10) + $a.

pad(L, 0) ->
    L;
pad(L, Count) ->
    pad([$0 | L], Count - 1).
    {ok, {SupFlags, [AuthN]}}.
@@ -68,7 +68,7 @@ list_authz_cache() ->
    map_authz_cache(fun(Cache) -> Cache end).

%% We'll clean up the cache before replacing an expired authz result.
-spec get_authz_cache(emqx_types:pubsub(), emqx_topic:topic()) ->
-spec get_authz_cache(emqx_types:pubsub(), emqx_types:topic()) ->
    authz_result() | not_found.
get_authz_cache(PubSub, Topic) ->
    case erlang:get(cache_k(PubSub, Topic)) of
@@ -85,7 +85,7 @@ get_authz_cache(PubSub, Topic) ->

%% If the cache gets full and the latest entry
%% is expired, then delete all the cache entries.
-spec put_authz_cache(emqx_types:pubsub(), emqx_topic:topic(), authz_result())
-spec put_authz_cache(emqx_types:pubsub(), emqx_types:topic(), authz_result())
    -> ok.
put_authz_cache(PubSub, Topic, AuthzResult) ->
    MaxSize = get_cache_max_size(), true = (MaxSize =/= 0),
@@ -112,17 +112,17 @@ create_tabs() ->
%% Subscribe API
%%------------------------------------------------------------------------------

-spec(subscribe(emqx_topic:topic()) -> ok).
-spec(subscribe(emqx_types:topic()) -> ok).
subscribe(Topic) when is_binary(Topic) ->
    subscribe(Topic, undefined).

-spec(subscribe(emqx_topic:topic(), emqx_types:subid() | emqx_types:subopts()) -> ok).
-spec(subscribe(emqx_types:topic(), emqx_types:subid() | emqx_types:subopts()) -> ok).
subscribe(Topic, SubId) when is_binary(Topic), ?is_subid(SubId) ->
    subscribe(Topic, SubId, ?DEFAULT_SUBOPTS);
subscribe(Topic, SubOpts) when is_binary(Topic), is_map(SubOpts) ->
    subscribe(Topic, undefined, SubOpts).

-spec(subscribe(emqx_topic:topic(), emqx_types:subid(), emqx_types:subopts()) -> ok).
-spec(subscribe(emqx_types:topic(), emqx_types:subid(), emqx_types:subopts()) -> ok).
subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?is_subid(SubId), is_map(SubOpts0) ->
    SubOpts = maps:merge(?DEFAULT_SUBOPTS, SubOpts0),
    case ets:member(?SUBOPTION, {SubPid = self(), Topic}) of

@@ -165,7 +165,7 @@ do_subscribe(Group, Topic, SubPid, SubOpts) ->
%% Unsubscribe API
%%--------------------------------------------------------------------

-spec(unsubscribe(emqx_topic:topic()) -> ok).
-spec(unsubscribe(emqx_types:topic()) -> ok).
unsubscribe(Topic) when is_binary(Topic) ->
    SubPid = self(),
    case ets:lookup(?SUBOPTION, {SubPid, Topic}) of

@@ -279,7 +279,7 @@ forward(Node, To, Delivery, sync) ->
        emqx_metrics:inc('messages.forward'), Result
    end.

-spec(dispatch(emqx_topic:topic(), emqx_types:delivery()) -> emqx_types:deliver_result()).
-spec(dispatch(emqx_types:topic(), emqx_types:delivery()) -> emqx_types:deliver_result()).
dispatch(Topic, #delivery{message = Msg}) ->
    DispN = lists:foldl(
                fun(Sub, N) ->

@@ -316,7 +316,7 @@ inc_dropped_cnt(Msg) ->
    end.

-compile({inline, [subscribers/1]}).
-spec(subscribers(emqx_topic:topic() | {shard, emqx_topic:topic(), non_neg_integer()})
-spec(subscribers(emqx_types:topic() | {shard, emqx_types:topic(), non_neg_integer()})
      -> [pid()]).
subscribers(Topic) when is_binary(Topic) ->
    lookup_value(?SUBSCRIBER, Topic, []);

@@ -351,7 +351,7 @@ subscriber_down(SubPid) ->
%%--------------------------------------------------------------------

-spec(subscriptions(pid() | emqx_types:subid())
      -> [{emqx_topic:topic(), emqx_types:subopts()}]).
      -> [{emqx_types:topic(), emqx_types:subopts()}]).
subscriptions(SubPid) when is_pid(SubPid) ->
    [{Topic, lookup_value(?SUBOPTION, {SubPid, Topic}, #{})}
     || Topic <- lookup_value(?SUBSCRIPTION, SubPid, [])];

@@ -362,14 +362,14 @@ subscriptions(SubId) ->
        undefined -> []
    end.

-spec(subscribed(pid() | emqx_types:subid(), emqx_topic:topic()) -> boolean()).
-spec(subscribed(pid() | emqx_types:subid(), emqx_types:topic()) -> boolean()).
subscribed(SubPid, Topic) when is_pid(SubPid) ->
    ets:member(?SUBOPTION, {SubPid, Topic});
subscribed(SubId, Topic) when ?is_subid(SubId) ->
    SubPid = emqx_broker_helper:lookup_subpid(SubId),
    ets:member(?SUBOPTION, {SubPid, Topic}).

-spec(get_subopts(pid(), emqx_topic:topic()) -> maybe(emqx_types:subopts())).
-spec(get_subopts(pid(), emqx_types:topic()) -> maybe(emqx_types:subopts())).
get_subopts(SubPid, Topic) when is_pid(SubPid), is_binary(Topic) ->
    lookup_value(?SUBOPTION, {SubPid, Topic});
get_subopts(SubId, Topic) when ?is_subid(SubId) ->
|
|||
undefined -> undefined
|
||||
end.
|
||||
|
||||
-spec(set_subopts(emqx_topic:topic(), emqx_types:subopts()) -> boolean()).
|
||||
-spec(set_subopts(emqx_types:topic(), emqx_types:subopts()) -> boolean()).
|
||||
set_subopts(Topic, NewOpts) when is_binary(Topic), is_map(NewOpts) ->
|
||||
set_subopts(self(), Topic, NewOpts).
|
||||
|
||||
|
@ -392,7 +392,7 @@ set_subopts(SubPid, Topic, NewOpts) ->
|
|||
[] -> false
|
||||
end.
|
||||
|
||||
-spec(topics() -> [emqx_topic:topic()]).
|
||||
-spec(topics() -> [emqx_types:topic()]).
|
||||
topics() ->
|
||||
emqx_router:topics().
|
||||
|
||||
|
|
|
@@ -78,7 +78,7 @@ lookup_subid(SubPid) when is_pid(SubPid) ->
lookup_subpid(SubId) ->
    emqx_tables:lookup_value(?SUBID, SubId).

-spec(get_sub_shard(pid(), emqx_topic:topic()) -> non_neg_integer()).
-spec(get_sub_shard(pid(), emqx_types:topic()) -> non_neg_integer()).
get_sub_shard(SubPid, Topic) ->
    case create_seq(Topic) of
        Seq when Seq =< ?SHARD -> 0;
@@ -90,11 +90,11 @@ shards_num() ->
    %% Dynamic sharding later...
    ets:lookup_element(?HELPER, shards, 2).

-spec(create_seq(emqx_topic:topic()) -> emqx_sequence:seqid()).
-spec(create_seq(emqx_types:topic()) -> emqx_sequence:seqid()).
create_seq(Topic) ->
    emqx_sequence:nextval(?SUBSEQ, Topic).

-spec(reclaim_seq(emqx_topic:topic()) -> emqx_sequence:seqid()).
-spec(reclaim_seq(emqx_types:topic()) -> emqx_sequence:seqid()).
reclaim_seq(Topic) ->
    emqx_sequence:reclaim(?SUBSEQ, Topic).
@@ -44,13 +44,13 @@ init([]) ->
                 modules => [emqx_shared_sub]},

    %% Authentication
    AuthN = #{id => authn,
              start => {emqx_authentication, start_link, []},
              restart => permanent,
              shutdown => 2000,
              type => worker,
              modules => [emqx_authentication]},

    AuthNSup = #{id => emqx_authentication_sup,
                 start => {emqx_authentication_sup, start_link, []},
                 restart => permanent,
                 shutdown => infinity,
                 type => supervisor,
                 modules => [emqx_authentication_sup]},

    %% Broker helper
    Helper = #{id => helper,
               start => {emqx_broker_helper, start_link, []},

@@ -59,5 +59,5 @@ init([]) ->
               type => worker,
               modules => [emqx_broker_helper]},

    {ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, AuthN, Helper]}}.
    {ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, AuthNSup, Helper]}}.
@@ -355,7 +355,7 @@ save_to_override_conf(RawConf) ->
        undefined -> ok;
        FileName ->
            ok = filelib:ensure_dir(FileName),
            case file:write_file(FileName, jsx:prettify(jsx:encode(RawConf))) of
            case file:write_file(FileName, hocon_pp:do(RawConf, #{})) of
                ok -> ok;
                {error, Reason} ->
                    logger:error("write to ~s failed, ~p", [FileName, Reason]),

@@ -424,7 +424,7 @@ root_names_from_conf(RawConf) ->
    [Name || Name <- get_root_names(), lists:member(Name, Keys)].

atom(Bin) when is_binary(Bin) ->
    binary_to_existing_atom(Bin, latin1);
    binary_to_existing_atom(Bin, utf8);
atom(Str) when is_list(Str) ->
    list_to_existing_atom(Str);
atom(Atom) when is_atom(Atom) ->
@@ -310,4 +310,4 @@ safe_atom(Bin) when is_binary(Bin) ->
safe_atom(Str) when is_list(Str) ->
    list_to_existing_atom(Str);
safe_atom(Atom) when is_atom(Atom) ->
    Atom.
    Atom.
@@ -44,7 +44,7 @@

-type(options() :: #{strict_mode => boolean(),
                     max_size => 1..?MAX_PACKET_SIZE,
                     version => emqx_types:version()
                     version => emqx_types:proto_ver()
                    }).

-type(parse_state() :: {none, options()} | {cont_state(), options()}).

@@ -490,7 +490,7 @@ serialize_pkt(Packet, #{version := Ver, max_size := MaxSize}) ->
-spec(serialize(emqx_types:packet()) -> iodata()).
serialize(Packet) -> serialize(Packet, ?MQTT_PROTO_V4).

-spec(serialize(emqx_types:packet(), emqx_types:version()) -> iodata()).
-spec(serialize(emqx_types:packet(), emqx_types:proto_ver()) -> iodata()).
serialize(#mqtt_packet{header = Header,
                       variable = Variable,
                       payload = Payload}, Ver) ->
@@ -86,19 +86,19 @@

-elvis([{elvis_style, god_modules, disable}]).

-spec(make(emqx_topic:topic(), emqx_types:payload()) -> emqx_types:message()).
-spec(make(emqx_types:topic(), emqx_types:payload()) -> emqx_types:message()).
make(Topic, Payload) ->
    make(undefined, Topic, Payload).

-spec(make(emqx_types:clientid(),
           emqx_topic:topic(),
           emqx_types:topic(),
           emqx_types:payload()) -> emqx_types:message()).
make(From, Topic, Payload) ->
    make(From, ?QOS_0, Topic, Payload).

-spec(make(emqx_types:clientid(),
           emqx_types:qos(),
           emqx_topic:topic(),
           emqx_types:topic(),
           emqx_types:payload()) -> emqx_types:message()).
make(From, QoS, Topic, Payload) when ?QOS_0 =< QoS, QoS =< ?QOS_2 ->
    Now = erlang:system_time(millisecond),

@@ -112,7 +112,7 @@ make(From, QoS, Topic, Payload) when ?QOS_0 =< QoS, QoS =< ?QOS_2 ->

-spec(make(emqx_types:clientid(),
           emqx_types:qos(),
           emqx_topic:topic(),
           emqx_types:topic(),
           emqx_types:payload(),
           emqx_types:flags(),
           emqx_types:headers()) -> emqx_types:message()).

@@ -133,7 +133,7 @@ make(From, QoS, Topic, Payload, Flags, Headers)
-spec(make(MsgId :: binary(),
           emqx_types:clientid(),
           emqx_types:qos(),
           emqx_topic:topic(),
           emqx_types:topic(),
           emqx_types:payload(),
           emqx_types:flags(),
           emqx_types:headers()) -> emqx_types:message()).
@@ -45,6 +45,8 @@
        , index_of/2
        , maybe_parse_ip/1
        , ipv6_probe/1
        , gen_id/0
        , gen_id/1
        ]).

-export([ bin2hexstr_A_F/1

@@ -52,6 +54,8 @@
        , hexstr2bin/1
        ]).

-define(SHORT, 8).

%% @doc Parse v4 or v6 string format address to tuple.
%% `Host' itself is returned if it's not an ip string.
maybe_parse_ip(Host) ->
@@ -298,6 +302,39 @@ hexchar2int(I) when I >= $0 andalso I =< $9 -> I - $0;
hexchar2int(I) when I >= $A andalso I =< $F -> I - $A + 10;
hexchar2int(I) when I >= $a andalso I =< $f -> I - $a + 10.

-spec(gen_id() -> list()).
gen_id() ->
    gen_id(?SHORT).

-spec(gen_id(integer()) -> list()).
gen_id(Len) ->
    BitLen = Len * 4,
    <<R:BitLen>> = crypto:strong_rand_bytes(Len div 2),
    int_to_hex(R, Len).

%%------------------------------------------------------------------------------
%% Internal Functions
%%------------------------------------------------------------------------------

int_to_hex(I, N) when is_integer(I), I >= 0 ->
    int_to_hex([], I, 1, N).

int_to_hex(L, I, Count, N)
  when I < 16 ->
    pad([int_to_hex(I) | L], N - Count);
int_to_hex(L, I, Count, N) ->
    int_to_hex([int_to_hex(I rem 16) | L], I div 16, Count + 1, N).

int_to_hex(I) when 0 =< I, I =< 9 ->
    I + $0;
int_to_hex(I) when 10 =< I, I =< 15 ->
    (I - 10) + $a.

pad(L, 0) ->
    L;
pad(L, Count) ->
    pad([$0 | L], Count - 1).
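A short note on the helpers above: `gen_id/1` draws `Len div 2` random bytes and renders them as a zero-padded, lowercase hex string of length `Len`, so the default is an 8-character ID. Sketch:

%% Sketch: gen_id/0 returns an 8-character lowercase hex string.
Id = emqx_misc:gen_id(),
8 = length(Id),
true = lists:all(fun(C) -> (C >= $0 andalso C =< $9)
                    orelse (C >= $a andalso C =< $f)
                 end, Id).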

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
@@ -67,7 +67,7 @@
-spec(check_pub(emqx_types:zone(),
                #{qos := emqx_types:qos(),
                  retain := boolean(),
                  topic := emqx_topic:topic()})
                  topic := emqx_types:topic()})
      -> ok_or_error(emqx_types:reason_code())).
check_pub(Zone, Flags) when is_map(Flags) ->
    do_check_pub(case maps:take(topic, Flags) of
@@ -71,7 +71,7 @@

-export_type([mqueue/0, options/0]).

-type(topic() :: emqx_topic:topic()).
-type(topic() :: emqx_types:topic()).
-type(priority() :: infinity | integer()).
-type(pq() :: emqx_pqueue:q()).
-type(count() :: non_neg_integer()).
@@ -93,6 +93,11 @@
-define(MAX_LEN_INFINITY, 0).
-define(INFO_KEYS, [store_qos0, max_len, len, dropped]).

-record(shift_opts, {
          multiplier :: non_neg_integer(),
          base :: integer()
         }).

-record(mqueue, {
          store_qos0 = false :: boolean(),
          max_len = ?MAX_LEN_INFINITY :: count(),
@@ -100,7 +105,10 @@
          dropped = 0 :: count(),
          p_table = ?NO_PRIORITY_TABLE :: p_table(),
          default_p = ?LOWEST_PRIORITY :: priority(),
          q = ?PQUEUE:new() :: pq()
          q = ?PQUEUE:new() :: pq(),
          shift_opts :: #shift_opts{},
          last_prio :: non_neg_integer() | undefined,
          p_credit :: non_neg_integer() | undefined
         }).

-type(mqueue() :: #mqueue{}).
@ -114,7 +122,8 @@ init(Opts = #{max_len := MaxLen0, store_qos0 := QoS_0}) ->
|
|||
#mqueue{max_len = MaxLen,
|
||||
store_qos0 = QoS_0,
|
||||
p_table = get_opt(priorities, Opts, ?NO_PRIORITY_TABLE),
|
||||
default_p = get_priority_opt(Opts)
|
||||
default_p = get_priority_opt(Opts),
|
||||
shift_opts = get_shift_opt(Opts)
|
||||
}.
|
||||
|
||||
-spec(info(mqueue()) -> emqx_types:infos()).
|
||||
|
@ -173,9 +182,24 @@ in(Msg = #message{topic = Topic}, MQ = #mqueue{default_p = Dp,
|
|||
out(MQ = #mqueue{len = 0, q = Q}) ->
|
||||
0 = ?PQUEUE:len(Q), %% assert, in this case, ?PQUEUE:len should be very cheap
|
||||
{empty, MQ};
|
||||
out(MQ = #mqueue{q = Q, len = Len}) ->
|
||||
out(MQ = #mqueue{q = Q, len = Len, last_prio = undefined, shift_opts = ShiftOpts}) ->
|
||||
{{value, Val, Prio}, Q1} = ?PQUEUE:out_p(Q), %% Shouldn't fail, since we've checked the length
|
||||
MQ1 = MQ#mqueue{
|
||||
q = Q1,
|
||||
len = Len - 1,
|
||||
last_prio = Prio,
|
||||
p_credit = get_credits(Prio, ShiftOpts)
|
||||
},
|
||||
{{value, Val}, MQ1};
|
||||
out(MQ = #mqueue{q = Q, p_credit = 0}) ->
|
||||
MQ1 = MQ#mqueue{
|
||||
q = ?PQUEUE:shift(Q),
|
||||
last_prio = undefined
|
||||
},
|
||||
out(MQ1);
|
||||
out(MQ = #mqueue{q = Q, len = Len, p_credit = Cnt}) ->
|
||||
{R, Q1} = ?PQUEUE:out(Q),
|
||||
{R, MQ#mqueue{q = Q1, len = Len - 1}}.
|
||||
{R, MQ#mqueue{q = Q1, len = Len - 1, p_credit = Cnt - 1}}.
|
||||
|
||||
get_opt(Key, Opts, Default) ->
|
||||
case maps:get(Key, Opts, Default) of
|
||||
|
@ -196,3 +220,35 @@ get_priority_opt(Opts) ->
|
|||
%% while the highest 'infinity' is a [{infinity, queue:queue()}]
|
||||
get_priority(_Topic, ?NO_PRIORITY_TABLE, _) -> ?LOWEST_PRIORITY;
|
||||
get_priority(Topic, PTab, Dp) -> maps:get(Topic, PTab, Dp).
|
||||
|
||||
get_credits(?HIGHEST_PRIORITY, Opts) ->
|
||||
Infinity = 1000000,
|
||||
get_credits(Infinity, Opts);
|
||||
get_credits(Prio, #shift_opts{multiplier = Mult, base = Base}) ->
|
||||
(Prio + Base + 1) * Mult - 1.
|
||||
|
||||
get_shift_opt(Opts) ->
|
||||
%% Using 10 as a multiplier by default. This is needed to minimize
|
||||
%% overhead of ?PQUEUE:rotate
|
||||
Mult = maps:get(shift_multiplier, Opts, 10),
|
||||
true = is_integer(Mult) andalso Mult > 0,
|
||||
Min = case Opts of
|
||||
#{p_table := PTab} ->
|
||||
case maps:size(PTab) of
|
||||
0 -> 0;
|
||||
_ -> lists:min(maps:values(PTab))
|
||||
end;
|
||||
_ ->
|
||||
?LOWEST_PRIORITY
|
||||
end,
|
||||
%% `mqueue' module supports negative priorities, but we don't want
|
||||
%% the counter to be negative, so all priorities should be shifted
|
||||
%% by a constant, if negative priorities are used:
|
||||
Base = case Min < 0 of
|
||||
true -> -Min;
|
||||
false -> 0
|
||||
end,
|
||||
#shift_opts{
|
||||
multiplier = Mult,
|
||||
base = Base
|
||||
}.
|
||||
|
|
|
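The new `p_credit` field implements a weighted round-robin across priorities: a batch of messages is drained from the current highest priority until its credits hit zero, then the priority queue is shifted and the next priority is sampled. Under the defaults introduced here (`shift_multiplier = 10`, non-negative priorities so `Base = 0`), the per-batch budget works out as follows (arithmetic sketch only):

```erlang
%% get_credits/2 with Base = 0, Mult = 10 (assumed defaults):
%%   priority 0 -> (0 + 0 + 1) * 10 - 1 = 9    %% ~10 messages per batch
%%   priority 1 -> (1 + 0 + 1) * 10 - 1 = 19   %% ~20 messages per batch
%%   priority 2 -> (2 + 0 + 1) * 10 - 1 = 29   %% ~30 messages per batch
%% ?HIGHEST_PRIORITY (infinity) is capped at the large finite 1000000.
```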
@@ -114,7 +114,7 @@ proto_name(#mqtt_packet_connect{proto_name = Name}) ->
     Name.

 %% @doc Protocol version of the CONNECT Packet.
--spec(proto_ver(emqx_types:packet()|connect()) -> emqx_types:version()).
+-spec(proto_ver(emqx_types:packet()|connect()) -> emqx_types:proto_ver()).
 proto_ver(?CONNECT_PACKET(ConnPkt)) ->
     proto_ver(ConnPkt);
 proto_ver(#mqtt_packet_connect{proto_ver = Ver}) ->
@@ -55,6 +55,7 @@
         , filter/2
         , fold/3
         , highest/1
+        , shift/1
         ]).

 -export_type([q/0]).
@@ -170,6 +171,14 @@ out({pqueue, [{P, Q} | Queues]}) ->
     end,
     {R, NewQ}.

+-spec(shift(pqueue()) -> pqueue()).
+shift(Q = {queue, _, _, _}) ->
+    Q;
+shift({pqueue, []}) ->
+    {pqueue, []}; %% Shouldn't happen?
+shift({pqueue, [Hd|Rest]}) ->
+    {pqueue, Rest ++ [Hd]}. %% Let's hope there are not many priorities.
+
 -spec(out_p(pqueue()) -> {empty | {value, any(), priority()}, pqueue()}).
 out_p({queue, _, _, _} = Q) -> add_p(out(Q), 0);
 out_p({pqueue, [{P, _} | _]} = Q) -> add_p(out(Q), maybe_negate_priority(P)).
@@ -266,4 +275,3 @@ r2f([X,Y|R], L) -> {queue, [X,Y], lists:reverse(R, []), L}.

 maybe_negate_priority(infinity) -> infinity;
 maybe_negate_priority(P) -> -P.
-
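`shift/1` rotates the head priority bucket to the back of the bucket list, which is what lets `emqx_mqueue` move on to the next priority once a batch's credits are spent. A sketch of the rotation on the internal representation used in this module (sub-queues abbreviated; priorities are stored negated, so -2 ranks highest):

```erlang
%% shift({pqueue, [{-2, Qa}, {-1, Qb}, {0, Qc}]})
%%   =:= {pqueue, [{-1, Qb}, {0, Qc}, {-2, Qa}]}
%% A plain {queue, _, _, _} (single implicit priority) is returned unchanged.
```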
@ -98,19 +98,19 @@ start_link(Pool, Id) ->
|
|||
%% Route APIs
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-spec(add_route(emqx_topic:topic()) -> ok | {error, term()}).
|
||||
-spec(add_route(emqx_types:topic()) -> ok | {error, term()}).
|
||||
add_route(Topic) when is_binary(Topic) ->
|
||||
add_route(Topic, node()).
|
||||
|
||||
-spec(add_route(emqx_topic:topic(), dest()) -> ok | {error, term()}).
|
||||
-spec(add_route(emqx_types:topic(), dest()) -> ok | {error, term()}).
|
||||
add_route(Topic, Dest) when is_binary(Topic) ->
|
||||
call(pick(Topic), {add_route, Topic, Dest}).
|
||||
|
||||
-spec(do_add_route(emqx_topic:topic()) -> ok | {error, term()}).
|
||||
-spec(do_add_route(emqx_types:topic()) -> ok | {error, term()}).
|
||||
do_add_route(Topic) when is_binary(Topic) ->
|
||||
do_add_route(Topic, node()).
|
||||
|
||||
-spec(do_add_route(emqx_topic:topic(), dest()) -> ok | {error, term()}).
|
||||
-spec(do_add_route(emqx_types:topic(), dest()) -> ok | {error, term()}).
|
||||
do_add_route(Topic, Dest) when is_binary(Topic) ->
|
||||
Route = #route{topic = Topic, dest = Dest},
|
||||
case lists:member(Route, lookup_routes(Topic)) of
|
||||
|
@ -125,7 +125,7 @@ do_add_route(Topic, Dest) when is_binary(Topic) ->
|
|||
end.
|
||||
|
||||
%% @doc Match routes
|
||||
-spec(match_routes(emqx_topic:topic()) -> [emqx_types:route()]).
|
||||
-spec(match_routes(emqx_types:topic()) -> [emqx_types:route()]).
|
||||
match_routes(Topic) when is_binary(Topic) ->
|
||||
case match_trie(Topic) of
|
||||
[] -> lookup_routes(Topic);
|
||||
|
@ -140,27 +140,27 @@ match_trie(Topic) ->
|
|||
false -> emqx_trie:match(Topic)
|
||||
end.
|
||||
|
||||
-spec(lookup_routes(emqx_topic:topic()) -> [emqx_types:route()]).
|
||||
-spec(lookup_routes(emqx_types:topic()) -> [emqx_types:route()]).
|
||||
lookup_routes(Topic) ->
|
||||
ets:lookup(?ROUTE_TAB, Topic).
|
||||
|
||||
-spec(has_routes(emqx_topic:topic()) -> boolean()).
|
||||
-spec(has_routes(emqx_types:topic()) -> boolean()).
|
||||
has_routes(Topic) when is_binary(Topic) ->
|
||||
ets:member(?ROUTE_TAB, Topic).
|
||||
|
||||
-spec(delete_route(emqx_topic:topic()) -> ok | {error, term()}).
|
||||
-spec(delete_route(emqx_types:topic()) -> ok | {error, term()}).
|
||||
delete_route(Topic) when is_binary(Topic) ->
|
||||
delete_route(Topic, node()).
|
||||
|
||||
-spec(delete_route(emqx_topic:topic(), dest()) -> ok | {error, term()}).
|
||||
-spec(delete_route(emqx_types:topic(), dest()) -> ok | {error, term()}).
|
||||
delete_route(Topic, Dest) when is_binary(Topic) ->
|
||||
call(pick(Topic), {delete_route, Topic, Dest}).
|
||||
|
||||
-spec(do_delete_route(emqx_topic:topic()) -> ok | {error, term()}).
|
||||
-spec(do_delete_route(emqx_types:topic()) -> ok | {error, term()}).
|
||||
do_delete_route(Topic) when is_binary(Topic) ->
|
||||
do_delete_route(Topic, node()).
|
||||
|
||||
-spec(do_delete_route(emqx_topic:topic(), dest()) -> ok | {error, term()}).
|
||||
-spec(do_delete_route(emqx_types:topic(), dest()) -> ok | {error, term()}).
|
||||
do_delete_route(Topic, Dest) ->
|
||||
Route = #route{topic = Topic, dest = Dest},
|
||||
case emqx_topic:wildcard(Topic) of
|
||||
|
@ -169,12 +169,12 @@ do_delete_route(Topic, Dest) ->
|
|||
false -> delete_direct_route(Route)
|
||||
end.
|
||||
|
||||
-spec(topics() -> list(emqx_topic:topic())).
|
||||
-spec(topics() -> list(emqx_types:topic())).
|
||||
topics() ->
|
||||
mnesia:dirty_all_keys(?ROUTE_TAB).
|
||||
|
||||
%% @doc Print routes to a topic
|
||||
-spec(print_routes(emqx_topic:topic()) -> ok).
|
||||
-spec(print_routes(emqx_types:topic()) -> ok).
|
||||
print_routes(Topic) ->
|
||||
lists:foreach(fun(#route{topic = To, dest = Dest}) ->
|
||||
io:format("~s -> ~s~n", [To, Dest])
|
||||
|
|
|
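Only the spec annotations change in this module; the call shapes stay as they were. For reference, a minimal sketch of the public API as it reads after the rename (the peer node name is a placeholder, and the return shown for `match_routes/1` is only the expected shape):

```erlang
%% Shell sketch, assuming a running broker:
1> emqx_router:add_route(<<"sensors/#">>).            %% route to node()
ok
2> emqx_router:add_route(<<"sensors/#">>, 'emqx@n2'). %% placeholder peer
ok
3> emqx_router:match_routes(<<"sensors/temp">>).      %% list of #route{}
4> emqx_router:delete_route(<<"sensors/#">>).
ok
```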
@ -156,11 +156,11 @@ fields("stats") ->
|
|||
|
||||
fields("authorization") ->
|
||||
[ {"no_match",
|
||||
sc(union(allow, deny),
|
||||
sc(hoconsc:union([allow, deny]),
|
||||
#{ default => allow
|
||||
})}
|
||||
, {"deny_action",
|
||||
sc(union(ignore, disconnect),
|
||||
sc(hoconsc:union([ignore, disconnect]),
|
||||
#{ default => ignore
|
||||
})}
|
||||
, {"cache",
|
||||
|
@ -904,9 +904,10 @@ filter(Opts) ->
|
|||
|
||||
ssl(Defaults) ->
|
||||
D = fun (Field) -> maps:get(to_atom(Field), Defaults, undefined) end,
|
||||
Df = fun (Field, Default) -> maps:get(to_atom(Field), Defaults, Default) end,
|
||||
[ {"enable",
|
||||
sc(boolean(),
|
||||
#{ default => D("enable")
|
||||
#{ default => Df("enable", false)
|
||||
})
|
||||
}
|
||||
, {"cacertfile",
|
||||
|
@ -926,37 +927,58 @@ ssl(Defaults) ->
|
|||
}
|
||||
, {"verify",
|
||||
sc(hoconsc:union([verify_peer, verify_none]),
|
||||
#{ default => D("verify")
|
||||
#{ default => Df("verify", verify_none)
|
||||
})
|
||||
}
|
||||
, {"fail_if_no_peer_cert",
|
||||
sc(boolean(),
|
||||
#{ default => D("fail_if_no_peer_cert")
|
||||
#{ default => Df("fail_if_no_peer_cert", false)
|
||||
})
|
||||
}
|
||||
, {"secure_renegotiate",
|
||||
sc(boolean(),
|
||||
#{ default => D("secure_renegotiate")
|
||||
#{ default => Df("secure_renegotiate", true)
|
||||
, desc => """
|
||||
SSL parameter renegotiation is a feature that allows a client and a server
|
||||
to renegotiate the parameters of the SSL connection on the fly.
|
||||
RFC 5746 defines a more secure way of doing this. By enabling secure renegotiation,
|
||||
you drop support for the insecure renegotiation, prone to MitM attacks.
|
||||
"""
|
||||
})
|
||||
}
|
||||
, {"client_renegotiation",
|
||||
sc(boolean(),
|
||||
#{ default => Df("client_renegotiation", true)
|
||||
, desc => """
|
||||
In protocols that support client-initiated renegotiation,
|
||||
the cost of resources of such an operation is higher for the server than the client.
|
||||
This can act as a vector for denial of service attacks.
|
||||
The SSL application already takes measures to counter-act such attempts,
|
||||
but client-initiated renegotiation can be strictly disabled by setting this option to false.
|
||||
The default value is true. Note that disabling renegotiation can result in
|
||||
long-lived connections becoming unusable due to limits on
|
||||
the number of messages the underlying cipher suite can encipher.
|
||||
"""
|
||||
})
|
||||
}
|
||||
, {"reuse_sessions",
|
||||
sc(boolean(),
|
||||
#{ default => D("reuse_sessions")
|
||||
#{ default => Df("reuse_sessions", true)
|
||||
})
|
||||
}
|
||||
, {"honor_cipher_order",
|
||||
sc(boolean(),
|
||||
#{ default => D("honor_cipher_order")
|
||||
#{ default => Df("honor_cipher_order", true)
|
||||
})
|
||||
}
|
||||
, {"handshake_timeout",
|
||||
sc(duration(),
|
||||
#{ default => D("handshake_timeout")
|
||||
#{ default => Df("handshake_timeout", "15s")
|
||||
})
|
||||
}
|
||||
, {"depth",
|
||||
sc(integer(),
|
||||
#{default => D("depth")
|
||||
#{default => Df("depth", 10)
|
||||
})
|
||||
}
|
||||
, {"password",
|
||||
|
@ -1093,9 +1115,18 @@ to_bar_separated_list(Str) ->
|
|||
to_ip_port(Str) ->
|
||||
case string:tokens(Str, ":") of
|
||||
[Ip, Port] ->
|
||||
PortVal = list_to_integer(Port),
|
||||
case inet:parse_address(Ip) of
|
||||
{ok, R} -> {ok, {R, list_to_integer(Port)}};
|
||||
_ -> {error, Str}
|
||||
{ok, R} ->
|
||||
{ok, {R, PortVal}};
|
||||
_ ->
|
||||
%% check is a rfc1035's hostname
|
||||
case inet_parse:domain(Ip) of
|
||||
true ->
|
||||
{ok, {Ip, PortVal}};
|
||||
_ ->
|
||||
{error, Str}
|
||||
end
|
||||
end;
|
||||
_ -> {error, Str}
|
||||
end.
|
||||
|
|
|
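With the RFC 1035 fallback, `to_ip_port/1` now accepts both address literals and hostnames, rather than failing on anything `inet:parse_address/1` rejects. A sketch of the expected conversions (shapes only):

```erlang
%% to_ip_port("127.0.0.1:1883")    -> {ok, {{127,0,0,1}, 1883}}
%% to_ip_port("broker.local:1883") -> {ok, {"broker.local", 1883}}
%% to_ip_port("no-port-given")     -> {error, "no-port-given"}
```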
@@ -103,18 +103,18 @@ mnesia(copy) ->
 start_link() ->
     gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).

--spec(subscribe(emqx_topic:group(), emqx_topic:topic(), pid()) -> ok).
+-spec(subscribe(emqx_types:group(), emqx_types:topic(), pid()) -> ok).
 subscribe(Group, Topic, SubPid) when is_pid(SubPid) ->
     gen_server:call(?SERVER, {subscribe, Group, Topic, SubPid}).

--spec(unsubscribe(emqx_topic:group(), emqx_topic:topic(), pid()) -> ok).
+-spec(unsubscribe(emqx_types:group(), emqx_types:topic(), pid()) -> ok).
 unsubscribe(Group, Topic, SubPid) when is_pid(SubPid) ->
     gen_server:call(?SERVER, {unsubscribe, Group, Topic, SubPid}).

 record(Group, Topic, SubPid) ->
     #emqx_shared_subscription{group = Group, topic = Topic, subpid = SubPid}.

--spec(dispatch(emqx_topic:group(), emqx_topic:topic(), emqx_types:delivery())
+-spec(dispatch(emqx_types:group(), emqx_types:topic(), emqx_types:delivery())
       -> emqx_types:deliver_result()).
 dispatch(Group, Topic, Delivery) ->
     dispatch(Group, Topic, Delivery, _FailedSubs = []).
@@ -77,7 +77,7 @@ mnesia(copy) ->
 %%--------------------------------------------------------------------

 %% @doc Insert a topic filter into the trie.
--spec(insert(emqx_topic:topic()) -> ok).
+-spec(insert(emqx_types:topic()) -> ok).
 insert(Topic) when is_binary(Topic) ->
     {TopicKey, PrefixKeys} = make_keys(Topic),
     case mnesia:wread({?TRIE, TopicKey}) of
@@ -86,7 +86,7 @@ insert(Topic) when is_binary(Topic) ->
     end.

 %% @doc Delete a topic filter from the trie.
--spec(delete(emqx_topic:topic()) -> ok).
+-spec(delete(emqx_types:topic()) -> ok).
 delete(Topic) when is_binary(Topic) ->
     {TopicKey, PrefixKeys} = make_keys(Topic),
     case [] =/= mnesia:wread({?TRIE, TopicKey}) of
@@ -95,7 +95,7 @@ delete(Topic) when is_binary(Topic) ->
     end.

 %% @doc Find trie nodes that matchs the topic name.
--spec(match(emqx_topic:topic()) -> list(emqx_topic:topic())).
+-spec(match(emqx_types:topic()) -> list(emqx_types:topic())).
 match(Topic) when is_binary(Topic) ->
     Words = emqx_topic:words(Topic),
     case emqx_topic:wildcard(Words) of
@@ -20,7 +20,7 @@
 -include("emqx_mqtt.hrl").
 -include("types.hrl").

--export_type([ ver/0
+-export_type([ proto_ver/0
              , qos/0
              , qos_name/0
              ]).
@@ -91,11 +91,11 @@

 -export_type([oom_policy/0]).

--type(ver() :: ?MQTT_PROTO_V3
-             | ?MQTT_PROTO_V4
-             | ?MQTT_PROTO_V5
-             | non_neg_integer()
-             | binary() % For lwm2m, mqtt-sn...
+-type(proto_ver() :: ?MQTT_PROTO_V3
+                   | ?MQTT_PROTO_V4
+                   | ?MQTT_PROTO_V5
+                   | non_neg_integer()
+                   | binary() % For lwm2m, mqtt-sn...
      ).

 -type(qos() :: ?QOS_0 | ?QOS_1 | ?QOS_2).
@@ -116,7 +116,7 @@
           peercert := nossl | undefined | esockd_peercert:peercert(),
           conn_mod := module(),
           proto_name => binary(),
-          proto_ver => ver(),
+          proto_ver => proto_ver(),
           clean_start => boolean(),
           clientid => clientid(),
           username => username(),
@@ -146,7 +146,7 @@
           dn => binary(),
           atom() => term()
          }).
--type(clientid() :: binary()|atom()).
+-type(clientid() :: binary() | atom()).
 -type(username() :: maybe(binary())).
 -type(password() :: maybe(binary())).
 -type(peerhost() :: inet:ip_address()).
@@ -187,12 +187,12 @@
 -type(message() :: #message{}).
 -type(flag() :: sys | dup | retain | atom()).
 -type(flags() :: #{flag() := boolean()}).
--type(headers() :: #{proto_ver => ver(),
-                     protocol => protocol(),
-                     username => username(),
-                     peerhost => peerhost(),
+-type(headers() :: #{proto_ver => proto_ver(),
+                     protocol => protocol(),
+                     username => username(),
+                     peerhost => peerhost(),
                      properties => properties(),
-                     atom() => term()}).
+                     atom() => term()}).

 -type(banned() :: #banned{}).
 -type(deliver() :: {deliver, topic(), message()}).
@@ -201,8 +201,8 @@
 -type(publish_result() :: [{node(), topic(), deliver_result()} |
                            {share, topic(), deliver_result()}]).
 -type(route() :: #route{}).
--type(sub_group() :: tuple() | binary()).
--type(route_entry() :: {topic(), node()} | {topic, sub_group()}).
+-type(group() :: emqx_topic:group()).
+-type(route_entry() :: {topic(), node()} | {topic, group()}).
 -type(plugin() :: #plugin{}).
 -type(command() :: #command{}).

@@ -215,4 +215,3 @@
           max_heap_size => non_neg_integer(),
           enable => boolean()
          }).
-
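The `ver/0` to `proto_ver/0` rename ripples into every spec that names the protocol version, as the other hunks in this change show. A hypothetical downstream spec, before and after (the module and function are illustrative, not part of this change):

```erlang
%% Before: -spec handle_connect(emqx_types:ver()) -> ok.
-spec handle_connect(emqx_types:proto_ver()) -> ok.
handle_connect(Ver) when is_integer(Ver); is_binary(Ver) -> ok.
```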
@@ -36,6 +36,7 @@
         ]).

 -define(AUTHN, emqx_authentication).
+-define(config(KEY), (fun() -> {KEY, _V_} = lists:keyfind(KEY, 1, Config), _V_ end)()).

 %%------------------------------------------------------------------------------
 %% Hocon Schema
@@ -92,7 +93,22 @@ end_per_suite(_) ->
     emqx_ct_helpers:stop_apps([]),
     ok.

-t_chain(_) ->
+init_per_testcase(Case, Config) ->
+    meck:new(emqx, [non_strict, passthrough, no_history, no_link]),
+    meck:expect(emqx, get_config, fun([node, data_dir]) ->
+                                          {data_dir, Data} = lists:keyfind(data_dir, 1, Config),
+                                          Data;
+                                     (C) -> meck:passthrough([C])
+                                  end),
+    ?MODULE:Case({'init', Config}).
+
+end_per_testcase(Case, Config) ->
+    _ = ?MODULE:Case({'end', Config}),
+    meck:unload(emqx),
+    ok.
+
+t_chain({_, Config}) -> Config;
+t_chain(Config) when is_list(Config) ->
     % CRUD of authentication chain
     ChainName = 'test',
     ?assertMatch({ok, []}, ?AUTHN:list_chains()),
@@ -104,7 +120,10 @@ t_chain(_) ->
     ?assertMatch({error, {not_found, {chain, ChainName}}}, ?AUTHN:lookup_chain(ChainName)),
     ok.

-t_authenticator(_) ->
+t_authenticator({'init', Config}) ->
+    [{"auth1", {'password-based', 'built-in-database'}},
+     {"auth2", {'password-based', mysql}} | Config];
+t_authenticator(Config) when is_list(Config) ->
     ChainName = 'test',
     AuthenticatorConfig1 = #{mechanism => 'password-based',
                              backend => 'built-in-database',
@@ -116,8 +135,8 @@ t_authenticator(_) ->
     % Create an authenticator when the provider does not exist
     ?assertEqual({error, no_available_provider}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),

-    AuthNType1 = {'password-based', 'built-in-database'},
-    ?AUTHN:add_provider(AuthNType1, ?MODULE),
+    AuthNType1 = ?config("auth1"),
+    register_provider(AuthNType1, ?MODULE),
     ID1 = <<"password-based:built-in-database">>,

     % CRUD of authencaticator
@@ -131,8 +150,8 @@ t_authenticator(_) ->
     ?assertMatch({ok, []}, ?AUTHN:list_authenticators(ChainName)),

     % Multiple authenticators exist at the same time
-    AuthNType2 = {'password-based', mysql},
-    ?AUTHN:add_provider(AuthNType2, ?MODULE),
+    AuthNType2 = ?config("auth2"),
+    register_provider(AuthNType2, ?MODULE),
     ID2 = <<"password-based:mysql">>,
     AuthenticatorConfig2 = #{mechanism => 'password-based',
                              backend => mysql,
@@ -147,15 +166,18 @@ t_authenticator(_) ->
     ?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, bottom)),
     ?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
     ?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, {before, ID1})),
-    ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
-
-    ?AUTHN:delete_chain(ChainName),
-    ?AUTHN:remove_provider(AuthNType1),
-    ?AUTHN:remove_provider(AuthNType2),
+    ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName));
+t_authenticator({'end', Config}) ->
+    ?AUTHN:delete_chain(test),
+    ?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]),
     ok.

-t_authenticate(_) ->
-    ListenerID = 'tcp:default',
+t_authenticate({init, Config}) ->
+    [{listener_id, 'tcp:default'},
+     {authn_type, {'password-based', 'built-in-database'}} | Config];
+t_authenticate(Config) when is_list(Config) ->
+    ListenerID = ?config(listener_id),
+    AuthNType = ?config(authn_type),
     ClientInfo = #{zone => default,
                    listener => ListenerID,
                    protocol => mqtt,
@@ -163,8 +185,7 @@ t_authenticate(_) ->
                    password => <<"any">>},
     ?assertEqual({ok, #{is_superuser => false}}, emqx_access_control:authenticate(ClientInfo)),

-    AuthNType = {'password-based', 'built-in-database'},
-    ?AUTHN:add_provider(AuthNType, ?MODULE),
+    register_provider(AuthNType, ?MODULE),

     AuthenticatorConfig = #{mechanism => 'password-based',
                             backend => 'built-in-database',
@@ -172,30 +193,33 @@ t_authenticate(_) ->
     ?AUTHN:create_chain(ListenerID),
     ?assertMatch({ok, _}, ?AUTHN:create_authenticator(ListenerID, AuthenticatorConfig)),
     ?assertEqual({ok, #{is_superuser => true}}, emqx_access_control:authenticate(ClientInfo)),
-    ?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>})),
-
-    ?AUTHN:delete_chain(ListenerID),
-    ?AUTHN:remove_provider(AuthNType),
+    ?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>}));
+t_authenticate({'end', Config}) ->
+    ?AUTHN:delete_chain(?config(listener_id)),
+    ?AUTHN:deregister_provider(?config(authn_type)),
     ok.

-t_update_config(_) ->
-    emqx_config_handler:add_handler([authentication], emqx_authentication),
-
+t_update_config({init, Config}) ->
+    Global = 'mqtt:global',
     AuthNType1 = {'password-based', 'built-in-database'},
     AuthNType2 = {'password-based', mysql},
-    ?AUTHN:add_provider(AuthNType1, ?MODULE),
-    ?AUTHN:add_provider(AuthNType2, ?MODULE),
-
-    Global = 'mqtt:global',
-    AuthenticatorConfig1 = #{mechanism => 'password-based',
-                             backend => 'built-in-database',
-                             enable => true},
-    AuthenticatorConfig2 = #{mechanism => 'password-based',
-                             backend => mysql,
-                             enable => true},
+    [{global, Global},
+     {"auth1", AuthNType1},
+     {"auth2", AuthNType2} | Config];
+t_update_config(Config) when is_list(Config) ->
+    emqx_config_handler:add_handler([authentication], emqx_authentication),
+    ok = register_provider(?config("auth1"), ?MODULE),
+    ok = register_provider(?config("auth2"), ?MODULE),
+    Global = ?config(global),
+    AuthenticatorConfig1 = #{<<"mechanism">> => <<"password-based">>,
+                             <<"backend">> => <<"built-in-database">>,
+                             <<"enable">> => true},
+    AuthenticatorConfig2 = #{<<"mechanism">> => <<"password-based">>,
+                             <<"backend">> => <<"mysql">>,
+                             <<"enable">> => true},
     ID1 = <<"password-based:built-in-database">>,
     ID2 = <<"password-based:mysql">>,

     ?assertMatch({ok, []}, ?AUTHN:list_chains()),
     ?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
     ?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
@@ -203,7 +227,7 @@ t_update_config(_) ->
     ?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})),
     ?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID2)),

-    ?assertMatch({ok, _}, update_config([authentication], {update_authenticator, Global, ID1, #{}})),
+    ?assertMatch({ok, _}, update_config([authentication], {update_authenticator, Global, ID1, AuthenticatorConfig1#{<<"enable">> => false}})),
     ?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(Global, ID1)),

     ?assertMatch({ok, _}, update_config([authentication], {move_authenticator, Global, ID2, top})),
@@ -220,19 +244,65 @@ t_update_config(_) ->
     ?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig2})),
     ?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID2)),

-    ?assertMatch({ok, _}, update_config(ConfKeyPath, {update_authenticator, ListenerID, ID1, #{}})),
+    ?assertMatch({ok, _}, update_config(ConfKeyPath, {update_authenticator, ListenerID, ID1, AuthenticatorConfig1#{<<"enable">> => false}})),
     ?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),

     ?assertMatch({ok, _}, update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, top})),
     ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ListenerID)),

     ?assertMatch({ok, _}, update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})),
-    ?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
-
-    ?AUTHN:delete_chain(Global),
-    ?AUTHN:remove_provider(AuthNType1),
-    ?AUTHN:remove_provider(AuthNType2),
+    ?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1));
+t_update_config({'end', Config}) ->
+    ?AUTHN:delete_chain(?config(global)),
+    ?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]),
     ok.

+t_convert_certs({_, Config}) -> Config;
+t_convert_certs(Config) when is_list(Config) ->
+    Global = <<"mqtt:global">>,
+    Certs = certs([ {<<"keyfile">>, "key.pem"}
+                  , {<<"certfile">>, "cert.pem"}
+                  , {<<"cacertfile">>, "cacert.pem"}
+                  ]),
+
+    CertsDir = ?AUTHN:certs_dir([Global, <<"password-based:built-in-database">>]),
+    #{<<"ssl">> := NCerts} = ?AUTHN:convert_certs(CertsDir, #{<<"ssl">> => Certs}),
+    ?assertEqual(false, diff_cert(maps:get(<<"keyfile">>, NCerts), maps:get(<<"keyfile">>, Certs))),
+
+    Certs2 = certs([ {<<"keyfile">>, "key.pem"}
+                   , {<<"certfile">>, "cert.pem"}
+                   ]),
+    #{<<"ssl">> := NCerts2} = ?AUTHN:convert_certs(CertsDir, #{<<"ssl">> => Certs2}, #{<<"ssl">> => NCerts}),
+    ?assertEqual(false, diff_cert(maps:get(<<"keyfile">>, NCerts2), maps:get(<<"keyfile">>, Certs2))),
+    ?assertEqual(maps:get(<<"keyfile">>, NCerts), maps:get(<<"keyfile">>, NCerts2)),
+    ?assertEqual(maps:get(<<"certfile">>, NCerts), maps:get(<<"certfile">>, NCerts2)),
+
+    Certs3 = certs([ {<<"keyfile">>, "client-key.pem"}
+                   , {<<"certfile">>, "client-cert.pem"}
+                   , {<<"cacertfile">>, "cacert.pem"}
+                   ]),
+    #{<<"ssl">> := NCerts3} = ?AUTHN:convert_certs(CertsDir, #{<<"ssl">> => Certs3}, #{<<"ssl">> => NCerts2}),
+    ?assertEqual(false, diff_cert(maps:get(<<"keyfile">>, NCerts3), maps:get(<<"keyfile">>, Certs3))),
+    ?assertNotEqual(maps:get(<<"keyfile">>, NCerts2), maps:get(<<"keyfile">>, NCerts3)),
+    ?assertNotEqual(maps:get(<<"certfile">>, NCerts2), maps:get(<<"certfile">>, NCerts3)),
+
+    ?assertEqual(true, filelib:is_regular(maps:get(<<"keyfile">>, NCerts3))),
+    ?AUTHN:clear_certs(CertsDir, #{<<"ssl">> => NCerts3}),
+    ?assertEqual(false, filelib:is_regular(maps:get(<<"keyfile">>, NCerts3))).
+
 update_config(Path, ConfigRequest) ->
     emqx:update_config(Path, ConfigRequest, #{rawconf_with_defaults => true}).

+certs(Certs) ->
+    CertsPath = emqx_ct_helpers:deps_path(emqx, "etc/certs"),
+    lists:foldl(fun({Key, Filename}, Acc) ->
+                        {ok, Bin} = file:read_file(filename:join([CertsPath, Filename])),
+                        Acc#{Key => Bin}
+                end, #{}, Certs).
+
+diff_cert(CertFile, CertPem2) ->
+    {ok, CertPem1} = file:read_file(CertFile),
+    ?AUTHN:diff_cert(CertPem1, CertPem2).
+
+register_provider(Type, Module) ->
+    ok = ?AUTHN:register_providers([{Type, Module}]).
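The `?config(KEY)` macro added at the top of this suite is a closure over the enclosing `Config` variable: it keyfinds `KEY` and returns its value, failing the test early with a `badmatch` if the key is missing. Roughly, a call site expands like this (a sketch of the expansion only):

```erlang
%% ?config("auth1") expands, approximately, to:
(fun() ->
     {"auth1", _V_} = lists:keyfind("auth1", 1, Config),
     _V_
 end)()
```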
@@ -39,7 +39,7 @@ t_trans(_) ->
     ok = emqx_cm_locker:trans(<<"clientid">>, fun(_) -> ok end).

 t_lock_unlocak(_) ->
-    {true, _Nodes} = emqx_cm_locker:lock(<<"clientid">>),
-    {true, _Nodes} = emqx_cm_locker:lock(<<"clientid">>),
-    {true, _Nodes} = emqx_cm_locker:unlock(<<"clientid">>),
-    {true, _Nodes} = emqx_cm_locker:unlock(<<"clientid">>).
+    {true, _} = emqx_cm_locker:lock(<<"clientid">>),
+    {true, _} = emqx_cm_locker:lock(<<"clientid">>),
+    {true, _} = emqx_cm_locker:unlock(<<"clientid">>),
+    {true, _} = emqx_cm_locker:unlock(<<"clientid">>).
@@ -147,3 +147,7 @@ t_now_to_secs(_) ->
 t_now_to_ms(_) ->
     ?assert(is_integer(emqx_misc:now_to_ms(os:timestamp()))).

+t_gen_id(_) ->
+    ?assertEqual(10, length(emqx_misc:gen_id(10))),
+    ?assertEqual(20, length(emqx_misc:gen_id(20))).
+
@@ -22,6 +22,7 @@
 -include_lib("emqx/include/emqx.hrl").
 -include_lib("emqx/include/emqx_mqtt.hrl").

+-include_lib("proper/include/proper.hrl").
 -include_lib("eunit/include/eunit.hrl").

 -define(Q, emqx_mqueue).
@@ -120,9 +121,88 @@ t_priority_mqueue(_) ->
     ?assertEqual(5, ?Q:len(Q5)),
     {_, Q6} = ?Q:in(#message{qos = 1, topic = <<"t2">>}, Q5),
     ?assertEqual(5, ?Q:len(Q6)),
-    {{value, Msg}, Q7} = ?Q:out(Q6),
-    ?assertEqual(4, ?Q:len(Q7)),
-    ?assertEqual(<<"t3">>, Msg#message.topic).
+    {{value, _Msg}, Q7} = ?Q:out(Q6),
+    ?assertEqual(4, ?Q:len(Q7)).
+
+t_priority_mqueue_conservation(_) ->
+    true = proper:quickcheck(conservation_prop()).
+
+t_priority_order(_) ->
+    Opts = #{max_len => 5,
+             shift_multiplier => 1,
+             priorities =>
+                 #{<<"t1">> => 0,
+                   <<"t2">> => 1,
+                   <<"t3">> => 2
+                  },
+             store_qos0 => false
+            },
+    Messages = [{Topic, Message} ||
+                   Topic <- [<<"t1">>, <<"t2">>, <<"t3">>],
+                   Message <- lists:seq(1, 10)],
+    Q = lists:foldl(fun({Topic, Message}, Q) ->
+                            element(2, ?Q:in(#message{topic = Topic, qos = 1, payload = Message}, Q))
+                    end,
+                    ?Q:init(Opts),
+                    Messages),
+    ?assertMatch([{<<"t3">>, 6},
+                  {<<"t3">>, 7},
+                  {<<"t3">>, 8},
+
+                  {<<"t2">>, 6},
+                  {<<"t2">>, 7},
+
+                  {<<"t1">>, 6},
+
+                  {<<"t3">>, 9},
+                  {<<"t3">>, 10},
+
+                  {<<"t2">>, 8},
+
+                  %% Note: for performance reasons we don't reset the
+                  %% counter when we run out of messages with the
+                  %% current prio, so next is t1:
+                  {<<"t1">>, 7},
+
+                  {<<"t2">>, 9},
+                  {<<"t2">>, 10},
+
+                  {<<"t1">>, 8},
+                  {<<"t1">>, 9},
+                  {<<"t1">>, 10}
+                 ], drain(Q)).
+
+t_priority_order2(_) ->
+    Opts = #{max_len => 5,
+             shift_multiplier => 2,
+             priorities =>
+                 #{<<"t1">> => 0,
+                   <<"t2">> => 1
+                  },
+             store_qos0 => false
+            },
+    Messages = [{Topic, Message} ||
+                   Topic <- [<<"t1">>, <<"t2">>],
+                   Message <- lists:seq(1, 10)],
+    Q = lists:foldl(fun({Topic, Message}, Q) ->
+                            element(2, ?Q:in(#message{topic = Topic, qos = 1, payload = Message}, Q))
+                    end,
+                    ?Q:init(Opts),
+                    Messages),
+    ?assertMatch([{<<"t2">>, 6},
+                  {<<"t2">>, 7},
+                  {<<"t2">>, 8},
+                  {<<"t2">>, 9},
+
+                  {<<"t1">>, 6},
+                  {<<"t1">>, 7},
+
+                  {<<"t2">>, 10},
+
+                  {<<"t1">>, 8},
+                  {<<"t1">>, 9},
+                  {<<"t1">>, 10}
+                 ], drain(Q)).

 t_infinity_priority_mqueue(_) ->
     Opts = #{max_len => 0,
@@ -163,3 +243,57 @@ t_dropped(_) ->
     {Msg, Q2} = ?Q:in(Msg, Q1),
     ?assertEqual(1, ?Q:dropped(Q2)).

+conservation_prop() ->
+    ?FORALL({Priorities, Messages},
+            ?LET(Priorities, topic_priorities(),
+                 {Priorities, messages(Priorities)}),
+            try
+                Opts = #{max_len => 0,
+                         priorities => maps:from_list(Priorities),
+                         store_qos0 => false},
+                %% Put messages in
+                Q1 = lists:foldl(fun({Topic, Message}, Q) ->
+                                         element(2, ?Q:in(#message{topic = Topic, qos = 1, payload = Message}, Q))
+                                 end,
+                                 ?Q:init(Opts),
+                                 Messages),
+                %% Collect messages
+                Got = lists:sort(drain(Q1)),
+                Expected = lists:sort(Messages),
+                case Expected =:= Got of
+                    true ->
+                        true;
+                    false ->
+                        ct:pal("Mismatch: expected ~p~nGot ~p~n", [Expected, Got]),
+                        false
+                end
+            catch
+                EC:Err:Stack ->
+                    ct:pal("Error: ~p", [{EC, Err, Stack}]),
+                    false
+            end).
+
+%% Proper generators:
+
+topic(Priorities) ->
+    {Topics, _} = lists:unzip(Priorities),
+    oneof(Topics).
+
+topic_priorities() ->
+    non_empty(list({binary(), priority()})).
+
+priority() ->
+    oneof([integer(), infinity]).
+
+messages(Topics) ->
+    list({topic(Topics), binary()}).
+
+%% Internal functions:
+
+drain(Q) ->
+    case ?Q:out(Q) of
+        {empty, _} ->
+            [];
+        {{value, #message{topic = T, payload = P}}, Q1} ->
+            [{T, P}|drain(Q1)]
+    end.
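`conservation_prop/0` asserts that, for any generated priority map, every message put into the queue is drained back out exactly once. A sketch of running it interactively, assuming the suite's functions are reachable from the shell (CT suites are commonly compiled with test exports):

```erlang
1> proper:quickcheck(emqx_mqueue_SUITE:conservation_prop(), 100).
true
```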
@@ -1,3 +1,4 @@
+%% -*- mode: erlang -*-
 {application, emqx_hocon_plugin,
  [{description, "An EMQ X plugin for hocon testcase"},
   {vsn, "0.1"},
@@ -1,3 +1,4 @@
+%% -*- mode: erlang -*-
 {application, emqx_mini_plugin,
  [{description, "An EMQ X plugin for testcase"},
   {vsn, "0.1"},
@@ -20,8 +20,8 @@

 -include_lib("emqx/include/emqx_mqtt.hrl").

--type qos() :: emqx_mqtt_types:qos_name() | emqx_mqtt_types:qos().
--type topic() :: emqx_topic:topic().
+-type qos() :: emqx_types:qos_name() | emqx_types:qos().
+-type topic() :: emqx_types:topic().
 -type handler() :: fun((CorrData :: binary(), ReqPayload :: binary()) -> RspPayload :: binary()).

 -spec start_link(topic(), qos(), handler(), emqtt:options()) ->
@@ -109,9 +109,9 @@ t_no_connection_nack(_) ->

     ExpProp = [{properties, #{'Session-Expiry-Interval' => timer:seconds(30)}}],
     {ok, SubConnPid1} = emqtt:start_link([{clientid, Subscriber1}] ++ ExpProp),
-    {ok, _Props} = emqtt:connect(SubConnPid1),
+    {ok, _Props1} = emqtt:connect(SubConnPid1),
     {ok, SubConnPid2} = emqtt:start_link([{clientid, Subscriber2}] ++ ExpProp),
-    {ok, _Props} = emqtt:connect(SubConnPid2),
+    {ok, _Props2} = emqtt:connect(SubConnPid2),
     emqtt:subscribe(SubConnPid1, ShareTopic, QoS),
     emqtt:subscribe(SubConnPid1, ShareTopic, QoS),
@@ -14,6 +14,9 @@
 %% limitations under the License.
 %%--------------------------------------------------------------------

+-ifndef(EMQX_AUTHN_HRL).
+-define(EMQX_AUTHN_HRL, true).
+
 -define(APP, emqx_authn).

 -define(AUTHN, emqx_authentication).
@@ -23,3 +26,5 @@
 -define(RE_PLACEHOLDER, "\\$\\{[a-z0-9\\-]+\\}").

 -define(AUTH_SHARD, emqx_authn_shard).
+
+-endif.
@@ -1,3 +1,4 @@
+%% -*- mode: erlang -*-
 {application, emqx_authn,
  [{description, "EMQ X Authentication"},
   {vsn, "0.1.0"},
@@ -1589,6 +1589,11 @@ definitions() ->
             type => string,
             example => <<"http://localhost:80">>
         },
+        refresh_interval => #{
+            type => integer,
+            default => 300,
+            example => 300
+        },
         verify_claims => #{
             type => object,
             additionalProperties => #{
@@ -1835,20 +1840,19 @@ find_listener(ListenerID) ->
             {ok, {Type, Name}}
     end.

-create_authenticator(ConfKeyPath, ChainName0, Config) ->
-    ChainName = to_atom(ChainName0),
-    case update_config(ConfKeyPath, {create_authenticator, ChainName, Config}) of
+create_authenticator(ConfKeyPath, ChainName, Config) ->
+    case update_config(ConfKeyPath, {create_authenticator, to_atom(ChainName), Config}) of
         {ok, #{post_config_update := #{?AUTHN := #{id := ID}},
-               raw_config := AuthenticatorsConfig}} ->
+              raw_config := AuthenticatorsConfig}} ->
             {ok, AuthenticatorConfig} = find_config(ID, AuthenticatorsConfig),
-            {200, maps:put(id, ID, fill_defaults(AuthenticatorConfig))};
+            {200, maps:put(id, ID, convert_certs(fill_defaults(AuthenticatorConfig)))};
         {error, {_, _, Reason}} ->
             serialize_error(Reason)
     end.

 list_authenticators(ConfKeyPath) ->
     AuthenticatorsConfig = get_raw_config_with_defaults(ConfKeyPath),
-    NAuthenticators = [maps:put(id, ?AUTHN:generate_id(AuthenticatorConfig), AuthenticatorConfig)
+    NAuthenticators = [maps:put(id, ?AUTHN:generate_id(AuthenticatorConfig), convert_certs(AuthenticatorConfig))
                        || AuthenticatorConfig <- AuthenticatorsConfig],
     {200, NAuthenticators}.

@@ -1856,18 +1860,17 @@ list_authenticator(ConfKeyPath, AuthenticatorID) ->
     AuthenticatorsConfig = get_raw_config_with_defaults(ConfKeyPath),
     case find_config(AuthenticatorID, AuthenticatorsConfig) of
         {ok, AuthenticatorConfig} ->
-            {200, AuthenticatorConfig#{id => AuthenticatorID}};
+            {200, maps:put(id, AuthenticatorID, convert_certs(AuthenticatorConfig))};
         {error, Reason} ->
             serialize_error(Reason)
     end.

-update_authenticator(ConfKeyPath, ChainName0, AuthenticatorID, Config) ->
-    ChainName = to_atom(ChainName0),
-    case update_config(ConfKeyPath, {update_authenticator, ChainName, AuthenticatorID, Config}) of
+update_authenticator(ConfKeyPath, ChainName, AuthenticatorID, Config) ->
+    case update_config(ConfKeyPath, {update_authenticator, to_atom(ChainName), AuthenticatorID, Config}) of
         {ok, #{post_config_update := #{?AUTHN := #{id := ID}},
                raw_config := AuthenticatorsConfig}} ->
             {ok, AuthenticatorConfig} = find_config(ID, AuthenticatorsConfig),
-            {200, maps:put(id, ID, fill_defaults(AuthenticatorConfig))};
+            {200, maps:put(id, ID, convert_certs(fill_defaults(AuthenticatorConfig)))};
         {error, {_, _, Reason}} ->
             serialize_error(Reason)
     end.
@@ -1968,9 +1971,22 @@ find_config(AuthenticatorID, AuthenticatorsConfig) ->

 fill_defaults(Config) ->
     #{<<"authentication">> := CheckedConfig} = hocon_schema:check_plain(
-        ?AUTHN, #{<<"authentication">> => Config}, #{nullable => true, no_conversion => true}),
+        ?AUTHN, #{<<"authentication">> => Config}, #{no_conversion => true}),
     CheckedConfig.

+convert_certs(#{<<"ssl">> := SSLOpts} = Config) ->
+    NSSLOpts = lists:foldl(fun(K, Acc) ->
+                                   case maps:get(K, Acc, undefined) of
+                                       undefined -> Acc;
+                                       Filename ->
+                                           {ok, Bin} = file:read_file(Filename),
+                                           Acc#{K => Bin}
+                                   end
+                           end, SSLOpts, [<<"certfile">>, <<"keyfile">>, <<"cacertfile">>]),
+    Config#{<<"ssl">> => NSSLOpts};
+convert_certs(Config) ->
+    Config.
+
 serialize_error({not_found, {authenticator, ID}}) ->
     {404, #{code => <<"NOT_FOUND">>,
             message => list_to_binary(
@@ -2011,6 +2027,16 @@ serialize_error(unsupported_operation) ->
     {400, #{code => <<"BAD_REQUEST">>,
             message => <<"Operation not supported in this authentication type">>}};

+serialize_error({save_cert_to_file, invalid_certificate}) ->
+    {400, #{code => <<"BAD_REQUEST">>,
+            message => <<"Invalid certificate">>}};
+
+serialize_error({save_cert_to_file, {_, Reason}}) ->
+    {500, #{code => <<"INTERNAL_SERVER_ERROR">>,
+            message => list_to_binary(
+                io_lib:format("Cannot save certificate to file due to '~p'", [Reason])
+            )}};
+
 serialize_error({missing_parameter, Name}) ->
     {400, #{code => <<"MISSING_PARAMETER">>,
             message => list_to_binary(
@@ -32,38 +32,34 @@
 start(_StartType, _StartArgs) ->
     ok = ekka_rlog:wait_for_shards([?AUTH_SHARD], infinity),
     {ok, Sup} = emqx_authn_sup:start_link(),
-    ok = add_providers(),
+    ok = ?AUTHN:register_providers(providers()),
     ok = initialize(),
     {ok, Sup}.

 stop(_State) ->
-    ok = remove_providers(),
+    ok = ?AUTHN:deregister_providers(provider_types()),
     ok.

 %%------------------------------------------------------------------------------
 %% Internal functions
 %%------------------------------------------------------------------------------

-add_providers() ->
-    _ = [?AUTHN:add_provider(AuthNType, Provider) || {AuthNType, Provider} <- providers()], ok.
-
-remove_providers() ->
-    _ = [?AUTHN:remove_provider(AuthNType) || {AuthNType, _} <- providers()], ok.
-
 initialize() ->
     ?AUTHN:initialize_authentication(?GLOBAL, emqx:get_raw_config([authentication], [])),
     lists:foreach(fun({ListenerID, ListenerConfig}) ->
                           ?AUTHN:initialize_authentication(ListenerID, maps:get(authentication, ListenerConfig, []))
-                  end, emqx_listeners:list()),
-    ok.
+                  end, emqx_listeners:list()).
+
+provider_types() ->
+    lists:map(fun({Type, _Module}) -> Type end, providers()).

 providers() ->
     [ {{'password-based', 'built-in-database'}, emqx_authn_mnesia}
     , {{'password-based', mysql}, emqx_authn_mysql}
-    , {{'password-based', posgresql}, emqx_authn_pgsql}
+    , {{'password-based', postgresql}, emqx_authn_pgsql}
     , {{'password-based', mongodb}, emqx_authn_mongodb}
     , {{'password-based', redis}, emqx_authn_redis}
     , {{'password-based', 'http-server'}, emqx_authn_http}
     , {jwt, emqx_authn_jwt}
     , {{scram, 'built-in-database'}, emqx_enhanced_authn_scram_mnesia}
     ].
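Provider registration is now batched: the application hands the whole `{Type, Module}` list to `register_providers/1` on start and removes the types with `deregister_providers/1` on stop. A sketch of a hypothetical plugin using the same API (the backend and module names are invented for illustration):

```erlang
%% Hypothetical third-party provider registration:
ok = emqx_authentication:register_providers(
       [{{'password-based', my_backend}, my_authn_provider}]),
%% ...and on unload:
ok = emqx_authentication:deregister_providers(
       [{'password-based', my_backend}]).
```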
@@ -77,8 +77,7 @@ validations() ->
     ].

 url(type) -> binary();
-url(nullable) -> false;
-url(validate) -> [fun check_url/1];
+url(validator) -> [fun check_url/1];
 url(_) -> undefined.

 headers(type) -> map();
@@ -98,8 +97,7 @@ headers_no_content_type(default) -> default_headers_no_content_type();
 headers_no_content_type(_) -> undefined.

 body(type) -> map();
-body(nullable) -> false;
-body(validate) -> [fun check_body/1];
+body(validator) -> [fun check_body/1];
 body(_) -> undefined.

 request_timeout(type) -> non_neg_integer();
@@ -123,7 +123,7 @@ server_name_indication(_) -> undefined.

 verify_claims(type) -> list();
 verify_claims(default) -> #{};
-verify_claims(validate) -> [fun check_verify_claims/1];
+verify_claims(validator) -> [fun do_check_verify_claims/1];
 verify_claims(converter) ->
     fun(VerifyClaims) ->
         maps:to_list(VerifyClaims)
@@ -298,12 +298,8 @@ do_verify_claims(Claims, [{Name, Value} | More]) ->
             {error, {claims, {Name, Value0}}}
     end.

-check_verify_claims(Conf) ->
-    Claims = hocon_schema:get_value("verify_claims", Conf),
-    do_check_verify_claims(Claims).
-
 do_check_verify_claims([]) ->
-    false;
+    true;
 do_check_verify_claims([{Name, Expected} | More]) ->
     check_claim_name(Name) andalso
     check_claim_expected(Expected) andalso
@@ -70,15 +70,12 @@ common_fields() ->
     ] ++ emqx_authn_schema:common_fields().

 collection(type) -> binary();
-collection(nullable) -> false;
 collection(_) -> undefined.

 selector(type) -> map();
-selector(nullable) -> false;
 selector(_) -> undefined.

 password_hash_field(type) -> binary();
-password_hash_field(nullable) -> false;
 password_hash_field(_) -> undefined.

 salt_field(type) -> binary();
@@ -63,7 +63,6 @@ salt_position(default) -> prefix;
 salt_position(_) -> undefined.

 query(type) -> string();
-query(nullable) -> false;
 query(_) -> undefined.

 query_timeout(type) -> integer();
@@ -59,7 +59,6 @@ password_hash_algorithm(default) -> sha256;
 password_hash_algorithm(_) -> undefined.

 query(type) -> string();
-query(nullable) -> false;
 query(_) -> undefined.

 %%------------------------------------------------------------------------------
@@ -66,7 +66,6 @@ common_fields() ->
     ] ++ emqx_authn_schema:common_fields().

 query(type) -> string();
-query(nullable) -> false;
 query(_) -> undefined.

 password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
@@ -26,7 +26,7 @@ authz:{
         sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or clientid = '%c'"
     },
     {
-        type: pgsql
+        type: postgresql
         config: {
             server: "127.0.0.1:5432"
             database: mqtt
@@ -96,7 +96,7 @@ Sample data in the default configuration:
 INSERT INTO mqtt_authz (ipaddress, username, clientid, action, permission, topic) VALUES ('127.0.0.1', '', '', 'subscribe', 'allow', '$SYS/#');
 ```

-#### Pgsql
+#### PostgreSQL

 Create Example Table
@@ -25,7 +25,7 @@ authorization {
 #   #   query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or clientid = '%c'"
 #   # },
 #   # {
-#   #   type: pgsql
+#   #   type: postgresql
 #   #   server: "127.0.0.1:5432"
 #   #   database: mqtt
 #   #   pool_size: 1
@@ -46,7 +46,7 @@ authorization {
 #   #   cmd: "HGETALL mqtt_authz:%u"
 #   # },
 #   # {
-#   #   type: mongo
+#   #   type: mongodb
 #   #   mongo_type: single
 #   #   server: "127.0.0.1:27017"
 #   #   pool_size: 1
@@ -14,7 +14,7 @@

 -type(permission() :: allow | deny).

--type(rule() :: {permission(), who(), action(), list(emqx_topic:topic())}).
+-type(rule() :: {permission(), who(), action(), list(emqx_types:topic())}).
 -type(rules() :: [rule()]).

 -type(sources() :: [map()]).
@@ -1,3 +1,4 @@
+%% -*- mode: erlang -*-
 {application, emqx_authz,
  [{description, "An OTP application"},
   {vsn, "0.1.1"},
@@ -6,6 +7,7 @@
   {applications,
    [kernel,
     stdlib,
+    crypto,
     emqx_connector
    ]},
   {env,[]},
@@ -39,7 +39,7 @@
 -export([post_config_update/4, pre_config_update/2]).

 -define(CONF_KEY_PATH, [authorization, sources]).
--define(SOURCE_TYPES, [file, http, mongo, mysql, pgsql, redis]).
+-define(SOURCE_TYPES, [file, http, mongodb, mysql, postgresql, redis]).

 -spec(register_metrics() -> ok).
 register_metrics() ->
@@ -87,15 +87,19 @@ pre_config_update({move, Type, <<"top">>}, Conf) when is_list(Conf) ->
     {Index, _} = find_source_by_type(Type),
     {List1, List2} = lists:split(Index, Conf),
     NConf = [lists:nth(Index, Conf)] ++ lists:droplast(List1) ++ List2,
-    ok = check_dup_types(NConf),
-    {ok, NConf};
+    case check_dup_types(NConf) of
+        ok -> {ok, NConf};
+        Error -> Error
+    end;

 pre_config_update({move, Type, <<"bottom">>}, Conf) when is_list(Conf) ->
     {Index, _} = find_source_by_type(Type),
     {List1, List2} = lists:split(Index, Conf),
     NConf = lists:droplast(List1) ++ List2 ++ [lists:nth(Index, Conf)],
-    ok = check_dup_types(NConf),
-    {ok, NConf};
+    case check_dup_types(NConf) of
+        ok -> {ok, NConf};
+        Error -> Error
+    end;

 pre_config_update({move, Type, #{<<"before">> := Before}}, Conf) when is_list(Conf) ->
     {Index1, _} = find_source_by_type(Type),
@@ -107,8 +111,10 @@ pre_config_update({move, Type, #{<<"before">> := Before}}, Conf) when is_list(Co
     NConf = lists:delete(Conf1, lists:droplast(List1))
            ++ [Conf1] ++ [Conf2]
            ++ lists:delete(Conf1, List2),
-    ok = check_dup_types(NConf),
-    {ok, NConf};
+    case check_dup_types(NConf) of
+        ok -> {ok, NConf};
+        Error -> Error
+    end;

 pre_config_update({move, Type, #{<<"after">> := After}}, Conf) when is_list(Conf) ->
     {Index1, _} = find_source_by_type(Type),
@@ -119,28 +125,39 @@ pre_config_update({move, Type, #{<<"after">> := After}}, Conf) when is_list(Conf
     NConf = lists:delete(Conf1, List1)
            ++ [Conf1]
            ++ lists:delete(Conf1, List2),
-    ok = check_dup_types(NConf),
-    {ok, NConf};
+    case check_dup_types(NConf) of
+        ok -> {ok, NConf};
+        Error -> Error
+    end;

 pre_config_update({head, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
     NConf = Sources ++ Conf,
-    ok = check_dup_types(NConf),
-    {ok, Sources ++ Conf};
+    case check_dup_types(NConf) of
+        ok -> {ok, Sources ++ Conf};
+        Error -> Error
+    end;
 pre_config_update({tail, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
     NConf = Conf ++ Sources,
-    ok = check_dup_types(NConf),
-    {ok, Conf ++ Sources};
+    case check_dup_types(NConf) of
+        ok -> {ok, Conf ++ Sources};
+        Error -> Error
+    end;
 pre_config_update({{replace_once, Type}, Source}, Conf) when is_map(Source), is_list(Conf) ->
     {Index, _} = find_source_by_type(Type),
     {List1, List2} = lists:split(Index, Conf),
     NConf = lists:droplast(List1) ++ [Source] ++ List2,
-    ok = check_dup_types(NConf),
-    {ok, NConf};
+    case check_dup_types(NConf) of
+        ok -> {ok, NConf};
+        Error -> Error
+    end;
 pre_config_update({{delete_once, Type}, _Source}, Conf) when is_list(Conf) ->
-    {_, Source} = find_source_by_type(Type),
-    NConf = lists:delete(Source, Conf),
-    ok = check_dup_types(NConf),
-    {ok, NConf};
+    {Index, _} = find_source_by_type(Type),
+    {List1, List2} = lists:split(Index, Conf),
+    NConf = lists:droplast(List1) ++ List2,
+    case check_dup_types(NConf) of
+        ok -> {ok, NConf};
+        Error -> Error
+    end;
 pre_config_update({_, Sources}, _Conf) when is_list(Sources)->
     %% overwrite the entire config!
     {ok, Sources}.
@@ -249,7 +266,7 @@ check_dup_types(Sources, [T0 | Tail]) ->
              end, 0, Sources) > 1 of
         true ->
             ?LOG(error, "The type is duplicated in the Authorization source"),
-            {error, authz_source_dup};
+            {error, 'The type is duplicated in the Authorization source'};
         false -> check_dup_types(Sources, Tail)
     end.

@@ -283,7 +300,7 @@ init_source(#{enable := true,
 init_source(#{enable := true,
               type := DB
              } = Source) when DB =:= redis;
-                              DB =:= mongo ->
+                              DB =:= mongodb ->
     case create_resource(Source) of
         {error, Reason} -> error({load_config_error, Reason});
         Id -> Source#{annotations => #{id => Id}}
@@ -292,7 +309,7 @@ init_source(#{enable := true,
               type := DB,
               query := SQL
             } = Source) when DB =:= mysql;
-                              DB =:= pgsql ->
+                              DB =:= postgresql ->
     Mod = authz_module(DB),
     case create_resource(Source) of
         {error, Reason} -> error({load_config_error, Reason});
@@ -309,7 +326,7 @@ init_source(#{enable := false} = Source) ->Source.
 %%--------------------------------------------------------------------

 %% @doc Check AuthZ
--spec(authorize(emqx_types:clientinfo(), emqx_types:all(), emqx_topic:topic(), allow | deny, sources())
+-spec(authorize(emqx_types:clientinfo(), emqx_types:all(), emqx_types:topic(), allow | deny, sources())
       -> {stop, allow} | {ok, deny}).
 authorize(#{username := Username,
             peerhost := IpAddress
@@ -375,7 +392,7 @@ gen_id(Type) ->

 create_resource(#{type := DB,
                   annotations := #{id := ResourceID}} = Source) ->
-    case emqx_resource:update(ResourceID, connector_module(DB), Source, []) of
+    case emqx_resource:recreate(ResourceID, connector_module(DB), Source, []) of
         {ok, _} -> ResourceID;
         {error, Reason} -> {error, Reason}
     end;
@@ -390,6 +407,10 @@ create_resource(#{type := DB} = Source) ->
 authz_module(Type) ->
     list_to_existing_atom("emqx_authz_" ++ atom_to_list(Type)).

+connector_module(mongodb) ->
+    emqx_connector_mongo;
+connector_module(postgresql) ->
+    emqx_connector_pgsql;
 connector_module(Type) ->
     list_to_existing_atom("emqx_connector_" ++ atom_to_list(Type)).
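Replacing `ok = check_dup_types(NConf)` with a `case` turns what used to be a `badmatch` crash inside `pre_config_update/2` into an `{error, ...}` return the config handler can surface to the API caller. The observable difference, sketched for a duplicated source type:

```erlang
%% Before: the update process crashed with
%%   {badmatch, {error, authz_source_dup}}
%% After: the error tuple is returned intact:
%%   {error, 'The type is duplicated in the Authorization source'}
```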
@@ -46,7 +46,7 @@ definitions() ->
                 , minirest:ref(<<"mongo_rs">>)
                 , minirest:ref(<<"mongo_sharded">>)
                 , minirest:ref(<<"mysql">>)
-                , minirest:ref(<<"pgsql">>)
+                , minirest:ref(<<"postgresql">>)
                 , minirest:ref(<<"redis_single">>)
                 , minirest:ref(<<"redis_sentinel">>)
                 , minirest:ref(<<"redis_cluster">>)
@@ -132,8 +132,8 @@ definitions() ->
         properties => #{
             type => #{
                 type => string,
-                enum => [<<"mongo">>],
-                example => <<"mongo">>
+                enum => [<<"mongodb">>],
+                example => <<"mongodb">>
             },
             enable => #{
                 type => boolean,
@@ -188,8 +188,8 @@ definitions() ->
         properties => #{
             type => #{
                 type => string,
-                enum => [<<"mongo">>],
-                example => <<"mongo">>
+                enum => [<<"mongodb">>],
+                example => <<"mongodb">>
             },
             enable => #{
                 type => boolean,
@@ -245,8 +245,8 @@ definitions() ->
         properties => #{
             type => #{
                 type => string,
-                enum => [<<"mongo">>],
-                example => <<"mongo">>
+                enum => [<<"mongodb">>],
+                example => <<"mongodb">>
             },
             enable => #{
                 type => boolean,
@@ -335,8 +335,8 @@ definitions() ->
         properties => #{
             type => #{
                 type => string,
-                enum => [<<"pgsql">>],
-                example => <<"pgsql">>
+                enum => [<<"postgresql">>],
+                example => <<"postgresql">>
             },
             enable => #{
                 type => boolean,
@@ -501,7 +501,7 @@ definitions() ->
     , #{<<"mongo_rs">> => MongoRs}
     , #{<<"mongo_sharded">> => MongoSharded}
     , #{<<"mysql">> => Mysql}
-    , #{<<"pgsql">> => Pgsql}
+    , #{<<"postgresql">> => Pgsql}
     , #{<<"redis_single">> => RedisSingle}
     , #{<<"redis_sentinel">> => RedisSentinel}
     , #{<<"redis_cluster">> => RedisCluster}
|
|||
|
||||
sources(get, _) ->
|
||||
Sources = lists:foldl(fun (#{type := file, enable := Enable, path := Path}, AccIn) ->
|
||||
{ok, Rules} = file:consult(Path),
|
||||
lists:append(AccIn, [#{type => file,
|
||||
enable => Enable,
|
||||
rules => [ iolist_to_binary(io_lib:format("~p.", [R])) || R <- Rules],
|
||||
annotations => #{status => healthy}
|
||||
}]);
|
||||
case file:read_file(Path) of
|
||||
{ok, Rules} ->
|
||||
lists:append(AccIn, [#{type => file,
|
||||
enable => Enable,
|
||||
rules => Rules,
|
||||
annotations => #{status => healthy}
|
||||
}]);
|
||||
{error, _} ->
|
||||
lists:append(AccIn, [#{type => file,
|
||||
enable => Enable,
|
||||
rules => <<"">>,
|
||||
annotations => #{status => unhealthy}
|
||||
}])
|
||||
end;
|
||||
(#{enable := false} = Source, AccIn) ->
|
||||
lists:append(AccIn, [Source#{annotations => #{status => unhealthy}}]);
|
||||
(#{type := _Type, annotations := #{id := Id}} = Source, AccIn) ->
|
||||
|
@ -328,23 +336,11 @@ sources(get, _) ->
|
|||
lists:append(AccIn, [Source#{annotations => #{status => healthy}}])
|
||||
end, [], emqx_authz:lookup()),
|
||||
{200, #{sources => Sources}};
|
||||
sources(post, #{body := #{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable}}) when is_list(Rules) ->
|
||||
{ok, Filename} = write_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]),
|
||||
erlang:list_to_bitstring([<<Rule/binary, "\n">> || Rule <- Rules])
|
||||
),
|
||||
case emqx_authz:update(head, [#{type => file, enable => Enable, path => Filename}]) of
|
||||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end;
|
||||
sources(post, #{body := #{<<"type">> := <<"file">>, <<"rules">> := Rules}}) ->
|
||||
{ok, Filename} = write_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]), Rules),
|
||||
update_config(head, [#{type => file, enable => true, path => Filename}]);
|
||||
sources(post, #{body := Body}) when is_map(Body) ->
|
||||
case emqx_authz:update(head, [write_cert(Body)]) of
|
||||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end;
|
||||
update_config(head, [write_cert(Body)]);
|
||||
sources(put, #{body := Body}) when is_list(Body) ->
|
||||
NBody = [ begin
|
||||
case Source of
|
||||
|
@ -354,24 +350,24 @@ sources(put, #{body := Body}) when is_list(Body) ->
|
|||
_ -> write_cert(Source)
|
||||
end
|
||||
end || Source <- Body],
|
||||
case emqx_authz:update(replace, NBody) of
|
||||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end.
|
||||
update_config(replace, NBody).
|
||||
|
||||
source(get, #{bindings := #{type := Type}}) ->
|
||||
case emqx_authz:lookup(Type) of
|
||||
{error, Reason} -> {404, #{messgae => atom_to_binary(Reason)}};
|
||||
{error, Reason} -> {404, #{message => atom_to_binary(Reason)}};
|
||||
#{type := file, enable := Enable, path := Path}->
|
||||
{ok, Rules} = file:consult(Path),
|
||||
{200, #{type => file,
|
||||
enable => Enable,
|
||||
rules => [ iolist_to_binary(io_lib:format("~p.", [R])) || R <- Rules],
|
||||
annotations => #{status => healthy}
|
||||
}
|
||||
};
|
||||
case file:read_file(Path) of
|
||||
{ok, Rules} ->
|
||||
{200, #{type => file,
|
||||
enable => Enable,
|
||||
rules => Rules,
|
||||
annotations => #{status => healthy}
|
||||
}
|
||||
};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
message => atom_to_binary(Reason)}}
|
||||
end;
|
||||
#{enable := false} = Source -> {200, Source#{annotations => #{status => unhealthy}}};
|
||||
#{annotations := #{id := Id}} = Source ->
|
||||
NSource0 = case maps:get(server, Source, undefined) of
|
||||
|
@ -398,34 +394,36 @@ source(put, #{bindings := #{type := <<"file">>}, body := #{<<"type">> := <<"file
|
|||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
message => atom_to_binary(Reason)}}
|
||||
end;
|
||||
source(put, #{bindings := #{type := Type}, body := Body}) when is_map(Body) ->
|
||||
case emqx_authz:update({replace_once, Type}, write_cert(Body)) of
|
||||
{ok, _} -> {204};
|
||||
{error, not_found_source} ->
|
||||
{404, #{code => <<"NOT_FOUND">>,
|
||||
messgae => <<"source ", Type/binary, " not found">>}};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end;
|
||||
update_config({replace_once, Type}, write_cert(Body));
|
||||
source(delete, #{bindings := #{type := Type}}) ->
|
||||
case emqx_authz:update({delete_once, Type}, #{}) of
|
||||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end.
|
||||
update_config({delete_once, Type}, #{}).
|
||||
|
||||
move_source(post, #{bindings := #{type := Type}, body := #{<<"position">> := Position}}) ->
|
||||
case emqx_authz:move(Type, Position) of
|
||||
{ok, _} -> {204};
|
||||
{error, not_found_source} ->
|
||||
{404, #{code => <<"NOT_FOUND">>,
|
||||
messgae => <<"source ", Type/binary, " not found">>}};
|
||||
message => <<"source ", Type/binary, " not found">>}};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
message => atom_to_binary(Reason)}}
|
||||
end.
|
||||
|
||||
update_config(Cmd, Sources) ->
|
||||
case emqx_authz:update(Cmd, Sources) of
|
||||
{ok, _} -> {204};
|
||||
{error, {pre_config_update, emqx_authz, Reason}} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
message => atom_to_binary(Reason)}};
|
||||
{error, {post_config_update, emqx_authz, Reason}} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
message => atom_to_binary(Reason)}};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
message => atom_to_binary(Reason)}}
|
||||
end.
|
||||
|
||||
read_cert(#{ssl := #{enable := true} = SSL} = Source) ->
|
||||
|
@ -452,21 +450,21 @@ write_cert(#{<<"ssl">> := #{<<"enable">> := true} = SSL} = Source) ->
|
|||
CertPath = filename:join([emqx:get_config([node, data_dir]), "certs"]),
|
||||
CaCert = case maps:is_key(<<"cacertfile">>, SSL) of
|
||||
true ->
|
||||
{ok, CaCertFile} = write_file(filename:join([CertPath, "cacert-" ++ emqx_plugin_libs_id:gen() ++".pem"]),
|
||||
{ok, CaCertFile} = write_file(filename:join([CertPath, "cacert-" ++ emqx_misc:gen_id() ++".pem"]),
|
||||
maps:get(<<"cacertfile">>, SSL)),
|
||||
CaCertFile;
|
||||
false -> ""
|
||||
end,
|
||||
Cert = case maps:is_key(<<"certfile">>, SSL) of
|
||||
true ->
|
||||
{ok, CertFile} = write_file(filename:join([CertPath, "cert-" ++ emqx_plugin_libs_id:gen() ++".pem"]),
|
||||
{ok, CertFile} = write_file(filename:join([CertPath, "cert-" ++ emqx_misc:gen_id() ++".pem"]),
|
||||
maps:get(<<"certfile">>, SSL)),
|
||||
CertFile;
|
||||
false -> ""
|
||||
end,
|
||||
Key = case maps:is_key(<<"keyfile">>, SSL) of
|
||||
true ->
|
||||
{ok, KeyFile} = write_file(filename:join([CertPath, "key-" ++ emqx_plugin_libs_id:gen() ++".pem"]),
|
||||
{ok, KeyFile} = write_file(filename:join([CertPath, "key-" ++ emqx_misc:gen_id() ++".pem"]),
|
||||
maps:get(<<"keyfile">>, SSL)),
|
||||
KeyFile;
|
||||
false -> ""
|
||||
|
@ -478,8 +476,18 @@ write_cert(#{<<"ssl">> := #{<<"enable">> := true} = SSL} = Source) ->
|
|||
};
|
||||
write_cert(Source) -> Source.
|
||||
|
||||
write_file(Filename, Bytes) ->
|
||||
write_file(Filename, Bytes0) ->
|
||||
ok = filelib:ensure_dir(Filename),
|
||||
case file:read_file(Filename) of
|
||||
{ok, Bytes1} ->
|
||||
case crypto:hash(md5, Bytes1) =:= crypto:hash(md5, Bytes0) of
|
||||
true -> {ok,iolist_to_binary(Filename)};
|
||||
false -> do_write_file(Filename, Bytes0)
|
||||
end;
|
||||
_ -> do_write_file(Filename, Bytes0)
|
||||
end.
|
||||
|
||||
do_write_file(Filename, Bytes) ->
|
||||
case file:write_file(Filename, Bytes) of
|
||||
ok -> {ok, iolist_to_binary(Filename)};
|
||||
{error, Reason} ->
|
||||
|
|
|
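The new write_file/2 is effectively idempotent: it compares the MD5 digest of the file already on disk with the bytes to be written and only rewrites on change. A minimal illustration of the contract (hypothetical path and payload):

    %% The first call writes the file; the second detects an identical
    %% digest and returns without touching the disk.
    {ok, P} = write_file("data/certs/cacert-fake.pem", <<"PEM BYTES">>),
    {ok, P} = write_file("data/certs/cacert-fake.pem", <<"PEM BYTES">>).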
@@ -14,7 +14,7 @@
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_authz_mongo).
-module(emqx_authz_mongodb).

-include("emqx_authz.hrl").
-include_lib("emqx/include/emqx.hrl").
@@ -31,7 +31,7 @@
-endif.

description() ->
    "AuthZ with Mongo".
    "AuthZ with MongoDB".

authorize(Client, PubSub, Topic,
          #{collection := Collection,
@@ -14,7 +14,7 @@
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_authz_pgsql).
-module(emqx_authz_postgresql).

-include("emqx_authz.hrl").
-include_lib("emqx/include/emqx.hrl").
@@ -32,7 +32,7 @@
-endif.

description() ->
    "AuthZ with pgsql".
    "AuthZ with postgresql".

parse_query(undefined) ->
    undefined;
@@ -59,7 +59,7 @@ authorize(Client, PubSub, Topic,
        {ok, Columns, Rows} ->
            do_authorize(Client, PubSub, Topic, Columns, Rows);
        {error, Reason} ->
            ?LOG(error, "[AuthZ] Query pgsql error: ~p~n", [Reason]),
            ?LOG(error, "[AuthZ] Query postgresql error: ~p~n", [Reason]),
            nomatch
    end.
@@ -33,7 +33,7 @@ fields("authorization") ->
    , hoconsc:ref(?MODULE, mongo_rs)
    , hoconsc:ref(?MODULE, mongo_sharded)
    , hoconsc:ref(?MODULE, mysql)
    , hoconsc:ref(?MODULE, pgsql)
    , hoconsc:ref(?MODULE, postgresql)
    , hoconsc:ref(?MODULE, redis_single)
    , hoconsc:ref(?MODULE, redis_sentinel)
    , hoconsc:ref(?MODULE, redis_cluster)
@@ -114,26 +114,35 @@ fields(http_post) ->
      }
    ] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
fields(mongo_single) ->
    connector_fields(mongo, single) ++
    [ {collection, #{type => atom()}}
    , {selector, #{type => map()}}
    ];
    , {type, #{type => mongodb}}
    , {enable, #{type => boolean(),
                 default => true}}
    ] ++ emqx_connector_mongo:fields(single);
fields(mongo_rs) ->
    connector_fields(mongo, rs) ++
    [ {collection, #{type => atom()}}
    , {selector, #{type => map()}}
    ];
    , {type, #{type => mongodb}}
    , {enable, #{type => boolean(),
                 default => true}}
    ] ++ emqx_connector_mongo:fields(rs);
fields(mongo_sharded) ->
    connector_fields(mongo, sharded) ++
    [ {collection, #{type => atom()}}
    , {selector, #{type => map()}}
    ];
    , {type, #{type => mongodb}}
    , {enable, #{type => boolean(),
                 default => true}}
    ] ++ emqx_connector_mongo:fields(sharded);
fields(mysql) ->
    connector_fields(mysql) ++
    [ {query, query()} ];
fields(pgsql) ->
    connector_fields(pgsql) ++
    [ {query, query()} ];
fields(postgresql) ->
    [ {query, query()}
    , {type, #{type => postgresql}}
    , {enable, #{type => boolean(),
                 default => true}}
    ] ++ emqx_connector_pgsql:fields(config);
fields(redis_single) ->
    connector_fields(redis, single) ++
    [ {cmd, query()} ];
@@ -181,4 +190,4 @@ connector_fields(DB, Fields) ->
to_list(A) when is_atom(A) ->
    atom_to_list(A);
to_list(B) when is_binary(B) ->
    binary_to_list(B).
    binary_to_list(B).
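Under the reworked fields above, each source carries an explicit type and enable flag next to the connector fields. A sketch of a postgresql source, with illustrative values modeled on the test fixtures in this commit:

    authorization.sources = [
        {
            type = postgresql
            enable = true
            server = "127.0.0.1:5432"
            pool_size = 1
            query = "..."   # any SQL string accepted by the connector
        }
    ]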
@@ -67,7 +67,7 @@ init_per_testcase(_, Config) ->
                   <<"method">> => <<"get">>,
                   <<"request_timeout">> => 5000
                  }).
-define(SOURCE2, #{<<"type">> => <<"mongo">>,
-define(SOURCE2, #{<<"type">> => <<"mongodb">>,
                   <<"enable">> => true,
                   <<"mongo_type">> => <<"single">>,
                   <<"server">> => <<"127.0.0.1:27017">>,
@@ -88,7 +88,7 @@ init_per_testcase(_, Config) ->
                   <<"ssl">> => #{<<"enable">> => false},
                   <<"query">> => <<"abcb">>
                  }).
-define(SOURCE4, #{<<"type">> => <<"pgsql">>,
-define(SOURCE4, #{<<"type">> => <<"postgresql">>,
                   <<"enable">> => true,
                   <<"server">> => <<"127.0.0.1:27017">>,
                   <<"pool_size">> => 1,
@@ -128,24 +128,24 @@ t_update_source(_) ->
    {ok, _} = emqx_authz:update(tail, [?SOURCE6]),

    ?assertMatch([ #{type := http, enable := true}
                 , #{type := mongo, enable := true}
                 , #{type := mongodb, enable := true}
                 , #{type := mysql, enable := true}
                 , #{type := pgsql, enable := true}
                 , #{type := postgresql, enable := true}
                 , #{type := redis, enable := true}
                 , #{type := file, enable := true}
                 ], emqx:get_config([authorization, sources], [])),

    {ok, _} = emqx_authz:update({replace_once, http}, ?SOURCE1#{<<"enable">> := false}),
    {ok, _} = emqx_authz:update({replace_once, mongo}, ?SOURCE2#{<<"enable">> := false}),
    {ok, _} = emqx_authz:update({replace_once, mongodb}, ?SOURCE2#{<<"enable">> := false}),
    {ok, _} = emqx_authz:update({replace_once, mysql}, ?SOURCE3#{<<"enable">> := false}),
    {ok, _} = emqx_authz:update({replace_once, pgsql}, ?SOURCE4#{<<"enable">> := false}),
    {ok, _} = emqx_authz:update({replace_once, postgresql}, ?SOURCE4#{<<"enable">> := false}),
    {ok, _} = emqx_authz:update({replace_once, redis}, ?SOURCE5#{<<"enable">> := false}),
    {ok, _} = emqx_authz:update({replace_once, file}, ?SOURCE6#{<<"enable">> := false}),

    ?assertMatch([ #{type := http, enable := false}
                 , #{type := mongo, enable := false}
                 , #{type := mongodb, enable := false}
                 , #{type := mysql, enable := false}
                 , #{type := pgsql, enable := false}
                 , #{type := postgresql, enable := false}
                 , #{type := redis, enable := false}
                 , #{type := file, enable := false}
                 ], emqx:get_config([authorization, sources], [])),
@@ -155,47 +155,47 @@ t_update_source(_) ->
t_move_source(_) ->
    {ok, _} = emqx_authz:update(replace, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]),
    ?assertMatch([ #{type := http}
                 , #{type := mongo}
                 , #{type := mongodb}
                 , #{type := mysql}
                 , #{type := pgsql}
                 , #{type := postgresql}
                 , #{type := redis}
                 , #{type := file}
                 ], emqx_authz:lookup()),

    {ok, _} = emqx_authz:move(pgsql, <<"top">>),
    ?assertMatch([ #{type := pgsql}
    {ok, _} = emqx_authz:move(postgresql, <<"top">>),
    ?assertMatch([ #{type := postgresql}
                 , #{type := http}
                 , #{type := mongo}
                 , #{type := mongodb}
                 , #{type := mysql}
                 , #{type := redis}
                 , #{type := file}
                 ], emqx_authz:lookup()),

    {ok, _} = emqx_authz:move(http, <<"bottom">>),
    ?assertMatch([ #{type := pgsql}
                 , #{type := mongo}
    ?assertMatch([ #{type := postgresql}
                 , #{type := mongodb}
                 , #{type := mysql}
                 , #{type := redis}
                 , #{type := file}
                 , #{type := http}
                 ], emqx_authz:lookup()),

    {ok, _} = emqx_authz:move(mysql, #{<<"before">> => pgsql}),
    {ok, _} = emqx_authz:move(mysql, #{<<"before">> => postgresql}),
    ?assertMatch([ #{type := mysql}
                 , #{type := pgsql}
                 , #{type := mongo}
                 , #{type := postgresql}
                 , #{type := mongodb}
                 , #{type := redis}
                 , #{type := file}
                 , #{type := http}
                 ], emqx_authz:lookup()),

    {ok, _} = emqx_authz:move(mongo, #{<<"after">> => http}),
    {ok, _} = emqx_authz:move(mongodb, #{<<"after">> => http}),
    ?assertMatch([ #{type := mysql}
                 , #{type := pgsql}
                 , #{type := postgresql}
                 , #{type := redis}
                 , #{type := file}
                 , #{type := http}
                 , #{type := mongo}
                 , #{type := mongodb}
                 ], emqx_authz:lookup()),

    ok.
@@ -44,7 +44,7 @@
                   <<"method">> => <<"get">>,
                   <<"request_timeout">> => 5000
                  }).
-define(SOURCE2, #{<<"type">> => <<"mongo">>,
-define(SOURCE2, #{<<"type">> => <<"mongodb">>,
                   <<"enable">> => true,
                   <<"mongo_type">> => <<"sharded">>,
                   <<"servers">> => [<<"127.0.0.1:27017">>,
@@ -67,7 +67,7 @@
                   <<"ssl">> => #{<<"enable">> => false},
                   <<"query">> => <<"abcb">>
                  }).
-define(SOURCE4, #{<<"type">> => <<"pgsql">>,
-define(SOURCE4, #{<<"type">> => <<"postgresql">>,
                   <<"enable">> => true,
                   <<"server">> => <<"127.0.0.1:5432">>,
                   <<"pool_size">> => 1,
@@ -148,8 +148,8 @@ set_special_configs(_App) ->
    ok.

init_per_testcase(t_api, Config) ->
    meck:new(emqx_plugin_libs_id, [non_strict, passthrough, no_history, no_link]),
    meck:expect(emqx_plugin_libs_id, gen, fun() -> "fake" end),
    meck:new(emqx_misc, [non_strict, passthrough, no_history, no_link]),
    meck:expect(emqx_misc, gen_id, fun() -> "fake" end),

    meck:new(emqx, [non_strict, passthrough, no_history, no_link]),
    meck:expect(emqx, get_config, fun([node, data_dir]) ->
@@ -162,7 +162,7 @@ init_per_testcase(t_api, Config) ->
init_per_testcase(_, Config) -> Config.

end_per_testcase(t_api, _Config) ->
    meck:unload(emqx_plugin_libs_id),
    meck:unload(emqx_misc),
    meck:unload(emqx),
    ok;
end_per_testcase(_, _Config) -> ok.
@@ -181,9 +181,9 @@ t_api(_) ->
    {ok, 200, Result2} = request(get, uri(["authorization", "sources"]), []),
    Sources = get_sources(Result2),
    ?assertMatch([ #{<<"type">> := <<"http">>}
                 , #{<<"type">> := <<"mongo">>}
                 , #{<<"type">> := <<"mongodb">>}
                 , #{<<"type">> := <<"mysql">>}
                 , #{<<"type">> := <<"pgsql">>}
                 , #{<<"type">> := <<"postgresql">>}
                 , #{<<"type">> := <<"redis">>}
                 , #{<<"type">> := <<"file">>}
                 ], Sources),
@@ -193,7 +193,7 @@ t_api(_) ->
    {ok, 200, Result3} = request(get, uri(["authorization", "sources", "http"]), []),
    ?assertMatch(#{<<"type">> := <<"http">>, <<"enable">> := false}, jsx:decode(Result3)),

    {ok, 204, _} = request(put, uri(["authorization", "sources", "mongo"]),
    {ok, 204, _} = request(put, uri(["authorization", "sources", "mongodb"]),
                           ?SOURCE2#{<<"ssl">> := #{
                               <<"enable">> => true,
                               <<"cacertfile">> => <<"fake cacert file">>,
@@ -201,8 +201,8 @@ t_api(_) ->
                               <<"keyfile">> => <<"fake key file">>,
                               <<"verify">> => false
                           }}),
    {ok, 200, Result4} = request(get, uri(["authorization", "sources", "mongo"]), []),
    ?assertMatch(#{<<"type">> := <<"mongo">>,
    {ok, 200, Result4} = request(get, uri(["authorization", "sources", "mongodb"]), []),
    ?assertMatch(#{<<"type">> := <<"mongodb">>,
                   <<"ssl">> := #{<<"enable">> := true,
                                  <<"cacertfile">> := <<"fake cacert file">>,
                                  <<"certfile">> := <<"fake cert file">>,
@@ -219,51 +219,52 @@ t_api(_) ->
    end, Sources),
    {ok, 200, Result5} = request(get, uri(["authorization", "sources"]), []),
    ?assertEqual([], get_sources(Result5)),
    ?assertEqual([], emqx:get_config([authorization, sources])),
    ok.

t_move_source(_) ->
    {ok, _} = emqx_authz:update(replace, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5]),
    ?assertMatch([ #{type := http}
                 , #{type := mongo}
                 , #{type := mongodb}
                 , #{type := mysql}
                 , #{type := pgsql}
                 , #{type := postgresql}
                 , #{type := redis}
                 ], emqx_authz:lookup()),

    {ok, 204, _} = request(post, uri(["authorization", "sources", "pgsql", "move"]),
    {ok, 204, _} = request(post, uri(["authorization", "sources", "postgresql", "move"]),
                           #{<<"position">> => <<"top">>}),
    ?assertMatch([ #{type := pgsql}
    ?assertMatch([ #{type := postgresql}
                 , #{type := http}
                 , #{type := mongo}
                 , #{type := mongodb}
                 , #{type := mysql}
                 , #{type := redis}
                 ], emqx_authz:lookup()),

    {ok, 204, _} = request(post, uri(["authorization", "sources", "http", "move"]),
                           #{<<"position">> => <<"bottom">>}),
    ?assertMatch([ #{type := pgsql}
                 , #{type := mongo}
    ?assertMatch([ #{type := postgresql}
                 , #{type := mongodb}
                 , #{type := mysql}
                 , #{type := redis}
                 , #{type := http}
                 ], emqx_authz:lookup()),

    {ok, 204, _} = request(post, uri(["authorization", "sources", "mysql", "move"]),
                           #{<<"position">> => #{<<"before">> => <<"pgsql">>}}),
                           #{<<"position">> => #{<<"before">> => <<"postgresql">>}}),
    ?assertMatch([ #{type := mysql}
                 , #{type := pgsql}
                 , #{type := mongo}
                 , #{type := postgresql}
                 , #{type := mongodb}
                 , #{type := redis}
                 , #{type := http}
                 ], emqx_authz:lookup()),

    {ok, 204, _} = request(post, uri(["authorization", "sources", "mongo", "move"]),
    {ok, 204, _} = request(post, uri(["authorization", "sources", "mongodb", "move"]),
                           #{<<"position">> => #{<<"after">> => <<"http">>}}),
    ?assertMatch([ #{type := mysql}
                 , #{type := pgsql}
                 , #{type := postgresql}
                 , #{type := redis}
                 , #{type := http}
                 , #{type := mongo}
                 , #{type := mongodb}
                 ], emqx_authz:lookup()),

    ok.
@@ -13,7 +13,7 @@
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_authz_mongo_SUITE).
-module(emqx_authz_mongodb_SUITE).

-compile(nowarn_export_all).
-compile(export_all).
@@ -46,7 +46,7 @@ init_per_suite(Config) ->
    ok = emqx_ct_helpers:start_apps([emqx_authz]),
    {ok, _} = emqx:update_config([authorization, cache, enable], false),
    {ok, _} = emqx:update_config([authorization, no_match], deny),
    Rules = [#{<<"type">> => <<"mongo">>,
    Rules = [#{<<"type">> => <<"mongodb">>,
               <<"mongo_type">> => <<"single">>,
               <<"server">> => <<"127.0.0.1:27017">>,
               <<"pool_size">> => 1,
@@ -13,7 +13,7 @@
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_authz_pgsql_SUITE).
-module(emqx_authz_postgresql_SUITE).

-compile(nowarn_export_all).
-compile(export_all).
@@ -47,7 +47,7 @@ init_per_suite(Config) ->

    {ok, _} = emqx:update_config([authorization, cache, enable], false),
    {ok, _} = emqx:update_config([authorization, no_match], deny),
    Rules = [#{<<"type">> => <<"pgsql">>,
    Rules = [#{<<"type">> => <<"postgresql">>,
               <<"server">> => <<"127.0.0.1:27017">>,
               <<"pool_size">> => 1,
               <<"database">> => <<"mqtt">>,
@@ -1,28 +1,28 @@

auto_subscribe {
    topics = [
        # {
        #     topic = "/c/${clientid}",
        #     qos = 0
        #     rh = 0
        #     rap = 0
        #     nl = 0
        # }
        # {
        #     topic = "/u/${username}",
        # },
        # {
        #     topic = "/h/${host}",
        #     qos = 2
        # },
        # {
        #     topic = "/p/${port}",
        # },
        # {
        #     topic = "/topic/abc",
        # },
        # {
        #     topic = "/client/${clientid}/username/${username}/host/${host}/port/${port}",
        # }
        ## {
        ##     topic = "/c/${clientid}"
        ##     qos = 0
        ##     rh = 0
        ##     rap = 0
        ##     nl = 0
        ## },
        ## {
        ##     topic = "/u/${username}"
        ## },
        ## {
        ##     topic = "/h/${host}"
        ##     qos = 2
        ## },
        ## {
        ##     topic = "/p/${port}"
        ## },
        ## {
        ##     topic = "/topic/abc"
        ## },
        ## {
        ##     topic = "/client/${clientid}/username/${username}/host/${host}/port/${port}"
        ## }
    ]
}
@@ -1,3 +1,4 @@
%% -*- mode: erlang -*-
{application, emqx_auto_subscribe,
 [{description, "An OTP application"},
  {vsn, "0.1.0"},
@@ -38,7 +38,7 @@ max_limit() ->
    ?MAX_AUTO_SUBSCRIBE.

list() ->
    emqx:get_config([auto_subscribe, topics], []).
    format(emqx:get_config([auto_subscribe, topics], [])).

update(Topics) ->
    update_(Topics).
@@ -68,6 +68,17 @@ on_client_connected(_, _, _) ->
%%--------------------------------------------------------------------
%% internal

format(Rules) when is_list(Rules) ->
    [format(Rule) || Rule <- Rules];
format(Rule = #{topic := Topic}) when is_map(Rule) ->
    #{
        topic => Topic,
        qos => maps:get(qos, Rule, 0),
        rh => maps:get(rh, Rule, 0),
        rap => maps:get(rap, Rule, 0),
        nl => maps:get(nl, Rule, 0)
    }.

update_(Topics) when length(Topics) =< ?MAX_AUTO_SUBSCRIBE ->
    {ok, _} = emqx:update_config([auto_subscribe, topics], Topics),
    update_hook();
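The new format/1 normalizes stored rules by filling in MQTT v5 subscription defaults, so a rule that only names a topic comes back fully populated. Illustration:

    %% format(#{topic => <<"/u/emqx">>})
    %%   -> #{topic => <<"/u/emqx">>, qos => 0, rh => 0, rap => 0, nl => 0}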
@@ -25,25 +25,23 @@
#   certfile = "{{ platform_etc_dir }}/certs/client-cert.pem"
#   cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
# }
# ## we will create one MQTT connection for each element of the `message_in`
# message_in: [{
#   ## the `id` will be used as part of the clientid
#   id = "pull_msgs_from_aws"
# ## We will create one MQTT connection for each element of the `ingress_channels`
# ## Syntax: ingress_channels.<id>
# ingress_channels.pull_msgs_from_aws {
#   subscribe_remote_topic = "aws/#"
#   subscribe_qos = 1
#   local_topic = "from_aws/${topic}"
#   payload = "${payload}"
#   qos = "${qos}"
#   retain = "${retain}"
# }]
# ## we will create one MQTT connection for each element of the `message_out`
# message_out: [{
#   ## the `id` will be used as part of the clientid
#   id = "push_msgs_to_aws"
# }
# ## We will create one MQTT connection for each element of the `egress_channels`
# ## Syntax: egress_channels.<id>
# egress_channels.push_msgs_to_aws {
#   subscribe_local_topic = "emqx/#"
#   remote_topic = "from_emqx/${topic}"
#   payload = "${payload}"
#   qos = 1
#   retain = false
# }]
# }
#}
@@ -1,3 +1,4 @@
%% -*- mode: erlang -*-
{application, emqx_bridge,
 [{description, "An OTP application"},
  {vsn, "0.1.0"},
@@ -14,21 +14,34 @@
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_bridge).
-behaviour(emqx_config_handler).

-export([post_config_update/4]).

-export([ load_bridges/0
        , resource_type/1
        , bridge_type/1
        , name_to_resource_id/1
        , resource_id_to_name/1
        , get_bridge/2
        , get_bridge/3
        , list_bridges/0
        , is_bridge/1
        , config_key_path/0
        , update_config/1
        , create_bridge/3
        , remove_bridge/3
        , update_bridge/3
        , start_bridge/2
        , stop_bridge/2
        , restart_bridge/2
        ]).

load_bridges() ->
    Bridges = emqx:get_config([bridges], #{}),
    emqx_bridge_monitor:ensure_all_started(Bridges).
-export([ config_key_path/0
        ]).

-export([ resource_type/1
        , bridge_type/1
        , resource_id/1
        , resource_id/2
        , parse_bridge_id/1
        ]).

config_key_path() ->
    [bridges].

resource_type(mqtt) -> emqx_connector_mqtt;
resource_type(mysql) -> emqx_connector_mysql;
@@ -44,27 +57,140 @@ bridge_type(emqx_connector_mongo) -> mongo;
bridge_type(emqx_connector_redis) -> redis;
bridge_type(emqx_connector_ldap) -> ldap.

name_to_resource_id(BridgeName) ->
    Name = bin(BridgeName),
    <<"bridge:", Name/binary>>.
post_config_update(_Req, NewConf, OldConf, _AppEnv) ->
    #{added := Added, removed := Removed, changed := Updated}
        = diff_confs(NewConf, OldConf),
    perform_bridge_changes([
        {fun remove_bridge/3, Removed},
        {fun create_bridge/3, Added},
        {fun update_bridge/3, Updated}
    ]).

resource_id_to_name(<<"bridge:", BridgeName/binary>> = _ResourceId) ->
    BridgeName.
perform_bridge_changes(Tasks) ->
    perform_bridge_changes(Tasks, ok).

perform_bridge_changes([], Result) ->
    Result;
perform_bridge_changes([{Action, MapConfs} | Tasks], Result0) ->
    Result = maps:fold(fun
            ({_Type, _Name}, _Conf, {error, Reason}) ->
                {error, Reason};
            ({Type, Name}, Conf, _) ->
                case Action(Type, Name, Conf) of
                    {error, Reason} -> {error, Reason};
                    Return -> Return
                end
        end, Result0, MapConfs),
    perform_bridge_changes(Tasks, Result).

load_bridges() ->
    Bridges = emqx:get_config([bridges], #{}),
    emqx_bridge_monitor:ensure_all_started(Bridges).

resource_id(BridgeId) when is_binary(BridgeId) ->
    <<"bridge:", BridgeId/binary>>.

resource_id(BridgeType, BridgeName) ->
    BridgeId = bridge_id(BridgeType, BridgeName),
    resource_id(BridgeId).

bridge_id(BridgeType, BridgeName) ->
    Name = bin(BridgeName),
    Type = bin(BridgeType),
    <<Type/binary, ":", Name/binary>>.

parse_bridge_id(BridgeId) ->
    try
        [Type, Name] = string:split(str(BridgeId), ":", leading),
        {list_to_existing_atom(Type), list_to_atom(Name)}
    catch
        _ : _ -> error({invalid_bridge_id, BridgeId})
    end.

list_bridges() ->
    emqx_resource_api:list_instances(fun emqx_bridge:is_bridge/1).
    lists:foldl(fun({Type, NameAndConf}, Bridges) ->
            lists:foldl(fun({Name, RawConf}, Acc) ->
                    case get_bridge(Type, Name, RawConf) of
                        {error, not_found} -> Acc;
                        {ok, Res} -> [Res | Acc]
                    end
                end, Bridges, maps:to_list(NameAndConf))
        end, [], maps:to_list(emqx:get_raw_config([bridges]))).

is_bridge(#{id := <<"bridge:", _/binary>>}) ->
    true;
is_bridge(_Data) ->
    false.
get_bridge(Type, Name) ->
    RawConf = emqx:get_raw_config([bridges, Type, Name], #{}),
    get_bridge(Type, Name, RawConf).
get_bridge(Type, Name, RawConf) ->
    case emqx_resource:get_instance(resource_id(Type, Name)) of
        {error, not_found} -> {error, not_found};
        {ok, Data} -> {ok, #{id => bridge_id(Type, Name), resource_data => Data,
                             raw_config => RawConf}}
    end.

config_key_path() ->
    [emqx_bridge, bridges].
start_bridge(Type, Name) ->
    restart_bridge(Type, Name).

update_config(ConfigReq) ->
    emqx:update_config(config_key_path(), ConfigReq).
stop_bridge(Type, Name) ->
    emqx_resource:stop(resource_id(Type, Name)).

restart_bridge(Type, Name) ->
    emqx_resource:restart(resource_id(Type, Name)).

create_bridge(Type, Name, Conf) ->
    logger:info("create ~p bridge ~p use config: ~p", [Type, Name, Conf]),
    ResId = resource_id(Type, Name),
    case emqx_resource:create(ResId,
            emqx_bridge:resource_type(Type), Conf) of
        {ok, already_created} ->
            emqx_resource:get_instance(ResId);
        {ok, Data} ->
            {ok, Data};
        {error, Reason} ->
            {error, Reason}
    end.

update_bridge(Type, Name, {_OldConf, Conf}) ->
    %% TODO: sometimes it's not necessary to restart the bridge connection.
    %%
    %% - if connection-related configs like `username` are updated, we should restart/start
    %%   or stop bridges according to the change.
    %% - if connection-related configs are not updated, but channel configs `ingress_channels` or
    %%   `egress_channels` are changed, then we should not restart the bridge, we only restart/start
    %%   the channels.
    %%
    logger:info("update ~p bridge ~p use config: ~p", [Type, Name, Conf]),
    emqx_resource:recreate(resource_id(Type, Name),
        emqx_bridge:resource_type(Type), Conf, []).

remove_bridge(Type, Name, _Conf) ->
    logger:info("remove ~p bridge ~p", [Type, Name]),
    case emqx_resource:remove(resource_id(Type, Name)) of
        ok -> ok;
        {error, not_found} -> ok;
        {error, Reason} ->
            {error, Reason}
    end.

diff_confs(NewConfs, OldConfs) ->
    emqx_map_lib:diff_maps(flatten_confs(NewConfs),
                           flatten_confs(OldConfs)).

flatten_confs(Conf0) ->
    maps:from_list(
        lists:flatmap(fun({Type, Conf}) ->
                do_flatten_confs(Type, Conf)
            end, maps:to_list(Conf0))).

do_flatten_confs(Type, Conf0) ->
    [{{Type, Name}, Conf} || {Name, Conf} <- maps:to_list(Conf0)].

bin(Bin) when is_binary(Bin) -> Bin;
bin(Str) when is_list(Str) -> list_to_binary(Str);
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).

str(A) when is_atom(A) ->
    atom_to_list(A);
str(B) when is_binary(B) ->
    binary_to_list(B);
str(S) when is_list(S) ->
    S.
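A worked example of the new ID scheme and the config diffing above (bridge names are illustrative):

    %% bridge_id(mqtt, my_bridge)            -> <<"mqtt:my_bridge">>
    %% resource_id(mqtt, my_bridge)          -> <<"bridge:mqtt:my_bridge">>
    %% parse_bridge_id(<<"mqtt:my_bridge">>) -> {mqtt, my_bridge}
    %%
    %% flatten_confs(#{mqtt => #{a => ConfA, b => ConfB}})
    %%   -> #{{mqtt, a} => ConfA, {mqtt, b} => ConfB}
    %% so emqx_map_lib:diff_maps/2 can classify each {Type, Name} entry as
    %% added, removed, or changed, and post_config_update/4 dispatches
    %% create_bridge/3, remove_bridge/3 or update_bridge/3 accordingly.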
@@ -15,128 +15,224 @@
%%--------------------------------------------------------------------
-module(emqx_bridge_api).

-rest_api(#{ name => list_data_bridges
           , method => 'GET'
           , path => "/data_bridges"
           , func => list_bridges
           , descr => "List all data bridges"
           }).
-behaviour(minirest_api).

-rest_api(#{ name => get_data_bridge
           , method => 'GET'
           , path => "/data_bridges/:bin:name"
           , func => get_bridge
           , descr => "Get a data bridge by name"
           }).

-rest_api(#{ name => create_data_bridge
           , method => 'POST'
           , path => "/data_bridges/:bin:name"
           , func => create_bridge
           , descr => "Create a new data bridge"
           }).

-rest_api(#{ name => update_data_bridge
           , method => 'PUT'
           , path => "/data_bridges/:bin:name"
           , func => update_bridge
           , descr => "Update an existing data bridge"
           }).

-rest_api(#{ name => delete_data_bridge
           , method => 'DELETE'
           , path => "/data_bridges/:bin:name"
           , func => delete_bridge
           , descr => "Delete an existing data bridge"
           }).
-export([api_spec/0]).

-export([ list_bridges/2
        , get_bridge/2
        , create_bridge/2
        , update_bridge/2
        , delete_bridge/2
        , list_local_bridges/1
        , crud_bridges_cluster/2
        , crud_bridges/3
        , manage_bridges/2
        ]).

-define(BRIDGE(N, T, C), #{<<"name">> => N, <<"type">> => T, <<"config">> => C}).
-define(TYPES, [mqtt]).
-define(BRIDGE(N, T, C), #{<<"id">> => N, <<"type">> => T, <<"config">> => C}).
-define(TRY_PARSE_ID(ID, EXPR),
    try emqx_bridge:parse_bridge_id(Id) of
        {BridgeType, BridgeName} -> EXPR
    catch
        error:{invalid_bridge_id, Id0} ->
            {400, #{code => 102, message => <<"invalid_bridge_id: ", Id0/binary>>}}
    end).

list_bridges(_Binding, _Params) ->
    {200, #{code => 0, data => [format_api_reply(Data) ||
                                Data <- emqx_bridge:list_bridges()]}}.
req_schema() ->
    Schema = [
        case maps:to_list(emqx:get_raw_config([bridges, T], #{})) of
            %% the bridge is not configured, so we have no method to get the schema
            [] -> #{};
            [{_K, Conf} | _] ->
                emqx_mgmt_api_configs:gen_schema(Conf)
        end
        || T <- ?TYPES],
    #{oneOf => Schema}.

get_bridge(#{name := Name}, _Params) ->
    case emqx_resource:get_instance(emqx_bridge:name_to_resource_id(Name)) of
        {ok, Data} ->
            {200, #{code => 0, data => format_api_reply(emqx_resource_api:format_data(Data))}};
resp_schema() ->
    #{oneOf := Schema} = req_schema(),
    AddMetadata = fun(Prop) ->
        Prop#{is_connected => #{type => boolean},
              id => #{type => string},
              bridge_type => #{type => string, enum => ?TYPES},
              node => #{type => string}}
    end,
    Schema1 = [S#{properties => AddMetadata(Prop)}
               || S = #{properties := Prop} <- Schema],
    #{oneOf => Schema1}.

api_spec() ->
    {bridge_apis(), []}.

bridge_apis() ->
    [list_all_bridges_api(), crud_bridges_apis(), operation_apis()].

list_all_bridges_api() ->
    Metadata = #{
        get => #{
            description => <<"List all created bridges">>,
            responses => #{
                <<"200">> => emqx_mgmt_util:array_schema(resp_schema(),
                    <<"A list of the bridges">>)
            }
        }
    },
    {"/bridges/", Metadata, list_bridges}.

crud_bridges_apis() ->
    ReqSchema = req_schema(),
    RespSchema = resp_schema(),
    Metadata = #{
        get => #{
            description => <<"Get a bridge by Id">>,
            parameters => [param_path_id()],
            responses => #{
                <<"200">> => emqx_mgmt_util:array_schema(RespSchema,
                    <<"The details of the bridge">>),
                <<"404">> => emqx_mgmt_util:error_schema(<<"Bridge not found">>, ['NOT_FOUND'])
            }
        },
        put => #{
            description => <<"Create or update a bridge">>,
            parameters => [param_path_id()],
            'requestBody' => emqx_mgmt_util:schema(ReqSchema),
            responses => #{
                <<"200">> => emqx_mgmt_util:array_schema(RespSchema, <<"Bridge updated">>),
                <<"400">> => emqx_mgmt_util:error_schema(<<"Update bridge failed">>,
                    ['UPDATE_FAILED'])
            }
        },
        delete => #{
            description => <<"Delete a bridge">>,
            parameters => [param_path_id()],
            responses => #{
                <<"200">> => emqx_mgmt_util:schema(<<"Bridge deleted">>),
                <<"404">> => emqx_mgmt_util:error_schema(<<"Bridge not found">>, ['NOT_FOUND'])
            }
        }
    },
    {"/bridges/:id", Metadata, crud_bridges_cluster}.

operation_apis() ->
    Metadata = #{
        post => #{
            description => <<"Start/Stop/Restart bridges on a specific node">>,
            parameters => [
                param_path_node(),
                param_path_id(),
                param_path_operation()],
            responses => #{
                <<"500">> => emqx_mgmt_util:error_schema(<<"Operation Failed">>, ['INTERNAL_ERROR']),
                <<"200">> => emqx_mgmt_util:schema(<<"Operation success">>)}}},
    {"/nodes/:node/bridges/:id/operation/:operation", Metadata, manage_bridges}.

param_path_node() ->
    #{
        name => node,
        in => path,
        schema => #{type => string},
        required => true,
        example => node()
    }.

param_path_id() ->
    #{
        name => id,
        in => path,
        schema => #{type => string},
        required => true
    }.

param_path_operation() ->
    #{
        name => operation,
        in => path,
        required => true,
        schema => #{
            type => string,
            enum => [start, stop, restart]},
        example => restart
    }.

list_bridges(get, _Params) ->
    {200, lists:append([list_local_bridges(Node) || Node <- ekka_mnesia:running_nodes()])}.

list_local_bridges(Node) when Node =:= node() ->
    [format_resp(Data) || Data <- emqx_bridge:list_bridges()];
list_local_bridges(Node) ->
    rpc_call(Node, list_local_bridges, [Node]).

crud_bridges_cluster(Method, Params) ->
    Results = [crud_bridges(Node, Method, Params) || Node <- ekka_mnesia:running_nodes()],
    case lists:filter(fun({200}) -> false; ({200, _}) -> false; (_) -> true end, Results) of
        [] ->
            case Results of
                [{200} | _] -> {200};
                _ -> {200, [Res || {200, Res} <- Results]}
            end;
        Errors ->
            hd(Errors)
    end.

crud_bridges(Node, Method, Params) when Node =/= node() ->
    rpc_call(Node, crud_bridges, [Node, Method, Params]);

crud_bridges(_, get, #{bindings := #{id := Id}}) ->
    ?TRY_PARSE_ID(Id, case emqx_bridge:get_bridge(BridgeType, BridgeName) of
        {ok, Data} -> {200, format_resp(Data)};
        {error, not_found} ->
            {404, #{code => 102, message => <<"not_found: ", Name/binary>>}}
            {404, #{code => 102, message => <<"not_found: ", Id/binary>>}}
    end);

crud_bridges(_, put, #{bindings := #{id := Id}, body := Conf}) ->
    ?TRY_PARSE_ID(Id,
        case emqx:update_config(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], Conf,
                #{rawconf_with_defaults => true}) of
            {ok, #{raw_config := RawConf, post_config_update := #{emqx_bridge := Data}}} ->
                {200, format_resp(#{id => Id, raw_config => RawConf, resource_data => Data})};
            {ok, _} -> %% the bridge already exists
                {ok, Data} = emqx_bridge:get_bridge(BridgeType, BridgeName),
                {200, format_resp(Data)};
            {error, Reason} ->
                {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}}
        end);

crud_bridges(_, delete, #{bindings := #{id := Id}}) ->
    ?TRY_PARSE_ID(Id,
        case emqx:remove_config(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName]) of
            {ok, _} -> {200};
            {error, Reason} ->
                {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}}
        end).

manage_bridges(post, #{bindings := #{node := Node, id := Id, operation := Op}}) ->
    OperFun =
        fun (<<"start">>) -> start_bridge;
            (<<"stop">>) -> stop_bridge;
            (<<"restart">>) -> restart_bridge
        end,
    ?TRY_PARSE_ID(Id,
        case rpc_call(binary_to_atom(Node, latin1), emqx_bridge, OperFun(Op),
                [BridgeType, BridgeName]) of
            ok -> {200};
            {error, Reason} ->
                {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}}
        end).

format_resp(#{id := Id, raw_config := RawConf, resource_data := #{mod := Mod, status := Status}}) ->
    IsConnected = fun(started) -> true; (_) -> false end,
    RawConf#{
        id => Id,
        node => node(),
        bridge_type => emqx_bridge:bridge_type(Mod),
        is_connected => IsConnected(Status)
    }.

rpc_call(Node, Fun, Args) ->
    rpc_call(Node, ?MODULE, Fun, Args).

rpc_call(Node, Mod, Fun, Args) when Node =:= node() ->
    apply(Mod, Fun, Args);
rpc_call(Node, Mod, Fun, Args) ->
    case rpc:call(Node, Mod, Fun, Args) of
        {badrpc, Reason} -> {error, Reason};
        Res -> Res
    end.

create_bridge(#{name := Name}, Params) ->
    Config = proplists:get_value(<<"config">>, Params),
    BridgeType = proplists:get_value(<<"type">>, Params),
    case emqx_resource:check_and_create(
            emqx_bridge:name_to_resource_id(Name),
            emqx_bridge:resource_type(atom(BridgeType)), maps:from_list(Config)) of
        {ok, already_created} ->
            {400, #{code => 102, message => <<"bridge already created: ", Name/binary>>}};
        {ok, Data} ->
            update_config_and_reply(Name, BridgeType, Config, Data);
        {error, Reason0} ->
            Reason = emqx_resource_api:stringnify(Reason0),
            {500, #{code => 102, message => <<"create bridge ", Name/binary,
                                              " failed:", Reason/binary>>}}
    end.

update_bridge(#{name := Name}, Params) ->
    Config = proplists:get_value(<<"config">>, Params),
    BridgeType = proplists:get_value(<<"type">>, Params),
    case emqx_resource:check_and_update(
            emqx_bridge:name_to_resource_id(Name),
            emqx_bridge:resource_type(atom(BridgeType)), maps:from_list(Config), []) of
        {ok, Data} ->
            update_config_and_reply(Name, BridgeType, Config, Data);
        {error, not_found} ->
            {400, #{code => 102, message => <<"bridge not_found: ", Name/binary>>}};
        {error, Reason0} ->
            Reason = emqx_resource_api:stringnify(Reason0),
            {500, #{code => 102, message => <<"update bridge ", Name/binary,
                                              " failed:", Reason/binary>>}}
    end.

delete_bridge(#{name := Name}, _Params) ->
    case emqx_resource:remove(emqx_bridge:name_to_resource_id(Name)) of
        ok -> delete_config_and_reply(Name);
        {error, Reason} ->
            {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}}
    end.

format_api_reply(#{resource_type := Type, id := Id, config := Conf, status := Status}) ->
    #{type => emqx_bridge:bridge_type(Type),
      name => emqx_bridge:resource_id_to_name(Id),
      config => Conf, status => Status}.

% format_conf(#{resource_type := Type, id := Id, config := Conf}) ->
%     #{type => Type, name => emqx_bridge:resource_id_to_name(Id),
%       config => Conf}.

% get_all_configs() ->
%     [format_conf(Data) || Data <- emqx_bridge:list_bridges()].

update_config_and_reply(Name, BridgeType, Config, Data) ->
    case emqx_bridge:update_config({update, ?BRIDGE(Name, BridgeType, Config)}) of
        {ok, _} ->
            {200, #{code => 0, data => format_api_reply(
                                       emqx_resource_api:format_data(Data))}};
        {error, Reason} ->
            {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}}
    end.

delete_config_and_reply(Name) ->
    case emqx_bridge:update_config({delete, Name}) of
        {ok, _} -> {200, #{code => 0, data => #{}}};
        {error, Reason} ->
            {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}}
    end.

atom(B) when is_binary(B) ->
    list_to_existing_atom(binary_to_list(B)).
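For orientation, the new routes above address a bridge by its compound id; illustrative requests with hypothetical bridge and node names:

    %% GET    /bridges/                -> list_bridges/2 across all running nodes
    %% GET    /bridges/mqtt:my_bridge  -> crud_bridges_cluster(get, ...)
    %% PUT    /bridges/mqtt:my_bridge  -> create-or-update via emqx:update_config/3
    %% DELETE /bridges/mqtt:my_bridge  -> emqx:remove_config/1
    %% POST   /nodes/emqx@127.0.0.1/bridges/mqtt:my_bridge/operation/restart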
@@ -17,29 +17,15 @@

-behaviour(application).

-behaviour(emqx_config_handler).

-export([start/2, stop/1, pre_config_update/2]).
-export([start/2, stop/1]).

start(_StartType, _StartArgs) ->
    {ok, Sup} = emqx_bridge_sup:start_link(),
    ok = emqx_bridge:load_bridges(),
    emqx_config_handler:add_handler(emqx_bridge:config_key_path(), ?MODULE),
    emqx_config_handler:add_handler(emqx_bridge:config_key_path(), emqx_bridge),
    {ok, Sup}.

stop(_State) ->
    ok.

%% internal functions
pre_config_update({update, Bridge = #{<<"name">> := Name}}, OldConf) ->
    {ok, [Bridge | remove_bridge(Name, OldConf)]};
pre_config_update({delete, Name}, OldConf) ->
    {ok, remove_bridge(Name, OldConf)};
pre_config_update(NewConf, _OldConf) when is_list(NewConf) ->
    %% overwrite the entire config!
    {ok, NewConf}.

remove_bridge(_Name, undefined) ->
    [];
remove_bridge(Name, OldConf) ->
    [B || B = #{<<"name">> := Name0} <- OldConf, Name0 =/= Name].
%% internal functions
@@ -75,7 +75,7 @@ load_bridges(Configs) ->
%% emqx_resource:check_and_create_local(ResourceId, ResourceType, Config, #{keep_retry => true}).
load_bridge(Name, Type, Config) ->
    case emqx_resource:create_local(
            emqx_bridge:name_to_resource_id(Name),
            emqx_bridge:resource_id(Type, Name),
            emqx_bridge:resource_type(Type), Config) of
        {ok, already_created} -> ok;
        {ok, _} -> ok;
@@ -5,13 +5,10 @@
%%======================================================================================
%% Hocon Schema Definitions

roots() -> ["bridges"].
roots() -> [bridges].

fields("bridges") ->
    [{mqtt, hoconsc:ref(?MODULE, "mqtt")}];

fields("mqtt") ->
    [{"$name", hoconsc:ref(?MODULE, "mqtt_bridge")}];
fields(bridges) ->
    [{mqtt, hoconsc:mk(hoconsc:map(name, hoconsc:ref(?MODULE, "mqtt_bridge")))}];

fields("mqtt_bridge") ->
    emqx_connector_mqtt:fields("config").
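With the map-typed field above, bridges are declared as a map keyed by bridge name instead of a "$name" placeholder. A sketch of the resulting config shape (the bridge name is illustrative; the concrete fields come from emqx_connector_mqtt:fields("config")):

    bridges {
        mqtt.my_mqtt_bridge {
            ## connector fields: remote server, ssl, ingress/egress channels, ...
        }
    }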
@@ -1,3 +1,4 @@
%% -*- mode: erlang -*-
{application, emqx_connector,
 [{description, "An OTP application"},
  {vsn, "0.1.1"},
@@ -65,10 +65,10 @@ validations() ->

base_url(type) -> url();
base_url(nullable) -> false;
base_url(validate) -> fun (#{query := _Query}) ->
                          {error, "There must be no query in the base_url"};
base_url(validator) -> fun(#{query := _Query}) ->
                           {error, "There must be no query in the base_url"};
                       (_) -> ok
                       end;
                       end;
base_url(_) -> undefined.

connect_timeout(type) -> connect_timeout();
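The key rename above (validate -> validator) matches the key the schema library presumably looks up, so the check now actually runs; the function itself still rejects any base_url that carries a query string. Illustrative outcomes:

    %% "http://127.0.0.1:9901/prefix"     -> ok
    %% "http://127.0.0.1:9901/prefix?x=1" -> {error, "There must be no query in the base_url"}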
@@ -28,6 +28,8 @@
        , bridges/0
        ]).

-export([on_message_received/2]).

%% callbacks of behaviour emqx_resource
-export([ on_start/2
        , on_stop/2
@@ -83,86 +85,97 @@ drop_bridge(Name) ->
            {error, Error}
    end.

%% ===================================================================
%% When this bridge is used as a data source, ?MODULE:on_message_received/2 will be called
%% if the bridge receives msgs from the remote broker.
on_message_received(Msg, ChannelName) ->
    emqx:run_hook(ChannelName, [Msg]).

%% ===================================================================
on_start(InstId, Conf) ->
    logger:info("starting mqtt connector: ~p, ~p", [InstId, Conf]),
    NamePrefix = binary_to_list(InstId),
    BasicConf = basic_config(Conf),
    InitRes = {ok, #{name_prefix => NamePrefix, baisc_conf => BasicConf, sub_bridges => []}},
    InOutConfigs = check_channel_id_dup(maps:get(message_in, Conf, [])
                                        ++ maps:get(message_out, Conf, [])),
    InitRes = {ok, #{name_prefix => NamePrefix, baisc_conf => BasicConf, channels => []}},
    InOutConfigs = taged_map_list(ingress_channels, maps:get(ingress_channels, Conf, #{}))
                   ++ taged_map_list(egress_channels, maps:get(egress_channels, Conf, #{})),
    lists:foldl(fun
            (_InOutConf, {error, Reason}) ->
                {error, Reason};
            (InOutConf, {ok, #{sub_bridges := SubBridges} = Res}) ->
            (InOutConf, {ok, #{channels := SubBridges} = Res}) ->
                case create_channel(InOutConf, NamePrefix, BasicConf) of
                    {error, Reason} -> {error, Reason};
                    {ok, Name} -> {ok, Res#{sub_bridges => [Name | SubBridges]}}
                    {ok, Name} -> {ok, Res#{channels => [Name | SubBridges]}}
                end
        end, InitRes, InOutConfigs).

on_stop(InstId, #{}) ->
on_stop(InstId, #{channels := NameList}) ->
    logger:info("stopping mqtt connector: ~p", [InstId]),
    case ?MODULE:drop_bridge(InstId) of
        ok -> ok;
        {error, not_found} -> ok;
        {error, Reason} ->
            logger:error("stop bridge failed, error: ~p", [Reason])
    end.
    lists:foreach(fun(Name) ->
            remove_channel(Name)
        end, NameList).

%% TODO: let the emqx_resource trigger on_query/4 automatically according to the
%% `message_in` and `message_out` config
on_query(InstId, {create_channel, Conf}, _AfterQuery, #{name_prefix := Prefix,
%% `ingress_channels` and `egress_channels` config
on_query(_InstId, {create_channel, Conf}, _AfterQuery, #{name_prefix := Prefix,
        baisc_conf := BasicConf}) ->
    logger:debug("create channel to connector: ~p, conf: ~p", [InstId, Conf]),
    create_channel(Conf, Prefix, BasicConf);
on_query(InstId, {publish_to_local, Msg}, _AfterQuery, _State) ->
    logger:debug("publish to local node, connector: ~p, msg: ~p", [InstId, Msg]);
on_query(InstId, {publish_to_remote, Msg}, _AfterQuery, _State) ->
    logger:debug("publish to remote node, connector: ~p, msg: ~p", [InstId, Msg]).
on_query(_InstId, {send_to_remote, ChannelName, Msg}, _AfterQuery, _State) ->
    logger:debug("send msg to remote node on channel: ~p, msg: ~p", [ChannelName, Msg]),
    emqx_connector_mqtt_worker:send_to_remote(ChannelName, Msg).

on_health_check(_InstId, #{sub_bridges := NameList} = State) ->
on_health_check(_InstId, #{channels := NameList} = State) ->
    Results = [{Name, emqx_connector_mqtt_worker:ping(Name)} || Name <- NameList],
    case lists:all(fun({_, pong}) -> true; ({_, _}) -> false end, Results) of
        true -> {ok, State};
        false -> {error, {some_sub_bridge_down, Results}, State}
        false -> {error, {some_channel_down, Results}, State}
    end.

check_channel_id_dup(Confs) ->
    lists:foreach(fun(#{id := Id}) ->
            case length([Id || #{id := Id0} <- Confs, Id0 == Id]) of
                1 -> ok;
                L when L > 1 -> error({mqtt_bridge_conf, {duplicate_id_found, Id}})
            end
        end, Confs),
    Confs.
create_channel({{ingress_channels, Id}, #{subscribe_remote_topic := RemoteT} = Conf},
        NamePrefix, BasicConf) ->
    LocalT = maps:get(local_topic, Conf, undefined),
    Name = ingress_channel_name(NamePrefix, Id),
    logger:info("creating ingress channel ~p, remote ~s -> local ~s", [Name, RemoteT, LocalT]),
    do_create_channel(BasicConf#{
        name => Name,
        clientid => clientid(Name),
        subscriptions => Conf#{
            local_topic => LocalT,
            on_message_received => {fun ?MODULE:on_message_received/2, [Name]}
        },
        forwards => undefined});

%% this is an `message_in` bridge
create_channel(#{subscribe_remote_topic := _, id := Id} = InConf, NamePrefix, BasicConf) ->
    logger:info("creating 'message_in' channel for: ~p", [Id]),
    create_sub_bridge(BasicConf#{
        name => bridge_name(NamePrefix, Id),
        clientid => clientid(Id),
        subscriptions => InConf, forwards => undefined});
%% this is an `message_out` bridge
create_channel(#{subscribe_local_topic := _, id := Id} = OutConf, NamePrefix, BasicConf) ->
    logger:info("creating 'message_out' channel for: ~p", [Id]),
    create_sub_bridge(BasicConf#{
        name => bridge_name(NamePrefix, Id),
        clientid => clientid(Id),
        subscriptions => undefined, forwards => OutConf}).
create_channel({{egress_channels, Id}, #{remote_topic := RemoteT} = Conf},
        NamePrefix, BasicConf) ->
    LocalT = maps:get(subscribe_local_topic, Conf, undefined),
    Name = egress_channel_name(NamePrefix, Id),
    logger:info("creating egress channel ~p, local ~s -> remote ~s", [Name, LocalT, RemoteT]),
    do_create_channel(BasicConf#{
        name => Name,
        clientid => clientid(Name),
        subscriptions => undefined,
        forwards => Conf#{subscribe_local_topic => LocalT}}).

create_sub_bridge(#{name := Name} = Conf) ->
remove_channel(ChannelName) ->
    logger:info("removing channel ~p", [ChannelName]),
    case ?MODULE:drop_bridge(ChannelName) of
        ok -> ok;
        {error, not_found} -> ok;
        {error, Reason} ->
            logger:error("stop channel ~p failed, error: ~p", [ChannelName, Reason])
    end.

do_create_channel(#{name := Name} = Conf) ->
    case ?MODULE:create_bridge(Conf) of
        {ok, _Pid} ->
            start_sub_bridge(Name);
            start_channel(Name);
        {error, {already_started, _Pid}} ->
            ok;
            {ok, Name};
        {error, Reason} ->
            {error, Reason}
    end.

start_sub_bridge(Name) ->
start_channel(Name) ->
    case emqx_connector_mqtt_worker:ensure_started(Name) of
        ok -> {ok, Name};
        {error, Reason} -> {error, Reason}
@@ -199,11 +212,19 @@ basic_config(#{
        if_record_metrics => true
    }.

bridge_name(Prefix, Id) ->
    list_to_atom(str(Prefix) ++ ":" ++ str(Id)).
taged_map_list(Tag, Map) ->
    [{{Tag, K}, V} || {K, V} <- maps:to_list(Map)].

ingress_channel_name(Prefix, Id) ->
    channel_name("ingress_channels", Prefix, Id).
egress_channel_name(Prefix, Id) ->
    channel_name("egress_channels", Prefix, Id).

channel_name(Type, Prefix, Id) ->
    list_to_atom(str(Prefix) ++ ":" ++ Type ++ ":" ++ str(Id)).

clientid(Id) ->
    list_to_binary(str(Id) ++ ":" ++ emqx_plugin_libs_id:gen(4)).
    list_to_binary(str(Id) ++ ":" ++ emqx_misc:gen_id(8)).

str(A) when is_atom(A) ->
    atom_to_list(A);
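Channel registry names and MQTT clientids are derived from the connector instance prefix plus the channel id; a worked example with hypothetical names:

    %% ingress_channel_name("bridge:mqtt:my_bridge", pull_msgs_from_aws)
    %%   -> 'bridge:mqtt:my_bridge:ingress_channels:pull_msgs_from_aws'
    %% clientid(Name) then appends a random 8-char suffix, e.g.
    %%   <<"bridge:mqtt:my_bridge:ingress_channels:pull_msgs_from_aws:1b2c3d4e">>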
@@ -107,15 +107,15 @@ auto_reconnect(default) -> true;
auto_reconnect(_) -> undefined.

cacertfile(type) -> string();
cacertfile(default) -> "";
cacertfile(nullable) -> true;
cacertfile(_) -> undefined.

keyfile(type) -> string();
keyfile(default) -> "";
keyfile(nullable) -> true;
keyfile(_) -> undefined.

certfile(type) -> string();
certfile(default) -> "";
certfile(nullable) -> true;
certfile(_) -> undefined.

verify(type) -> boolean();
@@ -159,9 +159,15 @@ handle_puback(#{packet_id := PktId, reason_code := RC}, _Parent) ->

handle_publish(Msg, undefined) ->
    ?LOG(error, "cannot publish to local broker as 'bridge.mqtt.<name>.in' not configured, msg: ~p", [Msg]);
handle_publish(Msg, Vars) ->
handle_publish(Msg, #{on_message_received := {OnMsgRcvdFunc, Args}} = Vars) ->
    ?LOG(debug, "publish to local broker, msg: ~p, vars: ~p", [Msg, Vars]),
    emqx_broker:publish(emqx_connector_mqtt_msg:to_broker_msg(Msg, Vars)).
    emqx_metrics:inc('bridge.mqtt.message_received_from_remote', 1),
    _ = erlang:apply(OnMsgRcvdFunc, [Msg, Args]),
    case maps:get(local_topic, Vars, undefined) of
        undefined -> ok;
        _Topic ->
            emqx_broker:publish(emqx_connector_mqtt_msg:to_broker_msg(Msg, Vars))
    end.

handle_disconnected(Reason, Parent) ->
    Parent ! {disconnected, self(), Reason}.
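With the new clause above, every ingress message fires the configured on_message_received hook and bumps the metric, and is republished locally only when local_topic is set. Two illustrative outcomes (hypothetical Vars):

    %% Vars without local_topic           -> hook + metric only, returns ok
    %% Vars#{local_topic => <<"from_aws/t">>} -> hook + metric + emqx_broker:publish/1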
@ -36,17 +36,15 @@

-type variables() :: #{
    mountpoint := undefined | binary(),
    topic := binary(),
    remote_topic := binary(),
    qos := original | integer(),
    retain := original | boolean(),
    payload := binary()
}.

make_pub_vars(_, undefined) -> undefined;
make_pub_vars(Mountpoint, #{payload := _, qos := _, retain := _, remote_topic := Topic} = Conf) ->
    Conf#{topic => Topic, mountpoint => Mountpoint};
make_pub_vars(Mountpoint, #{payload := _, qos := _, retain := _, local_topic := Topic} = Conf) ->
    Conf#{topic => Topic, mountpoint => Mountpoint}.
make_pub_vars(Mountpoint, Conf) when is_map(Conf) ->
    Conf#{mountpoint => Mountpoint}.

%% @doc Make export format:
%% 1. Mount topic to a prefix
@ -61,7 +59,7 @@ to_remote_msg(#message{flags = Flags0} = Msg, Vars) ->
    Retain0 = maps:get(retain, Flags0, false),
    MapMsg = maps:put(retain, Retain0, emqx_message:to_map(Msg)),
    to_remote_msg(MapMsg, Vars);
to_remote_msg(MapMsg, #{topic := TopicToken, payload := PayloadToken,
to_remote_msg(MapMsg, #{remote_topic := TopicToken, payload := PayloadToken,
        qos := QoSToken, retain := RetainToken, mountpoint := Mountpoint}) when is_map(MapMsg) ->
    Topic = replace_vars_in_str(TopicToken, MapMsg),
    Payload = replace_vars_in_str(PayloadToken, MapMsg),
@ -77,7 +75,7 @@ to_remote_msg(#message{topic = Topic} = Msg, #{mountpoint := Mountpoint}) ->

%% published from remote node over an MQTT connection
to_broker_msg(#{dup := Dup, properties := Props} = MapMsg,
    #{topic := TopicToken, payload := PayloadToken,
    #{local_topic := TopicToken, payload := PayloadToken,
        qos := QoSToken, retain := RetainToken, mountpoint := Mountpoint}) ->
    Topic = replace_vars_in_str(TopicToken, MapMsg),
    Payload = replace_vars_in_str(PayloadToken, MapMsg),
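
%% Both functions above substitute ${...} tokens against the message map;
%% a sketch with assumed values:
%%
%%   MapMsg = #{topic => <<"t/1">>, payload => <<"hello">>, qos => 1, retain => false},
%%   %% a remote_topic template of <<"remote/${topic}">> renders <<"remote/t/1">>,
%%   %% and a payload template of <<"${payload}">> renders <<"hello">>.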
@ -115,6 +113,8 @@ from_binary(Bin) -> binary_to_term(Bin).
%% Count only the topic length + payload size
-spec estimate_size(msg()) -> integer().
estimate_size(#message{topic = Topic, payload = Payload}) ->
    size(Topic) + size(Payload);
estimate_size(#{topic := Topic, payload := Payload}) ->
    size(Topic) + size(Payload).
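
%% For example, estimate_size(#{topic => <<"t/1">>, payload => <<"hello">>})
%% returns 3 + 5 = 8; headers and properties are deliberately not counted.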

set_headers(undefined, Msg) ->
@ -38,18 +38,20 @@ fields("config") ->
    , {retry_interval, hoconsc:mk(emqx_schema:duration_ms(), #{default => "30s"})}
    , {max_inflight, hoconsc:mk(integer(), #{default => 32})}
    , {replayq, hoconsc:mk(hoconsc:ref(?MODULE, "replayq"))}
    , {message_in, hoconsc:mk(hoconsc:array(hoconsc:ref(?MODULE, "message_in")), #{default => []})}
    , {message_out, hoconsc:mk(hoconsc:array(hoconsc:ref(?MODULE, "message_out")), #{default => []})}
    , {ingress_channels, hoconsc:mk(hoconsc:map(id, hoconsc:ref(?MODULE, "ingress_channels")), #{default => []})}
    , {egress_channels, hoconsc:mk(hoconsc:map(id, hoconsc:ref(?MODULE, "egress_channels")), #{default => []})}
    ] ++ emqx_connector_schema_lib:ssl_fields();

fields("message_in") ->
    [ {subscribe_remote_topic, #{type => binary(), nullable => false}}
    , {local_topic, hoconsc:mk(binary(), #{default => <<"${topic}">>})}
fields("ingress_channels") ->
    %% the message may be consumed by rules, in which case 'local_topic' is not necessary
    [ {subscribe_remote_topic, hoconsc:mk(binary(), #{nullable => false})}
    , {local_topic, hoconsc:mk(binary())}
    , {subscribe_qos, hoconsc:mk(qos(), #{default => 1})}
    ] ++ common_inout_confs();

fields("message_out") ->
    [ {subscribe_local_topic, #{type => binary(), nullable => false}}
fields("egress_channels") ->
    %% the message may be sent from rules, in which case 'subscribe_local_topic' is not necessary
    [ {subscribe_local_topic, hoconsc:mk(binary())}
    , {remote_topic, hoconsc:mk(binary(), #{default => <<"${topic}">>})}
    ] ++ common_inout_confs();
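
%% A hedged sketch of configuration matching the new schema (HOCON syntax;
%% channel ids and values invented for illustration):
%%
%%   ingress_channels.pull_msgs {
%%     subscribe_remote_topic = "remote/#"
%%     local_topic = "local/${topic}"
%%     subscribe_qos = 1
%%   }
%%   egress_channels.push_msgs {
%%     subscribe_local_topic = "local/out/#"
%%     remote_topic = "remote/${topic}"
%%   }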
@ -61,9 +63,6 @@ fields("replayq") ->
    ].

common_inout_confs() ->
    [{id, #{type => binary(), nullable => false}}] ++ publish_confs().

publish_confs() ->
    [ {qos, hoconsc:mk(qos(), #{default => <<"${qos}">>})}
    , {retain, hoconsc:mk(hoconsc:union([boolean(), binary()]), #{default => <<"${retain}">>})}
    , {payload, hoconsc:mk(binary(), #{default => <<"${payload}">>})}
@ -87,6 +87,7 @@
    , ensure_stopped/1
    , status/1
    , ping/1
    , send_to_remote/2
    ]).

-export([ get_forwards/1

@ -104,11 +105,11 @@
    ]).

-type id() :: atom() | string() | pid().
-type qos() :: emqx_mqtt_types:qos().
-type qos() :: emqx_types:qos().
-type config() :: map().
-type batch() :: [emqx_connector_mqtt_msg:exp_msg()].
-type ack_ref() :: term().
-type topic() :: emqx_topic:topic().
-type topic() :: emqx_types:topic().

-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_mqtt.hrl").
@ -171,19 +172,24 @@ ping(Pid) when is_pid(Pid) ->
ping(Name) ->
    gen_statem:call(name(Name), ping).

send_to_remote(Pid, Msg) when is_pid(Pid) ->
    gen_statem:cast(Pid, {send_to_remote, Msg});
send_to_remote(Name, Msg) ->
    gen_statem:cast(name(Name), {send_to_remote, Msg}).
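
%% Illustrative use (worker name assumed): a caller pushes a message into
%% the replayq-backed egress path without waiting for a reply; the cast is
%% consumed by the 'common' state clause shown further below:
%%
%%   ok = emqx_connector_mqtt_worker:send_to_remote(my_bridge, Msg).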

%% @doc Return all forwards (local subscriptions).
-spec get_forwards(id()) -> [topic()].
get_forwards(Name) -> gen_statem:call(name(Name), get_forwards, timer:seconds(1000)).

%% @doc Return all subscriptions (subscription over mqtt connection to remote broker).
-spec get_subscriptions(id()) -> [{emqx_topic:topic(), qos()}].
-spec get_subscriptions(id()) -> [{emqx_types:topic(), qos()}].
get_subscriptions(Name) -> gen_statem:call(name(Name), get_subscriptions).

callback_mode() -> [state_functions].

%% @doc Config should be a map().
init(#{name := Name} = ConnectOpts) ->
    ?LOG(info, "starting bridge worker for ~p", [Name]),
    ?LOG(debug, "starting bridge worker for ~p", [Name]),
    erlang:process_flag(trap_exit, true),
    Queue = open_replayq(Name, maps:get(replayq, ConnectOpts, #{})),
    State = init_state(ConnectOpts),
@ -194,7 +200,6 @@ init(#{name := Name} = ConnectOpts) ->
    }}.

init_state(Opts) ->
    IfRecordMetrics = maps:get(if_record_metrics, Opts, true),
    ReconnDelayMs = maps:get(reconnect_interval, Opts, ?DEFAULT_RECONNECT_DELAY_MS),
    StartType = maps:get(start_type, Opts, manual),
    Mountpoint = maps:get(forward_mountpoint, Opts, undefined),

@ -208,7 +213,6 @@ init_state(Opts) ->
      inflight => [],
      max_inflight => MaxInflightSize,
      connection => undefined,
      if_record_metrics => IfRecordMetrics,
      name => Name}.

open_replayq(Name, QCfg) ->
@ -321,17 +325,15 @@ common(_StateName, {call, From}, get_forwards, #{connect_opts := #{forwards := F
    {keep_state_and_data, [{reply, From, Forwards}]};
common(_StateName, {call, From}, get_subscriptions, #{connection := Connection}) ->
    {keep_state_and_data, [{reply, From, maps:get(subscriptions, Connection, #{})}]};
common(_StateName, info, {deliver, _, Msg},
        State = #{replayq := Q, if_record_metrics := IfRecordMetric}) ->
common(_StateName, info, {deliver, _, Msg}, State = #{replayq := Q}) ->
    Msgs = collect([Msg]),
    bridges_metrics_inc(IfRecordMetric,
        'bridge.mqtt.message_received',
        length(Msgs)
    ),
    NewQ = replayq:append(Q, Msgs),
    {keep_state, State#{replayq => NewQ}, {next_event, internal, maybe_send}};
common(_StateName, info, {'EXIT', _, _}, State) ->
    {keep_state, State};
common(_StateName, cast, {send_to_remote, Msg}, #{replayq := Q} = State) ->
    NewQ = replayq:append(Q, [Msg]),
    {keep_state, State#{replayq => NewQ}, {next_event, internal, maybe_send}};
common(StateName, Type, Content, #{name := Name} = State) ->
    ?LOG(notice, "Bridge ~p discarded ~p type event at state ~p:~p",
        [Name, Type, StateName, Content]),
@ -401,11 +403,10 @@ do_send(#{connect_opts := #{forwards := undefined}}, _QAckRef, Batch) ->
do_send(#{inflight := Inflight,
          connection := Connection,
          mountpoint := Mountpoint,
          connect_opts := #{forwards := Forwards},
          if_record_metrics := IfRecordMetrics} = State, QAckRef, [_ | _] = Batch) ->
          connect_opts := #{forwards := Forwards}} = State, QAckRef, [_ | _] = Batch) ->
    Vars = emqx_connector_mqtt_msg:make_pub_vars(Mountpoint, Forwards),
    ExportMsg = fun(Message) ->
        bridges_metrics_inc(IfRecordMetrics, 'bridge.mqtt.message_sent'),
        emqx_metrics:inc('bridge.mqtt.message_sent_to_remote'),
        emqx_connector_mqtt_msg:to_remote_msg(Message, Vars)
    end,
    ?LOG(debug, "publish to remote broker, msg: ~p, vars: ~p", [Batch, Vars]),
@ -464,6 +465,8 @@ drop_acked_batches(Q, [#{send_ack_ref := Refs,
        All
    end.

subscribe_local_topic(undefined, _Name) ->
    ok;
subscribe_local_topic(Topic, Name) ->
    do_subscribe(Topic, Name).
@ -487,7 +490,7 @@ disconnect(#{connection := Conn} = State) when Conn =/= undefined ->
    emqx_connector_mqtt_mod:stop(Conn),
    State#{connection => undefined};
disconnect(State) ->
    State.
    State.

%% Called only when replayq needs to dump it to disk.
msg_marshaller(Bin) when is_binary(Bin) -> emqx_connector_mqtt_msg:from_binary(Bin);
@ -502,20 +505,10 @@ name(Id) -> list_to_atom(str(Id)).

register_metrics() ->
    lists:foreach(fun emqx_metrics:ensure/1,
        ['bridge.mqtt.message_sent',
         'bridge.mqtt.message_received'
        ['bridge.mqtt.message_sent_to_remote',
         'bridge.mqtt.message_received_from_remote'
        ]).

bridges_metrics_inc(true, Metric) ->
    emqx_metrics:inc(Metric);
bridges_metrics_inc(_IsRecordMetric, _Metric) ->
    ok.

bridges_metrics_inc(true, Metric, Value) ->
    emqx_metrics:inc(Metric, Value);
bridges_metrics_inc(_IsRecordMetric, _Metric, _Value) ->
    ok.

obfuscate(Map) ->
    maps:fold(fun(K, V, Acc) ->
        case is_sensitive(K) of
@ -532,4 +525,4 @@ str(A) when is_atom(A) ->
str(B) when is_binary(B) ->
    binary_to_list(B);
str(S) when is_list(S) ->
    S.
    S.
@ -27,7 +27,7 @@ defmodule EMQXDashboard.MixProject do
  defp deps do
    [
      {:emqx, in_umbrella: true, runtime: false},
      {:minirest, github: "emqx/minirest", tag: "1.2.2"}
      {:minirest, github: "emqx/minirest", tag: "1.2.4"}
    ]
  end
end
@ -13,3 +13,4 @@
{cover_enabled, true}.
{cover_opts, [verbose]}.
{cover_export_enabled, true}.
{eunit_first_files, ["test/emqx_swagger_remote_schema.erl"]}.
@ -1,3 +1,4 @@
%% -*- mode: erlang -*-
{application, emqx_dashboard,
    [{description, "EMQ X Web Dashboard"},
     {vsn, "5.0.0"}, % strict semver, bump manually!
@ -29,135 +29,143 @@
-behaviour(minirest_api).

-include("emqx_dashboard.hrl").
-include_lib("typerefl/include/types.hrl").
-import(hoconsc, [mk/2, ref/2, array/1, enum/1]).

-import(emqx_mgmt_util, [ schema/1
                        , object_schema/1
                        , object_schema/2
                        , object_array_schema/1
                        , bad_request/0
                        , properties/1
                        ]).

-export([api_spec/0]).

-export([ login/2
        , logout/2
        , users/2
        , user/2
        , change_pwd/2
        ]).
-export([api_spec/0, fields/1, paths/0, schema/1, namespace/0]).
-export([login/2, logout/2, users/2, user/2, change_pwd/2]).
-define(EMPTY(V), (V == undefined orelse V == <<>>)).

-define(ERROR_USERNAME_OR_PWD, 'ERROR_USERNAME_OR_PWD').

namespace() -> "dashboard".

api_spec() ->
    {[ login_api()
     , logout_api()
     , users_api()
     , user_api()
     , change_pwd_api()
     ],
     []}.
    emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).

login_api() ->
    AuthProps = properties([{username, string, <<"Username">>},
                            {password, string, <<"Password">>}]),
paths() -> ["/login", "/logout", "/users",
            "/users/:username", "/users/:username/change_pwd"].

    TokenProps = properties([{token, string, <<"JWT Token">>},
                             {license, object, [{edition, string, <<"License">>, [community, enterprise]}]},
                             {version, string}]),
    Metadata = #{
schema("/login") ->
    #{
        operationId => login,
        post => #{
            tags => [dashboard],
            tags => [<<"dashboard">>],
            description => <<"Dashboard Auth">>,
            'requestBody' => object_schema(AuthProps),
            summary => <<"Dashboard Auth">>,
            requestBody =>
                [
                    {username, mk(binary(),
                        #{desc => <<"The User for which to create the token.">>,
                          maxLength => 100, example => <<"admin">>})},
                    {password, mk(binary(),
                        #{desc => "password", example => "public"})}
                ],
            responses => #{
                <<"200">> =>
                    object_schema(TokenProps, <<"Dashboard Auth successfully">>),
                <<"401">> => unauthorized_request()
                200 => [
                    {token, mk(string(), #{desc => <<"JWT Token">>})},
                    {license, [{edition,
                        mk(enum([community, enterprise]), #{desc => <<"license">>,
                            example => "community"})}]},
                    {version, mk(string(), #{desc => <<"version">>, example => <<"5.0.0">>})}],
                401 => [
                    {code, mk(string(), #{example => 'ERROR_USERNAME_OR_PWD'})},
                    {message, mk(string(), #{example => "Unauthorized"})}]
            },
            security => []
        }
    },
    {"/login", Metadata, login}.

logout_api() ->
    LogoutProps = properties([{username, string, <<"Username">>}]),
    Metadata = #{
    }};
schema("/logout") ->
    #{
        operationId => logout,
        post => #{
            tags => [dashboard],
            description => <<"Dashboard Auth">>,
            'requestBody' => object_schema(LogoutProps),
            tags => [<<"dashboard">>],
            description => <<"Dashboard User logout">>,
            requestBody => [
                {username, mk(binary(),
                    #{desc => <<"The User for which to create the token.">>,
                      maxLength => 100, example => <<"admin">>})}
            ],
            responses => #{
                <<"200">> => schema(<<"Dashboard Auth successfully">>)
                200 => <<"Dashboard logout successfully">>
            }
        }
    },
    {"/logout", Metadata, logout}.

users_api() ->
    BaseProps = properties([{username, string, <<"Username">>},
                            {password, string, <<"Password">>},
                            {tag, string, <<"Tag">>}]),
    Metadata = #{
    };
schema("/users") ->
    #{
        operationId => users,
        get => #{
            tags => [dashboard],
            tags => [<<"dashboard">>],
            description => <<"Get dashboard users">>,
            responses => #{
                <<"200">> => object_array_schema(maps:without([password], BaseProps))
                200 => mk(array(ref(?MODULE, user)),
                    #{desc => "User lists"})
            }
        },
        post => #{
            tags => [dashboard],
            tags => [<<"dashboard">>],
            description => <<"Create dashboard users">>,
            'requestBody' => object_schema(BaseProps),
            requestBody => fields(user_password),
            responses => #{
                <<"200">> => schema(<<"Create Users successfully">>),
                <<"400">> => bad_request()
            }
                200 => <<"Create user successfully">>,
                400 => [{code, mk(string(), #{example => 'CREATE_FAIL'})},
                    {message, mk(string(), #{example => "Create user failed"})}]}
        }
    },
    {"/users", Metadata, users}.
    };
user_api() ->
    Metadata = #{
        delete => #{
            tags => [dashboard],
            description => <<"Delete dashboard users">>,
            parameters => parameters(),
            responses => #{
                <<"200">> => schema(<<"Delete User successfully">>),
                <<"400">> => bad_request()
            }
        },
schema("/users/:username") ->
    #{
        operationId => user,
        put => #{
            tags => [dashboard],
            tags => [<<"dashboard">>],
            description => <<"Update dashboard users">>,
            parameters => parameters(),
            'requestBody' => object_schema(properties([{tag, string, <<"Tag">>}])),
            parameters => [{username, mk(binary(),
                #{in => path, example => <<"admin">>})}],
            requestBody => [{tag, mk(binary(), #{desc => <<"Tag">>})}],
            responses => #{
                <<"200">> => schema(<<"Update Users successfully">>),
                <<"400">> => bad_request()
            }
        }
    },
    {"/users/:username", Metadata, user}.

change_pwd_api() ->
    Metadata = #{
                200 => <<"Update User successfully">>,
                400 => [{code, mk(string(), #{example => 'UPDATE_FAIL'})},
                    {message, mk(string(), #{example => "Update Failed unknown"})}]}},
        delete => #{
            tags => [<<"dashboard">>],
            description => <<"Delete dashboard users">>,
            parameters => [{username, mk(binary(),
                #{in => path, example => <<"admin">>})}],
            responses => #{
                200 => <<"Delete User successfully">>,
                400 => [
                    {code, mk(string(), #{example => 'CANNOT_DELETE_ADMIN'})},
                    {message, mk(string(), #{example => "CANNOT DELETE ADMIN"})}]}}
    };
schema("/users/:username/change_pwd") ->
    #{
        operationId => change_pwd,
        put => #{
            tags => [dashboard],
            tags => [<<"dashboard">>],
            description => <<"Update dashboard users password">>,
            parameters => parameters(),
            'requestBody' => object_schema(properties([old_pwd, new_pwd])),
            parameters => [{username, mk(binary(),
                #{in => path, required => true, example => <<"admin">>})}],
            requestBody => [
                {old_pwd, mk(binary(), #{required => true})},
                {new_pwd, mk(binary(), #{required => true})}
            ],
            responses => #{
                <<"200">> => schema(<<"Update Users password successfully">>),
                <<"400">> => bad_request()
            }
        }
    },
    {"/users/:username/change_pwd", Metadata, change_pwd}.
                200 => <<"Update user password successfully">>,
                400 => [
                    {code, mk(string(), #{example => 'UPDATE_FAIL'})},
                    {message, mk(string(), #{example => "Failed Reason"})}]}}
    }.

fields(user) ->
    [
        {tag,
            mk(binary(),
                #{desc => <<"tag">>, example => "administrator"})},
        {username,
            mk(binary(),
                #{desc => <<"username">>, example => "emqx"})}
    ];
fields(user_password) ->
    fields(user) ++ [{password, mk(binary(), #{desc => "Password"})}].

login(post, #{body := Params}) ->
    Username = maps:get(<<"username">>, Params),
@ -171,7 +179,7 @@ login(post, #{body := Params}) ->
    end.

logout(_, #{body := #{<<"username">> := Username},
            headers := #{<<"authorization">> := <<"Bearer ", Token/binary>>}}) ->
        headers := #{<<"authorization">> := <<"Bearer ", Token/binary>>}}) ->
    case emqx_dashboard_admin:destroy_token_by_username(Username, Token) of
        ok ->
            200;
@ -187,9 +195,9 @@ users(post, #{body := Params}) ->
    Username = maps:get(<<"username">>, Params),
    Password = maps:get(<<"password">>, Params),
    case ?EMPTY(Username) orelse ?EMPTY(Password) of
        true ->
        true ->
            {400, #{code => <<"CREATE_USER_FAIL">>,
                    message => <<"Username or password undefined">>}};
                    message => <<"Username or password undefined">>}};
        false ->
            case emqx_dashboard_admin:add_user(Username, Password, Tag) of
                ok -> {200};
@ -208,8 +216,8 @@ user(put, #{bindings := #{username := Username}, body := Params}) ->

user(delete, #{bindings := #{username := Username}}) ->
    case Username == <<"admin">> of
        true -> {400, #{code => <<"CONNOT_DELETE_ADMIN">>,
                        message => <<"Cannot delete admin">>}};
        true -> {400, #{code => <<"CANNOT_DELETE_ADMIN">>,
                        message => <<"Cannot delete admin">>}};
        false ->
            _ = emqx_dashboard_admin:remove_user(Username),
            {200}
@ -226,20 +234,3 @@ change_pwd(put, #{bindings := #{username := Username}, body := Params}) ->

row(#mqtt_admin{username = Username, tags = Tag}) ->
    #{username => Username, tag => Tag}.

parameters() ->
    [#{
        name => username,
        in => path,
        required => true,
        schema => #{type => string},
        example => <<"admin">>
    }].

unauthorized_request() ->
    object_schema(
        properties([{message, string},
                    {code, string, <<"Resp Code">>, [?ERROR_USERNAME_OR_PWD]}
                   ]),
        <<"Unauthorized">>
    ).
@ -18,8 +18,10 @@
-include_lib("typerefl/include/types.hrl").

-export([ roots/0
        , fields/1]).
        , fields/1
        , namespace/0]).

namespace() -> <<"dashboard">>.
roots() -> ["emqx_dashboard"].

fields("emqx_dashboard") ->
@ -0,0 +1,345 @@
-module(emqx_dashboard_swagger).

-include_lib("typerefl/include/types.hrl").
-include_lib("hocon/include/hoconsc.hrl").

%% API
-export([spec/1, spec/2]).
-export([translate_req/2]).

-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.

-define(METHODS, [get, post, put, head, delete, patch, options, trace]).

-define(DEFAULT_FIELDS, [example, allowReserved, style,
    explode, maxLength, allowEmptyValue, deprecated, minimum, maximum]).

-define(DEFAULT_FILTER, #{filter => fun ?MODULE:translate_req/2}).

-define(INIT_SCHEMA, #{fields => #{}, translations => #{}, validations => [], namespace => undefined}).

-define(TO_REF(_N_, _F_), iolist_to_binary([to_bin(_N_), ".", to_bin(_F_)])).
-define(TO_COMPONENTS(_M_, _F_), iolist_to_binary([<<"#/components/schemas/">>, ?TO_REF(namespace(_M_), _F_)])).
%% @equiv spec(Module, #{check_schema => false})
-spec(spec(module()) ->
    {list({Path, Specs, OperationId, Options}), list(Component)} when
        Path :: string()|binary(),
        Specs :: map(),
        OperationId :: atom(),
        Options :: #{filter => fun((map(),
            #{module => module(), path => string(), method => atom()}) -> map())},
        Component :: map()).
spec(Module) -> spec(Module, #{check_schema => false}).

-spec(spec(module(), #{check_schema => boolean()}) ->
    {list({Path, Specs, OperationId, Options}), list(Component)} when
        Path :: string()|binary(),
        Specs :: map(),
        OperationId :: atom(),
        Options :: #{filter => fun((map(),
            #{module => module(), path => string(), method => atom()}) -> map())},
        Component :: map()).
spec(Module, Options) ->
    Paths = apply(Module, paths, []),
    {ApiSpec, AllRefs} =
        lists:foldl(fun(Path, {AllAcc, AllRefsAcc}) ->
            {OperationId, Specs, Refs} = parse_spec_ref(Module, Path),
            CheckSchema = support_check_schema(Options),
            {[{Path, Specs, OperationId, CheckSchema} | AllAcc],
                Refs ++ AllRefsAcc}
        end, {[], []}, Paths),
    {ApiSpec, components(lists:usort(AllRefs))}.
-spec(translate_req(#{binding => list(), query_string => list(), body => map()},
    #{module => module(), path => string(), method => atom()}) ->
    {ok, #{binding => list(), query_string => list(), body => map()}}|
    {400, 'BAD_REQUEST', binary()}).
translate_req(Request, #{module := Module, path := Path, method := Method}) ->
    #{Method := Spec} = apply(Module, schema, [Path]),
    try
        Params = maps:get(parameters, Spec, []),
        Body = maps:get(requestBody, Spec, []),
        {Bindings, QueryStr} = check_parameters(Request, Params),
        NewBody = check_requestBody(Request, Body, Module, hoconsc:is_schema(Body)),
        {ok, Request#{bindings => Bindings, query_string => QueryStr, body => NewBody}}
    catch throw:Error ->
        {_, [{validation_error, ValidErr}]} = Error,
        #{path := Key, reason := Reason} = ValidErr,
        {400, 'BAD_REQUEST', iolist_to_binary(io_lib:format("~s : ~p", [Key, Reason]))}
    end.
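
%% Illustrative call (module name assumed): minirest runs this as a filter
%% before the handler, so bindings, query string and body arrive already
%% checked and converted against the hocon fields declared in schema/1:
%%
%%   translate_req(#{bindings => #{username => <<"admin">>},
%%                   query_string => #{}, body => #{}},
%%                 #{module => my_api_module, path => "/users/:username",
%%                   method => put}).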

support_check_schema(#{check_schema := true}) -> ?DEFAULT_FILTER;
support_check_schema(#{check_schema := Func}) when is_function(Func, 2) -> #{filter => Func};
support_check_schema(_) -> #{filter => undefined}.
parse_spec_ref(Module, Path) ->
    Schema =
        try
            erlang:apply(Module, schema, [Path])
        catch error: Reason -> %% better error message
            throw({error, #{mfa => {Module, schema, [Path]}, reason => Reason}})
        end,
    {Specs, Refs} = maps:fold(fun(Method, Meta, {Acc, RefsAcc}) ->
        (not lists:member(Method, ?METHODS))
            andalso throw({error, #{module => Module, path => Path, method => Method}}),
        {Spec, SubRefs} = meta_to_spec(Meta, Module),
        {Acc#{Method => Spec}, SubRefs ++ RefsAcc}
        end, {#{}, []},
        maps:without([operationId], Schema)),
    {maps:get(operationId, Schema), Specs, Refs}.
check_parameters(Request, Spec) ->
    #{bindings := Bindings, query_string := QueryStr} = Request,
    BindingsBin = maps:fold(fun(Key, Value, Acc) -> Acc#{atom_to_binary(Key) => Value} end, #{}, Bindings),
    check_parameter(Spec, BindingsBin, QueryStr, #{}, #{}).

check_parameter([], _Bindings, _QueryStr, NewBindings, NewQueryStr) -> {NewBindings, NewQueryStr};
check_parameter([{Name, Type} | Spec], Bindings, QueryStr, BindingsAcc, QueryStrAcc) ->
    Schema = ?INIT_SCHEMA#{roots => [{Name, Type}]},
    case hocon_schema:field_schema(Type, in) of
        path ->
            NewBindings = hocon_schema:check_plain(Schema, Bindings, #{atom_key => true, override_env => false}),
            NewBindingsAcc = maps:merge(BindingsAcc, NewBindings),
            check_parameter(Spec, Bindings, QueryStr, NewBindingsAcc, QueryStrAcc);
        query ->
            NewQueryStr = hocon_schema:check_plain(Schema, QueryStr, #{override_env => false}),
            NewQueryStrAcc = maps:merge(QueryStrAcc, NewQueryStr),
            check_parameter(Spec, Bindings, QueryStr, BindingsAcc, NewQueryStrAcc)
    end.
check_requestBody(#{body := Body}, Schema, Module, true) ->
    Type0 = hocon_schema:field_schema(Schema, type),
    Type =
        case Type0 of
            ?REF(StructName) -> ?R_REF(Module, StructName);
            _ -> Type0
        end,
    NewSchema = ?INIT_SCHEMA#{roots => [{root, Type}]},
    #{<<"root">> := NewBody} = hocon_schema:check_plain(NewSchema, #{<<"root">> => Body}, #{override_env => false}),
    NewBody;
%% TODO: nested object checks are not supported yet, please use ref!
%% RequestBody = [ {per_page, mk(integer(), #{})},
%%                 {nest_object, [
%%                     {good_nest_1, mk(integer(), #{})},
%%                     {good_nest_2, mk(ref(?MODULE, good_ref), #{})}
%%                 ]}
%%               ]
check_requestBody(#{body := Body}, Spec, _Module, false) ->
    lists:foldl(fun({Name, Type}, Acc) ->
        Schema = ?INIT_SCHEMA#{roots => [{Name, Type}]},
        maps:merge(Acc, hocon_schema:check_plain(Schema, Body))
        end, #{}, Spec).
%% tags, description, summary, security, deprecated
meta_to_spec(Meta, Module) ->
    {Params, Refs1} = parameters(maps:get(parameters, Meta, []), Module),
    {RequestBody, Refs2} = requestBody(maps:get(requestBody, Meta, []), Module),
    {Responses, Refs3} = responses(maps:get(responses, Meta, #{}), Module),
    {
        to_spec(Meta, Params, RequestBody, Responses),
        lists:usort(Refs1 ++ Refs2 ++ Refs3)
    }.

to_spec(Meta, Params, [], Responses) ->
    Spec = maps:without([parameters, requestBody, responses], Meta),
    Spec#{parameters => Params, responses => Responses};
to_spec(Meta, Params, RequestBody, Responses) ->
    Spec = to_spec(Meta, Params, [], Responses),
    maps:put(requestBody, RequestBody, Spec).
parameters(Params, Module) ->
    {SpecList, AllRefs} =
        lists:foldl(fun({Name, Type}, {Acc, RefsAcc}) ->
            In = hocon_schema:field_schema(Type, in),
            In =:= undefined andalso throw({error, <<"missing in:path/query field in parameters">>}),
            Nullable = hocon_schema:field_schema(Type, nullable),
            Default = hocon_schema:field_schema(Type, default),
            HoconType = hocon_schema:field_schema(Type, type),
            Meta = init_meta(Nullable, Default),
            {ParamType, Refs} = hocon_schema_to_spec(HoconType, Module),
            Spec0 = init_prop([required | ?DEFAULT_FIELDS],
                #{schema => maps:merge(ParamType, Meta), name => Name, in => In}, Type),
            Spec1 = trans_required(Spec0, Nullable, In),
            Spec2 = trans_desc(Spec1, Type),
            {[Spec2 | Acc], Refs ++ RefsAcc}
        end, {[], []}, Params),
    {lists:reverse(SpecList), AllRefs}.
init_meta(Nullable, Default) ->
    Init =
        case Nullable of
            true -> #{nullable => true};
            _ -> #{}
        end,
    case Default =:= undefined of
        true -> Init;
        false -> Init#{default => Default}
    end.

init_prop(Keys, Init, Type) ->
    lists:foldl(fun(Key, Acc) ->
        case hocon_schema:field_schema(Type, Key) of
            undefined -> Acc;
            Schema -> Acc#{Key => to_bin(Schema)}
        end
        end, Init, Keys).
trans_required(Spec, false, _) -> Spec#{required => true};
trans_required(Spec, _, path) -> Spec#{required => true};
trans_required(Spec, _, _) -> Spec.

trans_desc(Spec, Hocon) ->
    case hocon_schema:field_schema(Hocon, desc) of
        undefined -> Spec;
        Desc -> Spec#{description => Desc}
    end.
requestBody([], _Module) -> {[], []};
requestBody(Schema, Module) ->
    {Props, Refs} =
        case hoconsc:is_schema(Schema) of
            true ->
                HoconSchema = hocon_schema:field_schema(Schema, type),
                hocon_schema_to_spec(HoconSchema, Module);
            false -> parse_object(Schema, Module)
        end,
    {#{<<"content">> => #{<<"application/json">> => #{<<"schema">> => Props}}},
        Refs}.

responses(Responses, Module) ->
    {Spec, Refs, _} = maps:fold(fun response/3, {#{}, [], Module}, Responses),
    {Spec, Refs}.

response(Status, Bin, {Acc, RefsAcc, Module}) when is_binary(Bin) ->
    {Acc#{integer_to_binary(Status) => #{description => Bin}}, RefsAcc, Module};
response(Status, ?REF(StructName), {Acc, RefsAcc, Module}) ->
    response(Status, ?R_REF(Module, StructName), {Acc, RefsAcc, Module});
response(Status, ?R_REF(_Mod, _Name) = RRef, {Acc, RefsAcc, Module}) ->
    {Spec, Refs} = hocon_schema_to_spec(RRef, Module),
    Content = #{<<"application/json">> => #{<<"schema">> => Spec}},
    {Acc#{integer_to_binary(Status) => #{<<"content">> => Content}}, Refs ++ RefsAcc, Module};
response(Status, Schema, {Acc, RefsAcc, Module}) ->
    case hoconsc:is_schema(Schema) of
        true ->
            Hocon = hocon_schema:field_schema(Schema, type),
            {Spec, Refs} = hocon_schema_to_spec(Hocon, Module),
            Init = trans_desc(#{}, Schema),
            Content = #{<<"application/json">> => #{<<"schema">> => Spec}},
            {Acc#{integer_to_binary(Status) => Init#{<<"content">> => Content}}, Refs ++ RefsAcc, Module};
        false ->
            {Props, Refs} = parse_object(Schema, Module),
            Content = #{<<"content">> => #{<<"application/json">> => #{<<"schema">> => Props}}},
            {Acc#{integer_to_binary(Status) => Content}, Refs ++ RefsAcc, Module}
    end.
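
%% For instance (taken from the clauses above): a plain binary response
%% such as responses => #{200 => <<"ok">>} is rendered as
%%
%%   #{<<"200">> => #{description => <<"ok">>}}
%%
%% while a ?R_REF(Module, Name) response becomes a "$ref" pointing into
%% #/components/schemas/.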

components(Refs) ->
    lists:sort(maps:fold(fun(K, V, Acc) -> [#{K => V} | Acc] end, [],
        components(Refs, #{}, []))).

components([], SpecAcc, []) -> SpecAcc;
components([], SpecAcc, SubRefAcc) -> components(SubRefAcc, SpecAcc, []);
components([{Module, Field} | Refs], SpecAcc, SubRefsAcc) ->
    Props = apply(Module, fields, [Field]),
    Namespace = namespace(Module),
    {Object, SubRefs} = parse_object(Props, Module),
    NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Object},
    components(Refs, NewSpecAcc, SubRefs ++ SubRefsAcc).

namespace(Module) ->
    case hocon_schema:namespace(Module) of
        undefined -> Module;
        NameSpace -> NameSpace
    end.

hocon_schema_to_spec(?R_REF(Module, StructName), _LocalModule) ->
    {#{<<"$ref">> => ?TO_COMPONENTS(Module, StructName)},
        [{Module, StructName}]};
hocon_schema_to_spec(?REF(StructName), LocalModule) ->
    {#{<<"$ref">> => ?TO_COMPONENTS(LocalModule, StructName)},
        [{LocalModule, StructName}]};
hocon_schema_to_spec(Type, _LocalModule) when ?IS_TYPEREFL(Type) ->
    {typename_to_spec(typerefl:name(Type)), []};
hocon_schema_to_spec(?ARRAY(Item), LocalModule) ->
    {Schema, Refs} = hocon_schema_to_spec(Item, LocalModule),
    {#{type => array, items => Schema}, Refs};
hocon_schema_to_spec(?ENUM(Items), _LocalModule) ->
    {#{type => string, enum => Items}, []};
hocon_schema_to_spec(?UNION(Types), LocalModule) ->
    {OneOf, Refs} = lists:foldl(fun(Type, {Acc, RefsAcc}) ->
        {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule),
        {[Schema | Acc], SubRefs ++ RefsAcc}
    end, {[], []}, Types),
    {#{<<"oneOf">> => OneOf}, Refs};
hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
    {#{type => string, enum => [Atom]}, []}.
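
%% Example (consistent with the ?UNION clause above and the expectations in
%% the parameter test suite below):
%%
%%   hocon_schema_to_spec(hoconsc:union([range(30, 60), infinity]), ?MODULE)
%%   %% => {#{<<"oneOf">> => [#{enum => [infinity], type => string},
%%   %%                       #{example => 30, maximum => 60, minimum => 30,
%%   %%                         type => integer}]}, []}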

typename_to_spec("boolean()") -> #{type => boolean, example => true};
typename_to_spec("binary()") -> #{type => string, example => <<"binary example">>};
typename_to_spec("float()") -> #{type => number, example => 3.14159};
typename_to_spec("integer()") -> #{type => integer, example => 100};
typename_to_spec("number()") -> #{type => number, example => 42};
typename_to_spec("string()") -> #{type => string, example => <<"string example">>};
typename_to_spec("atom()") -> #{type => string, example => atom};
typename_to_spec("duration()") -> #{type => string, example => <<"12m">>};
typename_to_spec("duration_s()") -> #{type => string, example => <<"1h">>};
typename_to_spec("duration_ms()") -> #{type => string, example => <<"32s">>};
typename_to_spec("percent()") -> #{type => number, example => <<"12%">>};
typename_to_spec("file()") -> #{type => string, example => <<"/path/to/file">>};
typename_to_spec("ip_port()") -> #{type => string, example => <<"127.0.0.1:80">>};
typename_to_spec(Name) ->
    case string:split(Name, "..") of
        [MinStr, MaxStr] -> %% 1..10
            {Min, []} = string:to_integer(MinStr),
            {Max, []} = string:to_integer(MaxStr),
            #{type => integer, example => Min, minimum => Min, maximum => Max};
        _ -> %% Module:Type().
            case string:split(Name, ":") of
                [_Module, Type] -> typename_to_spec(Type);
                _ -> throw({error, #{msg => <<"Unsupported Type">>, type => Name}})
            end
    end.
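
%% Two concrete mappings, read straight off the clauses above:
%%
%%   typename_to_spec("duration_s()") %% => #{type => string, example => <<"1h">>}
%%   typename_to_spec("1..10")        %% => #{type => integer, example => 1,
%%                                    %%      minimum => 1, maximum => 10}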

to_bin(List) when is_list(List) -> list_to_binary(List);
to_bin(B) when is_boolean(B) -> B;
to_bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8);
to_bin(X) -> X.
parse_object(PropList = [_|_], Module) when is_list(PropList) ->
    {Props, Required, Refs} =
        lists:foldl(fun({Name, Hocon}, {Acc, RequiredAcc, RefsAcc}) ->
            NameBin = to_bin(Name),
            case hoconsc:is_schema(Hocon) of
                true ->
                    HoconType = hocon_schema:field_schema(Hocon, type),
                    Init0 = init_prop([default | ?DEFAULT_FIELDS], #{}, Hocon),
                    Init = trans_desc(Init0, Hocon),
                    {Prop, Refs1} = hocon_schema_to_spec(HoconType, Module),
                    NewRequiredAcc =
                        case is_required(Hocon) of
                            true -> [NameBin | RequiredAcc];
                            false -> RequiredAcc
                        end,
                    {[{NameBin, maps:merge(Prop, Init)} | Acc], NewRequiredAcc, Refs1 ++ RefsAcc};
                false ->
                    {SubObject, SubRefs} = parse_object(Hocon, Module),
                    {[{NameBin, SubObject} | Acc], RequiredAcc, SubRefs ++ RefsAcc}
            end
        end, {[], [], []}, PropList),
    Object = #{<<"type">> => object, <<"properties">> => lists:reverse(Props)},
    case Required of
        [] -> {Object, Refs};
        _ -> {maps:put(required, Required, Object), Refs}
    end;
parse_object(Other, Module) ->
    erlang:throw({error,
        #{msg => <<"Object only supports non-empty proplists">>,
          args => Other, module => Module}}).

is_required(Hocon) ->
    hocon_schema:field_schema(Hocon, required) =:= true orelse
        hocon_schema:field_schema(Hocon, nullable) =:= false.
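
%% Illustrative (field list assumed): parse_object/2 turns a proplist of
%% hocon fields into an OpenAPI object, collecting the required list from
%% nullable => false / required => true fields:
%%
%%   parse_object([{name, mk(binary(), #{nullable => false})}], ?MODULE)
%%   %% => {#{<<"type">> => object,
%%   %%       <<"properties">> =>
%%   %%           [{<<"name">>, #{type => string, example => <<"binary example">>}}],
%%   %%       required => [<<"name">>]},
%%   %%     []}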
@ -0,0 +1,13 @@
%%%-------------------------------------------------------------------
%%% @author zhongwen
%%% @copyright (C) 2021, <COMPANY>
%%% @doc
%%%
%%% @end
%%% Created : 22. Sep 2021 13:38
%%%-------------------------------------------------------------------
-module(emqx_swagger_util).
-author("zhongwen").

%% API
-export([]).
@ -0,0 +1,267 @@
-module(emqx_swagger_parameter_SUITE).
-behaviour(minirest_api).
-behaviour(hocon_schema).

%% API
-export([paths/0, api_spec/0, schema/1]).
-export([t_in_path/1, t_in_query/1, t_in_mix/1, t_without_in/1]).
-export([t_require/1, t_nullable/1, t_method/1, t_api_spec/1]).
-export([t_in_path_trans/1, t_in_query_trans/1, t_in_mix_trans/1]).
-export([t_in_path_trans_error/1, t_in_query_trans_error/1, t_in_mix_trans_error/1]).
-export([all/0, suite/0, groups/0]).

-include_lib("eunit/include/eunit.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-import(hoconsc, [mk/2]).

-define(METHODS, [get, post, put, head, delete, patch, options, trace]).

all() -> [{group, spec}, {group, validation}].
suite() -> [{timetrap, {minutes, 1}}].
groups() -> [
    {spec, [parallel], [t_api_spec, t_in_path, t_in_query, t_in_mix,
        t_without_in, t_require, t_nullable, t_method]},
    {validation, [parallel], [t_in_path_trans, t_in_query_trans, t_in_mix_trans,
        t_in_path_trans_error, t_in_query_trans_error, t_in_mix_trans_error]}
].

t_in_path(_Config) ->
    Expect =
        [#{description => <<"Indicates which sorts of issues to return">>,
            example => <<"all">>, in => path, name => filter,
            required => true,
            schema => #{enum => [assigned, created, mentioned, all], type => string}}
        ],
    validate("/test/in/:filter", Expect),
    ok.

t_in_query(_Config) ->
    Expect =
        [#{description => <<"results per page (max 100)">>,
            example => 1, in => query, name => per_page,
            schema => #{example => 1, maximum => 100, minimum => 1, type => integer}}],
    validate("/test/in/query", Expect),
    ok.

t_in_mix(_Config) ->
    Expect =
        [#{description => <<"Indicates which sorts of issues to return">>,
            example => <<"all">>, in => query, name => filter,
            schema => #{enum => [assigned, created, mentioned, all], type => string}},
        #{description => <<"Indicates the state of the issues to return.">>,
            example => <<"12m">>, in => path, name => state, required => true,
            schema => #{example => <<"1h">>, type => string}},
        #{example => 10, in => query, name => per_page, required => false,
            schema => #{default => 5, example => 1, maximum => 50, minimum => 1, type => integer}},
        #{in => query, name => is_admin, schema => #{example => true, type => boolean}},
        #{in => query, name => timeout,
            schema => #{<<"oneOf">> => [#{enum => [infinity], type => string},
                #{example => 30, maximum => 60, minimum => 30, type => integer}]}}],
    ExpectMeta = #{
        tags => [tags, good],
        description => <<"good description">>,
        summary => <<"good summary">>,
        security => [],
        deprecated => true,
        responses => #{<<"200">> => #{description => <<"ok">>}}},
    GotSpec = validate("/test/in/mix/:state", Expect),
    ?assertEqual(ExpectMeta, maps:without([parameters], maps:get(post, GotSpec))),
    ok.

t_without_in(_Config) ->
    ?assertThrow({error, <<"missing in:path/query field in parameters">>},
        emqx_dashboard_swagger:parse_spec_ref(?MODULE, "/test/without/in")),
    ok.

t_require(_Config) ->
    ExpectSpec = [#{
        in => query, name => userid, required => false,
        schema => #{example => <<"binary example">>, type => string}}],
    validate("/required/false", ExpectSpec),
    ok.

t_nullable(_Config) ->
    NullableFalse = [#{in => query, name => userid, required => true,
        schema => #{example => <<"binary example">>, type => string}}],
    NullableTrue = [#{in => query, name => userid,
        schema => #{example => <<"binary example">>, type => string,
            nullable => true}}],
    validate("/nullable/false", NullableFalse),
    validate("/nullable/true", NullableTrue),
    ok.

t_method(_Config) ->
    PathOk = "/method/ok",
    PathError = "/method/error",
    {test, Spec, []} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, PathOk),
    ?assertEqual(lists:sort(?METHODS), lists:sort(maps:keys(Spec))),
    ?assertThrow({error, #{module := ?MODULE, path := PathError, method := bar}},
        emqx_dashboard_swagger:parse_spec_ref(?MODULE, PathError)),
    ok.

t_in_path_trans(_Config) ->
    Path = "/test/in/:filter",
    Bindings = #{filter => <<"created">>},
    Expect = {ok, #{bindings => #{filter => created},
        body => #{}, query_string => #{}}},
    ?assertEqual(Expect, trans_parameters(Path, Bindings, #{})),
    ok.

t_in_query_trans(_Config) ->
    Path = "/test/in/query",
    Expect = {ok, #{bindings => #{}, body => #{},
        query_string => #{<<"per_page">> => 100}}},
    ?assertEqual(Expect, trans_parameters(Path, #{}, #{<<"per_page">> => 100})),
    ok.

t_in_mix_trans(_Config) ->
    Path = "/test/in/mix/:state",
    Bindings = #{
        state => <<"12m">>,
        per_page => <<"1">>
    },
    Query = #{
        <<"filter">> => <<"created">>,
        <<"is_admin">> => true,
        <<"timeout">> => <<"34">>
    },
    Expect = {ok,
        #{body => #{},
            bindings => #{state => 720},
            query_string => #{<<"filter">> => created, <<"is_admin">> => true, <<"per_page">> => 5, <<"timeout">> => 34}}},
    ?assertEqual(Expect, trans_parameters(Path, Bindings, Query)),
    ok.

t_in_path_trans_error(_Config) ->
    Path = "/test/in/:filter",
    Bindings = #{filter => <<"created1">>},
    Expect = {400, 'BAD_REQUEST', <<"filter : unable_to_convert_to_enum_symbol">>},
    ?assertEqual(Expect, trans_parameters(Path, Bindings, #{})),
    ok.

t_in_query_trans_error(_Config) ->
    Path = "/test/in/query",
    {400, 'BAD_REQUEST', Reason} = trans_parameters(Path, #{}, #{<<"per_page">> => 101}),
    ?assertNotEqual(nomatch, binary:match(Reason, [<<"per_page">>])),
    ok.

t_in_mix_trans_error(_Config) ->
    Path = "/test/in/mix/:state",
    Bindings = #{
        state => <<"1d2m">>,
        per_page => <<"1">>
    },
    Query = #{
        <<"filter">> => <<"cdreated">>,
        <<"is_admin">> => true,
        <<"timeout">> => <<"34">>
    },
    Expect = {400, 'BAD_REQUEST', <<"filter : unable_to_convert_to_enum_symbol">>},
    ?assertEqual(Expect, trans_parameters(Path, Bindings, Query)),
    ok.

t_api_spec(_Config) ->
    {Spec, _Components} = emqx_dashboard_swagger:spec(?MODULE),
    Filter = fun(V, S) -> lists:all(fun({_, _, _, #{filter := Filter}}) -> Filter =:= V end, S) end,
    ?assertEqual(true, Filter(undefined, Spec)),
    {Spec1, _Components1} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}),
    ?assertEqual(true, Filter(fun emqx_dashboard_swagger:translate_req/2, Spec1)),
    {Spec2, _Components2} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => fun emqx_dashboard_swagger:translate_req/2}),
    ?assertEqual(true, Filter(fun emqx_dashboard_swagger:translate_req/2, Spec2)),
    ok.
validate(Path, ExpectParams) ->
    {OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
    ?assertEqual(test, OperationId),
    Params = maps:get(parameters, maps:get(post, Spec)),
    ?assertEqual(ExpectParams, Params),
    ?assertEqual([], Refs),
    Spec.

trans_parameters(Path, Bindings, QueryStr) ->
    Meta = #{module => ?MODULE, method => post, path => Path},
    Request = #{bindings => Bindings, query_string => QueryStr, body => #{}},
    emqx_dashboard_swagger:translate_req(Request, Meta).

api_spec() -> emqx_dashboard_swagger:spec(?MODULE).

paths() -> ["/test/in/:filter", "/test/in/query", "/test/in/mix/:state",
    "/required/false", "/nullable/false", "/nullable/true", "/method/ok"].
schema("/test/in/:filter") ->
|
||||
#{
|
||||
operationId => test,
|
||||
post => #{
|
||||
parameters => [
|
||||
{filter,
|
||||
mk(hoconsc:enum([assigned, created, mentioned, all]),
|
||||
#{in => path, desc => <<"Indicates which sorts of issues to return">>, example => "all"})}
|
||||
],
|
||||
responses => #{200 => <<"ok">>}
|
||||
}
|
||||
};
|
||||
schema("/test/in/query") ->
|
||||
#{
|
||||
operationId => test,
|
||||
post => #{
|
||||
parameters => [
|
||||
{per_page,
|
||||
mk(range(1, 100),
|
||||
#{in => query, desc => <<"results per page (max 100)">>, example => 1})}
|
||||
],
|
||||
responses => #{200 => <<"ok">>}
|
||||
}
|
||||
};
|
||||
schema("/test/in/mix/:state") ->
|
||||
#{
|
||||
operationId => test,
|
||||
post => #{
|
||||
tags => [tags, good],
|
||||
description => <<"good description">>,
|
||||
summary => <<"good summary">>,
|
||||
security => [],
|
||||
deprecated => true,
|
||||
parameters => [
|
||||
{filter, hoconsc:mk(hoconsc:enum([assigned, created, mentioned, all]),
|
||||
#{in => query, desc => <<"Indicates which sorts of issues to return">>, example => "all"})},
|
||||
{state, mk(emqx_schema:duration_s(),
|
||||
#{in => path, required => true, example => "12m", desc => <<"Indicates the state of the issues to return.">>})},
|
||||
{per_page, mk(range(1, 50),
|
||||
#{in => query, required => false, example => 10, default => 5})},
|
||||
{is_admin, mk(boolean(), #{in => query})},
|
||||
{timeout, mk(hoconsc:union([range(30, 60), infinity]), #{in => query})}
|
||||
],
|
||||
responses => #{200 => <<"ok">>}
|
||||
}
|
||||
};
|
||||
schema("/test/without/in") ->
|
||||
#{
|
||||
operationId => test,
|
||||
post => #{
|
||||
parameters => [
|
||||
{'x-request-id', mk(binary(), #{})}
|
||||
],
|
||||
responses => #{200 => <<"ok">>}
|
||||
}
|
||||
};
|
||||
schema("/required/false") ->
|
||||
to_schema([{'userid', mk(binary(), #{in => query, required => false})}]);
|
||||
schema("/nullable/false") ->
|
||||
to_schema([{'userid', mk(binary(), #{in => query, nullable => false})}]);
|
||||
schema("/nullable/true") ->
|
||||
to_schema([{'userid', mk(binary(), #{in => query, nullable => true})}]);
|
||||
schema("/method/ok") ->
|
||||
Response = #{responses => #{200 => <<"ok">>}},
|
||||
lists:foldl(fun(Method, Acc) -> Acc#{Method => Response} end,
|
||||
#{operationId => test}, ?METHODS);
|
||||
schema("/method/error") ->
|
||||
#{operationId => test, bar => #{200 => <<"ok">>}}.
|
||||
to_schema(Params) ->
|
||||
#{
|
||||
operationId => test,
|
||||
post => #{
|
||||
parameters => Params,
|
||||
responses => #{200 => <<"ok">>}
|
||||
}
|
||||
}.
|