diff --git a/.ci/build_packages/tests.sh b/.ci/build_packages/tests.sh index da0ec7362..a0f85fa22 100755 --- a/.ci/build_packages/tests.sh +++ b/.ci/build_packages/tests.sh @@ -53,7 +53,7 @@ emqx_test(){ exit 1 fi IDLE_TIME=0 - while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do + while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do if [ $IDLE_TIME -gt 10 ] then echo "emqx running error" @@ -155,7 +155,7 @@ EOF exit 1 fi IDLE_TIME=0 - while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do + while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do if [ $IDLE_TIME -gt 10 ] then echo "emqx running error" @@ -184,7 +184,7 @@ EOF exit 1 fi IDLE_TIME=0 - while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do + while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do if [ $IDLE_TIME -gt 10 ] then echo "emqx service error" diff --git a/.ci/docker-compose-file/Makefile.local b/.ci/docker-compose-file/Makefile.local new file mode 100644 index 000000000..d5ef99d66 --- /dev/null +++ b/.ci/docker-compose-file/Makefile.local @@ -0,0 +1,49 @@ +.PHONY: help up down ct ct-all bash run + +define usage +make -f .ci/docker-compose-file/Makefile.local up +make -f .ci/docker-compose-file/Makefile.local ct CONTAINER=erlang24 SUITE=apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl +make -f .ci/docker-compose-file/Makefile.local down +endef +export usage + +help: + @echo "$$usage" + +up: + env \ + MYSQL_TAG=8 \ + REDIS_TAG=6 \ + MONGO_TAG=4 \ + PGSQL_TAG=13 \ + LDAP_TAG=2.4.50 \ + docker-compose \ + -f .ci/docker-compose-file/docker-compose.yaml \ + -f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-mongo-single-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \ + up -d --build + +down: + docker-compose \ + -f .ci/docker-compose-file/docker-compose.yaml \ + -f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-mongo-single-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \ + down + +ct: + docker exec -i "$(CONTAINER)" bash -c "rebar3 ct --name 'test@127.0.0.1' -v --suite $(SUITE)" + +ct-all: + docker exec -i "$(CONTAINER)" bash -c "make ct" + +bash: + docker exec -it "$(CONTAINER)" bash + +run: + docker exec -it "$(CONTAINER)" bash -c "make run"; diff --git a/.ci/docker-compose-file/docker-compose-emqx-cluster-rlog.override.yaml b/.ci/docker-compose-file/docker-compose-emqx-cluster-rlog.override.yaml new file mode 100644 index 000000000..4c53ec3ae --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-emqx-cluster-rlog.override.yaml @@ -0,0 +1,36 @@ +x-default-emqx: &default-emqx + env_file: + - conf.cluster.env + healthcheck: + test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"] + interval: 5s + timeout: 25s + retries: 5 + +services: + emqx1: + <<: *default-emqx + container_name: node1.emqx.io + restart: on-failure + environment: + - "EMQX_HOST=node1.emqx.io" + - "EMQX_DB__BACKEND=rlog" + - "EMQX_DB__ROLE=core" + - "EMQX_CLUSTER__STATIC__SEEDS=[emqx@node1.emqx.io]" + - "EMQX_LISTENERS__TCP__DEFAULT__PROXY_PROTOCOL=false" + - "EMQX_LISTENERS__WS__DEFAULT__PROXY_PROTOCOL=false" + + emqx2: + <<: *default-emqx + container_name: 
node2.emqx.io + depends_on: + - emqx1 + restart: on-failure + environment: + - "EMQX_HOST=node2.emqx.io" + - "EMQX_DB__BACKEND=rlog" + - "EMQX_DB__ROLE=replicant" + - "EMQX_DB__CORE_NODES=emqx@node1.emqx.io" + - "EMQX_CLUSTER__STATIC__SEEDS=[emqx@node1.emqx.io]" + - "EMQX_LISTENERS__TCP__DEFAULT__PROXY_PROTOCOL=false" + - "EMQX_LISTENERS__WS__DEFAULT__PROXY_PROTOCOL=false" diff --git a/.ci/docker-compose-file/docker-compose-emqx-cluster.yaml b/.ci/docker-compose-file/docker-compose-emqx-cluster.yaml index 656905eb0..65bf4faf8 100644 --- a/.ci/docker-compose-file/docker-compose-emqx-cluster.yaml +++ b/.ci/docker-compose-file/docker-compose-emqx-cluster.yaml @@ -1,5 +1,15 @@ version: '3.9' +x-default-emqx: &default-emqx + image: ${_EMQX_DOCKER_IMAGE_TAG} + env_file: + - conf.cluster.env + healthcheck: + test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"] + interval: 5s + timeout: 25s + retries: 5 + services: haproxy: container_name: haproxy @@ -28,34 +38,20 @@ services: haproxy -f /usr/local/etc/haproxy/haproxy.cfg emqx1: + <<: *default-emqx container_name: node1.emqx.io - image: $TARGET:$EMQX_TAG - env_file: - - conf.cluster.env environment: - "EMQX_HOST=node1.emqx.io" - healthcheck: - test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"] - interval: 5s - timeout: 25s - retries: 5 networks: emqx_bridge: aliases: - node1.emqx.io emqx2: + <<: *default-emqx container_name: node2.emqx.io - image: $TARGET:$EMQX_TAG - env_file: - - conf.cluster.env environment: - "EMQX_HOST=node2.emqx.io" - healthcheck: - test: ["CMD", "/opt/emqx/bin/emqx", "ping"] - interval: 5s - timeout: 25s - retries: 5 networks: emqx_bridge: aliases: diff --git a/.ci/docker-compose-file/docker-compose-mysql-tcp.yaml b/.ci/docker-compose-file/docker-compose-mysql-tcp.yaml index 70cc3d242..8a4c498df 100644 --- a/.ci/docker-compose-file/docker-compose-mysql-tcp.yaml +++ b/.ci/docker-compose-file/docker-compose-mysql-tcp.yaml @@ -5,6 +5,8 @@ services: container_name: mysql image: mysql:${MYSQL_TAG} restart: always + ports: + - "3306:3306" environment: MYSQL_ROOT_PASSWORD: public MYSQL_DATABASE: mqtt diff --git a/.ci/docker-compose-file/docker-compose.yaml b/.ci/docker-compose-file/docker-compose.yaml index 85b1a06be..2f0137428 100644 --- a/.ci/docker-compose-file/docker-compose.yaml +++ b/.ci/docker-compose-file/docker-compose.yaml @@ -3,7 +3,7 @@ version: '3.9' services: erlang23: container_name: erlang23 - image: ghcr.io/emqx/emqx-builder/5.0:23.2.7.2-emqx-2-ubuntu20.04 + image: ghcr.io/emqx/emqx-builder/5.0-2:23.3.4.9-3-ubuntu20.04 env_file: - conf.env environment: @@ -23,7 +23,7 @@ services: erlang24: container_name: erlang24 - image: ghcr.io/emqx/emqx-builder/5.0:24.1.1-emqx-1-ubuntu20.04 + image: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04 env_file: - conf.env environment: diff --git a/.ci/docker-compose-file/haproxy/haproxy.cfg b/.ci/docker-compose-file/haproxy/haproxy.cfg index b658789da..89c1d7d5d 100644 --- a/.ci/docker-compose-file/haproxy/haproxy.cfg +++ b/.ci/docker-compose-file/haproxy/haproxy.cfg @@ -54,7 +54,6 @@ backend emqx_dashboard_back server emqx-1 node1.emqx.io:18083 server emqx-2 node2.emqx.io:18083 - ##---------------------------------------------------------------- ## public ##---------------------------------------------------------------- diff --git a/.ci/docker-compose-file/openldap/Dockerfile b/.ci/docker-compose-file/openldap/Dockerfile index adbb80800..88a096066 100644 --- a/.ci/docker-compose-file/openldap/Dockerfile +++ b/.ci/docker-compose-file/openldap/Dockerfile @@ -10,9 +10,9 @@ RUN wget 
ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-${LDAP_TA && cd .. && rm -rf openldap-${LDAP_TAG} COPY .ci/docker-compose-file/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf -COPY apps/emqx_auth_ldap/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif -COPY apps/emqx_auth_ldap/emqx.schema /usr/local/etc/openldap/schema/emqx.schema -COPY apps/emqx_auth_ldap/test/certs/*.pem /usr/local/etc/openldap/ +COPY apps/emqx_authn/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif +COPY apps/emqx_authn/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema +COPY apps/emqx_authn/test/data/certs/*.pem /usr/local/etc/openldap/ RUN mkdir -p /usr/local/etc/openldap/data \ && slapadd -l /usr/local/etc/openldap/schema/emqx.io.ldif -f /usr/local/etc/openldap/slapd.conf diff --git a/.ci/docker-compose-file/python/pytest.sh b/.ci/docker-compose-file/python/pytest.sh index eacbecc3b..c079a65a4 100755 --- a/.ci/docker-compose-file/python/pytest.sh +++ b/.ci/docker-compose-file/python/pytest.sh @@ -1,21 +1,30 @@ #!/bin/sh ## This script is to run emqx cluster smoke tests (fvt) in github action -## This script is executed in pacho_client +## This script is executed in paho_client set -x set +e -LB="haproxy" +EMQX_TEST_DB_BACKEND=$1 +if [ "$EMQX_TEST_DB_BACKEND" = "rlog" ] +then + # TODO: target only replica to avoid replication races + # see: https://github.com/emqx/emqx/issues/6094 + TARGET_HOST="node2.emqx.io" +else + # use loadbalancer + TARGET_HOST="haproxy" +fi apk update && apk add git curl git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho.mqtt.testing pip install pytest -pytest -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$LB" +pytest -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$TARGET_HOST" RESULT=$? -pytest -v /paho.mqtt.testing/interoperability/test_client --host "$LB" +pytest -v /paho.mqtt.testing/interoperability/test_client --host "$TARGET_HOST" RESULT=$(( RESULT + $? )) # pytest -v /paho.mqtt.testing/interoperability/test_cluster --host1 "node1.emqx.io" --host2 "node2.emqx.io" diff --git a/.ci/docker-compose-file/scripts/run-emqx.sh b/.ci/docker-compose-file/scripts/run-emqx.sh new file mode 100755 index 000000000..4f95db3ce --- /dev/null +++ b/.ci/docker-compose-file/scripts/run-emqx.sh @@ -0,0 +1,51 @@ +#!/bin/bash +set -euxo pipefail + +# _EMQX_DOCKER_IMAGE_TAG is shared with docker-compose file +export _EMQX_DOCKER_IMAGE_TAG="$1" +_EMQX_TEST_DB_BACKEND="${2:-${_EMQX_TEST_DB_BACKEND:-mnesia}}" + +if [ "$_EMQX_TEST_DB_BACKEND" = "rlog" ] +then + CLUSTER_OVERRIDES="-f .ci/docker-compose-file/docker-compose-emqx-cluster-rlog.override.yaml" +else + CLUSTER_OVERRIDES="" +fi + +{ + echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_" + echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s" + echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10" +} >> .ci/docker-compose-file/conf.cluster.env + +is_node_up() { + local node + node="$1" + docker exec -i "$node" \ + bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1 +} + +is_node_listening() { + local node + node="$1" + docker exec -i "$node" \ + emqx eval "ok = case gen_tcp:connect(\"localhost\", 1883, []) of {ok, P} -> gen_tcp:close(P), ok; _ -> exit(1) end." 
> /dev/null 2>&1 +} + +is_cluster_up() { + is_node_up node1.emqx.io && \ + is_node_up node2.emqx.io && \ + is_node_listening node1.emqx.io && \ + is_node_listening node2.emqx.io +} + +docker-compose \ + -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \ + $CLUSTER_OVERRIDES \ + -f .ci/docker-compose-file/docker-compose-python.yaml \ + up -d + +while ! is_cluster_up; do + echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx"; + sleep 5; +done diff --git a/.github/ISSUE_TEMPLATE/support-needed.md b/.github/ISSUE_TEMPLATE/support-needed.md index 49ba5a913..e50bdfcbf 100644 --- a/.github/ISSUE_TEMPLATE/support-needed.md +++ b/.github/ISSUE_TEMPLATE/support-needed.md @@ -9,6 +9,9 @@ labels: "Support, needs-triage" ### Subject of the support diff --git a/.github/workflows/build_packages.yaml b/.github/workflows/build_packages.yaml index 4704e189b..f8f4eb9dc 100644 --- a/.github/workflows/build_packages.yaml +++ b/.github/workflows/build_packages.yaml @@ -11,92 +11,69 @@ on: types: - published workflow_dispatch: + inputs: + which_branch: + required: false jobs: prepare: - strategy: - matrix: - otp: - - "23.2.7.2-emqx-2" - - "24.1.1-emqx-1" - runs-on: ubuntu-20.04 - container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04" + # prepare source with any OTP version, no need for a matrix + container: "ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04" outputs: - profiles: ${{ steps.set_profile.outputs.profiles }} - old_vsns: ${{ steps.set_profile.outputs.old_vsns }} + old_vsns: ${{ steps.find_old_versons.outputs.old_vsns }} steps: - uses: actions/checkout@v2 with: + ref: ${{ github.event.inputs.which_branch }} path: source fetch-depth: 0 - - name: set profile - id: set_profile + - name: find old versions + id: find_old_versons shell: bash working-directory: source run: | vsn="$(./pkg-vsn.sh)" pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')" - if make emqx-ee --dry-run > /dev/null 2>&1; then - old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")" - echo "::set-output name=old_vsns::$old_vsns" - echo "::set-output name=profiles::[\"emqx-ee\"]" - else - old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")" - echo "::set-output name=old_vsns::$old_vsns" - echo "::set-output name=profiles::[\"emqx\", \"emqx-edge\"]" - fi - - name: get otp version - id: get_otp_version + old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")" + echo "::set-output name=old_vsns::$old_vsns" + - name: get_all_deps run: | - otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' 
-noshell)" - echo "::set-output name=otp::$otp" - - name: set get token - if: endsWith(github.repository, 'enterprise') - run: | - echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials - git config --global credential.helper store - - name: get deps - working-directory: source - run: | - make ensure-rebar3 - ./rebar3 as default get-deps - rm -rf rebar.lock - - name: gen zip file - run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]* + make -C source deps-all + zip -ryq source.zip source/* source/.[^.]* - uses: actions/upload-artifact@v2 with: - name: source-${{ steps.get_otp_version.outputs.otp }} - path: source-${{ steps.get_otp_version.outputs.otp }}.zip + name: source + path: source.zip windows: runs-on: windows-2019 needs: prepare - if: endsWith(github.repository, 'emqx') strategy: fail-fast: false matrix: - profile: ${{fromJSON(needs.prepare.outputs.profiles)}} - exclude: - - profile: emqx-edge + profile: # only CE for windows + - emqx + otp: + - 23.2 steps: - uses: actions/download-artifact@v2 with: - name: source-23.2.7.2-emqx-2 + name: source path: . - name: unzip source code - run: Expand-Archive -Path source-23.2.7.2-emqx-2.zip -DestinationPath ./ + run: Expand-Archive -Path source.zip -DestinationPath ./ - uses: ilammy/msvc-dev-cmd@v1 - uses: gleam-lang/setup-erlang@v1.1.2 id: install_erlang ## gleam-lang/setup-erlang does not yet support the installation of otp24 on windows with: - otp-version: 23.2 + otp-version: ${{ matrix.otp }} - name: build env: PYTHON: python @@ -108,10 +85,10 @@ jobs: $version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" ) if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") { $regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]+" - $pkg_name = "${{ matrix.profile }}-windows-$([regex]::matches($version, $regex).value).zip" + $pkg_name = "${{ matrix.profile }}-$([regex]::matches($version, $regex).value)-otp${{ matrix.otp }}-windows-amd64.zip" } else { - $pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip" + $pkg_name = "${{ matrix.profile }}-$($version -replace '/')-otp${{ matrix.otp }}-windows-amd64.zip" } ## We do not build/release bcrypt and quic for windows package Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/ @@ -140,34 +117,32 @@ jobs: - uses: actions/upload-artifact@v1 if: startsWith(github.ref, 'refs/tags/') with: - name: ${{ matrix.profile }}-23.2.7.2-emqx-2 + name: ${{ matrix.profile }} path: source/_packages/${{ matrix.profile }}/. mac: - needs: prepare - strategy: fail-fast: false matrix: - profile: ${{fromJSON(needs.prepare.outputs.profiles)}} + profile: # no EDGE for mac + - emqx + - emqx-ee + otp: + - 24.1.5-2 macos: - macos-11 - macos-10.15 - otp: - - 24.1.1-emqx-1 exclude: - profile: emqx-edge - runs-on: ${{ matrix.macos }} - steps: - uses: actions/download-artifact@v2 with: - name: source-${{ matrix.otp }} + name: source path: . - name: unzip source code - run: unzip -q source-${{ matrix.otp }}.zip + run: unzip -q source.zip - name: prepare run: | brew update @@ -182,8 +157,12 @@ jobs: - name: build erlang if: steps.cache.outputs.cache-hit != 'true' timeout-minutes: 60 + env: + KERL_BUILD_BACKEND: git + OTP_GITHUB_URL: https://github.com/emqx/otp run: | - kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }} + kerl update releases + kerl build ${{ matrix.otp }} kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }} - name: build working-directory: source @@ -191,11 +170,12 @@ jobs: . 
$HOME/.kerl/${{ matrix.otp }}/activate make ensure-rebar3 sudo cp rebar3 /usr/local/bin/rebar3 + rm -rf _build/${{ matrix.profile }}/lib make ${{ matrix.profile }}-zip - name: test working-directory: source run: | - pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip | head) + pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip) unzip -q $pkg_name # gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins ./emqx/bin/emqx start || cat emqx/log/erlang.log.1 @@ -230,7 +210,15 @@ jobs: strategy: fail-fast: false matrix: - profile: ${{fromJSON(needs.prepare.outputs.profiles)}} + profile: ## all editions for linux + - emqx-edge + - emqx + - emqx-ee + otp: + - 24.1.5-2 # we test with OTP 23, but only build package on OTP 24 versions + arch: + - amd64 + - arm64 os: - ubuntu20.04 - ubuntu18.04 @@ -240,18 +228,9 @@ jobs: # - opensuse - centos8 - centos7 - - centos6 - raspbian10 # - raspbian9 - arch: - - amd64 - - arm64 - otp: - - 23.2.7.2-emqx-2 - - 24.1.1-emqx-1 exclude: - - os: centos6 - arch: arm64 - os: raspbian9 arch: amd64 - os: raspbian10 @@ -277,12 +256,13 @@ jobs: platforms: all - uses: actions/download-artifact@v2 with: - name: source-${{ matrix.otp }} + name: source path: . - name: unzip source code - run: unzip -q source-${{ matrix.otp }}.zip + run: unzip -q source.zip - name: downloads old emqx zip packages env: + OTP_VSN: ${{ matrix.otp }} PROFILE: ${{ matrix.profile }} ARCH: ${{ matrix.arch }} SYSTEM: ${{ matrix.os }} @@ -302,10 +282,11 @@ jobs: cd _upgrade_base old_vsns=($(echo $OLD_VSNS | tr ' ' ' ')) for tag in ${old_vsns[@]}; do - if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip) | grep -oE "^[23]+")" ];then - wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip - wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256 - echo "$(cat $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256) $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip" | sha256sum -c || exit 1 + package_name="${PROFILE}-${tag#[e|v]}-otp${OTP_VSN}-${SYSTEM}-${ARCH}" + if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip) | grep -oE "^[23]+")" ]; then + wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip + wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip.sha256 + echo "$(cat $package_name.zip.sha256) $package_name.zip" | sha256sum -c || exit 1 fi done - name: build emqx packages @@ -320,7 +301,7 @@ jobs: -v $(pwd):/emqx \ --workdir /emqx \ --platform linux/$ARCH \ - ghcr.io/emqx/emqx-builder/5.0:$OTP-$SYSTEM \ + ghcr.io/emqx/emqx-builder/5.0-2:$OTP-$SYSTEM \ bash -euc "make $PROFILE-zip || cat rebar3.crashdump; \ make $PROFILE-pkg || cat rebar3.crashdump; \ EMQX_NAME=$PROFILE && .ci/build_packages/tests.sh" @@ -349,17 +330,21 @@ jobs: strategy: fail-fast: false matrix: - profile: ${{fromJSON(needs.prepare.outputs.profiles)}} + profile: # all editions for docker + - emqx-edge + - emqx + - emqx-ee + # NOTE: for docker, only support latest otp version, not a matrix otp: - - 24.1.1-emqx-1 + - 24.1.5-2 # update to latest steps: - uses: actions/download-artifact@v2 with: - name: source-${{ matrix.otp }} + name: source path: . 
- name: unzip source code - run: unzip -q source-${{ matrix.otp }}.zip + run: unzip -q source.zip - uses: docker/setup-buildx-action@v1 - uses: docker/setup-qemu-action@v1 with: @@ -376,7 +361,8 @@ jobs: type=ref,event=pr type=ref,event=tag type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} + labels: + org.opencontainers.image.otp.version=${{ matrix.otp }} - uses: docker/login-action@v1 if: github.event_name == 'release' with: @@ -384,32 +370,26 @@ jobs: password: ${{ secrets.DOCKER_HUB_TOKEN }} - uses: docker/build-push-action@v2 with: - push: ${{ github.event_name == 'release' }} + push: ${{ github.event_name == 'release' && !github.event.release.prerelease }} pull: true no-cache: true platforms: linux/amd64,linux/arm64 tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} build-args: | - BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-alpine3.14 + BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-alpine3.14 RUN_FROM=alpine:3.14 EMQX_NAME=${{ matrix.profile }} file: source/deploy/docker/Dockerfile context: source delete-artifact: - runs-on: ubuntu-20.04 - strategy: - matrix: - otp: - - 23.2.7.2-emqx-2 - - 24.1.1-emqx-1 needs: [prepare, mac, linux, docker] steps: - uses: geekyeggo/delete-artifact@v1 with: - name: source-${{ matrix.otp }} + name: source upload: runs-on: ubuntu-20.04 @@ -420,9 +400,12 @@ jobs: strategy: matrix: - profile: ${{fromJSON(needs.prepare.outputs.profiles)}} + profile: + - emqx-edge + - emqx + - emqx-ee otp: - - 24.1.1-emqx-1 + - 24.1.5-2 steps: - uses: actions/checkout@v2 @@ -461,17 +444,11 @@ jobs: aws s3 cp --recursive _packages/${{ matrix.profile }} s3://${{ secrets.AWS_S3_BUCKET }}/$broker/${{ env.version }} aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CLOUDFRONT_ID }} --paths "/$broker/${{ env.version }}/*" - uses: Rory-Z/upload-release-asset@v1 - if: github.event_name == 'release' && matrix.profile != 'emqx-ee' + if: github.event_name == 'release' with: repo: emqx path: "_packages/${{ matrix.profile }}/emqx-*" token: ${{ github.token }} - - uses: Rory-Z/upload-release-asset@v1 - if: github.event_name == 'release' && matrix.profile == 'emqx-ee' - with: - repo: emqx-enterprise - path: "_packages/${{ matrix.profile }}/emqx-*" - token: ${{ github.token }} - name: update to emqx.io if: github.event_name == 'release' run: | @@ -484,32 +461,28 @@ jobs: -d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.version }}\" }" \ ${{ secrets.EMQX_IO_RELEASE_API }} - name: update repo.emqx.io - if: github.event_name == 'release' && endsWith(github.repository, 'enterprise') && matrix.profile == 'emqx-ee' + if: github.event_name == 'release' run: | + if [ "${{ matrix. 
profile }}" = 'emqx-ee' ]; then + BOOL_FLAG_NAME="emqx_ee" + else + BOOL_FLAG_NAME="emqx_ce" + fi curl --silent --show-error \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -H "Accept: application/vnd.github.v3+json" \ -X POST \ - -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \ - "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches" - - name: update repo.emqx.io - if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx' - run: | - curl --silent --show-error \ - -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ - -H "Accept: application/vnd.github.v3+json" \ - -X POST \ - -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \ + -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\", \"${BOOL_FLAG_NAME}\": \"true\"}}" \ "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches" - name: update homebrew packages - if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx' + if: github.event_name == 'release' && matrix.profile == 'emqx' run: | if [ -z $(echo $version | grep -oE "(alpha|beta|rc)\.[0-9]") ]; then curl --silent --show-error \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -H "Accept: application/vnd.github.v3+json" \ -X POST \ - -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \ + -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \ "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_homebrew.yaml/dispatches" fi - uses: geekyeggo/delete-artifact@v1 diff --git a/.github/workflows/build_slim_packages.yaml b/.github/workflows/build_slim_packages.yaml index 67e813747..46eb7aef7 100644 --- a/.github/workflows/build_slim_packages.yaml +++ b/.github/workflows/build_slim_packages.yaml @@ -4,7 +4,6 @@ concurrency: group: slim-${{ github.event_name }}-${{ github.ref }} cancel-in-progress: true - on: push: tags: @@ -14,48 +13,37 @@ on: workflow_dispatch: jobs: - build: + linux: runs-on: ubuntu-20.04 strategy: fail-fast: false matrix: + profile: + - emqx-edge + - emqx + - emqx-ee otp: - - 24.1.1-emqx-1 + - 24.1.5-2 os: - ubuntu20.04 - centos7 - container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-${{ matrix.os }}" + container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-${{ matrix.os }}" steps: - uses: actions/checkout@v1 - - name: prepare - run: | - if make emqx-ee --dry-run > /dev/null 2>&1; then - echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials - git config --global credential.helper store - echo "${{ secrets.CI_GIT_TOKEN }}" >> ./scripts/git-token - echo "EMQX_NAME=emqx-ee" >> $GITHUB_ENV - else - echo "EMQX_NAME=emqx" >> $GITHUB_ENV - fi - - name: build zip packages - run: make ${EMQX_NAME}-zip + - name: build zip package + run: make ${{ matrix.profile }}-zip - name: build deb/rpm packages - run: make ${EMQX_NAME}-pkg - - uses: actions/upload-artifact@v1 - if: failure() - with: - name: rebar3.crashdump - path: ./rebar3.crashdump + run: make ${{ matrix.profile }}-pkg - name: packages test run: | export CODE_PATH=$GITHUB_WORKSPACE - .ci/build_packages/tests.sh + EMQX_NAME=${{ matrix.profile }} .ci/build_packages/tests.sh - uses: actions/upload-artifact@v2 with: - name: ${{ matrix.os }} + name: ${{ matrix.profile}}-${{ matrix.otp }}-${{ 
matrix.os }} path: _packages/**/*.zip mac: @@ -63,32 +51,25 @@ jobs: strategy: fail-fast: false matrix: + profile: + - emqx + - emqx-ee + otp: + - 24.1.5-2 macos: - macos-11 - macos-10.15 - otp: - - 24.1.1-emqx-1 runs-on: ${{ matrix.macos }} steps: - - uses: actions/checkout@v1 - - name: prepare - run: | - if make emqx-ee --dry-run > /dev/null 2>&1; then - echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials - git config --global credential.helper store - echo "${{ secrets.CI_GIT_TOKEN }}" >> ./scripts/git-token - echo "EMQX_NAME=emqx-ee" >> $GITHUB_ENV - else - echo "EMQX_NAME=emqx" >> $GITHUB_ENV - fi + - uses: actions/checkout@v2 - name: prepare run: | brew update brew install curl zip unzip gnu-sed kerl unixodbc freetds echo "/usr/local/bin" >> $GITHUB_PATH - git config --global credential.helper store + echo "EMQX_NAME=${{ matrix.profile }}" >> $GITHUB_ENV - uses: actions/cache@v2 id: cache with: @@ -97,23 +78,23 @@ jobs: - name: build erlang if: steps.cache.outputs.cache-hit != 'true' timeout-minutes: 60 + env: + KERL_BUILD_BACKEND: git + OTP_GITHUB_URL: https://github.com/emqx/otp run: | - kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }} + kerl update releases + kerl build ${{ matrix.otp }} kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }} - - name: build + - name: build ${{ matrix.profile }} run: | . $HOME/.kerl/${{ matrix.otp }}/activate make ensure-rebar3 sudo cp rebar3 /usr/local/bin/rebar3 - make ${EMQX_NAME}-zip - - uses: actions/upload-artifact@v1 - if: failure() - with: - name: rebar3.crashdump - path: ./rebar3.crashdump + make ${{ matrix.profile }}-zip - name: test run: | - unzip -q $(find _packages/${EMQX_NAME} -mindepth 1 -maxdepth 1 -iname \*.zip | head) + pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip) + unzip -q $pkg_name # gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins ./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ready='no' diff --git a/.github/workflows/check_deps_integrity.yaml b/.github/workflows/check_deps_integrity.yaml index afcb384cb..c1a457665 100644 --- a/.github/workflows/check_deps_integrity.yaml +++ b/.github/workflows/check_deps_integrity.yaml @@ -5,7 +5,7 @@ on: [pull_request] jobs: check_deps_integrity: runs-on: ubuntu-20.04 - container: "ghcr.io/emqx/emqx-builder/5.0:24.1.1-emqx-1-ubuntu20.04" + container: "ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04" steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/code_style_check.yaml b/.github/workflows/code_style_check.yaml new file mode 100644 index 000000000..fae85baa2 --- /dev/null +++ b/.github/workflows/code_style_check.yaml @@ -0,0 +1,17 @@ +name: Code style check + +on: [pull_request] + +jobs: + build: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 1000 + - name: Run elvis check + run: | + ./scripts/elvis-check.sh $GITHUB_BASE_REF + - name: Check line-break at EOF + run: | + ./scripts/check-nl-at-eof.sh diff --git a/.github/workflows/elvis_lint.yaml b/.github/workflows/elvis_lint.yaml deleted file mode 100644 index 1fdbeba87..000000000 --- a/.github/workflows/elvis_lint.yaml +++ /dev/null @@ -1,16 +0,0 @@ -name: Elvis Linter - -on: [pull_request] - -jobs: - build: - runs-on: ubuntu-20.04 - steps: - - uses: actions/checkout@v2 - - name: Set git token - if: endsWith(github.repository, 'enterprise') - run: | - echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials - git 
config --global credential.helper store - - run: | - ./scripts/elvis-check.sh $GITHUB_BASE_REF diff --git a/.github/workflows/git_sync.yaml b/.github/workflows/git_sync.yaml index 48e29a37d..4d81bf055 100644 --- a/.github/workflows/git_sync.yaml +++ b/.github/workflows/git_sync.yaml @@ -24,11 +24,7 @@ jobs: id: create_pull_request run: | set -euo pipefail - if [ "$GITHUB_REF" = "refs/heads/master" ]; then - EE_REF="refs/heads/enterprise" - else - EE_REF="${GITHUB_REF}-enterprise" - fi + EE_REF="${GITHUB_REF}-enterprise" R=$(curl --silent --show-error \ -H "Accept: application/vnd.github.v3+json" \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ diff --git a/.github/workflows/run_api_tests.yaml b/.github/workflows/run_api_tests.yaml index fc8630b95..fbc6ae6f8 100644 --- a/.github/workflows/run_api_tests.yaml +++ b/.github/workflows/run_api_tests.yaml @@ -61,7 +61,7 @@ jobs: - uses: actions/checkout@v2 with: repository: emqx/emqx-fvt - ref: v1.3.0 + ref: 1.0.2-dev1 path: . - uses: actions/setup-java@v1 with: diff --git a/.github/workflows/run_emqx_app_tests.yaml b/.github/workflows/run_emqx_app_tests.yaml index bbcec98c8..caa4e14ad 100644 --- a/.github/workflows/run_emqx_app_tests.yaml +++ b/.github/workflows/run_emqx_app_tests.yaml @@ -12,11 +12,11 @@ jobs: strategy: matrix: otp: - - "23.2.7.2-emqx-2" - - "24.1.1-emqx-1" + - 23.3.4.9-3 + - 24.1.5-2 runs-on: ubuntu-20.04 - container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04" + container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-ubuntu20.04" steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/run_fvt_tests.yaml b/.github/workflows/run_fvt_tests.yaml index 509e84bab..f64e4e21f 100644 --- a/.github/workflows/run_fvt_tests.yaml +++ b/.github/workflows/run_fvt_tests.yaml @@ -8,58 +8,27 @@ on: push: tags: - v* - - e* pull_request: jobs: prepare: - strategy: - matrix: - otp: - - "23.2.7.2-emqx-2" - - "24.1.1-emqx-1" - runs-on: ubuntu-20.04 - container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04" - - outputs: - profile: ${{ steps.profile.outputs.profile }} + # prepare source with any OTP version, no need for a matrix + container: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14 steps: - - name: get otp version - id: get_otp_version - run: | - otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' 
-noshell)" - echo "::set-output name=otp::$otp" - uses: actions/checkout@v2 with: path: source fetch-depth: 0 - - name: set profile - id: profile - shell: bash - working-directory: source - run: | - vsn="$(./pkg-vsn.sh)" - if make emqx-ee --dry-run > /dev/null 2>&1; then - echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials - git config --global credential.helper store - echo "::set-output name=profile::emqx-ee" - else - echo "::set-output name=profile::emqx" - fi - name: get deps - working-directory: source run: | - make ensure-rebar3 - ./rebar3 as default get-deps - rm -rf rebar.lock - - name: gen zip file - run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]* + make -C source deps-all + zip -ryq source.zip source/* source/.[^.]* - uses: actions/upload-artifact@v2 with: - name: source-${{ steps.get_otp_version.outputs.otp }} - path: source-${{ steps.get_otp_version.outputs.otp }}.zip + name: source + path: source.zip docker_test: runs-on: ubuntu-20.04 @@ -68,47 +37,43 @@ jobs: strategy: fail-fast: false matrix: - otp: - - 23.2.7.2-emqx-2 - - 24.1.1-emqx-1 + profile: + - emqx-edge + - emqx + - emqx-ee + cluster_db_backend: + - mnesia + - rlog steps: - uses: actions/download-artifact@v2 with: - name: source-${{ matrix.otp }} + name: source path: . - name: unzip source code - run: unzip -q source-${{ matrix.otp }}.zip + run: unzip -q source.zip - name: make docker image working-directory: source env: - OTP: ${{ matrix.otp }} + EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14 run: | - make ${{ needs.prepare.outputs.profile }}-docker - echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV - echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV + make ${{ matrix.profile }}-docker - name: run emqx timeout-minutes: 5 working-directory: source run: | - set -e -u -x - echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_" >> .ci/docker-compose-file/conf.cluster.env - echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s" >> .ci/docker-compose-file/conf.cluster.env - echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10" >> .ci/docker-compose-file/conf.cluster.env - docker-compose \ - -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \ - -f .ci/docker-compose-file/docker-compose-python.yaml \ - up -d - while ! docker exec -i node1.emqx.io bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1; do - echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx"; - sleep 5; - done + set -x + IMAGE=emqx/${{ matrix.profile }}:$(./pkg-vsn.sh) + ./.ci/docker-compose-file/scripts/run-emqx.sh $IMAGE ${{ matrix.cluster_db_backend }} - name: make paho tests run: | - if ! docker exec -i python /scripts/pytest.sh; then + if ! docker exec -i python /scripts/pytest.sh "${{ matrix.cluster_db_backend }}"; then echo "DUMP_CONTAINER_LOGS_BGN" + echo "============== haproxy ==============" docker logs haproxy + echo "============== node1 ==============" docker logs node1.emqx.io + echo "============== node2 ==============" docker logs node2.emqx.io echo "DUMP_CONTAINER_LOGS_END" exit 1 @@ -121,24 +86,24 @@ jobs: strategy: fail-fast: false matrix: - otp: - - 23.2.7.2-emqx-2 - - 24.1.1-emqx-1 + profile: + - emqx + # - emqx-ee # TODO test enterprise steps: - uses: actions/download-artifact@v2 with: - name: source-${{ matrix.otp }} + name: source path: . 
- name: unzip source code - run: unzip -q source-${{ matrix.otp }}.zip + run: unzip -q source.zip - name: make docker image working-directory: source env: - OTP: ${{ matrix.otp }} + EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14 run: | - make ${{ needs.prepare.outputs.profile }}-docker - echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV + make ${{ matrix.profile }}-docker + echo "TARGET=emqx/${{ matrix.profile }}" >> $GITHUB_ENV echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV - run: minikube start - name: run emqx on chart diff --git a/.github/workflows/run_relup_tests.yaml b/.github/workflows/run_relup_tests.yaml index f56121d8a..6a73f846b 100644 --- a/.github/workflows/run_relup_tests.yaml +++ b/.github/workflows/run_relup_tests.yaml @@ -15,12 +15,14 @@ jobs: relup_test: strategy: matrix: - otp: - - "23.2.7.2-emqx-2" - - "24.1.1-emqx-1" + profile: + - emqx + - emqx-ee + otp_vsn: + - 24.1.5-2 runs-on: ubuntu-20.04 - container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04" + container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp_vsn }}-ubuntu20.04" defaults: run: @@ -43,7 +45,7 @@ jobs: - uses: actions/checkout@v2 with: repository: emqx/emqtt-bench - ref: 0.3.4 + ref: 0.3.4 path: emqtt-bench - uses: actions/checkout@v2 with: @@ -55,26 +57,18 @@ jobs: repository: ${{ github.repository }} path: emqx fetch-depth: 0 - - name: prepare - run: | - if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then - echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials - git config --global credential.helper store - echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token - echo "PROFILE=emqx-ee" >> $GITHUB_ENV - else - echo "PROFILE=emqx" >> $GITHUB_ENV - fi - name: get version run: | set -e -x -u cd emqx + export PROFILE=${{ matrix.profile }} + export OTP_VSN=${{ matrix.otp_vsn }} + echo "PROFILE=$PROFILE" >> $GITHUB_ENV + echo "OTP_VSN=$OTP_VSN" >> $GITHUB_ENV if [ $PROFILE = "emqx" ];then broker="emqx-ce" - edition='opensource' else broker="emqx-ee" - edition='enterprise' fi echo "BROKER=$broker" >> $GITHUB_ENV @@ -82,11 +76,7 @@ jobs: echo "VSN=$vsn" >> $GITHUB_ENV pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')" - if [ $PROFILE = "emqx" ]; then - old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")" - else - old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")" - fi + old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")" echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV - name: download emqx run: | @@ -95,7 +85,7 @@ jobs: cd emqx/_upgrade_base old_vsns=($(echo $OLD_VSNS | tr ' ' ' ')) for old_vsn in ${old_vsns[@]}; do - wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip + wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-${old_vsn#[e|v]}-otp${OTP_VSN}-ubuntu20.04-amd64.zip done - name: build emqx run: make -C emqx ${PROFILE}-zip diff --git a/.github/workflows/run_test_cases.yaml b/.github/workflows/run_test_cases.yaml index 5c52ed61e..8d2da0e21 100644 --- a/.github/workflows/run_test_cases.yaml +++ b/.github/workflows/run_test_cases.yaml @@ -15,12 +15,11 @@ jobs: run_static_analysis: strategy: matrix: - otp: - - "23.2.7.2-emqx-2" - - "24.1.1-emqx-1" + emqx_builder: + - 5.0-2:24.1.5-2 # run dialyzer on latest OTP runs-on: ubuntu-20.04 - container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04" + container: 
"ghcr.io/emqx/emqx-builder/${{ matrix.emqx_builder }}-ubuntu20.04" steps: - uses: actions/checkout@v2 @@ -38,12 +37,11 @@ jobs: run_proper_test: strategy: matrix: - otp: - - "23.2.7.2-emqx-2" - - "24.1.1-emqx-1" + emqx_builder: + - 5.0-2:24.1.5-2 runs-on: ubuntu-20.04 - container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04" + container: "ghcr.io/emqx/emqx-builder/${{ matrix.emqx_builder }}-ubuntu20.04" steps: - uses: actions/checkout@v2 @@ -67,32 +65,19 @@ jobs: steps: - uses: actions/checkout@v2 - - name: set edition - id: set_edition - run: | - if make emqx-ee --dry-run > /dev/null 2>&1; then - echo "EDITION=enterprise" >> $GITHUB_ENV - else - echo "EDITION=opensource" >> $GITHUB_ENV - fi - name: docker compose up - if: env.EDITION == 'opensource' env: + MYSQL_TAG: 8 + PGSQL_TAG: 13 + REDIS_TAG: 6 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | docker-compose \ + -f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \ -f .ci/docker-compose-file/docker-compose.yaml \ up -d --build - - name: docker compose up - if: env.EDITION == 'enterprise' - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - timeout-minutes: 20 - run: | - docker-compose \ - -f .ci/docker-compose-file/docker-compose.yaml \ - -f .ci/docker-compose-file/docker-compose-enterprise.yaml \ - up -d --build - name: run eunit run: | docker exec -i ${{ matrix.otp_release }} bash -c "make eunit" diff --git a/.gitignore b/.gitignore index b7a358b31..6da25e641 100644 --- a/.gitignore +++ b/.gitignore @@ -58,3 +58,5 @@ erlang_ls.config # Emacs temporary files .#* *# +# For direnv +.envrc diff --git a/.tool-versions b/.tool-versions index 3eb01f497..6d9b11e28 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1 +1 @@ -erlang 24.0.5-emqx-1 +erlang 24.1.5-2 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5e73359e8..118e9a046 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -79,4 +79,4 @@ Just as in the **subject**, use the imperative, present tense: "change" not "cha The footer should contain any information about **Breaking Changes** and is also the place to reference GitHub issues that this commit **Closes**. -**Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines. The rest of the commit message is then used for this. \ No newline at end of file +**Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines. The rest of the commit message is then used for this. 
diff --git a/Makefile b/Makefile index b699bbe82..bb7024e88 100644 --- a/Makefile +++ b/Makefile @@ -3,16 +3,20 @@ REBAR_VERSION = 3.16.1-emqx-1 REBAR = $(CURDIR)/rebar3 BUILD = $(CURDIR)/build SCRIPTS = $(CURDIR)/scripts +export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-alpine3.14 +export EMQX_DEFAULT_RUNNER = alpine:3.14 +export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh) export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh) -export EMQX_DESC ?= EMQ X export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.18 +export DOCKERFILE := deploy/docker/Dockerfile +export DOCKERFILE_TESTING := deploy/docker/Dockerfile.testing ifeq ($(OS),Windows_NT) export REBAR_COLOR=none endif PROFILE ?= emqx -REL_PROFILES := emqx emqx-edge -PKG_PROFILES := emqx-pkg emqx-edge-pkg +REL_PROFILES := emqx emqx-edge emqx-ee +PKG_PROFILES := emqx-pkg emqx-edge-pkg emqx-ee-pkg PROFILES := $(REL_PROFILES) $(PKG_PROFILES) default CT_NODE_NAME ?= 'test@127.0.0.1' @@ -85,7 +89,6 @@ coveralls: $(REBAR) @ENABLE_COVER_COMPILE=1 $(REBAR) as test coveralls send .PHONY: $(REL_PROFILES) - $(REL_PROFILES:%=%): $(REBAR) get-dashboard conf-segs @$(REBAR) as $(@) do compile,release @@ -98,8 +101,10 @@ $(REL_PROFILES:%=%): $(REBAR) get-dashboard conf-segs clean: $(PROFILES:%=clean-%) $(PROFILES:%=clean-%): @if [ -d _build/$(@:clean-%=%) ]; then \ + rm rebar.lock \ rm -rf _build/$(@:clean-%=%)/rel; \ find _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \ + find _build/$(@:clean-%=%) -type l -delete; \ fi .PHONY: clean-all @@ -109,6 +114,7 @@ clean-all: .PHONY: deps-all deps-all: $(REBAR) $(PROFILES:%=deps-%) + @make clean # ensure clean at the end ## deps- is used in CI scripts to download deps and the ## share downloads between CI steps and/or copied into containers @@ -116,6 +122,7 @@ deps-all: $(REBAR) $(PROFILES:%=deps-%) .PHONY: $(PROFILES:%=deps-%) $(PROFILES:%=deps-%): $(REBAR) get-dashboard @$(REBAR) as $(@:deps-%=%) get-deps + @rm -f rebar.lock .PHONY: xref xref: $(REBAR) @@ -174,5 +181,17 @@ endef ALL_ZIPS = $(REL_PROFILES) $(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target,$(zt)))) +## emqx-docker-testing +## emqx-ee-docker-testing +## is to directly copy a unzipped zip-package to a +## base image such as ubuntu20.04. Mostly for testing +.PHONY: $(REL_PROFILES:%=%-docker-testing) +define gen-docker-target-testing +$1-docker-testing: $(COMMON_DEPS) + @$(BUILD) $1 docker-testing +endef +ALL_ZIPS = $(REL_PROFILES) +$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target-testing,$(zt)))) + conf-segs: @scripts/merge-config.escript diff --git a/apps/emqx/etc/emqx.conf b/apps/emqx/etc/emqx.conf index 786c3d1d6..486900a60 100644 --- a/apps/emqx/etc/emqx.conf +++ b/apps/emqx/etc/emqx.conf @@ -86,8 +86,8 @@ listeners.tcp.default { ## Set to "" to disable the feature. ## ## Variables in mountpoint string: - ## - %c: clientid - ## - %u: username + ## - ${clientid}: clientid + ## - ${username}: username ## ## @doc listeners.tcp..mountpoint ## ValueType: String @@ -185,8 +185,8 @@ listeners.ssl.default { ## Set to "" to disable the feature. ## ## Variables in mountpoint string: - ## - %c: clientid - ## - %u: username + ## - ${clientid}: clientid + ## - ${username}: username ## ## @doc listeners.ssl..mountpoint ## ValueType: String @@ -278,8 +278,8 @@ listeners.quic.default { ## Set to "" to disable the feature. 
## ## Variables in mountpoint string: - ## - %c: clientid - ## - %u: username + ## - ${clientid}: clientid + ## - ${username}: username ## ## @doc listeners.quic..mountpoint ## ValueType: String @@ -372,8 +372,8 @@ listeners.ws.default { ## Set to "" to disable the feature. ## ## Variables in mountpoint string: - ## - %c: clientid - ## - %u: username + ## - ${clientid}: clientid + ## - ${username}: username ## ## @doc listeners.ws..mountpoint ## ValueType: String @@ -475,8 +475,8 @@ listeners.wss.default { ## Set to "" to disable the feature. ## ## Variables in mountpoint string: - ## - %c: clientid - ## - %u: username + ## - ${clientid}: clientid + ## - ${username}: username ## ## @doc listeners.wss..mountpoint ## ValueType: String diff --git a/apps/emqx/etc/emqx_cloud/vm.args b/apps/emqx/etc/emqx_cloud/vm.args index 1e6b0b4cb..0ee4b1e15 100644 --- a/apps/emqx/etc/emqx_cloud/vm.args +++ b/apps/emqx/etc/emqx_cloud/vm.args @@ -116,3 +116,7 @@ ## patches dir -pa {{ platform_data_dir }}/patches + +## Mnesia thresholds +-mnesia dump_log_write_threshold 5000 +-mnesia dump_log_time_threshold 60000 diff --git a/apps/emqx/etc/emqx_edge/vm.args b/apps/emqx/etc/emqx_edge/vm.args index ef9749738..70ce81f9f 100644 --- a/apps/emqx/etc/emqx_edge/vm.args +++ b/apps/emqx/etc/emqx_edge/vm.args @@ -114,3 +114,7 @@ ## patches dir -pa {{ platform_data_dir }}/patches + +## Mnesia thresholds +-mnesia dump_log_write_threshold 5000 +-mnesia dump_log_time_threshold 60000 diff --git a/apps/emqx/include/emqx.hrl b/apps/emqx/include/emqx.hrl index cf419edc5..f7d3418ca 100644 --- a/apps/emqx/include/emqx.hrl +++ b/apps/emqx/include/emqx.hrl @@ -48,6 +48,12 @@ %% Queue topic -define(QUEUE, <<"$queue/">>). +%%-------------------------------------------------------------------- +%% alarms +%%-------------------------------------------------------------------- +-define(ACTIVATED_ALARM, emqx_activated_alarm). +-define(DEACTIVATED_ALARM, emqx_deactivated_alarm). + %%-------------------------------------------------------------------- %% Message and Delivery %%-------------------------------------------------------------------- diff --git a/apps/emqx/include/emqx_placeholder.hrl b/apps/emqx/include/emqx_placeholder.hrl new file mode 100644 index 000000000..0b9ffca5f --- /dev/null +++ b/apps/emqx/include/emqx_placeholder.hrl @@ -0,0 +1,115 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2017-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-ifndef(EMQ_X_PLACEHOLDER_HRL). +-define(EMQ_X_PLACEHOLDER_HRL, true). + +-define(PH(Type), <<"${", Type/binary, "}">> ). + +%% action: publish/subscribe/all +-define(PH_ACTION, <<"${action}">> ). + +%% cert +-define(PH_CERT_SUBJECT, <<"${cert_subject}">> ). +-define(PH_CERT_CN_NAME, <<"${cert_common_name}">> ). + +%% MQTT +-define(PH_PASSWORD, <<"${password}">> ). +-define(PH_CLIENTID, <<"${clientid}">> ). 
+-define(PH_FROM_CLIENTID, <<"${from_clientid}">> ). +-define(PH_USERNAME, <<"${username}">> ). +-define(PH_FROM_USERNAME, <<"${from_username}">> ). +-define(PH_TOPIC, <<"${topic}">> ). +%% MQTT payload +-define(PH_PAYLOAD, <<"${payload}">> ). +%% client IPAddress +-define(PH_PEERHOST, <<"${peerhost}">> ). +%% ip & port +-define(PH_HOST, <<"${host}">> ). +-define(PH_PORT, <<"${port}">> ). +%% Enumeration of message QoS 0,1,2 +-define(PH_QOS, <<"${qos}">> ). +-define(PH_FLAGS, <<"${flags}">> ). +%% Additional data related to process within the MQTT message +-define(PH_HEADERS, <<"${headers}">> ). +%% protocol name +-define(PH_PROTONAME, <<"${proto_name}">> ). +%% protocol version +-define(PH_PROTOVER, <<"${proto_ver}">> ). +%% MQTT keepalive interval +-define(PH_KEEPALIVE, <<"${keepalive}">> ). +%% MQTT clean_start +-define(PH_CLEAR_START, <<"${clean_start}">> ). +%% MQTT Session Expiration time +-define(PH_EXPIRY_INTERVAL, <<"${expiry_interval}">> ). + +%% Time when PUBLISH message reaches Broker (ms) +-define(PH_PUBLISH_RECEIVED_AT, <<"${publish_received_at}">>). +%% Mountpoint for bridging messages +-define(PH_MOUNTPOINT, <<"${mountpoint}">> ). +%% IPAddress and Port of terminal +-define(PH_PEERNAME, <<"${peername}">> ). +%% IPAddress and Port listened by emqx +-define(PH_SOCKNAME, <<"${sockname}">> ). +%% whether it is MQTT bridge connection +-define(PH_IS_BRIDGE, <<"${is_bridge}">> ). +%% Terminal connection completion time (s) +-define(PH_CONNECTED_AT, <<"${connected_at}">> ). +%% Event trigger time(millisecond) +-define(PH_TIMESTAMP, <<"${timestamp}">> ). +%% Terminal disconnection completion time (s) +-define(PH_DISCONNECTED_AT, <<"${disconnected_at}">> ). + +-define(PH_NODE, <<"${node}">> ). +-define(PH_REASON, <<"${reason}">> ). + +-define(PH_ENDPOINT_NAME, <<"${endpoint_name}">> ). + +%% sync change these place holder with binary def. +-define(PH_S_ACTION, "${action}" ). +-define(PH_S_CERT_SUBJECT, "${cert_subject}" ). +-define(PH_S_CERT_CN_NAME, "${cert_common_name}" ). +-define(PH_S_PASSWORD, "${password}" ). +-define(PH_S_CLIENTID, "${clientid}" ). +-define(PH_S_FROM_CLIENTID, "${from_clientid}" ). +-define(PH_S_USERNAME, "${username}" ). +-define(PH_S_FROM_USERNAME, "${from_username}" ). +-define(PH_S_TOPIC, "${topic}" ). +-define(PH_S_PAYLOAD, "${payload}" ). +-define(PH_S_PEERHOST, "${peerhost}" ). +-define(PH_S_HOST, "${host}" ). +-define(PH_S_PORT, "${port}" ). +-define(PH_S_QOS, "${qos}" ). +-define(PH_S_FLAGS, "${flags}" ). +-define(PH_S_HEADERS, "${headers}" ). +-define(PH_S_PROTONAME, "${proto_name}" ). +-define(PH_S_PROTOVER, "${proto_ver}" ). +-define(PH_S_KEEPALIVE, "${keepalive}" ). +-define(PH_S_CLEAR_START, "${clean_start}" ). +-define(PH_S_EXPIRY_INTERVAL, "${expiry_interval}" ). +-define(PH_S_PUBLISH_RECEIVED_AT, "${publish_received_at}" ). +-define(PH_S_MOUNTPOINT, "${mountpoint}" ). +-define(PH_S_PEERNAME, "${peername}" ). +-define(PH_S_SOCKNAME, "${sockname}" ). +-define(PH_S_IS_BRIDGE, "${is_bridge}" ). +-define(PH_S_CONNECTED_AT, "${connected_at}" ). +-define(PH_S_TIMESTAMP, "${timestamp}" ). +-define(PH_S_DISCONNECTED_AT, "${disconnected_at}" ). +-define(PH_S_NODE, "${node}" ). +-define(PH_S_REASON, "${reason}" ). +-define(PH_S_ENDPOINT_NAME, "${endpoint_name}" ). + +-endif. diff --git a/apps/emqx/include/emqx_release.hrl b/apps/emqx/include/emqx_release.hrl index 5b4bc7d00..1424eb8a5 100644 --- a/apps/emqx/include/emqx_release.hrl +++ b/apps/emqx/include/emqx_release.hrl @@ -14,9 +14,6 @@ %% limitations under the License. 
%%-------------------------------------------------------------------- --ifndef(EMQX_RELEASE_HRL). --define(EMQX_RELEASE_HRL, true). - %% NOTE: this is the release version which is not always the same %% as the emqx app version defined in emqx.app.src %% App (plugin) versions are bumped independently. @@ -27,13 +24,4 @@ %% NOTE: This version number should be manually bumped for each release --ifndef(EMQX_ENTERPRISE). - --define(EMQX_RELEASE, {opensource, "5.0-beta.1"}). - --else. - - --endif. - --endif. +-define(EMQX_RELEASE, "5.0-beta.2"). diff --git a/apps/emqx/rebar.config b/apps/emqx/rebar.config index 61d5717d9..59c5cf045 100644 --- a/apps/emqx/rebar.config +++ b/apps/emqx/rebar.config @@ -17,10 +17,10 @@ , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.0"}}} , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.11.1"}}} , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}} - , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.20.5"}}} + , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.20.6"}}} , {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}} , {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}} - , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.14.1"}}} + , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.15.0"}}} ]}. {plugins, [{rebar3_proper, "0.12.1"}]}. diff --git a/apps/emqx/src/emqx.erl b/apps/emqx/src/emqx.erl index 1df7f0149..b2c73c9bb 100644 --- a/apps/emqx/src/emqx.erl +++ b/apps/emqx/src/emqx.erl @@ -20,6 +20,7 @@ -include("logger.hrl"). -include("types.hrl"). +-elvis([{elvis_style, god_modules, disable}]). %% Start/Stop the application -export([ start/0 @@ -51,10 +52,6 @@ , run_fold_hook/3 ]). -%% Troubleshooting --export([ set_debug_secret/1 - ]). - %% Configs APIs -export([ get_config/1 , get_config/2 @@ -71,29 +68,6 @@ -define(APP, ?MODULE). -%% @hidden Path to the file which has debug_info encryption secret in it. -%% Evaluate this function if there is a need to access encrypted debug_info. -%% NOTE: Do not change the API to accept the secret text because it may -%% get logged everywhere. -set_debug_secret(PathToSecretFile) -> - SecretText = - case file:read_file(PathToSecretFile) of - {ok, Secret} -> - try string:trim(binary_to_list(Secret)) - catch _ : _ -> error({badfile, PathToSecretFile}) - end; - {error, Reason} -> - ?ULOG("Failed to read debug_info encryption key file ~ts: ~p~n", - [PathToSecretFile, Reason]), - error(Reason) - end, - F = fun(init) -> ok; - (clear) -> ok; - ({debug_info, _Mode, _Module, _Filename}) -> SecretText - end, - _ = beam_lib:clear_crypto_key_fun(), - ok = beam_lib:crypto_key_fun(F). - %%-------------------------------------------------------------------- %% Bootstrap, is_running... %%-------------------------------------------------------------------- diff --git a/apps/emqx/src/emqx_alarm.erl b/apps/emqx/src/emqx_alarm.erl index 2585494eb..d1c4dd748 100644 --- a/apps/emqx/src/emqx_alarm.erl +++ b/apps/emqx/src/emqx_alarm.erl @@ -17,7 +17,6 @@ -module(emqx_alarm). -behaviour(gen_server). --behaviour(emqx_config_handler). -include("emqx.hrl"). -include("logger.hrl"). @@ -27,22 +26,19 @@ -boot_mnesia({mnesia, [boot]}). --export([post_config_update/4]). - --export([ start_link/0 - , stop/0 +-export([start_link/0 ]). - --export([format/1]). 
- %% API -export([ activate/1 , activate/2 + , activate/3 , deactivate/1 , deactivate/2 + , deactivate/3 , delete_all_deactivated_alarms/0 , get_alarms/0 , get_alarms/1 + , format/1 ]). %% gen_server callbacks @@ -56,34 +52,19 @@ -record(activated_alarm, { name :: binary() | atom(), - details :: map() | list(), - message :: binary(), - activate_at :: integer() }). -record(deactivated_alarm, { activate_at :: integer(), - name :: binary() | atom(), - details :: map() | list(), - message :: binary(), - deactivate_at :: integer() | infinity }). --record(state, { - timer :: reference() - }). - --define(ACTIVATED_ALARM, emqx_activated_alarm). - --define(DEACTIVATED_ALARM, emqx_deactivated_alarm). - -ifdef(TEST). -compile(export_all). -compile(nowarn_export_all). @@ -114,20 +95,23 @@ mnesia(boot) -> start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). -stop() -> - gen_server:stop(?MODULE). - activate(Name) -> activate(Name, #{}). activate(Name, Details) -> - gen_server:call(?MODULE, {activate_alarm, Name, Details}). + activate(Name, Details, <<"">>). + +activate(Name, Details, Message) -> + gen_server:call(?MODULE, {activate_alarm, Name, Details, Message}). deactivate(Name) -> - gen_server:call(?MODULE, {deactivate_alarm, Name, no_details}). + deactivate(Name, no_details, <<"">>). deactivate(Name, Details) -> - gen_server:call(?MODULE, {deactivate_alarm, Name, Details}). + deactivate(Name, Details, <<"">>). + +deactivate(Name, Details, Message) -> + gen_server:call(?MODULE, {deactivate_alarm, Name, Details, Message}). delete_all_deactivated_alarms() -> gen_server:call(?MODULE, delete_all_deactivated_alarms). @@ -144,12 +128,10 @@ get_alarms(activated) -> get_alarms(deactivated) -> gen_server:call(?MODULE, {get_alarms, deactivated}). -post_config_update(_, #{validity_period := Period0}, _OldConf, _AppEnv) -> - ?MODULE ! {update_timer, Period0}, - ok. - format(#activated_alarm{name = Name, message = Message, activate_at = At, details = Details}) -> Now = erlang:system_time(microsecond), + %% mnesia db stored microsecond for high frequency alarm + %% format for dashboard using millisecond #{ node => node(), name => Name, @@ -159,20 +141,19 @@ format(#activated_alarm{name = Name, message = Message, activate_at = At, detail details => Details }; format(#deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details, - deactivate_at = DAt}) -> + deactivate_at = DAt}) -> #{ node => node(), name => Name, message => Message, - duration => DAt - At, + duration => (DAt - At) div 1000, %% to millisecond activate_at => to_rfc3339(At), deactivate_at => to_rfc3339(DAt), details => Details - }; -format(_) -> - {error, unknow_alarm}. + }. to_rfc3339(Timestamp) -> + %% rfc3339 accuracy to millisecond list_to_binary(calendar:system_time_to_rfc3339(Timestamp div 1000, [{unit, millisecond}])). %%-------------------------------------------------------------------- @@ -180,85 +161,72 @@ to_rfc3339(Timestamp) -> %%-------------------------------------------------------------------- init([]) -> - _ = mria:wait_for_tables([?ACTIVATED_ALARM, ?DEACTIVATED_ALARM]), + ok = mria:wait_for_tables([?ACTIVATED_ALARM, ?DEACTIVATED_ALARM]), deactivate_all_alarms(), - ok = emqx_config_handler:add_handler([alarm], ?MODULE), - {ok, #state{timer = ensure_timer(undefined, get_validity_period())}}. + {ok, #{}, get_validity_period()}. -%% suppress dialyzer warning due to dirty read/write race condition. -%% TODO: change from dirty_read/write to transactional. -%% TODO: handle mnesia write errors. 
--dialyzer([{nowarn_function, [handle_call/3]}]). -handle_call({activate_alarm, Name, Details}, _From, State) -> - case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of - [#activated_alarm{name = Name}] -> - {reply, {error, already_existed}, State}; - [] -> - Alarm = #activated_alarm{name = Name, - details = Details, - message = normalize_message(Name, Details), - activate_at = erlang:system_time(microsecond)}, - mria:dirty_write(?ACTIVATED_ALARM, Alarm), +handle_call({activate_alarm, Name, Details, Message}, _From, State) -> + Res = mria:transaction(mria:local_content_shard(), + fun create_activate_alarm/3, + [Name, Details, Message]), + case Res of + {atomic, Alarm} -> do_actions(activate, Alarm, emqx:get_config([alarm, actions])), - {reply, ok, State} + {reply, ok, State, get_validity_period()}; + {aborted, Reason} -> + {reply, Reason, State, get_validity_period()} end; -handle_call({deactivate_alarm, Name, Details}, _From, State) -> +handle_call({deactivate_alarm, Name, Details, Message}, _From, State) -> case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of [] -> {reply, {error, not_found}, State}; [Alarm] -> - deactivate_alarm(Details, Alarm), - {reply, ok, State} + deactivate_alarm(Alarm, Details, Message), + {reply, ok, State, get_validity_period()} end; handle_call(delete_all_deactivated_alarms, _From, State) -> clear_table(?DEACTIVATED_ALARM), - {reply, ok, State}; + {reply, ok, State, get_validity_period()}; handle_call({get_alarms, all}, _From, State) -> {atomic, Alarms} = mria:ro_transaction( - ?COMMON_SHARD, + mria:local_content_shard(), fun() -> [normalize(Alarm) || Alarm <- ets:tab2list(?ACTIVATED_ALARM) ++ ets:tab2list(?DEACTIVATED_ALARM)] end), - {reply, Alarms, State}; + {reply, Alarms, State, get_validity_period()}; handle_call({get_alarms, activated}, _From, State) -> Alarms = [normalize(Alarm) || Alarm <- ets:tab2list(?ACTIVATED_ALARM)], - {reply, Alarms, State}; + {reply, Alarms, State, get_validity_period()}; handle_call({get_alarms, deactivated}, _From, State) -> Alarms = [normalize(Alarm) || Alarm <- ets:tab2list(?DEACTIVATED_ALARM)], - {reply, Alarms, State}; + {reply, Alarms, State, get_validity_period()}; -handle_call(Req, _From, State) -> - ?SLOG(error, #{msg => "unexpected_call", call => Req}), - {reply, ignored, State}. +handle_call(Req, From, State) -> + ?SLOG(error, #{msg => "unexpected_call", call_req => Req, from => From}), + {reply, ignored, State, get_validity_period()}. handle_cast(Msg, State) -> - ?SLOG(error, #{msg => "unexpected_cast", cast => Msg}), - {noreply, State}. + ?SLOG(error, #{msg => "unexpected_cast", cast_req => Msg}), + {noreply, State, get_validity_period()}. -handle_info({timeout, _TRef, delete_expired_deactivated_alarm}, - #state{timer = TRef} = State) -> +handle_info(timeout, State) -> Period = get_validity_period(), delete_expired_deactivated_alarms(erlang:system_time(microsecond) - Period * 1000), - {noreply, State#state{timer = ensure_timer(TRef, Period)}}; - -handle_info({update_timer, Period}, #state{timer = TRef} = State) -> - ?SLOG(warning, #{msg => "validity_timer_updated", period => Period}), - {noreply, State#state{timer = ensure_timer(TRef, Period)}}; + {noreply, State, Period}; handle_info(Info, State) -> - ?SLOG(error, #{msg => "unexpected_info", info => Info}), - {noreply, State}. + ?SLOG(error, #{msg => "unexpected_info", info_req => Info}), + {noreply, State, get_validity_period()}. terminate(_Reason, _State) -> - ok = emqx_config_handler:remove_handler([alarm]), ok. 
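The alarm server above drops its explicit timer and instead returns an idle timeout from every gen_server callback, handling the resulting `timeout` message in `handle_info/2`. A minimal, self-contained sketch of that OTP idiom, for readers unfamiliar with it (the module and `do_cleanup/0` below are made-up illustrations, not part of this patch):

-module(my_cleanup_server).
-behaviour(gen_server).
-export([start_link/0, ping/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2]).

-define(IDLE_MS, 60_000).

start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

ping() -> gen_server:call(?MODULE, ping).

init([]) ->
    {ok, #{}, ?IDLE_MS}.              %% arm the first idle timeout

handle_call(ping, _From, State) ->
    {reply, pong, State, ?IDLE_MS}.   %% every callback must re-arm the timeout

handle_cast(_Msg, State) ->
    {noreply, State, ?IDLE_MS}.

handle_info(timeout, State) ->
    %% delivered only if no other message arrived within ?IDLE_MS
    do_cleanup(),
    {noreply, State, ?IDLE_MS};
handle_info(_Info, State) ->
    {noreply, State, ?IDLE_MS}.

do_cleanup() -> ok.                   %% e.g. delete expired records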
code_change(_OldVsn, State, _Extra) -> @@ -271,8 +239,21 @@ code_change(_OldVsn, State, _Extra) -> get_validity_period() -> emqx:get_config([alarm, validity_period]). -deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name, - details = Details0, message = Msg0}) -> +create_activate_alarm(Name, Details, Message) -> + case mnesia:read(?ACTIVATED_ALARM, Name) of + [#activated_alarm{name = Name}] -> + mnesia:abort({error, already_existed}); + [] -> + Alarm = #activated_alarm{name = Name, + details = Details, + message = normalize_message(Name, iolist_to_binary(Message)), + activate_at = erlang:system_time(microsecond)}, + ok = mnesia:write(?ACTIVATED_ALARM, Alarm, write), + Alarm + end. + +deactivate_alarm(#activated_alarm{activate_at = ActivateAt, name = Name, + details = Details0, message = Msg0}, Details, Message) -> SizeLimit = emqx:get_config([alarm, size_limit]), case SizeLimit > 0 andalso (mnesia:table_info(?DEACTIVATED_ALARM, size) >= SizeLimit) of true -> @@ -286,7 +267,7 @@ deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name HistoryAlarm = make_deactivated_alarm(ActivateAt, Name, Details0, Msg0, erlang:system_time(microsecond)), DeActAlarm = make_deactivated_alarm(ActivateAt, Name, Details, - normalize_message(Name, Details), + normalize_message(Name, iolist_to_binary(Message)), erlang:system_time(microsecond)), mria:dirty_write(?DEACTIVATED_ALARM, HistoryAlarm), mria:dirty_delete(?ACTIVATED_ALARM, Name), @@ -329,13 +310,6 @@ clear_table(TableName) -> ok end. -ensure_timer(OldTRef, Period) -> - _ = case is_reference(OldTRef) of - true -> erlang:cancel_timer(OldTRef); - false -> ok - end, - emqx_misc:start_timer(Period, delete_expired_deactivated_alarm). - delete_expired_deactivated_alarms(Checkpoint) -> delete_expired_deactivated_alarms(mnesia:dirty_first(?DEACTIVATED_ALARM), Checkpoint). @@ -368,16 +342,12 @@ do_actions(deactivate, Alarm = #deactivated_alarm{name = Name}, [log | More]) -> do_actions(deactivate, Alarm, More); do_actions(Operation, Alarm, [publish | More]) -> Topic = topic(Operation), - {ok, Payload} = encode_to_json(Alarm), + {ok, Payload} = emqx_json:safe_encode(normalize(Alarm)), Message = emqx_message:make(?MODULE, 0, Topic, Payload, #{sys => true}, #{properties => #{'Content-Type' => <<"application/json">>}}), - %% TODO log failed publishes _ = emqx_broker:safe_publish(Message), do_actions(Operation, Alarm, More). -encode_to_json(Alarm) -> - emqx_json:safe_encode(normalize(Alarm)). - topic(activate) -> emqx_topic:systop(<<"alarms/activate">>); topic(deactivate) -> @@ -405,25 +375,6 @@ normalize(#deactivated_alarm{activate_at = ActivateAt, deactivate_at => DeactivateAt, activated => false}. 
-normalize_message(Name, no_details) -> +normalize_message(Name, <<"">>) -> list_to_binary(io_lib:format("~p", [Name])); -normalize_message(runq_overload, #{node := Node, runq_length := Len}) -> - list_to_binary(io_lib:format("VM is overloaded on node: ~p: ~p", [Node, Len])); -normalize_message(high_system_memory_usage, #{high_watermark := HighWatermark}) -> - list_to_binary(io_lib:format("System memory usage is higher than ~p%", [HighWatermark])); -normalize_message(high_process_memory_usage, #{high_watermark := HighWatermark}) -> - list_to_binary(io_lib:format("Process memory usage is higher than ~p%", [HighWatermark])); -normalize_message(high_cpu_usage, #{usage := Usage}) -> - list_to_binary(io_lib:format("~ts cpu usage", [Usage])); -normalize_message(too_many_processes, #{usage := Usage}) -> - list_to_binary(io_lib:format("~ts process usage", [Usage])); -normalize_message(cluster_rpc_apply_failed, #{tnx_id := TnxId}) -> - list_to_binary(io_lib:format("cluster_rpc_apply_failed:~w", [TnxId])); -normalize_message(partition, #{occurred := Node}) -> - list_to_binary(io_lib:format("Partition occurs at node ~ts", [Node])); -normalize_message(<<"resource", _/binary>>, #{type := Type, id := ID}) -> - list_to_binary(io_lib:format("Resource ~ts(~ts) is down", [Type, ID])); -normalize_message(<<"conn_congestion/", Info/binary>>, _) -> - list_to_binary(io_lib:format("connection congested: ~ts", [Info])); -normalize_message(_Name, _UnknownDetails) -> - <<"Unknown alarm">>. +normalize_message(_Name, Message) -> Message. diff --git a/apps/emqx/src/emqx_alarm_handler.erl b/apps/emqx/src/emqx_alarm_handler.erl index f02f59721..057fef095 100644 --- a/apps/emqx/src/emqx_alarm_handler.erl +++ b/apps/emqx/src/emqx_alarm_handler.erl @@ -56,14 +56,18 @@ init(_) -> {ok, []}. handle_event({set_alarm, {system_memory_high_watermark, []}}, State) -> + HighWatermark = emqx_os_mon:get_sysmem_high_watermark(), + Message = to_bin("System memory usage is higher than ~p%", [HighWatermark]), emqx_alarm:activate(high_system_memory_usage, - #{high_watermark => emqx_os_mon:get_sysmem_high_watermark()}), + #{high_watermark => HighWatermark}, Message), {ok, State}; handle_event({set_alarm, {process_memory_high_watermark, Pid}}, State) -> + HighWatermark = emqx_os_mon:get_procmem_high_watermark(), + Message = to_bin("Process memory usage is higher than ~p%", [HighWatermark]), emqx_alarm:activate(high_process_memory_usage, #{pid => list_to_binary(pid_to_list(Pid)), - high_watermark => emqx_os_mon:get_procmem_high_watermark()}), + high_watermark => HighWatermark}, Message), {ok, State}; handle_event({clear_alarm, system_memory_high_watermark}, State) -> @@ -75,7 +79,9 @@ handle_event({clear_alarm, process_memory_high_watermark}, State) -> {ok, State}; handle_event({set_alarm, {?LC_ALARM_ID_RUNQ, Info}}, State) -> - emqx_alarm:activate(runq_overload, Info), + #{node := Node, runq_length := Len} = Info, + Message = to_bin("VM is overloaded on node: ~p: ~p", [Node, Len]), + emqx_alarm:activate(runq_overload, Info, Message), {ok, State}; handle_event({clear_alarm, ?LC_ALARM_ID_RUNQ}, State) -> @@ -95,3 +101,6 @@ terminate(swap, _State) -> {emqx_alarm_handler, []}; terminate(_, _) -> ok. + +to_bin(Format, Args) -> + io_lib:format(Format, Args). diff --git a/apps/emqx/src/emqx_app.erl b/apps/emqx/src/emqx_app.erl index d1090803d..4e130ef36 100644 --- a/apps/emqx/src/emqx_app.erl +++ b/apps/emqx/src/emqx_app.erl @@ -30,7 +30,6 @@ ]). -include("emqx.hrl"). --include("emqx_release.hrl"). -include("logger.hrl"). -define(APP, emqx). 
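For context on the alarm changes above: callers now pass a pre-rendered message to `emqx_alarm:activate/3`, and `normalize_message/2` only falls back to printing the alarm name when the message is empty. A hedged usage sketch (the function name, alarm name, details map and message text are invented examples, not values used in this patch):

report_disk_alarm(UsedPercent) when UsedPercent >= 90 ->
    %% returns ok, or {error, already_existed} if the alarm is already active
    emqx_alarm:activate(disk_almost_full,
                        #{disk => <<"/dev/vda1">>, used_percent => UsedPercent},
                        <<"Disk /dev/vda1 is almost full">>);
report_disk_alarm(_UsedPercent) ->
    %% returns ok, or {error, not_found} if the alarm was never active
    emqx_alarm:deactivate(disk_almost_full).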
@@ -40,6 +39,7 @@ %%-------------------------------------------------------------------- start(_Type, _Args) -> + ok = emqx_release:put_edition(), ok = maybe_load_config(), ok = emqx_persistent_session:init_db_backend(), ok = maybe_start_quicer(), @@ -107,30 +107,7 @@ is_quicer_app_present() -> is_quic_listener_configured() -> emqx_listeners:has_enabled_listener_conf_by_type(quic). -get_description() -> - {ok, Descr0} = application:get_key(?APP, description), - case os:getenv("EMQX_DESCRIPTION") of - false -> Descr0; - "" -> Descr0; - Str -> string:strip(Str, both, $\n) - end. +get_description() -> emqx_release:description(). get_release() -> - case lists:keyfind(emqx_vsn, 1, ?MODULE:module_info(compile)) of - false -> %% For TEST build or depedency build. - release_in_macro(); - {_, Vsn} -> %% For emqx release build - VsnStr = release_in_macro(), - case string:str(Vsn, VsnStr) of - 1 -> ok; - _ -> - erlang:error(#{ reason => version_mismatch - , source => VsnStr - , built_for => Vsn - }) - end, - Vsn - end. - -release_in_macro() -> - element(2, ?EMQX_RELEASE). + emqx_release:version(). diff --git a/apps/emqx/src/emqx_authentication.erl b/apps/emqx/src/emqx_authentication.erl index 226d697d8..77a5e2cee 100644 --- a/apps/emqx/src/emqx_authentication.erl +++ b/apps/emqx/src/emqx_authentication.erl @@ -25,6 +25,8 @@ -include("emqx.hrl"). -include("logger.hrl"). +-include_lib("stdlib/include/ms_transform.hrl"). + %% The authentication entrypoint. -export([ authenticate/2 ]). @@ -45,6 +47,7 @@ , delete_chain/1 , lookup_chain/1 , list_chains/0 + , list_chain_names/0 , create_authenticator/2 , delete_authenticator/2 , update_authenticator/3 @@ -76,8 +79,8 @@ ]). %% proxy callback --export([ pre_config_update/2 - , post_config_update/4 +-export([ pre_config_update/3 + , post_config_update/5 ]). -export_type([ authenticator_id/0 @@ -92,9 +95,6 @@ -define(CHAINS_TAB, emqx_authn_chains). --define(VER_1, <<"1">>). --define(VER_2, <<"2">>). - -type chain_name() :: atom(). -type authenticator_id() :: binary(). -type position() :: top | bottom | {before, authenticator_id()}. @@ -120,10 +120,10 @@ %% parse and validate it, and reutrn parsed result. -callback check_config(config()) -> config(). --callback create(Config) +-callback create(AuthenticatorID, Config) -> {ok, State} | {error, term()} - when Config::config(), State::state(). + when AuthenticatorID::authenticator_id(), Config::config(), State::state(). -callback update(Config, State) -> {ok, NewState} @@ -192,29 +192,6 @@ authenticate(#{listener := Listener, protocol := Protocol} = Credential, _AuthRe NAuthenticators -> do_authenticate(NAuthenticators, Credential) end. -do_authenticate([], _) -> - {stop, {error, not_authorized}}; -do_authenticate([#authenticator{id = ID, provider = Provider, state = State} | More], Credential) -> - try Provider:authenticate(Credential, State) of - ignore -> - do_authenticate(More, Credential); - Result -> - %% {ok, Extra} - %% {ok, Extra, AuthData} - %% {continue, AuthCache} - %% {continue, AuthData, AuthCache} - %% {error, Reason} - {stop, Result} - catch - Class:Reason:Stacktrace -> - ?SLOG(warning, #{msg => "unexpected_error_in_authentication", - exception => Class, - reason => Reason, - stacktrace => Stacktrace, - authenticator => ID}), - do_authenticate(More, Credential) - end. 
- get_authenticators(Listener, Global) -> case ets:lookup(?CHAINS_TAB, Listener) of [#chain{authenticators = Authenticators}] -> @@ -235,11 +212,11 @@ get_enabled(Authenticators) -> %% APIs %%------------------------------------------------------------------------------ -pre_config_update(UpdateReq, OldConfig) -> - emqx_authentication_config:pre_config_update(UpdateReq, OldConfig). +pre_config_update(Path, UpdateReq, OldConfig) -> + emqx_authentication_config:pre_config_update(Path, UpdateReq, OldConfig). -post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) -> - emqx_authentication_config:post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs). +post_config_update(Path, UpdateReq, NewConfig, OldConfig, AppEnvs) -> + emqx_authentication_config:post_config_update(Path, UpdateReq, NewConfig, OldConfig, AppEnvs). %% @doc Get all registered authentication providers. get_providers() -> @@ -274,6 +251,9 @@ initialize_authentication(ChainName, AuthenticatorsConfig) -> -spec start_link() -> {ok, pid()} | ignore | {error, term()}. start_link() -> + %% Create chains ETS table here so that it belongs to the supervisor + %% and survives `emqx_authentication` crashes. + ok = create_chain_table(), gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). -spec stop() -> ok. @@ -312,13 +292,24 @@ delete_chain(Name) -> -spec lookup_chain(chain_name()) -> {ok, chain()} | {error, term()}. lookup_chain(Name) -> - call({lookup_chain, Name}). + case ets:lookup(?CHAINS_TAB, Name) of + [] -> + {error, {not_found, {chain, Name}}}; + [Chain] -> + {ok, serialize_chain(Chain)} + end. -spec list_chains() -> {ok, [chain()]}. list_chains() -> Chains = ets:tab2list(?CHAINS_TAB), {ok, [serialize_chain(Chain) || Chain <- Chains]}. +-spec list_chain_names() -> {ok, [atom()]}. +list_chain_names() -> + Select = ets:fun2ms(fun(#chain{name = Name}) -> Name end), + ChainNames = ets:select(?CHAINS_TAB, Select), + {ok, ChainNames}. + -spec create_authenticator(chain_name(), config()) -> {ok, authenticator()} | {error, term()}. create_authenticator(ChainName, Config) -> call({create_authenticator, ChainName, Config}). @@ -327,11 +318,13 @@ create_authenticator(ChainName, Config) -> delete_authenticator(ChainName, AuthenticatorID) -> call({delete_authenticator, ChainName, AuthenticatorID}). --spec update_authenticator(chain_name(), authenticator_id(), config()) -> {ok, authenticator()} | {error, term()}. +-spec update_authenticator(chain_name(), authenticator_id(), config()) -> + {ok, authenticator()} | {error, term()}. update_authenticator(ChainName, AuthenticatorID, Config) -> call({update_authenticator, ChainName, AuthenticatorID, Config}). --spec lookup_authenticator(chain_name(), authenticator_id()) -> {ok, authenticator()} | {error, term()}. +-spec lookup_authenticator(chain_name(), authenticator_id()) -> + {ok, authenticator()} | {error, term()}. lookup_authenticator(ChainName, AuthenticatorID) -> case ets:lookup(?CHAINS_TAB, ChainName) of [] -> @@ -362,7 +355,8 @@ move_authenticator(ChainName, AuthenticatorID, Position) -> import_users(ChainName, AuthenticatorID, Filename) -> call({import_users, ChainName, AuthenticatorID, Filename}). --spec add_user(chain_name(), authenticator_id(), user_info()) -> {ok, user_info()} | {error, term()}. +-spec add_user(chain_name(), authenticator_id(), user_info()) -> + {ok, user_info()} | {error, term()}. add_user(ChainName, AuthenticatorID, UserInfo) -> call({add_user, ChainName, AuthenticatorID, UserInfo}). 
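The new `list_chain_names/0` above is what the added `ms_transform` include is for. A small stand-alone sketch of the `ets:fun2ms/1` + `ets:select/2` pattern it uses (the module, table and record here are hypothetical, for illustration only):

-module(ms_demo).
-include_lib("stdlib/include/ms_transform.hrl").  %% fun2ms/1 is a parse transform
-export([adult_names/1]).

-record(user, {name, age}).

%% Return only the `name` field of users aged 18 or over stored in Tab.
adult_names(Tab) ->
    MS = ets:fun2ms(fun(#user{name = Name, age = Age}) when Age >= 18 -> Name end),
    ets:select(Tab, MS).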
@@ -370,11 +364,13 @@ add_user(ChainName, AuthenticatorID, UserInfo) -> delete_user(ChainName, AuthenticatorID, UserID) -> call({delete_user, ChainName, AuthenticatorID, UserID}). --spec update_user(chain_name(), authenticator_id(), binary(), map()) -> {ok, user_info()} | {error, term()}. +-spec update_user(chain_name(), authenticator_id(), binary(), map()) -> + {ok, user_info()} | {error, term()}. update_user(ChainName, AuthenticatorID, UserID, NewUserInfo) -> call({update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}). --spec lookup_user(chain_name(), authenticator_id(), binary()) -> {ok, user_info()} | {error, term()}. +-spec lookup_user(chain_name(), authenticator_id(), binary()) -> + {ok, user_info()} | {error, term()}. lookup_user(ChainName, AuthenticatorID, UserID) -> call({lookup_user, ChainName, AuthenticatorID, UserID}). @@ -387,9 +383,6 @@ list_users(ChainName, AuthenticatorID, Params) -> %%-------------------------------------------------------------------- init(_Opts) -> - _ = ets:new(?CHAINS_TAB, [ named_table, set, public - , {keypos, #chain.name} - , {read_concurrency, true}]), ok = emqx_config_handler:add_handler([authentication], ?MODULE), ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE), {ok, #{hooked => false, providers => #{}}}. @@ -427,95 +420,36 @@ handle_call({delete_chain, Name}, _From, State) -> [] -> reply({error, {not_found, {chain, Name}}}, State); [#chain{authenticators = Authenticators}] -> - _ = [do_delete_authenticator(Authenticator) || Authenticator <- Authenticators], + _ = [do_destroy_authenticator(Authenticator) || Authenticator <- Authenticators], true = ets:delete(?CHAINS_TAB, Name), reply(ok, maybe_unhook(State)) end; -handle_call({lookup_chain, Name}, _From, State) -> - case ets:lookup(?CHAINS_TAB, Name) of - [] -> - reply({error, {not_found, {chain, Name}}}, State); - [Chain] -> - reply({ok, serialize_chain(Chain)}, State) - end; - handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Providers} = State) -> - UpdateFun = - fun(#chain{authenticators = Authenticators} = Chain) -> - AuthenticatorID = authenticator_id(Config), - case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of - true -> - {error, {already_exists, {authenticator, AuthenticatorID}}}; - false -> - case do_create_authenticator(ChainName, AuthenticatorID, Config, Providers) of - {ok, Authenticator} -> - NAuthenticators = Authenticators ++ [Authenticator#authenticator{enable = maps:get(enable, Config)}], - true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}), - {ok, serialize_authenticator(Authenticator)}; - {error, Reason} -> - {error, Reason} - end - end - end, + UpdateFun = fun(Chain) -> + handle_create_authenticator(Chain, Config, Providers) + end, Reply = update_chain(ChainName, UpdateFun), reply(Reply, maybe_hook(State)); handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) -> - UpdateFun = - fun(#chain{authenticators = Authenticators} = Chain) -> - case lists:keytake(AuthenticatorID, #authenticator.id, Authenticators) of - false -> - {error, {not_found, {authenticator, AuthenticatorID}}}; - {value, Authenticator, NAuthenticators} -> - _ = do_delete_authenticator(Authenticator), - true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}), - ok - end - end, + UpdateFun = fun(Chain) -> + handle_delete_authenticator(Chain, AuthenticatorID) + end, Reply = update_chain(ChainName, UpdateFun), reply(Reply, maybe_unhook(State)); 
handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, State) -> - UpdateFun = - fun(#chain{authenticators = Authenticators} = Chain) -> - case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of - false -> - {error, {not_found, {authenticator, AuthenticatorID}}}; - #authenticator{provider = Provider, - state = #{version := Version} = ST} = Authenticator -> - case AuthenticatorID =:= authenticator_id(Config) of - true -> - Unique = unique(ChainName, AuthenticatorID, Version), - case Provider:update(Config#{'_unique' => Unique}, ST) of - {ok, NewST} -> - NewAuthenticator = Authenticator#authenticator{state = switch_version(NewST), - enable = maps:get(enable, Config)}, - NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators), - true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NewAuthenticators}), - {ok, serialize_authenticator(NewAuthenticator)}; - {error, Reason} -> - {error, Reason} - end; - false -> - {error, change_of_authentication_type_is_not_allowed} - end - end - end, + UpdateFun = fun(Chain) -> + handle_update_authenticator(Chain, AuthenticatorID, Config) + end, Reply = update_chain(ChainName, UpdateFun), reply(Reply, State); handle_call({move_authenticator, ChainName, AuthenticatorID, Position}, _From, State) -> - UpdateFun = - fun(#chain{authenticators = Authenticators} = Chain) -> - case do_move_authenticator(AuthenticatorID, Authenticators, Position) of - {ok, NAuthenticators} -> - true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}), - ok; - {error, Reason} -> - {error, Reason} - end - end, + UpdateFun = fun(Chain) -> + handle_move_authenticator(Chain, AuthenticatorID, Position) + end, Reply = update_chain(ChainName, UpdateFun), reply(Reply, State); @@ -569,9 +503,115 @@ terminate(Reason, _State) -> code_change(_OldVsn, State, _Extra) -> {ok, State}. +%%------------------------------------------------------------------------------ +%% Private functions +%%------------------------------------------------------------------------------ + +handle_update_authenticator(Chain, AuthenticatorID, Config) -> + #chain{authenticators = Authenticators} = Chain, + case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of + false -> + {error, {not_found, {authenticator, AuthenticatorID}}}; + #authenticator{provider = Provider, state = ST} = Authenticator -> + case AuthenticatorID =:= authenticator_id(Config) of + true -> + case Provider:update(Config, ST) of + {ok, NewST} -> + NewAuthenticator = Authenticator#authenticator{ + state = NewST, + enable = maps:get(enable, Config)}, + NewAuthenticators = replace_authenticator( + AuthenticatorID, + NewAuthenticator, + Authenticators), + true = ets:insert( + ?CHAINS_TAB, + Chain#chain{authenticators = NewAuthenticators}), + {ok, serialize_authenticator(NewAuthenticator)}; + {error, Reason} -> + {error, Reason} + end; + false -> + {error, change_of_authentication_type_is_not_allowed} + end + end. + +handle_delete_authenticator(Chain, AuthenticatorID) -> + MatchFun = fun(#authenticator{id = ID}) -> + ID =:= AuthenticatorID + end, + case do_delete_authenticators(MatchFun, Chain) of + [] -> {error, {not_found, {authenticator, AuthenticatorID}}}; + [AuthenticatorID] -> ok + end. 
+ +handle_move_authenticator(Chain, AuthenticatorID, Position) -> + #chain{authenticators = Authenticators} = Chain, + case do_move_authenticator(AuthenticatorID, Authenticators, Position) of + {ok, NAuthenticators} -> + true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}), + ok; + {error, Reason} -> + {error, Reason} + end. + +handle_create_authenticator(Chain, Config, Providers) -> + #chain{authenticators = Authenticators} = Chain, + AuthenticatorID = authenticator_id(Config), + case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of + true -> + {error, {already_exists, {authenticator, AuthenticatorID}}}; + false -> + case do_create_authenticator(AuthenticatorID, Config, Providers) of + {ok, Authenticator} -> + NAuthenticators = + Authenticators ++ + [Authenticator#authenticator{enable = maps:get(enable, Config)}], + true = ets:insert(?CHAINS_TAB, + Chain#chain{authenticators = NAuthenticators}), + {ok, serialize_authenticator(Authenticator)}; + {error, Reason} -> + {error, Reason} + end + end. + +do_authenticate([], _) -> + {stop, {error, not_authorized}}; +do_authenticate([#authenticator{id = ID, provider = Provider, state = State} | More], Credential) -> + try Provider:authenticate(Credential, State) of + ignore -> + do_authenticate(More, Credential); + Result -> + %% {ok, Extra} + %% {ok, Extra, AuthData} + %% {continue, AuthCache} + %% {continue, AuthData, AuthCache} + %% {error, Reason} + {stop, Result} + catch + Class:Reason:Stacktrace -> + ?SLOG(warning, #{msg => "unexpected_error_in_authentication", + exception => Class, + reason => Reason, + stacktrace => Stacktrace, + authenticator => ID}), + do_authenticate(More, Credential) + end. + + reply(Reply, State) -> {reply, Reply, State}. +create_chain_table() -> + try + _ = ets:new(?CHAINS_TAB, [named_table, set, public, + {keypos, #chain.name}, + {read_concurrency, true}]), + ok + catch + error:badarg -> ok + end. + global_chain(mqtt) -> 'mqtt:global'; global_chain('mqtt-sn') -> @@ -611,25 +651,35 @@ maybe_unhook(#{hooked := true} = State) -> maybe_unhook(State) -> State. -do_create_authenticator(ChainName, AuthenticatorID, #{enable := Enable} = Config, Providers) -> +do_create_authenticator(AuthenticatorID, #{enable := Enable} = Config, Providers) -> case maps:get(authn_type(Config), Providers, undefined) of undefined -> {error, no_available_provider}; Provider -> - Unique = unique(ChainName, AuthenticatorID, ?VER_1), - case Provider:create(Config#{'_unique' => Unique}) of + case Provider:create(AuthenticatorID, Config) of {ok, State} -> Authenticator = #authenticator{id = AuthenticatorID, provider = Provider, enable = Enable, - state = switch_version(State)}, + state = State}, {ok, Authenticator}; {error, Reason} -> {error, Reason} end end. -do_delete_authenticator(#authenticator{provider = Provider, state = State}) -> +do_delete_authenticators(MatchFun, #chain{authenticators = Authenticators} = Chain) -> + {Matching, Others} = lists:partition(MatchFun, Authenticators), + + MatchingIDs = lists:map( + fun(#authenticator{id = ID}) -> ID end, + Matching), + + ok = lists:foreach(fun do_destroy_authenticator/1, Matching), + true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = Others}), + MatchingIDs. + +do_destroy_authenticator(#authenticator{provider = Provider, state = State}) -> _ = Provider:destroy(State), ok. @@ -702,17 +752,6 @@ serialize_authenticator(#authenticator{id = ID, , state => State }. 
-unique(ChainName, AuthenticatorID, Version) ->
-    NChainName = atom_to_binary(ChainName),
-    <<NChainName/binary, "/", AuthenticatorID/binary, ":", Version/binary>>.
-
-switch_version(State = #{version := ?VER_1}) ->
-    State#{version := ?VER_2};
-switch_version(State = #{version := ?VER_2}) ->
-    State#{version := ?VER_1};
-switch_version(State) ->
-    State#{version => ?VER_2}.
-
 authn_type(#{mechanism := Mechanism, backend := Backend}) ->
     {Mechanism, Backend};
 authn_type(#{mechanism := Mechanism}) ->
diff --git a/apps/emqx/src/emqx_authentication_config.erl b/apps/emqx/src/emqx_authentication_config.erl
index 24abb951c..a7fa5673a 100644
--- a/apps/emqx/src/emqx_authentication_config.erl
+++ b/apps/emqx/src/emqx_authentication_config.erl
@@ -19,8 +19,8 @@
 
 -behaviour(emqx_config_handler).
 
--export([ pre_config_update/2
-        , post_config_update/4
+-export([ pre_config_update/3
+        , post_config_update/5
         ]).
 
 -export([ authenticator_id/1
@@ -53,9 +53,9 @@
 %% Callbacks of config handler
 %%------------------------------------------------------------------------------
 
--spec pre_config_update(update_request(), emqx_config:raw_config())
+-spec pre_config_update(list(atom()), update_request(), emqx_config:raw_config())
     -> {ok, map() | list()} | {error, term()}.
-pre_config_update(UpdateReq, OldConfig) ->
+pre_config_update(_, UpdateReq, OldConfig) ->
     try do_pre_config_update(UpdateReq, to_list(OldConfig)) of
         {error, Reason} -> {error, Reason};
         {ok, NewConfig} -> {ok, return_map(NewConfig)}
@@ -102,34 +102,34 @@ do_pre_config_update({move_authenticator, _ChainName, AuthenticatorID, Position}
         end
     end.
 
--spec post_config_update(update_request(), map() | list(), emqx_config:raw_config(), emqx_config:app_envs())
+-spec post_config_update(list(atom()), update_request(), map() | list(), emqx_config:raw_config(), emqx_config:app_envs())
     -> ok | {ok, map()} | {error, term()}.
-post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) ->
+post_config_update(_, UpdateReq, NewConfig, OldConfig, AppEnvs) ->
     do_post_config_update(UpdateReq, check_configs(to_list(NewConfig)), OldConfig, AppEnvs).
-do_post_config_update({create_authenticator, ChainName, Config}, _NewConfig, _OldConfig, _AppEnvs) -> - NConfig = check_config(Config), +do_post_config_update({create_authenticator, ChainName, Config}, NewConfig, _OldConfig, _AppEnvs) -> + NConfig = get_authenticator_config(authenticator_id(Config), NewConfig), _ = emqx_authentication:create_chain(ChainName), emqx_authentication:create_authenticator(ChainName, NConfig); do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, OldConfig, _AppEnvs) -> case emqx_authentication:delete_authenticator(ChainName, AuthenticatorID) of ok -> - [Config] = [Config0 || Config0 <- to_list(OldConfig), AuthenticatorID == authenticator_id(Config0)], + Config = get_authenticator_config(AuthenticatorID, to_list(OldConfig)), CertsDir = certs_dir(ChainName, AuthenticatorID), ok = clear_certs(CertsDir, Config); {error, Reason} -> {error, Reason} end; -do_post_config_update({update_authenticator, ChainName, AuthenticatorID, Config}, _NewConfig, _OldConfig, _AppEnvs) -> - NConfig = check_config(Config), - emqx_authentication:update_authenticator(ChainName, AuthenticatorID, NConfig); +do_post_config_update({update_authenticator, ChainName, AuthenticatorID, Config}, NewConfig, _OldConfig, _AppEnvs) -> + case get_authenticator_config(authenticator_id(Config), NewConfig) of + {error, not_found} -> + {error, {not_found, {authenticator, AuthenticatorID}}}; + NConfig -> + emqx_authentication:update_authenticator(ChainName, AuthenticatorID, NConfig) + end; do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}, _NewConfig, _OldConfig, _AppEnvs) -> emqx_authentication:move_authenticator(ChainName, AuthenticatorID, Position). -check_config(Config) -> - [Checked] = check_configs([Config]), - Checked. - check_configs(Configs) -> Providers = emqx_authentication:get_providers(), lists:map(fun(C) -> do_check_conifg(C, Providers) end, Configs). @@ -208,6 +208,12 @@ clear_certs(CertsDir, Config) -> OldSSL = maps:get(<<"ssl">>, Config, undefined), ok = emqx_tls_lib:delete_ssl_files(CertsDir, undefined, OldSSL). +get_authenticator_config(AuthenticatorID, AuthenticatorsConfig) -> + case [C0 || C0 <- AuthenticatorsConfig, AuthenticatorID == authenticator_id(C0)] of + [C | _] -> C; + [] -> {error, not_found} + end. + split_by_id(ID, AuthenticatorsConfig) -> case lists:foldl( fun(C, {P1, P2, F0}) -> @@ -268,4 +274,3 @@ dir(ChainName, ID) when is_binary(ID) -> binary:replace(iolist_to_binary([to_bin(ChainName), "-", ID]), <<":">>, <<"-">>); dir(ChainName, Config) when is_map(Config) -> dir(ChainName, authenticator_id(Config)). - diff --git a/apps/emqx/src/emqx_banned.erl b/apps/emqx/src/emqx_banned.erl index dfd299d90..53a71736a 100644 --- a/apps/emqx/src/emqx_banned.erl +++ b/apps/emqx/src/emqx_banned.erl @@ -37,6 +37,7 @@ , info/1 , format/1 , parse/1 + , to_timestamp/1 ]). 
%% gen_server callbacks @@ -108,8 +109,8 @@ parse(Params) -> Who = pares_who(Params), By = maps:get(<<"by">>, Params, <<"mgmt_api">>), Reason = maps:get(<<"reason">>, Params, <<"">>), - At = pares_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)), - Until = pares_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60), + At = parse_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)), + Until = parse_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60), #banned{ who = Who, by = By, @@ -120,15 +121,15 @@ parse(Params) -> pares_who(#{as := As, who := Who}) -> pares_who(#{<<"as">> => As, <<"who">> => Who}); -pares_who(#{<<"as">> := <<"peerhost">>, <<"who">> := Peerhost0}) -> +pares_who(#{<<"as">> := peerhost, <<"who">> := Peerhost0}) -> {ok, Peerhost} = inet:parse_address(binary_to_list(Peerhost0)), {peerhost, Peerhost}; pares_who(#{<<"as">> := As, <<"who">> := Who}) -> - {binary_to_atom(As, utf8), Who}. + {As, Who}. -pares_time(undefined, Default) -> +parse_time(undefined, Default) -> Default; -pares_time(Rfc3339, _Default) -> +parse_time(Rfc3339, _Default) -> to_timestamp(Rfc3339). maybe_format_host({peerhost, Host}) -> @@ -145,19 +146,36 @@ to_timestamp(Rfc3339) when is_binary(Rfc3339) -> to_timestamp(Rfc3339) -> calendar:rfc3339_to_system_time(Rfc3339, [{unit, second}]). --spec(create(emqx_types:banned() | map()) -> ok). +-spec(create(emqx_types:banned() | map()) -> + {ok, emqx_types:banned()} | {error, {already_exist, emqx_types:banned()}}). create(#{who := Who, by := By, reason := Reason, at := At, until := Until}) -> - mria:dirty_write(?BANNED_TAB, #banned{who = Who, - by = By, - reason = Reason, - at = At, - until = Until}); -create(Banned) when is_record(Banned, banned) -> - mria:dirty_write(?BANNED_TAB, Banned). + Banned = #banned{ + who = Who, + by = By, + reason = Reason, + at = At, + until = Until + }, + create(Banned); + +create(Banned = #banned{who = Who}) -> + case look_up(Who) of + [] -> + mria:dirty_write(?BANNED_TAB, Banned), + {ok, Banned}; + [OldBanned = #banned{until = Until}] -> + case Until < erlang:system_time(second) of + true -> + {error, {already_exist, OldBanned}}; + false -> + mria:dirty_write(?BANNED_TAB, Banned), + {ok, Banned} + end + end. look_up(Who) when is_map(Who) -> look_up(pares_who(Who)); diff --git a/apps/emqx/src/emqx_broker.erl b/apps/emqx/src/emqx_broker.erl index c74fa22e7..4ae61d8e5 100644 --- a/apps/emqx/src/emqx_broker.erl +++ b/apps/emqx/src/emqx_broker.erl @@ -81,7 +81,7 @@ -define(SUBSCRIPTION, emqx_subscription). %% Guards --define(is_subid(Id), (is_binary(Id) orelse is_atom(Id))). +-define(IS_SUBID(Id), (is_binary(Id) orelse is_atom(Id))). -spec(start_link(atom(), pos_integer()) -> startlink_ret()). start_link(Pool, Id) -> @@ -117,15 +117,17 @@ subscribe(Topic) when is_binary(Topic) -> subscribe(Topic, undefined). -spec(subscribe(emqx_types:topic(), emqx_types:subid() | emqx_types:subopts()) -> ok). -subscribe(Topic, SubId) when is_binary(Topic), ?is_subid(SubId) -> +subscribe(Topic, SubId) when is_binary(Topic), ?IS_SUBID(SubId) -> subscribe(Topic, SubId, ?DEFAULT_SUBOPTS); subscribe(Topic, SubOpts) when is_binary(Topic), is_map(SubOpts) -> subscribe(Topic, undefined, SubOpts). -spec(subscribe(emqx_types:topic(), emqx_types:subid(), emqx_types:subopts()) -> ok). 
-subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?is_subid(SubId), is_map(SubOpts0) -> +subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?IS_SUBID(SubId), is_map(SubOpts0) -> SubOpts = maps:merge(?DEFAULT_SUBOPTS, SubOpts0), - case ets:member(?SUBOPTION, {SubPid = self(), Topic}) of + _ = emqx_trace:subscribe(Topic, SubId, SubOpts), + SubPid = self(), + case ets:member(?SUBOPTION, {SubPid, Topic}) of false -> %% New ok = emqx_broker_helper:register_sub(SubPid, SubId), do_subscribe(Topic, SubPid, with_subid(SubId, SubOpts)); @@ -171,6 +173,7 @@ unsubscribe(Topic) when is_binary(Topic) -> case ets:lookup(?SUBOPTION, {SubPid, Topic}) of [{_, SubOpts}] -> _ = emqx_broker_helper:reclaim_seq(Topic), + _ = emqx_trace:unsubscribe(Topic, SubOpts), do_unsubscribe(Topic, SubPid, SubOpts); [] -> ok end. @@ -198,7 +201,7 @@ do_unsubscribe(Group, Topic, SubPid, _SubOpts) -> -spec(publish(emqx_types:message()) -> emqx_types:publish_result()). publish(Msg) when is_record(Msg, message) -> - _ = emqx_tracer:trace(publish, Msg), + _ = emqx_trace:publish(Msg), emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'), case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of #message{headers = #{allow_publish := false}} -> @@ -267,7 +270,7 @@ aggre(Routes) -> end, [], Routes). %% @doc Forward message to another node. --spec(forward(node(), emqx_types:topic(), emqx_types:delivery(), RpcMode::sync|async) +-spec(forward(node(), emqx_types:topic(), emqx_types:delivery(), RpcMode::sync | async) -> emqx_types:deliver_result()). forward(Node, To, Delivery, async) -> case emqx_rpc:cast(To, Node, ?BROKER, dispatch, [To, Delivery]) of @@ -380,14 +383,14 @@ subscriptions(SubId) -> -spec(subscribed(pid() | emqx_types:subid(), emqx_types:topic()) -> boolean()). subscribed(SubPid, Topic) when is_pid(SubPid) -> ets:member(?SUBOPTION, {SubPid, Topic}); -subscribed(SubId, Topic) when ?is_subid(SubId) -> +subscribed(SubId, Topic) when ?IS_SUBID(SubId) -> SubPid = emqx_broker_helper:lookup_subpid(SubId), ets:member(?SUBOPTION, {SubPid, Topic}). -spec(get_subopts(pid(), emqx_types:topic()) -> maybe(emqx_types:subopts())). get_subopts(SubPid, Topic) when is_pid(SubPid), is_binary(Topic) -> lookup_value(?SUBOPTION, {SubPid, Topic}); -get_subopts(SubId, Topic) when ?is_subid(SubId) -> +get_subopts(SubId, Topic) when ?IS_SUBID(SubId) -> case emqx_broker_helper:lookup_subpid(SubId) of SubPid when is_pid(SubPid) -> get_subopts(SubPid, Topic); @@ -455,7 +458,8 @@ handle_call({subscribe, Topic}, _From, State) -> {reply, Ok, State}; handle_call({subscribe, Topic, I}, _From, State) -> - Ok = case get(Shard = {Topic, I}) of + Shard = {Topic, I}, + Ok = case get(Shard) of undefined -> _ = put(Shard, true), true = ets:insert(?SUBSCRIBER, {Topic, {shard, I}}), @@ -512,4 +516,3 @@ code_change(_OldVsn, State, _Extra) -> %%-------------------------------------------------------------------- %% Internal functions %%-------------------------------------------------------------------- - diff --git a/apps/emqx/src/emqx_channel.erl b/apps/emqx/src/emqx_channel.erl index 7df7cd42d..50f9fcf70 100644 --- a/apps/emqx/src/emqx_channel.erl +++ b/apps/emqx/src/emqx_channel.erl @@ -103,7 +103,7 @@ -type(reply() :: {outgoing, emqx_types:packet()} | {outgoing, [emqx_types:packet()]} - | {event, conn_state()|updated} + | {event, conn_state() | updated} | {close, Reason :: atom()}). -type(replies() :: emqx_types:packet() | reply() | [reply()]). 
@@ -132,7 +132,7 @@ info(Channel) -> maps:from_list(info(?INFO_KEYS, Channel)). --spec(info(list(atom())|atom(), channel()) -> term()). +-spec(info(list(atom()) | atom(), channel()) -> term()). info(Keys, Channel) when is_list(Keys) -> [{Key, info(Key, Channel)} || Key <- Keys]; info(conninfo, #channel{conninfo = ConnInfo}) -> @@ -287,7 +287,7 @@ handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = ConnState}) handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connecting}) -> handle_out(connack, ?RC_PROTOCOL_ERROR, Channel); -handle_in(?CONNECT_PACKET(ConnPkt), Channel) -> +handle_in(?CONNECT_PACKET(ConnPkt) = Packet, Channel) -> case pipeline([fun overload_protection/2, fun enrich_conninfo/2, fun run_conn_hooks/2, @@ -297,6 +297,7 @@ handle_in(?CONNECT_PACKET(ConnPkt), Channel) -> fun check_banned/2 ], ConnPkt, Channel#channel{conn_state = connecting}) of {ok, NConnPkt, NChannel = #channel{clientinfo = ClientInfo}} -> + ?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]), NChannel1 = NChannel#channel{ will_msg = emqx_packet:will_msg(NConnPkt), alias_maximum = init_alias_maximum(NConnPkt, ClientInfo) @@ -328,17 +329,23 @@ handle_in(Packet = ?AUTH_PACKET(ReasonCode, _Properties), connecting -> process_connect(NProperties, ensure_connected(NChannel)); _ -> - handle_out(auth, {?RC_SUCCESS, NProperties}, NChannel#channel{conn_state = connected}) + handle_out( auth + , {?RC_SUCCESS, NProperties} + , NChannel#channel{conn_state = connected} + ) end; {continue, NProperties, NChannel} -> - handle_out(auth, {?RC_CONTINUE_AUTHENTICATION, NProperties}, NChannel#channel{conn_state = reauthenticating}); + handle_out( auth + , {?RC_CONTINUE_AUTHENTICATION, NProperties} + , NChannel#channel{conn_state = reauthenticating} + ); {error, NReasonCode} -> case ConnState of connecting -> handle_out(connack, NReasonCode, Channel); _ -> handle_out(disconnect, NReasonCode, Channel) - end + end end catch _Class:_Reason -> @@ -632,7 +639,7 @@ do_publish(PacketId, Msg = #message{qos = ?QOS_2}, ?SLOG(warning, #{ msg => "dropped_qos2_packet", reason => emqx_reason_codes:name(RC), - packetId => PacketId + packet_id => PacketId }), ok = emqx_metrics:inc('packets.publish.dropped'), handle_out(pubrec, {PacketId, RC}, Channel) @@ -655,7 +662,7 @@ ensure_quota(PubRes, Channel = #channel{quota = Limiter}) -> -compile({inline, [puback_reason_code/1]}). puback_reason_code([]) -> ?RC_NO_MATCHING_SUBSCRIBERS; -puback_reason_code([_|_]) -> ?RC_SUCCESS. +puback_reason_code([_ | _]) -> ?RC_SUCCESS. -compile({inline, [after_message_acked/3]}). after_message_acked(ClientInfo, Msg, PubAckProps) -> @@ -674,7 +681,7 @@ process_subscribe(TopicFilters, SubProps, Channel) -> process_subscribe([], _SubProps, Channel, Acc) -> {lists:reverse(Acc), Channel}; -process_subscribe([Topic = {TopicFilter, SubOpts}|More], SubProps, Channel, Acc) -> +process_subscribe([Topic = {TopicFilter, SubOpts} | More], SubProps, Channel, Acc) -> case check_sub_caps(TopicFilter, SubOpts, Channel) of ok -> {ReasonCode, NChannel} = do_subscribe(TopicFilter, @@ -716,9 +723,9 @@ process_unsubscribe(TopicFilters, UnSubProps, Channel) -> process_unsubscribe([], _UnSubProps, Channel, Acc) -> {lists:reverse(Acc), Channel}; -process_unsubscribe([{TopicFilter, SubOpts}|More], UnSubProps, Channel, Acc) -> +process_unsubscribe([{TopicFilter, SubOpts} | More], UnSubProps, Channel, Acc) -> {RC, NChannel} = do_unsubscribe(TopicFilter, SubOpts#{unsub_props => UnSubProps}, Channel), - process_unsubscribe(More, UnSubProps, NChannel, [RC|Acc]). 
+ process_unsubscribe(More, UnSubProps, NChannel, [RC | Acc]). do_unsubscribe(TopicFilter, SubOpts, Channel = #channel{clientinfo = ClientInfo = #{mountpoint := MountPoint}, @@ -790,7 +797,9 @@ handle_deliver(Delivers, Channel = #channel{takeover = true, pendings = Pendings, session = Session, clientinfo = #{clientid := ClientId}}) -> - NPendings = lists:append(Pendings, emqx_session:ignore_local(maybe_nack(Delivers), ClientId, Session)), + NPendings = lists:append( + Pendings, + emqx_session:ignore_local(maybe_nack(Delivers), ClientId, Session)), {ok, Channel#channel{pendings = NPendings}}; handle_deliver(Delivers, Channel = #channel{session = Session, @@ -995,6 +1004,17 @@ handle_call({quota, Policy}, Channel) -> Quota = emqx_limiter:init(Zone, Policy), reply(ok, Channel#channel{quota = Quota}); +handle_call({keepalive, Interval}, Channel = #channel{keepalive = KeepAlive, + conninfo = ConnInfo}) -> + ClientId = info(clientid, Channel), + NKeepalive = emqx_keepalive:set(interval, Interval * 1000, KeepAlive), + NConnInfo = maps:put(keepalive, Interval, ConnInfo), + NChannel = Channel#channel{keepalive = NKeepalive, conninfo = NConnInfo}, + SockInfo = maps:get(sockinfo, emqx_cm:get_chan_info(ClientId), #{}), + ChanInfo1 = info(NChannel), + emqx_cm:set_chan_info(ClientId, ChanInfo1#{sockinfo => SockInfo}), + reply(ok, reset_timer(alive_timer, NChannel)); + handle_call(Req, Channel) -> ?SLOG(error, #{msg => "unexpected_call", call => Req}), reply(ignored, Channel). @@ -1045,7 +1065,7 @@ handle_info(clean_authz_cache, Channel) -> handle_info(die_if_test = Info, Channel) -> die_if_test_compiled(), - ?LOG(error, "Unexpected info: ~p", [Info]), + ?SLOG(error, #{msg => "unexpected_info", info => Info}), {ok, Channel}; handle_info(Info, Channel) -> @@ -1125,7 +1145,7 @@ handle_timeout(_TRef, expire_quota_limit, Channel) -> {ok, clean_timer(quota_timer, Channel)}; handle_timeout(_TRef, Msg, Channel) -> - ?SLOG(error, #{msg => "unexpected_timeout", timeout_message => Msg}), + ?SLOG(error, #{msg => "unexpected_timeout", timeout_msg => Msg}), {ok, Channel}. %%-------------------------------------------------------------------- @@ -1179,20 +1199,26 @@ terminate(_, #channel{conn_state = idle}) -> ok; terminate(normal, Channel) -> run_terminate_hook(normal, Channel); terminate({shutdown, kicked}, Channel) -> - _ = emqx_persistent_session:persist(Channel#channel.clientinfo, - Channel#channel.conninfo, - Channel#channel.session), run_terminate_hook(kicked, Channel); terminate({shutdown, Reason}, Channel) when Reason =:= discarded; Reason =:= takeovered -> run_terminate_hook(Reason, Channel); terminate(Reason, Channel = #channel{will_msg = WillMsg}) -> (WillMsg =/= undefined) andalso publish_will_msg(WillMsg), - _ = emqx_persistent_session:persist(Channel#channel.clientinfo, - Channel#channel.conninfo, - Channel#channel.session), + (Reason =:= expired) andalso persist_if_session(Channel), run_terminate_hook(Reason, Channel). +persist_if_session(#channel{session = Session} = Channel) -> + case emqx_session:is_session(Session) of + true -> + _ = emqx_persistent_session:persist(Channel#channel.clientinfo, + Channel#channel.conninfo, + Channel#channel.session), + ok; + false -> + ok + end. + run_terminate_hook(_Reason, #channel{session = undefined}) -> ok; run_terminate_hook(Reason, #channel{clientinfo = ClientInfo, session = Session}) -> emqx_session:terminate(ClientInfo, Reason, Session). 
@@ -1359,17 +1385,20 @@ authenticate(?AUTH_PACKET(_, #{'Authentication-Method' := AuthMethod} = Properti {error, ?RC_BAD_AUTHENTICATION_METHOD} end. -do_authenticate(#{auth_method := AuthMethod} = Credential, #channel{clientinfo = ClientInfo} = Channel) -> +do_authenticate(#{auth_method := AuthMethod} = Credential, + #channel{clientinfo = ClientInfo} = Channel) -> Properties = #{'Authentication-Method' => AuthMethod}, case emqx_access_control:authenticate(Credential) of {ok, Result} -> {ok, Properties, - Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)}, - auth_cache = #{}}}; + Channel#channel{ + clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)}, + auth_cache = #{}}}; {ok, Result, AuthData} -> {ok, Properties#{'Authentication-Data' => AuthData}, - Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)}, - auth_cache = #{}}}; + Channel#channel{ + clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)}, + auth_cache = #{}}}; {continue, AuthCache} -> {continue, Properties, Channel#channel{auth_cache = AuthCache}}; {continue, AuthData, AuthCache} -> @@ -1606,6 +1635,8 @@ ensure_connected(Channel = #channel{conninfo = ConnInfo, clientinfo = ClientInfo}) -> NConnInfo = ConnInfo#{connected_at => erlang:system_time(millisecond)}, ok = run_hooks('client.connected', [ClientInfo, NConnInfo]), + ChanPid = self(), + emqx_cm:mark_channel_connected(ChanPid), Channel#channel{conninfo = NConnInfo, conn_state = connected }. @@ -1691,6 +1722,8 @@ ensure_disconnected(Reason, Channel = #channel{conninfo = ConnInfo, clientinfo = ClientInfo}) -> NConnInfo = ConnInfo#{disconnected_at => erlang:system_time(millisecond)}, ok = run_hooks('client.disconnected', [ClientInfo, Reason, NConnInfo]), + ChanPid = self(), + emqx_cm:mark_channel_disconnected(ChanPid), Channel#channel{conninfo = NConnInfo, conn_state = disconnected}. %%-------------------------------------------------------------------- diff --git a/apps/emqx/src/emqx_cm.erl b/apps/emqx/src/emqx_cm.erl index 83f77050a..f1209dd6f 100644 --- a/apps/emqx/src/emqx_cm.erl +++ b/apps/emqx/src/emqx_cm.erl @@ -58,7 +58,10 @@ , lookup_channels/2 ]). --export([all_channels/0]). +%% Test/debug interface +-export([ all_channels/0 + , all_client_ids/0 + ]). %% gen_server callbacks -export([ init/1 @@ -70,7 +73,12 @@ ]). %% Internal export --export([stats_fun/0]). +-export([ stats_fun/0 + , clean_down/1 + , mark_channel_connected/1 + , mark_channel_disconnected/1 + , get_connected_client_count/0 + ]). -type(chan_pid() :: pid()). @@ -78,11 +86,13 @@ -define(CHAN_TAB, emqx_channel). -define(CHAN_CONN_TAB, emqx_channel_conn). -define(CHAN_INFO_TAB, emqx_channel_info). +-define(CHAN_LIVE_TAB, emqx_channel_live). -define(CHAN_STATS, [{?CHAN_TAB, 'channels.count', 'channels.max'}, {?CHAN_TAB, 'sessions.count', 'sessions.max'}, - {?CHAN_CONN_TAB, 'connections.count', 'connections.max'} + {?CHAN_CONN_TAB, 'connections.count', 'connections.max'}, + {?CHAN_LIVE_TAB, 'live_connections.count', 'live_connections.max'} ]). %% Batch drain @@ -91,7 +101,14 @@ %% Server name -define(CM, ?MODULE). --define(T_TAKEOVER, 15000). +-define(T_KICK, 5_000). +-define(T_GET_INFO, 5_000). +-define(T_TAKEOVER, 15_000). + +%% linting overrides +-elvis([ {elvis_style, invalid_dynamic_call, #{ignore => [emqx_cm]}} + , {elvis_style, god_modules, #{ignore => [emqx_cm]}} + ]). %% @doc Start the channel manager. -spec(start_link() -> startlink_ret()). 
@@ -162,7 +179,7 @@ get_chan_info(ClientId, ChanPid) when node(ChanPid) == node() -> error:badarg -> undefined end; get_chan_info(ClientId, ChanPid) -> - rpc_call(node(ChanPid), get_chan_info, [ClientId, ChanPid]). + rpc_call(node(ChanPid), get_chan_info, [ClientId, ChanPid], ?T_GET_INFO). %% @doc Update infos of the channel. -spec(set_chan_info(emqx_types:clientid(), emqx_types:attrs()) -> boolean()). @@ -187,7 +204,7 @@ get_chan_stats(ClientId, ChanPid) when node(ChanPid) == node() -> error:badarg -> undefined end; get_chan_stats(ClientId, ChanPid) -> - rpc_call(node(ChanPid), get_chan_stats, [ClientId, ChanPid]). + rpc_call(node(ChanPid), get_chan_stats, [ClientId, ChanPid], ?T_GET_INFO). %% @doc Set channel's stats. -spec(set_chan_stats(emqx_types:clientid(), emqx_types:stats()) -> boolean()). @@ -236,7 +253,10 @@ open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) -> pendings => Pendings}}; {living, ConnMod, ChanPid, Session} -> ok = emqx_session:resume(ClientInfo, Session), - Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session), + Session1 = emqx_persistent_session:persist( ClientInfo + , ConnInfo + , Session + ), Pendings = ConnMod:call(ChanPid, {takeover, 'end'}, ?T_TAKEOVER), register_channel(ClientId, Self, ConnInfo), {ok, #{session => Session1, @@ -245,12 +265,18 @@ open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) -> {expired, OldSession} -> _ = emqx_persistent_session:discard(ClientId, OldSession), Session = create_session(ClientInfo, ConnInfo), - Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session), + Session1 = emqx_persistent_session:persist( ClientInfo + , ConnInfo + , Session + ), register_channel(ClientId, Self, ConnInfo), {ok, #{session => Session1, present => false}}; none -> Session = create_session(ClientInfo, ConnInfo), - Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session), + Session1 = emqx_persistent_session:persist( ClientInfo + , ConnInfo + , Session + ), register_channel(ClientId, Self, ConnInfo), {ok, #{session => Session1, present => false}} end @@ -300,7 +326,7 @@ takeover_session(ClientId) -> [ChanPid] -> takeover_session(ClientId, ChanPid); ChanPids -> - [ChanPid|StalePids] = lists:reverse(ChanPids), + [ChanPid | StalePids] = lists:reverse(ChanPids), ?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}), lists:foreach(fun(StalePid) -> catch discard_session(ClientId, StalePid) @@ -308,82 +334,136 @@ takeover_session(ClientId) -> takeover_session(ClientId, ChanPid) end. -takeover_session(ClientId, ChanPid) when node(ChanPid) == node() -> +takeover_session(ClientId, Pid) -> + try do_takeover_session(ClientId, Pid) + catch + _ : noproc -> % emqx_ws_connection: call + emqx_persistent_session:lookup(ClientId); + _ : {noproc, _} -> % emqx_connection: gen_server:call + emqx_persistent_session:lookup(ClientId); + _ : {'EXIT', {noproc, _}} -> % rpc_call/3 + emqx_persistent_session:lookup(ClientId) + end. + +do_takeover_session(ClientId, ChanPid) when node(ChanPid) == node() -> case get_chann_conn_mod(ClientId, ChanPid) of undefined -> emqx_persistent_session:lookup(ClientId); ConnMod when is_atom(ConnMod) -> + %% TODO: if takeover times out, maybe kill the old? Session = ConnMod:call(ChanPid, {takeover, 'begin'}, ?T_TAKEOVER), {living, ConnMod, ChanPid, Session} end; - -takeover_session(ClientId, ChanPid) -> - rpc_call(node(ChanPid), takeover_session, [ClientId, ChanPid]). 
+do_takeover_session(ClientId, ChanPid) ->
+    rpc_call(node(ChanPid), takeover_session, [ClientId, ChanPid], ?T_TAKEOVER).
 
 %% @doc Discard all the sessions identified by the ClientId.
 -spec(discard_session(emqx_types:clientid()) -> ok).
 discard_session(ClientId) when is_binary(ClientId) ->
     case lookup_channels(ClientId) of
         [] -> ok;
-        ChanPids -> lists:foreach(fun(Pid) -> do_discard_session(ClientId, Pid) end, ChanPids)
+        ChanPids -> lists:foreach(fun(Pid) -> discard_session(ClientId, Pid) end, ChanPids)
     end.
 
-do_discard_session(ClientId, Pid) ->
+%% @private Kick a local stale session to force it to step down.
+%% If the kick fails (e.g. timeout), force a kill.
+%% Keeping the stale pid around, returning an error, or raising an exception
+%% benefits nobody.
+-spec kick_or_kill(kick | discard, module(), pid()) -> ok.
+kick_or_kill(Action, ConnMod, Pid) ->
     try
-        discard_session(ClientId, Pid)
+        %% this is essentially a gen_server:call implemented in emqx_connection
+        %% and emqx_ws_connection.
+        %% the handle_call is implemented in emqx_channel
+        ok = apply(ConnMod, call, [Pid, Action, ?T_KICK])
     catch
         _ : noproc -> % emqx_ws_connection: call
-            ?tp(debug, "session_already_gone", #{pid => Pid}),
-            ok;
+            ok = ?tp(debug, "session_already_gone", #{pid => Pid, action => Action});
         _ : {noproc, _} -> % emqx_connection: gen_server:call
-            ?tp(debug, "session_already_gone", #{pid => Pid}),
-            ok;
-        _ : {'EXIT', {noproc, _}} -> % rpc_call/3
-            ?tp(debug, "session_already_gone", #{pid => Pid}),
-            ok;
+            ok = ?tp(debug, "session_already_gone", #{pid => Pid, action => Action});
+        _ : {shutdown, _} ->
+            ok = ?tp(debug, "session_already_shutdown", #{pid => Pid, action => Action});
         _ : {{shutdown, _}, _} ->
-            ?tp(debug, "session_already_shutdown", #{pid => Pid}),
-            ok;
+            ok = ?tp(debug, "session_already_shutdown", #{pid => Pid, action => Action});
+        _ : {timeout, {gen_server, call, _}} ->
+            ?tp(warning, "session_kick_timeout",
+                #{pid => Pid,
+                  action => Action,
+                  stale_channel => stale_channel_info(Pid)
+                 }),
+            ok = force_kill(Pid);
         _ : Error : St ->
-            ?tp(error, "failed_to_discard_session",
-                #{pid => Pid, reason => Error, stacktrace=>St})
+            ?tp(error, "session_kick_exception",
+                #{pid => Pid,
+                  action => Action,
+                  reason => Error,
+                  stacktrace => St,
+                  stale_channel => stale_channel_info(Pid)
+                 }),
+            ok = force_kill(Pid)
     end.
 
-discard_session(ClientId, ChanPid) when node(ChanPid) == node() ->
-    case get_chann_conn_mod(ClientId, ChanPid) of
-        undefined -> ok;
-        ConnMod when is_atom(ConnMod) ->
-            ConnMod:call(ChanPid, discard, ?T_TAKEOVER)
-    end;
+force_kill(Pid) ->
+    exit(Pid, kill),
+    ok.
+
+stale_channel_info(Pid) ->
+    process_info(Pid, [status, message_queue_len, current_stacktrace]).
 
 discard_session(ClientId, ChanPid) ->
-    rpc_call(node(ChanPid), discard_session, [ClientId, ChanPid]).
+    kick_session(discard, ClientId, ChanPid).
+
+kick_session(ClientId, ChanPid) ->
+    kick_session(kick, ClientId, ChanPid).
+
+%% @private This function is shared for session 'kick' and 'discard' (as the first arg Action).
+kick_session(Action, ClientId, ChanPid) when node(ChanPid) == node() ->
+    case get_chann_conn_mod(ClientId, ChanPid) of
+        undefined ->
+            %% already deregistered
+            ok;
+        ConnMod when is_atom(ConnMod) ->
+            ok = kick_or_kill(Action, ConnMod, ChanPid)
+    end;
+kick_session(Action, ClientId, ChanPid) ->
+    %% call remote node on the old APIs because we do not know if they have upgraded
+    %% to have kick_session/3
+    Function = case Action of
+                   discard -> discard_session;
+                   kick -> kick_session
+               end,
+    try
+        rpc_call(node(ChanPid), Function, [ClientId, ChanPid], ?T_KICK)
+    catch
+        Error : Reason ->
+            %% This should mostly be RPC failures.
+            %% However, if the node is still running the old version
+            %% code (prior to emqx app 4.3.10) some of the RPC handler
+            %% exceptions may get propagated to a new version node
+            ?SLOG(error, #{ msg => "failed_to_kick_session_on_remote_node"
+                          , node => node(ChanPid)
+                          , action => Action
+                          , error => Error
+                          , reason => Reason
+                          })
+    end.
 
 kick_session(ClientId) ->
     case lookup_channels(ClientId) of
-        [] -> {error, not_found};
-        [ChanPid] ->
-            kick_session(ClientId, ChanPid);
+        [] ->
+            ?SLOG(warning, #{msg => "kicked_an_unknown_session",
+                             clientid => ClientId}),
+            ok;
         ChanPids ->
-            [ChanPid|StalePids] = lists:reverse(ChanPids),
-            ?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}),
-            lists:foreach(fun(StalePid) ->
-                              catch discard_session(ClientId, StalePid)
-                          end, StalePids),
-            kick_session(ClientId, ChanPid)
+            case length(ChanPids) > 1 of
+                true ->
+                    ?SLOG(warning, #{msg => "more_than_one_channel_found",
+                                     chan_pids => ChanPids});
+                false -> ok
+            end,
+            lists:foreach(fun(Pid) -> kick_session(ClientId, Pid) end, ChanPids)
     end.
 
-kick_session(ClientId, ChanPid) when node(ChanPid) == node() ->
-    case get_chan_info(ClientId, ChanPid) of
-        #{conninfo := #{conn_mod := ConnMod}} ->
-            ConnMod:call(ChanPid, kick, ?T_TAKEOVER);
-        undefined ->
-            {error, not_found}
-    end;
-
-kick_session(ClientId, ChanPid) ->
-    rpc_call(node(ChanPid), kick_session, [ClientId, ChanPid]).
-
 %% @doc Is clean start?
 % is_clean_start(#{clean_start := false}) -> false;
 % is_clean_start(_Attrs) -> true.
@@ -395,11 +475,17 @@ with_channel(ClientId, Fun) ->
         Pids -> Fun(lists:last(Pids))
     end.
 
-%% @doc Get all channels registed.
+%% @doc Get all registered channel pids. Debug/test interface
 all_channels() ->
     Pat = [{{'_', '$1'}, [], ['$1']}],
     ets:select(?CHAN_TAB, Pat).
 
+%% @doc Get all registered client IDs. Debug/test interface
+all_client_ids() ->
+    Pat = [{{'$1', '_'}, [], ['$1']}],
+    ets:select(?CHAN_TAB, Pat).
+
+
 %% @doc Lookup channels.
 -spec(lookup_channels(emqx_types:clientid()) -> list(chan_pid())).
 lookup_channels(ClientId) ->
@@ -419,10 +505,16 @@ lookup_channels(local, ClientId) ->
     [ChanPid || {_, ChanPid} <- ets:lookup(?CHAN_TAB, ClientId)].
 
 %% @private
-rpc_call(Node, Fun, Args) ->
-    case rpc:call(Node, ?MODULE, Fun, Args) of
-        {badrpc, Reason} -> error(Reason);
-        Res -> Res
+rpc_call(Node, Fun, Args, Timeout) ->
+    case rpc:call(Node, ?MODULE, Fun, Args, 2 * Timeout) of
+        {badrpc, Reason} ->
+            %% since emqx app 4.3.10, the 'kick' and 'discard' call handlers
+            %% should catch all exceptions and always return 'ok'.
+            %% This leaves 'badrpc' only possible when there is a problem
+            %% calling the remote node.
+            error({badrpc, Reason});
+        Res ->
+            Res
     end.
%% @private @@ -437,8 +529,10 @@ init([]) -> ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]), ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]), ok = emqx_tables:new(?CHAN_INFO_TAB, [set, compressed | TabOpts]), + ok = emqx_tables:new(?CHAN_LIVE_TAB, [set, {write_concurrency, true} | TabOpts]), ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0), - {ok, #{chan_pmon => emqx_pmon:new()}}. + State = #{chan_pmon => emqx_pmon:new()}, + {ok, State}. handle_call(Req, _From, State) -> ?SLOG(error, #{msg => "unexpected_call", call => Req}), @@ -447,17 +541,21 @@ handle_call(Req, _From, State) -> handle_cast({registered, {ClientId, ChanPid}}, State = #{chan_pmon := PMon}) -> PMon1 = emqx_pmon:monitor(ChanPid, ClientId, PMon), {noreply, State#{chan_pmon := PMon1}}; - handle_cast(Msg, State) -> ?SLOG(error, #{msg => "unexpected_cast", cast => Msg}), {noreply, State}. handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) -> + ?tp(emqx_cm_process_down, #{pid => Pid, reason => _Reason}), ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)], {Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon), - ok = emqx_pool:async_submit(fun lists:foreach/2, [fun clean_down/1, Items]), + lists:foreach( + fun({ChanPid, _ClientID}) -> + mark_channel_disconnected(ChanPid) + end, + Items), + ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]), {noreply, State#{chan_pmon := PMon1}}; - handle_info(Info, State) -> ?SLOG(error, #{msg => "unexpected_info", info => Info}), @@ -492,5 +590,20 @@ get_chann_conn_mod(ClientId, ChanPid) when node(ChanPid) == node() -> error:badarg -> undefined end; get_chann_conn_mod(ClientId, ChanPid) -> - rpc_call(node(ChanPid), get_chann_conn_mod, [ClientId, ChanPid]). + rpc_call(node(ChanPid), get_chann_conn_mod, [ClientId, ChanPid], ?T_GET_INFO). +mark_channel_connected(ChanPid) -> + ?tp(emqx_cm_connected_client_count_inc, #{}), + ets:insert_new(?CHAN_LIVE_TAB, {ChanPid, true}), + ok. + +mark_channel_disconnected(ChanPid) -> + ?tp(emqx_cm_connected_client_count_dec, #{}), + ets:delete(?CHAN_LIVE_TAB, ChanPid), + ok. + +get_connected_client_count() -> + case ets:info(?CHAN_LIVE_TAB, size) of + undefined -> 0; + Size -> Size + end. diff --git a/apps/emqx/src/emqx_config.erl b/apps/emqx/src/emqx_config.erl index 4632c45d2..8806d4bc3 100644 --- a/apps/emqx/src/emqx_config.erl +++ b/apps/emqx/src/emqx_config.erl @@ -16,6 +16,7 @@ -module(emqx_config). -compile({no_auto_import, [get/0, get/1, put/2, erase/1]}). +-elvis([{elvis_style, god_modules, disable}]). -export([ init_load/1 , init_load/2 @@ -138,10 +139,9 @@ get(KeyPath, Default) -> do_get(?CONF, KeyPath, Default). {ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}. find([]) -> Ref = make_ref(), - Res = do_get(?CONF, [], Ref), - case Res =:= Ref of - true -> {not_found, []}; - false -> {ok, Res} + case do_get(?CONF, [], Ref) of + Ref -> {not_found, []}; + Res -> {ok, Res} end; find(KeyPath) -> ?ATOM_CONF_PATH(KeyPath, emqx_map_lib:deep_find(AtomKeyPath, get_root(KeyPath)), @@ -151,10 +151,9 @@ find(KeyPath) -> {ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}. find_raw([]) -> Ref = make_ref(), - Res = do_get(?RAW_CONF, [], Ref), - case Res =:= Ref of - true -> {not_found, []}; - false -> {ok, Res} + case do_get(?RAW_CONF, [], Ref) of + Ref -> {not_found, []}; + Res -> {ok, Res} end; find_raw(KeyPath) -> emqx_map_lib:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)). 
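The find/1 and find_raw/1 rewrite above keeps the trick of using a fresh reference as the not-found default, so a stored value of undefined can still be told apart from an absent key. The same idiom in isolation (function and variable names are illustrative):

    lookup(Key, Map) ->
        Ref = make_ref(),                   %% cannot collide with any stored term
        case maps:get(Key, Map, Ref) of
            Ref -> not_found;               %% the default came back: key is absent
            Val -> {ok, Val}                %% any real value, including 'undefined'
        end.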
@@ -288,8 +287,7 @@ check_config(SchemaMod, RawConf) -> }, {AppEnvs, CheckedConf} = hocon_schema:map_translate(SchemaMod, RawConf, Opts), - Conf = maps:with(maps:keys(RawConf), CheckedConf), - {AppEnvs, emqx_map_lib:unsafe_atom_key_map(Conf)}. + {AppEnvs, emqx_map_lib:unsafe_atom_key_map(CheckedConf)}. -spec fill_defaults(raw_config()) -> map(). fill_defaults(RawConf) -> @@ -349,7 +347,8 @@ get_root_names() -> get_atom_root_names() -> [atom(N) || N <- get_root_names()]. --spec save_configs(app_envs(), config(), raw_config(), raw_config(), update_opts()) -> ok | {error, term()}. +-spec save_configs(app_envs(), config(), raw_config(), raw_config(), update_opts()) -> + ok | {error, term()}. save_configs(_AppEnvs, Conf, RawConf, OverrideConf, Opts) -> %% We may need also support hot config update for the apps that use application envs. %% If that is the case uncomment the following line to update the configs to app env diff --git a/apps/emqx/src/emqx_config_handler.erl b/apps/emqx/src/emqx_config_handler.erl index c75f0ee4d..d99e18e70 100644 --- a/apps/emqx/src/emqx_config_handler.erl +++ b/apps/emqx/src/emqx_config_handler.erl @@ -45,14 +45,14 @@ -type handler_name() :: module(). -type handlers() :: #{emqx_config:config_key() => handlers(), ?MOD => handler_name()}. --optional_callbacks([ pre_config_update/2 - , post_config_update/4 +-optional_callbacks([ pre_config_update/3 + , post_config_update/5 ]). --callback pre_config_update(emqx_config:update_request(), emqx_config:raw_config()) -> +-callback pre_config_update([atom()], emqx_config:update_request(), emqx_config:raw_config()) -> {ok, emqx_config:update_request()} | {error, term()}. --callback post_config_update(emqx_config:update_request(), emqx_config:config(), +-callback post_config_update([atom()], emqx_config:update_request(), emqx_config:config(), emqx_config:config(), emqx_config:app_envs()) -> ok | {ok, Result::any()} | {error, Reason::term()}. @@ -181,14 +181,20 @@ process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) -> Error -> Error end. -do_update_config([], Handlers, OldRawConf, UpdateReq) -> - call_pre_config_update(Handlers, OldRawConf, UpdateReq); -do_update_config([ConfKey | ConfKeyPath], Handlers, OldRawConf, UpdateReq) -> +do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) -> + do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, []). + +do_update_config([], Handlers, OldRawConf, UpdateReq, ConfKeyPath) -> + call_pre_config_update(Handlers, OldRawConf, UpdateReq, ConfKeyPath); +do_update_config([ConfKey | SubConfKeyPath], Handlers, OldRawConf, + UpdateReq, ConfKeyPath0) -> + ConfKeyPath = ConfKeyPath0 ++ [ConfKey], SubOldRawConf = get_sub_config(bin(ConfKey), OldRawConf), SubHandlers = get_sub_handlers(ConfKey, Handlers), - case do_update_config(ConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq) of + case do_update_config(SubConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq, ConfKeyPath) of {ok, NewUpdateReq} -> - call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq}); + call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq}, + ConfKeyPath); Error -> Error end. @@ -211,18 +217,25 @@ check_and_save_configs(SchemaModule, ConfKeyPath, Handlers, NewRawConf, Override Error -> Error end. 
-do_post_config_update([], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) -> - call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs), Result); -do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, - Result) -> +do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) -> + do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, + Result, []). + +do_post_config_update([], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result, + ConfKeyPath) -> + call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs), + Result, ConfKeyPath); +do_post_config_update([ConfKey | SubConfKeyPath], Handlers, OldConf, NewConf, AppEnvs, + UpdateArgs, Result, ConfKeyPath0) -> + ConfKeyPath = ConfKeyPath0 ++ [ConfKey], SubOldConf = get_sub_config(ConfKey, OldConf), SubNewConf = get_sub_config(ConfKey, NewConf), SubHandlers = get_sub_handlers(ConfKey, Handlers), - case do_post_config_update(ConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs, - UpdateArgs, Result) of + case do_post_config_update(SubConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs, + UpdateArgs, Result, ConfKeyPath) of {ok, Result1} -> call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs), - Result1); + Result1, ConfKeyPath); Error -> Error end. @@ -237,22 +250,23 @@ get_sub_config(ConfKey, Conf) when is_map(Conf) -> get_sub_config(_, _Conf) -> %% the Conf is a primitive undefined. -call_pre_config_update(Handlers, OldRawConf, UpdateReq) -> +call_pre_config_update(Handlers, OldRawConf, UpdateReq, ConfKeyPath) -> HandlerName = maps:get(?MOD, Handlers, undefined), - case erlang:function_exported(HandlerName, pre_config_update, 2) of + case erlang:function_exported(HandlerName, pre_config_update, 3) of true -> - case HandlerName:pre_config_update(UpdateReq, OldRawConf) of + case HandlerName:pre_config_update(ConfKeyPath, UpdateReq, OldRawConf) of {ok, NewUpdateReq} -> {ok, NewUpdateReq}; {error, Reason} -> {error, {pre_config_update, HandlerName, Reason}} end; false -> merge_to_old_config(UpdateReq, OldRawConf) end. -call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, UpdateReq, Result) -> +call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, UpdateReq, Result, ConfKeyPath) -> HandlerName = maps:get(?MOD, Handlers, undefined), - case erlang:function_exported(HandlerName, post_config_update, 4) of + case erlang:function_exported(HandlerName, post_config_update, 5) of true -> - case HandlerName:post_config_update(UpdateReq, NewConf, OldConf, AppEnvs) of + case HandlerName:post_config_update(ConfKeyPath, UpdateReq, NewConf, OldConf, + AppEnvs) of ok -> {ok, Result}; {ok, Result1} -> {ok, Result#{HandlerName => Result1}}; diff --git a/apps/emqx/src/emqx_congestion.erl b/apps/emqx/src/emqx_congestion.erl index 170c6bc69..783f4ee4a 100644 --- a/apps/emqx/src/emqx_congestion.erl +++ b/apps/emqx/src/emqx_congestion.erl @@ -78,13 +78,15 @@ cancel_alarm_congestion(Socket, Transport, Channel, Reason) -> do_alarm_congestion(Socket, Transport, Channel, Reason) -> ok = update_alarm_sent_at(Reason), AlarmDetails = tcp_congestion_alarm_details(Socket, Transport, Channel), - emqx_alarm:activate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails), + Message = io_lib:format("connection congested: ~ts", [AlarmDetails]), + emqx_alarm:activate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails, Message), ok. 
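For the emqx_config_handler change above: pre_config_update and post_config_update both gain the config key path as their first argument (arities 3 and 5). A skeleton handler written against the new behaviour, with a made-up module name and no real logic:

    -module(my_conf_handler).
    -behaviour(emqx_config_handler).

    -export([pre_config_update/3, post_config_update/5]).

    %% Inspect or rewrite the raw update request for the given key path.
    pre_config_update(_ConfKeyPath, UpdateReq, _OldRawConf) ->
        {ok, UpdateReq}.

    %% React to the checked config after it has been saved.
    post_config_update(_ConfKeyPath, _UpdateReq, _NewConf, _OldConf, _AppEnvs) ->
        ok.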
do_cancel_alarm_congestion(Socket, Transport, Channel, Reason) -> ok = remove_alarm_sent_at(Reason), AlarmDetails = tcp_congestion_alarm_details(Socket, Transport, Channel), - emqx_alarm:deactivate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails), + Message = io_lib:format("connection congested: ~ts", [AlarmDetails]), + emqx_alarm:deactivate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails, Message), ok. is_tcp_congested(Socket, Transport) -> diff --git a/apps/emqx/src/emqx_connection.erl b/apps/emqx/src/emqx_connection.erl index b01aad468..9ba126fc9 100644 --- a/apps/emqx/src/emqx_connection.erl +++ b/apps/emqx/src/emqx_connection.erl @@ -149,7 +149,7 @@ start_link(Transport, Socket, Options) -> %%-------------------------------------------------------------------- %% @doc Get infos of the connection/channel. --spec(info(pid()|state()) -> emqx_types:infos()). +-spec(info(pid() | state()) -> emqx_types:infos()). info(CPid) when is_pid(CPid) -> call(CPid, info); info(State = #state{channel = Channel}) -> @@ -176,7 +176,7 @@ info(limiter, #state{limiter = Limiter}) -> maybe_apply(fun emqx_limiter:info/1, Limiter). %% @doc Get stats of the connection/channel. --spec(stats(pid()|state()) -> emqx_types:stats()). +-spec(stats(pid() | state()) -> emqx_types:stats()). stats(CPid) when is_pid(CPid) -> call(CPid, stats); stats(#state{transport = Transport, @@ -373,7 +373,7 @@ cancel_stats_timer(State) -> State. process_msg([], State) -> {ok, State}; -process_msg([Msg|More], State) -> +process_msg([Msg | More], State) -> try case handle_msg(Msg, State) of ok -> @@ -475,7 +475,7 @@ handle_msg({Passive, _Sock}, State) handle_msg(Deliver = {deliver, _Topic, _Msg}, #state{ listener = {Type, Listener}} = State) -> ActiveN = get_active_n(Type, Listener), - Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)], + Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)], with_channel(handle_deliver, [Delivers], State); %% Something sent @@ -540,7 +540,7 @@ terminate(Reason, State = #state{channel = Channel, transport = Transport, ?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S}) end, ?tp(info, terminate, #{reason => Reason}), - maybe_raise_excption(Reason). + maybe_raise_exception(Reason). %% close socket, discard new state, always return ok. close_socket_ok(State) -> @@ -548,12 +548,12 @@ close_socket_ok(State) -> ok. %% tell truth about the original exception -maybe_raise_excption(#{exception := Exception, +maybe_raise_exception(#{exception := Exception, context := Context, stacktrace := Stacktrace }) -> erlang:raise(Exception, Context, Stacktrace); -maybe_raise_excption(Reason) -> +maybe_raise_exception(Reason) -> exit(Reason). 
%%-------------------------------------------------------------------- @@ -649,7 +649,7 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) -> {Packets, State#state{parse_state = NParseState}}; {ok, Packet, Rest, NParseState} -> NState = State#state{parse_state = NParseState}, - parse_incoming(Rest, [Packet|Packets], NState) + parse_incoming(Rest, [Packet | Packets], NState) catch throw : ?FRAME_PARSE_ERROR(Reason) -> ?SLOG(info, #{ reason => Reason @@ -679,7 +679,7 @@ next_incoming_msgs(Packets) -> handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) -> ok = inc_incoming_stats(Packet), - ?SLOG(debug, #{msg => "RECV_packet", packet => Packet}), + ?SLOG(debug, #{msg => "RECV_packet", packet => emqx_packet:format(Packet)}), with_channel(handle_in, [Packet], State); handle_incoming(FrameError, State) -> @@ -752,7 +752,7 @@ send(IoData, #state{transport = Transport, socket = Socket, channel = Channel}) ok = emqx_metrics:inc('bytes.sent', Oct), inc_counter(outgoing_bytes, Oct), emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel), - case Transport:async_send(Socket, IoData, [nosuspend]) of + case Transport:async_send(Socket, IoData, []) of ok -> ok; Error = {error, _Reason} -> %% Send an inet_reply to postpone handling the error diff --git a/apps/emqx/src/emqx_flapping.erl b/apps/emqx/src/emqx_flapping.erl index 0b4611c4c..600144adc 100644 --- a/apps/emqx/src/emqx_flapping.erl +++ b/apps/emqx/src/emqx_flapping.erl @@ -129,7 +129,8 @@ handle_cast({detected, #flapping{clientid = ClientId, reason = <<"flapping is detected">>, at = Now, until = Now + (Interval div 1000)}, - emqx_banned:create(Banned); + {ok, _} = emqx_banned:create(Banned), + ok; false -> ?SLOG(warning, #{ msg => "client_disconnected", diff --git a/apps/emqx/src/emqx_hooks.erl b/apps/emqx/src/emqx_hooks.erl index 7817a9b2d..3a91ed8cb 100644 --- a/apps/emqx/src/emqx_hooks.erl +++ b/apps/emqx/src/emqx_hooks.erl @@ -77,6 +77,8 @@ priority :: integer() }). +-type(callback() :: #callback{}). + -record(hook, { name :: hookpoint(), callbacks :: list(#callback{}) @@ -112,7 +114,7 @@ callback_priority(#callback{priority= P}) -> P. %%-------------------------------------------------------------------- %% @doc Register a callback --spec(add(hookpoint(), action() | #callback{}) -> ok_or_error(already_exists)). +-spec(add(hookpoint(), action() | callback()) -> ok_or_error(already_exists)). add(HookPoint, Callback) when is_record(Callback, callback) -> gen_server:call(?SERVER, {add, HookPoint, Callback}, infinity); add(HookPoint, Action) when is_function(Action); is_tuple(Action) -> @@ -131,7 +133,7 @@ add(HookPoint, Action, Filter, Priority) when is_integer(Priority) -> add(HookPoint, #callback{action = Action, filter = Filter, priority = Priority}). %% @doc Like add/2, it register a callback, discard 'already_exists' error. --spec(put(hookpoint(), action() | #callback{}) -> ok). +-spec(put(hookpoint(), action() | callback()) -> ok). put(HookPoint, Callback) when is_record(Callback, callback) -> case add(HookPoint, Callback) of ok -> ok; @@ -211,7 +213,7 @@ safe_execute({M, F, A}, Args) -> exception => Error, reason => Reason, stacktrace => Stacktrace, - failed_call => {M, F, A} + failed_call => {M, F, Args ++ A} }) end. @@ -220,7 +222,7 @@ execute({M, F, A}, Args) -> erlang:apply(M, F, Args ++ A). %% @doc Lookup callbacks. --spec(lookup(hookpoint()) -> [#callback{}]). +-spec(lookup(hookpoint()) -> [callback()]). 
lookup(HookPoint) -> case ets:lookup(?TAB, HookPoint) of [#hook{callbacks = Callbacks}] -> @@ -292,10 +294,10 @@ add_callback(C, Callbacks) -> add_callback(C, Callbacks, []). add_callback(C, [], Acc) -> - lists:reverse([C|Acc]); -add_callback(C1 = #callback{priority = P1}, [C2 = #callback{priority = P2}|More], Acc) + lists:reverse([C | Acc]); +add_callback(C1 = #callback{priority = P1}, [C2 = #callback{priority = P2} | More], Acc) when P1 =< P2 -> - add_callback(C1, More, [C2|Acc]); + add_callback(C1, More, [C2 | Acc]); add_callback(C1, More, Acc) -> lists:append(lists:reverse(Acc), [C1 | More]). @@ -310,4 +312,3 @@ del_callback(Action = {M, F}, [#callback{action = {M, F, _A}} | Callbacks], Acc) del_callback(Action, Callbacks, Acc); del_callback(Action, [Callback | Callbacks], Acc) -> del_callback(Action, Callbacks, [Callback | Acc]). - diff --git a/apps/emqx/src/emqx_keepalive.erl b/apps/emqx/src/emqx_keepalive.erl index 8fba00f50..7ec424d1d 100644 --- a/apps/emqx/src/emqx_keepalive.erl +++ b/apps/emqx/src/emqx_keepalive.erl @@ -20,8 +20,11 @@ , info/1 , info/2 , check/2 + , set/3 ]). +-elvis([{elvis_style, no_if_expression, disable}]). + -export_type([keepalive/0]). -record(keepalive, { @@ -49,7 +52,7 @@ info(#keepalive{interval = Interval, repeat => Repeat }. --spec(info(interval|statval|repeat, keepalive()) +-spec(info(interval | statval | repeat, keepalive()) -> non_neg_integer()). info(interval, #keepalive{interval = Interval}) -> Interval; @@ -71,3 +74,7 @@ check(NewVal, KeepAlive = #keepalive{statval = OldVal, true -> {error, timeout} end. +%% @doc Update keepalive's interval +-spec(set(interval, non_neg_integer(), keepalive()) -> keepalive()). +set(interval, Interval, KeepAlive) -> + KeepAlive#keepalive{interval = Interval}. diff --git a/apps/emqx/src/emqx_listeners.erl b/apps/emqx/src/emqx_listeners.erl index 187a55fdd..2b9a76fe3 100644 --- a/apps/emqx/src/emqx_listeners.erl +++ b/apps/emqx/src/emqx_listeners.erl @@ -17,6 +17,8 @@ %% @doc Start/Stop MQTT listeners. -module(emqx_listeners). +-elvis([{elvis_style, dont_repeat_yourself, #{min_complexity => 10000}}]). + -include("emqx_mqtt.hrl"). -include("logger.hrl"). @@ -28,6 +30,7 @@ , is_running/1 , current_conns/2 , max_conns/2 + , id_example/0 ]). -export([ start_listener/1 @@ -43,11 +46,23 @@ , parse_listener_id/1 ]). --export([post_config_update/4]). +-export([post_config_update/5]). -define(CONF_KEY_PATH, [listeners]). -define(TYPES_STRING, ["tcp","ssl","ws","wss","quic"]). +-spec(id_example() -> atom()). +id_example() -> + id_example(list()). + +id_example([]) -> + {ID, _} = hd(list()), + ID; +id_example([{'tcp:default', _} | _]) -> + 'tcp:default'; +id_example([_ | Listeners]) -> + id_example(Listeners). + %% @doc List configured listeners. -spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]). 
list() -> @@ -235,10 +250,10 @@ do_start_listener(quic, ListenerName, #{bind := ListenOn} = Opts) -> , {key, maps:get(keyfile, Opts)} , {alpn, ["mqtt"]} , {conn_acceptors, lists:max([DefAcceptors, maps:get(acceptors, Opts, 0)])} - , {idle_timeout_ms, lists:max([ - emqx_config:get_zone_conf(zone(Opts), [mqtt, idle_timeout]) * 3 - , timer:seconds(maps:get(idle_timeout, Opts))] - )} + , {idle_timeout_ms, + lists:max([ + emqx_config:get_zone_conf(zone(Opts), [mqtt, idle_timeout]) * 3, + timer:seconds(maps:get(idle_timeout, Opts))])} ], ConnectionOpts = #{ conn_callback => emqx_quic_connection , peer_unidi_stream_count => 1 @@ -257,7 +272,7 @@ delete_authentication(Type, ListenerName, _Conf) -> emqx_authentication:delete_chain(listener_id(Type, ListenerName)). %% Update the listeners at runtime -post_config_update(_Req, NewListeners, OldListeners, _AppEnvs) -> +post_config_update(_, _Req, NewListeners, OldListeners, _AppEnvs) -> #{added := Added, removed := Removed, changed := Updated} = diff_listeners(NewListeners, OldListeners), perform_listener_changes(fun stop_listener/3, Removed), @@ -281,7 +296,8 @@ flatten_listeners(Conf0) -> || {Type, Conf} <- maps:to_list(Conf0)])). do_flatten_listeners(Type, Conf0) -> - [{listener_id(Type, Name), maps:remove(authentication, Conf)} || {Name, Conf} <- maps:to_list(Conf0)]. + [{listener_id(Type, Name), maps:remove(authentication, Conf)} || + {Name, Conf} <- maps:to_list(Conf0)]. esockd_opts(Type, Opts0) -> Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0), @@ -352,10 +368,13 @@ listener_id(Type, ListenerName) -> list_to_atom(lists:append([str(Type), ":", str(ListenerName)])). parse_listener_id(Id) -> - [Type, Name] = string:split(str(Id), ":", leading), - case lists:member(Type, ?TYPES_STRING) of - true -> {list_to_existing_atom(Type), list_to_atom(Name)}; - false -> {error, {invalid_listener_id, Id}} + case string:split(str(Id), ":", leading) of + [Type, Name] -> + case lists:member(Type, ?TYPES_STRING) of + true -> {list_to_existing_atom(Type), list_to_atom(Name)}; + false -> {error, {invalid_listener_id, Id}} + end; + _ -> {error, {invalid_listener_id, Id}} end. zone(Opts) -> diff --git a/apps/emqx/src/emqx_logger.erl b/apps/emqx/src/emqx_logger.erl index 29f5bd597..79ac5e6b8 100644 --- a/apps/emqx/src/emqx_logger.erl +++ b/apps/emqx/src/emqx_logger.erl @@ -20,6 +20,7 @@ -behaviour(gen_server). -behaviour(emqx_config_handler). +-elvis([{elvis_style, god_modules, disable}]). %% gen_server callbacks -export([ start_link/0 @@ -70,7 +71,7 @@ , stop_log_handler/1 ]). --export([post_config_update/4]). +-export([post_config_update/5]). -type(peername_str() :: list()). -type(logger_dst() :: file:filename() | console | unknown). @@ -78,10 +79,11 @@ id := logger:handler_id(), level := logger:level(), dst := logger_dst(), + filters := [{logger:filter_id(), logger:filter()}], status := started | stopped }). --define(stopped_handlers, {?MODULE, stopped_handlers}). +-define(STOPPED_HANDLERS, {?MODULE, stopped_handlers}). -define(CONF_PATH, [log]). start_link() -> @@ -123,7 +125,7 @@ code_change(_OldVsn, State, _Extra) -> %%-------------------------------------------------------------------- %% emqx_config_handler callbacks %%-------------------------------------------------------------------- -post_config_update(_Req, _NewConf, _OldConf, AppEnvs) -> +post_config_update(_, _Req, _NewConf, _OldConf, AppEnvs) -> gen_server:call(?MODULE, {update_config, AppEnvs}, 5000). 
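The hardened parse_listener_id/1 above returns an error tuple instead of crashing when an id has no type prefix or an unknown type. Roughly, under the new clauses (the ids are examples):

    {tcp, default} = emqx_listeners:parse_listener_id('tcp:default'),
    {error, {invalid_listener_id, "nocolon"}} = emqx_listeners:parse_listener_id("nocolon").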
%%-------------------------------------------------------------------- @@ -238,19 +240,19 @@ get_log_handlers() -> -spec(get_log_handlers(started | stopped) -> [logger_handler_info()]). get_log_handlers(started) -> - [log_hanlder_info(Conf, started) || Conf <- logger:get_handler_config()]; + [log_handler_info(Conf, started) || Conf <- logger:get_handler_config()]; get_log_handlers(stopped) -> - [log_hanlder_info(Conf, stopped) || Conf <- list_stopped_handler_config()]. + [log_handler_info(Conf, stopped) || Conf <- list_stopped_handler_config()]. -spec(get_log_handler(logger:handler_id()) -> logger_handler_info()). get_log_handler(HandlerId) -> case logger:get_handler_config(HandlerId) of {ok, Conf} -> - log_hanlder_info(Conf, started); + log_handler_info(Conf, started); {error, _} -> case read_stopped_handler_config(HandlerId) of error -> {error, {not_found, HandlerId}}; - {ok, Conf} -> log_hanlder_info(Conf, stopped) + {ok, Conf} -> log_handler_info(Conf, stopped) end end. @@ -305,21 +307,21 @@ set_log_level(Level) -> %% Internal Functions %%-------------------------------------------------------------------- -log_hanlder_info(#{id := Id, level := Level, module := logger_std_h, - config := #{type := Type}}, Status) when +log_handler_info(#{id := Id, level := Level, module := logger_std_h, + filters := Filters, config := #{type := Type}}, Status) when Type =:= standard_io; Type =:= standard_error -> - #{id => Id, level => Level, dst => console, status => Status}; -log_hanlder_info(#{id := Id, level := Level, module := logger_std_h, - config := Config = #{type := file}}, Status) -> - #{id => Id, level => Level, status => Status, + #{id => Id, level => Level, dst => console, status => Status, filters => Filters}; +log_handler_info(#{id := Id, level := Level, module := logger_std_h, + filters := Filters, config := Config = #{type := file}}, Status) -> + #{id => Id, level => Level, status => Status, filters => Filters, dst => maps:get(file, Config, atom_to_list(Id))}; -log_hanlder_info(#{id := Id, level := Level, module := logger_disk_log_h, - config := #{file := Filename}}, Status) -> - #{id => Id, level => Level, dst => Filename, status => Status}; -log_hanlder_info(#{id := Id, level := Level, module := _OtherModule}, Status) -> - #{id => Id, level => Level, dst => unknown, status => Status}. +log_handler_info(#{id := Id, level := Level, module := logger_disk_log_h, + filters := Filters, config := #{file := Filename}}, Status) -> + #{id => Id, level => Level, dst => Filename, status => Status, filters => Filters}; +log_handler_info(#{id := Id, level := Level, filters := Filters}, Status) -> + #{id => Id, level => Level, dst => unknown, status => Status, filters => Filters}. %% set level for all log handlers in one command set_all_log_handlers_level(Level) -> @@ -341,29 +343,29 @@ rollback([{ID, Level} | List]) -> rollback([]) -> ok. save_stopped_handler_config(HandlerId, Config) -> - case persistent_term:get(?stopped_handlers, undefined) of + case persistent_term:get(?STOPPED_HANDLERS, undefined) of undefined -> - persistent_term:put(?stopped_handlers, #{HandlerId => Config}); + persistent_term:put(?STOPPED_HANDLERS, #{HandlerId => Config}); ConfList -> - persistent_term:put(?stopped_handlers, ConfList#{HandlerId => Config}) + persistent_term:put(?STOPPED_HANDLERS, ConfList#{HandlerId => Config}) end. 
read_stopped_handler_config(HandlerId) -> - case persistent_term:get(?stopped_handlers, undefined) of + case persistent_term:get(?STOPPED_HANDLERS, undefined) of undefined -> error; ConfList -> maps:find(HandlerId, ConfList) end. remove_stopped_handler_config(HandlerId) -> - case persistent_term:get(?stopped_handlers, undefined) of + case persistent_term:get(?STOPPED_HANDLERS, undefined) of undefined -> ok; ConfList -> case maps:find(HandlerId, ConfList) of error -> ok; {ok, _} -> - persistent_term:put(?stopped_handlers, maps:remove(HandlerId, ConfList)) + persistent_term:put(?STOPPED_HANDLERS, maps:remove(HandlerId, ConfList)) end end. list_stopped_handler_config() -> - case persistent_term:get(?stopped_handlers, undefined) of + case persistent_term:get(?STOPPED_HANDLERS, undefined) of undefined -> []; ConfList -> maps:values(ConfList) end. diff --git a/apps/emqx/src/emqx_misc.erl b/apps/emqx/src/emqx_misc.erl index 446039778..0d3edd551 100644 --- a/apps/emqx/src/emqx_misc.erl +++ b/apps/emqx/src/emqx_misc.erl @@ -17,6 +17,7 @@ -module(emqx_misc). -compile(inline). +-elvis([{elvis_style, god_modules, disable}]). -include("types.hrl"). -include("logger.hrl"). @@ -65,21 +66,13 @@ maybe_parse_ip(Host) -> end. %% @doc Add `ipv6_probe' socket option if it's supported. +%% gen_tcp:ipv6_probe() -> true. is added to EMQ's OTP forks ipv6_probe(Opts) -> - case persistent_term:get({?MODULE, ipv6_probe_supported}, unknown) of - unknown -> - %% e.g. 23.2.7.1-emqx-2-x86_64-unknown-linux-gnu-64 - OtpVsn = emqx_vm:get_otp_version(), - Bool = (match =:= re:run(OtpVsn, "emqx", [{capture, none}])), - _ = persistent_term:put({?MODULE, ipv6_probe_supported}, Bool), - ipv6_probe(Bool, Opts); - Bool -> - ipv6_probe(Bool, Opts) + case erlang:function_exported(gen_tcp, ipv6_probe, 0) of + true -> [{ipv6_probe, true} | Opts]; + false -> Opts end. -ipv6_probe(false, Opts) -> Opts; -ipv6_probe(true, Opts) -> [{ipv6_probe, true} | Opts]. - %% @doc Merge options -spec(merge_opts(Opts, Opts) -> Opts when Opts :: proplists:proplist()). merge_opts(Defaults, Options) -> @@ -100,9 +93,9 @@ maybe_apply(Fun, Arg) when is_function(Fun) -> -spec(compose(list(F)) -> G when F :: fun((any()) -> any()), G :: fun((any()) -> any())). -compose([F|More]) -> compose(F, More). +compose([F | More]) -> compose(F, More). --spec(compose(F, G|[Gs]) -> C +-spec(compose(F, G | [Gs]) -> C when F :: fun((X1) -> X2), G :: fun((X2) -> X3), Gs :: [fun((Xn) -> Xn1)], @@ -110,19 +103,19 @@ compose([F|More]) -> compose(F, More). X3 :: any(), Xn :: any(), Xn1 :: any(), Xm :: any()). compose(F, G) when is_function(G) -> fun(X) -> G(F(X)) end; compose(F, [G]) -> compose(F, G); -compose(F, [G|More]) -> compose(compose(F, G), More). +compose(F, [G | More]) -> compose(compose(F, G), More). %% @doc RunFold run_fold([], Acc, _State) -> Acc; -run_fold([Fun|More], Acc, State) -> +run_fold([Fun | More], Acc, State) -> run_fold(More, Fun(Acc, State), State). %% @doc Pipeline pipeline([], Input, State) -> {ok, Input, State}; -pipeline([Fun|More], Input, State) -> +pipeline([Fun | More], Input, State) -> case apply_fun(Fun, Input, State) of ok -> pipeline(More, Input, State); {ok, NState} -> @@ -171,7 +164,7 @@ drain_deliver(0, Acc) -> drain_deliver(N, Acc) -> receive Deliver = {deliver, _Topic, _Msg} -> - drain_deliver(N-1, [Deliver|Acc]) + drain_deliver(N-1, [Deliver | Acc]) after 0 -> lists:reverse(Acc) end. 
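With the emqx_misc change above, ipv6_probe/1 now feature-detects gen_tcp:ipv6_probe/0 instead of pattern-matching the OTP version string, so it degrades gracefully on a stock OTP. A usage sketch when assembling listen options (the option list is illustrative):

    %% On EMQ X's patched OTP: [{ipv6_probe, true}, binary, {reuseaddr, true}]
    %% On a stock OTP: the list is returned unchanged.
    ListenOpts = emqx_misc:ipv6_probe([binary, {reuseaddr, true}]).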
@@ -186,7 +179,7 @@ drain_down(0, Acc) -> drain_down(Cnt, Acc) -> receive {'DOWN', _MRef, process, Pid, _Reason} -> - drain_down(Cnt-1, [Pid|Acc]) + drain_down(Cnt-1, [Pid | Acc]) after 0 -> lists:reverse(Acc) end. @@ -213,7 +206,7 @@ check_oom(Pid, #{max_message_queue_len := MaxQLen, end. do_check_oom([]) -> ok; -do_check_oom([{Val, Max, Reason}|Rest]) -> +do_check_oom([{Val, Max, Reason} | Rest]) -> case is_integer(Max) andalso (0 < Max) andalso (Max < Val) of true -> {shutdown, Reason}; false -> do_check_oom(Rest) @@ -256,8 +249,8 @@ proc_stats(Pid) -> reductions, memory]) of undefined -> []; - [{message_queue_len, Len}|ProcStats] -> - [{mailbox_len, Len}|ProcStats] + [{message_queue_len, Len} | ProcStats] -> + [{mailbox_len, Len} | ProcStats] end. rand_seed() -> @@ -277,9 +270,9 @@ index_of(E, L) -> index_of(_E, _I, []) -> error(badarg); -index_of(E, I, [E|_]) -> +index_of(E, I, [E | _]) -> I; -index_of(E, I, [_|L]) -> +index_of(E, I, [_ | L]) -> index_of(E, I+1, L). -spec(bin2hexstr_A_F(binary()) -> binary()). @@ -339,6 +332,12 @@ pad(L, Count) -> -include_lib("eunit/include/eunit.hrl"). ipv6_probe_test() -> - ?assertEqual([{ipv6_probe, true}], ipv6_probe([])). + try gen_tcp:ipv6_probe() of + true -> + ?assertEqual([{ipv6_probe, true}], ipv6_probe([])) + catch + _ : _ -> + ok + end. -endif. diff --git a/apps/emqx/src/emqx_mountpoint.erl b/apps/emqx/src/emqx_mountpoint.erl index f3c7a94de..daa73822e 100644 --- a/apps/emqx/src/emqx_mountpoint.erl +++ b/apps/emqx/src/emqx_mountpoint.erl @@ -17,6 +17,7 @@ -module(emqx_mountpoint). -include("emqx.hrl"). +-include("emqx_placeholder.hrl"). -include("types.hrl"). -export([ mount/2 @@ -66,14 +67,17 @@ unmount(MountPoint, Msg = #message{topic = Topic}) -> -spec(replvar(maybe(mountpoint()), map()) -> maybe(mountpoint())). replvar(undefined, _Vars) -> undefined; -replvar(MountPoint, #{clientid := ClientId, username := Username}) -> - lists:foldl(fun feed_var/2, MountPoint, - [{<<"%c">>, ClientId}, {<<"%u">>, Username}]). +replvar(MountPoint, Vars) -> + ClientID = maps:get(clientid, Vars, undefined), + UserName = maps:get(username, Vars, undefined), + EndpointName = maps:get(endpoint_name, Vars, undefined), + List = [ {?PH_CLIENTID, ClientID} + , {?PH_USERNAME, UserName} + , {?PH_ENDPOINT_NAME, EndpointName} + ], + lists:foldl(fun feed_var/2, MountPoint, List). -feed_var({<<"%c">>, ClientId}, MountPoint) -> - emqx_topic:feed_var(<<"%c">>, ClientId, MountPoint); -feed_var({<<"%u">>, undefined}, MountPoint) -> +feed_var({_PlaceHolder, undefined}, MountPoint) -> MountPoint; -feed_var({<<"%u">>, Username}, MountPoint) -> - emqx_topic:feed_var(<<"%u">>, Username, MountPoint). - +feed_var({PlaceHolder, Value}, MountPoint) -> + emqx_topic:feed_var(PlaceHolder, Value, MountPoint). diff --git a/apps/emqx/src/emqx_os_mon.erl b/apps/emqx/src/emqx_os_mon.erl index 24795c7ba..e0cfac7af 100644 --- a/apps/emqx/src/emqx_os_mon.erl +++ b/apps/emqx/src/emqx_os_mon.erl @@ -96,12 +96,26 @@ handle_info({timeout, _Timer, check}, State) -> _ = case emqx_vm:cpu_util() of %% TODO: should be improved? 
             0 -> ok;
             Busy when Busy >= CPUHighWatermark ->
-                emqx_alarm:activate(high_cpu_usage, #{usage => io_lib:format("~p%", [Busy]),
-                                                      high_watermark => CPUHighWatermark,
-                                                      low_watermark => CPULowWatermark}),
+                Usage = io_lib:format("~p%", [Busy]),
+                Message = [Usage, " cpu usage"],
+                emqx_alarm:activate(high_cpu_usage,
+                    #{
+                        usage => Usage,
+                        high_watermark => CPUHighWatermark,
+                        low_watermark => CPULowWatermark
+                    },
+                    Message),
                 start_check_timer();
             Busy when Busy =< CPULowWatermark ->
-                emqx_alarm:deactivate(high_cpu_usage),
+                Usage = io_lib:format("~p%", [Busy]),
+                Message = [Usage, " cpu usage"],
+                emqx_alarm:deactivate(high_cpu_usage,
+                    #{
+                        usage => Usage,
+                        high_watermark => CPUHighWatermark,
+                        low_watermark => CPULowWatermark
+                    },
+                    Message),
                 start_check_timer();
             _Busy ->
                 start_check_timer()
diff --git a/apps/emqx/src/emqx_passwd.erl b/apps/emqx/src/emqx_passwd.erl
index ff3b40f9f..2104f1850 100644
--- a/apps/emqx/src/emqx_passwd.erl
+++ b/apps/emqx/src/emqx_passwd.erl
@@ -22,7 +22,7 @@
 -include("logger.hrl").
 
--type(hash_type() :: plain | md5 | sha | sha256 | pbkdf2 | bcrypt).
+-type(hash_type() :: plain | md5 | sha | sha256 | sha512 | pbkdf2 | bcrypt).
 
 -export_type([hash_type/0]).
 
@@ -95,4 +95,3 @@ hexstring(<<X:256/big-unsigned-integer>>) ->
     iolist_to_binary(io_lib:format("~64.16.0b", [X]));
 hexstring(<<X:512/big-unsigned-integer>>) ->
     iolist_to_binary(io_lib:format("~128.16.0b", [X])).
-
diff --git a/apps/emqx/src/emqx_persistent_session.erl b/apps/emqx/src/emqx_persistent_session.erl
index 71dac02c3..13c74b62e 100644
--- a/apps/emqx/src/emqx_persistent_session.erl
+++ b/apps/emqx/src/emqx_persistent_session.erl
@@ -179,12 +179,17 @@ timestamp_from_conninfo(ConnInfo) ->
     end.
 
 lookup(ClientID) when is_binary(ClientID) ->
-    case lookup_session_store(ClientID) of
-        none -> none;
-        {value, #session_store{session = S} = SS} ->
-            case persistent_session_status(SS) of
-                expired -> {expired, S};
-                persistent -> {persistent, S}
+    case is_store_enabled() of
+        false ->
+            none;
+        true ->
+            case lookup_session_store(ClientID) of
+                none -> none;
+                {value, #session_store{session = S} = SS} ->
+                    case persistent_session_status(SS) of
+                        expired -> {expired, S};
+                        persistent -> {persistent, S}
+                    end
             end
     end.
diff --git a/apps/emqx/src/emqx_pool.erl b/apps/emqx/src/emqx_pool.erl
index 8b9508768..c34ae61ae 100644
--- a/apps/emqx/src/emqx_pool.erl
+++ b/apps/emqx/src/emqx_pool.erl
@@ -32,7 +32,7 @@
         ]).
 
 -ifdef(TEST).
--export([worker/0]).
+-export([worker/0, flush_async_tasks/0]).
 -endif.
 
 %% gen_server callbacks
@@ -139,3 +139,15 @@ run({F, A}) when is_function(F), is_list(A) ->
 
 run(Fun) when is_function(Fun) ->
     Fun().
+-ifdef(TEST).
+%% This helper function creates a large enough number of async tasks
+%% to force flush the pool workers.
+%% The number of tasks should be large enough to ensure all workers have
+%% the chance to work on at least one of the tasks.
+flush_async_tasks() ->
+    Ref = make_ref(),
+    Self = self(),
+    L = lists:seq(1, 997),
+    lists:foreach(fun(I) -> emqx_pool:async_submit(fun() -> Self ! {done, Ref, I} end, []) end, L),
+    lists:foreach(fun(I) -> receive {done, Ref, I} -> ok end end, L).
+-endif.
diff --git a/apps/emqx/src/emqx_release.erl b/apps/emqx/src/emqx_release.erl
new file mode 100644
index 000000000..1e362d0f0
--- /dev/null
+++ b/apps/emqx/src/emqx_release.erl
@@ -0,0 +1,86 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_release).
+
+-export([ edition/0
+        , put_edition/0
+        , put_edition/1
+        , description/0
+        , version/0
+        ]).
+
+-include("emqx_release.hrl").
+
+%% @doc Return EMQ X description.
+description() ->
+    case os:getenv("EMQX_DESCRIPTION") of
+        false -> "EMQ X Community Edition";
+        "" -> "EMQ X Community Edition";
+        Str -> string:strip(Str, both, $\n)
+    end.
+
+%% @doc Return EMQ X edition info.
+%% Read info from persistent_term at runtime.
+%% Or meck this function to run tests for another edition.
+-spec edition() -> ce | ee | edge.
+edition() ->
+    try persistent_term:get(emqx_edition)
+    catch error : badarg -> get_edition() end.
+
+%% @private initialize EMQ X edition info in persistent_term.
+put_edition() ->
+    ok = put_edition(get_edition()).
+
+%% @hidden This function is mostly for testing.
+%% Switch to another edition at runtime to run edition-specific tests.
+-spec put_edition(ce | ee | edge) -> ok.
+put_edition(Which) ->
+    persistent_term:put(emqx_edition, Which),
+    ok.
+
+-spec get_edition() -> ce | ee | edge.
+get_edition() ->
+    edition(description()).
+
+edition(Desc) ->
+    case re:run(Desc, "enterprise", [caseless]) of
+        {match, _} ->
+            ee;
+        _ ->
+            case re:run(Desc, "edge", [caseless]) of
+                {match, _} -> edge;
+                _ -> ce
+            end
+    end.
+
+%% @doc Return the release version.
+version() ->
+    case lists:keyfind(emqx_vsn, 1, ?MODULE:module_info(compile)) of
+        false ->    %% For TEST build or dependency build.
+            ?EMQX_RELEASE;
+        {_, Vsn} -> %% For emqx release build
+            VsnStr = ?EMQX_RELEASE,
+            case string:str(Vsn, VsnStr) of
+                1 -> ok;
+                _ ->
+                    erlang:error(#{ reason => version_mismatch
                                  , source => VsnStr
+                                  , built_for => Vsn
+                                  })
+            end,
+            Vsn
+    end.
diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl
index 99a7ed59b..44662d9f7 100644
--- a/apps/emqx/src/emqx_schema.erl
+++ b/apps/emqx/src/emqx_schema.erl
@@ -51,6 +51,7 @@
 -export([ validate_heap_size/1
         , parse_user_lookup_fun/1
+        , validate_alarm_actions/1
         ]).
 
 % workaround: prevent being recognized as unused functions
@@ -74,6 +75,8 @@
 -export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1, default_ciphers/1]).
 -export([sc/2, map/2]).
+-elvis([{elvis_style, god_modules, disable}]).
+
 namespace() -> undefined.
 
 roots() ->
@@ -889,17 +892,36 @@ fields("sysmon_os") ->
 fields("alarm") ->
     [ {"actions",
        sc(hoconsc:array(atom()),
-          #{ default => [log, publish]
+          #{ default => [log, publish],
+             validator => fun ?MODULE:validate_alarm_actions/1,
+             example => [log, publish],
+             desc =>
+             """The actions triggered when the alarm is activated.<br/>
+Currently supports two actions, 'log' and 'publish'.
+'log' is to write the alarm to the log (console or file).
+'publish' is to publish the alarm as an MQTT message to the system topics: +$SYS/brokers/emqx@xx.xx.xx.x/alarms/activate and +$SYS/brokers/emqx@xx.xx.xx.x/alarms/deactivate""" }) } , {"size_limit", - sc(integer(), - #{ default => 1000 + sc(range(1, 3000), + #{ default => 1000, + example => 1000, + desc => + """The maximum total number of deactivated alarms to keep as history.
+When this limit is exceeded, the oldest deactivated alarms are deleted to cap the total number. +""" }) } , {"validity_period", sc(duration(), - #{ default => "24h" + #{ default => "24h", + example => "24h", + desc => +"""Retention time of deactivated alarms. Alarms are not deleted immediately +when deactivated, but after the retention time. +""" }) } ]. @@ -1141,7 +1163,7 @@ client_ssl_opts_schema(Defaults) -> common_ssl_opts_schema(Defaults) ++ [ { "server_name_indication", sc(hoconsc:union([disable, string()]), - #{ default => disable + #{ nullable => true , desc => """Specify the host name to be used in TLS Server Name Indication extension.
For instance, when connecting to \"server.example.net\", the genuine server @@ -1163,7 +1185,8 @@ default_tls_vsns(dtls_all_available) -> default_tls_vsns(tls_all_available) -> emqx_tls_lib:default_versions(). --spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined) -> hocon_schema:field_schema(). +-spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined) + -> hocon_schema:field_schema(). ciphers_schema(Default) -> sc(hoconsc:array(string()), #{ default => default_ciphers(Default) @@ -1303,7 +1326,7 @@ to_bar_separated_list(Str) -> {ok, string:tokens(Str, "| ")}. to_ip_port(Str) -> - case string:tokens(Str, ":") of + case string:tokens(Str, ": ") of [Ip, Port] -> PortVal = list_to_integer(Port), case inet:parse_address(Ip) of @@ -1345,8 +1368,16 @@ validate_heap_size(Siz) -> true -> error(io_lib:format("force_shutdown_policy: heap-size ~ts is too large", [Siz])); false -> ok end. + +validate_alarm_actions(Actions) -> + UnSupported = lists:filter(fun(Action) -> Action =/= log andalso Action =/= publish end, Actions), + case UnSupported of + [] -> ok; + Error -> {error, Error} + end. + parse_user_lookup_fun(StrConf) -> - [ModStr, FunStr] = string:tokens(str(StrConf), ":"), + [ModStr, FunStr] = string:tokens(str(StrConf), ": "), Mod = list_to_atom(ModStr), Fun = list_to_atom(FunStr), {fun Mod:Fun/3, undefined}. diff --git a/apps/emqx/src/emqx_session.erl b/apps/emqx/src/emqx_session.erl index 03414fc60..ab80bd2be 100644 --- a/apps/emqx/src/emqx_session.erl +++ b/apps/emqx/src/emqx_session.erl @@ -58,6 +58,7 @@ -export([ info/1 , info/2 + , is_session/1 , stats/1 ]). @@ -202,6 +203,9 @@ init(Opts) -> %% Info, Stats %%-------------------------------------------------------------------- +is_session(#session{}) -> true; +is_session(_) -> false. + %% @doc Get infos of the session. -spec(info(session()) -> emqx_types:infos()). info(Session) -> diff --git a/apps/emqx/src/emqx_session_router.erl b/apps/emqx/src/emqx_session_router.erl index 45c7ecd85..99abb0c3c 100644 --- a/apps/emqx/src/emqx_session_router.erl +++ b/apps/emqx/src/emqx_session_router.erl @@ -215,7 +215,7 @@ handle_call({pending, SessionID, MarkerIDs}, _From, State) -> Res = emqx_persistent_session:pending_messages_in_db(SessionID, MarkerIDs), {reply, Res, State}; handle_call(Req, _From, State) -> - ?LOG(error, "Unexpected call: ~p", [Req]), + ?SLOG(error, #{msg => "unexpected_call", req => Req}), {reply, ignored, State}. handle_cast({delete_routes, SessionID, Subscriptions}, State) -> @@ -233,11 +233,11 @@ handle_cast({resume_end, SessionID, Pid}, State) -> _ = emqx_session_router_worker_sup:abort_worker(Pid), {noreply, State#{ pmon => Pmon }}; handle_cast(Msg, State) -> - ?LOG(error, "Unexpected cast: ~p", [Msg]), + ?SLOG(error, #{msg => "unexpected_cast", cast => Msg}), {noreply, State}. handle_info(Info, State) -> - ?LOG(error, "Unexpected info: ~p", [Info]), + ?SLOG(error, #{msg => "unexpected_info", info => Info}), {noreply, State}. 
terminate(_Reason, #{pool := Pool, id := Id}) -> diff --git a/apps/emqx/src/emqx_shared_sub.erl b/apps/emqx/src/emqx_shared_sub.erl index 79a7d5522..434f38694 100644 --- a/apps/emqx/src/emqx_shared_sub.erl +++ b/apps/emqx/src/emqx_shared_sub.erl @@ -292,6 +292,7 @@ subscribers(Group, Topic) -> %%-------------------------------------------------------------------- init([]) -> + ok = mria:wait_for_tables([?TAB]), {ok, _} = mnesia:subscribe({table, ?TAB, simple}), {atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun init_monitors/0), ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]), diff --git a/apps/emqx/src/emqx_stats.erl b/apps/emqx/src/emqx_stats.erl index 0d2b1a1fd..74411ee9c 100644 --- a/apps/emqx/src/emqx_stats.erl +++ b/apps/emqx/src/emqx_stats.erl @@ -21,6 +21,7 @@ -include("emqx.hrl"). -include("logger.hrl"). -include("types.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). %% APIs @@ -66,8 +67,10 @@ %% Connection stats -define(CONNECTION_STATS, - ['connections.count', %% Count of Concurrent Connections - 'connections.max' %% Maximum Number of Concurrent Connections + [ 'connections.count' %% Count of Concurrent Connections + , 'connections.max' %% Maximum Number of Concurrent Connections + , 'live_connections.count' %% Count of connected clients + , 'live_connections.max' %% Maximum number of connected clients ]). %% Channel stats @@ -215,6 +218,11 @@ handle_cast({setstat, Stat, MaxStat, Val}, State) -> ets:insert(?TAB, {MaxStat, Val}) end, safe_update_element(Stat, Val), + ?tp(emqx_stats_setstat, + #{ count_stat => Stat + , max_stat => MaxStat + , value => Val + }), {noreply, State}; handle_cast({update_interval, Update = #update{name = Name}}, @@ -225,7 +233,7 @@ handle_cast({update_interval, Update = #update{name = Name}}, name => Name }), State; - false -> State#state{updates = [Update|Updates]} + false -> State#state{updates = [Update | Updates]} end, {noreply, NState}; diff --git a/apps/emqx/src/emqx_sys_mon.erl b/apps/emqx/src/emqx_sys_mon.erl index 7d798060f..cdc4677f3 100644 --- a/apps/emqx/src/emqx_sys_mon.erl +++ b/apps/emqx/src/emqx_sys_mon.erl @@ -170,9 +170,11 @@ code_change(_OldVsn, State, _Extra) -> %%-------------------------------------------------------------------- handle_partition_event({partition, {occurred, Node}}) -> - emqx_alarm:activate(partition, #{occurred => Node}); -handle_partition_event({partition, {healed, _Node}}) -> - emqx_alarm:deactivate(partition). + Message = io_lib:format("Partition occurs at node ~ts", [Node]), + emqx_alarm:activate(partition, #{occurred => Node}, Message); +handle_partition_event({partition, {healed, Node}}) -> + Message = io_lib:format("Partition healed at node ~ts", [Node]), + emqx_alarm:deactivate(partition, no_details, Message). suppress(Key, SuccFun, State = #{events := Events}) -> case lists:member(Key, Events) of diff --git a/apps/emqx/src/emqx_trace/emqx_trace.erl b/apps/emqx/src/emqx_trace/emqx_trace.erl new file mode 100644 index 000000000..f85858be3 --- /dev/null +++ b/apps/emqx/src/emqx_trace/emqx_trace.erl @@ -0,0 +1,486 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+-module(emqx_trace).
+
+-behaviour(gen_server).
+
+-include_lib("emqx/include/emqx.hrl").
+-include_lib("emqx/include/logger.hrl").
+
+%% Mnesia bootstrap
+-export([mnesia/1]).
+
+-boot_mnesia({mnesia, [boot]}).
+
+-export([ publish/1
+        , subscribe/3
+        , unsubscribe/2
+        ]).
+
+-export([ start_link/0
+        , list/0
+        , list/1
+        , get_trace_filename/1
+        , create/1
+        , delete/1
+        , clear/0
+        , update/2
+        ]).
+
+-export([ format/1
+        , zip_dir/0
+        , filename/2
+        , trace_dir/0
+        , trace_file/1
+        , delete_files_after_send/2
+        ]).
+
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
+
+-define(TRACE, ?MODULE).
+-define(MAX_SIZE, 30).
+
+-ifdef(TEST).
+-export([log_file/2]).
+-endif.
+
+-export_type([ip_address/0]).
+-type ip_address() :: string().
+
+-record(?TRACE,
+        { name :: binary() | undefined | '_'
+        , type :: clientid | topic | ip_address | undefined | '_'
+        , filter :: emqx_types:topic() | emqx_types:clientid() | ip_address() | undefined | '_'
+        , enable = true :: boolean() | '_'
+        , start_at :: integer() | undefined | '_'
+        , end_at :: integer() | undefined | '_'
+        }).
+
+mnesia(boot) ->
+    ok = mria:create_table(?TRACE, [
+        {type, set},
+        {rlog_shard, ?COMMON_SHARD},
+        {storage, disc_copies},
+        {record_name, ?TRACE},
+        {attributes, record_info(fields, ?TRACE)}]).
+
+publish(#message{topic = <<"$SYS/", _/binary>>}) -> ignore;
+publish(#message{from = From, topic = Topic, payload = Payload}) when
+        is_binary(From); is_atom(From) ->
+    emqx_logger:info(
+        #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
+        "PUBLISH to ~s: ~0p",
+        [Topic, Payload]
+    ).
+
+subscribe(<<"$SYS/", _/binary>>, _SubId, _SubOpts) -> ignore;
+subscribe(Topic, SubId, SubOpts) ->
+    emqx_logger:info(
+        #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
+        "~ts SUBSCRIBE ~ts: Options: ~0p",
+        [SubId, Topic, SubOpts]
+    ).
+
+unsubscribe(<<"$SYS/", _/binary>>, _SubOpts) -> ignore;
+unsubscribe(Topic, SubOpts) ->
+    emqx_logger:info(
+        #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
+        "~ts UNSUBSCRIBE ~ts: Options: ~0p",
+        [maps:get(subid, SubOpts, ""), Topic, SubOpts]
+    ).
+
+-spec(start_link() -> emqx_types:startlink_ret()).
+start_link() ->
+    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+-spec list() -> [tuple()].
+list() ->
+    ets:match_object(?TRACE, #?TRACE{_ = '_'}).
+
+-spec list(boolean()) -> [tuple()].
+list(Enable) ->
+    ets:match_object(?TRACE, #?TRACE{enable = Enable, _ = '_'}).
+
+-spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) ->
+    ok | {error, {duplicate_condition, iodata()} | {already_existed, iodata()} | iodata()}.
+create(Trace) ->
+    case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
+        true ->
+            case to_trace(Trace) of
+                {ok, TraceRec} -> insert_new_trace(TraceRec);
+                {error, Reason} -> {error, Reason}
+            end;
+        false ->
+            {error, "The number of traces created has reached the maximum,"
+                " please delete the useless ones first"}
+    end.
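A minimal usage sketch of create/1 above; the trace name and client id are made-up examples, and start_at/end_at fall back to the defaults filled in by fill_default/1 later in this module:

    ok = emqx_trace:create(#{ name => <<"debug_c1">>
                            , type => <<"clientid">>
                            , clientid => <<"c1">>
                            }).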
+ +-spec delete(Name :: binary()) -> ok | {error, not_found}. +delete(Name) -> + Tran = fun() -> + case mnesia:read(?TRACE, Name) of + [_] -> mnesia:delete(?TRACE, Name, write); + [] -> mnesia:abort(not_found) + end + end, + transaction(Tran). + +-spec clear() -> ok | {error, Reason :: term()}. +clear() -> + case mria:clear_table(?TRACE) of + {atomic, ok} -> ok; + {aborted, Reason} -> {error, Reason} + end. + +-spec update(Name :: binary(), Enable :: boolean()) -> + ok | {error, not_found | finished}. +update(Name, Enable) -> + Tran = fun() -> + case mnesia:read(?TRACE, Name) of + [] -> mnesia:abort(not_found); + [#?TRACE{enable = Enable}] -> ok; + [Rec] -> + case erlang:system_time(second) >= Rec#?TRACE.end_at of + false -> mnesia:write(?TRACE, Rec#?TRACE{enable = Enable}, write); + true -> mnesia:abort(finished) + end + end + end, + transaction(Tran). + +-spec get_trace_filename(Name :: binary()) -> + {ok, FileName :: string()} | {error, not_found}. +get_trace_filename(Name) -> + Tran = fun() -> + case mnesia:read(?TRACE, Name, read) of + [] -> mnesia:abort(not_found); + [#?TRACE{start_at = Start}] -> {ok, filename(Name, Start)} + end end, + transaction(Tran). + +-spec trace_file(File :: list()) -> + {ok, Node :: list(), Binary :: binary()} | + {error, Node :: list(), Reason :: term()}. +trace_file(File) -> + FileName = filename:join(trace_dir(), File), + Node = atom_to_list(node()), + case file:read_file(FileName) of + {ok, Bin} -> {ok, Node, Bin}; + {error, Reason} -> {error, Node, Reason} + end. + +delete_files_after_send(TraceLog, Zips) -> + gen_server:cast(?MODULE, {delete_tag, self(), [TraceLog | Zips]}). + +-spec format(list(#?TRACE{})) -> list(map()). +format(Traces) -> + Fields = record_info(fields, ?TRACE), + lists:map(fun(Trace0 = #?TRACE{}) -> + [_ | Values] = tuple_to_list(Trace0), + maps:from_list(lists:zip(Fields, Values)) + end, Traces). + +init([]) -> + erlang:process_flag(trap_exit, true), + OriginLogLevel = emqx_logger:get_primary_log_level(), + ok = filelib:ensure_dir(trace_dir()), + ok = filelib:ensure_dir(zip_dir()), + {ok, _} = mnesia:subscribe({table, ?TRACE, simple}), + Traces = get_enable_trace(), + ok = update_log_primary_level(Traces, OriginLogLevel), + TRef = update_trace(Traces), + {ok, #{timer => TRef, monitors => #{}, primary_log_level => OriginLogLevel}}. + +handle_call(Req, _From, State) -> + ?LOG(error, "Unexpected call: ~p", [Req]), + {reply, ok, State}. + +handle_cast({delete_tag, Pid, Files}, State = #{monitors := Monitors}) -> + erlang:monitor(process, Pid), + {noreply, State#{monitors => Monitors#{Pid => Files}}}; +handle_cast(Msg, State) -> + ?LOG(error, "Unexpected cast: ~p", [Msg]), + {noreply, State}. + +handle_info({'DOWN', _Ref, process, Pid, _Reason}, State = #{monitors := Monitors}) -> + case maps:take(Pid, Monitors) of + error -> {noreply, State}; + {Files, NewMonitors} -> + lists:foreach(fun file:delete/1, Files), + {noreply, State#{monitors => NewMonitors}} + end; +handle_info({timeout, TRef, update_trace}, + #{timer := TRef, primary_log_level := OriginLogLevel} = State) -> + Traces = get_enable_trace(), + ok = update_log_primary_level(Traces, OriginLogLevel), + NextTRef = update_trace(Traces), + {noreply, State#{timer => NextTRef}}; + +handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) -> + emqx_misc:cancel_timer(TRef), + handle_info({timeout, TRef, update_trace}, State); + +handle_info(Info, State) -> + ?LOG(error, "Unexpected info: ~p", [Info]), + {noreply, State}. 
+ +terminate(_Reason, #{timer := TRef, primary_log_level := OriginLogLevel}) -> + ok = set_log_primary_level(OriginLogLevel), + _ = mnesia:unsubscribe({table, ?TRACE, simple}), + emqx_misc:cancel_timer(TRef), + stop_all_trace_handler(), + _ = file:del_dir_r(zip_dir()), + ok. + +code_change(_, State, _Extra) -> + {ok, State}. + +insert_new_trace(Trace) -> + Tran = fun() -> + case mnesia:read(?TRACE, Trace#?TRACE.name) of + [] -> + #?TRACE{start_at = StartAt, type = Type, filter = Filter} = Trace, + Match = #?TRACE{_ = '_', start_at = StartAt, type = Type, filter = Filter}, + case mnesia:match_object(?TRACE, Match, read) of + [] -> mnesia:write(?TRACE, Trace, write); + [#?TRACE{name = Name}] -> mnesia:abort({duplicate_condition, Name}) + end; + [#?TRACE{name = Name}] -> mnesia:abort({already_existed, Name}) + end + end, + transaction(Tran). + +update_trace(Traces) -> + Now = erlang:system_time(second), + {_Waiting, Running, Finished} = classify_by_time(Traces, Now), + disable_finished(Finished), + Started = emqx_trace_handler:running(), + {NeedRunning, AllStarted} = start_trace(Running, Started), + NeedStop = AllStarted -- NeedRunning, + ok = stop_trace(NeedStop, Started), + clean_stale_trace_files(), + NextTime = find_closest_time(Traces, Now), + emqx_misc:start_timer(NextTime, update_trace). + +stop_all_trace_handler() -> + lists:foreach(fun(#{id := Id}) -> emqx_trace_handler:uninstall(Id) end, + emqx_trace_handler:running()). +get_enable_trace() -> + {atomic, Traces} = + mria:transaction(?COMMON_SHARD, fun() -> + mnesia:match_object(?TRACE, #?TRACE{enable = true, _ = '_'}, read) + end), + Traces. + +find_closest_time(Traces, Now) -> + Sec = + lists:foldl( + fun(#?TRACE{start_at = Start, end_at = End}, Closest) + when Start >= Now andalso Now < End -> %% running + min(End - Now, Closest); + (#?TRACE{start_at = Start}, Closest) when Start < Now -> %% waiting + min(Now - Start, Closest); + (_, Closest) -> Closest %% finished + end, 60 * 15, Traces), + timer:seconds(Sec). + +disable_finished([]) -> ok; +disable_finished(Traces) -> + transaction(fun() -> + lists:map(fun(#?TRACE{name = Name}) -> + case mnesia:read(?TRACE, Name, write) of + [] -> ok; + [Trace] -> mnesia:write(?TRACE, Trace#?TRACE{enable = false}, write) + end end, Traces) + end). + +start_trace(Traces, Started0) -> + Started = lists:map(fun(#{name := Name}) -> Name end, Started0), + lists:foldl(fun(#?TRACE{name = Name} = Trace, {Running, StartedAcc}) -> + case lists:member(Name, StartedAcc) of + true -> + {[Name | Running], StartedAcc}; + false -> + case start_trace(Trace) of + ok -> {[Name | Running], [Name | StartedAcc]}; + {error, _Reason} -> {[Name | Running], StartedAcc} + end + end + end, {[], Started}, Traces). + +start_trace(Trace) -> + #?TRACE{name = Name + , type = Type + , filter = Filter + , start_at = Start + } = Trace, + Who = #{name => Name, type => Type, filter => Filter}, + emqx_trace_handler:install(Who, debug, log_file(Name, Start)). + +stop_trace(Finished, Started) -> + lists:foreach(fun(#{name := Name, type := Type}) -> + case lists:member(Name, Finished) of + true -> emqx_trace_handler:uninstall(Type, Name); + false -> ok + end + end, Started). 
+ +clean_stale_trace_files() -> + TraceDir = trace_dir(), + case file:list_dir(TraceDir) of + {ok, AllFiles} when AllFiles =/= ["zip"] -> + FileFun = fun(#?TRACE{name = Name, start_at = StartAt}) -> filename(Name, StartAt) end, + KeepFiles = lists:map(FileFun, list()), + case AllFiles -- ["zip" | KeepFiles] of + [] -> ok; + DeleteFiles -> + DelFun = fun(F) -> file:delete(filename:join(TraceDir, F)) end, + lists:foreach(DelFun, DeleteFiles) + end; + _ -> ok + end. + +classify_by_time(Traces, Now) -> + classify_by_time(Traces, Now, [], [], []). + +classify_by_time([], _Now, Wait, Run, Finish) -> {Wait, Run, Finish}; +classify_by_time([Trace = #?TRACE{start_at = Start} | Traces], + Now, Wait, Run, Finish) when Start > Now -> + classify_by_time(Traces, Now, [Trace | Wait], Run, Finish); +classify_by_time([Trace = #?TRACE{end_at = End} | Traces], + Now, Wait, Run, Finish) when End =< Now -> + classify_by_time(Traces, Now, Wait, Run, [Trace | Finish]); +classify_by_time([Trace | Traces], Now, Wait, Run, Finish) -> + classify_by_time(Traces, Now, Wait, [Trace | Run], Finish). + +to_trace(TraceParam) -> + case to_trace(ensure_proplists(TraceParam), #?TRACE{}) of + {error, Reason} -> {error, Reason}; + {ok, #?TRACE{name = undefined}} -> + {error, "name required"}; + {ok, #?TRACE{type = undefined}} -> + {error, "type=[topic,clientid,ip_address] required"}; + {ok, #?TRACE{filter = undefined}} -> + {error, "topic/clientid/ip_address filter required"}; + {ok, TraceRec0} -> + case fill_default(TraceRec0) of + #?TRACE{start_at = Start, end_at = End} when End =< Start -> + {error, "failed by start_at >= end_at"}; + TraceRec -> {ok, TraceRec} + end + end. + +ensure_proplists(#{} = Trace) -> maps:to_list(Trace); +ensure_proplists(Trace) when is_list(Trace) -> + lists:foldl( + fun({K, V}, Acc) when is_binary(K) -> [{binary_to_existing_atom(K), V} | Acc]; + ({K, V}, Acc) when is_atom(K) -> [{K, V} | Acc]; + (_, Acc) -> Acc + end, [], Trace). + +fill_default(Trace = #?TRACE{start_at = undefined}) -> + fill_default(Trace#?TRACE{start_at = erlang:system_time(second)}); +fill_default(Trace = #?TRACE{end_at = undefined, start_at = StartAt}) -> + fill_default(Trace#?TRACE{end_at = StartAt + 10 * 60}); +fill_default(Trace) -> Trace. 
+ +to_trace([], Rec) -> {ok, Rec}; +to_trace([{name, Name} | Trace], Rec) -> + case io_lib:printable_unicode_list(unicode:characters_to_list(Name, utf8)) of + true -> + case binary:match(Name, [<<"/">>], []) of + nomatch -> to_trace(Trace, Rec#?TRACE{name = Name}); + _ -> {error, "name cannot contain /"} + end; + false -> {error, "name must printable unicode"} + end; +to_trace([{type, Type} | Trace], Rec) -> + case lists:member(Type, [<<"clientid">>, <<"topic">>, <<"ip_address">>]) of + true -> to_trace(Trace, Rec#?TRACE{type = binary_to_existing_atom(Type)}); + false -> {error, "incorrect type: only support clientid/topic/ip_address"} + end; +to_trace([{topic, Topic} | Trace], Rec) -> + case validate_topic(Topic) of + ok -> to_trace(Trace, Rec#?TRACE{filter = Topic}); + {error, Reason} -> {error, Reason} + end; +to_trace([{clientid, ClientId} | Trace], Rec) -> + to_trace(Trace, Rec#?TRACE{filter = ClientId}); +to_trace([{ip_address, IP} | Trace], Rec) -> + case inet:parse_address(binary_to_list(IP)) of + {ok, _} -> to_trace(Trace, Rec#?TRACE{filter = binary_to_list(IP)}); + {error, Reason} -> {error, lists:flatten(io_lib:format("ip address: ~p", [Reason]))} + end; +to_trace([{start_at, StartAt} | Trace], Rec) -> + case to_system_second(StartAt) of + {ok, Sec} -> to_trace(Trace, Rec#?TRACE{start_at = Sec}); + {error, Reason} -> {error, Reason} + end; +to_trace([{end_at, EndAt} | Trace], Rec) -> + Now = erlang:system_time(second), + case to_system_second(EndAt) of + {ok, Sec} when Sec > Now -> + to_trace(Trace, Rec#?TRACE{end_at = Sec}); + {ok, _Sec} -> + {error, "end_at time has already passed"}; + {error, Reason} -> + {error, Reason} + end; +to_trace([Unknown | _Trace], _Rec) -> {error, io_lib:format("unknown field: ~p", [Unknown])}. + +validate_topic(TopicName) -> + try emqx_topic:validate(filter, TopicName) of + true -> ok + catch + error:Error -> + {error, io_lib:format("topic: ~s invalid by ~p", [TopicName, Error])} + end. + +to_system_second(At) -> + try + Sec = calendar:rfc3339_to_system_time(binary_to_list(At), [{unit, second}]), + {ok, Sec} + catch error: {badmatch, _} -> + {error, ["The rfc3339 specification not satisfied: ", At]} + end. + +zip_dir() -> + trace_dir() ++ "zip/". + +trace_dir() -> + filename:join(emqx:data_dir(), "trace") ++ "/". + +log_file(Name, Start) -> + filename:join(trace_dir(), filename(Name, Start)). + +filename(Name, Start) -> + [Time, _] = string:split(calendar:system_time_to_rfc3339(Start), "T", leading), + lists:flatten(["trace_", binary_to_list(Name), "_", Time, ".log"]). + +transaction(Tran) -> + case mria:transaction(?COMMON_SHARD, Tran) of + {atomic, Res} -> Res; + {aborted, Reason} -> {error, Reason} + end. + +update_log_primary_level([], OriginLevel) -> set_log_primary_level(OriginLevel); +update_log_primary_level(_, _) -> set_log_primary_level(debug). + +set_log_primary_level(NewLevel) -> + case NewLevel =/= emqx_logger:get_primary_log_level() of + true -> emqx_logger:set_primary_log_level(NewLevel); + false -> ok + end. diff --git a/apps/emqx/src/emqx_trace/emqx_trace_api.erl b/apps/emqx/src/emqx_trace/emqx_trace_api.erl new file mode 100644 index 000000000..10b39a43f --- /dev/null +++ b/apps/emqx/src/emqx_trace/emqx_trace_api.erl @@ -0,0 +1,210 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_trace_api). +-include_lib("emqx/include/logger.hrl"). +-include_lib("kernel/include/file.hrl"). + +%% API +-export([ list_trace/2 + , create_trace/2 + , update_trace/2 + , delete_trace/2 + , clear_traces/2 + , download_zip_log/2 + , stream_log_file/2 +]). +-export([ read_trace_file/3 + , get_trace_size/0 + ]). + +-define(TO_BIN(_B_), iolist_to_binary(_B_)). +-define(NOT_FOUND(N), {error, 'NOT_FOUND', ?TO_BIN([N, " NOT FOUND"])}). + +list_trace(_, _Params) -> + case emqx_trace:list() of + [] -> {ok, []}; + List0 -> + List = lists:sort(fun(#{start_at := A}, #{start_at := B}) -> A > B end, List0), + Nodes = mria_mnesia:running_nodes(), + TraceSize = cluster_call(?MODULE, get_trace_size, [], 30000), + AllFileSize = lists:foldl(fun(F, Acc) -> maps:merge(Acc, F) end, #{}, TraceSize), + Now = erlang:system_time(second), + Traces = + lists:map(fun(Trace = #{name := Name, start_at := Start, + end_at := End, enable := Enable, type := Type, filter := Filter}) -> + FileName = emqx_trace:filename(Name, Start), + LogSize = collect_file_size(Nodes, FileName, AllFileSize), + Trace0 = maps:without([enable, filter], Trace), + Trace0#{ log_size => LogSize + , Type => Filter + , start_at => list_to_binary(calendar:system_time_to_rfc3339(Start)) + , end_at => list_to_binary(calendar:system_time_to_rfc3339(End)) + , status => status(Enable, Start, End, Now) + } + end, emqx_trace:format(List)), + {ok, Traces} + end. + +create_trace(_, Param) -> + case emqx_trace:create(Param) of + ok -> ok; + {error, {already_existed, Name}} -> + {error, 'ALREADY_EXISTED', ?TO_BIN([Name, "Already Exists"])}; + {error, {duplicate_condition, Name}} -> + {error, 'DUPLICATE_CONDITION', ?TO_BIN([Name, "Duplication Condition"])}; + {error, Reason} -> + {error, 'INCORRECT_PARAMS', ?TO_BIN(Reason)} + end. + +delete_trace(#{name := Name}, _Param) -> + case emqx_trace:delete(Name) of + ok -> ok; + {error, not_found} -> ?NOT_FOUND(Name) + end. + +clear_traces(_, _) -> + emqx_trace:clear(). + +update_trace(#{name := Name, operation := Operation}, _Param) -> + Enable = case Operation of disable -> false; enable -> true end, + case emqx_trace:update(Name, Enable) of + ok -> {ok, #{enable => Enable, name => Name}}; + {error, not_found} -> ?NOT_FOUND(Name) + end. + +%% if HTTP request headers include accept-encoding: gzip and file size > 300 bytes. +%% cowboy_compress_h will auto encode gzip format. +download_zip_log(#{name := Name}, _Param) -> + case emqx_trace:get_trace_filename(Name) of + {ok, TraceLog} -> + TraceFiles = collect_trace_file(TraceLog), + ZipDir = emqx_trace:zip_dir(), + Zips = group_trace_file(ZipDir, TraceLog, TraceFiles), + ZipFileName = ZipDir ++ binary_to_list(Name) ++ ".zip", + {ok, ZipFile} = zip:zip(ZipFileName, Zips, [{cwd, ZipDir}]), + emqx_trace:delete_files_after_send(ZipFileName, Zips), + {ok, ZipFile}; + {error, Reason} -> + {error, Reason} + end. 
+ +group_trace_file(ZipDir, TraceLog, TraceFiles) -> + lists:foldl(fun(Res, Acc) -> + case Res of + {ok, Node, Bin} -> + ZipName = ZipDir ++ Node ++ "-" ++ TraceLog, + ok = file:write_file(ZipName, Bin), + [Node ++ "-" ++ TraceLog | Acc]; + {error, Node, Reason} -> + ?LOG(error, "download trace log error:~p", [{Node, TraceLog, Reason}]), + Acc + end + end, [], TraceFiles). + +collect_trace_file(TraceLog) -> + cluster_call(emqx_trace, trace_file, [TraceLog], 60000). + +cluster_call(Mod, Fun, Args, Timeout) -> + Nodes = mria_mnesia:running_nodes(), + {GoodRes, BadNodes} = rpc:multicall(Nodes, Mod, Fun, Args, Timeout), + BadNodes =/= [] andalso ?LOG(error, "rpc call failed on ~p ~p", [BadNodes, {Mod, Fun, Args}]), + GoodRes. + +stream_log_file(#{name := Name}, Params) -> + Node0 = proplists:get_value(<<"node">>, Params, atom_to_binary(node())), + Position0 = proplists:get_value(<<"position">>, Params, <<"0">>), + Bytes0 = proplists:get_value(<<"bytes">>, Params, <<"1000">>), + case to_node(Node0) of + {ok, Node} -> + Position = binary_to_integer(Position0), + Bytes = binary_to_integer(Bytes0), + case rpc:call(Node, ?MODULE, read_trace_file, [Name, Position, Bytes]) of + {ok, Bin} -> + Meta = #{<<"position">> => Position + byte_size(Bin), <<"bytes">> => Bytes}, + {ok, #{meta => Meta, items => Bin}}; + {eof, Size} -> + Meta = #{<<"position">> => Size, <<"bytes">> => Bytes}, + {ok, #{meta => Meta, items => <<"">>}}; + {error, Reason} -> + logger:log(error, "read_file_failed by ~p", [{Name, Reason, Position, Bytes}]), + {error, Reason}; + {badrpc, nodedown} -> + {error, "BadRpc node down"} + end; + {error, Reason} -> {error, Reason} + end. + +get_trace_size() -> + TraceDir = emqx_trace:trace_dir(), + Node = node(), + case file:list_dir(TraceDir) of + {ok, AllFiles} -> + lists:foldl(fun(File, Acc) -> + FullFileName = filename:join(TraceDir, File), + Acc#{{Node, File} => filelib:file_size(FullFileName)} + end, #{}, lists:delete("zip", AllFiles)); + _ -> #{} + end. + +%% this is an rpc call for stream_log_file/2 +read_trace_file(Name, Position, Limit) -> + TraceDir = emqx_trace:trace_dir(), + {ok, AllFiles} = file:list_dir(TraceDir), + TracePrefix = "trace_" ++ binary_to_list(Name) ++ "_", + Filter = fun(FileName) -> nomatch =/= string:prefix(FileName, TracePrefix) end, + case lists:filter(Filter, AllFiles) of + [TraceFile] -> + TracePath = filename:join([TraceDir, TraceFile]), + read_file(TracePath, Position, Limit); + [] -> {error, not_found} + end. + +read_file(Path, Offset, Bytes) -> + {ok, IoDevice} = file:open(Path, [read, raw, binary]), + try + _ = case Offset of + 0 -> ok; + _ -> file:position(IoDevice, {bof, Offset}) + end, + case file:read(IoDevice, Bytes) of + {ok, Bin} -> {ok, Bin}; + {error, Reason} -> {error, Reason}; + eof -> + {ok, #file_info{size = Size}} = file:read_file_info(IoDevice), + {eof, Size} + end + after + file:close(IoDevice) + end. + +to_node(Node) -> + try {ok, binary_to_existing_atom(Node)} + catch _:_ -> + {error, "node not found"} + end. + +collect_file_size(Nodes, FileName, AllFiles) -> + lists:foldl(fun(Node, Acc) -> + Size = maps:get({Node, FileName}, AllFiles, 0), + Acc#{Node => Size} + end, #{}, Nodes). + +%% status(false, _Start, End, Now) when End > Now -> <<"stopped">>; +status(false, _Start, _End, _Now) -> <<"stopped">>; +status(true, Start, _End, Now) when Now < Start -> <<"waiting">>; +status(true, _Start, End, Now) when Now >= End -> <<"stopped">>; +status(true, _Start, _End, _Now) -> <<"running">>. 
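
Reviewer note (not part of this patch): a minimal sketch of how the new trace API introduced above is expected to be driven end to end, assuming the field names accepted by to_trace/2 (name, type, topic/clientid/ip_address, start_at, end_at as RFC3339 binaries). The trace name and topic below are examples only.

    %% Illustrative only -- not part of the patch.
    %% Create a ten-minute topic trace, then look up its log file name.
    example_create_trace() ->
        Now = erlang:system_time(second),
        Param = #{name     => <<"example">>,            %% example name
                  type     => <<"topic">>,
                  topic    => <<"t/#">>,                %% example topic filter
                  start_at => list_to_binary(calendar:system_time_to_rfc3339(Now)),
                  end_at   => list_to_binary(calendar:system_time_to_rfc3339(Now + 600))},
        ok = emqx_trace:create(Param),
        {ok, FileName} = emqx_trace:get_trace_filename(<<"example">>),
        FileName.

If start_at/end_at are omitted, fill_default/1 above makes the trace start immediately and run for ten minutes.
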
diff --git a/apps/emqx/src/emqx_trace/emqx_trace_handler.erl b/apps/emqx/src/emqx_trace/emqx_trace_handler.erl new file mode 100644 index 000000000..27bada946 --- /dev/null +++ b/apps/emqx/src/emqx_trace/emqx_trace_handler.erl @@ -0,0 +1,218 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2018-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_trace_handler). + +-include("emqx.hrl"). +-include("logger.hrl"). + +-logger_header("[Tracer]"). + +%% APIs +-export([ running/0 + , install/3 + , install/4 + , uninstall/1 + , uninstall/2 + ]). + +%% For logger handler filters callbacks +-export([ filter_clientid/2 + , filter_topic/2 + , filter_ip_address/2 + ]). + +-export([handler_id/2]). + +-type tracer() :: #{ + name := binary(), + type := clientid | topic | ip_address, + filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address() + }. + +-define(FORMAT, + {logger_formatter, #{ + template => [ + time, " [", level, "] ", + {clientid, + [{peername, [clientid, "@", peername, " "], [clientid, " "]}], + [{peername, [peername, " "], []}] + }, + msg, "\n" + ], + single_line => false, + max_size => unlimited, + depth => unlimited + }} +). + +-define(CONFIG(_LogFile_), #{ + type => halt, + file => _LogFile_, + max_no_bytes => 512 * 1024 * 1024, + overload_kill_enable => true, + overload_kill_mem_size => 50 * 1024 * 1024, + overload_kill_qlen => 20000, + %% disable restart + overload_kill_restart_after => infinity + }). + +%%------------------------------------------------------------------------------ +%% APIs +%%------------------------------------------------------------------------------ + +-spec install(Name :: binary() | list(), + Type :: clientid | topic | ip_address, + Filter ::emqx_types:clientid() | emqx_types:topic() | string(), + Level :: logger:level() | all, + LogFilePath :: string()) -> ok | {error, term()}. +install(Name, Type, Filter, Level, LogFile) -> + Who = #{type => Type, filter => ensure_bin(Filter), name => ensure_bin(Name)}, + install(Who, Level, LogFile). + +-spec install(Type :: clientid | topic | ip_address, + Filter ::emqx_types:clientid() | emqx_types:topic() | string(), + Level :: logger:level() | all, + LogFilePath :: string()) -> ok | {error, term()}. +install(Type, Filter, Level, LogFile) -> + install(Filter, Type, Filter, Level, LogFile). + +-spec install(tracer(), logger:level() | all, string()) -> ok | {error, term()}. 
+install(Who, all, LogFile) -> + install(Who, debug, LogFile); +install(Who, Level, LogFile) -> + PrimaryLevel = emqx_logger:get_primary_log_level(), + try logger:compare_levels(Level, PrimaryLevel) of + lt -> + {error, + io_lib:format( + "Cannot trace at a log level (~s) " + "lower than the primary log level (~s)", + [Level, PrimaryLevel] + )}; + _GtOrEq -> + install_handler(Who, Level, LogFile) + catch + error:badarg -> + {error, {invalid_log_level, Level}} + end. + +-spec uninstall(Type :: clientid | topic | ip_address, + Name :: binary() | list()) -> ok | {error, term()}. +uninstall(Type, Name) -> + HandlerId = handler_id(ensure_bin(Name), Type), + uninstall(HandlerId). + +-spec uninstall(HandlerId :: atom()) -> ok | {error, term()}. +uninstall(HandlerId) -> + Res = logger:remove_handler(HandlerId), + show_prompts(Res, HandlerId, "Stop trace"), + Res. + +%% @doc Return all running trace handlers information. +-spec running() -> + [ + #{ + name => binary(), + type => topic | clientid | ip_address, + id => atom(), + filter => emqx_types:topic() | emqx_types:clienetid() | emqx_trace:ip_address(), + level => logger:level(), + dst => file:filename() | console | unknown + } + ]. +running() -> + lists:foldl(fun filter_traces/2, [], emqx_logger:get_log_handlers(started)). + +-spec filter_clientid(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore. +filter_clientid(#{meta := #{clientid := ClientId}} = Log, {ClientId, _Name}) -> Log; +filter_clientid(_Log, _ExpectId) -> ignore. + +-spec filter_topic(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore. +filter_topic(#{meta := #{topic := Topic}} = Log, {TopicFilter, _Name}) -> + case emqx_topic:match(Topic, TopicFilter) of + true -> Log; + false -> ignore + end; +filter_topic(_Log, _ExpectId) -> ignore. + +-spec filter_ip_address(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore. +filter_ip_address(#{meta := #{peername := Peername}} = Log, {IP, _Name}) -> + case lists:prefix(IP, Peername) of + true -> Log; + false -> ignore + end; +filter_ip_address(_Log, _ExpectId) -> ignore. + +install_handler(Who = #{name := Name, type := Type}, Level, LogFile) -> + HandlerId = handler_id(Name, Type), + Config = #{ level => Level, + formatter => ?FORMAT, + filter_default => stop, + filters => filters(Who), + config => ?CONFIG(LogFile) + }, + Res = logger:add_handler(HandlerId, logger_disk_log_h, Config), + show_prompts(Res, Who, "Start trace"), + Res. + +filters(#{type := clientid, filter := Filter, name := Name}) -> + [{clientid, {fun ?MODULE:filter_clientid/2, {ensure_list(Filter), Name}}}]; +filters(#{type := topic, filter := Filter, name := Name}) -> + [{topic, {fun ?MODULE:filter_topic/2, {ensure_bin(Filter), Name}}}]; +filters(#{type := ip_address, filter := Filter, name := Name}) -> + [{ip_address, {fun ?MODULE:filter_ip_address/2, {ensure_list(Filter), Name}}}]. + +filter_traces(#{id := Id, level := Level, dst := Dst, filters := Filters}, Acc) -> + Init = #{id => Id, level => Level, dst => Dst}, + case Filters of + [{Type, {_FilterFun, {Filter, Name}}}] when + Type =:= topic orelse + Type =:= clientid orelse + Type =:= ip_address -> + [Init#{type => Type, filter => Filter, name => Name} | Acc]; + _ -> + Acc + end. + +handler_id(Name, Type) -> + try + do_handler_id(Name, Type) + catch + _ : _ -> + Hash = emqx_misc:bin2hexstr_a_f(crypto:hash(md5, Name)), + do_handler_id(Hash, Type) + end. + +%% Handler ID must be an atom. 
+do_handler_id(Name, Type) -> + TypeStr = atom_to_list(Type), + NameStr = unicode:characters_to_list(Name, utf8), + FullNameStr = "trace_" ++ TypeStr ++ "_" ++ NameStr, + true = io_lib:printable_unicode_list(FullNameStr), + FullNameBin = unicode:characters_to_binary(FullNameStr, utf8), + binary_to_atom(FullNameBin, utf8). + +ensure_bin(List) when is_list(List) -> iolist_to_binary(List); +ensure_bin(Bin) when is_binary(Bin) -> Bin. + +ensure_list(Bin) when is_binary(Bin) -> binary_to_list(Bin); +ensure_list(List) when is_list(List) -> List. + +show_prompts(ok, Who, Msg) -> + ?LOG(info, Msg ++ " ~p " ++ "successfully~n", [Who]); +show_prompts({error, Reason}, Who, Msg) -> + ?LOG(error, Msg ++ " ~p " ++ "failed with ~p~n", [Who, Reason]). diff --git a/apps/emqx/src/emqx_tracer.erl b/apps/emqx/src/emqx_tracer.erl deleted file mode 100644 index 512ef45aa..000000000 --- a/apps/emqx/src/emqx_tracer.erl +++ /dev/null @@ -1,167 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2018-2021 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- - --module(emqx_tracer). - --include("emqx.hrl"). --include("logger.hrl"). - - -%% APIs --export([ trace/2 - , start_trace/3 - , lookup_traces/0 - , stop_trace/1 - ]). - --type(trace_who() :: {clientid | topic, binary()}). - --define(TRACER, ?MODULE). --define(FORMAT, {logger_formatter, - #{template => - [time, " [", level, "] ", - {clientid, - [{peername, - [clientid, "@", peername, " "], - [clientid, " "]}], - [{peername, - [peername, " "], - []}]}, - msg, "\n"], - single_line => false - }}). --define(TOPIC_TRACE_ID(T), "trace_topic_"++T). --define(CLIENT_TRACE_ID(C), "trace_clientid_"++C). --define(TOPIC_TRACE(T), {topic, T}). --define(CLIENT_TRACE(C), {clientid, C}). - --define(IS_LOG_LEVEL(L), - L =:= emergency orelse - L =:= alert orelse - L =:= critical orelse - L =:= error orelse - L =:= warning orelse - L =:= notice orelse - L =:= info orelse - L =:= debug). - --dialyzer({nowarn_function, [install_trace_handler/3]}). - -%%------------------------------------------------------------------------------ -%% APIs -%%------------------------------------------------------------------------------ -trace(publish, #message{topic = <<"$SYS/", _/binary>>}) -> - %% Do not trace '$SYS' publish - ignore; -trace(publish, #message{from = From, topic = Topic, payload = Payload}) - when is_binary(From); is_atom(From) -> - emqx_logger:info(#{topic => Topic, - mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY} }, - "PUBLISH to ~ts: ~0p", [Topic, Payload]). - -%% @doc Start to trace clientid or topic. --spec(start_trace(trace_who(), logger:level() | all, string()) -> ok | {error, term()}). 
-start_trace(Who, all, LogFile) -> - start_trace(Who, debug, LogFile); -start_trace(Who, Level, LogFile) -> - case ?IS_LOG_LEVEL(Level) of - true -> - #{level := PrimaryLevel} = logger:get_primary_config(), - try logger:compare_levels(Level, PrimaryLevel) of - lt -> - {error, - io_lib:format("Cannot trace at a log level (~ts) " - "lower than the primary log level (~ts)", - [Level, PrimaryLevel])}; - _GtOrEq -> - install_trace_handler(Who, Level, LogFile) - catch - _:Error -> - {error, Error} - end; - false -> {error, {invalid_log_level, Level}} - end. - -%% @doc Stop tracing clientid or topic. --spec(stop_trace(trace_who()) -> ok | {error, term()}). -stop_trace(Who) -> - uninstall_trance_handler(Who). - -%% @doc Lookup all traces --spec(lookup_traces() -> [{Who :: trace_who(), LogFile :: string()}]). -lookup_traces() -> - lists:foldl(fun filter_traces/2, [], emqx_logger:get_log_handlers(started)). - -install_trace_handler(Who, Level, LogFile) -> - case logger:add_handler(handler_id(Who), logger_disk_log_h, - #{level => Level, - formatter => ?FORMAT, - config => #{type => halt, file => LogFile}, - filter_default => stop, - filters => [{meta_key_filter, - {fun filter_by_meta_key/2, Who}}]}) - of - ok -> - ?SLOG(info, #{msg => "start_trace", who => Who}); - {error, Reason} -> - ?SLOG(error, #{msg => "failed_to_trace", who => Who, reason => Reason}), - {error, Reason} - end. - -uninstall_trance_handler(Who) -> - case logger:remove_handler(handler_id(Who)) of - ok -> - ?SLOG(info, #{msg => "stop_trace", who => Who}); - {error, Reason} -> - ?SLOG(error, #{msg => "failed_to_stop_trace", who => Who, reason => Reason}), - {error, Reason} - end. - -filter_traces(#{id := Id, level := Level, dst := Dst}, Acc) -> - case atom_to_list(Id) of - ?TOPIC_TRACE_ID(T)-> - [{?TOPIC_TRACE(T), {Level, Dst}} | Acc]; - ?CLIENT_TRACE_ID(C) -> - [{?CLIENT_TRACE(C), {Level, Dst}} | Acc]; - _ -> Acc - end. - -handler_id(?TOPIC_TRACE(Topic)) -> - list_to_atom(?TOPIC_TRACE_ID(handler_name(Topic))); -handler_id(?CLIENT_TRACE(ClientId)) -> - list_to_atom(?CLIENT_TRACE_ID(handler_name(ClientId))). - -filter_by_meta_key(#{meta := Meta} = Log, {Key, Value}) -> - case is_meta_match(Key, Value, Meta) of - true -> Log; - false -> ignore - end. - -is_meta_match(clientid, ClientId, #{clientid := ClientIdStr}) -> - ClientId =:= iolist_to_binary(ClientIdStr); -is_meta_match(topic, TopicFilter, #{topic := TopicMeta}) -> - emqx_topic:match(TopicMeta, TopicFilter); -is_meta_match(_, _, _) -> - false. - -handler_name(Bin) -> - case byte_size(Bin) of - Size when Size =< 200 -> binary_to_list(Bin); - _ -> hashstr(Bin) - end. - -hashstr(Bin) -> - binary_to_list(emqx_misc:bin2hexstr_A_F(Bin)). 
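
Reviewer note (not part of this patch): the deleted emqx_tracer derived handler ids from the topic or clientid itself, while the new emqx_trace_handler derives them from the trace type and name ('trace_<type>_<name>', with an MD5 hex fallback for unprintable names). A hedged sketch of driving the new handler module directly; the name, filter, and log path are examples, and it assumes the primary log level is already at debug (emqx_trace lowers it for enabled traces) and that no other trace handler is running.

    %% Illustrative only -- not part of the patch.
    example_install_handler() ->
        Who = #{name => <<"example">>, type => topic, filter => <<"t/#">>},
        %% installs logger handler 'trace_topic_example' (see handler_id/2 above)
        ok = emqx_trace_handler:install(Who, debug, "/tmp/trace_example.log"),
        %% assumes this is the only running trace handler
        [#{type := topic, name := <<"example">>}] = emqx_trace_handler:running(),
        ok = emqx_trace_handler:uninstall(topic, <<"example">>).
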
diff --git a/apps/emqx/src/emqx_vm_mon.erl b/apps/emqx/src/emqx_vm_mon.erl index 703aca52f..9a30e71f2 100644 --- a/apps/emqx/src/emqx_vm_mon.erl +++ b/apps/emqx/src/emqx_vm_mon.erl @@ -62,12 +62,23 @@ handle_info({timeout, _Timer, check}, State) -> ProcessCount = erlang:system_info(process_count), case ProcessCount / erlang:system_info(process_limit) of Percent when Percent >= ProcHighWatermark -> - emqx_alarm:activate(too_many_processes, #{ - usage => io_lib:format("~p%", [Percent*100]), - high_watermark => ProcHighWatermark, - low_watermark => ProcLowWatermark}); + Usage = io_lib:format("~p%", [Percent*100]), + Message = [Usage, " process usage"], + emqx_alarm:activate(too_many_processes, + #{ + usage => Usage, + high_watermark => ProcHighWatermark, + low_watermark => ProcLowWatermark}, + Message); Percent when Percent < ProcLowWatermark -> - emqx_alarm:deactivate(too_many_processes); + Usage = io_lib:format("~p%", [Percent*100]), + Message = [Usage, " process usage"], + emqx_alarm:deactivate(too_many_processes, + #{ + usage => Usage, + high_watermark => ProcHighWatermark, + low_watermark => ProcLowWatermark}, + Message); _Precent -> ok end, diff --git a/apps/emqx/test/emqx_alarm_SUITE.erl b/apps/emqx/test/emqx_alarm_SUITE.erl index 0a720ffc1..b542250b3 100644 --- a/apps/emqx/test/emqx_alarm_SUITE.erl +++ b/apps/emqx/test/emqx_alarm_SUITE.erl @@ -32,16 +32,12 @@ init_per_testcase(t_size_limit, Config) -> <<"size_limit">> => 2 }), Config; -init_per_testcase(t_validity_period, Config) -> +init_per_testcase(_, Config) -> emqx_common_test_helpers:boot_modules(all), emqx_common_test_helpers:start_apps([]), {ok, _} = emqx:update_config([alarm], #{ <<"validity_period">> => <<"1s">> }), - Config; -init_per_testcase(_, Config) -> - emqx_common_test_helpers:boot_modules(all), - emqx_common_test_helpers:start_apps([]), Config. end_per_testcase(_, _Config) -> @@ -86,17 +82,77 @@ t_size_limit(_) -> ?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))), emqx_alarm:delete_all_deactivated_alarms(). -t_validity_period(_) -> - ok = emqx_alarm:activate(a), - ok = emqx_alarm:deactivate(a), +t_validity_period(_Config) -> + ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>), + ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}), ?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))), + %% call with unknown msg + ?assertEqual(ignored, gen_server:call(emqx_alarm, unknown_alarm)), ct:sleep(3000), ?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))). +t_validity_period_1(_Config) -> + ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>), + ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}), + ?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))), + %% info with unknown msg + erlang:send(emqx_alarm, unknown_alarm), + ct:sleep(3000), + ?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))). 
+ +t_validity_period_2(_Config) -> + ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>), + ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}), + ?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))), + %% cast with unknown msg + gen_server:cast(emqx_alarm, unknown_alarm), + ct:sleep(3000), + ?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))). + +-record(activated_alarm, { + name :: binary() | atom(), + details :: map() | list(), + message :: binary(), + activate_at :: integer() +}). + +-record(deactivated_alarm, { + activate_at :: integer(), + name :: binary() | atom(), + details :: map() | list(), + message :: binary(), + deactivate_at :: integer() | infinity +}). + +t_format(_Config) -> + Name = test_alarm, + Message = "test_msg", + At = erlang:system_time(microsecond), + Details = "test_details", + Node = node(), + Activate = #activated_alarm{name = Name, message = Message, activate_at = At, details = Details}, + #{ + node := Node, + name := Name, + message := Message, + duration := 0, + details := Details + } = emqx_alarm:format(Activate), + Deactivate = #deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details, + deactivate_at = At}, + #{ + node := Node, + name := Name, + message := Message, + duration := 0, + details := Details + } = emqx_alarm:format(Deactivate), + ok. + + get_alarm(Name, [Alarm = #{name := Name} | _More]) -> Alarm; get_alarm(Name, [_Alarm | More]) -> get_alarm(Name, More); get_alarm(_Name, []) -> {error, not_found}. - diff --git a/apps/emqx/test/emqx_authentication_SUITE.erl b/apps/emqx/test/emqx_authentication_SUITE.erl index 62224a87f..10a4e4091 100644 --- a/apps/emqx/test/emqx_authentication_SUITE.erl +++ b/apps/emqx/test/emqx_authentication_SUITE.erl @@ -28,7 +28,7 @@ -export([ roots/0, fields/1 ]). --export([ create/1 +-export([ create/2 , update/2 , authenticate/2 , destroy/1 @@ -70,7 +70,7 @@ check_config(C) -> #{atom_key => true}), R. -create(_Config) -> +create(_AuthenticatorID, _Config) -> {ok, #{mark => 1}}. update(_Config, _State) -> @@ -103,22 +103,28 @@ end_per_testcase(Case, Config) -> _ = ?MODULE:Case({'end', Config}), ok. + t_chain({_, Config}) -> Config; + t_chain(Config) when is_list(Config) -> % CRUD of authentication chain ChainName = 'test', ?assertMatch({ok, []}, ?AUTHN:list_chains()), + ?assertMatch({ok, []}, ?AUTHN:list_chain_names()), ?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:create_chain(ChainName)), ?assertEqual({error, {already_exists, {chain, ChainName}}}, ?AUTHN:create_chain(ChainName)), ?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:lookup_chain(ChainName)), ?assertMatch({ok, [#{name := ChainName}]}, ?AUTHN:list_chains()), + ?assertEqual({ok, [ChainName]}, ?AUTHN:list_chain_names()), ?assertEqual(ok, ?AUTHN:delete_chain(ChainName)), ?assertMatch({error, {not_found, {chain, ChainName}}}, ?AUTHN:lookup_chain(ChainName)), ok. 
+ t_authenticator({'init', Config}) -> [{"auth1", {'password-based', 'built-in-database'}}, {"auth2", {'password-based', mysql}} | Config]; + t_authenticator(Config) when is_list(Config) -> ChainName = 'test', AuthenticatorConfig1 = #{mechanism => 'password-based', @@ -126,23 +132,43 @@ t_authenticator(Config) when is_list(Config) -> enable => true}, % Create an authenticator when the authentication chain does not exist - ?assertEqual({error, {not_found, {chain, ChainName}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), + ?assertEqual( + {error, {not_found, {chain, ChainName}}}, + ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), + ?AUTHN:create_chain(ChainName), % Create an authenticator when the provider does not exist - ?assertEqual({error, no_available_provider}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), + + ?assertEqual( + {error, no_available_provider}, + ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), AuthNType1 = ?config("auth1"), register_provider(AuthNType1, ?MODULE), ID1 = <<"password-based:built-in-database">>, % CRUD of authencaticator - ?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), + ?assertMatch( + {ok, #{id := ID1, state := #{mark := 1}}}, + ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), + ?assertMatch({ok, #{id := ID1}}, ?AUTHN:lookup_authenticator(ChainName, ID1)), ?assertMatch({ok, [#{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)), - ?assertEqual({error, {already_exists, {authenticator, ID1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), - ?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)), + + ?assertEqual( + {error, {already_exists, {authenticator, ID1}}}, + ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), + + ?assertMatch( + {ok, #{id := ID1, state := #{mark := 2}}}, + ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)), + ?assertEqual(ok, ?AUTHN:delete_authenticator(ChainName, ID1)), - ?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)), + + ?assertEqual( + {error, {not_found, {authenticator, ID1}}}, + ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)), + ?assertMatch({ok, []}, ?AUTHN:list_authenticators(ChainName)), % Multiple authenticators exist at the same time @@ -152,25 +178,37 @@ t_authenticator(Config) when is_list(Config) -> AuthenticatorConfig2 = #{mechanism => 'password-based', backend => mysql, enable => true}, - ?assertMatch({ok, #{id := ID1}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), - ?assertMatch({ok, #{id := ID2}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)), + + ?assertMatch( + {ok, #{id := ID1}}, + ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), + + ?assertMatch( + {ok, #{id := ID2}}, + ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)), % Move authenticator ?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)), + ?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, top)), ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)), + ?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, bottom)), ?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)), + ?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, 
{before, ID1})), ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)); + t_authenticator({'end', Config}) -> ?AUTHN:delete_chain(test), ?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]), ok. + t_authenticate({init, Config}) -> [{listener_id, 'tcp:default'}, {authn_type, {'password-based', 'built-in-database'}} | Config]; + t_authenticate(Config) when is_list(Config) -> ListenerID = ?config(listener_id), AuthNType = ?config(authn_type), @@ -188,13 +226,21 @@ t_authenticate(Config) when is_list(Config) -> enable => true}, ?AUTHN:create_chain(ListenerID), ?assertMatch({ok, _}, ?AUTHN:create_authenticator(ListenerID, AuthenticatorConfig)), - ?assertEqual({ok, #{is_superuser => true}}, emqx_access_control:authenticate(ClientInfo)), - ?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>})); + + ?assertEqual( + {ok, #{is_superuser => true}}, + emqx_access_control:authenticate(ClientInfo)), + + ?assertEqual( + {error, bad_username_or_password}, + emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>})); + t_authenticate({'end', Config}) -> ?AUTHN:delete_chain(?config(listener_id)), ?AUTHN:deregister_provider(?config(authn_type)), ok. + t_update_config({init, Config}) -> Global = 'mqtt:global', AuthNType1 = {'password-based', 'built-in-database'}, @@ -202,6 +248,7 @@ t_update_config({init, Config}) -> [{global, Global}, {"auth1", AuthNType1}, {"auth2", AuthNType2} | Config]; + t_update_config(Config) when is_list(Config) -> emqx_config_handler:add_handler([authentication], emqx_authentication), ok = register_provider(?config("auth1"), ?MODULE), @@ -217,46 +264,131 @@ t_update_config(Config) when is_list(Config) -> ID2 = <<"password-based:mysql">>, ?assertMatch({ok, []}, ?AUTHN:list_chains()), - ?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})), - ?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID1)), - ?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})), - ?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID2)), + ?assertMatch( + {ok, _}, + update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})), - ?assertMatch({ok, _}, update_config([authentication], {update_authenticator, Global, ID1, AuthenticatorConfig1#{<<"enable">> => false}})), - ?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(Global, ID1)), + ?assertMatch( + {ok, #{id := ID1, state := #{mark := 1}}}, + ?AUTHN:lookup_authenticator(Global, ID1)), + + ?assertMatch( + {ok, _}, + update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})), + + ?assertMatch( + {ok, #{id := ID2, state := #{mark := 1}}}, + ?AUTHN:lookup_authenticator(Global, ID2)), + + ?assertMatch( + {ok, _}, + update_config([authentication], + {update_authenticator, + Global, + ID1, + AuthenticatorConfig1#{<<"enable">> => false} + })), + + ?assertMatch( + {ok, #{id := ID1, state := #{mark := 2}}}, + ?AUTHN:lookup_authenticator(Global, ID1)), + + ?assertMatch( + {ok, _}, + update_config([authentication], {move_authenticator, Global, ID2, top})), - ?assertMatch({ok, _}, update_config([authentication], {move_authenticator, Global, ID2, top})), ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(Global)), ?assertMatch({ok, _}, 
update_config([authentication], {delete_authenticator, Global, ID1})), - ?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(Global, ID1)), + ?assertEqual( + {error, {not_found, {authenticator, ID1}}}, + ?AUTHN:lookup_authenticator(Global, ID1)), - ?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID2})), - ?assertEqual({error, {not_found, {authenticator, ID2}}}, ?AUTHN:lookup_authenticator(Global, ID2)), + ?assertMatch( + {ok, _}, + update_config([authentication], {delete_authenticator, Global, ID2})), + + ?assertEqual( + {error, {not_found, {authenticator, ID2}}}, + ?AUTHN:lookup_authenticator(Global, ID2)), ListenerID = 'tcp:default', ConfKeyPath = [listeners, tcp, default, authentication], - ?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig1})), - ?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)), - ?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig2})), - ?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID2)), + ?assertMatch( + {ok, _}, + update_config(ConfKeyPath, + {create_authenticator, ListenerID, AuthenticatorConfig1})), - ?assertMatch({ok, _}, update_config(ConfKeyPath, {update_authenticator, ListenerID, ID1, AuthenticatorConfig1#{<<"enable">> => false}})), - ?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)), + ?assertMatch( + {ok, #{id := ID1, state := #{mark := 1}}}, + ?AUTHN:lookup_authenticator(ListenerID, ID1)), - ?assertMatch({ok, _}, update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, top})), - ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ListenerID)), + ?assertMatch( + {ok, _}, + update_config(ConfKeyPath, + {create_authenticator, ListenerID, AuthenticatorConfig2})), + + ?assertMatch( + {ok, #{id := ID2, state := #{mark := 1}}}, + ?AUTHN:lookup_authenticator(ListenerID, ID2)), + + ?assertMatch( + {ok, _}, + update_config(ConfKeyPath, + {update_authenticator, + ListenerID, + ID1, + AuthenticatorConfig1#{<<"enable">> => false} + })), + + ?assertMatch( + {ok, #{id := ID1, state := #{mark := 2}}}, + ?AUTHN:lookup_authenticator(ListenerID, ID1)), + + ?assertMatch( + {ok, _}, + update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, top})), + + ?assertMatch( + {ok, [#{id := ID2}, #{id := ID1}]}, + ?AUTHN:list_authenticators(ListenerID)), + + ?assertMatch( + {ok, _}, + update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})), + + ?assertEqual( + {error, {not_found, {authenticator, ID1}}}, + ?AUTHN:lookup_authenticator(ListenerID, ID1)); - ?assertMatch({ok, _}, update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})), - ?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)); t_update_config({'end', Config}) -> ?AUTHN:delete_chain(?config(global)), ?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]), ok. 
+ +t_restart({'init', Config}) -> Config; + +t_restart(Config) when is_list(Config) -> + ?assertEqual({ok, []}, ?AUTHN:list_chain_names()), + + ?AUTHN:create_chain(test_chain), + ?assertEqual({ok, [test_chain]}, ?AUTHN:list_chain_names()), + + ok = supervisor:terminate_child(emqx_authentication_sup, ?AUTHN), + {ok, _} = supervisor:restart_child(emqx_authentication_sup, ?AUTHN), + + ?assertEqual({ok, [test_chain]}, ?AUTHN:list_chain_names()); + +t_restart({'end', _Config}) -> + ?AUTHN:delete_chain(test_chain), + ok. + + t_convert_certs({_, Config}) -> Config; + t_convert_certs(Config) when is_list(Config) -> Global = <<"mqtt:global">>, Certs = certs([ {<<"keyfile">>, "key.pem"} @@ -270,7 +402,11 @@ t_convert_certs(Config) when is_list(Config) -> Certs2 = certs([ {<<"keyfile">>, "key.pem"} , {<<"certfile">>, "cert.pem"} ]), - #{<<"ssl">> := NCerts2} = convert_certs(CertsDir, #{<<"ssl">> => Certs2}, #{<<"ssl">> => NCerts}), + + #{<<"ssl">> := NCerts2} = convert_certs( + CertsDir, + #{<<"ssl">> => Certs2}, #{<<"ssl">> => NCerts}), + ?assertEqual(maps:get(<<"keyfile">>, NCerts), maps:get(<<"keyfile">>, NCerts2)), ?assertEqual(maps:get(<<"certfile">>, NCerts), maps:get(<<"certfile">>, NCerts2)), @@ -278,7 +414,11 @@ t_convert_certs(Config) when is_list(Config) -> , {<<"certfile">>, "client-cert.pem"} , {<<"cacertfile">>, "cacert.pem"} ]), - #{<<"ssl">> := NCerts3} = convert_certs(CertsDir, #{<<"ssl">> => Certs3}, #{<<"ssl">> => NCerts2}), + + #{<<"ssl">> := NCerts3} = convert_certs( + CertsDir, + #{<<"ssl">> => Certs3}, #{<<"ssl">> => NCerts2}), + ?assertNotEqual(maps:get(<<"keyfile">>, NCerts2), maps:get(<<"keyfile">>, NCerts3)), ?assertNotEqual(maps:get(<<"certfile">>, NCerts2), maps:get(<<"certfile">>, NCerts3)), diff --git a/apps/emqx/test/emqx_banned_SUITE.erl b/apps/emqx/test/emqx_banned_SUITE.erl index de117ab00..e09d0baae 100644 --- a/apps/emqx/test/emqx_banned_SUITE.erl +++ b/apps/emqx/test/emqx_banned_SUITE.erl @@ -41,16 +41,16 @@ t_add_delete(_) -> at = erlang:system_time(second), until = erlang:system_time(second) + 1000 }, - ok = emqx_banned:create(Banned), + {ok, _} = emqx_banned:create(Banned), ?assertEqual(1, emqx_banned:info(size)), ok = emqx_banned:delete({clientid, <<"TestClient">>}), ?assertEqual(0, emqx_banned:info(size)). t_check(_) -> - ok = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}}), - ok = emqx_banned:create(#banned{who = {username, <<"BannedUser">>}}), - ok = emqx_banned:create(#banned{who = {peerhost, {192,168,0,1}}}), + {ok, _} = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}}), + {ok, _} = emqx_banned:create(#banned{who = {username, <<"BannedUser">>}}), + {ok, _} = emqx_banned:create(#banned{who = {peerhost, {192,168,0,1}}}), ?assertEqual(3, emqx_banned:info(size)), ClientInfo1 = #{clientid => <<"BannedClient">>, username => <<"user">>, @@ -83,7 +83,7 @@ t_check(_) -> t_unused(_) -> {ok, Banned} = emqx_banned:start_link(), - ok = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}, + {ok, _} = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}, until = erlang:system_time(second)}), ?assertEqual(ignored, gen_server:call(Banned, unexpected_req)), ?assertEqual(ok, gen_server:cast(Banned, unexpected_msg)), diff --git a/apps/emqx/test/emqx_broker_SUITE.erl b/apps/emqx/test/emqx_broker_SUITE.erl index b6a99b8bc..440e0fe42 100644 --- a/apps/emqx/test/emqx_broker_SUITE.erl +++ b/apps/emqx/test/emqx_broker_SUITE.erl @@ -23,20 +23,71 @@ -include_lib("eunit/include/eunit.hrl"). 
-include_lib("common_test/include/ct.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). -all() -> emqx_common_test_helpers:all(?MODULE). +all() -> + [ {group, all_cases} + , {group, connected_client_count_group} + ]. -init_per_suite(Config) -> +groups() -> + TCs = emqx_common_test_helpers:all(?MODULE), + ConnClientTCs = [ t_connected_client_count_persistent + , t_connected_client_count_anonymous + , t_connected_client_stats + ], + OtherTCs = TCs -- ConnClientTCs, + [ {all_cases, [], OtherTCs} + , {connected_client_count_group, [ {group, tcp} + , {group, ws} + , {group, quic} + ]} + , {tcp, [], ConnClientTCs} + , {ws, [], ConnClientTCs} + , {quic, [], ConnClientTCs} + ]. + +init_per_group(connected_client_count_group, Config) -> + Config; +init_per_group(tcp, Config) -> + emqx_common_test_helpers:boot_modules(all), + emqx_common_test_helpers:start_apps([]), + [{conn_fun, connect} | Config]; +init_per_group(ws, Config) -> + emqx_common_test_helpers:boot_modules(all), + emqx_common_test_helpers:start_apps([]), + [ {ssl, false} + , {enable_websocket, true} + , {conn_fun, ws_connect} + , {port, 8083} + , {host, "localhost"} + | Config + ]; +init_per_group(quic, Config) -> + emqx_common_test_helpers:boot_modules(all), + emqx_common_test_helpers:start_apps([]), + [ {conn_fun, quic_connect} + , {port, 14567} + | Config]; +init_per_group(_Group, Config) -> emqx_common_test_helpers:boot_modules(all), emqx_common_test_helpers:start_apps([]), Config. -end_per_suite(_Config) -> +end_per_group(connected_client_count_group, _Config) -> + ok; +end_per_group(_Group, _Config) -> emqx_common_test_helpers:stop_apps([]). +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + init_per_testcase(Case, Config) -> ?MODULE:Case({init, Config}). @@ -277,6 +328,240 @@ t_shard({'end', _Config}) -> emqx_broker:unsubscribe(<<"topic">>), ok = meck:unload(emqx_broker_helper). 
+%% persistent sessions, when gone, do not contribute to connected +%% client count +t_connected_client_count_persistent({init, Config}) -> + ok = snabbkaffe:start_trace(), + process_flag(trap_exit, true), + Config; +t_connected_client_count_persistent(Config) when is_list(Config) -> + ConnFun = ?config(conn_fun, Config), + ClientID = <<"clientid">>, + ?assertEqual(0, emqx_cm:get_connected_client_count()), + {ok, ConnPid0} = emqtt:start_link([ {clean_start, false} + , {clientid, ClientID} + | Config]), + {{ok, _}, {ok, [_]}} = wait_for_events( + fun() -> emqtt:ConnFun(ConnPid0) end, + [emqx_cm_connected_client_count_inc] + ), + timer:sleep(10), + ?assertEqual(1, emqx_cm:get_connected_client_count()), + {ok, {ok, [_]}} = wait_for_events( + fun() -> emqtt:disconnect(ConnPid0) end, + [emqx_cm_connected_client_count_dec] + ), + timer:sleep(10), + ?assertEqual(0, emqx_cm:get_connected_client_count()), + %% reconnecting + {ok, ConnPid1} = emqtt:start_link([ {clean_start, false} + , {clientid, ClientID} + | Config + ]), + {{ok, _}, {ok, [_]}} = wait_for_events( + fun() -> emqtt:ConnFun(ConnPid1) end, + [emqx_cm_connected_client_count_inc] + ), + ?assertEqual(1, emqx_cm:get_connected_client_count()), + %% taking over + {ok, ConnPid2} = emqtt:start_link([ {clean_start, false} + , {clientid, ClientID} + | Config + ]), + {{ok, _}, {ok, [_, _]}} = wait_for_events( + fun() -> emqtt:ConnFun(ConnPid2) end, + [ emqx_cm_connected_client_count_inc + , emqx_cm_connected_client_count_dec + ], + 500 + ), + ?assertEqual(1, emqx_cm:get_connected_client_count()), + %% abnormal exit of channel process + ChanPids = emqx_cm:all_channels(), + {ok, {ok, [_, _]}} = wait_for_events( + fun() -> + lists:foreach( + fun(ChanPid) -> exit(ChanPid, kill) end, + ChanPids) + end, + [ emqx_cm_connected_client_count_dec + , emqx_cm_process_down + ] + ), + ?assertEqual(0, emqx_cm:get_connected_client_count()), + ok; +t_connected_client_count_persistent({'end', _Config}) -> + snabbkaffe:stop(), + ok. 
+ +%% connections without client_id also contribute to connected client +%% count +t_connected_client_count_anonymous({init, Config}) -> + ok = snabbkaffe:start_trace(), + process_flag(trap_exit, true), + Config; +t_connected_client_count_anonymous(Config) when is_list(Config) -> + ConnFun = ?config(conn_fun, Config), + ?assertEqual(0, emqx_cm:get_connected_client_count()), + %% first client + {ok, ConnPid0} = emqtt:start_link([ {clean_start, true} + | Config]), + {{ok, _}, {ok, [_]}} = wait_for_events( + fun() -> emqtt:ConnFun(ConnPid0) end, + [emqx_cm_connected_client_count_inc] + ), + ?assertEqual(1, emqx_cm:get_connected_client_count()), + %% second client + {ok, ConnPid1} = emqtt:start_link([ {clean_start, true} + | Config]), + {{ok, _}, {ok, [_]}} = wait_for_events( + fun() -> emqtt:ConnFun(ConnPid1) end, + [emqx_cm_connected_client_count_inc] + ), + ?assertEqual(2, emqx_cm:get_connected_client_count()), + %% when first client disconnects, shouldn't affect the second + {ok, {ok, [_, _]}} = wait_for_events( + fun() -> emqtt:disconnect(ConnPid0) end, + [ emqx_cm_connected_client_count_dec + , emqx_cm_process_down + ] + ), + ?assertEqual(1, emqx_cm:get_connected_client_count()), + %% reconnecting + {ok, ConnPid2} = emqtt:start_link([ {clean_start, true} + | Config + ]), + {{ok, _}, {ok, [_]}} = wait_for_events( + fun() -> emqtt:ConnFun(ConnPid2) end, + [emqx_cm_connected_client_count_inc] + ), + ?assertEqual(2, emqx_cm:get_connected_client_count()), + {ok, {ok, [_, _]}} = wait_for_events( + fun() -> emqtt:disconnect(ConnPid1) end, + [ emqx_cm_connected_client_count_dec + , emqx_cm_process_down + ] + ), + ?assertEqual(1, emqx_cm:get_connected_client_count()), + %% abnormal exit of channel process + Chans = emqx_cm:all_channels(), + {ok, {ok, [_, _]}} = wait_for_events( + fun() -> + lists:foreach( + fun(ChanPid) -> exit(ChanPid, kill) end, + Chans) + end, + [ emqx_cm_connected_client_count_dec + , emqx_cm_process_down + ] + ), + ?assertEqual(0, emqx_cm:get_connected_client_count()), + ok; +t_connected_client_count_anonymous({'end', _Config}) -> + snabbkaffe:stop(), + ok. 
+ +t_connected_client_stats({init, Config}) -> + ok = supervisor:terminate_child(emqx_kernel_sup, emqx_stats), + {ok, _} = supervisor:restart_child(emqx_kernel_sup, emqx_stats), + ok = snabbkaffe:start_trace(), + Config; +t_connected_client_stats(Config) when is_list(Config) -> + ConnFun = ?config(conn_fun, Config), + ?assertEqual(0, emqx_cm:get_connected_client_count()), + ?assertEqual(0, emqx_stats:getstat('live_connections.count')), + ?assertEqual(0, emqx_stats:getstat('live_connections.max')), + {ok, ConnPid} = emqtt:start_link([ {clean_start, true} + , {clientid, <<"clientid">>} + | Config + ]), + {{ok, _}, {ok, [_]}} = wait_for_events( + fun() -> emqtt:ConnFun(ConnPid) end, + [emqx_cm_connected_client_count_inc] + ), + timer:sleep(20), + %% ensure stats are synchronized + {_, {ok, [_]}} = wait_for_stats( + fun emqx_cm:stats_fun/0, + [#{count_stat => 'live_connections.count', + max_stat => 'live_connections.max'}] + ), + ?assertEqual(1, emqx_stats:getstat('live_connections.count')), + ?assertEqual(1, emqx_stats:getstat('live_connections.max')), + {ok, {ok, [_]}} = wait_for_events( + fun() -> emqtt:disconnect(ConnPid) end, + [emqx_cm_connected_client_count_dec] + ), + timer:sleep(20), + %% ensure stats are synchronized + {_, {ok, [_]}} = wait_for_stats( + fun emqx_cm:stats_fun/0, + [#{count_stat => 'live_connections.count', + max_stat => 'live_connections.max'}] + ), + ?assertEqual(0, emqx_stats:getstat('live_connections.count')), + ?assertEqual(1, emqx_stats:getstat('live_connections.max')), + ok; +t_connected_client_stats({'end', _Config}) -> + ok = snabbkaffe:stop(), + ok = supervisor:terminate_child(emqx_kernel_sup, emqx_stats), + {ok, _} = supervisor:restart_child(emqx_kernel_sup, emqx_stats), + ok. + +%% the count must be always non negative +t_connect_client_never_negative({init, Config}) -> + Config; +t_connect_client_never_negative(Config) when is_list(Config) -> + ?assertEqual(0, emqx_cm:get_connected_client_count()), + %% would go to -1 + ChanPid = list_to_pid("<0.0.1>"), + emqx_cm:mark_channel_disconnected(ChanPid), + ?assertEqual(0, emqx_cm:get_connected_client_count()), + %% would be 0, if really went to -1 + emqx_cm:mark_channel_connected(ChanPid), + ?assertEqual(1, emqx_cm:get_connected_client_count()), + ok; +t_connect_client_never_negative({'end', _Config}) -> + ok. + +wait_for_events(Action, Kinds) -> + wait_for_events(Action, Kinds, 500). + +wait_for_events(Action, Kinds, Timeout) -> + Predicate = fun(#{?snk_kind := K}) -> + lists:member(K, Kinds) + end, + N = length(Kinds), + {ok, Sub} = snabbkaffe_collector:subscribe(Predicate, N, Timeout, 0), + Res = Action(), + case snabbkaffe_collector:receive_events(Sub) of + {timeout, _} -> + {Res, timeout}; + {ok, Events} -> + {Res, {ok, Events}} + end. + +wait_for_stats(Action, Stats) -> + Predicate = fun(Event = #{?snk_kind := emqx_stats_setstat}) -> + Stat = maps:with( + [ count_stat + , max_stat + ], Event), + lists:member(Stat, Stats); + (_) -> + false + end, + N = length(Stats), + Timeout = 500, + {ok, Sub} = snabbkaffe_collector:subscribe(Predicate, N, Timeout, 0), + Res = Action(), + case snabbkaffe_collector:receive_events(Sub) of + {timeout, _} -> + {Res, timeout}; + {ok, Events} -> + {Res, {ok, Events}} + end. + recv_msgs(Count) -> recv_msgs(Count, []). 
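
Reviewer note (not part of this patch): the connected-client-count cases above run once per transport group (tcp, ws, quic), and the transport is selected by the conn_fun entry that init_per_group/2 puts into Config. A small sketch of that idiom, with the function name chosen here for illustration only:

    %% Illustrative only -- not part of the patch.
    connect_for_group(Config) ->
        %% conn_fun is connect | ws_connect | quic_connect, set per group above
        ConnFun = proplists:get_value(conn_fun, Config, connect),
        {ok, ConnPid} = emqtt:start_link([{clean_start, true} | Config]),
        %% dynamic call on the emqtt module, e.g. emqtt:ws_connect(ConnPid)
        {ok, _Props} = emqtt:ConnFun(ConnPid),
        ConnPid.
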
diff --git a/apps/emqx/test/emqx_broker_helper_SUITE.erl b/apps/emqx/test/emqx_broker_helper_SUITE.erl index 59b3847b0..29aca6e3b 100644 --- a/apps/emqx/test/emqx_broker_helper_SUITE.erl +++ b/apps/emqx/test/emqx_broker_helper_SUITE.erl @@ -41,7 +41,7 @@ t_lookup_subpid(_) -> emqx_broker_helper:register_sub(self(), <<"clientid">>), ct:sleep(10), ?assertEqual(self(), emqx_broker_helper:lookup_subpid(<<"clientid">>)). - + t_register_sub(_) -> ok = emqx_broker_helper:register_sub(self(), <<"clientid">>), ct:sleep(10), @@ -62,7 +62,7 @@ t_shard_seq(_) -> t_shards_num(_) -> ?assertEqual(emqx_vm:schedulers() * 32, emqx_broker_helper:shards_num()). - + t_get_sub_shard(_) -> ?assertEqual(0, emqx_broker_helper:get_sub_shard(self(), <<"topic">>)). @@ -72,4 +72,4 @@ t_terminate(_) -> t_uncovered_func(_) -> gen_server:call(emqx_broker_helper, test), gen_server:cast(emqx_broker_helper, test), - emqx_broker_helper ! test. \ No newline at end of file + emqx_broker_helper ! test. diff --git a/apps/emqx/test/emqx_channel_SUITE.erl b/apps/emqx/test/emqx_channel_SUITE.erl index 10e7db9cf..911341440 100644 --- a/apps/emqx/test/emqx_channel_SUITE.erl +++ b/apps/emqx/test/emqx_channel_SUITE.erl @@ -144,6 +144,8 @@ set_test_listenser_confs() -> init_per_suite(Config) -> %% CM Meck ok = meck:new(emqx_cm, [passthrough, no_history, no_link]), + ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end), + ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end), %% Access Control Meck ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]), ok = meck:expect(emqx_access_control, authenticate, diff --git a/apps/emqx/test/emqx_cm_SUITE.erl b/apps/emqx/test/emqx_cm_SUITE.erl index 6ca6fef02..20dc433f8 100644 --- a/apps/emqx/test/emqx_cm_SUITE.erl +++ b/apps/emqx/test/emqx_cm_SUITE.erl @@ -32,6 +32,12 @@ conn_mod => emqx_connection, receive_maximum => 100}}). +-define(WAIT(PATTERN, TIMEOUT, RET), + fun() -> + receive PATTERN -> RET + after TIMEOUT -> error({timeout, ?LINE}) end + end()). + %%-------------------------------------------------------------------- %% CT callbacks %%-------------------------------------------------------------------- @@ -179,28 +185,100 @@ t_open_session_race_condition(_) -> exit(Winner, kill), receive {'DOWN', _, process, Winner, _} -> ok end, - ignored = gen_server:call(emqx_cm, ignore, infinity), %% sync + ignored = gen_server:call(?CM, ignore, infinity), %% sync + ok = emqx_pool:flush_async_tasks(), ?assertEqual([], emqx_cm:lookup_channels(ClientId)). -t_discard_session(_) -> +t_kick_session_discard_normal(_) -> + test_kick_session(discard, normal). + +t_kick_session_discard_shutdown(_) -> + test_kick_session(discard, shutdown). + +t_kick_session_discard_shutdown_with_reason(_) -> + test_kick_session(discard, {shutdown, discard}). + +t_kick_session_discard_timeout(_) -> + test_kick_session(discard, timeout). + +t_kick_session_discard_noproc(_) -> + test_kick_session(discard, noproc). + +t_kick_session_kick_normal(_) -> + test_kick_session(discard, normal). + +t_kick_session_kick_shutdown(_) -> + test_kick_session(discard, shutdown). + +t_kick_session_kick_shutdown_with_reason(_) -> + test_kick_session(discard, {shutdown, discard}). + +t_kick_session_kick_timeout(_) -> + test_kick_session(discard, timeout). + +t_kick_session_kick_noproc(_) -> + test_kick_session(discard, noproc). 
+
+test_kick_session(Action, Reason) ->
     ClientId = rand_client_id(),
     #{conninfo := ConnInfo} = ?ChanInfo,
-    ok = emqx_cm:register_channel(ClientId, self(), ConnInfo),
+    FakeSessionFun =
+        fun Loop() ->
+                receive
+                    {'$gen_call', From, A} when A =:= kick orelse
+                                                A =:= discard ->
+                        case Reason of
+                            normal ->
+                                gen_server:reply(From, ok);
+                            timeout ->
+                                %% no response to the call
+                                Loop();
+                            _ ->
+                                exit(Reason)
+                        end;
+                    Msg ->
+                        ct:pal("(~p) fake_session_discarded ~p", [Action, Msg]),
+                        Loop()
+                end
+        end,
+    {Pid1, _} = spawn_monitor(FakeSessionFun),
+    {Pid2, _} = spawn_monitor(FakeSessionFun),
+    ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo),
+    ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo),
+    ok = emqx_cm:register_channel(ClientId, Pid2, ConnInfo),
+    ?assertEqual([Pid1, Pid2], lists:sort(emqx_cm:lookup_channels(ClientId))),
+    case Reason of
+        noproc -> exit(Pid1, kill), exit(Pid2, kill);
+        _ -> ok
+    end,
+    ok = case Action of
+             kick -> emqx_cm:kick_session(ClientId);
+             discard -> emqx_cm:discard_session(ClientId)
+         end,
+    case Reason =:= timeout orelse Reason =:= noproc of
+        true ->
+            ?assertEqual(killed, ?WAIT({'DOWN', _, process, Pid1, R}, 2_000, R)),
+            ?assertEqual(killed, ?WAIT({'DOWN', _, process, Pid2, R}, 2_000, R));
+        false ->
+            ?assertEqual(Reason, ?WAIT({'DOWN', _, process, Pid1, R}, 2_000, R)),
+            ?assertEqual(Reason, ?WAIT({'DOWN', _, process, Pid2, R}, 2_000, R))
+    end,
+    ignored = gen_server:call(?CM, ignore, infinity), % sync
+    ok = flush_emqx_pool(),
+    ?assertEqual([], emqx_cm:lookup_channels(ClientId)).
-    ok = meck:new(emqx_connection, [passthrough, no_history]),
-    ok = meck:expect(emqx_connection, call, fun(_, _) -> ok end),
-    ok = meck:expect(emqx_connection, call, fun(_, _, _) -> ok end),
-    ok = emqx_cm:discard_session(ClientId),
-    ok = emqx_cm:register_channel(ClientId, self(), ConnInfo),
-    ok = emqx_cm:discard_session(ClientId),
-    ok = emqx_cm:unregister_channel(ClientId),
-    ok = emqx_cm:register_channel(ClientId, self(), ConnInfo),
-    ok = emqx_cm:discard_session(ClientId),
-    ok = meck:expect(emqx_connection, call, fun(_, _) -> error(testing) end),
-    ok = meck:expect(emqx_connection, call, fun(_, _, _) -> error(testing) end),
-    ok = emqx_cm:discard_session(ClientId),
-    ok = emqx_cm:unregister_channel(ClientId),
-    ok = meck:unload(emqx_connection).
+%% Channel deregistration is delegated to emqx_pool as async tasks.
+%% The emqx_pool is a pool of workers, and there is no way to know
+%% which worker was picked for the last deregistration task.
+%% This helper function creates a large enough number of async tasks
+%% to sync with the pool workers.
+%% The number of tasks should be large enough to ensure all workers have
+%% the chance to work on at least one of the tasks.
+flush_emqx_pool() ->
+    Self = self(),
+    L = lists:seq(1, 1000),
+    lists:foreach(fun(I) -> emqx_pool:async_submit(fun() -> Self ! {done, I} end, []) end, L),
+    lists:foreach(fun(I) -> receive {done, I} -> ok end end, L).
 
 t_discard_session_race(_) ->
     ClientId = rand_client_id(),
@@ -222,37 +300,55 @@ t_discard_session_race(_) ->
 
 t_takeover_session(_) ->
     #{conninfo := ConnInfo} = ?ChanInfo,
     none = emqx_cm:takeover_session(<<"clientid">>),
+    Parent = self(),
     erlang:spawn_link(fun() ->
                  ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo),
+                 Parent !
registered, receive {'$gen_call', From, {takeover, 'begin'}} -> gen_server:reply(From, test), ok end end), - timer:sleep(100), + receive registered -> ok end, {living, emqx_connection, _, test} = emqx_cm:takeover_session(<<"clientid">>), emqx_cm:unregister_channel(<<"clientid">>). -t_kick_session(_) -> - Info = #{conninfo := ConnInfo} = ?ChanInfo, - ok = meck:new(emqx_connection, [passthrough, no_history]), - ok = meck:expect(emqx_connection, call, fun(_, _) -> test end), - ok = meck:expect(emqx_connection, call, fun(_, _, _) -> test end), - {error, not_found} = emqx_cm:kick_session(<<"clientid">>), - ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo), - ok = emqx_cm:insert_channel_info(<<"clientid">>, Info, []), - test = emqx_cm:kick_session(<<"clientid">>), - erlang:spawn_link( - fun() -> - ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo), - ok = emqx_cm:insert_channel_info(<<"clientid">>, Info, []), - - timer:sleep(1000) - end), - ct:sleep(100), - test = emqx_cm:kick_session(<<"clientid">>), - ok = emqx_cm:unregister_channel(<<"clientid">>), - ok = meck:unload(emqx_connection). +t_takeover_session_process_gone(_) -> + #{conninfo := ConnInfo} = ?ChanInfo, + ClientIDTcp = <<"clientidTCP">>, + ClientIDWs = <<"clientidWs">>, + ClientIDRpc = <<"clientidRPC">>, + none = emqx_cm:takeover_session(ClientIDTcp), + none = emqx_cm:takeover_session(ClientIDWs), + meck:new(emqx_connection, [passthrough, no_history]), + meck:expect(emqx_connection, call, + fun(Pid, {takeover, 'begin'}, _) -> + exit({noproc, {gen_server,call,[Pid, takeover_session]}}); + (Pid, What, Args) -> + meck:passthrough([Pid, What, Args]) + end), + ok = emqx_cm:register_channel(ClientIDTcp, self(), ConnInfo), + none = emqx_cm:takeover_session(ClientIDTcp), + meck:expect(emqx_connection, call, + fun(_Pid, {takeover, 'begin'}, _) -> + exit(noproc); + (Pid, What, Args) -> + meck:passthrough([Pid, What, Args]) + end), + ok = emqx_cm:register_channel(ClientIDWs, self(), ConnInfo), + none = emqx_cm:takeover_session(ClientIDWs), + meck:expect(emqx_connection, call, + fun(Pid, {takeover, 'begin'}, _) -> + exit({'EXIT', {noproc, {gen_server,call,[Pid, takeover_session]}}}); + (Pid, What, Args) -> + meck:passthrough([Pid, What, Args]) + end), + ok = emqx_cm:register_channel(ClientIDRpc, self(), ConnInfo), + none = emqx_cm:takeover_session(ClientIDRpc), + emqx_cm:unregister_channel(ClientIDTcp), + emqx_cm:unregister_channel(ClientIDWs), + emqx_cm:unregister_channel(ClientIDRpc), + meck:unload(emqx_connection). t_all_channels(_) -> ?assertEqual(true, is_list(emqx_cm:all_channels())). diff --git a/apps/emqx/test/emqx_common_test_http.erl b/apps/emqx/test/emqx_common_test_http.erl index 27fcdc268..3e91f3afd 100644 --- a/apps/emqx/test/emqx_common_test_http.erl +++ b/apps/emqx/test/emqx_common_test_http.erl @@ -19,14 +19,14 @@ -include_lib("common_test/include/ct.hrl"). -export([ request_api/3 - , request_api/4 - , request_api/5 - , get_http_data/1 - , create_default_app/0 - , delete_default_app/0 - , default_auth_header/0 - , auth_header/2 -]). + , request_api/4 + , request_api/5 + , get_http_data/1 + , create_default_app/0 + , delete_default_app/0 + , default_auth_header/0 + , auth_header/2 + ]). request_api(Method, Url, Auth) -> request_api(Method, Url, [], Auth, []). 
@@ -57,15 +57,14 @@ do_request_api(Method, Request, HttpOpts) -> case httpc:request(Method, Request, HttpOpts, [{body_format, binary}]) of {error, socket_closed_remotely} -> {error, socket_closed_remotely}; - {ok, {{"HTTP/1.1", Code, _}, _Headers, Return} } - when Code =:= 200 orelse Code =:= 201 -> - {ok, Return}; + {ok, {{"HTTP/1.1", Code, _}, _Headers, Return} } -> + {ok, Code, Return}; {ok, {Reason, _, _}} -> {error, Reason} end. get_http_data(ResponseBody) -> - maps:get(<<"data">>, emqx_json:decode(ResponseBody, [return_maps])). + emqx_json:decode(ResponseBody, [return_maps]). auth_header(User, Pass) -> Encoded = base64:encode_to_string(lists:append([User,":",Pass])), diff --git a/apps/emqx/test/emqx_connection_SUITE.erl b/apps/emqx/test/emqx_connection_SUITE.erl index 987b39d77..d006a1a10 100644 --- a/apps/emqx/test/emqx_connection_SUITE.erl +++ b/apps/emqx/test/emqx_connection_SUITE.erl @@ -36,6 +36,8 @@ init_per_suite(Config) -> ok = meck:new(emqx_channel, [passthrough, no_history, no_link]), %% Meck Cm ok = meck:new(emqx_cm, [passthrough, no_history, no_link]), + ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end), + ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end), %% Meck Limiter ok = meck:new(emqx_limiter, [passthrough, no_history, no_link]), %% Meck Pd @@ -113,7 +115,7 @@ t_ws_pingreq_before_connected(_) -> t_info(_) -> CPid = spawn(fun() -> - receive + receive {'$gen_call', From, info} -> gen_server:reply(From, emqx_connection:info(st())) after @@ -132,7 +134,7 @@ t_info_limiter(_) -> t_stats(_) -> CPid = spawn(fun() -> - receive + receive {'$gen_call', From, stats} -> gen_server:reply(From, emqx_connection:stats(st())) after @@ -147,10 +149,10 @@ t_stats(_) -> {send_pend,0}| _] , Stats). t_process_msg(_) -> - with_conn(fun(CPid) -> - ok = meck:expect(emqx_channel, handle_in, - fun(_Packet, Channel) -> - {ok, Channel} + with_conn(fun(CPid) -> + ok = meck:expect(emqx_channel, handle_in, + fun(_Packet, Channel) -> + {ok, Channel} end), CPid ! {incoming, ?PACKET(?PINGREQ)}, CPid ! {incoming, undefined}, @@ -320,7 +322,7 @@ t_with_channel(_) -> t_handle_outgoing(_) -> ?assertEqual(ok, emqx_connection:handle_outgoing(?PACKET(?PINGRESP), st())), ?assertEqual(ok, emqx_connection:handle_outgoing([?PACKET(?PINGRESP)], st())). - + t_handle_info(_) -> ?assertMatch({ok, {event,running}, _NState}, emqx_connection:handle_info(activate_socket, st())), @@ -347,7 +349,7 @@ t_activate_socket(_) -> State = st(), {ok, NStats} = emqx_connection:activate_socket(State), ?assertEqual(running, emqx_connection:info(sockstate, NStats)), - + State1 = st(#{sockstate => blocked}), ?assertEqual({ok, State1}, emqx_connection:activate_socket(State1)), diff --git a/apps/emqx/test/emqx_mountpoint_SUITE.erl b/apps/emqx/test/emqx_mountpoint_SUITE.erl index e8b4c0e5c..c9566c286 100644 --- a/apps/emqx/test/emqx_mountpoint_SUITE.erl +++ b/apps/emqx/test/emqx_mountpoint_SUITE.erl @@ -55,12 +55,12 @@ t_unmount(_) -> t_replvar(_) -> ?assertEqual(undefined, replvar(undefined, #{})), ?assertEqual(<<"mount/user/clientid/">>, - replvar(<<"mount/%u/%c/">>, + replvar(<<"mount/${username}/${clientid}/">>, #{clientid => <<"clientid">>, username => <<"user">> })), - ?assertEqual(<<"mount/%u/clientid/">>, - replvar(<<"mount/%u/%c/">>, + ?assertEqual(<<"mount/${username}/clientid/">>, + replvar(<<"mount/${username}/${clientid}/">>, #{clientid => <<"clientid">>, username => undefined })). 
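The emqx_mountpoint_SUITE hunk above follows the switch from the legacy %u/%c placeholders to ${username}/${clientid} templates. Spelled out, the behaviour the updated assertions encode (a restatement of the hunk, with replvar imported as in the suite) is:

    %% Variables with a value are substituted; an undefined username leaves
    %% the ${username} token in place.
    <<"mount/user/clientid/">> =
        replvar(<<"mount/${username}/${clientid}/">>,
                #{clientid => <<"clientid">>, username => <<"user">>}),
    <<"mount/${username}/clientid/">> =
        replvar(<<"mount/${username}/${clientid}/">>,
                #{clientid => <<"clientid">>, username => undefined}),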
diff --git a/apps/emqx/test/emqx_mqtt_SUITE.erl b/apps/emqx/test/emqx_mqtt_SUITE.erl index 3d149ee08..4a4138505 100644 --- a/apps/emqx/test/emqx_mqtt_SUITE.erl +++ b/apps/emqx/test/emqx_mqtt_SUITE.erl @@ -62,79 +62,104 @@ t_conn_stats(_) -> t_tcp_sock_passive(_) -> with_client(fun(CPid) -> CPid ! {tcp_passive, sock} end, []). -t_message_expiry_interval_1(_) -> - ClientA = message_expiry_interval_init(), - [message_expiry_interval_exipred(ClientA, QoS) || QoS <- [0,1,2]], - emqtt:stop(ClientA). +t_message_expiry_interval(_) -> + {CPublish, CControl} = message_expiry_interval_init(), + [message_expiry_interval_exipred(CPublish, CControl, QoS) || QoS <- [0,1,2]], + emqtt:stop(CPublish), + emqtt:stop(CControl). -t_message_expiry_interval_2(_) -> - ClientA = message_expiry_interval_init(), - [message_expiry_interval_not_exipred(ClientA, QoS) || QoS <- [0,1,2]], - emqtt:stop(ClientA). +t_message_not_expiry_interval(_) -> + {CPublish, CControl} = message_expiry_interval_init(), + [message_expiry_interval_not_exipred(CPublish, CControl, QoS) || QoS <- [0,1,2]], + emqtt:stop(CPublish), + emqtt:stop(CControl). message_expiry_interval_init() -> - {ok, ClientA} = emqtt:start_link([{proto_ver,v5}, - {clientid, <<"client-a">>}, + {ok, CPublish} = emqtt:start_link([{proto_ver,v5}, + {clientid, <<"Client-Publish">>}, {clean_start, false}, {properties, #{'Session-Expiry-Interval' => 360}}]), - {ok, ClientB} = emqtt:start_link([{proto_ver,v5}, - {clientid, <<"client-b">>}, + {ok, CVerify} = emqtt:start_link([{proto_ver,v5}, + {clientid, <<"Client-Verify">>}, {clean_start, false}, {properties, #{'Session-Expiry-Interval' => 360}}]), - {ok, _} = emqtt:connect(ClientA), - {ok, _} = emqtt:connect(ClientB), - %% subscribe and disconnect client-b - emqtt:subscribe(ClientB, <<"t/a">>, 1), - emqtt:stop(ClientB), - ClientA. + {ok, CControl} = emqtt:start_link([{proto_ver,v5}, + {clientid, <<"Client-Control">>}, + {clean_start, false}, + {properties, #{'Session-Expiry-Interval' => 360}}]), + {ok, _} = emqtt:connect(CPublish), + {ok, _} = emqtt:connect(CVerify), + {ok, _} = emqtt:connect(CControl), + %% subscribe and disconnect Client-verify + emqtt:subscribe(CControl, <<"t/a">>, 1), + emqtt:subscribe(CVerify, <<"t/a">>, 1), + emqtt:stop(CVerify), + {CPublish, CControl}. -message_expiry_interval_exipred(ClientA, QoS) -> +message_expiry_interval_exipred(CPublish, CControl, QoS) -> ct:pal("~p ~p", [?FUNCTION_NAME, QoS]), %% publish to t/a and waiting for the message expired - emqtt:publish(ClientA, <<"t/a">>, #{'Message-Expiry-Interval' => 1}, <<"this will be purged in 1s">>, [{qos, QoS}]), - ct:sleep(1500), + emqtt:publish(CPublish, <<"t/a">>, #{'Message-Expiry-Interval' => 1}, + <<"this will be purged in 1s">>, [{qos, QoS}]), + %% CControl make sure publish already store in broker. 
+ receive + {publish,#{client_pid := CControl, topic := <<"t/a">>}} -> + ok + after 1000 -> + ct:fail(should_receive_publish) + end, + ct:sleep(1100), - %% resume the session for client-b - {ok, ClientB1} = emqtt:start_link([{proto_ver,v5}, - {clientid, <<"client-b">>}, + %% resume the session for Client-Verify + {ok, CVerify} = emqtt:start_link([{proto_ver,v5}, + {clientid, <<"Client-Verify">>}, {clean_start, false}, {properties, #{'Session-Expiry-Interval' => 360}}]), - {ok, _} = emqtt:connect(ClientB1), + {ok, _} = emqtt:connect(CVerify), - %% verify client-b could not receive the publish message + %% verify Client-Verify could not receive the publish message receive - {publish,#{client_pid := ClientB1, topic := <<"t/a">>}} -> + {publish,#{client_pid := CVerify, topic := <<"t/a">>}} -> ct:fail(should_have_expired) after 300 -> ok end, - emqtt:stop(ClientB1). + emqtt:stop(CVerify). -message_expiry_interval_not_exipred(ClientA, QoS) -> +message_expiry_interval_not_exipred(CPublish, CControl, QoS) -> ct:pal("~p ~p", [?FUNCTION_NAME, QoS]), %% publish to t/a - emqtt:publish(ClientA, <<"t/a">>, #{'Message-Expiry-Interval' => 20}, <<"this will be purged in 1s">>, [{qos, QoS}]), + emqtt:publish(CPublish, <<"t/a">>, #{'Message-Expiry-Interval' => 20}, + <<"this will be purged in 20s">>, [{qos, QoS}]), - %% wait for 1s and then resume the session for client-b, the message should not expires + %% CControl make sure publish already store in broker. + receive + {publish,#{client_pid := CControl, topic := <<"t/a">>}} -> + ok + after 1000 -> + ct:fail(should_receive_publish) + end, + + %% wait for 1.2s and then resume the session for Client-Verify, the message should not expires %% as Message-Expiry-Interval = 20s - ct:sleep(1000), - {ok, ClientB1} = emqtt:start_link([{proto_ver,v5}, - {clientid, <<"client-b">>}, + ct:sleep(1200), + {ok, CVerify} = emqtt:start_link([{proto_ver,v5}, + {clientid, <<"Client-Verify">>}, {clean_start, false}, {properties, #{'Session-Expiry-Interval' => 360}}]), - {ok, _} = emqtt:connect(ClientB1), + {ok, _} = emqtt:connect(CVerify), - %% verify client-b could receive the publish message and the Message-Expiry-Interval is set + %% verify Client-Verify could receive the publish message and the Message-Expiry-Interval is set receive - {publish,#{client_pid := ClientB1, topic := <<"t/a">>, + {publish,#{client_pid := CVerify, topic := <<"t/a">>, properties := #{'Message-Expiry-Interval' := MsgExpItvl}}} - when MsgExpItvl < 20 -> ok; + when MsgExpItvl =< 20 -> ok; {publish, _} = Msg -> ct:fail({incorrect_publish, Msg}) after 300 -> ct:fail(no_publish_received) end, - emqtt:stop(ClientB1). + emqtt:stop(CVerify). with_client(TestFun, _Options) -> ClientId = <<"t_conn">>, diff --git a/apps/emqx/test/emqx_persistent_session_SUITE.erl b/apps/emqx/test/emqx_persistent_session_SUITE.erl index ac51636f0..d13d3ed8b 100644 --- a/apps/emqx/test/emqx_persistent_session_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_session_SUITE.erl @@ -113,6 +113,9 @@ init_per_group(snabbkaffe, Config) -> [ {kill_connection_process, true} | Config]; init_per_group(gc_tests, Config) -> %% We need to make sure the system does not interfere with this test group. 
+ lists:foreach(fun(ClientId) -> + maybe_kill_connection_process(ClientId, [{kill_connection_process, true}]) + end, emqx_cm:all_client_ids()), emqx_common_test_helpers:stop_apps([]), SessionMsgEts = gc_tests_session_store, MsgEts = gc_tests_msg_store, @@ -230,50 +233,92 @@ receive_messages(Count, Msgs) -> maybe_kill_connection_process(ClientId, Config) -> case ?config(kill_connection_process, Config) of true -> - [ConnectionPid] = emqx_cm:lookup_channels(ClientId), - ?assert(is_pid(ConnectionPid)), - Ref = monitor(process, ConnectionPid), - ConnectionPid ! die_if_test, - receive {'DOWN', Ref, process, ConnectionPid, normal} -> ok - after 3000 -> error(process_did_not_die) + case emqx_cm:lookup_channels(ClientId) of + [] -> + ok; + [ConnectionPid] -> + ?assert(is_pid(ConnectionPid)), + Ref = monitor(process, ConnectionPid), + ConnectionPid ! die_if_test, + receive {'DOWN', Ref, process, ConnectionPid, normal} -> ok + after 3000 -> error(process_did_not_die) + end, + wait_for_cm_unregister(ClientId) end; false -> ok end. -snabbkaffe_sync_publish(Topic, Payloads, Config) -> - Fun = fun(Client, Payload) -> - ?wait_async_action( {ok, _} = emqtt:publish(Client, Topic, Payload, 2) - , #{?snk_kind := ps_persist_msg, payload := Payload} - ) - end, - do_publish(Payloads, Fun, Config). +wait_for_cm_unregister(ClientId) -> + wait_for_cm_unregister(ClientId, 10). -publish(Topic, Payloads, Config) -> +wait_for_cm_unregister(_ClientId, 0) -> + error(cm_did_not_unregister); +wait_for_cm_unregister(ClientId, N) -> + case emqx_cm:lookup_channels(ClientId) of + [] -> ok; + [_] -> timer:sleep(100), wait_for_cm_unregister(ClientId, N - 1) + end. + +snabbkaffe_sync_publish(Topic, Payloads) -> + Fun = fun(Client, Payload) -> + ?check_trace( + begin + ?wait_async_action( {ok, _} = emqtt:publish(Client, Topic, Payload, 2) + , #{?snk_kind := ps_persist_msg, payload := Payload} + ) + end, + fun(_, _Trace) -> ok end) + end, + do_publish(Payloads, Fun, true). + +publish(Topic, Payloads) -> + publish(Topic, Payloads, false). + +publish(Topic, Payloads, WaitForUnregister) -> Fun = fun(Client, Payload) -> {ok, _} = emqtt:publish(Client, Topic, Payload, 2) end, - do_publish(Payloads, Fun, Config). + do_publish(Payloads, Fun, WaitForUnregister). -do_publish(Payloads = [_|_], PublishFun, Config) -> +do_publish(Payloads = [_|_], PublishFun, WaitForUnregister) -> %% Publish from another process to avoid connection confusion. {Pid, Ref} = spawn_monitor( fun() -> %% For convenience, always publish using tcp. %% The publish path is not what we are testing. + ClientID = <<"ps_SUITE_publisher">>, {ok, Client} = emqtt:start_link([ {proto_ver, v5} + , {clientid, ClientID} , {port, 1883} ]), {ok, _} = emqtt:connect(Client), lists:foreach(fun(Payload) -> PublishFun(Client, Payload) end, Payloads), - ok = emqtt:disconnect(Client) + ok = emqtt:disconnect(Client), + %% Snabbkaffe sometimes fails unless all processes are gone. + case WaitForUnregister of + false -> + ok; + true -> + case emqx_cm:lookup_channels(ClientID) of + [] -> + ok; + [ConnectionPid] -> + ?assert(is_pid(ConnectionPid)), + Ref1 = monitor(process, ConnectionPid), + receive {'DOWN', Ref1, process, ConnectionPid, _} -> ok + after 3000 -> error(process_did_not_die) + end, + wait_for_cm_unregister(ClientID) + end + end end), receive {'DOWN', Ref, process, Pid, normal} -> ok; {'DOWN', Ref, process, Pid, What} -> error({failed_publish, What}) end; -do_publish(Payload, PublishFun, Config) -> - do_publish([Payload], PublishFun, Config). 
+do_publish(Payload, PublishFun, WaitForUnregister) -> + do_publish([Payload], PublishFun, WaitForUnregister). %%-------------------------------------------------------------------- %% Test Cases @@ -297,7 +342,7 @@ t_connect_session_expiry_interval(Config) -> maybe_kill_connection_process(ClientId, Config), - publish(Topic, Payload, Config), + publish(Topic, Payload), {ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {proto_ver, v5}, @@ -356,6 +401,8 @@ t_cancel_on_disconnect(Config) -> {ok, _} = emqtt:ConnFun(Client1), ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 0}), + wait_for_cm_unregister(ClientId), + {ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {proto_ver, v5}, {clean_start, false}, @@ -382,6 +429,8 @@ t_persist_on_disconnect(Config) -> %% Strangely enough, the disconnect is reported as successful by emqtt. ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 30}), + wait_for_cm_unregister(ClientId), + {ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {proto_ver, v5}, {clean_start, false}, @@ -424,7 +473,7 @@ t_process_dies_session_expires(Config) -> maybe_kill_connection_process(ClientId, Config), - ok = publish(Topic, [Payload], Config), + ok = publish(Topic, [Payload]), SessionId = case ?config(persistent_store_enabled, Config) of @@ -467,7 +516,8 @@ t_process_dies_session_expires(Config) -> %% The session should be a fresh one {persistent, NewSession} = emqx_persistent_session:lookup(ClientId), ?assertNotEqual(SessionId, emqx_session:info(id, NewSession)), - %% The old session should now either be marked as abandoned or already be garbage collected. + %% The old session should now either + %% be marked as abandoned or already be garbage collected. ?assertMatch([], emqx_persistent_session:pending(SessionId)); false -> skip @@ -498,7 +548,7 @@ t_publish_while_client_is_gone(Config) -> ok = emqtt:disconnect(Client1), maybe_kill_connection_process(ClientId, Config), - ok = publish(Topic, [Payload1, Payload2], Config), + ok = publish(Topic, [Payload1, Payload2]), {ok, Client2} = emqtt:start_link([ {proto_ver, v5}, {clientid, ClientId}, @@ -506,8 +556,9 @@ t_publish_while_client_is_gone(Config) -> {clean_start, false} | Config]), {ok, _} = emqtt:ConnFun(Client2), - [Msg1] = receive_messages(1), - [Msg2] = receive_messages(1), + Msgs = receive_messages(2), + ?assertEqual(length(Msgs), 2), + [Msg2, Msg1] = Msgs, ?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)), ?assertEqual({ok, 2}, maps:find(qos, Msg1)), ?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg2)), @@ -544,7 +595,7 @@ t_clean_start_drops_subscriptions(Config) -> maybe_kill_connection_process(ClientId, Config), %% 2. - ok = publish(Topic, Payload1, Config), + ok = publish(Topic, Payload1), %% 3. 
{ok, Client2} = emqtt:start_link([ {proto_ver, v5}, @@ -556,7 +607,7 @@ t_clean_start_drops_subscriptions(Config) -> ?assertEqual(0, client_info(session_present, Client2)), {ok, _, [2]} = emqtt:subscribe(Client2, STopic, qos2), - ok = publish(Topic, Payload2, Config), + ok = publish(Topic, Payload2), [Msg1] = receive_messages(1), ?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg1)), @@ -571,7 +622,7 @@ t_clean_start_drops_subscriptions(Config) -> | Config]), {ok, _} = emqtt:ConnFun(Client3), - ok = publish(Topic, Payload3, Config), + ok = publish(Topic, Payload3), [Msg2] = receive_messages(1), ?assertEqual({ok, iolist_to_binary(Payload3)}, maps:find(payload, Msg2)), @@ -625,7 +676,7 @@ t_multiple_subscription_matches(Config) -> maybe_kill_connection_process(ClientId, Config), - publish(Topic, Payload, Config), + publish(Topic, Payload), {ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {proto_ver, v5}, @@ -675,9 +726,9 @@ t_lost_messages_because_of_gc(Config) -> {ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2), emqtt:disconnect(Client1), maybe_kill_connection_process(ClientId, Config), - publish(Topic, Payload1, Config), + publish(Topic, Payload1), timer:sleep(2 * Retain), - publish(Topic, Payload2, Config), + publish(Topic, Payload2), emqx_persistent_session_gc:message_gc_worker(), {ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {clean_start, false}, @@ -747,7 +798,6 @@ check_snabbkaffe_vanilla(Trace) -> t_snabbkaffe_vanilla_stages(Config) -> %% Test that all stages of session resume works ok in the simplest case - process_flag(trap_exit, true), ConnFun = ?config(conn_fun, Config), ClientId = ?config(client_id, Config), EmqttOpts = [ {proto_ver, v5}, @@ -772,7 +822,6 @@ t_snabbkaffe_vanilla_stages(Config) -> t_snabbkaffe_pending_messages(Config) -> %% Make sure there are pending messages are fetched during the init stage. - process_flag(trap_exit, true), ConnFun = ?config(conn_fun, Config), ClientId = ?config(client_id, Config), Topic = ?config(topic, Config), @@ -790,7 +839,7 @@ t_snabbkaffe_pending_messages(Config) -> ?check_trace( begin - snabbkaffe_sync_publish(Topic, Payloads, Config), + snabbkaffe_sync_publish(Topic, Payloads), {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]), {ok, _} = emqtt:ConnFun(Client2), Msgs = receive_messages(length(Payloads)), @@ -812,7 +861,6 @@ t_snabbkaffe_pending_messages(Config) -> t_snabbkaffe_buffered_messages(Config) -> %% Make sure to buffer messages during startup. 
- process_flag(trap_exit, true), ConnFun = ?config(conn_fun, Config), ClientId = ?config(client_id, Config), Topic = ?config(topic, Config), @@ -829,7 +877,7 @@ t_snabbkaffe_buffered_messages(Config) -> ok = emqtt:disconnect(Client1), maybe_kill_connection_process(ClientId, Config), - publish(Topic, Payloads1, Config), + publish(Topic, Payloads1), ?check_trace( begin @@ -837,8 +885,8 @@ t_snabbkaffe_buffered_messages(Config) -> ?force_ordering( #{ ?snk_kind := ps_worker_deliver }, #{ ?snk_kind := ps_resume_end }), spawn_link(fun() -> - ?block_until(#{ ?snk_kind := ps_marker_pendings_msgs }, infinity, 5000), - publish(Topic, Payloads2, Config) + ?block_until(#{?snk_kind := ps_marker_pendings_msgs}, infinity, 5000), + publish(Topic, Payloads2, true) end), {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]), {ok, _} = emqtt:ConnFun(Client2), diff --git a/apps/emqx/test/emqx_pqueue_SUITE.erl b/apps/emqx/test/emqx_pqueue_SUITE.erl index a51c21473..797adb9ab 100644 --- a/apps/emqx/test/emqx_pqueue_SUITE.erl +++ b/apps/emqx/test/emqx_pqueue_SUITE.erl @@ -112,7 +112,8 @@ t_out(_) -> t_out_2(_) -> {empty, {pqueue, [{-1, {queue, [a], [], 1}}]}} = ?PQ:out(0, ?PQ:from_list([{1, a}])), {{value, a}, {queue, [], [], 0}} = ?PQ:out(1, ?PQ:from_list([{1, a}])), - {{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {1, b}])), + {{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} = + ?PQ:out(1, ?PQ:from_list([{1, a}, {1, b}])), {{value, a}, {queue, [b], [], 1}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {0, b}])). t_out_p(_) -> @@ -174,4 +175,4 @@ t_filter(_) -> t_highest(_) -> empty = ?PQ:highest(?PQ:new()), 0 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}])), - 2 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}, {1, c}, {2, d}, {2, e}])). \ No newline at end of file + 2 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}, {1, c}, {2, d}, {2, e}])). diff --git a/apps/emqx/test/emqx_sup_SUITE.erl b/apps/emqx/test/emqx_sup_SUITE.erl index 185e1d752..2f5600e38 100644 --- a/apps/emqx/test/emqx_sup_SUITE.erl +++ b/apps/emqx/test/emqx_sup_SUITE.erl @@ -36,4 +36,4 @@ t_child(_) -> ?assertMatch({error, not_found}, emqx_sup:stop_child(undef)), ?assertMatch({error, _}, emqx_sup:start_child(emqx_broker_sup, supervisor)), ?assertEqual(ok, emqx_sup:stop_child(emqx_broker_sup)), - ?assertMatch({ok, _}, emqx_sup:start_child(emqx_broker_sup, supervisor)). \ No newline at end of file + ?assertMatch({ok, _}, emqx_sup:start_child(emqx_broker_sup, supervisor)). diff --git a/apps/emqx/test/emqx_trace_SUITE.erl b/apps/emqx/test/emqx_trace_SUITE.erl new file mode 100644 index 000000000..555fc357e --- /dev/null +++ b/apps/emqx/test/emqx_trace_SUITE.erl @@ -0,0 +1,318 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-module(emqx_trace_SUITE). + +%% API +-compile(export_all). 
+-compile(nowarn_export_all). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("emqx/include/emqx.hrl"). + +-record(emqx_trace, {name, type, filter, enable = true, start_at, end_at}). + +%%-------------------------------------------------------------------- +%% Setups +%%-------------------------------------------------------------------- + +all() -> + emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + application:load(emqx_plugin_libs), + emqx_common_test_helpers:start_apps([]), + Config. + +end_per_suite(_Config) -> + emqx_common_test_helpers:stop_apps([]). + +t_base_create_delete(_Config) -> + ok = emqx_trace:clear(), + Now = erlang:system_time(second), + Start = to_rfc3339(Now), + End = to_rfc3339(Now + 30 * 60), + Name = <<"name1">>, + ClientId = <<"test-device">>, + Trace = #{ + name => Name, + type => <<"clientid">>, + clientid => ClientId, + start_at => Start, + end_at => End + }, + AnotherTrace = Trace#{name => <<"anotherTrace">>}, + ok = emqx_trace:create(Trace), + ?assertEqual({error, {already_existed, Name}}, emqx_trace:create(Trace)), + ?assertEqual({error, {duplicate_condition, Name}}, emqx_trace:create(AnotherTrace)), + [TraceRec] = emqx_trace:list(), + Expect = #emqx_trace{ + name = Name, + type = clientid, + filter = ClientId, + start_at = Now, + end_at = Now + 30 * 60 + }, + ?assertEqual(Expect, TraceRec), + ExpectFormat = [ + #{ + filter => <<"test-device">>, + enable => true, + type => clientid, + name => <<"name1">>, + start_at => Now, + end_at => Now + 30 * 60 + } + ], + ?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])), + ?assertEqual(ok, emqx_trace:delete(Name)), + ?assertEqual({error, not_found}, emqx_trace:delete(Name)), + ?assertEqual([], emqx_trace:list()), + ok. + +t_create_size_max(_Config) -> + emqx_trace:clear(), + lists:map(fun(Seq) -> + Name = list_to_binary("name" ++ integer_to_list(Seq)), + Trace = [{name, Name}, {type, <<"topic">>}, + {topic, list_to_binary("/x/y/" ++ integer_to_list(Seq))}], + ok = emqx_trace:create(Trace) + end, lists:seq(1, 30)), + Trace31 = [{<<"name">>, <<"name31">>}, + {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/31">>}], + {error, _} = emqx_trace:create(Trace31), + ok = emqx_trace:delete(<<"name30">>), + ok = emqx_trace:create(Trace31), + ?assertEqual(30, erlang:length(emqx_trace:list())), + ok. 
+ +t_create_failed(_Config) -> + ok = emqx_trace:clear(), + UnknownField = [{<<"unknown">>, 12}], + {error, Reason1} = emqx_trace:create(UnknownField), + ?assertEqual(<<"unknown field: {unknown,12}">>, iolist_to_binary(Reason1)), + + InvalidTopic = [{<<"topic">>, "#/#//"}], + {error, Reason2} = emqx_trace:create(InvalidTopic), + ?assertEqual(<<"topic: #/#// invalid by function_clause">>, iolist_to_binary(Reason2)), + + InvalidStart = [{<<"start_at">>, <<"2021-12-3:12">>}], + {error, Reason3} = emqx_trace:create(InvalidStart), + ?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>, + iolist_to_binary(Reason3)), + + InvalidEnd = [{<<"end_at">>, <<"2021-12-3:12">>}], + {error, Reason4} = emqx_trace:create(InvalidEnd), + ?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>, + iolist_to_binary(Reason4)), + + {error, Reason7} = emqx_trace:create([{<<"name">>, <<"test">>}, {<<"type">>, <<"clientid">>}]), + ?assertEqual(<<"topic/clientid/ip_address filter required">>, iolist_to_binary(Reason7)), + + InvalidPackets4 = [{<<"name">>, <<"/test">>}, {<<"clientid">>, <<"t">>}, + {<<"type">>, <<"clientid">>}], + {error, Reason9} = emqx_trace:create(InvalidPackets4), + ?assertEqual(<<"name cannot contain /">>, iolist_to_binary(Reason9)), + + ?assertEqual({error, "type=[topic,clientid,ip_address] required"}, + emqx_trace:create([{<<"name">>, <<"test-name">>}, {<<"clientid">>, <<"good">>}])), + + ?assertEqual({error, "incorrect type: only support clientid/topic/ip_address"}, + emqx_trace:create([{<<"name">>, <<"test-name">>}, + {<<"clientid">>, <<"good">>}, {<<"type">>, <<"typeerror">> }])), + + ?assertEqual({error, "ip address: einval"}, + emqx_trace:create([{<<"ip_address">>, <<"test-name">>}])), + ok. + +t_create_default(_Config) -> + ok = emqx_trace:clear(), + {error, "name required"} = emqx_trace:create([]), + ok = emqx_trace:create([{<<"name">>, <<"test-name">>}, + {<<"type">>, <<"clientid">>}, {<<"clientid">>, <<"good">>}]), + [#emqx_trace{name = <<"test-name">>}] = emqx_trace:list(), + ok = emqx_trace:clear(), + Trace = [ + {<<"name">>, <<"test-name">>}, + {<<"type">>, <<"topic">>}, + {<<"topic">>, <<"/x/y/z">>}, + {<<"start_at">>, <<"2021-10-28T10:54:47+08:00">>}, + {<<"end_at">>, <<"2021-10-27T10:54:47+08:00">>} + ], + {error, "end_at time has already passed"} = emqx_trace:create(Trace), + Now = erlang:system_time(second), + Trace2 = [ + {<<"name">>, <<"test-name">>}, + {<<"type">>, <<"topic">>}, + {<<"topic">>, <<"/x/y/z">>}, + {<<"start_at">>, to_rfc3339(Now + 10)}, + {<<"end_at">>, to_rfc3339(Now + 3)} + ], + {error, "failed by start_at >= end_at"} = emqx_trace:create(Trace2), + ok = emqx_trace:create([{<<"name">>, <<"test-name">>}, + {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/z">>}]), + [#emqx_trace{start_at = Start, end_at = End}] = emqx_trace:list(), + ?assertEqual(10 * 60, End - Start), + ?assertEqual(true, Start - erlang:system_time(second) < 5), + ok. 
+ +t_update_enable(_Config) -> + ok = emqx_trace:clear(), + Name = <<"test-name">>, + Now = erlang:system_time(second), + End = list_to_binary(calendar:system_time_to_rfc3339(Now + 2)), + ok = emqx_trace:create([{<<"name">>, Name}, {<<"type">>, <<"topic">>}, + {<<"topic">>, <<"/x/y/z">>}, {<<"end_at">>, End}]), + [#emqx_trace{enable = Enable}] = emqx_trace:list(), + ?assertEqual(Enable, true), + ok = emqx_trace:update(Name, false), + [#emqx_trace{enable = false}] = emqx_trace:list(), + ok = emqx_trace:update(Name, false), + [#emqx_trace{enable = false}] = emqx_trace:list(), + ok = emqx_trace:update(Name, true), + [#emqx_trace{enable = true}] = emqx_trace:list(), + ok = emqx_trace:update(Name, false), + [#emqx_trace{enable = false}] = emqx_trace:list(), + ?assertEqual({error, not_found}, emqx_trace:update(<<"Name not found">>, true)), + ct:sleep(2100), + ?assertEqual({error, finished}, emqx_trace:update(Name, true)), + ok. + +t_load_state(_Config) -> + emqx_trace:clear(), + load(), + Now = erlang:system_time(second), + Running = [{<<"name">>, <<"Running">>}, {<<"type">>, <<"topic">>}, + {<<"topic">>, <<"/x/y/1">>}, {<<"start_at">>, to_rfc3339(Now - 1)}, + {<<"end_at">>, to_rfc3339(Now + 2)}], + Waiting = [{<<"name">>, <<"Waiting">>}, {<<"type">>, <<"topic">>}, + {<<"topic">>, <<"/x/y/2">>}, {<<"start_at">>, to_rfc3339(Now + 3)}, + {<<"end_at">>, to_rfc3339(Now + 8)}], + Finished = [{<<"name">>, <<"Finished">>}, {<<"type">>, <<"topic">>}, + {<<"topic">>, <<"/x/y/3">>}, {<<"start_at">>, to_rfc3339(Now - 5)}, + {<<"end_at">>, to_rfc3339(Now)}], + ok = emqx_trace:create(Running), + ok = emqx_trace:create(Waiting), + {error, "end_at time has already passed"} = emqx_trace:create(Finished), + Traces = emqx_trace:format(emqx_trace:list()), + ?assertEqual(2, erlang:length(Traces)), + Enables = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces), + ExpectEnables = [{<<"Running">>, true}, {<<"Waiting">>, true}], + ?assertEqual(ExpectEnables, lists:sort(Enables)), + ct:sleep(3500), + Traces2 = emqx_trace:format(emqx_trace:list()), + ?assertEqual(2, erlang:length(Traces2)), + Enables2 = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces2), + ExpectEnables2 = [{<<"Running">>, false}, {<<"Waiting">>, true}], + ?assertEqual(ExpectEnables2, lists:sort(Enables2)), + unload(), + ok. 
+ +t_client_event(_Config) -> + application:set_env(emqx, allow_anonymous, true), + emqx_trace:clear(), + ClientId = <<"client-test">>, + load(), + Now = erlang:system_time(second), + Start = to_rfc3339(Now), + Name = <<"test_client_id_event">>, + ok = emqx_trace:create([{<<"name">>, Name}, + {<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]), + ct:sleep(200), + {ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]), + {ok, _} = emqtt:connect(Client), + emqtt:ping(Client), + ok = emqtt:publish(Client, <<"/test">>, #{}, <<"1">>, [{qos, 0}]), + ok = emqtt:publish(Client, <<"/test">>, #{}, <<"2">>, [{qos, 0}]), + ct:sleep(200), + ok = emqx_trace:create([{<<"name">>, <<"test_topic">>}, + {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/test">>}, {<<"start_at">>, Start}]), + ct:sleep(200), + {ok, Bin} = file:read_file(emqx_trace:log_file(Name, Now)), + ok = emqtt:publish(Client, <<"/test">>, #{}, <<"3">>, [{qos, 0}]), + ok = emqtt:publish(Client, <<"/test">>, #{}, <<"4">>, [{qos, 0}]), + ok = emqtt:disconnect(Client), + ct:sleep(200), + {ok, Bin2} = file:read_file(emqx_trace:log_file(Name, Now)), + {ok, Bin3} = file:read_file(emqx_trace:log_file(<<"test_topic">>, Now)), + ct:pal("Bin ~p Bin2 ~p Bin3 ~p", [byte_size(Bin), byte_size(Bin2), byte_size(Bin3)]), + ?assert(erlang:byte_size(Bin) > 0), + ?assert(erlang:byte_size(Bin) < erlang:byte_size(Bin2)), + ?assert(erlang:byte_size(Bin3) > 0), + unload(), + ok. + +t_get_log_filename(_Config) -> + ok = emqx_trace:clear(), + load(), + Now = erlang:system_time(second), + Start = calendar:system_time_to_rfc3339(Now), + End = calendar:system_time_to_rfc3339(Now + 2), + Name = <<"name1">>, + Trace = [ + {<<"name">>, Name}, + {<<"type">>, <<"ip_address">>}, + {<<"ip_address">>, <<"127.0.0.1">>}, + {<<"start_at">>, list_to_binary(Start)}, + {<<"end_at">>, list_to_binary(End)} + ], + ok = emqx_trace:create(Trace), + ?assertEqual({error, not_found}, emqx_trace:get_trace_filename(<<"test">>)), + ?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))), + ct:sleep(3000), + ?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))), + unload(), + ok. + +t_trace_file(_Config) -> + FileName = "test.log", + Content = <<"test \n test">>, + TraceDir = emqx_trace:trace_dir(), + File = filename:join(TraceDir, FileName), + ok = file:write_file(File, Content), + {ok, Node, Bin} = emqx_trace:trace_file(FileName), + ?assertEqual(Node, atom_to_list(node())), + ?assertEqual(Content, Bin), + ok = file:delete(File), + ok. + +t_download_log(_Config) -> + emqx_trace:clear(), + load(), + ClientId = <<"client-test">>, + Now = erlang:system_time(second), + Start = to_rfc3339(Now), + Name = <<"test_client_id">>, + ok = emqx_trace:create([{<<"name">>, Name}, + {<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]), + {ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]), + {ok, _} = emqtt:connect(Client), + [begin _ = emqtt:ping(Client) end ||_ <- lists:seq(1, 5)], + ct:sleep(100), + {ok, ZipFile} = emqx_trace_api:download_zip_log(#{name => Name}, []), + ?assert(filelib:file_size(ZipFile) > 0), + ok = emqtt:disconnect(Client), + unload(), + ok. + +to_rfc3339(Second) -> + list_to_binary(calendar:system_time_to_rfc3339(Second)). + +load() -> + emqx_trace:start_link(). + +unload() -> + gen_server:stop(emqx_trace). 
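The new emqx_trace_SUITE above drives emqx_trace:create/1, list/0, update/2 and delete/1 directly. For orientation, the minimal create/list/delete round trip the suite is built around looks like this (a sketch distilled from t_base_create_delete; names and timestamps are illustrative, and to_rfc3339/1 is the suite-local helper):

    Now = erlang:system_time(second),
    ok = emqx_trace:create(#{name => <<"name1">>,
                             type => <<"clientid">>,
                             clientid => <<"test-device">>,
                             start_at => to_rfc3339(Now),
                             end_at => to_rfc3339(Now + 30 * 60)}),
    [_TraceRec] = emqx_trace:list(),
    ok = emqx_trace:delete(<<"name1">>),
    [] = emqx_trace:list(),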
diff --git a/apps/emqx/test/emqx_trace_handler_SUITE.erl b/apps/emqx/test/emqx_trace_handler_SUITE.erl new file mode 100644 index 000000000..6504530f1 --- /dev/null +++ b/apps/emqx/test/emqx_trace_handler_SUITE.erl @@ -0,0 +1,191 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2019-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_trace_handler_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). + +-include_lib("common_test/include/ct.hrl"). +-define(CLIENT, [{host, "localhost"}, + {clientid, <<"client">>}, + {username, <<"testuser">>}, + {password, <<"pass">>} + ]). + +all() -> [t_trace_clientid, t_trace_topic, t_trace_ip_address]. + +init_per_suite(Config) -> + emqx_common_test_helpers:boot_modules(all), + emqx_common_test_helpers:start_apps([]), + Config. + +end_per_suite(_Config) -> + emqx_common_test_helpers:stop_apps([]). + +init_per_testcase(t_trace_clientid, Config) -> + Config; +init_per_testcase(_Case, Config) -> + ok = emqx_logger:set_log_level(debug), + _ = [logger:remove_handler(Id) ||#{id := Id} <- emqx_trace_handler:running()], + Config. + +end_per_testcase(_Case, _Config) -> + ok = emqx_logger:set_log_level(warning), + ok. + +t_trace_clientid(_Config) -> + %% Start tracing + emqx_logger:set_log_level(error), + {error, _} = emqx_trace_handler:install(clientid, <<"client">>, debug, "tmp/client.log"), + emqx_logger:set_log_level(debug), + %% add list clientid + ok = emqx_trace_handler:install(clientid, "client", debug, "tmp/client.log"), + ok = emqx_trace_handler:install(clientid, <<"client2">>, all, "tmp/client2.log"), + ok = emqx_trace_handler:install(clientid, <<"client3">>, all, "tmp/client3.log"), + {error, {invalid_log_level, bad_level}} = + emqx_trace_handler:install(clientid, <<"client4">>, bad_level, "tmp/client4.log"), + {error, {handler_not_added, {file_error, ".", eisdir}}} = + emqx_trace_handler:install(clientid, <<"client5">>, debug, "."), + ct:sleep(100), + + %% Verify the tracing file exits + ?assert(filelib:is_regular("tmp/client.log")), + ?assert(filelib:is_regular("tmp/client2.log")), + ?assert(filelib:is_regular("tmp/client3.log")), + + %% Get current traces + ?assertMatch([#{type := clientid, filter := "client", name := <<"client">>, + level := debug, dst := "tmp/client.log"}, + #{type := clientid, filter := "client2", name := <<"client2">> + , level := debug, dst := "tmp/client2.log"}, + #{type := clientid, filter := "client3", name := <<"client3">>, + level := debug, dst := "tmp/client3.log"} + ], emqx_trace_handler:running()), + + %% Client with clientid = "client" publishes a "hi" message to "a/b/c". 
+ {ok, T} = emqtt:start_link(?CLIENT), + emqtt:connect(T), + emqtt:publish(T, <<"a/b/c">>, <<"hi">>), + emqtt:ping(T), + ct:sleep(200), + + %% Verify messages are logged to "tmp/client.log" but not "tmp/client2.log". + {ok, Bin} = file:read_file("tmp/client.log"), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"CONNECT">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"CONNACK">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"PINGREQ">>])), + ?assert(filelib:file_size("tmp/client2.log") == 0), + + %% Stop tracing + ok = emqx_trace_handler:uninstall(clientid, <<"client">>), + ok = emqx_trace_handler:uninstall(clientid, <<"client2">>), + ok = emqx_trace_handler:uninstall(clientid, <<"client3">>), + + emqtt:disconnect(T), + ?assertEqual([], emqx_trace_handler:running()). + +t_trace_topic(_Config) -> + {ok, T} = emqtt:start_link(?CLIENT), + emqtt:connect(T), + + %% Start tracing + emqx_logger:set_log_level(debug), + ok = emqx_trace_handler:install(topic, <<"x/#">>, all, "tmp/topic_trace_x.log"), + ok = emqx_trace_handler:install(topic, <<"y/#">>, all, "tmp/topic_trace_y.log"), + ct:sleep(100), + + %% Verify the tracing file exits + ?assert(filelib:is_regular("tmp/topic_trace_x.log")), + ?assert(filelib:is_regular("tmp/topic_trace_y.log")), + + %% Get current traces + ?assertMatch([#{type := topic, filter := <<"x/#">>, + level := debug, dst := "tmp/topic_trace_x.log", name := <<"x/#">>}, + #{type := topic, filter := <<"y/#">>, + name := <<"y/#">>, level := debug, dst := "tmp/topic_trace_y.log"} + ], + emqx_trace_handler:running()), + + %% Client with clientid = "client" publishes a "hi" message to "x/y/z". + emqtt:publish(T, <<"x/y/z">>, <<"hi1">>), + emqtt:publish(T, <<"x/y/z">>, <<"hi2">>), + emqtt:subscribe(T, <<"x/y/z">>), + emqtt:unsubscribe(T, <<"x/y/z">>), + ct:sleep(200), + + {ok, Bin} = file:read_file("tmp/topic_trace_x.log"), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"hi1">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"hi2">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"SUBSCRIBE">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"UNSUBSCRIBE">>])), + ?assert(filelib:file_size("tmp/topic_trace_y.log") =:= 0), + + %% Stop tracing + ok = emqx_trace_handler:uninstall(topic, <<"x/#">>), + ok = emqx_trace_handler:uninstall(topic, <<"y/#">>), + {error, _Reason} = emqx_trace_handler:uninstall(topic, <<"z/#">>), + ?assertEqual([], emqx_trace_handler:running()), + emqtt:disconnect(T). + +t_trace_ip_address(_Config) -> + {ok, T} = emqtt:start_link(?CLIENT), + emqtt:connect(T), + + %% Start tracing + ok = emqx_trace_handler:install(ip_address, "127.0.0.1", all, "tmp/ip_trace_x.log"), + ok = emqx_trace_handler:install(ip_address, "192.168.1.1", all, "tmp/ip_trace_y.log"), + ct:sleep(100), + + %% Verify the tracing file exits + ?assert(filelib:is_regular("tmp/ip_trace_x.log")), + ?assert(filelib:is_regular("tmp/ip_trace_y.log")), + + %% Get current traces + ?assertMatch([#{type := ip_address, filter := "127.0.0.1", + name := <<"127.0.0.1">>, + level := debug, dst := "tmp/ip_trace_x.log"}, + #{type := ip_address, filter := "192.168.1.1", + name := <<"192.168.1.1">>, + level := debug, dst := "tmp/ip_trace_y.log"} + ], + emqx_trace_handler:running()), + + %% Client with clientid = "client" publishes a "hi" message to "x/y/z". 
+ emqtt:publish(T, <<"x/y/z">>, <<"hi1">>), + emqtt:publish(T, <<"x/y/z">>, <<"hi2">>), + emqtt:subscribe(T, <<"x/y/z">>), + emqtt:unsubscribe(T, <<"x/y/z">>), + ct:sleep(200), + + {ok, Bin} = file:read_file("tmp/ip_trace_x.log"), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"hi1">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"hi2">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"SUBSCRIBE">>])), + ?assertNotEqual(nomatch, binary:match(Bin, [<<"UNSUBSCRIBE">>])), + ?assert(filelib:file_size("tmp/ip_trace_y.log") =:= 0), + + %% Stop tracing + ok = emqx_trace_handler:uninstall(ip_address, <<"127.0.0.1">>), + ok = emqx_trace_handler:uninstall(ip_address, <<"192.168.1.1">>), + {error, _Reason} = emqx_trace_handler:uninstall(ip_address, <<"127.0.0.2">>), + emqtt:disconnect(T), + ?assertEqual([], emqx_trace_handler:running()). diff --git a/apps/emqx/test/emqx_tracer_SUITE.erl b/apps/emqx/test/emqx_tracer_SUITE.erl deleted file mode 100644 index f6f4c7a5b..000000000 --- a/apps/emqx/test/emqx_tracer_SUITE.erl +++ /dev/null @@ -1,120 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2019-2021 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- - --module(emqx_tracer_SUITE). - --compile(export_all). --compile(nowarn_export_all). - --include_lib("eunit/include/eunit.hrl"). - --include_lib("common_test/include/ct.hrl"). - -all() -> [t_trace_clientid, t_trace_topic]. - -init_per_suite(Config) -> - emqx_common_test_helpers:boot_modules(all), - emqx_common_test_helpers:start_apps([]), - Config. - -end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps([]). 
- -t_trace_clientid(_Config) -> - {ok, T} = emqtt:start_link([{host, "localhost"}, - {clientid, <<"client">>}, - {username, <<"testuser">>}, - {password, <<"pass">>} - ]), - emqtt:connect(T), - - %% Start tracing - emqx_logger:set_log_level(error), - {error, _} = emqx_tracer:start_trace({clientid, <<"client">>}, debug, "tmp/client.log"), - emqx_logger:set_log_level(debug), - ok = emqx_tracer:start_trace({clientid, <<"client">>}, debug, "tmp/client.log"), - ok = emqx_tracer:start_trace({clientid, <<"client2">>}, all, "tmp/client2.log"), - ok = emqx_tracer:start_trace({clientid, <<"client3">>}, all, "tmp/client3.log"), - {error, {invalid_log_level, bad_level}} = emqx_tracer:start_trace({clientid, <<"client4">>}, bad_level, "tmp/client4.log"), - {error, {handler_not_added, {file_error,".",eisdir}}} = emqx_tracer:start_trace({clientid, <<"client5">>}, debug, "."), - ct:sleep(100), - - %% Verify the tracing file exits - ?assert(filelib:is_regular("tmp/client.log")), - ?assert(filelib:is_regular("tmp/client2.log")), - - %% Get current traces - ?assertEqual([{{clientid,"client"},{debug,"tmp/client.log"}}, - {{clientid,"client2"},{debug,"tmp/client2.log"}}, - {{clientid,"client3"},{debug,"tmp/client3.log"}} - ], emqx_tracer:lookup_traces()), - - %% set the overall log level to debug - emqx_logger:set_log_level(debug), - - %% Client with clientid = "client" publishes a "hi" message to "a/b/c". - emqtt:publish(T, <<"a/b/c">>, <<"hi">>), - ct:sleep(200), - - %% Verify messages are logged to "tmp/client.log" but not "tmp/client2.log". - ?assert(filelib:file_size("tmp/client.log") > 0), - ?assert(filelib:file_size("tmp/client2.log") == 0), - - %% Stop tracing - ok = emqx_tracer:stop_trace({clientid, <<"client">>}), - ok = emqx_tracer:stop_trace({clientid, <<"client2">>}), - ok = emqx_tracer:stop_trace({clientid, <<"client3">>}), - emqtt:disconnect(T), - - emqx_logger:set_log_level(warning). - -t_trace_topic(_Config) -> - {ok, T} = emqtt:start_link([{host, "localhost"}, - {clientid, <<"client">>}, - {username, <<"testuser">>}, - {password, <<"pass">>} - ]), - emqtt:connect(T), - - %% Start tracing - emqx_logger:set_log_level(debug), - ok = emqx_tracer:start_trace({topic, <<"x/#">>}, all, "tmp/topic_trace.log"), - ok = emqx_tracer:start_trace({topic, <<"y/#">>}, all, "tmp/topic_trace.log"), - ct:sleep(100), - - %% Verify the tracing file exits - ?assert(filelib:is_regular("tmp/topic_trace.log")), - - %% Get current traces - ?assertEqual([{{topic,"x/#"},{debug,"tmp/topic_trace.log"}}, - {{topic,"y/#"},{debug,"tmp/topic_trace.log"}}], emqx_tracer:lookup_traces()), - - %% set the overall log level to debug - emqx_logger:set_log_level(debug), - - %% Client with clientid = "client" publishes a "hi" message to "x/y/z". - emqtt:publish(T, <<"x/y/z">>, <<"hi">>), - ct:sleep(200), - - ?assert(filelib:file_size("tmp/topic_trace.log") > 0), - - %% Stop tracing - ok = emqx_tracer:stop_trace({topic, <<"x/#">>}), - ok = emqx_tracer:stop_trace({topic, <<"y/#">>}), - {error, _Reason} = emqx_tracer:stop_trace({topic, <<"z/#">>}), - emqtt:disconnect(T), - - emqx_logger:set_log_level(warning). 
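Read together, the deleted emqx_tracer_SUITE above and the new emqx_trace_handler_SUITE earlier in this diff document the trace API migration covered by this changeset. Side by side, as exercised by the two suites (a summary of calls that appear in the diffs, not new API surface):

    %% Old API (removed with emqx_tracer_SUITE):
    ok = emqx_tracer:start_trace({clientid, <<"client">>}, debug, "tmp/client.log"),
    ok = emqx_tracer:stop_trace({clientid, <<"client">>}),
    %% Replacement API (covered by emqx_trace_handler_SUITE):
    ok = emqx_trace_handler:install(clientid, <<"client">>, debug, "tmp/client.log"),
    ok = emqx_trace_handler:uninstall(clientid, <<"client">>),
    [] = emqx_trace_handler:running(),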
diff --git a/apps/emqx/test/emqx_ws_connection_SUITE.erl b/apps/emqx/test/emqx_ws_connection_SUITE.erl index 116605a96..d554d3c8c 100644 --- a/apps/emqx/test/emqx_ws_connection_SUITE.erl +++ b/apps/emqx/test/emqx_ws_connection_SUITE.erl @@ -48,7 +48,10 @@ init_per_testcase(TestCase, Config) when TestCase =/= t_ws_pingreq_before_connected, TestCase =/= t_ws_non_check_origin -> - emqx_channel_SUITE:set_test_listenser_confs(), + %% Meck Cm + ok = meck:new(emqx_cm, [passthrough, no_history, no_link]), + ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end), + ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end), %% Mock cowboy_req ok = meck:new(cowboy_req, [passthrough, no_history, no_link]), ok = meck:expect(cowboy_req, header, fun(_, _, _) -> <<>> end), @@ -90,7 +93,8 @@ end_per_testcase(TestCase, _Config) when TestCase =/= t_ws_pingreq_before_connected -> lists:foreach(fun meck:unload/1, - [cowboy_req, + [emqx_cm, + cowboy_req, emqx_access_control, emqx_broker, emqx_hooks, @@ -363,14 +367,12 @@ t_handle_info_close(_) -> {[{close, _}], _St} = ?ws_conn:handle_info({close, protocol_error}, st()). t_handle_info_event(_) -> - ok = meck:new(emqx_cm, [passthrough, no_history]), ok = meck:expect(emqx_cm, register_channel, fun(_,_,_) -> ok end), ok = meck:expect(emqx_cm, insert_channel_info, fun(_,_,_) -> ok end), ok = meck:expect(emqx_cm, connection_closed, fun(_) -> true end), {ok, _} = ?ws_conn:handle_info({event, connected}, st()), {ok, _} = ?ws_conn:handle_info({event, disconnected}, st()), - {ok, _} = ?ws_conn:handle_info({event, updated}, st()), - ok = meck:unload(emqx_cm). + {ok, _} = ?ws_conn:handle_info({event, updated}, st()). t_handle_timeout_idle_timeout(_) -> TRef = make_ref(), diff --git a/apps/emqx_authn/src/emqx_authn_api.erl b/apps/emqx_authn/src/emqx_authn_api.erl index 1b02d37ae..eb71bec29 100644 --- a/apps/emqx_authn/src/emqx_authn_api.erl +++ b/apps/emqx_authn/src/emqx_authn_api.erl @@ -18,1868 +18,710 @@ -behaviour(minirest_api). +-include_lib("typerefl/include/types.hrl"). -include("emqx_authn.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). + +-import(hoconsc, [mk/2, ref/1]). +-import(emqx_dashboard_swagger, [error_codes/2]). + +-define(BAD_REQUEST, 'BAD_REQUEST'). +-define(NOT_FOUND, 'NOT_FOUND'). +-define(CONFLICT, 'CONFLICT'). + +% Swagger + +-define(API_TAGS_GLOBAL, [<<"authentication">>, <<"authentication config(global)">>]). +-define(API_TAGS_SINGLE, [<<"authentication">>, <<"authentication config(single listener)">>]). -export([ api_spec/0 - , authentication/2 - , authentication2/2 - , authentication3/2 - , authentication4/2 - , move/2 - , move2/2 - , import_users/2 - , import_users2/2 - , users/2 - , users2/2 - , users3/2 - , users4/2 + , paths/0 + , schema/1 ]). --define(EXAMPLE_1, #{mechanism => <<"password-based">>, - backend => <<"built-in-database">>, - user_id_type => <<"username">>, - password_hash_algorithm => #{ - name => <<"sha256">> - }}). +-export([ roots/0 + , fields/1 + ]). --define(EXAMPLE_2, #{mechanism => <<"password-based">>, - backend => <<"http">>, - method => <<"post">>, - url => <<"http://localhost:80/login">>, - headers => #{ - <<"content-type">> => <<"application/json">> - }, - body => #{ - <<"username">> => <<"${mqtt-username}">>, - <<"password">> => <<"${mqtt-password}">> - }}). 
+-export([ authenticators/2 + , authenticator/2 + , listener_authenticators/2 + , listener_authenticator/2 + , authenticator_move/2 + , listener_authenticator_move/2 + , authenticator_import_users/2 + , listener_authenticator_import_users/2 + , authenticator_users/2 + , authenticator_user/2 + , listener_authenticator_users/2 + , listener_authenticator_user/2 + ]). --define(EXAMPLE_3, #{mechanism => <<"jwt">>, - use_jwks => false, - algorithm => <<"hmac-based">>, - secret => <<"mysecret">>, - secret_base64_encoded => false, - verify_claims => #{ - <<"username">> => <<"${mqtt-username}">> - }}). +-export([ authenticator_examples/0 + , request_move_examples/0 + , request_import_users_examples/0 + , request_user_create_examples/0 + , request_user_update_examples/0 + , response_user_examples/0 + , response_users_example/0 + ]). --define(EXAMPLE_4, #{mechanism => <<"password-based">>, - backend => <<"mongodb">>, - server => <<"127.0.0.1:27017">>, - database => example, - collection => users, - selector => #{ - username => <<"${mqtt-username}">> - }, - password_hash_field => <<"password_hash">>, - salt_field => <<"salt">>, - is_superuser_field => <<"is_superuser">>, - password_hash_algorithm => <<"sha256">>, - salt_position => <<"prefix">> - }). +%% export these funcs for gateway +-export([ list_users/3 + , add_user/3 + , delete_user/3 + , find_user/3 + , update_user/4 + , serialize_error/1 + ]). --define(EXAMPLE_5, #{mechanism => <<"password-based">>, - backend => <<"redis">>, - server => <<"127.0.0.1:6379">>, - database => 0, - query => <<"HMGET ${mqtt-username} password_hash salt">>, - password_hash_algorithm => <<"sha256">>, - salt_position => <<"prefix">> - }). - --define(INSTANCE_EXAMPLE_1, maps:merge(?EXAMPLE_1, #{id => <<"password-based:built-in-database">>, - enable => true})). - --define(INSTANCE_EXAMPLE_2, maps:merge(?EXAMPLE_2, #{id => <<"password-based:http">>, - connect_timeout => "5s", - enable_pipelining => true, - headers => #{ - <<"accept">> => <<"application/json">>, - <<"cache-control">> => <<"no-cache">>, - <<"connection">> => <<"keepalive">>, - <<"content-type">> => <<"application/json">>, - <<"keep-alive">> => <<"timeout=5">> - }, - max_retries => 5, - pool_size => 8, - request_timeout => "5s", - retry_interval => "1s", - enable => true})). - --define(INSTANCE_EXAMPLE_3, maps:merge(?EXAMPLE_3, #{id => <<"jwt">>, - enable => true})). - --define(INSTANCE_EXAMPLE_4, maps:merge(?EXAMPLE_4, #{id => <<"password-based:mongodb">>, - mongo_type => <<"single">>, - pool_size => 8, - ssl => #{ - enable => false - }, - topology => #{ - max_overflow => 8, - pool_size => 8 - }, - enable => true})). - --define(INSTANCE_EXAMPLE_5, maps:merge(?EXAMPLE_5, #{id => <<"password-based:redis">>, - auto_reconnect => true, - redis_type => single, - pool_size => 8, - ssl => #{ - enable => false - }, - enable => true})). - --define(ERR_RESPONSE(Desc), #{description => Desc, - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"Error">>), - examples => #{ - example1 => #{ - summary => <<"Not Found">>, - value => #{code => <<"NOT_FOUND">>, message => <<"Authenticator '67e4c9d3' does not exist">>} - }, - example2 => #{ - summary => <<"Conflict">>, - value => #{code => <<"ALREADY_EXISTS">>, message => <<"Name has be used">>} - }, - example3 => #{ - summary => <<"Bad Request 1">>, - value => #{code => <<"OUT_OF_RANGE">>, message => <<"Out of range">>} - } - }}}}). +-elvis([{elvis_style, god_modules, disable}]). 
api_spec() -> - {[ authentication_api() - , authentication_api2() - , move_api() - , authentication_api3() - , authentication_api4() - , move_api2() - , import_users_api() - , import_users_api2() - , users_api() - , users2_api() - , users3_api() - , users4_api() - ], definitions()}. + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). -authentication_api() -> - Metadata = #{ - post => create_authenticator_api_spec(), - get => list_authenticators_api_spec() - }, - {"/authentication", Metadata, authentication}. +paths() -> [ "/authentication" + , "/authentication/:id" + , "/authentication/:id/move" + , "/authentication/:id/import_users" + , "/authentication/:id/users" + , "/authentication/:id/users/:user_id" -authentication_api2() -> - Metadata = #{ - get => find_authenticator_api_spec(), - put => update_authenticator_api_spec(), - delete => delete_authenticator_api_spec() - }, - {"/authentication/:id", Metadata, authentication2}. + , "/listeners/:listener_id/authentication" + , "/listeners/:listener_id/authentication/:id" + , "/listeners/:listener_id/authentication/:id/move" + , "/listeners/:listener_id/authentication/:id/import_users" + , "/listeners/:listener_id/authentication/:id/users" + , "/listeners/:listener_id/authentication/:id/users/:user_id" + ]. -authentication_api3() -> - Metadata = #{ - post => create_authenticator_api_spec2(), - get => list_authenticators_api_spec2() - }, - {"/listeners/:listener_id/authentication", Metadata, authentication3}. +roots() -> [ request_user_create + , request_user_update + , request_move + , request_import_users + , response_user + , response_users + ]. -authentication_api4() -> - Metadata = #{ - get => find_authenticator_api_spec2(), - put => update_authenticator_api_spec2(), - delete => delete_authenticator_api_spec2() - }, - {"/listeners/:listener_id/authentication/:id", Metadata, authentication4}. +fields(request_user_create) -> + [ + {user_id, binary()} + | fields(request_user_update) + ]; -move_api() -> - Metadata = #{ - post => move_authenticator_api_spec() - }, - {"/authentication/:id/move", Metadata, move}. +fields(request_user_update) -> + [ + {password, binary()}, + {is_superuser, mk(boolean(), #{default => false, nullable => true})} + ]; -move_api2() -> - Metadata = #{ - post => move_authenticator_api_spec2() - }, - {"/listeners/:listener_id/authentication/:id/move", Metadata, move2}. +fields(request_move) -> + [{position, binary()}]; -import_users_api() -> - Metadata = #{ - post => import_users_api_spec() - }, - {"/authentication/:id/import_users", Metadata, import_users}. +fields(request_import_users) -> + [{filename, binary()}]; -import_users_api2() -> - Metadata = #{ - post => import_users_api_spec2() - }, - {"/listeners/:listener_id/authentication/:id/import_users", Metadata, import_users2}. +fields(response_user) -> + [ + {user_id, binary()}, + {is_superuser, mk(boolean(), #{default => false, nullable => true})} + ]; -users_api() -> - Metadata = #{ - post => create_user_api_spec(), - get => list_users_api_spec() - }, - {"/authentication/:id/users", Metadata, users}. +fields(response_users) -> + paginated_list_type(ref(response_user)); -users2_api() -> - Metadata = #{ - put => update_user_api_spec(), - get => find_user_api_spec(), - delete => delete_user_api_spec() - }, - {"/authentication/:id/users/:user_id", Metadata, users2}. - -users3_api() -> - Metadata = #{ - post => create_user_api_spec2(), - get => list_users_api_spec2() - }, - {"/listeners/:listener_id/authentication/:id/users", Metadata, users3}. 
- -users4_api() -> - Metadata = #{ - put => update_user_api_spec2(), - get => find_user_api_spec2(), - delete => delete_user_api_spec2() - }, - {"/listeners/:listener_id/authentication/:id/users/:user_id", Metadata, users4}. - -create_authenticator_api_spec() -> - #{ - description => <<"Create a authenticator for global authentication">>, - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"AuthenticatorConfig">>), - examples => #{ - default => #{ - summary => <<"Default">>, - value => emqx_json:encode(?EXAMPLE_1) - }, - http => #{ - summary => <<"Authentication provided by HTTP Server">>, - value => emqx_json:encode(?EXAMPLE_2) - }, - jwt => #{ - summary => <<"JWT Authentication">>, - value => emqx_json:encode(?EXAMPLE_3) - }, - mongodb => #{ - summary => <<"Authentication with MongoDB">>, - value => emqx_json:encode(?EXAMPLE_4) - }, - redis => #{ - summary => <<"Authentication with Redis">>, - value => emqx_json:encode(?EXAMPLE_5) - } - } - } - } - }, - responses => #{ - <<"201">> => #{ - description => <<"Created">>, - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"AuthenticatorInstance">>), - examples => #{ - example1 => #{ - summary => <<"Example 1">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_1) - }, - example2 => #{ - summary => <<"Example 2">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_2) - }, - example3 => #{ - summary => <<"Example 3">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_3) - }, - example4 => #{ - summary => <<"Example 4">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_4) - }, - example5 => #{ - summary => <<"Example 5">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_5) - } - } - } - } - }, - <<"400">> => ?ERR_RESPONSE(<<"Bad Request">>), - <<"409">> => ?ERR_RESPONSE(<<"Conflict">>) - } - }. - -create_authenticator_api_spec2() -> - Spec = create_authenticator_api_spec(), - Spec#{ - description => <<"Create a authenticator for listener">>, - parameters => [ - #{ - name => listener_id, - in => path, - schema => #{ - type => string - }, - required => true - } - ] - }. - -list_authenticators_api_spec() -> - #{ - description => <<"List authenticators for global authentication">>, - responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => #{ - type => array, - items => minirest:ref(<<"AuthenticatorInstance">>) - }, - examples => #{ - example => #{ - summary => <<"Example">>, - value => emqx_json:encode([ ?INSTANCE_EXAMPLE_1 - , ?INSTANCE_EXAMPLE_2 - , ?INSTANCE_EXAMPLE_3 - , ?INSTANCE_EXAMPLE_4 - , ?INSTANCE_EXAMPLE_5 - ])}}}}}}}. - -list_authenticators_api_spec2() -> - Spec = list_authenticators_api_spec(), - Spec#{ - description => <<"List authenticators for listener">>, - parameters => [ - #{ - name => listener_id, - in => path, - schema => #{ - type => string - }, - required => true - } - ] - }. 
- -find_authenticator_api_spec() -> - #{ - description => <<"Get authenticator by id">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ], - responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"AuthenticatorInstance">>), - examples => #{ - example1 => #{ - summary => <<"Example 1">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_1) - }, - example2 => #{ - summary => <<"Example 2">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_2) - }, - example3 => #{ - summary => <<"Example 3">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_3) - }, - example4 => #{ - summary => <<"Example 4">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_4) - }, - example5 => #{ - summary => <<"Example 5">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_5) - } - } - } - } - }, - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. - -find_authenticator_api_spec2() -> - Spec = find_authenticator_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. - -update_authenticator_api_spec() -> - #{ - description => <<"Update authenticator">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ], - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"AuthenticatorConfig">>), - examples => #{ - example1 => #{ - summary => <<"Example 1">>, - value => emqx_json:encode(?EXAMPLE_1) - }, - example2 => #{ - summary => <<"Example 2">>, - value => emqx_json:encode(?EXAMPLE_2) - }, - example3 => #{ - summary => <<"Example 3">>, - value => emqx_json:encode(?EXAMPLE_3) - }, - example4 => #{ - summary => <<"Example 4">>, - value => emqx_json:encode(?EXAMPLE_4) - }, - example5 => #{ - summary => <<"Example 5">>, - value => emqx_json:encode(?EXAMPLE_5) - } - } - } - } - }, - responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"AuthenticatorInstance">>), - examples => #{ - example1 => #{ - summary => <<"Example 1">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_1) - }, - example2 => #{ - summary => <<"Example 2">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_2) - }, - example3 => #{ - summary => <<"Example 3">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_3) - }, - example4 => #{ - summary => <<"Example 4">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_4) - }, - example5 => #{ - summary => <<"Example 5">>, - value => emqx_json:encode(?INSTANCE_EXAMPLE_5) - } - } - } - } - }, - <<"400">> => ?ERR_RESPONSE(<<"Bad Request">>), - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>), - <<"409">> => ?ERR_RESPONSE(<<"Conflict">>) - } - }. - -update_authenticator_api_spec2() -> - Spec = update_authenticator_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. 
- -delete_authenticator_api_spec() -> - #{ - description => <<"Delete authenticator">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ], - responses => #{ - <<"204">> => #{ - description => <<"No Content">> - }, - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. - -delete_authenticator_api_spec2() -> - Spec = delete_authenticator_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. - -move_authenticator_api_spec() -> - #{ - description => <<"Move authenticator">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ], - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => #{ - oneOf => [ - #{ - type => object, - required => [position], - properties => #{ - position => #{ - type => string, - enum => [<<"top">>, <<"bottom">>], - example => <<"top">> - } - } - }, - #{ - type => object, - required => [position], - properties => #{ - position => #{ - type => string, - description => <<"before:">>, - example => <<"before:password-based:mysql">> - } - } - } - ] - } - } - } - }, - responses => #{ - <<"204">> => #{ - description => <<"No Content">> - }, - <<"400">> => ?ERR_RESPONSE(<<"Bad Request">>), - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. - -move_authenticator_api_spec2() -> - Spec = move_authenticator_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. - -import_users_api_spec() -> - #{ - description => <<"Import users from json/csv file">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ], - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => #{ - type => object, - required => [filename], - properties => #{ - filename => #{ - type => string - } - } - } - } - } - }, - responses => #{ - <<"204">> => #{ - description => <<"No Content">> - }, - <<"400">> => ?ERR_RESPONSE(<<"Bad Request">>), - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. - -import_users_api_spec2() -> - Spec = import_users_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. 
- -create_user_api_spec() -> - #{ - description => <<"Add user">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ], - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => #{ - type => object, - required => [user_id, password], - properties => #{ - user_id => #{ - type => string - }, - password => #{ - type => string - }, - is_superuser => #{ - type => boolean, - default => false - } - } - } - } - } - }, - responses => #{ - <<"201">> => #{ - description => <<"Created">>, - content => #{ - 'application/json' => #{ - schema => #{ - type => object, - properties => #{ - user_id => #{ - type => string - }, - is_superuser => #{ - type => boolean - } - } - } - } - } - }, - <<"400">> => ?ERR_RESPONSE(<<"Bad Request">>), - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. - -create_user_api_spec2() -> - Spec = create_user_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. - -list_users_api_spec() -> - #{ - description => <<"List users">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => page, - in => query, - description => <<"Page Index">>, - schema => #{ - type => integer - }, - required => false - }, - #{ - name => limit, - in => query, - description => <<"Page limit">>, - schema => #{ - type => integer - }, - required => false - } - ], - responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => #{ - type => array, - items => #{ - type => object, - properties => #{ - user_id => #{ - type => string - }, - is_superuser => #{ - type => boolean - } - } - } - } - } - } - }, - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. - -list_users_api_spec2() -> - Spec = list_users_api_spec(), - Spec#{ - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => page, - in => query, - description => <<"Page Index">>, - schema => #{ - type => integer - }, - required => false - }, - #{ - name => limit, - in => query, - description => <<"Page limit">>, - schema => #{ - type => integer - }, - required => false - } - ] - }. 
- -update_user_api_spec() -> - #{ - description => <<"Update user">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => user_id, - in => path, - description => <<"User id">>, - schema => #{ - type => string - }, - required => true - } - ], - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => #{ - type => object, - properties => #{ - password => #{ - type => string - }, - is_superuser => #{ - type => boolean - } - } - } - } - } - }, - responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => #{ - type => array, - items => #{ - type => object, - properties => #{ - user_id => #{ - type => string - }, - is_superuser => #{ - type => boolean - } - } - } - } - } - } - }, - <<"400">> => ?ERR_RESPONSE(<<"Bad Request">>), - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. - -update_user_api_spec2() -> - Spec = update_user_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => user_id, - in => path, - description => <<"User id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. - -find_user_api_spec() -> - #{ - description => <<"Get user info">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => user_id, - in => path, - description => <<"User id">>, - schema => #{ - type => string - }, - required => true - } - ], - responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => #{ - type => array, - items => #{ - type => object, - properties => #{ - user_id => #{ - type => string - }, - is_superuser => #{ - type => boolean - } - } - } - } - } - } - }, - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. - -find_user_api_spec2() -> - Spec = find_user_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => user_id, - in => path, - description => <<"User id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. - -delete_user_api_spec() -> - #{ - description => <<"Delete user">>, - parameters => [ - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => user_id, - in => path, - description => <<"User id">>, - schema => #{ - type => string - }, - required => true - } - ], - responses => #{ - <<"204">> => #{ - description => <<"No Content">> - }, - <<"404">> => ?ERR_RESPONSE(<<"Not Found">>) - } - }. 
- -delete_user_api_spec2() -> - Spec = delete_user_api_spec(), - Spec#{ - parameters => [ - #{ - name => listener_id, - in => path, - description => <<"Listener id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => id, - in => path, - description => <<"Authenticator id">>, - schema => #{ - type => string - }, - required => true - }, - #{ - name => user_id, - in => path, - description => <<"User id">>, - schema => #{ - type => string - }, - required => true - } - ] - }. - - -definitions() -> - AuthenticatorConfigDef = #{ - allOf => [ - #{ - type => object, - properties => #{ - enable => #{ - type => boolean, - default => true, - example => true - } - } - }, - #{ - oneOf => [ minirest:ref(<<"PasswordBasedBuiltInDatabase">>) - , minirest:ref(<<"PasswordBasedMySQL">>) - , minirest:ref(<<"PasswordBasedPostgreSQL">>) - , minirest:ref(<<"PasswordBasedMongoDB">>) - , minirest:ref(<<"PasswordBasedRedis">>) - , minirest:ref(<<"PasswordBasedHTTPServer">>) - , minirest:ref(<<"JWT">>) - , minirest:ref(<<"SCRAMBuiltInDatabase">>) - ] - } - ] - }, - - AuthenticatorInstanceDef = #{ - allOf => [ - #{ - type => object, - properties => #{ - id => #{ - type => string - } - } - } - ] ++ maps:get(allOf, AuthenticatorConfigDef) - }, - - PasswordBasedBuiltInDatabaseDef = #{ - type => object, - required => [mechanism, backend], - properties => #{ - mechanism => #{ - type => string, - enum => [<<"password-based">>], - example => <<"password-based">> - }, - backend => #{ - type => string, - enum => [<<"built-in-database">>], - example => <<"built-in-database">> - }, - user_id_type => #{ - type => string, - enum => [<<"username">>, <<"clientid">>], - example => <<"username">> - }, - password_hash_algorithm => minirest:ref(<<"PasswordHashAlgorithm">>) - } - }, - - PasswordBasedMySQLDef = #{ - type => object, - required => [ mechanism - , backend - , server - , database - , username - , password - , query], - properties => #{ - mechanism => #{ - type => string, - enum => [<<"password-based">>], - example => <<"password-based">> - }, - backend => #{ - type => string, - enum => [<<"mysql">>], - example => <<"mysql">> - }, - server => #{ - type => string, - example => <<"localhost:3306">> - }, - database => #{ - type => string - }, - pool_size => #{ - type => integer, - default => 8 - }, - username => #{ - type => string - }, - password => #{ - type => string - }, - auto_reconnect => #{ - type => boolean, - default => true - }, - ssl => minirest:ref(<<"SSL">>), - password_hash_algorithm => #{ - type => string, - enum => [<<"plain">>, <<"md5">>, <<"sha">>, <<"sha256">>, <<"sha512">>, <<"bcrypt">>], - default => <<"sha256">> - }, - salt_position => #{ - type => string, - enum => [<<"prefix">>, <<"suffix">>], - default => <<"prefix">> - }, - query => #{ - type => string, - example => <<"SELECT password_hash FROM mqtt_user WHERE username = ${mqtt-username}">> - }, - query_timeout => #{ - type => string, - description => <<"Query timeout">>, - default => "5s" - } - } - }, - - PasswordBasedPostgreSQLDef = #{ - type => object, - required => [ mechanism - , backend - , server - , database - , username - , password - , query], - properties => #{ - mechanism => #{ - type => string, - enum => [<<"password-based">>], - example => <<"password-based">> - }, - backend => #{ - type => string, - enum => [<<"postgresql">>], - example => <<"postgresql">> - }, - server => #{ - type => string, - example => <<"localhost:5432">> - }, - database => #{ - type => string - }, - pool_size => #{ - type => integer, - 
default => 8 - }, - username => #{ - type => string - }, - password => #{ - type => string - }, - auto_reconnect => #{ - type => boolean, - default => true - }, - password_hash_algorithm => #{ - type => string, - enum => [<<"plain">>, <<"md5">>, <<"sha">>, <<"sha256">>, <<"sha512">>, <<"bcrypt">>], - default => <<"sha256">> - }, - salt_position => #{ - type => string, - enum => [<<"prefix">>, <<"suffix">>], - default => <<"prefix">> - }, - query => #{ - type => string, - example => <<"SELECT password_hash FROM mqtt_user WHERE username = ${mqtt-username}">> - } - } - }, - - PasswordBasedMongoDBDef = #{ - type => object, - required => [ mechanism - , backend - , server - , servers - , replica_set_name - , database - , username - , password - , collection - , selector - , password_hash_field - ], - properties => #{ - mechanism => #{ - type => string, - enum => [<<"password-based">>], - example => <<"password-based">> - }, - backend => #{ - type => string, - enum => [<<"mongodb">>], - example => <<"mongodb">> - }, - server => #{ - description => <<"Mutually exclusive with the 'servers' field, only valid in standalone mode">>, - type => string, - example => <<"127.0.0.1:27017">> - }, - servers => #{ - description => <<"Mutually exclusive with the 'server' field, only valid in replica set and sharded mode">>, - type => array, - items => #{ - type => string - }, - example => [<<"127.0.0.1:27017">>] - }, - replica_set_name => #{ - description => <<"Only valid in replica set mode">>, - type => string - }, - database => #{ - type => string - }, - username => #{ - type => string - }, - password => #{ - type => string - }, - auth_source => #{ - type => string, - default => <<"admin">> - }, - pool_size => #{ - type => integer, - default => 8 - }, - collection => #{ - type => string - }, - selector => #{ - type => object, - additionalProperties => true, - example => <<"{\"username\":\"${mqtt-username}\"}">> - }, - password_hash_field => #{ - type => string, - example => <<"password_hash">> - }, - salt_field => #{ - type => string, - example => <<"salt">> - }, - is_superuser_field => #{ - type => string, - example => <<"is_superuser">> - }, - password_hash_algorithm => #{ - type => string, - enum => [<<"plain">>, <<"md5">>, <<"sha">>, <<"sha256">>, <<"sha512">>, <<"bcrypt">>], - default => <<"sha256">>, - example => <<"sha256">> - }, - salt_position => #{ - description => <<"Only valid when the 'salt_field' field is specified">>, - type => string, - enum => [<<"prefix">>, <<"suffix">>], - default => <<"prefix">>, - example => <<"prefix">> - } - } - }, - - PasswordBasedRedisDef = #{ - type => object, - required => [ mechanism - , backend - , server - , servers - , password - , database - , query - ], - properties => #{ - mechanism => #{ - type => string, - enum => [<<"password-based">>], - example => <<"password-based">> - }, - backend => #{ - type => string, - enum => [<<"redis">>], - example => <<"redis">> - }, - server => #{ - description => <<"Mutually exclusive with the 'servers' field, only valid in standalone mode">>, - type => string, - example => <<"127.0.0.1:27017">> - }, - servers => #{ - description => <<"Mutually exclusive with the 'server' field, only valid in cluster and sentinel mode">>, - type => array, - items => #{ - type => string - }, - example => [<<"127.0.0.1:27017">>] - }, - sentinel => #{ - description => <<"Only valid in sentinel mode">>, - type => string - }, - password => #{ - type => string - }, - database => #{ - type => integer, - example => 0 - }, - query => #{ - type => 
string, - example => <<"HMGET ${mqtt-username} password_hash salt">> - }, - password_hash_algorithm => #{ - type => string, - enum => [<<"plain">>, <<"md5">>, <<"sha">>, <<"sha256">>, <<"sha512">>, <<"bcrypt">>], - default => <<"sha256">>, - example => <<"sha256">> - }, - salt_position => #{ - type => string, - enum => [<<"prefix">>, <<"suffix">>], - default => <<"prefix">>, - example => <<"prefix">> - }, - pool_size => #{ - type => integer, - default => 8 - }, - auto_reconnect => #{ - type => boolean, - default => true - } - } - }, - - PasswordBasedHTTPServerDef = #{ - type => object, - required => [ mechanism - , backend - , url - , body - ], - properties => #{ - mechanism => #{ - type => string, - enum => [<<"password-based">>], - example => <<"password-based">> - }, - backend => #{ - type => string, - enum => [<<"http">>], - example => <<"http">> - }, - method => #{ - type => string, - enum => [<<"get">>, <<"post">>], - default => <<"post">> - }, - url => #{ - type => string, - example => <<"http://localhost:80/login">> - }, - headers => #{ - type => object, - additionalProperties => #{ - type => string - } - }, - body => #{ - type => object - }, - connect_timeout => #{ - type => string, - default => <<"5s">> - }, - max_retries => #{ - type => integer, - default => 5 - }, - retry_interval => #{ - type => string, - default => <<"1s">> - }, - request_timout => #{ - type => integer, - default => 5000 - }, - pool_size => #{ - type => integer, - default => 8 - }, - enable_pipelining => #{ - type => boolean, - default => true - }, - ssl => minirest:ref(<<"SSL">>) - } - }, - - JWTDef = #{ - type => object, - required => [mechanism], - properties => #{ - mechanism => #{ - type => string, - enum => [<<"jwt">>], - example => <<"jwt">> - }, - use_jwks => #{ - type => boolean, - default => false, - example => false - }, - algorithm => #{ - type => string, - enum => [<<"hmac-based">>, <<"public-key">>], - default => <<"hmac-based">>, - example => <<"hmac-based">> - }, - secret => #{ - type => string - }, - secret_base64_encoded => #{ - type => boolean, - default => false - }, - certificate => #{ - type => string - }, - endpoint => #{ - type => string, - example => <<"http://localhost:80">> - }, - refresh_interval => #{ - type => integer, - default => 300, - example => 300 - }, - verify_claims => #{ - type => object, - additionalProperties => #{ - type => string - } - }, - ssl => minirest:ref(<<"SSL">>) - } - }, - - SCRAMBuiltInDatabaseDef = #{ - type => object, - required => [mechanism, backend], - properties => #{ - mechanism => #{ - type => string, - enum => [<<"scram">>], - example => <<"scram">> - }, - backend => #{ - type => string, - enum => [<<"built-in-database">>], - example => <<"built-in-database">> - }, - algorithm => #{ - type => string, - enum => [<<"sha256">>, <<"sha512">>], - default => <<"sha256">> - }, - iteration_count => #{ - type => integer, - default => 4096 - } - } - }, - - PasswordHashAlgorithmDef = #{ - type => object, - required => [name], - properties => #{ - name => #{ - type => string, - enum => [<<"plain">>, <<"md5">>, <<"sha">>, <<"sha256">>, <<"sha512">>, <<"bcrypt">>], - default => <<"sha256">> - }, - salt_rounds => #{ - type => integer, - description => <<"Only valid when the name field is set to bcrypt">>, - default => 10 - } - } - }, - - SSLDef = #{ - type => object, - properties => #{ - enable => #{ - type => boolean, - default => false - }, - certfile => #{ - type => string - }, - keyfile => #{ - type => string - }, - cacertfile => #{ - type => string - }, - 
verify => #{ - type => boolean, - default => true - }, - server_name_indication => #{ - type => object, - properties => #{ - enable => #{ - type => boolean, - default => false - }, - hostname => #{ - type => string - } - } - } - } - }, - - ErrorDef = #{ - type => object, - properties => #{ - code => #{ - type => string, - enum => [<<"NOT_FOUND">>], - example => <<"NOT_FOUND">> - }, - message => #{ - type => string - } - } - }, - - [ #{<<"AuthenticatorConfig">> => AuthenticatorConfigDef} - , #{<<"AuthenticatorInstance">> => AuthenticatorInstanceDef} - , #{<<"PasswordBasedBuiltInDatabase">> => PasswordBasedBuiltInDatabaseDef} - , #{<<"PasswordBasedMySQL">> => PasswordBasedMySQLDef} - , #{<<"PasswordBasedPostgreSQL">> => PasswordBasedPostgreSQLDef} - , #{<<"PasswordBasedMongoDB">> => PasswordBasedMongoDBDef} - , #{<<"PasswordBasedRedis">> => PasswordBasedRedisDef} - , #{<<"PasswordBasedHTTPServer">> => PasswordBasedHTTPServerDef} - , #{<<"JWT">> => JWTDef} - , #{<<"SCRAMBuiltInDatabase">> => SCRAMBuiltInDatabaseDef} - , #{<<"PasswordHashAlgorithm">> => PasswordHashAlgorithmDef} - , #{<<"SSL">> => SSLDef} - , #{<<"Error">> => ErrorDef} +fields(pagination_meta) -> + [ + {page, non_neg_integer()}, + {limit, non_neg_integer()}, + {count, non_neg_integer()} ]. -authentication(post, #{body := Config}) -> +schema("/authentication") -> + #{ + 'operationId' => authenticators, + get => #{ + tags => ?API_TAGS_GLOBAL, + description => <<"List authenticators for global authentication">>, + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_example( + hoconsc:array(emqx_authn_schema:authenticator_type()), + authenticator_array_example()) + } + }, + post => #{ + tags => ?API_TAGS_GLOBAL, + description => <<"Create authenticator for global authentication">>, + 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples()), + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples()), + 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>), + 409 => error_codes([?CONFLICT], <<"Conflict">>) + } + } + }; + +schema("/authentication/:id") -> + #{ + 'operationId' => authenticator, + get => #{ + tags => ?API_TAGS_GLOBAL, + description => <<"Get authenticator from global authentication chain">>, + parameters => [param_auth_id()], + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples()), + 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + } + }, + put => #{ + tags => ?API_TAGS_GLOBAL, + description => <<"Update authenticator from global authentication chain">>, + parameters => [param_auth_id()], + 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples() + ), + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples()), + 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>), + 404 => error_codes([?NOT_FOUND], <<"Not Found">>), + 409 => error_codes([?CONFLICT], <<"Conflict">>) + } + }, + delete => #{ + tags => ?API_TAGS_GLOBAL, + description => <<"Delete authenticator from global authentication chain">>, + parameters => [param_auth_id()], + responses => #{ + 204 => <<"Authenticator deleted">>, + 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + } + } + }; + +schema("/listeners/:listener_id/authentication") -> + #{ + 
'operationId' => listener_authenticators, + get => #{ + tags => ?API_TAGS_SINGLE, + description => <<"List authenticators for listener authentication">>, + parameters => [param_listener_id()], + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_example( + hoconsc:array(emqx_authn_schema:authenticator_type()), + authenticator_array_example()) + } + }, + post => #{ + tags => ?API_TAGS_SINGLE, + description => <<"Create authenticator for listener authentication">>, + parameters => [param_listener_id()], + 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples() + ), + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples()), + 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>), + 409 => error_codes([?CONFLICT], <<"Conflict">>) + } + } + }; + +schema("/listeners/:listener_id/authentication/:id") -> + #{ + 'operationId' => listener_authenticator, + get => #{ + tags => ?API_TAGS_SINGLE, + description => <<"Get authenticator from listener authentication chain">>, + parameters => [param_listener_id(), param_auth_id()], + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples()), + 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + } + }, + put => #{ + tags => ?API_TAGS_SINGLE, + description => <<"Update authenticator from listener authentication chain">>, + parameters => [param_listener_id(), param_auth_id()], + 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples()), + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + authenticator_examples()), + 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>), + 404 => error_codes([?NOT_FOUND], <<"Not Found">>), + 409 => error_codes([?CONFLICT], <<"Conflict">>) + } + }, + delete => #{ + tags => ?API_TAGS_SINGLE, + description => <<"Delete authenticator from listener authentication chain">>, + parameters => [param_listener_id(), param_auth_id()], + responses => #{ + 204 => <<"Authenticator deleted">>, + 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + } + } + }; + + +schema("/authentication/:id/move") -> + #{ + 'operationId' => authenticator_move, + post => #{ + tags => ?API_TAGS_GLOBAL, + description => <<"Move authenticator in global authentication chain">>, + parameters => [param_auth_id()], + 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(request_move), + request_move_examples()), + responses => #{ + 204 => <<"Authenticator moved">>, + 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>), + 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + } + } + }; + +schema("/listeners/:listener_id/authentication/:id/move") -> + #{ + 'operationId' => listener_authenticator_move, + post => #{ + tags => ?API_TAGS_SINGLE, + description => <<"Move authenticator in listener authentication chain">>, + parameters => [param_listener_id(), param_auth_id()], + 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(request_move), + request_move_examples()), + responses => #{ + 204 => <<"Authenticator moved">>, + 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>), + 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + } + } + }; + +schema("/authentication/:id/import_users") -> + #{ + 'operationId' => authenticator_import_users, + post => #{ + tags => 
?API_TAGS_GLOBAL,
+            description => <<"Import users into authenticator in global authentication chain">>,
+            parameters => [param_auth_id()],
+            'requestBody' => emqx_dashboard_swagger:schema_with_examples(
+                ref(request_import_users),
+                request_import_users_examples()),
+            responses => #{
+                204 => <<"Users imported">>,
+                400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        }
+    };
+
+schema("/listeners/:listener_id/authentication/:id/import_users") ->
+    #{
+        'operationId' => listener_authenticator_import_users,
+        post => #{
+            tags => ?API_TAGS_SINGLE,
+            description => <<"Import users into authenticator in listener authentication chain">>,
+            parameters => [param_listener_id(), param_auth_id()],
+            'requestBody' => emqx_dashboard_swagger:schema_with_examples(
+                ref(request_import_users),
+                request_import_users_examples()),
+            responses => #{
+                204 => <<"Users imported">>,
+                400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        }
+    };
+
+schema("/authentication/:id/users") ->
+    #{
+        'operationId' => authenticator_users,
+        post => #{
+            tags => ?API_TAGS_GLOBAL,
+            description => <<"Create users for authenticator in global authentication chain">>,
+            parameters => [param_auth_id()],
+            'requestBody' => emqx_dashboard_swagger:schema_with_examples(
+                ref(request_user_create),
+                request_user_create_examples()),
+            responses => #{
+                201 => emqx_dashboard_swagger:schema_with_examples(
+                    ref(response_user),
+                    response_user_examples()),
+                400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        },
+        get => #{
+            tags => ?API_TAGS_GLOBAL,
+            description => <<"List users in authenticator in global authentication chain">>,
+            parameters => [
+                param_auth_id(),
+                {page, mk(integer(), #{in => query, desc => <<"Page Index">>, nullable => true})},
+                {limit, mk(integer(), #{in => query, desc => <<"Page Limit">>, nullable => true})}
+            ],
+            responses => #{
+                200 => emqx_dashboard_swagger:schema_with_example(
+                    ref(response_users),
+                    response_users_example()),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        }
+    };
+
+schema("/listeners/:listener_id/authentication/:id/users") ->
+    #{
+        'operationId' => listener_authenticator_users,
+        post => #{
+            tags => ?API_TAGS_SINGLE,
+            description => <<"Create users for authenticator in listener authentication chain">>,
+            parameters => [param_auth_id(), param_listener_id()],
+            'requestBody' => emqx_dashboard_swagger:schema_with_examples(
+                ref(request_user_create),
+                request_user_create_examples()),
+            responses => #{
+                201 => emqx_dashboard_swagger:schema_with_examples(
+                    ref(response_user),
+                    response_user_examples()),
+                400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        },
+        get => #{
+            tags => ?API_TAGS_SINGLE,
+            description => <<"List users in authenticator in listener authentication chain">>,
+            parameters => [
+                param_listener_id(), param_auth_id(),
+                {page, mk(integer(), #{in => query, desc => <<"Page Index">>, nullable => true})},
+                {limit, mk(integer(), #{in => query, desc => <<"Page Limit">>, nullable => true})}
+            ],
+            responses => #{
+                200 => emqx_dashboard_swagger:schema_with_example(
+                    ref(response_users),
+                    response_users_example()),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        }
+    };
+
+schema("/authentication/:id/users/:user_id") ->
+    #{
+        'operationId' => authenticator_user,
+        get => #{
+            tags => ?API_TAGS_GLOBAL,
+            description => <<"Get user from authenticator in global authentication chain">>,
+            parameters => [param_auth_id(), param_user_id()],
+            responses => #{
+                200 => emqx_dashboard_swagger:schema_with_examples(
+                    ref(response_user),
+                    response_user_examples()),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        },
+        put => #{
+            tags => ?API_TAGS_GLOBAL,
+            description => <<"Update user in authenticator in global authentication chain">>,
+            parameters => [param_auth_id(), param_user_id()],
+            'requestBody' => emqx_dashboard_swagger:schema_with_examples(
+                ref(request_user_update),
+                request_user_update_examples()),
+            responses => #{
+                200 => emqx_dashboard_swagger:schema_with_example(
+                    ref(response_user),
+                    response_user_examples()),
+                400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        },
+        delete => #{
+            tags => ?API_TAGS_GLOBAL,
+            description => <<"Delete user from authenticator in global authentication chain">>,
+            parameters => [param_auth_id(), param_user_id()],
+            responses => #{
+                204 => <<"User deleted">>,
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        }
+    };
+
+schema("/listeners/:listener_id/authentication/:id/users/:user_id") ->
+    #{
+        'operationId' => listener_authenticator_user,
+        get => #{
+            tags => ?API_TAGS_SINGLE,
+            description => <<"Get user from authenticator in listener authentication chain">>,
+            parameters => [param_listener_id(), param_auth_id(), param_user_id()],
+            responses => #{
+                200 => emqx_dashboard_swagger:schema_with_example(
+                    ref(response_user),
+                    response_user_examples()),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        },
+        put => #{
+            tags => ?API_TAGS_SINGLE,
+            description => <<"Update user in authenticator in listener authentication chain">>,
+            parameters => [param_listener_id(), param_auth_id(), param_user_id()],
+            'requestBody' => emqx_dashboard_swagger:schema_with_example(
+                ref(request_user_update),
+                request_user_update_examples()),
+            responses => #{
+                200 => emqx_dashboard_swagger:schema_with_example(
+                    ref(response_user),
+                    response_user_examples()),
+                400 => error_codes([?BAD_REQUEST], <<"Bad Request">>),
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        },
+        delete => #{
+            tags => ?API_TAGS_SINGLE,
+            description => <<"Delete user from authenticator in listener authentication chain">>,
+            parameters => [param_listener_id(), param_auth_id(), param_user_id()],
+            responses => #{
+                204 => <<"User deleted">>,
+                404 => error_codes([?NOT_FOUND], <<"Not Found">>)
+            }
+        }
+    }.
+
+param_auth_id() ->
+    {
+        id,
+        mk(binary(), #{
+            in => path,
+            desc => <<"Authenticator ID">>
+        })
+    }.
+
+param_listener_id() ->
+    {
+        listener_id,
+        mk(binary(), #{
+            in => path,
+            desc => <<"Listener ID">>,
+            example => emqx_listeners:id_example()
+        })
+    }.
+
+param_user_id() ->
+    {
+        user_id,
+        mk(binary(), #{
+            in => path,
+            desc => <<"User ID">>
+        })
+    }.
+
+authenticators(post, #{body := Config}) ->
     create_authenticator([authentication], ?GLOBAL, Config);
 
-authentication(get, _Params) ->
+authenticators(get, _Params) ->
    list_authenticators([authentication]).
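%% How the pieces above fit together (a sketch based only on the callbacks
%% defined in this module): api_spec/0 hands the module to
%% emqx_dashboard_swagger:spec/2, which is expected to call back into paths/0
%% and schema/1; each schema map's 'operationId' names one of the exported
%% handler functions, and every handler takes (Method, Request) and returns a
%% {StatusCode} or {StatusCode, Body} tuple, e.g.:
%%
%%   schema("/authentication") -> #{'operationId' => authenticators, ...}.
%%   authenticators(get, _Params)            -> {200, Authenticators};
%%   authenticators(post, #{body := Config}) -> {200, Created}.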
-authentication2(get, #{bindings := #{id := AuthenticatorID}}) -> +authenticator(get, #{bindings := #{id := AuthenticatorID}}) -> list_authenticator([authentication], AuthenticatorID); -authentication2(put, #{bindings := #{id := AuthenticatorID}, body := Config}) -> +authenticator(put, #{bindings := #{id := AuthenticatorID}, body := Config}) -> update_authenticator([authentication], ?GLOBAL, AuthenticatorID, Config); -authentication2(delete, #{bindings := #{id := AuthenticatorID}}) -> +authenticator(delete, #{bindings := #{id := AuthenticatorID}}) -> delete_authenticator([authentication], ?GLOBAL, AuthenticatorID). -authentication3(post, #{bindings := #{listener_id := ListenerID}, body := Config}) -> - case find_listener(ListenerID) of - {ok, {Type, Name}} -> - create_authenticator([listeners, Type, Name, authentication], ListenerID, Config); - {error, Reason} -> - serialize_error(Reason) - end; -authentication3(get, #{bindings := #{listener_id := ListenerID}}) -> - case find_listener(ListenerID) of - {ok, {Type, Name}} -> - list_authenticators([listeners, Type, Name, authentication]); - {error, Reason} -> - serialize_error(Reason) - end. +listener_authenticators(post, #{bindings := #{listener_id := ListenerID}, body := Config}) -> + with_listener(ListenerID, + fun(Type, Name, ChainName) -> + create_authenticator([listeners, Type, Name, authentication], + ChainName, + Config) + end); -authentication4(get, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}}) -> - case find_listener(ListenerID) of - {ok, {Type, Name}} -> - list_authenticator([listeners, Type, Name, authentication], AuthenticatorID); - {error, Reason} -> - serialize_error(Reason) - end; -authentication4(put, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}, body := Config}) -> - case find_listener(ListenerID) of - {ok, {Type, Name}} -> - update_authenticator([listeners, Type, Name, authentication], ListenerID, AuthenticatorID, Config); - {error, Reason} -> - serialize_error(Reason) - end; -authentication4(delete, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}}) -> - case find_listener(ListenerID) of - {ok, {Type, Name}} -> - delete_authenticator([listeners, Type, Name, authentication], ListenerID, AuthenticatorID); - {error, Reason} -> - serialize_error(Reason) - end. +listener_authenticators(get, #{bindings := #{listener_id := ListenerID}}) -> + with_listener(ListenerID, + fun(Type, Name, _) -> + list_authenticators([listeners, Type, Name, authentication]) + end). -move(post, #{bindings := #{id := AuthenticatorID}, body := #{<<"position">> := Position}}) -> +listener_authenticator(get, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}}) -> + with_listener(ListenerID, + fun(Type, Name, _) -> + list_authenticator([listeners, Type, Name, authentication], + AuthenticatorID) + end); +listener_authenticator(put, + #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}, + body := Config}) -> + with_listener(ListenerID, + fun(Type, Name, ChainName) -> + update_authenticator([listeners, Type, Name, authentication], + ChainName, + AuthenticatorID, + Config) + end); +listener_authenticator(delete, + #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}}) -> + with_listener(ListenerID, + fun(Type, Name, ChainName) -> + delete_authenticator([listeners, Type, Name, authentication], + ChainName, + AuthenticatorID) + end). 
+ +authenticator_move(post, + #{bindings := #{id := AuthenticatorID}, + body := #{<<"position">> := Position}}) -> move_authenitcator([authentication], ?GLOBAL, AuthenticatorID, Position); -move(post, #{bindings := #{id := _}, body := _}) -> +authenticator_move(post, #{bindings := #{id := _}, body := _}) -> serialize_error({missing_parameter, position}). -move2(post, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}, body := #{<<"position">> := Position}}) -> - case find_listener(ListenerID) of - {ok, {Type, Name}} -> - move_authenitcator([listeners, Type, Name, authentication], ListenerID, AuthenticatorID, Position); - {error, Reason} -> - serialize_error(Reason) - end; -move2(post, #{bindings := #{listener_id := _, id := _}, body := _}) -> +listener_authenticator_move(post, + #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}, + body := #{<<"position">> := Position}}) -> + with_listener(ListenerID, + fun(Type, Name, ChainName) -> + move_authenitcator([listeners, Type, Name, authentication], + ChainName, + AuthenticatorID, + Position) + end); +listener_authenticator_move(post, #{bindings := #{listener_id := _, id := _}, body := _}) -> serialize_error({missing_parameter, position}). -import_users(post, #{bindings := #{id := AuthenticatorID}, body := #{<<"filename">> := Filename}}) -> - case ?AUTHN:import_users(?GLOBAL, AuthenticatorID, Filename) of +authenticator_import_users(post, + #{bindings := #{id := AuthenticatorID}, + body := #{<<"filename">> := Filename}}) -> + case emqx_authentication:import_users(?GLOBAL, AuthenticatorID, Filename) of ok -> {204}; {error, Reason} -> serialize_error(Reason) end; -import_users(post, #{bindings := #{id := _}, body := _}) -> +authenticator_import_users(post, #{bindings := #{id := _}, body := _}) -> serialize_error({missing_parameter, filename}). -import_users2(post, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}, body := #{<<"filename">> := Filename}}) -> - case ?AUTHN:import_users(ListenerID, AuthenticatorID, Filename) of - ok -> {204}; - {error, Reason} -> serialize_error(Reason) - end; -import_users2(post, #{bindings := #{listener_id := _, id := _}, body := _}) -> +listener_authenticator_import_users( + post, + #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}, + body := #{<<"filename">> := Filename}}) -> + with_chain( + ListenerID, + fun(ChainName) -> + case emqx_authentication:import_users(ChainName, AuthenticatorID, Filename) of + ok -> {204}; + {error, Reason} -> serialize_error(Reason) + end + end); +listener_authenticator_import_users(post, #{bindings := #{listener_id := _, id := _}, body := _}) -> serialize_error({missing_parameter, filename}). -users(post, #{bindings := #{id := AuthenticatorID}, body := UserInfo}) -> +authenticator_users(post, #{bindings := #{id := AuthenticatorID}, body := UserInfo}) -> add_user(?GLOBAL, AuthenticatorID, UserInfo); -users(get, #{bindings := #{id := AuthenticatorID}, query_string := PageParams}) -> +authenticator_users(get, #{bindings := #{id := AuthenticatorID}, query_string := PageParams}) -> list_users(?GLOBAL, AuthenticatorID, PageParams). 
-users2(put, #{bindings := #{id := AuthenticatorID, +authenticator_user(put, #{bindings := #{id := AuthenticatorID, user_id := UserID}, body := UserInfo}) -> update_user(?GLOBAL, AuthenticatorID, UserID, UserInfo); -users2(get, #{bindings := #{id := AuthenticatorID, user_id := UserID}}) -> +authenticator_user(get, #{bindings := #{id := AuthenticatorID, user_id := UserID}}) -> find_user(?GLOBAL, AuthenticatorID, UserID); -users2(delete, #{bindings := #{id := AuthenticatorID, user_id := UserID}}) -> +authenticator_user(delete, #{bindings := #{id := AuthenticatorID, user_id := UserID}}) -> delete_user(?GLOBAL, AuthenticatorID, UserID). -users3(post, #{bindings := #{listener_id := ListenerID, +listener_authenticator_users(post, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID}, body := UserInfo}) -> - add_user(ListenerID, AuthenticatorID, UserInfo); -users3(get, #{bindings := #{listener_id := ListenerID, - id := AuthenticatorID}, - query_string := PageParams}) -> - list_users(ListenerID, AuthenticatorID, PageParams). + with_chain(ListenerID, + fun(ChainName) -> + add_user(ChainName, AuthenticatorID, UserInfo) + end); +listener_authenticator_users(get, #{bindings := #{listener_id := ListenerID, + id := AuthenticatorID}, query_string := PageParams}) -> + with_chain(ListenerID, + fun(ChainName) -> + list_users(ChainName, AuthenticatorID, PageParams) + end). -users4(put, #{bindings := #{listener_id := ListenerID, +listener_authenticator_user(put, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID, user_id := UserID}, body := UserInfo}) -> - update_user(ListenerID, AuthenticatorID, UserID, UserInfo); -users4(get, #{bindings := #{listener_id := ListenerID, + with_chain(ListenerID, + fun(ChainName) -> + update_user(ChainName, AuthenticatorID, UserID, UserInfo) + end); +listener_authenticator_user(get, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID, user_id := UserID}}) -> - find_user(ListenerID, AuthenticatorID, UserID); -users4(delete, #{bindings := #{listener_id := ListenerID, + with_chain(ListenerID, + fun(ChainName) -> + find_user(ChainName, AuthenticatorID, UserID) + end); +listener_authenticator_user(delete, #{bindings := #{listener_id := ListenerID, id := AuthenticatorID, user_id := UserID}}) -> - delete_user(ListenerID, AuthenticatorID, UserID). + with_chain(ListenerID, + fun(ChainName) -> + delete_user(ChainName, AuthenticatorID, UserID) + end). %%------------------------------------------------------------------------------ %% Internal functions %%------------------------------------------------------------------------------ +with_listener(ListenerID, Fun) -> + case find_listener(ListenerID) of + {ok, {BType, BName}} -> + Type = binary_to_existing_atom(BType), + Name = binary_to_existing_atom(BName), + ChainName = binary_to_atom(ListenerID), + Fun(Type, Name, ChainName); + {error, Reason} -> + serialize_error(Reason) + end. + find_listener(ListenerID) -> - {Type, Name} = emqx_listeners:parse_listener_id(ListenerID), - case emqx_config:find([listeners, Type, Name]) of - {not_found, _, _} -> - {error, {not_found, {listener, ListenerID}}}; - {ok, _} -> - {ok, {Type, Name}} + case binary:split(ListenerID, <<":">>) of + [BType, BName] -> + case emqx_config:find([listeners, BType, BName]) of + {ok, _} -> + {ok, {BType, BName}}; + {not_found, _, _} -> + {error, {not_found, {listener, ListenerID}}} + end; + _ -> + {error, {not_found, {listener, ListenerID}}} + end. 
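%% Illustration of the listener-id parsing used by find_listener/1 above,
%% assuming the usual <type>:<name> id form (e.g. <<"tcp:default">>):
%%
%%   1> binary:split(<<"tcp:default">>, <<":">>).
%%   [<<"tcp">>,<<"default">>]
%%
%% An id without a ':' separator yields a single-element list and falls
%% through to the {error, {not_found, {listener, ListenerID}}} clause.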
+ +with_chain(ListenerID, Fun) -> + {ok, ChainNames} = emqx_authentication:list_chain_names(), + ListenerChainName = + [ Name || Name <- ChainNames, atom_to_binary(Name) =:= ListenerID ], + case ListenerChainName of + [ChainName] -> + Fun(ChainName); + _ -> + serialize_error({not_found, {chain, ListenerID}}) end. create_authenticator(ConfKeyPath, ChainName, Config) -> - case update_config(ConfKeyPath, {create_authenticator, to_atom(ChainName), Config}) of - {ok, #{post_config_update := #{?AUTHN := #{id := ID}}, + case update_config(ConfKeyPath, {create_authenticator, ChainName, Config}) of + {ok, #{post_config_update := #{emqx_authentication := #{id := ID}}, raw_config := AuthenticatorsConfig}} -> {ok, AuthenticatorConfig} = find_config(ID, AuthenticatorsConfig), {200, maps:put(id, ID, convert_certs(fill_defaults(AuthenticatorConfig)))}; @@ -1889,7 +731,10 @@ create_authenticator(ConfKeyPath, ChainName, Config) -> list_authenticators(ConfKeyPath) -> AuthenticatorsConfig = get_raw_config_with_defaults(ConfKeyPath), - NAuthenticators = [maps:put(id, ?AUTHN:authenticator_id(AuthenticatorConfig), convert_certs(AuthenticatorConfig)) + NAuthenticators = [ maps:put( + id, + emqx_authentication:authenticator_id(AuthenticatorConfig), + convert_certs(AuthenticatorConfig)) || AuthenticatorConfig <- AuthenticatorsConfig], {200, NAuthenticators}. @@ -1903,8 +748,8 @@ list_authenticator(ConfKeyPath, AuthenticatorID) -> end. update_authenticator(ConfKeyPath, ChainName, AuthenticatorID, Config) -> - case update_config(ConfKeyPath, {update_authenticator, to_atom(ChainName), AuthenticatorID, Config}) of - {ok, #{post_config_update := #{?AUTHN := #{id := ID}}, + case update_config(ConfKeyPath, {update_authenticator, ChainName, AuthenticatorID, Config}) of + {ok, #{post_config_update := #{emqx_authentication := #{id := ID}}, raw_config := AuthenticatorsConfig}} -> {ok, AuthenticatorConfig} = find_config(ID, AuthenticatorsConfig), {200, maps:put(id, ID, convert_certs(fill_defaults(AuthenticatorConfig)))}; @@ -1912,8 +757,7 @@ update_authenticator(ConfKeyPath, ChainName, AuthenticatorID, Config) -> serialize_error(Reason) end. -delete_authenticator(ConfKeyPath, ChainName0, AuthenticatorID) -> - ChainName = to_atom(ChainName0), +delete_authenticator(ConfKeyPath, ChainName, AuthenticatorID) -> case update_config(ConfKeyPath, {delete_authenticator, ChainName, AuthenticatorID}) of {ok, _} -> {204}; @@ -1921,11 +765,12 @@ delete_authenticator(ConfKeyPath, ChainName0, AuthenticatorID) -> serialize_error(Reason) end. -move_authenitcator(ConfKeyPath, ChainName0, AuthenticatorID, Position) -> - ChainName = to_atom(ChainName0), +move_authenitcator(ConfKeyPath, ChainName, AuthenticatorID, Position) -> case parse_position(Position) of {ok, NPosition} -> - case update_config(ConfKeyPath, {move_authenticator, ChainName, AuthenticatorID, NPosition}) of + case update_config( + ConfKeyPath, + {move_authenticator, ChainName, AuthenticatorID, NPosition}) of {ok, _} -> {204}; {error, {_, _, Reason}} -> @@ -1935,57 +780,55 @@ move_authenitcator(ConfKeyPath, ChainName0, AuthenticatorID, Position) -> serialize_error(Reason) end. 
-add_user(ChainName0, AuthenticatorID, #{<<"user_id">> := UserID, <<"password">> := Password} = UserInfo) -> - ChainName = to_atom(ChainName0), +add_user(ChainName, + AuthenticatorID, + #{<<"user_id">> := UserID, <<"password">> := Password} = UserInfo) -> IsSuperuser = maps:get(<<"is_superuser">>, UserInfo, false), - case ?AUTHN:add_user(ChainName, AuthenticatorID, #{ user_id => UserID + case emqx_authentication:add_user(ChainName, AuthenticatorID, #{ user_id => UserID , password => Password , is_superuser => IsSuperuser}) of {ok, User} -> {201, User}; {error, Reason} -> - serialize_error(Reason) + serialize_error({user_error, Reason}) end; add_user(_, _, #{<<"user_id">> := _}) -> serialize_error({missing_parameter, password}); add_user(_, _, _) -> serialize_error({missing_parameter, user_id}). -update_user(ChainName0, AuthenticatorID, UserID, UserInfo) -> - ChainName = to_atom(ChainName0), - case maps:with([<<"password">>, <<"is_superuser">>], UserInfo) =:= #{} of +update_user(ChainName, AuthenticatorID, UserID, UserInfo0) -> + case maps:with([<<"password">>, <<"is_superuser">>], UserInfo0) =:= #{} of true -> serialize_error({missing_parameter, password}); false -> - case ?AUTHN:update_user(ChainName, AuthenticatorID, UserID, UserInfo) of + UserInfo = emqx_map_lib:safe_atom_key_map(UserInfo0), + case emqx_authentication:update_user(ChainName, AuthenticatorID, UserID, UserInfo) of {ok, User} -> {200, User}; {error, Reason} -> - serialize_error(Reason) + serialize_error({user_error, Reason}) end end. -find_user(ChainName0, AuthenticatorID, UserID) -> - ChainName = to_atom(ChainName0), - case ?AUTHN:lookup_user(ChainName, AuthenticatorID, UserID) of +find_user(ChainName, AuthenticatorID, UserID) -> + case emqx_authentication:lookup_user(ChainName, AuthenticatorID, UserID) of {ok, User} -> {200, User}; {error, Reason} -> - serialize_error(Reason) + serialize_error({user_error, Reason}) end. -delete_user(ChainName0, AuthenticatorID, UserID) -> - ChainName = to_atom(ChainName0), - case ?AUTHN:delete_user(ChainName, AuthenticatorID, UserID) of +delete_user(ChainName, AuthenticatorID, UserID) -> + case emqx_authentication:delete_user(ChainName, AuthenticatorID, UserID) of ok -> {204}; {error, Reason} -> - serialize_error(Reason) + serialize_error({user_error, Reason}) end. -list_users(ChainName0, AuthenticatorID, PageParams) -> - ChainName = to_atom(ChainName0), - case ?AUTHN:list_users(ChainName, AuthenticatorID, PageParams) of +list_users(ChainName, AuthenticatorID, PageParams) -> + case emqx_authentication:list_users(ChainName, AuthenticatorID, PageParams) of {ok, Users} -> {200, Users}; {error, Reason} -> @@ -2001,7 +844,11 @@ get_raw_config_with_defaults(ConfKeyPath) -> ensure_list(fill_defaults(RawConfig)). find_config(AuthenticatorID, AuthenticatorsConfig) -> - case [AC || AC <- ensure_list(AuthenticatorsConfig), AuthenticatorID =:= ?AUTHN:authenticator_id(AC)] of + MatchingACs + = [AC + || AC <- ensure_list(AuthenticatorsConfig), + AuthenticatorID =:= emqx_authentication:authenticator_id(AC)], + case MatchingACs of [] -> {error, {not_found, {authenticator, AuthenticatorID}}}; [AuthenticatorConfig] -> {ok, AuthenticatorConfig} end. @@ -2011,7 +858,7 @@ fill_defaults(Configs) when is_list(Configs) -> fill_defaults(Config) -> emqx_authn:check_config(Config, #{only_fill_defaults => true}). 
-convert_certs(#{<<"ssl">> := SSLOpts} = Config) -> +convert_certs(#{ssl := SSLOpts} = Config) -> NSSLOpts = lists:foldl(fun(K, Acc) -> case maps:get(K, Acc, undefined) of undefined -> Acc; @@ -2019,11 +866,20 @@ convert_certs(#{<<"ssl">> := SSLOpts} = Config) -> {ok, Bin} = file:read_file(Filename), Acc#{K => Bin} end - end, SSLOpts, [<<"certfile">>, <<"keyfile">>, <<"cacertfile">>]), - Config#{<<"ssl">> => NSSLOpts}; + end, SSLOpts, [certfile, keyfile, cacertfile]), + Config#{ssl => NSSLOpts}; convert_certs(Config) -> Config. +serialize_error({user_error, not_found}) -> + {404, #{code => <<"NOT_FOUND">>, + message => binfmt("User not found", [])}}; +serialize_error({user_error, already_exist}) -> + {409, #{code => <<"BAD_REQUEST">>, + message => binfmt("User already exists", [])}}; +serialize_error({user_error, Reason}) -> + {400, #{code => <<"BAD_REQUEST">>, + message => binfmt("User error: ~p", [Reason])}}; serialize_error({not_found, {authenticator, ID}}) -> {404, #{code => <<"NOT_FOUND">>, message => binfmt("Authenticator '~ts' does not exist", [ID]) }}; @@ -2035,7 +891,7 @@ serialize_error({not_found, {chain, ?GLOBAL}}) -> message => <<"Authenticator not found in the 'global' scope">>}}; serialize_error({not_found, {chain, Name}}) -> {400, #{code => <<"BAD_REQUEST">>, - message => binfmt("No authentication has been create for listener '~ts'", [Name])}}; + message => binfmt("No authentication has been created for listener ~p", [Name])}}; serialize_error({already_exists, {authenticator, ID}}) -> {409, #{code => <<"ALREADY_EXISTS">>, message => binfmt("Authenticator '~ts' already exist", [ID])}}; @@ -2053,7 +909,7 @@ serialize_error({bad_ssl_config, Details}) -> message => binfmt("bad_ssl_config ~p", [Details])}}; serialize_error({missing_parameter, Detail}) -> {400, #{code => <<"MISSING_PARAMETER">>, - message => binfmt("Missing required parameter", [Detail])}}; + message => binfmt("Missing required parameter: ~p", [Detail])}}; serialize_error({invalid_parameter, Name}) -> {400, #{code => <<"INVALID_PARAMETER">>, message => binfmt("Invalid value for '~p'", [Name])}}; @@ -2079,9 +935,201 @@ parse_position(_) -> ensure_list(M) when is_map(M) -> [M]; ensure_list(L) when is_list(L) -> L. -to_atom(B) when is_binary(B) -> - binary_to_atom(B); -to_atom(A) when is_atom(A) -> - A. - binfmt(Fmt, Args) -> iolist_to_binary(io_lib:format(Fmt, Args)). + +paginated_list_type(Type) -> + [ + {data, hoconsc:array(Type)}, + {meta, ref(pagination_meta)} + ]. + +authenticator_array_example() -> + [Config || #{value := Config} <- maps:values(authenticator_examples())]. 
+ +authenticator_examples() -> + #{ + 'password-based:built-in-database' => #{ + summary => <<"Built-in password-based authentication">>, + value => #{ + mechanism => <<"password-based">>, + backend => <<"built-in-database">>, + user_id_type => <<"username">>, + password_hash_algorithm => #{ + name => <<"sha256">> + } + } + }, + 'password-based:http' => #{ + summary => <<"Password-based authentication through external HTTP API">>, + value => #{ + mechanism => <<"password-based">>, + backend => <<"http">>, + method => <<"post">>, + url => <<"http://127.0.0.2:8080">>, + headers => #{ + <<"content-type">> => <<"application/json">> + }, + body => #{ + <<"username">> => ?PH_USERNAME, + <<"password">> => ?PH_PASSWORD + }, + pool_size => 8, + connect_timeout => 5000, + request_timeout => 5000, + enable_pipelining => true, + ssl => #{enable => false} + } + }, + 'jwt' => #{ + summary => <<"JWT authentication">>, + value => #{ + mechanism => <<"jwt">>, + use_jwks => false, + algorithm => <<"hmac-based">>, + secret => <<"mysecret">>, + secret_base64_encoded => false, + verify_claims => #{ + <<"username">> => ?PH_USERNAME + } + } + }, + 'password-based:mongodb' => #{ + summary => <<"Password-based authentication with MongoDB backend">>, + value => #{ + mechanism => <<"password-based">>, + backend => <<"mongodb">>, + server => <<"127.0.0.1:27017">>, + database => example, + collection => users, + selector => #{ + username => ?PH_USERNAME + }, + password_hash_field => <<"password_hash">>, + salt_field => <<"salt">>, + is_superuser_field => <<"is_superuser">>, + password_hash_algorithm => <<"sha256">>, + salt_position => <<"prefix">> + } + }, + 'password-based:redis' => #{ + summary => <<"Password-based authentication with Redis backend">>, + value => #{ + mechanism => <<"password-based">>, + backend => <<"redis">>, + server => <<"127.0.0.1:6379">>, + database => 0, + query => <<"HMGET ${username} password_hash salt">>, + password_hash_algorithm => <<"sha256">>, + salt_position => <<"prefix">> + } + } + }. + +request_user_create_examples() -> + #{ + regular_user => #{ + summary => <<"Regular user">>, + value => #{ + user_id => <<"user1">>, + password => <<"secret">> + } + }, + super_user => #{ + summary => <<"Superuser">>, + value => #{ + user_id => <<"user2">>, + password => <<"secret">>, + is_superuser => true + } + } + }. + +request_user_update_examples() -> + #{ + regular_user => #{ + summary => <<"Update regular user">>, + value => #{ + password => <<"newsecret">> + } + }, + super_user => #{ + summary => <<"Update user and promote to superuser">>, + value => #{ + password => <<"newsecret">>, + is_superuser => true + } + } + }. + +request_move_examples() -> + #{ + move_to_top => #{ + summary => <<"Move authenticator to the beginning of the chain">>, + value => #{ + position => <<"top">> + } + }, + move_to_bottom => #{ + summary => <<"Move authenticator to the end of the chain">>, + value => #{ + position => <<"bottom">> + } + }, + 'move_before_password-based:built-in-database' => #{ + summary => <<"Move authenticator to the position preceding some other authenticator">>, + value => #{ + position => <<"before:password-based:built-in-database">> + } + } + }. + +request_import_users_examples() -> + #{ + import_csv => #{ + summary => <<"Import users from CSV file">>, + value => #{ + filename => <<"/path/to/user/data.csv">> + } + }, + import_json => #{ + summary => <<"Import users from JSON file">>, + value => #{ + filename => <<"/path/to/user/data.json">> + } + } + }. 
+ +response_user_examples() -> + #{ + regular_user => #{ + summary => <<"Regular user">>, + value => #{ + user_id => <<"user1">> + } + }, + super_user => #{ + summary => <<"Superuser">>, + value => #{ + user_id => <<"user2">>, + is_superuser => true + } + } + }. + +response_users_example() -> + #{ + data => [ + #{ + user_id => <<"user1">> + }, + #{ + user_id => <<"user2">>, + is_superuser => true + } + ], + meta => #{ + page => 0, + limit => 20, + count => 300 + } + }. diff --git a/apps/emqx_authn/src/emqx_authn_app.erl b/apps/emqx_authn/src/emqx_authn_app.erl index f6e02a665..df7fbecd3 100644 --- a/apps/emqx_authn/src/emqx_authn_app.erl +++ b/apps/emqx_authn/src/emqx_authn_app.erl @@ -34,12 +34,11 @@ start(_StartType, _StartArgs) -> ok = mria_rlog:wait_for_shards([?AUTH_SHARD], infinity), {ok, Sup} = emqx_authn_sup:start_link(), - ok = ?AUTHN:register_providers(emqx_authn:providers()), ok = initialize(), {ok, Sup}. stop(_State) -> - ok = ?AUTHN:deregister_providers(provider_types()), + ok = deinitialize(), ok. %%------------------------------------------------------------------------------ @@ -47,12 +46,38 @@ stop(_State) -> %%------------------------------------------------------------------------------ initialize() -> - RawConfigs = emqx:get_raw_config([authentication], []), - Config = emqx_authn:check_configs(RawConfigs), - ?AUTHN:initialize_authentication(?GLOBAL, Config), - lists:foreach(fun({ListenerID, ListenerConfig}) -> - ?AUTHN:initialize_authentication(ListenerID, maps:get(authentication, ListenerConfig, [])) - end, emqx_listeners:list()). + ok = ?AUTHN:register_providers(emqx_authn:providers()), + + lists:foreach( + fun({ChainName, RawAuthConfigs}) -> + AuthConfig = emqx_authn:check_configs(RawAuthConfigs), + ?AUTHN:initialize_authentication( + ChainName, + AuthConfig) + end, + chain_configs()). + +deinitialize() -> + ok = ?AUTHN:deregister_providers(provider_types()), + ok = emqx_authn_utils:cleanup_resources(). + +chain_configs() -> + [global_chain_config() | listener_chain_configs()]. + +global_chain_config() -> + {?GLOBAL, emqx:get_raw_config([<<"authentication">>], [])}. + +listener_chain_configs() -> + lists:map( + fun({ListenerID, _}) -> + {ListenerID, emqx:get_raw_config(auth_config_path(ListenerID), [])} + end, + emqx_listeners:list()). + +auth_config_path(ListenerID) -> + [<<"listeners">>] + ++ binary:split(atom_to_binary(ListenerID), <<":">>) + ++ [<<"authentication">>]. provider_types() -> lists:map(fun({Type, _Module}) -> Type end, emqx_authn:providers()). diff --git a/apps/emqx_authn/src/emqx_authn_schema.erl b/apps/emqx_authn/src/emqx_authn_schema.erl index b36e88ebf..22f62f519 100644 --- a/apps/emqx_authn/src/emqx_authn_schema.erl +++ b/apps/emqx_authn/src/emqx_authn_schema.erl @@ -21,12 +21,11 @@ -export([ common_fields/0 , roots/0 , fields/1 + , authenticator_type/0 ]). %% only for doc generation -roots() -> [{authenticator_config, - #{type => hoconsc:union(config_refs([Module || {_AuthnType, Module} <- emqx_authn:providers()])) - }}]. +roots() -> [{authenticator_config, hoconsc:mk(authenticator_type())}]. fields(_) -> []. @@ -38,5 +37,8 @@ enable(type) -> boolean(); enable(default) -> true; enable(_) -> undefined. +authenticator_type() -> + hoconsc:union(config_refs([Module || {_AuthnType, Module} <- emqx_authn:providers()])). + config_refs(Modules) -> lists:append([Module:refs() || Module <- Modules]). 
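As a reading aid (not part of the patch), here is a minimal sketch of what the auth_config_path/1 helper added to emqx_authn_app above is expected to produce; the listener ID 'tcp:default' is only an assumed example:

%% Illustrative only: binary:split/2 breaks the listener ID into its type and
%% name segments, so the raw-config path points at that listener's
%% authentication entry.
[<<"listeners">>, <<"tcp">>, <<"default">>, <<"authentication">>] =
    auth_config_path('tcp:default').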
diff --git a/apps/emqx_authn/src/emqx_authn_utils.erl b/apps/emqx_authn/src/emqx_authn_utils.erl index 4784c91c7..56f485afc 100644 --- a/apps/emqx_authn/src/emqx_authn_utils.erl +++ b/apps/emqx_authn/src/emqx_authn_utils.erl @@ -16,6 +16,8 @@ -module(emqx_authn_utils). +-include_lib("emqx/include/emqx_placeholder.hrl"). + -export([ replace_placeholders/2 , replace_placeholder/2 , check_password/3 @@ -23,8 +25,13 @@ , hash/4 , gen_salt/0 , bin/1 + , ensure_apps_started/1 + , cleanup_resources/0 + , make_resource_id/1 ]). +-define(RESOURCE_GROUP, <<"emqx_authn">>). + %%------------------------------------------------------------------------------ %% APIs %%------------------------------------------------------------------------------ @@ -42,17 +49,17 @@ replace_placeholders([Placeholder | More], Credential, Acc) -> replace_placeholders(More, Credential, [convert_to_sql_param(V) | Acc]) end. -replace_placeholder(<<"${mqtt-username}">>, Credential) -> +replace_placeholder(?PH_USERNAME, Credential) -> maps:get(username, Credential, undefined); -replace_placeholder(<<"${mqtt-clientid}">>, Credential) -> +replace_placeholder(?PH_CLIENTID, Credential) -> maps:get(clientid, Credential, undefined); -replace_placeholder(<<"${mqtt-password}">>, Credential) -> +replace_placeholder(?PH_PASSWORD, Credential) -> maps:get(password, Credential, undefined); -replace_placeholder(<<"${ip-address}">>, Credential) -> +replace_placeholder(?PH_PEERHOST, Credential) -> maps:get(peerhost, Credential, undefined); -replace_placeholder(<<"${cert-subject}">>, Credential) -> +replace_placeholder(?PH_CERT_SUBJECT, Credential) -> maps:get(dn, Credential, undefined); -replace_placeholder(<<"${cert-common-name}">>, Credential) -> +replace_placeholder(?PH_CERT_CN_NAME, Credential) -> maps:get(cn, Credential, undefined); replace_placeholder(Constant, _) -> Constant. @@ -62,22 +69,42 @@ check_password(undefined, _Selected, _State) -> check_password(Password, #{<<"password_hash">> := Hash}, #{password_hash_algorithm := bcrypt}) -> - case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of - true -> ok; - false -> {error, bad_username_or_password} + case emqx_passwd:hash(bcrypt, {Hash, Password}) of + Hash -> ok; + _ -> + {error, bad_username_or_password} end; check_password(Password, #{<<"password_hash">> := Hash} = Selected, #{password_hash_algorithm := Algorithm, salt_position := SaltPosition}) -> Salt = maps:get(<<"salt">>, Selected, <<>>), - case Hash =:= hash(Algorithm, Password, Salt, SaltPosition) of - true -> ok; - false -> {error, bad_username_or_password} + case hash(Algorithm, Password, Salt, SaltPosition) of + Hash -> ok; + _ -> + {error, bad_username_or_password} end. -is_superuser(Selected) -> - #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}. +is_superuser(#{<<"is_superuser">> := <<"">>}) -> + #{is_superuser => false}; +is_superuser(#{<<"is_superuser">> := <<"0">>}) -> + #{is_superuser => false}; +is_superuser(#{<<"is_superuser">> := 0}) -> + #{is_superuser => false}; +is_superuser(#{<<"is_superuser">> := null}) -> + #{is_superuser => false}; +is_superuser(#{<<"is_superuser">> := false}) -> + #{is_superuser => false}; +is_superuser(#{<<"is_superuser">> := _}) -> + #{is_superuser => true}; +is_superuser(#{}) -> + #{is_superuser => false}. + +ensure_apps_started(bcrypt) -> + {ok, _} = application:ensure_all_started(bcrypt), + ok; +ensure_apps_started(_) -> + ok. 
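A short sketch (illustrative only, not part of the patch) of the values the new is_superuser/1 clauses above are expected to normalize:

%% Expected results of emqx_authn_utils:is_superuser/1, per the clauses above:
#{is_superuser := false} = is_superuser(#{<<"is_superuser">> => <<"0">>}),  %% "falsy" backend values
#{is_superuser := false} = is_superuser(#{<<"is_superuser">> => null}),     %% SQL NULL
#{is_superuser := true}  = is_superuser(#{<<"is_superuser">> => <<"1">>}),  %% any other bound value
#{is_superuser := false} = is_superuser(#{}).                               %% field absent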
hash(Algorithm, Password, Salt, prefix) -> emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>); @@ -92,6 +119,15 @@ bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(L) when is_list(L) -> list_to_binary(L); bin(X) -> X. +cleanup_resources() -> + lists:foreach( + fun emqx_resource:remove_local/1, + emqx_resource:list_group_instances(?RESOURCE_GROUP)). + +make_resource_id(Name) -> + NameBin = bin(Name), + emqx_resource:generate_id(?RESOURCE_GROUP, NameBin). + %%------------------------------------------------------------------------------ %% Internal functions %%------------------------------------------------------------------------------ diff --git a/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl b/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl index 57ce97748..5a477c7e0 100644 --- a/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl +++ b/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl @@ -17,6 +17,7 @@ -module(emqx_enhanced_authn_scram_mnesia). -include("emqx_authn.hrl"). +-include_lib("stdlib/include/ms_transform.hrl"). -include_lib("typerefl/include/types.hrl"). -behaviour(hocon_schema). @@ -28,7 +29,7 @@ ]). -export([ refs/0 - , create/1 + , create/2 , update/2 , authenticate/2 , destroy/1 @@ -46,6 +47,8 @@ -define(TAB, ?MODULE). -define(FORMAT_FUN, {?MODULE, format_user_info}). +-type(user_group() :: binary()). + -export([mnesia/1]). -boot_mnesia({mnesia, [boot]}). @@ -58,6 +61,8 @@ , is_superuser }). +-reflect_type([user_group/0]). + %%------------------------------------------------------------------------------ %% Mnesia bootstrap %%------------------------------------------------------------------------------ @@ -102,17 +107,17 @@ iteration_count(_) -> undefined. refs() -> [hoconsc:ref(?MODULE, config)]. -create(#{ algorithm := Algorithm - , iteration_count := IterationCount - , '_unique' := Unique - }) -> - State = #{user_group => Unique, +create(AuthenticatorID, + #{algorithm := Algorithm, + iteration_count := IterationCount}) -> + State = #{user_group => AuthenticatorID, algorithm => Algorithm, iteration_count => IterationCount}, {ok, State}. -update(Config, #{user_group := Unique}) -> - create(Config#{'_unique' => Unique}). + +update(Config, #{user_group := ID}) -> + create(ID, Config). authenticate(#{auth_method := AuthMethod, auth_data := AuthData, @@ -132,9 +137,12 @@ authenticate(_Credential, _State) -> ignore. destroy(#{user_group := UserGroup}) -> + MatchSpec = ets:fun2ms( + fun(#user_info{user_id = {Group, _}} = User) when Group =:= UserGroup -> + User + end), trans( fun() -> - MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_', '_'}, [], ['$_']}], ok = lists:foreach(fun(UserInfo) -> mnesia:delete_object(?TAB, UserInfo, write) end, mnesia:select(?TAB, MatchSpec, write)) diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl index ceb4b30a8..c50b9cef1 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl @@ -30,7 +30,7 @@ ]). -export([ refs/0 - , create/1 + , create/2 , update/2 , authenticate/2 , destroy/1 @@ -113,24 +113,25 @@ refs() -> , hoconsc:ref(?MODULE, post) ]. -create(#{ method := Method - , url := URL - , headers := Headers - , body := Body - , request_timeout := RequestTimeout - , '_unique' := Unique - } = Config) -> +create(_AuthenticatorID, Config) -> + create(Config). 
+ +create(#{method := Method, + url := URL, + headers := Headers, + body := Body, + request_timeout := RequestTimeout} = Config) -> #{path := Path, query := Query} = URIMap = parse_url(URL), - State = #{ method => Method - , path => Path - , base_query => cow_qs:parse_qs(list_to_binary(Query)) - , headers => maps:to_list(Headers) - , body => maps:to_list(Body) - , request_timeout => RequestTimeout - , '_unique' => Unique - }, - case emqx_resource:create_local(Unique, + ResourceId = emqx_authn_utils:make_resource_id(?MODULE), + State = #{method => Method, + path => Path, + base_query => cow_qs:parse_qs(list_to_binary(Query)), + headers => maps:to_list(Headers), + body => maps:to_list(Body), + request_timeout => RequestTimeout, + resource_id => ResourceId}, + case emqx_resource:create_local(ResourceId, emqx_connector_http, Config#{base_url => maps:remove(query, URIMap), pool_type => random}) of @@ -153,11 +154,11 @@ update(Config, State) -> authenticate(#{auth_method := _}, _) -> ignore; -authenticate(Credential, #{'_unique' := Unique, +authenticate(Credential, #{resource_id := ResourceId, method := Method, request_timeout := RequestTimeout} = State) -> Request = generate_request(Credential, State), - case emqx_resource:query(Unique, {Method, Request, RequestTimeout}) of + case emqx_resource:query(ResourceId, {Method, Request, RequestTimeout}) of {ok, 204, _Headers} -> {ok, #{is_superuser => false}}; {ok, 200, Headers, Body} -> ContentType = proplists:get_value(<<"content-type">>, Headers, <<"application/json">>), @@ -165,19 +166,35 @@ authenticate(Credential, #{'_unique' := Unique, {ok, NBody} -> %% TODO: Return by user property {ok, #{is_superuser => maps:get(<<"is_superuser">>, NBody, false), - user_property => NBody}}; + user_property => maps:remove(<<"is_superuser">>, NBody)}}; {error, _Reason} -> {ok, #{is_superuser => false}} end; {error, Reason} -> ?SLOG(error, #{msg => "http_server_query_failed", - resource => Unique, + resource => ResourceId, reason => Reason}), - ignore + ignore; + Other -> + Output = may_append_body(#{resource => ResourceId}, Other), + case erlang:element(2, Other) of + Code5xx when Code5xx >= 500 andalso Code5xx < 600 -> + ?SLOG(error, Output#{msg => "http_server_error", + code => Code5xx}), + ignore; + Code4xx when Code4xx >= 400 andalso Code4xx < 500 -> + ?SLOG(warning, Output#{msg => "refused_by_http_server", + code => Code4xx}), + {error, not_authorized}; + OtherCode -> + ?SLOG(error, Output#{msg => "undesired_response_code", + code => OtherCode}), + ignore + end end. -destroy(#{'_unique' := Unique}) -> - _ = emqx_resource:remove_local(Unique), +destroy(#{resource_id := ResourceId}) -> + _ = emqx_resource:remove_local(ResourceId), ok. %%-------------------------------------------------------------------- @@ -305,6 +322,11 @@ parse_body(<<"application/x-www-form-urlencoded">>, Body) -> parse_body(ContentType, _) -> {error, {unsupported_content_type, ContentType}}. +may_append_body(Output, {ok, _, _, Body}) -> + Output#{body => Body}; +may_append_body(Output, {ok, _, _}) -> + Output. 
+ to_list(A) when is_atom(A) -> atom_to_list(A); to_list(B) when is_binary(B) -> diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_jwks_connector.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_jwks_connector.erl index 89bc565c6..d8ceb7f40 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_jwks_connector.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_jwks_connector.erl @@ -82,10 +82,10 @@ handle_info({refresh_jwks, _TRef, refresh}, #{request_id := RequestID} = State) _ -> ok = httpc:cancel_request(RequestID), receive - {http, _} -> ok - after 0 -> - ok - end + {http, _} -> ok + after 0 -> + ok + end end, {noreply, refresh_jwks(State)}; diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_jwt.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_jwt.erl index c4e04eac3..7a24afccb 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_jwt.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_jwt.erl @@ -27,7 +27,7 @@ ]). -export([ refs/0 - , create/1 + , create/2 , update/2 , authenticate/2 , destroy/1 @@ -139,18 +139,23 @@ refs() -> , hoconsc:ref(?MODULE, 'jwks') ]. +create(_AuthenticatorID, Config) -> + create(Config). + create(#{verify_claims := VerifyClaims} = Config) -> create2(Config#{verify_claims => handle_verify_claims(VerifyClaims)}). -update(#{use_jwks := false} = Config, #{jwk := Connector}) +update(#{use_jwks := false} = Config, + #{jwk := Connector}) when is_pid(Connector) -> _ = emqx_authn_jwks_connector:stop(Connector), create(Config); -update(#{use_jwks := false} = Config, _) -> +update(#{use_jwks := false} = Config, _State) -> create(Config); -update(#{use_jwks := true} = Config, #{jwk := Connector} = State) +update(#{use_jwks := true} = Config, + #{jwk := Connector} = State) when is_pid(Connector) -> ok = emqx_authn_jwks_connector:update(Connector, Config), case maps:get(verify_cliams, Config, undefined) of @@ -160,7 +165,7 @@ update(#{use_jwks := true} = Config, #{jwk := Connector} = State) {ok, State#{verify_claims => handle_verify_claims(VerifyClaims)}} end; -update(#{use_jwks := true} = Config, _) -> +update(#{use_jwks := true} = Config, _State) -> create(Config). authenticate(#{auth_method := _}, _) -> @@ -340,7 +345,7 @@ handle_placeholder(Placeholder0) -> Placeholder0 end. -validate_placeholder(<<"mqtt-clientid">>) -> +validate_placeholder(<<"clientid">>) -> clientid; -validate_placeholder(<<"mqtt-username">>) -> +validate_placeholder(<<"username">>) -> username. diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl index 3c816985b..fd02671fb 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl @@ -17,6 +17,7 @@ -module(emqx_authn_mnesia). -include("emqx_authn.hrl"). +-include_lib("stdlib/include/ms_transform.hrl"). -include_lib("typerefl/include/types.hrl"). -behaviour(hocon_schema). @@ -28,7 +29,7 @@ ]). -export([ refs/0 - , create/1 + , create/2 , update/2 , authenticate/2 , destroy/1 @@ -45,8 +46,7 @@ -export([format_user_info/1]). -type user_id_type() :: clientid | username. - --type user_group() :: {binary(), binary()}. +-type user_group() :: binary(). -type user_id() :: binary(). -record(user_info, @@ -56,7 +56,7 @@ , is_superuser :: boolean() }). --reflect_type([ user_id_type/0 ]). +-reflect_type([user_id_type/0]). -export([mnesia/1]). @@ -123,29 +123,28 @@ salt_rounds(_) -> undefined. refs() -> [hoconsc:ref(?MODULE, config)]. 
-create(#{ user_id_type := Type - , password_hash_algorithm := #{name := bcrypt, - salt_rounds := SaltRounds} - , '_unique' := Unique - }) -> - {ok, _} = application:ensure_all_started(bcrypt), - State = #{user_group => Unique, +create(AuthenticatorID, + #{user_id_type := Type, + password_hash_algorithm := #{name := bcrypt, + salt_rounds := SaltRounds}}) -> + ok = emqx_authn_utils:ensure_apps_started(bcrypt), + State = #{user_group => AuthenticatorID, user_id_type => Type, password_hash_algorithm => bcrypt, salt_rounds => SaltRounds}, {ok, State}; -create(#{ user_id_type := Type - , password_hash_algorithm := #{name := Name} - , '_unique' := Unique - }) -> - State = #{user_group => Unique, +create(AuthenticatorID, + #{user_id_type := Type, + password_hash_algorithm := #{name := Name}}) -> + ok = emqx_authn_utils:ensure_apps_started(Name), + State = #{user_group => AuthenticatorID, user_id_type => Type, password_hash_algorithm => Name}, {ok, State}. -update(Config, #{user_group := Unique}) -> - create(Config#{'_unique' => Unique}). +update(Config, #{user_group := ID}) -> + create(ID, Config). authenticate(#{auth_method := _}, _) -> ignore; @@ -170,10 +169,14 @@ authenticate(#{password := Password} = Credential, destroy(#{user_group := UserGroup}) -> trans( - fun() -> - MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}], - ok = lists:foreach(fun delete_user2/1, mnesia:select(?TAB, MatchSpec, write)) - end). + fun() -> + ok = lists:foreach( + fun(User) -> + mnesia:delete_object(?TAB, User, write) + end, + mnesia:select(?TAB, group_match_spec(UserGroup), write)) + end). + import_users(Filename0, State) -> Filename = to_binary(Filename0), @@ -246,8 +249,7 @@ lookup_user(UserID, #{user_group := UserGroup}) -> end. list_users(PageParams, #{user_group := UserGroup}) -> - MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}], - {ok, emqx_mgmt_api:paginate(?TAB, MatchSpec, PageParams, ?FORMAT_FUN)}. + {ok, emqx_mgmt_api:paginate(?TAB, group_match_spec(UserGroup), PageParams, ?FORMAT_FUN)}. %%------------------------------------------------------------------------------ %% Internal functions @@ -374,9 +376,6 @@ insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) -> is_superuser = IsSuperuser}, mnesia:write(?TAB, UserInfo, write). -delete_user2(UserInfo) -> - mnesia:delete_object(?TAB, UserInfo, write). - %% TODO: Support other type get_user_identity(#{username := Username}, username) -> Username; @@ -401,3 +400,9 @@ to_binary(L) when is_list(L) -> format_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) -> #{user_id => UserID, is_superuser => IsSuperuser}. + +group_match_spec(UserGroup) -> + ets:fun2ms( + fun(#user_info{user_id = {Group, _}} = User) when Group =:= UserGroup -> + User + end). diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl index ce5d3d8ee..40bd0c2c9 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl @@ -29,7 +29,7 @@ ]). -export([ refs/0 - , create/1 + , create/2 , update/2 , authenticate/2 , destroy/1 @@ -102,19 +102,24 @@ refs() -> , hoconsc:ref(?MODULE, 'sharded-cluster') ]. -create(#{ selector := Selector - , '_unique' := Unique - } = Config) -> +create(_AuthenticatorID, Config) -> + create(Config). 
+ +create(#{selector := Selector} = Config) -> NSelector = parse_selector(Selector), - State = maps:with([ collection - , password_hash_field - , salt_field - , is_superuser_field - , password_hash_algorithm - , salt_position - , '_unique'], Config), - NState = State#{selector => NSelector}, - case emqx_resource:create_local(Unique, emqx_connector_mongo, Config) of + State = maps:with( + [collection, + password_hash_field, + salt_field, + is_superuser_field, + password_hash_algorithm, + salt_position], + Config), + ResourceId = emqx_authn_utils:make_resource_id(?MODULE), + NState = State#{ + selector => NSelector, + resource_id => ResourceId}, + case emqx_resource:create_local(ResourceId, emqx_connector_mongo, Config) of {ok, already_created} -> {ok, NState}; {ok, _} -> @@ -135,17 +140,16 @@ update(Config, State) -> authenticate(#{auth_method := _}, _) -> ignore; authenticate(#{password := Password} = Credential, - #{ collection := Collection - , selector := Selector0 - , '_unique' := Unique - } = State) -> + #{collection := Collection, + selector := Selector0, + resource_id := ResourceId} = State) -> Selector1 = replace_placeholders(Selector0, Credential), Selector2 = normalize_selector(Selector1), - case emqx_resource:query(Unique, {find_one, Collection, Selector2, #{}}) of + case emqx_resource:query(ResourceId, {find_one, Collection, Selector2, #{}}) of undefined -> ignore; {error, Reason} -> ?SLOG(error, #{msg => "mongodb_query_failed", - resource => Unique, + resource => ResourceId, reason => Reason}), ignore; Doc -> @@ -154,7 +158,7 @@ authenticate(#{password := Password} = Credential, {ok, #{is_superuser => is_superuser(Doc, State)}}; {error, {cannot_find_password_hash_field, PasswordHashField}} -> ?SLOG(error, #{msg => "cannot_find_password_hash_field", - resource => Unique, + resource => ResourceId, password_hash_field => PasswordHashField}), ignore; {error, Reason} -> @@ -162,8 +166,8 @@ authenticate(#{password := Password} = Credential, end end. -destroy(#{'_unique' := Unique}) -> - _ = emqx_resource:remove_local(Unique), +destroy(#{resource_id := ResourceId}) -> + _ = emqx_resource:remove_local(ResourceId), ok. %%------------------------------------------------------------------------------ @@ -205,7 +209,7 @@ check_password(Password, undefined -> {error, {cannot_find_password_hash_field, PasswordHashField}}; Hash -> - case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of + case {ok, to_list(Hash)} =:= bcrypt:hashpw(Password, Hash) of true -> ok; false -> {error, bad_username_or_password} end @@ -238,3 +242,7 @@ hash(Algorithm, Password, Salt, prefix) -> emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>); hash(Algorithm, Password, Salt, suffix) -> emqx_passwd:hash(Algorithm, <<Password/binary, Salt/binary>>). + +to_list(L) when is_list(L) -> L; +to_list(L) when is_binary(L) -> binary_to_list(L); +to_list(X) -> X. diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl index 98d515310..47ca0ae3c 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl @@ -29,7 +29,7 @@ ]). -export([ refs/0 - , create/1 + , create/2 , update/2 , authenticate/2 , destroy/1 @@ -76,20 +76,23 @@ query_timeout(_) -> undefined. refs() -> [hoconsc:ref(?MODULE, config)]. -create(#{ password_hash_algorithm := Algorithm - , salt_position := SaltPosition - , query := Query0 - , query_timeout := QueryTimeout - , '_unique' := Unique +create(_AuthenticatorID, Config) -> + create(Config). 
+ +create(#{password_hash_algorithm := Algorithm, + salt_position := SaltPosition, + query := Query0, + query_timeout := QueryTimeout } = Config) -> {Query, PlaceHolders} = parse_query(Query0), + ResourceId = emqx_authn_utils:make_resource_id(?MODULE), State = #{password_hash_algorithm => Algorithm, salt_position => SaltPosition, query => Query, placeholders => PlaceHolders, query_timeout => QueryTimeout, - '_unique' => Unique}, - case emqx_resource:create_local(Unique, emqx_connector_mysql, Config) of + resource_id => ResourceId}, + case emqx_resource:create_local(ResourceId, emqx_connector_mysql, Config) of {ok, already_created} -> {ok, State}; {ok, _} -> @@ -113,12 +116,12 @@ authenticate(#{password := Password} = Credential, #{placeholders := PlaceHolders, query := Query, query_timeout := Timeout, - '_unique' := Unique} = State) -> + resource_id := ResourceId} = State) -> Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential), - case emqx_resource:query(Unique, {sql, Query, Params, Timeout}) of + case emqx_resource:query(ResourceId, {sql, Query, Params, Timeout}) of {ok, _Columns, []} -> ignore; - {ok, Columns, Rows} -> - Selected = maps:from_list(lists:zip(Columns, Rows)), + {ok, Columns, [Row | _]} -> + Selected = maps:from_list(lists:zip(Columns, Row)), case emqx_authn_utils:check_password(Password, Selected, State) of ok -> {ok, emqx_authn_utils:is_superuser(Selected)}; @@ -127,13 +130,13 @@ authenticate(#{password := Password} = Credential, end; {error, Reason} -> ?SLOG(error, #{msg => "mysql_query_failed", - resource => Unique, + resource => ResourceId, reason => Reason}), ignore end. -destroy(#{'_unique' := Unique}) -> - _ = emqx_resource:remove_local(Unique), +destroy(#{resource_id := ResourceId}) -> + _ = emqx_resource:remove_local(ResourceId), ok. %%------------------------------------------------------------------------------ diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl index d1390697a..660acf566 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl @@ -30,12 +30,17 @@ ]). -export([ refs/0 - , create/1 + , create/2 , update/2 , authenticate/2 , destroy/1 ]). +-ifdef(TEST). +-compile(export_all). +-compile(nowarn_export_all). +-endif. + %%------------------------------------------------------------------------------ %% Hocon Schema %%------------------------------------------------------------------------------ @@ -48,7 +53,7 @@ fields(config) -> [ {mechanism, {enum, ['password-based']}} , {backend, {enum, [postgresql]}} , {password_hash_algorithm, fun password_hash_algorithm/1} - , {salt_position, {enum, [prefix, suffix]}} + , {salt_position, fun salt_position/1} , {query, fun query/1} ] ++ emqx_authn_schema:common_fields() ++ emqx_connector_schema_lib:relational_db_fields() @@ -58,6 +63,10 @@ password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt password_hash_algorithm(default) -> sha256; password_hash_algorithm(_) -> undefined. +salt_position(type) -> {enum, [prefix, suffix]}; +salt_position(default) -> prefix; +salt_position(_) -> undefined. + query(type) -> string(); query(_) -> undefined. @@ -68,18 +77,20 @@ query(_) -> undefined. refs() -> [hoconsc:ref(?MODULE, config)]. -create(#{ query := Query0 - , password_hash_algorithm := Algorithm - , salt_position := SaltPosition - , '_unique' := Unique - } = Config) -> +create(_AuthenticatorID, Config) -> + create(Config). 
+ +create(#{query := Query0, + password_hash_algorithm := Algorithm, + salt_position := SaltPosition} = Config) -> {Query, PlaceHolders} = parse_query(Query0), + ResourceId = emqx_authn_utils:make_resource_id(?MODULE), State = #{query => Query, placeholders => PlaceHolders, password_hash_algorithm => Algorithm, salt_position => SaltPosition, - '_unique' => Unique}, - case emqx_resource:create_local(Unique, emqx_connector_pgsql, Config) of + resource_id => ResourceId}, + case emqx_resource:create_local(ResourceId, emqx_connector_pgsql, Config) of {ok, already_created} -> {ok, State}; {ok, _} -> @@ -102,14 +113,13 @@ authenticate(#{auth_method := _}, _) -> authenticate(#{password := Password} = Credential, #{query := Query, placeholders := PlaceHolders, - '_unique' := Unique} = State) -> + resource_id := ResourceId} = State) -> Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential), - case emqx_resource:query(Unique, {sql, Query, Params}) of + case emqx_resource:query(ResourceId, {sql, Query, Params}) of {ok, _Columns, []} -> ignore; - {ok, Columns, Rows} -> + {ok, Columns, [Row | _]} -> NColumns = [Name || #column{name = Name} <- Columns], - NRows = [erlang:element(1, Row) || Row <- Rows], - Selected = maps:from_list(lists:zip(NColumns, NRows)), + Selected = maps:from_list(lists:zip(NColumns, erlang:tuple_to_list(Row))), case emqx_authn_utils:check_password(Password, Selected, State) of ok -> {ok, emqx_authn_utils:is_superuser(Selected)}; @@ -118,13 +128,13 @@ authenticate(#{password := Password} = Credential, end; {error, Reason} -> ?SLOG(error, #{msg => "postgresql_query_failed", - resource => Unique, + resource => ResourceId, reason => Reason}), ignore end. -destroy(#{'_unique' := Unique}) -> - _ = emqx_resource:remove_local(Unique), +destroy(#{resource_id := ResourceId}) -> + _ = emqx_resource:remove_local(ResourceId), ok. %%------------------------------------------------------------------------------ @@ -138,7 +148,7 @@ parse_query(Query) -> PlaceHolders = [PlaceHolder || [PlaceHolder] <- Captured], Replacements = ["$" ++ integer_to_list(I) || I <- lists:seq(1, length(Captured))], NQuery = lists:foldl(fun({PlaceHolder, Replacement}, Query0) -> - re:replace(Query0, PlaceHolder, Replacement, [{return, binary}]) + re:replace(Query0, "\\" ++ PlaceHolder, Replacement, [{return, binary}]) end, Query, lists:zip(PlaceHolders, Replacements)), {NQuery, PlaceHolders}; nomatch -> diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl index 3ae333d12..963536e0b 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl @@ -29,7 +29,7 @@ ]). -export([ refs/0 - , create/1 + , create/2 , update/2 , authenticate/2 , destroy/1 @@ -56,11 +56,11 @@ fields(sentinel) -> common_fields() ++ emqx_connector_redis:fields(sentinel). common_fields() -> - [ {mechanism, {enum, ['password-based']}} - , {backend, {enum, [redis]}} - , {query, fun query/1} - , {password_hash_algorithm, fun password_hash_algorithm/1} - , {salt_position, fun salt_position/1} + [{mechanism, {enum, ['password-based']}}, + {backend, {enum, [redis]}}, + {query, fun query/1}, + {password_hash_algorithm, fun password_hash_algorithm/1}, + {salt_position, fun salt_position/1} ] ++ emqx_authn_schema:common_fields(). query(type) -> string(); @@ -84,16 +84,22 @@ refs() -> , hoconsc:ref(?MODULE, sentinel) ]. 
-create(#{ query := Query - , '_unique' := Unique - } = Config) -> +create(_AuthenticatorID, Config) -> + create(Config). + +create(#{query := Query, + password_hash_algorithm := Algorithm} = Config) -> try NQuery = parse_query(Query), - State = maps:with([ password_hash_algorithm - , salt_position - , '_unique'], Config), - NState = State#{query => NQuery}, - case emqx_resource:create_local(Unique, emqx_connector_redis, Config) of + ok = emqx_authn_utils:ensure_apps_started(Algorithm), + State = maps:with( + [password_hash_algorithm, salt_position], + Config), + ResourceId = emqx_authn_utils:make_resource_id(?MODULE), + NState = State#{ + query => NQuery, + resource_id => ResourceId}, + case emqx_resource:create_local(ResourceId, emqx_connector_redis, Config) of {ok, already_created} -> {ok, NState}; {ok, _} -> @@ -102,12 +108,12 @@ create(#{ query := Query {error, Reason} end catch - error:{unsupported_query, Query} -> + error:{unsupported_query, _Query} -> {error, {unsupported_query, Query}}; error:missing_password_hash -> {error, missing_password_hash}; - error:{unsupported_field, Field} -> - {error, {unsupported_field, Field}} + error:{unsupported_fields, Fields} -> + {error, {unsupported_fields, Fields}} end. update(Config, State) -> @@ -122,11 +128,10 @@ update(Config, State) -> authenticate(#{auth_method := _}, _) -> ignore; authenticate(#{password := Password} = Credential, - #{ query := {Command, Key, Fields} - , '_unique' := Unique - } = State) -> + #{query := {Command, Key, Fields}, + resource_id := ResourceId} = State) -> NKey = binary_to_list(iolist_to_binary(replace_placeholders(Key, Credential))), - case emqx_resource:query(Unique, {cmd, [Command, NKey | Fields]}) of + case emqx_resource:query(ResourceId, {cmd, [Command, NKey | Fields]}) of {ok, Values} -> case merge(Fields, Values) of #{<<"password_hash">> := _} = Selected -> @@ -138,18 +143,18 @@ authenticate(#{password := Password} = Credential, end; _ -> ?SLOG(error, #{msg => "cannot_find_password_hash_field", - resource => Unique}), + resource => ResourceId}), ignore end; {error, Reason} -> ?SLOG(error, #{msg => "redis_query_failed", - resource => Unique, + resource => ResourceId, reason => Reason}), ignore end. -destroy(#{'_unique' := Unique}) -> - _ = emqx_resource:remove_local(Unique), +destroy(#{resource_id := ResourceId}) -> + _ = emqx_resource:remove_local(ResourceId), ok. %%------------------------------------------------------------------------------ @@ -169,20 +174,15 @@ parse_query(Query) -> end. check_fields(Fields) -> - check_fields(Fields, false). + HasPassHash = lists:member("password_hash", Fields), + KnownFields = ["password_hash", "salt", "is_superuser"], + UnknownFields = [F || F <- Fields, not lists:member(F, KnownFields)], -check_fields([], false) -> - error(missing_password_hash); -check_fields([], true) -> - ok; -check_fields(["password_hash" | More], false) -> - check_fields(More, true); -check_fields(["salt" | More], HasPassHash) -> - check_fields(More, HasPassHash); -check_fields(["is_superuser" | More], HasPassHash) -> - check_fields(More, HasPassHash); -check_fields([Field | _], _) -> - error({unsupported_field, Field}). + case {HasPassHash, UnknownFields} of + {true, []} -> ok; + {true, _} -> error({unsupported_fields, UnknownFields}); + {false, _} -> error(missing_password_hash) + end. 
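For reference, a small sketch (illustrative only, not part of the patch) of how the rewritten check_fields/1 above is expected to behave for field lists parsed from a query such as "HMGET ${username} password_hash salt":

%% Illustrative expectations for the new check_fields/1:
ok = check_fields(["password_hash", "salt"]),                %% password_hash present, all fields known
ok = check_fields(["password_hash", "is_superuser"]),
%% check_fields(["salt"])                 -> error(missing_password_hash)
%% check_fields(["password_hash", "foo"]) -> error({unsupported_fields, ["foo"]})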
parse_key(Key) -> Tokens = re:split(Key, "(" ++ ?RE_PLACEHOLDER ++ ")", [{return, binary}, group, trim]), diff --git a/apps/emqx_authn/test/data/certs/cacert.pem b/apps/emqx_authn/test/data/certs/cacert.pem new file mode 100644 index 000000000..604fd2362 --- /dev/null +++ b/apps/emqx_authn/test/data/certs/cacert.pem @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDUTCCAjmgAwIBAgIJAPPYCjTmxdt/MA0GCSqGSIb3DQEBCwUAMD8xCzAJBgNV +BAYTAkNOMREwDwYDVQQIDAhoYW5nemhvdTEMMAoGA1UECgwDRU1RMQ8wDQYDVQQD +DAZSb290Q0EwHhcNMjAwNTA4MDgwNjUyWhcNMzAwNTA2MDgwNjUyWjA/MQswCQYD +VQQGEwJDTjERMA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UE +AwwGUm9vdENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzcgVLex1 +EZ9ON64EX8v+wcSjzOZpiEOsAOuSXOEN3wb8FKUxCdsGrsJYB7a5VM/Jot25Mod2 +juS3OBMg6r85k2TWjdxUoUs+HiUB/pP/ARaaW6VntpAEokpij/przWMPgJnBF3Ur +MjtbLayH9hGmpQrI5c2vmHQ2reRZnSFbY+2b8SXZ+3lZZgz9+BaQYWdQWfaUWEHZ +uDaNiViVO0OT8DRjCuiDp3yYDj3iLWbTA/gDL6Tf5XuHuEwcOQUrd+h0hyIphO8D +tsrsHZ14j4AWYLk1CPA6pq1HIUvEl2rANx2lVUNv+nt64K/Mr3RnVQd9s8bK+TXQ +KGHd2Lv/PALYuwIDAQABo1AwTjAdBgNVHQ4EFgQUGBmW+iDzxctWAWxmhgdlE8Pj +EbQwHwYDVR0jBBgwFoAUGBmW+iDzxctWAWxmhgdlE8PjEbQwDAYDVR0TBAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAQEAGbhRUjpIred4cFAFJ7bbYD9hKu/yzWPWkMRa +ErlCKHmuYsYk+5d16JQhJaFy6MGXfLgo3KV2itl0d+OWNH0U9ULXcglTxy6+njo5 +CFqdUBPwN1jxhzo9yteDMKF4+AHIxbvCAJa17qcwUKR5MKNvv09C6pvQDJLzid7y +E2dkgSuggik3oa0427KvctFf8uhOV94RvEDyqvT5+pgNYZ2Yfga9pD/jjpoHEUlo +88IGU8/wJCx3Ds2yc8+oBg/ynxG8f/HmCC1ET6EHHoe2jlo8FpU/SgGtghS1YL30 +IWxNsPrUP+XsZpBJy/mvOhE5QXo6Y35zDqqj8tI7AGmAWu22jg== +-----END CERTIFICATE----- diff --git a/apps/emqx_authn/test/data/certs/cert.pem b/apps/emqx_authn/test/data/certs/cert.pem new file mode 100644 index 000000000..092390b1d --- /dev/null +++ b/apps/emqx_authn/test/data/certs/cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDEzCCAfugAwIBAgIBAjANBgkqhkiG9w0BAQsFADA/MQswCQYDVQQGEwJDTjER +MA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UEAwwGUm9vdENB +MB4XDTIwMDUwODA4MDcwNVoXDTMwMDUwNjA4MDcwNVowPzELMAkGA1UEBhMCQ04x +ETAPBgNVBAgMCGhhbmd6aG91MQwwCgYDVQQKDANFTVExDzANBgNVBAMMBlNlcnZl +cjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALNeWT3pE+QFfiRJzKmn +AMUrWo3K2j/Tm3+Xnl6WLz67/0rcYrJbbKvS3uyRP/stXyXEKw9CepyQ1ViBVFkW +Aoy8qQEOWFDsZc/5UzhXUnb6LXr3qTkFEjNmhj+7uzv/lbBxlUG1NlYzSeOB6/RT +8zH/lhOeKhLnWYPXdXKsa1FL6ij4X8DeDO1kY7fvAGmBn/THh1uTpDizM4YmeI+7 +4dmayA5xXvARte5h4Vu5SIze7iC057N+vymToMk2Jgk+ZZFpyXrnq+yo6RaD3ANc +lrc4FbeUQZ5a5s5Sxgs9a0Y3WMG+7c5VnVXcbjBRz/aq2NtOnQQjikKKQA8GF080 +BQkCAwEAAaMaMBgwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwDQYJKoZIhvcNAQEL +BQADggEBAJefnMZpaRDHQSNUIEL3iwGXE9c6PmIsQVE2ustr+CakBp3TZ4l0enLt +iGMfEVFju69cO4oyokWv+hl5eCMkHBf14Kv51vj448jowYnF1zmzn7SEzm5Uzlsa +sqjtAprnLyof69WtLU1j5rYWBuFX86yOTwRAFNjm9fvhAcrEONBsQtqipBWkMROp +iUYMkRqbKcQMdwxov+lHBYKq9zbWRoqLROAn54SRqgQk6c15JdEfgOOjShbsOkIH +UhqcwRkQic7n1zwHVGVDgNIZVgmJ2IdIWBlPEC7oLrRrBD/X1iEEXtKab6p5o22n +KB5mN+iQaE+Oe2cpGKZJiJRdM+IqDDQ= +-----END CERTIFICATE----- diff --git a/apps/emqx_authn/test/data/certs/client-cert.pem b/apps/emqx_authn/test/data/certs/client-cert.pem new file mode 100644 index 000000000..09d855221 --- /dev/null +++ b/apps/emqx_authn/test/data/certs/client-cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDEzCCAfugAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MQswCQYDVQQGEwJDTjER +MA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UEAwwGUm9vdENB +MB4XDTIwMDUwODA4MDY1N1oXDTMwMDUwNjA4MDY1N1owPzELMAkGA1UEBhMCQ04x +ETAPBgNVBAgMCGhhbmd6aG91MQwwCgYDVQQKDANFTVExDzANBgNVBAMMBkNsaWVu +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMy4hoksKcZBDbY680u6 
+TS25U51nuB1FBcGMlF9B/t057wPOlxF/OcmbxY5MwepS41JDGPgulE1V7fpsXkiW +1LUimYV/tsqBfymIe0mlY7oORahKji7zKQ2UBIVFhdlvQxunlIDnw6F9popUgyHt +dMhtlgZK8oqRwHxO5dbfoukYd6J/r+etS5q26sgVkf3C6dt0Td7B25H9qW+f7oLV +PbcHYCa+i73u9670nrpXsC+Qc7Mygwa2Kq/jwU+ftyLQnOeW07DuzOwsziC/fQZa +nbxR+8U9FNftgRcC3uP/JMKYUqsiRAuaDokARZxVTV5hUElfpO6z6/NItSDvvh3i +eikCAwEAAaMaMBgwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwDQYJKoZIhvcNAQEL +BQADggEBABchYxKo0YMma7g1qDswJXsR5s56Czx/I+B41YcpMBMTrRqpUC0nHtLk +M7/tZp592u/tT8gzEnQjZLKBAhFeZaR3aaKyknLqwiPqJIgg0pgsBGITrAK3Pv4z +5/YvAJJKgTe5UdeTz6U4lvNEux/4juZ4pmqH4qSFJTOzQS7LmgSmNIdd072rwXBd +UzcSHzsJgEMb88u/LDLjj1pQ7AtZ4Tta8JZTvcgBFmjB0QUi6fgkHY6oGat/W4kR +jSRUBlMUbM/drr2PVzRc2dwbFIl3X+ZE6n5Sl3ZwRAC/s92JU6CPMRW02muVu6xl +goraNgPISnrbpR6KjxLZkVembXzjNNc= +-----END CERTIFICATE----- diff --git a/apps/emqx_authn/test/data/certs/client-key.pem b/apps/emqx_authn/test/data/certs/client-key.pem new file mode 100644 index 000000000..2b3f30cf6 --- /dev/null +++ b/apps/emqx_authn/test/data/certs/client-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAzLiGiSwpxkENtjrzS7pNLblTnWe4HUUFwYyUX0H+3TnvA86X +EX85yZvFjkzB6lLjUkMY+C6UTVXt+mxeSJbUtSKZhX+2yoF/KYh7SaVjug5FqEqO +LvMpDZQEhUWF2W9DG6eUgOfDoX2milSDIe10yG2WBkryipHAfE7l1t+i6Rh3on+v +561LmrbqyBWR/cLp23RN3sHbkf2pb5/ugtU9twdgJr6Lve73rvSeulewL5BzszKD +BrYqr+PBT5+3ItCc55bTsO7M7CzOIL99BlqdvFH7xT0U1+2BFwLe4/8kwphSqyJE +C5oOiQBFnFVNXmFQSV+k7rPr80i1IO++HeJ6KQIDAQABAoIBAGWgvPjfuaU3qizq +uti/FY07USz0zkuJdkANH6LiSjlchzDmn8wJ0pApCjuIE0PV/g9aS8z4opp5q/gD +UBLM/a8mC/xf2EhTXOMrY7i9p/I3H5FZ4ZehEqIw9sWKK9YzC6dw26HabB2BGOnW +5nozPSQ6cp2RGzJ7BIkxSZwPzPnVTgy3OAuPOiJytvK+hGLhsNaT+Y9bNDvplVT2 +ZwYTV8GlHZC+4b2wNROILm0O86v96O+Qd8nn3fXjGHbMsAnONBq10bZS16L4fvkH +5G+W/1PeSXmtZFppdRRDxIW+DWcXK0D48WRliuxcV4eOOxI+a9N2ZJZZiNLQZGwg +w3A8+mECgYEA8HuJFrlRvdoBe2U/EwUtG74dcyy30L4yEBnN5QscXmEEikhaQCfX +Wm6EieMcIB/5I5TQmSw0cmBMeZjSXYoFdoI16/X6yMMuATdxpvhOZGdUGXxhAH+x +xoTUavWZnEqW3fkUU71kT5E2f2i+0zoatFESXHeslJyz85aAYpP92H0CgYEA2e5A +Yozt5eaA1Gyhd8SeptkEU4xPirNUnVQHStpMWUb1kzTNXrPmNWccQ7JpfpG6DcYl +zUF6p6mlzY+zkMiyPQjwEJlhiHM2NlL1QS7td0R8ewgsFoyn8WsBI4RejWrEG9td +EDniuIw+pBFkcWthnTLHwECHdzgquToyTMjrBB0CgYEA28tdGbrZXhcyAZEhHAZA +Gzog+pKlkpEzeonLKIuGKzCrEKRecIK5jrqyQsCjhS0T7ZRnL4g6i0s+umiV5M5w +fcc292pEA1h45L3DD6OlKplSQVTv55/OYS4oY3YEJtf5mfm8vWi9lQeY8sxOlQpn +O+VZTdBHmTC8PGeTAgZXHZUCgYA6Tyv88lYowB7SN2qQgBQu8jvdGtqhcs/99GCr +H3N0I69LPsKAR0QeH8OJPXBKhDUywESXAaEOwS5yrLNP1tMRz5Vj65YUCzeDG3kx +gpvY4IMp7ArX0bSRvJ6mYSFnVxy3k174G3TVCfksrtagHioVBGQ7xUg5ltafjrms +n8l55QKBgQDVzU8tQvBVqY8/1lnw11Vj4fkE/drZHJ5UkdC1eenOfSWhlSLfUJ8j +ds7vEWpRPPoVuPZYeR1y78cyxKe1GBx6Wa2lF5c7xjmiu0xbRnrxYeLolce9/ntp +asClqpnHT8/VJYTD7Kqj0fouTTZf0zkig/y+2XERppd8k+pSKjUCPQ== +-----END RSA PRIVATE KEY----- diff --git a/apps/emqx_authn/test/data/certs/key.pem b/apps/emqx_authn/test/data/certs/key.pem new file mode 100644 index 000000000..6c338216e --- /dev/null +++ b/apps/emqx_authn/test/data/certs/key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAs15ZPekT5AV+JEnMqacAxStajcraP9Obf5eeXpYvPrv/Stxi +sltsq9Le7JE/+y1fJcQrD0J6nJDVWIFUWRYCjLypAQ5YUOxlz/lTOFdSdvotevep +OQUSM2aGP7u7O/+VsHGVQbU2VjNJ44Hr9FPzMf+WE54qEudZg9d1cqxrUUvqKPhf +wN4M7WRjt+8AaYGf9MeHW5OkOLMzhiZ4j7vh2ZrIDnFe8BG17mHhW7lIjN7uILTn +s36/KZOgyTYmCT5lkWnJeuer7KjpFoPcA1yWtzgVt5RBnlrmzlLGCz1rRjdYwb7t +zlWdVdxuMFHP9qrY206dBCOKQopADwYXTzQFCQIDAQABAoIBAQCuvCbr7Pd3lvI/ +n7VFQG+7pHRe1VKwAxDkx2t8cYos7y/QWcm8Ptwqtw58HzPZGWYrgGMCRpzzkRSF +V9g3wP1S5Scu5C6dBu5YIGc157tqNGXB+SpdZddJQ4Nc6yGHXYERllT04ffBGc3N 
+WG/oYS/1cSteiSIrsDy/91FvGRCi7FPxH3wIgHssY/tw69s1Cfvaq5lr2NTFzxIG +xCvpJKEdSfVfS9I7LYiymVjst3IOR/w76/ZFY9cRa8ZtmQSWWsm0TUpRC1jdcbkm +ZoJptYWlP+gSwx/fpMYftrkJFGOJhHJHQhwxT5X/ajAISeqjjwkWSEJLwnHQd11C +Zy2+29lBAoGBANlEAIK4VxCqyPXNKfoOOi5dS64NfvyH4A1v2+KaHWc7lqaqPN49 +ezfN2n3X+KWx4cviDD914Yc2JQ1vVJjSaHci7yivocDo2OfZDmjBqzaMp/y+rX1R +/f3MmiTqMa468rjaxI9RRZu7vDgpTR+za1+OBCgMzjvAng8dJuN/5gjlAoGBANNY +uYPKtearBmkqdrSV7eTUe49Nhr0XotLaVBH37TCW0Xv9wjO2xmbm5Ga/DCtPIsBb +yPeYwX9FjoasuadUD7hRvbFu6dBa0HGLmkXRJZTcD7MEX2Lhu4BuC72yDLLFd0r+ +Ep9WP7F5iJyagYqIZtz+4uf7gBvUDdmvXz3sGr1VAoGAdXTD6eeKeiI6PlhKBztF +zOb3EQOO0SsLv3fnodu7ZaHbUgLaoTMPuB17r2jgrYM7FKQCBxTNdfGZmmfDjlLB +0xZ5wL8ibU30ZXL8zTlWPElST9sto4B+FYVVF/vcG9sWeUUb2ncPcJ/Po3UAktDG +jYQTTyuNGtSJHpad/YOZctkCgYBtWRaC7bq3of0rJGFOhdQT9SwItN/lrfj8hyHA +OjpqTV4NfPmhsAtu6j96OZaeQc+FHvgXwt06cE6Rt4RG4uNPRluTFgO7XYFDfitP +vCppnoIw6S5BBvHwPP+uIhUX2bsi/dm8vu8tb+gSvo4PkwtFhEr6I9HglBKmcmog +q6waEQKBgHyecFBeM6Ls11Cd64vborwJPAuxIW7HBAFj/BS99oeG4TjBx4Sz2dFd +rzUibJt4ndnHIvCN8JQkjNG14i9hJln+H3mRss8fbZ9vQdqG+2vOWADYSzzsNI55 +RFY7JjluKcVkp/zCDeUxTU3O6sS+v6/3VE11Cob6OYQx3lN5wrZ3 +-----END RSA PRIVATE KEY----- diff --git a/apps/emqx_authn/test/data/emqx.io.ldif b/apps/emqx_authn/test/data/emqx.io.ldif new file mode 100644 index 000000000..4675717ec --- /dev/null +++ b/apps/emqx_authn/test/data/emqx.io.ldif @@ -0,0 +1,134 @@ +## create emqx.io + +dn:dc=emqx,dc=io +objectclass: top +objectclass: dcobject +objectclass: organization +dc:emqx +o:emqx,Inc. + +# create testdevice.emqx.io +dn:ou=testdevice,dc=emqx,dc=io +objectClass: top +objectclass:organizationalUnit +ou:testdevice + +# create user admin +dn:uid=admin,ou=testdevice,dc=emqx,dc=io +objectClass: top +objectClass: simpleSecurityObject +objectClass: account +userPassword:: e1NIQX1XNnBoNU1tNVB6OEdnaVVMYlBnekczN21qOWc9 +uid: admin + +## create user=mqttuser0001, +# password=mqttuser0001, +# passhash={SHA}mlb3fat40MKBTXUVZwCKmL73R/0= +# base64passhash=e1NIQX1tbGIzZmF0NDBNS0JUWFVWWndDS21MNzNSLzA9 +dn:uid=mqttuser0001,ou=testdevice,dc=emqx,dc=io +objectClass: top +objectClass: mqttUser +objectClass: mqttDevice +objectClass: mqttSecurity +uid: mqttuser0001 +isEnabled: TRUE +mqttAccountName: user1 +mqttPublishTopic: mqttuser0001/pub/1 +mqttPublishTopic: mqttuser0001/pub/+ +mqttPublishTopic: mqttuser0001/pub/# +mqttSubscriptionTopic: mqttuser0001/sub/1 +mqttSubscriptionTopic: mqttuser0001/sub/+ +mqttSubscriptionTopic: mqttuser0001/sub/# +mqttPubSubTopic: mqttuser0001/pubsub/1 +mqttPubSubTopic: mqttuser0001/pubsub/+ +mqttPubSubTopic: mqttuser0001/pubsub/# +userPassword:: e1NIQX1tbGIzZmF0NDBNS0JUWFVWWndDS21MNzNSLzA9 + +## create user=mqttuser0002 +# password=mqttuser0002, +# passhash={SSHA}n9XdtoG4Q/TQ3TQF4Y+khJbMBH4qXj4M +# base64passhash=e1NTSEF9bjlYZHRvRzRRL1RRM1RRRjRZK2toSmJNQkg0cVhqNE0= +dn:uid=mqttuser0002,ou=testdevice,dc=emqx,dc=io +objectClass: top +objectClass: mqttUser +objectClass: mqttDevice +objectClass: mqttSecurity +uid: mqttuser0002 +isEnabled: TRUE +mqttAccountName: user2 +mqttPublishTopic: mqttuser0002/pub/1 +mqttPublishTopic: mqttuser0002/pub/+ +mqttPublishTopic: mqttuser0002/pub/# +mqttSubscriptionTopic: mqttuser0002/sub/1 +mqttSubscriptionTopic: mqttuser0002/sub/+ +mqttSubscriptionTopic: mqttuser0002/sub/# +mqttPubSubTopic: mqttuser0002/pubsub/1 +mqttPubSubTopic: mqttuser0002/pubsub/+ +mqttPubSubTopic: mqttuser0002/pubsub/# +userPassword:: e1NTSEF9bjlYZHRvRzRRL1RRM1RRRjRZK2toSmJNQkg0cVhqNE0= + +## create user mqttuser0003 +# password=mqttuser0003, +# passhash={MD5}ybsPGoaK3nDyiQvveiCOIw== +# 
base64passhash=e01ENX15YnNQR29hSzNuRHlpUXZ2ZWlDT0l3PT0= +dn:uid=mqttuser0003,ou=testdevice,dc=emqx,dc=io +objectClass: top +objectClass: mqttUser +objectClass: mqttDevice +objectClass: mqttSecurity +uid: mqttuser0003 +isEnabled: TRUE +mqttPublishTopic: mqttuser0003/pub/1 +mqttPublishTopic: mqttuser0003/pub/+ +mqttPublishTopic: mqttuser0003/pub/# +mqttSubscriptionTopic: mqttuser0003/sub/1 +mqttSubscriptionTopic: mqttuser0003/sub/+ +mqttSubscriptionTopic: mqttuser0003/sub/# +mqttPubSubTopic: mqttuser0003/pubsub/1 +mqttPubSubTopic: mqttuser0003/pubsub/+ +mqttPubSubTopic: mqttuser0003/pubsub/# +userPassword:: e01ENX15YnNQR29hSzNuRHlpUXZ2ZWlDT0l3PT0= + +## create user mqttuser0004 +# password=mqttuser0004, +# passhash={MD5}2Br6pPDSEDIEvUlu9+s+MA== +# base64passhash=e01ENX0yQnI2cFBEU0VESUV2VWx1OStzK01BPT0= +dn:uid=mqttuser0004,ou=testdevice,dc=emqx,dc=io +objectClass: top +objectClass: mqttUser +objectClass: mqttDevice +objectClass: mqttSecurity +uid: mqttuser0004 +isEnabled: TRUE +mqttPublishTopic: mqttuser0004/pub/1 +mqttPublishTopic: mqttuser0004/pub/+ +mqttPublishTopic: mqttuser0004/pub/# +mqttSubscriptionTopic: mqttuser0004/sub/1 +mqttSubscriptionTopic: mqttuser0004/sub/+ +mqttSubscriptionTopic: mqttuser0004/sub/# +mqttPubSubTopic: mqttuser0004/pubsub/1 +mqttPubSubTopic: mqttuser0004/pubsub/+ +mqttPubSubTopic: mqttuser0004/pubsub/# +userPassword: {MD5}2Br6pPDSEDIEvUlu9+s+MA== + +## create user mqttuser0005 +# password=mqttuser0005, +# passhash={SHA}jKnxeEDGR14kE8AR7yuVFOelhz4= +# base64passhash=e1NIQX1qS254ZUVER1IxNGtFOEFSN3l1VkZPZWxoejQ9 +objectClass: top +dn:uid=mqttuser0005,ou=testdevice,dc=emqx,dc=io +objectClass: mqttUser +objectClass: mqttDevice +objectClass: mqttSecurity +uid: mqttuser0005 +isEnabled: TRUE +mqttPublishTopic: mqttuser0005/pub/1 +mqttPublishTopic: mqttuser0005/pub/+ +mqttPublishTopic: mqttuser0005/pub/# +mqttSubscriptionTopic: mqttuser0005/sub/1 +mqttSubscriptionTopic: mqttuser0005/sub/+ +mqttSubscriptionTopic: mqttuser0005/sub/# +mqttPubSubTopic: mqttuser0005/pubsub/1 +mqttPubSubTopic: mqttuser0005/pubsub/+ +mqttPubSubTopic: mqttuser0005/pubsub/# +userPassword: {SHA}jKnxeEDGR14kE8AR7yuVFOelhz4= diff --git a/apps/emqx_authn/test/data/emqx.schema b/apps/emqx_authn/test/data/emqx.schema new file mode 100644 index 000000000..55f92269b --- /dev/null +++ b/apps/emqx_authn/test/data/emqx.schema @@ -0,0 +1,46 @@ +# +# Preliminary Apple OS X Native LDAP Schema +# This file is subject to change. 
+# +attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.1.3 NAME 'isEnabled' + EQUALITY booleanMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 + SINGLE-VALUE + USAGE userApplications ) + +attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.4.1 NAME ( 'mqttPublishTopic' 'mpt' ) + EQUALITY caseIgnoreMatch + SUBSTR caseIgnoreSubstringsMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 + USAGE userApplications ) +attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.4.2 NAME ( 'mqttSubscriptionTopic' 'mst' ) + EQUALITY caseIgnoreMatch + SUBSTR caseIgnoreSubstringsMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 + USAGE userApplications ) +attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.4.3 NAME ( 'mqttPubSubTopic' 'mpst' ) + EQUALITY caseIgnoreMatch + SUBSTR caseIgnoreSubstringsMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 + USAGE userApplications ) +attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.4.4 NAME ( 'mqttAccountName' 'man' ) + EQUALITY caseIgnoreMatch + SUBSTR caseIgnoreSubstringsMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 + USAGE userApplications ) + + +objectclass ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.4 NAME 'mqttUser' + AUXILIARY + MAY ( mqttPublishTopic $ mqttSubscriptionTopic $ mqttPubSubTopic $ mqttAccountName) ) + +objectclass ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.2 NAME 'mqttDevice' + SUP top + STRUCTURAL + MUST ( uid ) + MAY ( isEnabled ) ) + +objectclass ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.3 NAME 'mqttSecurity' + SUP top + AUXILIARY + MAY ( userPassword $ userPKCS12 $ pwdAttribute $ pwdLockout ) ) diff --git a/apps/emqx_authn/test/data/user-credentials-malformed-0.json b/apps/emqx_authn/test/data/user-credentials-malformed-0.json new file mode 100644 index 000000000..e7d3b5741 --- /dev/null +++ b/apps/emqx_authn/test/data/user-credentials-malformed-0.json @@ -0,0 +1,14 @@ +[ + { + "userid":"myuser1", + "password_hash":"c5e46903df45e5dc096dc74657610dbee8deaacae656df88a1788f1847390242", + "salt": "e378187547bf2d6f0545a3f441aa4d8a", + "is_superuser": true + }, + { + "user_id":"myuser2", + "password_hash":"f4d17f300b11e522fd33f497c11b126ef1ea5149c74d2220f9a16dc876d4567b", + "salt": "6d3f9bd5b54d94b98adbcfe10b6d181f", + "is_superuser": false + } +] diff --git a/apps/emqx_authn/test/data/user-credentials-malformed-1.json b/apps/emqx_authn/test/data/user-credentials-malformed-1.json new file mode 100644 index 000000000..aa8f55d4a --- /dev/null +++ b/apps/emqx_authn/test/data/user-credentials-malformed-1.json @@ -0,0 +1,15 @@ +[ + { + "user_id":"myuser1", + "password_hash":"c5e46903df45e5dc096dc74657610dbee8deaacae656df88a1788f1847390242", + "salt": "e378187547bf2d6f0545a3f441aa4d8a", + "is_superuser": true + + , + { + "user_id":"myuser2", + "password_hash":"f4d17f300b11e522fd33f497c11b126ef1ea5149c74d2220f9a16dc876d4567b", + "salt": "6d3f9bd5b54d94b98adbcfe10b6d181f", + "is_superuser": false + } +] diff --git a/apps/emqx_authn/test/data/user-credentials-malformed.csv b/apps/emqx_authn/test/data/user-credentials-malformed.csv new file mode 100644 index 000000000..856b53eaf --- /dev/null +++ b/apps/emqx_authn/test/data/user-credentials-malformed.csv @@ -0,0 +1,3 @@ +user_id,password_hash,salt,is_superuser +myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235,true +myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a9 diff --git a/apps/emqx_authn/test/emqx_authn_SUITE.erl b/apps/emqx_authn/test/emqx_authn_SUITE.erl index 1ee419bb0..d3704679f 100644 --- a/apps/emqx_authn/test/emqx_authn_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_SUITE.erl @@ -19,4 +19,4 
@@ -compile(export_all). -compile(nowarn_export_all). -all() -> emqx_common_test_helpers:all(?MODULE). \ No newline at end of file +all() -> emqx_common_test_helpers:all(?MODULE). diff --git a/apps/emqx_authn/test/emqx_authn_api_SUITE.erl b/apps/emqx_authn/test/emqx_authn_api_SUITE.erl index 32ba06c33..5b8fc24d4 100644 --- a/apps/emqx_authn/test/emqx_authn_api_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_api_SUITE.erl @@ -18,7 +18,7 @@ -compile(nowarn_export_all). -compile(export_all). --include("emqx_authz.hrl"). +-include("emqx_authn.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). @@ -27,14 +27,41 @@ -define(API_VERSION, "v5"). -define(BASE_PATH, "api"). +-define(TCP_DEFAULT, 'tcp:default'). + +-define( + assertAuthenticatorsMatch(Guard, Path), + (fun() -> + {ok, 200, Response} = request(get, uri(Path)), + ?assertMatch(Guard, jiffy:decode(Response, [return_maps])) + end)()). + all() -> emqx_common_test_helpers:all(?MODULE). groups() -> []. +init_per_testcase(_, Config) -> + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + + emqx_authn_test_lib:delete_authenticators( + [listeners, tcp, default, authentication], + ?TCP_DEFAULT), + + {atomic, ok} = mria:clear_table(emqx_authn_mnesia), + Config. + init_per_suite(Config) -> - ok = emqx_common_test_helpers:start_apps([emqx_authn, emqx_dashboard], fun set_special_configs/1), + ok = emqx_common_test_helpers:start_apps( + [emqx_authn, emqx_dashboard], + fun set_special_configs/1), + + ?AUTHN:delete_chain(?GLOBAL), + {ok, Chains} = ?AUTHN:list_chains(), + ?assertEqual(length(Chains), 0), Config. end_per_suite(_Config) -> @@ -55,10 +82,323 @@ set_special_configs(emqx_dashboard) -> set_special_configs(_App) -> ok. -t_create_http_authn(_) -> - {ok, 200, _} = request(post, uri(["authentication"]), - emqx_authn_test_lib:http_example()), - {ok, 200, _} = request(get, uri(["authentication"])). +%%------------------------------------------------------------------------------ +%% Tests +%%------------------------------------------------------------------------------ + +t_invalid_listener(_) -> + {ok, 404, _} = request(get, uri(["listeners", "invalid", "authentication"])), + {ok, 404, _} = request(get, uri(["listeners", "in:valid", "authentication"])). + +t_authenticators(_) -> + test_authenticators([]). + +t_authenticator(_) -> + test_authenticator([]). + +t_authenticator_users(_) -> + test_authenticator_users([]). + +t_authenticator_user(_) -> + test_authenticator_user([]). + +t_authenticator_move(_) -> + test_authenticator_move([]). + +t_authenticator_import_users(_) -> + test_authenticator_import_users([]). + +t_listener_authenticators(_) -> + test_authenticators(["listeners", ?TCP_DEFAULT]). + +t_listener_authenticator(_) -> + test_authenticator(["listeners", ?TCP_DEFAULT]). + +t_listener_authenticator_users(_) -> + test_authenticator_users(["listeners", ?TCP_DEFAULT]). + +t_listener_authenticator_user(_) -> + test_authenticator_user(["listeners", ?TCP_DEFAULT]). + +t_listener_authenticator_move(_) -> + test_authenticator_move(["listeners", ?TCP_DEFAULT]). + +t_listener_authenticator_import_users(_) -> + test_authenticator_import_users(["listeners", ?TCP_DEFAULT]). 
+ +test_authenticators(PathPrefix) -> + + ValidConfig = emqx_authn_test_lib:http_example(), + {ok, 200, _} = request( + post, + uri(PathPrefix ++ ["authentication"]), + ValidConfig), + + InvalidConfig = ValidConfig#{method => <<"delete">>}, + {ok, 400, _} = request( + post, + uri(PathPrefix ++ ["authentication"]), + InvalidConfig), + + ?assertAuthenticatorsMatch( + [#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"http">>}], + PathPrefix ++ ["authentication"]). + +test_authenticator(PathPrefix) -> + ValidConfig0 = emqx_authn_test_lib:http_example(), + {ok, 200, _} = request( + post, + uri(PathPrefix ++ ["authentication"]), + ValidConfig0), + {ok, 200, _} = request( + get, + uri(PathPrefix ++ ["authentication", "password-based:http"])), + + {ok, 404, _} = request( + get, + uri(PathPrefix ++ ["authentication", "password-based:redis"])), + + + {ok, 404, _} = request( + put, + uri(PathPrefix ++ ["authentication", "password-based:built-in-database"]), + emqx_authn_test_lib:built_in_database_example()), + + InvalidConfig0 = ValidConfig0#{method => <<"delete">>}, + {ok, 400, _} = request( + put, + uri(PathPrefix ++ ["authentication", "password-based:http"]), + InvalidConfig0), + + ValidConfig1 = ValidConfig0#{pool_size => 9}, + {ok, 200, _} = request( + put, + uri(PathPrefix ++ ["authentication", "password-based:http"]), + ValidConfig1), + + {ok, 404, _} = request( + delete, + uri(PathPrefix ++ ["authentication", "password-based:redis"])), + + {ok, 204, _} = request( + delete, + uri(PathPrefix ++ ["authentication", "password-based:http"])), + + ?assertAuthenticatorsMatch([], PathPrefix ++ ["authentication"]). + +test_authenticator_users(PathPrefix) -> + UsersUri = uri(PathPrefix ++ ["authentication", "password-based:built-in-database", "users"]), + + {ok, 200, _} = request( + post, + uri(PathPrefix ++ ["authentication"]), + emqx_authn_test_lib:built_in_database_example()), + + InvalidUsers = [ + #{clientid => <<"u1">>, password => <<"p1">>}, + #{user_id => <<"u2">>}, + #{user_id => <<"u3">>, password => <<"p3">>, foobar => <<"foobar">>}], + + lists:foreach( + fun(User) -> {ok, 400, _} = request(post, UsersUri, User) end, + InvalidUsers), + + + ValidUsers = [ + #{user_id => <<"u1">>, password => <<"p1">>}, + #{user_id => <<"u2">>, password => <<"p2">>, is_superuser => true}, + #{user_id => <<"u3">>, password => <<"p3">>}], + + lists:foreach( + fun(User) -> + {ok, 201, UserData} = request(post, UsersUri, User), + CreatedUser = jiffy:decode(UserData, [return_maps]), + ?assertMatch(#{<<"user_id">> := _}, CreatedUser) + end, + ValidUsers), + + {ok, 200, Page1Data} = request(get, UsersUri ++ "?page=1&limit=2"), + + #{<<"data">> := Page1Users, + <<"meta">> := + #{<<"page">> := 1, + <<"limit">> := 2, + <<"count">> := 3}} = + jiffy:decode(Page1Data, [return_maps]), + + {ok, 200, Page2Data} = request(get, UsersUri ++ "?page=2&limit=2"), + + #{<<"data">> := Page2Users, + <<"meta">> := + #{<<"page">> := 2, + <<"limit">> := 2, + <<"count">> := 3}} = jiffy:decode(Page2Data, [return_maps]), + + ?assertEqual(2, length(Page1Users)), + ?assertEqual(1, length(Page2Users)), + + ?assertEqual( + [<<"u1">>, <<"u2">>, <<"u3">>], + lists:usort([ UserId || #{<<"user_id">> := UserId} <- Page1Users ++ Page2Users])). 
+ +test_authenticator_user(PathPrefix) -> + UsersUri = uri(PathPrefix ++ ["authentication", "password-based:built-in-database", "users"]), + + {ok, 200, _} = request( + post, + uri(PathPrefix ++ ["authentication"]), + emqx_authn_test_lib:built_in_database_example()), + + User = #{user_id => <<"u1">>, password => <<"p1">>}, + {ok, 201, _} = request(post, UsersUri, User), + + {ok, 404, _} = request(get, UsersUri ++ "/u123"), + + {ok, 409, _} = request(post, UsersUri, User), + + {ok, 200, UserData} = request(get, UsersUri ++ "/u1"), + + FetchedUser = jiffy:decode(UserData, [return_maps]), + ?assertMatch(#{<<"user_id">> := <<"u1">>}, FetchedUser), + ?assertNotMatch(#{<<"password">> := _}, FetchedUser), + + ValidUserUpdates = [ + #{password => <<"p1">>}, + #{password => <<"p1">>, is_superuser => true}], + + lists:foreach( + fun(UserUpdate) -> {ok, 200, _} = request(put, UsersUri ++ "/u1", UserUpdate) end, + ValidUserUpdates), + + InvalidUserUpdates = [#{user_id => <<"u1">>, password => <<"p1">>}], + + lists:foreach( + fun(UserUpdate) -> {ok, 400, _} = request(put, UsersUri ++ "/u1", UserUpdate) end, + InvalidUserUpdates), + + {ok, 404, _} = request(delete, UsersUri ++ "/u123"), + {ok, 204, _} = request(delete, UsersUri ++ "/u1"). + +test_authenticator_move(PathPrefix) -> + AuthenticatorConfs = [ + emqx_authn_test_lib:http_example(), + emqx_authn_test_lib:jwt_example(), + emqx_authn_test_lib:built_in_database_example() + ], + + lists:foreach( + fun(Conf) -> + {ok, 200, _} = request( + post, + uri(PathPrefix ++ ["authentication"]), + Conf) + end, + AuthenticatorConfs), + + ?assertAuthenticatorsMatch( + [ + #{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"http">>}, + #{<<"mechanism">> := <<"jwt">>}, + #{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>} + ], + PathPrefix ++ ["authentication"]), + + % Invalid moves + + {ok, 400, _} = request( + post, + uri(PathPrefix ++ ["authentication", "jwt", "move"]), + #{position => <<"up">>}), + + {ok, 400, _} = request( + post, + uri(PathPrefix ++ ["authentication", "jwt", "move"]), + #{}), + + {ok, 404, _} = request( + post, + uri(PathPrefix ++ ["authentication", "jwt", "move"]), + #{position => <<"before:invalid">>}), + + {ok, 404, _} = request( + post, + uri(PathPrefix ++ ["authentication", "jwt", "move"]), + #{position => <<"before:password-based:redis">>}), + + {ok, 404, _} = request( + post, + uri(PathPrefix ++ ["authentication", "jwt", "move"]), + #{position => <<"before:password-based:redis">>}), + + % Valid moves + + {ok, 204, _} = request( + post, + uri(PathPrefix ++ ["authentication", "jwt", "move"]), + #{position => <<"top">>}), + + ?assertAuthenticatorsMatch( + [ + #{<<"mechanism">> := <<"jwt">>}, + #{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"http">>}, + #{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>} + ], + PathPrefix ++ ["authentication"]), + + {ok, 204, _} = request( + post, + uri(PathPrefix ++ ["authentication", "jwt", "move"]), + #{position => <<"bottom">>}), + + ?assertAuthenticatorsMatch( + [ + #{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"http">>}, + #{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>}, + #{<<"mechanism">> := <<"jwt">>} + ], + PathPrefix ++ ["authentication"]), + + {ok, 204, _} = request( + post, + uri(PathPrefix ++ ["authentication", "jwt", "move"]), + #{position => <<"before:password-based:built-in-database">>}), + + ?assertAuthenticatorsMatch( + [ + #{<<"mechanism">> := 
<<"password-based">>, <<"backend">> := <<"http">>}, + #{<<"mechanism">> := <<"jwt">>}, + #{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>} + ], + PathPrefix ++ ["authentication"]). + +test_authenticator_import_users(PathPrefix) -> + ImportUri = uri( + PathPrefix ++ + ["authentication", "password-based:built-in-database", "import_users"]), + + + {ok, 200, _} = request( + post, + uri(PathPrefix ++ ["authentication"]), + emqx_authn_test_lib:built_in_database_example()), + + {ok, 400, _} = request(post, ImportUri, #{}), + + {ok, 400, _} = request(post, ImportUri, #{filename => <<"/etc/passwd">>}), + + {ok, 400, _} = request(post, ImportUri, #{filename => <<"/not_exists.csv">>}), + + Dir = code:lib_dir(emqx_authn, test), + JSONFileName = filename:join([Dir, <<"data/user-credentials.json">>]), + CSVFileName = filename:join([Dir, <<"data/user-credentials.csv">>]), + + {ok, 204, _} = request(post, ImportUri, #{filename => JSONFileName}), + + {ok, 204, _} = request(post, ImportUri, #{filename => CSVFileName}). + +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ request(Method, Url) -> request(Method, Url, []). @@ -83,10 +423,7 @@ request(Method, Url, Body) -> uri() -> uri([]). uri(Parts) when is_list(Parts) -> - NParts = [E || E <- Parts], - ?HOST ++ filename:join([?BASE_PATH, ?API_VERSION | NParts]). - -get_sources(Result) -> jsx:decode(Result). + ?HOST ++ filename:join([?BASE_PATH, ?API_VERSION | Parts]). auth_header() -> Username = <<"admin">>, @@ -94,6 +431,5 @@ auth_header() -> {ok, Token} = emqx_dashboard_admin:sign_token(Username, Password), {"Authorization", "Bearer " ++ binary_to_list(Token)}. -to_json(Hocon) -> - {ok, Map} =hocon:binary(Hocon), +to_json(Map) -> jiffy:encode(Map). diff --git a/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl b/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl index a21d0e9ad..b5bca513c 100644 --- a/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl @@ -19,146 +19,223 @@ -compile(export_all). -compile(nowarn_export_all). -% -include_lib("common_test/include/ct.hrl"). -% -include_lib("eunit/include/eunit.hrl"). +-include_lib("eunit/include/eunit.hrl"). -% -include("emqx_authn.hrl"). +-include("emqx_authn.hrl"). -% -define(AUTH, emqx_authn). +-define(AUTHN_ID, <<"mechanism:backend">>). all() -> emqx_common_test_helpers:all(?MODULE). -% init_per_suite(Config) -> -% emqx_common_test_helpers:start_apps([emqx_authn]), -% Config. +init_per_suite(Config) -> + emqx_common_test_helpers:start_apps([emqx_authn]), + Config. -% end_per_suite(_) -> -% emqx_common_test_helpers:stop_apps([emqx_authn]), -% ok. +end_per_suite(_) -> + emqx_common_test_helpers:stop_apps([emqx_authn]), + ok. -% t_mnesia_authenticator(_) -> -% AuthenticatorName = <<"myauthenticator">>, -% AuthenticatorConfig = #{name => AuthenticatorName, -% mechanism => 'password-based', -% server_type => 'built-in-database', -% user_id_type => username, -% password_hash_algorithm => #{ -% name => sha256 -% }}, -% {ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig), +init_per_testcase(_Case, Config) -> + mnesia:clear_table(emqx_authn_mnesia), + Config. 
-% UserInfo = #{user_id => <<"myuser">>, -% password => <<"mypass">>}, -% ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:add_user(?CHAIN, ID, UserInfo)), -% ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)), +end_per_testcase(_Case, Config) -> + Config. -% ClientInfo = #{zone => external, -% username => <<"myuser">>, -% password => <<"mypass">>}, -% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)), -% ?AUTH:enable(), -% ?assertEqual({ok, #{is_superuser => false}}, emqx_access_control:authenticate(ClientInfo)), +%%------------------------------------------------------------------------------ +%% Tests +%%------------------------------------------------------------------------------ -% ClientInfo2 = ClientInfo#{username => <<"baduser">>}, -% ?assertEqual({stop, {error, not_authorized}}, ?AUTH:authenticate(ClientInfo2, ignored)), -% ?assertEqual({error, not_authorized}, emqx_access_control:authenticate(ClientInfo2)), +t_check_schema(_Config) -> + ConfigOk = #{ + <<"mechanism">> => <<"password-based">>, + <<"backend">> => <<"built-in-database">>, + <<"user_id_type">> => <<"username">>, + <<"password_hash_algorithm">> => #{ + <<"name">> => <<"bcrypt">>, + <<"salt_rounds">> => <<"6">> + } + }, -% ClientInfo3 = ClientInfo#{password => <<"badpass">>}, -% ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo3, ignored)), -% ?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo3)), + hocon_schema:check_plain(emqx_authn_mnesia, #{<<"config">> => ConfigOk}), -% UserInfo2 = UserInfo#{password => <<"mypass2">>}, -% ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:update_user(?CHAIN, ID, <<"myuser">>, UserInfo2)), -% ClientInfo4 = ClientInfo#{password => <<"mypass2">>}, -% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo4, ignored)), + ConfigNotOk = #{ + <<"mechanism">> => <<"password-based">>, + <<"backend">> => <<"built-in-database">>, + <<"user_id_type">> => <<"username">>, + <<"password_hash_algorithm">> => #{ + <<"name">> => <<"md6">> + } + }, -% ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:update_user(?CHAIN, ID, <<"myuser">>, #{is_superuser => true})), -% ?assertEqual({stop, {ok, #{is_superuser => true}}}, ?AUTH:authenticate(ClientInfo4, ignored)), + ?assertException( + throw, + {emqx_authn_mnesia, _}, + hocon_schema:check_plain(emqx_authn_mnesia, #{<<"config">> => ConfigNotOk})). -% ?assertEqual(ok, ?AUTH:delete_user(?CHAIN, ID, <<"myuser">>)), -% ?assertEqual({error, not_found}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)), +t_create(_) -> + Config0 = config(), -% ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:add_user(?CHAIN, ID, UserInfo)), -% ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)), -% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)), + {ok, _} = emqx_authn_mnesia:create(?AUTHN_ID, Config0), -% {ok, #{name := AuthenticatorName, id := ID1}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig), -% ?assertMatch({error, not_found}, ?AUTH:lookup_user(?CHAIN, ID1, <<"myuser">>)), -% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID1)), -% ok. + Config1 = Config0#{password_hash_algorithm => #{name => sha256}}, + {ok, _} = emqx_authn_mnesia:create(?AUTHN_ID, Config1). 
-% t_import(_) -> -% AuthenticatorName = <<"myauthenticator">>, -% AuthenticatorConfig = #{name => AuthenticatorName, -% mechanism => 'password-based', -% server_type => 'built-in-database', -% user_id_type => username, -% password_hash_algorithm => #{ -% name => sha256 -% }}, -% {ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig), +t_update(_) -> + Config0 = config(), + {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config0), -% Dir = code:lib_dir(emqx_authn, test), -% ?assertEqual(ok, ?AUTH:import_users(?CHAIN, ID, filename:join([Dir, "data/user-credentials.json"]))), -% ?assertEqual(ok, ?AUTH:import_users(?CHAIN, ID, filename:join([Dir, "data/user-credentials.csv"]))), -% ?assertMatch({ok, #{user_id := <<"myuser1">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser1">>)), -% ?assertMatch({ok, #{user_id := <<"myuser3">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser3">>)), + Config1 = Config0#{password_hash_algorithm => #{name => sha256}}, + {ok, _} = emqx_authn_mnesia:update(Config1, State). -% ClientInfo1 = #{username => <<"myuser1">>, -% password => <<"mypassword1">>}, -% ?assertEqual({stop, {ok, #{is_superuser => true}}}, ?AUTH:authenticate(ClientInfo1, ignored)), +t_destroy(_) -> + Config = config(), + OtherId = list_to_binary([?AUTHN_ID, <<"-other">>]), + {ok, State0} = emqx_authn_mnesia:create(?AUTHN_ID, Config), + {ok, StateOther} = emqx_authn_mnesia:create(OtherId, Config), -% ClientInfo2 = ClientInfo1#{username => <<"myuser2">>, -% password => <<"mypassword2">>}, -% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo2, ignored)), + User = #{user_id => <<"u">>, password => <<"p">>}, -% ClientInfo3 = ClientInfo1#{username => <<"myuser3">>, -% password => <<"mypassword3">>}, -% ?assertEqual({stop, {ok, #{is_superuser => true}}}, ?AUTH:authenticate(ClientInfo3, ignored)), + {ok, _} = emqx_authn_mnesia:add_user(User, State0), + {ok, _} = emqx_authn_mnesia:add_user(User, StateOther), -% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)), -% ok. + {ok, _} = emqx_authn_mnesia:lookup_user(<<"u">>, State0), + {ok, _} = emqx_authn_mnesia:lookup_user(<<"u">>, StateOther), -% t_multi_mnesia_authenticator(_) -> -% AuthenticatorName1 = <<"myauthenticator1">>, -% AuthenticatorConfig1 = #{name => AuthenticatorName1, -% mechanism => 'password-based', -% server_type => 'built-in-database', -% user_id_type => username, -% password_hash_algorithm => #{ -% name => sha256 -% }}, -% AuthenticatorName2 = <<"myauthenticator2">>, -% AuthenticatorConfig2 = #{name => AuthenticatorName2, -% mechanism => 'password-based', -% server_type => 'built-in-database', -% user_id_type => clientid, -% password_hash_algorithm => #{ -% name => sha256 -% }}, -% {ok, #{name := AuthenticatorName1, id := ID1}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig1), -% {ok, #{name := AuthenticatorName2, id := ID2}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig2), + ok = emqx_authn_mnesia:destroy(State0), -% ?assertMatch({ok, #{user_id := <<"myuser">>}}, -% ?AUTH:add_user(?CHAIN, ID1, -% #{user_id => <<"myuser">>, -% password => <<"mypass1">>})), -% ?assertMatch({ok, #{user_id := <<"myclient">>}}, -% ?AUTH:add_user(?CHAIN, ID2, -% #{user_id => <<"myclient">>, -% password => <<"mypass2">>})), + {ok, State1} = emqx_authn_mnesia:create(?AUTHN_ID, Config), + {error,not_found} = emqx_authn_mnesia:lookup_user(<<"u">>, State1), + {ok, _} = emqx_authn_mnesia:lookup_user(<<"u">>, StateOther). 
-% ClientInfo1 = #{username => <<"myuser">>, -% clientid => <<"myclient">>, -% password => <<"mypass1">>}, -% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo1, ignored)), -% ?assertEqual(ok, ?AUTH:move_authenticator(?CHAIN, ID2, top)), +t_authenticate(_) -> + Config = config(), + {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config), -% ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo1, ignored)), -% ClientInfo2 = ClientInfo1#{password => <<"mypass2">>}, -% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo2, ignored)), + User = #{user_id => <<"u">>, password => <<"p">>}, + {ok, _} = emqx_authn_mnesia:add_user(User, State), -% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID1)), -% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID2)), -% ok. + {ok, _} = emqx_authn_mnesia:authenticate( + #{username => <<"u">>, password => <<"p">>}, + State), + {error, bad_username_or_password} = emqx_authn_mnesia:authenticate( + #{username => <<"u">>, password => <<"badpass">>}, + State), + ignore = emqx_authn_mnesia:authenticate( + #{clientid => <<"u">>, password => <<"p">>}, + State). + +t_add_user(_) -> + Config = config(), + {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config), + + User = #{user_id => <<"u">>, password => <<"p">>}, + {ok, _} = emqx_authn_mnesia:add_user(User, State), + {error, already_exist} = emqx_authn_mnesia:add_user(User, State). + +t_delete_user(_) -> + Config = config(), + {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config), + + {error, not_found} = emqx_authn_mnesia:delete_user(<<"u">>, State), + User = #{user_id => <<"u">>, password => <<"p">>}, + {ok, _} = emqx_authn_mnesia:add_user(User, State), + + ok = emqx_authn_mnesia:delete_user(<<"u">>, State), + {error, not_found} = emqx_authn_mnesia:delete_user(<<"u">>, State). + +t_update_user(_) -> + Config = config(), + {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config), + + User = #{user_id => <<"u">>, password => <<"p">>}, + {ok, _} = emqx_authn_mnesia:add_user(User, State), + + {error, not_found} = emqx_authn_mnesia:update_user(<<"u1">>, #{password => <<"p1">>}, State), + {ok, + #{user_id := <<"u">>, + is_superuser := true}} = emqx_authn_mnesia:update_user( + <<"u">>, + #{password => <<"p1">>, is_superuser => true}, + State), + + {ok, _} = emqx_authn_mnesia:authenticate( + #{username => <<"u">>, password => <<"p1">>}, + State), + + {ok, #{is_superuser := true}} = emqx_authn_mnesia:lookup_user(<<"u">>, State). + +t_list_users(_) -> + Config = config(), + {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config), + + Users = [#{user_id => <<"u1">>, password => <<"p">>}, + #{user_id => <<"u2">>, password => <<"p">>}, + #{user_id => <<"u3">>, password => <<"p">>}], + + lists:foreach( + fun(U) -> {ok, _} = emqx_authn_mnesia:add_user(U, State) end, + Users), + + {ok, + #{data := [#{user_id := _}, #{user_id := _}], + meta := #{page := 1, limit := 2, count := 3}}} = emqx_authn_mnesia:list_users( + #{<<"page">> => 1, <<"limit">> => 2}, + State), + {ok, + #{data := [#{user_id := _}], + meta := #{page := 2, limit := 2, count := 3}}} = emqx_authn_mnesia:list_users( + #{<<"page">> => 2, <<"limit">> => 2}, + State). 
+ +t_import_users(_) -> + Config0 = config(), + Config = Config0#{password_hash_algorithm => #{name => sha256}}, + {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, Config), + + ok = emqx_authn_mnesia:import_users( + data_filename(<<"user-credentials.json">>), + State), + + ok = emqx_authn_mnesia:import_users( + data_filename(<<"user-credentials.csv">>), + State), + + {error, {unsupported_file_format, _}} = emqx_authn_mnesia:import_users( + <<"/file/with/unknown.extension">>, + State), + + {error, unknown_file_format} = emqx_authn_mnesia:import_users( + <<"/file/with/no/extension">>, + State), + + {error, enoent} = emqx_authn_mnesia:import_users( + <<"/file/that/not/exist.json">>, + State), + + {error, bad_format} = emqx_authn_mnesia:import_users( + data_filename(<<"user-credentials-malformed-0.json">>), + State), + + {error, {_, invalid_json}} = emqx_authn_mnesia:import_users( + data_filename(<<"user-credentials-malformed-1.json">>), + State), + + {error, bad_format} = emqx_authn_mnesia:import_users( + data_filename(<<"user-credentials-malformed.csv">>), + State). + +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ + +data_filename(Name) -> + Dir = code:lib_dir(emqx_authn, test), + filename:join([Dir, <<"data">>, Name]). + +config() -> + #{user_id_type => username, + password_hash_algorithm => #{name => bcrypt, + salt_rounds => 8} + }. diff --git a/apps/emqx_authn/test/emqx_authn_mysql_SUITE.erl b/apps/emqx_authn/test/emqx_authn_mysql_SUITE.erl new file mode 100644 index 000000000..9073dd38a --- /dev/null +++ b/apps/emqx_authn/test/emqx_authn_mysql_SUITE.erl @@ -0,0 +1,419 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_authn_mysql_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include("emqx_authn.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). + + +-define(MYSQL_HOST, "mysql"). +-define(MYSQL_PORT, 3306). +-define(MYSQL_RESOURCE, <<"emqx_authn_mysql_SUITE">>). + +-define(PATH, [authentication]). + +all() -> + [{group, require_seeds}, t_create, t_create_invalid]. + +groups() -> + [{require_seeds, [], [t_authenticate, t_update, t_destroy]}]. + +init_per_testcase(_, Config) -> + emqx_authentication:initialize_authentication(?GLOBAL, []), + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + Config. + +init_per_group(require_seeds, Config) -> + ok = init_seeds(), + Config. + +end_per_group(require_seeds, Config) -> + ok = drop_seeds(), + Config. 
+ +init_per_suite(Config) -> + case emqx_authn_test_lib:is_tcp_server_available(?MYSQL_HOST, ?MYSQL_PORT) of + true -> + ok = emqx_common_test_helpers:start_apps([emqx_authn]), + ok = start_apps([emqx_resource, emqx_connector]), + {ok, _} = emqx_resource:create_local( + ?MYSQL_RESOURCE, + emqx_connector_mysql, + mysql_config()), + Config; + false -> + {skip, no_mysql} + end. + +end_per_suite(_Config) -> + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + ok = emqx_resource:remove_local(?MYSQL_RESOURCE), + ok = stop_apps([emqx_resource, emqx_connector]), + ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + +%%------------------------------------------------------------------------------ +%% Tests +%%------------------------------------------------------------------------------ + +t_create(_Config) -> + AuthConfig = raw_mysql_auth_config(), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + {ok, [#{provider := emqx_authn_mysql}]} = emqx_authentication:list_authenticators(?GLOBAL). + +t_create_invalid(_Config) -> + AuthConfig = raw_mysql_auth_config(), + + InvalidConfigs = + [ + maps:without([server], AuthConfig), + AuthConfig#{server => <<"unknownhost:3333">>}, + AuthConfig#{password => <<"wrongpass">>}, + AuthConfig#{database => <<"wrongdatabase">>} + ], + + lists:foreach( + fun(Config) -> + {error, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, Config}), + + {ok, []} = emqx_authentication:list_authenticators(?GLOBAL) + end, + InvalidConfigs). + +t_authenticate(_Config) -> + ok = lists:foreach( + fun(Sample) -> + ct:pal("test_user_auth sample: ~p", [Sample]), + test_user_auth(Sample) + end, + user_seeds()). + +test_user_auth(#{credentials := Credentials0, + config_params := SpecificConfgParams, + result := Result}) -> + AuthConfig = maps:merge(raw_mysql_auth_config(), SpecificConfgParams), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + Credentials = Credentials0#{ + listener => 'tcp:default', + protocol => mqtt + }, + + ?assertEqual(Result, emqx_access_control:authenticate(Credentials)), + + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL). + +t_destroy(_Config) -> + AuthConfig = raw_mysql_auth_config(), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + {ok, [#{provider := emqx_authn_mysql, state := State}]} + = emqx_authentication:list_authenticators(?GLOBAL), + + {ok, _} = emqx_authn_mysql:authenticate( + #{username => <<"plain">>, + password => <<"plain">> + }, + State), + + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + + % Authenticator should not be usable anymore + ?assertException( + error, + _, + emqx_authn_mysql:authenticate( + #{username => <<"plain">>, + password => <<"plain">> + }, + State)). 
+ +t_update(_Config) -> + CorrectConfig = raw_mysql_auth_config(), + IncorrectConfig = + CorrectConfig#{ + query => <<"SELECT password_hash, salt, is_superuser_str as is_superuser + FROM wrong_table where username = ${username} LIMIT 1">>}, + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, IncorrectConfig}), + + {error, not_authorized} = emqx_access_control:authenticate( + #{username => <<"plain">>, + password => <<"plain">>, + listener => 'tcp:default', + protocol => mqtt + }), + + % We update with config with correct query, provider should update and work properly + {ok, _} = emqx:update_config( + ?PATH, + {update_authenticator, ?GLOBAL, <<"password-based:mysql">>, CorrectConfig}), + + {ok,_} = emqx_access_control:authenticate( + #{username => <<"plain">>, + password => <<"plain">>, + listener => 'tcp:default', + protocol => mqtt + }). + +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ + +raw_mysql_auth_config() -> + #{ + mechanism => <<"password-based">>, + password_hash_algorithm => <<"plain">>, + salt_position => <<"suffix">>, + enable => <<"true">>, + + backend => <<"mysql">>, + database => <<"mqtt">>, + username => <<"root">>, + password => <<"public">>, + + query => <<"SELECT password_hash, salt, is_superuser_str as is_superuser + FROM users where username = ${username} LIMIT 1">>, + server => mysql_server() + }. + +user_seeds() -> + [#{data => #{ + username => "plain", + password_hash => "plainsalt", + salt => "salt", + is_superuser_str => "1" + }, + credentials => #{ + username => <<"plain">>, + password => <<"plain">>}, + config_params => #{}, + result => {ok,#{is_superuser => true}} + }, + + #{data => #{ + username => "md5", + password_hash => "9b4d0c43d206d48279e69b9ad7132e22", + salt => "salt", + is_superuser_str => "0" + }, + credentials => #{ + username => <<"md5">>, + password => <<"md5">> + }, + config_params => #{ + password_hash_algorithm => <<"md5">>, + salt_position => <<"suffix">> + }, + result => {ok,#{is_superuser => false}} + }, + + #{data => #{ + username => "sha256", + password_hash => "ac63a624e7074776d677dd61a003b8c803eb11db004d0ec6ae032a5d7c9c5caf", + salt => "salt", + is_superuser_int => 1 + }, + credentials => #{ + clientid => <<"sha256">>, + password => <<"sha256">> + }, + config_params => #{ + query => <<"SELECT password_hash, salt, is_superuser_int as is_superuser + FROM users where username = ${clientid} LIMIT 1">>, + password_hash_algorithm => <<"sha256">>, + salt_position => <<"prefix">> + }, + result => {ok,#{is_superuser => true}} + }, + + #{data => #{ + username => <<"bcrypt">>, + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser_int => 0 + }, + credentials => #{ + username => <<"bcrypt">>, + password => <<"bcrypt">> + }, + config_params => #{ + query => <<"SELECT password_hash, salt, is_superuser_int as is_superuser + FROM users where username = ${username} LIMIT 1">>, + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> % should be ignored + }, + result => {ok,#{is_superuser => false}} + }, + + #{data => #{ + username => <<"bcrypt">>, + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve" + }, + credentials => #{ + username => <<"bcrypt">>, + password => <<"bcrypt">> + }, + config_params => #{ + query => <<"SELECT 
password_hash, salt, is_superuser_int as is_superuser + FROM users where username = ${username} LIMIT 1">>, + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> % should be ignored + }, + result => {ok,#{is_superuser => false}} + }, + + #{data => #{ + username => <<"bcrypt0">>, + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser_str => "0" + }, + credentials => #{ + username => <<"bcrypt0">>, + password => <<"bcrypt">> + }, + config_params => #{ + % clientid variable & username credentials + query => <<"SELECT password_hash, salt, is_superuser_int as is_superuser + FROM users where username = ${clientid} LIMIT 1">>, + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> + }, + result => {error,not_authorized} + }, + + #{data => #{ + username => <<"bcrypt1">>, + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser_str => "0" + }, + credentials => #{ + username => <<"bcrypt1">>, + password => <<"bcrypt">> + }, + config_params => #{ + % Bad keys in query + query => <<"SELECT 1 AS unknown_field + FROM users where username = ${username} LIMIT 1">>, + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> + }, + result => {error,not_authorized} + }, + + #{data => #{ + username => <<"bcrypt2">>, + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser => "0" + }, + credentials => #{ + username => <<"bcrypt2">>, + % Wrong password + password => <<"wrongpass">> + }, + config_params => #{ + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> + }, + result => {error,bad_username_or_password} + } + ]. + +init_seeds() -> + ok = drop_seeds(), + ok = q("CREATE TABLE users( + username VARCHAR(255), + password_hash VARCHAR(255), + salt VARCHAR(255), + is_superuser_str VARCHAR(255), + is_superuser_int TINYINT)"), + + Fields = [username, password_hash, salt, is_superuser_str, is_superuser_int], + InsertQuery = "INSERT INTO users(username, password_hash, salt, " + " is_superuser_str, is_superuser_int) VALUES(?, ?, ?, ?, ?)", + + lists:foreach( + fun(#{data := Values}) -> + Params = [maps:get(F, Values, null) || F <- Fields], + ok = q(InsertQuery, Params) + end, + user_seeds()). + +q(Sql) -> + emqx_resource:query( + ?MYSQL_RESOURCE, + {sql, Sql}). + +q(Sql, Params) -> + emqx_resource:query( + ?MYSQL_RESOURCE, + {sql, Sql, Params}). + +drop_seeds() -> + ok = q("DROP TABLE IF EXISTS users"). + +mysql_server() -> + iolist_to_binary( + io_lib:format( + "~s:~b", + [?MYSQL_HOST, ?MYSQL_PORT])). + +mysql_config() -> + #{auto_reconnect => true, + database => <<"mqtt">>, + username => <<"root">>, + password => <<"public">>, + pool_size => 8, + server => {?MYSQL_HOST, ?MYSQL_PORT}, + ssl => #{enable => false} + }. + +start_apps(Apps) -> + lists:foreach(fun application:ensure_all_started/1, Apps). + +stop_apps(Apps) -> + lists:foreach(fun application:stop/1, Apps). diff --git a/apps/emqx_authn/test/emqx_authn_pgsql_SUITE.erl b/apps/emqx_authn/test/emqx_authn_pgsql_SUITE.erl new file mode 100644 index 000000000..08bb2ee2e --- /dev/null +++ b/apps/emqx_authn/test/emqx_authn_pgsql_SUITE.erl @@ -0,0 +1,477 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. 
+%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_authn_pgsql_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include("emqx_authn.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("epgsql/include/epgsql.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). + +-define(PGSQL_HOST, "pgsql"). +-define(PGSQL_PORT, 5432). +-define(PGSQL_RESOURCE, <<"emqx_authn_pgsql_SUITE">>). + +-define(PATH, [authentication]). + +all() -> + [{group, require_seeds}, t_create, t_create_invalid, t_parse_query]. + +groups() -> + [{require_seeds, [], [t_authenticate, t_update, t_destroy, t_is_superuser]}]. + +init_per_testcase(_, Config) -> + emqx_authentication:initialize_authentication(?GLOBAL, []), + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + Config. + +init_per_group(require_seeds, Config) -> + ok = init_seeds(), + Config. + +end_per_group(require_seeds, Config) -> + ok = drop_seeds(), + Config. + +init_per_suite(Config) -> + case emqx_authn_test_lib:is_tcp_server_available(?PGSQL_HOST, ?PGSQL_PORT) of + true -> + ok = emqx_common_test_helpers:start_apps([emqx_authn]), + ok = start_apps([emqx_resource, emqx_connector]), + {ok, _} = emqx_resource:create_local( + ?PGSQL_RESOURCE, + emqx_connector_pgsql, + pgsql_config()), + Config; + false -> + {skip, no_pgsql} + end. + +end_per_suite(_Config) -> + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + ok = emqx_resource:remove_local(?PGSQL_RESOURCE), + ok = stop_apps([emqx_resource, emqx_connector]), + ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + +%%------------------------------------------------------------------------------ +%% Tests +%%------------------------------------------------------------------------------ + +t_create(_Config) -> + AuthConfig = raw_pgsql_auth_config(), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + {ok, [#{provider := emqx_authn_pgsql}]} = emqx_authentication:list_authenticators(?GLOBAL). + +t_create_invalid(_Config) -> + AuthConfig = raw_pgsql_auth_config(), + + InvalidConfigs = + [ + maps:without([server], AuthConfig), + AuthConfig#{server => <<"unknownhost:3333">>}, + AuthConfig#{password => <<"wrongpass">>}, + AuthConfig#{database => <<"wrongdatabase">>} + ], + + lists:foreach( + fun(Config) -> + {error, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, Config}), + + {ok, []} = emqx_authentication:list_authenticators(?GLOBAL) + end, + InvalidConfigs). + +t_authenticate(_Config) -> + ok = lists:foreach( + fun(Sample) -> + ct:pal("test_user_auth sample: ~p", [Sample]), + test_user_auth(Sample) + end, + user_seeds()). 
+ +test_user_auth(#{credentials := Credentials0, + config_params := SpecificConfgParams, + result := Result}) -> + AuthConfig = maps:merge(raw_pgsql_auth_config(), SpecificConfgParams), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + Credentials = Credentials0#{ + listener => 'tcp:default', + protocol => mqtt + }, + + ?assertEqual(Result, emqx_access_control:authenticate(Credentials)), + + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL). + +t_destroy(_Config) -> + AuthConfig = raw_pgsql_auth_config(), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + {ok, [#{provider := emqx_authn_pgsql, state := State}]} + = emqx_authentication:list_authenticators(?GLOBAL), + + {ok, _} = emqx_authn_pgsql:authenticate( + #{username => <<"plain">>, + password => <<"plain">> + }, + State), + + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + + % Authenticator should not be usable anymore + ?assertException( + error, + _, + emqx_authn_pgsql:authenticate( + #{username => <<"plain">>, + password => <<"plain">> + }, + State)). + +t_update(_Config) -> + CorrectConfig = raw_pgsql_auth_config(), + IncorrectConfig = + CorrectConfig#{ + query => <<"SELECT password_hash, salt, is_superuser_str as is_superuser + FROM wrong_table where username = ${username} LIMIT 1">>}, + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, IncorrectConfig}), + + {error, not_authorized} = emqx_access_control:authenticate( + #{username => <<"plain">>, + password => <<"plain">>, + listener => 'tcp:default', + protocol => mqtt + }), + + % We update with config with correct query, provider should update and work properly + {ok, _} = emqx:update_config( + ?PATH, + {update_authenticator, ?GLOBAL, <<"password-based:postgresql">>, CorrectConfig}), + + {ok,_} = emqx_access_control:authenticate( + #{username => <<"plain">>, + password => <<"plain">>, + listener => 'tcp:default', + protocol => mqtt + }). + +t_is_superuser(_Config) -> + Config = raw_pgsql_auth_config(), + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, Config}), + + Checks = [ + {is_superuser_str, "0", false}, + {is_superuser_str, "", false}, + {is_superuser_str, null, false}, + {is_superuser_str, "1", true}, + {is_superuser_str, "val", true}, + + {is_superuser_int, 0, false}, + {is_superuser_int, null, false}, + {is_superuser_int, 1, true}, + {is_superuser_int, 123, true}, + + {is_superuser_bool, false, false}, + {is_superuser_bool, null, false}, + {is_superuser_bool, true, true} + ], + + lists:foreach(fun test_is_superuser/1, Checks). + +test_is_superuser({Field, Value, ExpectedValue}) -> + {ok, _} = q("DELETE FROM users"), + + UserData = #{ + username => "user", + password_hash => "plainsalt", + salt => "salt", + Field => Value + }, + + ok = create_user(UserData), + + Query = "SELECT password_hash, salt, " ++ atom_to_list(Field) ++ " as is_superuser " + "FROM users where username = ${username} LIMIT 1", + + Config = maps:put(query, Query, raw_pgsql_auth_config()), + {ok, _} = emqx:update_config( + ?PATH, + {update_authenticator, ?GLOBAL, <<"password-based:postgresql">>, Config}), + + Credentials = #{ + listener => 'tcp:default', + protocol => mqtt, + username => <<"user">>, + password => <<"plain">> + }, + + ?assertEqual( + {ok, #{is_superuser => ExpectedValue}}, + emqx_access_control:authenticate(Credentials)). 
+
+
+t_parse_query(_) ->
+    Query1 = ?PH_USERNAME,
+    ?assertEqual({<<"$1">>, [?PH_USERNAME]}, emqx_authn_pgsql:parse_query(Query1)),
+
+    Query2 = <<?PH_USERNAME/binary, ", ", ?PH_CLIENTID/binary>>,
+    ?assertEqual({<<"$1, $2">>, [?PH_USERNAME, ?PH_CLIENTID]},
+                 emqx_authn_pgsql:parse_query(Query2)),
+
+    Query3 = <<"nomatch">>,
+    ?assertEqual({<<"nomatch">>, []}, emqx_authn_pgsql:parse_query(Query3)).
+
+%%------------------------------------------------------------------------------
+%% Helpers
+%%------------------------------------------------------------------------------
+
+raw_pgsql_auth_config() ->
+    #{
+        mechanism => <<"password-based">>,
+        password_hash_algorithm => <<"plain">>,
+        salt_position => <<"suffix">>,
+        enable => <<"true">>,
+
+        backend => <<"postgresql">>,
+        database => <<"mqtt">>,
+        username => <<"root">>,
+        password => <<"public">>,
+
+        query => <<"SELECT password_hash, salt, is_superuser_str as is_superuser
+                    FROM users where username = ${username} LIMIT 1">>,
+        server => pgsql_server()
+    }.
+
+user_seeds() ->
+    [#{data => #{
+        username => "plain",
+        password_hash => "plainsalt",
+        salt => "salt",
+        is_superuser_str => "1"
+       },
+       credentials => #{
+        username => <<"plain">>,
+        password => <<"plain">>},
+       config_params => #{},
+       result => {ok,#{is_superuser => true}}
+      },
+
+     #{data => #{
+        username => "md5",
+        password_hash => "9b4d0c43d206d48279e69b9ad7132e22",
+        salt => "salt",
+        is_superuser_str => "0"
+       },
+       credentials => #{
+        username => <<"md5">>,
+        password => <<"md5">>
+       },
+       config_params => #{
+        password_hash_algorithm => <<"md5">>,
+        salt_position => <<"suffix">>
+       },
+       result => {ok,#{is_superuser => false}}
+      },
+
+     #{data => #{
+        username => "sha256",
+        password_hash => "ac63a624e7074776d677dd61a003b8c803eb11db004d0ec6ae032a5d7c9c5caf",
+        salt => "salt",
+        is_superuser_int => 1
+       },
+       credentials => #{
+        clientid => <<"sha256">>,
+        password => <<"sha256">>
+       },
+       config_params => #{
+        query => <<"SELECT password_hash, salt, is_superuser_int as is_superuser
+                    FROM users where username = ${clientid} LIMIT 1">>,
+        password_hash_algorithm => <<"sha256">>,
+        salt_position => <<"prefix">>
+       },
+       result => {ok,#{is_superuser => true}}
+      },
+
+     #{data => #{
+        username => <<"bcrypt">>,
+        password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u",
+        salt => "$2b$12$wtY3h20mUjjmeaClpqZVve",
+        is_superuser_int => 0
+       },
+       credentials => #{
+        username => <<"bcrypt">>,
+        password => <<"bcrypt">>
+       },
+       config_params => #{
+        query => <<"SELECT password_hash, salt, is_superuser_int as is_superuser
+                    FROM users where username = ${username} LIMIT 1">>,
+        password_hash_algorithm => <<"bcrypt">>,
+        salt_position => <<"suffix">> % should be ignored
+       },
+       result => {ok,#{is_superuser => false}}
+      },
+
+     #{data => #{
+        username => <<"bcrypt0">>,
+        password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u",
+        salt => "$2b$12$wtY3h20mUjjmeaClpqZVve",
+        is_superuser_str => "0"
+       },
+       credentials => #{
+        username => <<"bcrypt0">>,
+        password => <<"bcrypt">>
+       },
+       config_params => #{
+        % clientid variable & username credentials
+        query => <<"SELECT password_hash, salt, is_superuser_int as is_superuser
+                    FROM users where username = ${clientid} LIMIT 1">>,
+        password_hash_algorithm => <<"bcrypt">>,
+        salt_position => <<"suffix">>
+       },
+       result => {error,not_authorized}
+      },
+
+     #{data => #{
+        username => <<"bcrypt1">>,
+        password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u",
+        salt => "$2b$12$wtY3h20mUjjmeaClpqZVve",
+        is_superuser_str => "0"
+       },
+       credentials =>
#{ + username => <<"bcrypt1">>, + password => <<"bcrypt">> + }, + config_params => #{ + % Bad keys in query + query => <<"SELECT 1 AS unknown_field + FROM users where username = ${username} LIMIT 1">>, + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> + }, + result => {error,not_authorized} + }, + + #{data => #{ + username => <<"bcrypt2">>, + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser => "0" + }, + credentials => #{ + username => <<"bcrypt2">>, + % Wrong password + password => <<"wrongpass">> + }, + config_params => #{ + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> + }, + result => {error,bad_username_or_password} + } + ]. + +init_seeds() -> + ok = drop_seeds(), + {ok, _, _} = q("CREATE TABLE users( + username varchar(255), + password_hash varchar(255), + salt varchar(255), + is_superuser_str varchar(255), + is_superuser_int smallint, + is_superuser_bool boolean)"), + + lists:foreach( + fun(#{data := Values}) -> + ok = create_user(Values) + end, + user_seeds()). + +create_user(Values) -> + Fields = [username, password_hash, salt, is_superuser_str, is_superuser_int, is_superuser_bool], + + InsertQuery = "INSERT INTO users(username, password_hash, salt," + "is_superuser_str, is_superuser_int, is_superuser_bool) " + "VALUES($1, $2, $3, $4, $5, $6)", + + Params = [maps:get(F, Values, null) || F <- Fields], + {ok, 1} = q(InsertQuery, Params), + ok. + +q(Sql) -> + emqx_resource:query( + ?PGSQL_RESOURCE, + {sql, Sql}). + +q(Sql, Params) -> + emqx_resource:query( + ?PGSQL_RESOURCE, + {sql, Sql, Params}). + +drop_seeds() -> + {ok, _, _} = q("DROP TABLE IF EXISTS users"), + ok. + +pgsql_server() -> + iolist_to_binary( + io_lib:format( + "~s:~b", + [?PGSQL_HOST, ?PGSQL_PORT])). + +pgsql_config() -> + #{auto_reconnect => true, + database => <<"mqtt">>, + username => <<"root">>, + password => <<"public">>, + pool_size => 8, + server => {?PGSQL_HOST, ?PGSQL_PORT}, + ssl => #{enable => false} + }. + +start_apps(Apps) -> + lists:foreach(fun application:ensure_all_started/1, Apps). + +stop_apps(Apps) -> + lists:foreach(fun application:stop/1, Apps). diff --git a/apps/emqx_authn/test/emqx_authn_redis_SUITE.erl b/apps/emqx_authn/test/emqx_authn_redis_SUITE.erl new file mode 100644 index 000000000..8669080b0 --- /dev/null +++ b/apps/emqx_authn/test/emqx_authn_redis_SUITE.erl @@ -0,0 +1,390 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_authn_redis_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include("emqx_authn.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). + + +-define(REDIS_HOST, "redis"). +-define(REDIS_PORT, 6379). 
+-define(REDIS_RESOURCE, <<"emqx_authn_redis_SUITE">>). + + +-define(PATH, [authentication]). + +all() -> + [{group, require_seeds}, t_create, t_create_invalid]. + +groups() -> + [{require_seeds, [], [t_authenticate, t_update, t_destroy]}]. + +init_per_testcase(_, Config) -> + emqx_authentication:initialize_authentication(?GLOBAL, []), + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + Config. + +init_per_group(require_seeds, Config) -> + ok = init_seeds(), + Config. + +end_per_group(require_seeds, Config) -> + ok = drop_seeds(), + Config. + +init_per_suite(Config) -> + case emqx_authn_test_lib:is_tcp_server_available(?REDIS_HOST, ?REDIS_PORT) of + true -> + ok = emqx_common_test_helpers:start_apps([emqx_authn]), + ok = start_apps([emqx_resource, emqx_connector]), + {ok, _} = emqx_resource:create_local( + ?REDIS_RESOURCE, + emqx_connector_redis, + redis_config()), + Config; + false -> + {skip, no_redis} + end. + +end_per_suite(_Config) -> + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + ok = emqx_resource:remove_local(?REDIS_RESOURCE), + ok = stop_apps([emqx_resource, emqx_connector]), + ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + +%%------------------------------------------------------------------------------ +%% Tests +%%------------------------------------------------------------------------------ + +t_create(_Config) -> + {ok, []} = emqx_authentication:list_authenticators(?GLOBAL), + + AuthConfig = raw_redis_auth_config(), + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + {ok, [#{provider := emqx_authn_redis}]} = emqx_authentication:list_authenticators(?GLOBAL). + +t_create_invalid(_Config) -> + AuthConfig = raw_redis_auth_config(), + + InvalidConfigs = + [ + maps:without([server], AuthConfig), + AuthConfig#{server => <<"unknownhost:3333">>}, + AuthConfig#{password => <<"wrongpass">>}, + AuthConfig#{database => <<"5678">>}, + AuthConfig#{ + query => <<"MGET password_hash:${username} salt:${username}">>}, + AuthConfig#{ + query => <<"HMGET mqtt_user:${username} password_hash invalid_field">>}, + AuthConfig#{ + query => <<"HMGET mqtt_user:${username} salt is_superuser">>} + ], + + lists:foreach( + fun(Config) -> + {error, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, Config}), + + {ok, []} = emqx_authentication:list_authenticators(?GLOBAL) + end, + InvalidConfigs). + +t_authenticate(_Config) -> + ok = lists:foreach( + fun(Sample) -> + ct:pal("test_user_auth sample: ~p", [Sample]), + test_user_auth(Sample) + end, + user_seeds()). + +test_user_auth(#{credentials := Credentials0, + config_params := SpecificConfgParams, + result := Result}) -> + AuthConfig = maps:merge(raw_redis_auth_config(), SpecificConfgParams), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + Credentials = Credentials0#{ + listener => 'tcp:default', + protocol => mqtt + }, + + ?assertEqual(Result, emqx_access_control:authenticate(Credentials)), + + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL). 
+ +t_destroy(_Config) -> + AuthConfig = raw_redis_auth_config(), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig}), + + {ok, [#{provider := emqx_authn_redis, state := State}]} + = emqx_authentication:list_authenticators(?GLOBAL), + + {ok, _} = emqx_authn_redis:authenticate( + #{username => <<"plain">>, + password => <<"plain">> + }, + State), + + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL), + + % Authenticator should not be usable anymore + ?assertException( + error, + _, + emqx_authn_redis:authenticate( + #{username => <<"plain">>, + password => <<"plain">> + }, + State)). + +t_update(_Config) -> + CorrectConfig = raw_redis_auth_config(), + IncorrectConfig = + CorrectConfig#{ + query => <<"HMGET invalid_key:${username} password_hash salt is_superuser">>}, + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, IncorrectConfig}), + + {error, not_authorized} = emqx_access_control:authenticate( + #{username => <<"plain">>, + password => <<"plain">>, + listener => 'tcp:default', + protocol => mqtt + }), + + % We update with config with correct query, provider should update and work properly + {ok, _} = emqx:update_config( + ?PATH, + {update_authenticator, ?GLOBAL, <<"password-based:redis">>, CorrectConfig}), + + {ok,_} = emqx_access_control:authenticate( + #{username => <<"plain">>, + password => <<"plain">>, + listener => 'tcp:default', + protocol => mqtt + }). + +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ + +raw_redis_auth_config() -> + #{ + mechanism => <<"password-based">>, + password_hash_algorithm => <<"plain">>, + salt_position => <<"suffix">>, + enable => <<"true">>, + + backend => <<"redis">>, + query => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>, + database => <<"1">>, + password => <<"public">>, + server => redis_server() + }. 
+ +user_seeds() -> + [#{data => #{ + password_hash => "plainsalt", + salt => "salt", + is_superuser => "1" + }, + credentials => #{ + username => <<"plain">>, + password => <<"plain">>}, + key => "mqtt_user:plain", + config_params => #{}, + result => {ok,#{is_superuser => true}} + }, + + #{data => #{ + password_hash => "9b4d0c43d206d48279e69b9ad7132e22", + salt => "salt", + is_superuser => "0" + }, + credentials => #{ + username => <<"md5">>, + password => <<"md5">> + }, + key => "mqtt_user:md5", + config_params => #{ + password_hash_algorithm => <<"md5">>, + salt_position => <<"suffix">> + }, + result => {ok,#{is_superuser => false}} + }, + + #{data => #{ + password_hash => "ac63a624e7074776d677dd61a003b8c803eb11db004d0ec6ae032a5d7c9c5caf", + salt => "salt", + is_superuser => "1" + }, + credentials => #{ + clientid => <<"sha256">>, + password => <<"sha256">> + }, + key => "mqtt_user:sha256", + config_params => #{ + query => <<"HMGET mqtt_user:${clientid} password_hash salt is_superuser">>, + password_hash_algorithm => <<"sha256">>, + salt_position => <<"prefix">> + }, + result => {ok,#{is_superuser => true}} + }, + + #{data => #{ + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser => "0" + }, + credentials => #{ + username => <<"bcrypt">>, + password => <<"bcrypt">> + }, + key => "mqtt_user:bcrypt", + config_params => #{ + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> % should be ignored + }, + result => {ok,#{is_superuser => false}} + }, + + #{data => #{ + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser => "0" + }, + credentials => #{ + username => <<"bcrypt0">>, + password => <<"bcrypt">> + }, + key => "mqtt_user:bcrypt0", + config_params => #{ + % clientid variable & username credentials + query => <<"HMGET mqtt_client:${clientid} password_hash salt is_superuser">>, + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> + }, + result => {error,not_authorized} + }, + + #{data => #{ + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser => "0" + }, + credentials => #{ + username => <<"bcrypt1">>, + password => <<"bcrypt">> + }, + key => "mqtt_user:bcrypt1", + config_params => #{ + % Bad key in query + query => <<"HMGET badkey:${username} password_hash salt is_superuser">>, + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> + }, + result => {error,not_authorized} + }, + + #{data => #{ + password_hash => "$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u", + salt => "$2b$12$wtY3h20mUjjmeaClpqZVve", + is_superuser => "0" + }, + credentials => #{ + username => <<"bcrypt2">>, + % Wrong password + password => <<"wrongpass">> + }, + key => "mqtt_user:bcrypt2", + config_params => #{ + query => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>, + password_hash_algorithm => <<"bcrypt">>, + salt_position => <<"suffix">> + }, + result => {error,bad_username_or_password} + } + ]. + +init_seeds() -> + ok = drop_seeds(), + lists:foreach( + fun(#{key := UserKey, data := Values}) -> + lists:foreach(fun({Key, Value}) -> + q(["HSET", UserKey, atom_to_list(Key), Value]) + end, + maps:to_list(Values)) + end, + user_seeds()). + +q(Command) -> + emqx_resource:query( + ?REDIS_RESOURCE, + {cmd, Command}). 
+ +drop_seeds() -> + lists:foreach( + fun(#{key := UserKey}) -> + q(["DEL", UserKey]) + end, + user_seeds()). + +redis_server() -> + iolist_to_binary( + io_lib:format( + "~s:~b", + [?REDIS_HOST, ?REDIS_PORT])). + +redis_config() -> + #{auto_reconnect => true, + database => 1, + pool_size => 8, + redis_type => single, + password => "public", + server => {?REDIS_HOST, ?REDIS_PORT}, + ssl => #{enable => false} + }. + +start_apps(Apps) -> + lists:foreach(fun application:ensure_all_started/1, Apps). + +stop_apps(Apps) -> + lists:foreach(fun application:stop/1, Apps). diff --git a/apps/emqx_authn/test/emqx_authn_test_lib.erl b/apps/emqx_authn/test/emqx_authn_test_lib.erl index e30854318..b14821a9c 100644 --- a/apps/emqx_authn/test/emqx_authn_test_lib.erl +++ b/apps/emqx_authn/test/emqx_authn_test_lib.erl @@ -16,23 +16,53 @@ -module(emqx_authn_test_lib). +-include("emqx_authn.hrl"). + -compile(nowarn_export_all). -compile(export_all). +-define(DEFAULT_CHECK_AVAIL_TIMEOUT, 1000). + +authenticator_example(Id) -> + #{Id := #{value := Example}} = emqx_authn_api:authenticator_examples(), + Example. + http_example() -> -""" -{ - mechanism = \"password-based\" - backend = http - method = post - url = \"http://127.0.0.2:8080\" - headers = {\"content-type\" = \"application/json\"} - body = {username = \"${username}\", - password = \"${password}\"} - pool_size = 8 - connect_timeout = 5000 - request_timeout = 5000 - enable_pipelining = true - ssl = {enable = false} -} -""". + authenticator_example('password-based:http'). + +built_in_database_example() -> + authenticator_example('password-based:built-in-database'). + +jwt_example() -> + authenticator_example(jwt). + +delete_authenticators(Path, Chain) -> + case emqx_authentication:list_authenticators(Chain) of + {error, _} -> ok; + {ok, Authenticators} -> + lists:foreach( + fun(#{id := ID}) -> + emqx:update_config( + Path, + {delete_authenticator, Chain, ID}, + #{rawconf_with_defaults => true}) + end, + Authenticators) + end. + +delete_config(ID) -> + {ok, _} = + emqx:update_config( + [authentication], + {delete_authenticator, ?GLOBAL, ID}, + #{rawconf_with_defaults => false}). + +is_tcp_server_available(Host, Port) -> + case gen_tcp:connect(Host, Port, [], ?DEFAULT_CHECK_AVAIL_TIMEOUT) of + {ok, Socket} -> + gen_tcp:close(Socket), + true; + {error, _} -> + false + end. + diff --git a/apps/emqx_authz/include/emqx_authz.hrl b/apps/emqx_authz/include/emqx_authz.hrl index c22b93c48..371d56e30 100644 --- a/apps/emqx_authz/include/emqx_authz.hrl +++ b/apps/emqx_authz/include/emqx_authz.hrl @@ -67,3 +67,52 @@ -define(AUTHZ_METRICS(K), ?METRICS(authz_metrics, K)). -define(CONF_KEY_PATH, [authorization, sources]). + +-define(USERNAME_RULES_EXAMPLE, #{username => user1, + rules => [ #{topic => <<"test/toopic/1">>, + permission => <<"allow">>, + action => <<"publish">> + } + , #{topic => <<"test/toopic/2">>, + permission => <<"allow">>, + action => <<"subscribe">> + } + , #{topic => <<"eq test/#">>, + permission => <<"deny">>, + action => <<"all">> + } + ] + }). +-define(CLIENTID_RULES_EXAMPLE, #{clientid => client1, + rules => [ #{topic => <<"test/toopic/1">>, + permission => <<"allow">>, + action => <<"publish">> + } + , #{topic => <<"test/toopic/2">>, + permission => <<"allow">>, + action => <<"subscribe">> + } + , #{topic => <<"eq test/#">>, + permission => <<"deny">>, + action => <<"all">> + } + ] + }). 
+-define(ALL_RULES_EXAMPLE, #{rules => [ #{topic => <<"test/toopic/1">>, + permission => <<"allow">>, + action => <<"publish">> + } + , #{topic => <<"test/toopic/2">>, + permission => <<"allow">>, + action => <<"subscribe">> + } + , #{topic => <<"eq test/#">>, + permission => <<"deny">>, + action => <<"all">> + } + ] + }). +-define(META_EXAMPLE, #{ page => 1 + , limit => 100 + , count => 1 + }). diff --git a/apps/emqx_authz/src/emqx_authz.erl b/apps/emqx_authz/src/emqx_authz.erl index cfabdefa9..4496e0299 100644 --- a/apps/emqx_authz/src/emqx_authz.erl +++ b/apps/emqx_authz/src/emqx_authz.erl @@ -36,7 +36,7 @@ , authorize/5 ]). --export([post_config_update/4, pre_config_update/2]). +-export([post_config_update/5, pre_config_update/3]). -export([acl_conf_file/0]). @@ -127,13 +127,13 @@ do_update({_, Sources}, _Conf) when is_list(Sources)-> do_update({Op, Sources}, Conf) -> error({bad_request, #{op => Op, sources => Sources, conf => Conf}}). -pre_config_update(Cmd, Conf) -> +pre_config_update(_, Cmd, Conf) -> {ok, do_update(Cmd, Conf)}. -post_config_update(_, undefined, _Conf, _AppEnvs) -> +post_config_update(_, _, undefined, _Conf, _AppEnvs) -> ok; -post_config_update(Cmd, NewSources, _OldSource, _AppEnvs) -> +post_config_update(_, Cmd, NewSources, _OldSource, _AppEnvs) -> ok = do_post_update(Cmd, NewSources), ok = emqx_authz_cache:drain_cache(). diff --git a/apps/emqx_authz/src/emqx_authz_api_mnesia.erl b/apps/emqx_authz/src/emqx_authz_api_mnesia.erl index 77d5f155f..5448cbfd8 100644 --- a/apps/emqx_authz/src/emqx_authz_api_mnesia.erl +++ b/apps/emqx_authz/src/emqx_authz_api_mnesia.erl @@ -21,492 +21,252 @@ -include("emqx_authz.hrl"). -include_lib("emqx/include/logger.hrl"). -include_lib("stdlib/include/ms_transform.hrl"). +-include_lib("typerefl/include/types.hrl"). --define(EXAMPLE_USERNAME, #{username => user1, - rules => [ #{topic => <<"test/toopic/1">>, - permission => <<"allow">>, - action => <<"publish">> - } - , #{topic => <<"test/toopic/2">>, - permission => <<"allow">>, - action => <<"subscribe">> - } - , #{topic => <<"eq test/#">>, - permission => <<"deny">>, - action => <<"all">> - } - ] - }). --define(EXAMPLE_CLIENTID, #{clientid => client1, - rules => [ #{topic => <<"test/toopic/1">>, - permission => <<"allow">>, - action => <<"publish">> - } - , #{topic => <<"test/toopic/2">>, - permission => <<"allow">>, - action => <<"subscribe">> - } - , #{topic => <<"eq test/#">>, - permission => <<"deny">>, - action => <<"all">> - } - ] - }). --define(EXAMPLE_ALL , #{rules => [ #{topic => <<"test/toopic/1">>, - permission => <<"allow">>, - action => <<"publish">> - } - , #{topic => <<"test/toopic/2">>, - permission => <<"allow">>, - action => <<"subscribe">> - } - , #{topic => <<"eq test/#">>, - permission => <<"deny">>, - action => <<"all">> - } - ] - }). -define(FORMAT_USERNAME_FUN, {?MODULE, format_by_username}). -define(FORMAT_CLIENTID_FUN, {?MODULE, format_by_clientid}). - -export([ api_spec/0 - , purge/2 - , users/2 - , user/2 + , paths/0 + , schema/1 + , fields/1 + ]). + +%% operation funs +-export([ users/2 , clients/2 + , user/2 , client/2 , all/2 + , purge/2 ]). -export([ format_by_username/1 , format_by_clientid/1]). +-define(BAD_REQUEST, 'BAD_REQUEST'). +-define(NOT_FOUND, 'NOT_FOUND'). + +-define(TYPE_REF, ref). +-define(TYPE_ARRAY, array). +-define(PAGE_QUERY_EXAMPLE, example_in_data). +-define(PUT_MAP_EXAMPLE, in_put_requestBody). +-define(POST_ARRAY_EXAMPLE, in_post_requestBody). 
+ api_spec() -> - {[ purge_api() - , users_api() - , user_api() - , clients_api() - , client_api() - , all_api() - ], definitions()}. + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). -definitions() -> - Rules = #{ - type => array, - items => #{ - type => object, - required => [topic, permission, action], - properties => #{ - topic => #{ - type => string, - example => <<"test/topic/1">> - }, - permission => #{ - type => string, - enum => [<<"allow">>, <<"deny">>], - example => <<"allow">> - }, - action => #{ - type => string, - enum => [<<"publish">>, <<"subscribe">>, <<"all">>], - example => <<"publish">> - } - } - } - }, - Username = #{ - type => object, - required => [username, rules], - properties => #{ - username => #{ - type => string, - example => <<"username">> - }, - rules => minirest:ref(<<"rules">>) - } - }, - Clientid = #{ - type => object, - required => [clientid, rules], - properties => #{ - clientid => #{ - type => string, - example => <<"clientid">> - }, - rules => minirest:ref(<<"rules">>) - } - }, - ALL = #{ - type => object, - required => [rules], - properties => #{ - rules => minirest:ref(<<"rules">>) - } - }, - [ #{<<"rules">> => Rules} - , #{<<"username">> => Username} - , #{<<"clientid">> => Clientid} - , #{<<"all">> => ALL} - ]. +paths() -> + [ "/authorization/sources/built-in-database/username" + , "/authorization/sources/built-in-database/clientid" + , "/authorization/sources/built-in-database/username/:username" + , "/authorization/sources/built-in-database/clientid/:clientid" + , "/authorization/sources/built-in-database/all" + , "/authorization/sources/built-in-database/purge-all"]. -users_api() -> - Metadata = #{ +%%-------------------------------------------------------------------- +%% Schema for each URI +%%-------------------------------------------------------------------- + +schema("/authorization/sources/built-in-database/username") -> + #{ + 'operationId' => users, get => #{ - description => "Show the list of record for username", - parameters => [ - #{ - name => page, - in => query, - required => false, - description => <<"Page Index">>, - schema => #{type => integer} - }, - #{ - name => limit, - in => query, - required => false, - description => <<"Page limit">>, - schema => #{type => integer} - } - ], + tags => [<<"authorization">>], + description => <<"Show the list of record for username">>, + parameters => [ hoconsc:ref(emqx_dashboard_swagger, page) + , hoconsc:ref(emqx_dashboard_swagger, limit)], responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => #{ - type => array, - items => minirest:ref(<<"username">>) - }, - examples => #{ - username => #{ - summary => <<"Username">>, - value => jsx:encode([?EXAMPLE_USERNAME]) - } - } - } - } - } + 200 => swagger_with_example( {username_response_data, ?TYPE_REF} + , {username, ?PAGE_QUERY_EXAMPLE}) } }, post => #{ - description => "Add new records for username", - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => #{ - type => array, - items => #{ - oneOf => [ minirest:ref(<<"username">>) - ] - } - }, - examples => #{ - username => #{ - summary => <<"Username">>, - value => jsx:encode([?EXAMPLE_USERNAME]) - } - } - } - } - }, + tags => [<<"authorization">>], + description => <<"Add new records for username">>, + 'requestBody' => swagger_with_example( {rules_for_username, ?TYPE_ARRAY} + , {username, ?POST_ARRAY_EXAMPLE}), responses => #{ - <<"204">> => #{description => <<"Created">>}, - <<"400">> => 
emqx_mgmt_util:bad_request() + 204 => <<"Created">>, + 400 => emqx_dashboard_swagger:error_codes( [?BAD_REQUEST] + , <<"Bad username or bad rule schema">>) } } - }, - {"/authorization/sources/built-in-database/username", Metadata, users}. - -clients_api() -> - Metadata = #{ + }; +schema("/authorization/sources/built-in-database/clientid") -> + #{ + 'operationId' => clients, get => #{ - description => "Show the list of record for clientid", - parameters => [ - #{ - name => page, - in => query, - required => false, - description => <<"Page Index">>, - schema => #{type => integer} - }, - #{ - name => limit, - in => query, - required => false, - description => <<"Page limit">>, - schema => #{type => integer} - } - ], + tags => [<<"authorization">>], + description => <<"Show the list of record for clientid">>, + parameters => [ hoconsc:ref(emqx_dashboard_swagger, page) + , hoconsc:ref(emqx_dashboard_swagger, limit)], responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => #{ - type => array, - items => minirest:ref(<<"clientid">>) - }, - examples => #{ - clientid => #{ - summary => <<"Clientid">>, - value => jsx:encode([?EXAMPLE_CLIENTID]) - } - } - } - } - } + 200 => swagger_with_example( {clientid_response_data, ?TYPE_REF} + , {clientid, ?PAGE_QUERY_EXAMPLE}) } }, post => #{ - description => "Add new records for clientid", - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => #{ - type => array, - items => #{ - oneOf => [ minirest:ref(<<"clientid">>) - ] - } - }, - examples => #{ - clientid => #{ - summary => <<"Clientid">>, - value => jsx:encode([?EXAMPLE_CLIENTID]) - } - } - } - } - }, + tags => [<<"authorization">>], + description => <<"Add new records for clientid">>, + 'requestBody' => swagger_with_example( {rules_for_clientid, ?TYPE_ARRAY} + , {clientid, ?POST_ARRAY_EXAMPLE}), responses => #{ - <<"204">> => #{description => <<"Created">>}, - <<"400">> => emqx_mgmt_util:bad_request() + 204 => <<"Created">>, + 400 => emqx_dashboard_swagger:error_codes( [?BAD_REQUEST] + , <<"Bad clientid or bad rule schema">>) } } - }, - {"/authorization/sources/built-in-database/clientid", Metadata, clients}. 
- -user_api() -> - Metadata = #{ + }; +schema("/authorization/sources/built-in-database/username/:username") -> + #{ + 'operationId' => user, get => #{ - description => "Get record info for username", - parameters => [ - #{ - name => username, - in => path, - schema => #{ - type => string - }, - required => true - } - ], + tags => [<<"authorization">>], + description => <<"Get record info for username">>, + parameters => [hoconsc:ref(username)], responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"username">>), - examples => #{ - username => #{ - summary => <<"Username">>, - value => jsx:encode(?EXAMPLE_USERNAME) - } - } - } - } - }, - <<"404">> => emqx_mgmt_util:bad_request(<<"Not Found">>) + 200 => swagger_with_example( {rules_for_username, ?TYPE_REF} + , {username, ?PUT_MAP_EXAMPLE}), + 404 => emqx_dashboard_swagger:error_codes([?NOT_FOUND], <<"Not Found">>) } }, put => #{ - description => "Set record for username", - parameters => [ - #{ - name => username, - in => path, - schema => #{ - type => string - }, - required => true - } - ], - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"username">>), - examples => #{ - username => #{ - summary => <<"Username">>, - value => jsx:encode(?EXAMPLE_USERNAME) - } - } - } - } - }, + tags => [<<"authorization">>], + description => <<"Set record for username">>, + parameters => [hoconsc:ref(username)], + 'requestBody' => swagger_with_example( {rules_for_username, ?TYPE_REF} + , {username, ?PUT_MAP_EXAMPLE}), responses => #{ - <<"204">> => #{description => <<"Updated">>}, - <<"400">> => emqx_mgmt_util:bad_request() + 204 => <<"Updated">>, + 400 => emqx_dashboard_swagger:error_codes( [?BAD_REQUEST] + , <<"Bad username or bad rule schema">>) } }, delete => #{ - description => "Delete one record for username", - parameters => [ - #{ - name => username, - in => path, - schema => #{ - type => string - }, - required => true - } - ], + tags => [<<"authorization">>], + description => <<"Delete one record for username">>, + parameters => [hoconsc:ref(username)], responses => #{ - <<"204">> => #{description => <<"No Content">>}, - <<"400">> => emqx_mgmt_util:bad_request() + 204 => <<"Deleted">>, + 400 => emqx_dashboard_swagger:error_codes([?BAD_REQUEST], <<"Bad username">>) } } - }, - {"/authorization/sources/built-in-database/username/:username", Metadata, user}. 
- -client_api() -> - Metadata = #{ + }; +schema("/authorization/sources/built-in-database/clientid/:clientid") -> + #{ + 'operationId' => client, get => #{ - description => "Get record info for clientid", - parameters => [ - #{ - name => clientid, - in => path, - schema => #{ - type => string - }, - required => true - } - ], + tags => [<<"authorization">>], + description => <<"Get record info for clientid">>, + parameters => [hoconsc:ref(clientid)], responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"clientid">>), - examples => #{ - clientid => #{ - summary => <<"Clientid">>, - value => jsx:encode(?EXAMPLE_CLIENTID) - } - } - } - } - }, - <<"404">> => emqx_mgmt_util:bad_request(<<"Not Found">>) + 200 => swagger_with_example( {rules_for_clientid, ?TYPE_REF} + , {clientid, ?PUT_MAP_EXAMPLE}), + 404 => emqx_dashboard_swagger:error_codes([?NOT_FOUND], <<"Not Found">>) } }, put => #{ - description => "Set record for clientid", - parameters => [ - #{ - name => clientid, - in => path, - schema => #{ - type => string - }, - required => true - } - ], - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"clientid">>), - examples => #{ - clientid => #{ - summary => <<"Clientid">>, - value => jsx:encode(?EXAMPLE_CLIENTID) - } - } - } - } - }, + tags => [<<"authorization">>], + description => <<"Set record for clientid">>, + parameters => [hoconsc:ref(clientid)], + 'requestBody' => swagger_with_example( {rules_for_clientid, ?TYPE_REF} + , {clientid, ?PUT_MAP_EXAMPLE}), responses => #{ - <<"204">> => #{description => <<"Updated">>}, - <<"400">> => emqx_mgmt_util:bad_request() + 204 => <<"Updated">>, + 400 => emqx_dashboard_swagger:error_codes( + [?BAD_REQUEST], <<"Bad clientid or bad rule schema">>) } }, delete => #{ - description => "Delete one record for clientid", - parameters => [ - #{ - name => clientid, - in => path, - schema => #{ - type => string - }, - required => true - } - ], + tags => [<<"authorization">>], + description => <<"Delete one record for clientid">>, + parameters => [hoconsc:ref(clientid)], responses => #{ - <<"204">> => #{description => <<"No Content">>}, - <<"400">> => emqx_mgmt_util:bad_request() + 204 => <<"Deleted">>, + 400 => emqx_dashboard_swagger:error_codes([?BAD_REQUEST], <<"Bad clientid">>) } } - }, - {"/authorization/sources/built-in-database/clientid/:clientid", Metadata, client}. 
- -all_api() -> - Metadata = #{ + }; +schema("/authorization/sources/built-in-database/all") -> + #{ + 'operationId' => all, get => #{ - description => "Show the list of rules for all", + tags => [<<"authorization">>], + description => <<"Show the list of rules for all">>, responses => #{ - <<"200">> => #{ - description => <<"OK">>, - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"clientid">>), - examples => #{ - clientid => #{ - summary => <<"All">>, - value => jsx:encode(?EXAMPLE_ALL) - } - } - } - } - } + 200 => swagger_with_example({rules_for_all, ?TYPE_REF}, {all, ?PUT_MAP_EXAMPLE}) } }, put => #{ - description => "Set the list of rules for all", - requestBody => #{ - content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"all">>), - examples => #{ - all => #{ - summary => <<"All">>, - value => jsx:encode(?EXAMPLE_ALL) - } - } - } - } - }, + tags => [<<"authorization">>], + description => <<"Set the list of rules for all">>, + 'requestBody' => + swagger_with_example({rules_for_all, ?TYPE_REF}, {all, ?PUT_MAP_EXAMPLE}), responses => #{ - <<"204">> => #{description => <<"Created">>}, - <<"400">> => emqx_mgmt_util:bad_request() + 204 => <<"Created">>, + 400 => emqx_dashboard_swagger:error_codes([?BAD_REQUEST], <<"Bad rule schema">>) } } - }, - {"/authorization/sources/built-in-database/all", Metadata, all}. - -purge_api() -> - Metadata = #{ + }; +schema("/authorization/sources/built-in-database/purge-all") -> + #{ + 'operationId' => purge, delete => #{ - description => "Purge all records", + tags => [<<"authorization">>], + description => <<"Purge all records">>, responses => #{ - <<"204">> => #{description => <<"No Content">>}, - <<"400">> => emqx_mgmt_util:bad_request() + 204 => <<"Deleted">>, + 400 => emqx_dashboard_swagger:error_codes([?BAD_REQUEST], <<"Bad Request">>) } } - }, - {"/authorization/sources/built-in-database/purge-all", Metadata, purge}. + }. + +fields(rule_item) -> + [ {topic, hoconsc:mk( string() + , #{ required => true + , desc => <<"Rule on specific topic">> + , example => <<"test/topic/1">>})} + , {permission, hoconsc:mk( hoconsc:enum([allow, deny]) + , #{desc => <<"Permission">>, required => true, example => allow})} + , {action, hoconsc:mk( hoconsc:enum([publish, subscribe, all]) + , #{ required => true, example => publish + , desc => <<"Authorized action">> })} ]; +fields(clientid) -> + [ {clientid, hoconsc:mk( binary() + , #{ in => path, required => true + , desc => <<"ClientID">>, example => <<"client1">>})} + ]; +fields(username) -> + [ {username, hoconsc:mk( binary() + , #{ in => path, required => true + , desc => <<"Username">>, example => <<"user1">>})} + ]; +fields(rules_for_username) -> + [ {rules, hoconsc:mk(hoconsc:array(hoconsc:ref(rule_item)), #{})} + ] ++ fields(username); +fields(username_response_data) -> + [ {data, hoconsc:mk(hoconsc:array(hoconsc:ref(rules_for_username)), #{})} + , {meta, hoconsc:ref(meta)} + ]; +fields(rules_for_clientid) -> + [ {rules, hoconsc:mk(hoconsc:array(hoconsc:ref(rule_item)), #{})} + ] ++ fields(clientid); +fields(clientid_response_data) -> + [ {data, hoconsc:mk(hoconsc:array(hoconsc:ref(rules_for_clientid)), #{})} + , {meta, hoconsc:ref(meta)} + ]; +fields(rules_for_all) -> + [ {rules, hoconsc:mk(hoconsc:array(hoconsc:ref(rule_item)), #{})} + ]; +fields(meta) -> + emqx_dashboard_swagger:fields(page) + ++ emqx_dashboard_swagger:fields(limit) + ++ [{count, hoconsc:mk(integer(), #{example => 1})}]. 
+ +%%-------------------------------------------------------------------- +%% HTTP API +%%-------------------------------------------------------------------- users(get, #{query_string := PageParams}) -> MatchSpec = ets:fun2ms( @@ -609,7 +369,8 @@ purge(delete, _) -> {204}; [#{<<"enable">> := true}] -> {400, #{code => <<"BAD_REQUEST">>, - message => <<"'built-in-database' type source must be disabled before purge.">>}}; + message => + <<"'built-in-database' type source must be disabled before purge.">>}}; [] -> {404, #{code => <<"BAD_REQUEST">>, message => <<"'built-in-database' type source is not found.">> @@ -642,6 +403,43 @@ format_by_clientid([{clientid, Clientid}, {rules, Rules}]) -> atom(B) when is_binary(B) -> try binary_to_existing_atom(B, utf8) catch - _ -> binary_to_atom(B) + _Error:_Expection -> binary_to_atom(B) end; atom(A) when is_atom(A) -> A. + +%%-------------------------------------------------------------------- +%% Internal functions +%%-------------------------------------------------------------------- + +swagger_with_example({Ref, TypeP}, {_Name, _Type} = Example) -> + emqx_dashboard_swagger:schema_with_examples( + case TypeP of + ?TYPE_REF -> hoconsc:ref(?MODULE, Ref); + ?TYPE_ARRAY -> hoconsc:array(hoconsc:ref(?MODULE, Ref)) + end, + rules_example(Example)). + +rules_example({ExampleName, ExampleType}) -> + {Summary, Example} = + case ExampleName of + username -> {<<"Username">>, ?USERNAME_RULES_EXAMPLE}; + clientid -> {<<"ClientID">>, ?CLIENTID_RULES_EXAMPLE}; + all -> {<<"All">>, ?ALL_RULES_EXAMPLE} + end, + Value = + case ExampleType of + ?PAGE_QUERY_EXAMPLE -> #{ + data => [Example], + meta => ?META_EXAMPLE + }; + ?PUT_MAP_EXAMPLE -> + Example; + ?POST_ARRAY_EXAMPLE -> + [Example] + end, + #{ + 'password-based:built-in-database' => #{ + summary => Summary, + value => Value + } + }. 
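%% For reference (illustrative only, derived from the macros in emqx_authz.hrl and
%% rules_example/1 above): the page-query example for the username endpoint is
%% roughly the following map, which emqx_dashboard_swagger:schema_with_examples/2
%% presumably embeds into the generated OpenAPI document in place of the old
%% hand-written minirest definitions:
%%   rules_example({username, ?PAGE_QUERY_EXAMPLE}) =:=
%%       #{'password-based:built-in-database' =>
%%             #{summary => <<"Username">>,
%%               value => #{data => [?USERNAME_RULES_EXAMPLE],
%%                          meta => #{page => 1, limit => 100, count => 1}}}}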
diff --git a/apps/emqx_authz/src/emqx_authz_api_schema.erl b/apps/emqx_authz/src/emqx_authz_api_schema.erl index b05476b03..b9e6b2def 100644 --- a/apps/emqx_authz/src/emqx_authz_api_schema.erl +++ b/apps/emqx_authz/src/emqx_authz_api_schema.erl @@ -20,18 +20,18 @@ definitions() -> Sources = #{ - oneOf => [ minirest:ref(<<"http">>) - , minirest:ref(<<"built-in-database">>) - , minirest:ref(<<"mongo_single">>) - , minirest:ref(<<"mongo_rs">>) - , minirest:ref(<<"mongo_sharded">>) - , minirest:ref(<<"mysql">>) - , minirest:ref(<<"postgresql">>) - , minirest:ref(<<"redis_single">>) - , minirest:ref(<<"redis_sentinel">>) - , minirest:ref(<<"redis_cluster">>) - , minirest:ref(<<"file">>) - ] + 'oneOf' => [ minirest:ref(<<"http">>) + , minirest:ref(<<"built-in-database">>) + , minirest:ref(<<"mongo_single">>) + , minirest:ref(<<"mongo_rs">>) + , minirest:ref(<<"mongo_sharded">>) + , minirest:ref(<<"mysql">>) + , minirest:ref(<<"postgresql">>) + , minirest:ref(<<"redis_single">>) + , minirest:ref(<<"redis_sentinel">>) + , minirest:ref(<<"redis_cluster">>) + , minirest:ref(<<"file">>) + ] }, SSL = #{ type => object, @@ -86,7 +86,7 @@ definitions() -> pool_type => #{ type => string, enum => [<<"random">>, <<"hash">>], - example => <<"random">> + example => <<"hash">> }, pool_size => #{type => integer}, enable_pipelining => #{type => boolean}, @@ -119,6 +119,7 @@ definitions() -> type => boolean, example => true }, + srv_record => #{type => boolean, example => false, default => false}, collection => #{type => string}, selector => #{type => object}, mongo_type => #{type => string, @@ -175,6 +176,7 @@ definitions() -> type => boolean, example => true }, + srv_record => #{type => boolean, example => false, default => false}, collection => #{type => string}, selector => #{type => object}, mongo_type => #{type => string, @@ -232,6 +234,7 @@ definitions() -> type => boolean, example => true }, + srv_record => #{type => boolean, example => false, default => false}, collection => #{type => string}, selector => #{type => object}, mongo_type => #{type => string, @@ -479,7 +482,9 @@ definitions() -> type => array, items => #{ type => string, - example => <<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}.\n{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">> + example => + <<"{allow,{username,\"^dashboard?\"},","subscribe,[\"$SYS/#\"]}.\n", + "{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">> } }, path => #{ diff --git a/apps/emqx_authz/src/emqx_authz_api_sources.erl b/apps/emqx_authz/src/emqx_authz_api_sources.erl index 25135fa34..23c6077fa 100644 --- a/apps/emqx_authz/src/emqx_authz_api_sources.erl +++ b/apps/emqx_authz/src/emqx_authz_api_sources.erl @@ -32,7 +32,8 @@ -define(EXAMPLE_FILE, #{type=> file, enable => true, - rules => <<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}.\n{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">> + rules => <<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}.\n", + "{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">> }). 
-define(EXAMPLE_RETURNED, @@ -90,7 +91,7 @@ sources_api() -> }, post => #{ description => "Add new source", - requestBody => #{ + 'requestBody' => #{ content => #{ 'application/json' => #{ schema => minirest:ref(<<"sources">>), @@ -114,7 +115,7 @@ sources_api() -> }, put => #{ description => "Update all sources", - requestBody => #{ + 'requestBody' => #{ content => #{ 'application/json' => #{ schema => #{ @@ -206,7 +207,7 @@ source_api() -> required => true } ], - requestBody => #{ + 'requestBody' => #{ content => #{ 'application/json' => #{ schema => minirest:ref(<<"sources">>), @@ -250,7 +251,7 @@ source_api() -> } ], responses => #{ - <<"204">> => #{description => <<"No Content">>}, + <<"204">> => #{description => <<"Deleted">>}, <<"400">> => emqx_mgmt_util:bad_request() } } @@ -279,7 +280,7 @@ move_source_api() -> required => true } ], - requestBody => #{ + 'requestBody' => #{ content => #{ 'application/json' => #{ schema => #{ @@ -287,7 +288,7 @@ move_source_api() -> required => [position], properties => #{ position => #{ - oneOf => [ + 'oneOf' => [ #{type => string, enum => [<<"top">>, <<"bottom">>] }, @@ -326,7 +327,8 @@ move_source_api() -> {"/authorization/sources/:type/move", Metadata, move_source}. sources(get, _) -> - Sources = lists:foldl(fun (#{<<"type">> := <<"file">>, <<"enable">> := Enable, <<"path">> := Path}, AccIn) -> + Sources = lists:foldl(fun (#{<<"type">> := <<"file">>, + <<"enable">> := Enable, <<"path">> := Path}, AccIn) -> case file:read_file(Path) of {ok, Rules} -> lists:append(AccIn, [#{type => file, @@ -345,7 +347,8 @@ sources(get, _) -> {200, #{sources => Sources}}; sources(post, #{body := #{<<"type">> := <<"file">>, <<"rules">> := Rules}}) -> {ok, Filename} = write_file(acl_conf_file(), Rules), - update_config(?CMD_PREPEND, [#{<<"type">> => <<"file">>, <<"enable">> => true, <<"path">> => Filename}]); + update_config(?CMD_PREPEND, [#{<<"type">> => <<"file">>, + <<"enable">> => true, <<"path">> => Filename}]); sources(post, #{body := Body}) when is_map(Body) -> update_config(?CMD_PREPEND, [maybe_write_certs(Body)]); sources(put, #{body := Body}) when is_list(Body) -> @@ -377,9 +380,13 @@ source(get, #{bindings := #{type := Type}}) -> [Source] -> {200, read_certs(Source)} end; -source(put, #{bindings := #{type := <<"file">>}, body := #{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable}}) -> +source(put, #{bindings := #{type := <<"file">>}, body := #{<<"type">> := <<"file">>, + <<"rules">> := Rules, + <<"enable">> := Enable}}) -> {ok, Filename} = write_file(maps:get(path, emqx_authz:lookup(file), ""), Rules), - case emqx_authz:update({?CMD_REPLACE, <<"file">>}, #{<<"type">> => <<"file">>, <<"enable">> => Enable, <<"path">> => Filename}) of + case emqx_authz:update({?CMD_REPLACE, <<"file">>}, #{<<"type">> => <<"file">>, + <<"enable">> => Enable, + <<"path">> => Filename}) of {ok, _} -> {204}; {error, Reason} -> {400, #{code => <<"BAD_REQUEST">>, @@ -405,7 +412,8 @@ get_raw_sources() -> RawSources = emqx:get_raw_config([authorization, sources], []), Schema = #{roots => emqx_authz_schema:fields("authorization"), fields => #{}}, Conf = #{<<"sources">> => RawSources}, - #{<<"sources">> := Sources} = hocon_schema:check_plain(Schema, Conf, #{only_fill_defaults => true}), + #{<<"sources">> := Sources} = hocon_schema:check_plain(Schema, Conf, + #{only_fill_defaults => true}), Sources. 
get_raw_source(Type) -> @@ -429,10 +437,11 @@ update_config(Cmd, Sources) -> read_certs(#{<<"ssl">> := SSL} = Source) -> case emqx_tls_lib:file_content_as_options(SSL) of - {ok, NewSSL} -> Source#{<<"ssl">> => NewSSL}; {error, Reason} -> ?SLOG(error, Reason#{msg => failed_to_readd_ssl_file}), - throw(failed_to_readd_ssl_file) + throw(failed_to_readd_ssl_file); + NewSSL -> + Source#{<<"ssl">> => NewSSL} end; read_certs(Source) -> Source. diff --git a/apps/emqx_authz/src/emqx_authz_http.erl b/apps/emqx_authz/src/emqx_authz_http.erl index 93aa634f3..4c6af402c 100644 --- a/apps/emqx_authz/src/emqx_authz_http.erl +++ b/apps/emqx_authz/src/emqx_authz_http.erl @@ -19,6 +19,7 @@ -include("emqx_authz.hrl"). -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). %% AuthZ Callbacks -export([ authorize/4 @@ -42,7 +43,7 @@ authorize(Client, PubSub, Topic, annotations := #{id := ResourceID} } = Source) -> Request = case Method of - get -> + get -> Query = maps:get(query, Url, ""), Path1 = replvar(Path ++ "?" ++ Query, PubSub, Topic, Client), {Path1, maps:to_list(Headers)}; @@ -68,7 +69,9 @@ query_string([], Acc) -> <<$&, Str/binary>> = iolist_to_binary(lists:reverse(Acc)), Str; query_string([{K, V} | More], Acc) -> - query_string(More, [["&", emqx_http_lib:uri_encode(K), "=", emqx_http_lib:uri_encode(V)] | Acc]). + query_string( More + , [ ["&", emqx_http_lib:uri_encode(K), "=", emqx_http_lib:uri_encode(V)] + | Acc]). serialize_body(<<"application/json">>, Body) -> jsx:encode(Body); @@ -84,13 +87,20 @@ replvar(Str0, PubSub, Topic, }) when is_list(Str0); is_binary(Str0) -> NTopic = emqx_http_lib:uri_encode(Topic), - Str1 = re:replace(Str0, "%c", Clientid, [global, {return, binary}]), - Str2 = re:replace(Str1, "%u", bin(Username), [global, {return, binary}]), - Str3 = re:replace(Str2, "%a", inet_parse:ntoa(IpAddress), [global, {return, binary}]), - Str4 = re:replace(Str3, "%r", bin(Protocol), [global, {return, binary}]), - Str5 = re:replace(Str4, "%m", Mountpoint, [global, {return, binary}]), - Str6 = re:replace(Str5, "%t", NTopic, [global, {return, binary}]), - Str7 = re:replace(Str6, "%A", bin(PubSub), [global, {return, binary}]), + Str1 = re:replace( Str0, ?PH_S_CLIENTID + , Clientid, [global, {return, binary}]), + Str2 = re:replace( Str1, ?PH_S_USERNAME + , bin(Username), [global, {return, binary}]), + Str3 = re:replace( Str2, ?PH_S_HOST + , inet_parse:ntoa(IpAddress), [global, {return, binary}]), + Str4 = re:replace( Str3, ?PH_S_PROTONAME + , bin(Protocol), [global, {return, binary}]), + Str5 = re:replace( Str4, ?PH_S_MOUNTPOINT + , Mountpoint, [global, {return, binary}]), + Str6 = re:replace( Str5, ?PH_S_TOPIC + , NTopic, [global, {return, binary}]), + Str7 = re:replace( Str6, ?PH_S_ACTION + , bin(PubSub), [global, {return, binary}]), Str7. bin(A) when is_atom(A) -> atom_to_binary(A, utf8); diff --git a/apps/emqx_authz/src/emqx_authz_mongodb.erl b/apps/emqx_authz/src/emqx_authz_mongodb.erl index c599ba5ad..ec34a266c 100644 --- a/apps/emqx_authz/src/emqx_authz_mongodb.erl +++ b/apps/emqx_authz/src/emqx_authz_mongodb.erl @@ -19,6 +19,7 @@ -include("emqx_authz.hrl"). -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). 
%% AuthZ Callbacks -export([ authorize/4 @@ -40,12 +41,16 @@ authorize(Client, PubSub, Topic, }) -> case emqx_resource:query(ResourceID, {find, Collection, replvar(Selector, Client), #{}}) of {error, Reason} -> - ?SLOG(error, #{msg => "query_mongo_error", reason => Reason, resource_id => ResourceID}), + ?SLOG(error, #{msg => "query_mongo_error", + reason => Reason, + resource_id => ResourceID}), nomatch; [] -> nomatch; Rows -> Rules = [ emqx_authz_rule:compile({Permission, all, Action, Topics}) - || #{<<"topics">> := Topics, <<"permission">> := Permission, <<"action">> := Action} <- Rows], + || #{<<"topics">> := Topics, + <<"permission">> := Permission, + <<"action">> := Action} <- Rows], do_authorize(Client, PubSub, Topic, Rules) end. @@ -62,19 +67,23 @@ replvar(Selector, #{clientid := Clientid, peerhost := IpAddress }) -> Fun = fun - _Fun(K, V, AccIn) when is_map(V) -> maps:put(K, maps:fold(_Fun, AccIn, V), AccIn); - _Fun(K, V, AccIn) when is_list(V) -> + InFun(K, V, AccIn) when is_map(V) -> + maps:put(K, maps:fold(InFun, AccIn, V), AccIn); + InFun(K, V, AccIn) when is_list(V) -> maps:put(K, [ begin [{K1, V1}] = maps:to_list(M), - _Fun(K1, V1, AccIn) + InFun(K1, V1, AccIn) end || M <- V], AccIn); - _Fun(K, V, AccIn) when is_binary(V) -> - V1 = re:replace(V, "%c", bin(Clientid), [global, {return, binary}]), - V2 = re:replace(V1, "%u", bin(Username), [global, {return, binary}]), - V3 = re:replace(V2, "%a", inet_parse:ntoa(IpAddress), [global, {return, binary}]), + InFun(K, V, AccIn) when is_binary(V) -> + V1 = re:replace( V, ?PH_S_CLIENTID + , bin(Clientid), [global, {return, binary}]), + V2 = re:replace( V1, ?PH_S_USERNAME + , bin(Username), [global, {return, binary}]), + V3 = re:replace( V2, ?PH_S_HOST + , inet_parse:ntoa(IpAddress), [global, {return, binary}]), maps:put(K, V3, AccIn); - _Fun(K, V, AccIn) -> maps:put(K, V, AccIn) + InFun(K, V, AccIn) -> maps:put(K, V, AccIn) end, maps:fold(Fun, #{}, Selector). @@ -82,4 +91,3 @@ bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(B) when is_binary(B) -> B; bin(L) when is_list(L) -> list_to_binary(L); bin(X) -> X. - diff --git a/apps/emqx_authz/src/emqx_authz_mysql.erl b/apps/emqx_authz/src/emqx_authz_mysql.erl index 6a5845db7..6821f15c3 100644 --- a/apps/emqx_authz/src/emqx_authz_mysql.erl +++ b/apps/emqx_authz/src/emqx_authz_mysql.erl @@ -19,6 +19,7 @@ -include("emqx_authz.hrl"). -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). %% AuthZ Callbacks -export([ description/0 @@ -55,7 +56,9 @@ authorize(Client, PubSub, Topic, {ok, Columns, Rows} -> do_authorize(Client, PubSub, Topic, Columns, Rows); {error, Reason} -> - ?SLOG(error, #{msg => "query_mysql_error", reason => Reason, resource_id => ResourceID}), + ?SLOG(error, #{ msg => "query_mysql_error" + , reason => Reason + , resource_id => ResourceID}), nomatch end. 
@@ -87,16 +90,16 @@ replvar(Params, ClientInfo) -> replvar([], _ClientInfo, Acc) -> lists:reverse(Acc); -replvar(["'%u'" | Params], ClientInfo, Acc) -> +replvar([?PH_S_USERNAME | Params], ClientInfo, Acc) -> replvar(Params, ClientInfo, [safe_get(username, ClientInfo) | Acc]); -replvar(["'%c'" | Params], ClientInfo = #{clientid := ClientId}, Acc) -> +replvar([?PH_S_CLIENTID | Params], ClientInfo = #{clientid := ClientId}, Acc) -> replvar(Params, ClientInfo, [ClientId | Acc]); -replvar(["'%a'" | Params], ClientInfo = #{peerhost := IpAddr}, Acc) -> +replvar([?PH_S_PEERHOST | Params], ClientInfo = #{peerhost := IpAddr}, Acc) -> replvar(Params, ClientInfo, [inet_parse:ntoa(IpAddr) | Acc]); -replvar(["'%C'" | Params], ClientInfo, Acc) -> - replvar(Params, ClientInfo, [safe_get(cn, ClientInfo)| Acc]); -replvar(["'%d'" | Params], ClientInfo, Acc) -> - replvar(Params, ClientInfo, [safe_get(dn, ClientInfo)| Acc]); +replvar([?PH_S_CERT_CN_NAME | Params], ClientInfo, Acc) -> + replvar(Params, ClientInfo, [safe_get(cn, ClientInfo) | Acc]); +replvar([?PH_S_CERT_SUBJECT | Params], ClientInfo, Acc) -> + replvar(Params, ClientInfo, [safe_get(dn, ClientInfo) | Acc]); replvar([Param | Params], ClientInfo, Acc) -> replvar(Params, ClientInfo, [Param | Acc]). @@ -107,4 +110,3 @@ bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(B) when is_binary(B) -> B; bin(L) when is_list(L) -> list_to_binary(L); bin(X) -> X. - diff --git a/apps/emqx_authz/src/emqx_authz_postgresql.erl b/apps/emqx_authz/src/emqx_authz_postgresql.erl index 2b74eb56e..d88b35b41 100644 --- a/apps/emqx_authz/src/emqx_authz_postgresql.erl +++ b/apps/emqx_authz/src/emqx_authz_postgresql.erl @@ -19,6 +19,7 @@ -include("emqx_authz.hrl"). -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). %% AuthZ Callbacks -export([ description/0 @@ -59,7 +60,9 @@ authorize(Client, PubSub, Topic, {ok, Columns, Rows} -> do_authorize(Client, PubSub, Topic, Columns, Rows); {error, Reason} -> - ?SLOG(error, #{msg => "query_postgresql_error", reason => Reason, resource_id => ResourceID}), + ?SLOG(error, #{ msg => "query_postgresql_error" + , reason => Reason + , resource_id => ResourceID}), nomatch end. @@ -92,16 +95,16 @@ replvar(Params, ClientInfo) -> replvar([], _ClientInfo, Acc) -> lists:reverse(Acc); -replvar(["'%u'" | Params], ClientInfo, Acc) -> +replvar([?PH_S_USERNAME | Params], ClientInfo, Acc) -> replvar(Params, ClientInfo, [safe_get(username, ClientInfo) | Acc]); -replvar(["'%c'" | Params], ClientInfo = #{clientid := ClientId}, Acc) -> +replvar([?PH_S_CLIENTID | Params], ClientInfo = #{clientid := ClientId}, Acc) -> replvar(Params, ClientInfo, [ClientId | Acc]); -replvar(["'%a'" | Params], ClientInfo = #{peerhost := IpAddr}, Acc) -> +replvar([?PH_S_PEERHOST | Params], ClientInfo = #{peerhost := IpAddr}, Acc) -> replvar(Params, ClientInfo, [inet_parse:ntoa(IpAddr) | Acc]); -replvar(["'%C'" | Params], ClientInfo, Acc) -> - replvar(Params, ClientInfo, [safe_get(cn, ClientInfo)| Acc]); -replvar(["'%d'" | Params], ClientInfo, Acc) -> - replvar(Params, ClientInfo, [safe_get(dn, ClientInfo)| Acc]); +replvar([?PH_S_CERT_CN_NAME | Params], ClientInfo, Acc) -> + replvar(Params, ClientInfo, [safe_get(cn, ClientInfo) | Acc]); +replvar([?PH_S_CERT_SUBJECT | Params], ClientInfo, Acc) -> + replvar(Params, ClientInfo, [safe_get(dn, ClientInfo) | Acc]); replvar([Param | Params], ClientInfo, Acc) -> replvar(Params, ClientInfo, [Param | Acc]). 
@@ -112,4 +115,3 @@ bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(B) when is_binary(B) -> B; bin(L) when is_list(L) -> list_to_binary(L); bin(X) -> X. - diff --git a/apps/emqx_authz/src/emqx_authz_redis.erl b/apps/emqx_authz/src/emqx_authz_redis.erl index 44c1f6f41..50e8c9a7d 100644 --- a/apps/emqx_authz/src/emqx_authz_redis.erl +++ b/apps/emqx_authz/src/emqx_authz_redis.erl @@ -19,6 +19,7 @@ -include("emqx_authz.hrl"). -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). %% AuthZ Callbacks -export([ authorize/4 @@ -43,7 +44,9 @@ authorize(Client, PubSub, Topic, {ok, Rows} -> do_authorize(Client, PubSub, Topic, Rows); {error, Reason} -> - ?SLOG(error, #{msg => "query_redis_error", reason => Reason, resource_id => ResourceID}), + ?SLOG(error, #{ msg => "query_redis_error" + , reason => Reason + , resource_id => ResourceID}), nomatch end. @@ -58,13 +61,13 @@ do_authorize(Client, PubSub, Topic, [TopicFilter, Action | Tail]) -> end. replvar(Cmd, Client = #{cn := CN}) -> - replvar(repl(Cmd, "%C", CN), maps:remove(cn, Client)); + replvar(repl(Cmd, ?PH_S_CERT_CN_NAME, CN), maps:remove(cn, Client)); replvar(Cmd, Client = #{dn := DN}) -> - replvar(repl(Cmd, "%d", DN), maps:remove(dn, Client)); + replvar(repl(Cmd, ?PH_S_CERT_SUBJECT, DN), maps:remove(dn, Client)); replvar(Cmd, Client = #{clientid := ClientId}) -> - replvar(repl(Cmd, "%c", ClientId), maps:remove(clientid, Client)); + replvar(repl(Cmd, ?PH_S_CLIENTID, ClientId), maps:remove(clientid, Client)); replvar(Cmd, Client = #{username := Username}) -> - replvar(repl(Cmd, "%u", Username), maps:remove(username, Client)); + replvar(repl(Cmd, ?PH_S_USERNAME, Username), maps:remove(username, Client)); replvar(Cmd, _) -> Cmd. diff --git a/apps/emqx_authz/src/emqx_authz_rule.erl b/apps/emqx_authz/src/emqx_authz_rule.erl index 5f4dcfcab..5b6885e22 100644 --- a/apps/emqx_authz/src/emqx_authz_rule.erl +++ b/apps/emqx_authz/src/emqx_authz_rule.erl @@ -18,6 +18,7 @@ -include("emqx_authz.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). -ifdef(TEST). -compile(export_all). @@ -32,9 +33,12 @@ -export_type([rule/0]). -compile({Permission, all}) when ?ALLOW_DENY(Permission) -> {Permission, all, all, [compile_topic(<<"#">>)]}; -compile({Permission, Who, Action, TopicFilters}) when ?ALLOW_DENY(Permission), ?PUBSUB(Action), is_list(TopicFilters) -> - {atom(Permission), compile_who(Who), atom(Action), [compile_topic(Topic) || Topic <- TopicFilters]}. +compile({Permission, all}) + when ?ALLOW_DENY(Permission) -> {Permission, all, all, [compile_topic(<<"#">>)]}; +compile({Permission, Who, Action, TopicFilters}) + when ?ALLOW_DENY(Permission), ?PUBSUB(Action), is_list(TopicFilters) -> + { atom(Permission), compile_who(Who), atom(Action) + , [compile_topic(Topic) || Topic <- TopicFilters]}. compile_who(all) -> all; compile_who({user, Username}) -> compile_who({username, Username}); @@ -68,12 +72,12 @@ compile_topic(Topic) -> end. pattern(Words) -> - lists:member(<<"%u">>, Words) orelse lists:member(<<"%c">>, Words). + lists:member(?PH_USERNAME, Words) orelse lists:member(?PH_CLIENTID, Words). atom(B) when is_binary(B) -> try binary_to_existing_atom(B, utf8) catch - _ -> binary_to_atom(B) + _E:_S -> binary_to_atom(B) end; atom(A) when is_atom(A) -> A. @@ -143,11 +147,11 @@ match_who(_, _) -> false. 
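%% For reference (assumption, not confirmed by this patch): the ?PH_* macros pulled
%% in from emqx_placeholder.hrl presumably carry the 5.0 "${var}" placeholder
%% syntax, e.g. something like
%%   -define(PH_USERNAME, <<"${username}">>).   %% binary form, matched in topic words
%%   -define(PH_S_USERNAME, "${username}").     %% string form, used in cmd/SQL templates
%% so a template such as the old <<"HGETALL mqtt_authz:%u">> becomes
%% <<"HGETALL mqtt_authz:", ?PH_USERNAME/binary>> elsewhere in this change set.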
match_topics(_ClientInfo, _Topic, []) -> false; -match_topics(ClientInfo, Topic, [{pattern, PatternFilter}|Filters]) -> +match_topics(ClientInfo, Topic, [{pattern, PatternFilter} | Filters]) -> TopicFilter = feed_var(ClientInfo, PatternFilter), match_topic(emqx_topic:words(Topic), TopicFilter) orelse match_topics(ClientInfo, Topic, Filters); -match_topics(ClientInfo, Topic, [TopicFilter|Filters]) -> +match_topics(ClientInfo, Topic, [TopicFilter | Filters]) -> match_topic(emqx_topic:words(Topic), TopicFilter) orelse match_topics(ClientInfo, Topic, Filters). @@ -160,13 +164,13 @@ feed_var(ClientInfo, Pattern) -> feed_var(ClientInfo, Pattern, []). feed_var(_ClientInfo, [], Acc) -> lists:reverse(Acc); -feed_var(ClientInfo = #{clientid := undefined}, [<<"%c">>|Words], Acc) -> - feed_var(ClientInfo, Words, [<<"%c">>|Acc]); -feed_var(ClientInfo = #{clientid := ClientId}, [<<"%c">>|Words], Acc) -> - feed_var(ClientInfo, Words, [ClientId |Acc]); -feed_var(ClientInfo = #{username := undefined}, [<<"%u">>|Words], Acc) -> - feed_var(ClientInfo, Words, [<<"%u">>|Acc]); -feed_var(ClientInfo = #{username := Username}, [<<"%u">>|Words], Acc) -> - feed_var(ClientInfo, Words, [Username|Acc]); -feed_var(ClientInfo, [W|Words], Acc) -> - feed_var(ClientInfo, Words, [W|Acc]). +feed_var(ClientInfo = #{clientid := undefined}, [?PH_CLIENTID | Words], Acc) -> + feed_var(ClientInfo, Words, [?PH_CLIENTID | Acc]); +feed_var(ClientInfo = #{clientid := ClientId}, [?PH_CLIENTID | Words], Acc) -> + feed_var(ClientInfo, Words, [ClientId | Acc]); +feed_var(ClientInfo = #{username := undefined}, [?PH_USERNAME | Words], Acc) -> + feed_var(ClientInfo, Words, [?PH_USERNAME | Acc]); +feed_var(ClientInfo = #{username := Username}, [?PH_USERNAME | Words], Acc) -> + feed_var(ClientInfo, Words, [Username | Acc]); +feed_var(ClientInfo, [W | Words], Acc) -> + feed_var(ClientInfo, Words, [W | Acc]). diff --git a/apps/emqx_authz/test/emqx_authz_SUITE.erl b/apps/emqx_authz/test/emqx_authz_SUITE.erl index d7df8eaa0..130e266fb 100644 --- a/apps/emqx_authz/test/emqx_authz_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_SUITE.erl @@ -21,8 +21,7 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). - --define(CONF_DEFAULT, <<"authorization: {sources: []}">>). +-include_lib("emqx/include/emqx_placeholder.hrl"). all() -> emqx_common_test_helpers:all(?MODULE). @@ -31,35 +30,37 @@ groups() -> []. init_per_suite(Config) -> - meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx_schema, fields, fun("authorization") -> - meck:passthrough(["authorization"]) ++ - emqx_authz_schema:fields("authorization"); - (F) -> meck:passthrough([F]) - end), - meck:new(emqx_resource, [non_strict, passthrough, no_history, no_link]), meck:expect(emqx_resource, create, fun(_, _, _) -> {ok, meck_data} end), meck:expect(emqx_resource, update, fun(_, _, _, _) -> {ok, meck_data} end), meck:expect(emqx_resource, remove, fun(_) -> ok end ), - ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT), - ok = emqx_common_test_helpers:start_apps([emqx_authz]), - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], fun set_special_configs/1), Config. 
end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(?CMD_REPLACE, []), - emqx_common_test_helpers:stop_apps([emqx_authz, emqx_resource]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_authz, emqx_conf]), meck:unload(emqx_resource), - meck:unload(emqx_schema), ok. init_per_testcase(_, Config) -> {ok, _} = emqx_authz:update(?CMD_REPLACE, []), Config. +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), + ok; +set_special_configs(_App) -> + ok. + -define(SOURCE1, #{<<"type">> => <<"http">>, <<"enable">> => true, <<"url">> => <<"https://fake.com:443/">>, @@ -107,7 +108,7 @@ init_per_testcase(_, Config) -> <<"password">> => <<"ee">>, <<"auto_reconnect">> => true, <<"ssl">> => #{<<"enable">> => false}, - <<"cmd">> => <<"HGETALL mqtt_authz:%u">> + <<"cmd">> => <<"HGETALL mqtt_authz:", ?PH_USERNAME/binary>> }). -define(SOURCE6, #{<<"type">> => <<"file">>, <<"enable">> => true, @@ -153,7 +154,9 @@ t_update_source(_) -> {ok, _} = emqx_authz:update(?CMD_REPLACE, []). t_move_source(_) -> - {ok, _} = emqx_authz:update(?CMD_REPLACE, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]), + {ok, _} = emqx_authz:update(?CMD_REPLACE, + [?SOURCE1, ?SOURCE2, ?SOURCE3, + ?SOURCE4, ?SOURCE5, ?SOURCE6]), ?assertMatch([ #{type := http} , #{type := mongodb} , #{type := mysql} diff --git a/apps/emqx_authz/test/emqx_authz_api_mnesia_SUITE.erl b/apps/emqx_authz/test/emqx_authz_api_mnesia_SUITE.erl index d9b308514..b4a8f2756 100644 --- a/apps/emqx_authz/test/emqx_authz_api_mnesia_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_api_mnesia_SUITE.erl @@ -22,70 +22,10 @@ -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). --define(CONF_DEFAULT, <<""" -authorization - {sources = [ - { type = \"built-in-database\" - enable = true - } - ]} -""">>). - -define(HOST, "http://127.0.0.1:18083/"). -define(API_VERSION, "v5"). -define(BASE_PATH, "api"). --define(EXAMPLE_USERNAME, #{username => user1, - rules => [ #{topic => <<"test/toopic/1">>, - permission => <<"allow">>, - action => <<"publish">> - } - , #{topic => <<"test/toopic/2">>, - permission => <<"allow">>, - action => <<"subscribe">> - } - , #{topic => <<"eq test/#">>, - permission => <<"deny">>, - action => <<"all">> - } - ] - }). --define(EXAMPLE_CLIENTID, #{clientid => client1, - rules => [ #{topic => <<"test/toopic/1">>, - permission => <<"allow">>, - action => <<"publish">> - } - , #{topic => <<"test/toopic/2">>, - permission => <<"allow">>, - action => <<"subscribe">> - } - , #{topic => <<"eq test/#">>, - permission => <<"deny">>, - action => <<"all">> - } - ] - }). --define(EXAMPLE_ALL , #{rules => [ #{topic => <<"test/toopic/1">>, - permission => <<"allow">>, - action => <<"publish">> - } - , #{topic => <<"test/toopic/2">>, - permission => <<"allow">>, - action => <<"subscribe">> - } - , #{topic => <<"eq test/#">>, - permission => <<"deny">>, - action => <<"all">> - } - ] - }). - -roots() -> ["authorization"]. - -fields("authorization") -> - emqx_authz_schema:fields("authorization") ++ - emqx_schema:fields("authorization"). - all() -> emqx_common_test_helpers:all(?MODULE). @@ -93,13 +33,18 @@ groups() -> []. 
init_per_suite(Config) -> - ok = emqx_common_test_helpers:start_apps([emqx_authz, emqx_dashboard], - fun set_special_configs/1), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz, emqx_dashboard], + fun set_special_configs/1), Config. end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(replace, []), - emqx_common_test_helpers:stop_apps([emqx_authz, emqx_dashboard]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]), ok. set_special_configs(emqx_dashboard) -> @@ -114,9 +59,10 @@ set_special_configs(emqx_dashboard) -> emqx_config:put([emqx_dashboard], Config), ok; set_special_configs(emqx_authz) -> - ok = emqx_config:init_load(?MODULE, ?CONF_DEFAULT), {ok, _} = emqx:update_config([authorization, cache, enable], false), {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], + [#{<<"type">> => <<"built-in-database">>}]), ok; set_special_configs(_App) -> ok. @@ -126,65 +72,151 @@ set_special_configs(_App) -> %%------------------------------------------------------------------------------ t_api(_) -> - {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "username"]), [?EXAMPLE_USERNAME]), - {ok, 200, Request1} = request(get, uri(["authorization", "sources", "built-in-database", "username"]), []), - {ok, 200, Request2} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []), + {ok, 204, _} = + request( post + , uri(["authorization", "sources", "built-in-database", "username"]) + , [?USERNAME_RULES_EXAMPLE]), + {ok, 200, Request1} = + request( get + , uri(["authorization", "sources", "built-in-database", "username"]) + , []), + {ok, 200, Request2} = + request( get + , uri(["authorization", "sources", "built-in-database", "username", "user1"]) + , []), #{<<"data">> := [#{<<"username">> := <<"user1">>, <<"rules">> := Rules1}], - <<"meta">> := #{<<"count">> := 1,<<"limit">> := 100,<<"page">> := 1}} = jsx:decode(Request1), + <<"meta">> := #{<<"count">> := 1, + <<"limit">> := 100, + <<"page">> := 1}} = jsx:decode(Request1), #{<<"username">> := <<"user1">>, <<"rules">> := Rules1} = jsx:decode(Request2), ?assertEqual(3, length(Rules1)), - {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "username", "user1"]), ?EXAMPLE_USERNAME#{rules => []}), - {ok, 200, Request3} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []), + {ok, 204, _} = + request( put + , uri(["authorization", "sources", "built-in-database", "username", "user1"]) + , ?USERNAME_RULES_EXAMPLE#{rules => []}), + {ok, 200, Request3} = + request( get + , uri(["authorization", "sources", "built-in-database", "username", "user1"]) + , []), #{<<"username">> := <<"user1">>, <<"rules">> := Rules2} = jsx:decode(Request3), ?assertEqual(0, length(Rules2)), - {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []), - {ok, 404, _} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []), + {ok, 204, _} = + request( delete + , uri(["authorization", "sources", "built-in-database", "username", "user1"]) + , []), + {ok, 404, _} = + request( get + , uri(["authorization", "sources", "built-in-database", "username", "user1"]) + , []), - {ok, 204, _} = 
request(post, uri(["authorization", "sources", "built-in-database", "clientid"]), [?EXAMPLE_CLIENTID]), - {ok, 200, Request4} = request(get, uri(["authorization", "sources", "built-in-database", "clientid"]), []), - {ok, 200, Request5} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []), + {ok, 204, _} = + request( post + , uri(["authorization", "sources", "built-in-database", "clientid"]) + , [?CLIENTID_RULES_EXAMPLE]), + {ok, 200, Request4} = + request( get + , uri(["authorization", "sources", "built-in-database", "clientid"]) + , []), + {ok, 200, Request5} = + request( get + , uri(["authorization", "sources", "built-in-database", "clientid", "client1"]) + , []), #{<<"data">> := [#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3}], - <<"meta">> := #{<<"count">> := 1, <<"limit">> := 100, <<"page">> := 1}} = jsx:decode(Request4), + <<"meta">> := #{<<"count">> := 1, <<"limit">> := 100, <<"page">> := 1}} + = jsx:decode(Request4), #{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3} = jsx:decode(Request5), ?assertEqual(3, length(Rules3)), - {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), ?EXAMPLE_CLIENTID#{rules => []}), - {ok, 200, Request6} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []), + {ok, 204, _} = + request( put + , uri(["authorization", "sources", "built-in-database", "clientid", "client1"]) + , ?CLIENTID_RULES_EXAMPLE#{rules => []}), + {ok, 200, Request6} = + request( get + , uri(["authorization", "sources", "built-in-database", "clientid", "client1"]) + , []), #{<<"clientid">> := <<"client1">>, <<"rules">> := Rules4} = jsx:decode(Request6), ?assertEqual(0, length(Rules4)), - {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []), - {ok, 404, _} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []), + {ok, 204, _} = + request( delete + , uri(["authorization", "sources", "built-in-database", "clientid", "client1"]) + , []), + {ok, 404, _} = + request( get + , uri(["authorization", "sources", "built-in-database", "clientid", "client1"]) + , []), - {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "all"]), ?EXAMPLE_ALL), - {ok, 200, Request7} = request(get, uri(["authorization", "sources", "built-in-database", "all"]), []), + {ok, 204, _} = + request( put + , uri(["authorization", "sources", "built-in-database", "all"]) + , ?ALL_RULES_EXAMPLE), + {ok, 200, Request7} = + request( get + , uri(["authorization", "sources", "built-in-database", "all"]) + , []), #{<<"rules">> := Rules5} = jsx:decode(Request7), ?assertEqual(3, length(Rules5)), - {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "all"]), ?EXAMPLE_ALL#{rules => []}), - {ok, 200, Request8} = request(get, uri(["authorization", "sources", "built-in-database", "all"]), []), + {ok, 204, _} = + request( put + , uri(["authorization", "sources", "built-in-database", "all"]) + , ?ALL_RULES_EXAMPLE#{rules => []}), + {ok, 200, Request8} = + request( get + , uri(["authorization", "sources", "built-in-database", "all"]) + , []), #{<<"rules">> := Rules6} = jsx:decode(Request8), ?assertEqual(0, length(Rules6)), - {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "username"]), [ #{username => N, rules => []} || N <- lists:seq(1, 20) ]), - {ok, 200, Request9} = request(get, 
uri(["authorization", "sources", "built-in-database", "username?page=2&limit=5"]), []), + {ok, 204, _} = + request( post + , uri(["authorization", "sources", "built-in-database", "username"]) + , [ #{username => erlang:integer_to_binary(N), rules => []} + || N <- lists:seq(1, 20) ]), + {ok, 200, Request9} = + request( get + , uri(["authorization", "sources", "built-in-database", "username?page=2&limit=5"]) + , []), #{<<"data">> := Data1} = jsx:decode(Request9), ?assertEqual(5, length(Data1)), - {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "clientid"]), [ #{clientid => N, rules => []} || N <- lists:seq(1, 20) ]), - {ok, 200, Request10} = request(get, uri(["authorization", "sources", "built-in-database", "clientid?limit=5"]), []), + {ok, 204, _} = + request( post + , uri(["authorization", "sources", "built-in-database", "clientid"]) + , [ #{clientid => erlang:integer_to_binary(N), rules => []} + || N <- lists:seq(1, 20) ]), + {ok, 200, Request10} = + request( get + , uri(["authorization", "sources", "built-in-database", "clientid?limit=5"]) + , []), #{<<"data">> := Data2} = jsx:decode(Request10), ?assertEqual(5, length(Data2)), - {ok, 400, Msg1} = request(delete, uri(["authorization", "sources", "built-in-database", "purge-all"]), []), + {ok, 400, Msg1} = + request( delete + , uri(["authorization", "sources", "built-in-database", "purge-all"]) + , []), ?assertMatch({match, _}, re:run(Msg1, "must\sbe\sdisabled\sbefore")), - {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database"]), #{<<"enable">> => true}), + {ok, 204, _} = + request( put + , uri(["authorization", "sources", "built-in-database"]) + , #{<<"enable">> => true}), %% test idempotence - {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database"]), #{<<"enable">> => true}), - {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database"]), #{<<"enable">> => false}), - {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "purge-all"]), []), + {ok, 204, _} = + request( put + , uri(["authorization", "sources", "built-in-database"]) + , #{<<"enable">> => true}), + {ok, 204, _} = + request( put + , uri(["authorization", "sources", "built-in-database"]) + , #{<<"enable">> => false}), + {ok, 204, _} = + request( delete + , uri(["authorization", "sources", "built-in-database", "purge-all"]) + , []), ?assertEqual([], mnesia:dirty_all_keys(?ACL_TABLE)), ok. diff --git a/apps/emqx_authz/test/emqx_authz_api_settings_SUITE.erl b/apps/emqx_authz/test/emqx_authz_api_settings_SUITE.erl index 6e6207bbc..cf1110a2a 100644 --- a/apps/emqx_authz/test/emqx_authz_api_settings_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_api_settings_SUITE.erl @@ -22,8 +22,6 @@ -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). --define(CONF_DEFAULT, <<"authorization: {sources: []}">>). - -define(HOST, "http://127.0.0.1:18083/"). -define(API_VERSION, "v5"). -define(BASE_PATH, "api"). @@ -35,14 +33,18 @@ groups() -> []. init_per_suite(Config) -> - ok = emqx_common_test_helpers:start_apps([emqx_authz, emqx_dashboard], fun set_special_configs/1), - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), - + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz, emqx_dashboard], + fun set_special_configs/1), Config. 
end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps([emqx_resource, emqx_authz, emqx_dashboard]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]), ok. set_special_configs(emqx_dashboard) -> @@ -56,6 +58,11 @@ set_special_configs(emqx_dashboard) -> }, emqx_config:put([emqx_dashboard], Config), ok; +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), + ok; set_special_configs(_App) -> ok. diff --git a/apps/emqx_authz/test/emqx_authz_api_sources_SUITE.erl b/apps/emqx_authz/test/emqx_authz_api_sources_SUITE.erl index fb1e381e3..67e1b05da 100644 --- a/apps/emqx_authz/test/emqx_authz_api_sources_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_api_sources_SUITE.erl @@ -21,8 +21,7 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). - --define(CONF_DEFAULT, <<"authorization: {sources: []}">>). +-include_lib("emqx/include/emqx_placeholder.hrl"). -define(HOST, "http://127.0.0.1:18083/"). -define(API_VERSION, "v5"). @@ -79,11 +78,13 @@ <<"password">> => <<"ee">>, <<"auto_reconnect">> => true, <<"ssl">> => #{<<"enable">> => false}, - <<"cmd">> => <<"HGETALL mqtt_authz:%u">> + <<"cmd">> => <<"HGETALL mqtt_authz:", ?PH_USERNAME/binary>> }). -define(SOURCE6, #{<<"type">> => <<"file">>, <<"enable">> => true, - <<"rules">> => <<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}.\n{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">> + <<"rules">> => +<<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}." + "\n{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">> }). all() -> @@ -94,35 +95,29 @@ groups() -> []. init_per_suite(Config) -> - meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx_schema, fields, fun("authorization") -> - meck:passthrough(["authorization"]) ++ - emqx_authz_schema:fields("authorization"); - (F) -> meck:passthrough([F]) - end), - meck:new(emqx_resource, [non_strict, passthrough, no_history, no_link]), meck:expect(emqx_resource, create, fun(_, _, _) -> {ok, meck_data} end), - meck:expect(emqx_resource, create_dry_run, fun(emqx_connector_mysql, _) -> {ok, meck_data}; - (T, C) -> meck:passthrough([T, C]) - end), + meck:expect(emqx_resource, create_dry_run, + fun(emqx_connector_mysql, _) -> {ok, meck_data}; + (T, C) -> meck:passthrough([T, C]) + end), meck:expect(emqx_resource, update, fun(_, _, _, _) -> {ok, meck_data} end), meck:expect(emqx_resource, health_check, fun(_) -> ok end), meck:expect(emqx_resource, remove, fun(_) -> ok end ), - ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT), - - ok = emqx_common_test_helpers:start_apps([emqx_authz, emqx_dashboard], fun set_special_configs/1), - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), - + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz, emqx_dashboard], + fun set_special_configs/1), Config. 
end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(replace, []), - emqx_common_test_helpers:stop_apps([emqx_resource, emqx_authz, emqx_dashboard]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_dashboard, emqx_authz, emqx_conf]), meck:unload(emqx_resource), - meck:unload(emqx_schema), ok. set_special_configs(emqx_dashboard) -> @@ -137,7 +132,9 @@ set_special_configs(emqx_dashboard) -> emqx_config:put([emqx_dashboard], Config), ok; set_special_configs(emqx_authz) -> - emqx_config:put([authorization], #{sources => []}), + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), ok; set_special_configs(_App) -> ok. @@ -147,10 +144,11 @@ init_per_testcase(t_api, Config) -> meck:expect(emqx_misc, gen_id, fun() -> "fake" end), meck:new(emqx, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx, data_dir, fun() -> - {data_dir, Data} = lists:keyfind(data_dir, 1, Config), - Data - end), + meck:expect(emqx, data_dir, + fun() -> + {data_dir, Data} = lists:keyfind(data_dir, 1, Config), + Data + end), Config; init_per_testcase(_, Config) -> Config. @@ -168,7 +166,8 @@ t_api(_) -> {ok, 200, Result1} = request(get, uri(["authorization", "sources"]), []), ?assertEqual([], get_sources(Result1)), - {ok, 204, _} = request(put, uri(["authorization", "sources"]), [?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]), + {ok, 204, _} = request(put, uri(["authorization", "sources"]), + [?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]), {ok, 204, _} = request(post, uri(["authorization", "sources"]), ?SOURCE1), {ok, 200, Result2} = request(get, uri(["authorization", "sources"]), []), @@ -182,7 +181,8 @@ t_api(_) -> ], Sources), ?assert(filelib:is_file(emqx_authz:acl_conf_file())), - {ok, 204, _} = request(put, uri(["authorization", "sources", "http"]), ?SOURCE1#{<<"enable">> := false}), + {ok, 204, _} = request(put, uri(["authorization", "sources", "http"]), + ?SOURCE1#{<<"enable">> := false}), {ok, 200, Result3} = request(get, uri(["authorization", "sources", "http"]), []), ?assertMatch(#{<<"type">> := <<"http">>, <<"enable">> := false}, jsx:decode(Result3)), @@ -207,14 +207,28 @@ t_api(_) -> ?assert(filelib:is_file(filename:join([data_dir(), "certs", "cert-fake.pem"]))), ?assert(filelib:is_file(filename:join([data_dir(), "certs", "key-fake.pem"]))), - {ok, 204, _} = request(put, uri(["authorization", "sources", "mysql"]), ?SOURCE3#{<<"server">> := <<"192.168.1.100:3306">>}), + {ok, 204, _} = request( + put, + uri(["authorization", "sources", "mysql"]), + ?SOURCE3#{<<"server">> := <<"192.168.1.100:3306">>}), - {ok, 400, _} = request(put, uri(["authorization", "sources", "postgresql"]), ?SOURCE4#{<<"server">> := <<"fake">>}), - {ok, 400, _} = request(put, uri(["authorization", "sources", "redis"]), ?SOURCE5#{<<"servers">> := [<<"192.168.1.100:6379">>, <<"192.168.1.100:6380">>]}), + {ok, 400, _} = request( + put, + uri(["authorization", "sources", "postgresql"]), + ?SOURCE4#{<<"server">> := <<"fake">>}), + {ok, 400, _} = request( + put, + uri(["authorization", "sources", "redis"]), + ?SOURCE5#{<<"servers">> := [<<"192.168.1.100:6379">>, + <<"192.168.1.100:6380">>]}), - lists:foreach(fun(#{<<"type">> := Type}) -> - {ok, 204, _} = request(delete, uri(["authorization", "sources", binary_to_list(Type)]), []) - 
end, Sources), + lists:foreach( + fun(#{<<"type">> := Type}) -> + {ok, 204, _} = request( + delete, + uri(["authorization", "sources", binary_to_list(Type)]), + []) + end, Sources), {ok, 200, Result5} = request(get, uri(["authorization", "sources"]), []), ?assertEqual([], get_sources(Result5)), ?assertEqual([], emqx:get_config([authorization, sources])), diff --git a/apps/emqx_authz/test/emqx_authz_http_SUITE.erl b/apps/emqx_authz/test/emqx_authz_http_SUITE.erl index c0e66751a..c438b3f4b 100644 --- a/apps/emqx_authz/test/emqx_authz_http_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_http_SUITE.erl @@ -21,7 +21,6 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). --define(CONF_DEFAULT, <<"authorization: {sources: []}">>). all() -> emqx_common_test_helpers:all(?MODULE). @@ -30,22 +29,14 @@ groups() -> []. init_per_suite(Config) -> - meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx_schema, fields, fun("authorization") -> - meck:passthrough(["authorization"]) ++ - emqx_authz_schema:fields("authorization"); - (F) -> meck:passthrough([F]) - end), - meck:new(emqx_resource, [non_strict, passthrough, no_history, no_link]), meck:expect(emqx_resource, create, fun(_, _, _) -> {ok, meck_data} end), meck:expect(emqx_resource, remove, fun(_) -> ok end ), - ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT), - ok = emqx_common_test_helpers:start_apps([emqx_authz]), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], + fun set_special_configs/1), - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), Rules = [#{<<"type">> => <<"http">>, <<"url">> => <<"https://fake.com:443/">>, <<"headers">> => #{}, @@ -57,10 +48,21 @@ init_per_suite(Config) -> Config. end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(replace, []), - emqx_common_test_helpers:stop_apps([emqx_authz, emqx_resource]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_authz, emqx_conf]), meck:unload(emqx_resource), - meck:unload(emqx_schema), + ok. + +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), + ok; +set_special_configs(_App) -> ok. %%------------------------------------------------------------------------------ @@ -68,8 +70,8 @@ end_per_suite(_Config) -> %%------------------------------------------------------------------------------ t_authz(_) -> - ClientInfo = #{clientid => <<"clientid">>, - username => <<"username">>, + ClientInfo = #{clientid => <<"my-clientid">>, + username => <<"my-username">>, peerhost => {127,0,0,1}, protocol => mqtt, mountpoint => <<"fake">>, @@ -92,4 +94,3 @@ t_authz(_) -> ?assertEqual(deny, emqx_access_control:authorize(ClientInfo, publish, <<"+">>)), ok. - diff --git a/apps/emqx_authz/test/emqx_authz_mnesia_SUITE.erl b/apps/emqx_authz/test/emqx_authz_mnesia_SUITE.erl index f2562becc..49056ed68 100644 --- a/apps/emqx_authz/test/emqx_authz_mnesia_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_mnesia_SUITE.erl @@ -21,8 +21,7 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). 
- --define(CONF_DEFAULT, <<"authorization: {sources: []}">>). +-include_lib("emqx/include/emqx_placeholder.hrl"). all() -> emqx_common_test_helpers:all(?MODULE). @@ -31,36 +30,38 @@ groups() -> []. init_per_suite(Config) -> - meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx_schema, fields, fun("authorization") -> - meck:passthrough(["authorization"]) ++ - emqx_authz_schema:fields("authorization"); - (F) -> meck:passthrough([F]) - end), - - ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT), - ok = emqx_common_test_helpers:start_apps([emqx_authz]), - - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), - Rules = [#{<<"type">> => <<"built-in-database">>}], - {ok, _} = emqx_authz:update(replace, Rules), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], + fun set_special_configs/1 + ), Config. end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(replace, []), - emqx_common_test_helpers:stop_apps([emqx_authz]), - meck:unload(emqx_schema), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_authz, emqx_conf]), + ok. + +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], + [#{<<"type">> => <<"built-in-database">>}]), + ok; +set_special_configs(_App) -> ok. init_per_testcase(t_authz, Config) -> mria:dirty_write(#emqx_acl{who = {?ACL_TABLE_USERNAME, <<"test_username">>}, - rules = [{allow, publish, <<"test/%u">>}, + rules = [{allow, publish, <<"test/", ?PH_S_USERNAME>>}, {allow, subscribe, <<"eq #">>} ] }), mria:dirty_write(#emqx_acl{who = {?ACL_TABLE_CLIENTID, <<"test_clientid">>}, - rules = [{allow, publish, <<"test/%c">>}, + rules = [{allow, publish, <<"test/", ?PH_S_CLIENTID>>}, {deny, subscribe, <<"eq #">>} ] }), @@ -96,13 +97,19 @@ t_authz(_) -> listener => {tcp, default} }, - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo1, subscribe, <<"#">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo1, publish, <<"#">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo1, subscribe, <<"#">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo1, publish, <<"#">>)), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_username">>)), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, subscribe, <<"#">>)), + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo2, publish, <<"test/test_username">>)), + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo2, subscribe, <<"#">>)), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo3, publish, <<"test/test_clientid">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, subscribe, <<"#">>)), + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo3, publish, <<"test/test_clientid">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo3, subscribe, <<"#">>)), ok. 
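Note: the authorization suites above all converge on the same lifecycle: start emqx_conf before emqx_authz, push suite-specific settings through a set_special_configs/1 callback, and restore the documented defaults in end_per_suite before stopping the apps. The following is a condensed, illustrative sketch of that pattern, with all names taken from the hunks above; it is not itself part of the patch.

%% Condensed sketch of the shared suite lifecycle (illustrative only).
init_per_suite(Config) ->
    %% emqx_conf is started first so that the authorization settings
    %% land in the loaded config tree before emqx_authz reads them
    ok = emqx_common_test_helpers:start_apps(
           [emqx_conf, emqx_authz],
           fun set_special_configs/1),
    Config.

set_special_configs(emqx_authz) ->
    %% suite-local defaults: no cache, deny on no match, no sources
    {ok, _} = emqx:update_config([authorization, cache, enable], false),
    {ok, _} = emqx:update_config([authorization, no_match], deny),
    {ok, _} = emqx:update_config([authorization, sources], []),
    ok;
set_special_configs(_App) ->
    ok.

end_per_suite(_Config) ->
    %% restore the defaults before stopping the apps, in reverse start order
    {ok, _} = emqx:update_config(
                [authorization],
                #{<<"no_match">> => <<"allow">>,
                  <<"cache">> => #{<<"enable">> => <<"true">>},
                  <<"sources">> => []}),
    emqx_common_test_helpers:stop_apps([emqx_authz, emqx_conf]),
    ok.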
diff --git a/apps/emqx_authz/test/emqx_authz_mongodb_SUITE.erl b/apps/emqx_authz/test/emqx_authz_mongodb_SUITE.erl index 357d8a9ed..d854e680f 100644 --- a/apps/emqx_authz/test/emqx_authz_mongodb_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_mongodb_SUITE.erl @@ -21,8 +21,7 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). - --define(CONF_DEFAULT, <<"authorization: {sources: []}">>). +-include_lib("emqx/include/emqx_placeholder.hrl"). all() -> emqx_common_test_helpers:all(?MODULE). @@ -31,21 +30,15 @@ groups() -> []. init_per_suite(Config) -> - meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx_schema, fields, fun("authorization") -> - meck:passthrough(["authorization"]) ++ - emqx_authz_schema:fields("authorization"); - (F) -> meck:passthrough([F]) - end), - meck:new(emqx_resource, [non_strict, passthrough, no_history, no_link]), meck:expect(emqx_resource, create, fun(_, _, _) -> {ok, meck_data} end), meck:expect(emqx_resource, remove, fun(_) -> ok end ), - ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT), - ok = emqx_common_test_helpers:start_apps([emqx_authz]), - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], + fun set_special_configs/1 + ), + Rules = [#{<<"type">> => <<"mongodb">>, <<"mongo_type">> => <<"single">>, <<"server">> => <<"127.0.0.1:27017">>, @@ -59,10 +52,21 @@ init_per_suite(Config) -> Config. end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(replace, []), - emqx_common_test_helpers:stop_apps([emqx_authz, emqx_resource]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_authz, emqx_conf]), meck:unload(emqx_resource), - meck:unload(emqx_schema), + ok. + +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), + ok; +set_special_configs(_App) -> ok. -define(SOURCE1,[#{<<"topics">> => [<<"#">>], @@ -71,10 +75,10 @@ end_per_suite(_Config) -> -define(SOURCE2,[#{<<"topics">> => [<<"eq #">>], <<"permission">> => <<"allow">>, <<"action">> => <<"all">>}]). --define(SOURCE3,[#{<<"topics">> => [<<"test/%c">>], +-define(SOURCE3,[#{<<"topics">> => [<<"test/", ?PH_CLIENTID/binary>>], <<"permission">> => <<"allow">>, <<"action">> => <<"subscribe">>}]). --define(SOURCE4,[#{<<"topics">> => [<<"test/%u">>], +-define(SOURCE4,[#{<<"topics">> => [<<"test/", ?PH_USERNAME/binary>>], <<"permission">> => <<"allow">>, <<"action">> => <<"publish">>}]). 
@@ -115,11 +119,16 @@ t_authz(_) -> ?assertEqual(deny, emqx_access_control:authorize(ClientInfo1, subscribe, <<"+">>)), meck:expect(emqx_resource, query, fun(_, _) -> ?SOURCE3 ++ ?SOURCE4 end), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, subscribe, <<"test/test_clientid">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_clientid">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo2, subscribe, <<"test/test_username">>)), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_username">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, subscribe, <<"test">>)), % nomatch - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, publish, <<"test">>)), % nomatch + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo2, subscribe, <<"test/test_clientid">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo2, publish, <<"test/test_clientid">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo2, subscribe, <<"test/test_username">>)), + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo2, publish, <<"test/test_username">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo3, subscribe, <<"test">>)), % nomatch + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo3, publish, <<"test">>)), % nomatch ok. - diff --git a/apps/emqx_authz/test/emqx_authz_mysql_SUITE.erl b/apps/emqx_authz/test/emqx_authz_mysql_SUITE.erl index c85422122..4aa0df606 100644 --- a/apps/emqx_authz/test/emqx_authz_mysql_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_mysql_SUITE.erl @@ -21,6 +21,7 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). -define(CONF_DEFAULT, <<"authorization: {sources: []}">>). @@ -31,22 +32,14 @@ groups() -> []. init_per_suite(Config) -> - meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx_schema, fields, fun("authorization") -> - meck:passthrough(["authorization"]) ++ - emqx_authz_schema:fields("authorization"); - (F) -> meck:passthrough([F]) - end), - meck:new(emqx_resource, [non_strict, passthrough, no_history, no_link]), meck:expect(emqx_resource, create, fun(_, _, _) -> {ok, meck_data} end ), meck:expect(emqx_resource, remove, fun(_) -> ok end ), - ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT), - ok = emqx_common_test_helpers:start_apps([emqx_authz]), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], + fun set_special_configs/1), - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), Rules = [#{<<"type">> => <<"mysql">>, <<"server">> => <<"127.0.0.1:27017">>, <<"pool_size">> => 1, @@ -61,10 +54,21 @@ init_per_suite(Config) -> Config. end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(replace, []), - emqx_common_test_helpers:stop_apps([emqx_authz, emqx_resource]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_authz, emqx_conf]), meck:unload(emqx_resource), - meck:unload(emqx_schema), + ok. 
+ +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), + ok; +set_special_configs(_App) -> ok. -define(COLUMNS, [ <<"action">> @@ -73,8 +77,8 @@ end_per_suite(_Config) -> ]). -define(SOURCE1, [[<<"all">>, <<"deny">>, <<"#">>]]). -define(SOURCE2, [[<<"all">>, <<"allow">>, <<"eq #">>]]). --define(SOURCE3, [[<<"subscribe">>, <<"allow">>, <<"test/%c">>]]). --define(SOURCE4, [[<<"publish">>, <<"allow">>, <<"test/%u">>]]). +-define(SOURCE3, [[<<"subscribe">>, <<"allow">>, <<"test/", ?PH_CLIENTID/binary>>]]). +-define(SOURCE4, [[<<"publish">>, <<"allow">>, <<"test/", ?PH_USERNAME/binary>>]]). %%------------------------------------------------------------------------------ %% Testcases @@ -113,11 +117,16 @@ t_authz(_) -> ?assertEqual(deny, emqx_access_control:authorize(ClientInfo1, subscribe, <<"+">>)), meck:expect(emqx_resource, query, fun(_, _) -> {ok, ?COLUMNS, ?SOURCE3 ++ ?SOURCE4} end), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, subscribe, <<"test/test_clientid">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_clientid">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo2, subscribe, <<"test/test_username">>)), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_username">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, subscribe, <<"test">>)), % nomatch - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, publish, <<"test">>)), % nomatch + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo2, subscribe, <<"test/test_clientid">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo2, publish, <<"test/test_clientid">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo2, subscribe, <<"test/test_username">>)), + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo2, publish, <<"test/test_username">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo3, subscribe, <<"test">>)), % nomatch + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo3, publish, <<"test">>)), % nomatch ok. - diff --git a/apps/emqx_authz/test/emqx_authz_postgresql_SUITE.erl b/apps/emqx_authz/test/emqx_authz_postgresql_SUITE.erl index 2b9d4c62e..0a4001757 100644 --- a/apps/emqx_authz/test/emqx_authz_postgresql_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_postgresql_SUITE.erl @@ -21,8 +21,7 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). - --define(CONF_DEFAULT, <<"authorization: {sources: []}">>). +-include_lib("emqx/include/emqx_placeholder.hrl"). all() -> emqx_common_test_helpers:all(?MODULE). @@ -31,22 +30,14 @@ groups() -> []. 
init_per_suite(Config) -> - meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx_schema, fields, fun("authorization") -> - meck:passthrough(["authorization"]) ++ - emqx_authz_schema:fields("authorization"); - (F) -> meck:passthrough([F]) - end), - meck:new(emqx_resource, [non_strict, passthrough, no_history, no_link]), meck:expect(emqx_resource, create, fun(_, _, _) -> {ok, meck_data} end ), meck:expect(emqx_resource, remove, fun(_) -> ok end ), - ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT), - ok = emqx_common_test_helpers:start_apps([emqx_authz]), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], + fun set_special_configs/1), - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), Rules = [#{<<"type">> => <<"postgresql">>, <<"server">> => <<"127.0.0.1:27017">>, <<"pool_size">> => 1, @@ -61,10 +52,21 @@ init_per_suite(Config) -> Config. end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(replace, []), - emqx_common_test_helpers:stop_apps([emqx_authz, emqx_resource]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_authz, emqx_conf]), meck:unload(emqx_resource), - meck:unload(emqx_schema), + ok. + +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), + ok; +set_special_configs(_App) -> ok. -define(COLUMNS, [ {column, <<"action">>, meck, meck, meck, meck, meck, meck, meck} @@ -73,8 +75,8 @@ end_per_suite(_Config) -> ]). -define(SOURCE1, [{<<"all">>, <<"deny">>, <<"#">>}]). -define(SOURCE2, [{<<"all">>, <<"allow">>, <<"eq #">>}]). --define(SOURCE3, [{<<"subscribe">>, <<"allow">>, <<"test/%c">>}]). --define(SOURCE4, [{<<"publish">>, <<"allow">>, <<"test/%u">>}]). +-define(SOURCE3, [{<<"subscribe">>, <<"allow">>, <<"test/", ?PH_CLIENTID/binary>>}]). +-define(SOURCE4, [{<<"publish">>, <<"allow">>, <<"test/", ?PH_USERNAME/binary>>}]). 
%%------------------------------------------------------------------------------ %% Testcases @@ -113,11 +115,16 @@ t_authz(_) -> ?assertEqual(deny, emqx_access_control:authorize(ClientInfo2, subscribe, <<"+">>)), meck:expect(emqx_resource, query, fun(_, _) -> {ok, ?COLUMNS, ?SOURCE3 ++ ?SOURCE4} end), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, subscribe, <<"test/test_clientid">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_clientid">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo2, subscribe, <<"test/test_username">>)), - ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_username">>)), - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, subscribe, <<"test">>)), % nomatch - ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, publish, <<"test">>)), % nomatch + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo2, subscribe, <<"test/test_clientid">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo2, publish, <<"test/test_clientid">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo2, subscribe, <<"test/test_username">>)), + ?assertEqual(allow, emqx_access_control:authorize( + ClientInfo2, publish, <<"test/test_username">>)), + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo3, subscribe, <<"test">>)), % nomatch + ?assertEqual(deny, emqx_access_control:authorize( + ClientInfo3, publish, <<"test">>)), % nomatch ok. - diff --git a/apps/emqx_authz/test/emqx_authz_redis_SUITE.erl b/apps/emqx_authz/test/emqx_authz_redis_SUITE.erl index b1657d558..3951ebfb6 100644 --- a/apps/emqx_authz/test/emqx_authz_redis_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_redis_SUITE.erl @@ -21,6 +21,7 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). -define(CONF_DEFAULT, <<"authorization: {sources: []}">>). all() -> @@ -30,22 +31,14 @@ groups() -> []. init_per_suite(Config) -> - meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]), - meck:expect(emqx_schema, fields, fun("authorization") -> - meck:passthrough(["authorization"]) ++ - emqx_authz_schema:fields("authorization"); - (F) -> meck:passthrough([F]) - end), - meck:new(emqx_resource, [non_strict, passthrough, no_history, no_link]), meck:expect(emqx_resource, create, fun(_, _, _) -> {ok, meck_data} end ), meck:expect(emqx_resource, remove, fun(_) -> ok end ), - ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT), - ok = emqx_common_test_helpers:start_apps([emqx_authz]), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], + fun set_special_configs/1), - {ok, _} = emqx:update_config([authorization, cache, enable], false), - {ok, _} = emqx:update_config([authorization, no_match], deny), Rules = [#{<<"type">> => <<"redis">>, <<"server">> => <<"127.0.0.1:27017">>, <<"pool_size">> => 1, @@ -53,20 +46,31 @@ init_per_suite(Config) -> <<"password">> => <<"ee">>, <<"auto_reconnect">> => true, <<"ssl">> => #{<<"enable">> => false}, - <<"cmd">> => <<"HGETALL mqtt_authz:%u">> + <<"cmd">> => <<"HGETALL mqtt_authz:", ?PH_USERNAME/binary>> }], {ok, _} = emqx_authz:update(replace, Rules), Config. 
end_per_suite(_Config) -> - {ok, _} = emqx_authz:update(replace, []), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), emqx_common_test_helpers:stop_apps([emqx_authz, emqx_resource]), meck:unload(emqx_resource), - meck:unload(emqx_schema), ok. --define(SOURCE1, [<<"test/%u">>, <<"publish">>]). --define(SOURCE2, [<<"test/%c">>, <<"publish">>]). +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), + ok; +set_special_configs(_App) -> + ok. + +-define(SOURCE1, [<<"test/", ?PH_USERNAME/binary>>, <<"publish">>]). +-define(SOURCE2, [<<"test/", ?PH_CLIENTID/binary>>, <<"publish">>]). -define(SOURCE3, [<<"#">>, <<"subscribe">>]). %%------------------------------------------------------------------------------ @@ -110,4 +114,3 @@ t_authz(_) -> ?assertEqual(deny, emqx_access_control:authorize(ClientInfo, publish, <<"#">>)), ok. - diff --git a/apps/emqx_authz/test/emqx_authz_rule_SUITE.erl b/apps/emqx_authz/test/emqx_authz_rule_SUITE.erl index ec8ca929a..25995748d 100644 --- a/apps/emqx_authz/test/emqx_authz_rule_SUITE.erl +++ b/apps/emqx_authz/test/emqx_authz_rule_SUITE.erl @@ -21,34 +21,54 @@ -include("emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). +-include_lib("emqx/include/emqx_placeholder.hrl"). -define(SOURCE1, {deny, all}). -define(SOURCE2, {allow, {ipaddr, "127.0.0.1"}, all, [{eq, "#"}, {eq, "+"}]}). --define(SOURCE3, {allow, {ipaddrs, ["127.0.0.1", "192.168.1.0/24"]}, subscribe, ["%c"]}). +-define(SOURCE3, {allow, {ipaddrs, ["127.0.0.1", "192.168.1.0/24"]}, subscribe, [?PH_S_CLIENTID]}). -define(SOURCE4, {allow, {'and', [{client, "test"}, {user, "test"}]}, publish, ["topic/test"]}). --define(SOURCE5, {allow, {'or', [{username, {re, "^test"}}, {clientid, {re, "test?"}}]}, publish, ["%u", "%c"]}). +-define(SOURCE5, {allow, {'or', + [{username, {re, "^test"}}, + {clientid, {re, "test?"}}]}, + publish, [?PH_S_USERNAME, ?PH_S_CLIENTID]}). all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - ok = emqx_common_test_helpers:start_apps([emqx_authz]), + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], + fun set_special_configs/1), Config. end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps([emqx_authz]), + {ok, _} = emqx:update_config( + [authorization], + #{<<"no_match">> => <<"allow">>, + <<"cache">> => #{<<"enable">> => <<"true">>}, + <<"sources">> => []}), + emqx_common_test_helpers:stop_apps([emqx_authz, emqx_conf]), + ok. + +set_special_configs(emqx_authz) -> + {ok, _} = emqx:update_config([authorization, cache, enable], false), + {ok, _} = emqx:update_config([authorization, no_match], deny), + {ok, _} = emqx:update_config([authorization, sources], []), + ok; +set_special_configs(_App) -> ok. 
t_compile(_) -> ?assertEqual({deny, all, all, [['#']]}, emqx_authz_rule:compile(?SOURCE1)), - ?assertEqual({allow, {ipaddr, {{127,0,0,1}, {127,0,0,1}, 32}}, all, [{eq, ['#']}, {eq, ['+']}]}, emqx_authz_rule:compile(?SOURCE2)), + ?assertEqual({allow, {ipaddr, {{127,0,0,1}, {127,0,0,1}, 32}}, + all, [{eq, ['#']}, {eq, ['+']}]}, emqx_authz_rule:compile(?SOURCE2)), ?assertEqual({allow, {ipaddrs,[{{127,0,0,1},{127,0,0,1},32}, {{192,168,1,0},{192,168,1,255},24}]}, subscribe, - [{pattern,[<<"%c">>]}] + [{pattern,[?PH_CLIENTID]}] }, emqx_authz_rule:compile(?SOURCE3)), ?assertMatch({allow, @@ -58,9 +78,9 @@ t_compile(_) -> }, emqx_authz_rule:compile(?SOURCE4)), ?assertMatch({allow, - {'or', [{username, {re_pattern, _, _, _, _}}, {clientid, {re_pattern, _, _, _, _}}]}, - publish, - [{pattern, [<<"%u">>]}, {pattern, [<<"%c">>]}] + {'or', [{username, {re_pattern, _, _, _, _}}, + {clientid, {re_pattern, _, _, _, _}}]}, + publish, [{pattern, [?PH_USERNAME]}, {pattern, [?PH_CLIENTID]}] }, emqx_authz_rule:compile(?SOURCE5)), ok. @@ -92,47 +112,64 @@ t_match(_) -> }, ?assertEqual({matched, deny}, - emqx_authz_rule:match(ClientInfo1, subscribe, <<"#">>, emqx_authz_rule:compile(?SOURCE1))), + emqx_authz_rule:match(ClientInfo1, subscribe, <<"#">>, + emqx_authz_rule:compile(?SOURCE1))), ?assertEqual({matched, deny}, - emqx_authz_rule:match(ClientInfo2, subscribe, <<"+">>, emqx_authz_rule:compile(?SOURCE1))), + emqx_authz_rule:match(ClientInfo2, subscribe, <<"+">>, + emqx_authz_rule:compile(?SOURCE1))), ?assertEqual({matched, deny}, - emqx_authz_rule:match(ClientInfo3, subscribe, <<"topic/test">>, emqx_authz_rule:compile(?SOURCE1))), + emqx_authz_rule:match(ClientInfo3, subscribe, <<"topic/test">>, + emqx_authz_rule:compile(?SOURCE1))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo1, subscribe, <<"#">>, emqx_authz_rule:compile(?SOURCE2))), + emqx_authz_rule:match(ClientInfo1, subscribe, <<"#">>, + emqx_authz_rule:compile(?SOURCE2))), ?assertEqual(nomatch, - emqx_authz_rule:match(ClientInfo1, subscribe, <<"topic/test">>, emqx_authz_rule:compile(?SOURCE2))), + emqx_authz_rule:match(ClientInfo1, subscribe, <<"topic/test">>, + emqx_authz_rule:compile(?SOURCE2))), ?assertEqual(nomatch, - emqx_authz_rule:match(ClientInfo2, subscribe, <<"#">>, emqx_authz_rule:compile(?SOURCE2))), + emqx_authz_rule:match(ClientInfo2, subscribe, <<"#">>, + emqx_authz_rule:compile(?SOURCE2))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo1, subscribe, <<"test">>, emqx_authz_rule:compile(?SOURCE3))), + emqx_authz_rule:match(ClientInfo1, subscribe, <<"test">>, + emqx_authz_rule:compile(?SOURCE3))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo2, subscribe, <<"test">>, emqx_authz_rule:compile(?SOURCE3))), + emqx_authz_rule:match(ClientInfo2, subscribe, <<"test">>, + emqx_authz_rule:compile(?SOURCE3))), ?assertEqual(nomatch, - emqx_authz_rule:match(ClientInfo2, subscribe, <<"topic/test">>, emqx_authz_rule:compile(?SOURCE3))), + emqx_authz_rule:match(ClientInfo2, subscribe, <<"topic/test">>, + emqx_authz_rule:compile(?SOURCE3))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo1, publish, <<"topic/test">>, emqx_authz_rule:compile(?SOURCE4))), + emqx_authz_rule:match(ClientInfo1, publish, <<"topic/test">>, + emqx_authz_rule:compile(?SOURCE4))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo2, publish, <<"topic/test">>, emqx_authz_rule:compile(?SOURCE4))), + emqx_authz_rule:match(ClientInfo2, publish, <<"topic/test">>, + 
emqx_authz_rule:compile(?SOURCE4))), ?assertEqual(nomatch, - emqx_authz_rule:match(ClientInfo3, publish, <<"topic/test">>, emqx_authz_rule:compile(?SOURCE4))), + emqx_authz_rule:match(ClientInfo3, publish, <<"topic/test">>, + emqx_authz_rule:compile(?SOURCE4))), ?assertEqual(nomatch, - emqx_authz_rule:match(ClientInfo4, publish, <<"topic/test">>, emqx_authz_rule:compile(?SOURCE4))), + emqx_authz_rule:match(ClientInfo4, publish, <<"topic/test">>, + emqx_authz_rule:compile(?SOURCE4))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo1, publish, <<"test">>, emqx_authz_rule:compile(?SOURCE5))), + emqx_authz_rule:match(ClientInfo1, publish, <<"test">>, + emqx_authz_rule:compile(?SOURCE5))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo2, publish, <<"test">>, emqx_authz_rule:compile(?SOURCE5))), + emqx_authz_rule:match(ClientInfo2, publish, <<"test">>, + emqx_authz_rule:compile(?SOURCE5))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo3, publish, <<"test">>, emqx_authz_rule:compile(?SOURCE5))), + emqx_authz_rule:match(ClientInfo3, publish, <<"test">>, + emqx_authz_rule:compile(?SOURCE5))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo3, publish, <<"fake">>, emqx_authz_rule:compile(?SOURCE5))), + emqx_authz_rule:match(ClientInfo3, publish, <<"fake">>, + emqx_authz_rule:compile(?SOURCE5))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo4, publish, <<"test">>, emqx_authz_rule:compile(?SOURCE5))), + emqx_authz_rule:match(ClientInfo4, publish, <<"test">>, + emqx_authz_rule:compile(?SOURCE5))), ?assertEqual({matched, allow}, - emqx_authz_rule:match(ClientInfo4, publish, <<"fake">>, emqx_authz_rule:compile(?SOURCE5))), - + emqx_authz_rule:match(ClientInfo4, publish, <<"fake">>, + emqx_authz_rule:compile(?SOURCE5))), ok. - diff --git a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe_placeholder.erl b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe_placeholder.erl index 70779770d..6ab6ad114 100644 --- a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe_placeholder.erl +++ b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe_placeholder.erl @@ -15,6 +15,8 @@ %%-------------------------------------------------------------------- -module(emqx_auto_subscribe_placeholder). +-include_lib("emqx/include/emqx_placeholder.hrl"). + -export([generate/1]). -export([to_topic_table/3]). 
@@ -40,13 +42,13 @@ to_topic_table(PHs, ClientInfo, ConnInfo) -> generate(<<"">>, Result) -> lists:reverse(Result); -generate(<<"${clientid}", Tail/binary>>, Result) -> +generate(<<?PH_S_CLIENTID, Tail/binary>>, Result) -> generate(Tail, [clientid | Result]); -generate(<<"${username}", Tail/binary>>, Result) -> +generate(<<?PH_S_USERNAME, Tail/binary>>, Result) -> generate(Tail, [username | Result]); -generate(<<"${host}", Tail/binary>>, Result) -> +generate(<<?PH_S_HOST, Tail/binary>>, Result) -> generate(Tail, [host | Result]); -generate(<<"${port}", Tail/binary>>, Result) -> +generate(<<?PH_S_PORT, Tail/binary>>, Result) -> generate(Tail, [port | Result]); generate(<>, []) -> generate(Tail, [<>]); @@ -62,7 +64,7 @@ to_topic([Binary | PTs], C, Co, Res) when is_binary(Binary) -> to_topic([clientid | PTs], C = #{clientid := ClientID}, Co, Res) -> to_topic(PTs, C, Co, [ClientID | Res]); to_topic([username | PTs], C = #{username := undefined}, Co, Res) -> - to_topic(PTs, C, Co, [<<"${username}">> | Res]); + to_topic(PTs, C, Co, [?PH_USERNAME | Res]); to_topic([username | PTs], C = #{username := Username}, Co, Res) -> to_topic(PTs, C, Co, [Username | Res]); to_topic([host | PTs], C, Co = #{peername := {Host, _}}, Res) -> diff --git a/apps/emqx_bridge/etc/emqx_bridge.conf b/apps/emqx_bridge/etc/emqx_bridge.conf index f26172ef6..c658bc4ce 100644 --- a/apps/emqx_bridge/etc/emqx_bridge.conf +++ b/apps/emqx_bridge/etc/emqx_bridge.conf @@ -2,54 +2,37 @@ ## EMQ X Bridge ##-------------------------------------------------------------------- -#bridges.mqtt.my_mqtt_bridge_to_aws { -# server = "127.0.0.1:1883" -# proto_ver = "v4" -# username = "username1" -# password = "" -# clean_start = true -# keepalive = 300 -# retry_interval = "30s" -# max_inflight = 32 -# reconnect_interval = "30s" -# bridge_mode = true -# replayq { -# dir = "{{ platform_data_dir }}/replayq/bridge_mqtt/" -# seg_bytes = "100MB" -# offload = false -# max_total_bytes = "1GB" -# } -# ssl { -# enable = false -# keyfile = "{{ platform_etc_dir }}/certs/client-key.pem" -# certfile = "{{ platform_etc_dir }}/certs/client-cert.pem" -# cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem" -# } -# ## We will create one MQTT connection for each element of the `ingress_channels` -# ## Syntax: ingress_channels. -# ingress_channels.pull_msgs_from_aws { -# subscribe_remote_topic = "aws/#" -# subscribe_qos = 1 -# local_topic = "from_aws/${topic}" -# payload = "${payload}" -# qos = "${qos}" -# retain = "${retain}" -# } -# ## We will create one MQTT connection for each element of the `egress_channels` -# ## Syntax: egress_channels.
-# egress_channels.push_msgs_to_aws { -# subscribe_local_topic = "emqx/#" -# remote_topic = "from_emqx/${topic}" -# payload = "${payload}" -# qos = 1 -# retain = false -# } +## MQTT bridges to/from another MQTT broker +#bridges.mqtt.my_ingress_mqtt_bridge { +# connector = "mqtt:my_mqtt_connector" +# direction = ingress +# ## topic mappings for this bridge +# from_remote_topic = "aws/#" +# subscribe_qos = 1 +# to_local_topic = "from_aws/${topic}" +# payload = "${payload}" +# qos = "${qos}" +# retain = "${retain}" #} # +#bridges.mqtt.my_egress_mqtt_bridge { +# connector = "mqtt:my_mqtt_connector" +# direction = egress +# ## topic mappings for this bridge +# from_local_topic = "emqx/#" +# to_remote_topic = "from_emqx/${topic}" +# payload = "${payload}" +# qos = 1 +# retain = false +#} +# +## HTTP bridges to an HTTP server #bridges.http.my_http_bridge { -# base_url: "http://localhost:9901" -# connect_timeout: "30s" -# max_retries: 3 +# ## NOTE: we cannot use placeholder variables in the `scheme://host:port` part of the url +# url = "http://localhost:9901/messages/${topic}" +# request_timeout = "30s" +# connect_timeout = "30s" +# max_retries = 3 # retry_interval = "10s" # pool_type = "random" # pool_size = 4 @@ -60,15 +43,13 @@ # certfile = "{{ platform_etc_dir }}/certs/client-cert.pem" # cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem" # } -# egress_channels.post_messages { -# subscribe_local_topic = "emqx_http/#" -# request_timeout: "30s" -# ## following config entries can use placehodler variables -# method = post -# path = "/messages/${topic}" -# body = "${payload}" -# headers { -# "content-type": "application/json" -# } +# +# from_local_topic = "emqx_http/#" +# ## the following config entries can use placeholder variables: +# ## url, method, body, headers +# method = post +# body = "${payload}" +# headers { +# "content-type": "application/json" # } #} diff --git a/apps/emqx_bridge/src/emqx_bridge.erl b/apps/emqx_bridge/src/emqx_bridge.erl index c07a5b842..01e5faf07 100644 --- a/apps/emqx_bridge/src/emqx_bridge.erl +++ b/apps/emqx_bridge/src/emqx_bridge.erl @@ -18,52 +18,57 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). --export([post_config_update/4]). +-export([post_config_update/5]). --export([reload_hook/0, unload_hook/0]). +-export([ load_hook/0 + , unload_hook/0 + ]). -export([on_message_publish/1]). --export([ load_bridges/0 - , get_bridge/2 - , get_bridge/3 - , list_bridges/0 - , create_bridge/3 - , remove_bridge/3 - , update_bridge/3 - , start_bridge/2 - , stop_bridge/2 - , restart_bridge/2 - , send_message/2 - ]). - --export([ config_key_path/0 - ]). - -export([ resource_type/1 , bridge_type/1 , resource_id/1 , resource_id/2 , parse_bridge_id/1 - , channel_id/4 - , parse_channel_id/1 ]). -reload_hook() -> - unload_hook(), - Bridges = emqx_conf:get([bridges], #{}), +-export([ load/0 + , lookup/2 + , lookup/3 + , list/0 + , create/3 + , recreate/2 + , recreate/3 + , create_dry_run/2 + , remove/3 + , update/3 + , start/2 + , stop/2 + , restart/2 + ]). + +-export([ send_message/2 + ]). + +-export([ config_key_path/0 + ]). + +load_hook() -> + Bridges = emqx:get_config([bridges], #{}), + load_hook(Bridges). + +load_hook(Bridges) -> lists:foreach(fun({_Type, Bridge}) -> lists:foreach(fun({_Name, BridgeConf}) -> - load_hook(BridgeConf) + do_load_hook(BridgeConf) end, maps:to_list(Bridge)) end, maps:to_list(Bridges)).
-load_hook(#{egress_channels := Channels}) -> - case has_subscribe_local_topic(Channels) of - true -> ok; - false -> emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}) - end; -load_hook(_Conf) -> ok. +do_load_hook(#{from_local_topic := _}) -> + emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}), + ok; +do_load_hook(_Conf) -> ok. unload_hook() -> ok = emqx_hooks:del('message.publish', {?MODULE, on_message_publish}). @@ -71,40 +76,40 @@ unload_hook() -> on_message_publish(Message = #message{topic = Topic, flags = Flags}) -> case maps:get(sys, Flags, false) of false -> - ChannelIds = get_matched_channels(Topic), - lists:foreach(fun(ChannelId) -> - send_message(ChannelId, emqx_message:to_map(Message)) - end, ChannelIds); + lists:foreach(fun (Id) -> + send_message(Id, emqx_rule_events:eventmsg_publish(Message)) + end, get_matched_bridges(Topic)); true -> ok end, {ok, Message}. -%% TODO: remove this clause, treat mqtt bridges the same as other bridges -send_message(ChannelId, Message) -> - {BridgeType, BridgeName, _, _} = parse_channel_id(ChannelId), +send_message(BridgeId, Message) -> + {BridgeType, BridgeName} = parse_bridge_id(BridgeId), ResId = emqx_bridge:resource_id(BridgeType, BridgeName), - do_send_message(ResId, ChannelId, Message). - -do_send_message(ResId, ChannelId, Message) -> - emqx_resource:query(ResId, {send_message, ChannelId, Message}). + emqx_resource:query(ResId, {send_message, Message}). config_key_path() -> [bridges]. +resource_type(<<"mqtt">>) -> emqx_connector_mqtt; resource_type(mqtt) -> emqx_connector_mqtt; +resource_type(<<"http">>) -> emqx_connector_http; resource_type(http) -> emqx_connector_http. bridge_type(emqx_connector_mqtt) -> mqtt; bridge_type(emqx_connector_http) -> http. -post_config_update(_Req, NewConf, OldConf, _AppEnv) -> +post_config_update(_, _Req, NewConf, OldConf, _AppEnv) -> #{added := Added, removed := Removed, changed := Updated} = diff_confs(NewConf, OldConf), - perform_bridge_changes([ - {fun remove_bridge/3, Removed}, - {fun create_bridge/3, Added}, - {fun update_bridge/3, Updated} - ]). + Result = perform_bridge_changes([ + {fun remove/3, Removed}, + {fun create/3, Added}, + {fun update/3, Updated} + ]), + ok = unload_hook(), + ok = load_hook(NewConf), + Result. perform_bridge_changes(Tasks) -> perform_bridge_changes(Tasks, ok). @@ -123,8 +128,8 @@ perform_bridge_changes([{Action, MapConfs} | Tasks], Result0) -> end, Result0, MapConfs), perform_bridge_changes(Tasks, Result). -load_bridges() -> - Bridges = emqx_conf:get([bridges], #{}), +load() -> + Bridges = emqx:get_config([bridges], #{}), emqx_bridge_monitor:ensure_all_started(Bridges). resource_id(BridgeId) when is_binary(BridgeId) -> @@ -145,55 +150,41 @@ parse_bridge_id(BridgeId) -> _ -> error({invalid_bridge_id, BridgeId}) end. -channel_id(BridgeType, BridgeName, ChannelType, ChannelName) -> - BType = bin(BridgeType), - BName = bin(BridgeName), - CType = bin(ChannelType), - CName = bin(ChannelName), - <>. - -parse_channel_id(ChannelId) -> - case string:split(bin(ChannelId), ":", all) of - [BridgeType, BridgeName, ChannelType, ChannelName] -> - {BridgeType, BridgeName, ChannelType, ChannelName}; - _ -> error({invalid_bridge_id, ChannelId}) - end. 
- -list_bridges() -> +list() -> lists:foldl(fun({Type, NameAndConf}, Bridges) -> lists:foldl(fun({Name, RawConf}, Acc) -> - case get_bridge(Type, Name, RawConf) of + case lookup(Type, Name, RawConf) of {error, not_found} -> Acc; {ok, Res} -> [Res | Acc] end end, Bridges, maps:to_list(NameAndConf)) - end, [], maps:to_list(emqx:get_raw_config([bridges]))). + end, [], maps:to_list(emqx:get_raw_config([bridges], #{}))). -get_bridge(Type, Name) -> +lookup(Type, Name) -> RawConf = emqx:get_raw_config([bridges, Type, Name], #{}), - get_bridge(Type, Name, RawConf). -get_bridge(Type, Name, RawConf) -> + lookup(Type, Name, RawConf). +lookup(Type, Name, RawConf) -> case emqx_resource:get_instance(resource_id(Type, Name)) of {error, not_found} -> {error, not_found}; {ok, Data} -> {ok, #{id => bridge_id(Type, Name), resource_data => Data, raw_config => RawConf}} end. -start_bridge(Type, Name) -> - restart_bridge(Type, Name). +start(Type, Name) -> + restart(Type, Name). -stop_bridge(Type, Name) -> +stop(Type, Name) -> emqx_resource:stop(resource_id(Type, Name)). -restart_bridge(Type, Name) -> +restart(Type, Name) -> emqx_resource:restart(resource_id(Type, Name)). -create_bridge(Type, Name, Conf) -> +create(Type, Name, Conf) -> ?SLOG(info, #{msg => "create bridge", type => Type, name => Name, config => Conf}), ResId = resource_id(Type, Name), - case emqx_resource:create(ResId, - emqx_bridge:resource_type(Type), Conf) of + case emqx_resource:create_local(ResId, + emqx_bridge:resource_type(Type), parse_confs(Type, Name, Conf)) of {ok, already_created} -> emqx_resource:get_instance(ResId); {ok, Data} -> @@ -202,23 +193,38 @@ create_bridge(Type, Name, Conf) -> {error, Reason} end. -update_bridge(Type, Name, {_OldConf, Conf}) -> +update(Type, Name, {_OldConf, Conf}) -> %% TODO: sometimes its not necessary to restart the bridge connection. %% - %% - if the connection related configs like `username` is updated, we should restart/start + %% - if the connection related configs like `servers` is updated, we should restart/start %% or stop bridges according to the change. - %% - if the connection related configs are not update, but channel configs `ingress_channels` or - %% `egress_channels` are changed, then we should not restart the bridge, we only restart/start - %% the channels. + %% - if the connection related configs are not update, only non-connection configs like + %% the `method` or `headers` of a HTTP bridge is changed, then the bridge can be updated + %% without restarting the bridge. %% ?SLOG(info, #{msg => "update bridge", type => Type, name => Name, config => Conf}), - emqx_resource:recreate(resource_id(Type, Name), - emqx_bridge:resource_type(Type), Conf, []). + recreate(Type, Name, Conf). -remove_bridge(Type, Name, _Conf) -> +recreate(Type, Name) -> + recreate(Type, Name, emqx:get_raw_config([bridges, Type, Name])). + +recreate(Type, Name, Conf) -> + emqx_resource:recreate_local(resource_id(Type, Name), + emqx_bridge:resource_type(Type), parse_confs(Type, Name, Conf), []). + +create_dry_run(Type, Conf) -> + Conf0 = Conf#{<<"ingress">> => #{<<"from_remote_topic">> => <<"t">>}}, + case emqx_resource:check_config(emqx_bridge:resource_type(Type), Conf0) of + {ok, Conf1} -> + emqx_resource:create_dry_run_local(emqx_bridge:resource_type(Type), Conf1); + {error, _} = Error -> + Error + end. 
+ +remove(Type, Name, _Conf) -> ?SLOG(info, #{msg => "remove bridge", type => Type, name => Name}), - case emqx_resource:remove(resource_id(Type, Name)) of + case emqx_resource:remove_local(resource_id(Type, Name)) of ok -> ok; {error, not_found} -> ok; {error, Reason} -> @@ -238,34 +244,83 @@ flatten_confs(Conf0) -> do_flatten_confs(Type, Conf0) -> [{{Type, Name}, Conf} || {Name, Conf} <- maps:to_list(Conf0)]. -has_subscribe_local_topic(Channels) -> - lists:any(fun (#{subscribe_local_topic := _}) -> true; - (_) -> false - end, maps:to_list(Channels)). - -get_matched_channels(Topic) -> - Bridges = emqx_conf:get([bridges], #{}), - maps:fold(fun - %% TODO: also trigger 'message.publish' for mqtt bridges. - (mqtt, _Conf, Acc0) -> Acc0; - (BType, Conf, Acc0) -> - maps:fold(fun - (BName, #{egress_channels := Channels}, Acc1) -> - do_get_matched_channels(Topic, Channels, BType, BName, egress_channels) - ++ Acc1; - (_Name, _BridgeConf, Acc1) -> Acc1 - end, Acc0, Conf) +get_matched_bridges(Topic) -> + Bridges = emqx:get_config([bridges], #{}), + maps:fold(fun (BType, Conf, Acc0) -> + maps:fold(fun + %% Confs for MQTT, Kafka bridges have the `direction` flag + (_BName, #{direction := ingress}, Acc1) -> + Acc1; + (BName, #{direction := egress} = Egress, Acc1) -> + get_matched_bridge_id(Egress, Topic, BType, BName, Acc1); + %% HTTP, MySQL bridges only have egress direction + (BName, BridgeConf, Acc1) -> + get_matched_bridge_id(BridgeConf, Topic, BType, BName, Acc1) + end, Acc0, Conf) end, [], Bridges). -do_get_matched_channels(Topic, Channels, BType, BName, CType) -> - maps:fold(fun - (ChannName, #{subscribe_local_topic := Filter}, Acc) -> - case emqx_topic:match(Topic, Filter) of - true -> [channel_id(BType, BName, CType, ChannName) | Acc]; - false -> Acc +get_matched_bridge_id(#{from_local_topic := Filter}, Topic, BType, BName, Acc) -> + case emqx_topic:match(Topic, Filter) of + true -> [bridge_id(BType, BName) | Acc]; + false -> Acc + end. + +parse_confs(http, _Name, + #{ url := Url + , method := Method + , body := Body + , headers := Headers + , request_timeout := ReqTimeout + } = Conf) -> + {BaseUrl, Path} = parse_url(Url), + {ok, BaseUrl2} = emqx_http_lib:uri_parse(BaseUrl), + Conf#{ base_url => BaseUrl2 + , request => + #{ path => Path + , method => Method + , body => Body + , headers => Headers + , request_timeout => ReqTimeout + } + }; +parse_confs(Type, Name, #{connector := ConnId, direction := Direction} = Conf) + when is_binary(ConnId) -> + case emqx_connector:parse_connector_id(ConnId) of + {Type, ConnName} -> + ConnectorConfs = emqx:get_config([connectors, Type, ConnName]), + make_resource_confs(Direction, ConnectorConfs, + maps:without([connector, direction], Conf), Name); + {_ConnType, _ConnName} -> + error({cannot_use_connector_with_different_type, ConnId}) + end; +parse_confs(_Type, Name, #{connector := ConnectorConfs, direction := Direction} = Conf) + when is_map(ConnectorConfs) -> + make_resource_confs(Direction, ConnectorConfs, + maps:without([connector, direction], Conf), Name). + +make_resource_confs(ingress, ConnectorConfs, BridgeConf, Name) -> + BName = bin(Name), + ConnectorConfs#{ + ingress => BridgeConf#{hookpoint => <<"$bridges/", BName/binary>>} + }; +make_resource_confs(egress, ConnectorConfs, BridgeConf, _Name) -> + ConnectorConfs#{ + egress => BridgeConf + }. 
+ +parse_url(Url) -> + case string:split(Url, "//", leading) of + [Scheme, UrlRem] -> + case string:split(UrlRem, "/", leading) of + [HostPort, Path] -> + {iolist_to_binary([Scheme, "//", HostPort]), Path}; + [HostPort] -> + {iolist_to_binary([Scheme, "//", HostPort]), <<>>} end; - (_ChannName, _ChannConf, Acc) -> Acc - end, [], Channels). + [Url] -> + error({invalid_url, Url}) + end. + bin(Bin) when is_binary(Bin) -> Bin; bin(Str) when is_list(Str) -> list_to_binary(Str); diff --git a/apps/emqx_bridge/src/emqx_bridge_api.erl b/apps/emqx_bridge/src/emqx_bridge_api.erl index 40a101640..417fa49f7 100644 --- a/apps/emqx_bridge/src/emqx_bridge_api.erl +++ b/apps/emqx_bridge/src/emqx_bridge_api.erl @@ -19,23 +19,40 @@ -export([api_spec/0]). --export([ list_bridges/2 +-export([ list_create_bridges_in_cluster/2 , list_local_bridges/1 - , crud_bridges_cluster/2 - , crud_bridges/3 + , crud_bridges_in_cluster/2 , manage_bridges/2 + , lookup_from_local_node/2 ]). --define(TYPES, [mqtt]). --define(BRIDGE(N, T, C), #{<<"id">> => N, <<"type">> => T, <<"config">> => C}). +-define(TYPES, [mqtt, http]). -define(TRY_PARSE_ID(ID, EXPR), try emqx_bridge:parse_bridge_id(Id) of {BridgeType, BridgeName} -> EXPR catch error:{invalid_bridge_id, Id0} -> - {400, #{code => 102, message => <<"invalid_bridge_id: ", Id0/binary>>}} + {400, #{code => 'INVALID_ID', message => <<"invalid_bridge_id: ", Id0/binary, + ". Bridge Ids must be of format <type>:<name>">>}} end). +-define(METRICS(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX), + #{ matched => MATCH, + success => SUCC, + failed => FAILED, + speed => RATE, + speed_last5m => RATE_5, + speed_max => RATE_MAX + }). +-define(metrics(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX), + #{ matched := MATCH, + success := SUCC, + failed := FAILED, + speed := RATE, + speed_last5m := RATE_5, + speed_max := RATE_MAX + }). + req_schema() -> Schema = [ case maps:to_list(emqx:get_raw_config([bridges, T], #{})) of @@ -45,19 +62,56 @@ req_schema() -> emqx_mgmt_api_configs:gen_schema(Conf) end || T <- ?TYPES], - #{oneOf => Schema}. + #{'oneOf' => Schema}. + +node_schema() -> + #{type => string, example => "emqx@127.0.0.1"}. + +status_schema() -> + #{type => string, enum => [connected, disconnected]}. + +metrics_schema() -> + #{ type => object + , properties => #{ + matched => #{type => integer, example => "0"}, + success => #{type => integer, example => "0"}, + failed => #{type => integer, example => "0"}, + speed => #{type => number, format => float, example => "0.0"}, + speed_last5m => #{type => number, format => float, example => "0.0"}, + speed_max => #{type => number, format => float, example => "0.0"} + } + }. + +per_node_schema(Key, Schema) -> + #{ + type => array, + items => #{ + type => object, + properties => #{ + node => node_schema(), + Key => Schema + } + } + }. resp_schema() -> - #{oneOf := Schema} = req_schema(), AddMetadata = fun(Prop) -> - Prop#{is_connected => #{type => boolean}, - id => #{type => string}, + Prop#{status => status_schema(), + node_status => per_node_schema(status, status_schema()), + metrics => metrics_schema(), + node_metrics => per_node_schema(metrics, metrics_schema()), + id => #{type => string, example => "http:my_http_bridge"}, bridge_type => #{type => string, enum => ?TYPES}, - node => #{type => string}} + node => node_schema() + } end, + more_props_resp_schema(AddMetadata). + +more_props_resp_schema(AddMetadata) -> + #{'oneOf' := Schema} = req_schema(), Schema1 = [S#{properties => AddMetadata(Prop)} || S = #{properties := Prop} <- Schema], - #{oneOf => Schema1}.
+ #{'oneOf' => Schema1}. api_spec() -> {bridge_apis(), []}. @@ -66,6 +120,10 @@ bridge_apis() -> [list_all_bridges_api(), crud_bridges_apis(), operation_apis()]. list_all_bridges_api() -> + ReqSchema = more_props_resp_schema(fun(Prop) -> + Prop#{id => #{type => string, required => true}} + end), + RespSchema = resp_schema(), Metadata = #{ get => #{ description => <<"List all created bridges">>, @@ -73,9 +131,18 @@ list_all_bridges_api() -> <<"200">> => emqx_mgmt_util:array_schema(resp_schema(), <<"A list of the bridges">>) } + }, + post => #{ + description => <<"Create a new bridge">>, + 'requestBody' => emqx_mgmt_util:schema(ReqSchema), + responses => #{ + <<"201">> => emqx_mgmt_util:schema(RespSchema, <<"Bridge created">>), + <<"400">> => emqx_mgmt_util:error_schema(<<"Create bridge failed">>, + ['UPDATE_FAILED']) + } } }, - {"/bridges/", Metadata, list_bridges}. + {"/bridges/", Metadata, list_create_bridges_in_cluster}. crud_bridges_apis() -> ReqSchema = req_schema(), @@ -91,7 +158,7 @@ crud_bridges_apis() -> } }, put => #{ - description => <<"Create or update a bridge">>, + description => <<"Update a bridge">>, parameters => [param_path_id()], 'requestBody' => emqx_mgmt_util:schema(ReqSchema), responses => #{ @@ -104,12 +171,12 @@ crud_bridges_apis() -> description => <<"Delete a bridge">>, parameters => [param_path_id()], responses => #{ - <<"200">> => emqx_mgmt_util:schema(<<"Bridge deleted">>), + <<"204">> => emqx_mgmt_util:schema(<<"Bridge deleted">>), <<"404">> => emqx_mgmt_util:error_schema(<<"Bridge not found">>, ['NOT_FOUND']) } } }, - {"/bridges/:id", Metadata, crud_bridges_cluster}. + {"/bridges/:id", Metadata, crud_bridges_in_cluster}. operation_apis() -> Metadata = #{ @@ -120,7 +187,8 @@ operation_apis() -> param_path_id(), param_path_operation()], responses => #{ - <<"500">> => emqx_mgmt_util:error_schema(<<"Operation Failed">>, ['INTERNAL_ERROR']), + <<"500">> => emqx_mgmt_util:error_schema(<<"Operation Failed">>, + ['INTERNAL_ERROR']), <<"200">> => emqx_mgmt_util:schema(<<"Operation success">>)}}}, {"/nodes/:node/bridges/:id/operation/:operation", Metadata, manage_bridges}. @@ -152,80 +220,150 @@ param_path_operation()-> example => restart }. -list_bridges(get, _Params) -> - {200, lists:append([list_local_bridges(Node) || Node <- mria_mnesia:running_nodes()])}. +list_create_bridges_in_cluster(post, #{body := #{<<"id">> := Id} = Conf}) -> + ?TRY_PARSE_ID(Id, + case emqx_bridge:lookup(BridgeType, BridgeName) of + {ok, _} -> {400, #{code => 'ALREADY_EXISTS', message => <<"bridge already exists">>}}; + {error, not_found} -> + case ensure_bridge(BridgeType, BridgeName, maps:remove(<<"id">>, Conf)) of + ok -> lookup_from_all_nodes(Id, BridgeType, BridgeName, 201); + {error, Error} -> {400, Error} + end + end); +list_create_bridges_in_cluster(get, _Params) -> + {200, zip_bridges([list_local_bridges(Node) || Node <- mria_mnesia:running_nodes()])}. list_local_bridges(Node) when Node =:= node() -> - [format_resp(Data) || Data <- emqx_bridge:list_bridges()]; + [format_resp(Data) || Data <- emqx_bridge:list()]; list_local_bridges(Node) -> rpc_call(Node, list_local_bridges, [Node]). -crud_bridges_cluster(Method, Params) -> - Results = [crud_bridges(Node, Method, Params) || Node <- mria_mnesia:running_nodes()], - case lists:filter(fun({200}) -> false; ({200, _}) -> false; (_) -> true end, Results) of - [] -> - case Results of - [{200} | _] -> {200}; - _ -> {200, [Res || {200, Res} <- Results]} - end; - Errors -> - hd(Errors) - end. 
+crud_bridges_in_cluster(get, #{bindings := #{id := Id}}) -> + ?TRY_PARSE_ID(Id, lookup_from_all_nodes(Id, BridgeType, BridgeName, 200)); -crud_bridges(Node, Method, Params) when Node =/= node() -> - rpc_call(Node, crud_bridges, [Node, Method, Params]); - -crud_bridges(_, get, #{bindings := #{id := Id}}) -> - ?TRY_PARSE_ID(Id, case emqx_bridge:get_bridge(BridgeType, BridgeName) of - {ok, Data} -> {200, format_resp(Data)}; - {error, not_found} -> - {404, #{code => 102, message => <<"not_found: ", Id/binary>>}} - end); - -crud_bridges(_, put, #{bindings := #{id := Id}, body := Conf}) -> +crud_bridges_in_cluster(put, #{bindings := #{id := Id}, body := Conf}) -> ?TRY_PARSE_ID(Id, - case emqx:update_config(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], Conf, - #{rawconf_with_defaults => true}) of - {ok, #{raw_config := RawConf, post_config_update := #{emqx_bridge := Data}}} -> - {200, format_resp(#{id => Id, raw_config => RawConf, resource_data => Data})}; - {ok, _} -> %% the bridge already exits - {ok, Data} = emqx_bridge:get_bridge(BridgeType, BridgeName), - {200, format_resp(Data)}; - {error, Reason} -> - {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}} + case emqx_bridge:lookup(BridgeType, BridgeName) of + {ok, _} -> + case ensure_bridge(BridgeType, BridgeName, Conf) of + ok -> lookup_from_all_nodes(Id, BridgeType, BridgeName, 200); + {error, Error} -> {400, Error} + end; + {error, not_found} -> + {404, #{code => 'NOT_FOUND', message => <<"bridge not found">>}} end); -crud_bridges(_, delete, #{bindings := #{id := Id}}) -> +crud_bridges_in_cluster(delete, #{bindings := #{id := Id}}) -> ?TRY_PARSE_ID(Id, - case emqx:remove_config(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName]) of - {ok, _} -> {200}; + case emqx_conf:remove(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], + #{override_to => cluster}) of + {ok, _} -> {204}; {error, Reason} -> - {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}} + {500, #{code => 102, message => emqx_resource_api:stringify(Reason)}} end). +lookup_from_all_nodes(Id, BridgeType, BridgeName, SuccCode) -> + case rpc_multicall(lookup_from_local_node, [BridgeType, BridgeName]) of + {ok, [{ok, _} | _] = Results} -> + {SuccCode, format_bridge_info([R || {ok, R} <- Results])}; + {ok, [{error, not_found} | _]} -> + {404, error_msg('NOT_FOUND', <<"not_found: ", Id/binary>>)}; + {error, ErrL} -> + {500, error_msg('UNKNOWN_ERROR', ErrL)} + end. + +lookup_from_local_node(BridgeType, BridgeName) -> + case emqx_bridge:lookup(BridgeType, BridgeName) of + {ok, Res} -> {ok, format_resp(Res)}; + Error -> Error + end. + manage_bridges(post, #{bindings := #{node := Node, id := Id, operation := Op}}) -> OperFun = - fun (<<"start">>) -> start_bridge; - (<<"stop">>) -> stop_bridge; - (<<"restart">>) -> restart_bridge + fun (<<"start">>) -> start; + (<<"stop">>) -> stop; + (<<"restart">>) -> restart end, ?TRY_PARSE_ID(Id, case rpc_call(binary_to_atom(Node, latin1), emqx_bridge, OperFun(Op), [BridgeType, BridgeName]) of ok -> {200}; {error, Reason} -> - {500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}} + {500, #{code => 102, message => emqx_resource_api:stringify(Reason)}} end). 
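The handlers above accept a flat bridge id of the form type:name (for example http:my_http_bridge) and resolve it to a {BridgeType, BridgeName} pair through ?TRY_PARSE_ID before touching any config. A minimal sketch of that split, assuming the same convention as the connector-id parser added later in this patch; the real emqx_bridge:parse_bridge_id/1 may differ in details:

parse_bridge_id_sketch(Id) when is_binary(Id) ->
    %% sketch only: <<"http:my_http_bridge">> -> {http, my_http_bridge};
    %% anything else maps to the 400 'INVALID_ID' branch of ?TRY_PARSE_ID above
    case string:split(Id, ":", all) of
        [Type, Name] -> {binary_to_atom(Type, utf8), binary_to_atom(Name, utf8)};
        _ -> error({invalid_bridge_id, Id})
    end.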
-format_resp(#{id := Id, raw_config := RawConf, resource_data := #{mod := Mod, status := Status}}) -> - IsConnected = fun(started) -> true; (_) -> false end, +ensure_bridge(BridgeType, BridgeName, Conf) -> + case emqx_conf:update(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], Conf, + #{override_to => cluster}) of + {ok, _} -> ok; + {error, Reason} -> + {error, error_msg('BAD_ARG', Reason)} + end. + +zip_bridges([BridgesFirstNode | _] = BridgesAllNodes) -> + lists:foldl(fun(#{id := Id}, Acc) -> + Bridges = pick_bridges_by_id(Id, BridgesAllNodes), + [format_bridge_info(Bridges) | Acc] + end, [], BridgesFirstNode). + +pick_bridges_by_id(Id, BridgesAllNodes) -> + lists:foldl(fun(BridgesOneNode, Acc) -> + [BridgeInfo] = [Bridge || Bridge = #{id := Id0} <- BridgesOneNode, Id0 == Id], + [BridgeInfo | Acc] + end, [], BridgesAllNodes). + +format_bridge_info([FirstBridge | _] = Bridges) -> + Res = maps:remove(node, FirstBridge), + NodeStatus = collect_status(Bridges), + NodeMetrics = collect_metrics(Bridges), + Res#{ status => aggregate_status(NodeStatus) + , node_status => NodeStatus + , metrics => aggregate_metrics(NodeMetrics) + , node_metrics => NodeMetrics + }. + +collect_status(Bridges) -> + [maps:with([node, status], B) || B <- Bridges]. + +aggregate_status(AllStatus) -> + AllConnected = lists:all(fun (#{status := connected}) -> true; + (_) -> false + end, AllStatus), + case AllConnected of + true -> connected; + false -> disconnected + end. + +collect_metrics(Bridges) -> + [maps:with([node, metrics], B) || B <- Bridges]. + +aggregate_metrics(AllMetrics) -> + InitMetrics = ?METRICS(0,0,0,0,0,0), + lists:foldl(fun(#{metrics := ?metrics(Match1, Succ1, Failed1, Rate1, Rate5m1, RateMax1)}, + ?metrics(Match0, Succ0, Failed0, Rate0, Rate5m0, RateMax0)) -> + ?METRICS(Match1 + Match0, Succ1 + Succ0, Failed1 + Failed0, + Rate1 + Rate0, Rate5m1 + Rate5m0, RateMax1 + RateMax0) + end, InitMetrics, AllMetrics). + +format_resp(#{id := Id, raw_config := RawConf, + resource_data := #{mod := Mod, status := Status, metrics := Metrics}}) -> + IsConnected = fun(started) -> connected; (_) -> disconnected end, RawConf#{ id => Id, node => node(), bridge_type => emqx_bridge:bridge_type(Mod), - is_connected => IsConnected(Status) + status => IsConnected(Status), + metrics => Metrics }. +rpc_multicall(Func, Args) -> + Nodes = mria_mnesia:running_nodes(), + ResL = erpc:multicall(Nodes, ?MODULE, Func, Args, 15000), + case lists:filter(fun({ok, _}) -> false; (_) -> true end, ResL) of + [] -> {ok, [Res || {ok, Res} <- ResL]}; + ErrL -> {error, ErrL} + end. + rpc_call(Node, Fun, Args) -> rpc_call(Node, ?MODULE, Fun, Args). @@ -236,3 +374,8 @@ rpc_call(Node, Mod, Fun, Args) -> {badrpc, Reason} -> {error, Reason}; Res -> Res end. + +error_msg(Code, Msg) when is_binary(Msg) -> + #{code => Code, message => Msg}; +error_msg(Code, Msg) -> + #{code => Code, message => list_to_binary(io_lib:format("~p", [Msg]))}. diff --git a/apps/emqx_bridge/src/emqx_bridge_app.erl b/apps/emqx_bridge/src/emqx_bridge_app.erl index 3fa8f12dd..846b6dd00 100644 --- a/apps/emqx_bridge/src/emqx_bridge_app.erl +++ b/apps/emqx_bridge/src/emqx_bridge_app.erl @@ -21,9 +21,9 @@ start(_StartType, _StartArgs) -> {ok, Sup} = emqx_bridge_sup:start_link(), - ok = emqx_bridge:load_bridges(), - ok = emqx_bridge:reload_hook(), - emqx_conf:add_handler(emqx_bridge:config_key_path(), emqx_bridge), + ok = emqx_bridge:load(), + ok = emqx_bridge:load_hook(), + emqx_config_handler:add_handler(emqx_bridge:config_key_path(), emqx_bridge), {ok, Sup}. 
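For the per-node aggregation in emqx_bridge_api above: aggregate_status/1 reports connected only when every node reports connected, and aggregate_metrics/1 sums the six counters node by node. A rough shell-style sketch of what the fold yields for two nodes, assuming the two helpers were exported for the check (in the patch they are module-internal):

NodeStatus = [#{node => 'emqx@node1.emqx.io', status => connected},
              #{node => 'emqx@node2.emqx.io', status => disconnected}].
disconnected = aggregate_status(NodeStatus).   %% one disconnected node is enough

NodeMetrics = [#{node => 'emqx@node1.emqx.io',
                 metrics => #{matched => 3, success => 2, failed => 1,
                              speed => 0.1, speed_last5m => 0.2, speed_max => 0.5}},
               #{node => 'emqx@node2.emqx.io',
                 metrics => #{matched => 1, success => 1, failed => 0,
                              speed => 0.0, speed_last5m => 0.0, speed_max => 0.1}}].
#{matched := 4, success := 3, failed := 1} = aggregate_metrics(NodeMetrics).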
stop(_State) -> diff --git a/apps/emqx_bridge/src/emqx_bridge_monitor.erl b/apps/emqx_bridge/src/emqx_bridge_monitor.erl index 3136a74c9..4f444f7c0 100644 --- a/apps/emqx_bridge/src/emqx_bridge_monitor.erl +++ b/apps/emqx_bridge/src/emqx_bridge_monitor.erl @@ -67,18 +67,6 @@ code_change(_OldVsn, State, _Extra) -> load_bridges(Configs) -> lists:foreach(fun({Type, NamedConf}) -> lists:foreach(fun({Name, Conf}) -> - load_bridge(Name, Type, Conf) + emqx_bridge:create(Type, Name, Conf) end, maps:to_list(NamedConf)) end, maps:to_list(Configs)). - -%% TODO: move this monitor into emqx_resource -%% emqx_resource:check_and_create_local(ResourceId, ResourceType, Config, #{keep_retry => true}). -load_bridge(Name, Type, Config) -> - case emqx_resource:create_local( - emqx_bridge:resource_id(Type, Name), - emqx_bridge:resource_type(Type), Config) of - {ok, already_created} -> ok; - {ok, _} -> ok; - {error, Reason} -> - error({load_bridge, Reason}) - end. diff --git a/apps/emqx_bridge/src/emqx_bridge_schema.erl b/apps/emqx_bridge/src/emqx_bridge_schema.erl index 2072d15ec..26a1d5bd1 100644 --- a/apps/emqx_bridge/src/emqx_bridge_schema.erl +++ b/apps/emqx_bridge/src/emqx_bridge_schema.erl @@ -10,16 +10,114 @@ roots() -> [bridges]. fields(bridges) -> - [ {mqtt, hoconsc:mk(hoconsc:map(name, hoconsc:ref(?MODULE, "mqtt_bridge")))} - , {http, hoconsc:mk(hoconsc:map(name, hoconsc:ref(?MODULE, "http_bridge")))} + [ {mqtt, + sc(hoconsc:map(name, hoconsc:union([ ref("ingress_mqtt_bridge") + , ref("egress_mqtt_bridge") + ])), + #{ desc => "MQTT bridges" + })} + , {http, + sc(hoconsc:map(name, ref("http_bridge")), + #{ desc => "HTTP bridges" + })} ]; -fields("mqtt_bridge") -> - emqx_connector_mqtt:fields("config"); +fields("ingress_mqtt_bridge") -> + [ direction(ingress, emqx_connector_mqtt_schema:ingress_desc()) + , connector_name() + ] ++ proplists:delete(hookpoint, emqx_connector_mqtt_schema:fields("ingress")); + +fields("egress_mqtt_bridge") -> + [ direction(egress, emqx_connector_mqtt_schema:egress_desc()) + , connector_name() + ] ++ emqx_connector_mqtt_schema:fields("egress"); fields("http_bridge") -> - emqx_connector_http:fields(config) ++ http_channels(). + basic_config_http() ++ + [ {url, + sc(binary(), + #{ nullable => false + , desc =>""" +The URL of the HTTP Bridge.
+Template with variables is allowed in the path, but variables cannot be used in the scheme, host, +or port part.
+For example, http://localhost:9901/${topic} is allowed, but + http://${host}:9901/message or http://localhost:${port}/message +is not allowed. +""" + })} + , {from_local_topic, + sc(binary(), + #{ desc =>""" +The MQTT topic filter to be forwarded to the HTTP server. All MQTT PUBLISH messages whose topic +matches the from_local_topic will be forwarded.
+NOTE: if this bridge is used as the output of a rule (emqx rule engine), and from_local_topic is also configured, then both the data from the rule and the MQTT messages that match +from_local_topic will be forwarded. +""" + })} + , {method, + sc(method(), + #{ default => post + , desc =>""" +The method of the HTTP request. All the available methods are: post, put, get, delete.
+Template with variables is allowed.
+""" + })} + , {headers, + sc(map(), + #{ default => #{ + <<"accept">> => <<"application/json">>, + <<"cache-control">> => <<"no-cache">>, + <<"connection">> => <<"keep-alive">>, + <<"content-type">> => <<"application/json">>, + <<"keep-alive">> => <<"timeout=5">>} + , desc =>""" +The headers of the HTTP request.
+Template with variables is allowed. +""" + }) + } + , {body, + sc(binary(), + #{ default => <<"${payload}">> + , desc =>""" +The body of the HTTP request.
+Template with variables is allowed. +""" + })} + , {request_timeout, + sc(emqx_schema:duration_ms(), + #{ default => <<"30s">> + , desc =>""" +How long before the HTTP request times out. +""" + })} + ]. -http_channels() -> - [{egress_channels, hoconsc:mk(hoconsc:map(id, - hoconsc:ref(emqx_connector_http, "http_request")))}]. +direction(Dir, Desc) -> + {direction, + sc(Dir, + #{ nullable => false + , desc => "The direction of the bridge. Can be one of 'ingress' or 'egress'.
" ++ + Desc + })}. + +connector_name() -> + {connector, + sc(binary(), + #{ nullable => false + , desc =>""" +The connector name to be used for this bridge. +Connectors are configured by 'connectors.. +""" + })}. + +basic_config_http() -> + proplists:delete(base_url, emqx_connector_http:fields(config)). + +method() -> + hoconsc:enum([post, put, get, delete]). + +sc(Type, Meta) -> hoconsc:mk(Type, Meta). + +ref(Field) -> hoconsc:ref(?MODULE, Field). diff --git a/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl new file mode 100644 index 000000000..23d4691f5 --- /dev/null +++ b/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl @@ -0,0 +1,292 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_bridge_api_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). +-define(CONF_DEFAULT, <<"bridges: {}">>). +-define(TEST_ID, <<"http:test_bridge">>). +-define(URL(PORT, PATH), list_to_binary( + io_lib:format("http://localhost:~s/~s", + [integer_to_list(PORT), PATH]))). +-define(HTTP_BRIDGE(URL), +#{ + <<"url">> => URL, + <<"from_local_topic">> => <<"emqx_http/#">>, + <<"method">> => <<"post">>, + <<"ssl">> => #{<<"enable">> => false}, + <<"body">> => <<"${payload}">>, + <<"headers">> => #{ + <<"content-type">> => <<"application/json">> + } + +}). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +groups() -> + []. + +suite() -> + [{timetrap,{seconds,30}}]. + +init_per_suite(Config) -> + ok = emqx_config:put([emqx_dashboard], #{ + default_username => <<"admin">>, + default_password => <<"public">>, + listeners => [#{ + protocol => http, + port => 18083 + }] + }), + _ = application:load(emqx_conf), + %% some testcases (may from other app) already get emqx_connector started + _ = application:stop(emqx_resource), + _ = application:stop(emqx_connector), + ok = emqx_common_test_helpers:start_apps([emqx_bridge, emqx_dashboard]), + ok = emqx_config:init_load(emqx_bridge_schema, ?CONF_DEFAULT), + Config. + +end_per_suite(_Config) -> + emqx_common_test_helpers:stop_apps([emqx_bridge, emqx_dashboard]), + ok. + +init_per_testcase(_, Config) -> + {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), + Config. +end_per_testcase(_, _Config) -> + ok. + +%%------------------------------------------------------------------------------ +%% HTTP server for testing +%%------------------------------------------------------------------------------ +start_http_server(HandleFun) -> + Parent = self(), + spawn_link(fun() -> + {Port, Sock} = listen_on_random_port(), + Parent ! {port, Port}, + loop(Sock, HandleFun) + end), + receive + {port, Port} -> Port + after + 2000 -> error({timeout, start_http_server}) + end. 
+ +listen_on_random_port() -> + Min = 1024, Max = 65000, + Port = rand:uniform(Max - Min) + Min, + case gen_tcp:listen(Port, [{active, false}, {reuseaddr, true}]) of + {ok, Sock} -> {Port, Sock}; + {error, eaddrinuse} -> listen_on_random_port() + end. + +loop(Sock, HandleFun) -> + {ok, Conn} = gen_tcp:accept(Sock), + Handler = spawn(fun () -> HandleFun(Conn) end), + gen_tcp:controlling_process(Conn, Handler), + loop(Sock, HandleFun). + +make_response(CodeStr, Str) -> + B = iolist_to_binary(Str), + iolist_to_binary( + io_lib:fwrite( + "HTTP/1.0 ~s\nContent-Type: text/html\nContent-Length: ~p\n\n~s", + [CodeStr, size(B), B])). + +handle_fun_200_ok(Conn) -> + case gen_tcp:recv(Conn, 0) of + {ok, Request} -> + gen_tcp:send(Conn, make_response("200 OK", "Request OK")), + self() ! {http_server, received, Request}, + handle_fun_200_ok(Conn); + {error, closed} -> + gen_tcp:close(Conn) + end. + +%%------------------------------------------------------------------------------ +%% Testcases +%%------------------------------------------------------------------------------ + +t_http_crud_apis(_) -> + Port = start_http_server(fun handle_fun_200_ok/1), + %% assert we there's no bridges at first + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), + + %% then we add a http bridge, using POST + %% POST /bridges/ will create a bridge + URL1 = ?URL(Port, "path1"), + {ok, 201, Bridge} = request(post, uri(["bridges"]), + ?HTTP_BRIDGE(URL1)#{<<"id">> => ?TEST_ID}), + + %ct:pal("---bridge: ~p", [Bridge]), + ?assertMatch(#{ <<"id">> := ?TEST_ID + , <<"bridge_type">> := <<"http">> + , <<"status">> := _ + , <<"node_status">> := [_|_] + , <<"metrics">> := _ + , <<"node_metrics">> := [_|_] + , <<"url">> := URL1 + }, jsx:decode(Bridge)), + + %% create a again returns an error + {ok, 400, RetMsg} = request(post, uri(["bridges"]), + ?HTTP_BRIDGE(URL1)#{<<"id">> => ?TEST_ID}), + ?assertMatch( + #{ <<"code">> := _ + , <<"message">> := <<"bridge already exists">> + }, jsx:decode(RetMsg)), + + %% update the request-path of the bridge + URL2 = ?URL(Port, "path2"), + {ok, 200, Bridge2} = request(put, uri(["bridges", ?TEST_ID]), + ?HTTP_BRIDGE(URL2)), + ?assertMatch(#{ <<"id">> := ?TEST_ID + , <<"bridge_type">> := <<"http">> + , <<"status">> := _ + , <<"node_status">> := [_|_] + , <<"metrics">> := _ + , <<"node_metrics">> := [_|_] + , <<"url">> := URL2 + }, jsx:decode(Bridge2)), + + %% list all bridges again, assert Bridge2 is in it + {ok, 200, Bridge2Str} = request(get, uri(["bridges"]), []), + ?assertMatch([#{ <<"id">> := ?TEST_ID + , <<"bridge_type">> := <<"http">> + , <<"status">> := _ + , <<"node_status">> := [_|_] + , <<"metrics">> := _ + , <<"node_metrics">> := [_|_] + , <<"url">> := URL2 + }], jsx:decode(Bridge2Str)), + + %% get the bridge by id + {ok, 200, Bridge3Str} = request(get, uri(["bridges", ?TEST_ID]), []), + ?assertMatch(#{ <<"id">> := ?TEST_ID + , <<"bridge_type">> := <<"http">> + , <<"status">> := _ + , <<"node_status">> := [_|_] + , <<"metrics">> := _ + , <<"node_metrics">> := [_|_] + , <<"url">> := URL2 + }, jsx:decode(Bridge3Str)), + + %% delete the bridge + {ok, 204, <<>>} = request(delete, uri(["bridges", ?TEST_ID]), []), + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), + + %% update a deleted bridge returns an error + {ok, 404, ErrMsg2} = request(put, uri(["bridges", ?TEST_ID]), + ?HTTP_BRIDGE(URL2)), + ?assertMatch( + #{ <<"code">> := _ + , <<"message">> := <<"bridge not found">> + }, jsx:decode(ErrMsg2)), + ok. 
+ +t_start_stop_bridges(_) -> + Port = start_http_server(fun handle_fun_200_ok/1), + URL1 = ?URL(Port, "abc"), + {ok, 201, Bridge} = request(post, uri(["bridges"]), + ?HTTP_BRIDGE(URL1)#{<<"id">> => ?TEST_ID}), + %ct:pal("the bridge ==== ~p", [Bridge]), + ?assertMatch( + #{ <<"id">> := ?TEST_ID + , <<"bridge_type">> := <<"http">> + , <<"status">> := _ + , <<"node_status">> := [_|_] + , <<"metrics">> := _ + , <<"node_metrics">> := [_|_] + , <<"url">> := URL1 + }, jsx:decode(Bridge)), + %% stop it + {ok, 200, <<>>} = request(post, + uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "stop"]), + <<"">>), + {ok, 200, Bridge2} = request(get, uri(["bridges", ?TEST_ID]), []), + ?assertMatch(#{ <<"id">> := ?TEST_ID + , <<"status">> := <<"disconnected">> + }, jsx:decode(Bridge2)), + %% start again + {ok, 200, <<>>} = request(post, + uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "start"]), + <<"">>), + {ok, 200, Bridge3} = request(get, uri(["bridges", ?TEST_ID]), []), + ?assertMatch(#{ <<"id">> := ?TEST_ID + , <<"status">> := <<"connected">> + }, jsx:decode(Bridge3)), + %% restart an already started bridge + {ok, 200, <<>>} = request(post, + uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "restart"]), + <<"">>), + {ok, 200, Bridge3} = request(get, uri(["bridges", ?TEST_ID]), []), + ?assertMatch(#{ <<"id">> := ?TEST_ID + , <<"status">> := <<"connected">> + }, jsx:decode(Bridge3)), + %% stop it again + {ok, 200, <<>>} = request(post, + uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "stop"]), + <<"">>), + %% restart a stopped bridge + {ok, 200, <<>>} = request(post, + uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "restart"]), + <<"">>), + {ok, 200, Bridge4} = request(get, uri(["bridges", ?TEST_ID]), []), + ?assertMatch(#{ <<"id">> := ?TEST_ID + , <<"status">> := <<"connected">> + }, jsx:decode(Bridge4)), + %% delete the bridge + {ok, 204, <<>>} = request(delete, uri(["bridges", ?TEST_ID]), []), + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []). + +%%-------------------------------------------------------------------- +%% HTTP Request +%%-------------------------------------------------------------------- +-define(HOST, "http://127.0.0.1:18083/"). +-define(API_VERSION, "v5"). +-define(BASE_PATH, "api"). + +request(Method, Url, Body) -> + Request = case Body of + [] -> {Url, [auth_header_()]}; + _ -> {Url, [auth_header_()], "application/json", jsx:encode(Body)} + end, + ct:pal("Method: ~p, Request: ~p", [Method, Request]), + case httpc:request(Method, Request, [], [{body_format, binary}]) of + {error, socket_closed_remotely} -> + {error, socket_closed_remotely}; + {ok, {{"HTTP/1.1", Code, _}, _Headers, Return} } -> + {ok, Code, Return}; + {ok, {Reason, _, _}} -> + {error, Reason} + end. + +uri() -> uri([]). +uri(Parts) when is_list(Parts) -> + NParts = [E || E <- Parts], + ?HOST ++ filename:join([?BASE_PATH, ?API_VERSION | NParts]). + +auth_header_() -> + Username = <<"admin">>, + Password = <<"public">>, + {ok, Token} = emqx_dashboard_admin:sign_token(Username, Password), + {"Authorization", "Bearer " ++ binary_to_list(Token)}. 
+ diff --git a/apps/emqx_conf/etc/emqx_conf.conf b/apps/emqx_conf/etc/emqx_conf.conf index fcb7a2947..e57bc4869 100644 --- a/apps/emqx_conf/etc/emqx_conf.conf +++ b/apps/emqx_conf/etc/emqx_conf.conf @@ -306,14 +306,32 @@ cluster { ## Default: default namespace = default } +} - db_backend = mnesia +##================================================================== +## Internal database +##================================================================== +db { + ## Database backend + ## + ## @doc db.backend + ## ValueType: mnesia | rlog + ## Default: mnesia + backend = mnesia - rlog { - # role: core - # core_nodes: [] - } + ## RLOG role + ## + ## @doc db.role + ## ValueType: core | replicant + ## Default: core + role = core + ## Replicant core nodes + ## + ## @doc db.core_nodes + ## ValueType: comma-separated node list + ## Default: "" + core_nodes = "" } ##================================================================== diff --git a/apps/emqx_conf/src/emqx_cluster_rpc.erl b/apps/emqx_conf/src/emqx_cluster_rpc.erl index 4187b35aa..153800414 100644 --- a/apps/emqx_conf/src/emqx_cluster_rpc.erl +++ b/apps/emqx_conf/src/emqx_cluster_rpc.erl @@ -320,21 +320,23 @@ apply_mfa(TnxId, {M, F, A}) -> end, Meta = #{tnx_id => TnxId, module => M, function => F, args => ?TO_BIN(A)}, IsSuccess = is_success(Res), - log_and_alarm(IsSuccess, Res, Meta), + log_and_alarm(IsSuccess, Res, Meta, TnxId), {IsSuccess, Res}. is_success(ok) -> true; is_success({ok, _}) -> true; is_success(_) -> false. -log_and_alarm(true, Res, Meta) -> +log_and_alarm(true, Res, Meta, TnxId) -> OkMeta = Meta#{msg => <<"succeeded to apply MFA">>, result => Res}, ?SLOG(debug, OkMeta), - emqx_alarm:deactivate(cluster_rpc_apply_failed, OkMeta#{result => ?TO_BIN(Res)}); -log_and_alarm(false, Res, Meta) -> + Message = ["cluster_rpc_apply_failed:", integer_to_binary(TnxId)], + emqx_alarm:deactivate(cluster_rpc_apply_failed, OkMeta#{result => ?TO_BIN(Res)}, Message); +log_and_alarm(false, Res, Meta, TnxId) -> NotOkMeta = Meta#{msg => <<"failed to apply MFA">>, result => Res}, ?SLOG(error, NotOkMeta), - emqx_alarm:activate(cluster_rpc_apply_failed, NotOkMeta#{result => ?TO_BIN(Res)}). + Message = ["cluster_rpc_apply_failed:", integer_to_binary(TnxId)], + emqx_alarm:activate(cluster_rpc_apply_failed, NotOkMeta#{result => ?TO_BIN(Res)}, Message). wait_for_all_nodes_commit(TnxId, Delay, Remain) -> case lagging_node(TnxId) of diff --git a/apps/emqx_conf/src/emqx_cluster_rpc_handler.erl b/apps/emqx_conf/src/emqx_cluster_rpc_handler.erl index ab2b24d27..b7022db35 100644 --- a/apps/emqx_conf/src/emqx_cluster_rpc_handler.erl +++ b/apps/emqx_conf/src/emqx_cluster_rpc_handler.erl @@ -41,22 +41,22 @@ init([State]) -> {ok, ensure_timer(State)}. handle_call(Req, _From, State) -> - ?LOG(error, "unexpected call: ~p", [Req]), + ?SLOG(error, #{msg => "unexpected_call", call => Req}), {reply, ignored, State}. handle_cast(Msg, State) -> - ?LOG(error, "unexpected msg: ~p", [Msg]), + ?SLOG(error, #{msg => "unexpected_msg", cast => Msg}), {noreply, State}. 
handle_info({timeout, TRef, del_stale_mfa}, State = #{timer := TRef, max_history := MaxHistory}) -> case mria:transaction(?CLUSTER_RPC_SHARD, fun del_stale_mfa/1, [MaxHistory]) of {atomic, ok} -> ok; - Error -> ?LOG(error, "del_stale_cluster_rpc_mfa error:~p", [Error]) + Error -> ?SLOG(error, #{msg => "del_stale_cluster_rpc_mfa_error", error => Error}) end, {noreply, ensure_timer(State), hibernate}; handle_info(Info, State) -> - ?LOG(error, "unexpected info: ~p", [Info]), + ?SLOG(error, #{msg => "unexpected_info", info => Info}), {noreply, State}. terminate(_Reason, #{timer := TRef}) -> diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index 7c039377a..c3dfa8c49 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -81,8 +81,7 @@ get_node_and_config(KeyPath) -> {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. update(KeyPath, UpdateReq, Opts0) -> Args = [KeyPath, UpdateReq, Opts0], - {ok, _TnxId, Res} = emqx_cluster_rpc:multicall(emqx, update_config, Args), - Res. + multicall(emqx, update_config, Args). %% @doc Update the specified node's key path in local-override.conf. -spec update(node(), emqx_map_lib:config_key_path(), emqx_config:update_request(), @@ -98,8 +97,7 @@ update(Node, KeyPath, UpdateReq, Opts0) -> {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. remove(KeyPath, Opts0) -> Args = [KeyPath, Opts0], - {ok, _TnxId, Res} = emqx_cluster_rpc:multicall(emqx, remove_config, Args), - Res. + multicall(emqx, remove_config, Args). %% @doc remove the specified node's key path in local-override.conf. -spec remove(node(), emqx_map_lib:config_key_path(), emqx_config:update_opts()) -> @@ -114,8 +112,7 @@ remove(Node, KeyPath, Opts) -> {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}. reset(KeyPath, Opts0) -> Args = [KeyPath, Opts0], - {ok, _TnxId, Res} = emqx_cluster_rpc:multicall(emqx, reset_config, Args), - Res. + multicall(emqx, reset_config, Args). %% @doc reset the specified node's key path in local-override.conf. -spec reset(node(), emqx_map_lib:config_key_path(), emqx_config:update_opts()) -> @@ -124,3 +121,15 @@ reset(Node, KeyPath, Opts) when Node =:= node() -> emqx:reset_config(KeyPath, Opts#{override_to => local}); reset(Node, KeyPath, Opts) -> rpc:call(Node, ?MODULE, reset, [KeyPath, Opts]). + +%%-------------------------------------------------------------------- +%% Internal funcs +%%-------------------------------------------------------------------- + +multicall(M, F, Args) -> + case emqx_cluster_rpc:multicall(M, F, Args) of + {ok, _TnxId, Res} -> + Res; + {error, Reason} -> + {error, Reason} + end. diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index a187bc58c..d8bb2423b 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -56,6 +56,7 @@ , emqx_exhook_schema , emqx_psk_schema , emqx_limiter_schema + , emqx_connector_schema ]). namespace() -> undefined. @@ -72,8 +73,7 @@ roots() -> sc(hoconsc:ref("cluster"), #{ desc => "EMQ X nodes can form a cluster to scale up the total capacity.
" "Here holds the configs to instruct how individual nodes " - "can discover each other, also the database replication " - "role of this node etc." + "can discover each other." })} , {"log", sc(hoconsc:ref("log"), @@ -101,6 +101,10 @@ natively in the EMQ X node;
'postgresql' etc. to look up clients or rules from external databases;
""" })} + , {"db", + sc(ref("db"), + #{ desc => "Settings of the embedded database." + })} ] ++ emqx_schema:roots(medium) ++ emqx_schema:roots(low) ++ @@ -146,14 +150,6 @@ fields("cluster") -> , {"k8s", sc(ref(cluster_k8s), #{})} - , {"db_backend", - sc(hoconsc:enum([mnesia, rlog]), - #{ mapping => "mria.db_backend" - , default => mnesia - })} - , {"rlog", - sc(ref("rlog"), - #{})} ]; fields(cluster_static) -> @@ -251,19 +247,6 @@ fields(cluster_k8s) -> })} ]; -fields("rlog") -> - [ {"role", - sc(hoconsc:enum([core, replicant]), - #{ mapping => "mria.node_role" - , default => core - })} - , {"core_nodes", - sc(emqx_schema:comma_separated_atoms(), - #{ mapping => "mria.core_nodes" - , default => [] - })} - ]; - fields("node") -> [ {"name", sc(string(), @@ -328,6 +311,46 @@ fields("node") -> )} ]; +fields("db") -> + [ {"backend", + sc(hoconsc:enum([mnesia, rlog]), + #{ mapping => "mria.db_backend" + , default => mnesia + , desc => """ +Select the backend for the embedded database.
+Important! This setting should be the same on all nodes in the cluster.
+Important! Changing this setting at runtime is not allowed.
+mnesia is the default backend, which offers decent performance in small clusters.
+rlog is a new experimental backend that is suitable for very large clusters. +""" + })} + , {"role", + sc(hoconsc:enum([core, replicant]), + #{ mapping => "mria.node_role" + , default => core + , desc => """ +Select a node role.
+core nodes provide durability of the data, and take care of writes. +It is recommended to place core nodes in different racks or different availability zones.
+replicant nodes are ephemeral worker nodes. Removing them from the cluster +doesn't affect database redundancy
+It is recommended to have more replicant nodes than core nodes.
+Note: this parameter only takes effect when the backend is set +to rlog. +""" + })} + , {"core_nodes", + sc(emqx_schema:comma_separated_atoms(), + #{ mapping => "mria.core_nodes" + , default => [] + , desc => """ +List of core nodes that the replicant will connect to.
+Note: this parameter only takes effect when the backend is set +to rlog and the role is set to replicant. +""" + })} + ]; + fields("cluster_call") -> [ {"retry_interval", sc(emqx_schema:duration(), @@ -341,8 +364,9 @@ fields("cluster_call") -> })} , {"cleanup_interval", sc(emqx_schema:duration(), - #{ desc => "Time interval to clear completed but stale transactions. - Ensure that the number of completed transactions is less than the max_history." + #{ desc => +"Time interval to clear completed but stale transactions. +Ensure that the number of completed transactions is less than the max_history." , default => "5m" })} ]; @@ -505,7 +529,7 @@ fields("authorization") -> translations() -> ["ekka", "kernel", "emqx"]. translation("ekka") -> - [ {"cluster_discovery", fun tr_cluster__discovery/1}]; + [ {"cluster_discovery", fun tr_cluster_discovery/1}]; translation("kernel") -> [ {"logger_level", fun tr_logger_level/1} , {"logger", fun tr_logger/1}]; @@ -540,7 +564,7 @@ tr_override_conf_file(Conf, Filename) -> [_ | _] = DataDir, filename:join([DataDir, "configs", Filename]). -tr_cluster__discovery(Conf) -> +tr_cluster_discovery(Conf) -> Strategy = conf_get("cluster.discovery_strategy", Conf), {Strategy, filter(options(Strategy, Conf))}. @@ -718,22 +742,10 @@ sort_log_levels(Levels) -> %% utils -spec(conf_get(string() | [string()], hocon:config()) -> term()). conf_get(Key, Conf) -> - V = hocon_schema:get_value(Key, Conf), - case is_binary(V) of - true -> - binary_to_list(V); - false -> - V - end. + ensure_list(hocon_schema:get_value(Key, Conf)). conf_get(Key, Conf, Default) -> - V = hocon_schema:get_value(Key, Conf, Default), - case is_binary(V) of - true -> - binary_to_list(V); - false -> - V - end. + ensure_list(hocon_schema:get_value(Key, Conf, Default)). filter(Opts) -> [{K, V} || {K, V} <- Opts, V =/= undefined]. @@ -789,5 +801,14 @@ to_atom(Str) when is_list(Str) -> to_atom(Bin) when is_binary(Bin) -> binary_to_atom(Bin, utf8). +-spec ensure_list(binary() | list(char())) -> list(char()). +ensure_list(V) -> + case is_binary(V) of + true -> + binary_to_list(V); + false -> + V + end. + roots(Module) -> lists:map(fun({_BinName, Root}) -> Root end, hocon_schema:roots(Module)). diff --git a/apps/emqx_conf/test/emqx_cluster_rpc_SUITE.erl b/apps/emqx_conf/test/emqx_cluster_rpc_SUITE.erl index cb79151ce..993ab3dc5 100644 --- a/apps/emqx_conf/test/emqx_cluster_rpc_SUITE.erl +++ b/apps/emqx_conf/test/emqx_cluster_rpc_SUITE.erl @@ -43,8 +43,8 @@ init_per_suite(Config) -> ok = ekka:start(), ok = mria_rlog:wait_for_shards([?CLUSTER_RPC_SHARD], infinity), meck:new(emqx_alarm, [non_strict, passthrough, no_link]), - meck:expect(emqx_alarm, activate, 2, ok), - meck:expect(emqx_alarm, deactivate, 2, ok), + meck:expect(emqx_alarm, activate, 3, ok), + meck:expect(emqx_alarm, deactivate, 3, ok), Config. 
end_per_suite(_Config) -> @@ -121,18 +121,18 @@ t_catch_up_status_handle_next_commit(_Config) -> t_commit_ok_apply_fail_on_other_node_then_recover(_Config) -> emqx_cluster_rpc:reset(), {atomic, []} = emqx_cluster_rpc:status(), - Now = erlang:system_time(millisecond), - {M, F, A} = {?MODULE, failed_on_other_recover_after_5_second, [erlang:whereis(?NODE1), Now]}, - {ok, _, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000), - {ok, _, ok} = emqx_cluster_rpc:multicall(io, format, ["test"], 1, 1000), - {atomic, [Status|L]} = emqx_cluster_rpc:status(), + ets:new(test, [named_table, public]), + ets:insert(test, {other_mfa_result, failed}), + ct:pal("111:~p~n", [ets:tab2list(cluster_rpc_commit)]), + {M, F, A} = {?MODULE, failed_on_other_recover_after_retry, [erlang:whereis(?NODE1)]}, + {ok, 1, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000), + ct:pal("222:~p~n", [ets:tab2list(cluster_rpc_commit)]), + ct:pal("333:~p~n", [emqx_cluster_rpc:status()]), + {atomic, [_Status|L]} = emqx_cluster_rpc:status(), ?assertEqual([], L), - ?assertEqual({io, format, ["test"]}, maps:get(mfa, Status)), - ?assertEqual(node(), maps:get(node, Status)), - sleep(2300), - {atomic, [Status1]} = emqx_cluster_rpc:status(), - ?assertEqual(Status, Status1), - sleep(3600), + ets:insert(test, {other_mfa_result, ok}), + {ok, 2, ok} = emqx_cluster_rpc:multicall(io, format, ["test"], 1, 1000), + ct:sleep(1000), {atomic, NewStatus} = emqx_cluster_rpc:status(), ?assertEqual(3, length(NewStatus)), Pid = self(), @@ -161,7 +161,7 @@ t_del_stale_mfa(_Config) -> {ok, TnxId, ok} = emqx_cluster_rpc:multicall(M, F, A), TnxId end || _ <- Keys2], ?assertEqual(Keys2, Ids2), - sleep(1200), + ct:sleep(1200), [begin ?assertEqual({aborted, not_found}, emqx_cluster_rpc:query(I)) end || I <- lists:seq(1, 50)], @@ -177,7 +177,7 @@ t_skip_failed_commit(_Config) -> emqx_cluster_rpc:reset(), {atomic, []} = emqx_cluster_rpc:status(), {ok, 1, ok} = emqx_cluster_rpc:multicall(io, format, ["test~n"], all, 1000), - sleep(180), + ct:sleep(180), {atomic, List1} = emqx_cluster_rpc:status(), Node = node(), ?assertEqual([{Node, 1}, {{Node, ?NODE2}, 1}, {{Node, ?NODE3}, 1}], @@ -240,18 +240,10 @@ failed_on_node_by_odd(Pid) -> end end. -failed_on_other_recover_after_5_second(Pid, CreatedAt) -> - Now = erlang:system_time(millisecond), +failed_on_other_recover_after_retry(Pid) -> case Pid =:= self() of true -> ok; false -> - case Now < CreatedAt + 5001 of - true -> "MFA return not ok"; - false -> ok - end - end. - -sleep(Ms) -> - receive _ -> ok - after Ms -> timeout + [{_, Res}] = ets:lookup(test, other_mfa_result), + Res end. 
diff --git a/apps/emqx_connector/etc/emqx_connector.conf b/apps/emqx_connector/etc/emqx_connector.conf new file mode 100644 index 000000000..06395ac94 --- /dev/null +++ b/apps/emqx_connector/etc/emqx_connector.conf @@ -0,0 +1,23 @@ +#connectors.mqtt.my_mqtt_connector { +# server = "127.0.0.1:1883" +# proto_ver = "v4" +# username = "username1" +# password = "" +# clean_start = true +# keepalive = 300 +# retry_interval = "30s" +# max_inflight = 32 +# reconnect_interval = "30s" +# bridge_mode = true +# replayq { +# dir = "{{ platform_data_dir }}/replayq/bridge_mqtt/" +# seg_bytes = "100MB" +# offload = false +# } +# ssl { +# enable = false +# keyfile = "{{ platform_etc_dir }}/certs/client-key.pem" +# certfile = "{{ platform_etc_dir }}/certs/client-cert.pem" +# cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem" +# } +#} diff --git a/apps/emqx_connector/rebar.config b/apps/emqx_connector/rebar.config index 85ee7c488..4773d0859 100644 --- a/apps/emqx_connector/rebar.config +++ b/apps/emqx_connector/rebar.config @@ -8,7 +8,7 @@ {mysql, {git, "https://github.com/emqx/mysql-otp", {tag, "1.7.1"}}}, {epgsql, {git, "https://github.com/epgsql/epgsql", {tag, "4.4.0"}}}, %% NOTE: mind poolboy version when updating mongodb-erlang version - {mongodb, {git,"https://github.com/emqx/mongodb-erlang", {tag, "v3.0.9"}}}, + {mongodb, {git,"https://github.com/emqx/mongodb-erlang", {tag, "v3.0.10"}}}, %% NOTE: mind poolboy version when updating eredis_cluster version {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.6.7"}}}, %% mongodb-erlang uses a special fork https://github.com/comtihon/poolboy.git diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index 3e59d3528..fe8bb6c97 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -14,6 +14,7 @@ epgsql, mysql, mongodb, + ehttpc, emqx, emqtt ]}, diff --git a/apps/emqx_connector/src/emqx_connector.erl b/apps/emqx_connector/src/emqx_connector.erl index dd0359348..940e958e3 100644 --- a/apps/emqx_connector/src/emqx_connector.erl +++ b/apps/emqx_connector/src/emqx_connector.erl @@ -14,3 +14,101 @@ %% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_connector). + +-export([config_key_path/0]). + +-export([ parse_connector_id/1 + , connector_id/2 + ]). + +-export([ list/0 + , lookup/1 + , lookup/2 + , create_dry_run/2 + , update/2 + , update/3 + , delete/1 + , delete/2 + ]). + +-export([ post_config_update/5 + ]). + +config_key_path() -> + [connectors]. 
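The commented sample above lives under the connectors.mqtt.my_mqtt_connector config path owned by this module. The helpers added just below address a connector either by a flat "type:name" id or by {Type, Name}; a rough shell-style sketch, with only a couple of illustrative fields filled in:

<<"mqtt:my_mqtt_connector">> = emqx_connector:connector_id(mqtt, my_mqtt_connector).
{mqtt, my_mqtt_connector} = emqx_connector:parse_connector_id(<<"mqtt:my_mqtt_connector">>).
%% create or update the connector config under connectors.mqtt.my_mqtt_connector
emqx_connector:update(mqtt, my_mqtt_connector,
                      #{<<"server">> => <<"127.0.0.1:1883">>,
                        <<"proto_ver">> => <<"v4">>,
                        <<"clean_start">> => true}).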
+ +post_config_update([connectors, Type, Name], '$remove', _, _OldConf, _AppEnvs) -> + ConnId = connector_id(Type, Name), + LinkedBridgeIds = lists:foldl(fun + (#{id := BId, raw_config := #{<<"connector">> := ConnId0}}, Acc) + when ConnId0 == ConnId -> + [BId | Acc]; + (_, Acc) -> Acc + end, [], emqx_bridge:list()), + case LinkedBridgeIds of + [] -> ok; + _ -> {error, {dependency_bridges_exist, LinkedBridgeIds}} + end; +post_config_update([connectors, Type, Name], _Req, NewConf, _OldConf, _AppEnvs) -> + ConnId = connector_id(Type, Name), + lists:foreach(fun + (#{id := BId, raw_config := #{<<"connector">> := ConnId0}}) when ConnId0 == ConnId -> + {BType, BName} = emqx_bridge:parse_bridge_id(BId), + BridgeConf = emqx:get_config([bridges, BType, BName]), + case emqx_bridge:recreate(BType, BName, BridgeConf#{connector => NewConf}) of + {ok, _} -> ok; + {error, Reason} -> error({update_bridge_error, Reason}) + end; + (_) -> + ok + end, emqx_bridge:list()). + +connector_id(Type0, Name0) -> + Type = bin(Type0), + Name = bin(Name0), + <>. + +parse_connector_id(ConnectorId) -> + case string:split(bin(ConnectorId), ":", all) of + [Type, Name] -> {binary_to_atom(Type, utf8), binary_to_atom(Name, utf8)}; + _ -> error({invalid_connector_id, ConnectorId}) + end. + +list() -> + lists:foldl(fun({Type, NameAndConf}, Connectors) -> + lists:foldl(fun({Name, RawConf}, Acc) -> + [RawConf#{<<"id">> => connector_id(Type, Name)} | Acc] + end, Connectors, maps:to_list(NameAndConf)) + end, [], maps:to_list(emqx:get_raw_config(config_key_path(), #{}))). + +lookup(Id) when is_binary(Id) -> + {Type, Name} = parse_connector_id(Id), + lookup(Type, Name). + +lookup(Type, Name) -> + Id = connector_id(Type, Name), + case emqx:get_raw_config(config_key_path() ++ [Type, Name], not_found) of + not_found -> {error, not_found}; + Conf -> {ok, Conf#{<<"id">> => Id}} + end. + +create_dry_run(Type, Conf) -> + emqx_bridge:create_dry_run(Type, Conf). + +update(Id, Conf) when is_binary(Id) -> + {Type, Name} = parse_connector_id(Id), + update(Type, Name, Conf). + +update(Type, Name, Conf) -> + emqx_conf:update(config_key_path() ++ [Type, Name], Conf, #{override_to => cluster}). + +delete(Id) when is_binary(Id) -> + {Type, Name} = parse_connector_id(Id), + delete(Type, Name). + +delete(Type, Name) -> + emqx_conf:remove(config_key_path() ++ [Type, Name], #{override_to => cluster}). + +bin(Bin) when is_binary(Bin) -> Bin; +bin(Str) when is_list(Str) -> list_to_binary(Str); +bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8). diff --git a/apps/emqx_connector/src/emqx_connector_api.erl b/apps/emqx_connector/src/emqx_connector_api.erl new file mode 100644 index 000000000..6eb397519 --- /dev/null +++ b/apps/emqx_connector/src/emqx_connector_api.erl @@ -0,0 +1,203 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. 
+%%-------------------------------------------------------------------- + +-module(emqx_connector_api). + +-behaviour(minirest_api). + +-include("emqx_connector.hrl"). + +-include_lib("typerefl/include/types.hrl"). + +-import(hoconsc, [mk/2, ref/2, array/1, enum/1]). + +%% Swagger specs from hocon schema +-export([api_spec/0, paths/0, schema/1, namespace/0]). + +%% API callbacks +-export(['/connectors_test'/2, '/connectors'/2, '/connectors/:id'/2]). + +-define(TRY_PARSE_ID(ID, EXPR), + try emqx_connector:parse_connector_id(Id) of + {ConnType, ConnName} -> + _ = ConnName, + EXPR + catch + error:{invalid_bridge_id, Id0} -> + {400, #{code => 'INVALID_ID', message => <<"invalid_bridge_id: ", Id0/binary, + ". Bridge Ids must be of format :">>}} + end). + +namespace() -> "connector". + +api_spec() -> + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => false}). + +paths() -> ["/connectors_test", "/connectors", "/connectors/:id"]. + +error_schema(Code, Message) -> + [ {code, mk(string(), #{example => Code})} + , {message, mk(string(), #{example => Message})} + ]. + +connector_info() -> + hoconsc:union([ ref(emqx_connector_schema, "mqtt_connector_info") + ]). + +connector_test_info() -> + hoconsc:union([ ref(emqx_connector_schema, "mqtt_connector_test_info") + ]). + +connector_req() -> + hoconsc:union([ ref(emqx_connector_schema, "mqtt_connector") + ]). + +param_path_id() -> + [{id, mk(binary(), #{in => path, example => <<"mqtt:my_mqtt_connector">>})}]. + +schema("/connectors_test") -> + #{ + operationId => '/connectors_test', + post => #{ + tags => [<<"connectors">>], + description => <<"Test creating a new connector by given Id
" + "The Id must be of format :">>, + summary => <<"Test creating connector">>, + requestBody => connector_test_info(), + responses => #{ + 200 => <<"Test connector OK">>, + 400 => error_schema('TEST_FAILED', "connector test failed") + } + } + }; + +schema("/connectors") -> + #{ + operationId => '/connectors', + get => #{ + tags => [<<"connectors">>], + description => <<"List all connectors">>, + summary => <<"List connectors">>, + responses => #{ + 200 => mk(array(connector_info()), #{desc => "List of connectors"}) + } + }, + post => #{ + tags => [<<"connectors">>], + description => <<"Create a new connector by given Id
" + "The Id must be of format :">>, + summary => <<"Create connector">>, + requestBody => connector_info(), + responses => #{ + 201 => connector_info(), + 400 => error_schema('ALREADY_EXISTS', "connector already exists") + } + } + }; + +schema("/connectors/:id") -> + #{ + operationId => '/connectors/:id', + get => #{ + tags => [<<"connectors">>], + description => <<"Get the connector by Id">>, + summary => <<"Get connector">>, + parameters => param_path_id(), + responses => #{ + 200 => connector_info(), + 404 => error_schema('NOT_FOUND', "Connector not found") + } + }, + put => #{ + tags => [<<"connectors">>], + description => <<"Update an existing connector by Id">>, + summary => <<"Update connector">>, + parameters => param_path_id(), + requestBody => connector_req(), + responses => #{ + 200 => <<"Update connector successfully">>, + 400 => error_schema('UPDATE_FAIL', "Update failed"), + 404 => error_schema('NOT_FOUND', "Connector not found") + }}, + delete => #{ + tags => [<<"connectors">>], + description => <<"Delete a connector by Id">>, + summary => <<"Delete connector">>, + parameters => param_path_id(), + responses => #{ + 204 => <<"Delete connector successfully">>, + 400 => error_schema('DELETE_FAIL', "Delete failed") + }} + }. + +'/connectors_test'(post, #{body := #{<<"bridge_type">> := ConnType} = Params}) -> + case emqx_connector:create_dry_run(ConnType, maps:remove(<<"bridge_type">>, Params)) of + ok -> {200}; + {error, Error} -> + {400, error_msg('BAD_ARG', Error)} + end. + +'/connectors'(get, _Request) -> + {200, emqx_connector:list()}; + +'/connectors'(post, #{body := #{<<"id">> := Id} = Params}) -> + ?TRY_PARSE_ID(Id, + case emqx_connector:lookup(ConnType, ConnName) of + {ok, _} -> + {400, error_msg('ALREADY_EXISTS', <<"connector already exists">>)}; + {error, not_found} -> + case emqx_connector:update(ConnType, ConnName, maps:remove(<<"id">>, Params)) of + {ok, #{raw_config := RawConf}} -> {201, RawConf#{<<"id">> => Id}}; + {error, Error} -> {400, error_msg('BAD_ARG', Error)} + end + end). + +'/connectors/:id'(get, #{bindings := #{id := Id}}) -> + ?TRY_PARSE_ID(Id, + case emqx_connector:lookup(ConnType, ConnName) of + {ok, Conf} -> {200, Conf#{<<"id">> => Id}}; + {error, not_found} -> + {404, error_msg('NOT_FOUND', <<"connector not found">>)} + end); + +'/connectors/:id'(put, #{bindings := #{id := Id}, body := Params}) -> + ?TRY_PARSE_ID(Id, + case emqx_connector:lookup(ConnType, ConnName) of + {ok, _} -> + case emqx_connector:update(ConnType, ConnName, Params) of + {ok, #{raw_config := RawConf}} -> {200, RawConf#{<<"id">> => Id}}; + {error, Error} -> {400, error_msg('BAD_ARG', Error)} + end; + {error, not_found} -> + {404, error_msg('NOT_FOUND', <<"connector not found">>)} + end); + +'/connectors/:id'(delete, #{bindings := #{id := Id}}) -> + ?TRY_PARSE_ID(Id, + case emqx_connector:lookup(ConnType, ConnName) of + {ok, _} -> + case emqx_connector:delete(ConnType, ConnName) of + {ok, _} -> {204}; + {error, Error} -> {400, error_msg('BAD_ARG', Error)} + end; + {error, not_found} -> + {404, error_msg('NOT_FOUND', <<"connector not found">>)} + end). + +error_msg(Code, Msg) when is_binary(Msg) -> + #{code => Code, message => Msg}; +error_msg(Code, Msg) -> + #{code => Code, message => list_to_binary(io_lib:format("~p", [Msg]))}. 
diff --git a/apps/emqx_connector/src/emqx_connector_app.erl b/apps/emqx_connector/src/emqx_connector_app.erl index 4de078076..93e577fde 100644 --- a/apps/emqx_connector/src/emqx_connector_app.erl +++ b/apps/emqx_connector/src/emqx_connector_app.erl @@ -20,11 +20,15 @@ -export([start/2, stop/1]). +-define(CONF_HDLR_PATH, (emqx_connector:config_key_path() ++ ['?', '?'])). + start(_StartType, _StartArgs) -> + ok = emqx_config_handler:add_handler(?CONF_HDLR_PATH, emqx_connector), emqx_connector_mqtt_worker:register_metrics(), emqx_connector_sup:start_link(). stop(_State) -> + emqx_config_handler:remove_handler(?CONF_HDLR_PATH), ok. %% internal functions diff --git a/apps/emqx_connector/src/emqx_connector_http.erl b/apps/emqx_connector/src/emqx_connector_http.erl index c724ddb7a..bae4e334b 100644 --- a/apps/emqx_connector/src/emqx_connector_http.erl +++ b/apps/emqx_connector/src/emqx_connector_http.erl @@ -19,10 +19,10 @@ -include("emqx_connector.hrl"). -include_lib("typerefl/include/types.hrl"). --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). - -include_lib("emqx/include/logger.hrl"). +-behaviour(emqx_resource). + %% callbacks of behaviour emqx_resource -export([ on_start/2 , on_stop/2 @@ -38,7 +38,8 @@ , fields/1 , validations/0]). --export([ check_ssl_opts/2 ]). +-export([ check_ssl_opts/2 + ]). -type connect_timeout() :: emqx_schema:duration() | infinity. -type pool_type() :: random | hash. @@ -50,73 +51,84 @@ %%===================================================================== %% Hocon schema roots() -> - [{config, #{type => hoconsc:ref(?MODULE, config)}}]. - -fields("http_request") -> - [ {subscribe_local_topic, hoconsc:mk(binary())} - , {method, hoconsc:mk(method(), #{default => post})} - , {path, hoconsc:mk(binary(), #{default => <<"">>})} - , {headers, hoconsc:mk(map(), - #{default => #{ - <<"accept">> => <<"application/json">>, - <<"cache-control">> => <<"no-cache">>, - <<"connection">> => <<"keep-alive">>, - <<"content-type">> => <<"application/json">>, - <<"keep-alive">> => <<"timeout=5">>}}) - } - , {body, hoconsc:mk(binary(), #{default => <<"${payload}">>})} - , {request_timeout, hoconsc:mk(emqx_schema:duration_ms(), #{default => <<"30s">>})} - ]; + fields(config). fields(config) -> - [ {base_url, fun base_url/1} - , {connect_timeout, fun connect_timeout/1} - , {max_retries, fun max_retries/1} - , {retry_interval, fun retry_interval/1} - , {pool_type, fun pool_type/1} - , {pool_size, fun pool_size/1} - , {enable_pipelining, fun enable_pipelining/1} - ] ++ emqx_connector_schema_lib:ssl_fields(). + [ {base_url, + sc(url(), + #{ nullable => false + , validator => fun(#{query := _Query}) -> + {error, "There must be no query in the base_url"}; + (_) -> ok + end + , desc => """ +The base URL is the URL includes only the scheme, host and port.
+When sending an HTTP request, the real URL to be used is the concatenation of the base URL and the +path parameter (passed by emqx_resource:query/2,3 or provided by the request parameter).
+For example: http://localhost:9901/ +""" + })} + , {connect_timeout, + sc(emqx_schema:duration_ms(), + #{ default => "30s" + , desc => "The timeout when connecting to the HTTP server" + })} + , {max_retries, + sc(non_neg_integer(), + #{ default => 5 + , desc => "Max retry times if error on sending request" + })} + , {retry_interval, + sc(emqx_schema:duration(), + #{ default => "1s" + , desc => "Interval before next retry if error on sending request" + })} + , {pool_type, + sc(pool_type(), + #{ default => random + , desc => "The type of the pool. Canbe one of random, hash" + })} + , {pool_size, + sc(non_neg_integer(), + #{ default => 8 + , desc => "The pool size" + })} + , {enable_pipelining, + sc(boolean(), + #{ default => true + , desc => "Enable the HTTP pipeline" + })} + , {request, hoconsc:mk( + ref("request"), + #{ default => undefined + , nullable => true + , desc => """ +If the request is provided, the caller can send HTTP requests via +emqx_resource:query(ResourceId, {send_message, BridgeId, Message}) +""" + })} + ] ++ emqx_connector_schema_lib:ssl_fields(); -method() -> - hoconsc:enum([post, put, get, delete]). +fields("request") -> + [ {method, hoconsc:mk(hoconsc:enum([post, put, get, delete]), #{nullable => true})} + , {path, hoconsc:mk(binary(), #{nullable => true})} + , {body, hoconsc:mk(binary(), #{nullable => true})} + , {headers, hoconsc:mk(map(), #{nullable => true})} + , {request_timeout, + sc(emqx_schema:duration_ms(), + #{ nullable => true + , desc => "The timeout when sending request to the HTTP server" + })} + ]. validations() -> [ {check_ssl_opts, fun check_ssl_opts/1} ]. -base_url(type) -> url(); -base_url(nullable) -> false; -base_url(validator) -> fun(#{query := _Query}) -> - {error, "There must be no query in the base_url"}; - (_) -> ok - end; -base_url(_) -> undefined. - -connect_timeout(type) -> emqx_schema:duration_ms(); -connect_timeout(default) -> <<"5s">>; -connect_timeout(_) -> undefined. - -max_retries(type) -> non_neg_integer(); -max_retries(default) -> 5; -max_retries(_) -> undefined. - -retry_interval(type) -> emqx_schema:duration(); -retry_interval(default) -> <<"1s">>; -retry_interval(_) -> undefined. - -pool_type(type) -> pool_type(); -pool_type(default) -> random; -pool_type(_) -> undefined. - -pool_size(type) -> non_neg_integer(); -pool_size(default) -> 8; -pool_size(_) -> undefined. - -enable_pipelining(type) -> boolean(); -enable_pipelining(default) -> true; -enable_pipelining(_) -> undefined. +sc(Type, Meta) -> hoconsc:mk(Type, Meta). +ref(Field) -> hoconsc:ref(?MODULE, Field). %% =================================================================== + on_start(InstId, #{base_url := #{scheme := Scheme, host := Host, port := Port, @@ -153,7 +165,7 @@ on_start(InstId, #{base_url := #{scheme := Scheme, host => Host, port => Port, base_path => BasePath, - channels => preproc_channels(InstId, Config) + request => preprocess_request(maps:get(request, Config, undefined)) }, case ehttpc_sup:start_pool(PoolName, PoolOpts) of {ok, _} -> {ok, State}; @@ -167,12 +179,12 @@ on_stop(InstId, #{pool_name := PoolName}) -> connector => InstId}), ehttpc_sup:stop_pool(PoolName). 
-on_query(InstId, {send_message, ChannelId, Msg}, AfterQuery, #{channels := Channels} = State) -> - case maps:find(ChannelId, Channels) of - error -> ?SLOG(error, #{msg => "channel not found", channel_id => ChannelId}); - {ok, ChannConf} -> +on_query(InstId, {send_message, Msg}, AfterQuery, State) -> + case maps:get(request, State, undefined) of + undefined -> ?SLOG(error, #{msg => "request not found", connector => InstId}); + Request -> #{method := Method, path := Path, body := Body, headers := Headers, - request_timeout := Timeout} = proc_channel_conf(ChannConf, Msg), + request_timeout := Timeout} = process_request(Request, Msg), on_query(InstId, {Method, {Path, Headers, Body}, Timeout}, AfterQuery, State) end; on_query(InstId, {Method, Request}, AfterQuery, State) -> @@ -212,25 +224,22 @@ on_health_check(_InstId, #{host := Host, port := Port} = State) -> %% Internal functions %%-------------------------------------------------------------------- -preproc_channels(<<"bridge:", BridgeId/binary>>, Config) -> - {BridgeType, BridgeName} = emqx_bridge:parse_bridge_id(BridgeId), - maps:fold(fun(ChannName, ChannConf, Acc) -> - Acc#{emqx_bridge:channel_id(BridgeType, BridgeName, egress_channels, ChannName) => - preproc_channel_conf(ChannConf)} - end, #{}, maps:get(egress_channels, Config, #{})); -preproc_channels(_InstId, _Config) -> - #{}. - -preproc_channel_conf(#{ - method := Method, - path := Path, - body := Body, - headers := Headers} = Conf) -> - Conf#{ method => emqx_plugin_libs_rule:preproc_tmpl(bin(Method)) - , path => emqx_plugin_libs_rule:preproc_tmpl(Path) - , body => emqx_plugin_libs_rule:preproc_tmpl(Body) - , headers => preproc_headers(Headers) - }. +preprocess_request(undefined) -> + undefined; +preprocess_request(Req) when map_size(Req) == 0 -> + undefined; +preprocess_request(#{ + method := Method, + path := Path, + body := Body, + headers := Headers + } = Req) -> + #{ method => emqx_plugin_libs_rule:preproc_tmpl(bin(Method)) + , path => emqx_plugin_libs_rule:preproc_tmpl(Path) + , body => emqx_plugin_libs_rule:preproc_tmpl(Body) + , headers => preproc_headers(Headers) + , request_timeout => maps:get(request_timeout, Req, 30000) + }. preproc_headers(Headers) -> maps:fold(fun(K, V, Acc) -> @@ -238,15 +247,18 @@ preproc_headers(Headers) -> emqx_plugin_libs_rule:preproc_tmpl(bin(V))} end, #{}, Headers). -proc_channel_conf(#{ - method := MethodTks, - path := PathTks, - body := BodyTks, - headers := HeadersTks} = Conf, Msg) -> +process_request(#{ + method := MethodTks, + path := PathTks, + body := BodyTks, + headers := HeadersTks, + request_timeout := ReqTimeout + } = Conf, Msg) -> Conf#{ method => make_method(emqx_plugin_libs_rule:proc_tmpl(MethodTks, Msg)) , path => emqx_plugin_libs_rule:proc_tmpl(PathTks, Msg) , body => emqx_plugin_libs_rule:proc_tmpl(BodyTks, Msg) , headers => maps:to_list(proc_headers(HeadersTks, Msg)) + , request_timeout => ReqTimeout }. proc_headers(HeaderTks, Msg) -> @@ -264,7 +276,7 @@ check_ssl_opts(Conf) -> check_ssl_opts("base_url", Conf). 
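With the per-channel indirection removed, the HTTP connector above is driven with a plain {send_message, Msg} request whose fields feed the preprocessed ${...} templates in method, path, body and headers. A hedged sketch of a caller; ResourceId and the message fields are illustrative only:

%% ResourceId stands for whatever id the resource was started under.
Msg = #{topic => <<"emqx_http/room1">>, payload => <<"{\"temp\":21}">>}.
emqx_resource:query(ResourceId, {send_message, Msg}).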
check_ssl_opts(URLFrom, Conf) -> - #{schema := Scheme} = hocon_schema:get_value(URLFrom, Conf), + #{scheme := Scheme} = hocon_schema:get_value(URLFrom, Conf), SSL= hocon_schema:get_value("ssl", Conf), case {Scheme, maps:get(enable, SSL, false)} of {http, false} -> true; diff --git a/apps/emqx_connector/src/emqx_connector_ldap.erl b/apps/emqx_connector/src/emqx_connector_ldap.erl index 85e42b0f3..8af516b82 100644 --- a/apps/emqx_connector/src/emqx_connector_ldap.erl +++ b/apps/emqx_connector/src/emqx_connector_ldap.erl @@ -17,11 +17,12 @@ -include("emqx_connector.hrl"). -include_lib("typerefl/include/types.hrl"). --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). -include_lib("emqx/include/logger.hrl"). -export([roots/0, fields/1]). +-behaviour(emqx_resource). + %% callbacks of behaviour emqx_resource -export([ on_start/2 , on_stop/2 @@ -60,7 +61,10 @@ on_start(InstId, #{servers := Servers0, SslOpts = case maps:get(enable, SSL) of true -> [{ssl, true}, - {sslopts, emqx_plugin_libs_ssl:save_files_return_opts(SSL, "connectors", InstId)} + {sslopts, emqx_plugin_libs_ssl:save_files_return_opts( + SSL, + "connectors", + InstId)} ]; false -> [{ssl, false}] end, @@ -86,7 +90,10 @@ on_query(InstId, {search, Base, Filter, Attributes}, AfterQuery, #{poolname := P ?SLOG(debug, #{msg => "ldap connector received request", request => Request, connector => InstId, state => State}), - case Result = ecpool:pick_and_do(PoolName, {?MODULE, search, [Base, Filter, Attributes]}, no_handover) of + case Result = ecpool:pick_and_do( + PoolName, + {?MODULE, search, [Base, Filter, Attributes]}, + no_handover) of {error, Reason} -> ?SLOG(error, #{msg => "ldap connector do request failed", request => Request, connector => InstId, @@ -110,7 +117,7 @@ search(Conn, Base, Filter, Attributes) -> eldap2:search(Conn, [{base, Base}, {filter, Filter}, {attributes, Attributes}, - {deref, eldap2:derefFindingBaseObj()}]). + {deref, eldap2:'derefFindingBaseObj'()}]). %% =================================================================== connect(Opts) -> diff --git a/apps/emqx_connector/src/emqx_connector_mongo.erl b/apps/emqx_connector/src/emqx_connector_mongo.erl index 59c80e959..c2d992cb6 100644 --- a/apps/emqx_connector/src/emqx_connector_mongo.erl +++ b/apps/emqx_connector/src/emqx_connector_mongo.erl @@ -17,13 +17,14 @@ -include("emqx_connector.hrl"). -include_lib("typerefl/include/types.hrl"). --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). -include_lib("emqx/include/logger.hrl"). -type server() :: emqx_schema:ip_port(). -reflect_type([server/0]). -typerefl_from_string({server/0, emqx_connector_schema_lib, to_ip_port}). +-behaviour(emqx_resource). + %% callbacks of behaviour emqx_resource -export([ on_start/2 , on_stop/2 @@ -37,6 +38,7 @@ -export([roots/0, fields/1]). -export([mongo_query/5]). + %%===================================================================== roots() -> [ {config, #{type => hoconsc:union( @@ -55,7 +57,7 @@ fields(rs) -> [ {mongo_type, #{type => rs, default => rs}} , {servers, fun servers/1} - , {replica_set_name, fun emqx_connector_schema_lib:database/1} + , {replica_set_name, fun replica_set_name/1} ] ++ mongo_fields(); fields(sharded) -> [ {mongo_type, #{type => sharded, @@ -77,7 +79,8 @@ fields(topology) -> ]. 
mongo_fields() -> - [ {pool_size, fun emqx_connector_schema_lib:pool_size/1} + [ {srv_record, fun srv_record/1} + , {pool_size, fun emqx_connector_schema_lib:pool_size/1} , {username, fun emqx_connector_schema_lib:username/1} , {password, fun emqx_connector_schema_lib:password/1} , {auth_source, #{type => binary(), @@ -92,47 +95,54 @@ on_jsonify(Config) -> Config. %% =================================================================== -on_start(InstId, Config = #{server := Server, - mongo_type := single}) -> - ?SLOG(info, #{msg => "starting mongodb single connector", - connector => InstId, config => Config}), - Opts = [{type, single}, - {hosts, [emqx_connector_schema_lib:ip_port_to_string(Server)]} - ], - do_start(InstId, Opts, Config); -on_start(InstId, Config = #{servers := Servers, - mongo_type := rs, - replica_set_name := RsName}) -> - ?SLOG(info, #{msg => "starting mongodb rs connector", - connector => InstId, config => Config}), - Opts = [{type, {rs, RsName}}, - {hosts, [emqx_connector_schema_lib:ip_port_to_string(S) - || S <- Servers]} - ], - do_start(InstId, Opts, Config); - -on_start(InstId, Config = #{servers := Servers, - mongo_type := sharded}) -> - ?SLOG(info, #{msg => "starting mongodb sharded connector", - connector => InstId, config => Config}), - Opts = [{type, sharded}, - {hosts, [emqx_connector_schema_lib:ip_port_to_string(S) - || S <- Servers]} - ], - do_start(InstId, Opts, Config). +on_start(InstId, Config = #{mongo_type := Type, + pool_size := PoolSize, + ssl := SSL}) -> + Msg = case Type of + single -> "starting_mongodb_single_connector"; + rs -> "starting_mongodb_replica_set_connector"; + sharded -> "starting_mongodb_sharded_connector" + end, + ?SLOG(info, #{msg => Msg, connector => InstId, config => Config}), + NConfig = #{hosts := Hosts} = may_parse_srv_and_txt_records(Config), + SslOpts = case maps:get(enable, SSL) of + true -> + [{ssl, true}, + {ssl_opts, + emqx_plugin_libs_ssl:save_files_return_opts( + SSL, + "connectors", + InstId)} + ]; + false -> [{ssl, false}] + end, + Topology = maps:get(topology, NConfig, #{}), + Opts = [{type, init_type(NConfig)}, + {hosts, Hosts}, + {pool_size, PoolSize}, + {options, init_topology_options(maps:to_list(Topology), [])}, + {worker_options, init_worker_options(maps:to_list(NConfig), SslOpts)}], + PoolName = emqx_plugin_libs_pool:pool_name(InstId), + _ = emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts), + {ok, #{poolname => PoolName, type => Type}}. on_stop(InstId, #{poolname := PoolName}) -> ?SLOG(info, #{msg => "stopping mongodb connector", connector => InstId}), emqx_plugin_libs_pool:stop_pool(PoolName). 
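The single on_start/2 clause above replaces the three per-type clauses; init_type/1 and may_parse_srv_and_txt_records/1 are defined later in this file. Roughly, for a two-node replica-set config (host names and the replica set name below are invented), the options handed to the pool come out as:

    may_parse_srv_and_txt_records(#{mongo_type => rs, replica_set_name => <<"rs0">>,
                                    srv_record => false,
                                    servers => <<"mongo1:27017,mongo2:27017">>}).
    %% -> the same map plus hosts => ["mongo1:27017", "mongo2:27017"];
    %% init_type/1 on that map gives {rs, <<"rs0">>}, so the Opts list passed to
    %% emqx_plugin_libs_pool:start_pool/3 looks roughly like
    %%   [{type, {rs, <<"rs0">>}}, {hosts, ["mongo1:27017", "mongo2:27017"]},
    %%    {pool_size, 8}, {options, [...]}, {worker_options, [...]}]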
-on_query(InstId, {Action, Collection, Selector, Docs}, AfterQuery, #{poolname := PoolName} = State) -> +on_query(InstId, + {Action, Collection, Selector, Docs}, + AfterQuery, + #{poolname := PoolName} = State) -> Request = {Action, Collection, Selector, Docs}, ?SLOG(debug, #{msg => "mongodb connector received request", request => Request, connector => InstId, state => State}), - case ecpool:pick_and_do(PoolName, {?MODULE, mongo_query, [Action, Collection, Selector, Docs]}, no_handover) of + case ecpool:pick_and_do(PoolName, + {?MODULE, mongo_query, [Action, Collection, Selector, Docs]}, + no_handover) of {error, Reason} -> ?SLOG(error, #{msg => "mongodb connector do query failed", request => Request, reason => Reason, @@ -141,7 +151,7 @@ on_query(InstId, {Action, Collection, Selector, Docs}, AfterQuery, #{poolname := {error, Reason}; {ok, Cursor} when is_pid(Cursor) -> emqx_resource:query_success(AfterQuery), - mc_cursor:foldl(fun(O, Acc2) -> [O|Acc2] end, [], Cursor, 1000); + mc_cursor:foldl(fun(O, Acc2) -> [O | Acc2] end, [], Cursor, 1000); Result -> emqx_resource:query_success(AfterQuery), Result @@ -184,63 +194,34 @@ mongo_query(Conn, find_one, Collection, Selector, Projector) -> mongo_query(_Conn, _Action, _Collection, _Selector, _Projector) -> ok. -do_start(InstId, Opts0, Config = #{mongo_type := Type, - database := Database, - pool_size := PoolSize, - ssl := SSL}) -> - SslOpts = case maps:get(enable, SSL) of - true -> - [{ssl, true}, - {ssl_opts, emqx_plugin_libs_ssl:save_files_return_opts(SSL, "connectors", InstId)} - ]; - false -> [{ssl, false}] - end, - Topology= maps:get(topology, Config, #{}), - Opts = Opts0 ++ - [{pool_size, PoolSize}, - {options, init_topology_options(maps:to_list(Topology), [])}, - {worker_options, init_worker_options(maps:to_list(Config), SslOpts)}], - %% test the connection - TestOpts = case maps:is_key(server, Config) of - true -> - Server = maps:get(server, Config), - host_port(Server); - false -> - Servers = maps:get(servers, Config), - host_port(erlang:hd(Servers)) - end ++ [{database, Database}], - {ok, TestConn} = mc_worker_api:connect(TestOpts), +init_type(#{mongo_type := rs, replica_set_name := ReplicaSetName}) -> + {rs, ReplicaSetName}; +init_type(#{mongo_type := Type}) -> + Type. - PoolName = emqx_plugin_libs_pool:pool_name(InstId), - _ = emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts ++ SslOpts), - {ok, #{poolname => PoolName, - type => Type, - test_conn => TestConn, - test_opts => TestOpts}}. 
- -init_topology_options([{pool_size, Val}| R], Acc) -> - init_topology_options(R, [{pool_size, Val}| Acc]); -init_topology_options([{max_overflow, Val}| R], Acc) -> - init_topology_options(R, [{max_overflow, Val}| Acc]); -init_topology_options([{overflow_ttl, Val}| R], Acc) -> - init_topology_options(R, [{overflow_ttl, Val}| Acc]); -init_topology_options([{overflow_check_period, Val}| R], Acc) -> - init_topology_options(R, [{overflow_check_period, Val}| Acc]); -init_topology_options([{local_threshold_ms, Val}| R], Acc) -> - init_topology_options(R, [{'localThresholdMS', Val}| Acc]); -init_topology_options([{connect_timeout_ms, Val}| R], Acc) -> - init_topology_options(R, [{'connectTimeoutMS', Val}| Acc]); -init_topology_options([{socket_timeout_ms, Val}| R], Acc) -> - init_topology_options(R, [{'socketTimeoutMS', Val}| Acc]); -init_topology_options([{server_selection_timeout_ms, Val}| R], Acc) -> - init_topology_options(R, [{'serverSelectionTimeoutMS', Val}| Acc]); -init_topology_options([{wait_queue_timeout_ms, Val}| R], Acc) -> - init_topology_options(R, [{'waitQueueTimeoutMS', Val}| Acc]); -init_topology_options([{heartbeat_frequency_ms, Val}| R], Acc) -> - init_topology_options(R, [{'heartbeatFrequencyMS', Val}| Acc]); -init_topology_options([{min_heartbeat_frequency_ms, Val}| R], Acc) -> - init_topology_options(R, [{'minHeartbeatFrequencyMS', Val}| Acc]); -init_topology_options([_| R], Acc) -> +init_topology_options([{pool_size, Val} | R], Acc) -> + init_topology_options(R, [{pool_size, Val} | Acc]); +init_topology_options([{max_overflow, Val} | R], Acc) -> + init_topology_options(R, [{max_overflow, Val} | Acc]); +init_topology_options([{overflow_ttl, Val} | R], Acc) -> + init_topology_options(R, [{overflow_ttl, Val} | Acc]); +init_topology_options([{overflow_check_period, Val} | R], Acc) -> + init_topology_options(R, [{overflow_check_period, Val} | Acc]); +init_topology_options([{local_threshold_ms, Val} | R], Acc) -> + init_topology_options(R, [{'localThresholdMS', Val} | Acc]); +init_topology_options([{connect_timeout_ms, Val} | R], Acc) -> + init_topology_options(R, [{'connectTimeoutMS', Val} | Acc]); +init_topology_options([{socket_timeout_ms, Val} | R], Acc) -> + init_topology_options(R, [{'socketTimeoutMS', Val} | Acc]); +init_topology_options([{server_selection_timeout_ms, Val} | R], Acc) -> + init_topology_options(R, [{'serverSelectionTimeoutMS', Val} | Acc]); +init_topology_options([{wait_queue_timeout_ms, Val} | R], Acc) -> + init_topology_options(R, [{'waitQueueTimeoutMS', Val} | Acc]); +init_topology_options([{heartbeat_frequency_ms, Val} | R], Acc) -> + init_topology_options(R, [{'heartbeatFrequencyMS', Val} | Acc]); +init_topology_options([{min_heartbeat_frequency_ms, Val} | R], Acc) -> + init_topology_options(R, [{'minHeartbeatFrequencyMS', Val} | Acc]); +init_topology_options([_ | R], Acc) -> init_topology_options(R, Acc); init_topology_options([], Acc) -> Acc. @@ -261,17 +242,106 @@ init_worker_options([_ | R], Acc) -> init_worker_options(R, Acc); init_worker_options([], Acc) -> Acc. -host_port({Host, Port}) -> - [{host, Host}, {port, Port}]. - -server(type) -> server(); +server(type) -> binary(); server(validator) -> [?NOT_EMPTY("the value of the field 'server' cannot be empty")]; server(_) -> undefined. -servers(type) -> hoconsc:array(server()); +servers(type) -> binary(); servers(validator) -> [?NOT_EMPTY("the value of the field 'servers' cannot be empty")]; servers(_) -> undefined. 
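For context on the srv_record option added to mongo_fields/0 above: when it is true, the connector resolves the seed domain itself (much like a mongodb+srv:// URI) using the parse_srv_records/2 and parse_txt_records/2 helpers defined just below. A hedged sketch of the two DNS lookups involved, with a hypothetical domain and record contents:

    %% SRV records give the per-node host:port pairs:
    inet_res:lookup("_mongodb._tcp.cluster0.example.com", in, srv),
    %% e.g. [{0,0,27017,"shard0.example.com"}, {0,0,27017,"shard1.example.com"}],
    %% which parse_srv_records/2 flattens to ["shard0.example.com:27017", ...].
    %% The TXT record usually carries the extra options:
    inet_res:lookup("cluster0.example.com", in, txt).
    %% e.g. [["authSource=admin&replicaSet=rs0"]], which parse_txt_records/2 and
    %% take_and_convert/2 turn into
    %%   #{auth_source => <<"admin">>, replica_set_name => <<"rs0">>}.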
duration(type) -> emqx_schema:duration_ms(); duration(nullable) -> true; duration(_) -> undefined. + +replica_set_name(type) -> binary(); +replica_set_name(nullable) -> true; +replica_set_name(_) -> undefined. + +srv_record(type) -> boolean(); +srv_record(default) -> false; +srv_record(_) -> undefined. + +parse_servers(Type, Servers) when is_binary(Servers) -> + parse_servers(Type, binary_to_list(Servers)); +parse_servers(Type, Servers) when is_list(Servers) -> + case string:split(Servers, ",", trailing) of + [Host | _] when Type =:= single -> + [Host]; + Hosts -> + Hosts + end. + +may_parse_srv_and_txt_records(#{server := Server} = Config) -> + NConfig = maps:remove(server, Config), + may_parse_srv_and_txt_records_(NConfig#{servers => Server}); +may_parse_srv_and_txt_records(Config) -> + may_parse_srv_and_txt_records_(Config). + +may_parse_srv_and_txt_records_(#{mongo_type := Type, + srv_record := false, + servers := Servers} = Config) -> + case Type =:= rs andalso maps:is_key(replica_set_name, Config) =:= false of + true -> + error({missing_parameter, replica_set_name}); + false -> + Config#{hosts => parse_servers(Type, Servers)} + end; +may_parse_srv_and_txt_records_(#{mongo_type := Type, + srv_record := true, + servers := Servers} = Config) -> + NServers = binary_to_list(Servers), + Hosts = parse_srv_records(Type, NServers), + ExtraOpts = parse_txt_records(Type, NServers), + maps:merge(Config#{hosts => Hosts}, ExtraOpts). + +parse_srv_records(Type, Server) -> + case inet_res:lookup("_mongodb._tcp." ++ Server, in, srv) of + [] -> + error(service_not_found); + Services -> + case [Host ++ ":" ++ integer_to_list(Port) || {_, _, Port, Host} <- Services] of + [H | _] when Type =:= single -> + [H]; + Hosts -> + Hosts + end + end. + +parse_txt_records(Type, Server) -> + case inet_res:lookup(Server, in, txt) of + [] -> + #{}; + [[QueryString]] -> + case uri_string:dissect_query(QueryString) of + {error, _, _} -> + error({invalid_txt_record, invalid_query_string}); + Options -> + Fields = case Type of + rs -> ["authSource", "replicaSet"]; + _ -> ["authSource"] + end, + take_and_convert(Fields, Options) + end; + _ -> + error({invalid_txt_record, multiple_records}) + end. + +take_and_convert(Fields, Options) -> + take_and_convert(Fields, Options, #{}). + +take_and_convert([], [_ | _], _Acc) -> + error({invalid_txt_record, invalid_option}); +take_and_convert([], [], Acc) -> + Acc; +take_and_convert([Field | More], Options, Acc) -> + case lists:keytake(Field, 1, Options) of + {value, {"authSource", V}, NOptions} -> + take_and_convert(More, NOptions, Acc#{auth_source => list_to_binary(V)}); + {value, {"replicaSet", V}, NOptions} -> + take_and_convert(More, NOptions, Acc#{replica_set_name => list_to_binary(V)}); + {value, _, _} -> + error({invalid_txt_record, invalid_option}); + false -> + take_and_convert(More, Options, Acc) + end. diff --git a/apps/emqx_connector/src/emqx_connector_mqtt.erl b/apps/emqx_connector/src/emqx_connector_mqtt.erl index a4527984a..1acd8b298 100644 --- a/apps/emqx_connector/src/emqx_connector_mqtt.erl +++ b/apps/emqx_connector/src/emqx_connector_mqtt.erl @@ -16,10 +16,10 @@ -module(emqx_connector_mqtt). -include_lib("typerefl/include/types.hrl"). --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). -include_lib("emqx/include/logger.hrl"). -behaviour(supervisor). +-behaviour(emqx_resource). 
%% API and callbacks for supervisor -export([ start_link/0 @@ -46,7 +46,7 @@ %%===================================================================== %% Hocon schema roots() -> - [{config, #{type => hoconsc:ref(?MODULE, "config")}}]. + fields("config"). fields("config") -> emqx_connector_mqtt_schema:fields("config"). @@ -89,111 +89,75 @@ drop_bridge(Name) -> %% =================================================================== %% When use this bridge as a data source, ?MODULE:on_message_received/2 will be called %% if the bridge received msgs from the remote broker. -on_message_received(Msg, ChannId) -> - Name = atom_to_binary(ChannId, utf8), - emqx:run_hook(<<"$bridges/", Name/binary>>, [Msg]). +on_message_received(Msg, HookPoint) -> + emqx:run_hook(HookPoint, [Msg]). %% =================================================================== on_start(InstId, Conf) -> + InstanceId = binary_to_atom(InstId, utf8), ?SLOG(info, #{msg => "starting mqtt connector", - connector => InstId, config => Conf}), - "bridge:" ++ NamePrefix = binary_to_list(InstId), + connector => InstanceId, config => Conf}), BasicConf = basic_config(Conf), - InitRes = {ok, #{name_prefix => NamePrefix, baisc_conf => BasicConf, channels => []}}, - InOutConfigs = taged_map_list(ingress_channels, maps:get(ingress_channels, Conf, #{})) - ++ taged_map_list(egress_channels, maps:get(egress_channels, Conf, #{})), - lists:foldl(fun - (_InOutConf, {error, Reason}) -> - {error, Reason}; - (InOutConf, {ok, #{channels := SubBridges} = Res}) -> - case create_channel(InOutConf, NamePrefix, BasicConf) of - {error, Reason} -> {error, Reason}; - {ok, Name} -> {ok, Res#{channels => [Name | SubBridges]}} - end - end, InitRes, InOutConfigs). - -on_stop(InstId, #{channels := NameList}) -> - ?SLOG(info, #{msg => "stopping mqtt connector", - connector => InstId}), - lists:foreach(fun(Name) -> - remove_channel(Name) - end, NameList). - -%% TODO: let the emqx_resource trigger on_query/4 automatically according to the -%% `ingress_channels` and `egress_channels` config -on_query(_InstId, {create_channel, Conf}, _AfterQuery, #{name_prefix := Prefix, - baisc_conf := BasicConf}) -> - create_channel(Conf, Prefix, BasicConf); -on_query(_InstId, {send_message, ChannelId, Msg}, _AfterQuery, _State) -> - ?SLOG(debug, #{msg => "send msg to remote node", message => Msg, - channel_id => ChannelId}), - emqx_connector_mqtt_worker:send_to_remote(ChannelId, Msg). - -on_health_check(_InstId, #{channels := NameList} = State) -> - Results = [{Name, emqx_connector_mqtt_worker:ping(Name)} || Name <- NameList], - case lists:all(fun({_, pong}) -> true; ({_, _}) -> false end, Results) of - true -> {ok, State}; - false -> {error, {some_channel_down, Results}, State} - end. 
- -create_channel({{ingress_channels, Id}, #{subscribe_remote_topic := RemoteT} = Conf}, - NamePrefix, BasicConf) -> - LocalT = maps:get(local_topic, Conf, undefined), - ChannId = ingress_channel_id(NamePrefix, Id), - ?SLOG(info, #{msg => "creating ingress channel", - remote_topic => RemoteT, - local_topic => LocalT, - channel_id => ChannId}), - do_create_channel(BasicConf#{ - name => ChannId, - clientid => clientid(ChannId), - subscriptions => Conf#{ - local_topic => LocalT, - on_message_received => {fun ?MODULE:on_message_received/2, [ChannId]} - }, - forwards => undefined}); - -create_channel({{egress_channels, Id}, #{remote_topic := RemoteT} = Conf}, - NamePrefix, BasicConf) -> - LocalT = maps:get(subscribe_local_topic, Conf, undefined), - ChannId = egress_channel_id(NamePrefix, Id), - ?SLOG(info, #{msg => "creating egress channel", - remote_topic => RemoteT, - local_topic => LocalT, - channel_id => ChannId}), - do_create_channel(BasicConf#{ - name => ChannId, - clientid => clientid(ChannId), - subscriptions => undefined, - forwards => Conf#{subscribe_local_topic => LocalT}}). - -remove_channel(ChannId) -> - ?SLOG(info, #{msg => "removing channel", - channel_id => ChannId}), - case ?MODULE:drop_bridge(ChannId) of - ok -> ok; - {error, not_found} -> ok; - {error, Reason} -> - ?SLOG(error, #{msg => "stop channel failed", - channel_id => ChannId, reason => Reason}) - end. - -do_create_channel(#{name := Name} = Conf) -> - case ?MODULE:create_bridge(Conf) of + BridgeConf = BasicConf#{ + name => InstanceId, + clientid => clientid(InstanceId), + subscriptions => make_sub_confs(maps:get(ingress, Conf, undefined)), + forwards => make_forward_confs(maps:get(egress, Conf, undefined)) + }, + case ?MODULE:create_bridge(BridgeConf) of {ok, _Pid} -> - start_channel(Name); + case emqx_connector_mqtt_worker:ensure_started(InstanceId) of + ok -> {ok, #{name => InstanceId}}; + {error, Reason} -> {error, Reason} + end; {error, {already_started, _Pid}} -> - {ok, Name}; + {ok, #{name => InstanceId}}; {error, Reason} -> {error, Reason} end. -start_channel(Name) -> - case emqx_connector_mqtt_worker:ensure_started(Name) of - ok -> {ok, Name}; - {error, Reason} -> {error, Reason} +on_stop(_InstId, #{name := InstanceId}) -> + ?SLOG(info, #{msg => "stopping mqtt connector", + connector => InstanceId}), + case ?MODULE:drop_bridge(InstanceId) of + ok -> ok; + {error, not_found} -> ok; + {error, Reason} -> + ?SLOG(error, #{msg => "stop mqtt connector", + connector => InstanceId, reason => Reason}) end. +on_query(_InstId, {send_message, Msg}, AfterQuery, #{name := InstanceId}) -> + ?SLOG(debug, #{msg => "send msg to remote node", message => Msg, + connector => InstanceId}), + emqx_connector_mqtt_worker:send_to_remote(InstanceId, Msg), + emqx_resource:query_success(AfterQuery). + +on_health_check(_InstId, #{name := InstanceId} = State) -> + case emqx_connector_mqtt_worker:ping(InstanceId) of + pong -> {ok, State}; + _ -> {error, {connector_down, InstanceId}, State} + end. + +make_sub_confs(EmptyMap) when map_size(EmptyMap) == 0 -> + undefined; +make_sub_confs(undefined) -> + undefined; +make_sub_confs(SubRemoteConf) -> + case maps:take(hookpoint, SubRemoteConf) of + error -> SubRemoteConf; + {HookPoint, SubConf} -> + MFA = {?MODULE, on_message_received, [HookPoint]}, + SubConf#{on_message_received => MFA} + end. + +make_forward_confs(EmptyMap) when map_size(EmptyMap) == 0 -> + undefined; +make_forward_confs(undefined) -> + undefined; +make_forward_confs(FrowardConf) -> + FrowardConf. 
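To make the make_sub_confs/1 rewrite above concrete: the hookpoint from the ingress section is turned into the {Module, Function, Args} triple that the bridge worker applies to every message received from the remote broker, and on_message_received/2 then simply runs that hook. The topic and hookpoint values below are invented:

    make_sub_confs(#{from_remote_topic => <<"remote/#">>,
                     subscribe_qos => 1,
                     hookpoint => <<"$bridges/mqtt:my_ingress_bridge">>}).
    %% -> #{from_remote_topic => <<"remote/#">>,
    %%      subscribe_qos => 1,
    %%      on_message_received =>
    %%          {emqx_connector_mqtt, on_message_received,
    %%           [<<"$bridges/mqtt:my_ingress_bridge">>]}}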
+ basic_config(#{ server := Server, reconnect_interval := ReconnIntv, @@ -225,23 +189,5 @@ basic_config(#{ if_record_metrics => true }. -taged_map_list(Tag, Map) -> - [{{Tag, K}, V} || {K, V} <- maps:to_list(Map)]. - -ingress_channel_id(Prefix, Id) -> - channel_name("ingress_channels", Prefix, Id). -egress_channel_id(Prefix, Id) -> - channel_name("egress_channels", Prefix, Id). - -channel_name(Type, Prefix, Id) -> - list_to_atom(str(Prefix) ++ ":" ++ Type ++ ":" ++ str(Id)). - clientid(Id) -> - list_to_binary(str(Id) ++ ":" ++ emqx_misc:gen_id(8)). - -str(A) when is_atom(A) -> - atom_to_list(A); -str(B) when is_binary(B) -> - binary_to_list(B); -str(S) when is_list(S) -> - S. + list_to_binary(lists:concat([Id, ":", node()])). diff --git a/apps/emqx_connector/src/emqx_connector_mysql.erl b/apps/emqx_connector/src/emqx_connector_mysql.erl index 845c96161..c93a1e350 100644 --- a/apps/emqx_connector/src/emqx_connector_mysql.erl +++ b/apps/emqx_connector/src/emqx_connector_mysql.erl @@ -16,9 +16,10 @@ -module(emqx_connector_mysql). -include_lib("typerefl/include/types.hrl"). --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). -include_lib("emqx/include/logger.hrl"). +-behaviour(emqx_resource). + %% callbacks of behaviour emqx_resource -export([ on_start/2 , on_stop/2 @@ -86,7 +87,10 @@ on_query(InstId, {sql, SQL, Params}, AfterQuery, #{poolname := _PoolName} = Stat on_query(InstId, {sql, SQL, Params, Timeout}, AfterQuery, #{poolname := PoolName} = State) -> ?SLOG(debug, #{msg => "mysql connector received sql query", connector => InstId, sql => SQL, state => State}), - case Result = ecpool:pick_and_do(PoolName, {mysql, query, [SQL, Params, Timeout]}, no_handover) of + case Result = ecpool:pick_and_do( + PoolName, + {mysql, query, [SQL, Params, Timeout]}, + no_handover) of {error, Reason} -> ?SLOG(error, #{msg => "mysql connector do sql query failed", connector => InstId, sql => SQL, reason => Reason}), diff --git a/apps/emqx_connector/src/emqx_connector_pgsql.erl b/apps/emqx_connector/src/emqx_connector_pgsql.erl index 5b0adbeb9..f42bed666 100644 --- a/apps/emqx_connector/src/emqx_connector_pgsql.erl +++ b/apps/emqx_connector/src/emqx_connector_pgsql.erl @@ -16,11 +16,12 @@ -module(emqx_connector_pgsql). -include_lib("typerefl/include/types.hrl"). --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). -include_lib("emqx/include/logger.hrl"). -export([roots/0, fields/1]). +-behaviour(emqx_resource). + %% callbacks of behaviour emqx_resource -export([ on_start/2 , on_stop/2 @@ -118,15 +119,15 @@ conn_opts(Opts) -> conn_opts(Opts, []). conn_opts([], Acc) -> Acc; -conn_opts([Opt = {database, _}|Opts], Acc) -> - conn_opts(Opts, [Opt|Acc]); -conn_opts([Opt = {ssl, _}|Opts], Acc) -> - conn_opts(Opts, [Opt|Acc]); -conn_opts([Opt = {port, _}|Opts], Acc) -> - conn_opts(Opts, [Opt|Acc]); -conn_opts([Opt = {timeout, _}|Opts], Acc) -> - conn_opts(Opts, [Opt|Acc]); -conn_opts([Opt = {ssl_opts, _}|Opts], Acc) -> - conn_opts(Opts, [Opt|Acc]); -conn_opts([_Opt|Opts], Acc) -> +conn_opts([Opt = {database, _} | Opts], Acc) -> + conn_opts(Opts, [Opt | Acc]); +conn_opts([Opt = {ssl, _} | Opts], Acc) -> + conn_opts(Opts, [Opt | Acc]); +conn_opts([Opt = {port, _} | Opts], Acc) -> + conn_opts(Opts, [Opt | Acc]); +conn_opts([Opt = {timeout, _} | Opts], Acc) -> + conn_opts(Opts, [Opt | Acc]); +conn_opts([Opt = {ssl_opts, _} | Opts], Acc) -> + conn_opts(Opts, [Opt | Acc]); +conn_opts([_Opt | Opts], Acc) -> conn_opts(Opts, Acc). 
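A small illustration of the conn_opts/1,2 whitelist just above: only the options the PostgreSQL driver call expects are kept, and anything else is dropped before the connection is opened. Values are illustrative:

    conn_opts([{database, <<"mqtt">>}, {pool_size, 8}, {ssl, false}]).
    %% -> [{ssl, false}, {database, <<"mqtt">>}]
    %% (pool_size is filtered out; the accumulator reverses the order)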
diff --git a/apps/emqx_connector/src/emqx_connector_redis.erl b/apps/emqx_connector/src/emqx_connector_redis.erl index aed06e724..075ede0bc 100644 --- a/apps/emqx_connector/src/emqx_connector_redis.erl +++ b/apps/emqx_connector/src/emqx_connector_redis.erl @@ -17,7 +17,6 @@ -include("emqx_connector.hrl"). -include_lib("typerefl/include/types.hrl"). --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). -include_lib("emqx/include/logger.hrl"). -type server() :: tuple(). @@ -30,6 +29,8 @@ -export([roots/0, fields/1]). +-behaviour(emqx_resource). + %% callbacks of behaviour emqx_resource -export([ on_start/2 , on_stop/2 @@ -100,7 +101,8 @@ on_start(InstId, #{redis_type := Type, Options = case maps:get(enable, SSL) of true -> [{ssl, true}, - {ssl_options, emqx_plugin_libs_ssl:save_files_return_opts(SSL, "connectors", InstId)} + {ssl_options, + emqx_plugin_libs_ssl:save_files_return_opts(SSL, "connectors", InstId)} ]; false -> [{ssl, false}] end ++ [{sentinel, maps:get(sentinel, Config, undefined)}], @@ -182,4 +184,4 @@ to_server(Server) -> case string:tokens(Server, ":") of [Host, Port] -> {ok, {Host, list_to_integer(Port)}}; _ -> {error, Server} - end. \ No newline at end of file + end. diff --git a/apps/emqx_connector/src/emqx_connector_schema.erl b/apps/emqx_connector/src/emqx_connector_schema.erl new file mode 100644 index 000000000..264a6dbd6 --- /dev/null +++ b/apps/emqx_connector/src/emqx_connector_schema.erl @@ -0,0 +1,36 @@ +-module(emqx_connector_schema). + +-behaviour(hocon_schema). + +-include_lib("typerefl/include/types.hrl"). + +-export([roots/0, fields/1]). + +%%====================================================================================== +%% Hocon Schema Definitions + +roots() -> ["connectors"]. + +fields("connectors") -> + [ {mqtt, + sc(hoconsc:map(name, + hoconsc:union([ ref("mqtt_connector") + ])), + #{ desc => "MQTT bridges" + })} + ]; + +fields("mqtt_connector") -> + emqx_connector_mqtt_schema:fields("connector"); + +fields("mqtt_connector_info") -> + [{id, sc(binary(), #{desc => "The connector Id"})}] + ++ fields("mqtt_connector"); + +fields("mqtt_connector_test_info") -> + [{bridge_type, sc(mqtt, #{desc => "The Bridge Type"})}] + ++ fields("mqtt_connector"). + +sc(Type, Meta) -> hoconsc:mk(Type, Meta). + +ref(Field) -> hoconsc:ref(?MODULE, Field). diff --git a/apps/emqx_connector/src/emqx_connector_schema_lib.erl b/apps/emqx_connector/src/emqx_connector_schema_lib.erl index 9ecfb56b3..b8a32c401 100644 --- a/apps/emqx_connector/src/emqx_connector_schema_lib.erl +++ b/apps/emqx_connector/src/emqx_connector_schema_lib.erl @@ -105,7 +105,7 @@ servers(validator) -> [?NOT_EMPTY("the value of the field 'servers' cannot be em servers(_) -> undefined. 
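The to_ip_port/1 and to_servers/1 changes just below widen the token separator set from ":" to ": " (colon or space), so a server written as "127.0.0.1 : 1883" still parses; the connector-update test near the end of this diff relies on exactly that. Sketch:

    %% both spellings tokenize the same way:
    string:tokens("127.0.0.1:1883", ": "),    %% -> ["127.0.0.1", "1883"]
    string:tokens("127.0.0.1 : 1883", ": ").  %% -> ["127.0.0.1", "1883"]
    %% so to_ip_port/1 returns {ok, {{127,0,0,1}, 1883}} for either form.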
to_ip_port(Str) -> - case string:tokens(Str, ":") of + case string:tokens(Str, ": ") of [Ip, Port] -> case inet:parse_address(Ip) of {ok, R} -> {ok, {R, list_to_integer(Port)}}; @@ -121,7 +121,7 @@ ip_port_to_string({Ip, Port}) when is_tuple(Ip) -> to_servers(Str) -> {ok, lists:map(fun(Server) -> - case string:tokens(Server, ":") of + case string:tokens(Server, ": ") of [Ip] -> [{host, Ip}]; [Ip, Port] -> diff --git a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_mod.erl b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_mod.erl index 853221eec..4cc240d9d 100644 --- a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_mod.erl +++ b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_mod.erl @@ -65,7 +65,7 @@ start(Config) -> case emqtt:connect(Pid) of {ok, _} -> try - ok = subscribe_remote_topics(Pid, Subscriptions), + ok = from_remote_topics(Pid, Subscriptions), {ok, #{client_pid => Pid, subscriptions => Subscriptions}} catch throw : Reason -> @@ -160,14 +160,18 @@ handle_puback(#{packet_id := PktId, reason_code := RC}, _Parent) -> handle_publish(Msg, undefined) -> ?SLOG(error, #{msg => "cannot publish to local broker as" - " ingress_channles' is not configured", + " 'ingress' is not configured", message => Msg}); -handle_publish(Msg, #{on_message_received := {OnMsgRcvdFunc, Args}} = Vars) -> +handle_publish(Msg, Vars) -> ?SLOG(debug, #{msg => "publish to local broker", message => Msg, vars => Vars}), emqx_metrics:inc('bridge.mqtt.message_received_from_remote', 1), - _ = erlang:apply(OnMsgRcvdFunc, [Msg | Args]), - case maps:get(local_topic, Vars, undefined) of + case Vars of + #{on_message_received := {Mod, Func, Args}} -> + _ = erlang:apply(Mod, Func, [Msg | Args]); + _ -> ok + end, + case maps:get(to_local_topic, Vars, undefined) of undefined -> ok; _Topic -> emqx_broker:publish(emqx_connector_mqtt_msg:to_broker_msg(Msg, Vars)) @@ -182,8 +186,8 @@ make_hdlr(Parent, Vars) -> disconnected => {fun ?MODULE:handle_disconnected/2, [Parent]} }. 
-subscribe_remote_topics(_ClientPid, undefined) -> ok; -subscribe_remote_topics(ClientPid, #{subscribe_remote_topic := FromTopic, subscribe_qos := QoS}) -> +from_remote_topics(_ClientPid, undefined) -> ok; +from_remote_topics(ClientPid, #{from_remote_topic := FromTopic, subscribe_qos := QoS}) -> case emqtt:subscribe(ClientPid, FromTopic, QoS) of {ok, _, _} -> ok; Error -> throw(Error) diff --git a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_msg.erl b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_msg.erl index 6009cc084..7b49f21fe 100644 --- a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_msg.erl +++ b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_msg.erl @@ -36,7 +36,7 @@ -type variables() :: #{ mountpoint := undefined | binary(), - remote_topic := binary(), + to_remote_topic := binary(), qos := original | integer(), retain := original | boolean(), payload := binary() @@ -59,7 +59,7 @@ to_remote_msg(#message{flags = Flags0} = Msg, Vars) -> Retain0 = maps:get(retain, Flags0, false), MapMsg = maps:put(retain, Retain0, emqx_message:to_map(Msg)), to_remote_msg(MapMsg, Vars); -to_remote_msg(MapMsg, #{remote_topic := TopicToken, payload := PayloadToken, +to_remote_msg(MapMsg, #{to_remote_topic := TopicToken, payload := PayloadToken, qos := QoSToken, retain := RetainToken, mountpoint := Mountpoint}) when is_map(MapMsg) -> Topic = replace_vars_in_str(TopicToken, MapMsg), Payload = replace_vars_in_str(PayloadToken, MapMsg), @@ -75,7 +75,7 @@ to_remote_msg(#message{topic = Topic} = Msg, #{mountpoint := Mountpoint}) -> %% published from remote node over a MQTT connection to_broker_msg(#{dup := Dup, properties := Props} = MapMsg, - #{local_topic := TopicToken, payload := PayloadToken, + #{to_local_topic := TopicToken, payload := PayloadToken, qos := QoSToken, retain := RetainToken, mountpoint := Mountpoint}) -> Topic = replace_vars_in_str(TopicToken, MapMsg), Payload = replace_vars_in_str(PayloadToken, MapMsg), diff --git a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_schema.erl b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_schema.erl index b0aaeb8b6..6436a4c96 100644 --- a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_schema.erl +++ b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_schema.erl @@ -21,58 +21,203 @@ -behaviour(hocon_schema). -export([ roots/0 - , fields/1]). + , fields/1 + ]). + +-export([ ingress_desc/0 + , egress_desc/0 + ]). -import(emqx_schema, [mk_duration/2]). roots() -> - [{config, #{type => hoconsc:ref(?MODULE, "config")}}]. + fields("config"). 
fields("config") -> - [ {server, hoconsc:mk(emqx_schema:ip_port(), #{default => "127.0.0.1:1883"})} + fields("connector") ++ + topic_mappings(); + +fields("connector") -> + [ {server, + sc(emqx_schema:ip_port(), + #{ default => "127.0.0.1:1883" + , desc => "The host and port of the remote MQTT broker" + })} , {reconnect_interval, mk_duration("reconnect interval", #{default => "30s"})} - , {proto_ver, fun proto_ver/1} - , {bridge_mode, hoconsc:mk(boolean(), #{default => true})} - , {username, hoconsc:mk(string())} - , {password, hoconsc:mk(string())} - , {clean_start, hoconsc:mk(boolean(), #{default => true})} + , {proto_ver, + sc(hoconsc:enum([v3, v4, v5]), + #{ default => v4 + , desc => "The MQTT protocol version" + })} + , {bridge_mode, + sc(boolean(), + #{ default => true + , desc => "The bridge mode of the MQTT protocol" + })} + , {username, + sc(binary(), + #{ default => "emqx" + , desc => "The username of the MQTT protocol" + })} + , {password, + sc(binary(), + #{ default => "emqx" + , desc => "The password of the MQTT protocol" + })} + , {clientid, + sc(binary(), + #{ default => "emqx_${nodename}" + , desc => "The clientid of the MQTT protocol" + })} + , {clean_start, + sc(boolean(), + #{ default => true + , desc => "The clean-start or the clean-session of the MQTT protocol" + })} , {keepalive, mk_duration("keepalive", #{default => "300s"})} , {retry_interval, mk_duration("retry interval", #{default => "30s"})} - , {max_inflight, hoconsc:mk(integer(), #{default => 32})} - , {replayq, hoconsc:mk(hoconsc:ref(?MODULE, "replayq"))} - , {ingress_channels, hoconsc:mk(hoconsc:map(id, hoconsc:ref(?MODULE, "ingress_channels")), #{default => []})} - , {egress_channels, hoconsc:mk(hoconsc:map(id, hoconsc:ref(?MODULE, "egress_channels")), #{default => []})} + , {max_inflight, + sc(integer(), + #{ default => 32 + , desc => "Max inflight messages (sent but ACK has not received) of the MQTT protocol" + })} + , {replayq, + sc(ref("replayq"), + #{ desc => """ +Queue messages in disk files. +""" + })} ] ++ emqx_connector_schema_lib:ssl_fields(); -fields("ingress_channels") -> - %% the message maybe subscribed by rules, in this case 'local_topic' is not necessary - [ {subscribe_remote_topic, hoconsc:mk(binary(), #{nullable => false})} - , {local_topic, hoconsc:mk(binary())} - , {subscribe_qos, hoconsc:mk(qos(), #{default => 1})} +fields("ingress") -> + %% the message maybe subscribed by rules, in this case 'to_local_topic' is not necessary + [ {from_remote_topic, + sc(binary(), + #{ nullable => false + , desc => "Receive messages from which topic of the remote broker" + })} + , {subscribe_qos, + sc(qos(), + #{ default => 1 + , desc => "The QoS level to be used when subscribing to the remote broker" + })} + , {to_local_topic, + sc(binary(), + #{ desc => """ +Send messages to which topic of the local broker.
+Template with variables is allowed. +""" + })} + , {hookpoint, + sc(binary(), + #{ desc => """ +The hookpoint will be triggered when there's any message received from the remote broker. +""" + })} ] ++ common_inout_confs(); -fields("egress_channels") -> - %% the message maybe sent from rules, in this case 'subscribe_local_topic' is not necessary - [ {subscribe_local_topic, hoconsc:mk(binary())} - , {remote_topic, hoconsc:mk(binary(), #{default => <<"${topic}">>})} +fields("egress") -> + %% the message maybe sent from rules, in this case 'from_local_topic' is not necessary + [ {from_local_topic, + sc(binary(), + #{ desc => "The local topic to be forwarded to the remote broker" + })} + , {to_remote_topic, + sc(binary(), + #{ default => <<"${topic}">> + , desc => """ +Forward to which topic of the remote broker.
+Template with variables is allowed. +""" + })} ] ++ common_inout_confs(); fields("replayq") -> - [ {dir, hoconsc:union([boolean(), string()])} - , {seg_bytes, hoconsc:mk(emqx_schema:bytesize(), #{default => "100MB"})} - , {offload, hoconsc:mk(boolean(), #{default => false})} - , {max_total_bytes, hoconsc:mk(emqx_schema:bytesize(), #{default => "1024MB"})} + [ {dir, + sc(hoconsc:union([boolean(), string()]), + #{ desc => """ +The dir where the replayq file saved.
+Set to 'false' disables the replayq feature. +""" + })} + , {seg_bytes, + sc(emqx_schema:bytesize(), + #{ default => "100MB" + , desc => """ +The size in bytes of a single segment.
+A segment is mapping to a file in the replayq dir. If the current segment is full, a new segment +(file) will be opened to write. +""" + })} + , {offload, + sc(boolean(), + #{ default => false + , desc => """ +In offload mode, the disk queue is only used to offload queue tail segments.
+The messages are cached in the memory first, then it write to the replayq files after the size of +the memory cache reaches 'seg_bytes'. +""" + })} ]. +topic_mappings() -> + [ {ingress, + sc(ref("ingress"), + #{ default => #{} + , desc => ingress_desc() + })} + , {egress, + sc(ref("egress"), + #{ default => #{} + , desc => egress_desc() + })} + ]. + +ingress_desc() -> """ +The ingress config defines how this bridge receive messages from the remote MQTT broker, and then +send them to the local broker.
+Template with variables is allowed in 'to_local_topic', 'subscribe_qos', 'qos', 'retain', +'payload'.
+NOTE: if this bridge is used as the input of a rule (emqx rule engine), and also to_local_topic is +configured, then messages got from the remote broker will be sent to both the 'to_local_topic' and +the rule. +""". + +egress_desc() -> """ +The egress config defines how this bridge forwards messages from the local broker to the remote +broker.
+Template with variables is allowed in 'to_remote_topic', 'qos', 'retain', 'payload'.
+NOTE: if this bridge is used as the output of a rule (emqx rule engine), and also from_local_topic +is configured, then both the data got from the rule and the MQTT messages that matches +from_local_topic will be forwarded. +""". + common_inout_confs() -> - [ {qos, hoconsc:mk(qos(), #{default => <<"${qos}">>})} - , {retain, hoconsc:mk(hoconsc:union([boolean(), binary()]), #{default => <<"${retain}">>})} - , {payload, hoconsc:mk(binary(), #{default => <<"${payload}">>})} + [ {qos, + sc(qos(), + #{ default => <<"${qos}">> + , desc => """ +The QoS of the MQTT message to be sent.
+Template with variables is allowed.""" + })} + , {retain, + sc(hoconsc:union([boolean(), binary()]), + #{ default => <<"${retain}">> + , desc => """ +The retain flag of the MQTT message to be sent.
+Template with variables is allowed.""" + })} + , {payload, + sc(binary(), + #{ default => <<"${payload}">> + , desc => """ +The payload of the MQTT message to be sent.
+Template with variables is allowed.""" + })} ]. qos() -> hoconsc:union([typerefl:integer(0), typerefl:integer(1), typerefl:integer(2), binary()]). -proto_ver(type) -> hoconsc:enum([v3, v4, v5]); -proto_ver(default) -> v4; -proto_ver(_) -> undefined. +sc(Type, Meta) -> hoconsc:mk(Type, Meta). +ref(Field) -> hoconsc:ref(?MODULE, Field). diff --git a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_worker.erl b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_worker.erl index 990d15ef5..95424fe3a 100644 --- a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_worker.erl +++ b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_worker.erl @@ -101,14 +101,12 @@ -export([msg_marshaller/1]). -export_type([ config/0 - , batch/0 , ack_ref/0 ]). -type id() :: atom() | string() | pid(). -type qos() :: emqx_types:qos(). -type config() :: map(). --type batch() :: [emqx_connector_mqtt_msg:exp_msg()]. -type ack_ref() :: term(). -type topic() :: emqx_types:topic(). @@ -117,7 +115,7 @@ %% same as default in-flight limit for emqtt --define(DEFAULT_BATCH_SIZE, 32). +-define(DEFAULT_INFLIGHT_SIZE, 32). -define(DEFAULT_RECONNECT_DELAY_MS, timer:seconds(5)). -define(DEFAULT_SEG_BYTES, (1 bsl 20)). -define(DEFAULT_MAX_TOTAL_SIZE, (1 bsl 31)). @@ -205,12 +203,10 @@ init_state(Opts) -> ReconnDelayMs = maps:get(reconnect_interval, Opts, ?DEFAULT_RECONNECT_DELAY_MS), StartType = maps:get(start_type, Opts, manual), Mountpoint = maps:get(forward_mountpoint, Opts, undefined), - MaxInflightSize = maps:get(max_inflight, Opts, ?DEFAULT_BATCH_SIZE), - BatchSize = maps:get(batch_size, Opts, ?DEFAULT_BATCH_SIZE), + MaxInflightSize = maps:get(max_inflight, Opts, ?DEFAULT_INFLIGHT_SIZE), Name = maps:get(name, Opts, undefined), #{start_type => StartType, reconnect_interval => ReconnDelayMs, - batch_size => BatchSize, mountpoint => format_mountpoint(Mountpoint), inflight => [], max_inflight => MaxInflightSize, @@ -235,8 +231,8 @@ pre_process_opts(#{subscriptions := InConf, forwards := OutConf} = ConnectOpts) pre_process_in_out(undefined) -> undefined; pre_process_in_out(Conf) when is_map(Conf) -> - Conf1 = pre_process_conf(local_topic, Conf), - Conf2 = pre_process_conf(remote_topic, Conf1), + Conf1 = pre_process_conf(to_local_topic, Conf), + Conf2 = pre_process_conf(to_remote_topic, Conf1), Conf3 = pre_process_conf(payload, Conf2), Conf4 = pre_process_conf(qos, Conf3), pre_process_conf(retain, Conf4). @@ -327,10 +323,6 @@ common(_StateName, {call, From}, get_forwards, #{connect_opts := #{forwards := F {keep_state_and_data, [{reply, From, Forwards}]}; common(_StateName, {call, From}, get_subscriptions, #{connection := Connection}) -> {keep_state_and_data, [{reply, From, maps:get(subscriptions, Connection, #{})}]}; -common(_StateName, info, {deliver, _, Msg}, State = #{replayq := Q}) -> - Msgs = collect([Msg]), - NewQ = replayq:append(Q, Msgs), - {keep_state, State#{replayq => NewQ}, {next_event, internal, maybe_send}}; common(_StateName, info, {'EXIT', _, _}, State) -> {keep_state, State}; common(_StateName, cast, {send_to_remote, Msg}, #{replayq := Q} = State) -> @@ -342,13 +334,9 @@ common(StateName, Type, Content, #{name := Name} = State) -> content => Content}), {keep_state, State}. 
-do_connect(#{connect_opts := ConnectOpts = #{forwards := Forwards}, +do_connect(#{connect_opts := ConnectOpts, inflight := Inflight, name := Name} = State) -> - case Forwards of - undefined -> ok; - #{subscribe_local_topic := Topic} -> subscribe_local_topic(Topic, Name) - end, case emqx_connector_mqtt_mod:start(ConnectOpts) of {ok, Conn} -> ?tp(info, connected, #{name => Name, inflight => length(Inflight)}), @@ -360,19 +348,10 @@ do_connect(#{connect_opts := ConnectOpts = #{forwards := Forwards}, {error, Reason, State} end. -collect(Acc) -> - receive - {deliver, _, Msg} -> - collect([Msg | Acc]) - after - 0 -> - lists:reverse(Acc) - end. - %% Retry all inflight (previously sent but not acked) batches. retry_inflight(State, []) -> {ok, State}; -retry_inflight(State, [#{q_ack_ref := QAckRef, batch := Batch} | Rest] = OldInf) -> - case do_send(State, QAckRef, Batch) of +retry_inflight(State, [#{q_ack_ref := QAckRef, msg := Msg} | Rest] = OldInf) -> + case do_send(State, QAckRef, Msg) of {ok, State1} -> retry_inflight(State1, Rest); {error, #{inflight := NewInf} = State1} -> @@ -393,34 +372,33 @@ pop_and_send_loop(#{replayq := Q} = State, N) -> false -> BatchSize = 1, Opts = #{count_limit => BatchSize, bytes_limit => 999999999}, - {Q1, QAckRef, Batch} = replayq:pop(Q, Opts), - case do_send(State#{replayq := Q1}, QAckRef, Batch) of + {Q1, QAckRef, [Msg]} = replayq:pop(Q, Opts), + case do_send(State#{replayq := Q1}, QAckRef, Msg) of {ok, NewState} -> pop_and_send_loop(NewState, N - 1); {error, NewState} -> {error, NewState} end end. -%% Assert non-empty batch because we have a is_empty check earlier. -do_send(#{connect_opts := #{forwards := undefined}}, _QAckRef, Batch) -> +do_send(#{connect_opts := #{forwards := undefined}}, _QAckRef, Msg) -> ?SLOG(error, #{msg => "cannot forward messages to remote broker" - " as egress_channel is not configured", - messages => Batch}); + " as 'egress' is not configured", + messages => Msg}); do_send(#{inflight := Inflight, connection := Connection, mountpoint := Mountpoint, - connect_opts := #{forwards := Forwards}} = State, QAckRef, [_ | _] = Batch) -> + connect_opts := #{forwards := Forwards}} = State, QAckRef, Msg) -> Vars = emqx_connector_mqtt_msg:make_pub_vars(Mountpoint, Forwards), ExportMsg = fun(Message) -> emqx_metrics:inc('bridge.mqtt.message_sent_to_remote'), emqx_connector_mqtt_msg:to_remote_msg(Message, Vars) end, ?SLOG(debug, #{msg => "publish to remote broker", - message => Batch, vars => Vars}), - case emqx_connector_mqtt_mod:send(Connection, [ExportMsg(M) || M <- Batch]) of + message => Msg, vars => Vars}), + case emqx_connector_mqtt_mod:send(Connection, [ExportMsg(Msg)]) of {ok, Refs} -> {ok, State#{inflight := Inflight ++ [#{q_ack_ref => QAckRef, send_ack_ref => map_set(Refs), - batch => Batch}]}}; + msg => Msg}]}}; {error, Reason} -> ?SLOG(info, #{msg => "mqtt_bridge_produce_failed", reason => Reason}), @@ -473,27 +451,6 @@ drop_acked_batches(Q, [#{send_ack_ref := Refs, All end. -subscribe_local_topic(undefined, _Name) -> - ok; -subscribe_local_topic(Topic, Name) -> - do_subscribe(Topic, Name). - -topic(T) -> iolist_to_binary(T). - -validate(RawTopic) -> - Topic = topic(RawTopic), - try emqx_topic:validate(Topic) of - _Success -> Topic - catch - error:Reason -> - error({bad_topic, Topic, Reason}) - end. - -do_subscribe(RawTopic, Name) -> - TopicFilter = validate(RawTopic), - {Topic, SubOpts} = emqx_topic:parse(TopicFilter, #{qos => ?QOS_2}), - emqx_broker:subscribe(Topic, Name, SubOpts). 
- disconnect(#{connection := Conn} = State) when Conn =/= undefined -> emqx_connector_mqtt_mod:stop(Conn), State#{connection => undefined}; diff --git a/apps/emqx_connector/test/emqx_connector_api_SUITE.erl b/apps/emqx_connector/test/emqx_connector_api_SUITE.erl new file mode 100644 index 000000000..96f530563 --- /dev/null +++ b/apps/emqx_connector/test/emqx_connector_api_SUITE.erl @@ -0,0 +1,397 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_connector_api_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include("emqx/include/emqx.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). + +-define(CONF_DEFAULT, <<"connectors: {}">>). +-define(BRIDGE_CONF_DEFAULT, <<"bridges: {}">>). +-define(CONNECTR_ID, <<"mqtt:test_connector">>). +-define(BRIDGE_ID_INGRESS, <<"mqtt:ingress_test_bridge">>). +-define(BRIDGE_ID_EGRESS, <<"mqtt:egress_test_bridge">>). +-define(MQTT_CONNECOTR(Username), +#{ + <<"server">> => <<"127.0.0.1:1883">>, + <<"username">> => Username, + <<"password">> => <<"">>, + <<"proto_ver">> => <<"v4">>, + <<"ssl">> => #{<<"enable">> => false} +}). +-define(MQTT_CONNECOTR2(Server), + ?MQTT_CONNECOTR(<<"user1">>)#{<<"server">> => Server}). + +-define(MQTT_BRIDGE_INGRESS(ID), +#{ + <<"connector">> => ID, + <<"direction">> => <<"ingress">>, + <<"from_remote_topic">> => <<"remote_topic/#">>, + <<"to_local_topic">> => <<"local_topic/${topic}">>, + <<"subscribe_qos">> => 1, + <<"payload">> => <<"${payload}">>, + <<"qos">> => <<"${qos}">>, + <<"retain">> => <<"${retain}">> +}). + +-define(MQTT_BRIDGE_EGRESS(ID), +#{ + <<"connector">> => ID, + <<"direction">> => <<"egress">>, + <<"from_local_topic">> => <<"local_topic/#">>, + <<"to_remote_topic">> => <<"remote_topic/${topic}">>, + <<"payload">> => <<"${payload}">>, + <<"qos">> => <<"${qos}">>, + <<"retain">> => <<"${retain}">> +}). + +-define(metrics(MATCH, SUCC, FAILED, SPEED, SPEED5M, SPEEDMAX), + #{<<"matched">> := MATCH, <<"success">> := SUCC, + <<"failed">> := FAILED, <<"speed">> := SPEED, + <<"speed_last5m">> := SPEED5M, <<"speed_max">> := SPEEDMAX}). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +groups() -> + []. + +suite() -> + [{timetrap,{seconds,30}}]. 
+ +init_per_suite(Config) -> + ok = emqx_config:put([emqx_dashboard], #{ + default_username => <<"admin">>, + default_password => <<"public">>, + listeners => [#{ + protocol => http, + port => 18083 + }] + }), + _ = application:load(emqx_conf), + %% some testcases (may from other app) already get emqx_connector started + _ = application:stop(emqx_resource), + _ = application:stop(emqx_connector), + ok = emqx_common_test_helpers:start_apps([emqx_connector, emqx_bridge, emqx_dashboard]), + ok = emqx_config:init_load(emqx_connector_schema, ?CONF_DEFAULT), + ok = emqx_config:init_load(emqx_bridge_schema, ?BRIDGE_CONF_DEFAULT), + Config. + +end_per_suite(_Config) -> + emqx_common_test_helpers:stop_apps([emqx_connector, emqx_bridge, emqx_dashboard]), + ok. + +init_per_testcase(_, Config) -> + {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), + Config. +end_per_testcase(_, _Config) -> + ok. + +%%------------------------------------------------------------------------------ +%% Testcases +%%------------------------------------------------------------------------------ + +t_mqtt_crud_apis(_) -> + %% assert we there's no connectors at first + {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []), + + %% then we add a mqtt connector, using POST + %% POST /connectors/ will create a connector + User1 = <<"user1">>, + {ok, 201, Connector} = request(post, uri(["connectors"]), + ?MQTT_CONNECOTR(User1)#{<<"id">> => ?CONNECTR_ID}), + + %ct:pal("---connector: ~p", [Connector]), + ?assertMatch(#{ <<"id">> := ?CONNECTR_ID + , <<"server">> := <<"127.0.0.1:1883">> + , <<"username">> := User1 + , <<"password">> := <<"">> + , <<"proto_ver">> := <<"v4">> + , <<"ssl">> := #{<<"enable">> := false} + }, jsx:decode(Connector)), + + %% create a again returns an error + {ok, 400, RetMsg} = request(post, uri(["connectors"]), + ?MQTT_CONNECOTR(User1)#{<<"id">> => ?CONNECTR_ID}), + ?assertMatch( + #{ <<"code">> := _ + , <<"message">> := <<"connector already exists">> + }, jsx:decode(RetMsg)), + + %% update the request-path of the connector + User2 = <<"user2">>, + {ok, 200, Connector2} = request(put, uri(["connectors", ?CONNECTR_ID]), + ?MQTT_CONNECOTR(User2)), + ?assertMatch(#{ <<"id">> := ?CONNECTR_ID + , <<"server">> := <<"127.0.0.1:1883">> + , <<"username">> := User2 + , <<"password">> := <<"">> + , <<"proto_ver">> := <<"v4">> + , <<"ssl">> := #{<<"enable">> := false} + }, jsx:decode(Connector2)), + + %% list all connectors again, assert Connector2 is in it + {ok, 200, Connector2Str} = request(get, uri(["connectors"]), []), + ?assertMatch([#{ <<"id">> := ?CONNECTR_ID + , <<"server">> := <<"127.0.0.1:1883">> + , <<"username">> := User2 + , <<"password">> := <<"">> + , <<"proto_ver">> := <<"v4">> + , <<"ssl">> := #{<<"enable">> := false} + }], jsx:decode(Connector2Str)), + + %% get the connector by id + {ok, 200, Connector3Str} = request(get, uri(["connectors", ?CONNECTR_ID]), []), + ?assertMatch(#{ <<"id">> := ?CONNECTR_ID + , <<"server">> := <<"127.0.0.1:1883">> + , <<"username">> := User2 + , <<"password">> := <<"">> + , <<"proto_ver">> := <<"v4">> + , <<"ssl">> := #{<<"enable">> := false} + }, jsx:decode(Connector3Str)), + + %% delete the connector + {ok, 204, <<>>} = request(delete, uri(["connectors", ?CONNECTR_ID]), []), + {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []), + + %% update a deleted connector returns an error + {ok, 404, ErrMsg2} = request(put, uri(["connectors", ?CONNECTR_ID]), + ?MQTT_CONNECOTR(User2)), + ?assertMatch( + #{ <<"code">> := _ + , <<"message">> := 
<<"connector not found">> + }, jsx:decode(ErrMsg2)), + ok. + +t_mqtt_conn_bridge_ingress(_) -> + %% assert we there's no connectors and no bridges at first + {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []), + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), + + %% then we add a mqtt connector, using POST + User1 = <<"user1">>, + {ok, 201, Connector} = request(post, uri(["connectors"]), + ?MQTT_CONNECOTR(User1)#{<<"id">> => ?CONNECTR_ID}), + + %ct:pal("---connector: ~p", [Connector]), + ?assertMatch(#{ <<"id">> := ?CONNECTR_ID + , <<"server">> := <<"127.0.0.1:1883">> + , <<"username">> := User1 + , <<"password">> := <<"">> + , <<"proto_ver">> := <<"v4">> + , <<"ssl">> := #{<<"enable">> := false} + }, jsx:decode(Connector)), + + %% ... and a MQTT bridge, using POST + %% we bind this bridge to the connector created just now + {ok, 201, Bridge} = request(post, uri(["bridges"]), + ?MQTT_BRIDGE_INGRESS(?CONNECTR_ID)#{<<"id">> => ?BRIDGE_ID_INGRESS}), + + %ct:pal("---bridge: ~p", [Bridge]), + ?assertMatch(#{ <<"id">> := ?BRIDGE_ID_INGRESS + , <<"bridge_type">> := <<"mqtt">> + , <<"status">> := <<"connected">> + , <<"connector">> := ?CONNECTR_ID + }, jsx:decode(Bridge)), + + %% we now test if the bridge works as expected + + RemoteTopic = <<"remote_topic/1">>, + LocalTopic = <<"local_topic/", RemoteTopic/binary>>, + Payload = <<"hello">>, + emqx:subscribe(LocalTopic), + %% PUBLISH a message to the 'remote' broker, as we have only one broker, + %% the remote broker is also the local one. + emqx:publish(emqx_message:make(RemoteTopic, Payload)), + + %% we should receive a message on the local broker, with specified topic + ?assert( + receive + {deliver, LocalTopic, #message{payload = Payload}} -> + ct:pal("local broker got message: ~p on topic ~p", [Payload, LocalTopic]), + true; + Msg -> + ct:pal("Msg: ~p", [Msg]), + false + after 100 -> + false + end), + + %% delete the bridge + {ok, 204, <<>>} = request(delete, uri(["bridges", ?BRIDGE_ID_INGRESS]), []), + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), + + %% delete the connector + {ok, 204, <<>>} = request(delete, uri(["connectors", ?CONNECTR_ID]), []), + {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []), + ok. + +t_mqtt_conn_bridge_egress(_) -> + %% assert we there's no connectors and no bridges at first + {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []), + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), + + %% then we add a mqtt connector, using POST + User1 = <<"user1">>, + {ok, 201, Connector} = request(post, uri(["connectors"]), + ?MQTT_CONNECOTR(User1)#{<<"id">> => ?CONNECTR_ID}), + + %ct:pal("---connector: ~p", [Connector]), + ?assertMatch(#{ <<"id">> := ?CONNECTR_ID + , <<"server">> := <<"127.0.0.1:1883">> + , <<"username">> := User1 + , <<"password">> := <<"">> + , <<"proto_ver">> := <<"v4">> + , <<"ssl">> := #{<<"enable">> := false} + }, jsx:decode(Connector)), + + %% ... 
and a MQTT bridge, using POST + %% we bind this bridge to the connector created just now + {ok, 201, Bridge} = request(post, uri(["bridges"]), + ?MQTT_BRIDGE_EGRESS(?CONNECTR_ID)#{<<"id">> => ?BRIDGE_ID_EGRESS}), + + %ct:pal("---bridge: ~p", [Bridge]), + ?assertMatch(#{ <<"id">> := ?BRIDGE_ID_EGRESS + , <<"bridge_type">> := <<"mqtt">> + , <<"status">> := <<"connected">> + , <<"connector">> := ?CONNECTR_ID + }, jsx:decode(Bridge)), + + %% we now test if the bridge works as expected + LocalTopic = <<"local_topic/1">>, + RemoteTopic = <<"remote_topic/", LocalTopic/binary>>, + Payload = <<"hello">>, + emqx:subscribe(RemoteTopic), + %% PUBLISH a message to the 'local' broker, as we have only one broker, + %% the remote broker is also the local one. + emqx:publish(emqx_message:make(LocalTopic, Payload)), + + %% we should receive a message on the "remote" broker, with specified topic + ?assert( + receive + {deliver, RemoteTopic, #message{payload = Payload}} -> + ct:pal("local broker got message: ~p on topic ~p", [Payload, RemoteTopic]), + true; + Msg -> + ct:pal("Msg: ~p", [Msg]), + false + after 100 -> + false + end), + + %% verify the metrics of the bridge + {ok, 200, BridgeStr} = request(get, uri(["bridges", ?BRIDGE_ID_EGRESS]), []), + ?assertMatch(#{ <<"id">> := ?BRIDGE_ID_EGRESS + , <<"metrics">> := ?metrics(1, 1, 0, _, _, _) + , <<"node_metrics">> := + [#{<<"node">> := _, <<"metrics">> := ?metrics(1, 1, 0, _, _, _)}] + }, jsx:decode(BridgeStr)), + + %% delete the bridge + {ok, 204, <<>>} = request(delete, uri(["bridges", ?BRIDGE_ID_EGRESS]), []), + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), + + %% delete the connector + {ok, 204, <<>>} = request(delete, uri(["connectors", ?CONNECTR_ID]), []), + {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []), + ok. + +%% t_mqtt_conn_update: +%% - update a connector should also update all of the the bridges +%% - cannot delete a connector that is used by at least one bridge +t_mqtt_conn_update(_) -> + %% assert we there's no connectors and no bridges at first + {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []), + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), + + %% then we add a mqtt connector, using POST + {ok, 201, Connector} = request(post, uri(["connectors"]), + ?MQTT_CONNECOTR2(<<"127.0.0.1:1883">>)#{<<"id">> => ?CONNECTR_ID}), + + %ct:pal("---connector: ~p", [Connector]), + ?assertMatch(#{ <<"id">> := ?CONNECTR_ID + , <<"server">> := <<"127.0.0.1:1883">> + }, jsx:decode(Connector)), + + %% ... 
and a MQTT bridge, using POST + %% we bind this bridge to the connector created just now + {ok, 201, Bridge} = request(post, uri(["bridges"]), + ?MQTT_BRIDGE_EGRESS(?CONNECTR_ID)#{<<"id">> => ?BRIDGE_ID_EGRESS}), + ?assertMatch(#{ <<"id">> := ?BRIDGE_ID_EGRESS + , <<"bridge_type">> := <<"mqtt">> + , <<"status">> := <<"connected">> + , <<"connector">> := ?CONNECTR_ID + }, jsx:decode(Bridge)), + + %% then we try to update 'server' of the connector, to an unavailable IP address + %% the update should fail because of 'unreachable' or 'connrefused' + {ok, 400, _ErrorMsg} = request(put, uri(["connectors", ?CONNECTR_ID]), + ?MQTT_CONNECOTR2(<<"127.0.0.1:2883">>)), + %% we fix the 'server' parameter to a normal one, it should work + {ok, 200, _} = request(put, uri(["connectors", ?CONNECTR_ID]), + ?MQTT_CONNECOTR2(<<"127.0.0.1 : 1883">>)), + %% delete the bridge + {ok, 204, <<>>} = request(delete, uri(["bridges", ?BRIDGE_ID_EGRESS]), []), + {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), + + %% delete the connector + {ok, 204, <<>>} = request(delete, uri(["connectors", ?CONNECTR_ID]), []), + {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []). + +t_mqtt_conn_testing(_) -> + %% APIs for testing the connectivity + %% then we add a mqtt connector, using POST + {ok, 200, <<>>} = request(post, uri(["connectors_test"]), + ?MQTT_CONNECOTR2(<<"127.0.0.1:1883">>)#{<<"bridge_type">> => <<"mqtt">>}), + {ok, 400, _} = request(post, uri(["connectors_test"]), + ?MQTT_CONNECOTR2(<<"127.0.0.1:2883">>)#{<<"bridge_type">> => <<"mqtt">>}). + +%%-------------------------------------------------------------------- +%% HTTP Request +%%-------------------------------------------------------------------- +-define(HOST, "http://127.0.0.1:18083/"). +-define(API_VERSION, "v5"). +-define(BASE_PATH, "api"). + +request(Method, Url, Body) -> + Request = case Body of + [] -> {Url, [auth_header_()]}; + _ -> {Url, [auth_header_()], "application/json", jsx:encode(Body)} + end, + ct:pal("Method: ~p, Request: ~p", [Method, Request]), + case httpc:request(Method, Request, [], [{body_format, binary}]) of + {error, socket_closed_remotely} -> + {error, socket_closed_remotely}; + {ok, {{"HTTP/1.1", Code, _}, _Headers, Return} } -> + {ok, Code, Return}; + {ok, {Reason, _, _}} -> + {error, Reason} + end. + +uri() -> uri([]). +uri(Parts) when is_list(Parts) -> + NParts = [E || E <- Parts], + ?HOST ++ filename:join([?BASE_PATH, ?API_VERSION | NParts]). + +auth_header_() -> + Username = <<"admin">>, + Password = <<"public">>, + {ok, Token} = emqx_dashboard_admin:sign_token(Username, Password), + {"Authorization", "Bearer " ++ binary_to_list(Token)}. 
+ diff --git a/apps/emqx_dashboard/etc/emqx_dashboard.conf b/apps/emqx_dashboard/etc/emqx_dashboard.conf index 70b1d1d71..ba2a68eeb 100644 --- a/apps/emqx_dashboard/etc/emqx_dashboard.conf +++ b/apps/emqx_dashboard/etc/emqx_dashboard.conf @@ -37,4 +37,7 @@ emqx_dashboard { # ciphers = ["TLS_AES_256_GCM_SHA384","TLS_AES_128_GCM_SHA256","TLS_CHACHA20_POLY1305_SHA256","TLS_AES_128_CCM_SHA256","TLS_AES_128_CCM_8_SHA256","ECDHE-ECDSA-AES256-GCM-SHA384","ECDHE-RSA-AES256-GCM-SHA384","ECDHE-ECDSA-AES256-SHA384","ECDHE-RSA-AES256-SHA384","ECDHE-ECDSA-DES-CBC3-SHA","ECDH-ECDSA-AES256-GCM-SHA384","ECDH-RSA-AES256-GCM-SHA384","ECDH-ECDSA-AES256-SHA384","ECDH-RSA-AES256-SHA384","DHE-DSS-AES256-GCM-SHA384","DHE-DSS-AES256-SHA256","AES256-GCM-SHA384","AES256-SHA256","ECDHE-ECDSA-AES128-GCM-SHA256","ECDHE-RSA-AES128-GCM-SHA256","ECDHE-ECDSA-AES128-SHA256","ECDHE-RSA-AES128-SHA256","ECDH-ECDSA-AES128-GCM-SHA256","ECDH-RSA-AES128-GCM-SHA256","ECDH-ECDSA-AES128-SHA256","ECDH-RSA-AES128-SHA256","DHE-DSS-AES128-GCM-SHA256","DHE-DSS-AES128-SHA256","AES128-GCM-SHA256","AES128-SHA256","ECDHE-ECDSA-AES256-SHA","ECDHE-RSA-AES256-SHA","DHE-DSS-AES256-SHA","ECDH-ECDSA-AES256-SHA","ECDH-RSA-AES256-SHA","AES256-SHA","ECDHE-ECDSA-AES128-SHA","ECDHE-RSA-AES128-SHA","DHE-DSS-AES128-SHA","ECDH-ECDSA-AES128-SHA","ECDH-RSA-AES128-SHA","AES128-SHA"] # } ] + + ## CORS Support. don't set cors true if you don't know what it means. + # cors = false } diff --git a/apps/emqx_dashboard/include/emqx_dashboard.hrl b/apps/emqx_dashboard/include/emqx_dashboard.hrl index e7fb4557b..db7bc8007 100644 --- a/apps/emqx_dashboard/include/emqx_dashboard.hrl +++ b/apps/emqx_dashboard/include/emqx_dashboard.hrl @@ -13,25 +13,27 @@ %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- +-define(ADMIN, emqx_admin). --record(emqx_admin, { +-record(?ADMIN, { username :: binary(), pwdhash :: binary(), - tags :: list() | binary(), + description :: binary(), role = undefined :: atom(), extra = [] :: term() %% not used so far, for future extension }). --define(ADMIN, emqx_admin). --record(emqx_admin_jwt, { +-define(ADMIN_JWT, emqx_admin_jwt). + +-record(?ADMIN_JWT, { token :: binary(), username :: binary(), exptime :: integer(), extra = [] :: term() %% not used so far, fur future extension }). --define(ADMIN_JWT, emqx_admin_jwt). +-define(TAB_COLLECT, emqx_collect). -define(EMPTY_KEY(Key), ((Key == undefined) orelse (Key == <<>>))). diff --git a/apps/emqx_dashboard/src/emqx_dashboard.erl b/apps/emqx_dashboard/src/emqx_dashboard.erl index 2a5066d50..0dbcb8f1e 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard.erl @@ -29,6 +29,8 @@ -define(BASE_PATH, "/api/v5"). +-define(EMQX_MIDDLE, emqx_dashboard_middleware). 
+ %%-------------------------------------------------------------------- %% Start/Stop Listeners %%-------------------------------------------------------------------- @@ -38,26 +40,33 @@ start_listeners() -> Authorization = {?MODULE, authorize_appid}, GlobalSpec = #{ openapi => "3.0.0", - info => #{title => "EMQ X Dashboard API", version => "5.0.0"}, + info => #{title => "EMQ X API", version => "5.0.0"}, servers => [#{url => ?BASE_PATH}], components => #{ schemas => #{}, - securitySchemes => #{ + 'securitySchemes' => #{ application => #{ - type => apiKey, + type => 'apiKey', name => "authorization", in => header}}}}, - Dispatch = [{"/", cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}}, - {"/static/[...]", cowboy_static, {priv_dir, emqx_dashboard, "www/static"}}, - {'_', cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}} - ], + Dispatch = + case os:getenv("_EMQX_ENABLE_DASHBOARD") of + V when V =:= "true" orelse V =:= "1" -> + [{"/", cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}}, + {"/static/[...]", cowboy_static, {priv_dir, emqx_dashboard, "www/static"}}, + {'_', cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}} + ]; + _ -> + [] + end, BaseMinirest = #{ base_path => ?BASE_PATH, modules => minirest_api:find_api_modules(apps()), authorization => Authorization, security => [#{application => []}], swagger_global_spec => GlobalSpec, - dispatch => Dispatch + dispatch => Dispatch, + middlewares => [cowboy_router, ?EMQX_MIDDLE, cowboy_handler] }, [begin Minirest = maps:put(protocol, Protocol, BaseMinirest), @@ -101,19 +110,16 @@ ranch_opts(RanchOptions) -> R#{socket_opts => maps:fold(fun key_only/3, [], S)}. -key_take({K, K1}, {All, R}) -> +key_take(Key, {All, R}) -> + {K, KX} = case Key of + {K1, K2} -> {K1, K2}; + _ -> {Key, Key} + end, case maps:get(K, All, undefined) of undefined -> {All, R}; V -> - {maps:remove(K, All), R#{K1 => V}} - end; -key_take(K, {All, R}) -> - case maps:get(K, All, undefined) of - undefined -> - {All, R}; - V -> - {maps:remove(K, All), R#{K => V}} + {maps:remove(K, All), R#{KX => V}} end. key_only(K , true , S) -> [K | S]; diff --git a/apps/emqx_dashboard/src/emqx_dashboard_admin.erl b/apps/emqx_dashboard/src/emqx_dashboard_admin.erl index 9622e6ed8..8f40427e5 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_admin.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_admin.erl @@ -19,6 +19,7 @@ -module(emqx_dashboard_admin). -include("emqx_dashboard.hrl"). +-include_lib("stdlib/include/ms_transform.hrl"). -boot_mnesia({mnesia, [boot]}). @@ -63,17 +64,17 @@ mnesia(boot) -> %% API %%-------------------------------------------------------------------- --spec(add_user(binary(), binary(), binary()) -> ok | {error, any()}). -add_user(Username, Password, Tags) when is_binary(Username), is_binary(Password) -> - Admin = #?ADMIN{username = Username, pwdhash = hash(Password), tags = Tags}, - return(mria:transaction(?DASHBOARD_SHARD, fun add_user_/1, [Admin])). +-spec(add_user(binary(), binary(), binary()) -> {ok, map()} | {error, any()}). +add_user(Username, Password, Desc) + when is_binary(Username), is_binary(Password) -> + return(mria:transaction(?DASHBOARD_SHARD, fun add_user_/3, [Username, Password, Desc])). 
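Note: the unified key_take/2 in emqx_dashboard.erl above folds listener options into ranch options, renaming keys given as {From, To} pairs and passing bare keys through unchanged. A self-contained sketch of that behaviour (the option names are made up for illustration):

-module(key_take_sketch).
-export([demo/0]).

%% local copy of the unified clause, so the example stands on its own
key_take(Key, {All, R}) ->
    {K, KX} = case Key of
                  {K1, K2} -> {K1, K2};
                  _ -> {Key, Key}
              end,
    case maps:get(K, All, undefined) of
        undefined -> {All, R};
        V -> {maps:remove(K, All), R#{KX => V}}
    end.

demo() ->
    Opts = #{num_acceptors => 4, max_connections => 512},
    lists:foldl(fun key_take/2, {Opts, #{}}, [num_acceptors, {max_connections, max_conns}]).
    %% => {#{}, #{num_acceptors => 4, max_conns => 512}}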
%% black-magic: force overwrite a user -force_add_user(Username, Password, Tags) -> +force_add_user(Username, Password, Desc) -> AddFun = fun() -> mnesia:write(#?ADMIN{username = Username, pwdhash = hash(Password), - tags = Tags}) + description = Desc}) end, case mria:transaction(?DASHBOARD_SHARD, AddFun) of {atomic, ok} -> ok; @@ -81,33 +82,38 @@ force_add_user(Username, Password, Tags) -> end. %% @private -add_user_(Admin = #?ADMIN{username = Username}) -> +add_user_(Username, Password, Desc) -> case mnesia:wread({?ADMIN, Username}) of - [] -> mnesia:write(Admin); - [_] -> mnesia:abort(<<"Username Already Exist">>) + [] -> + Admin = #?ADMIN{username = Username, pwdhash = hash(Password), description = Desc}, + mnesia:write(Admin), + #{username => Username, description => Desc}; + [_] -> + mnesia:abort(<<"Username Already Exist">>) end. --spec(remove_user(binary()) -> ok | {error, any()}). +-spec(remove_user(binary()) -> {ok, any()} | {error, any()}). remove_user(Username) when is_binary(Username) -> Trans = fun() -> case lookup_user(Username) of - [] -> - mnesia:abort(<<"Username Not Found">>); - _ -> ok - end, - mnesia:delete({?ADMIN, Username}) + [] -> mnesia:abort(<<"Username Not Found">>); + _ -> mnesia:delete({?ADMIN, Username}) + end end, return(mria:transaction(?DASHBOARD_SHARD, Trans)). --spec(update_user(binary(), binary()) -> ok | {error, term()}). -update_user(Username, Tags) when is_binary(Username) -> - return(mria:transaction(?DASHBOARD_SHARD, fun update_user_/2, [Username, Tags])). +-spec(update_user(binary(), binary()) -> {ok, map()} | {error, term()}). +update_user(Username, Desc) when is_binary(Username) -> + return(mria:transaction(?DASHBOARD_SHARD, fun update_user_/2, [Username, Desc])). %% @private -update_user_(Username, Tags) -> +update_user_(Username, Desc) -> case mnesia:wread({?ADMIN, Username}) of - [] -> mnesia:abort(<<"Username Not Found">>); - [Admin] -> mnesia:write(Admin#?ADMIN{tags = Tags}) + [] -> + mnesia:abort(<<"Username Not Found">>); + [Admin] -> + mnesia:write(Admin#?ADMIN{description = Desc}), + #{username => Username, description => Desc} end. change_password(Username, OldPasswd, NewPasswd) when is_binary(Username) -> @@ -146,17 +152,15 @@ lookup_user(Username) when is_binary(Username) -> -spec(all_users() -> [map()]). all_users() -> lists:map(fun(#?ADMIN{username = Username, - tags = Tags + description = Desc }) -> #{username => Username, - %% named tag but not tags, for unknown reason - %% TODO: fix this comment - tag => Tags + description => Desc } end, ets:tab2list(?ADMIN)). -return({atomic, _}) -> - ok; +return({atomic, Result}) -> + {ok, Result}; return({aborted, Reason}) -> {error, Reason}. @@ -167,7 +171,7 @@ check(_, undefined) -> check(Username, Password) -> case lookup_user(Username) of [#?ADMIN{pwdhash = <>}] -> - case Hash =:= md5_hash(Salt, Password) of + case Hash =:= sha256(Salt, Password) of true -> ok; false -> {error, <<"BAD_USERNAME_OR_PASSWORD">>} end; @@ -201,16 +205,11 @@ destroy_token_by_username(Username, Token) -> %%-------------------------------------------------------------------- hash(Password) -> - SaltBin = salt(), - <>. + SaltBin = emqx_dashboard_token:salt(), + <>. -md5_hash(SaltBin, Password) -> - erlang:md5(<>). - -salt() -> - _ = emqx_misc:rand_seed(), - Salt = rand:uniform(16#ffffffff), - <>. +sha256(SaltBin, Password) -> + crypto:hash('sha256', <>). add_default_user() -> add_default_user(binenv(default_username), binenv(default_password)). 
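Note: the admin password hashing above moves from unsalted MD5 to salted SHA-256; the stored value is the salt followed by sha256(salt ++ password). A self-contained sketch of the scheme (the real code takes its 4-byte salt from emqx_dashboard_token:salt/0; here random bytes stand in for it):

-module(pwdhash_sketch).
-export([hash/1, check/2]).

%% store: 4-byte salt ++ sha256(salt ++ password)
hash(Password) when is_binary(Password) ->
    Salt = crypto:strong_rand_bytes(4),
    <<Salt/binary, (crypto:hash(sha256, <<Salt/binary, Password/binary>>))/binary>>.

%% check: split off the salt, re-derive the digest and compare
check(Password, <<Salt:4/binary, Digest/binary>>) ->
    Digest =:= crypto:hash(sha256, <<Salt/binary, Password/binary>>).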
@@ -224,5 +223,5 @@ add_default_user(Username, Password) when ?EMPTY_KEY(Username) orelse ?EMPTY_KEY add_default_user(Username, Password) -> case lookup_user(Username) of [] -> add_user(Username, Password, <<"administrator">>); - _ -> ok + _ -> {ok, default_user_exists} end. diff --git a/apps/emqx_dashboard/src/emqx_dashboard_api.erl b/apps/emqx_dashboard/src/emqx_dashboard_api.erl index 5cc4d2a16..c40c4bd22 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_api.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_api.erl @@ -16,16 +16,6 @@ -module(emqx_dashboard_api). --ifndef(EMQX_ENTERPRISE). - --define(RELEASE, community). - --else. - --define(VERSION, enterprise). - --endif. - -behaviour(minirest_api). -include("emqx_dashboard.hrl"). @@ -37,27 +27,33 @@ -define(EMPTY(V), (V == undefined orelse V == <<>>)). -define(ERROR_USERNAME_OR_PWD, 'ERROR_USERNAME_OR_PWD'). +-define(USER_NOT_FOUND_BODY, #{ code => <<"USER_NOT_FOUND">> + , message => <<"User not found">>}). + namespace() -> "dashboard". api_spec() -> - emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}). -paths() -> ["/login", "/logout", "/users", - "/users/:username", "/users/:username/change_pwd"]. +paths() -> + [ "/login" + , "/logout" + , "/users" + , "/users/:username" + , "/users/:username/change_pwd"]. schema("/login") -> #{ - operationId => login, + 'operationId' => login, post => #{ tags => [<<"dashboard">>], description => <<"Dashboard Auth">>, summary => <<"Dashboard Auth">>, - requestBody => - [ + 'requestBody' => [ {username, mk(binary(), #{desc => <<"The User for which to create the token.">>, - maxLength => 100, example => <<"admin">>})}, + 'maxLength' => 100, example => <<"admin">>})}, {password, mk(binary(), #{desc => "password", example => "public"})} ], @@ -67,105 +63,126 @@ schema("/login") -> {license, [{edition, mk(enum([community, enterprise]), #{desc => <<"license">>, example => "community"})}]}, - {version, mk(string(), #{desc => <<"version">>, example => <<"5.0.0">>})}], + {version, mk(string(), #{desc => <<"version">>, example => <<"5.0.0">>})} + ], 401 => [ {code, mk(string(), #{example => 'ERROR_USERNAME_OR_PWD'})}, - {message, mk(string(), #{example => "Unauthorized"})}] + {message, mk(string(), #{example => "Unauthorized"})} + ] }, security => [] }}; schema("/logout") -> #{ - operationId => logout, + 'operationId' => logout, post => #{ tags => [<<"dashboard">>], description => <<"Dashboard User logout">>, - requestBody => [ + 'requestBody' => [ {username, mk(binary(), #{desc => <<"The User for which to create the token.">>, - maxLength => 100, example => <<"admin">>})} + 'maxLength' => 100, example => <<"admin">>})} ], responses => #{ - 200 => <<"Dashboard logout successfully">> + 204 => <<"Dashboard logout successfully">> } } }; schema("/users") -> #{ - operationId => users, + 'operationId' => users, get => #{ tags => [<<"dashboard">>], - description => <<"Get dashboard users">>, + description => <<"Get dashboard users list">>, responses => #{ - 200 => mk(array(ref(?MODULE, user)), - #{desc => "User lists"}) + 200 => mk( array(ref(?MODULE, user)) + , #{desc => "User lists"}) } }, post => #{ tags => [<<"dashboard">>], description => <<"Create dashboard users">>, - requestBody => fields(user_password), + 'requestBody' => fields(user_password), responses => #{ - 200 => <<"Create user successfully">>, + 200 => mk( ref(?MODULE, user) + , #{desc => <<"Create User successfully">>}), 400 => [{code, mk(string(), 
#{example => 'CREATE_FAIL'})}, - {message, mk(string(), #{example => "Create user failed"})}]} + {message, mk(string(), #{example => "Create user failed"})} + ] + } } }; schema("/users/:username") -> #{ - operationId => user, + 'operationId' => user, put => #{ tags => [<<"dashboard">>], description => <<"Update dashboard users">>, parameters => [{username, mk(binary(), #{in => path, example => <<"admin">>})}], - requestBody => [{tag, mk(binary(), #{desc => <<"Tag">>})}], + 'requestBody' => [ + { description + , mk(binary(), #{desc => <<"User description">>, example => <<"administrator">>})} + ], responses => #{ - 200 => <<"Update User successfully">>, - 400 => [{code, mk(string(), #{example => 'UPDATE_FAIL'})}, - {message, mk(string(), #{example => "Update Failed unknown"})}]}}, + 200 => mk( ref(?MODULE, user) + , #{desc => <<"Update User successfully">>}), + 400 => [ + {code, mk(string(), #{example => 'UPDATE_FAIL'})}, + {message, mk(string(), #{example => "Update Failed unknown"})} + ], + 404 => emqx_dashboard_swagger:error_codes(['USER_NOT_FOUND'], <<"User Not Found">>) + } + }, delete => #{ tags => [<<"dashboard">>], description => <<"Delete dashboard users">>, parameters => [{username, mk(binary(), #{in => path, example => <<"admin">>})}], responses => #{ - 200 => <<"Delete User successfully">>, + 204 => <<"Delete User successfully">>, 400 => [ {code, mk(string(), #{example => 'CANNOT_DELETE_ADMIN'})}, - {message, mk(string(), #{example => "CANNOT DELETE ADMIN"})}]}} + {message, mk(string(), #{example => "CANNOT DELETE ADMIN"})} + ], + 404 => emqx_dashboard_swagger:error_codes(['USER_NOT_FOUND'], <<"User Not Found">>) + } + } }; schema("/users/:username/change_pwd") -> #{ - operationId => change_pwd, + 'operationId' => change_pwd, put => #{ tags => [<<"dashboard">>], description => <<"Update dashboard users password">>, parameters => [{username, mk(binary(), #{in => path, required => true, example => <<"admin">>})}], - requestBody => [ + 'requestBody' => [ {old_pwd, mk(binary(), #{required => true})}, {new_pwd, mk(binary(), #{required => true})} ], responses => #{ - 200 => <<"Update user password successfully">>, + 204 => <<"Update user password successfully">>, 400 => [ {code, mk(string(), #{example => 'UPDATE_FAIL'})}, - {message, mk(string(), #{example => "Failed Reason"})}]}} + {message, mk(string(), #{example => "Failed Reason"})} + ] + } + } }. fields(user) -> [ - {tag, + {description, mk(binary(), - #{desc => <<"tag">>, example => "administrator"})}, + #{desc => <<"User description">>, example => "administrator"})}, {username, mk(binary(), #{desc => <<"username">>, example => "emqx"})} ]; fields(user_password) -> - fields(user) ++ [{password, mk(binary(), #{desc => "Password"})}]. + fields(user) ++ [{password, mk(binary(), #{desc => "Password", example => <<"public">>})}]. login(post, #{body := Params}) -> Username = maps:get(<<"username">>, Params), @@ -173,7 +190,10 @@ login(post, #{body := Params}) -> case emqx_dashboard_admin:sign_token(Username, Password) of {ok, Token} -> Version = iolist_to_binary(proplists:get_value(version, emqx_sys:info())), - {200, #{token => Token, version => Version, license => #{edition => ?RELEASE}}}; + {200, #{token => Token, + version => Version, + license => #{edition => emqx_release:edition()} + }}; {error, _} -> {401, #{code => ?ERROR_USERNAME_OR_PWD, message => <<"Auth filed">>}} end. 
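Note: the login handler above now reports the edition via emqx_release:edition() and returns the signed token. A minimal sketch of logging in over HTTP from an Erlang shell, assuming the default dashboard listener and the admin/public credentials used by the test suites in this PR:

login_sketch() ->
    {ok, _} = application:ensure_all_started(inets),
    Body = jsx:encode(#{<<"username">> => <<"admin">>, <<"password">> => <<"public">>}),
    {ok, {{_, 200, _}, _Headers, Resp}} =
        httpc:request(post,
                      {"http://127.0.0.1:18083/api/v5/login", [],
                       "application/json", Body},
                      [], [{body_format, binary}]),
    #{<<"token">> := Token} = jsx:decode(Resp),
    Token.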
@@ -182,7 +202,7 @@ logout(_, #{body := #{<<"username">> := Username}, headers := #{<<"authorization">> := <<"Bearer ", Token/binary>>}}) -> case emqx_dashboard_admin:destroy_token_by_username(Username, Token) of ok -> - 200; + 204; _R -> {401, 'BAD_TOKEN_OR_USERNAME', <<"Ensure your token & username">>} end. @@ -191,7 +211,7 @@ users(get, _Request) -> {200, emqx_dashboard_admin:all_users()}; users(post, #{body := Params}) -> - Tag = maps:get(<<"tag">>, Params), + Desc = maps:get(<<"description">>, Params), Username = maps:get(<<"username">>, Params), Password = maps:get(<<"password">>, Params), case ?EMPTY(Username) orelse ?EMPTY(Password) of @@ -199,35 +219,43 @@ users(post, #{body := Params}) -> {400, #{code => <<"CREATE_USER_FAIL">>, message => <<"Username or password undefined">>}}; false -> - case emqx_dashboard_admin:add_user(Username, Password, Tag) of - ok -> {200}; + case emqx_dashboard_admin:add_user(Username, Password, Desc) of + {ok, Result} -> + {200, Result}; {error, Reason} -> {400, #{code => <<"CREATE_USER_FAIL">>, message => Reason}} end end. user(put, #{bindings := #{username := Username}, body := Params}) -> - Tag = maps:get(<<"tag">>, Params), - case emqx_dashboard_admin:update_user(Username, Tag) of - ok -> {200}; - {error, Reason} -> - {400, #{code => <<"UPDATE_FAIL">>, message => Reason}} + Desc = maps:get(<<"description">>, Params), + case emqx_dashboard_admin:update_user(Username, Desc) of + {ok, Result} -> + {200, Result}; + {error, _Reason} -> + {404, ?USER_NOT_FOUND_BODY} end; user(delete, #{bindings := #{username := Username}}) -> case Username == <<"admin">> of - true -> {400, #{code => <<"CANNOT_DELETE_ADMIN">>, - message => <<"Cannot delete admin">>}}; + true -> + {400, #{code => <<"ACTION_NOT_ALLOWED">>, + message => <<"Cannot delete admin">>}}; false -> - _ = emqx_dashboard_admin:remove_user(Username), - {200} + case emqx_dashboard_admin:remove_user(Username) of + {error, _Reason} -> + {404, ?USER_NOT_FOUND_BODY}; + {ok, _} -> + {204} + end end. change_pwd(put, #{bindings := #{username := Username}, body := Params}) -> OldPwd = maps:get(<<"old_pwd">>, Params), NewPwd = maps:get(<<"new_pwd">>, Params), case emqx_dashboard_admin:change_password(Username, OldPwd, NewPwd) of - ok -> {200}; + {ok, _} -> + {204}; {error, Reason} -> {400, #{code => <<"CHANGE_PWD_FAIL">>, message => Reason}} end. diff --git a/apps/emqx_dashboard/src/emqx_dashboard_app.erl b/apps/emqx_dashboard/src/emqx_dashboard_app.erl index 16acdb182..b6fb8dcb8 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_app.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_app.erl @@ -29,7 +29,7 @@ start(_StartType, _StartArgs) -> ok = mria_rlog:wait_for_shards([?DASHBOARD_SHARD], infinity), _ = emqx_dashboard:start_listeners(), emqx_dashboard_cli:load(), - ok = emqx_dashboard_admin:add_default_user(), + {ok, _Result} = emqx_dashboard_admin:add_default_user(), {ok, Sup}. 
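Note: with the changes above, successful writes in emqx_dashboard_admin return {ok, Map} rather than bare ok, which is why the HTTP handlers can echo the result body and emqx_dashboard_app asserts {ok, _Result}. A sketch of the new call shapes (usernames and descriptions are illustrative only):

admin_calls_sketch() ->
    {ok, #{username := U, description := D}} =
        emqx_dashboard_admin:add_user(<<"viewer">>, <<"secret">>, <<"read-only user">>),
    {ok, #{username := U}} =
        emqx_dashboard_admin:update_user(U, <<"updated: ", D/binary>>),
    {ok, _} = emqx_dashboard_admin:remove_user(U).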
stop(_State) -> diff --git a/apps/emqx_dashboard/src/emqx_dashboard_cli.erl b/apps/emqx_dashboard/src/emqx_dashboard_cli.erl index 14c5009ff..a651b5cac 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_cli.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_cli.erl @@ -27,9 +27,9 @@ load() -> admins(["add", Username, Password]) -> admins(["add", Username, Password, ""]); -admins(["add", Username, Password, Tag]) -> - case emqx_dashboard_admin:add_user(bin(Username), bin(Password), bin(Tag)) of - ok -> +admins(["add", Username, Password, Desc]) -> + case emqx_dashboard_admin:add_user(bin(Username), bin(Password), bin(Desc)) of + {ok, _} -> emqx_ctl:print("ok~n"); {error, already_existed} -> emqx_ctl:print("Error: already existed~n"); @@ -46,9 +46,10 @@ admins(["del", Username]) -> emqx_ctl:print("~p~n", [Status]); admins(_) -> - emqx_ctl:usage([{"admins add ", "Add dashboard user"}, - {"admins passwd ", "Reset dashboard user password"}, - {"admins del ", "Delete dashboard user" }]). + emqx_ctl:usage( + [{"admins add ", "Add dashboard user"}, + {"admins passwd ", "Reset dashboard user password"}, + {"admins del ", "Delete dashboard user" }]). unload() -> emqx_ctl:unregister_command(admins). diff --git a/apps/emqx_dashboard/src/emqx_dashboard_collection.erl b/apps/emqx_dashboard/src/emqx_dashboard_collection.erl index ab5767229..dc9c894b6 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_collection.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_collection.erl @@ -40,10 +40,10 @@ -define(EXPIRE_INTERVAL, 86400000 * 7). mnesia(boot) -> - ok = mria:create_table(emqx_collect, [ + ok = mria:create_table(?TAB_COLLECT, [ {type, set}, {local_content, true}, - {storage, disc_only_copies}, + {storage, disc_copies}, {record_name, mqtt_collect}, {attributes, record_info(fields, mqtt_collect)}]). @@ -87,7 +87,10 @@ handle_call(_Req, _From, State) -> handle_cast(_Req, State) -> {noreply, State}. -handle_info(collect, State = #{collect := Collect, count := 1, temp_collect := TempCollect, last_collects := LastCollect}) -> +handle_info(collect, State = #{ collect := Collect + , count := 1 + , temp_collect := TempCollect + , last_collects := LastCollect}) -> timer(next_interval(), collect), NewLastCollect = flush(collect_all(Collect), LastCollect), TempCollect1 = temp_collect(TempCollect), @@ -107,9 +110,9 @@ handle_info(clear_expire_data, State = #{expire_interval := ExpireInterval}) -> timer(?CLEAR_INTERVAL, clear_expire_data), T1 = get_local_time(), Spec = ets:fun2ms(fun({_, T, _C} = Data) when (T1 - T) > ExpireInterval -> Data end), - Collects = dets:select(emqx_collect, Spec), + Collects = ets:select(?TAB_COLLECT, Spec), lists:foreach(fun(Collect) -> - dets:delete_object(emqx_collect, Collect) + true = ets:delete_object(?TAB_COLLECT, Collect) end, Collects), {noreply, State, hibernate}; @@ -131,9 +134,9 @@ temp_collect({_, _, Received, Sent}) -> Sent1}. collect_all({Connection, Route, Subscription}) -> - {[collect(connections)| Connection], - [collect(routes)| Route], - [collect(subscriptions)| Subscription]}. + {[collect(connections) | Connection], + [collect(routes) | Route], + [collect(subscriptions) | Subscription]}. 
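Note: emqx_dashboard_collection above switches the collect table from disc_only_copies to disc_copies, so expired samples can be selected and deleted through ets instead of dets. A sketch of that query pattern, assuming the {mqtt_collect, Timestamp, Collect} record layout shown in the diff:

-include_lib("stdlib/include/ms_transform.hrl").

expired_collect_rows(Now, ExpireInterval) ->
    %% keep the rows older than the expiry window so the caller can
    %% ets:delete_object/2 them one by one
    Spec = ets:fun2ms(fun({_, T, _C} = Row) when (Now - T) > ExpireInterval -> Row end),
    ets:select(emqx_collect, Spec).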
collect(connections) -> emqx_stats:getstat('connections.count'); @@ -159,8 +162,11 @@ flush({Connection, Route, Subscription}, {Received0, Sent0, Dropped0}) -> diff(Sent, Sent0), diff(Dropped, Dropped0)}, Ts = get_local_time(), - _ = mria:transaction(mria:local_content_shard(), - fun mnesia:write/1, [#mqtt_collect{timestamp = Ts, collect = Collect}]), + {atomic, ok} = mria:transaction(mria:local_content_shard(), + fun mnesia:write/3, + [ ?TAB_COLLECT + , #mqtt_collect{timestamp = Ts, collect = Collect} + , write]), {Received, Sent, Dropped}. avg(Items) -> diff --git a/apps/emqx_resource/src/emqx_resource_uitils.erl b/apps/emqx_dashboard/src/emqx_dashboard_middleware.erl similarity index 61% rename from apps/emqx_resource/src/emqx_resource_uitils.erl rename to apps/emqx_dashboard/src/emqx_dashboard_middleware.erl index ab3f6dd1e..8a7de99b8 100644 --- a/apps/emqx_resource/src/emqx_resource_uitils.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_middleware.erl @@ -13,4 +13,21 @@ %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_resource_uitils). \ No newline at end of file + +-module(emqx_dashboard_middleware). + +-behaviour(cowboy_middleware). + +-export([execute/2]). + +execute(Req, Env) -> + CORS = emqx_conf:get([emqx_dashboard, cors], false), + case CORS andalso cowboy_req:header(<<"origin">>, Req, undefined) of + false -> + {ok, Req, Env}; + undefined -> + {ok, Req, Env}; + _ -> + Req2 = cowboy_req:set_resp_header(<<"Access-Control-Allow-Origin">>, <<"*">>, Req), + {ok, Req2, Env} + end. diff --git a/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl b/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl index ba2fc6dbe..efbb973da 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl @@ -18,6 +18,10 @@ , current_counters/2 ]). +-export([ sampling/1 + , sampling/2 + ]). + -define(COUNTERS, [ connection , route , subscriptions @@ -174,8 +178,10 @@ format_current_metrics(Collects) -> format_current_metrics(Collects, {0,0,0,0}). format_current_metrics([], Acc) -> Acc; -format_current_metrics([{Received, Sent, Sub, Conn} | Collects], {Received1, Sent1, Sub1, Conn1}) -> - format_current_metrics(Collects, {Received1 + Received, Sent1 + Sent, Sub1 + Sub, Conn1 + Conn}). +format_current_metrics([{Received, Sent, Sub, Conn} | Collects], + {Received1, Sent1, Sub1, Conn1}) -> + format_current_metrics(Collects, + {Received1 + Received, Sent1 + Sent, Sub1 + Sub, Conn1 + Conn}). %%%============================================================================================== @@ -260,19 +266,19 @@ key_replace([Term | List], All, Comparison, Default) -> end. sampling(Node) when Node =:= node() -> - Time = emqx_dashboard_collection:get_local_time() - 7200000, - All = dets:select(emqx_collect, [{{mqtt_collect,'$1','$2'}, [{'>', '$1', Time}], ['$_']}]), - format(lists:sort(All)); + format(lists:sort(select_data())); sampling(Node) -> rpc:call(Node, ?MODULE, sampling, [Node]). sampling(Node, Counter) when Node =:= node() -> - Time = emqx_dashboard_collection:get_local_time() - 7200000, - All = dets:select(emqx_collect, [{{mqtt_collect,'$1','$2'}, [{'>', '$1', Time}], ['$_']}]), - format_single(lists:sort(All), Counter); + format_single(lists:sort(select_data()), Counter); sampling(Node, Counter) -> rpc:call(Node, ?MODULE, sampling, [Node, Counter]). 
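Note: the new emqx_dashboard_middleware above only adds the Access-Control-Allow-Origin header when the (default-off) cors option is enabled and the request carries an Origin header. A hedged sketch of probing that from an Erlang shell against a locally running dashboard (port and path are the defaults assumed elsewhere in this PR):

cors_probe() ->
    {ok, _} = application:ensure_all_started(inets),
    {ok, {{_, _Code, _}, Headers, _Body}} =
        httpc:request(get,
                      {"http://127.0.0.1:18083/api/v5/status",
                       [{"origin", "http://example.com"}]},
                      [], []),
    %% expected to be "*" once cors = true is set in the emqx_dashboard config
    proplists:get_value("access-control-allow-origin", Headers).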
+select_data() -> + Time = emqx_dashboard_collection:get_local_time() - 7200000, + ets:select(?TAB_COLLECT, [{{mqtt_collect,'$1','$2'}, [{'>', '$1', Time}], ['$_']}]). + format(Collects) -> format(Collects, {[],[],[],[],[],[]}). format([], {Connection, Route, Subscription, Received, Sent, Dropped}) -> diff --git a/apps/emqx_dashboard/src/emqx_dashboard_schema.erl b/apps/emqx_dashboard/src/emqx_dashboard_schema.erl index 58e154da8..8ef95a8cf 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_schema.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_schema.erl @@ -31,6 +31,7 @@ fields("emqx_dashboard") -> , {default_password, fun default_password/1} , {sample_interval, sc(emqx_schema:duration_s(), #{default => "10s"})} , {token_expired_time, sc(emqx_schema:duration(), #{default => "30m"})} + , {cors, fun cors/1} ]; fields("http") -> @@ -41,7 +42,7 @@ fields("http") -> , {"backlog", sc(integer(), #{default => 1024})} , {"send_timeout", sc(emqx_schema:duration(), #{default => "5s"})} , {"inet6", sc(boolean(), #{default => false})} - , {"ipv6_v6only", sc(boolean(), #{dfeault => false})} + , {"ipv6_v6only", sc(boolean(), #{default => false})} ]; fields("https") -> @@ -63,4 +64,9 @@ The initial default password for dashboard 'admin' user. For safty, it should be changed as soon as possible."""; default_password(_) -> undefined. +cors(type) -> boolean(); +cors(default) -> false; +cors(nullable) -> true; +cors(_) -> undefined. + sc(Type, Meta) -> hoconsc:mk(Type, Meta). diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index 1c8aeaf83..b6724f701 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -5,16 +5,16 @@ %% API -export([spec/1, spec/2]). --export([translate_req/2]). -export([namespace/0, fields/1]). +-export([schema_with_example/2, schema_with_examples/2]). -export([error_codes/1, error_codes/2]). --define(MAX_ROW_LIMIT, 100). -%% API +-export([filter_check_request/2, filter_check_request_and_translate_body/2]). -ifdef(TEST). --compile(export_all). --compile(nowarn_export_all). +-export([ parse_spec_ref/2 + , components/1 + ]). -endif. -define(METHODS, [get, post, put, head, delete, patch, options, trace]). @@ -22,33 +22,42 @@ -define(DEFAULT_FIELDS, [example, allowReserved, style, explode, maxLength, allowEmptyValue, deprecated, minimum, maximum]). --define(DEFAULT_FILTER, #{filter => fun ?MODULE:translate_req/2}). - --define(INIT_SCHEMA, #{fields => #{}, translations => #{}, validations => [], namespace => undefined}). +-define(INIT_SCHEMA, #{fields => #{}, translations => #{}, + validations => [], namespace => undefined}). -define(TO_REF(_N_, _F_), iolist_to_binary([to_bin(_N_), ".", to_bin(_F_)])). --define(TO_COMPONENTS_SCHEMA(_M_, _F_), iolist_to_binary([<<"#/components/schemas/">>, ?TO_REF(namespace(_M_), _F_)])). --define(TO_COMPONENTS_PARAM(_M_, _F_), iolist_to_binary([<<"#/components/parameters/">>, ?TO_REF(namespace(_M_), _F_)])). +-define(TO_COMPONENTS_SCHEMA(_M_, _F_), iolist_to_binary([<<"#/components/schemas/">>, + ?TO_REF(namespace(_M_), _F_)])). +-define(TO_COMPONENTS_PARAM(_M_, _F_), iolist_to_binary([<<"#/components/parameters/">>, + ?TO_REF(namespace(_M_), _F_)])). + +-define(MAX_ROW_LIMIT, 100). + +-type(request() :: #{bindings => map(), query_string => map(), body => map()}). +-type(request_meta() :: #{module => module(), path => string(), method => atom()}). 
+ +-type(filter_result() :: {ok, request()} | {400, 'BAD_REQUEST', binary()}). +-type(filter() :: fun((request(), request_meta()) -> filter_result())). + +-type(spec_opts() :: #{check_schema => boolean() | filter(), translate_body => boolean()}). + +-type(route_path() :: string() | binary()). +-type(route_methods() :: map()). +-type(route_handler() :: atom()). +-type(route_options() :: #{filter => filter() | undefined}). + +-type(api_spec_entry() :: {route_path(), route_methods(), route_handler(), route_options()}). +-type(api_spec_component() :: map()). + +%%------------------------------------------------------------------------------ +%% API +%%------------------------------------------------------------------------------ %% @equiv spec(Module, #{check_schema => false}) --spec(spec(module()) -> - {list({Path, Specs, OperationId, Options}), list(Component)} when - Path :: string()|binary(), - Specs :: map(), - OperationId :: atom(), - Options :: #{filter => fun((map(), - #{module => module(), path => string(), method => atom()}) -> map())}, - Component :: map()). +-spec(spec(module()) -> {list(api_spec_entry()), list(api_spec_component())}). spec(Module) -> spec(Module, #{check_schema => false}). --spec(spec(module(), #{check_schema => boolean()}) -> - {list({Path, Specs, OperationId, Options}), list(Component)} when - Path :: string()|binary(), - Specs :: map(), - OperationId :: atom(), - Options :: #{filter => fun((map(), - #{module => module(), path => string(), method => atom()}) -> map())}, - Component :: map()). +-spec(spec(module(), spec_opts()) -> {list(api_spec_entry()), list(api_spec_component())}). spec(Module, Options) -> Paths = apply(Module, paths, []), {ApiSpec, AllRefs} = @@ -60,26 +69,10 @@ spec(Module, Options) -> end, {[], []}, Paths), {ApiSpec, components(lists:usort(AllRefs))}. --spec(translate_req(#{binding => list(), query_string => list(), body => map()}, - #{module => module(), path => string(), method => atom()}) -> - {ok, #{binding => list(), query_string => list(), body => map()}}| - {400, 'BAD_REQUEST', binary()}). -translate_req(Request, #{module := Module, path := Path, method := Method}) -> - #{Method := Spec} = apply(Module, schema, [Path]), - try - Params = maps:get(parameters, Spec, []), - Body = maps:get(requestBody, Spec, []), - {Bindings, QueryStr} = check_parameters(Request, Params, Module), - NewBody = check_requestBody(Request, Body, Module, hoconsc:is_schema(Body)), - {ok, Request#{bindings => Bindings, query_string => QueryStr, body => NewBody}} - catch throw:Error -> - {_, [{validation_error, ValidErr}]} = Error, - #{path := Key, reason := Reason} = ValidErr, - {400, 'BAD_REQUEST', iolist_to_binary(io_lib:format("~ts : ~p", [Key, Reason]))} - end. - +-spec(namespace() -> hocon_schema:name()). namespace() -> "public". +-spec(fields(hocon_schema:name()) -> hocon_schema:fields()). fields(page) -> Desc = <<"Page number of the results to fetch.">>, Meta = #{in => query, desc => Desc, default => 1, example => 1}, @@ -90,9 +83,19 @@ fields(limit) -> Meta = #{in => query, desc => Desc, default => ?MAX_ROW_LIMIT, example => 50}, [{limit, hoconsc:mk(range(1, ?MAX_ROW_LIMIT), Meta)}]. +-spec(schema_with_example(hocon_schema:type(), term()) -> hocon_schema:field_schema_map()). +schema_with_example(Type, Example) -> + hoconsc:mk(Type, #{examples => #{<<"example">> => Example}}). + +-spec(schema_with_examples(hocon_schema:type(), map()) -> hocon_schema:field_schema_map()). 
+schema_with_examples(Type, Examples) -> + hoconsc:mk(Type, #{examples => #{<<"examples">> => Examples}}). + +-spec(error_codes(list(atom())) -> hocon_schema:fields()). error_codes(Codes) -> error_codes(Codes, <<"Error code to troubleshoot problems.">>). +-spec(error_codes(nonempty_list(atom()), binary()) -> hocon_schema:fields()). error_codes(Codes = [_ | _], MsgExample) -> [ {code, hoconsc:mk(hoconsc:enum(Codes))}, @@ -102,9 +105,45 @@ error_codes(Codes = [_ | _], MsgExample) -> })} ]. -support_check_schema(#{check_schema := true}) -> ?DEFAULT_FILTER; -support_check_schema(#{check_schema := Func}) when is_function(Func, 2) -> #{filter => Func}; -support_check_schema(_) -> #{filter => undefined}. +%%------------------------------------------------------------------------------ +%% Private functions +%%------------------------------------------------------------------------------ + +filter_check_request_and_translate_body(Request, RequestMeta) -> + translate_req(Request, RequestMeta, fun check_and_translate/3). + +filter_check_request(Request, RequestMeta) -> + translate_req(Request, RequestMeta, fun check_only/3). + +translate_req(Request, #{module := Module, path := Path, method := Method}, CheckFun) -> + #{Method := Spec} = apply(Module, schema, [Path]), + try + Params = maps:get(parameters, Spec, []), + Body = maps:get('requestBody', Spec, []), + {Bindings, QueryStr} = check_parameters(Request, Params, Module), + NewBody = check_request_body(Request, Body, Module, CheckFun, hoconsc:is_schema(Body)), + {ok, Request#{bindings => Bindings, query_string => QueryStr, body => NewBody}} + catch throw:Error -> + {_, [{validation_error, ValidErr}]} = Error, + #{path := Key, reason := Reason} = ValidErr, + {400, 'BAD_REQUEST', iolist_to_binary(io_lib:format("~ts : ~p", [Key, Reason]))} + end. + +check_and_translate(Schema, Map, Opts) -> + hocon_schema:check_plain(Schema, Map, Opts). + +check_only(Schema, Map, Opts) -> + _ = hocon_schema:check_plain(Schema, Map, Opts), + Map. + +support_check_schema(#{check_schema := true, translate_body := true}) -> + #{filter => fun ?MODULE:filter_check_request_and_translate_body/2}; +support_check_schema(#{check_schema := true}) -> + #{filter => fun ?MODULE:filter_check_request/2}; +support_check_schema(#{check_schema := Filter}) when is_function(Filter, 2) -> + #{filter => Filter}; +support_check_schema(_) -> + #{filter => undefined}. parse_spec_ref(Module, Path) -> Schema = @@ -119,34 +158,41 @@ parse_spec_ref(Module, Path) -> {Spec, SubRefs} = meta_to_spec(Meta, Module), {Acc#{Method => Spec}, SubRefs ++ RefsAcc} end, {#{}, []}, - maps:without([operationId], Schema)), - {maps:get(operationId, Schema), Specs, Refs}. + maps:without(['operationId'], Schema)), + {maps:get('operationId', Schema), Specs, Refs}. check_parameters(Request, Spec, Module) -> #{bindings := Bindings, query_string := QueryStr} = Request, - BindingsBin = maps:fold(fun(Key, Value, Acc) -> Acc#{atom_to_binary(Key) => Value} end, #{}, Bindings), + BindingsBin = maps:fold(fun(Key, Value, Acc) -> + Acc#{atom_to_binary(Key) => Value} + end, #{}, Bindings), check_parameter(Spec, BindingsBin, QueryStr, Module, #{}, #{}). 
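Note: translate_req/2 above is split into two exported filters, and support_check_schema/1 picks one from the spec options. A short sketch of how an API module opts in (check-only validation versus validation plus body translation):

%% validate bindings/query string/body only; handlers still see the raw body
api_spec() ->
    emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).

%% or validate and hand handlers the checked, translated body instead:
%% api_spec() ->
%%     emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}).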
check_parameter([?REF(Fields) | Spec], Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc) -> - check_parameter([?R_REF(LocalMod, Fields) | Spec], Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc); -check_parameter([?R_REF(Module, Fields) | Spec], Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc) -> + check_parameter([?R_REF(LocalMod, Fields) | Spec], + Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc); +check_parameter([?R_REF(Module, Fields) | Spec], + Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc) -> Params = apply(Module, fields, [Fields]), check_parameter(Params ++ Spec, Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc); -check_parameter([], _Bindings, _QueryStr, _Module, NewBindings, NewQueryStr) -> {NewBindings, NewQueryStr}; +check_parameter([], _Bindings, _QueryStr, _Module, NewBindings, NewQueryStr) -> + {NewBindings, NewQueryStr}; check_parameter([{Name, Type} | Spec], Bindings, QueryStr, Module, BindingsAcc, QueryStrAcc) -> Schema = ?INIT_SCHEMA#{roots => [{Name, Type}]}, case hocon_schema:field_schema(Type, in) of path -> - NewBindings = hocon_schema:check_plain(Schema, Bindings, #{atom_key => true, override_env => false}), + Option = #{atom_key => true, override_env => false}, + NewBindings = hocon_schema:check_plain(Schema, Bindings, Option), NewBindingsAcc = maps:merge(BindingsAcc, NewBindings), check_parameter(Spec, Bindings, QueryStr, Module, NewBindingsAcc, QueryStrAcc); query -> - NewQueryStr = hocon_schema:check_plain(Schema, QueryStr, #{override_env => false}), + Option = #{override_env => false}, + NewQueryStr = hocon_schema:check_plain(Schema, QueryStr, Option), NewQueryStrAcc = maps:merge(QueryStrAcc, NewQueryStr), - check_parameter(Spec, Bindings, QueryStr, Module, BindingsAcc, NewQueryStrAcc) + check_parameter(Spec, Bindings, QueryStr, Module,BindingsAcc, NewQueryStrAcc) end. -check_requestBody(#{body := Body}, Schema, Module, true) -> +check_request_body(#{body := Body}, Schema, Module, CheckFun, true) -> Type0 = hocon_schema:field_schema(Schema, type), Type = case Type0 of @@ -154,25 +200,26 @@ check_requestBody(#{body := Body}, Schema, Module, true) -> _ -> Type0 end, NewSchema = ?INIT_SCHEMA#{roots => [{root, Type}]}, - #{<<"root">> := NewBody} = hocon_schema:check_plain(NewSchema, #{<<"root">> => Body}, #{override_env => false}), + Option = #{override_env => false, nullable => true}, + #{<<"root">> := NewBody} = CheckFun(NewSchema, #{<<"root">> => Body}, Option), NewBody; %% TODO not support nest object check yet, please use ref! -%% RequestBody = [ {per_page, mk(integer(), #{}}, +%% 'requestBody' = [ {per_page, mk(integer(), #{}}, %% {nest_object, [ %% {good_nest_1, mk(integer(), #{})}, %% {good_nest_2, mk(ref(?MODULE, good_ref), #{})} %% ]} %% ] -check_requestBody(#{body := Body}, Spec, _Module, false) -> +check_request_body(#{body := Body}, Spec, _Module, CheckFun, false) -> lists:foldl(fun({Name, Type}, Acc) -> Schema = ?INIT_SCHEMA#{roots => [{Name, Type}]}, - maps:merge(Acc, hocon_schema:check_plain(Schema, Body)) + maps:merge(Acc, CheckFun(Schema, Body, #{override_env => false})) end, #{}, Spec). 
%% tags, description, summary, security, deprecated meta_to_spec(Meta, Module) -> {Params, Refs1} = parameters(maps:get(parameters, Meta, []), Module), - {RequestBody, Refs2} = requestBody(maps:get(requestBody, Meta, []), Module), + {RequestBody, Refs2} = request_body(maps:get('requestBody', Meta, []), Module), {Responses, Refs3} = responses(maps:get(responses, Meta, #{}), Module), { to_spec(Meta, Params, RequestBody, Responses), @@ -180,25 +227,22 @@ meta_to_spec(Meta, Module) -> }. to_spec(Meta, Params, [], Responses) -> - Spec = maps:without([parameters, requestBody, responses], Meta), + Spec = maps:without([parameters, 'requestBody', responses], Meta), Spec#{parameters => Params, responses => Responses}; to_spec(Meta, Params, RequestBody, Responses) -> Spec = to_spec(Meta, Params, [], Responses), - maps:put(requestBody, RequestBody, Spec). + maps:put('requestBody', RequestBody, Spec). parameters(Params, Module) -> {SpecList, AllRefs} = lists:foldl(fun(Param, {Acc, RefsAcc}) -> case Param of - ?REF(StructName) -> - {[#{<<"$ref">> => ?TO_COMPONENTS_PARAM(Module, StructName)} | Acc], - [{Module, StructName, parameter} | RefsAcc]}; - ?R_REF(RModule, StructName) -> - {[#{<<"$ref">> => ?TO_COMPONENTS_PARAM(RModule, StructName)} | Acc], - [{RModule, StructName, parameter} | RefsAcc]}; + ?REF(StructName) -> to_ref(Module, StructName, Acc, RefsAcc); + ?R_REF(RModule, StructName) -> to_ref(RModule, StructName, Acc, RefsAcc); {Name, Type} -> In = hocon_schema:field_schema(Type, in), - In =:= undefined andalso throw({error, <<"missing in:path/query field in parameters">>}), + In =:= undefined andalso + throw({error, <<"missing in:path/query field in parameters">>}), Nullable = hocon_schema:field_schema(Type, nullable), Default = hocon_schema:field_schema(Type, default), HoconType = hocon_schema:field_schema(Type, type), @@ -242,16 +286,17 @@ trans_desc(Spec, Hocon) -> Desc -> Spec#{description => to_bin(Desc)} end. -requestBody([], _Module) -> {[], []}; -requestBody(Schema, Module) -> - {Props, Refs} = +request_body([], _Module) -> {[], []}; +request_body(Schema, Module) -> + {{Props, Refs}, Examples} = case hoconsc:is_schema(Schema) of true -> HoconSchema = hocon_schema:field_schema(Schema, type), - hocon_schema_to_spec(HoconSchema, Module); - false -> parse_object(Schema, Module) + SchemaExamples = hocon_schema:field_schema(Schema, examples), + {hocon_schema_to_spec(HoconSchema, Module), SchemaExamples}; + false -> {parse_object(Schema, Module), undefined} end, - {#{<<"content">> => #{<<"application/json">> => #{<<"schema">> => Props}}}, + {#{<<"content">> => content(Props, Examples)}, Refs}. 
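Note: request_body/2 above now also picks up examples attached to a schema, which is what the new schema_with_example/2 helper is for. A hedged sketch of a schema entry using it (operation id, path and example values are made up; the example lands under the "application/json" content built by content/2):

schema("/sketch/users") ->
    #{'operationId' => sketch_users,
      post => #{
          'requestBody' => emqx_dashboard_swagger:schema_with_example(
                               hoconsc:ref(?MODULE, user),
                               #{<<"username">> => <<"emqx">>,
                                 <<"description">> => <<"administrator">>}),
          responses => #{200 => <<"ok">>}
      }}.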
responses(Responses, Module) -> @@ -264,19 +309,23 @@ response(Status, ?REF(StructName), {Acc, RefsAcc, Module}) -> response(Status, ?R_REF(Module, StructName), {Acc, RefsAcc, Module}); response(Status, ?R_REF(_Mod, _Name) = RRef, {Acc, RefsAcc, Module}) -> {Spec, Refs} = hocon_schema_to_spec(RRef, Module), - Content = #{<<"application/json">> => #{<<"schema">> => Spec}}, + Content = content(Spec), {Acc#{integer_to_binary(Status) => #{<<"content">> => Content}}, Refs ++ RefsAcc, Module}; response(Status, Schema, {Acc, RefsAcc, Module}) -> case hoconsc:is_schema(Schema) of true -> Hocon = hocon_schema:field_schema(Schema, type), + Examples = hocon_schema:field_schema(Schema, examples), {Spec, Refs} = hocon_schema_to_spec(Hocon, Module), Init = trans_desc(#{}, Schema), - Content = #{<<"application/json">> => #{<<"schema">> => Spec}}, - {Acc#{integer_to_binary(Status) => Init#{<<"content">> => Content}}, Refs ++ RefsAcc, Module}; + Content = content(Spec, Examples), + { + Acc#{integer_to_binary(Status) => Init#{<<"content">> => Content}}, + Refs ++ RefsAcc, Module + }; false -> {Props, Refs} = parse_object(Schema, Module), - Content = #{<<"content">> => #{<<"application/json">> => #{<<"schema">> => Props}}}, + Content = #{<<"content">> => content(Props)}, {Acc#{integer_to_binary(Status) => Content}, Refs ++ RefsAcc, Module} end. @@ -362,12 +411,17 @@ typename_to_spec("timeout()", _Mod) -> #{<<"oneOf">> => [#{type => string, examp #{type => integer, example => 100}], example => infinity}; typename_to_spec("bytesize()", _Mod) -> #{type => string, example => <<"32MB">>}; typename_to_spec("wordsize()", _Mod) -> #{type => string, example => <<"1024KB">>}; -typename_to_spec("map()", _Mod) -> #{type => string, example => <<>>}; -typename_to_spec("comma_separated_list()", _Mod) -> #{type => string, example => <<"item1,item2">>}; -typename_to_spec("comma_separated_atoms()", _Mod) -> #{type => string, example => <<"item1,item2">>}; -typename_to_spec("pool_type()", _Mod) -> #{type => string, enum => [random, hash], example => hash}; +typename_to_spec("map()", _Mod) -> #{type => object, example => #{}}; +typename_to_spec("comma_separated_list()", _Mod) -> + #{type => string, example => <<"item1,item2">>}; +typename_to_spec("comma_separated_atoms()", _Mod) -> + #{type => string, example => <<"item1,item2">>}; +typename_to_spec("pool_type()", _Mod) -> + #{type => string, enum => [random, hash], example => hash}; typename_to_spec("log_level()", _Mod) -> - #{type => string, enum => [debug, info, notice, warning, error, critical, alert, emergency, all]}; + #{ type => string, + enum => [debug, info, notice, warning, error, critical, alert, emergency, all] + }; typename_to_spec("rate()", _Mod) -> #{type => string, example => <<"10M/s">>}; typename_to_spec("bucket_rate()", _Mod) -> @@ -427,9 +481,12 @@ add_integer_prop(Schema, Key, Value) -> {Int, []} -> Schema#{Key => Int} end. -to_bin([Atom | _] = List) when is_atom(Atom) -> iolist_to_binary(io_lib:format("~p", [List])); -to_bin(List) when is_list(List) -> unicode:characters_to_binary(List); -to_bin(B) when is_boolean(B) -> B; +to_bin(List) when is_list(List) -> + case io_lib:printable_list(List) of + true -> unicode:characters_to_binary(List); + false -> List + end; +to_bin(Boolean) when is_boolean(Boolean) -> Boolean; to_bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8); to_bin(X) -> X. 
@@ -467,3 +524,15 @@ parse_object(Other, Module) -> is_required(Hocon) -> hocon_schema:field_schema(Hocon, required) =:= true orelse hocon_schema:field_schema(Hocon, nullable) =:= false. + +content(ApiSpec) -> + content(ApiSpec, undefined). + +content(ApiSpec, undefined) -> + #{<<"application/json">> => #{<<"schema">> => ApiSpec}}; +content(ApiSpec, Examples) when is_map(Examples) -> + #{<<"application/json">> => Examples#{<<"schema">> => ApiSpec}}. + +to_ref(Mod, StructName, Acc, RefsAcc) -> + Ref = #{<<"$ref">> => ?TO_COMPONENTS_PARAM(Mod, StructName)}, + {[Ref | Acc], [{Mod, StructName, parameter} | RefsAcc]}. diff --git a/apps/emqx_dashboard/src/emqx_dashboard_token.erl b/apps/emqx_dashboard/src/emqx_dashboard_token.erl index f8c023b46..ffd45241b 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_token.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_token.erl @@ -40,7 +40,7 @@ %% gen server part -behaviour(gen_server). --export([start_link/0]). +-export([start_link/0, salt/0]). -export([ init/1 , handle_call/3 @@ -75,6 +75,12 @@ destroy(Token) when is_binary(Token)-> destroy_by_username(Username) -> do_destroy_by_username(Username). +%% @doc create 4 bytes salt. +-spec(salt() -> binary()). +salt() -> + <> = crypto:strong_rand_bytes(2), + iolist_to_binary(io_lib:format("~4.16.0b", [X])). + mnesia(boot) -> ok = mria:create_table(?TAB, [ {type, set}, @@ -110,7 +116,9 @@ do_verify(Token)-> case ExpTime > erlang:system_time(millisecond) of true -> NewJWT = JWT#?ADMIN_JWT{exptime = jwt_expiration_time()}, - {atomic, Res} = mria:transaction(?DASHBOARD_SHARD, fun mnesia:write/1, [NewJWT]), + {atomic, Res} = mria:transaction(?DASHBOARD_SHARD, + fun mnesia:write/1, + [NewJWT]), Res; _ -> {error, token_timeout} @@ -145,7 +153,7 @@ lookup_by_username(Username) -> List. jwk(Username, Password, Salt) -> - Key = erlang:md5(<>), + Key = crypto:hash(md5, <>), #{ <<"kty">> => <<"oct">>, <<"k">> => jose_base64url:encode(Key) @@ -157,11 +165,6 @@ jwt_expiration_time() -> token_ttl() -> emqx_conf:get([emqx_dashboard, token_expired_time], ?EXPTIME). -salt() -> - _ = emqx_misc:rand_seed(), - Salt = rand:uniform(16#ffffffff), - <>. - format(Token, Username, ExpTime) -> #?ADMIN_JWT{ token = Token, diff --git a/apps/emqx_dashboard/test/emqx_dashboard_SUITE.erl b/apps/emqx_dashboard/test/emqx_dashboard_SUITE.erl index 42ffd7d45..061f7c839 100644 --- a/apps/emqx_dashboard/test/emqx_dashboard_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_dashboard_SUITE.erl @@ -31,21 +31,51 @@ -define(CONTENT_TYPE, "application/x-www-form-urlencoded"). --define(HOST, "http://127.0.0.1:18083/"). +-define(HOST, "http://127.0.0.1:18083"). --define(API_VERSION, "v4"). +%% -define(API_VERSION, "v5"). --define(BASE_PATH, "api"). +-define(BASE_PATH, "/api/v5"). --define(OVERVIEWS, ['alarms/activated', 'alarms/deactivated', banned, brokers, stats, metrics, listeners, clients, subscriptions, routes, plugins]). +-define(APP_DASHBOARD, emqx_dashboard). +-define(APP_MANAGEMENT, emqx_management). + +-define(OVERVIEWS, ['alarms/activated', + 'alarms/deactivated', + banned, + brokers, + stats, + metrics, + listeners, + clients, + subscriptions, + routes, + plugins + ]). all() -> -%% TODO: V5 API -% emqx_common_test_helpers:all(?MODULE). - [t_cli, t_lookup_by_username_jwt, t_clean_expired_jwt]. + %% TODO: V5 API + %% emqx_common_test_helpers:all(?MODULE). + [t_cli, t_lookup_by_username_jwt, t_clean_expired_jwt, t_rest_api]. + +init_suite() -> + init_suite([]). 
+ +init_suite(Apps) -> + mria:start(), + application:load(emqx_management), + emqx_common_test_helpers:start_apps(Apps ++ [emqx_dashboard], fun set_special_configs/1). + +end_suite() -> + end_suite([]). + +end_suite(Apps) -> + application:unload(emqx_management), + emqx_common_test_helpers:stop_apps(Apps ++ [emqx_dashboard]). init_per_suite(Config) -> - emqx_common_test_helpers:start_apps([emqx_management, emqx_dashboard],fun set_special_configs/1), + emqx_common_test_helpers:start_apps([emqx_management, emqx_dashboard], + fun set_special_configs/1), Config. end_per_suite(_Config) -> @@ -53,52 +83,66 @@ end_per_suite(_Config) -> mria:stop(). set_special_configs(emqx_management) -> - emqx_config:put([emqx_management], #{listeners => [#{protocol => http, port => 8081}], - applications =>[#{id => "admin", secret => "public"}]}), + Listeners = [#{protocol => http, port => 8081}], + Config = #{listeners => Listeners, + applications => [#{id => "admin", secret => "public"}]}, + emqx_config:put([emqx_management], Config), + ok; +set_special_configs(emqx_dashboard) -> + Listeners = [#{protocol => http, port => 18083}], + Config = #{listeners => Listeners, + default_username => <<"admin">>, + default_password => <<"public">> + }, + emqx_config:put([emqx_dashboard], Config), ok; set_special_configs(_) -> ok. t_overview(_) -> mnesia:clear_table(?ADMIN), - emqx_dashboard_admin:add_user(<<"admin">>, <<"public">>, <<"tag">>), - [?assert(request_dashboard(get, api_path(erlang:atom_to_list(Overview)), auth_header_()))|| Overview <- ?OVERVIEWS]. + emqx_dashboard_admin:add_user(<<"admin">>, <<"public">>, <<"simple_description">>), + [?assert(request_dashboard(get, api_path(erlang:atom_to_list(Overview)), + auth_header_())) || Overview <- ?OVERVIEWS]. t_admins_add_delete(_) -> mnesia:clear_table(?ADMIN), - ok = emqx_dashboard_admin:add_user(<<"username">>, <<"password">>, <<"tag">>), - ok = emqx_dashboard_admin:add_user(<<"username1">>, <<"password1">>, <<"tag1">>), + Desc = <<"simple description">>, + ok = emqx_dashboard_admin:add_user(<<"username">>, <<"password">>, Desc), + ok = emqx_dashboard_admin:add_user(<<"username1">>, <<"password1">>, Desc), Admins = emqx_dashboard_admin:all_users(), ?assertEqual(2, length(Admins)), ok = emqx_dashboard_admin:remove_user(<<"username1">>), Users = emqx_dashboard_admin:all_users(), ?assertEqual(1, length(Users)), - ok = emqx_dashboard_admin:change_password(<<"username">>, <<"password">>, <<"pwd">>), + ok = emqx_dashboard_admin:change_password(<<"username">>, + <<"password">>, + <<"pwd">>), timer:sleep(10), - ?assert(request_dashboard(get, api_path("brokers"), auth_header_("username", "pwd"))), + Header = auth_header_(<<"username">>, <<"pwd">>), + ?assert(request_dashboard(get, api_path("brokers"), Header)), ok = emqx_dashboard_admin:remove_user(<<"username">>), - ?assertNotEqual(true, request_dashboard(get, api_path("brokers"), auth_header_("username", "pwd"))). + ?assertNotEqual(true, request_dashboard(get, api_path("brokers"), Header)). 
t_rest_api(_Config) -> mnesia:clear_table(?ADMIN), - emqx_dashboard_admin:add_user(<<"admin">>, <<"public">>, <<"administrator">>), - {ok, Res0} = http_get("users"), - + Desc = <<"administrator">>, + emqx_dashboard_admin:add_user(<<"admin">>, <<"public">>, Desc), + {ok, 200, Res0} = http_get(["users"]), ?assertEqual([#{<<"username">> => <<"admin">>, - <<"tags">> => <<"administrator">>}], get_http_data(Res0)), - - AssertSuccess = fun({ok, Res}) -> - ?assertEqual(#{<<"code">> => 0}, json(Res)) - end, - [AssertSuccess(R) - || R <- [ http_put("users/admin", #{<<"tags">> => <<"a_new_tag">>}) - , http_post("users", #{<<"username">> => <<"usera">>, <<"password">> => <<"passwd">>}) - , http_post("auth", #{<<"username">> => <<"usera">>, <<"password">> => <<"passwd">>}) - , http_delete("users/usera") - , http_put("users/admin/change_pwd", #{<<"old_pwd">> => <<"public">>, <<"new_pwd">> => <<"newpwd">>}) - , http_post("auth", #{<<"username">> => <<"admin">>, <<"password">> => <<"newpwd">>}) - ]], + <<"description">> => <<"administrator">>}], get_http_data(Res0)), + {ok, 200, _} = http_put(["users", "admin"], #{<<"description">> => <<"a_new_description">>}), + {ok, 200, _} = http_post(["users"], #{<<"username">> => <<"usera">>, + <<"password">> => <<"passwd">>, + <<"description">> => Desc}), + {ok, 204, _} = http_delete(["users", "usera"]), + {ok, 404, _} = http_delete(["users", "usera"]), + {ok, 204, _} = http_put( ["users", "admin", "change_pwd"] + , #{<<"old_pwd">> => <<"public">>, + <<"new_pwd">> => <<"newpwd">>}), + mnesia:clear_table(?ADMIN), + emqx_dashboard_admin:add_user(<<"admin">>, <<"public">>, <<"administrator">>), ok. t_cli(_Config) -> @@ -106,11 +150,11 @@ t_cli(_Config) -> emqx_dashboard_cli:admins(["add", "username", "password"]), [#?ADMIN{ username = <<"username">>, pwdhash = <>}] = emqx_dashboard_admin:lookup_user(<<"username">>), - ?assertEqual(Hash, erlang:md5(<>/binary>>)), + ?assertEqual(Hash, crypto:hash(sha256, <>/binary>>)), emqx_dashboard_cli:admins(["passwd", "username", "newpassword"]), [#?ADMIN{username = <<"username">>, pwdhash = <>}] = emqx_dashboard_admin:lookup_user(<<"username">>), - ?assertEqual(Hash1, erlang:md5(<>/binary>>)), + ?assertEqual(Hash1, crypto:hash(sha256, <>/binary>>)), emqx_dashboard_cli:admins(["del", "username"]), [] = emqx_dashboard_admin:lookup_user(<<"username">>), emqx_dashboard_cli:admins(["add", "admin1", "pass1"]), @@ -152,17 +196,17 @@ bin(X) -> iolist_to_binary(X). random_num() -> erlang:system_time(nanosecond). -http_get(Path) -> - request_api(get, api_path(Path), auth_header_()). +http_get(Parts) -> + request_api(get, api_path(Parts), auth_header_()). -http_delete(Path) -> - request_api(delete, api_path(Path), auth_header_()). +http_delete(Parts) -> + request_api(delete, api_path(Parts), auth_header_()). -http_post(Path, Body) -> - request_api(post, api_path(Path), [], auth_header_(), Body). +http_post(Parts, Body) -> + request_api(post, api_path(Parts), [], auth_header_(), Body). -http_put(Path, Body) -> - request_api(put, api_path(Path), [], auth_header_(), Body). +http_put(Parts, Body) -> + request_api(put, api_path(Parts), [], auth_header_(), Body). 
request_dashboard(Method, Url, Auth) -> Request = {Url, [Auth]}, @@ -175,21 +219,22 @@ do_request_dashboard(Method, Request)-> case httpc:request(Method, Request, [], []) of {error, socket_closed_remotely} -> {error, socket_closed_remotely}; - {ok, {{"HTTP/1.1", 200, _}, _, _Return} } -> - true; + {ok, {{"HTTP/1.1", Code, _}, _Headers, Return} } + when Code >= 200 andalso Code =< 299 -> + {ok, Return}; {ok, {Reason, _, _}} -> {error, Reason} end. auth_header_() -> - auth_header_("admin", "public"). + auth_header_(<<"admin">>, <<"public">>). -auth_header_(User, Pass) -> - Encoded = base64:encode_to_string(lists:append([User,":",Pass])), - {"Authorization","Basic " ++ Encoded}. +auth_header_(Username, Password) -> + {ok, Token} = emqx_dashboard_admin:sign_token(Username, Password), + {"Authorization","Bearer " ++ binary_to_list(Token)}. -api_path(Path) -> - ?HOST ++ filename:join([?BASE_PATH, ?API_VERSION, Path]). +api_path(Parts) -> + ?HOST ++ filename:join([?BASE_PATH | Parts]). json(Data) -> {ok, Jsx} = emqx_json:safe_decode(Data, [return_maps]), Jsx. diff --git a/apps/emqx_dashboard/test/emqx_swagger_parameter_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_parameter_SUITE.erl index 9c9958880..c938788e4 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_parameter_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_parameter_SUITE.erl @@ -209,14 +209,32 @@ t_in_mix_trans_error(_Config) -> ok. t_api_spec(_Config) -> - {Spec, _Components} = emqx_dashboard_swagger:spec(?MODULE), - Filter = fun(V, S) -> lists:all(fun({_, _, _, #{filter := Filter}}) -> Filter =:= V end, S) end, - ?assertEqual(true, Filter(undefined, Spec)), - {Spec1, _Components1} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}), - ?assertEqual(true, Filter(fun emqx_dashboard_swagger:translate_req/2, Spec1)), - {Spec2, _Components2} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => fun emqx_dashboard_swagger:translate_req/2}), - ?assertEqual(true, Filter(fun emqx_dashboard_swagger:translate_req/2, Spec2)), - ok. + {Spec0, _} = emqx_dashboard_swagger:spec(?MODULE), + assert_all_filters_equal(Spec0, undefined), + + {Spec1, _} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => false}), + assert_all_filters_equal(Spec1, undefined), + + CustomFilter = fun(Request, _RequestMeta) -> {ok, Request} end, + {Spec2, _} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => CustomFilter}), + assert_all_filters_equal(Spec2, CustomFilter), + + {Spec3, _} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}), + Path = "/test/in/:filter", + + Filter = filter(Spec3, Path), + Bindings = #{filter => <<"created">>}, + + ?assertMatch( + {ok, #{bindings := #{filter := created}}}, + trans_parameters(Path, Bindings, #{}, Filter)). + +assert_all_filters_equal(Spec, Filter) -> + lists:foreach( + fun({_, _, _, #{filter := F}}) -> + ?assertEqual(Filter, F) + end, + Spec). validate(Path, ExpectParams) -> {OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path), @@ -226,10 +244,17 @@ validate(Path, ExpectParams) -> ?assertEqual([], Refs), Spec. +filter(ApiSpec, Path) -> + [Filter] = [F || {P, _, _, #{filter := F}} <- ApiSpec, P =:= Path], + Filter. + trans_parameters(Path, Bindings, QueryStr) -> + trans_parameters(Path, Bindings, QueryStr, fun emqx_dashboard_swagger:filter_check_request/2). 
+ +trans_parameters(Path, Bindings, QueryStr, Filter) -> Meta = #{module => ?MODULE, method => post, path => Path}, Request = #{bindings => Bindings, query_string => QueryStr, body => #{}}, - emqx_dashboard_swagger:translate_req(Request, Meta). + Filter(Request, Meta). api_spec() -> emqx_dashboard_swagger:spec(?MODULE). diff --git a/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl index 7aa986d1d..149657ec0 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl @@ -10,7 +10,7 @@ t_ref_array_with_key/1, t_ref_array_without_key/1 ]). -export([ - t_object_trans/1, t_nest_object_trans/1, t_local_ref_trans/1, + t_object_trans/1, t_object_notrans/1, t_nest_object_trans/1, t_local_ref_trans/1, t_remote_ref_trans/1, t_nest_ref_trans/1, t_ref_array_with_key_trans/1, t_ref_array_without_key_trans/1, t_ref_trans_error/1, t_object_trans_error/1 @@ -32,7 +32,7 @@ groups() -> [ t_ref_array_with_key, t_ref_array_without_key, t_nest_ref]}, {validation, [parallel], [ - t_object_trans, t_local_ref_trans, t_remote_ref_trans, + t_object_trans, t_object_notrans, t_local_ref_trans, t_remote_ref_trans, t_ref_array_with_key_trans, t_ref_array_without_key_trans, t_nest_ref_trans, t_ref_trans_error, t_object_trans_error %% t_nest_object_trans, @@ -173,8 +173,29 @@ t_ref_array_without_key(_Config) -> ok. t_api_spec(_Config) -> - emqx_dashboard_swagger:spec(?MODULE), - ok. + {Spec0, _} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}), + Path = "/object", + Body = #{ + <<"per_page">> => 1, + <<"timeout">> => <<"infinity">>, + <<"inner_ref">> => #{ + <<"webhook-host">> => <<"127.0.0.1:80">>, + <<"log_dir">> => <<"var/log/test">>, + <<"tag">> => <<"god_tag">> + } + }, + + Filter0 = filter(Spec0, Path), + ?assertMatch( + {ok, #{body := #{<<"timeout">> := <<"infinity">>}}}, + trans_requestBody(Path, Body, Filter0)), + + {Spec1, _} = emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}), + Filter1 = filter(Spec1, Path), + ?assertMatch( + {ok, #{body := #{<<"timeout">> := infinity}}}, + trans_requestBody(Path, Body, Filter1)). + t_object_trans(_Config) -> Path = "/object", @@ -205,6 +226,21 @@ t_object_trans(_Config) -> ?assertEqual(Expect, ActualBody), ok. +t_object_notrans(_Config) -> + Path = "/object", + Body = #{ + <<"per_page">> => 1, + <<"timeout">> => <<"infinity">>, + <<"inner_ref">> => #{ + <<"webhook-host">> => <<"127.0.0.1:80">>, + <<"log_dir">> => <<"var/log/test">>, + <<"tag">> => <<"god_tag">> + } + }, + {ok, #{body := ActualBody}} = trans_requestBody(Path, Body, fun emqx_dashboard_swagger:filter_check_request/2), + ?assertEqual(Body, ActualBody), + ok. + t_nest_object_trans(_Config) -> Path = "/nest/object", Body = #{ @@ -337,6 +373,7 @@ t_ref_array_with_key_trans(_Config) -> {ok, NewRequest} = trans_requestBody(Path, Body), ?assertEqual(Expect, NewRequest), ok. + t_ref_array_without_key_trans(_Config) -> Path = "/ref/array/without/key", Body = [#{ @@ -401,10 +438,18 @@ validate(Path, ExpectSpec, ExpectRefs) -> ?assertEqual(ExpectRefs, Refs), {Spec, emqx_dashboard_swagger:components(Refs)}. + +filter(ApiSpec, Path) -> + [Filter] = [F || {P, _, _, #{filter := F}} <- ApiSpec, P =:= Path], + Filter. + trans_requestBody(Path, Body) -> + trans_requestBody(Path, Body, fun emqx_dashboard_swagger:filter_check_request_and_translate_body/2). 
+ +trans_requestBody(Path, Body, Filter) -> Meta = #{module => ?MODULE, method => post, path => Path}, Request = #{bindings => #{}, query_string => #{}, body => Body}, - emqx_dashboard_swagger:translate_req(Request, Meta). + Filter(Request, Meta). api_spec() -> emqx_dashboard_swagger:spec(?MODULE). paths() -> diff --git a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl index 92f411da7..dff491225 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl @@ -172,7 +172,7 @@ t_complicated_type(_Config) -> [#{example => infinity, type => string}, #{example => 100, type => integer}]}}, {<<"bytesize">>, #{example => <<"32MB">>, type => string}}, {<<"wordsize">>, #{example => <<"1024KB">>, type => string}}, - {<<"maps">>, #{example => <<>>, type => string}}, + {<<"maps">>, #{example => #{}, type => object}}, {<<"comma_separated_list">>, #{example => <<"item1,item2">>, type => string}}, {<<"comma_separated_atoms">>, #{example => <<"item1,item2">>, type => string}}, {<<"log_level">>, diff --git a/apps/emqx_exhook/src/emqx_exhook_handler.erl b/apps/emqx_exhook/src/emqx_exhook_handler.erl index 1e81646e0..43f0d43e4 100644 --- a/apps/emqx_exhook/src/emqx_exhook_handler.erl +++ b/apps/emqx_exhook/src/emqx_exhook_handler.erl @@ -304,7 +304,7 @@ merge_responsed_bool(Req, #{type := Type, value := {bool_result, NewBool}}) 'STOP_AND_RETURN' -> {stop, NReq} end; merge_responsed_bool(_Req, Resp) -> - ?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]), + ?SLOG(warning, #{msg => "unknown_responsed_value", resp => Resp}), ignore. merge_responsed_message(_Req, #{type := 'IGNORE'}) -> @@ -316,5 +316,5 @@ merge_responsed_message(Req, #{type := Type, value := {message, NMessage}}) -> 'STOP_AND_RETURN' -> {stop, NReq} end; merge_responsed_message(_Req, Resp) -> - ?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]), + ?SLOG(warning, #{msg => "unknown_responsed_value", resp => Resp}), ignore. 
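The logging changes above replace format-string ?LOG calls with structured ?SLOG reports. A minimal sketch of the convention (the wrapper function name is hypothetical; the macro comes from emqx/include/logger.hrl, which this change includes where needed):

    -include_lib("emqx/include/logger.hrl").

    log_unknown_response(Resp) ->
        %% msg is a stable snake_case string, convenient for searching and
        %% aggregation; dynamic values become separate report fields rather
        %% than being formatted into the message text
        ?SLOG(warning, #{msg => "unknown_responsed_value", resp => Resp}).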
diff --git a/apps/emqx_exhook/src/emqx_exhook_mngr.erl b/apps/emqx_exhook/src/emqx_exhook_mngr.erl index eef1e67ec..a982c6505 100644 --- a/apps/emqx_exhook/src/emqx_exhook_mngr.erl +++ b/apps/emqx_exhook/src/emqx_exhook_mngr.erl @@ -185,8 +185,9 @@ handle_info({timeout, _Ref, {reload, Name}}, State) -> {error, not_found} -> {noreply, NState}; {error, Reason} -> - ?LOG(warning, "Failed to reload exhook callback server \"~ts\", " - "Reason: ~0p", [Name, Reason]), + ?SLOG(warning, #{msg => "failed_to_reload_exhook_callback_server", + server_name => Name, + reason => Reason}), {noreply, ensure_reload_timer(NState)} end; @@ -230,8 +231,8 @@ do_load_server(Name, State0 = #state{ case emqx_exhook_server:load(Name, Options, ReqOpts) of {ok, ServerState} -> save(Name, ServerState), - ?LOG(info, "Load exhook callback server " - "\"~ts\" successfully!", [Name]), + ?SLOG(info, #{msg => "load_exhook_callback_server_successfully", + server_name => Name}), {ok, State#state{ running = maps:put(Name, Options, Running), waiting = maps:remove(Name, Waiting), diff --git a/apps/emqx_exhook/src/emqx_exhook_server.erl b/apps/emqx_exhook/src/emqx_exhook_server.erl index 84a143117..5e24ce8c4 100644 --- a/apps/emqx_exhook/src/emqx_exhook_server.erl +++ b/apps/emqx_exhook/src/emqx_exhook_server.erl @@ -155,8 +155,10 @@ do_init(ChannName, ReqOpts) -> try {ok, resolve_hookspec(maps:get(hooks, InitialResp, []))} catch _:Reason:Stk -> - ?LOG(error, "try to init ~p failed, reason: ~p, stacktrace: ~0p", - [ChannName, Reason, Stk]), + ?SLOG(error, #{msg => "failed_to_init_channel", + channel_name => ChannName, + reason => Reason, + stacktrace => Stk}), {error, Reason} end; {error, Reason} -> @@ -194,7 +196,7 @@ ensure_hooks(HookSpecs) -> lists:foreach(fun(Hookpoint) -> case lists:keyfind(Hookpoint, 1, ?ENABLED_HOOKS) of false -> - ?LOG(error, "Unknown name ~ts to hook, skip it!", [Hookpoint]); + ?SLOG(error, #{msg => "skipped_unknown_hookpoint", hookpoint => Hookpoint}); {Hookpoint, {M, F, A}} -> emqx_hooks:put(Hookpoint, {M, F, A}), ets:update_counter(?CNTER, Hookpoint, {2, 1}, {Hookpoint, 0}) @@ -267,22 +269,23 @@ match_topic_filter(TopicName, TopicFilter) -> -spec do_call(binary(), atom(), map(), map()) -> {ok, map()} | {error, term()}. 
do_call(ChannName, Fun, Req, ReqOpts) -> Options = ReqOpts#{channel => ChannName}, - ?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, Req, Options]), + ?SLOG(debug, #{msg => "do_call", module => ?PB_CLIENT_MOD, function => Fun, + req => Req, options => Options}), case catch apply(?PB_CLIENT_MOD, Fun, [Req, Options]) of - {ok, Resp, _Metadata} -> - ?LOG(debug, "Response {ok, ~0p, ~0p}", [Resp, _Metadata]), + {ok, Resp, Metadata} -> + ?SLOG(debug, #{msg => "do_call_ok", resp => Resp, metadata => Metadata}), {ok, Resp}; {error, {Code, Msg}, _Metadata} -> - ?LOG(error, "CALL ~0p:~0p(~0p, ~0p) response errcode: ~0p, errmsg: ~0p", - [?PB_CLIENT_MOD, Fun, Req, Options, Code, Msg]), + ?SLOG(error, #{msg => "exhook_call_error", module => ?PB_CLIENT_MOD, function => Fun, + req => Req, options => Options, code => Code, packet => Msg}), {error, {Code, Msg}}; {error, Reason} -> - ?LOG(error, "CALL ~0p:~0p(~0p, ~0p) error: ~0p", - [?PB_CLIENT_MOD, Fun, Req, Options, Reason]), + ?SLOG(error, #{msg => "exhook_call_error", module => ?PB_CLIENT_MOD, function => Fun, + req => Req, options => Options, reason => Reason}), {error, Reason}; {'EXIT', {Reason, Stk}} -> - ?LOG(error, "CALL ~0p:~0p(~0p, ~0p) throw an exception: ~0p, stacktrace: ~0p", - [?PB_CLIENT_MOD, Fun, Req, Options, Reason, Stk]), + ?SLOG(error, #{msg => "exhook_call_exception", module => ?PB_CLIENT_MOD, function => Fun, + req => Req, options => Options, stacktrace => Stk}), {error, Reason} end. diff --git a/apps/emqx_gateway/etc/emqx_gateway.conf.example b/apps/emqx_gateway/etc/emqx_gateway.conf.example index fc85bb1e8..03a9a7868 100644 --- a/apps/emqx_gateway/etc/emqx_gateway.conf.example +++ b/apps/emqx_gateway/etc/emqx_gateway.conf.example @@ -256,7 +256,7 @@ gateway.lwm2m { enable_stats = true ## When publishing or subscribing, prefix all topics with a mountpoint string. - mountpoint = "lwm2m/%u" + mountpoint = "lwm2m/${username}" xml_dir = "{{ platform_etc_dir }}/lwm2m_xml" diff --git a/apps/emqx_gateway/src/coap/handler/emqx_coap_pubsub_handler.erl b/apps/emqx_gateway/src/coap/handler/emqx_coap_pubsub_handler.erl index 014f2b2ca..d80aaade8 100644 --- a/apps/emqx_gateway/src/coap/handler/emqx_coap_pubsub_handler.erl +++ b/apps/emqx_gateway/src/coap/handler/emqx_coap_pubsub_handler.erl @@ -56,6 +56,7 @@ handle_method(post, Topic, #coap_message{payload = Payload} = Msg, Ctx, CInfo) - #{clientid := ClientId} = CInfo, MountTopic = mount(CInfo, Topic), QOS = get_publish_qos(Msg), + %% TODO: Append message metadata into headers MQTTMsg = emqx_message:make(ClientId, QOS, MountTopic, Payload), MQTTMsg2 = apply_publish_opts(Msg, MQTTMsg), _ = emqx_broker:publish(MQTTMsg2), diff --git a/apps/emqx_gateway/src/emqx_gateway_api.erl b/apps/emqx_gateway/src/emqx_gateway_api.erl index 1b7dbf146..d79a880a1 100644 --- a/apps/emqx_gateway/src/emqx_gateway_api.erl +++ b/apps/emqx_gateway/src/emqx_gateway_api.erl @@ -16,6 +16,8 @@ %% -module(emqx_gateway_api). +-include_lib("emqx/include/emqx_placeholder.hrl"). + -behaviour(minirest_api). 
-import(emqx_gateway_http, @@ -293,7 +295,7 @@ schema_gateway_overview_list() -> <<"type">> => <<"udp">>, <<"running">> => true, <<"bind">> => 5783}], - <<"mountpoint">> => <<"lwm2m/%e/">>, + <<"mountpoint">> => <<"lwm2m/", ?PH_S_ENDPOINT_NAME, "/">>, <<"qmode_time_windonw">> => 22, <<"translators">> => #{<<"command">> => <<"dn/#">>,<<"notify">> => <<"up/notify">>, diff --git a/apps/emqx_gateway/src/emqx_gateway_api_authn.erl b/apps/emqx_gateway/src/emqx_gateway_api_authn.erl index 4cd2d8867..105a96989 100644 --- a/apps/emqx_gateway/src/emqx_gateway_api_authn.erl +++ b/apps/emqx_gateway/src/emqx_gateway_api_authn.erl @@ -18,21 +18,34 @@ -behaviour(minirest_api). +-include_lib("typerefl/include/types.hrl"). + +-define(BAD_REQUEST, 'BAD_REQUEST'). +-define(NOT_FOUND, 'NOT_FOUND'). +-define(INTERNAL_ERROR, 'INTERNAL_SERVER_ERROR'). + +-import(hoconsc, [mk/2, ref/2]). +-import(emqx_dashboard_swagger, [error_codes/2]). + -import(emqx_gateway_http, [ return_http_error/2 - , schema_bad_request/0 - , schema_not_found/0 - , schema_internal_error/0 - , schema_no_content/0 , with_gateway/2 + , with_authn/2 , checks/2 ]). -%% minirest behaviour callbacks --export([api_spec/0]). +%% minirest/dashbaord_swagger behaviour callbacks +-export([ api_spec/0 + , paths/0 + , schema/1 + ]). %% http handlers --export([authn/2]). +-export([ authn/2 + , users/2 + , users_insta/2 + , import_users/2 + ]). %% internal export for emqx_gateway_api_listeners module -export([schema_authn/0]). @@ -42,10 +55,13 @@ %%-------------------------------------------------------------------- api_spec() -> - {metadata(apis()), []}. + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). -apis() -> - [ {"/gateway/:name/authentication", authn} +paths() -> + [ "/gateway/:name/authentication" + , "/gateway/:name/authentication/users" + , "/gateway/:name/authentication/users/:uid" + , "/gateway/:name/authentication/import_users" ]. %%-------------------------------------------------------------------- @@ -66,6 +82,7 @@ authn(get, #{bindings := #{name := Name0}}) -> authn(put, #{bindings := #{name := Name0}, body := Body}) -> with_gateway(Name0, fun(GwName, _) -> + %% TODO: return the authn instances? ok = emqx_gateway_http:update_authn(GwName, Body), {204} end); @@ -73,6 +90,7 @@ authn(put, #{bindings := #{name := Name0}, authn(post, #{bindings := #{name := Name0}, body := Body}) -> with_gateway(Name0, fun(GwName, _) -> + %% TODO: return the authn instances? ok = emqx_gateway_http:add_authn(GwName, Body), {204} end); @@ -83,87 +101,251 @@ authn(delete, #{bindings := #{name := Name0}}) -> {204} end). +users(get, #{bindings := #{name := Name0}, query_string := Qs}) -> + with_authn(Name0, fun(_GwName, #{id := AuthId, + chain_name := ChainName}) -> + emqx_authn_api:list_users(ChainName, AuthId, page_pramas(Qs)) + end); +users(post, #{bindings := #{name := Name0}, + body := Body}) -> + with_authn(Name0, fun(_GwName, #{id := AuthId, + chain_name := ChainName}) -> + emqx_authn_api:add_user(ChainName, AuthId, Body) + end). 
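The users/2 and related handlers only resolve the gateway's authentication chain and authenticator id, then delegate the actual user CRUD to emqx_authn_api. A hypothetical test-style invocation, with illustrative gateway name and paging values; the return value is deliberately left unasserted:

    users_listing_sketch() ->
        Req = #{ bindings => #{name => <<"stomp">>}
               , query_string => #{<<"page">> => 1, <<"limit">> => 20}
               },
        %% resolves the gateway's global auth chain via with_authn/2, then
        %% delegates to emqx_authn_api:list_users/3
        _ = emqx_gateway_api_authn:users(get, Req).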
+ +users_insta(get, #{bindings := #{name := Name0, uid := UserId}}) -> + with_authn(Name0, fun(_GwName, #{id := AuthId, + chain_name := ChainName}) -> + emqx_authn_api:find_user(ChainName, AuthId, UserId) + end); +users_insta(put, #{bindings := #{name := Name0, uid := UserId}, + body := Body}) -> + with_authn(Name0, fun(_GwName, #{id := AuthId, + chain_name := ChainName}) -> + emqx_authn_api:update_user(ChainName, AuthId, UserId, Body) + end); +users_insta(delete, #{bindings := #{name := Name0, uid := UserId}}) -> + with_authn(Name0, fun(_GwName, #{id := AuthId, + chain_name := ChainName}) -> + emqx_authn_api:delete_user(ChainName, AuthId, UserId) + end). + +import_users(post, #{bindings := #{name := Name0}, + body := Body}) -> + with_authn(Name0, fun(_GwName, #{id := AuthId, + chain_name := ChainName}) -> + case maps:get(<<"filename">>, Body, undefined) of + undefined -> + emqx_authn_api:serialize_error({missing_parameter, filename}); + Filename -> + case emqx_authentication:import_users( + ChainName, AuthId, Filename) of + ok -> {204}; + {error, Reason} -> + emqx_authn_api:serialize_error(Reason) + end + end + end). + +%%-------------------------------------------------------------------- +%% Utils + +page_pramas(Qs) -> + maps:with([<<"page">>, <<"limit">>], Qs). + %%-------------------------------------------------------------------- %% Swagger defines %%-------------------------------------------------------------------- -metadata(APIs) -> - metadata(APIs, []). -metadata([], APIAcc) -> - lists:reverse(APIAcc); -metadata([{Path, Fun}|More], APIAcc) -> - Methods = [get, post, put, delete, patch], - Mds = lists:foldl(fun(M, Acc) -> - try - Acc#{M => swagger(Path, M)} - catch - error : function_clause -> - Acc - end - end, #{}, Methods), - metadata(More, [{Path, Mds, Fun} | APIAcc]). -swagger("/gateway/:name/authentication", get) -> - #{ description => <<"Get the gateway authentication">> - , parameters => params_gateway_name_in_path() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"200">> => schema_authn() - , <<"204">> => schema_no_content() - } +schema("/gateway/:name/authentication") -> + #{ 'operationId' => authn, + get => + #{ description => <<"Get the gateway authentication">> + , parameters => params_gateway_name_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => schema_authn() + , 204 => <<"Authentication does not initiated">> + } + }, + put => + #{ description => <<"Update authentication for the gateway">> + , parameters => params_gateway_name_in_path() + , 'requestBody' => schema_authn() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"Updated">> %% XXX: ??? 
return the updated object + } + }, + post => + #{ description => <<"Add authentication for the gateway">> + , parameters => params_gateway_name_in_path() + , 'requestBody' => schema_authn() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"Added">> + } + }, + delete => + #{ description => <<"Remove the gateway authentication">> + , parameters => params_gateway_name_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"Deleted">> + } + } }; -swagger("/gateway/:name/authentication", put) -> - #{ description => <<"Update authentication for the gateway">> - , parameters => params_gateway_name_in_path() - , requestBody => schema_authn() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"204">> => schema_no_content() - } +schema("/gateway/:name/authentication/users") -> + #{ 'operationId' => users + , get => + #{ description => <<"Get the users for the authentication">> + , parameters => params_gateway_name_in_path() ++ + params_paging_in_qs() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + }, + post => + #{ description => <<"Add user for the authentication">> + , parameters => params_gateway_name_in_path() + , 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(emqx_authn_api, request_user_create), + emqx_authn_api:request_user_create_examples()) + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 201 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + } }; -swagger("/gateway/:name/authentication", post) -> - #{ description => <<"Add authentication for the gateway">> - , parameters => params_gateway_name_in_path() - , requestBody => schema_authn() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"204">> => schema_no_content() - } +schema("/gateway/:name/authentication/users/:uid") -> + #{ 'operationId' => users_insta + , get => + #{ description => <<"Get user info from the gateway " + "authentication">> + , parameters => params_gateway_name_in_path() ++ + params_userid_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + }, + put => + #{ description => <<"Update the user info for the gateway " + "authentication">> + , parameters => params_gateway_name_in_path() ++ + params_userid_in_path() + , 'requestBody' => 
emqx_dashboard_swagger:schema_with_examples( + ref(emqx_authn_api, request_user_update), + emqx_authn_api:request_user_update_examples()) + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + }, + delete => + #{ description => <<"Delete the user for the gateway " + "authentication">> + , parameters => params_gateway_name_in_path() ++ + params_userid_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"User Deleted">> + } + } }; -swagger("/gateway/:name/authentication", delete) -> - #{ description => <<"Remove the gateway authentication">> - , parameters => params_gateway_name_in_path() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"204">> => schema_no_content() - } +schema("/gateway/:name/authentication/import_users") -> + #{ 'operationId' => import_users + , post => + #{ description => <<"Import users into the gateway authentication">> + , parameters => params_gateway_name_in_path() + , 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(emqx_authn_api, request_import_users), + emqx_authn_api:request_import_users_examples() + ) + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + %% XXX: Put a hint message into 204 return ? + , 204 => <<"Imported">> + } + } }. %%-------------------------------------------------------------------- %% params defines params_gateway_name_in_path() -> - [#{ name => name - , in => path - , schema => #{type => string} - , required => true - }]. + [{name, + mk(binary(), + #{ in => path + , desc => <<"Gateway Name">> + })} + ]. + +params_userid_in_path() -> + [{uid, mk(binary(), + #{ in => path + , desc => <<"User ID">> + })} + ]. + +params_paging_in_qs() -> + [{page, mk(integer(), + #{ in => query + , nullable => true + , desc => <<"Page Index">> + })}, + {limit, mk(integer(), + #{ in => query + , nullable => true + , desc => <<"Page Limit">> + })} + ]. %%-------------------------------------------------------------------- %% schemas schema_authn() -> - #{ description => <<"OK">> - , content => #{ - 'application/json' => #{ - schema => minirest:ref(<<"AuthenticatorInstance">>) - }} - }. + emqx_dashboard_swagger:schema_with_examples( + emqx_authn_schema:authenticator_type(), + emqx_authn_api:authenticator_examples() + ). diff --git a/apps/emqx_gateway/src/emqx_gateway_api_listeners.erl b/apps/emqx_gateway/src/emqx_gateway_api_listeners.erl index 0ab054df8..f1744363c 100644 --- a/apps/emqx_gateway/src/emqx_gateway_api_listeners.erl +++ b/apps/emqx_gateway/src/emqx_gateway_api_listeners.erl @@ -18,25 +18,41 @@ -behaviour(minirest_api). +-include_lib("typerefl/include/types.hrl"). + +-define(BAD_REQUEST, 'BAD_REQUEST'). +-define(NOT_FOUND, 'NOT_FOUND'). +-define(INTERNAL_ERROR, 'INTERNAL_SERVER_ERROR'). + +-import(hoconsc, [mk/2, ref/1, ref/2]). +-import(emqx_dashboard_swagger, [error_codes/2]). 
+ -import(emqx_gateway_http, [ return_http_error/2 - , schema_bad_request/0 - , schema_not_found/0 - , schema_internal_error/0 - , schema_no_content/0 , with_gateway/2 + , with_listener_authn/3 , checks/2 ]). -import(emqx_gateway_api_authn, [schema_authn/0]). -%% minirest behaviour callbacks --export([api_spec/0]). +%% minirest/dashbaord_swagger behaviour callbacks +-export([ api_spec/0 + , paths/0 + , schema/1 + ]). + +-export([ roots/0 + , fields/1 + ]). %% http handlers -export([ listeners/2 , listeners_insta/2 , listeners_insta_authn/2 + , users/2 + , users_insta/2 + , import_users/2 ]). %%-------------------------------------------------------------------- @@ -44,12 +60,15 @@ %%-------------------------------------------------------------------- api_spec() -> - {metadata(apis()), []}. + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). -apis() -> - [ {"/gateway/:name/listeners", listeners} - , {"/gateway/:name/listeners/:id", listeners_insta} - , {"/gateway/:name/listeners/:id/authentication", listeners_insta_authn} +paths() -> + [ "/gateway/:name/listeners" + , "/gateway/:name/listeners/:id" + , "/gateway/:name/listeners/:id/authentication" + , "/gateway/:name/listeners/:id/authentication/users" + , "/gateway/:name/listeners/:id/authentication/users/:uid" + , "/gateway/:name/listeners/:id/authentication/import_users" ]. %%-------------------------------------------------------------------- @@ -145,247 +164,481 @@ listeners_insta_authn(delete, #{bindings := #{name := Name0, {204} end). +users(get, #{bindings := #{name := Name0, id := Id}, query_string := Qs}) -> + with_listener_authn(Name0, Id, + fun(_GwName, #{id := AuthId, chain_name := ChainName}) -> + emqx_authn_api:list_users(ChainName, AuthId, page_pramas(Qs)) + end); +users(post, #{bindings := #{name := Name0, id := Id}, + body := Body}) -> + with_listener_authn(Name0, Id, + fun(_GwName, #{id := AuthId, chain_name := ChainName}) -> + emqx_authn_api:add_user(ChainName, AuthId, Body) + end). + +users_insta(get, #{bindings := #{name := Name0, id := Id, uid := UserId}}) -> + with_listener_authn(Name0, Id, + fun(_GwName, #{id := AuthId, chain_name := ChainName}) -> + emqx_authn_api:find_user(ChainName, AuthId, UserId) + end); +users_insta(put, #{bindings := #{name := Name0, id := Id, uid := UserId}, + body := Body}) -> + with_listener_authn(Name0, Id, + fun(_GwName, #{id := AuthId, chain_name := ChainName}) -> + emqx_authn_api:update_user(ChainName, AuthId, UserId, Body) + end); +users_insta(delete, #{bindings := #{name := Name0, id := Id, uid := UserId}}) -> + with_listener_authn(Name0, Id, + fun(_GwName, #{id := AuthId, chain_name := ChainName}) -> + emqx_authn_api:delete_user(ChainName, AuthId, UserId) + end). + +import_users(post, #{bindings := #{name := Name0, id := Id}, + body := Body}) -> + with_listener_authn(Name0, Id, + fun(_GwName, #{id := AuthId, chain_name := ChainName}) -> + case maps:get(<<"filename">>, Body, undefined) of + undefined -> + emqx_authn_api:serialize_error({missing_parameter, filename}); + Filename -> + case emqx_authentication:import_users( + ChainName, AuthId, Filename) of + ok -> {204}; + {error, Reason} -> + emqx_authn_api:serialize_error(Reason) + end + end + end). + +%%-------------------------------------------------------------------- +%% Utils + +page_pramas(Qs) -> + maps:with([<<"page">>, <<"limit">>], Qs). 
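page_pramas/1 simply narrows the query string to the paging keys before handing it to emqx_authn_api. A quick sketch of the behaviour of the underlying maps:with/2 call (the extra query key is hypothetical, added only to show that unrelated keys are dropped):

    paging_sketch() ->
        Qs = #{ <<"page">> => 1
              , <<"limit">> => 20
              , <<"like_username">> => <<"foo">>   %% hypothetical extra key
              },
        %% only the paging keys survive
        #{<<"page">> := 1, <<"limit">> := 20} = maps:with([<<"page">>, <<"limit">>], Qs),
        ok.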
+ %%-------------------------------------------------------------------- %% Swagger defines %%-------------------------------------------------------------------- -metadata(APIs) -> - metadata(APIs, []). -metadata([], APIAcc) -> - lists:reverse(APIAcc); -metadata([{Path, Fun}|More], APIAcc) -> - Methods = [get, post, put, delete, patch], - Mds = lists:foldl(fun(M, Acc) -> - try - Acc#{M => swagger(Path, M)} - catch - error : function_clause -> - Acc - end - end, #{}, Methods), - metadata(More, [{Path, Mds, Fun} | APIAcc]). - -swagger("/gateway/:name/listeners", get) -> - #{ description => <<"Get the gateway listeners">> - , parameters => params_gateway_name_in_path() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"200">> => schema_listener_list() - } +schema("/gateway/:name/listeners") -> + #{ 'operationId' => listeners, + get => + #{ description => <<"Get the gateway listeners">> + , parameters => params_gateway_name_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_examples( + hoconsc:array(ref(listener)), + examples_listener_list()) + } + }, + post => + #{ description => <<"Create the gateway listener">> + , parameters => params_gateway_name_in_path() + , 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(listener), + examples_listener()) + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"Created">> + } + } }; -swagger("/gateway/:name/listeners", post) -> - #{ description => <<"Create the gateway listener">> - , parameters => params_gateway_name_in_path() - , requestBody => schema_listener() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"200">> => schema_listener_list() - } +schema("/gateway/:name/listeners/:id") -> + #{ 'operationId' => listeners_insta, + get => + #{ description => <<"Get the gateway listener configurations">> + , parameters => params_gateway_name_in_path() + ++ params_listener_id_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_examples( + ref(listener), + examples_listener()) + } + }, + delete => + #{ description => <<"Delete the gateway listener">> + , parameters => params_gateway_name_in_path() + ++ params_listener_id_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"Deleted">> + } + }, + put => + #{ description => <<"Update the gateway listener">> + , parameters => params_gateway_name_in_path() + ++ params_listener_id_in_path() + , 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(listener), + examples_listener()) + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal 
Server Error">>) + , 200 => <<"Updated">> + } + } }; -swagger("/gateway/:name/listeners/:id", get) -> - #{ description => <<"Get the gateway listener configurations">> - , parameters => params_gateway_name_in_path() - ++ params_listener_id_in_path() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"200">> => schema_listener() - } - }; -swagger("/gateway/:name/listeners/:id", delete) -> - #{ description => <<"Delete the gateway listener">> - , parameters => params_gateway_name_in_path() - ++ params_listener_id_in_path() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"204">> => schema_no_content() - } - }; -swagger("/gateway/:name/listeners/:id", put) -> - #{ description => <<"Update the gateway listener">> - , parameters => params_gateway_name_in_path() - ++ params_listener_id_in_path() - , requestBody => schema_listener() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"200">> => schema_no_content() - } +schema("/gateway/:name/listeners/:id/authentication") -> + #{ 'operationId' => listeners_insta_authn, + get => + #{ description => <<"Get the listener's authentication info">> + , parameters => params_gateway_name_in_path() + ++ params_listener_id_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => schema_authn() + , 204 => <<"Authentication does not initiated">> + } + }, + post => + #{ description => <<"Add authentication for the listener">> + , parameters => params_gateway_name_in_path() + ++ params_listener_id_in_path() + , 'requestBody' => schema_authn() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"Added">> + } + }, + put => + #{ description => <<"Update authentication for the listener">> + , parameters => params_gateway_name_in_path() + ++ params_listener_id_in_path() + , 'requestBody' => schema_authn() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"Updated">> + } + }, + delete => + #{ description => <<"Remove authentication for the listener">> + , parameters => params_gateway_name_in_path() + ++ params_listener_id_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 204 => <<"Deleted">> + } + } }; -swagger("/gateway/:name/listeners/:id/authentication", get) -> - #{ description => <<"Get the listener's authentication info">> - , parameters => params_gateway_name_in_path() - ++ params_listener_id_in_path() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"200">> => schema_authn() - , <<"204">> => schema_no_content() - } +schema("/gateway/:name/listeners/:id/authentication/users") -> + #{ 'operationId' => users + , get => + #{ description => <<"Get the users for 
the authentication">> + , parameters => params_gateway_name_in_path() ++ + params_listener_id_in_path() ++ + params_paging_in_qs() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + }, + post => + #{ description => <<"Add user for the authentication">> + , parameters => params_gateway_name_in_path() ++ + params_listener_id_in_path() + , 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(emqx_authn_api, request_user_create), + emqx_authn_api:request_user_create_examples()) + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 201 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + } }; -swagger("/gateway/:name/listeners/:id/authentication", post) -> - #{ description => <<"Add authentication for the listener">> - , parameters => params_gateway_name_in_path() - ++ params_listener_id_in_path() - , requestBody => schema_authn() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"204">> => schema_no_content() - } +schema("/gateway/:name/listeners/:id/authentication/users/:uid") -> + #{ 'operationId' => users_insta + , get => + #{ description => <<"Get user info from the gateway " + "authentication">> + , parameters => params_gateway_name_in_path() ++ + params_listener_id_in_path() ++ + params_userid_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + }, + put => + #{ description => <<"Update the user info for the gateway " + "authentication">> + , parameters => params_gateway_name_in_path() ++ + params_listener_id_in_path() ++ + params_userid_in_path() + , 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(emqx_authn_api, request_user_update), + emqx_authn_api:request_user_update_examples()) + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + }, + delete => + #{ description => <<"Delete the user for the gateway " + "authentication">> + , parameters => params_gateway_name_in_path() ++ + params_listener_id_in_path() ++ + params_userid_in_path() + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + , 200 => emqx_dashboard_swagger:schema_with_example( + ref(emqx_authn_api, response_user), + emqx_authn_api:response_user_examples()) + } + } }; -swagger("/gateway/:name/listeners/:id/authentication", put) -> - #{ description => 
<<"Update authentication for the listener">> - , parameters => params_gateway_name_in_path() - ++ params_listener_id_in_path() - , requestBody => schema_authn() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"204">> => schema_no_content() - } - }; -swagger("/gateway/:name/listeners/:id/authentication", delete) -> - #{ description => <<"Remove authentication for the listener">> - , parameters => params_gateway_name_in_path() - ++ params_listener_id_in_path() - , responses => - #{ <<"400">> => schema_bad_request() - , <<"404">> => schema_not_found() - , <<"500">> => schema_internal_error() - , <<"204">> => schema_no_content() - } +schema("/gateway/:name/listeners/:id/authentication/import_users") -> + #{ 'operationId' => import_users + , post => + #{ description => <<"Import users into the gateway authentication">> + , parameters => params_gateway_name_in_path() ++ + params_listener_id_in_path() + , 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + ref(emqx_authn_api, request_import_users), + emqx_authn_api:request_import_users_examples() + ) + , responses => + #{ 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>) + , 404 => error_codes([?NOT_FOUND], <<"Not Found">>) + , 500 => error_codes([?INTERNAL_ERROR], + <<"Ineternal Server Error">>) + %% XXX: Put a hint message into 204 return ? + , 204 => <<"Imported">> + } + } }. %%-------------------------------------------------------------------- %% params defines params_gateway_name_in_path() -> - [#{ name => name - , in => path - , schema => #{type => string} - , required => true - }]. + [{name, + mk(binary(), + #{ in => path + , desc => <<"Gateway Name">> + })} + ]. params_listener_id_in_path() -> - [#{ name => id - , in => path - , schema => #{type => string} - , required => true - }]. + [{id, + mk(binary(), + #{ in => path + , desc => <<"Listener ID">> + })} + ]. + +params_userid_in_path() -> + [{uid, mk(binary(), + #{ in => path + , desc => <<"User ID">> + })} + ]. + +params_paging_in_qs() -> + [{page, mk(integer(), + #{ in => query + , nullable => true + , desc => <<"Page Index">> + })}, + {limit, mk(integer(), + #{ in => query + , nullable => true + , desc => <<"Page Limit">> + })} + ]. %%-------------------------------------------------------------------- %% schemas -schema_listener_list() -> - emqx_mgmt_util:array_schema( - #{ type => object - , properties => properties_listener() - }, - <<"Listener list">> - ). - -schema_listener() -> - emqx_mgmt_util:schema( - #{ type => object - , properties => properties_listener() - } - ). - -%%-------------------------------------------------------------------- -%% properties - -properties_listener() -> - emqx_mgmt_util:properties( - raw_properties_common_listener() ++ - [ {tcp, object, raw_properties_tcp_opts()} - , {ssl, object, raw_properties_ssl_opts()} - , {udp, object, raw_properties_udp_opts()} - , {dtls, object, raw_properties_dtls_opts()} - ]). - -raw_properties_tcp_opts() -> - [ {active_n, integer, <<>>} - , {backlog, integer, <<>>} - , {buffer, string, <<>>} - , {recbuf, string, <<>>} - , {sndbuf, string, <<>>} - , {high_watermark, string, <<>>} - , {nodelay, boolean, <<>>} - , {reuseaddr, boolean, <<>>} - , {send_timeout, string, <<>>} - , {send_timeout_close, boolean, <<>>} +roots() -> + [ listener ]. 
-raw_properties_ssl_opts() -> - [ {cacertfile, string, <<>>} - , {certfile, string, <<>>} - , {keyfile, string, <<>>} - , {verify, string, <<>>} - , {fail_if_no_peer_cert, boolean, <<>>} - , {server_name_indication, boolean, <<>>} - , {depth, integer, <<>>} - , {password, string, <<>>} - , {handshake_timeout, string, <<>>} - , {versions, {array, string}, <<>>} - , {ciphers, {array, string}, <<>>} - , {user_lookup_fun, string, <<>>} - , {reuse_sessions, boolean, <<>>} - , {secure_renegotiate, boolean, <<>>} - , {honor_cipher_order, boolean, <<>>} - , {dhfile, string, <<>>} - ]. - -raw_properties_udp_opts() -> - [ {active_n, integer, <<>>} - , {buffer, string, <<>>} - , {recbuf, string, <<>>} - , {sndbuf, string, <<>>} - , {reuseaddr, boolean, <<>>} - ]. - -raw_properties_dtls_opts() -> +fields(listener) -> + common_listener_opts() ++ + [ {tcp, + mk(ref(tcp_listener_opts), + #{ nullable => {true, recursively} + , desc => <<"The tcp socket options for tcp or ssl listener">> + })} + , {ssl, + mk(ref(ssl_listener_opts), + #{ nullable => {true, recursively} + , desc => <<"The ssl socket options for ssl listener">> + })} + , {udp, + mk(ref(udp_listener_opts), + #{ nullable => {true, recursively} + , desc => <<"The udp socket options for udp or dtls listener">> + })} + , {dtls, + mk(ref(dtls_listener_opts), + #{ nullable => {true, recursively} + , desc => <<"The dtls socket options for dtls listener">> + })} + ]; +fields(tcp_listener_opts) -> + [ {active_n, mk(integer(), #{})} + , {backlog, mk(integer(), #{})} + , {buffer, mk(binary(), #{})} + , {recbuf, mk(binary(), #{})} + , {sndbuf, mk(binary(), #{})} + , {high_watermark, mk(binary(), #{})} + , {nodelay, mk(boolean(), #{})} + , {reuseaddr, boolean()} + , {send_timeout, binary()} + , {send_timeout_close, boolean()} + ]; +fields(ssl_listener_opts) -> + [ {cacertfile, binary()} + , {certfile, binary()} + , {keyfile, binary()} + , {verify, binary()} + , {fail_if_no_peer_cert, boolean()} + , {server_name_indication, boolean()} + , {depth, integer()} + , {password, binary()} + , {handshake_timeout, binary()} + , {versions, hoconsc:array(binary())} + , {ciphers, hoconsc:array(binary())} + , {user_lookup_fun, binary()} + , {reuse_sessions, boolean()} + , {secure_renegotiate, boolean()} + , {honor_cipher_order, boolean()} + , {dhfile, binary()} + ]; +fields(udp_listener_opts) -> + [ {active_n, integer()} + , {buffer, binary()} + , {recbuf, binary()} + , {sndbuf, binary()} + , {reuseaddr, boolean()} + ]; +fields(dtls_listener_opts) -> Ls = lists_key_without( [versions,ciphers,handshake_timeout], 1, - raw_properties_ssl_opts() + fields(ssl_listener_opts) ), - [ {versions, {array, string}, <<>>} - , {ciphers, {array, string}, <<>>} + [ {versions, hoconsc:array(binary())} + , {ciphers, hoconsc:array(binary())} | Ls]. lists_key_without([], _N, L) -> L; -lists_key_without([K|Ks], N, L) -> +lists_key_without([K | Ks], N, L) -> lists_key_without(Ks, N, lists:keydelete(K, N, L)). -raw_properties_common_listener() -> - [ {enable, boolean, <<"Whether to enable this listener">>} - , {id, string, <<"Listener Id">>} - , {name, string, <<"Listener name">>} - , {type, string, - <<"Listener type. 
Enum: tcp, udp, ssl, dtls">>, - [<<"tcp">>, <<"ssl">>, <<"udp">>, <<"dtls">>]} - , {running, boolean, <<"Listener running status">>} - , {bind, string, <<"Listener bind address or port">>} - , {acceptors, integer, <<"Listener acceptors number">>} - , {access_rules, {array, string}, <<"Listener Access rules for client">>} - , {max_conn_rate, integer, <<"Max connection rate for the listener">>} - , {max_connections, integer, <<"Max connections for the listener">>} - , {mountpoint, string, - <<"The Mounpoint for clients of the listener. " - "The gateway-level mountpoint configuration can be overloaded " - "when it is not null or empty string">>} +common_listener_opts() -> + [ {enable, + mk(boolean(), + #{ nullable => true + , desc => <<"Whether to enable this listener">>})} + , {id, + mk(binary(), + #{ nullable => true + , desc => <<"Listener Id">>})} + , {name, + mk(binary(), + #{ nullable => true + , desc => <<"Listener name">>})} + , {type, + mk(hoconsc:enum([tcp, ssl, udp, dtls]), + #{ nullable => true + , desc => <<"Listener type. Enum: tcp, udp, ssl, dtls">>})} + , {running, + mk(boolean(), + #{ nullable => true + , desc => <<"Listener running status">>})} + , {bind, + mk(binary(), + #{ nullable => true + , desc => <<"Listener bind address or port">>})} + , {acceptors, + mk(integer(), + #{ nullable => true + , desc => <<"Listener acceptors number">>})} + , {access_rules, + mk(hoconsc:array(binary()), + #{ nullable => true + , desc => <<"Listener Access rules for client">>})} + , {max_conn_rate, + mk(integer(), + #{ nullable => true + , desc => <<"Max connection rate for the listener">>})} + , {max_connections, + mk(integer(), + #{ nullable => true + , desc => <<"Max connections for the listener">>})} + , {mountpoint, + mk(binary(), + #{ nullable => true + , desc => +<<"The Mounpoint for clients of the listener. " + "The gateway-level mountpoint configuration can be overloaded " + "when it is not null or empty string">>})} %% FIXME: - , {authentication, string, <<"NOT-SUPPORTED-NOW">>} - ]. + , {authentication, + mk(emqx_authn_schema:authenticator_type(), + #{ nullable => {true, recursively} + , desc => <<"The authenticatior for this listener">> + })} + ]. + +%%-------------------------------------------------------------------- +%% examples + +examples_listener_list() -> + #{stomp_listeners => [examples_listener()]}. + +examples_listener() -> + #{}. diff --git a/apps/emqx_gateway/src/emqx_gateway_cm_registry.erl b/apps/emqx_gateway/src/emqx_gateway_cm_registry.erl index e82537285..7cd45ef53 100644 --- a/apps/emqx_gateway/src/emqx_gateway_cm_registry.erl +++ b/apps/emqx_gateway/src/emqx_gateway_cm_registry.erl @@ -19,12 +19,8 @@ -behaviour(gen_server). - -export([start_link/1]). -%% XXX: needless -%-export([is_enabled/0]). - -export([ register_channel/2 , unregister_channel/2 ]). @@ -40,8 +36,7 @@ , code_change/3 ]). --include_lib("emqx/include/emqx.hrl"). - +-define(CM_SHARD, emqx_gateway_cm_shard). -define(LOCK, {?MODULE, cleanup_down}). -record(channel, {chid, pid}). 
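For orientation, a sketch of a listener object that would fit the fields declared above; all values are illustrative and not taken from a real configuration:

    example_tcp_listener() ->
        #{ <<"type">> => <<"tcp">>
         , <<"name">> => <<"default">>
         , <<"bind">> => <<"61613">>
         , <<"acceptors">> => 16
         , <<"max_conn_rate">> => 1000
         , <<"max_connections">> => 1024000
         , <<"mountpoint">> => <<"stomp/">>
         , <<"tcp">> => #{<<"active_n">> => 100, <<"nodelay">> => true}
         }.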
@@ -113,8 +108,7 @@ handle_info({membership, {mnesia, down, Node}}, State = #{type := Type}) -> Tab = tabname(Type), global:trans({?LOCK, self()}, fun() -> - %% FIXME: The shard name should be fixed later - mria:transaction(?MODULE, fun cleanup_channels/2, [Node, Tab]) + mria:transaction(?CM_SHARD, fun cleanup_channels/2, [Node, Tab]) end), {noreply, State}; diff --git a/apps/emqx_gateway/src/emqx_gateway_conf.erl b/apps/emqx_gateway/src/emqx_gateway_conf.erl index 557e46693..ddbf99189 100644 --- a/apps/emqx_gateway/src/emqx_gateway_conf.erl +++ b/apps/emqx_gateway/src/emqx_gateway_conf.erl @@ -17,6 +17,8 @@ %% @doc The gateway configuration management module -module(emqx_gateway_conf). +-include_lib("emqx/include/logger.hrl"). + %% Load/Unload -export([ load/0 , unload/0 @@ -50,8 +52,8 @@ ]). %% callbacks for emqx_config_handler --export([ pre_config_update/2 - , post_config_update/4 +-export([ pre_config_update/3 + , post_config_update/5 ]). -type atom_or_bin() :: atom() | binary(). @@ -87,18 +89,19 @@ load_gateway(GwName, Conf) -> unconvert_listeners(Ls) when is_list(Ls) -> lists:foldl(fun(Lis, Acc) -> {[Type, Name], Lis1} = maps_key_take([<<"type">>, <<"name">>], Lis), - emqx_map_lib:deep_merge(Acc, #{Type => #{Name => Lis1}}) + NLis1 = maps:without([<<"id">>], Lis1), + emqx_map_lib:deep_merge(Acc, #{Type => #{Name => NLis1}}) end, #{}, Ls). maps_key_take(Ks, M) -> maps_key_take(Ks, M, []). maps_key_take([], M, Acc) -> {lists:reverse(Acc), M}; -maps_key_take([K|Ks], M, Acc) -> +maps_key_take([K | Ks], M, Acc) -> case maps:take(K, M) of error -> throw(bad_key); {V, M1} -> - maps_key_take(Ks, M1, [V|Acc]) + maps_key_take(Ks, M1, [V | Acc]) end. -spec update_gateway(atom_or_bin(), map()) -> ok_or_err(). @@ -107,6 +110,8 @@ update_gateway(GwName, Conf0) -> <<"listeners">>, <<"authentication">>], Conf0), update({?FUNCTION_NAME, bin(GwName), Conf}). +%% FIXME: delete cert files ?? + -spec unload_gateway(atom_or_bin()) -> ok_or_err(). unload_gateway(GwName) -> update({?FUNCTION_NAME, bin(GwName)}). @@ -224,10 +229,10 @@ remove_authn(GwName, ListenerRef) -> %% @private update(Req) -> - res(emqx:update_config([gateway], Req)). + res(emqx_conf:update([gateway], Req, #{override_to => cluster})). res({ok, _Result}) -> ok; -res({error, {pre_config_update,emqx_gateway_conf,Reason}}) -> {error, Reason}; +res({error, {error, {pre_config_update,emqx_gateway_conf,Reason}}}) -> {error, Reason}; res({error, Reason}) -> {error, Reason}. bin({LType, LName}) -> @@ -241,17 +246,19 @@ bin(B) when is_binary(B) -> %% Config Handler %%-------------------------------------------------------------------- --spec pre_config_update(emqx_config:update_request(), +-spec pre_config_update(list(atom()), + emqx_config:update_request(), emqx_config:raw_config()) -> {ok, emqx_config:update_request()} | {error, term()}. 
-pre_config_update({load_gateway, GwName, Conf}, RawConf) -> +pre_config_update(_, {load_gateway, GwName, Conf}, RawConf) -> case maps:get(GwName, RawConf, undefined) of undefined -> - {ok, emqx_map_lib:deep_merge(RawConf, #{GwName => Conf})}; + NConf = tune_gw_certs(fun convert_certs/2, GwName, Conf), + {ok, emqx_map_lib:deep_merge(RawConf, #{GwName => NConf})}; _ -> {error, already_exist} end; -pre_config_update({update_gateway, GwName, Conf}, RawConf) -> +pre_config_update(_, {update_gateway, GwName, Conf}, RawConf) -> case maps:get(GwName, RawConf, undefined) of undefined -> {error, not_found}; @@ -260,37 +267,49 @@ pre_config_update({update_gateway, GwName, Conf}, RawConf) -> <<"authentication">>], Conf), {ok, emqx_map_lib:deep_merge(RawConf, #{GwName => NConf})} end; -pre_config_update({unload_gateway, GwName}, RawConf) -> +pre_config_update(_, {unload_gateway, GwName}, RawConf) -> + _ = tune_gw_certs(fun clear_certs/2, + GwName, + maps:get(GwName, RawConf, #{}) + ), {ok, maps:remove(GwName, RawConf)}; -pre_config_update({add_listener, GwName, {LType, LName}, Conf}, RawConf) -> +pre_config_update(_, {add_listener, GwName, {LType, LName}, Conf}, RawConf) -> case emqx_map_lib:deep_get( [GwName, <<"listeners">>, LType, LName], RawConf, undefined) of undefined -> - NListener = #{LType => #{LName => Conf}}, + NConf = convert_certs(certs_dir(GwName), Conf), + NListener = #{LType => #{LName => NConf}}, {ok, emqx_map_lib:deep_merge( RawConf, #{GwName => #{<<"listeners">> => NListener}})}; _ -> {error, already_exist} end; -pre_config_update({update_listener, GwName, {LType, LName}, Conf}, RawConf) -> +pre_config_update(_, {update_listener, GwName, {LType, LName}, Conf}, RawConf) -> case emqx_map_lib:deep_get( [GwName, <<"listeners">>, LType, LName], RawConf, undefined) of undefined -> {error, not_found}; - _OldConf -> - NListener = #{LType => #{LName => Conf}}, + OldConf -> + NConf = convert_certs(certs_dir(GwName), Conf, OldConf), + NListener = #{LType => #{LName => NConf}}, {ok, emqx_map_lib:deep_merge( RawConf, #{GwName => #{<<"listeners">> => NListener}})} end; -pre_config_update({remove_listener, GwName, {LType, LName}}, RawConf) -> - {ok, emqx_map_lib:deep_remove( - [GwName, <<"listeners">>, LType, LName], RawConf)}; +pre_config_update(_, {remove_listener, GwName, {LType, LName}}, RawConf) -> + Path = [GwName, <<"listeners">>, LType, LName], + case emqx_map_lib:deep_get(Path, RawConf, undefined) of + undefined -> + {ok, RawConf}; + OldConf -> + clear_certs(certs_dir(GwName), OldConf), + {ok, emqx_map_lib:deep_remove(Path, RawConf)} + end; -pre_config_update({add_authn, GwName, Conf}, RawConf) -> +pre_config_update(_, {add_authn, GwName, Conf}, RawConf) -> case emqx_map_lib:deep_get( [GwName, <<"authentication">>], RawConf, undefined) of undefined -> @@ -300,7 +319,7 @@ pre_config_update({add_authn, GwName, Conf}, RawConf) -> _ -> {error, already_exist} end; -pre_config_update({add_authn, GwName, {LType, LName}, Conf}, RawConf) -> +pre_config_update(_, {add_authn, GwName, {LType, LName}, Conf}, RawConf) -> case emqx_map_lib:deep_get( [GwName, <<"listeners">>, LType, LName], RawConf, undefined) of @@ -318,7 +337,7 @@ pre_config_update({add_authn, GwName, {LType, LName}, Conf}, RawConf) -> {error, already_exist} end end; -pre_config_update({update_authn, GwName, Conf}, RawConf) -> +pre_config_update(_, {update_authn, GwName, Conf}, RawConf) -> case emqx_map_lib:deep_get( [GwName, <<"authentication">>], RawConf, undefined) of undefined -> @@ -328,7 +347,7 @@ pre_config_update({update_authn, 
GwName, Conf}, RawConf) -> RawConf, #{GwName => #{<<"authentication">> => Conf}})} end; -pre_config_update({update_authn, GwName, {LType, LName}, Conf}, RawConf) -> +pre_config_update(_, {update_authn, GwName, {LType, LName}, Conf}, RawConf) -> case emqx_map_lib:deep_get( [GwName, <<"listeners">>, LType, LName], RawConf, undefined) of @@ -350,23 +369,25 @@ pre_config_update({update_authn, GwName, {LType, LName}, Conf}, RawConf) -> {ok, emqx_map_lib:deep_merge(RawConf, NGateway)} end end; -pre_config_update({remove_authn, GwName}, RawConf) -> +pre_config_update(_, {remove_authn, GwName}, RawConf) -> {ok, emqx_map_lib:deep_remove( [GwName, <<"authentication">>], RawConf)}; -pre_config_update({remove_authn, GwName, {LType, LName}}, RawConf) -> +pre_config_update(_, {remove_authn, GwName, {LType, LName}}, RawConf) -> Path = [GwName, <<"listeners">>, LType, LName, <<"authentication">>], {ok, emqx_map_lib:deep_remove(Path, RawConf)}; -pre_config_update(UnknownReq, _RawConf) -> +pre_config_update(_, UnknownReq, _RawConf) -> logger:error("Unknown configuration update request: ~0p", [UnknownReq]), {error, badreq}. --spec post_config_update(emqx_config:update_request(), emqx_config:config(), +-spec post_config_update(list(atom()), + emqx_config:update_request(), + emqx_config:config(), emqx_config:config(), emqx_config:app_envs()) -> ok | {ok, Result::any()} | {error, Reason::term()}. -post_config_update(Req, NewConfig, OldConfig, _AppEnvs) when is_tuple(Req) -> - [_Tag, GwName0|_] = tuple_to_list(Req), +post_config_update(_, Req, NewConfig, OldConfig, _AppEnvs) when is_tuple(Req) -> + [_Tag, GwName0 | _] = tuple_to_list(Req), GwName = binary_to_existing_atom(GwName0), case {maps:get(GwName, NewConfig, undefined), @@ -380,5 +401,58 @@ post_config_update(Req, NewConfig, OldConfig, _AppEnvs) when is_tuple(Req) -> {New, Old} when is_map(New), is_map(Old) -> emqx_gateway:update(GwName, New) end; -post_config_update(_Req, _NewConfig, _OldConfig, _AppEnvs) -> +post_config_update(_, _Req, _NewConfig, _OldConfig, _AppEnvs) -> ok. + +%%-------------------------------------------------------------------- +%% Internal funcs +%%-------------------------------------------------------------------- + + +tune_gw_certs(Fun, GwName, Conf) -> + SubDir = certs_dir(GwName), + case maps:get(<<"listeners">>, Conf, undefined) of + undefined -> Conf; + Liss -> + maps:put(<<"listeners">>, + maps:map(fun(_, Lis) -> + maps:map(fun(_, LisConf) -> + erlang:apply(Fun, [SubDir, LisConf]) + end, Lis) + end, Liss), + Conf) + end. + +certs_dir(GwName) when is_binary(GwName) -> + GwName. + +convert_certs(SubDir, Conf) -> + case emqx_tls_lib:ensure_ssl_files( + SubDir, + maps:get(<<"ssl">>, Conf, undefined) + ) of + {ok, SSL} -> + new_ssl_config(Conf, SSL); + {error, Reason} -> + ?SLOG(error, Reason#{msg => bad_ssl_config}), + throw({bad_ssl_config, Reason}) + end. + +convert_certs(SubDir, NConf, OConf) -> + OSSL = maps:get(<<"ssl">>, OConf, undefined), + NSSL = maps:get(<<"ssl">>, NConf, undefined), + case emqx_tls_lib:ensure_ssl_files(SubDir, NSSL) of + {ok, NSSL1} -> + ok = emqx_tls_lib:delete_ssl_files(SubDir, NSSL1, OSSL), + new_ssl_config(NConf, NSSL1); + {error, Reason} -> + ?SLOG(error, Reason#{msg => bad_ssl_config}), + throw({bad_ssl_config, Reason}) + end. + +new_ssl_config(Conf, undefined) -> Conf; +new_ssl_config(Conf, SSL) -> Conf#{<<"ssl">> => SSL}. + +clear_certs(SubDir, Conf) -> + SSL = maps:get(<<"ssl">>, Conf, undefined), + ok = emqx_tls_lib:delete_ssl_files(SubDir, undefined, SSL). 
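The pre_config_update hooks above now manage listener certificate files alongside the raw config: convert_certs/2 on add, convert_certs/3 on update, clear_certs/2 on remove. A minimal sketch of the update path, mirroring convert_certs/3 and using the same emqx_tls_lib calls shown above:

    update_listener_ssl(SubDir, NewSSL, OldSSL) ->
        %% make sure the SSL files referenced by the new config exist on disk
        case emqx_tls_lib:ensure_ssl_files(SubDir, NewSSL) of
            {ok, NewSSL1} ->
                %% then drop files the old config referenced but the new one
                %% no longer does
                ok = emqx_tls_lib:delete_ssl_files(SubDir, NewSSL1, OldSSL),
                {ok, NewSSL1};
            {error, Reason} ->
                {error, {bad_ssl_config, Reason}}
        end.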
diff --git a/apps/emqx_gateway/src/emqx_gateway_http.erl b/apps/emqx_gateway/src/emqx_gateway_http.erl index da440dba8..0d2f765c5 100644 --- a/apps/emqx_gateway/src/emqx_gateway_http.erl +++ b/apps/emqx_gateway/src/emqx_gateway_http.erl @@ -53,6 +53,8 @@ %% Utils for http, swagger, etc. -export([ return_http_error/2 , with_gateway/2 + , with_authn/2 + , with_listener_authn/3 , checks/2 , schema_bad_request/0 , schema_not_found/0 @@ -69,6 +71,10 @@ , listeners => [] }. +-elvis([{elvis_style, god_modules, disable}]). +-elvis([{elvis_style, no_nested_try_catch, disable}]). + + -define(DEFAULT_CALL_TIMEOUT, 15000). %%-------------------------------------------------------------------- @@ -159,14 +165,31 @@ remove_listener(ListenerId) -> -spec authn(gateway_name()) -> map(). authn(GwName) -> + %% XXX: Need append chain-nanme, authenticator-id? Path = [gateway, GwName, authentication], - emqx_map_lib:jsonable_map(emqx:get_config(Path)). + ChainName = emqx_gateway_utils:global_chain(GwName), + wrap_chain_name( + ChainName, + emqx_map_lib:jsonable_map(emqx:get_config(Path)) + ). -spec authn(gateway_name(), binary()) -> map(). authn(GwName, ListenerId) -> {_, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId), Path = [gateway, GwName, listeners, Type, Name, authentication], - emqx_map_lib:jsonable_map(emqx:get_config(Path)). + ChainName = emqx_gateway_utils:listener_chain(GwName, Type, Name), + wrap_chain_name( + ChainName, + emqx_map_lib:jsonable_map(emqx:get_config(Path)) + ). + +wrap_chain_name(ChainName, Conf) -> + case emqx_authentication:list_authenticators(ChainName) of + {ok, [#{id := Id} | _]} -> + Conf#{chain_name => ChainName, id => Id}; + _ -> + Conf + end. -spec add_authn(gateway_name(), map()) -> ok. add_authn(GwName, AuthConf) -> @@ -303,6 +326,20 @@ codestr(401) -> 'NOT_SUPPORTED_NOW'; codestr(404) -> 'RESOURCE_NOT_FOUND'; codestr(500) -> 'UNKNOW_ERROR'. +-spec with_authn(binary(), function()) -> any(). +with_authn(GwName0, Fun) -> + with_gateway(GwName0, fun(GwName, _GwConf) -> + Authn = emqx_gateway_http:authn(GwName), + Fun(GwName, Authn) + end). + +-spec with_listener_authn(binary(), binary(), function()) -> any(). +with_listener_authn(GwName0, Id, Fun) -> + with_gateway(GwName0, fun(GwName, _GwConf) -> + Authn = emqx_gateway_http:authn(GwName, Id), + Fun(GwName, Authn) + end). + -spec with_gateway(binary(), function()) -> any(). with_gateway(GwName0, Fun) -> try @@ -346,7 +383,7 @@ with_gateway(GwName0, Fun) -> -spec checks(list(), map()) -> ok. checks([], _) -> ok; -checks([K|Ks], Map) -> +checks([K | Ks], Map) -> case maps:is_key(K, Map) of true -> checks(Ks, Map); false -> diff --git a/apps/emqx_gateway/src/emqx_gateway_insta_sup.erl b/apps/emqx_gateway/src/emqx_gateway_insta_sup.erl index 52c23d459..efb3f6fe6 100644 --- a/apps/emqx_gateway/src/emqx_gateway_insta_sup.erl +++ b/apps/emqx_gateway/src/emqx_gateway_insta_sup.erl @@ -52,6 +52,8 @@ stopped_at :: integer() | undefined }). +-elvis([{elvis_style, invalid_dynamic_call, disable}]). 
+ %%-------------------------------------------------------------------- %% APIs %%-------------------------------------------------------------------- @@ -219,23 +221,6 @@ detailed_gateway_info(State) -> %% Internal funcs %%-------------------------------------------------------------------- -%% same with emqx_authentication:global_chain/1 -global_chain(mqtt) -> - 'mqtt:global'; -global_chain('mqtt-sn') -> - 'mqtt-sn:global'; -global_chain(coap) -> - 'coap:global'; -global_chain(lwm2m) -> - 'lwm2m:global'; -global_chain(stomp) -> - 'stomp:global'; -global_chain(_) -> - 'unknown:global'. - -listener_chain(GwName, Type, LisName) -> - emqx_gateway_utils:listener_id(GwName, Type, LisName). - %% There are two layer authentication configs %% stomp.authn %% / \ @@ -254,22 +239,23 @@ init_authn(GwName, Config) -> do_init_authn([], Names) -> Names; -do_init_authn([{_ChainName, _AuthConf = #{enable := false}}|More], Names) -> +do_init_authn([{_ChainName, _AuthConf = #{enable := false}} | More], Names) -> do_init_authn(More, Names); -do_init_authn([{ChainName, AuthConf}|More], Names) when is_map(AuthConf) -> +do_init_authn([{ChainName, AuthConf} | More], Names) when is_map(AuthConf) -> _ = application:ensure_all_started(emqx_authn), do_create_authn_chain(ChainName, AuthConf), - do_init_authn(More, [ChainName|Names]); -do_init_authn([_BadConf|More], Names) -> + do_init_authn(More, [ChainName | Names]); +do_init_authn([_BadConf | More], Names) -> do_init_authn(More, Names). authns(GwName, Config) -> Listeners = maps:to_list(maps:get(listeners, Config, #{})), lists:append( - [ [{listener_chain(GwName, LisType, LisName), authn_conf(Opts)} + [ [{emqx_gateway_utils:listener_chain(GwName, LisType, LisName), + authn_conf(Opts)} || {LisName, Opts} <- maps:to_list(LisNames) ] || {LisType, LisNames} <- Listeners]) - ++ [{global_chain(GwName), authn_conf(Config)}]. + ++ [{emqx_gateway_utils:global_chain(GwName), authn_conf(Config)}]. authn_conf(Conf) -> maps:get(authentication, Conf, #{enable => false}). @@ -328,13 +314,13 @@ do_update_one_by_one(NCfg, State = #state{ OAuths = authns(GwName, OCfg), NAuths = authns(GwName, NCfg), - if - Status == stopped, NEnable == true -> + case {Status, NEnable} of + {stopped, true} -> NState = State#state{config = NCfg}, cb_gateway_load(NState); - Status == stopped, NEnable == false -> + {stopped, false} -> {ok, State#state{config = NCfg}}; - Status == running, NEnable == true -> + {running, true} -> NState = case NAuths == OAuths of true -> State; false -> @@ -345,12 +331,12 @@ do_update_one_by_one(NCfg, State = #state{ end, %% XXX: minimum impact update ??? cb_gateway_update(NCfg, NState); - Status == running, NEnable == false -> + {running, false} -> case cb_gateway_unload(State) of {ok, NState} -> {ok, NState#state{config = NCfg}}; {error, Reason} -> {error, Reason} end; - true -> + _ -> throw(nomatch) end. @@ -448,7 +434,7 @@ cb_gateway_update(Config, end. start_child_process([]) -> []; -start_child_process([Indictor|_] = ChildPidOrSpecs) -> +start_child_process([Indictor | _] = ChildPidOrSpecs) -> case erlang:is_pid(Indictor) of true -> ChildPidOrSpecs; diff --git a/apps/emqx_gateway/src/emqx_gateway_schema.erl b/apps/emqx_gateway/src/emqx_gateway_schema.erl index 9a28e5e0d..9f35225b7 100644 --- a/apps/emqx_gateway/src/emqx_gateway_schema.erl +++ b/apps/emqx_gateway/src/emqx_gateway_schema.erl @@ -45,6 +45,7 @@ , comma_separated_list/0 , ip_port/0 ]). +-elvis([{elvis_style, dont_repeat_yourself, disable}]). -export([namespace/0, roots/0 , fields/1]). 
@@ -53,11 +54,40 @@ namespace() -> gateway. roots() -> [gateway]. fields(gateway) -> - [{stomp, sc_meta(ref(stomp) , #{nullable => {true, recursively}})}, - {mqttsn, sc_meta(ref(mqttsn) , #{nullable => {true, recursively}})}, - {coap, sc_meta(ref(coap) , #{nullable => {true, recursively}})}, - {lwm2m, sc_meta(ref(lwm2m) , #{nullable => {true, recursively}})}, - {exproto, sc_meta(ref(exproto), #{nullable => {true, recursively}})} + [{stomp, + sc(ref(stomp), + #{ nullable => {true, recursively} + , desc => +"The Stomp Gateway configuration.
+This gateway supports v1.2/1.1/1.0" + })}, + {mqttsn, + sc(ref(mqttsn), + #{ nullable => {true, recursively} + , desc => +"The MQTT-SN Gateway configuration.
+This gateway only supports the v1.2 protocol" + })}, + {coap, + sc(ref(coap), + #{ nullable => {true, recursively} + , desc => +"The CoAP Gateway configuration.
+This gateway is implemented based on RFC-7252 and +https://core-wg.github.io/coap-pubsub/draft-ietf-core-pubsub.html" + })}, + {lwm2m, + sc(ref(lwm2m), + #{ nullable => {true, recursively} + , desc => +"The LwM2M Gateway configuration.
+This gateway only supports the v1.0.1 protocol" + })}, + {exproto, + sc(ref(exproto), + #{ nullable => {true, recursively} + , desc => "The Extension Protocol configuration" + })} ]; fields(stomp) -> @@ -66,61 +96,198 @@ fields(stomp) -> ] ++ gateway_common_options(); fields(stomp_frame) -> - [ {max_headers, sc(integer(), 10)} - , {max_headers_length, sc(integer(), 1024)} - , {max_body_length, sc(integer(), 8192)} + [ {max_headers, + sc(integer(), + #{ default => 10 + , desc => "The maximum number of Header" + })} + , {max_headers_length, + sc(integer(), + #{ default => 1024 + , desc => "The maximum string length of the Header Value" + })} + , {max_body_length, + sc(integer(), + #{ default => 65536 + , desc => "Maximum number of bytes of Body allowed per Stomp packet" + })} ]; fields(mqttsn) -> - [ {gateway_id, sc(integer())} - , {broadcast, sc(boolean(), false)} - , {enable_qos3, sc(boolean(), true)} - , {predefined, hoconsc:array(ref(mqttsn_predefined))} + [ {gateway_id, + sc(integer(), + #{ default => 1 + , desc => +"MQTT-SN Gateway Id.
+When the broadcast option is enabled, +the gateway will broadcast ADVERTISE message with this value" + })} + , {broadcast, + sc(boolean(), + #{ default => false + , desc => "Whether to periodically broadcast ADVERTISE messages" + })} + %% TODO: rename + , {enable_qos3, + sc(boolean(), + #{ default => true + , desc => +"Allows connectionless clients to publish messages with a Qos of -1.
+This feature is defined for very simple client implementations +which do not support any other features except this one.
+There is no connection setup nor tear down, no registration nor subscription.
+The client just sends its PUBLISH messages to a GW" + })} + , {predefined, + sc(hoconsc:array(ref(mqttsn_predefined)), + #{ default => [] + , desc => +"The Pre-defined topic ids and topic names.
+A 'pre-defined' topic id is a topic id whose mapping to a topic name +is known in advance by both the client’s application and the gateway" + })} , {listeners, sc(ref(udp_listeners))} ] ++ gateway_common_options(); fields(mqttsn_predefined) -> - [ {id, sc(integer())} - , {topic, sc(binary())} + [ {id, sc(integer(), #{desc => "Topic Id.
Range: 1-65535"})} + , {topic, sc(binary(), #{desc => "Topic Name"})} ]; fields(coap) -> - [ {heartbeat, sc(duration(), <<"30s">>)} - , {connection_required, sc(boolean(), false)} - , {notify_type, sc(hoconsc:union([non, con, qos]), qos)} - , {subscribe_qos, sc(hoconsc:union([qos0, qos1, qos2, coap]), coap)} - , {publish_qos, sc(hoconsc:union([qos0, qos1, qos2, coap]), coap)} + [ {heartbeat, + sc(duration(), + #{ default => <<"30s">> + , desc => +"The minimum heartbeat interval required by the gateway server.

+When connection mode is enabled, this parameter is used to set the minimum +heartbeat interval for the connection to be alive." + })} + , {connection_required, + sc(boolean(), + #{ default => false + , desc => +"Enable or disable connection mode.
+Connection mode is a feature of non-standard protocols. When connection mode +is enabled, it is necessary to maintain the creation, authentication and liveness +of connection resources" + })} + , {notify_type, + sc(hoconsc:union([non, con, qos]), + #{ default => qos + , desc => +"The Notification Message will be delivered to the CoAP client if a new message is +received on an observed topic. +The type of delivered CoAP message can be set to:

+1. non: Non-confirmable;
+2. con: Confirmable;
+3. qos: Mapping from the QoS type of the received message, QoS0 -> non, QoS1,2 -> con" + })} + , {subscribe_qos, + sc(hoconsc:union([qos0, qos1, qos2, coap]), + #{ default => coap + , desc => +"The Default QoS Level indicator for subscribe request.

+This option specifies the QoS level for the CoAP Client when establishing a +subscription membership, if the subscribe request does not carry the `qos` option. +The indicator can be set to: + - qos0, qos1, qos2: Fixed default QoS level + - coap: Dynamic QoS level by the message type of subscribe request + * qos0: If the subscribe request is non-confirmable + * qos1: If the subscribe request is confirmable" + })} + , {publish_qos, + sc(hoconsc:union([qos0, qos1, qos2, coap]), + #{ default => coap + , desc => +"The Default QoS Level indicator for publish request.

+This option specifies the QoS level for the CoAP Client when publishing a +message to EMQ X PUB/SUB system, if the publish request does not carry the `qos` +option. The indicator can be set to: + - qos0, qos1, qos2: Fixed default QoS level + - coap: Dynamic QoS level by the message type of publish request + * qos0: If the publish request is non-confirmable + * qos1: If the publish request is confirmable" + })} + , {listeners, sc(ref(udp_listeners))} + ] ++ gateway_common_options(); fields(lwm2m) -> - [ {xml_dir, sc(binary(), "etc/lwm2m_xml")} - , {lifetime_min, sc(duration(), "1s")} - , {lifetime_max, sc(duration(), "86400s")} - , {qmode_time_window, sc(duration_s(), "22s")} + [ {xml_dir, + sc(binary(), + #{ default => "etc/lwm2m_xml" + , desc => "The Directory for LwM2M Resource definition" + })} + , {lifetime_min, + sc(duration(), + #{ default => "1s" + , desc => "Minimum value of lifetime allowed to be set by the LwM2M client" + })} + , {lifetime_max, + sc(duration(), + #{ default => "86400s" + , desc => "Maximum value of lifetime allowed to be set by the LwM2M client" + })} + , {qmode_time_window, + sc(duration_s(), + #{ default => "22s" + , desc => +"The value of the time window during which the network link is considered +valid by the LwM2M Gateway in QMode mode.

+For example, after receiving an update message from a client, any messages +within this time window are sent directly to the LwM2M client, and all messages +beyond this time window are temporarily stored in memory." + })} %% TODO: Support config resource path - , {auto_observe, sc(boolean(), false)} - , {update_msg_publish_condition, sc(hoconsc:union([always, contains_object_list]))} - , {translators, sc_meta(ref(translators), #{nullable => false})} + , {auto_observe, + sc(boolean(), + #{ default => false + , desc => "Automatically observe the object list of REGISTER packet" + })} + %% FIXME: not working now + , {update_msg_publish_condition, + sc(hoconsc:union([always, contains_object_list]), + #{ default => "contains_object_list" + , desc => +"Policy for publishing UPDATE event message to EMQ X.
+ - always: send update events as long as the UPDATE request is received. + - contains_object_list: send update events only if the UPDATE request carries any Object List." + })} + , {translators, + sc(ref(lwm2m_translators), + #{ nullable => false + , desc => "Topic configuration for LwM2M's gateway publishing and subscription" + })} , {listeners, sc(ref(udp_listeners))} ] ++ gateway_common_options(); fields(exproto) -> - [ {server, sc(ref(exproto_grpc_server))} - , {handler, sc(ref(exproto_grpc_handler))} + [ {server, + sc(ref(exproto_grpc_server), + #{ desc => "Configurations for starting the ConnectionAdapter service" + })} + , {handler, + sc(ref(exproto_grpc_handler), + #{ desc => "Configurations for request to ConnectionHandler service" + })} , {listeners, sc(ref(udp_tcp_listeners))} ] ++ gateway_common_options(); fields(exproto_grpc_server) -> - [ {bind, sc(hoconsc:union([ip_port(), integer()]))} - , {ssl, sc_meta(ref(ssl_server_opts), - #{nullable => {true, recursively}})} + [ {bind, + sc(hoconsc:union([ip_port(), integer()]))} + , {ssl, + sc(ref(ssl_server_opts), + #{ nullable => {true, recursively} + })} ]; fields(exproto_grpc_handler) -> [ {address, sc(binary())} - , {ssl, sc_meta(ref(ssl_client_opts), - #{nullable => {true, recursively}})} + , {ssl, + sc(ref(ssl_client_opts), + #{ nullable => {true, recursively} + })} ]; fields(ssl_server_opts) -> @@ -140,17 +307,42 @@ fields(clientinfo_override) -> , {clientid, sc(binary())} ]; -fields(translators) -> - [ {command, sc(ref(translator))} - , {response, sc(ref(translator))} - , {notify, sc(ref(translator))} - , {register, sc(ref(translator))} - , {update, sc(ref(translator))} +fields(lwm2m_translators) -> + [ {command, + sc(ref(translator), + #{ desc => +"The topic for receiving downstream commands.
+For each new LwM2M client that succeeds in going online, the gateway creates +a subscription relationship to receive downstream commands and sends them to +the LwM2M client" + })} + , {response, + sc(ref(translator), + #{ desc => +"The topic for gateway to publish the acknowledge events from LwM2M client" + })} + , {notify, + sc(ref(translator), + #{ desc => +"The topic for gateway to publish the notify events from LwM2M client.

+After the gateway successfully observes a resource of the LwM2M client, it will send the +notify events via this topic if the client reports any resource changes" + })} + , {register, + sc(ref(translator), + #{ desc => +"The topic for gateway to publish the register events from LwM2M client.

" + })} + , {update, + sc(ref(translator), + #{ desc => +"The topic for gateway to publish the update events from LwM2M client.
" + })} ]; fields(translator) -> [ {topic, sc(binary())} - , {qos, sc(range(0, 2), 0)} + , {qos, sc(range(0, 2), #{default => 0})} ]; fields(udp_listeners) -> @@ -172,7 +364,7 @@ fields(udp_tcp_listeners) -> fields(tcp_listener) -> [ %% some special confs for tcp listener - {acceptors, sc(integer(), 16)} + {acceptors, sc(integer(), #{default => 16})} ] ++ tcp_opts() ++ proxy_protocol_opts() ++ @@ -180,9 +372,11 @@ fields(tcp_listener) -> fields(ssl_listener) -> fields(tcp_listener) ++ - [{ssl, sc_meta(hoconsc:ref(emqx_schema, "listener_ssl_opts"), - #{desc => "SSL listener options"})}]; - + [{ssl, + sc(hoconsc:ref(emqx_schema, "listener_ssl_opts"), + #{ desc => "SSL listener options" + })} + ]; fields(udp_listener) -> [ @@ -192,18 +386,17 @@ fields(udp_listener) -> common_listener_opts(); fields(dtls_listener) -> - [ {acceptors, sc(integer(), 16)} + [ {acceptors, sc(integer(), #{default => 16})} ] ++ fields(udp_listener) ++ - [{dtls, sc_meta(ref(dtls_opts), - #{desc => "DTLS listener options"})}]; + [{dtls, sc(ref(dtls_opts), #{desc => "DTLS listener options"})}]; fields(udp_opts) -> - [ {active_n, sc(integer(), 100)} + [ {active_n, sc(integer(), #{default => 100})} , {recbuf, sc(bytesize())} , {sndbuf, sc(bytesize())} , {buffer, sc(bytesize())} - , {reuseaddr, sc(boolean(), true)} + , {reuseaddr, sc(boolean(), #{default => true})} ]; fields(dtls_opts) -> @@ -215,66 +408,113 @@ fields(dtls_opts) -> }, false). authentication() -> - sc_meta(hoconsc:union( - [ hoconsc:ref(emqx_authn_mnesia, config) - , hoconsc:ref(emqx_authn_mysql, config) - , hoconsc:ref(emqx_authn_pgsql, config) - , hoconsc:ref(emqx_authn_mongodb, standalone) - , hoconsc:ref(emqx_authn_mongodb, 'replica-set') - , hoconsc:ref(emqx_authn_mongodb, 'sharded-cluster') - , hoconsc:ref(emqx_authn_redis, standalone) - , hoconsc:ref(emqx_authn_redis, cluster) - , hoconsc:ref(emqx_authn_redis, sentinel) - , hoconsc:ref(emqx_authn_http, get) - , hoconsc:ref(emqx_authn_http, post) - , hoconsc:ref(emqx_authn_jwt, 'hmac-based') - , hoconsc:ref(emqx_authn_jwt, 'public-key') - , hoconsc:ref(emqx_authn_jwt, 'jwks') - , hoconsc:ref(emqx_enhanced_authn_scram_mnesia, config) - ]), - #{nullable => {true, recursively}, - desc => + sc(hoconsc:union( + [ hoconsc:ref(emqx_authn_mnesia, config) + , hoconsc:ref(emqx_authn_mysql, config) + , hoconsc:ref(emqx_authn_pgsql, config) + , hoconsc:ref(emqx_authn_mongodb, standalone) + , hoconsc:ref(emqx_authn_mongodb, 'replica-set') + , hoconsc:ref(emqx_authn_mongodb, 'sharded-cluster') + , hoconsc:ref(emqx_authn_redis, standalone) + , hoconsc:ref(emqx_authn_redis, cluster) + , hoconsc:ref(emqx_authn_redis, sentinel) + , hoconsc:ref(emqx_authn_http, get) + , hoconsc:ref(emqx_authn_http, post) + , hoconsc:ref(emqx_authn_jwt, 'hmac-based') + , hoconsc:ref(emqx_authn_jwt, 'public-key') + , hoconsc:ref(emqx_authn_jwt, 'jwks') + , hoconsc:ref(emqx_enhanced_authn_scram_mnesia, config) + ]), + #{ nullable => {true, recursively} + , desc => """Default authentication configs for all of the gateway listeners.
For per-listener overrides see authentication -in listener configs"""}). +in listener configs""" + }). gateway_common_options() -> - [ {enable, sc(boolean(), true)} - , {enable_stats, sc(boolean(), true)} - , {idle_timeout, sc(duration(), <<"30s">>)} - , {mountpoint, sc(binary(), <<>>)} - , {clientinfo_override, sc(ref(clientinfo_override))} + [ {enable, + sc(boolean(), + #{ default => true + , desc => "Whether to enable this gateway" + })} + , {enable_stats, + sc(boolean(), + #{ default => true + , desc => "Whether to enable client process statistics" + })} + , {idle_timeout, + sc(duration(), + #{ default => <<"30s">> + , desc => +"The idle time of the client connection process.

+it has two purposes: +1. A newly created client process that does not receive any client requests + after that time will be closed directly. +2. A running client process that does not receive any client requests after + this time will go into hibernation to save resources." + })} + , {mountpoint, + sc(binary(), + #{ default => <<>> + %% TODO: variable support? + , desc => "" + })} + , {clientinfo_override, + sc(ref(clientinfo_override), + #{ desc => "" + })} , {authentication, authentication()} ]. common_listener_opts() -> - [ {enable, sc(boolean(), true)} - , {bind, sc(hoconsc:union([ip_port(), integer()]))} - , {max_connections, sc(integer(), 1024)} - , {max_conn_rate, sc(integer())} + [ {enable, + sc(boolean(), + #{ default => true + })} + , {bind, + sc(hoconsc:union([ip_port(), integer()]), + #{})} + , {max_connections, + sc(integer(), + #{ default => 1024 + })} + , {max_conn_rate, + sc(integer(), + #{ default => 1000 + })} , {authentication, authentication()} - , {mountpoint, sc(binary(), undefined)} - , {access_rules, sc(hoconsc:array(string()), [])} + , {mountpoint, + sc(binary(), + #{ default => undefined + })} + , {access_rules, + sc(hoconsc:array(string()), + #{ default => [] + })} ]. tcp_opts() -> - [{tcp, sc_meta(ref(emqx_schema, "tcp_opts"), #{})}]. + [{tcp, sc(ref(emqx_schema, "tcp_opts"), #{})}]. udp_opts() -> - [{udp, sc_meta(ref(udp_opts), #{})}]. + [{udp, sc(ref(udp_opts), #{})}]. proxy_protocol_opts() -> - [ {proxy_protocol, sc(boolean(), false)} - , {proxy_protocol_timeout, sc(duration(), "15s")} + [ {proxy_protocol, + sc(boolean(), + #{ default => false + })} + , {proxy_protocol_timeout, + sc(duration(), + #{ default => "15s" + })} ]. sc(Type) -> - sc_meta(Type, #{}). + sc(Type, #{}). -sc(Type, Default) -> - sc_meta(Type, #{default => Default}). - -sc_meta(Type, Meta) -> +sc(Type, Meta) -> hoconsc:mk(Type, Meta). map(Name, Type) -> diff --git a/apps/emqx_gateway/src/emqx_gateway_utils.erl b/apps/emqx_gateway/src/emqx_gateway_utils.erl index a497e11d0..5d17fe6ca 100644 --- a/apps/emqx_gateway/src/emqx_gateway_utils.erl +++ b/apps/emqx_gateway/src/emqx_gateway_utils.erl @@ -34,6 +34,8 @@ , listener_id/3 , parse_listener_id/1 , is_running/2 + , global_chain/1 + , listener_chain/3 ]). -export([ stringfy/1 @@ -64,6 +66,8 @@ -define(DEFAULT_OOM_POLICY, #{max_heap_size => 4194304, message_queue_len => 32000}). +-elvis([{elvis_style, god_modules, disable}]). + -spec childspec(supervisor:worker(), Mod :: atom()) -> supervisor:child_spec(). childspec(Type, Mod) -> @@ -159,6 +163,23 @@ is_running(ListenerId, #{<<"bind">> := ListenOn0}) -> false end. +%% same with emqx_authentication:global_chain/1 +global_chain(mqtt) -> + 'mqtt:global'; +global_chain('mqtt-sn') -> + 'mqtt-sn:global'; +global_chain(coap) -> + 'coap:global'; +global_chain(lwm2m) -> + 'lwm2m:global'; +global_chain(stomp) -> + 'stomp:global'; +global_chain(_) -> + 'unknown:global'. + +listener_chain(GwName, Type, LisName) -> + listener_id(GwName, Type, LisName). + bin(A) when is_atom(A) -> atom_to_binary(A); bin(L) when is_list(L); is_binary(L) -> @@ -183,7 +204,7 @@ stringfy(T) when is_list(T); is_binary(T) -> stringfy(T) -> iolist_to_binary(io_lib:format("~0p", [T])). --spec parse_address(binary()|list()) -> {list(), integer()}. +-spec parse_address(binary() | list()) -> {list(), integer()}. 
parse_address(S) when is_binary(S); is_list(S) -> S1 = case is_binary(S) of true -> lists:reverse(binary_to_list(S)); @@ -215,9 +236,9 @@ normalize_config(RawConf) -> [bind, tcp, ssl, udp, dtls] ++ proplists:get_keys(SocketOpts), Confs), Cfg = maps:merge(Cfg0, RemainCfgs), - [{Type, Name, ListenOn, SocketOpts, Cfg}|AccIn2] + [{Type, Name, ListenOn, SocketOpts, Cfg} | AccIn2] end, [], Liss), - [Listeners|AccIn1] + [Listeners | AccIn1] end, [], LisMap)). esockd_opts(Type, Opts0) -> diff --git a/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_api.erl b/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_api.erl index 98c9fabe8..b174fdcc0 100644 --- a/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_api.erl +++ b/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_api.erl @@ -17,50 +17,112 @@ -module(emqx_lwm2m_api). -behaviour(minirest_api). +-include_lib("typerefl/include/types.hrl"). --export([api_spec/0]). +-export([api_spec/0, paths/0, schema/1, fields/1, namespace/0]). --export([lookup_cmd/2]). +-export([lookup_cmd/2, observe/2, read/2, write/2]). --define(PREFIX, "/gateway/lwm2m/:clientid"). +-define(PATH(Suffix), "/gateway/lwm2m/:clientid"Suffix). +-define(DATA_TYPE, ['Integer', 'Float', 'Time', 'String', 'Boolean', 'Opaque', 'Objlnk']). --import(emqx_mgmt_util, [ object_schema/1 - , error_schema/2 - , properties/1]). +-import(hoconsc, [mk/2, ref/1, ref/2]). +-import(emqx_dashboard_swagger, [error_codes/2]). + +namespace() -> "lwm2m". api_spec() -> - {[lookup_cmd_api()], []}. + emqx_dashboard_swagger:spec(?MODULE). -lookup_cmd_paramters() -> - [ make_paramter(clientid, path, true, "string") - , make_paramter(path, query, true, "string") - , make_paramter(action, query, true, "string")]. +paths() -> + [?PATH("/lookup_cmd"), ?PATH("/observe"), ?PATH("/read"), ?PATH("/write")]. -lookup_cmd_properties() -> - properties([ {clientid, string} - , {path, string} - , {action, string} - , {code, string} - , {codeMsg, string} - , {content, {array, object}, lookup_cmd_content_props()}]). - -lookup_cmd_content_props() -> - [ {operations, string, <<"Resource Operations">>} - , {dataType, string, <<"Resource Type">>} - , {path, string, <<"Resource Path">>} - , {name, string, <<"Resource Name">>}]. - -lookup_cmd_api() -> - Metadata = #{get => - #{description => <<"look up resource">>, - parameters => lookup_cmd_paramters(), - responses => - #{<<"200">> => object_schema(lookup_cmd_properties()), - <<"404">> => error_schema("client not found error", ['CLIENT_NOT_FOUND']) - } - }}, - {?PREFIX ++ "/lookup_cmd", Metadata, lookup_cmd}. 
+schema(?PATH("/lookup_cmd")) -> + #{ + 'operationId' => lookup_cmd, + get => #{ + tags => [<<"lwm2m">>], + description => <<"Look up resource">>, + parameters => [ + {clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})}, + {path, mk(binary(), #{in => query, required => true, example => "/3/0/7"})}, + {action, mk(binary(), #{in => query, required => true, example => "discover"})} + ], + 'requestBody' => [], + responses => #{ + 200 => [ + {clientid, mk(binary(), #{example => "urn:oma:lwm2m:oma:2"})}, + {path, mk(binary(), #{example => "/3/0/7"})}, + {action, mk(binary(), #{example => "discover"})}, + {'codeMsg', mk(binary(), #{example => "reply_not_received"})}, + {content, mk(hoconsc:array(ref(resource)), #{})} + ], + 404 => error_codes(['CLIENT_NOT_FOUND'], <<"Client not found">>) + } + } + }; +schema(?PATH("/observe")) -> + #{ + 'operationId' => observe, + post => #{ + tags => [<<"lwm2m">>], + description => <<"(cancel) observe resource">>, + parameters => [ + {clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})}, + {path, mk(binary(), #{in => query, required => true, example => "/3/0/7"})}, + {enable, mk(boolean(), #{in => query, required => true, example => true})} + ], + 'requestBody' => [], + responses => #{ + 200 => <<"No Content">>, + 404 => error_codes(['CLIENT_NOT_FOUND'], <<"Clientid not found">>) + } + } + }; +schema(?PATH("/read")) -> + #{ + 'operationId' => read, + post => #{ + tags => [<<"lwm2m">>], + description => <<"Send a read command to resource">>, + parameters => [ + {clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})}, + {path, mk(binary(), #{in => query, required => true, example => "/3/0/7"})} + ], + responses => #{ + 200 => <<"No Content">>, + 404 => error_codes(['CLIENT_NOT_FOUND'], <<"clientid not found">>) + } + } + }; +schema(?PATH("/write")) -> + #{ + 'operationId' => write, + post => #{ + description => <<"Send a write command to resource">>, + tags => [<<"lwm2m">>], + parameters => [ + {clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})}, + {path, mk(binary(), #{in => query, required => true, example => "/3/0/7"})}, + {type, mk(hoconsc:enum(?DATA_TYPE), + #{in => query, required => true, example => 'Integer'})}, + {value, mk(binary(), #{in => query, required => true, example => 123})} + ], + responses => #{ + 200 => <<"No Content">>, + 404 => error_codes(['CLIENT_NOT_FOUND'], <<"Clientid not found">>) + } + } + }. +fields(resource) -> + [ + {operations, mk(binary(), #{desc => <<"Resource Operations">>, example => "E"})}, + {'dataType', mk(hoconsc:enum(?DATA_TYPE), #{desc => <<"Data Type">>, + example => 'Integer'})}, + {path, mk(binary(), #{desc => <<"Resource Path">>, example => "urn:oma:lwm2m:oma:2"})}, + {name, mk(binary(), #{desc => <<"Resource Name">>, example => "lwm2m-test"})} + ]. 
lookup_cmd(get, #{bindings := Bindings, query_string := QS}) -> ClientId = maps:get(clientid, Bindings), @@ -99,37 +161,40 @@ format_cmd_content(Content, <<"discover">>, Result) -> [H | Content1] = Content, {_, [HObjId]} = emqx_lwm2m_session:parse_object_list(H), [ObjId | _]= path_list(HObjId), - ObjectList = case Content1 of - [Content2 | _] -> - {_, ObjL} = emqx_lwm2m_session:parse_object_list(Content2), - ObjL; - [] -> [] - end, + ObjectList = + case Content1 of + [Content2 | _] -> + {_, ObjL} = emqx_lwm2m_session:parse_object_list(Content2), + ObjL; + [] -> [] + end, R = case emqx_lwm2m_xml_object:get_obj_def(binary_to_integer(ObjId), true) of {error, _} -> lists:map(fun(Object) -> #{Object => Object} end, ObjectList); ObjDefinition -> - lists:map( - fun(Object) -> - [_, _, RawResId| _] = path_list(Object), - ResId = binary_to_integer(RawResId), - Operations = case emqx_lwm2m_xml_object:get_resource_operations(ResId, ObjDefinition) of - "E" -> - #{operations => list_to_binary("E")}; - Oper -> - #{'dataType' => list_to_binary(emqx_lwm2m_xml_object:get_resource_type(ResId, ObjDefinition)), - operations => list_to_binary(Oper)} - end, - Operations#{path => Object, - name => list_to_binary(emqx_lwm2m_xml_object:get_resource_name(ResId, ObjDefinition))} - end, ObjectList) + lists:map(fun(Obj) -> to_operations(Obj, ObjDefinition) end, ObjectList) end, Result#{content => R}; format_cmd_content(Content, _, Result) -> Result#{content => Content}. +to_operations(Obj, ObjDefinition) -> + [_, _, RawResId| _] = path_list(Obj), + ResId = binary_to_integer(RawResId), + Operations = + case emqx_lwm2m_xml_object:get_resource_operations(ResId, ObjDefinition) of + "E" -> #{operations => <<"E">>}; + Oper -> + #{'dataType' => + list_to_binary(emqx_lwm2m_xml_object:get_resource_type(ResId, ObjDefinition)), + operations => list_to_binary(Oper) + } + end, + Operations#{path => Obj, + name => list_to_binary(emqx_lwm2m_xml_object:get_resource_name(ResId, ObjDefinition))}. + path_list(Path) -> case binary:split(binary_util:trim(Path, $/), [<<$/>>], [global]) of [ObjId, ObjInsId, ResId, ResInstId] -> [ObjId, ObjInsId, ResId, ResInstId]; @@ -138,8 +203,43 @@ path_list(Path) -> [ObjId] -> [ObjId] end. -make_paramter(Name, In, IsRequired, Type) -> - #{name => Name, - in => In, - required => IsRequired, - schema => #{type => Type}}. +observe(post, #{bindings := #{clientid := ClientId}, + query_string := #{<<"path">> := Path, <<"enable">> := Enable}}) -> + MsgType = case Enable of + true -> <<"observe">>; + _ -> <<"cancel-observe">> + end, + + Cmd = #{<<"msgType">> => MsgType, + <<"data">> => #{<<"path">> => Path} + }, + + send_cmd(ClientId, Cmd). + + +read(post, #{bindings := #{clientid := ClientId}, + query_string := Qs}) -> + + Cmd = #{<<"msgType">> => <<"read">>, + <<"data">> => Qs + }, + + send_cmd(ClientId, Cmd). + +write(post, #{bindings := #{clientid := ClientId}, + query_string := Qs}) -> + + Cmd = #{<<"msgType">> => <<"write">>, + <<"data">> => Qs + }, + + send_cmd(ClientId, Cmd). + +send_cmd(ClientId, Cmd) -> + case emqx_gateway_cm_registry:lookup_channels(lwm2m, ClientId) of + [Channel | _] -> + ok = emqx_lwm2m_channel:send_cmd(Channel, Cmd), + {200}; + _ -> + {404, #{code => 'CLIENT_NOT_FOUND'}} + end. 
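
Taken together, the new schema/1 definitions and handlers let the dashboard drive observe/read/write over REST. A short sketch of calling the observe handler directly, with the request-map shape that minirest is assumed to pass in; the clientid and resource path are example values only.

    Req = #{bindings => #{clientid => <<"urn:oma:lwm2m:oma:2">>},
            query_string => #{<<"path">> => <<"/3/0/7">>, <<"enable">> => true}},
    case emqx_lwm2m_api:observe(post, Req) of
        {200} ->
            ok;                                   %% command handed to the live session
        {404, #{code := 'CLIENT_NOT_FOUND'}} ->
            no_such_client                        %% no channel registered for this clientid
    end.
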
diff --git a/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_channel.erl b/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_channel.erl index d291d7e91..0cd404154 100644 --- a/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_channel.erl +++ b/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_channel.erl @@ -26,7 +26,9 @@ , stats/1 , with_context/2 , do_takeover/3 - , lookup_cmd/3]). + , lookup_cmd/3 + , send_cmd/2 + ]). -export([ init/2 , handle_in/2 @@ -133,6 +135,9 @@ with_context(Ctx, ClientInfo) -> lookup_cmd(Channel, Path, Action) -> gen_server:call(Channel, {?FUNCTION_NAME, Path, Action}). +send_cmd(Channel, Cmd) -> + gen_server:call(Channel, {?FUNCTION_NAME, Cmd}). + %%-------------------------------------------------------------------- %% Handle incoming packet %%-------------------------------------------------------------------- @@ -171,6 +176,10 @@ handle_call({lookup_cmd, Path, Type}, _From, #channel{session = Session} = Chann Result = emqx_lwm2m_session:find_cmd_record(Path, Type, Session), {reply, {ok, Result}, Channel}; +handle_call({send_cmd, Cmd}, _From, Channel) -> + {ok, Outs, Channel2} = call_session(send_cmd, Cmd, Channel), + {reply, ok, Outs, Channel2}; + handle_call(Req, _From, Channel) -> ?SLOG(error, #{ msg => "unexpected_call" , call => Req diff --git a/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_session.erl b/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_session.erl index 9b38a6c62..e32e4ceb3 100644 --- a/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_session.erl +++ b/apps/emqx_gateway/src/lwm2m/emqx_lwm2m_session.erl @@ -34,6 +34,7 @@ , handle_protocol_in/3 , handle_deliver/3 , timeout/3 + , send_cmd/3 , set_reply/2]). -export_type([session/0]). @@ -235,6 +236,9 @@ set_reply(Msg, #session{coap = Coap} = Session) -> Coap2 = emqx_coap_tm:set_reply(Msg, Coap), Session#session{coap = Coap2}. +send_cmd(Cmd, _, Session) -> + return(send_cmd_impl(Cmd, Session)). + %%-------------------------------------------------------------------- %% Protocol Stack %%-------------------------------------------------------------------- @@ -599,6 +603,7 @@ send_to_mqtt(Ctx, EventType, Payload, {Topic, Qos}, proto_publish(Topic, Payload, Qos, Headers, WithContext, #session{endpoint_name = Epn} = Session) -> MountedTopic = mount(Topic, Session), + %% TODO: Append message metadata into headers Msg = emqx_message:make(Epn, Qos, MountedTopic, emqx_json:encode(Payload), #{}, Headers), WithContext(publish, [MountedTopic, Msg]), @@ -683,6 +688,16 @@ get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' : get_expiry_time(_) -> 0. +%%-------------------------------------------------------------------- +%% Send CMD +%%-------------------------------------------------------------------- +send_cmd_impl(Cmd, #session{reg_info = RegInfo} = Session) -> + CacheMode = is_cache_mode(Session), + AlternatePath = maps:get(<<"alternatePath">>, RegInfo, <<"/">>), + {Req, Ctx} = emqx_lwm2m_cmd:mqtt_to_coap(AlternatePath, Cmd), + Session2 = record_request(Ctx, Session), + maybe_do_deliver_to_coap(Ctx, Req, 0, CacheMode, Session2). + %%-------------------------------------------------------------------- %% Call CoAP %%-------------------------------------------------------------------- @@ -726,7 +741,6 @@ do_out([{Ctx, Out} | T], TM, Msgs) -> do_out(_, TM, Msgs) -> {ok, TM, Msgs}. 
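
Below the REST layer, send_cmd/2 is a gen_server call into the channel, which forwards to the session's new send_cmd/3 and from there to emqx_lwm2m_cmd:mqtt_to_coap/2. A sketch of driving that path directly, using only calls visible in these hunks; the clientid and the /3/0/13 write target are example values.

    Cmd = #{<<"msgType">> => <<"write">>,
            <<"data">> => #{<<"path">>  => <<"/3/0/13">>,
                            <<"type">>  => <<"Integer">>,
                            <<"value">> => <<"123">>}},
    [Channel | _] = emqx_gateway_cm_registry:lookup_channels(lwm2m, <<"urn:oma:lwm2m:oma:2">>),
    ok = emqx_lwm2m_channel:send_cmd(Channel, Cmd),
    %% the session converts the map with emqx_lwm2m_cmd:mqtt_to_coap/2 and either
    %% delivers the CoAP request immediately or caches it while in Q-mode.
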
- %%-------------------------------------------------------------------- %% CMD Record %%-------------------------------------------------------------------- diff --git a/apps/emqx_gateway/src/mqttsn/emqx_sn_broadcast.erl b/apps/emqx_gateway/src/mqttsn/emqx_sn_broadcast.erl index 3003bb5fc..93f8301e7 100644 --- a/apps/emqx_gateway/src/mqttsn/emqx_sn_broadcast.erl +++ b/apps/emqx_gateway/src/mqttsn/emqx_sn_broadcast.erl @@ -51,6 +51,7 @@ stop() -> %%-------------------------------------------------------------------- init([GwId, Port]) -> + %% FIXME: Duration = application:get_env(emqx_sn, advertise_duration, ?DEFAULT_DURATION), {ok, Sock} = gen_udp:open(0, [binary, {broadcast, true}]), {ok, ensure_advertise(#state{gwid = GwId, addrs = boradcast_addrs(), diff --git a/apps/emqx_gateway/src/mqttsn/emqx_sn_channel.erl b/apps/emqx_gateway/src/mqttsn/emqx_sn_channel.erl index 111e84817..ffaa96c50 100644 --- a/apps/emqx_gateway/src/mqttsn/emqx_sn_channel.erl +++ b/apps/emqx_gateway/src/mqttsn/emqx_sn_channel.erl @@ -790,16 +790,26 @@ check_pub_authz({TopicName, _Flags, _Data}, end. convert_pub_to_msg({TopicName, Flags, Data}, - Channel = #channel{ - clientinfo = #{clientid := ClientId}}) -> + Channel = #channel{clientinfo = #{clientid := ClientId}}) -> #mqtt_sn_flags{qos = QoS, dup = Dup, retain = Retain} = Flags, NewQoS = get_corrected_qos(QoS), - Message = emqx_message:make(ClientId, NewQoS, TopicName, Data), - NMessage = emqx_message:set_flags( - #{dup => Dup, retain => Retain}, - Message - ), - {ok, NMessage, Channel}. + Message = put_message_headers( + emqx_message:make( + ClientId, NewQoS, TopicName, Data, + #{dup => Dup, retain => Retain}, #{}), Channel), + {ok, Message, Channel}. + +put_message_headers(Msg, #channel{ + conninfo = #{proto_ver := ProtoVer}, + clientinfo = #{ + protocol := Protocol, + username := Username, + peerhost := PeerHost}}) -> + emqx_message:set_headers( + #{proto_ver => ProtoVer, + protocol => Protocol, + username => Username, + peerhost => PeerHost}, Msg). get_corrected_qos(?QOS_NEG1) -> ?QOS_0; get_corrected_qos(QoS) -> QoS. @@ -1307,7 +1317,7 @@ ensure_disconnected(Reason, Channel = #channel{ mabye_publish_will_msg(Channel = #channel{will_msg = undefined}) -> Channel; mabye_publish_will_msg(Channel = #channel{will_msg = WillMsg}) -> - ok = publish_will_msg(WillMsg), + ok = publish_will_msg(put_message_headers(WillMsg, Channel)), Channel#channel{will_msg = undefined}. publish_will_msg(Msg) -> diff --git a/apps/emqx_gateway/src/mqttsn/emqx_sn_registry.erl b/apps/emqx_gateway/src/mqttsn/emqx_sn_registry.erl index b51272f2d..25c7bd2b0 100644 --- a/apps/emqx_gateway/src/mqttsn/emqx_sn_registry.erl +++ b/apps/emqx_gateway/src/mqttsn/emqx_sn_registry.erl @@ -52,21 +52,6 @@ -record(emqx_sn_registry, {key, value}). -%% Mnesia bootstrap -%-export([mnesia/1]). - -%-boot_mnesia({mnesia, [boot]}). - -%%% @doc Create or replicate tables. -%-spec(mnesia(boot | copy) -> ok). -%mnesia(boot) -> -% %% Optimize storage -% StoreProps = [{ets, [{read_concurrency, true}]}], -% ok = mria:create_table(?MODULE, [ -% {attributes, record_info(fields, emqx_sn_registry)}, -% {ram_copies, [node()]}, -% {storage_properties, StoreProps}]). - -type registry() :: {Tab :: atom(), RegistryPid :: pid()}. 
@@ -145,8 +130,6 @@ init([InstaId, PredefTopics]) -> {rlog_shard, ?SN_SHARD} ]), ok = mria:wait_for_tables([Tab]), - % FIXME: - %ok = mria_rlog:wait_for_shards([?CM_SHARD], infinity), MaxPredefId = lists:foldl( fun(#{id := TopicId, topic := TopicName0}, AccId) -> TopicName = iolist_to_binary(TopicName0), diff --git a/apps/emqx_gateway/src/stomp/emqx_stomp_frame.erl b/apps/emqx_gateway/src/stomp/emqx_stomp_frame.erl index 991a1585a..18b5c8f55 100644 --- a/apps/emqx_gateway/src/stomp/emqx_stomp_frame.erl +++ b/apps/emqx_gateway/src/stomp/emqx_stomp_frame.erl @@ -123,7 +123,7 @@ initial_parse_state(Opts) -> limit(Opts) -> #frame_limit{ max_header_num = g(max_header_num, Opts, ?MAX_HEADER_NUM), - max_header_length = g(max_header_length, Opts, ?MAX_BODY_LENGTH), + max_header_length = g(max_header_length, Opts, ?MAX_HEADER_LENGTH), max_body_length = g(max_body_length, Opts, ?MAX_BODY_LENGTH) }. diff --git a/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl b/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl index 3f709fa5a..e49a78e73 100644 --- a/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl @@ -28,6 +28,10 @@ -include_lib("eunit/include/eunit.hrl"). +-define(CONF_DEFAULT, <<" +gateway {} +">>). + %%-------------------------------------------------------------------- %% Setup %%-------------------------------------------------------------------- @@ -35,18 +39,12 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Conf) -> - %% FIXME: Magic line. for saving gateway schema name for emqx_config - emqx_config:init_load(emqx_gateway_schema, <<"gateway {}">>), - emqx_mgmt_api_test_util:init_suite([emqx_gateway]), - %% Start emqx-authn separately, due to emqx_authn_schema - %% not implementing the roots/0 method, it cannot be started with - %% emqx-ct-helpers at the moment. - {ok, _} = application:ensure_all_started(emqx_authn), + emqx_config:init_load(emqx_gateway_schema, ?CONF_DEFAULT), + emqx_mgmt_api_test_util:init_suite([emqx_conf, emqx_authn, emqx_gateway]), Conf. end_per_suite(Conf) -> - application:stop(emqx_authn), - emqx_mgmt_api_test_util:end_suite([emqx_gateway]), + emqx_mgmt_api_test_util:end_suite([emqx_gateway, emqx_authn, emqx_conf]), Conf. %%-------------------------------------------------------------------- @@ -209,6 +207,50 @@ t_authn(_) -> {204, _} = request(get, "/gateway/stomp/authentication"), {204, _} = request(delete, "/gateway/stomp"). 
+t_authn_data_mgmt(_) -> + GwConf = #{name => <<"stomp">>}, + {204, _} = request(post, "/gateway", GwConf), + {204, _} = request(get, "/gateway/stomp/authentication"), + + AuthConf = #{mechanism => <<"password-based">>, + backend => <<"built-in-database">>, + user_id_type => <<"clientid">> + }, + {204, _} = request(post, "/gateway/stomp/authentication", AuthConf), + {200, ConfResp} = request(get, "/gateway/stomp/authentication"), + assert_confs(AuthConf, ConfResp), + + User1 = #{ user_id => <<"test">> + , password => <<"123456">> + , is_superuser => false + }, + {201, _} = request(post, "/gateway/stomp/authentication/users", User1), + {200, #{data := [UserRespd1]}} = request(get, "/gateway/stomp/authentication/users"), + assert_confs(UserRespd1, User1), + + {200, UserRespd2} = request(get, + "/gateway/stomp/authentication/users/test"), + assert_confs(UserRespd2, User1), + + {200, UserRespd3} = request(put, + "/gateway/stomp/authentication/users/test", + #{password => <<"654321">>, + is_superuser => true}), + assert_confs(UserRespd3, User1#{is_superuser => true}), + + {200, UserRespd4} = request(get, + "/gateway/stomp/authentication/users/test"), + assert_confs(UserRespd4, User1#{is_superuser => true}), + + {204, _} = request(delete, "/gateway/stomp/authentication/users/test"), + + {200, #{data := []}} = request(get, + "/gateway/stomp/authentication/users"), + + {204, _} = request(delete, "/gateway/stomp/authentication"), + {204, _} = request(get, "/gateway/stomp/authentication"), + {204, _} = request(delete, "/gateway/stomp"). + t_listeners(_) -> GwConf = #{name => <<"stomp">>}, {204, _} = request(post, "/gateway", GwConf), @@ -264,6 +306,65 @@ t_listeners_authn(_) -> assert_confs(AuthConf2, ConfResp3), {204, _} = request(delete, "/gateway/stomp"). +t_listeners_authn_data_mgmt(_) -> + GwConf = #{name => <<"stomp">>, + listeners => [ + #{name => <<"def">>, + type => <<"tcp">>, + bind => <<"61613">> + }]}, + {204, _} = request(post, "/gateway", GwConf), + {200, ConfResp} = request(get, "/gateway/stomp"), + assert_confs(GwConf, ConfResp), + + AuthConf = #{mechanism => <<"password-based">>, + backend => <<"built-in-database">>, + user_id_type => <<"clientid">> + }, + Path = "/gateway/stomp/listeners/stomp:tcp:def/authentication", + {204, _} = request(post, Path, AuthConf), + {200, ConfResp2} = request(get, Path), + assert_confs(AuthConf, ConfResp2), + + User1 = #{ user_id => <<"test">> + , password => <<"123456">> + , is_superuser => false + }, + {201, _} = request(post, + "/gateway/stomp/listeners/stomp:tcp:def/authentication/users", + User1), + + {200, + #{data := [UserRespd1]} } = request( + get, + "/gateway/stomp/listeners/stomp:tcp:def/authentication/users"), + assert_confs(UserRespd1, User1), + + {200, UserRespd2} = request( + get, + "/gateway/stomp/listeners/stomp:tcp:def/authentication/users/test"), + assert_confs(UserRespd2, User1), + + {200, UserRespd3} = request( + put, + "/gateway/stomp/listeners/stomp:tcp:def/authentication/users/test", + #{password => <<"654321">>, is_superuser => true}), + assert_confs(UserRespd3, User1#{is_superuser => true}), + + {200, UserRespd4} = request( + get, + "/gateway/stomp/listeners/stomp:tcp:def/authentication/users/test"), + assert_confs(UserRespd4, User1#{is_superuser => true}), + + {204, _} = request( + delete, + "/gateway/stomp/listeners/stomp:tcp:def/authentication/users/test"), + + {200, #{data := []}} = request( + get, + "/gateway/stomp/listeners/stomp:tcp:def/authentication/users"), + {204, _} = request(delete, "/gateway/stomp"). 
+ %%-------------------------------------------------------------------- %% Asserts diff --git a/apps/emqx_gateway/test/emqx_gateway_conf_SUITE.erl b/apps/emqx_gateway/test/emqx_gateway_conf_SUITE.erl index 5f8eed20a..810f56e76 100644 --- a/apps/emqx_gateway/test/emqx_gateway_conf_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_gateway_conf_SUITE.erl @@ -34,15 +34,12 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Conf) -> - %% FIXME: Magic line. for saving gateway schema name for emqx_config emqx_config:init_load(emqx_gateway_schema, <<"gateway {}">>), - emqx_common_test_helpers:start_apps([emqx_gateway]), - {ok, _} = application:ensure_all_started(emqx_authn), + emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn, emqx_gateway]), Conf. end_per_suite(_Conf) -> - application:stop(emqx_authn), - emqx_common_test_helpers:stop_apps([emqx_gateway]). + emqx_common_test_helpers:stop_apps([emqx_gateway, emqx_authn, emqx_conf]). init_per_testcase(_CaseName, Conf) -> _ = emqx_gateway_conf:unload_gateway(stomp), @@ -52,6 +49,133 @@ init_per_testcase(_CaseName, Conf) -> %% Cases %%-------------------------------------------------------------------- +-define(SVR_CA, +<<"-----BEGIN CERTIFICATE----- +MIIDUTCCAjmgAwIBAgIJAPPYCjTmxdt/MA0GCSqGSIb3DQEBCwUAMD8xCzAJBgNV +BAYTAkNOMREwDwYDVQQIDAhoYW5nemhvdTEMMAoGA1UECgwDRU1RMQ8wDQYDVQQD +DAZSb290Q0EwHhcNMjAwNTA4MDgwNjUyWhcNMzAwNTA2MDgwNjUyWjA/MQswCQYD +VQQGEwJDTjERMA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UE +AwwGUm9vdENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzcgVLex1 +EZ9ON64EX8v+wcSjzOZpiEOsAOuSXOEN3wb8FKUxCdsGrsJYB7a5VM/Jot25Mod2 +juS3OBMg6r85k2TWjdxUoUs+HiUB/pP/ARaaW6VntpAEokpij/przWMPgJnBF3Ur +MjtbLayH9hGmpQrI5c2vmHQ2reRZnSFbY+2b8SXZ+3lZZgz9+BaQYWdQWfaUWEHZ +uDaNiViVO0OT8DRjCuiDp3yYDj3iLWbTA/gDL6Tf5XuHuEwcOQUrd+h0hyIphO8D +tsrsHZ14j4AWYLk1CPA6pq1HIUvEl2rANx2lVUNv+nt64K/Mr3RnVQd9s8bK+TXQ +KGHd2Lv/PALYuwIDAQABo1AwTjAdBgNVHQ4EFgQUGBmW+iDzxctWAWxmhgdlE8Pj +EbQwHwYDVR0jBBgwFoAUGBmW+iDzxctWAWxmhgdlE8PjEbQwDAYDVR0TBAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAQEAGbhRUjpIred4cFAFJ7bbYD9hKu/yzWPWkMRa +ErlCKHmuYsYk+5d16JQhJaFy6MGXfLgo3KV2itl0d+OWNH0U9ULXcglTxy6+njo5 +CFqdUBPwN1jxhzo9yteDMKF4+AHIxbvCAJa17qcwUKR5MKNvv09C6pvQDJLzid7y +E2dkgSuggik3oa0427KvctFf8uhOV94RvEDyqvT5+pgNYZ2Yfga9pD/jjpoHEUlo +88IGU8/wJCx3Ds2yc8+oBg/ynxG8f/HmCC1ET6EHHoe2jlo8FpU/SgGtghS1YL30 +IWxNsPrUP+XsZpBJy/mvOhE5QXo6Y35zDqqj8tI7AGmAWu22jg== +-----END CERTIFICATE----- +">>). 
+ +-define(SVR_CERT, +<<"-----BEGIN CERTIFICATE----- +MIIDEzCCAfugAwIBAgIBAjANBgkqhkiG9w0BAQsFADA/MQswCQYDVQQGEwJDTjER +MA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UEAwwGUm9vdENB +MB4XDTIwMDUwODA4MDcwNVoXDTMwMDUwNjA4MDcwNVowPzELMAkGA1UEBhMCQ04x +ETAPBgNVBAgMCGhhbmd6aG91MQwwCgYDVQQKDANFTVExDzANBgNVBAMMBlNlcnZl +cjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALNeWT3pE+QFfiRJzKmn +AMUrWo3K2j/Tm3+Xnl6WLz67/0rcYrJbbKvS3uyRP/stXyXEKw9CepyQ1ViBVFkW +Aoy8qQEOWFDsZc/5UzhXUnb6LXr3qTkFEjNmhj+7uzv/lbBxlUG1NlYzSeOB6/RT +8zH/lhOeKhLnWYPXdXKsa1FL6ij4X8DeDO1kY7fvAGmBn/THh1uTpDizM4YmeI+7 +4dmayA5xXvARte5h4Vu5SIze7iC057N+vymToMk2Jgk+ZZFpyXrnq+yo6RaD3ANc +lrc4FbeUQZ5a5s5Sxgs9a0Y3WMG+7c5VnVXcbjBRz/aq2NtOnQQjikKKQA8GF080 +BQkCAwEAAaMaMBgwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwDQYJKoZIhvcNAQEL +BQADggEBAJefnMZpaRDHQSNUIEL3iwGXE9c6PmIsQVE2ustr+CakBp3TZ4l0enLt +iGMfEVFju69cO4oyokWv+hl5eCMkHBf14Kv51vj448jowYnF1zmzn7SEzm5Uzlsa +sqjtAprnLyof69WtLU1j5rYWBuFX86yOTwRAFNjm9fvhAcrEONBsQtqipBWkMROp +iUYMkRqbKcQMdwxov+lHBYKq9zbWRoqLROAn54SRqgQk6c15JdEfgOOjShbsOkIH +UhqcwRkQic7n1zwHVGVDgNIZVgmJ2IdIWBlPEC7oLrRrBD/X1iEEXtKab6p5o22n +KB5mN+iQaE+Oe2cpGKZJiJRdM+IqDDQ= +-----END CERTIFICATE----- +">>). + +-define(SVR_KEY, +<<"-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAs15ZPekT5AV+JEnMqacAxStajcraP9Obf5eeXpYvPrv/Stxi +sltsq9Le7JE/+y1fJcQrD0J6nJDVWIFUWRYCjLypAQ5YUOxlz/lTOFdSdvotevep +OQUSM2aGP7u7O/+VsHGVQbU2VjNJ44Hr9FPzMf+WE54qEudZg9d1cqxrUUvqKPhf +wN4M7WRjt+8AaYGf9MeHW5OkOLMzhiZ4j7vh2ZrIDnFe8BG17mHhW7lIjN7uILTn +s36/KZOgyTYmCT5lkWnJeuer7KjpFoPcA1yWtzgVt5RBnlrmzlLGCz1rRjdYwb7t +zlWdVdxuMFHP9qrY206dBCOKQopADwYXTzQFCQIDAQABAoIBAQCuvCbr7Pd3lvI/ +n7VFQG+7pHRe1VKwAxDkx2t8cYos7y/QWcm8Ptwqtw58HzPZGWYrgGMCRpzzkRSF +V9g3wP1S5Scu5C6dBu5YIGc157tqNGXB+SpdZddJQ4Nc6yGHXYERllT04ffBGc3N +WG/oYS/1cSteiSIrsDy/91FvGRCi7FPxH3wIgHssY/tw69s1Cfvaq5lr2NTFzxIG +xCvpJKEdSfVfS9I7LYiymVjst3IOR/w76/ZFY9cRa8ZtmQSWWsm0TUpRC1jdcbkm +ZoJptYWlP+gSwx/fpMYftrkJFGOJhHJHQhwxT5X/ajAISeqjjwkWSEJLwnHQd11C +Zy2+29lBAoGBANlEAIK4VxCqyPXNKfoOOi5dS64NfvyH4A1v2+KaHWc7lqaqPN49 +ezfN2n3X+KWx4cviDD914Yc2JQ1vVJjSaHci7yivocDo2OfZDmjBqzaMp/y+rX1R +/f3MmiTqMa468rjaxI9RRZu7vDgpTR+za1+OBCgMzjvAng8dJuN/5gjlAoGBANNY +uYPKtearBmkqdrSV7eTUe49Nhr0XotLaVBH37TCW0Xv9wjO2xmbm5Ga/DCtPIsBb +yPeYwX9FjoasuadUD7hRvbFu6dBa0HGLmkXRJZTcD7MEX2Lhu4BuC72yDLLFd0r+ +Ep9WP7F5iJyagYqIZtz+4uf7gBvUDdmvXz3sGr1VAoGAdXTD6eeKeiI6PlhKBztF +zOb3EQOO0SsLv3fnodu7ZaHbUgLaoTMPuB17r2jgrYM7FKQCBxTNdfGZmmfDjlLB +0xZ5wL8ibU30ZXL8zTlWPElST9sto4B+FYVVF/vcG9sWeUUb2ncPcJ/Po3UAktDG +jYQTTyuNGtSJHpad/YOZctkCgYBtWRaC7bq3of0rJGFOhdQT9SwItN/lrfj8hyHA +OjpqTV4NfPmhsAtu6j96OZaeQc+FHvgXwt06cE6Rt4RG4uNPRluTFgO7XYFDfitP +vCppnoIw6S5BBvHwPP+uIhUX2bsi/dm8vu8tb+gSvo4PkwtFhEr6I9HglBKmcmog +q6waEQKBgHyecFBeM6Ls11Cd64vborwJPAuxIW7HBAFj/BS99oeG4TjBx4Sz2dFd +rzUibJt4ndnHIvCN8JQkjNG14i9hJln+H3mRss8fbZ9vQdqG+2vOWADYSzzsNI55 +RFY7JjluKcVkp/zCDeUxTU3O6sS+v6/3VE11Cob6OYQx3lN5wrZ3 +-----END RSA PRIVATE KEY----- +">>). 
+ +-define(SVR_CERT2, +<<"-----BEGIN CERTIFICATE----- +MIIDEzCCAfugAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MQswCQYDVQQGEwJDTjER +MA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UEAwwGUm9vdENB +MB4XDTIwMDUwODA4MDY1N1oXDTMwMDUwNjA4MDY1N1owPzELMAkGA1UEBhMCQ04x +ETAPBgNVBAgMCGhhbmd6aG91MQwwCgYDVQQKDANFTVExDzANBgNVBAMMBkNsaWVu +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMy4hoksKcZBDbY680u6 +TS25U51nuB1FBcGMlF9B/t057wPOlxF/OcmbxY5MwepS41JDGPgulE1V7fpsXkiW +1LUimYV/tsqBfymIe0mlY7oORahKji7zKQ2UBIVFhdlvQxunlIDnw6F9popUgyHt +dMhtlgZK8oqRwHxO5dbfoukYd6J/r+etS5q26sgVkf3C6dt0Td7B25H9qW+f7oLV +PbcHYCa+i73u9670nrpXsC+Qc7Mygwa2Kq/jwU+ftyLQnOeW07DuzOwsziC/fQZa +nbxR+8U9FNftgRcC3uP/JMKYUqsiRAuaDokARZxVTV5hUElfpO6z6/NItSDvvh3i +eikCAwEAAaMaMBgwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwDQYJKoZIhvcNAQEL +BQADggEBABchYxKo0YMma7g1qDswJXsR5s56Czx/I+B41YcpMBMTrRqpUC0nHtLk +M7/tZp592u/tT8gzEnQjZLKBAhFeZaR3aaKyknLqwiPqJIgg0pgsBGITrAK3Pv4z +5/YvAJJKgTe5UdeTz6U4lvNEux/4juZ4pmqH4qSFJTOzQS7LmgSmNIdd072rwXBd +UzcSHzsJgEMb88u/LDLjj1pQ7AtZ4Tta8JZTvcgBFmjB0QUi6fgkHY6oGat/W4kR +jSRUBlMUbM/drr2PVzRc2dwbFIl3X+ZE6n5Sl3ZwRAC/s92JU6CPMRW02muVu6xl +goraNgPISnrbpR6KjxLZkVembXzjNNc= +-----END CERTIFICATE----- +">>). + +-define(SVR_KEY2, +<<"-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAzLiGiSwpxkENtjrzS7pNLblTnWe4HUUFwYyUX0H+3TnvA86X +EX85yZvFjkzB6lLjUkMY+C6UTVXt+mxeSJbUtSKZhX+2yoF/KYh7SaVjug5FqEqO +LvMpDZQEhUWF2W9DG6eUgOfDoX2milSDIe10yG2WBkryipHAfE7l1t+i6Rh3on+v +561LmrbqyBWR/cLp23RN3sHbkf2pb5/ugtU9twdgJr6Lve73rvSeulewL5BzszKD +BrYqr+PBT5+3ItCc55bTsO7M7CzOIL99BlqdvFH7xT0U1+2BFwLe4/8kwphSqyJE +C5oOiQBFnFVNXmFQSV+k7rPr80i1IO++HeJ6KQIDAQABAoIBAGWgvPjfuaU3qizq +uti/FY07USz0zkuJdkANH6LiSjlchzDmn8wJ0pApCjuIE0PV/g9aS8z4opp5q/gD +UBLM/a8mC/xf2EhTXOMrY7i9p/I3H5FZ4ZehEqIw9sWKK9YzC6dw26HabB2BGOnW +5nozPSQ6cp2RGzJ7BIkxSZwPzPnVTgy3OAuPOiJytvK+hGLhsNaT+Y9bNDvplVT2 +ZwYTV8GlHZC+4b2wNROILm0O86v96O+Qd8nn3fXjGHbMsAnONBq10bZS16L4fvkH +5G+W/1PeSXmtZFppdRRDxIW+DWcXK0D48WRliuxcV4eOOxI+a9N2ZJZZiNLQZGwg +w3A8+mECgYEA8HuJFrlRvdoBe2U/EwUtG74dcyy30L4yEBnN5QscXmEEikhaQCfX +Wm6EieMcIB/5I5TQmSw0cmBMeZjSXYoFdoI16/X6yMMuATdxpvhOZGdUGXxhAH+x +xoTUavWZnEqW3fkUU71kT5E2f2i+0zoatFESXHeslJyz85aAYpP92H0CgYEA2e5A +Yozt5eaA1Gyhd8SeptkEU4xPirNUnVQHStpMWUb1kzTNXrPmNWccQ7JpfpG6DcYl +zUF6p6mlzY+zkMiyPQjwEJlhiHM2NlL1QS7td0R8ewgsFoyn8WsBI4RejWrEG9td +EDniuIw+pBFkcWthnTLHwECHdzgquToyTMjrBB0CgYEA28tdGbrZXhcyAZEhHAZA +Gzog+pKlkpEzeonLKIuGKzCrEKRecIK5jrqyQsCjhS0T7ZRnL4g6i0s+umiV5M5w +fcc292pEA1h45L3DD6OlKplSQVTv55/OYS4oY3YEJtf5mfm8vWi9lQeY8sxOlQpn +O+VZTdBHmTC8PGeTAgZXHZUCgYA6Tyv88lYowB7SN2qQgBQu8jvdGtqhcs/99GCr +H3N0I69LPsKAR0QeH8OJPXBKhDUywESXAaEOwS5yrLNP1tMRz5Vj65YUCzeDG3kx +gpvY4IMp7ArX0bSRvJ6mYSFnVxy3k174G3TVCfksrtagHioVBGQ7xUg5ltafjrms +n8l55QKBgQDVzU8tQvBVqY8/1lnw11Vj4fkE/drZHJ5UkdC1eenOfSWhlSLfUJ8j +ds7vEWpRPPoVuPZYeR1y78cyxKe1GBx6Wa2lF5c7xjmiu0xbRnrxYeLolce9/ntp +asClqpnHT8/VJYTD7Kqj0fouTTZf0zkig/y+2XERppd8k+pSKjUCPQ== +-----END RSA PRIVATE KEY----- +">>). + -define(CONF_STOMP_BAISC_1, #{ <<"idle_timeout">> => <<"10s">>, <<"mountpoint">> => <<"t/">>, @@ -76,6 +200,31 @@ init_per_testcase(_CaseName, Conf) -> -define(CONF_STOMP_LISTENER_2, #{ <<"bind">> => <<"61614">> }). +-define(CONF_STOMP_LISTENER_SSL, + #{ <<"bind">> => <<"61614">>, + <<"ssl">> => + #{ <<"cacertfile">> => ?SVR_CA, + <<"certfile">> => ?SVR_CERT, + <<"keyfile">> => ?SVR_KEY + } + }). +-define(CONF_STOMP_LISTENER_SSL_2, + #{ <<"bind">> => <<"61614">>, + <<"ssl">> => + #{ <<"cacertfile">> => ?SVR_CA, + <<"certfile">> => ?SVR_CERT2, + <<"keyfile">> => ?SVR_KEY2 + } + }). 
+-define(CERTS_PATH(CertName), filename:join(["../../lib/emqx/etc/certs/", CertName])). +-define(CONF_STOMP_LISTENER_SSL_PATH, + #{ <<"bind">> => <<"61614">>, + <<"ssl">> => + #{ <<"cacertfile">> => ?CERTS_PATH("cacert.pem"), + <<"certfile">> => ?CERTS_PATH("cert.pem"), + <<"keyfile">> => ?CERTS_PATH("key.pem") + } + }). -define(CONF_STOMP_AUTHN_1, #{ <<"mechanism">> => <<"password-based">>, <<"backend">> => <<"built-in-database">>, @@ -95,7 +244,6 @@ t_load_unload_gateway(_) -> StompConf2 = compose(?CONF_STOMP_BAISC_2, ?CONF_STOMP_AUTHN_1, ?CONF_STOMP_LISTENER_1), - ok = emqx_gateway_conf:load_gateway(stomp, StompConf1), {error, already_exist} = emqx_gateway_conf:load_gateway(stomp, StompConf1), @@ -213,6 +361,83 @@ t_load_remove_listener_authn(_) -> ), ok. +t_load_gateway_with_certs_content(_) -> + StompConf = compose_ssl_listener( + ?CONF_STOMP_BAISC_1, + ?CONF_STOMP_LISTENER_SSL + ), + ok = emqx_gateway_conf:load_gateway(<<"stomp">>, StompConf), + assert_confs(StompConf, emqx:get_raw_config([gateway, stomp])), + SslConf = emqx_map_lib:deep_get( + [<<"listeners">>, <<"ssl">>, <<"default">>, <<"ssl">>], + emqx:get_raw_config([gateway, stomp]) + ), + ok = emqx_gateway_conf:unload_gateway(<<"stomp">>), + assert_ssl_confs_files_deleted(SslConf), + ?assertException(error, {config_not_found, [gateway, stomp]}, + emqx:get_raw_config([gateway, stomp])), + ok. + +%% TODO: Comment out this test case for now, because emqx_tls_lib +%% will delete the configured certificate file. + +%t_load_gateway_with_certs_path(_) -> +% StompConf = compose_ssl_listener( +% ?CONF_STOMP_BAISC_1, +% ?CONF_STOMP_LISTENER_SSL_PATH +% ), +% ok = emqx_gateway_conf:load_gateway(<<"stomp">>, StompConf), +% assert_confs(StompConf, emqx:get_raw_config([gateway, stomp])), +% SslConf = emqx_map_lib:deep_get( +% [<<"listeners">>, <<"ssl">>, <<"default">>, <<"ssl">>], +% emqx:get_raw_config([gateway, stomp]) +% ), +% ok = emqx_gateway_conf:unload_gateway(<<"stomp">>), +% assert_ssl_confs_files_deleted(SslConf), +% ?assertException(error, {config_not_found, [gateway, stomp]}, +% emqx:get_raw_config([gateway, stomp])), +% ok. + +t_add_listener_with_certs_content(_) -> + StompConf = ?CONF_STOMP_BAISC_1, + ok = emqx_gateway_conf:load_gateway(<<"stomp">>, StompConf), + assert_confs(StompConf, emqx:get_raw_config([gateway, stomp])), + + ok = emqx_gateway_conf:add_listener( + <<"stomp">>, {<<"ssl">>, <<"default">>}, ?CONF_STOMP_LISTENER_SSL), + assert_confs( + maps:merge(StompConf, ssl_listener(?CONF_STOMP_LISTENER_SSL)), + emqx:get_raw_config([gateway, stomp])), + + ok = emqx_gateway_conf:update_listener( + <<"stomp">>, {<<"ssl">>, <<"default">>}, ?CONF_STOMP_LISTENER_SSL_2), + assert_confs( + maps:merge(StompConf, ssl_listener(?CONF_STOMP_LISTENER_SSL_2)), + emqx:get_raw_config([gateway, stomp])), + + SslConf = emqx_map_lib:deep_get( + [<<"listeners">>, <<"ssl">>, <<"default">>, <<"ssl">>], + emqx:get_raw_config([gateway, stomp]) + ), + ok = emqx_gateway_conf:remove_listener( + <<"stomp">>, {<<"ssl">>, <<"default">>}), + assert_ssl_confs_files_deleted(SslConf), + {error, not_found} = + emqx_gateway_conf:update_listener( + <<"stomp">>, {<<"ssl">>, <<"default">>}, ?CONF_STOMP_LISTENER_SSL_2), + ?assertException( + error, {config_not_found, [gateway, stomp, listeners, ssl, default]}, + emqx:get_raw_config([gateway, stomp, listeners, ssl, default]) + ), + ok. 
+ +assert_ssl_confs_files_deleted(SslConf) when is_map(SslConf) -> + Ks = [<<"cacertfile">>, <<"certfile">>, <<"keyfile">>], + lists:foreach(fun(K) -> + Path = maps:get(K, SslConf), + {error, enoent} = file:read_file(Path) + end, Ks). + %%-------------------------------------------------------------------- %% Utils @@ -224,6 +449,9 @@ compose(Basic, Authn, Listener) -> compose_listener(Basic, Listener) -> maps:merge(Basic, listener(Listener)). +compose_ssl_listener(Basic, Listener) -> + maps:merge(Basic, ssl_listener(Listener)). + compose_authn(Basic, Authn) -> maps:merge(Basic, #{<<"authentication">> => Authn}). @@ -235,3 +463,7 @@ compose_listener_authn(Basic, Listener, Authn) -> listener(L) -> #{<<"listeners">> => [L#{<<"type">> => <<"tcp">>, <<"name">> => <<"default">>}]}. + +ssl_listener(L) -> + #{<<"listeners">> => [L#{<<"type">> => <<"ssl">>, + <<"name">> => <<"default">>}]}. diff --git a/apps/emqx_gateway/test/emqx_gateway_test_utils.erl b/apps/emqx_gateway/test/emqx_gateway_test_utils.erl index d7fd12c3d..329e97e8f 100644 --- a/apps/emqx_gateway/test/emqx_gateway_test_utils.erl +++ b/apps/emqx_gateway/test/emqx_gateway_test_utils.erl @@ -21,7 +21,7 @@ assert_confs(Expected0, Effected) -> Expected = maybe_unconvert_listeners(Expected0), - case do_assert_confs(Expected, Effected) of + case do_assert_confs(root, Expected, Effected) of false -> io:format(standard_error, "Expected config: ~p,\n" "Effected config: ~p", @@ -31,23 +31,36 @@ assert_confs(Expected0, Effected) -> ok end. -do_assert_confs(Expected, Effected) when is_map(Expected), - is_map(Effected) -> +do_assert_confs(_Key, Expected, Effected) when is_map(Expected), + is_map(Effected) -> Ks1 = maps:keys(Expected), lists:all(fun(K) -> - do_assert_confs(maps:get(K, Expected), + do_assert_confs(K, + maps:get(K, Expected), maps:get(K, Effected, undefined)) end, Ks1); -do_assert_confs([Expected|More1], [Effected|More2]) -> - do_assert_confs(Expected, Effected) andalso do_assert_confs(More1, More2); -do_assert_confs([], []) -> +do_assert_confs(Key, Expected, Effected) when Key == <<"cacertfile">>; + Key == <<"certfile">>; + Key == <<"keyfile">> -> + case Expected == Effected of + true -> true; + false -> + case file:read_file(Effected) of + {ok, Content} -> Expected == Content; + _ -> false + end + end; +do_assert_confs(Key, [Expected|More1], [Effected|More2]) -> + do_assert_confs(Key, Expected, Effected) + andalso do_assert_confs(Key, More1, More2); +do_assert_confs(_Key, [], []) -> true; -do_assert_confs(Expected, Effected) -> +do_assert_confs(Key, Expected, Effected) -> Res = Expected =:= Effected, Res == false andalso - ct:pal("Errors: conf not match, " - "expected: ~p, got: ~p~n", [Expected, Effected]), + ct:pal("Errors: ~p value not match, " + "expected: ~p, got: ~p~n", [Key, Expected, Effected]), Res. 
maybe_unconvert_listeners(Conf) when is_map(Conf) -> diff --git a/apps/emqx_gateway/test/emqx_lwm2m_SUITE.erl b/apps/emqx_gateway/test/emqx_lwm2m_SUITE.erl index 8f9e6108f..dd8fc2f7f 100644 --- a/apps/emqx_gateway/test/emqx_lwm2m_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_lwm2m_SUITE.erl @@ -35,7 +35,7 @@ gateway.lwm2m { lifetime_max = 86400s qmode_time_window = 22 auto_observe = false - mountpoint = \"lwm2m/%u\" + mountpoint = \"lwm2m/${username}\" update_msg_publish_condition = contains_object_list translators { command = {topic = \"/dn/#\", qos = 0} @@ -80,7 +80,8 @@ groups() -> [ case01_register, case01_register_additional_opts, - %% case01_register_incorrect_opts, %% TODO now we can't handle partial decode packet + %% TODO now we can't handle partial decode packet + %% case01_register_incorrect_opts, case01_register_report, case02_update_deregister, case03_register_wrong_version, @@ -150,12 +151,13 @@ groups() -> ]. init_per_suite(Config) -> - emqx_common_test_helpers:start_apps([]), + emqx_common_test_helpers:start_apps([emqx_conf]), Config. end_per_suite(Config) -> timer:sleep(300), - emqx_common_test_helpers:stop_apps([]), + {ok, _} = emqx_conf:remove([<<"gateway">>,<<"lwm2m">>], #{}), + emqx_common_test_helpers:stop_apps([emqx_conf]), Config. init_per_testcase(_AllTestCase, Config) -> @@ -163,7 +165,9 @@ init_per_testcase(_AllTestCase, Config) -> {ok, _} = application:ensure_all_started(emqx_gateway), {ok, ClientUdpSock} = gen_udp:open(0, [binary, {active, false}]), - {ok, C} = emqtt:start_link([{host, "localhost"},{port, 1883},{clientid, <<"c1">>}]), + {ok, C} = emqtt:start_link([{host, "localhost"}, + {port, 1883}, + {clientid, <<"c1">>}]), {ok, _} = emqtt:connect(C), timer:sleep(100), @@ -188,12 +192,14 @@ case01_register(Config) -> MsgId = 12, SubTopic = list_to_binary("lwm2m/"++Epn++"/dn/#"), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId), %% checkpoint 1 - response #coap_message{type = Type, method = Method, id = RspId, options = Opts} = @@ -214,13 +220,16 @@ case01_register(Config) -> %%---------------------------------------- ?LOGT("start to send DE-REGISTER command", []), MsgId3 = 52, - test_send_coap_request( UdpSock, - delete, - sprintf("coap://127.0.0.1:~b~ts", [?PORT, join_path(Location, <<>>)]), - #coap_content{payload = <<>>}, - [], - MsgId3), - #coap_message{type = ack, id = RspId3, method = Method3} = test_recv_coap_response(UdpSock), + test_send_coap_request( + UdpSock, + delete, + sprintf("coap://127.0.0.1:~b~ts", [?PORT, join_path(Location, <<>>)]), + #coap_content{payload = <<>>}, + [], + MsgId3), + #coap_message{type = ack, + id = RspId3, + method = Method3} = test_recv_coap_response(UdpSock), {ok,deleted} = Method3, MsgId3 = RspId3, timer:sleep(50), @@ -235,13 +244,16 @@ case01_register_additional_opts(Config) -> MsgId = 12, SubTopic = list_to_binary("lwm2m/"++Epn++"/dn/#"), - AddOpts = "ep=~ts<=345&lwm2m=1&apn=psmA.eDRX0.ctnb&cust_opt=shawn&im=123&ct=1.4&mt=mdm9620&mv=1.2", - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?" 
++ AddOpts, [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId), + AddOpts = "ep=~ts<=345&lwm2m=1&apn=psmA.eDRX0.ctnb&cust_opt=shawn&" + "im=123&ct=1.4&mt=mdm9620&mv=1.2", + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?" ++ AddOpts, [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId), %% checkpoint 1 - response #coap_message{type = Type, method = Method, id = RspId, options = Opts} = @@ -262,13 +274,16 @@ case01_register_additional_opts(Config) -> %%---------------------------------------- ?LOGT("start to send DE-REGISTER command", []), MsgId3 = 52, - test_send_coap_request( UdpSock, - delete, - sprintf("coap://127.0.0.1:~b~ts", [?PORT, join_path(Location, <<>>)]), - #coap_content{payload = <<>>}, - [], - MsgId3), - #coap_message{type = ack, id = RspId3, method = Method3} = test_recv_coap_response(UdpSock), + test_send_coap_request( + UdpSock, + delete, + sprintf("coap://127.0.0.1:~b~ts", [?PORT, join_path(Location, <<>>)]), + #coap_content{payload = <<>>}, + [], + MsgId3), + #coap_message{type = ack, + id = RspId3, + method = Method3} = test_recv_coap_response(UdpSock), {ok,deleted} = Method3, MsgId3 = RspId3, timer:sleep(50), @@ -284,12 +299,14 @@ case01_register_incorrect_opts(Config) -> AddOpts = "ep=~ts<=345&lwm2m=1&incorrect_opt", - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?" ++ AddOpts, [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?" ++ AddOpts, [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId), %% checkpoint 1 - response #coap_message{type = ack, method = Method, id = MsgId} = @@ -308,12 +325,14 @@ case01_register_report(Config) -> emqtt:subscribe(?config(emqx_c, Config), ReportTopic, qos0), timer:sleep(200), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId), #coap_message{type = Type, method = Method, id = RspId, options = Opts} = test_recv_coap_response(UdpSock), @@ -326,16 +345,16 @@ case01_register_report(Config) -> timer:sleep(50), true = lists:member(SubTopic, test_mqtt_broker:get_subscrbied_topics()), - ReadResult = emqx_json:encode(#{ - <<"msgType">> => <<"register">>, - <<"data">> => #{ - <<"alternatePath">> => <<"/">>, - <<"ep">> => list_to_binary(Epn), - <<"lt">> => 345, - <<"lwm2m">> => <<"1">>, - <<"objectList">> => [<<"/1">>, <<"/2">>, <<"/3">>, <<"/4">>, <<"/5">>] - } - }), + ReadResult = emqx_json:encode( + #{<<"msgType">> => <<"register">>, + <<"data">> => #{ + <<"alternatePath">> => <<"/">>, + <<"ep">> => list_to_binary(Epn), + <<"lt">> => 345, + <<"lwm2m">> => <<"1">>, + <<"objectList">> => [<<"/1">>, <<"/2">>, + <<"/3">>, <<"/4">>, <<"/5">>] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(ReportTopic)), %%---------------------------------------- @@ -343,13 +362,16 @@ case01_register_report(Config) -> %%---------------------------------------- ?LOGT("start to send DE-REGISTER command", []), MsgId3 = 52, - 
test_send_coap_request( UdpSock, - delete, - sprintf("coap://127.0.0.1:~b~ts", [?PORT, join_path(Location, <<>>)]), - #coap_content{payload = <<>>}, - [], - MsgId3), - #coap_message{type = ack, id = RspId3, method = Method3} = test_recv_coap_response(UdpSock), + test_send_coap_request( + UdpSock, + delete, + sprintf("coap://127.0.0.1:~b~ts", [?PORT, join_path(Location, <<>>)]), + #coap_content{payload = <<>>}, + [], + MsgId3), + #coap_message{type = ack, + id = RspId3, + method = Method3} = test_recv_coap_response(UdpSock), {ok,deleted} = Method3, MsgId3 = RspId3, timer:sleep(50), @@ -367,28 +389,32 @@ case02_update_deregister(Config) -> emqtt:subscribe(?config(emqx_c, Config), ReportTopic, qos0), timer:sleep(200), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId), timer:sleep(100), - #coap_message{type = ack, method = Method, options = Opts} = test_recv_coap_response(UdpSock), + #coap_message{type = ack, + method = Method, + options = Opts} = test_recv_coap_response(UdpSock), ?assertEqual({ok,created}, Method), ?LOGT("Options got: ~p", [Opts]), Location = maps:get(location_path, Opts), - Register = emqx_json:encode(#{ - <<"msgType">> => <<"register">>, - <<"data">> => #{ - <<"alternatePath">> => <<"/">>, - <<"ep">> => list_to_binary(Epn), - <<"lt">> => 345, - <<"lwm2m">> => <<"1">>, - <<"objectList">> => [<<"/1">>, <<"/2">>, <<"/3">>, <<"/4">>, <<"/5">>] - } - }), + Register = emqx_json:encode( + #{<<"msgType">> => <<"register">>, + <<"data">> => #{ + <<"alternatePath">> => <<"/">>, + <<"ep">> => list_to_binary(Epn), + <<"lt">> => 345, + <<"lwm2m">> => <<"1">>, + <<"objectList">> => [<<"/1">>, <<"/2">>, <<"/3">>, + <<"/4">>, <<"/5">>] + }}), ?assertEqual(Register, test_recv_mqtt_response(ReportTopic)), %%---------------------------------------- @@ -396,25 +422,29 @@ case02_update_deregister(Config) -> %%---------------------------------------- ?LOGT("start to send UPDATE command", []), MsgId2 = 27, - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b~ts?lt=789", [?PORT, join_path(Location, <<>>)]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , , ">>}, - [], - MsgId2), - #coap_message{type = ack, id = RspId2, method = Method2} = test_recv_coap_response(UdpSock), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b~ts?lt=789", [?PORT, join_path(Location, <<>>)]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , , ">>}, + [], + MsgId2), + #coap_message{type = ack, + id = RspId2, + method = Method2} = test_recv_coap_response(UdpSock), {ok,changed} = Method2, MsgId2 = RspId2, - Update = emqx_json:encode(#{ - <<"msgType">> => <<"update">>, - <<"data">> => #{ - <<"alternatePath">> => <<"/">>, - <<"ep">> => list_to_binary(Epn), - <<"lt">> => 789, - <<"lwm2m">> => <<"1">>, - <<"objectList">> => [<<"/1">>, <<"/2">>, <<"/3">>, <<"/4">>, <<"/5">>, <<"/6">>] - } - }), + Update = emqx_json:encode( + #{<<"msgType">> => <<"update">>, + <<"data">> => #{ + <<"alternatePath">> => <<"/">>, + <<"ep">> => list_to_binary(Epn), + <<"lt">> => 789, + <<"lwm2m">> => <<"1">>, + <<"objectList">> => [<<"/1">>, <<"/2">>, <<"/3">>, + <<"/4">>, <<"/5">>, <<"/6">>] + 
}}), ?assertEqual(Update, test_recv_mqtt_response(ReportTopic)), %%---------------------------------------- @@ -422,13 +452,16 @@ case02_update_deregister(Config) -> %%---------------------------------------- ?LOGT("start to send DE-REGISTER command", []), MsgId3 = 52, - test_send_coap_request( UdpSock, - delete, - sprintf("coap://127.0.0.1:~b~ts", [?PORT, join_path(Location, <<>>)]), - #coap_content{payload = <<>>}, - [], - MsgId3), - #coap_message{type = ack, id = RspId3, method = Method3} = test_recv_coap_response(UdpSock), + test_send_coap_request( + UdpSock, + delete, + sprintf("coap://127.0.0.1:~b~ts", [?PORT, join_path(Location, <<>>)]), + #coap_content{payload = <<>>}, + [], + MsgId3), + #coap_message{type = ack, + id = RspId3, + method = Method3} = test_recv_coap_response(UdpSock), {ok,deleted} = Method3, MsgId3 = RspId3, @@ -443,12 +476,14 @@ case03_register_wrong_version(Config) -> Epn = "urn:oma:lwm2m:oma:3", MsgId = 12, SubTopic = list_to_binary("lwm2m/"++Epn++"/dn/#"), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=8.3", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=8.3", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId), #coap_message{type = ack, method = Method} = test_recv_coap_response(UdpSock), ?assertEqual({error, bad_request}, Method), timer:sleep(50), @@ -464,12 +499,14 @@ case04_register_and_lifetime_timeout(Config) -> MsgId = 12, SubTopic = list_to_binary("lwm2m/"++Epn++"/dn/#"), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=2&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=2&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId), timer:sleep(100), #coap_message{type = ack, method = Method} = test_recv_coap_response(UdpSock), ?assertEqual({ok,created}, Method), @@ -490,12 +527,14 @@ case05_register_wrong_epn(Config) -> MsgId = 12, UdpSock = ?config(sock, Config), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?lt=345&lwm2m=1.0", [?PORT]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?lt=345&lwm2m=1.0", [?PORT]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId), #coap_message{type = ack, method = Method} = test_recv_coap_response(UdpSock), ?assertEqual({error,bad_request}, Method). 
@@ -507,13 +546,16 @@ case05_register_wrong_epn(Config) -> %% Epn = "urn:oma:lwm2m:oma:3", %% MsgId = 12, -%% test_send_coap_request( UdpSock, -%% post, -%% sprintf("coap://127.0.0.1:~b/rd?ep=~ts&lwm2m=1", [?PORT, Epn]), -%% #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, -%% [], -%% MsgId), -%% #coap_message{type = ack, method = Method} = test_recv_coap_response(UdpSock), +%% test_send_coap_request( +%% UdpSock, +%% post, +%% sprintf("coap://127.0.0.1:~b/rd?ep=~ts&lwm2m=1", [?PORT, Epn]), +%% #coap_content{content_format = <<"text/plain">>, +%% payload = <<", , , , ">>}, +%% [], +%% MsgId), +%% #coap_message{type = ack, +%% method = Method} = test_recv_coap_response(UdpSock), %% ?assertEqual({error,bad_request}, Method), %% timer:sleep(50), %% ?assertEqual([], test_mqtt_broker:get_subscrbied_topics()). @@ -530,13 +572,15 @@ case07_register_alternate_path_01(Config) -> emqtt:subscribe(?config(emqx_c, Config), ReportTopic, qos0), timer:sleep(200), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, - payload = <<";rt=\"oma.lwm2m\";ct=11543,,,">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId), timer:sleep(50), true = lists:member(SubTopic, test_mqtt_broker:get_subscrbied_topics()). @@ -552,13 +596,15 @@ case07_register_alternate_path_02(Config) -> emqtt:subscribe(?config(emqx_c, Config), ReportTopic, qos0), timer:sleep(200), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, - payload = <<";rt=\"oma.lwm2m\";ct=11543,,,">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId), timer:sleep(50), true = lists:member(SubTopic, test_mqtt_broker:get_subscrbied_topics()). 
@@ -574,39 +620,40 @@ case08_reregister(Config) -> emqtt:subscribe(?config(emqx_c, Config), ReportTopic, qos0), timer:sleep(200), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, - payload = <<";rt=\"oma.lwm2m\";ct=11543,,,">>}, - [], - MsgId), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId), timer:sleep(50), true = lists:member(SubTopic, test_mqtt_broker:get_subscrbied_topics()), ReadResult = emqx_json:encode( - #{ - <<"msgType">> => <<"register">>, + #{<<"msgType">> => <<"register">>, <<"data">> => #{ - <<"alternatePath">> => <<"/lwm2m">>, - <<"ep">> => list_to_binary(Epn), - <<"lt">> => 345, - <<"lwm2m">> => <<"1">>, - <<"objectList">> => [<<"/1/0">>, <<"/2/0">>, <<"/3/0">>] - } - } - ), + <<"alternatePath">> => <<"/lwm2m">>, + <<"ep">> => list_to_binary(Epn), + <<"lt">> => 345, + <<"lwm2m">> => <<"1">>, + <<"objectList">> => [<<"/1/0">>, <<"/2/0">>, <<"/3/0">>] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(ReportTopic)), timer:sleep(1000), %% the same lwm2mc client registers to server again - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, - payload = <<";rt=\"oma.lwm2m\";ct=11543,,,">>}, - [], - MsgId + 1), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId + 1), %% verify the lwm2m client is still online ?assertEqual(ReadResult, test_recv_mqtt_response(ReportTopic)). @@ -619,13 +666,15 @@ case10_read(Config) -> emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0), timer:sleep(200), %% step 1, device register ... 
- test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, - payload = <<";rt=\"oma.lwm2m\";ct=11543,,,">>}, - [], - MsgId1), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId1), #coap_message{method = Method1} = test_recv_coap_response(UdpSock), ?assertEqual({ok,created}, Method1), test_recv_mqtt_response(RespTopic), @@ -645,7 +694,9 @@ case10_read(Config) -> test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options=Options2, + payload=Payload2} = Request2, ?LOGT("LwM2M client got ~p", [Request2]), ?assertEqual(get, Method2), @@ -653,21 +704,29 @@ case10_read(Config) -> ?assertEqual(<<>>, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, content}, #coap_content{content_format = <<"text/plain">>, payload = <<"EMQ">>}, Request2, true), + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"text/plain">>, + payload = <<"EMQ">>}, + Request2, + true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"reqPath">> => <<"/3/0/0">>, - <<"content">> => [#{ - path => <<"/3/0/0">>, - value => <<"EMQ">> - }] - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"msgType">> => <<"read">>, + <<"data">> => #{ + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"reqPath">> => <<"/3/0/0">>, + <<"content">> => [#{path => <<"/3/0/0">>, + value => <<"EMQ">>} + ] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case10_read_separate_ack(Config) -> @@ -698,7 +757,8 @@ case10_read_separate_ack(Config) -> test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, payload = Payload2} = Request2, ?LOGT("LwM2M client got ~p", [Request2]), ?assertEqual(get, Method2), @@ -706,31 +766,36 @@ case10_read_separate_ack(Config) -> ?assertEqual(<<>>, Payload2), test_send_empty_ack(UdpSock, "127.0.0.1", ?PORT, Request2), - ReadResultACK = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"ack">>, - <<"data">> => #{ - <<"path">> => <<"/3/0/0">> - } - }), + ReadResultACK = emqx_json:encode( + #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + <<"msgType">> => <<"ack">>, + <<"data">> => #{ <<"path">> => <<"/3/0/0">> } + }), ?assertEqual(ReadResultACK, test_recv_mqtt_response(RespTopic)), timer:sleep(100), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, content}, #coap_content{content_format = <<"text/plain">>, payload = <<"EMQ">>}, Request2, false), + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"text/plain">>, + payload = <<"EMQ">>}, + Request2, + false), timer:sleep(100), - ReadResult = emqx_json:encode(#{ <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"reqPath">> => <<"/3/0/0">>, - <<"content">> => [#{ - path => <<"/3/0/0">>, - value => <<"EMQ">> - }] - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"msgType">> => <<"read">>, + <<"data">> => #{ + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"reqPath">> => <<"/3/0/0">>, + <<"content">> => [#{path => <<"/3/0/0">>, + value => <<"EMQ">>}] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case11_read_object_tlv(Config) -> @@ -766,32 +831,41 @@ case11_read_object_tlv(Config) -> ?assertEqual(get, Method2), timer:sleep(50), - Tlv = <<16#08, 16#00, 16#3C, 16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65, 16#C8, 16#01, 16#16, 16#4C, 16#69, 16#67, 16#68, 16#74, 16#77, 16#65, 16#69, 16#67, 16#68, 16#74, 16#20, 16#4D, 16#32, 16#4D, 16#20, 16#43, 16#6C, 16#69, 16#65, 16#6E, 16#74, 16#C8, 16#02, 16#09, 16#33, 16#34, 16#35, 16#30, 16#30, 16#30, 16#31, 16#32, 16#33>>, - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, content}, #coap_content{content_format = <<"application/vnd.oma.lwm2m+tlv">>, payload = Tlv}, Request2, true), + Tlv = <<16#08, 16#00, 16#3C, 16#C8, 16#00, 16#14, 16#4F, 16#70, 16#65, + 16#6E, 16#20, 16#4D, 16#6F, 16#62, 16#69, 16#6C, 16#65, 16#20, + 16#41, 16#6C, 16#6C, 16#69, 16#61, 16#6E, 16#63, 16#65, 16#C8, + 16#01, 16#16, 16#4C, 16#69, 16#67, 16#68, 16#74, 16#77, 16#65, + 16#69, 16#67, 16#68, 16#74, 16#20, 16#4D, 16#32, 16#4D, 16#20, + 16#43, 16#6C, 16#69, 16#65, 16#6E, 16#74, 16#C8, 16#02, 16#09, + 16#33, 16#34, 16#35, 16#30, 16#30, 16#30, 16#31, 16#32, 16#33>>, + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"application/vnd.oma.lwm2m+tlv">>, + payload = Tlv}, + Request2, + true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"reqPath">> => <<"/3/0">>, - <<"content">> => [ - #{ - path => <<"/3/0/0">>, - value => <<"Open Mobile Alliance">> - }, - #{ - path => <<"/3/0/1">>, - value => <<"Lightweight M2M Client">> - }, - #{ - path => <<"/3/0/2">>, - value => <<"345000123">> - } - ] - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"msgType">> => <<"read">>, + <<"data">> => #{ + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"reqPath">> => <<"/3/0">>, + <<"content">> => + [#{path => <<"/3/0/0">>, + value => <<"Open Mobile Alliance">>}, + #{path => <<"/3/0/1">>, + value => <<"Lightweight M2M Client">>}, + #{path => <<"/3/0/2">>, + value => <<"345000123">>} + ] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case11_read_object_json(Config) -> @@ -828,32 +902,37 @@ case11_read_object_json(Config) -> ?assertEqual(get, Method2), timer:sleep(50), - Json = <<"{\"bn\":\"/3/0\",\"e\":[{\"n\":\"0\",\"sv\":\"Open Mobile Alliance\"},{\"n\":\"1\",\"sv\":\"Lightweight M2M Client\"},{\"n\":\"2\",\"sv\":\"345000123\"}]}">>, - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, content}, #coap_content{content_format = <<"application/vnd.oma.lwm2m+json">>, payload = Json}, Request2, true), + Json = <<"{\"bn\":\"/3/0\",\"e\":[{\"n\":\"0\",\"sv\":\"Open Mobile " + "Alliance\"},{\"n\":\"1\",\"sv\":\"Lightweight M2M Client\"}," + "{\"n\":\"2\",\"sv\":\"345000123\"}]}">>, + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"application/vnd.oma.lwm2m+json">>, + payload = Json}, + Request2, + true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"reqPath">> => <<"/3/0">>, - <<"content">> => [ - #{ - path => <<"/3/0/0">>, - value => <<"Open Mobile Alliance">> - }, - #{ - path => <<"/3/0/1">>, - value => <<"Lightweight M2M Client">> - }, - #{ - path => <<"/3/0/2">>, - value => <<"345000123">> - } - ] - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"msgType">> => <<"read">>, + <<"data">> => #{ + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"reqPath">> => <<"/3/0">>, + <<"content">> => + [#{path => <<"/3/0/0">>, + value => <<"Open Mobile Alliance">>}, + #{path => <<"/3/0/1">>, + value => <<"Lightweight M2M Client">>}, + #{path => <<"/3/0/2">>, + value => <<"345000123">>} + ] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). case12_read_resource_opaque(Config) -> @@ -890,23 +969,29 @@ case12_read_resource_opaque(Config) -> timer:sleep(50), Opaque = <<20, 21, 22, 23>>, - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, content}, #coap_content{content_format = <<"application/octet-stream">>, payload = Opaque}, Request2, true), + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"application/octet-stream">>, + payload = Opaque}, + Request2, + true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"reqPath">> => <<"/3/0/8">>, - <<"content">> => [ - #{ - path => <<"/3/0/8">>, - value => base64:encode(Opaque) - } - ] - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + <<"msgType">> => <<"read">>, + <<"data">> => #{ + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"reqPath">> => <<"/3/0/8">>, + <<"content">> => + [#{path => <<"/3/0/8">>, + value => base64:encode(Opaque)} + ] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case13_read_no_xml(Config) -> @@ -924,12 +1009,10 @@ case13_read_no_xml(Config) -> %% step2, send a READ command to device CmdId = 206, CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>, - Command = #{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, + Command = #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"path">> => <<"/9723/0/0">> - } + <<"data">> => #{ <<"path">> => <<"/9723/0/0">> } }, CommandJson = emqx_json:encode(Command), ?LOGT("CommandJson=~p", [CommandJson]), @@ -942,17 +1025,26 @@ case13_read_no_xml(Config) -> ?assertEqual(get, Method2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, content}, #coap_content{content_format = <<"text/plain">>, payload = <<"EMQ">>}, Request2, true), + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"text/plain">>, + payload = <<"EMQ">>}, + Request2, + true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"reqPath">> => <<"/9723/0/0">>, - <<"code">> => <<"4.00">>, - <<"codeMsg">> => <<"bad_request">> - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"msgType">> => <<"read">>, + <<"data">> => #{ + <<"reqPath">> => <<"/9723/0/0">>, + <<"code">> => <<"4.00">>, + <<"codeMsg">> => <<"bad_request">> + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). case20_single_write(Config) -> @@ -982,7 +1074,8 @@ case20_single_write(Config) -> test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, payload = Payload2} = Request2, Path2 = get_coap_path(Options2), ?assertEqual(put, Method2), ?assertEqual(<<"/3/0/13">>, Path2), @@ -990,18 +1083,19 @@ case20_single_write(Config) -> ?assertEqual(Tlv_Value, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, changed}, #coap_content{}, Request2, true), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {ok, changed}, #coap_content{}, Request2, true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/13">>, - <<"code">> => <<"2.04">>, - <<"codeMsg">> => <<"changed">> - }, - <<"msgType">> => <<"write">> - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/13">>, + <<"code">> => <<"2.04">>, + <<"codeMsg">> => <<"changed">>}, + <<"msgType">> => <<"write">> + }), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case20_write(Config) -> @@ -1019,21 +1113,22 @@ case20_write(Config) -> %% step2, send a WRITE command to device CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>, CmdId = 307, - Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + Command = #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, <<"msgType">> => <<"write">>, <<"data">> => #{ - <<"basePath">> => <<"/3/0/13">>, - <<"content">> => [#{ - type => <<"Float">>, - value => <<"12345.0">> - }] - } - }, + <<"basePath">> => <<"/3/0/13">>, + <<"content">> => + [#{type => <<"Float">>, + value => <<"12345.0">>}] + }}, CommandJson = emqx_json:encode(Command), test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload = Payload2} = Request2, Path2 = get_coap_path(Options2), ?assertEqual(put, Method2), ?assertEqual(<<"/3/0/13">>, Path2), @@ -1041,18 +1136,18 @@ case20_write(Config) -> ?assertEqual(Tlv_Value, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, changed}, #coap_content{}, Request2, true), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {ok, changed}, #coap_content{}, Request2, true), timer:sleep(100), - WriteResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/13">>, - <<"code">> => <<"2.04">>, - <<"codeMsg">> => <<"changed">> - }, - <<"msgType">> => <<"write">> - }), + WriteResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/13">>, + <<"code">> => <<"2.04">>, + <<"codeMsg">> => <<"changed">> }, + <<"msgType">> => <<"write">>}), ?assertEqual(WriteResult, test_recv_mqtt_response(RespTopic)). 
case21_write_object(Config) -> @@ -1070,26 +1165,26 @@ case21_write_object(Config) -> %% step2, send a WRITE command to device CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>, CmdId = 307, - Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + Command = #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, <<"msgType">> => <<"write">>, <<"data">> => #{ - <<"basePath">> => <<"/3/0/">>, - <<"content">> => [#{ - path => <<"13">>, - type => <<"Integer">>, - value => <<"12345">> - },#{ - path => <<"14">>, - type => <<"String">>, - value => <<"87x">> - }] - } - }, + <<"basePath">> => <<"/3/0/">>, + <<"content">> => + [#{path => <<"13">>, + type => <<"Integer">>, + value => <<"12345">>}, + #{path => <<"14">>, + type => <<"String">>, + value => <<"87x">>}] + }}, CommandJson = emqx_json:encode(Command), test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload=Payload2} = Request2, Path2 = get_coap_path(Options2), ?assertEqual(post, Method2), ?assertEqual(<<"/3/0">>, Path2), @@ -1098,19 +1193,19 @@ case21_write_object(Config) -> ?assertEqual(Tlv_Value, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, changed}, #coap_content{}, Request2, true), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {ok, changed}, #coap_content{}, Request2, true), timer:sleep(100), - - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"write">>, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/">>, - <<"code">> => <<"2.04">>, - <<"codeMsg">> => <<"changed">> - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"msgType">> => <<"write">>, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/">>, + <<"code">> => <<"2.04">>, + <<"codeMsg">> => <<"changed">> + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). case22_write_error(Config) -> @@ -1131,15 +1226,11 @@ case22_write_error(Config) -> Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, <<"msgType">> => <<"write">>, <<"data">> => #{ - <<"basePath">> => <<"/3/0/1">>, - <<"content">> => [ - #{ - type => <<"Integer">>, - value => <<"12345">> - } - ] - } - }, + <<"basePath">> => <<"/3/0/1">>, + <<"content">> => + [#{type => <<"Integer">>, + value => <<"12345">>}] + }}, CommandJson = emqx_json:encode(Command), test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), @@ -1150,18 +1241,20 @@ case22_write_error(Config) -> ?assertEqual(<<"/3/0/1">>, Path2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {error, bad_request}, #coap_content{}, Request2, true), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {error, bad_request}, #coap_content{}, + Request2, true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/1">>, - <<"code">> => <<"4.00">>, - <<"codeMsg">> => <<"bad_request">> - }, - <<"msgType">> => <<"write">> - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/1">>, + <<"code">> => <<"4.00">>, + <<"codeMsg">> => <<"bad_request">>}, + <<"msgType">> => <<"write">> + }), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case_create_basic(Config) -> @@ -1188,25 +1281,28 @@ case_create_basic(Config) -> test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload = Payload2} = Request2, Path2 = get_coap_path(Options2), ?assertEqual(post, Method2), ?assertEqual(<<"/5">>, Path2), ?assertEqual(<<"">>, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, created}, #coap_content{}, Request2, true), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {ok, created}, #coap_content{}, Request2, true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"data">> => #{ - <<"reqPath">> => <<"/5">>, - <<"code">> => <<"2.01">>, - <<"codeMsg">> => <<"created">> - }, - <<"msgType">> => <<"create">> - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"data">> => #{ + <<"reqPath">> => <<"/5">>, + <<"code">> => <<"2.01">>, + <<"codeMsg">> => <<"created">>}, + <<"msgType">> => <<"create">> + }), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). case_delete_basic(Config) -> @@ -1226,33 +1322,34 @@ case_delete_basic(Config) -> CmdId = 307, Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, <<"msgType">> => <<"delete">>, - <<"data">> => #{ - <<"path">> => <<"/5/0">> - } + <<"data">> => #{ <<"path">> => <<"/5/0">> } }, CommandJson = emqx_json:encode(Command), test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload = Payload2} = Request2, Path2 = get_coap_path(Options2), ?assertEqual(delete, Method2), ?assertEqual(<<"/5/0">>, Path2), ?assertEqual(<<"">>, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, deleted}, #coap_content{}, Request2, true), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {ok, deleted}, #coap_content{}, Request2, true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"data">> => #{ - <<"reqPath">> => <<"/5/0">>, - <<"code">> => <<"2.02">>, - <<"codeMsg">> => <<"deleted">> - }, - <<"msgType">> => <<"delete">> - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"data">> => #{ + <<"reqPath">> => <<"/5/0">>, + <<"code">> => <<"2.02">>, + <<"codeMsg">> => <<"deleted">>}, + <<"msgType">> => <<"delete">> + }), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case30_execute(Config) -> @@ -1270,37 +1367,41 @@ case30_execute(Config) -> %% step2, send a WRITE command to device CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>, CmdId = 307, - Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + Command = #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, <<"msgType">> => <<"execute">>, <<"data">> => #{ - <<"path">> => <<"/3/0/4">>, - %% "args" should not be present for "/3/0/4", only for testing the encoding here - <<"args">> => <<"2,7">> - } + <<"path">> => <<"/3/0/4">>, + %% "args" should not be present for "/3/0/4", only for + %% testing the encoding here + <<"args">> => <<"2,7">>} }, CommandJson = emqx_json:encode(Command), test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload = Payload2} = Request2, Path2 = get_coap_path(Options2), ?assertEqual(post, Method2), ?assertEqual(<<"/3/0/4">>, Path2), ?assertEqual(<<"2,7">>, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, changed}, #coap_content{}, Request2, true), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {ok, changed}, #coap_content{}, Request2, true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/4">>, - <<"code">> => <<"2.04">>, - <<"codeMsg">> => <<"changed">> - }, - <<"msgType">> => <<"execute">> - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/4">>, + <<"code">> => <<"2.04">>, + <<"codeMsg">> => <<"changed">>}, + <<"msgType">> => <<"execute">> + }), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). case31_execute_error(Config) -> @@ -1321,33 +1422,36 @@ case31_execute_error(Config) -> Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, <<"msgType">> => <<"execute">>, <<"data">> => #{ - <<"path">> => <<"/3/0/4">>, - <<"args">> => <<"2,7">> - } + <<"path">> => <<"/3/0/4">>, + <<"args">> => <<"2,7">>} }, CommandJson = emqx_json:encode(Command), test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload = Payload2} = Request2, Path2 = get_coap_path(Options2), ?assertEqual(post, Method2), ?assertEqual(<<"/3/0/4">>, Path2), ?assertEqual(<<"2,7">>, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {error, unauthorized}, #coap_content{}, Request2, true), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {error, unauthorized}, #coap_content{}, + Request2, true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/4">>, - <<"code">> => <<"4.01">>, - <<"codeMsg">> => <<"unauthorized">> - }, - <<"msgType">> => <<"execute">> - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, + <<"cacheID">> => CmdId, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/4">>, + <<"code">> => <<"4.01">>, + <<"codeMsg">> => <<"unauthorized">>}, + <<"msgType">> => <<"execute">> + }), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case40_discover(Config) -> @@ -1374,7 +1478,9 @@ case40_discover(Config) -> test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload = Payload2} = Request2, Path2 = get_coap_path(Options2), ?assertEqual(get, Method2), ?assertEqual(<<"/3/0/7">>, Path2), @@ -1382,26 +1488,28 @@ case40_discover(Config) -> timer:sleep(50), PayloadDiscover = <<";dim=8;pmin=10;pmax=60;gt=50;lt=42.2,">>, - test_send_coap_response(UdpSock, - "127.0.0.1", - ?PORT, - {ok, content}, - #coap_content{content_format = <<"application/link-format">>, payload = PayloadDiscover}, - Request2, - true), + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"application/link-format">>, + payload = PayloadDiscover}, + Request2, + true), timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"discover">>, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/7">>, - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"content">> => - [<<";dim=8;pmin=10;pmax=60;gt=50;lt=42.2">>, <<"">>] - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + <<"msgType">> => <<"discover">>, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/7">>, + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"content">> => + [<<";dim=8;pmin=10;pmax=60;gt=50;lt=42.2">>, + <<"">>] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). case50_write_attribute(Config) -> @@ -1431,34 +1539,33 @@ case50_write_attribute(Config) -> test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(100), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload = Payload2} = Request2, ?LOGT("got options: ~p", [Options2]), Path2 = get_coap_path(Options2), Query2 = lists:sort(maps:to_list(get_coap_query(Options2))), ?assertEqual(put, Method2), ?assertEqual(<<"/3/0/9">>, Path2), - ?assertEqual(lists:sort([{<<"pmax">>, <<"5">>},{<<"lt">>, <<"5">>},{<<"pmin">>,<<"1">>}]), Query2), + ?assertEqual(lists:sort([{<<"pmax">>, <<"5">>}, + {<<"lt">>, <<"5">>}, + {<<"pmin">>,<<"1">>}]), Query2), ?assertEqual(<<>>, Payload2), timer:sleep(50), - test_send_coap_response(UdpSock, - "127.0.0.1", - ?PORT, - {ok, changed}, - #coap_content{}, - Request2, - true), - timer:sleep(100), + test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, + {ok, changed}, #coap_content{}, + Request2, true), + timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/9">>, - <<"code">> => <<"2.04">>, - <<"codeMsg">> => <<"changed">> - }, - <<"msgType">> => <<"write-attr">> - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/9">>, + <<"code">> => <<"2.04">>, + <<"codeMsg">> => <<"changed">>}, + <<"msgType">> => <<"write-attr">> + }), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case60_observe(Config) -> @@ -1480,15 +1587,15 @@ case60_observe(Config) -> CmdId = 307, Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, <<"msgType">> => <<"observe">>, - <<"data">> => #{ - <<"path">> => <<"/3/0/10">> - } + <<"data">> => #{<<"path">> => <<"/3/0/10">>} }, CommandJson = emqx_json:encode(Command), test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50), Request2 = test_recv_coap_request(UdpSock), - #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, + #coap_message{method = Method2, + options = Options2, + payload = Payload2} = Request2, Path2 = get_coap_path(Options2), Observe = get_coap_observe(Options2), ?assertEqual(get, Method2), @@ -1497,55 +1604,53 @@ case60_observe(Config) -> ?assertEqual(<<>>, Payload2), timer:sleep(50), - test_send_coap_observe_ack( UdpSock, - "127.0.0.1", - ?PORT, - {ok, content}, - #coap_content{content_format = <<"text/plain">>, payload = <<"2048">>}, - Request2), + test_send_coap_observe_ack( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"text/plain">>, payload = <<"2048">>}, + Request2), timer:sleep(100), - ReadResult = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"observe">>, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/10">>, - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"content">> => [#{ - path => <<"/3/0/10">>, - value => 2048 - }] - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + <<"msgType">> => <<"observe">>, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/10">>, + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"content">> => + [#{path => <<"/3/0/10">>, + value => 2048}] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)), %% step3 the notifications timer:sleep(200), ObSeq = 3, - test_send_coap_notif( UdpSock, - "127.0.0.1", - ?PORT, - #coap_content{content_format = <<"text/plain">>, payload = <<"4096">>}, - ObSeq, - Request2), + test_send_coap_notif( + UdpSock, + "127.0.0.1", + ?PORT, + #coap_content{content_format = <<"text/plain">>, payload = <<"4096">>}, + ObSeq, + Request2), timer:sleep(100), #coap_message{} = test_recv_coap_response(UdpSock), - ReadResult2 = emqx_json:encode(#{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"notify">>, - <<"seqNum">> => ObSeq, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/10">>, - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"content">> => [#{ - path => <<"/3/0/10">>, - value => 4096 - }] - } - }), + ReadResult2 = emqx_json:encode( + #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + <<"msgType">> => <<"notify">>, + <<"seqNum">> => ObSeq, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/10">>, + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"content">> => + [#{path => <<"/3/0/10">>, + value => 4096}] + }}), ?assertEqual(ReadResult2, test_recv_mqtt_response(RespTopicAD)), %% Step3. 
cancel observe @@ -1560,7 +1665,9 @@ case60_observe(Config) -> test_mqtt_broker:publish(CommandTopic, CommandJson3, 0), timer:sleep(50), Request3 = test_recv_coap_request(UdpSock), - #coap_message{method = Method3, options=Options3, payload=Payload3} = Request3, + #coap_message{method = Method3, + options = Options3, + payload = Payload3} = Request3, Path3 = get_coap_path(Options3), Observe3 = get_coap_observe(Options3), ?assertEqual(get, Method3), @@ -1569,31 +1676,31 @@ case60_observe(Config) -> ?assertEqual(<<>>, Payload3), timer:sleep(50), - test_send_coap_observe_ack( UdpSock, - "127.0.0.1", - ?PORT, - {ok, content}, - #coap_content{content_format = <<"text/plain">>, payload = <<"1150">>}, - Request3), + test_send_coap_observe_ack( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"text/plain">>, payload = <<"1150">>}, + Request3), timer:sleep(100), - ReadResult3 = emqx_json:encode(#{ - <<"requestID">> => CmdId3, <<"cacheID">> => CmdId3, - <<"msgType">> => <<"cancel-observe">>, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/10">>, - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"content">> => [#{ - path => <<"/3/0/10">>, - value => 1150 - }] - } - }), + ReadResult3 = emqx_json:encode( + #{<<"requestID">> => CmdId3, + <<"cacheID">> => CmdId3, + <<"msgType">> => <<"cancel-observe">>, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/10">>, + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"content">> => + [#{path => <<"/3/0/10">>, + value => 1150}] + }}), ?assertEqual(ReadResult3, test_recv_mqtt_response(RespTopic)). %% case80_specail_object_19_0_0_notify(Config) -> -%% %% step 1, device register, with extra register options +%% %% step 1, device register, with extra register options %% Epn = "urn:oma:lwm2m:oma:3", %% RegOptionWangYi = "&apn=psmA.eDRX0.ctnb&im=13456&ct=2.0&mt=MDM9206&mv=4.0", %% MsgId1 = 15, @@ -1602,64 +1709,67 @@ case60_observe(Config) -> %% RespTopic = list_to_binary("lwm2m/"++Epn++"/up/resp"), %% emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0), %% timer:sleep(200), - -%% test_send_coap_request( UdpSock, -%% post, -%% sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1"++RegOptionWangYi, [?PORT, Epn]), -%% #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, -%% [], -%% MsgId1), -%% #coap_message{method = Method1} = test_recv_coap_response(UdpSock), -%% ?assertEqual({ok,created}, Method1), -%% ReadResult = emqx_json:encode(#{ -%% <<"msgType">> => <<"register">>, -%% <<"data">> => #{ -%% <<"alternatePath">> => <<"/">>, -%% <<"ep">> => list_to_binary(Epn), -%% <<"lt">> => 345, -%% <<"lwm2m">> => <<"1">>, -%% <<"objectList">> => [<<"/1">>, <<"/2">>, <<"/3">>, <<"/4">>, <<"/5">>], -%% <<"apn">> => <<"psmA.eDRX0.ctnb">>, -%% <<"im">> => <<"13456">>, -%% <<"ct">> => <<"2.0">>, -%% <<"mt">> => <<"MDM9206">>, -%% <<"mv">> => <<"4.0">> -%% } -%% }), -%% ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)), - -%% %% step2, send a OBSERVE command to device -%% CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>, -%% CmdId = 307, -%% Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, -%% <<"msgType">> => <<"observe">>, -%% <<"data">> => #{ -%% <<"path">> => <<"/19/0/0">> -%% } -%% }, -%% CommandJson = emqx_json:encode(Command), -%% test_mqtt_broker:publish(CommandTopic, CommandJson, 0), -%% timer:sleep(50), -%% Request2 = test_recv_coap_request(UdpSock), -%% #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, -%% Path2 
= get_coap_path(Options2), -%% Observe = get_coap_observe(Options2), -%% ?assertEqual(get, Method2), -%% ?assertEqual(<<"/19/0/0">>, Path2), -%% ?assertEqual(Observe, 0), -%% ?assertEqual(<<>>, Payload2), -%% timer:sleep(50), - -%% test_send_coap_observe_ack( UdpSock, -%% "127.0.0.1", -%% ?PORT, -%% {ok, content}, -%% #coap_content{content_format = <<"text/plain">>, payload = <<"2048">>}, -%% Request2), -%% timer:sleep(100). - -%% step 3, device send uplink data notifications - +%% +%% test_send_coap_request( +%% UdpSock, +%% post, +%% sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1"++RegOptionWangYi, [?PORT, Epn]), +%% #coap_content{content_format = <<"text/plain">>, +%% payload = <<", , , , ">>}, +%% [], +%% MsgId1), +%% #coap_message{method = Method1} = test_recv_coap_response(UdpSock), +%% ?assertEqual({ok,created}, Method1), +%% ReadResult = emqx_json:encode( +%% #{<<"msgType">> => <<"register">>, +%% <<"data">> => #{ +%% <<"alternatePath">> => <<"/">>, +%% <<"ep">> => list_to_binary(Epn), +%% <<"lt">> => 345, +%% <<"lwm2m">> => <<"1">>, +%% <<"objectList">> => [<<"/1">>, <<"/2">>, <<"/3">>, +%% <<"/4">>, <<"/5">>], +%% <<"apn">> => <<"psmA.eDRX0.ctnb">>, +%% <<"im">> => <<"13456">>, +%% <<"ct">> => <<"2.0">>, +%% <<"mt">> => <<"MDM9206">>, +%% <<"mv">> => <<"4.0">>} +%% }), +%% ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)), +%% +%% %% step2, send a OBSERVE command to device +%% CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>, +%% CmdId = 307, +%% Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, +%% <<"msgType">> => <<"observe">>, +%% <<"data">> => #{ +%% <<"path">> => <<"/19/0/0">> +%% } +%% }, +%% CommandJson = emqx_json:encode(Command), +%% test_mqtt_broker:publish(CommandTopic, CommandJson, 0), +%% timer:sleep(50), +%% Request2 = test_recv_coap_request(UdpSock), +%% #coap_message{method = Method2, +%% options = Options2, +%% payload = Payload2} = Request2, +%% Path2 = get_coap_path(Options2), +%% Observe = get_coap_observe(Options2), +%% ?assertEqual(get, Method2), +%% ?assertEqual(<<"/19/0/0">>, Path2), +%% ?assertEqual(Observe, 0), +%% ?assertEqual(<<>>, Payload2), +%% timer:sleep(50), +%% +%% test_send_coap_observe_ack( +%% UdpSock, +%% "127.0.0.1", +%% ?PORT, +%% {ok, content}, +%% #coap_content{content_format = <<"text/plain">>, payload = <<"2048">>}, +%% Request2), +%% timer:sleep(100). 
+%% %% case80_specail_object_19_1_0_write(Config) -> %% Epn = "urn:oma:lwm2m:oma:3", %% RegOptionWangYi = "&apn=psmA.eDRX0.ctnb&im=13456&ct=2.0&mt=MDM9206&mv=4.0", @@ -1668,52 +1778,57 @@ case60_observe(Config) -> %% RespTopic = list_to_binary("lwm2m/"++Epn++"/up/resp"), %% emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0), %% timer:sleep(200), - -%% test_send_coap_request( UdpSock, -%% post, -%% sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1"++RegOptionWangYi, [?PORT, Epn]), -%% #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, -%% [], -%% MsgId1), +%% +%% test_send_coap_request( +%% UdpSock, +%% post, +%% sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1"++RegOptionWangYi, [?PORT, Epn]), +%% #coap_content{content_format = <<"text/plain">>, +%% payload = <<", , , , ">>}, +%% [], +%% MsgId1), %% #coap_message{method = Method1} = test_recv_coap_response(UdpSock), %% ?assertEqual({ok,created}, Method1), %% test_recv_mqtt_response(RespTopic), - -%% %% step2, send a WRITE command to device +%% +%% %% step2, send a WRITE command to device %% CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>, %% CmdId = 307, -%% Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, +%% Command = #{<<"requestID">> => CmdId, +%% <<"cacheID">> => CmdId, %% <<"msgType">> => <<"write">>, %% <<"data">> => #{ -%% <<"path">> => <<"/19/1/0">>, -%% <<"type">> => <<"Opaque">>, -%% <<"value">> => base64:encode(<<12345:32>>) -%% } -%% }, - +%% <<"path">> => <<"/19/1/0">>, +%% <<"type">> => <<"Opaque">>, +%% <<"value">> => base64:encode(<<12345:32>>) +%% }}, +%% %% CommandJson = emqx_json:encode(Command), %% test_mqtt_broker:publish(CommandTopic, CommandJson, 0), %% timer:sleep(50), %% Request2 = test_recv_coap_request(UdpSock), -%% #coap_message{method = Method2, options=Options2, payload=Payload2} = Request2, +%% #coap_message{method = Method2, +%% options = Options2, +%% payload = Payload2} = Request2, %% Path2 = get_coap_path(Options2), %% ?assertEqual(put, Method2), %% ?assertEqual(<<"/19/1/0">>, Path2), %% ?assertEqual(<<3:2, 0:1, 0:2, 4:3, 0, 12345:32>>, Payload2), %% timer:sleep(50), - -%% test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, changed}, #coap_content{}, Request2, true), +%% +%% test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, +%% {ok, changed}, #coap_content{}, Request2, true), %% timer:sleep(100), - -%% ReadResult = emqx_json:encode(#{ -%% <<"requestID">> => CmdId, <<"cacheID">> => CmdId, -%% <<"data">> => #{ -%% <<"reqPath">> => <<"/19/1/0">>, -%% <<"code">> => <<"2.04">>, -%% <<"codeMsg">> => <<"changed">> -%% }, -%% <<"msgType">> => <<"write">> -%% }), +%% +%% ReadResult = emqx_json:encode( +%% #{<<"requestID">> => CmdId, +%% <<"cacheID">> => CmdId, +%% <<"data">> => #{ +%% <<"reqPath">> => <<"/19/1/0">>, +%% <<"code">> => <<"2.04">>, +%% <<"codeMsg">> => <<"changed">>}, +%% <<"msgType">> => <<"write">> +%% }), %% ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
case90_psm_mode(Config) -> @@ -1735,13 +1850,17 @@ server_cache_mode(Config, RegOption) -> emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0), timer:sleep(200), - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?"++RegOption, [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = <<", , , , ">>}, - [], - MsgId1), - #coap_message{type = ack, method = Method1, options = Opts} = test_recv_coap_response(UdpSock), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?"++RegOption, [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<", , , , ">>}, + [], + MsgId1), + #coap_message{type = ack, + method = Method1, + options = Opts} = test_recv_coap_response(UdpSock), ?assertEqual({ok,created}, Method1), ?LOGT("Options got: ~p", [Opts]), Location = maps:get(location_path, Opts), @@ -1759,7 +1878,8 @@ server_cache_mode(Config, RegOption) -> send_read_command_1(2, UdpSock), send_read_command_1(3, UdpSock), - ?assertEqual(timeout_test_recv_coap_request, test_recv_coap_request(UdpSock)), + ?assertEqual(timeout_test_recv_coap_request, + test_recv_coap_request(UdpSock)), device_update_1(UdpSock, Location), @@ -1774,13 +1894,10 @@ server_cache_mode(Config, RegOption) -> send_read_command_1(CmdId, _UdpSock) -> Epn = "urn:oma:lwm2m:oma:3", CommandTopic = <<"lwm2m/", (list_to_binary(Epn))/binary, "/dn/dm">>, - Command = #{ - <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"path">> => <<"/3/0/0">> - } - }, + Command = #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + <<"msgType">> => <<"read">>, + <<"data">> => #{<<"path">> => <<"/3/0/0">>} + }, CommandJson = emqx_json:encode(Command), test_mqtt_broker:publish(CommandTopic, CommandJson, 0), timer:sleep(50). @@ -1794,20 +1911,28 @@ verify_read_response_1(CmdId, UdpSock) -> ?LOGT("LwM2M client got ~p", [Request]), %% device replies the commond - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, content}, #coap_content{content_format = <<"text/plain">>, payload = <<"EMQ">>}, Request, true), + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"text/plain">>, + payload = <<"EMQ">>}, + Request, + true), - ReadResult = emqx_json:encode(#{ <<"requestID">> => CmdId, <<"cacheID">> => CmdId, - <<"msgType">> => <<"read">>, - <<"data">> => #{ - <<"reqPath">> => <<"/3/0/0">>, - <<"code">> => <<"2.05">>, - <<"codeMsg">> => <<"content">>, - <<"content">> => [#{ - path => <<"/3/0/0">>, - value => <<"EMQ">> - }] - } - }), + ReadResult = emqx_json:encode( + #{<<"requestID">> => CmdId, <<"cacheID">> => CmdId, + <<"msgType">> => <<"read">>, + <<"data">> => #{ + <<"reqPath">> => <<"/3/0/0">>, + <<"code">> => <<"2.05">>, + <<"codeMsg">> => <<"content">>, + <<"content">> => + [#{path => <<"/3/0/0">>, + value => <<"EMQ">> + }] + }}), ?assertEqual(ReadResult, test_recv_mqtt_response(RespTopic)). 
device_update_1(UdpSock, Location) -> @@ -1815,13 +1940,16 @@ device_update_1(UdpSock, Location) -> RespTopic = list_to_binary("lwm2m/"++Epn++"/up/resp"), ?LOGT("send UPDATE command", []), MsgId2 = 27, - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b~ts?lt=789", [?PORT, join_path(Location, <<>>)]), - #coap_content{payload = <<>>}, - [], - MsgId2), - #coap_message{type = ack, id = MsgId2, method = Method2} = test_recv_coap_response(UdpSock), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b~ts?lt=789", [?PORT, join_path(Location, <<>>)]), + #coap_content{payload = <<>>}, + [], + MsgId2), + #coap_message{type = ack, + id = MsgId2, + method = Method2} = test_recv_coap_response(UdpSock), {ok,changed} = Method2, test_recv_mqtt_response(RespTopic). @@ -1834,34 +1962,56 @@ test_recv_mqtt_response(RespTopic) -> end. test_send_coap_request(UdpSock, Method, Uri, Content, Options, MsgId) -> - is_record(Content, coap_content) orelse error("Content must be a #coap_content!"), - is_list(Options) orelse error("Options must be a list"), + is_record(Content, coap_content) orelse + error("Content must be a #coap_content!"), + is_list(Options) orelse + error("Options must be a list"), case resolve_uri(Uri) of {coap, {IpAddr, Port}, Path, Query} -> - Request0 = request(con, Method, Content, [{uri_path, Path}, {uri_query, Query} | Options]), + Request0 = request( + con, Method, Content, + [{uri_path, Path}, {uri_query, Query} | Options] + ), Request = Request0#coap_message{id = MsgId}, ?LOGT("send_coap_request Request=~p", [Request]), + RequestBinary = emqx_coap_frame:serialize_pkt(Request, undefined), - ?LOGT("test udp socket send to ~p:~p, data=~p", [IpAddr, Port, RequestBinary]), + ?LOGT("test udp socket send to ~p:~p, data=~p", + [IpAddr, Port, RequestBinary]), ok = gen_udp:send(UdpSock, IpAddr, Port, RequestBinary); {SchemeDiff, ChIdDiff, _, _} -> - error(lists:flatten(io_lib:format("scheme ~ts or ChId ~ts does not match with socket", [SchemeDiff, ChIdDiff]))) + error( + lists:flatten( + io_lib:format( + "scheme ~ts or ChId ~ts does not match with socket", + [SchemeDiff, ChIdDiff]))) end. test_recv_coap_response(UdpSock) -> {ok, {Address, Port, Packet}} = gen_udp:recv(UdpSock, 0, 2000), {ok, Response, _, _} = emqx_coap_frame:parse(Packet, undefined), - ?LOGT("test udp receive from ~p:~p, data1=~p, Response=~p", [Address, Port, Packet, Response]), - #coap_message{type = ack, method = Method, id=Id, token = Token, options = Options, payload = Payload} = Response, - ?LOGT("receive coap response Method=~p, Id=~p, Token=~p, Options=~p, Payload=~p", [Method, Id, Token, Options, Payload]), + ?LOGT("test udp receive from ~p:~p, data1=~p, Response=~p", + [Address, Port, Packet, Response]), + #coap_message{type = ack, method = Method, id = Id, + token = Token, options = Options, payload = Payload} = Response, + ?LOGT("receive coap response Method=~p, Id=~p, Token=~p, " + "Options=~p, Payload=~p", [Method, Id, Token, Options, Payload]), Response. 
test_recv_coap_request(UdpSock) -> case gen_udp:recv(UdpSock, 0, 2000) of {ok, {_Address, _Port, Packet}} -> {ok, Request, _, _} = emqx_coap_frame:parse(Packet, undefined), - #coap_message{type = con, method = Method, id=Id, token = Token, payload = Payload, options = Options} = Request, - ?LOGT("receive coap request Method=~p, Id=~p, Token=~p, Options=~p, Payload=~p", [Method, Id, Token, Options, Payload]), + #coap_message{ + type = con, + id = Id, + method = Method, + token = Token, + payload = Payload, + options = Options} = Request, + ?LOGT("receive coap request Method=~p, Id=~p, Token=~p, " + "Options=~p, Payload=~p", + [Method, Id, Token, Options, Payload]), Request; {error, Reason} -> ?LOGT("test_recv_coap_request failed, Reason=~p", [Reason]), @@ -1869,7 +2019,8 @@ test_recv_coap_request(UdpSock) -> end. test_send_coap_response(UdpSock, Host, Port, Code, Content, Request, Ack) -> - is_record(Content, coap_content) orelse error("Content must be a #coap_content!"), + is_record(Content, coap_content) orelse + error("Content must be a #coap_content!"), is_list(Host) orelse error("Host is not a string"), {ok, IpAddr} = inet:getaddr(Host, inet), @@ -1879,17 +2030,20 @@ test_send_coap_response(UdpSock, Host, Port, Code, Content, Request, Ack) -> false -> Response end, ?LOGT("test_send_coap_response Response=~p", [Response2]), - ok = gen_udp:send(UdpSock, IpAddr, Port, emqx_coap_frame:serialize_pkt(Response2, undefined)). + ok = gen_udp:send(UdpSock, IpAddr, Port, + emqx_coap_frame:serialize_pkt(Response2, undefined)). test_send_empty_ack(UdpSock, Host, Port, Request) -> is_list(Host) orelse error("Host is not a string"), {ok, IpAddr} = inet:getaddr(Host, inet), EmptyACK = emqx_coap_message:ack(Request), ?LOGT("test_send_empty_ack EmptyACK=~p", [EmptyACK]), - ok = gen_udp:send(UdpSock, IpAddr, Port, emqx_coap_frame:serialize_pkt(EmptyACK, undefined)). + ok = gen_udp:send(UdpSock, IpAddr, Port, + emqx_coap_frame:serialize_pkt(EmptyACK, undefined)). test_send_coap_observe_ack(UdpSock, Host, Port, Code, Content, Request) -> - is_record(Content, coap_content) orelse error("Content must be a #coap_content!"), + is_record(Content, coap_content) orelse + error("Content must be a #coap_content!"), is_list(Host) orelse error("Host is not a string"), {ok, IpAddr} = inet:getaddr(Host, inet), @@ -1902,7 +2056,8 @@ test_send_coap_observe_ack(UdpSock, Host, Port, Code, Content, Request) -> ok = gen_udp:send(UdpSock, IpAddr, Port, ResponseBinary). test_send_coap_notif(UdpSock, Host, Port, Content, ObSeq, Request) -> - is_record(Content, coap_content) orelse error("Content must be a #coap_content!"), + is_record(Content, coap_content) orelse + error("Content must be a #coap_content!"), is_list(Host) orelse error("Host is not a string"), {ok, IpAddr} = inet:getaddr(Host, inet), @@ -1914,12 +2069,13 @@ test_send_coap_notif(UdpSock, Host, Port, Content, ObSeq, Request) -> ok = gen_udp:send(UdpSock, IpAddr, Port, NotifBinary). 
std_register(UdpSock, Epn, ObjectList, MsgId1, RespTopic) -> - test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, payload = ObjectList}, - [], - MsgId1), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=345&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, payload = ObjectList}, + [], + MsgId1), #coap_message{method = {ok,created}} = test_recv_coap_response(UdpSock), test_recv_mqtt_response(RespTopic), timer:sleep(100). @@ -1977,8 +2133,11 @@ sprintf(Format, Args) -> lists:flatten(io_lib:format(Format, Args)). response(Code, #coap_content{content_format = Format, payload = Payload}, Req) -> - #coap_message{options = Opts} = Msg = emqx_coap_message:response(Code, Payload, Req), + Msg = #coap_message{options = Opts} + = emqx_coap_message:response(Code, Payload, Req), Msg#coap_message{options = Opts#{content_format => Format}}. -request(Type, Method, #coap_content{content_format = Format, payload = Payload}, Opts) -> - emqx_coap_message:request(Type, Method, Payload, [{content_format, Format} | Opts]). +request(Type, Method, #coap_content{content_format = Format, + payload = Payload}, Opts) -> + emqx_coap_message:request(Type, Method, + Payload, [{content_format, Format} | Opts]). diff --git a/apps/emqx_gateway/test/emqx_lwm2m_api_SUITE.erl b/apps/emqx_gateway/test/emqx_lwm2m_api_SUITE.erl index ed817dbd8..65c3ba2fb 100644 --- a/apps/emqx_gateway/test/emqx_lwm2m_api_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_lwm2m_api_SUITE.erl @@ -31,11 +31,11 @@ -define(CONF_DEFAULT, <<" gateway.lwm2m { xml_dir = \"../../lib/emqx_gateway/src/lwm2m/lwm2m_xml\" - lifetime_min = 1s + lifetime_min = 100s lifetime_max = 86400s - qmode_time_window = 22 + qmode_time_window = 200 auto_observe = false - mountpoint = \"lwm2m/%u\" + mountpoint = \"lwm2m/${username}\" update_msg_publish_condition = contains_object_list translators { command = {topic = \"/dn/#\", qos = 0} @@ -70,12 +70,13 @@ all() -> init_per_suite(Config) -> ok = emqx_config:init_load(emqx_gateway_schema, ?CONF_DEFAULT), - emqx_mgmt_api_test_util:init_suite([emqx_gateway]), + emqx_mgmt_api_test_util:init_suite([emqx_conf, emqx_gateway]), Config. end_per_suite(Config) -> timer:sleep(300), - emqx_mgmt_api_test_util:end_suite([emqx_gateway]), + {ok, _} = emqx_conf:remove([<<"gateway">>,<<"lwm2m">>], #{}), + emqx_mgmt_api_test_util:end_suite([emqx_gateway, emqx_conf]), Config. init_per_testcase(_AllTestCase, Config) -> @@ -90,29 +91,32 @@ init_per_testcase(_AllTestCase, Config) -> [{sock, ClientUdpSock}, {emqx_c, C} | Config]. end_per_testcase(_AllTestCase, Config) -> - timer:sleep(300), gen_udp:close(?config(sock, Config)), emqtt:disconnect(?config(emqx_c, Config)), - ok = application:stop(emqx_gateway). + ok = application:stop(emqx_gateway), + timer:sleep(300). %%-------------------------------------------------------------------- %% Cases %%-------------------------------------------------------------------- t_lookup_cmd_read(Config) -> UdpSock = ?config(sock, Config), - Epn = "urn:oma:lwm2m:oma:3", + Epn = "urn:oma:lwm2m:oma:1", MsgId1 = 15, RespTopic = list_to_binary("lwm2m/"++Epn++"/up/resp"), emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0), timer:sleep(200), %% step 1, device register ... 
- test_send_coap_request( UdpSock, - post, - sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=600&lwm2m=1", [?PORT, Epn]), - #coap_content{content_format = <<"text/plain">>, - payload = <<";rt=\"oma.lwm2m\";ct=11543,,,">>}, - [], - MsgId1), + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=600&lwm2m=1", [?PORT, Epn]), + #coap_content{ + content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId1), #coap_message{method = Method1} = test_recv_coap_response(UdpSock), ?assertEqual({ok,created}, Method1), @@ -140,14 +144,21 @@ t_lookup_cmd_read(Config) -> ?LOGT("LwM2M client got ~p", [Request2]), timer:sleep(50), - test_send_coap_response(UdpSock, "127.0.0.1", ?PORT, {ok, content}, #coap_content{content_format = <<"text/plain">>, payload = <<"EMQ">>}, Request2, true), + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"text/plain">>, payload = <<"EMQ">>}, + Request2, + true), timer:sleep(200), normal_received_request(Epn, <<"/3/0/0">>, <<"read">>). t_lookup_cmd_discover(Config) -> %% step 1, device register ... - Epn = "urn:oma:lwm2m:oma:3", + Epn = "urn:oma:lwm2m:oma:2", MsgId1 = 15, UdpSock = ?config(sock, Config), ObjectList = <<", , , , ">>, @@ -176,20 +187,120 @@ t_lookup_cmd_discover(Config) -> timer:sleep(50), PayloadDiscover = <<";dim=8;pmin=10;pmax=60;gt=50;lt=42.2,">>, - test_send_coap_response(UdpSock, - "127.0.0.1", - ?PORT, - {ok, content}, - #coap_content{content_format = <<"application/link-format">>, payload = PayloadDiscover}, - Request2, - true), + test_send_coap_response( + UdpSock, + "127.0.0.1", + ?PORT, + {ok, content}, + #coap_content{content_format = <<"application/link-format">>, + payload = PayloadDiscover}, + Request2, + true), timer:sleep(200), discover_received_request(Epn, <<"/3/0/7">>, <<"discover">>). +t_read(Config) -> + UdpSock = ?config(sock, Config), + Epn = "urn:oma:lwm2m:oma:3", + MsgId1 = 15, + RespTopic = list_to_binary("lwm2m/"++Epn++"/up/resp"), + emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0), + timer:sleep(200), + %% step 1, device register ... + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=600&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId1), + #coap_message{method = Method1} = test_recv_coap_response(UdpSock), + ?assertEqual({ok,created}, Method1), + + timer:sleep(100), + test_recv_mqtt_response(RespTopic), + + %% step2, call Read API + call_send_api(Epn, "read", "path=/3/0/0"), + timer:sleep(100), + #coap_message{type = Type, method = Method, options = Opts} = test_recv_coap_request(UdpSock), + ?assertEqual(con, Type), + ?assertEqual(get, Method), + ?assertEqual([<<"lwm2m">>, <<"3">>, <<"0">>, <<"0">>], maps:get(uri_path, Opts)). + + +t_write(Config) -> + UdpSock = ?config(sock, Config), + Epn = "urn:oma:lwm2m:oma:4", + MsgId1 = 15, + RespTopic = list_to_binary("lwm2m/"++Epn++"/up/resp"), + emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0), + timer:sleep(200), + %% step 1, device register ... 
+ test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=600&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId1), + #coap_message{method = Method1} = test_recv_coap_response(UdpSock), + ?assertEqual({ok,created}, Method1), + + timer:sleep(100), + test_recv_mqtt_response(RespTopic), + + %% step2, call write API + call_send_api(Epn, "write", "path=/3/0/13&type=Integer&value=123"), + timer:sleep(100), + #coap_message{type = Type, method = Method, options = Opts} = test_recv_coap_request(UdpSock), + ?assertEqual(con, Type), + ?assertEqual(put, Method), + ?assertEqual([<<"lwm2m">>, <<"3">>, <<"0">>, <<"13">>], maps:get(uri_path, Opts)), + ?assertEqual(<<"application/vnd.oma.lwm2m+tlv">>, maps:get(content_format, Opts)). + + + +t_observe(Config) -> + UdpSock = ?config(sock, Config), + Epn = "urn:oma:lwm2m:oma:5", + MsgId1 = 15, + RespTopic = list_to_binary("lwm2m/"++Epn++"/up/resp"), + emqtt:subscribe(?config(emqx_c, Config), RespTopic, qos0), + timer:sleep(200), + %% step 1, device register ... + test_send_coap_request( + UdpSock, + post, + sprintf("coap://127.0.0.1:~b/rd?ep=~ts<=600&lwm2m=1", [?PORT, Epn]), + #coap_content{content_format = <<"text/plain">>, + payload = <<";rt=\"oma.lwm2m\";ct=11543," + ",,">>}, + [], + MsgId1), + #coap_message{method = Method1} = test_recv_coap_response(UdpSock), + ?assertEqual({ok,created}, Method1), + + timer:sleep(100), + test_recv_mqtt_response(RespTopic), + + %% step2, call observe API + call_send_api(Epn, "observe", "path=/3/0/1&enable=false"), + timer:sleep(100), + #coap_message{type = Type, method = Method, options = Opts} = test_recv_coap_request(UdpSock), + ?assertEqual(con, Type), + ?assertEqual(get, Method), + ?assertEqual([<<"lwm2m">>, <<"3">>, <<"0">>, <<"1">>], maps:get(uri_path, Opts)), + ?assertEqual(1, maps:get(observe, Opts)). + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% Internal Functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -send_request(ClientId, Path, Action) -> +call_lookup_api(ClientId, Path, Action) -> ApiPath = emqx_mgmt_api_test_util:api_path(["gateway/lwm2m", ClientId, "lookup_cmd"]), Auth = emqx_mgmt_api_test_util:auth_header_(), Query = io_lib:format("path=~ts&action=~ts", [Path, Action]), @@ -197,8 +308,15 @@ send_request(ClientId, Path, Action) -> ?LOGT("rest api response:~ts~n", [Response]), Response. +call_send_api(ClientId, Cmd, Query) -> + ApiPath = emqx_mgmt_api_test_util:api_path(["gateway/lwm2m", ClientId, Cmd]), + Auth = emqx_mgmt_api_test_util:auth_header_(), + {ok, Response} = emqx_mgmt_api_test_util:request_api(post, ApiPath, Query, Auth), + ?LOGT("rest api response:~ts~n", [Response]), + Response. + no_received_request(ClientId, Path, Action) -> - Response = send_request(ClientId, Path, Action), + Response = call_lookup_api(ClientId, Path, Action), NotReceived = #{<<"clientid">> => list_to_binary(ClientId), <<"action">> => Action, <<"code">> => <<"6.01">>, @@ -206,7 +324,7 @@ no_received_request(ClientId, Path, Action) -> <<"path">> => Path}, ?assertEqual(NotReceived, emqx_json:decode(Response, [return_maps])). 
normal_received_request(ClientId, Path, Action) -> - Response = send_request(ClientId, Path, Action), + Response = call_lookup_api(ClientId, Path, Action), RCont = emqx_json:decode(Response, [return_maps]), ?assertEqual(list_to_binary(ClientId), maps:get(<<"clientid">>, RCont, undefined)), ?assertEqual(Path, maps:get(<<"path">>, RCont, undefined)), diff --git a/apps/emqx_gateway/test/emqx_sn_frame_SUITE.erl b/apps/emqx_gateway/test/emqx_sn_frame_SUITE.erl index c02b60dd0..335e00b20 100644 --- a/apps/emqx_gateway/test/emqx_sn_frame_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_sn_frame_SUITE.erl @@ -164,7 +164,7 @@ t_random_test(_) -> random_test_body() -> Data = generate_random_binary(), case catch parse(Data) of - {ok, _Msg} -> ok; + Msg when is_record(Msg, mqtt_sn_message) -> ok; {'EXIT', {Err, _Stack}} when Err =:= unkown_message_type; Err =:= malformed_message_len; diff --git a/apps/emqx_gateway/test/emqx_stomp_SUITE.erl b/apps/emqx_gateway/test/emqx_stomp_SUITE.erl index 8436b7312..179471127 100644 --- a/apps/emqx_gateway/test/emqx_stomp_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_stomp_SUITE.erl @@ -87,31 +87,36 @@ t_connect(_) -> %% Connect will be failed, because of bad login or passcode %% FIXME: Waiting for authentication works - %with_connection(fun(Sock) -> - % gen_tcp:send(Sock, serialize(<<"CONNECT">>, - % [{<<"accept-version">>, ?STOMP_VER}, - % {<<"host">>, <<"127.0.0.1:61613">>}, - % {<<"login">>, <<"admin">>}, - % {<<"passcode">>, <<"admin">>}, - % {<<"heart-beat">>, <<"1000,2000">>}])), - % {ok, Data} = gen_tcp:recv(Sock, 0), - % {ok, #stomp_frame{command = <<"ERROR">>, - % headers = _, - % body = <<"Login or passcode error!">>}, _, _} = parse(Data) - % end), + %with_connection( + % fun(Sock) -> + % gen_tcp:send(Sock, serialize(<<"CONNECT">>, + % [{<<"accept-version">>, ?STOMP_VER}, + % {<<"host">>, <<"127.0.0.1:61613">>}, + % {<<"login">>, <<"admin">>}, + % {<<"passcode">>, <<"admin">>}, + % {<<"heart-beat">>, <<"1000,2000">>}])), + % {ok, Data} = gen_tcp:recv(Sock, 0), + % {ok, Frame, _, _} = parse(Data), + % #stomp_frame{command = <<"ERROR">>, + % headers = _, + % body = <<"Login or passcode error!">>} = Frame + % end), %% Connect will be failed, because of bad version with_connection(fun(Sock) -> - gen_tcp:send(Sock, serialize(<<"CONNECT">>, - [{<<"accept-version">>, <<"2.0,2.1">>}, - {<<"host">>, <<"127.0.0.1:61613">>}, - {<<"login">>, <<"guest">>}, - {<<"passcode">>, <<"guest">>}, - {<<"heart-beat">>, <<"1000,2000">>}])), + gen_tcp:send(Sock, + serialize(<<"CONNECT">>, + [{<<"accept-version">>, <<"2.0,2.1">>}, + {<<"host">>, <<"127.0.0.1:61613">>}, + {<<"login">>, <<"guest">>}, + {<<"passcode">>, <<"guest">>}, + {<<"heart-beat">>, <<"1000,2000">>}])), {ok, Data} = gen_tcp:recv(Sock, 0), - {ok, #stomp_frame{command = <<"ERROR">>, - headers = _, - body = <<"Login Failed: Supported protocol versions < 1.2">>}, _, _} = parse(Data) + {ok, Frame, _, _} = parse(Data), + #stomp_frame{ + command = <<"ERROR">>, + headers = _, + body = <<"Login Failed: Supported protocol versions < 1.2">>} = Frame end). t_heartbeat(_) -> @@ -403,6 +408,8 @@ t_rest_clienit_info(_) -> %% kickout {204, _} = request(delete, ClientPath), + ignored = gen_server:call(emqx_cm, ignore, infinity), % sync + ok = emqx_pool:flush_async_tasks(), {200, Clients2} = request(get, "/gateway/stomp/clients"), ?assertEqual(0, length(maps:get(data, Clients2))) end). 
diff --git a/apps/emqx_limiter/etc/emqx_limiter.conf b/apps/emqx_limiter/etc/emqx_limiter.conf index 44bbb1740..7298931e3 100644 --- a/apps/emqx_limiter/etc/emqx_limiter.conf +++ b/apps/emqx_limiter/etc/emqx_limiter.conf @@ -47,4 +47,4 @@ emqx_limiter { per_client = "100/10s,10" } } -} \ No newline at end of file +} diff --git a/apps/emqx_machine/test/emqx_machine_SUITE.erl b/apps/emqx_machine/test/emqx_machine_SUITE.erl index cce0778e2..03d9e6ba9 100644 --- a/apps/emqx_machine/test/emqx_machine_SUITE.erl +++ b/apps/emqx_machine/test/emqx_machine_SUITE.erl @@ -26,6 +26,23 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> +    %% CASE-SIDE-EFFECT: +    %% +    %% Running-Seq: +    %%    emqx_authz_api_mnesia_SUITE.erl +    %%    emqx_gateway_api_SUITE.erl +    %%    emqx_machine_SUITE.erl +    %% +    %% Reason: +    %%    the `emqx_machine_boot:ensure_apps_started()` will crash +    %%    on starting `emqx_authz` with dirty confs, because the file +    %%    `.._build/test/lib/emqx_conf/etc/acl.conf` cannot be found +    %% +    %% Workaround: +    %%    Unload emqx_authz to avoid rebooting this application +    %% +    application:unload(emqx_authz), +    emqx_common_test_helpers:start_apps([]), Config. diff --git a/apps/emqx_management/src/emqx_mgmt.erl b/apps/emqx_management/src/emqx_mgmt.erl index eca5b3e56..f82b881a8 100644 --- a/apps/emqx_management/src/emqx_mgmt.erl +++ b/apps/emqx_management/src/emqx_mgmt.erl @@ -17,6 +17,8 @@ -module(emqx_mgmt). -include("emqx_mgmt.hrl"). +-elvis([{elvis_style, invalid_dynamic_call, disable}]). +-elvis([{elvis_style, god_modules, disable}]). -include_lib("stdlib/include/qlc.hrl"). -include_lib("emqx/include/emqx.hrl"). @@ -51,6 +53,7 @@ , clean_authz_cache_all/1 , set_ratelimit_policy/2 , set_quota_policy/2 + , set_keepalive/2 ]). %% Internal funcs @@ -119,6 +122,8 @@ -define(APP, emqx_management). +-elvis([{elvis_style, god_modules, disable}]). + %% TODO: remove these function after all api use minirest version 1.X return() -> ok. @@ -147,7 +152,9 @@ node_info(Node) when Node =:= node() -> memory_used => proplists:get_value(total, Memory), process_available => erlang:system_info(process_limit), process_used => erlang:system_info(process_count), - max_fds => proplists:get_value(max_fds, lists:usort(lists:flatten(erlang:system_info(check_io)))), + + max_fds => proplists:get_value( + max_fds, lists:usort(lists:flatten(erlang:system_info(check_io)))), connections => ets:info(emqx_channel, size), node_status => 'Running', uptime => proplists:get_value(uptime, BrokerInfo), @@ -232,10 +239,13 @@ nodes_info_count(PropList) -> %%-------------------------------------------------------------------- lookup_client({clientid, ClientId}, FormatFun) -> - lists:append([lookup_client(Node, {clientid, ClientId}, FormatFun) || Node <- mria_mnesia:running_nodes()]); + + lists:append([lookup_client(Node, {clientid, ClientId}, FormatFun) + || Node <- mria_mnesia:running_nodes()]); lookup_client({username, Username}, FormatFun) -> - lists:append([lookup_client(Node, {username, Username}, FormatFun) || Node <- mria_mnesia:running_nodes()]). + lists:append([lookup_client(Node, {username, Username}, FormatFun) + || Node <- mria_mnesia:running_nodes()]). lookup_client(Node, {clientid, ClientId}, {M,F}) when Node =:= node() -> lists:append(lists:map( @@ -256,11 +266,13 @@ lookup_client(Node, {username, Username}, {M,F}) when Node =:= node() -> lookup_client(Node, {username, Username}, FormatFun) -> rpc_call(Node, lookup_client, [Node, {username, Username}, FormatFun]).
-kickout_client(ClientId) -> - Results = [kickout_client(Node, ClientId) || Node <- mria_mnesia:running_nodes()], - case lists:any(fun(Item) -> Item =:= ok end, Results) of - true -> ok; - false -> lists:last(Results) +kickout_client({ClientID, FormatFun}) -> + case lookup_client({clientid, ClientID}, FormatFun) of + [] -> + {error, not_found}; + _ -> + Results = [kickout_client(Node, ClientID) || Node <- mria_mnesia:running_nodes()], + check_results(Results) end. kickout_client(Node, ClientId) when Node =:= node() -> @@ -280,7 +292,7 @@ list_client_subscriptions(ClientId) -> end, Results), case Expected of [] -> []; - [Result|_] -> Result + [Result | _] -> Result end. client_subscriptions(Node, ClientId) when Node =:= node() -> @@ -291,10 +303,8 @@ client_subscriptions(Node, ClientId) -> clean_authz_cache(ClientId) -> Results = [clean_authz_cache(Node, ClientId) || Node <- mria_mnesia:running_nodes()], - case lists:any(fun(Item) -> Item =:= ok end, Results) of - true -> ok; - false -> lists:last(Results) - end. + check_results(Results). + clean_authz_cache(Node, ClientId) when Node =:= node() -> case emqx_cm:lookup_channels(ClientId) of @@ -326,6 +336,9 @@ set_ratelimit_policy(ClientId, Policy) -> set_quota_policy(ClientId, Policy) -> call_client(ClientId, {quota, Policy}). +set_keepalive(ClientId, Interval) -> + call_client(ClientId, {keepalive, Interval}). + %% @private call_client(ClientId, Req) -> Results = [call_client(Node, ClientId, Req) || Node <- mria_mnesia:running_nodes()], @@ -334,7 +347,7 @@ call_client(ClientId, Req) -> end, Results), case Expected of [] -> {error, not_found}; - [Result|_] -> Result + [Result | _] -> Result end. %% @private @@ -345,7 +358,7 @@ call_client(Node, ClientId, Req) when Node =:= node() -> Pid = lists:last(Pids), case emqx_cm:get_chan_info(ClientId, Pid) of #{conninfo := #{conn_mod := ConnMod}} -> - ConnMod:call(Pid, Req); + erlang:apply(ConnMod, call, [Pid, Req]); undefined -> {error, not_found} end end; @@ -366,11 +379,13 @@ list_subscriptions(Node) -> rpc_call(Node, list_subscriptions, [Node]). list_subscriptions_via_topic(Topic, FormatFun) -> - lists:append([list_subscriptions_via_topic(Node, Topic, FormatFun) || Node <- mria_mnesia:running_nodes()]). + lists:append([list_subscriptions_via_topic(Node, Topic, FormatFun) + || Node <- mria_mnesia:running_nodes()]). + list_subscriptions_via_topic(Node, Topic, {M,F}) when Node =:= node() -> MatchSpec = [{{{'_', '$1'}, '_'}, [{'=:=','$1', Topic}], ['$_']}], - M:F(ets:select(emqx_suboption, MatchSpec)); + erlang:apply(M, F, [ets:select(emqx_suboption, MatchSpec)]); list_subscriptions_via_topic(Node, Topic, FormatFun) -> rpc_call(Node, list_subscriptions_via_topic, [Node, Topic, FormatFun]). @@ -497,8 +512,12 @@ listener_id_filter(Id, Listeners) -> Filter = fun(#{id := Id0}) -> Id0 =:= Id end, lists:filter(Filter, Listeners). --spec manage_listener(Operation :: start_listener|stop_listener|restart_listener, Param :: map()) -> - ok | {error, Reason :: term()}. + +-spec manage_listener( Operation :: start_listener + | stop_listener + | restart_listener + , Param :: map()) -> + ok | {error, Reason :: term()}. 
manage_listener(Operation, #{id := ID, node := Node}) when Node =:= node()-> erlang:apply(emqx_listeners, Operation, [ID]); manage_listener(Operation, Param = #{node := Node}) -> @@ -566,9 +585,13 @@ add_duration_field(Alarms) -> add_duration_field([], _Now, Acc) -> Acc; -add_duration_field([Alarm = #{activated := true, activate_at := ActivateAt}| Rest], Now, Acc) -> +add_duration_field([Alarm = #{activated := true, activate_at := ActivateAt} | Rest], Now, Acc) -> add_duration_field(Rest, Now, [Alarm#{duration => Now - ActivateAt} | Acc]); -add_duration_field([Alarm = #{activated := false, activate_at := ActivateAt, deactivate_at := DeactivateAt}| Rest], Now, Acc) -> + +add_duration_field( [Alarm = #{ activated := false + , activate_at := ActivateAt + , deactivate_at := DeactivateAt} | Rest] + , Now, Acc) -> add_duration_field(Rest, Now, [Alarm#{duration => DeactivateAt - ActivateAt} | Acc]). %%-------------------------------------------------------------------- @@ -611,13 +634,20 @@ check_row_limit(Tables) -> check_row_limit([], _Limit) -> ok; -check_row_limit([Tab|Tables], Limit) -> +check_row_limit([Tab | Tables], Limit) -> case table_size(Tab) > Limit of true -> false; false -> check_row_limit(Tables, Limit) end. +check_results(Results) -> + case lists:any(fun(Item) -> Item =:= ok end, Results) of + true -> ok; + false -> lists:last(Results) + end. + max_row_limit() -> ?MAX_ROW_LIMIT. table_size(Tab) -> ets:info(Tab, size). + diff --git a/apps/emqx_management/src/emqx_mgmt_api.erl b/apps/emqx_management/src/emqx_mgmt_api.erl index c032093f7..8fd0a29a1 100644 --- a/apps/emqx_management/src/emqx_mgmt_api.erl +++ b/apps/emqx_management/src/emqx_mgmt_api.erl @@ -18,6 +18,8 @@ -include_lib("stdlib/include/qlc.hrl"). +-elvis([{elvis_style, dont_repeat_yourself, #{min_complexity => 100}}]). + -define(FRESH_SELECT, fresh_select). -export([ paginate/3 @@ -35,23 +37,14 @@ paginate(Tables, Params, {Module, FormatFun}) -> Qh = query_handle(Tables), Count = count(Tables), - Page = b2i(page(Params)), - Limit = b2i(limit(Params)), - Cursor = qlc:cursor(Qh), - case Page > 1 of - true -> - _ = qlc:next_answers(Cursor, (Page - 1) * Limit), - ok; - false -> ok - end, - Rows = qlc:next_answers(Cursor, Limit), - qlc:delete_cursor(Cursor), - #{meta => #{page => Page, limit => Limit, count => Count}, - data => [Module:FormatFun(Row) || Row <- Rows]}. + do_paginate(Qh, Count, Params, {Module, FormatFun}). paginate(Tables, MatchSpec, Params, {Module, FormatFun}) -> Qh = query_handle(Tables, MatchSpec), Count = count(Tables, MatchSpec), + do_paginate(Qh, Count, Params, {Module, FormatFun}). + +do_paginate(Qh, Count, Params, {Module, FormatFun}) -> Page = b2i(page(Params)), Limit = b2i(limit(Params)), Cursor = qlc:cursor(Qh), @@ -64,7 +57,7 @@ paginate(Tables, MatchSpec, Params, {Module, FormatFun}) -> Rows = qlc:next_answers(Cursor, Limit), qlc:delete_cursor(Cursor), #{meta => #{page => Page, limit => Limit, count => Count}, - data => [Module:FormatFun(Row) || Row <- Rows]}. + data => [erlang:apply(Module, FormatFun, [Row]) || Row <- Rows]}. 
query_handle(Table) when is_atom(Table) -> qlc:q([R || R <- ets:table(Table)]); @@ -95,9 +88,7 @@ count(Table, MatchSpec) when is_atom(Table) -> NMatchSpec = [{MatchPattern, Where, [true]}], ets:select_count(Table, NMatchSpec); count([Table], MatchSpec) when is_atom(Table) -> - [{MatchPattern, Where, _Re}] = MatchSpec, - NMatchSpec = [{MatchPattern, Where, [true]}], - ets:select_count(Table, NMatchSpec); + count(Table, MatchSpec); count(Tables, MatchSpec) -> lists:sum([count(T, MatchSpec) || T <- Tables]). @@ -111,16 +102,23 @@ limit(Params) when is_map(Params) -> limit(Params) -> proplists:get_value(<<"limit">>, Params, emqx_mgmt:max_row_limit()). +init_meta(Params) -> + Limit = b2i(limit(Params)), + Page = b2i(page(Params)), + #{ + page => Page, + limit => Limit, + count => 0 + }. + %%-------------------------------------------------------------------- %% Node Query %%-------------------------------------------------------------------- node_query(Node, Params, Tab, QsSchema, QueryFun) -> {_CodCnt, Qs} = params2qs(Params, QsSchema), - Limit = b2i(limit(Params)), - Page = b2i(page(Params)), - Meta = #{page => Page, limit => Limit, count => 0}, - page_limit_check_query(Meta, {fun do_node_query/5, [Node, Tab, Qs, QueryFun, Meta]}). + page_limit_check_query(init_meta(Params), + {fun do_node_query/5, [Node, Tab, Qs, QueryFun, init_meta(Params)]}). %% @private do_node_query(Node, Tab, Qs, QueryFun, Meta) -> @@ -129,34 +127,15 @@ do_node_query(Node, Tab, Qs, QueryFun, Meta) -> do_node_query( Node, Tab, Qs, QueryFun, Continuation , Meta = #{limit := Limit} , Results) -> - {Len, Rows, NContinuation} = do_query(Node, Tab, Qs, QueryFun, Continuation, Limit), - case judge_page_with_counting(Len, Meta) of - {more, NMeta} -> - case NContinuation of - ?FRESH_SELECT -> - #{meta => NMeta, data => []}; %% page and limit too big - _ -> - do_node_query(Node, Tab, Qs, QueryFun, NContinuation, NMeta, []) - end; - {cutrows, NMeta} -> - {SubStart, NeedNowNum} = rows_sub_params(Len, NMeta), - ThisRows = lists:sublist(Rows, SubStart, NeedNowNum), - NResults = lists:sublist( lists:append(Results, ThisRows) - , SubStart, Limit), - case NContinuation of - ?FRESH_SELECT -> - #{meta => NMeta, data => NResults}; - _ -> - do_node_query(Node, Tab, Qs, QueryFun, NContinuation, NMeta, NResults) - end; - {enough, NMeta} -> - NResults = lists:sublist(lists:append(Results, Rows), 1, Limit), - case NContinuation of - ?FRESH_SELECT -> - #{meta => NMeta, data => NResults}; - _ -> - do_node_query(Node, Tab, Qs, QueryFun, NContinuation, NMeta, NResults) - end + case do_query(Node, Tab, Qs, QueryFun, Continuation, Limit) of + {error, {badrpc, R}} -> + {error, Node, {badrpc, R}}; + {Len, Rows, ?FRESH_SELECT} -> + {NMeta, NResults} = sub_query_result(Len, Rows, Limit, Results, Meta), + #{meta => NMeta, data => NResults}; + {Len, Rows, NContinuation} -> + {NMeta, NResults} = sub_query_result(Len, Rows, Limit, Results, Meta), + do_node_query(Node, Tab, Qs, QueryFun, NContinuation, NMeta, NResults) end. %%-------------------------------------------------------------------- @@ -165,11 +144,9 @@ do_node_query( Node, Tab, Qs, QueryFun, Continuation cluster_query(Params, Tab, QsSchema, QueryFun) -> {_CodCnt, Qs} = params2qs(Params, QsSchema), - Limit = b2i(limit(Params)), - Page = b2i(page(Params)), Nodes = mria_mnesia:running_nodes(), - Meta = #{page => Page, limit => Limit, count => 0}, - page_limit_check_query(Meta, {fun do_cluster_query/5, [Nodes, Tab, Qs, QueryFun, Meta]}). 
+    page_limit_check_query(init_meta(Params), +        {fun do_cluster_query/5, [Nodes, Tab, Qs, QueryFun, init_meta(Params)]}). %% @private do_cluster_query(Nodes, Tab, Qs, QueryFun, Meta) -> @@ -177,37 +154,17 @@ do_cluster_query(Nodes, Tab, Qs, QueryFun, Meta) -> do_cluster_query([], _Tab, _Qs, _QueryFun, _Continuation, Meta, Results) -> #{meta => Meta, data => Results}; -do_cluster_query( [Node | Nodes], Tab, Qs, QueryFun, Continuation - , Meta = #{limit := Limit} - , Results) -> - {Len, Rows, NContinuation} = do_query(Node, Tab, Qs, QueryFun, Continuation, Limit), - case judge_page_with_counting(Len, Meta) of - {more, NMeta} -> - case NContinuation of - ?FRESH_SELECT -> - do_cluster_query(Nodes, Tab, Qs, QueryFun, NContinuation, NMeta, []); %% next node with parts of results - _ -> - do_cluster_query([Node | Nodes], Tab, Qs, QueryFun, NContinuation, NMeta, []) %% continue this node - end; - {cutrows, NMeta} -> - {SubStart, NeedNowNum} = rows_sub_params(Len, NMeta), - ThisRows = lists:sublist(Rows, SubStart, NeedNowNum), - NResults = lists:sublist( lists:append(Results, ThisRows) - , SubStart, Limit), - case NContinuation of - ?FRESH_SELECT -> - do_cluster_query(Nodes, Tab, Qs, QueryFun, NContinuation, NMeta, NResults); %% next node with parts of results - _ -> - do_cluster_query([Node | Nodes], Tab, Qs, QueryFun, NContinuation, NMeta, NResults) %% continue this node - end; - {enough, NMeta} -> - NResults = lists:sublist(lists:append(Results, Rows), 1, Limit), - case NContinuation of - ?FRESH_SELECT -> - do_cluster_query(Nodes, Tab, Qs, QueryFun, NContinuation, NMeta, NResults); %% next node with parts of results - _ -> - do_cluster_query([Node | Nodes], Tab, Qs, QueryFun, NContinuation, NMeta, NResults) %% continue this node - end +do_cluster_query([Node | Tail] = Nodes, Tab, Qs, QueryFun, Continuation, + Meta = #{limit := Limit}, Results) -> + case do_query(Node, Tab, Qs, QueryFun, Continuation, Limit) of + {error, {badrpc, R}} -> + {error, Node, {badrpc, R}}; + {Len, Rows, ?FRESH_SELECT} -> + {NMeta, NResults} = sub_query_result(Len, Rows, Limit, Results, Meta), + do_cluster_query(Tail, Tab, Qs, QueryFun, ?FRESH_SELECT, NMeta, NResults); + {Len, Rows, NContinuation} -> + {NMeta, NResults} = sub_query_result(Len, Rows, Limit, Results, Meta), + do_cluster_query(Nodes, Tab, Qs, QueryFun, NContinuation, NMeta, NResults) end. %%-------------------------------------------------------------------- @@ -216,11 +173,26 @@ do_cluster_query( [Node | Nodes], Tab, Qs, QueryFun, Continuation %% @private do_query(Node, Tab, Qs, {M,F}, Continuation, Limit) when Node =:= node() -> - M:F(Tab, Qs, Continuation, Limit); + erlang:apply(M, F, [Tab, Qs, Continuation, Limit]); do_query(Node, Tab, Qs, QueryFun, Continuation, Limit) -> rpc_call(Node, ?MODULE, do_query, [Node, Tab, Qs, QueryFun, Continuation, Limit], 50000). +sub_query_result(Len, Rows, Limit, Results, Meta) -> + {Flag, NMeta} = judge_page_with_counting(Len, Meta), + NResults = + case Flag of + more -> + []; + cutrows -> + {SubStart, NeedNowNum} = rows_sub_params(Len, NMeta), + ThisRows = lists:sublist(Rows, SubStart, NeedNowNum), + lists:sublist(lists:append(Results, ThisRows), SubStart, Limit); + enough -> + lists:sublist(lists:append(Results, Rows), 1, Limit) + end, + {NMeta, NResults}.
+ %% @private rpc_call(Node, M, F, A, T) -> case rpc:call(Node, M, F, A, T) of @@ -241,9 +213,9 @@ select_table_with_count(Tab, {Ms, FuzzyFilterFun}, ?FRESH_SELECT, Limit, FmtFun) Rows = FuzzyFilterFun(RawResult), {length(Rows), lists:map(FmtFun, Rows), NContinuation} end; -select_table_with_count(_Tab, {_Ms, FuzzyFilterFun}, Continuation, _Limit, FmtFun) +select_table_with_count(_Tab, {Ms, FuzzyFilterFun}, Continuation, _Limit, FmtFun) when is_function(FuzzyFilterFun) -> - case ets:select(Continuation) of + case ets:select(ets:repair_continuation(Continuation, Ms)) of '$end_of_table' -> {0, [], ?FRESH_SELECT}; {RawResult, NContinuation} -> @@ -258,8 +230,8 @@ select_table_with_count(Tab, Ms, ?FRESH_SELECT, Limit, FmtFun) {RawResult, NContinuation} -> {length(RawResult), lists:map(FmtFun, RawResult), NContinuation} end; -select_table_with_count(_Tab, _Ms, Continuation, _Limit, FmtFun) -> - case ets:select(Continuation) of +select_table_with_count(_Tab, Ms, Continuation, _Limit, FmtFun) -> + case ets:select(ets:repair_continuation(Continuation, Ms)) of '$end_of_table' -> {0, [], ?FRESH_SELECT}; {RawResult, NContinuation} -> @@ -294,16 +266,20 @@ pick_params_to_qs([{Key, Value} | Params], QsSchema, Acc1, Acc2) -> end, case lists:keytake(OpposeKey, 1, Params) of false -> - pick_params_to_qs(Params, QsSchema, [qs(Key, Value, Type) | Acc1], Acc2); + pick_params_to_qs(Params, QsSchema, + [qs(Key, Value, Type) | Acc1], Acc2); {value, {K2, V2}, NParams} -> - pick_params_to_qs(NParams, QsSchema, [qs(Key, Value, K2, V2, Type) | Acc1], Acc2) + pick_params_to_qs(NParams, QsSchema, + [qs(Key, Value, K2, V2, Type) | Acc1], Acc2) end; _ -> case is_fuzzy_key(Key) of true -> - pick_params_to_qs(Params, QsSchema, Acc1, [qs(Key, Value, Type) | Acc2]); + pick_params_to_qs(Params, QsSchema, Acc1, + [qs(Key, Value, Type) | Acc2]); _ -> - pick_params_to_qs(Params, QsSchema, [qs(Key, Value, Type) | Acc1], Acc2) + pick_params_to_qs(Params, QsSchema, + [qs(Key, Value, Type) | Acc1], Acc2) end end @@ -340,10 +316,9 @@ is_fuzzy_key(<<"match_", _/binary>>) -> is_fuzzy_key(_) -> false. -page_start(Page, Limit) -> - if Page > 1 -> (Page-1) * Limit + 1; - true -> 1 - end. +page_start(1, _) -> 1; +page_start(Page, Limit) -> (Page-1) * Limit + 1. + judge_page_with_counting(Len, Meta = #{page := Page, limit := Limit, count := Count}) -> PageStart = page_start(Page, Limit), @@ -359,11 +334,12 @@ judge_page_with_counting(Len, Meta = #{page := Page, limit := Limit, count := Co rows_sub_params(Len, _Meta = #{page := Page, limit := Limit, count := Count}) -> PageStart = page_start(Page, Limit), - if Count - Len < PageStart -> + case (Count - Len) < PageStart of + true -> NeedNowNum = Count - PageStart + 1, SubStart = Len - NeedNowNum + 1, {SubStart, NeedNowNum}; - true -> + false -> {_SubStart = 1, _NeedNowNum = Len} end. diff --git a/apps/emqx_management/src/emqx_mgmt_api_alarms.erl b/apps/emqx_management/src/emqx_mgmt_api_alarms.erl index c4e49a616..1204ee22f 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_alarms.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_alarms.erl @@ -18,72 +18,87 @@ -behaviour(minirest_api). --export([api_spec/0]). +-include_lib("emqx/include/emqx.hrl"). +-include_lib("typerefl/include/types.hrl"). + +-export([api_spec/0, paths/0, schema/1, fields/1]). -export([alarms/2]). %% internal export (for query) --export([ query/4 - ]). - -%% notice: from emqx_alarms --define(ACTIVATED_ALARM, emqx_activated_alarm). --define(DEACTIVATED_ALARM, emqx_deactivated_alarm). 
- --import(emqx_mgmt_util, [ object_array_schema/2 - , schema/1 - , properties/1 - ]). +-export([query/4]). api_spec() -> - {[alarms_api()], []}. + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). -properties() -> - properties([ - {node, string, <<"Alarm in node">>}, - {name, string, <<"Alarm name">>}, - {message, string, <<"Alarm readable information">>}, - {details, object}, - {duration, integer, <<"Alarms duration time; UNIX time stamp, millisecond">>}, - {activate_at, string, <<"Alarms activate time, RFC 3339">>}, - {deactivate_at, string, <<"Nullable, alarms deactivate time, RFC 3339">>} - ]). +paths() -> + ["/alarms"]. -alarms_api() -> - Metadata = #{ +schema("/alarms") -> + #{ + 'operationId' => alarms, get => #{ description => <<"EMQ X alarms">>, - parameters => emqx_mgmt_util:page_params() ++ [#{ - name => activated, - in => query, - description => <<"All alarms, if not specified">>, - required => false, - schema => #{type => boolean, default => true} - }], + parameters => [ + hoconsc:ref(emqx_dashboard_swagger, page), + hoconsc:ref(emqx_dashboard_swagger, limit), + {activated, hoconsc:mk(boolean(), #{in => query, + desc => <<"All alarms, if not specified">>, + nullable => true})} + ], responses => #{ - <<"200">> => - object_array_schema(properties(), <<"List all alarms">>)}}, - delete => #{ + 200 => [ + {data, hoconsc:mk(hoconsc:array(hoconsc:ref(?MODULE, alarm)), #{})}, + {meta, hoconsc:mk(hoconsc:ref(?MODULE, meta), #{})} + ] + } + }, + delete => #{ description => <<"Remove all deactivated alarms">>, responses => #{ - <<"200">> => - schema(<<"Remove all deactivated alarms ok">>)}}}, - {"/alarms", Metadata, alarms}. + 204 => <<"Remove all deactivated alarms ok">> + } + } + }. +fields(alarm) -> + [ + {node, hoconsc:mk(binary(), + #{desc => <<"Alarm in node">>, example => atom_to_list(node())})}, + {name, hoconsc:mk(binary(), + #{desc => <<"Alarm name">>, example => <<"high_system_memory_usage">>})}, + {message, hoconsc:mk(binary(), #{desc => <<"Alarm readable information">>, + example => <<"System memory usage is higher than 70%">>})}, + {details, hoconsc:mk(map(), #{desc => <<"Alarm details information">>, + example => #{<<"high_watermark">> => 70}})}, + {duration, hoconsc:mk(integer(), + #{desc => <<"Alarms duration time; UNIX time stamp, millisecond">>, + example => 297056})}, + {activate_at, hoconsc:mk(binary(), #{desc => <<"Alarms activate time, RFC 3339">>, + example => <<"2021-10-25T11:52:52.548+08:00">>})}, + {deactivate_at, hoconsc:mk(binary(), + #{desc => <<"Nullable, alarms deactivate time, RFC 3339">>, + example => <<"2021-10-31T10:52:52.548+08:00">>})} + ]; + +fields(meta) -> + emqx_dashboard_swagger:fields(page) ++ + emqx_dashboard_swagger:fields(limit) ++ + [{count, hoconsc:mk(integer(), #{example => 1})}]. %%%============================================================================================== %% parameters trans alarms(get, #{query_string := Qs}) -> Table = - case maps:get(<<"activated">>, Qs, <<"true">>) of - <<"true">> -> ?ACTIVATED_ALARM; - <<"false">> -> ?DEACTIVATED_ALARM + case maps:get(<<"activated">>, Qs, true) of + true -> ?ACTIVATED_ALARM; + false -> ?DEACTIVATED_ALARM end, Response = emqx_mgmt_api:cluster_query(Qs, Table, [], {?MODULE, query}), emqx_mgmt_util:generate_response(Response); alarms(delete, _Params) -> _ = emqx_mgmt:delete_all_deactivated_alarms(), - {200}. + {204}. 
%%%============================================================================================== %% internal diff --git a/apps/emqx_management/src/emqx_mgmt_api_banned.erl b/apps/emqx_management/src/emqx_mgmt_api_banned.erl index 26fafc2e8..c9ae1401d 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_banned.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_banned.erl @@ -17,105 +17,146 @@ -module(emqx_mgmt_api_banned). -include_lib("emqx/include/emqx.hrl"). +-include_lib("typerefl/include/types.hrl"). -include("emqx_mgmt.hrl"). -behaviour(minirest_api). --export([api_spec/0]). +-export([ api_spec/0 + , paths/0 + , schema/1 + , fields/1]). + +-export([format/1]). -export([ banned/2 , delete_banned/2 ]). --import(emqx_mgmt_util, [ page_params/0 - , schema/1 - , object_schema/1 - , page_object_schema/1 - , properties/1 - , error_schema/1 - ]). - --export([format/1]). - -define(TAB, emqx_banned). --define(FORMAT_FUN, {?MODULE, format}). - - -api_spec() -> - {[banned_api(), delete_banned_api()], []}. -define(BANNED_TYPES, [clientid, username, peerhost]). -properties() -> - properties([ - {as, string, <<"Banned type clientid, username, peerhost">>, [clientid, username, peerhost]}, - {who, string, <<"Client info as banned type">>}, - {by, integer, <<"Commander">>}, - {reason, string, <<"Banned reason">>}, - {at, integer, <<"Create banned time. Nullable, rfc3339, default is now">>}, - {until, string, <<"Cancel banned time. Nullable, rfc3339, default is now + 5 minute">>} - ]). +-define(FORMAT_FUN, {?MODULE, format}). -banned_api() -> - Path = "/banned", - MetaData = #{ +api_spec() -> + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}). + +paths() -> + ["/banned", "/banned/:as/:who"]. + +schema("/banned") -> + #{ + 'operationId' => banned, get => #{ description => <<"List banned">>, - parameters => page_params(), + parameters => [ + hoconsc:ref(emqx_dashboard_swagger, page), + hoconsc:ref(emqx_dashboard_swagger, limit) + ], responses => #{ - <<"200">> => - page_object_schema(properties())}}, + 200 =>[ + {data, hoconsc:mk(hoconsc:array(hoconsc:ref(ban)), #{})}, + {meta, hoconsc:mk(hoconsc:ref(meta), #{})} + ] + } + }, post => #{ description => <<"Create banned">>, - 'requestBody' => object_schema(properties()), + 'requestBody' => hoconsc:mk(hoconsc:ref(ban)), responses => #{ - <<"200">> => schema(<<"Create success">>)}}}, - {Path, MetaData, banned}. - -delete_banned_api() -> - Path = "/banned/:as/:who", - MetaData = #{ + 200 => [{data, hoconsc:mk(hoconsc:array(hoconsc:ref(ban)), #{})}], + 400 => emqx_dashboard_swagger:error_codes(['ALREADY_EXISTED'], + <<"Banned already existed">>) + } + } + }; +schema("/banned/:as/:who") -> + #{ + 'operationId' => delete_banned, delete => #{ description => <<"Delete banned">>, parameters => [ - #{ - name => as, + {as, hoconsc:mk(hoconsc:enum(?BANNED_TYPES), #{ + desc => <<"Banned type">>, in => path, - required => true, - description => <<"Banned type">>, - schema => #{type => string, enum => ?BANNED_TYPES} - }, - #{ - name => who, + example => username})}, + {who, hoconsc:mk(binary(), #{ + desc => <<"Client info as banned type">>, in => path, - required => true, - description => <<"Client info as banned type">>, - schema => #{type => string} - } - ], + example => <<"Badass">>})} + ], responses => #{ - <<"200">> => schema(<<"Delete banned success">>), - <<"404">> => error_schema(<<"Banned not found">>)}}}, - {Path, MetaData, delete_banned}. 
+ 204 => <<"Delete banned success">>, + 404 => emqx_dashboard_swagger:error_codes(['RESOURCE_NOT_FOUND'], + <<"Banned not found">>) + } + } + }. + +fields(ban) -> + [ + {as, hoconsc:mk(hoconsc:enum(?BANNED_TYPES), #{ + desc => <<"Banned type clientid, username, peerhost">>, + nullable => false, + example => username})}, + {who, hoconsc:mk(binary(), #{ + desc => <<"Client info as banned type">>, + nullable => false, + example => <<"Badass">>})}, + {by, hoconsc:mk(binary(), #{ + desc => <<"Commander">>, + nullable => true, + example => <<"mgmt_api">>})}, + {reason, hoconsc:mk(binary(), #{ + desc => <<"Banned reason">>, + nullable => true, + example => <<"Too many requests">>})}, + {at, hoconsc:mk(binary(), #{ + desc => <<"Create banned time, rfc3339, now if not specified">>, + nullable => true, + validator => fun is_rfc3339/1, + example => <<"2021-10-25T21:48:47+08:00">>})}, + {until, hoconsc:mk(binary(), #{ + desc => <<"Cancel banned time, rfc3339, now + 5 minute if not specified">>, + nullable => true, + validator => fun is_rfc3339/1, + example => <<"2021-10-25T21:53:47+08:00">>}) + } + ]; +fields(meta) -> + emqx_dashboard_swagger:fields(page) ++ + emqx_dashboard_swagger:fields(limit) ++ + [{count, hoconsc:mk(integer(), #{example => 1})}]. + +is_rfc3339(Time) -> + try + emqx_banned:to_timestamp(Time), + ok + catch _:_ -> {error, Time} + end. banned(get, #{query_string := Params}) -> Response = emqx_mgmt_api:paginate(?TAB, Params, ?FORMAT_FUN), {200, Response}; banned(post, #{body := Body}) -> - _ = emqx_banned:create(emqx_banned:parse(Body)), - {200}. + case emqx_banned:create(emqx_banned:parse(Body)) of + {ok, Banned} -> + {200, format(Banned)}; + {error, {already_exist, Old}} -> + {400, #{code => 'ALREADY_EXISTED', message => format(Old)}} + end. delete_banned(delete, #{bindings := Params}) -> case emqx_banned:look_up(Params) of [] -> - As0 = maps:get(as, Params), - Who0 = maps:get(who, Params), - Message = list_to_binary(io_lib:format("~p: ~p not found", [As0, Who0])), + #{as := As0, who := Who0} = Params, + Message = list_to_binary(io_lib:format("~p: ~s not found", [As0, Who0])), {404, #{code => 'RESOURCE_NOT_FOUND', message => Message}}; _ -> ok = emqx_banned:delete(Params), - {200} + {204} end. format(Banned) -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_clients.erl b/apps/emqx_management/src/emqx_mgmt_api_clients.erl index e89561122..ff429a9a6 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_clients.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_clients.erl @@ -34,6 +34,7 @@ , subscribe/2 , unsubscribe/2 , subscribe_batch/2 + , set_keepalive/2 ]). -export([ query/4 @@ -67,8 +68,8 @@ , {<<"gte_connected_at">>, timestamp} , {<<"lte_connected_at">>, timestamp}]}). --define(query_fun, {?MODULE, query}). --define(format_fun, {?MODULE, format_channel_info}). +-define(QUERY_FUN, {?MODULE, query}). +-define(FORMAT_FUN, {?MODULE, format_channel_info}). -define(CLIENT_ID_NOT_FOUND, <<"{\"code\": \"RESOURCE_NOT_FOUND\", \"reason\": \"Client id not found\"}">>). @@ -82,7 +83,9 @@ apis() -> , clients_authz_cache_api() , clients_subscriptions_api() , subscribe_api() - , unsubscribe_api()]. + , unsubscribe_api() + , keepalive_api() + ]. schemas() -> Client = #{ @@ -101,43 +104,80 @@ schemas() -> properties(client) -> [ - {awaiting_rel_cnt, integer, <<"v4 api name [awaiting_rel] Number of awaiting PUBREC packet">>}, - {awaiting_rel_max, integer, <<"v4 api name [max_awaiting_rel]. 
Maximum allowed number of awaiting PUBREC packet">>}, - {clean_start, boolean, <<"Indicate whether the client is using a brand new session">>}, - {clientid, string , <<"Client identifier">>}, - {connected, boolean, <<"Whether the client is connected">>}, - {connected_at, string , <<"Client connection time, rfc3339">>}, - {created_at, string , <<"Session creation time, rfc3339">>}, - {disconnected_at, string , <<"Client offline time, This field is only valid and returned when connected is false, rfc3339">>}, - {expiry_interval, integer, <<"Session expiration interval, with the unit of second">>}, - {heap_size, integer, <<"Process heap size with the unit of byte">>}, - {inflight_cnt, integer, <<"Current length of inflight">>}, - {inflight_max, integer, <<"v4 api name [max_inflight]. Maximum length of inflight">>}, - {ip_address, string , <<"Client's IP address">>}, - {port, integer, <<"Client's port">>}, - {is_bridge, boolean, <<"Indicates whether the client is connectedvia bridge">>}, - {keepalive, integer, <<"keepalive time, with the unit of second">>}, - {mailbox_len, integer, <<"Process mailbox size">>}, - {mqueue_dropped, integer, <<"Number of messages dropped by the message queue due to exceeding the length">>}, - {mqueue_len, integer, <<"Current length of message queue">>}, - {mqueue_max, integer, <<"v4 api name [max_mqueue]. Maximum length of message queue">>}, - {node, string , <<"Name of the node to which the client is connected">>}, - {proto_name, string , <<"Client protocol name">>}, - {proto_ver, integer, <<"Protocol version used by the client">>}, - {recv_cnt, integer, <<"Number of TCP packets received">>}, - {recv_msg, integer, <<"Number of PUBLISH packets received">>}, - {recv_oct, integer, <<"Number of bytes received by EMQ X Broker (the same below)">>}, - {recv_pkt, integer, <<"Number of MQTT packets received">>}, - {reductions, integer, <<"Erlang reduction">>}, - {send_cnt, integer, <<"Number of TCP packets sent">>}, - {send_msg, integer, <<"Number of PUBLISH packets sent">>}, - {send_oct, integer, <<"Number of bytes sent">>}, - {send_pkt, integer, <<"Number of MQTT packets sent">>}, - {subscriptions_cnt, integer, <<"Number of subscriptions established by this client.">>}, - {subscriptions_max, integer, <<"v4 api name [max_subscriptions] Maximum number of subscriptions allowed by this client">>}, - {username, string , <<"User name of client when connecting">>}, - {will_msg, string , <<"Client will message">>}, - {zone, string , <<"Indicate the configuration group used by the client">>} + {awaiting_rel_cnt, integer, + <<"v4 api name [awaiting_rel] Number of awaiting PUBREC packet">>}, + {awaiting_rel_max, integer, + <<"v4 api name [max_awaiting_rel]. Maximum allowed number of awaiting PUBREC packet">>}, + {clean_start, boolean, + <<"Indicate whether the client is using a brand new session">>}, + {clientid, string , + <<"Client identifier">>}, + {connected, boolean, + <<"Whether the client is connected">>}, + {connected_at, string , + <<"Client connection time, rfc3339">>}, + {created_at, string , + <<"Session creation time, rfc3339">>}, + {disconnected_at, string , + <<"Client offline time. It's Only valid and returned when connected is false, rfc3339">>}, + {expiry_interval, integer, + <<"Session expiration interval, with the unit of second">>}, + {heap_size, integer, + <<"Process heap size with the unit of byte">>}, + {inflight_cnt, integer, + <<"Current length of inflight">>}, + {inflight_max, integer, + <<"v4 api name [max_inflight]. 
Maximum length of inflight">>}, + {ip_address, string , + <<"Client's IP address">>}, + {port, integer, + <<"Client's port">>}, + {is_bridge, boolean, + <<"Indicates whether the client is connectedvia bridge">>}, + {keepalive, integer, + <<"keepalive time, with the unit of second">>}, + {mailbox_len, integer, + <<"Process mailbox size">>}, + {mqueue_dropped, integer, + <<"Number of messages dropped by the message queue due to exceeding the length">>}, + {mqueue_len, integer, + <<"Current length of message queue">>}, + {mqueue_max, integer, + <<"v4 api name [max_mqueue]. Maximum length of message queue">>}, + {node, string , + <<"Name of the node to which the client is connected">>}, + {proto_name, string , + <<"Client protocol name">>}, + {proto_ver, integer, + <<"Protocol version used by the client">>}, + {recv_cnt, integer, + <<"Number of TCP packets received">>}, + {recv_msg, integer, + <<"Number of PUBLISH packets received">>}, + {recv_oct, integer, + <<"Number of bytes received by EMQ X Broker (the same below)">>}, + {recv_pkt, integer, + <<"Number of MQTT packets received">>}, + {reductions, integer, + <<"Erlang reduction">>}, + {send_cnt, integer, + <<"Number of TCP packets sent">>}, + {send_msg, integer, + <<"Number of PUBLISH packets sent">>}, + {send_oct, integer, + <<"Number of bytes sent">>}, + {send_pkt, integer, + <<"Number of MQTT packets sent">>}, + {subscriptions_cnt, integer, + <<"Number of subscriptions established by this client.">>}, + {subscriptions_max, integer, + <<"v4 api name [max_subscriptions] Maximum number of subscriptions allowed by this client">>}, + {username, string , + <<"User name of client when connecting">>}, + {will_msg, string , + <<"Client will message">>}, + {zone, string , + <<"Indicate the configuration group used by the client">>} ]; properties(authz_cache) -> [ @@ -197,7 +237,9 @@ clients_api() -> name => conn_state, in => query, required => false, - description => <<"The current connection status of the client, the possible values are connected,idle,disconnected">>, + description => + <<"The current connection status of the client, ", + "the possible values are connected,idle,disconnected">>, schema => #{type => string, enum => [connected, idle, disconnected]} }, #{ @@ -211,7 +253,9 @@ clients_api() -> name => proto_name, in => query, required => false, - description => <<"Client protocol name, the possible values are MQTT,CoAP,LwM2M,MQTT-SN">>, + description => + <<"Client protocol name, ", + "the possible values are MQTT,CoAP,LwM2M,MQTT-SN">>, schema => #{type => string, enum => ['MQTT', 'CoAP', 'LwM2M', 'MQTT-SN']} }, #{ @@ -239,34 +283,43 @@ clients_api() -> name => gte_created_at, in => query, required => false, - description => <<"Search client session creation time by greater than or equal method, rfc3339 or timestamp(millisecond)">>, + description => + <<"Search client session creation time by greater than or equal method, " + "rfc3339 or timestamp(millisecond)">>, schema => #{type => string} }, #{ name => lte_created_at, in => query, required => false, - description => <<"Search client session creation time by less than or equal method, rfc3339 or timestamp(millisecond)">>, + description => + <<"Search client session creation time by less than or equal method, ", + "rfc3339 or timestamp(millisecond)">>, schema => #{type => string} }, #{ name => gte_connected_at, in => query, required => false, - description => <<"Search client connection creation time by greater than or equal method, rfc3339 or timestamp(millisecond)">>, + description 
=> + <<"Search client connection creation time by greater than or equal method, ", + "rfc3339 or timestamp(millisecond)">>, schema => #{type => string} }, #{ name => lte_connected_at, in => query, required => false, - description => <<"Search client connection creation time by less than or equal method, rfc3339 or timestamp(millisecond) ">>, + description => + <<"Search client connection creation time by less than or equal method, ", + "rfc3339 or timestamp(millisecond) ">>, schema => #{type => string} } ], responses => #{ <<"200">> => emqx_mgmt_util:array_schema(client, <<"List clients 200 OK">>), - <<"400">> => emqx_mgmt_util:error_schema(<<"Invalid parameters">>, ['INVALID_PARAMETER'])}}}, + <<"400">> => emqx_mgmt_util:error_schema( <<"Invalid parameters">> + , ['INVALID_PARAMETER'])}}}, {"/clients", Metadata, clients}. client_api() -> @@ -292,7 +345,7 @@ client_api() -> }], responses => #{ <<"404">> => emqx_mgmt_util:error_schema(<<"Client id not found">>), - <<"200">> => emqx_mgmt_util:schema(client, <<"List clients 200 OK">>)}}}, + <<"204">> => emqx_mgmt_util:schema(<<"Kick out client successfully">>)}}}, {"/clients/:clientid", Metadata, client}. clients_authz_cache_api() -> @@ -318,7 +371,7 @@ clients_authz_cache_api() -> }], responses => #{ <<"404">> => emqx_mgmt_util:error_schema(<<"Client id not found">>), - <<"200">> => emqx_mgmt_util:schema(<<"Delete clients 200 OK">>)}}}, + <<"204">> => emqx_mgmt_util:schema(<<"Clean client authz cache successfully">>)}}}, {"/clients/:clientid/authz_cache", Metadata, authz_cache}. clients_subscriptions_api() -> @@ -385,6 +438,27 @@ subscribe_api() -> <<"200">> => emqx_mgmt_util:schema(<<"Subscribe ok">>)}}}, {"/clients/:clientid/subscribe", Metadata, subscribe}. +keepalive_api() -> + Metadata = #{ + put => #{ + description => <<"set the online client keepalive by second ">>, + parameters => [#{ + name => clientid, + in => path, + schema => #{type => string}, + required => true + }, + #{ + name => interval, + in => query, + schema => #{type => integer}, + required => true + } + ], + responses => #{ + <<"404">> => emqx_mgmt_util:error_schema(<<"Client id not found">>), + <<"200">> => emqx_mgmt_util:schema(<<"ok">>)}}}, + {"/clients/:clientid/keepalive", Metadata, set_keepalive}. %%%============================================================================================== %% parameters trans clients(get, #{query_string := Qs}) -> @@ -428,6 +502,17 @@ subscriptions(get, #{bindings := #{clientid := ClientID}}) -> end, Subs0), {200, Subs}. +set_keepalive(put, #{bindings := #{clientid := ClientID}, query_string := Query}) -> + case maps:find(<<"interval">>, Query) of + error -> {404, "Interval Not Found"}; + {ok, Interval0} -> + Interval = binary_to_integer(Interval0), + case emqx_mgmt:set_keepalive(emqx_mgmt_util:urldecode(ClientID), Interval) of + ok -> {200}; + {error, not_found} ->{404, ?CLIENT_ID_NOT_FOUND} + end + end. 
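The new `PUT /clients/:clientid/keepalive` handler above parses `interval` from the query string and delegates to `emqx_mgmt:set_keepalive/2`. A minimal sketch of the underlying call, assuming a connected client with the hypothetical client id `c1`:

    %% sketch only: set the keepalive of a connected client to 30 seconds
    case emqx_mgmt:set_keepalive(<<"c1">>, 30) of
        ok                 -> ok;            %% the handler replies {200}
        {error, not_found} -> not_connected  %% the handler replies {404, ?CLIENT_ID_NOT_FOUND}
    end.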
+ %%%============================================================================================== %% api apply @@ -436,18 +521,18 @@ list(Params) -> case maps:get(<<"node">>, Params, undefined) of undefined -> Response = emqx_mgmt_api:cluster_query(Params, Tab, - QuerySchema, ?query_fun), + QuerySchema, ?QUERY_FUN), emqx_mgmt_util:generate_response(Response); Node1 -> Node = binary_to_atom(Node1, utf8), ParamsWithoutNode = maps:without([<<"node">>], Params), Response = emqx_mgmt_api:node_query(Node, ParamsWithoutNode, - Tab, QuerySchema, ?query_fun), + Tab, QuerySchema, ?QUERY_FUN), emqx_mgmt_util:generate_response(Response) end. lookup(#{clientid := ClientID}) -> - case emqx_mgmt:lookup_client({clientid, ClientID}, ?format_fun) of + case emqx_mgmt:lookup_client({clientid, ClientID}, ?FORMAT_FUN) of [] -> {404, ?CLIENT_ID_NOT_FOUND}; ClientInfo -> @@ -455,8 +540,12 @@ lookup(#{clientid := ClientID}) -> end. kickout(#{clientid := ClientID}) -> - emqx_mgmt:kickout_client(ClientID), - {200}. + case emqx_mgmt:kickout_client({ClientID, ?FORMAT_FUN}) of + {error, not_found} -> + {404, ?CLIENT_ID_NOT_FOUND}; + _ -> + {204} + end. get_authz_cache(#{clientid := ClientID})-> case emqx_mgmt:list_authz_cache(ClientID) of @@ -678,6 +767,7 @@ format_channel_info({_, ClientInfo, ClientStats}) -> , sockname , retry_interval , upgrade_qos + , id %% sessionID, defined in emqx_session.erl ], TimesKeys = [created_at, connected_at, disconnected_at], %% format timestamp to rfc3339 diff --git a/apps/emqx_management/src/emqx_mgmt_api_configs.erl b/apps/emqx_management/src/emqx_mgmt_api_configs.erl index abf9d7cff..963db10b7 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_configs.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_configs.erl @@ -40,14 +40,16 @@ paths() -> schema("/configs") -> #{ - operationId => configs, + 'operationId' => configs, get => #{ tags => [conf], - description => <<"Get all the configurations of the specified node, including hot and non-hot updatable items.">>, + description => +<<"Get all the configurations of the specified node, including hot and non-hot updatable items.">>, parameters => [ {node, hoconsc:mk(typerefl:atom(), #{in => query, required => false, example => <<"emqx@127.0.0.1">>, - desc => <<"Node's name: If you do not fill in the fields, this node will be used by default.">>})}], + desc => + <<"Node's name: If you do not fill in the fields, this node will be used by default.">>})}], responses => #{ 200 => config_list([]) } @@ -56,17 +58,19 @@ schema("/configs") -> schema("/configs_reset/:rootname") -> Paths = lists:map(fun({Path, _}) -> Path end, config_list(?EXCLUDES)), #{ - operationId => config_reset, + 'operationId' => config_reset, post => #{ tags => [conf], - description => <<"Reset the config entry specified by the query string parameter `conf_path`.
+ description => +<<"Reset the config entry specified by the query string parameter `conf_path`.
- For a config entry that has default value, this resets it to the default value; - For a config entry that has no default value, an error 400 will be returned">>, %% We only return "200" rather than the new configs that has been changed, as %% the schema of the changed configs is depends on the request parameter %% `conf_path`, it cannot be defined here. parameters => [ - {rootname, hoconsc:mk(hoconsc:enum(Paths), #{in => path, example => <<"authorization">>})}, + {rootname, hoconsc:mk( hoconsc:enum(Paths) + , #{in => path, example => <<"authorization">>})}, {conf_path, hoconsc:mk(typerefl:binary(), #{in => query, required => false, example => <<"cache.enable">>, desc => <<"The config path separated by '.' character">>})}], @@ -79,10 +83,12 @@ schema("/configs_reset/:rootname") -> schema(Path) -> {Root, Schema} = find_schema(Path), #{ - operationId => config, + 'operationId' => config, get => #{ tags => [conf], - description => iolist_to_binary([<<"Get the sub-configurations under *">>, Root, <<"*">>]), + description => iolist_to_binary([ <<"Get the sub-configurations under *">> + , Root + , <<"*">>]), responses => #{ 200 => Schema, 404 => emqx_dashboard_swagger:error_codes(['NOT_FOUND'], <<"config not found">>) @@ -90,8 +96,10 @@ schema(Path) -> }, put => #{ tags => [conf], - description => iolist_to_binary([<<"Update the sub-configurations under *">>, Root, <<"*">>]), - requestBody => Schema, + description => iolist_to_binary([ <<"Update the sub-configurations under *">> + , Root + , <<"*">>]), + 'requestBody' => Schema, responses => #{ 200 => Schema, 400 => emqx_dashboard_swagger:error_codes(['UPDATE_FAILED']) @@ -147,8 +155,20 @@ config_reset(post, _Params, Req) -> configs(get, Params, _Req) -> Node = maps:get(node, Params, node()), - Res = rpc:call(Node, ?MODULE, get_full_config, [[]]), - {200, Res}. + case + lists:member(Node, mria_mnesia:running_nodes()) + andalso + rpc:call(Node, ?MODULE, get_full_config, []) + of + false -> + Message = list_to_binary(io_lib:format("Bad node ~p, reason not found", [Node])), + {500, #{code => 'BAD_NODE', message => Message}}; + {error, {badrpc, R}} -> + Message = list_to_binary(io_lib:format("Bad node ~p, reason ~p", [Node, R])), + {500, #{code => 'BAD_NODE', message => Message}}; + Res -> + {200, Res} + end. conf_path_reset(Req) -> <<"/api/v5", ?PREFIX_RESET, Path/binary>> = cowboy_req:path(Req), diff --git a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl index 4568bcd9a..f21b1d8de 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl @@ -59,10 +59,10 @@ req_schema() -> Schema = [emqx_mgmt_api_configs:gen_schema( emqx:get_raw_config([listeners, T, default], #{})) || T <- ?TYPES_ATOM], - #{oneOf => Schema}. + #{'oneOf' => Schema}. resp_schema() -> - #{oneOf := Schema} = req_schema(), + #{'oneOf' := Schema} = req_schema(), AddMetadata = fun(Prop) -> Prop#{running => #{type => boolean}, id => #{type => string}, @@ -70,7 +70,7 @@ resp_schema() -> end, Schema1 = [S#{properties => AddMetadata(Prop)} || S = #{properties := Prop} <- Schema], - #{oneOf => Schema1}. + #{'oneOf' => Schema1}. 
api_list_listeners() -> Metadata = #{ @@ -78,7 +78,8 @@ api_list_listeners() -> description => <<"List listeners from all nodes in the cluster">>, responses => #{ <<"200">> => - emqx_mgmt_util:array_schema(resp_schema(), <<"List listeners successfully">>)}}}, + emqx_mgmt_util:array_schema(resp_schema(), + <<"List listeners successfully">>)}}}, {"/listeners", Metadata, list_listeners}. api_list_update_listeners_by_id() -> @@ -92,25 +93,28 @@ api_list_update_listeners_by_id() -> <<"200">> => emqx_mgmt_util:array_schema(resp_schema(), <<"List listeners successfully">>)}}, put => #{ - description => <<"Create or update a listener by a given Id to all nodes in the cluster">>, + description => + <<"Create or update a listener by a given Id to all nodes in the cluster">>, parameters => [param_path_id()], - requestBody => emqx_mgmt_util:schema(req_schema(), <<"Listener Config">>), + 'requestBody' => emqx_mgmt_util:schema(req_schema(), <<"Listener Config">>), responses => #{ <<"400">> => - emqx_mgmt_util:error_schema(?UPDATE_CONFIG_FAILED, ['BAD_LISTENER_ID', 'BAD_CONFIG_SCHEMA']), + emqx_mgmt_util:error_schema(?UPDATE_CONFIG_FAILED, + ['BAD_LISTENER_ID', 'BAD_CONFIG_SCHEMA']), <<"404">> => emqx_mgmt_util:error_schema(?LISTENER_NOT_FOUND, ['BAD_LISTENER_ID']), <<"500">> => emqx_mgmt_util:error_schema(?OPERATION_FAILED, ['INTERNAL_ERROR']), <<"200">> => - emqx_mgmt_util:array_schema(resp_schema(), <<"Create or update listener successfully">>)}}, + emqx_mgmt_util:array_schema(resp_schema(), + <<"Create or update listener successfully">>)}}, delete => #{ description => <<"Delete a listener by a given Id to all nodes in the cluster">>, parameters => [param_path_id()], responses => #{ <<"404">> => emqx_mgmt_util:error_schema(?LISTENER_NOT_FOUND, ['BAD_LISTENER_ID']), - <<"200">> => + <<"204">> => emqx_mgmt_util:schema(<<"Delete listener successfully">>)}} }, {"/listeners/:id", Metadata, crud_listeners_by_id}. @@ -143,10 +147,11 @@ api_get_update_listener_by_id_on_node() -> put => #{ description => <<"Create or update a listener by a given Id on a specific node">>, parameters => [param_path_node(), param_path_id()], - requestBody => emqx_mgmt_util:schema(req_schema(), <<"Listener Config">>), + 'requestBody' => emqx_mgmt_util:schema(req_schema(), <<"Listener Config">>), responses => #{ <<"400">> => - emqx_mgmt_util:error_schema(?UPDATE_CONFIG_FAILED, ['BAD_LISTENER_ID', 'BAD_CONFIG_SCHEMA']), + emqx_mgmt_util:error_schema(?UPDATE_CONFIG_FAILED, + ['BAD_LISTENER_ID', 'BAD_CONFIG_SCHEMA']), <<"404">> => emqx_mgmt_util:error_schema(?NODE_LISTENER_NOT_FOUND, ['BAD_NODE_NAME', 'BAD_LISTENER_ID']), @@ -160,7 +165,7 @@ api_get_update_listener_by_id_on_node() -> responses => #{ <<"404">> => emqx_mgmt_util:error_schema(?LISTENER_NOT_FOUND, ['BAD_LISTENER_ID']), - <<"200">> => + <<"204">> => emqx_mgmt_util:schema(<<"Delete listener successfully">>)}} }, {"/nodes/:node/listeners/:id", Metadata, crud_listener_by_id_on_node}. @@ -205,7 +210,7 @@ param_path_id() -> #{ name => id, in => path, - schema => #{type => string}, + schema => #{type => string, example => emqx_listeners:id_example()}, required => true }. @@ -251,7 +256,7 @@ crud_listeners_by_id(put, #{bindings := #{id := Id}, body := Conf}) -> crud_listeners_by_id(delete, #{bindings := #{id := Id}}) -> Results = emqx_mgmt:remove_listener(Id), case lists:filter(fun filter_errors/1, Results) of - [] -> {200}; + [] -> {204}; Errors -> {500, #{code => 'UNKNOW_ERROR', message => err_msg(Errors)}} end. 
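Deleting a listener now answers 204 on success. The handler's success path corresponds roughly to the sketch below; `tcp:default` is a hypothetical listener id (see `emqx_listeners:id_example/0`) and the error filtering is only approximated here:

    %% sketch only: cluster-wide removal; an empty error list maps to {204}
    Results = emqx_mgmt:remove_listener(<<"tcp:default">>),
    case [R || R = {error, _} <- Results] of
        []     -> deleted;          %% API replies {204}
        Errors -> {failed, Errors}  %% API replies {500, 'UNKNOW_ERROR'}
    end.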
@@ -291,7 +296,7 @@ crud_listener_by_id_on_node(put, #{bindings := #{id := Id, node := Node}, body : end; crud_listener_by_id_on_node(delete, #{bindings := #{id := Id, node := Node}}) -> case emqx_mgmt:remove_listener(atom(Node), Id) of - ok -> {200}; + ok -> {204}; {error, Reason} -> {500, #{code => 'UNKNOW_ERROR', message => err_msg(Reason)}} end. diff --git a/apps/emqx_management/src/emqx_mgmt_cli.erl b/apps/emqx_management/src/emqx_mgmt_cli.erl index d664828bf..6e082e4ca 100644 --- a/apps/emqx_management/src/emqx_mgmt_cli.erl +++ b/apps/emqx_management/src/emqx_mgmt_cli.erl @@ -36,6 +36,7 @@ , vm/1 , mnesia/1 , trace/1 + , traces/1 , log/1 , authz/1 , olp/1 @@ -79,10 +80,12 @@ broker([]) -> emqx_ctl:print("~-10s: ~p~n", [uptime, emqx_sys:uptime()]); broker(["stats"]) -> - [emqx_ctl:print("~-30s: ~w~n", [Stat, Val]) || {Stat, Val} <- lists:sort(emqx_stats:getstats())]; + [emqx_ctl:print("~-30s: ~w~n", [Stat, Val]) + || {Stat, Val} <- lists:sort(emqx_stats:getstats())]; broker(["metrics"]) -> - [emqx_ctl:print("~-30s: ~w~n", [Metric, Val]) || {Metric, Val} <- lists:sort(emqx_metrics:all())]; + [emqx_ctl:print("~-30s: ~w~n", [Metric, Val]) + || {Metric, Val} <- lists:sort(emqx_metrics:all())]; broker(_) -> emqx_ctl:usage([{"broker", "Show broker version, uptime and description"}, @@ -142,10 +145,8 @@ clients(["show", ClientId]) -> if_client(ClientId, fun print/1); clients(["kick", ClientId]) -> - case emqx_cm:kick_session(bin(ClientId)) of - ok -> emqx_ctl:print("ok~n"); - _ -> emqx_ctl:print("Not Found.~n") - end; + ok = emqx_cm:kick_session(bin(ClientId)), + emqx_ctl:print("ok~n"); clients(_) -> emqx_ctl:usage([{"clients list", "List all clients"}, @@ -209,10 +210,11 @@ subscriptions(["del", ClientId, Topic]) -> end; subscriptions(_) -> - emqx_ctl:usage([{"subscriptions list", "List all subscriptions"}, - {"subscriptions show ", "Show subscriptions of a client"}, - {"subscriptions add ", "Add a static subscription manually"}, - {"subscriptions del ", "Delete a static subscription manually"}]). + emqx_ctl:usage( + [{"subscriptions list", "List all subscriptions"}, + {"subscriptions show ", "Show subscriptions of a client"}, + {"subscriptions add ", "Add a static subscription manually"}, + {"subscriptions del ", "Delete a static subscription manually"}]). 
if_valid_qos(QoS, Fun) -> try list_to_integer(QoS) of @@ -281,14 +283,17 @@ vm(["memory"]) -> [emqx_ctl:print("memory/~-17s: ~w~n", [Cat, Val]) || {Cat, Val} <- erlang:memory()]; vm(["process"]) -> - [emqx_ctl:print("process/~-16s: ~w~n", [Name, erlang:system_info(Key)]) || {Name, Key} <- [{limit, process_limit}, {count, process_count}]]; + [emqx_ctl:print("process/~-16s: ~w~n", [Name, erlang:system_info(Key)]) + || {Name, Key} <- [{limit, process_limit}, {count, process_count}]]; vm(["io"]) -> IoInfo = lists:usort(lists:flatten(erlang:system_info(check_io))), - [emqx_ctl:print("io/~-21s: ~w~n", [Key, proplists:get_value(Key, IoInfo)]) || Key <- [max_fds, active_fds]]; + [emqx_ctl:print("io/~-21s: ~w~n", [Key, proplists:get_value(Key, IoInfo)]) + || Key <- [max_fds, active_fds]]; vm(["ports"]) -> - [emqx_ctl:print("ports/~-18s: ~w~n", [Name, erlang:system_info(Key)]) || {Name, Key} <- [{count, port_count}, {limit, port_limit}]]; + [emqx_ctl:print("ports/~-18s: ~w~n", [Name, erlang:system_info(Key)]) + || {Name, Key} <- [{count, port_count}, {limit, port_limit}]]; vm(_) -> emqx_ctl:usage([{"vm all", "Show info of Erlang VM"}, @@ -325,8 +330,14 @@ log(["primary-level", Level]) -> emqx_ctl:print("~ts~n", [emqx_logger:get_primary_log_level()]); log(["handlers", "list"]) -> - _ = [emqx_ctl:print("LogHandler(id=~ts, level=~ts, destination=~ts, status=~ts)~n", [Id, Level, Dst, Status]) - || #{id := Id, level := Level, dst := Dst, status := Status} <- emqx_logger:get_log_handlers()], + _ = [emqx_ctl:print( + "LogHandler(id=~ts, level=~ts, destination=~ts, status=~ts)~n", + [Id, Level, Dst, Status] + ) + || #{id := Id, + level := Level, + dst := Dst, + status := Status} <- emqx_logger:get_log_handlers()], ok; log(["handlers", "start", HandlerId]) -> @@ -353,63 +364,146 @@ log(["handlers", "set-level", HandlerId, Level]) -> end; log(_) -> - emqx_ctl:usage([{"log set-level ", "Set the overall log level"}, - {"log primary-level", "Show the primary log level now"}, - {"log primary-level ","Set the primary log level"}, - {"log handlers list", "Show log handlers"}, - {"log handlers start ", "Start a log handler"}, - {"log handlers stop ", "Stop a log handler"}, - {"log handlers set-level ", "Set log level of a log handler"}]). + emqx_ctl:usage( + [{"log set-level ", "Set the overall log level"}, + {"log primary-level", "Show the primary log level now"}, + {"log primary-level ","Set the primary log level"}, + {"log handlers list", "Show log handlers"}, + {"log handlers start ", "Start a log handler"}, + {"log handlers stop ", "Stop a log handler"}, + {"log handlers set-level ", "Set log level of a log handler"}]). 
%%-------------------------------------------------------------------- %% @doc Trace Command trace(["list"]) -> - lists:foreach(fun({{Who, Name}, {Level, LogFile}}) -> - emqx_ctl:print("Trace(~ts=~ts, level=~ts, destination=~p)~n", [Who, Name, Level, LogFile]) - end, emqx_tracer:lookup_traces()); + lists:foreach(fun(Trace) -> + #{type := Type, filter := Filter, level := Level, dst := Dst} = Trace, + emqx_ctl:print("Trace(~s=~s, level=~s, destination=~p)~n", [Type, Filter, Level, Dst]) + end, emqx_trace_handler:running()); -trace(["stop", "client", ClientId]) -> - trace_off(clientid, ClientId); +trace(["stop", Operation, ClientId]) -> + case trace_type(Operation) of + {ok, Type} -> trace_off(Type, ClientId); + error -> trace([]) + end; -trace(["start", "client", ClientId, LogFile]) -> - trace_on(clientid, ClientId, all, LogFile); +trace(["start", Operation, ClientId, LogFile]) -> + trace(["start", Operation, ClientId, LogFile, "all"]); -trace(["start", "client", ClientId, LogFile, Level]) -> - trace_on(clientid, ClientId, list_to_atom(Level), LogFile); - -trace(["stop", "topic", Topic]) -> - trace_off(topic, Topic); - -trace(["start", "topic", Topic, LogFile]) -> - trace_on(topic, Topic, all, LogFile); - -trace(["start", "topic", Topic, LogFile, Level]) -> - trace_on(topic, Topic, list_to_atom(Level), LogFile); +trace(["start", Operation, ClientId, LogFile, Level]) -> + case trace_type(Operation) of + {ok, Type} -> trace_on(Type, ClientId, list_to_existing_atom(Level), LogFile); + error -> trace([]) + end; trace(_) -> - emqx_ctl:usage([{"trace list", "List all traces started"}, - {"trace start client []", "Traces for a client"}, - {"trace stop client ", "Stop tracing for a client"}, - {"trace start topic [] ", "Traces for a topic"}, - {"trace stop topic ", "Stop tracing for a topic"}]). + emqx_ctl:usage([{"trace list", "List all traces started on local node"}, + {"trace start client []", + "Traces for a client on local node"}, + {"trace stop client ", + "Stop tracing for a client on local node"}, + {"trace start topic [] ", + "Traces for a topic on local node"}, + {"trace stop topic ", + "Stop tracing for a topic on local node"}, + {"trace start ip_address [] ", + "Traces for a client ip on local node"}, + {"trace stop ip_addresss ", + "Stop tracing for a client ip on local node"} + ]). trace_on(Who, Name, Level, LogFile) -> - case emqx_tracer:start_trace({Who, iolist_to_binary(Name)}, Level, LogFile) of + case emqx_trace_handler:install(Who, Name, Level, LogFile) of ok -> - emqx_ctl:print("trace ~ts ~ts successfully~n", [Who, Name]); + emqx_ctl:print("trace ~s ~s successfully~n", [Who, Name]); {error, Error} -> - emqx_ctl:print("[error] trace ~ts ~ts: ~p~n", [Who, Name, Error]) + emqx_ctl:print("[error] trace ~s ~s: ~p~n", [Who, Name, Error]) end. trace_off(Who, Name) -> - case emqx_tracer:stop_trace({Who, iolist_to_binary(Name)}) of + case emqx_trace_handler:uninstall(Who, Name) of ok -> - emqx_ctl:print("stop tracing ~ts ~ts successfully~n", [Who, Name]); + emqx_ctl:print("stop tracing ~s ~s successfully~n", [Who, Name]); {error, Error} -> - emqx_ctl:print("[error] stop tracing ~ts ~ts: ~p~n", [Who, Name, Error]) + emqx_ctl:print("[error] stop tracing ~s ~s: ~p~n", [Who, Name, Error]) end. 
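Every `trace start|stop` clause now resolves its first argument through `trace_type/1` and lands on `emqx_trace_handler:install/4` or `uninstall/2`. As a sketch, `emqx_ctl trace start ip_address 127.0.0.1 trace_ip.log debug` boils down to roughly the following (the IP and log file are example values):

    %% sketch only: local-node trace for a client IP
    ok = emqx_trace_handler:install(ip_address, "127.0.0.1", debug, "trace_ip.log"),
    %% and the matching `trace stop ip_address 127.0.0.1`:
    ok = emqx_trace_handler:uninstall(ip_address, "127.0.0.1").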
+%%-------------------------------------------------------------------- +%% @doc Trace Cluster Command +traces(["list"]) -> + {ok, List} = emqx_trace_api:list_trace(get, []), + case List of + [] -> + emqx_ctl:print("Cluster Trace is empty~n", []); + _ -> + lists:foreach(fun(Trace) -> + #{type := Type, name := Name, status := Status, + log_size := LogSize} = Trace, + emqx_ctl:print("Trace(~s: ~s=~s, ~s, LogSize:~p)~n", + [Name, Type, maps:get(Type, Trace), Status, LogSize]) + end, List) + end, + length(List); + +traces(["stop", Name]) -> + trace_cluster_off(Name); + +traces(["delete", Name]) -> + trace_cluster_del(Name); + +traces(["start", Name, Operation, Filter]) -> + traces(["start", Name, Operation, Filter, "900"]); + +traces(["start", Name, Operation, Filter, DurationS]) -> + case trace_type(Operation) of + {ok, Type} -> trace_cluster_on(Name, Type, Filter, DurationS); + error -> traces([]) + end; + +traces(_) -> + emqx_ctl:usage([{"traces list", "List all cluster traces started"}, + {"traces start client ", "Traces for a client in cluster"}, + {"traces start topic ", "Traces for a topic in cluster"}, + {"traces start ip_address ", "Traces for a IP in cluster"}, + {"traces stop ", "Stop trace in cluster"}, + {"traces delete ", "Delete trace in cluster"} + ]). + +trace_cluster_on(Name, Type, Filter, DurationS0) -> + DurationS = list_to_integer(DurationS0), + Now = erlang:system_time(second), + Trace = #{ name => list_to_binary(Name) + , type => atom_to_binary(Type) + , Type => list_to_binary(Filter) + , start_at => list_to_binary(calendar:system_time_to_rfc3339(Now)) + , end_at => list_to_binary(calendar:system_time_to_rfc3339(Now + DurationS)) + }, + case emqx_trace:create(Trace) of + ok -> + emqx_ctl:print("cluster_trace ~p ~s ~s successfully~n", [Type, Filter, Name]); + {error, Error} -> + emqx_ctl:print("[error] cluster_trace ~s ~s=~s ~p~n", + [Name, Type, Filter, Error]) + end. + +trace_cluster_del(Name) -> + case emqx_trace:delete(list_to_binary(Name)) of + ok -> emqx_ctl:print("Del cluster_trace ~s successfully~n", [Name]); + {error, Error} -> emqx_ctl:print("[error] Del cluster_trace ~s: ~p~n", [Name, Error]) + end. + +trace_cluster_off(Name) -> + case emqx_trace:update(list_to_binary(Name), false) of + ok -> emqx_ctl:print("Stop cluster_trace ~s successfully~n", [Name]); + {error, Error} -> emqx_ctl:print("[error] Stop cluster_trace ~s: ~p~n", [Name, Error]) + end. + +trace_type("client") -> {ok, clientid}; +trace_type("topic") -> {ok, topic}; +trace_type("ip_address") -> {ok, ip_address}; +trace_type(_) -> error. + %%-------------------------------------------------------------------- %% @doc Listeners Command @@ -491,10 +585,11 @@ authz(["cache-clean", ClientId]) -> emqx_mgmt:clean_authz_cache(ClientId); authz(_) -> - emqx_ctl:usage([{"authz cache-clean all", "Clears authorization cache on all nodes"}, - {"authz cache-clean node ", "Clears authorization cache on given node"}, - {"authz cache-clean ", "Clears authorization cache for given client"} - ]). + emqx_ctl:usage( + [{"authz cache-clean all", "Clears authorization cache on all nodes"}, + {"authz cache-clean node ", "Clears authorization cache on given node"}, + {"authz cache-clean ", "Clears authorization cache for given client"} + ]). 
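The cluster-wide `traces start` command earlier in this hunk assembles a map in `trace_cluster_on/4` and hands it to `emqx_trace:create/1`. For `traces start bad_client client c1 600` the map would look roughly like this (name, filter and duration are example values):

    %% sketch only: what trace_cluster_on("bad_client", clientid, "c1", "600") builds
    Now = erlang:system_time(second),
    Trace = #{ name     => <<"bad_client">>
             , type     => <<"clientid">>
             , clientid => <<"c1">>
             , start_at => list_to_binary(calendar:system_time_to_rfc3339(Now))
             , end_at   => list_to_binary(calendar:system_time_to_rfc3339(Now + 600))
             },
    ok = emqx_trace:create(Trace).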
%%-------------------------------------------------------------------- @@ -562,23 +657,24 @@ print({client, {ClientId, ChanPid}}) -> maps:with([peername, clean_start, keepalive, expiry_interval, connected_at, disconnected_at], ConnInfo), maps:with([created_at], Session)]), - InfoKeys = [clientid, username, peername, - clean_start, keepalive, expiry_interval, - subscriptions_cnt, inflight_cnt, awaiting_rel_cnt, send_msg, mqueue_len, mqueue_dropped, + InfoKeys = [clientid, username, peername, clean_start, keepalive, + expiry_interval, subscriptions_cnt, inflight_cnt, + awaiting_rel_cnt, send_msg, mqueue_len, mqueue_dropped, connected, created_at, connected_at] ++ case maps:is_key(disconnected_at, Info) of true -> [disconnected_at]; false -> [] end, Info1 = Info#{expiry_interval => maps:get(expiry_interval, Info) div 1000}, - emqx_ctl:print("Client(~ts, username=~ts, peername=~ts, " - "clean_start=~ts, keepalive=~w, session_expiry_interval=~w, " - "subscriptions=~w, inflight=~w, awaiting_rel=~w, delivered_msgs=~w, enqueued_msgs=~w, dropped_msgs=~w, " - "connected=~ts, created_at=~w, connected_at=~w" ++ - case maps:is_key(disconnected_at, Info1) of - true -> ", disconnected_at=~w)~n"; - false -> ")~n" - end, + emqx_ctl:print( + "Client(~ts, username=~ts, peername=~ts, clean_start=~ts, " + "keepalive=~w, session_expiry_interval=~w, subscriptions=~w, " + "inflight=~w, awaiting_rel=~w, delivered_msgs=~w, enqueued_msgs=~w, " + "dropped_msgs=~w, connected=~ts, created_at=~w, connected_at=~w" + ++ case maps:is_key(disconnected_at, Info1) of + true -> ", disconnected_at=~w)~n"; + false -> ")~n" + end, [format(K, maps:get(K, Info1)) || K <- InfoKeys]); print({emqx_route, #route{topic = Topic, dest = {_, Node}}}) -> diff --git a/apps/emqx_management/src/emqx_mgmt_util.erl b/apps/emqx_management/src/emqx_mgmt_util.erl index b276988bd..cce9276ed 100644 --- a/apps/emqx_management/src/emqx_mgmt_util.erl +++ b/apps/emqx_management/src/emqx_mgmt_util.erl @@ -126,7 +126,8 @@ array_schema(Schema, Desc) -> object_array_schema(Properties) when is_map(Properties) -> json_content_schema(#{type => array, items => #{type => object, properties => Properties}}). object_array_schema(Properties, Desc) -> - json_content_schema(#{type => array, items => #{type => object, properties => Properties}}, Desc). + json_content_schema(#{type => array, + items => #{type => object, properties => Properties}}, Desc). page_schema(Ref) when is_atom(Ref) -> page_schema(minirest:ref(atom_to_binary(Ref, utf8))); @@ -201,7 +202,10 @@ batch_operation(Module, Function, ArgsList) -> Failed = batch_operation(Module, Function, ArgsList, []), Len = erlang:length(Failed), Success = erlang:length(ArgsList) - Len, - Fun = fun({Args, Reason}, Detail) -> [#{data => Args, reason => io_lib:format("~p", [Reason])} | Detail] end, + Fun = + fun({Args, Reason}, Detail) -> + [#{data => Args, reason => io_lib:format("~p", [Reason])} | Detail] + end, #{success => Success, failed => Len, detail => lists:foldl(Fun, [], Failed)}. batch_operation(_Module, _Function, [], Failed) -> @@ -218,7 +222,7 @@ properties(Props) -> properties(Props, #{}). 
properties([], Acc) -> Acc; -properties([Key| Props], Acc) when is_atom(Key) -> +properties([Key | Props], Acc) when is_atom(Key) -> properties(Props, maps:put(Key, #{type => string}, Acc)); properties([{Key, Type} | Props], Acc) -> properties(Props, maps:put(Key, #{type => Type}, Acc)); @@ -266,6 +270,9 @@ generate_response(QueryResult) -> case QueryResult of {error, page_limit_invalid} -> {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; + {error, Node, {badrpc, R}} -> + Message = list_to_binary(io_lib:format("bad rpc call ~p, Reason ~p", [Node, R])), + {500, #{code => <<"NODE_DOWN">>, message => Message}}; Response -> {200, Response} end. diff --git a/apps/emqx_management/test/emqx_mgmt_api_test_util.erl b/apps/emqx_management/test/emqx_mgmt_api_test_util.erl index cdad91b0c..dd14d0a81 100644 --- a/apps/emqx_management/test/emqx_mgmt_api_test_util.erl +++ b/apps/emqx_management/test/emqx_mgmt_api_test_util.erl @@ -88,7 +88,7 @@ do_request_api(Method, Request)-> {error, socket_closed_remotely} -> {error, socket_closed_remotely}; {ok, {{"HTTP/1.1", Code, _}, _, Return} } - when Code =:= 200 orelse Code =:= 201 -> + when Code >= 200 andalso Code =< 299 -> {ok, Return}; {ok, {Reason, _, _}} -> {error, Reason} diff --git a/apps/emqx_management/test/emqx_mgmt_clients_api_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_clients_api_SUITE.erl index 615445f66..691828ffd 100644 --- a/apps/emqx_management/test/emqx_mgmt_clients_api_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_clients_api_SUITE.erl @@ -77,22 +77,27 @@ t_clients(_) -> ?assertEqual({error, {"HTTP/1.1", 404, "Not Found"}}, AfterKickoutResponse2), %% get /clients/:clientid/authz_cache should has no authz cache - Client1AuthzCachePath = emqx_mgmt_api_test_util:api_path(["clients", binary_to_list(ClientId1), "authz_cache"]), + Client1AuthzCachePath = emqx_mgmt_api_test_util:api_path(["clients", + binary_to_list(ClientId1), "authz_cache"]), {ok, Client1AuthzCache} = emqx_mgmt_api_test_util:request_api(get, Client1AuthzCachePath), ?assertEqual("[]", Client1AuthzCache), %% post /clients/:clientid/subscribe SubscribeBody = #{topic => Topic, qos => Qos}, - SubscribePath = emqx_mgmt_api_test_util:api_path(["clients", binary_to_list(ClientId1), "subscribe"]), - {ok, _} = emqx_mgmt_api_test_util:request_api(post, SubscribePath, "", AuthHeader, SubscribeBody), + SubscribePath = emqx_mgmt_api_test_util:api_path(["clients", + binary_to_list(ClientId1), "subscribe"]), + {ok, _} = emqx_mgmt_api_test_util:request_api(post, SubscribePath, + "", AuthHeader, SubscribeBody), timer:sleep(100), [{{_, AfterSubTopic}, #{qos := AfterSubQos}}] = emqx_mgmt:lookup_subscriptions(ClientId1), ?assertEqual(AfterSubTopic, Topic), ?assertEqual(AfterSubQos, Qos), %% post /clients/:clientid/unsubscribe - UnSubscribePath = emqx_mgmt_api_test_util:api_path(["clients", binary_to_list(ClientId1), "unsubscribe"]), - {ok, _} = emqx_mgmt_api_test_util:request_api(post, UnSubscribePath, "", AuthHeader, SubscribeBody), + UnSubscribePath = emqx_mgmt_api_test_util:api_path(["clients", + binary_to_list(ClientId1), "unsubscribe"]), + {ok, _} = emqx_mgmt_api_test_util:request_api(post, UnSubscribePath, + "", AuthHeader, SubscribeBody), timer:sleep(100), ?assertEqual([], emqx_mgmt:lookup_subscriptions(Client1)), @@ -123,7 +128,8 @@ t_query_clients_with_time(_) -> %% get /clients with time(rfc3339) NowTimeStampInt = erlang:system_time(millisecond), %% Do not uri_encode `=` to `%3D` - Rfc3339String = 
emqx_http_lib:uri_encode(binary:bin_to_list(emqx_mgmt_api_clients:unix_ts_to_rfc3339_bin(NowTimeStampInt))), + Rfc3339String = emqx_http_lib:uri_encode(binary:bin_to_list( + emqx_mgmt_api_clients:unix_ts_to_rfc3339_bin(NowTimeStampInt))), TimeStampString = emqx_http_lib:uri_encode(integer_to_list(NowTimeStampInt)), LteKeys = ["lte_created_at=", "lte_connected_at="], @@ -133,8 +139,10 @@ t_query_clients_with_time(_) -> GteParamRfc3339 = [Param ++ Rfc3339String || Param <- GteKeys], GteParamStamp = [Param ++ TimeStampString || Param <- GteKeys], - RequestResults = [emqx_mgmt_api_test_util:request_api(get, ClientsPath, Param, AuthHeader) - || Param <- LteParamRfc3339 ++ LteParamStamp ++ GteParamRfc3339 ++ GteParamStamp], + RequestResults = + [emqx_mgmt_api_test_util:request_api(get, ClientsPath, Param, AuthHeader) + || Param <- LteParamRfc3339 ++ LteParamStamp + ++ GteParamRfc3339 ++ GteParamStamp], DecodedResults = [emqx_json:decode(Response, [return_maps]) || {ok, Response} <- RequestResults], {LteResponseDecodeds, GteResponseDecodeds} = lists:split(4, DecodedResults), @@ -153,3 +161,22 @@ t_query_clients_with_time(_) -> Client2Path = emqx_mgmt_api_test_util:api_path(["clients", binary_to_list(ClientId2)]), {ok, _} = emqx_mgmt_api_test_util:request_api(delete, Client1Path), {ok, _} = emqx_mgmt_api_test_util:request_api(delete, Client2Path). + +t_keepalive(_Config) -> + Username = "user_keepalive", + ClientId = "client_keepalive", + AuthHeader = emqx_mgmt_api_test_util:auth_header_(), + Path = emqx_mgmt_api_test_util:api_path(["clients", ClientId, "keepalive"]), + Query = "interval=11", + {error,{"HTTP/1.1",404,"Not Found"}} = + emqx_mgmt_api_test_util:request_api(put, Path, Query, AuthHeader, <<"">>), + {ok, C1} = emqtt:start_link(#{username => Username, clientid => ClientId}), + {ok, _} = emqtt:connect(C1), + {ok, Ok} = emqx_mgmt_api_test_util:request_api(put, Path, Query, AuthHeader, <<"">>), + ?assertEqual("", Ok), + [Pid] = emqx_cm:lookup_channels(list_to_binary(ClientId)), + State = sys:get_state(Pid), + ct:pal("~p~n", [State]), + ?assertEqual(11000, element(2, element(5, element(11, State)))), + emqtt:disconnect(C1), + ok. 
diff --git a/apps/emqx_modules/src/emqx_delayed_api.erl b/apps/emqx_modules/src/emqx_delayed_api.erl index 95b586fef..8137d9e63 100644 --- a/apps/emqx_modules/src/emqx_delayed_api.erl +++ b/apps/emqx_modules/src/emqx_delayed_api.erl @@ -52,7 +52,7 @@ paths() -> ["/mqtt/delayed", "/mqtt/delayed/messages", "/mqtt/delayed/messages/: schema("/mqtt/delayed") -> #{ - operationId => status, + 'operationId' => status, get => #{ tags => [<<"mqtt">>], description => <<"Get delayed status">>, @@ -64,25 +64,28 @@ schema("/mqtt/delayed") -> put => #{ tags => [<<"mqtt">>], description => <<"Enable or disable delayed, set max delayed messages">>, - requestBody => ref(emqx_modules_schema, "delayed"), + 'requestBody' => ref(emqx_modules_schema, "delayed"), responses => #{ 200 => mk(ref(emqx_modules_schema, "delayed"), #{desc => <<"Enable or disable delayed successfully">>}), - 400 => emqx_dashboard_swagger:error_codes([?BAD_REQUEST], <<"Max limit illegality">>) + 400 => emqx_dashboard_swagger:error_codes( [?BAD_REQUEST] + , <<"Max limit illegality">>) } } }; schema("/mqtt/delayed/messages/:msgid") -> - #{operationId => delayed_message, + #{'operationId' => delayed_message, get => #{ tags => [<<"mqtt">>], description => <<"Get delayed message">>, parameters => [{msgid, mk(binary(), #{in => path, desc => <<"delay message ID">>})}], responses => #{ 200 => ref("message_without_payload"), - 400 => emqx_dashboard_swagger:error_codes([?MESSAGE_ID_SCHEMA_ERROR], <<"Bad MsgId format">>), - 404 => emqx_dashboard_swagger:error_codes([?MESSAGE_ID_NOT_FOUND], <<"MsgId not found">>) + 400 => emqx_dashboard_swagger:error_codes( [?MESSAGE_ID_SCHEMA_ERROR] + , <<"Bad MsgId format">>), + 404 => emqx_dashboard_swagger:error_codes( [?MESSAGE_ID_NOT_FOUND] + , <<"MsgId not found">>) } }, delete => #{ @@ -90,15 +93,17 @@ schema("/mqtt/delayed/messages/:msgid") -> description => <<"Delete delayed message">>, parameters => [{msgid, mk(binary(), #{in => path, desc => <<"delay message ID">>})}], responses => #{ - 200 => <<"Delete delayed message success">>, - 400 => emqx_dashboard_swagger:error_codes([?MESSAGE_ID_SCHEMA_ERROR], <<"Bad MsgId format">>), - 404 => emqx_dashboard_swagger:error_codes([?MESSAGE_ID_NOT_FOUND], <<"MsgId not found">>) + 204 => <<"Delete delayed message success">>, + 400 => emqx_dashboard_swagger:error_codes( [?MESSAGE_ID_SCHEMA_ERROR] + , <<"Bad MsgId format">>), + 404 => emqx_dashboard_swagger:error_codes( [?MESSAGE_ID_NOT_FOUND] + , <<"MsgId not found">>) } } }; schema("/mqtt/delayed/messages") -> #{ - operationId => delayed_messages, + 'operationId' => delayed_messages, get => #{ tags => [<<"mqtt">>], description => <<"List delayed messages">>, @@ -130,7 +135,8 @@ fields("message_without_payload") -> {from_username, mk(binary(), #{desc => <<"From Username">>})} ]; fields("message") -> - PayloadDesc = io_lib:format("Payload, base64 encode. Payload will be ~p if length large than ~p", + PayloadDesc = io_lib:format( + "Payload, base64 encode. Payload will be ~p if length large than ~p", [?PAYLOAD_TOO_LARGE, ?MAX_PAYLOAD_LENGTH]), fields("message_without_payload") ++ [{payload, mk(binary(), #{desc => iolist_to_binary(PayloadDesc)})}]. 
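For the `PUT /mqtt/delayed` endpoint above, the request body schema is referenced from `emqx_modules_schema`'s "delayed" fields; the sketch below assumes those fields are `enable` and `max_delayed_messages` (not confirmed by this diff, check the schema module):

    %% sketch only: field names are assumptions
    Body = #{ <<"enable">> => true
            , <<"max_delayed_messages">> => 0
            },
    emqx_mgmt_api_test_util:request_api(put,
        emqx_mgmt_api_test_util:api_path(["mqtt", "delayed"]),
        "", emqx_mgmt_api_test_util:auth_header_(), Body).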
@@ -166,7 +172,7 @@ delayed_message(delete, #{bindings := #{msgid := Id}}) -> case emqx_delayed:get_delayed_message(Id) of {ok, _Message} -> _ = emqx_delayed:delete_delayed_message(Id), - {200}; + {204}; {error, id_schema_error} -> {400, generate_http_code_map(id_schema_error, Id)}; {error, not_found} -> @@ -233,9 +239,11 @@ update_config_(Node, Config) -> rpc_call(Node, ?MODULE, ?FUNCTION_NAME, [Node, Config]). generate_http_code_map(id_schema_error, Id) -> - #{code => ?MESSAGE_ID_SCHEMA_ERROR, message => iolist_to_binary(io_lib:format("Message ID ~p schema error", [Id]))}; + #{code => ?MESSAGE_ID_SCHEMA_ERROR, message => + iolist_to_binary(io_lib:format("Message ID ~p schema error", [Id]))}; generate_http_code_map(not_found, Id) -> - #{code => ?MESSAGE_ID_NOT_FOUND, message => iolist_to_binary(io_lib:format("Message ID ~p not found", [Id]))}. + #{code => ?MESSAGE_ID_NOT_FOUND, message => + iolist_to_binary(io_lib:format("Message ID ~p not found", [Id]))}. rpc_call(Node, Module, Fun, Args) -> case rpc:call(Node, Module, Fun, Args) of diff --git a/apps/emqx_modules/src/emqx_modules_sup.erl b/apps/emqx_modules/src/emqx_modules_sup.erl index 570082896..629aa7aba 100644 --- a/apps/emqx_modules/src/emqx_modules_sup.erl +++ b/apps/emqx_modules/src/emqx_modules_sup.erl @@ -38,6 +38,7 @@ start_link() -> %%-------------------------------------------------------------------- init([]) -> {ok, {{one_for_one, 10, 3600}, - [?CHILD(emqx_telemetry), - ?CHILD(emqx_topic_metrics), - ?CHILD(emqx_delayed)]}}. + [ ?CHILD(emqx_telemetry) + , ?CHILD(emqx_topic_metrics) + , ?CHILD(emqx_trace) + , ?CHILD(emqx_delayed)]}}. diff --git a/apps/emqx_modules/src/emqx_rewrite_api.erl b/apps/emqx_modules/src/emqx_rewrite_api.erl index 1fa5e9467..3f92cd11f 100644 --- a/apps/emqx_modules/src/emqx_rewrite_api.erl +++ b/apps/emqx_modules/src/emqx_rewrite_api.erl @@ -33,7 +33,7 @@ ]). api_spec() -> - emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}). paths() -> ["/mqtt/topic_rewrite"]. diff --git a/apps/emqx_modules/src/emqx_telemetry_api.erl b/apps/emqx_modules/src/emqx_telemetry_api.erl index 799f192d0..c3e1476bc 100644 --- a/apps/emqx_modules/src/emqx_telemetry_api.erl +++ b/apps/emqx_modules/src/emqx_telemetry_api.erl @@ -63,10 +63,12 @@ status_api() -> responses => #{<<"200">> => object_schema(Props)} }, put => #{ - description => "Enable or disbale telemetry", + description => "Enable or disable telemetry", 'requestBody' => object_schema(Props), responses => #{ - <<"200">> => schema(<<"Enable or disbale telemetry successfully">>), + <<"200">> => + object_schema(properties([{enable, boolean, <<"">>}]), + <<"Enable or disable telemetry successfully">>), <<"400">> => bad_request() } } @@ -77,10 +79,7 @@ data_api() -> Metadata = #{ get => #{ responses => #{ - <<"200">> => object_schema(properties(), <<"Get telemetry data">>) - } - } - }, + <<"200">> => object_schema(properties(), <<"Get telemetry data">>)}}}, {"/telemetry/data", Metadata, data}. %%-------------------------------------------------------------------- @@ -97,10 +96,10 @@ status(put, #{body := Body}) -> true -> <<"Telemetry status is already enabled">>; false -> <<"Telemetry status is already disable">> end, - {400, #{code => "BAD_REQUEST", message => Reason}}; + {400, #{code => 'BAD_REQUEST', message => Reason}}; false -> enable_telemetry(Enable), - {200} + {200, #{<<"enable">> => emqx_telemetry:get_status()}} end. 
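A successful telemetry toggle now echoes the effective state instead of an empty 200 body. A small sketch of the handler's reply shapes, assuming telemetry starts out disabled:

    %% sketch only: direct calls to the status/2 handler clause
    {200, #{<<"enable">> := true}} = status(put, #{body => #{<<"enable">> => true}}),
    %% repeating the same value is now a structured 400:
    {400, #{code := 'BAD_REQUEST', message := _Reason}} =
        status(put, #{body => #{<<"enable">> => true}}).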
data(get, _Request) -> diff --git a/apps/emqx_modules/src/emqx_topic_metrics_api.erl b/apps/emqx_modules/src/emqx_topic_metrics_api.erl index 00740584e..e8be39c47 100644 --- a/apps/emqx_modules/src/emqx_topic_metrics_api.erl +++ b/apps/emqx_modules/src/emqx_topic_metrics_api.erl @@ -96,7 +96,8 @@ topic_metrics_api() -> responses => #{ <<"200">> => schema(<<"Create topic metrics success">>), <<"409">> => error_schema(<<"Topic metrics max limit">>, [?EXCEED_LIMIT]), - <<"400">> => error_schema(<<"Topic metrics already exist or bad topic">>, [?BAD_REQUEST, ?BAD_TOPIC]) + <<"400">> => error_schema( <<"Topic metrics already exist or bad topic">> + , [?BAD_REQUEST, ?BAD_TOPIC]) } } }, @@ -115,7 +116,7 @@ operation_topic_metrics_api() -> description => <<"Deregister topic metrics">>, parameters => [topic_param()], responses => #{ - <<"200">> => schema(<<"Deregister topic metrics">>), + <<"204">> => schema(<<"Deregister topic metrics">>), <<"404">> => error_schema(<<"Topic not found">>, [?ERROR_TOPIC]) } } @@ -174,8 +175,10 @@ register(Topic) -> [Topic])), {400, #{code => ?BAD_TOPIC, message => Message}}; {error, {quota_exceeded, bad_topic}} -> - Message = list_to_binary(io_lib:format("Max topic metrics count is ~p, and topic cannot have wildcard ~p", - [emqx_topic_metrics:max_limit(), Topic])), + Message = list_to_binary( + io_lib:format( + "Max topic metrics count is ~p, and topic cannot have wildcard ~p", + [emqx_topic_metrics:max_limit(), Topic])), {400, #{code => ?BAD_REQUEST, message => Message}}; {error, already_existed} -> Message = list_to_binary(io_lib:format("Topic ~p already registered", [Topic])), diff --git a/apps/emqx_plugin_libs/src/emqx_placeholder.erl b/apps/emqx_plugin_libs/src/emqx_placeholder.erl new file mode 100644 index 000000000..7c5dfac3b --- /dev/null +++ b/apps/emqx_plugin_libs/src/emqx_placeholder.erl @@ -0,0 +1,182 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_placeholder). + +%% preprocess and process template string with place holders +-export([ preproc_tmpl/1 + , proc_tmpl/2 + , proc_tmpl/3 + , preproc_cmd/1 + , proc_cmd/2 + , proc_cmd/3 + , preproc_sql/1 + , preproc_sql/2 + , proc_sql/2 + , proc_sql_param_str/2 + , proc_cql_param_str/2 + ]). + +-import(emqx_plugin_libs_rule, [bin/1]). + +-define(EX_PLACE_HOLDER, "(\\$\\{[a-zA-Z0-9\\._]+\\})"). +-define(EX_WITHE_CHARS, "\\s"). %% Space and CRLF + +-type(tmpl_token() :: list({var, binary()} | {str, binary()})). + +-type(tmpl_cmd() :: list(tmpl_token())). + +-type(prepare_statement_key() :: binary()). + +%% preprocess template string with place holders +-spec(preproc_tmpl(binary()) -> tmpl_token()). +preproc_tmpl(Str) -> + Tokens = re:split(Str, ?EX_PLACE_HOLDER, [{return,binary},group,trim]), + preproc_tmpl(Tokens, []). 
+
+preproc_tmpl([], Acc) ->
+    lists:reverse(Acc);
+preproc_tmpl([[Str, Phld] | Tokens], Acc) ->
+    preproc_tmpl(Tokens,
+        put_head(var, parse_nested(unwrap(Phld)),
+            put_head(str, Str, Acc)));
+preproc_tmpl([[Str] | Tokens], Acc) ->
+    preproc_tmpl(Tokens, put_head(str, Str, Acc)).
+
+put_head(_Type, <<>>, List) -> List;
+put_head(Type, Term, List) ->
+    [{Type, Term} | List].
+
+-spec(proc_tmpl(tmpl_token(), map()) -> binary()).
+proc_tmpl(Tokens, Data) ->
+    proc_tmpl(Tokens, Data, #{return => full_binary}).
+
+-spec(proc_tmpl(tmpl_token(), map(), map()) -> binary() | list()).
+proc_tmpl(Tokens, Data, Opts = #{return := full_binary}) ->
+    Trans = maps:get(var_trans, Opts, fun emqx_plugin_libs_rule:bin/1),
+    list_to_binary(
+        proc_tmpl(Tokens, Data, #{return => rawlist, var_trans => Trans}));
+
+proc_tmpl(Tokens, Data, Opts = #{return := rawlist}) ->
+    Trans = maps:get(var_trans, Opts, undefined),
+    lists:map(
+        fun ({str, Str}) -> Str;
+            ({var, Phld}) when is_function(Trans) ->
+                Trans(get_phld_var(Phld, Data));
+            ({var, Phld}) ->
+                get_phld_var(Phld, Data)
+        end, Tokens).
+
+
+-spec(preproc_cmd(binary()) -> tmpl_cmd()).
+preproc_cmd(Str) ->
+    SubStrList = re:split(Str, ?EX_WITHE_CHARS, [{return,binary},trim]),
+    [preproc_tmpl(SubStr) || SubStr <- SubStrList].
+
+-spec(proc_cmd([tmpl_token()], map()) -> binary() | list()).
+proc_cmd(Tokens, Data) ->
+    proc_cmd(Tokens, Data, #{return => full_binary}).
+-spec(proc_cmd([tmpl_token()], map(), map()) -> list()).
+proc_cmd(Tokens, Data, Opts) ->
+    [proc_tmpl(Tks, Data, Opts) || Tks <- Tokens].
+
+%% preprocess SQL with place holders
+-spec(preproc_sql(Sql::binary()) -> {prepare_statement_key(), tmpl_token()}).
+preproc_sql(Sql) ->
+    preproc_sql(Sql, '?').
+
+-spec(preproc_sql(Sql::binary(), ReplaceWith :: '?' | '$n')
+    -> {prepare_statement_key(), tmpl_token()}).
+
+preproc_sql(Sql, ReplaceWith) ->
+    case re:run(Sql, ?EX_PLACE_HOLDER, [{capture, all_but_first, binary}, global]) of
+        {match, PlaceHolders} ->
+            PhKs = [parse_nested(unwrap(Phld)) || [Phld | _] <- PlaceHolders],
+            {replace_with(Sql, ReplaceWith), [{var, Phld} || Phld <- PhKs]};
+        nomatch ->
+            {Sql, []}
+    end.
+
+-spec(proc_sql(tmpl_token(), map()) -> list()).
+proc_sql(Tokens, Data) ->
+    proc_tmpl(Tokens, Data, #{return => rawlist, var_trans => fun sql_data/1}).
+
+-spec(proc_sql_param_str(tmpl_token(), map()) -> binary()).
+proc_sql_param_str(Tokens, Data) ->
+    proc_param_str(Tokens, Data, fun quote_sql/1).
+
+-spec(proc_cql_param_str(tmpl_token(), map()) -> binary()).
+proc_cql_param_str(Tokens, Data) ->
+    proc_param_str(Tokens, Data, fun quote_cql/1).
+
+proc_param_str(Tokens, Data, Quote) ->
+    iolist_to_binary(
+        proc_tmpl(Tokens, Data, #{return => rawlist, var_trans => Quote})).
+
+%% backward compatibility for hot upgrading from =< e4.2.1
+get_phld_var(Fun, Data) when is_function(Fun) ->
+    Fun(Data);
+get_phld_var(Phld, Data) ->
+    emqx_rule_maps:nested_get(Phld, Data).
+
+replace_with(Tmpl, '?') ->
+    re:replace(Tmpl, ?EX_PLACE_HOLDER, "?", [{return, binary}, global]);
+replace_with(Tmpl, '$n') ->
+    Parts = re:split(Tmpl, ?EX_PLACE_HOLDER, [{return, binary}, trim, group]),
+    {Res, _} =
+        lists:foldl(
+            fun([Tkn, _Phld], {Acc, Seq}) ->
+                    Seq1 = erlang:integer_to_binary(Seq),
+                    {<<Acc/binary, Tkn/binary, "$", Seq1/binary>>, Seq + 1};
+               ([Tkn], {Acc, Seq}) ->
+                    {<<Acc/binary, Tkn/binary>>, Seq}
+            end, {<<>>, 1}, Parts),
+    Res.
+
+parse_nested(Attr) ->
+    case string:split(Attr, <<".">>, all) of
+        [Attr] -> {var, Attr};
+        Nested -> {path, [{key, P} || P <- Nested]}
+    end.
+
+unwrap(<<"${", Val/binary>>) ->
+    binary:part(Val, {0, byte_size(Val)-1}).
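Taken together, the tokenizer and processors above cover both plain string templating and prepared-statement style SQL. A short usage sketch (the clientid/topic data is illustrative, and the commented results are the expected renderings):

    %% string template
    Tks = emqx_placeholder:preproc_tmpl(<<"clientid: ${clientid}">>),
    Bin = emqx_placeholder:proc_tmpl(Tks, #{<<"clientid">> => <<"c1">>}),
    %% Bin is expected to be <<"clientid: c1">>

    %% SQL with '$n' placeholders (e.g. PostgreSQL prepared statements)
    {PrepSql, ParamTks} =
        emqx_placeholder:preproc_sql(<<"insert into t(cid, topic) values (${clientid}, ${topic})">>, '$n'),
    %% PrepSql is expected to be <<"insert into t(cid, topic) values ($1, $2)">>
    Params = emqx_placeholder:proc_sql(ParamTks, #{<<"clientid">> => <<"c1">>, <<"topic">> => <<"t/1">>}).
    %% Params is expected to be [<<"c1">>, <<"t/1">>]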
+ +sql_data(undefined) -> null; +sql_data(List) when is_list(List) -> List; +sql_data(Bin) when is_binary(Bin) -> Bin; +sql_data(Num) when is_number(Num) -> Num; +sql_data(Bool) when is_boolean(Bool) -> Bool; +sql_data(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8); +sql_data(Map) when is_map(Map) -> emqx_json:encode(Map). + +quote_sql(Str) -> + quote(Str, <<"\\\\'">>). + +quote_cql(Str) -> + quote(Str, <<"''">>). + +quote(Str, ReplaceWith) when + is_list(Str); + is_binary(Str); + is_atom(Str); + is_map(Str) -> + [$', escape_apo(bin(Str), ReplaceWith), $']; +quote(Val, _) -> + bin(Val). + +escape_apo(Str, ReplaceWith) -> + re:replace(Str, <<"'">>, ReplaceWith, [{return, binary}, global]). diff --git a/apps/emqx_plugin_libs/src/emqx_plugin_libs_metrics.erl b/apps/emqx_plugin_libs/src/emqx_plugin_libs_metrics.erl new file mode 100644 index 000000000..824890efc --- /dev/null +++ b/apps/emqx_plugin_libs/src/emqx_plugin_libs_metrics.erl @@ -0,0 +1,315 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_plugin_libs_metrics). + +-behaviour(gen_server). + +%% API functions +-export([ start_link/1 + , stop/1 + , child_spec/1 + ]). + +-export([ inc/3 + , inc/4 + , get/3 + , get_speed/2 + , create_metrics/2 + , clear_metrics/2 + ]). + +-export([ get_metrics/2 + , get_matched/2 + , get_success/2 + , get_failed/2 + , inc_matched/2 + , inc_success/2 + , inc_failed/2 + ]). + +%% gen_server callbacks +-export([ init/1 + , handle_call/3 + , handle_info/2 + , handle_cast/2 + , code_change/3 + , terminate/2 + ]). + +-ifndef(TEST). +-define(SECS_5M, 300). +-define(SAMPLING, 10). +-else. +%% Use 5 secs average speed instead of 5 mins in case of testing +-define(SECS_5M, 5). +-define(SAMPLING, 1). +-endif. + +-export_type([metrics/0]). + +-type metrics() :: #{ + matched => integer(), + success => integer(), + failed => integer(), + speed => float(), + speed_max => float(), + speed_last5m => float() +}. +-type handler_name() :: atom(). +-type metric_id() :: binary(). + +-define(CntrRef(Name), {?MODULE, Name}). +-define(SAMPCOUNT_5M, (?SECS_5M div ?SAMPLING)). + +%% the speed of 'matched' +-record(speed, { + max = 0 :: number(), + current = 0 :: number(), + last5m = 0 :: number(), + %% metadata for calculating the avg speed + tick = 1 :: number(), + last_v = 0 :: number(), + %% metadata for calculating the 5min avg speed + last5m_acc = 0 :: number(), + last5m_smpl = [] :: list() + }). + +-record(state, { + metric_ids = sets:new(), + speeds :: undefined | #{metric_id() => #speed{}} + }). + +%%------------------------------------------------------------------------------ +%% APIs +%%------------------------------------------------------------------------------ + +-spec(child_spec(handler_name()) -> supervisor:child_spec()). 
+child_spec(Name) -> + #{ id => emqx_plugin_libs_metrics + , start => {emqx_plugin_libs_metrics, start_link, [Name]} + , restart => permanent + , shutdown => 5000 + , type => worker + , modules => [emqx_plugin_libs_metrics] + }. + +-spec(create_metrics(handler_name(), metric_id()) -> ok). +create_metrics(Name, Id) -> + gen_server:call(Name, {create_metrics, Id}). + +-spec(clear_metrics(handler_name(), metric_id()) -> ok). +clear_metrics(Name, Id) -> + gen_server:call(Name, {delete_metrics, Id}). + +-spec(get(handler_name(), metric_id(), atom()) -> number()). +get(Name, Id, Metric) -> + case get_couters_ref(Name, Id) of + not_found -> 0; + Ref -> counters:get(Ref, metrics_idx(Metric)) + end. + +-spec(get_speed(handler_name(), metric_id()) -> map()). +get_speed(Name, Id) -> + gen_server:call(Name, {get_speed, Id}). + +-spec(get_metrics(handler_name(), metric_id()) -> metrics()). +get_metrics(Name, Id) -> + #{max := Max, current := Current, last5m := Last5M} = get_speed(Name, Id), + #{matched => get_matched(Name, Id), + success => get_success(Name, Id), + failed => get_failed(Name, Id), + speed => Current, + speed_max => Max, + speed_last5m => Last5M + }. + +-spec inc(handler_name(), metric_id(), atom()) -> ok. +inc(Name, Id, Metric) -> + inc(Name, Id, Metric, 1). + +-spec inc(handler_name(), metric_id(), atom(), pos_integer()) -> ok. +inc(Name, Id, Metric, Val) -> + case get_couters_ref(Name, Id) of + not_found -> + %% this may occur when increasing a counter for + %% a rule that was created from a remove node. + create_metrics(Name, Id), + counters:add(get_couters_ref(Name, Id), metrics_idx(Metric), Val); + Ref -> + counters:add(Ref, metrics_idx(Metric), Val) + end. + +inc_matched(Name, Id) -> + inc(Name, Id, 'matched', 1). + +inc_success(Name, Id) -> + inc(Name, Id, 'success', 1). + +inc_failed(Name, Id) -> + inc(Name, Id, 'failed', 1). + +get_matched(Name, Id) -> + get(Name, Id, 'matched'). + +get_success(Name, Id) -> + get(Name, Id, 'success'). + +get_failed(Name, Id) -> + get(Name, Id, 'failed'). + +start_link(Name) -> + gen_server:start_link({local, Name}, ?MODULE, Name, []). + +init(Name) -> + erlang:process_flag(trap_exit, true), + %% the speed metrics + erlang:send_after(timer:seconds(?SAMPLING), self(), ticking), + persistent_term:put(?CntrRef(Name), #{}), + {ok, #state{}}. + +handle_call({get_speed, _Id}, _From, State = #state{speeds = undefined}) -> + {reply, format_speed(#speed{}), State}; +handle_call({get_speed, Id}, _From, State = #state{speeds = Speeds}) -> + {reply, case maps:get(Id, Speeds, undefined) of + undefined -> format_speed(#speed{}); + Speed -> format_speed(Speed) + end, State}; + +handle_call({create_metrics, Id}, _From, + State = #state{metric_ids = MIDs, speeds = Speeds}) -> + {reply, create_counters(get_self_name(), Id), + State#state{metric_ids = sets:add_element(Id, MIDs), + speeds = case Speeds of + undefined -> #{Id => #speed{}}; + _ -> Speeds#{Id => #speed{}} + end}}; + +handle_call({delete_metrics, Id}, _From, + State = #state{metric_ids = MIDs, speeds = Speeds}) -> + {reply, delete_counters(get_self_name(), Id), + State#state{metric_ids = sets:del_element(Id, MIDs), + speeds = case Speeds of + undefined -> undefined; + _ -> maps:remove(Id, Speeds) + end}}; + +handle_call(_Request, _From, State) -> + {reply, ok, State}. + +handle_cast(_Msg, State) -> + {noreply, State}. 
+ +handle_info(ticking, State = #state{speeds = undefined}) -> + erlang:send_after(timer:seconds(?SAMPLING), self(), ticking), + {noreply, State}; + +handle_info(ticking, State = #state{speeds = Speeds0}) -> + Speeds = maps:map( + fun(Id, Speed) -> + calculate_speed(get_matched(get_self_name(), Id), Speed) + end, Speeds0), + erlang:send_after(timer:seconds(?SAMPLING), self(), ticking), + {noreply, State#state{speeds = Speeds}}; + +handle_info(_Info, State) -> + {noreply, State}. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. + +terminate(_Reason, #state{metric_ids = MIDs}) -> + Name = get_self_name(), + [delete_counters(Name, Id) || Id <- sets:to_list(MIDs)], + persistent_term:erase(?CntrRef(Name)). + +stop(Name) -> + gen_server:stop(Name). + +%%------------------------------------------------------------------------------ +%% Internal Functions +%%------------------------------------------------------------------------------ + +create_counters(Name, Id) -> + case get_couters_ref(Name, Id) of + not_found -> + Counters = get_all_counters(Name), + CntrRef = counters:new(max_counters_size(), [write_concurrency]), + persistent_term:put(?CntrRef(Name), Counters#{Id => CntrRef}); + _Ref -> ok + end. + +delete_counters(Name, Id) -> + persistent_term:put(?CntrRef(Name), maps:remove(Id, get_all_counters(Name))). + +get_couters_ref(Name, Id) -> + maps:get(Id, get_all_counters(Name), not_found). + +get_all_counters(Name) -> + persistent_term:get(?CntrRef(Name), #{}). + +calculate_speed(_CurrVal, undefined) -> + undefined; +calculate_speed(CurrVal, #speed{max = MaxSpeed0, last_v = LastVal, + tick = Tick, last5m_acc = AccSpeed5Min0, + last5m_smpl = Last5MinSamples0}) -> + %% calculate the current speed based on the last value of the counter + CurrSpeed = (CurrVal - LastVal) / ?SAMPLING, + + %% calculate the max speed since the emqx startup + MaxSpeed = + if MaxSpeed0 >= CurrSpeed -> MaxSpeed0; + true -> CurrSpeed + end, + + %% calculate the average speed in last 5 mins + {Last5MinSamples, Acc5Min, Last5Min} = + if Tick =< ?SAMPCOUNT_5M -> + Acc = AccSpeed5Min0 + CurrSpeed, + {lists:reverse([CurrSpeed | lists:reverse(Last5MinSamples0)]), + Acc, Acc / Tick}; + true -> + [FirstSpeed | Speeds] = Last5MinSamples0, + Acc = AccSpeed5Min0 + CurrSpeed - FirstSpeed, + {lists:reverse([CurrSpeed | lists:reverse(Speeds)]), + Acc, Acc / ?SAMPCOUNT_5M} + end, + + #speed{max = MaxSpeed, current = CurrSpeed, last5m = Last5Min, + last_v = CurrVal, last5m_acc = Acc5Min, + last5m_smpl = Last5MinSamples, tick = Tick + 1}. + +format_speed(#speed{max = Max, current = Current, last5m = Last5Min}) -> + #{max => Max, current => precision(Current, 2), last5m => precision(Last5Min, 2)}. + +precision(Float, N) -> + Base = math:pow(10, N), + round(Float * Base) / Base. + +get_self_name() -> + {registered_name, Name} = process_info(self(), registered_name), + Name. + +%%------------------------------------------------------------------------------ +%% Metrics Definitions +%%------------------------------------------------------------------------------ + +max_counters_size() -> 32. +metrics_idx('matched') -> 1; +metrics_idx('success') -> 2; +metrics_idx('failed') -> 3; +metrics_idx(_) -> 32. 
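For orientation, a minimal usage sketch of the metrics handler added above. The handler name my_metrics and the id <<"rule:1">> are illustrative only; the snippet assumes the handler process has already been started under a supervisor via child_spec/1 (the resource supervisor further down does this under the name resource_metrics):

```
%% Sketch only: assumes a handler registered as 'my_metrics',
%% e.g. started from emqx_plugin_libs_metrics:child_spec(my_metrics).
ok = emqx_plugin_libs_metrics:create_metrics(my_metrics, <<"rule:1">>),
ok = emqx_plugin_libs_metrics:inc_matched(my_metrics, <<"rule:1">>),
ok = emqx_plugin_libs_metrics:inc_success(my_metrics, <<"rule:1">>),
%% returns #{matched => 1, success => 1, failed => 0,
%%           speed => ..., speed_max => ..., speed_last5m => ...}
Metrics = emqx_plugin_libs_metrics:get_metrics(my_metrics, <<"rule:1">>),
ok = emqx_plugin_libs_metrics:clear_metrics(my_metrics, <<"rule:1">>).
```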
+ diff --git a/apps/emqx_plugin_libs/src/emqx_plugin_libs_rule.erl b/apps/emqx_plugin_libs/src/emqx_plugin_libs_rule.erl index b3393de04..24dfbfd85 100644 --- a/apps/emqx_plugin_libs/src/emqx_plugin_libs_rule.erl +++ b/apps/emqx_plugin_libs/src/emqx_plugin_libs_rule.erl @@ -15,8 +15,9 @@ %%-------------------------------------------------------------------- -module(emqx_plugin_libs_rule). +-elvis([{elvis_style, god_modules, disable}]). -%% preprocess and process tempalte string with place holders +%% preprocess and process template string with place holders -export([ preproc_tmpl/1 , proc_tmpl/2 , proc_tmpl/3 @@ -76,107 +77,49 @@ %% preprocess template string with place holders -spec(preproc_tmpl(binary()) -> tmpl_token()). preproc_tmpl(Str) -> - Tokens = re:split(Str, ?EX_PLACE_HOLDER, [{return,binary},group,trim]), - preproc_tmpl(Tokens, []). - -preproc_tmpl([], Acc) -> - lists:reverse(Acc); -preproc_tmpl([[Str, Phld]| Tokens], Acc) -> - preproc_tmpl(Tokens, - put_head(var, parse_nested(unwrap(Phld)), - put_head(str, Str, Acc))); -preproc_tmpl([[Str]| Tokens], Acc) -> - preproc_tmpl(Tokens, put_head(str, Str, Acc)). - -put_head(_Type, <<>>, List) -> List; -put_head(Type, Term, List) -> - [{Type, Term} | List]. + emqx_placeholder:preproc_tmpl(Str). -spec(proc_tmpl(tmpl_token(), map()) -> binary()). proc_tmpl(Tokens, Data) -> - proc_tmpl(Tokens, Data, #{return => full_binary}). + emqx_placeholder:proc_tmpl(Tokens, Data). -spec(proc_tmpl(tmpl_token(), map(), map()) -> binary() | list()). -proc_tmpl(Tokens, Data, Opts = #{return := full_binary}) -> - Trans = maps:get(var_trans, Opts, fun bin/1), - list_to_binary( - proc_tmpl(Tokens, Data, #{return => rawlist, var_trans => Trans})); - -proc_tmpl(Tokens, Data, Opts = #{return := rawlist}) -> - Trans = maps:get(var_trans, Opts, undefined), - lists:map( - fun ({str, Str}) -> Str; - ({var, Phld}) when is_function(Trans) -> - Trans(get_phld_var(Phld, Data)); - ({var, Phld}) -> - get_phld_var(Phld, Data) - end, Tokens). - +proc_tmpl(Tokens, Data, Opts) -> + emqx_placeholder:proc_tmpl(Tokens, Data, Opts). -spec(preproc_cmd(binary()) -> tmpl_cmd()). preproc_cmd(Str) -> - SubStrList = re:split(Str, ?EX_WITHE_CHARS, [{return,binary},trim]), - [preproc_tmpl(SubStr) || SubStr <- SubStrList]. + emqx_placeholder:preproc_cmd(Str). -spec(proc_cmd([tmpl_token()], map()) -> binary() | list()). proc_cmd(Tokens, Data) -> - proc_cmd(Tokens, Data, #{return => full_binary}). + emqx_placeholder:proc_cmd(Tokens, Data). -spec(proc_cmd([tmpl_token()], map(), map()) -> list()). proc_cmd(Tokens, Data, Opts) -> - [proc_tmpl(Tks, Data, Opts) || Tks <- Tokens]. + emqx_placeholder:proc_cmd(Tokens, Data, Opts). %% preprocess SQL with place holders -spec(preproc_sql(Sql::binary()) -> {prepare_statement_key(), tmpl_token()}). preproc_sql(Sql) -> - preproc_sql(Sql, '?'). + emqx_placeholder:preproc_sql(Sql). -spec(preproc_sql(Sql::binary(), ReplaceWith :: '?' | '$n') -> {prepare_statement_key(), tmpl_token()}). preproc_sql(Sql, ReplaceWith) -> - case re:run(Sql, ?EX_PLACE_HOLDER, [{capture, all_but_first, binary}, global]) of - {match, PlaceHolders} -> - PhKs = [parse_nested(unwrap(Phld)) || [Phld | _] <- PlaceHolders], - {replace_with(Sql, ReplaceWith), [{var, Phld} || Phld <- PhKs]}; - nomatch -> - {Sql, []} - end. + emqx_placeholder:preproc_sql(Sql, ReplaceWith). -spec(proc_sql(tmpl_token(), map()) -> list()). proc_sql(Tokens, Data) -> - proc_tmpl(Tokens, Data, #{return => rawlist, var_trans => fun sql_data/1}). + emqx_placeholder:proc_sql(Tokens, Data). 
 -spec(proc_sql_param_str(tmpl_token(), map()) -> binary()).
 proc_sql_param_str(Tokens, Data) ->
-    proc_param_str(Tokens, Data, fun quote_sql/1).
+    emqx_placeholder:proc_sql_param_str(Tokens, Data).
 
 -spec(proc_cql_param_str(tmpl_token(), map()) -> binary()).
 proc_cql_param_str(Tokens, Data) ->
-    proc_param_str(Tokens, Data, fun quote_cql/1).
-
-proc_param_str(Tokens, Data, Quote) ->
-    iolist_to_binary(
-        proc_tmpl(Tokens, Data, #{return => rawlist, var_trans => Quote})).
-
-%% backward compatibility for hot upgrading from =< e4.2.1
-get_phld_var(Fun, Data) when is_function(Fun) ->
-    Fun(Data);
-get_phld_var(Phld, Data) ->
-    emqx_rule_maps:nested_get(Phld, Data).
-
-replace_with(Tmpl, '?') ->
-    re:replace(Tmpl, ?EX_PLACE_HOLDER, "?", [{return, binary}, global]);
-replace_with(Tmpl, '$n') ->
-    Parts = re:split(Tmpl, ?EX_PLACE_HOLDER, [{return, binary}, trim, group]),
-    {Res, _} =
-        lists:foldl(
-            fun([Tkn, _Phld], {Acc, Seq}) ->
-                Seq1 = erlang:integer_to_binary(Seq),
-                {<<Acc/binary, Tkn/binary, "$", Seq1/binary>>, Seq + 1};
-            ([Tkn], {Acc, Seq}) ->
-                {<<Acc/binary, Tkn/binary>>, Seq}
-            end, {<<>>, 1}, Parts),
-    Res.
+    emqx_placeholder:proc_cql_param_str(Tokens, Data).
 
 unsafe_atom_key(Key) when is_atom(Key) ->
     Key;
@@ -227,35 +170,6 @@ tcp_connectivity(Host, Port, Timeout) ->
         {error, Reason} -> {error, Reason}
     end.
 
-unwrap(<<"${", Val/binary>>) ->
-    binary:part(Val, {0, byte_size(Val)-1}).
-
-sql_data(undefined) -> null;
-sql_data(List) when is_list(List) -> List;
-sql_data(Bin) when is_binary(Bin) -> Bin;
-sql_data(Num) when is_number(Num) -> Num;
-sql_data(Bool) when is_boolean(Bool) -> Bool;
-sql_data(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8);
-sql_data(Map) when is_map(Map) -> emqx_json:encode(Map).
-
-quote_sql(Str) ->
-    quote(Str, <<"\\\\'">>).
-
-quote_cql(Str) ->
-    quote(Str, <<"''">>).
-
-quote(Str, ReplaceWith) when
-    is_list(Str);
-    is_binary(Str);
-    is_atom(Str);
-    is_map(Str) ->
-    [$', escape_apo(bin(Str), ReplaceWith), $'];
-quote(Val, _) ->
-    bin(Val).
-
-escape_apo(Str, ReplaceWith) ->
-    re:replace(Str, <<"'">>, ReplaceWith, [{return, binary}, global]).
-
 str(Bin) when is_binary(Bin) -> binary_to_list(Bin);
 str(Num) when is_number(Num) -> number_to_list(Num);
 str(Atom) when is_atom(Atom) -> atom_to_list(Atom);
@@ -345,12 +259,6 @@ number_to_list(Int) when is_integer(Int) ->
 number_to_list(Float) when is_float(Float) ->
     float_to_list(Float, [{decimals, 10}, compact]).
 
-parse_nested(Attr) ->
-    case string:split(Attr, <<".">>, all) of
-        [Attr] -> {var, Attr};
-        Nested -> {path, [{key, P} || P <- Nested]}
-    end.
-
 now_ms() ->
     erlang:system_time(millisecond).
 
diff --git a/apps/emqx_plugin_libs/src/emqx_plugin_libs_ssl.erl b/apps/emqx_plugin_libs/src/emqx_plugin_libs_ssl.erl
index b000a8be8..2bcf66763 100644
--- a/apps/emqx_plugin_libs/src/emqx_plugin_libs_ssl.erl
+++ b/apps/emqx_plugin_libs/src/emqx_plugin_libs_ssl.erl
@@ -73,10 +73,7 @@ save_files_return_opts(Options, Dir) ->
     Key = do_save_file(KeyFile, Dir),
     Cert = do_save_file(CertFile, Dir),
     CA = do_save_file(CAFile, Dir),
-    Verify = case GetD(verify, false) of
-                 false -> verify_none;
-                 _ -> verify_peer
-             end,
+    Verify = GetD(verify, verify_none),
     SNI = Get(server_name_indication),
     Versions = emqx_tls_lib:integral_versions(Get(tls_versions)),
     Ciphers = emqx_tls_lib:integral_ciphers(Versions, Get(ciphers)),
@@ -92,6 +89,7 @@ save_file(Param, SubDir) ->
     do_save_file(Param, Dir).
 
 filter([]) -> [];
+filter([{_, undefined} | T]) -> filter(T);
 filter([{_, ""} | T]) -> filter(T);
 filter([H | T]) -> [H | filter(T)].
diff --git a/apps/emqx_plugin_libs/test/emqx_placeholder_SUITE.erl b/apps/emqx_plugin_libs/test/emqx_placeholder_SUITE.erl new file mode 100644 index 000000000..1e3c1bfaf --- /dev/null +++ b/apps/emqx_plugin_libs/test/emqx_placeholder_SUITE.erl @@ -0,0 +1,91 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_placeholder_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). + +all() -> emqx_common_test_helpers:all(?MODULE). + + +t_proc_tmpl(_) -> + Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + Tks = emqx_placeholder:preproc_tmpl(<<"a:${a},b:${b},c:${c},d:${d}">>), + ?assertEqual(<<"a:1,b:1,c:1.0,d:{\"d1\":\"hi\"}">>, + emqx_placeholder:proc_tmpl(Tks, Selected)). + +t_proc_tmpl1(_) -> + Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + Tks = emqx_placeholder:preproc_tmpl(<<"a:$a,b:b},c:{c},d:${d">>), + ?assertEqual(<<"a:$a,b:b},c:{c},d:${d">>, + emqx_placeholder:proc_tmpl(Tks, Selected)). + +t_proc_cmd(_) -> + Selected = #{v0 => <<"x">>, v1 => <<"1">>, v2 => #{d1 => <<"hi">>}}, + Tks = emqx_placeholder:preproc_cmd(<<"hset name a:${v0} ${v1} b ${v2} ">>), + ?assertEqual([<<"hset">>, <<"name">>, + <<"a:x">>, <<"1">>, + <<"b">>, <<"{\"d1\":\"hi\"}">>], + emqx_placeholder:proc_cmd(Tks, Selected)). + +t_preproc_sql(_) -> + Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + {PrepareStatement, ParamsTokens} = + emqx_placeholder:preproc_sql(<<"a:${a},b:${b},c:${c},d:${d}">>, '?'), + ?assertEqual(<<"a:?,b:?,c:?,d:?">>, PrepareStatement), + ?assertEqual([<<"1">>,1,1.0,<<"{\"d1\":\"hi\"}">>], + emqx_placeholder:proc_sql(ParamsTokens, Selected)). + +t_preproc_sql1(_) -> + Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + {PrepareStatement, ParamsTokens} = + emqx_placeholder:preproc_sql(<<"a:${a},b:${b},c:${c},d:${d}">>, '$n'), + ?assertEqual(<<"a:$1,b:$2,c:$3,d:$4">>, PrepareStatement), + ?assertEqual([<<"1">>,1,1.0,<<"{\"d1\":\"hi\"}">>], + emqx_placeholder:proc_sql(ParamsTokens, Selected)). +t_preproc_sql2(_) -> + Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + {PrepareStatement, ParamsTokens} = + emqx_placeholder:preproc_sql(<<"a:$a,b:b},c:{c},d:${d">>, '?'), + ?assertEqual(<<"a:$a,b:b},c:{c},d:${d">>, PrepareStatement), + ?assertEqual([], emqx_placeholder:proc_sql(ParamsTokens, Selected)). + +t_preproc_sql3(_) -> + Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + ParamsTokens = emqx_placeholder:preproc_tmpl(<<"a:${a},b:${b},c:${c},d:${d}">>), + ?assertEqual(<<"a:'1',b:1,c:1.0,d:'{\"d1\":\"hi\"}'">>, + emqx_placeholder:proc_sql_param_str(ParamsTokens, Selected)). 
+ +t_preproc_sql4(_) -> + %% with apostrophes + %% https://github.com/emqx/emqx/issues/4135 + Selected = #{a => <<"1''2">>, b => 1, c => 1.0, + d => #{d1 => <<"someone's phone">>}}, + ParamsTokens = emqx_placeholder:preproc_tmpl(<<"a:${a},b:${b},c:${c},d:${d}">>), + ?assertEqual(<<"a:'1\\'\\'2',b:1,c:1.0,d:'{\"d1\":\"someone\\'s phone\"}'">>, + emqx_placeholder:proc_sql_param_str(ParamsTokens, Selected)). + +t_preproc_sql5(_) -> + %% with apostrophes for cassandra + %% https://github.com/emqx/emqx/issues/4148 + Selected = #{a => <<"1''2">>, b => 1, c => 1.0, + d => #{d1 => <<"someone's phone">>}}, + ParamsTokens = emqx_placeholder:preproc_tmpl(<<"a:${a},b:${b},c:${c},d:${d}">>), + ?assertEqual(<<"a:'1''''2',b:1,c:1.0,d:'{\"d1\":\"someone''s phone\"}'">>, + emqx_placeholder:proc_cql_param_str(ParamsTokens, Selected)). diff --git a/apps/emqx_plugin_libs/test/emqx_plugin_libs_metrics_SUITE.erl b/apps/emqx_plugin_libs/test/emqx_plugin_libs_metrics_SUITE.erl new file mode 100644 index 000000000..3a74cd232 --- /dev/null +++ b/apps/emqx_plugin_libs/test/emqx_plugin_libs_metrics_SUITE.erl @@ -0,0 +1,96 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_plugin_libs_metrics_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). + +all() -> + [ {group, metrics} + , {group, speed} ]. + +suite() -> + [{ct_hooks, [cth_surefire]}, {timetrap, {seconds, 30}}]. + +groups() -> + [{metrics, [sequence], + [ t_rule + , t_no_creation_1 + ]}, + {speed, [sequence], + [ rule_speed + ]} + ]. + +-define(NAME, ?MODULE). + +init_per_suite(Config) -> + emqx_common_test_helpers:start_apps([emqx_conf]), + {ok, _} = emqx_plugin_libs_metrics:start_link(?NAME), + Config. + +end_per_suite(_Config) -> + catch emqx_plugin_libs_metrics:stop(?NAME), + emqx_common_test_helpers:stop_apps([emqx_conf]), + ok. + +init_per_testcase(_, Config) -> + catch emqx_plugin_libs_metrics:stop(?NAME), + {ok, _} = emqx_plugin_libs_metrics:start_link(?NAME), + Config. + +end_per_testcase(_, _Config) -> + ok. + +t_no_creation_1(_) -> + ?assertEqual(ok, emqx_plugin_libs_metrics:inc(?NAME, <<"rule1">>, 'rules.matched')). 
+ +t_rule(_) -> + ok = emqx_plugin_libs_metrics:create_metrics(?NAME, <<"rule1">>), + ok = emqx_plugin_libs_metrics:create_metrics(?NAME, <<"rule2">>), + ok = emqx_plugin_libs_metrics:inc(?NAME, <<"rule1">>, 'rules.matched'), + ok = emqx_plugin_libs_metrics:inc(?NAME, <<"rule2">>, 'rules.matched'), + ok = emqx_plugin_libs_metrics:inc(?NAME, <<"rule2">>, 'rules.matched'), + ?assertEqual(1, emqx_plugin_libs_metrics:get(?NAME, <<"rule1">>, 'rules.matched')), + ?assertEqual(2, emqx_plugin_libs_metrics:get(?NAME, <<"rule2">>, 'rules.matched')), + ?assertEqual(0, emqx_plugin_libs_metrics:get(?NAME, <<"rule3">>, 'rules.matched')), + ok = emqx_plugin_libs_metrics:clear_metrics(?NAME, <<"rule1">>), + ok = emqx_plugin_libs_metrics:clear_metrics(?NAME, <<"rule2">>). + +rule_speed(_) -> + ok = emqx_plugin_libs_metrics:create_metrics(?NAME, <<"rule1">>), + ok = emqx_plugin_libs_metrics:create_metrics(?NAME, <<"rule:2">>), + ok = emqx_plugin_libs_metrics:inc(?NAME, <<"rule1">>, 'rules.matched'), + ok = emqx_plugin_libs_metrics:inc(?NAME, <<"rule1">>, 'rules.matched'), + ok = emqx_plugin_libs_metrics:inc(?NAME, <<"rule:2">>, 'rules.matched'), + ?assertEqual(2, emqx_plugin_libs_metrics:get(?NAME, <<"rule1">>, 'rules.matched')), + ct:sleep(1000), + ?LET(#{max := Max, current := Current}, + emqx_plugin_libs_metrics:get_speed(?NAME, <<"rule1">>), + {?assert(Max =< 2), + ?assert(Current =< 2)}), + ct:sleep(2100), + ?LET(#{max := Max, current := Current, last5m := Last5Min}, emqx_plugin_libs_metrics:get_speed(?NAME, <<"rule1">>), + {?assert(Max =< 2), + ?assert(Current == 0), + ?assert(Last5Min =< 0.67)}), + ct:sleep(3000), + ok = emqx_plugin_libs_metrics:clear_metrics(?NAME, <<"rule1">>), + ok = emqx_plugin_libs_metrics:clear_metrics(?NAME, <<"rule:2">>). diff --git a/apps/emqx_plugin_libs/test/emqx_plugin_libs_rule_SUITE.erl b/apps/emqx_plugin_libs/test/emqx_plugin_libs_rule_SUITE.erl index b2054e99e..4ce24b6b7 100644 --- a/apps/emqx_plugin_libs/test/emqx_plugin_libs_rule_SUITE.erl +++ b/apps/emqx_plugin_libs/test/emqx_plugin_libs_rule_SUITE.erl @@ -27,9 +27,11 @@ all() -> emqx_common_test_helpers:all(?MODULE). t_http_connectivity(_) -> {ok, Socket} = gen_tcp:listen(?PORT, []), - ok = emqx_plugin_libs_rule:http_connectivity("http://127.0.0.1:"++emqx_plugin_libs_rule:str(?PORT), 1000), + ok = emqx_plugin_libs_rule:http_connectivity( + "http://127.0.0.1:"++emqx_plugin_libs_rule:str(?PORT), 1000), gen_tcp:close(Socket), - {error, _} = emqx_plugin_libs_rule:http_connectivity("http://127.0.0.1:"++emqx_plugin_libs_rule:str(?PORT), 1000). + {error, _} = emqx_plugin_libs_rule:http_connectivity( + "http://127.0.0.1:"++emqx_plugin_libs_rule:str(?PORT), 1000). t_tcp_connectivity(_) -> {ok, Socket} = gen_tcp:listen(?PORT, []), @@ -68,69 +70,8 @@ t_atom_key(_) -> t_unsafe_atom_key(_) -> ?assertEqual([xyz876gv], emqx_plugin_libs_rule:unsafe_atom_key([<<"xyz876gv">>])), - ?assertEqual([xyz876gv33, port], emqx_plugin_libs_rule:unsafe_atom_key([<<"xyz876gv33">>, port])), - ?assertEqual([xyz876gv331, port1221], emqx_plugin_libs_rule:unsafe_atom_key([<<"xyz876gv331">>, <<"port1221">>])), + ?assertEqual([xyz876gv33, port], + emqx_plugin_libs_rule:unsafe_atom_key([<<"xyz876gv33">>, port])), + ?assertEqual([xyz876gv331, port1221], + emqx_plugin_libs_rule:unsafe_atom_key([<<"xyz876gv331">>, <<"port1221">>])), ?assertEqual(xyz876gv3312, emqx_plugin_libs_rule:unsafe_atom_key(<<"xyz876gv3312">>)). 
- -t_proc_tmpl(_) -> - Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, - Tks = emqx_plugin_libs_rule:preproc_tmpl(<<"a:${a},b:${b},c:${c},d:${d}">>), - ?assertEqual(<<"a:1,b:1,c:1.0,d:{\"d1\":\"hi\"}">>, - emqx_plugin_libs_rule:proc_tmpl(Tks, Selected)). - -t_proc_tmpl1(_) -> - Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, - Tks = emqx_plugin_libs_rule:preproc_tmpl(<<"a:$a,b:b},c:{c},d:${d">>), - ?assertEqual(<<"a:$a,b:b},c:{c},d:${d">>, - emqx_plugin_libs_rule:proc_tmpl(Tks, Selected)). - -t_proc_cmd(_) -> - Selected = #{v0 => <<"x">>, v1 => <<"1">>, v2 => #{d1 => <<"hi">>}}, - Tks = emqx_plugin_libs_rule:preproc_cmd(<<"hset name a:${v0} ${v1} b ${v2} ">>), - ?assertEqual([<<"hset">>, <<"name">>, - <<"a:x">>, <<"1">>, - <<"b">>, <<"{\"d1\":\"hi\"}">>], - emqx_plugin_libs_rule:proc_cmd(Tks, Selected)). - -t_preproc_sql(_) -> - Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, - {PrepareStatement, ParamsTokens} = emqx_plugin_libs_rule:preproc_sql(<<"a:${a},b:${b},c:${c},d:${d}">>, '?'), - ?assertEqual(<<"a:?,b:?,c:?,d:?">>, PrepareStatement), - ?assertEqual([<<"1">>,1,1.0,<<"{\"d1\":\"hi\"}">>], - emqx_plugin_libs_rule:proc_sql(ParamsTokens, Selected)). - -t_preproc_sql1(_) -> - Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, - {PrepareStatement, ParamsTokens} = emqx_plugin_libs_rule:preproc_sql(<<"a:${a},b:${b},c:${c},d:${d}">>, '$n'), - ?assertEqual(<<"a:$1,b:$2,c:$3,d:$4">>, PrepareStatement), - ?assertEqual([<<"1">>,1,1.0,<<"{\"d1\":\"hi\"}">>], - emqx_plugin_libs_rule:proc_sql(ParamsTokens, Selected)). -t_preproc_sql2(_) -> - Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, - {PrepareStatement, ParamsTokens} = emqx_plugin_libs_rule:preproc_sql(<<"a:$a,b:b},c:{c},d:${d">>, '?'), - ?assertEqual(<<"a:$a,b:b},c:{c},d:${d">>, PrepareStatement), - ?assertEqual([], emqx_plugin_libs_rule:proc_sql(ParamsTokens, Selected)). - -t_preproc_sql3(_) -> - Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, - ParamsTokens = emqx_plugin_libs_rule:preproc_tmpl(<<"a:${a},b:${b},c:${c},d:${d}">>), - ?assertEqual(<<"a:'1',b:1,c:1.0,d:'{\"d1\":\"hi\"}'">>, - emqx_plugin_libs_rule:proc_sql_param_str(ParamsTokens, Selected)). - -t_preproc_sql4(_) -> - %% with apostrophes - %% https://github.com/emqx/emqx/issues/4135 - Selected = #{a => <<"1''2">>, b => 1, c => 1.0, - d => #{d1 => <<"someone's phone">>}}, - ParamsTokens = emqx_plugin_libs_rule:preproc_tmpl(<<"a:${a},b:${b},c:${c},d:${d}">>), - ?assertEqual(<<"a:'1\\'\\'2',b:1,c:1.0,d:'{\"d1\":\"someone\\'s phone\"}'">>, - emqx_plugin_libs_rule:proc_sql_param_str(ParamsTokens, Selected)). - -t_preproc_sql5(_) -> - %% with apostrophes for cassandra - %% https://github.com/emqx/emqx/issues/4148 - Selected = #{a => <<"1''2">>, b => 1, c => 1.0, - d => #{d1 => <<"someone's phone">>}}, - ParamsTokens = emqx_plugin_libs_rule:preproc_tmpl(<<"a:${a},b:${b},c:${c},d:${d}">>), - ?assertEqual(<<"a:'1''''2',b:1,c:1.0,d:'{\"d1\":\"someone''s phone\"}'">>, - emqx_plugin_libs_rule:proc_cql_param_str(ParamsTokens, Selected)). 
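The deleted cases above moved to emqx_placeholder_SUITE unchanged; read together they pin down the one behavioural difference between the two quoting helpers: SQL-style quoting escapes apostrophes with a backslash, while CQL-style quoting doubles them. A small sketch distilled from those expectations:

```
Tokens = emqx_placeholder:preproc_tmpl(<<"a:${a}">>),
Data = #{a => <<"someone's phone">>},
%% SQL-style: ' becomes \' inside the quoted value
<<"a:'someone\\'s phone'">> = emqx_placeholder:proc_sql_param_str(Tokens, Data),
%% CQL-style (Cassandra): ' is doubled instead
<<"a:'someone''s phone'">> = emqx_placeholder:proc_cql_param_str(Tokens, Data).
```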
diff --git a/apps/emqx_prometheus/grafana_template/EMQ.json b/apps/emqx_prometheus/grafana_template/EMQ.json index 137e3a5a4..54d0a9d47 100644 --- a/apps/emqx_prometheus/grafana_template/EMQ.json +++ b/apps/emqx_prometheus/grafana_template/EMQ.json @@ -2099,4 +2099,4 @@ "title": "EMQ", "uid": "tjRlQw6Zk", "version": 29 -} \ No newline at end of file +} diff --git a/apps/emqx_prometheus/grafana_template/EMQ_Dashboard.json b/apps/emqx_prometheus/grafana_template/EMQ_Dashboard.json index 0b0e2036b..0ee3c6741 100644 --- a/apps/emqx_prometheus/grafana_template/EMQ_Dashboard.json +++ b/apps/emqx_prometheus/grafana_template/EMQ_Dashboard.json @@ -630,4 +630,4 @@ "title": "EMQ Dashboard", "uid": "5sreUw6Wz", "version": 11 -} \ No newline at end of file +} diff --git a/apps/emqx_prometheus/grafana_template/ErlangVM.json b/apps/emqx_prometheus/grafana_template/ErlangVM.json index 556d815b0..23088123c 100644 --- a/apps/emqx_prometheus/grafana_template/ErlangVM.json +++ b/apps/emqx_prometheus/grafana_template/ErlangVM.json @@ -1471,4 +1471,4 @@ "title": "ErlangVM", "uid": "stprQQ6Zk", "version": 13 -} \ No newline at end of file +} diff --git a/apps/emqx_resource/Makefile b/apps/emqx_resource/Makefile deleted file mode 100644 index 596b9b2a1..000000000 --- a/apps/emqx_resource/Makefile +++ /dev/null @@ -1,43 +0,0 @@ -REBAR := rebar3 - -.PHONY: all -all: es - -.PHONY: compile -compile: - $(REBAR) compile - -.PHONY: clean -clean: distclean - -.PHONY: distclean -distclean: - @rm -rf _build erl_crash.dump rebar3.crashdump - -.PHONY: xref -xref: - $(REBAR) xref - -.PHONY: eunit -eunit: compile - $(REBAR) eunit -v -c - $(REBAR) cover - -.PHONY: ct -ct: compile - $(REBAR) as test ct -v - -cover: - $(REBAR) cover - -.PHONY: dialyzer -dialyzer: - $(REBAR) dialyzer - -.PHONY: es -es: compile - $(REBAR) escriptize - -.PHONY: elvis -elvis: - ./scripts/elvis-check.sh diff --git a/apps/emqx_resource/README.md b/apps/emqx_resource/README.md index 9302c404d..04f3c2205 100644 --- a/apps/emqx_resource/README.md +++ b/apps/emqx_resource/README.md @@ -13,36 +13,6 @@ The main idea of the emqx resource is to put all the `general` code in a common the config operations (like config validation, config dump back to files), and the state management. And we put all the `specific` codes to the callback modules. -## Try it out - - $ ./demo.sh - Eshell V11.1.8 (abort with ^G) - 1> == the demo log tracer <<"log_tracer_clientid_shawn">> started. - config: #{<<"config">> => - #{<<"bulk">> => <<"10KB">>,<<"cache_log_dir">> => <<"/tmp">>, - <<"condition">> => #{<<"clientid">> => <<"abc">>}, - <<"level">> => <<"debug">>}, - <<"id">> => <<"log_tracer_clientid_shawn">>, - <<"resource_type">> => <<"log_tracer">>} - 1> emqx_resource_instance:health_check(<<"log_tracer_clientid_shawn">>). - == the demo log tracer <<"log_tracer_clientid_shawn">> is working well - state: #{health_checked => 1,logger_handler_id => abc} - ok - - 2> emqx_resource_instance:health_check(<<"log_tracer_clientid_shawn">>). - == the demo log tracer <<"log_tracer_clientid_shawn">> is working well - state: #{health_checked => 2,logger_handler_id => abc} - ok - - 3> emqx_resource_instance:query(<<"log_tracer_clientid_shawn">>, get_log). - == the demo log tracer <<"log_tracer_clientid_shawn">> received request: get_log - state: #{health_checked => 2,logger_handler_id => abc} - "this is a demo log messages..." - - 4> emqx_resource_instance:remove(<<"log_tracer_clientid_shawn">>). - == the demo log tracer <<"log_tracer_clientid_shawn">> stopped. 
- state: #{health_checked => 0,logger_handler_id => abc} - ok - - 5> emqx_resource_instance:query(<<"log_tracer_clientid_shawn">>, get_log). - ** exception error: {get_instance,{<<"log_tracer_clientid_shawn">>,not_found}} +See +* `test/emqx_test_resource.erl` for a minimal `emqx_resource` implementation; +* `test/emqx_resource_SUITE.erl` for examples of `emqx_resource` usage. diff --git a/apps/emqx_resource/demo.sh b/apps/emqx_resource/demo.sh deleted file mode 100755 index 19cbab809..000000000 --- a/apps/emqx_resource/demo.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -set -e - -rebar3 compile - -erl -sname abc -pa _build/default/lib/*/ebin _build/default/lib/emqx_resource/examples -s demo diff --git a/apps/emqx_resource/elvis.config b/apps/emqx_resource/elvis.config deleted file mode 100644 index 5a0ec61dd..000000000 --- a/apps/emqx_resource/elvis.config +++ /dev/null @@ -1,15 +0,0 @@ -%% -*- mode: erlang -*- -[{elvis, [{config, [ - -#{dirs => ["src"], - filter => "*.erl", - %ignore => [], - ruleset => erl_files, - rules => [{elvis_style, operator_spaces, #{ - rules => [{right, ","}, - {right, "|"}, - {left, "|"}, - {right, "||"}, - {left, "||"}]}}, - {elvis_style, god_modules, #{limit => 100}}]} -]}]}]. diff --git a/apps/emqx_resource/examples/demo.erl b/apps/emqx_resource/examples/demo.erl deleted file mode 100644 index 171a80b61..000000000 --- a/apps/emqx_resource/examples/demo.erl +++ /dev/null @@ -1,13 +0,0 @@ --module(demo). - --export([start/0]). - -start() -> - code:load_file(log_tracer), - code:load_file(log_tracer_schema), - {ok, _} = application:ensure_all_started(minirest), - {ok, _} = application:ensure_all_started(emqx_resource), - emqx_resource:load_instances("./_build/default/lib/emqx_resource/examples"), - Handlers = [{"/", minirest:handler(#{modules => [log_tracer]})}], - Dispatch = [{"/[...]", minirest, Handlers}], - minirest:start_http(?MODULE, #{socket_opts => [inet, {port, 9900}]}, Dispatch). diff --git a/apps/emqx_resource/examples/demo.md b/apps/emqx_resource/examples/demo.md deleted file mode 100644 index c5d3bb52c..000000000 --- a/apps/emqx_resource/examples/demo.md +++ /dev/null @@ -1,152 +0,0 @@ ---- -theme: gaia -color: #000 -colorSecondary: #333 -backgroundColor: #fff -backgroundImage: url('https://marp.app/assets/hero-background.jpg') -paginate: true -marp: true ---- - - - -# EMQ X Resource - ---- - -## What is it for - -The [emqx_resource](https://github.com/emqx/emqx/tree/master/apps/emqx_resource) is a behavior that manages configuration specs and runtime states for resources like mysql or redis backends. - -It is intended to be used by the emqx_bridges and all other resources that need CRUD operations to their configs, and need to initialize the states when creating. - ---- - - - -# The Demo - -The bridge for mysql - ---- -## The callback module 'emqx_mysql_connector' - -1. include the emqx_resource_behaviour.hrl: -``` --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). -``` ---- -2. provide the hocon schema for validating the configs: -``` -schema() -> - emqx_connector_schema_lib:relational_db_fields() ++ - emqx_connector_schema_lib:ssl_fields(). -... -``` - ---- -3. write the callback functions for starting or stopping the resource instance: - -``` -on_start/2, -on_stop/2, -on_query/4, -on_health_check/2 - -``` ---- -## Start the emqx_bridge - -``` -application:ensure_all_started(emqx_bridge). -``` - ---- - -## To use the mysql resource from code: - -``` -emqx_resource:query(ResourceID, {sql, SQL}). 
-``` - -``` -(emqx@127.0.0.1)2> emqx_resource:list_instances_verbose(). -[#{config => - #{<<"auto_reconnect">> => true,<<"cacertfile">> => [], - <<"certfile">> => [],<<"database">> => "mqtt", - <<"keyfile">> => [],<<"password">> => "public", - <<"pool_size">> => 1, - <<"server">> => {{127,0,0,1},3306}, - <<"ssl">> => false,<<"username">> => "root", - <<"verify">> => false}, - id => <<"bridge:mysql-def">>,mod => emqx_connector_mysql, - state => #{poolname => 'bridge:mysql-def'}, - status => started}] - -(emqx@127.0.0.1)3> emqx_resource:query(<<"bridge:mysql-def">>, {sql, <<"SELECT count(1)">>}). -{ok,[<<"count(1)">>],[[1]]} -``` - ---- - -## To get all available data bridges: - -``` -curl -q --basic -u admin:public -X GET "http://localhost:8081/api/v4/data_bridges/" | jq . -``` - ---- - -## Create - -To create a mysql data bridge: - -``` -BridgeMySQL='{ - "type": "mysql", - "status": "started", - "config": { - "verify": false, - "username": "root", - "ssl": false, - "server": "127.0.0.1:3306", - "pool_size": 1, - "password": "public", - "keyfile": "", - "database": "mqtt", - "certfile": "", - "cacertfile": "", - "auto_reconnect": true - } - }' - -curl -q --basic -u admin:public -X POST "http://localhost:8081/api/v4/data_bridges/mysql-aaaa" -d $BridgeMySQL | jq . -``` - ---- - -## Update - -To update an existing data bridge: - -``` -BridgeMySQL='{ - "type": "mysql", - "status": "started", - "config": { - "verify": false, - "username": "root", - "ssl": false, - "server": "127.0.0.1:3306", - "pool_size": 2, - "password": "public", - "keyfile": "", - "database": "mqtt", - "certfile": "", - "cacertfile": "", - "auto_reconnect": true - } - }' - -curl -q --basic -u admin:public -X PUT "http://localhost:8081/api/v4/data_bridges/mysql-aaaa" -d $BridgeMySQL | jq . -``` diff --git a/apps/emqx_resource/examples/log_tracer.conf b/apps/emqx_resource/examples/log_tracer.conf deleted file mode 100644 index 7b438ec1f..000000000 --- a/apps/emqx_resource/examples/log_tracer.conf +++ /dev/null @@ -1,11 +0,0 @@ -{ - "id": "log_tracer_clientid_shawn" - "resource_type": "log_tracer" - "config": { - "condition": {"app": "emqx"} - "level": "debug" - "cache_log_dir": "/tmp" - "bulk": "10KB" - "chars_limit": 1024 - } -} \ No newline at end of file diff --git a/apps/emqx_resource/examples/log_tracer.erl b/apps/emqx_resource/examples/log_tracer.erl deleted file mode 100644 index ed3a1a84c..000000000 --- a/apps/emqx_resource/examples/log_tracer.erl +++ /dev/null @@ -1,43 +0,0 @@ --module(log_tracer). - --include_lib("emqx_resource/include/emqx_resource_behaviour.hrl"). - -%% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - , on_api_reply_format/1 - , on_config_merge/3 - ]). - -%% callbacks for emqx_resource config schema --export([schema/0]). - -schema() -> - log_tracer_schema:schema(). - -on_start(InstId, Config) -> - io:format("== the demo log tracer ~p started.~nconfig: ~p~n", [InstId, Config]), - {ok, #{logger_handler_id => abc, health_checked => 0}}. - -on_stop(InstId, State) -> - io:format("== the demo log tracer ~p stopped.~nstate: ~p~n", [InstId, State]), - ok. - -on_query(InstId, Request, AfterQuery, State) -> - io:format("== the demo log tracer ~p received request: ~p~nstate: ~p~n", - [InstId, Request, State]), - emqx_resource:query_success(AfterQuery), - "this is a demo log messages...". 
- -on_health_check(InstId, State = #{health_checked := Checked}) -> - NState = State#{health_checked => Checked + 1}, - io:format("== the demo log tracer ~p is working well~nstate: ~p~n", [InstId, NState]), - {ok, NState}. - -on_api_reply_format(#{id := Id, status := Status, state := #{health_checked := NChecked}}) -> - #{id => Id, status => Status, checked_count => NChecked}. - -on_config_merge(OldConfig, NewConfig, _Params) -> - maps:merge(OldConfig, NewConfig). diff --git a/apps/emqx_resource/examples/log_tracer_schema.erl b/apps/emqx_resource/examples/log_tracer_schema.erl deleted file mode 100644 index a8fc55411..000000000 --- a/apps/emqx_resource/examples/log_tracer_schema.erl +++ /dev/null @@ -1,44 +0,0 @@ --module(log_tracer_schema). - --include_lib("typerefl/include/types.hrl"). - --export([schema/0]). - --reflect_type([t_level/0, t_cache_logs_in/0]). - --type t_level() :: debug | info | notice | warning | error | critical | alert | emergency. - --type t_cache_logs_in() :: memory | file. - -schema() -> - [ {condition, fun condition/1} - , {level, fun level/1} - , {enable_cache, fun enable_cache/1} - , {cache_logs_in, fun cache_logs_in/1} - , {cache_log_dir, fun cache_log_dir/1} - , {bulk, fun bulk/1} - ]. - -condition(mapping) -> "config.condition"; -condition(type) -> map(); -condition(_) -> undefined. - -level(mapping) -> "config.level"; -level(type) -> t_level(); -level(_) -> undefined. - -enable_cache(mapping) -> "config.enable_cache"; -enable_cache(type) -> boolean(); -enable_cache(_) -> undefined. - -cache_logs_in(mapping) -> "config.cache_logs_in"; -cache_logs_in(type) -> t_cache_logs_in(); -cache_logs_in(_) -> undefined. - -cache_log_dir(mapping) -> "config.cache_log_dir"; -cache_log_dir(type) -> typerefl:regexp_string("^(.*)$"); -cache_log_dir(_) -> undefined. - -bulk(mapping) -> "config.bulk"; -bulk(type) -> typerefl:regexp_string("^[. 0-9]+(B|KB|MB|GB)$"); -bulk(_) -> undefined. diff --git a/apps/emqx_resource/include/emqx_resource.hrl b/apps/emqx_resource/include/emqx_resource.hrl index 5e73e1cf4..e5eb1785f 100644 --- a/apps/emqx_resource/include/emqx_resource.hrl +++ b/apps/emqx_resource/include/emqx_resource.hrl @@ -21,14 +21,15 @@ -type resource_spec() :: map(). -type resource_state() :: term(). -type resource_data() :: #{ - id => instance_id(), - mod => module(), - config => resource_config(), - state => resource_state(), - status => started | stopped + id := instance_id(), + mod := module(), + config := resource_config(), + state := resource_state(), + status := started | stopped, + metrics := emqx_plugin_libs_metrics:metrics() }. - --type after_query() :: {OnSuccess :: after_query_fun(), OnFailed :: after_query_fun()} | +-type resource_group() :: binary(). +-type after_query() :: {[OnSuccess :: after_query_fun()], [OnFailed :: after_query_fun()]} | undefined. %% the `after_query_fun()` is mainly for callbacks that increment counters or do some fallback diff --git a/apps/emqx_resource/include/emqx_resource_behaviour.hrl b/apps/emqx_resource/include/emqx_resource_behaviour.hrl deleted file mode 100644 index bb4f18b55..000000000 --- a/apps/emqx_resource/include/emqx_resource_behaviour.hrl +++ /dev/null @@ -1,18 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. 
-%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- --include_lib("emqx_resource/include/emqx_resource.hrl"). --behaviour(emqx_resource). --compile({parse_transform, emqx_resource_transform}). diff --git a/apps/emqx_resource/scripts/elvis-check.sh b/apps/emqx_resource/scripts/elvis-check.sh deleted file mode 100755 index 3fae0f191..000000000 --- a/apps/emqx_resource/scripts/elvis-check.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -set -euo pipefail - -ELVIS_VERSION='1.0.0-emqx-2' - -elvis_version="${2:-$ELVIS_VERSION}" - -echo "elvis -v: $elvis_version" - -if [ ! -f ./elvis ] || [ "$(./elvis -v | grep -oE '[1-9]+\.[0-9]+\.[0-9]+\-emqx-[0-9]+')" != "$elvis_version" ]; then - curl -fLO "https://github.com/emqx/elvis/releases/download/$elvis_version/elvis" - chmod +x ./elvis -fi - -./elvis rock --config elvis.config - diff --git a/apps/emqx_resource/src/emqx_resource.erl b/apps/emqx_resource/src/emqx_resource.erl index 6d420e0d7..b062e83ae 100644 --- a/apps/emqx_resource/src/emqx_resource.erl +++ b/apps/emqx_resource/src/emqx_resource.erl @@ -21,15 +21,9 @@ %% APIs for resource types --export([ get_type/1 - , list_types/0 - , list_types_verbose/0 - ]). +-export([list_types/0]). --export([ discover_resource_mods/0 - , is_resource_mod/1 - , call_instance/2 - ]). +%% APIs for behaviour implementations -export([ query_success/1 , query_failed/1 @@ -42,7 +36,6 @@ , check_and_create_local/3 , check_and_recreate/4 , check_and_recreate_local/4 - , resource_type_from_str/1 ]). %% Sync resource instances and files @@ -79,22 +72,21 @@ , list_instances_verbose/0 %% list all the instances , get_instance/1 %% return the data of the instance , list_instances_by_type/1 %% return all the instances of the same resource type - % , dependents/1 - % , inc_counter/2 %% increment the counter of the instance - % , inc_counter/3 %% increment the counter by a given integer + , generate_id/1 + , generate_id/2 + , list_group_instances/1 ]). --define(HOCON_CHECK_OPTS, #{atom_key => true, nullable => false}). +-define(HOCON_CHECK_OPTS, #{atom_key => true, nullable => true}). + +-define(DEFAULT_RESOURCE_GROUP, <<"default">>). -optional_callbacks([ on_query/4 , on_health_check/2 , on_config_merge/3 , on_jsonify/1 - , on_api_reply_format/1 ]). --callback on_api_reply_format(resource_data()) -> jsx:json_term(). - -callback on_config_merge(resource_config(), resource_config(), term()) -> resource_config(). -callback on_jsonify(resource_config()) -> jsx:json_term(). @@ -113,49 +105,41 @@ -callback on_health_check(instance_id(), resource_state()) -> {ok, resource_state()} | {error, Reason:: term(), resource_state()}. -%% load specs and return the loaded resources this time. --spec list_types_verbose() -> [resource_spec()]. -list_types_verbose() -> - [get_spec(Mod) || Mod <- list_types()]. - -spec list_types() -> [module()]. list_types() -> discover_resource_mods(). --spec get_type(module()) -> {ok, resource_spec()} | {error, not_found}. -get_type(Mod) -> - case is_resource_mod(Mod) of - true -> {ok, get_spec(Mod)}; - false -> {error, not_found} - end. 
-
--spec get_spec(module()) -> resource_spec().
-get_spec(Mod) ->
-    maps:put(<<"resource_type">>, Mod, Mod:emqx_resource_schema()).
-
 -spec discover_resource_mods() -> [module()].
 discover_resource_mods() ->
     [Mod || {Mod, _} <- code:all_loaded(), is_resource_mod(Mod)].
 
 -spec is_resource_mod(module()) -> boolean().
-is_resource_mod(Mod) ->
-    erlang:function_exported(Mod, emqx_resource_schema, 0).
+is_resource_mod(Module) ->
+    Info = Module:module_info(attributes),
+    Behaviour = proplists:get_value(behavior, Info, []) ++
+                    proplists:get_value(behaviour, Info, []),
+    lists:member(?MODULE, Behaviour).
 
 -spec query_success(after_query()) -> ok.
 query_success(undefined) -> ok;
-query_success({{OnSucc, Args}, _}) ->
-    safe_apply(OnSucc, Args).
+query_success({OnSucc, _}) ->
+    apply_query_after_calls(OnSucc).
 
 -spec query_failed(after_query()) -> ok.
 query_failed(undefined) -> ok;
-query_failed({_, {OnFailed, Args}}) ->
-    safe_apply(OnFailed, Args).
+query_failed({_, OnFailed}) ->
+    apply_query_after_calls(OnFailed).
+
+apply_query_after_calls(Funcs) ->
+    lists:foreach(fun({Fun, Args}) ->
+            safe_apply(Fun, Args)
+        end, Funcs).
 
 %% =================================================================================
 %% APIs for resource instances
 %% =================================================================================
 -spec create(instance_id(), resource_type(), resource_config()) ->
-    {ok, resource_data() |'already_created'} | {error, Reason :: term()}.
+    {ok, resource_data() | 'already_created'} | {error, Reason :: term()}.
 create(InstId, ResourceType, Config) ->
     cluster_call(create_local, [InstId, ResourceType, Config]).
@@ -196,15 +180,17 @@ remove_local(InstId) ->
 %% =================================================================================
 -spec query(instance_id(), Request :: term()) -> Result :: term().
 query(InstId, Request) ->
-    query(InstId, Request, undefined).
+    query(InstId, Request, inc_metrics_funcs(InstId)).
 
 %% same to above, also defines what to do when the Module:on_query success or failed
-%% it is the duty of the Moudle to apply the `after_query()` functions.
+%% it is the duty of the Module to apply the `after_query()` functions.
 -spec query(instance_id(), Request :: term(), after_query()) -> Result :: term().
 query(InstId, Request, AfterQuery) ->
     case get_instance(InstId) of
-        {ok, #{mod := Mod, state := ResourceState}} ->
-            %% the resource state is readonly to Moudle:on_query/4
+        {ok, #{status := stopped}} ->
+            error({InstId, stopped});
+        {ok, #{mod := Mod, state := ResourceState, status := started}} ->
+            %% the resource state is readonly to Module:on_query/4
             %% and the `after_query()` functions should be thread safe
             Mod:on_query(InstId, Request, AfterQuery, ResourceState);
         {error, Reason} ->
@@ -235,9 +221,29 @@ list_instances() ->
 list_instances_verbose() ->
     emqx_resource_instance:list_all().
 
--spec list_instances_by_type(module()) -> [resource_data()].
+-spec list_instances_by_type(module()) -> [instance_id()].
 list_instances_by_type(ResourceType) ->
-    emqx_resource_instance:lookup_by_type(ResourceType).
+    filter_instances(fun(_, RT) when RT =:= ResourceType -> true;
+                        (_, _) -> false
+                     end).
+
+-spec generate_id(term()) -> instance_id().
+generate_id(Name) when is_binary(Name) ->
+    generate_id(?DEFAULT_RESOURCE_GROUP, Name).
+
+-spec generate_id(resource_group(), binary()) -> instance_id().
+generate_id(Group, Name) when is_binary(Group) and is_binary(Name) ->
+    Id = integer_to_binary(erlang:unique_integer([positive])),
+    <<Group/binary, "/", Name/binary, ":", Id/binary>>.
+ +-spec list_group_instances(resource_group()) -> [instance_id()]. +list_group_instances(Group) -> + filter_instances(fun(Id, _) -> + case binary:split(Id, <<"/">>) of + [Group | _] -> true; + _ -> false + end + end). -spec call_start(instance_id(), module(), resource_config()) -> {ok, resource_state()} | {error, Reason :: term()}. @@ -286,7 +292,7 @@ check_config(ResourceType, RawConfigTerm) -> end. -spec check_and_create(instance_id(), resource_type(), raw_resource_config()) -> - {ok, resource_data() |'already_created'} | {error, term()}. + {ok, resource_data() | 'already_created'} | {error, term()}. check_and_create(InstId, ResourceType, RawConfig) -> check_and_do(ResourceType, RawConfig, fun(InstConf) -> create(InstId, ResourceType, InstConf) end). @@ -317,16 +323,15 @@ check_and_do(ResourceType, RawConfig, Do) when is_function(Do) -> %% ================================================================================= --spec resource_type_from_str(string()) -> {ok, resource_type()} | {error, term()}. -resource_type_from_str(ResourceType) -> - try Mod = list_to_existing_atom(str(ResourceType)), - case emqx_resource:is_resource_mod(Mod) of - true -> {ok, Mod}; - false -> {error, {invalid_resource, Mod}} - end - catch error:badarg -> - {error, {resource_not_found, ResourceType}} - end. +filter_instances(Filter) -> + [Id || #{id := Id, mod := Mod} <- list_instances_verbose(), Filter(Id, Mod)]. + +inc_metrics_funcs(InstId) -> + OnFailed = [{fun emqx_plugin_libs_metrics:inc_failed/2, [resource_metrics, InstId]}], + OnSucc = [ {fun emqx_plugin_libs_metrics:inc_matched/2, [resource_metrics, InstId]} + , {fun emqx_plugin_libs_metrics:inc_success/2, [resource_metrics, InstId]} + ], + {OnSucc, OnFailed}. call_instance(InstId, Query) -> emqx_resource_instance:hash_call(InstId, Query). @@ -334,9 +339,6 @@ call_instance(InstId, Query) -> safe_apply(Func, Args) -> ?SAFE_CALL(erlang:apply(Func, Args)). -str(S) when is_binary(S) -> binary_to_list(S); -str(S) when is_list(S) -> S. - cluster_call(Func, Args) -> case emqx_cluster_rpc:multicall(?MODULE, Func, Args) of {ok, _TxnId, Result} -> Result; diff --git a/apps/emqx_resource/src/emqx_resource_api.erl b/apps/emqx_resource/src/emqx_resource_api.erl index fe1ca4509..8a5d0059f 100644 --- a/apps/emqx_resource/src/emqx_resource_api.erl +++ b/apps/emqx_resource/src/emqx_resource_api.erl @@ -15,19 +15,9 @@ %%-------------------------------------------------------------------- -module(emqx_resource_api). --export([ list_instances/1 - , format_data/1 - , stringnify/1 - ]). +-export([stringify/1]). -list_instances(Filter) -> - [format_data(Data) || Data <- emqx_resource:list_instances_verbose(), Filter(Data)]. - -format_data(#{id := Id, mod := Mod, status := Status, config := Config}) -> - #{id => Id, status => Status, resource_type => Mod, - config => emqx_resource:call_jsonify(Mod, Config)}. - -stringnify(Bin) when is_binary(Bin) -> Bin; -stringnify(Str) when is_list(Str) -> list_to_binary(Str); -stringnify(Reason) -> +stringify(Bin) when is_binary(Bin) -> Bin; +stringify(Str) when is_list(Str) -> list_to_binary(Str); +stringify(Reason) -> iolist_to_binary(io_lib:format("~p", [Reason])). 
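For orientation, a sketch of how the new id helpers are meant to be used together. The ids shown are illustrative only (the numeric suffix comes from erlang:unique_integer/1, so real values differ), the Group/Name:Counter layout follows the reconstruction of generate_id/2 and the split on "/" in list_group_instances/1, and the match on a single-element list assumes no other instances exist in that group:

```
%% Sketch only; ids are illustrative.
DefaultId = emqx_resource:generate_id(<<"a">>),               %% e.g. <<"default/a:108">>
GroupedId = emqx_resource:generate_id(<<"group">>, <<"a">>),  %% e.g. <<"group/a:109">>
{ok, _} = emqx_resource:create_local(GroupedId, emqx_test_resource, #{name => grouped_a}),
%% lists every created instance whose id falls under the <<"group">> prefix
[GroupedId] = emqx_resource:list_group_instances(<<"group">>).
```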
diff --git a/apps/emqx_resource/src/emqx_resource_instance.erl b/apps/emqx_resource/src/emqx_resource_instance.erl index dcd5255b5..eaf6db0b2 100644 --- a/apps/emqx_resource/src/emqx_resource_instance.erl +++ b/apps/emqx_resource/src/emqx_resource_instance.erl @@ -24,8 +24,8 @@ %% load resource instances from *.conf files -export([ lookup/1 + , get_metrics/1 , list_all/0 - , lookup_by_type/1 , create_local/3 ]). @@ -66,21 +66,25 @@ hash_call(InstId, Request, Timeout) -> lookup(InstId) -> case ets:lookup(emqx_resource_instance, InstId) of [] -> {error, not_found}; - [{_, Data}] -> {ok, Data#{id => InstId}} + [{_, Data}] -> + {ok, Data#{id => InstId, metrics => get_metrics(InstId)}} end. +get_metrics(InstId) -> + emqx_plugin_libs_metrics:get_metrics(resource_metrics, InstId). + force_lookup(InstId) -> {ok, Data} = lookup(InstId), Data. -spec list_all() -> [resource_data()]. list_all() -> - [Data#{id => Id} || {Id, Data} <- ets:tab2list(emqx_resource_instance)]. + try + [Data#{id => Id} || {Id, Data} <- ets:tab2list(emqx_resource_instance)] + catch + error:badarg -> [] + end. --spec lookup_by_type(module()) -> [resource_data()]. -lookup_by_type(ResourceType) -> - [Data || #{mod := Mod} = Data <- list_all() - , Mod =:= ResourceType]. -spec create_local(instance_id(), resource_type(), resource_config()) -> {ok, resource_data()} | {error, term()}. @@ -141,7 +145,12 @@ code_change(_OldVsn, State, _Extra) -> %%------------------------------------------------------------------------------ %% suppress the race condition check, as these functions are protected in gproc workers --dialyzer({nowarn_function, [do_recreate/4, do_create/3, do_restart/1, do_stop/1, do_health_check/1]}). +-dialyzer({nowarn_function, [do_recreate/4, + do_create/3, + do_restart/1, + do_stop/1, + do_health_check/1]}). + do_recreate(InstId, ResourceType, NewConfig, Params) -> case lookup(InstId) of {ok, #{mod := ResourceType, state := ResourceState, config := OldConfig}} -> @@ -170,9 +179,11 @@ do_create(InstId, ResourceType, Config) -> #{mod => ResourceType, config => Config, state => ResourceState, status => stopped}}), _ = do_health_check(InstId), + ok = emqx_plugin_libs_metrics:create_metrics(resource_metrics, InstId), {ok, force_lookup(InstId)}; {error, Reason} -> - logger:error("start ~ts resource ~ts failed: ~p", [ResourceType, InstId, Reason]), + logger:error("start ~ts resource ~ts failed: ~p", + [ResourceType, InstId, Reason]), {error, Reason} end end. @@ -202,6 +213,7 @@ do_remove(InstId) -> do_remove(Mod, InstId, ResourceState) -> _ = emqx_resource:call_stop(InstId, Mod, ResourceState), ets:delete(emqx_resource_instance, InstId), + ok = emqx_plugin_libs_metrics:clear_metrics(resource_metrics, InstId), ok. 
do_restart(InstId) -> @@ -209,9 +221,9 @@ do_restart(InstId) -> {ok, #{mod := Mod, state := ResourceState, config := Config} = Data} -> _ = emqx_resource:call_stop(InstId, Mod, ResourceState), case emqx_resource:call_start(InstId, Mod, Config) of - {ok, ResourceState} -> + {ok, NewResourceState} -> ets:insert(emqx_resource_instance, - {InstId, Data#{state => ResourceState, status => started}}), + {InstId, Data#{state => NewResourceState, status => started}}), ok; {error, Reason} -> ets:insert(emqx_resource_instance, {InstId, Data#{status => stopped}}), diff --git a/apps/emqx_resource/src/emqx_resource_sup.erl b/apps/emqx_resource/src/emqx_resource_sup.erl index 22984b940..534777b69 100644 --- a/apps/emqx_resource/src/emqx_resource_sup.erl +++ b/apps/emqx_resource/src/emqx_resource_sup.erl @@ -32,17 +32,20 @@ init([]) -> _ = ets:new(emqx_resource_instance, TabOpts), SupFlags = #{strategy => one_for_one, intensity => 10, period => 10}, + Metrics = emqx_plugin_libs_metrics:child_spec(resource_metrics), + Pool = ?RESOURCE_INST_MOD, Mod = ?RESOURCE_INST_MOD, ensure_pool(Pool, hash, [{size, ?POOL_SIZE}]), - {ok, {SupFlags, [ + ResourceInsts = [ begin ensure_pool_worker(Pool, {Pool, Idx}, Idx), #{id => {Mod, Idx}, start => {Mod, start_link, [Pool, Idx]}, restart => transient, shutdown => 5000, type => worker, modules => [Mod]} - end || Idx <- lists:seq(1, ?POOL_SIZE)]}}. + end || Idx <- lists:seq(1, ?POOL_SIZE)], + {ok, {SupFlags, [Metrics | ResourceInsts]}}. %% internal functions ensure_pool(Pool, Type, Opts) -> @@ -55,4 +58,4 @@ ensure_pool_worker(Pool, Name, Slot) -> try gproc_pool:add_worker(Pool, Name, Slot) catch error:exists -> ok - end. \ No newline at end of file + end. diff --git a/apps/emqx_resource/src/emqx_resource_transform.erl b/apps/emqx_resource/src/emqx_resource_transform.erl deleted file mode 100644 index ac632cd45..000000000 --- a/apps/emqx_resource/src/emqx_resource_transform.erl +++ /dev/null @@ -1,70 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- --module(emqx_resource_transform). - --include_lib("syntax_tools/include/merl.hrl"). - --export([parse_transform/2]). - -parse_transform(Forms, _Opts) -> - Mod = hd([M || {attribute, _, module, M} <- Forms]), - AST = trans(Mod, proplists:delete(eof, Forms)), - _ = debug_print(Mod, AST), - AST. - --ifdef(RESOURCE_DEBUG). - -debug_print(Mod, Ts) -> - {ok, Io} = file:open("./" ++ atom_to_list(Mod) ++ ".trans.erl", [write]), - _ = do_debug_print(Io, Ts), - file:close(Io). - -do_debug_print(Io, Ts) when is_list(Ts) -> - lists:foreach(fun(T) -> do_debug_print(Io, T) end, Ts); -do_debug_print(Io, T) -> - io:put_chars(Io, erl_prettypr:format(merl:tree(T))), - io:nl(Io). --else. -debug_print(_Mod, _AST) -> - ok. --endif. 
- -trans(Mod, Forms) -> - forms(Mod, Forms) ++ [erl_syntax:revert(erl_syntax:eof_marker())]. - -forms(Mod, [F0 | Fs0]) -> - case form(Mod, F0) of - {CurrForms, AppendedForms} -> - CurrForms ++ forms(Mod, Fs0) ++ AppendedForms; - {CurrForms, FollowerForms, AppendedForms} -> - CurrForms ++ FollowerForms ++ forms(Mod, Fs0) ++ AppendedForms - end; -forms(_, []) -> []. - -form(Mod, Form) -> - case Form of - ?Q("-module('@_').") -> - {[Form], fix_spec_attrs(), fix_spec_funcs(Mod)}; - _ -> - {[Form], [], []} - end. - -fix_spec_attrs() -> - [ ?Q("-export([emqx_resource_schema/0]).") - ]. -fix_spec_funcs(_Mod) -> - [ ?Q("emqx_resource_schema() -> <<\"demo_swagger_schema\">>.") - ]. diff --git a/apps/emqx_resource/test/emqx_resource_SUITE.erl b/apps/emqx_resource/test/emqx_resource_SUITE.erl new file mode 100644 index 000000000..4f641c85a --- /dev/null +++ b/apps/emqx_resource/test/emqx_resource_SUITE.erl @@ -0,0 +1,181 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_resource_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include("emqx_authn.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). + +-define(TEST_RESOURCE, emqx_test_resource). +-define(ID, <<"id">>). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +groups() -> + []. + +init_per_testcase(_, Config) -> + Config. + +init_per_suite(Config) -> + code:ensure_loaded(?TEST_RESOURCE), + ok = emqx_common_test_helpers:start_apps([]), + {ok, _} = application:ensure_all_started(emqx_resource), + Config. + +end_per_suite(_Config) -> + ok = emqx_common_test_helpers:stop_apps([emqx_resource]). + +%%------------------------------------------------------------------------------ +%% Tests +%%------------------------------------------------------------------------------ + +t_list_types(_) -> + ?assert(lists:member(?TEST_RESOURCE, emqx_resource:list_types())). + +t_check_config(_) -> + {ok, #{}} = emqx_resource:check_config(?TEST_RESOURCE, bin_config()), + {ok, #{}} = emqx_resource:check_config(?TEST_RESOURCE, config()), + + {error, _} = emqx_resource:check_config(?TEST_RESOURCE, <<"not a config">>), + {error, _} = emqx_resource:check_config(?TEST_RESOURCE, #{invalid => config}). + +t_create_remove(_) -> + {error, _} = emqx_resource:check_and_create_local( + ?ID, + ?TEST_RESOURCE, + #{unknown => <<"test_resource">>}), + + {ok, _} = emqx_resource:create_local( + ?ID, + ?TEST_RESOURCE, + #{name => <<"test_resource">>}), + + #{pid := Pid} = emqx_resource:query(?ID, get_state), + + ?assert(is_process_alive(Pid)), + + ok = emqx_resource:remove_local(?ID), + {error, _} = emqx_resource:remove_local(?ID), + + ?assertNot(is_process_alive(Pid)). 
+ +t_query(_) -> + {ok, _} = emqx_resource:create_local( + ?ID, + ?TEST_RESOURCE, + #{name => <<"test_resource">>}), + + Pid = self(), + Success = fun() -> Pid ! success end, + Failure = fun() -> Pid ! failure end, + + #{pid := _} = emqx_resource:query(?ID, get_state), + #{pid := _} = emqx_resource:query(?ID, get_state, {[{Success, []}], [{Failure, []}]}), + + receive + Message -> ?assertEqual(success, Message) + after 100 -> + ?assert(false) + end, + + ?assertException( + error, + {get_instance, _Reason}, + emqx_resource:query(<<"unknown">>, get_state)), + + ok = emqx_resource:remove_local(?ID). + +t_healthy(_) -> + {ok, _} = emqx_resource:create_local( + ?ID, + ?TEST_RESOURCE, + #{name => <<"test_resource">>}), + + #{pid := Pid} = emqx_resource:query(?ID, get_state), + + ok = emqx_resource:health_check(?ID), + + [#{status := started}] = emqx_resource:list_instances_verbose(), + + erlang:exit(Pid, shutdown), + + {error, dead} = emqx_resource:health_check(?ID), + + [#{status := stopped}] = emqx_resource:list_instances_verbose(), + + ok = emqx_resource:remove_local(?ID). + +t_stop_start(_) -> + {error, _} = emqx_resource:check_and_create_local( + ?ID, + ?TEST_RESOURCE, + #{unknown => <<"test_resource">>}), + + {ok, _} = emqx_resource:create_local( + ?ID, + ?TEST_RESOURCE, + #{name => <<"test_resource">>}), + + #{pid := Pid0} = emqx_resource:query(?ID, get_state), + + ?assert(is_process_alive(Pid0)), + + ok = emqx_resource:stop(?ID), + + ?assertNot(is_process_alive(Pid0)), + + ?assertException( + error, + {?ID, stopped}, + emqx_resource:query(?ID, get_state)), + + ok = emqx_resource:restart(?ID), + + #{pid := Pid1} = emqx_resource:query(?ID, get_state), + + ?assert(is_process_alive(Pid1)). + +t_list_filter(_) -> + {ok, _} = emqx_resource:create_local( + emqx_resource:generate_id(<<"a">>), + ?TEST_RESOURCE, + #{name => a}), + {ok, _} = emqx_resource:create_local( + emqx_resource:generate_id(<<"group">>, <<"a">>), + ?TEST_RESOURCE, + #{name => grouped_a}), + + [Id1] = emqx_resource:list_group_instances(<<"default">>), + {ok, #{config := #{name := a}}} = emqx_resource:get_instance(Id1), + + [Id2] = emqx_resource:list_group_instances(<<"group">>), + {ok, #{config := #{name := grouped_a}}} = emqx_resource:get_instance(Id2). + +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ + +bin_config() -> + <<"\"name\": \"test_resource\"">>. + +config() -> + {ok, Config} = hocon:binary(bin_config()), + Config. diff --git a/apps/emqx_resource/test/emqx_test_resource.erl b/apps/emqx_resource/test/emqx_test_resource.erl new file mode 100644 index 000000000..a35206aca --- /dev/null +++ b/apps/emqx_resource/test/emqx_test_resource.erl @@ -0,0 +1,69 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. 
+%%-------------------------------------------------------------------- + +-module(emqx_test_resource). + +-include_lib("typerefl/include/types.hrl"). + +-behaviour(emqx_resource). + +%% callbacks of behaviour emqx_resource +-export([ on_start/2 + , on_stop/2 + , on_query/4 + , on_health_check/2 + , on_config_merge/3 + ]). + +%% callbacks for emqx_resource config schema +-export([roots/0]). + +roots() -> [{"name", fun name/1}]. + +name(type) -> binary(); +name(nullable) -> false; +name(_) -> undefined. + +on_start(InstId, #{name := Name}) -> + {ok, #{name => Name, + id => InstId, + pid => spawn_dummy_process()}}. + +on_stop(_InstId, #{pid := Pid}) -> + erlang:exit(Pid, shutdown), + ok. + +on_query(_InstId, get_state, AfterQuery, State) -> + emqx_resource:query_success(AfterQuery), + State. + +on_health_check(_InstId, State = #{pid := Pid}) -> + case is_process_alive(Pid) of + true -> {ok, State}; + false -> {error, dead, State} + end. + +on_config_merge(OldConfig, NewConfig, _Params) -> + maps:merge(OldConfig, NewConfig). + +spawn_dummy_process() -> + spawn( + fun() -> + Ref = make_ref(), + receive + Ref -> ok + end + end). diff --git a/apps/emqx_retainer/.gitignore b/apps/emqx_retainer/.gitignore index d51b4e87f..a0c149280 100644 --- a/apps/emqx_retainer/.gitignore +++ b/apps/emqx_retainer/.gitignore @@ -23,4 +23,4 @@ logs/ rebar.lock test/ct.cover.spec etc/emqx_retainer.conf.rendered -.rebar3/ \ No newline at end of file +.rebar3/ diff --git a/apps/emqx_retainer/src/emqx_retainer_api.erl b/apps/emqx_retainer/src/emqx_retainer_api.erl index 7315d5a63..61085d9a9 100644 --- a/apps/emqx_retainer/src/emqx_retainer_api.erl +++ b/apps/emqx_retainer/src/emqx_retainer_api.erl @@ -88,7 +88,7 @@ with_topic_api() -> description => <<"delete matching messages">>, parameters => parameters(), responses => #{ - <<"200">> => schema(<<"Successed">>), + <<"204">> => schema(<<"Successed">>), <<"405">> => schema(<<"NotAllowed">>) } } @@ -147,11 +147,11 @@ with_topic(get, #{bindings := Bindings} = Params) -> with_topic(delete, #{bindings := Bindings}) -> Topic = maps:get(topic, Bindings), emqx_retainer_mnesia:delete_message(undefined, Topic), - {200}. + {204}. -spec lookup(undefined | binary(), map(), - fun((#message{}) -> map())) -> + fun((emqx_types:message()) -> map())) -> {200, map()}. lookup(Topic, #{query_string := Qs}, Formatter) -> Page = maps:get(page, Qs, 1), @@ -166,11 +166,13 @@ format_message(Messages, Formatter) when is_list(Messages)-> format_message(Message, Formatter) -> Formatter(Message). -format_message(#message{id = ID, qos = Qos, topic = Topic, from = From, timestamp = Timestamp, headers = Headers}) -> +format_message(#message{ id = ID, qos = Qos, topic = Topic, from = From + , timestamp = Timestamp, headers = Headers}) -> #{msgid => emqx_guid:to_hexstr(ID), qos => Qos, topic => Topic, - publish_at => list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, millisecond}])), + publish_at => list_to_binary(calendar:system_time_to_rfc3339( + Timestamp, [{unit, millisecond}])), from_clientid => to_bin_string(From), from_username => maps:get(username, Headers, <<>>) }. 
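%% A minimal usage sketch (not part of the changeset above; the test name is
%% illustrative and written in the style of the existing CT suites, which
%% include eunit.hrl): the retainer delete handler now answers {204} instead
%% of {200}, so a caller can assert the new status tuple directly.
%% Assumes the retainer application and its mnesia storage backend are running.
t_delete_retained_returns_204(_) ->
    ?assertMatch({204},
                 emqx_retainer_api:with_topic(delete,
                     #{bindings => #{topic => <<"retained/t/1">>}})).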
diff --git a/apps/emqx_retainer/src/emqx_retainer_mnesia.erl b/apps/emqx_retainer/src/emqx_retainer_mnesia.erl index 6b5a9ac14..e5e347fdc 100644 --- a/apps/emqx_retainer/src/emqx_retainer_mnesia.erl +++ b/apps/emqx_retainer/src/emqx_retainer_mnesia.erl @@ -48,12 +48,16 @@ create_resource(#{storage_type := StorageType}) -> disc -> disc_copies; disc_only -> disc_only_copies end, + TableType = case StorageType of + disc_only -> set; + _ -> ordered_set + end, StoreProps = [{ets, [compressed, {read_concurrency, true}, {write_concurrency, true}]}, {dets, [{auto_save, 1000}]}], ok = mria:create_table(?TAB, [ - {type, set}, + {type, TableType}, {rlog_shard, ?RETAINER_SHARD}, {storage, Copies}, {record_name, retained}, diff --git a/apps/emqx_retainer/src/emqx_retainer_pool.erl b/apps/emqx_retainer/src/emqx_retainer_pool.erl index 59ea1077a..6b48c0453 100644 --- a/apps/emqx_retainer/src/emqx_retainer_pool.erl +++ b/apps/emqx_retainer/src/emqx_retainer_pool.erl @@ -172,7 +172,7 @@ cast(Msg) -> %% @private worker() -> - gproc_pool:pick_worker(?POOL). + gproc_pool:pick_worker(?POOL, self()). run({M, F, A}) -> erlang:apply(M, F, A); diff --git a/apps/emqx_retainer/src/emqx_retainer_sup.erl b/apps/emqx_retainer/src/emqx_retainer_sup.erl index 3811ed8f2..0234c20e7 100644 --- a/apps/emqx_retainer/src/emqx_retainer_sup.erl +++ b/apps/emqx_retainer/src/emqx_retainer_sup.erl @@ -26,7 +26,7 @@ start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). init([]) -> - PoolSpec = emqx_pool_sup:spec([emqx_retainer_pool, random, emqx_vm:schedulers(), + PoolSpec = emqx_pool_sup:spec([emqx_retainer_pool, hash, emqx_vm:schedulers(), {emqx_retainer_pool, start_link, []}]), {ok, {{one_for_one, 10, 3600}, [#{id => retainer, diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.erl b/apps/emqx_rule_engine/src/emqx_rule_engine.erl index 42f652d59..974c6b8a4 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.erl @@ -25,7 +25,7 @@ -export([start_link/0]). --export([ post_config_update/4 +-export([ post_config_update/5 , config_key_path/0 ]). @@ -81,7 +81,7 @@ start_link() -> %%------------------------------------------------------------------------------ %% The config handler for emqx_rule_engine %%------------------------------------------------------------------------------ -post_config_update(_Req, NewRules, OldRules, _AppEnvs) -> +post_config_update(_, _Req, NewRules, OldRules, _AppEnvs) -> #{added := Added, removed := Removed, changed := Updated} = emqx_map_lib:diff_maps(NewRules, OldRules), maps_foreach(fun({Id, {_Old, New}}) -> @@ -163,10 +163,10 @@ load_hooks_for_rule(#{from := Topics}) -> lists:foreach(fun emqx_rule_events:load/1, Topics). add_metrics_for_rule(#{id := Id}) -> - ok = emqx_rule_metrics:create_rule_metrics(Id). + ok = emqx_plugin_libs_metrics:create_metrics(rule_metrics, Id). clear_metrics_for_rule(#{id := Id}) -> - ok = emqx_rule_metrics:clear_rule_metrics(Id). + ok = emqx_plugin_libs_metrics:clear_metrics(rule_metrics, Id). 
unload_hooks_for_rule(#{id := Id, from := Topics}) -> lists:foreach(fun(Topic) -> diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl index b9a3b16f7..9e341b388 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl @@ -62,7 +62,7 @@ api_rules_list_create() -> emqx_mgmt_util:array_schema(resp_schema(), <<"List rules successfully">>)}}, post => #{ description => <<"Create a new rule using given Id to all nodes in the cluster">>, - requestBody => emqx_mgmt_util:schema(post_req_schema(), <<"Rule parameters">>), + 'requestBody' => emqx_mgmt_util:schema(post_req_schema(), <<"Rule parameters">>), responses => #{ <<"400">> => emqx_mgmt_util:error_schema(<<"Invalid Parameters">>, ['BAD_ARGS']), @@ -94,17 +94,18 @@ api_rules_crud() -> put => #{ description => <<"Create or update a rule by given Id to all nodes in the cluster">>, parameters => [param_path_id()], - requestBody => emqx_mgmt_util:schema(put_req_schema(), <<"Rule parameters">>), + 'requestBody' => emqx_mgmt_util:schema(put_req_schema(), <<"Rule parameters">>), responses => #{ <<"400">> => emqx_mgmt_util:error_schema(<<"Invalid Parameters">>, ['BAD_ARGS']), <<"200">> => - emqx_mgmt_util:schema(resp_schema(), <<"Create or update rule successfully">>)}}, + emqx_mgmt_util:schema(resp_schema(), + <<"Create or update rule successfully">>)}}, delete => #{ description => <<"Delete a rule by given Id from all nodes in the cluster">>, parameters => [param_path_id()], responses => #{ - <<"200">> => + <<"204">> => emqx_mgmt_util:schema(<<"Delete rule successfully">>)}} }, {"/rules/:id", Metadata, crud_rules_by_id}. @@ -113,7 +114,7 @@ api_rule_test() -> Metadata = #{ post => #{ description => <<"Test a rule">>, - requestBody => emqx_mgmt_util:schema(rule_test_req_schema(), <<"Rule parameters">>), + 'requestBody' => emqx_mgmt_util:schema(rule_test_req_schema(), <<"Rule parameters">>), responses => #{ <<"400">> => emqx_mgmt_util:error_schema(<<"Invalid Parameters">>, ['BAD_ARGS']), @@ -141,7 +142,7 @@ put_req_schema() -> description => <<"The outputs of the rule">>, type => array, items => #{ - oneOf => [ + 'oneOf' => [ #{ type => string, example => <<"channel_id_of_my_bridge">>, @@ -253,7 +254,7 @@ crud_rules(post, #{body := #{<<"id">> := Id} = Params}) -> not_found -> case emqx:update_config(ConfPath, maps:remove(<<"id">>, Params), #{}) of {ok, #{post_config_update := #{emqx_rule_engine := AllRules}}} -> - [Rule] = [R || R = #{id := Id0} <- AllRules, Id0 == Id], + [Rule] = get_one_rule(AllRules, Id), {201, format_rule_resp(Rule)}; {error, Reason} -> ?SLOG(error, #{msg => "create_rule_failed", @@ -280,7 +281,7 @@ crud_rules_by_id(put, #{bindings := #{id := Id}, body := Params}) -> ConfPath = emqx_rule_engine:config_key_path() ++ [Id], case emqx:update_config(ConfPath, maps:remove(<<"id">>, Params), #{}) of {ok, #{post_config_update := #{emqx_rule_engine := AllRules}}} -> - [Rule] = [R || R = #{id := Id0} <- AllRules, Id0 == Id], + [Rule] = get_one_rule(AllRules, Id), {200, format_rule_resp(Rule)}; {error, Reason} -> ?SLOG(error, #{msg => "update_rule_failed", @@ -291,7 +292,7 @@ crud_rules_by_id(put, #{bindings := #{id := Id}, body := Params}) -> crud_rules_by_id(delete, #{bindings := #{id := Id}}) -> ConfPath = emqx_rule_engine:config_key_path() ++ [Id], case emqx:remove_config(ConfPath, #{}) of - {ok, _} -> {200}; + {ok, _} -> {204}; {error, Reason} -> ?SLOG(error, #{msg => "delete_rule_failed", id => Id, reason => 
Reason}), @@ -337,5 +338,20 @@ do_format_output(BridgeChannelId) when is_binary(BridgeChannelId) -> BridgeChannelId. get_rule_metrics(Id) -> - [maps:put(node, Node, rpc:call(Node, emqx_rule_metrics, get_rule_metrics, [Id])) + Format = fun (Node, #{matched := Matched, + speed := Current, + speed_max := Max, + speed_last5m := Last5M + }) -> + #{ matched => Matched + , speed => Current + , speed_max => Max + , speed_last5m => Last5M + , node => Node + } + end, + [Format(Node, rpc:call(Node, emqx_plugin_libs_metrics, get_metrics, [rule_metrics, Id])) || Node <- mria_mnesia:running_nodes()]. + +get_one_rule(AllRules, Id) -> + [R || R = #{id := Id0} <- AllRules, Id0 == Id]. diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_sup.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_sup.erl index 7fd44df82..356062c1f 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_sup.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_sup.erl @@ -34,10 +34,5 @@ init([]) -> shutdown => 5000, type => worker, modules => [emqx_rule_engine]}, - Metrics = #{id => emqx_rule_metrics, - start => {emqx_rule_metrics, start_link, []}, - restart => permanent, - shutdown => 5000, - type => worker, - modules => [emqx_rule_metrics]}, + Metrics = emqx_plugin_libs_metrics:child_spec(rule_metrics), {ok, {{one_for_one, 10, 10}, [Registry, Metrics]}}. diff --git a/apps/emqx_rule_engine/src/emqx_rule_events.erl b/apps/emqx_rule_engine/src/emqx_rule_events.erl index 614dc841b..0aff9f018 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_events.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_events.erl @@ -68,7 +68,7 @@ reload() -> ok = emqx_rule_engine:load_hooks_for_rule(Rule) end, emqx_rule_engine:get_rules()). -load(<<"$bridges/", _ChannelId/binary>> = BridgeTopic) -> +load(<<"$bridges/", _BridgeId/binary>> = BridgeTopic) -> emqx_hooks:put(BridgeTopic, {?MODULE, on_bridge_message_received, [#{bridge_topic => BridgeTopic}]}); load(Topic) -> @@ -114,11 +114,15 @@ on_client_disconnected(ClientInfo, Reason, ConnInfo, Env) -> on_session_subscribed(ClientInfo, Topic, SubOpts, Env) -> apply_event('session.subscribed', - fun() -> eventmsg_sub_or_unsub('session.subscribed', ClientInfo, Topic, SubOpts) end, Env). + fun() -> + eventmsg_sub_or_unsub('session.subscribed', ClientInfo, Topic, SubOpts) + end, Env). on_session_unsubscribed(ClientInfo, Topic, SubOpts, Env) -> apply_event('session.unsubscribed', - fun() -> eventmsg_sub_or_unsub('session.unsubscribed', ClientInfo, Topic, SubOpts) end, Env). + fun() -> + eventmsg_sub_or_unsub('session.unsubscribed', ClientInfo, Topic, SubOpts) + end, Env). on_message_dropped(Message, _, Reason, Env) -> case ignore_sys_message(Message) of @@ -151,7 +155,8 @@ on_message_acked(ClientInfo, Message, Env) -> %% Event Messages %%-------------------------------------------------------------------- -eventmsg_publish(Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, topic = Topic, headers = Headers, payload = Payload, timestamp = Timestamp}) -> +eventmsg_publish(Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, + topic = Topic, headers = Headers, payload = Payload, timestamp = Timestamp}) -> with_basic_columns('message.publish', #{id => emqx_guid:to_hexstr(Id), clientid => ClientId, @@ -236,7 +241,8 @@ eventmsg_sub_or_unsub(Event, _ClientInfo = #{ qos => QoS }). 
-eventmsg_dropped(Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, topic = Topic, headers = Headers, payload = Payload, timestamp = Timestamp}, Reason) -> +eventmsg_dropped(Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, + topic = Topic, headers = Headers, payload = Payload, timestamp = Timestamp}, Reason) -> with_basic_columns('message.dropped', #{id => emqx_guid:to_hexstr(Id), reason => Reason, @@ -257,7 +263,9 @@ eventmsg_delivered(_ClientInfo = #{ peerhost := PeerHost, clientid := ReceiverCId, username := ReceiverUsername - }, Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, topic = Topic, headers = Headers, payload = Payload, timestamp = Timestamp}) -> + }, Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, + topic = Topic, headers = Headers, payload = Payload, + timestamp = Timestamp}) -> with_basic_columns('message.delivered', #{id => emqx_guid:to_hexstr(Id), from_clientid => ClientId, @@ -279,7 +287,10 @@ eventmsg_acked(_ClientInfo = #{ peerhost := PeerHost, clientid := ReceiverCId, username := ReceiverUsername - }, Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, topic = Topic, headers = Headers, payload = Payload, timestamp = Timestamp}) -> + }, + Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, + topic = Topic, headers = Headers, payload = Payload, + timestamp = Timestamp}) -> with_basic_columns('message.acked', #{id => emqx_guid:to_hexstr(Id), from_clientid => ClientId, @@ -455,37 +466,9 @@ columns_with_exam('message.publish') -> , {<<"node">>, node()} ]; columns_with_exam('message.delivered') -> - [ {<<"event">>, 'message.delivered'} - , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} - , {<<"from_clientid">>, <<"c_emqx_1">>} - , {<<"from_username">>, <<"u_emqx_1">>} - , {<<"clientid">>, <<"c_emqx_2">>} - , {<<"username">>, <<"u_emqx_2">>} - , {<<"payload">>, <<"{\"msg\": \"hello\"}">>} - , {<<"peerhost">>, <<"192.168.0.10">>} - , {<<"topic">>, <<"t/a">>} - , {<<"qos">>, 1} - , {<<"flags">>, #{}} - , {<<"publish_received_at">>, erlang:system_time(millisecond)} - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} - ]; + columns_message_ack_delivered('message.delivered'); columns_with_exam('message.acked') -> - [ {<<"event">>, 'message.acked'} - , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} - , {<<"from_clientid">>, <<"c_emqx_1">>} - , {<<"from_username">>, <<"u_emqx_1">>} - , {<<"clientid">>, <<"c_emqx_2">>} - , {<<"username">>, <<"u_emqx_2">>} - , {<<"payload">>, <<"{\"msg\": \"hello\"}">>} - , {<<"peerhost">>, <<"192.168.0.10">>} - , {<<"topic">>, <<"t/a">>} - , {<<"qos">>, 1} - , {<<"flags">>, #{}} - , {<<"publish_received_at">>, erlang:system_time(millisecond)} - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} - ]; + columns_message_ack_delivered('message.acked'); columns_with_exam('message.dropped') -> [ {<<"event">>, 'message.dropped'} , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} @@ -530,7 +513,12 @@ columns_with_exam('client.disconnected') -> , {<<"node">>, node()} ]; columns_with_exam('session.subscribed') -> - [ {<<"event">>, 'session.subscribed'} + columns_message_sub_unsub('session.subscribed'); +columns_with_exam('session.unsubscribed') -> + columns_message_sub_unsub('session.unsubscribed'). 
+ +columns_message_sub_unsub(EventName) -> + [ {<<"event">>, EventName} , {<<"clientid">>, <<"c_emqx">>} , {<<"username">>, <<"u_emqx">>} , {<<"peerhost">>, <<"192.168.0.10">>} @@ -538,14 +526,21 @@ columns_with_exam('session.subscribed') -> , {<<"qos">>, 1} , {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"node">>, node()} - ]; -columns_with_exam('session.unsubscribed') -> - [ {<<"event">>, 'session.unsubscribed'} - , {<<"clientid">>, <<"c_emqx">>} - , {<<"username">>, <<"u_emqx">>} + ]. + +columns_message_ack_delivered(EventName) -> + [ {<<"event">>, EventName} + , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} + , {<<"from_clientid">>, <<"c_emqx_1">>} + , {<<"from_username">>, <<"u_emqx_1">>} + , {<<"clientid">>, <<"c_emqx_2">>} + , {<<"username">>, <<"u_emqx_2">>} + , {<<"payload">>, <<"{\"msg\": \"hello\"}">>} , {<<"peerhost">>, <<"192.168.0.10">>} , {<<"topic">>, <<"t/a">>} , {<<"qos">>, 1} + , {<<"flags">>, #{}} + , {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"node">>, node()} ]. diff --git a/apps/emqx_rule_engine/src/emqx_rule_funcs.erl b/apps/emqx_rule_engine/src/emqx_rule_funcs.erl index c94242a99..06b710492 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_funcs.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_funcs.erl @@ -884,29 +884,27 @@ time_unit(<<"nanosecond">>) -> nanosecond. %% Here the emqx_rule_funcs module acts as a proxy, forwarding %% the function handling to the worker module. %% @end --ifdef(EMQX_ENTERPRISE). -'$handle_undefined_function'(schema_decode, [SchemaId, Data|MoreArgs]) -> - emqx_schema_parser:decode(SchemaId, Data, MoreArgs); -'$handle_undefined_function'(schema_decode, Args) -> - error({args_count_error, {schema_decode, Args}}); +% '$handle_undefined_function'(schema_decode, [SchemaId, Data|MoreArgs]) -> +% emqx_schema_parser:decode(SchemaId, Data, MoreArgs); +% '$handle_undefined_function'(schema_decode, Args) -> +% error({args_count_error, {schema_decode, Args}}); -'$handle_undefined_function'(schema_encode, [SchemaId, Term|MoreArgs]) -> - emqx_schema_parser:encode(SchemaId, Term, MoreArgs); -'$handle_undefined_function'(schema_encode, Args) -> - error({args_count_error, {schema_encode, Args}}); +% '$handle_undefined_function'(schema_encode, [SchemaId, Term|MoreArgs]) -> +% emqx_schema_parser:encode(SchemaId, Term, MoreArgs); +% '$handle_undefined_function'(schema_encode, Args) -> +% error({args_count_error, {schema_encode, Args}}); + +% '$handle_undefined_function'(sprintf, [Format|Args]) -> +% erlang:apply(fun sprintf_s/2, [Format, Args]); + +% '$handle_undefined_function'(Fun, Args) -> +% error({sql_function_not_supported, function_literal(Fun, Args)}). '$handle_undefined_function'(sprintf, [Format|Args]) -> erlang:apply(fun sprintf_s/2, [Format, Args]); '$handle_undefined_function'(Fun, Args) -> error({sql_function_not_supported, function_literal(Fun, Args)}). --else. -'$handle_undefined_function'(sprintf, [Format|Args]) -> - erlang:apply(fun sprintf_s/2, [Format, Args]); - -'$handle_undefined_function'(Fun, Args) -> - error({sql_function_not_supported, function_literal(Fun, Args)}). --endif. % EMQX_ENTERPRISE map_path(Key) -> {path, [{key, P} || P <- string:split(Key, ".", all)]}. 
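%% A small sketch (not part of the changeset; the helper name is illustrative,
%% and it assumes emqx_rule_funcs still exports the '$handle_undefined_function'/2
%% hook): with the EMQX_ENTERPRISE-only schema_decode/schema_encode clauses
%% commented out above, such calls now fall through to the generic clause and
%% raise sql_function_not_supported, while sprintf is still forwarded to
%% sprintf_s/2.
unknown_sql_function_raises() ->
    try emqx_rule_funcs:schema_decode(<<"schema_id">>, <<"data">>) of
        _ -> unexpected
    catch
        error:{sql_function_not_supported, _FuncLiteral} -> ok
    end.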
diff --git a/apps/emqx_rule_engine/src/emqx_rule_maps.erl b/apps/emqx_rule_engine/src/emqx_rule_maps.erl index 4bb104f7f..fe4595c03 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_maps.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_maps.erl @@ -33,7 +33,7 @@ nested_get({var, Key}, Data, Default) -> nested_get({path, Path}, Data, Default) when is_list(Path) -> do_nested_get(Path, Data, Data, Default). -do_nested_get([Key|More], Data, OrgData, Default) -> +do_nested_get([Key | More], Data, OrgData, Default) -> case general_map_get(Key, Data, OrgData, undefined) of undefined -> Default; Val -> do_nested_get(More, Val, OrgData, Default) @@ -51,7 +51,7 @@ nested_put({var, Key}, Val, Map) -> nested_put({path, Path}, Val, Map) when is_list(Path) -> do_nested_put(Path, Val, Map, Map). -do_nested_put([Key|More], Val, Map, OrgData) -> +do_nested_put([Key | More], Val, Map, OrgData) -> SubMap = general_map_get(Key, Map, OrgData, undefined), general_map_put(Key, do_nested_put(More, Val, SubMap, OrgData), Map, OrgData); do_nested_put([], Val, _Map, _OrgData) -> @@ -131,13 +131,13 @@ setnth(tail, List, Val) when is_list(List) -> List ++ [Val]; setnth(tail, _List, Val) -> [Val]; setnth(I, List, _Val) when not is_integer(I) -> List; setnth(0, List, _Val) -> List; -setnth(I, List, _Val) when is_integer(I), I > 0 -> - do_setnth(I, List, _Val); -setnth(I, List, _Val) when is_integer(I), I < 0 -> - lists:reverse(do_setnth(-I, lists:reverse(List), _Val)). +setnth(I, List, Val) when is_integer(I), I > 0 -> + do_setnth(I, List, Val); +setnth(I, List, Val) when is_integer(I), I < 0 -> + lists:reverse(do_setnth(-I, lists:reverse(List), Val)). -do_setnth(1, [_|Rest], Val) -> [Val|Rest]; -do_setnth(I, [E|Rest], Val) -> [E|setnth(I-1, Rest, Val)]; +do_setnth(1, [_ | Rest], Val) -> [Val | Rest]; +do_setnth(I, [E | Rest], Val) -> [E | setnth(I-1, Rest, Val)]; do_setnth(_, [], _Val) -> []. getnth(0, _) -> @@ -206,4 +206,4 @@ unsafe_atom_key_map(BinKeyMap) when is_map(BinKeyMap) -> end, #{}, BinKeyMap); unsafe_atom_key_map(ListV) when is_list(ListV) -> [unsafe_atom_key_map(V) || V <- ListV]; -unsafe_atom_key_map(Val) -> Val. \ No newline at end of file +unsafe_atom_key_map(Val) -> Val. diff --git a/apps/emqx_rule_engine/src/emqx_rule_metrics.erl b/apps/emqx_rule_engine/src/emqx_rule_metrics.erl deleted file mode 100644 index 990911801..000000000 --- a/apps/emqx_rule_engine/src/emqx_rule_metrics.erl +++ /dev/null @@ -1,265 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- - --module(emqx_rule_metrics). - --behaviour(gen_server). - --include("rule_engine.hrl"). - -%% API functions --export([ start_link/0 - , stop/0 - ]). - --export([ get_rules_matched/1 - ]). - --export([ inc/2 - , inc/3 - , get/2 - , get_rule_speed/1 - , create_rule_metrics/1 - , clear_rule_metrics/1 - ]). 
- --export([ get_rule_metrics/1 - ]). - -%% gen_server callbacks --export([ init/1 - , handle_call/3 - , handle_info/2 - , handle_cast/2 - , code_change/3 - , terminate/2 - ]). - --ifndef(TEST). --define(SECS_5M, 300). --define(SAMPLING, 10). --else. -%% Use 5 secs average speed instead of 5 mins in case of testing --define(SECS_5M, 5). --define(SAMPLING, 1). --endif. - --define(CntrRef, ?MODULE). --define(SAMPCOUNT_5M, (?SECS_5M div ?SAMPLING)). - --record(rule_speed, { - max = 0 :: number(), - current = 0 :: number(), - last5m = 0 :: number(), - %% metadata for calculating the avg speed - tick = 1 :: number(), - last_v = 0 :: number(), - %% metadata for calculating the 5min avg speed - last5m_acc = 0 :: number(), - last5m_smpl = [] :: list() - }). - --record(state, { - metric_ids = sets:new(), - rule_speeds :: undefined | #{rule_id() => #rule_speed{}} - }). - -%%------------------------------------------------------------------------------ -%% APIs -%%------------------------------------------------------------------------------ - --spec(create_rule_metrics(rule_id()) -> ok). -create_rule_metrics(Id) -> - gen_server:call(?MODULE, {create_rule_metrics, Id}). - --spec(clear_rule_metrics(rule_id()) -> ok). -clear_rule_metrics(Id) -> - gen_server:call(?MODULE, {delete_rule_metrics, Id}). - --spec(get(rule_id(), atom()) -> number()). -get(Id, Metric) -> - case get_couters_ref(Id) of - not_found -> 0; - Ref -> counters:get(Ref, metrics_idx(Metric)) - end. - --spec(get_rule_speed(rule_id()) -> map()). -get_rule_speed(Id) -> - gen_server:call(?MODULE, {get_rule_speed, Id}). - --spec(get_rule_metrics(rule_id()) -> map()). -get_rule_metrics(Id) -> - #{max := Max, current := Current, last5m := Last5M} = get_rule_speed(Id), - #{matched => get_rules_matched(Id), - speed => Current, - speed_max => Max, - speed_last5m => Last5M - }. - --spec inc(rule_id(), atom()) -> ok. -inc(Id, Metric) -> - inc(Id, Metric, 1). - --spec inc(rule_id(), atom(), pos_integer()) -> ok. -inc(Id, Metric, Val) -> - case get_couters_ref(Id) of - not_found -> - %% this may occur when increasing a counter for - %% a rule that was created from a remove node. - create_rule_metrics(Id), - counters:add(get_couters_ref(Id), metrics_idx(Metric), Val); - Ref -> - counters:add(Ref, metrics_idx(Metric), Val) - end. - -get_rules_matched(Id) -> - get(Id, 'rules.matched'). - -start_link() -> - gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). - -init([]) -> - erlang:process_flag(trap_exit, true), - %% the speed metrics - erlang:send_after(timer:seconds(?SAMPLING), self(), ticking), - persistent_term:put(?CntrRef, #{}), - {ok, #state{}}. 
- -handle_call({get_rule_speed, _Id}, _From, State = #state{rule_speeds = undefined}) -> - {reply, format_rule_speed(#rule_speed{}), State}; -handle_call({get_rule_speed, Id}, _From, State = #state{rule_speeds = RuleSpeeds}) -> - {reply, case maps:get(Id, RuleSpeeds, undefined) of - undefined -> format_rule_speed(#rule_speed{}); - Speed -> format_rule_speed(Speed) - end, State}; - -handle_call({create_rule_metrics, Id}, _From, - State = #state{metric_ids = MIDs, rule_speeds = RuleSpeeds}) -> - {reply, create_counters(Id), - State#state{metric_ids = sets:add_element(Id, MIDs), - rule_speeds = case RuleSpeeds of - undefined -> #{Id => #rule_speed{}}; - _ -> RuleSpeeds#{Id => #rule_speed{}} - end}}; - -handle_call({delete_rule_metrics, Id}, _From, - State = #state{metric_ids = MIDs, rule_speeds = RuleSpeeds}) -> - {reply, delete_counters(Id), - State#state{metric_ids = sets:del_element(Id, MIDs), - rule_speeds = case RuleSpeeds of - undefined -> undefined; - _ -> maps:remove(Id, RuleSpeeds) - end}}; - -handle_call(_Request, _From, State) -> - {reply, ok, State}. - -handle_cast(_Msg, State) -> - {noreply, State}. - -handle_info(ticking, State = #state{rule_speeds = undefined}) -> - erlang:send_after(timer:seconds(?SAMPLING), self(), ticking), - {noreply, State}; - -handle_info(ticking, State = #state{rule_speeds = RuleSpeeds0}) -> - RuleSpeeds = maps:map( - fun(Id, RuleSpeed) -> - calculate_speed(get_rules_matched(Id), RuleSpeed) - end, RuleSpeeds0), - erlang:send_after(timer:seconds(?SAMPLING), self(), ticking), - {noreply, State#state{rule_speeds = RuleSpeeds}}; - -handle_info(_Info, State) -> - {noreply, State}. - -code_change(_OldVsn, State, _Extra) -> - {ok, State}. - -terminate(_Reason, #state{metric_ids = MIDs}) -> - [delete_counters(Id) || Id <- sets:to_list(MIDs)], - persistent_term:erase(?CntrRef). - -stop() -> - gen_server:stop(?MODULE). - -%%------------------------------------------------------------------------------ -%% Internal Functions -%%------------------------------------------------------------------------------ - -create_counters(Id) -> - case get_couters_ref(Id) of - not_found -> - Counters = get_all_counters(), - CntrRef = counters:new(max_counters_size(), [write_concurrency]), - persistent_term:put(?CntrRef, Counters#{Id => CntrRef}); - _Ref -> ok - end. - -delete_counters(Id) -> - persistent_term:put(?CntrRef, maps:remove(Id, get_all_counters())). - -get_couters_ref(Id) -> - maps:get(Id, get_all_counters(), not_found). - -get_all_counters() -> - persistent_term:get(?CntrRef, #{}). 
- -calculate_speed(_CurrVal, undefined) -> - undefined; -calculate_speed(CurrVal, #rule_speed{max = MaxSpeed0, last_v = LastVal, - tick = Tick, last5m_acc = AccSpeed5Min0, - last5m_smpl = Last5MinSamples0}) -> - %% calculate the current speed based on the last value of the counter - CurrSpeed = (CurrVal - LastVal) / ?SAMPLING, - - %% calculate the max speed since the emqx startup - MaxSpeed = - if MaxSpeed0 >= CurrSpeed -> MaxSpeed0; - true -> CurrSpeed - end, - - %% calculate the average speed in last 5 mins - {Last5MinSamples, Acc5Min, Last5Min} = - if Tick =< ?SAMPCOUNT_5M -> - Acc = AccSpeed5Min0 + CurrSpeed, - {lists:reverse([CurrSpeed | lists:reverse(Last5MinSamples0)]), - Acc, Acc / Tick}; - true -> - [FirstSpeed | Speeds] = Last5MinSamples0, - Acc = AccSpeed5Min0 + CurrSpeed - FirstSpeed, - {lists:reverse([CurrSpeed | lists:reverse(Speeds)]), - Acc, Acc / ?SAMPCOUNT_5M} - end, - - #rule_speed{max = MaxSpeed, current = CurrSpeed, last5m = Last5Min, - last_v = CurrVal, last5m_acc = Acc5Min, - last5m_smpl = Last5MinSamples, tick = Tick + 1}. - -format_rule_speed(#rule_speed{max = Max, current = Current, last5m = Last5Min}) -> - #{max => Max, current => precision(Current, 2), last5m => precision(Last5Min, 2)}. - -precision(Float, N) -> - Base = math:pow(10, N), - round(Float * Base) / Base. - -%%------------------------------------------------------------------------------ -%% Metrics Definitions -%%------------------------------------------------------------------------------ - -max_counters_size() -> 2. -metrics_idx('rules.matched') -> 1; -metrics_idx(_) -> 2. - diff --git a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl index dc162665c..7b68b3ee3 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl @@ -99,7 +99,7 @@ do_apply_rule(#{ case ?RAISE(match_conditions(Conditions, ColumnsAndSelected), {match_conditions_error, {_EXCLASS_,_EXCPTION_,_ST_}}) of true -> - ok = emqx_rule_metrics:inc(RuleId, 'rules.matched'), + ok = emqx_plugin_libs_metrics:inc_matched(rule_metrics, RuleId), Collection2 = filter_collection(Input, InCase, DoEach, Collection), {ok, [handle_output_list(Outputs, Coll, Input) || Coll <- Collection2]}; false -> @@ -117,7 +117,7 @@ do_apply_rule(#{id := RuleId, case ?RAISE(match_conditions(Conditions, maps:merge(Input, Selected)), {match_conditions_error, {_EXCLASS_,_EXCPTION_,_ST_}}) of true -> - ok = emqx_rule_metrics:inc(RuleId, 'rules.matched'), + ok = emqx_plugin_libs_metrics:inc_matched(rule_metrics, RuleId), {ok, handle_output_list(Outputs, Selected, Input)}; false -> {error, nomatch} diff --git a/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl b/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl index a67e62355..7cd9448db 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl @@ -41,7 +41,7 @@ test(#{sql := Sql, context := Context}) -> test_rule(Sql, Select, Context, EventTopics) -> RuleId = iolist_to_binary(["sql_tester:", emqx_misc:gen_id(16)]), - ok = emqx_rule_metrics:create_rule_metrics(RuleId), + ok = emqx_plugin_libs_metrics:create_metrics(rule_metrics, RuleId), Rule = #{ id => RuleId, sql => Sql, @@ -62,7 +62,7 @@ test_rule(Sql, Select, Context, EventTopics) -> {ok, Data} -> {ok, flatten(Data)}; {error, nomatch} -> {error, nomatch} after - emqx_rule_metrics:clear_rule_metrics(RuleId) + ok = emqx_plugin_libs_metrics:clear_metrics(rule_metrics, RuleId) end. 
get_selected_data(Selected, _Envs, _Args) -> diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl index 10ea55a69..712d113f9 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl @@ -60,7 +60,7 @@ t_crud_rule_api(_Config) -> ?assertEqual(Rule3, Rule2), ?assertEqual(<<"select * from \"t/b\"">>, maps:get(sql, Rule3)), - ?assertMatch({200}, emqx_rule_engine_api:crud_rules_by_id(delete, + ?assertMatch({204}, emqx_rule_engine_api:crud_rules_by_id(delete, #{bindings => #{id => RuleID}})), %ct:pal("Show After Deleted: ~p", [NotFound]), diff --git a/apps/emqx_rule_engine/test/emqx_rule_metrics_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_metrics_SUITE.erl deleted file mode 100644 index 418e8dd0f..000000000 --- a/apps/emqx_rule_engine/test/emqx_rule_metrics_SUITE.erl +++ /dev/null @@ -1,94 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- - --module(emqx_rule_metrics_SUITE). - --compile(export_all). --compile(nowarn_export_all). - --include_lib("eunit/include/eunit.hrl"). --include_lib("common_test/include/ct.hrl"). - -all() -> - [ {group, metrics} - , {group, speed} ]. - -suite() -> - [{ct_hooks, [cth_surefire]}, {timetrap, {seconds, 30}}]. - -groups() -> - [{metrics, [sequence], - [ t_rule - , t_no_creation_1 - ]}, - {speed, [sequence], - [ rule_speed - ]} - ]. - -init_per_suite(Config) -> - emqx_common_test_helpers:start_apps([emqx_conf]), - {ok, _} = emqx_rule_metrics:start_link(), - Config. - -end_per_suite(_Config) -> - catch emqx_rule_metrics:stop(), - emqx_common_test_helpers:stop_apps([emqx_conf]), - ok. - -init_per_testcase(_, Config) -> - catch emqx_rule_metrics:stop(), - {ok, _} = emqx_rule_metrics:start_link(), - Config. - -end_per_testcase(_, _Config) -> - ok. - -t_no_creation_1(_) -> - ?assertEqual(ok, emqx_rule_metrics:inc(<<"rule1">>, 'rules.matched')). - -t_rule(_) -> - ok = emqx_rule_metrics:create_rule_metrics(<<"rule1">>), - ok = emqx_rule_metrics:create_rule_metrics(<<"rule2">>), - ok = emqx_rule_metrics:inc(<<"rule1">>, 'rules.matched'), - ok = emqx_rule_metrics:inc(<<"rule2">>, 'rules.matched'), - ok = emqx_rule_metrics:inc(<<"rule2">>, 'rules.matched'), - ct:pal("----couters: ---~p", [persistent_term:get(emqx_rule_metrics)]), - ?assertEqual(1, emqx_rule_metrics:get(<<"rule1">>, 'rules.matched')), - ?assertEqual(2, emqx_rule_metrics:get(<<"rule2">>, 'rules.matched')), - ?assertEqual(0, emqx_rule_metrics:get(<<"rule3">>, 'rules.matched')), - ok = emqx_rule_metrics:clear_rule_metrics(<<"rule1">>), - ok = emqx_rule_metrics:clear_rule_metrics(<<"rule2">>). 
- -rule_speed(_) -> - ok = emqx_rule_metrics:create_rule_metrics(<<"rule1">>), - ok = emqx_rule_metrics:create_rule_metrics(<<"rule:2">>), - ok = emqx_rule_metrics:inc(<<"rule1">>, 'rules.matched'), - ok = emqx_rule_metrics:inc(<<"rule1">>, 'rules.matched'), - ok = emqx_rule_metrics:inc(<<"rule:2">>, 'rules.matched'), - ?assertEqual(2, emqx_rule_metrics:get(<<"rule1">>, 'rules.matched')), - ct:sleep(1000), - ?LET(#{max := Max, current := Current}, emqx_rule_metrics:get_rule_speed(<<"rule1">>), - {?assert(Max =< 2), - ?assert(Current =< 2)}), - ct:sleep(2100), - ?LET(#{max := Max, current := Current, last5m := Last5Min}, emqx_rule_metrics:get_rule_speed(<<"rule1">>), - {?assert(Max =< 2), - ?assert(Current == 0), - ?assert(Last5Min =< 0.67)}), - ct:sleep(3000), - ok = emqx_rule_metrics:clear_rule_metrics(<<"rule1">>), - ok = emqx_rule_metrics:clear_rule_metrics(<<"rule:2">>). diff --git a/bin/emqx b/bin/emqx index 23b991337..63c09cc41 100755 --- a/bin/emqx +++ b/bin/emqx @@ -2,8 +2,7 @@ # -*- tab-width:4;indent-tabs-mode:nil -*- # ex: ts=4 sw=4 et -set -e -set -o pipefail +set -euo pipefail DEBUG="${DEBUG:-0}" if [ "$DEBUG" -eq 1 ]; then @@ -44,9 +43,8 @@ export ERTS_DIR="$ROOTDIR/erts-$ERTS_VSN" export BINDIR="$ERTS_DIR/bin" export EMU="beam" export PROGNAME="erl" -export LD_LIBRARY_PATH="$ERTS_DIR/lib:$LD_LIBRARY_PATH" export ERTS_LIB_DIR="$ERTS_DIR/../lib" -MNESIA_DATA_DIR="$RUNNER_DATA_DIR/mnesia/$NAME" +DYNLIBS_DIR="$RUNNER_ROOT_DIR/dynlibs" # Echo to stderr on errors echoerr() { echo "ERROR: $*" 1>&2; } @@ -63,62 +61,166 @@ assert_node_alive() { fi } -relx_usage() { - command="$1" + +# Echo to stderr on errors +echoerr() { echo "$*" 1>&2; } + +check_erlang_start() { + "$BINDIR/$PROGNAME" -noshell -boot "$REL_DIR/start_clean" -s crypto start -s erlang halt +} + +usage() { + local command="$1" case "$command" in - unpack) - echo "Usage: $REL_NAME unpack [VERSION]" - echo "Unpacks a release package VERSION, it assumes that this" - echo "release package tarball has already been deployed at one" - echo "of the following locations:" - echo " releases/-.tar.gz" - echo " releases/-.zip" - ;; - install) - echo "Usage: $REL_NAME install [VERSION]" - echo "Installs a release package VERSION, it assumes that this" - echo "release package tarball has already been deployed at one" - echo "of the following locations:" - echo " releases/-.tar.gz" - echo " releases/-.zip" - echo "" - echo " --no-permanent Install release package VERSION but" - echo " don't make it permanent" - ;; - uninstall) - echo "Usage: $REL_NAME uninstall [VERSION]" - echo "Uninstalls a release VERSION, it will only accept" - echo "versions that are not currently in use" - ;; - upgrade) - echo "Usage: $REL_NAME upgrade [VERSION]" - echo "Upgrades the currently running release to VERSION, it assumes" - echo "that a release package tarball has already been deployed at one" - echo "of the following locations:" - echo " releases/-.tar.gz" - echo " releases/-.zip" - echo "" - echo " --no-permanent Install release package VERSION but" - echo " don't make it permanent" - ;; - downgrade) - echo "Usage: $REL_NAME downgrade [VERSION]" - echo "Downgrades the currently running release to VERSION, it assumes" - echo "that a release package tarball has already been deployed at one" - echo "of the following locations:" - echo " releases/-.tar.gz" - echo " releases/-.zip" - echo "" - echo " --no-permanent Install release package VERSION but" - echo " don't make it permanent" - ;; - *) - echo "Usage: $REL_NAME {start|start_boot 
|ertspath|foreground|stop|pid|ping|console|console_clean|console_boot |attach|remote_console|upgrade|downgrade|install|uninstall|versions|escript|ctl|rpc|rpcterms|eval|root_dir}" - ;; + start) + echo "Start EMQ X service in daemon mode" + ;; + stop) + echo "Stop the running EMQ X program" + ;; + restart|reboot) + echo "Restart $EMQX_DESCRIPTION" + ;; + pid) + echo "Print out $EMQX_DESCRIPTION process identifier" + ;; + ping) + echo "Check if the $EMQX_DESCRIPTION node is up and running" + echo "This command exit with 0 silently if node is running" + ;; + escript) + echo "Execute a escript using the Erlang runtime from EMQ X package installation" + echo "For example $REL_NAME escript /path/to/my/escript my_arg1 my_arg2" + ;; + attach) + echo "This command is applicable when $EMQX_DESCRIPTION is started in daemon" + echo "mode. it attaches the current shell to EMQ X's control console" + echo "through a named pipe" + echo "WARNING: try to use the safer alternative, remote_console command." + ;; + remote_console) + echo "Start a dummy Erlang node and hidden-connect $EMQX_DESCRIPTION to" + echo "with an interactive Erlang shell" + ;; + console) + echo "Boot up $EMQX_DESCRIPTION service in an interactive Erlang shell" + echo "This command is useful for troubleshooting" + ;; + console_clean) + echo "This command does NOT boot up the $EMQX_DESCRIPTION service" + echo "It only starts an interactive Erlang console with all the" + echo "EMQ X code available" + ;; + foreground) + echo "Start $EMQX_DESCRIPTION in foreground mode" + ;; + ertspath) + echo "Print path to Erlang runtime dir" + ;; + rpc) + echo "Usge $REL_NAME rpc MODULE FUNCTION [ARGS, ...]" + echo "Connect to the $EMQX_DESCRIPTION node and make an Erlang RPC" + echo "The result of the RPC call must be 'ok'" + echo "This command blocks for at most 60 seconds in case the node" + echo "does not reply the call in time" + ;; + rpcterms) + echo "Usge $REL_NAME rpcterms MODULE FUNCTION [ARGS, ...]" + echo "Connect to the $EMQX_DESCRIPTION node and make an Erlang RPC" + echo "The result of the RPC call is pretty-printed as an Erlang term" + ;; + root_dir) + echo "Print EMQ X installation root dir" + ;; + eval) + echo "Evaluate an Erlang expression in the EMQ X node" + ;; + versions) + echo "List installed EMQ X versions and their status" + ;; + unpack) + echo "Usage: $REL_NAME unpack [VERSION]" + echo "Unpacks a release package VERSION, it assumes that this" + echo "release package tarball has already been deployed at one" + echo "of the following locations:" + echo " releases/-.tar.gz" + echo " releases/-.zip" + ;; + install) + echo "Usage: $REL_NAME install [VERSION]" + echo "Installs a release package VERSION, it assumes that this" + echo "release package tarball has already been deployed at one" + echo "of the following locations:" + echo " releases/-.tar.gz" + echo " releases/-.zip" + echo "" + echo " --no-permanent Install release package VERSION but" + echo " don't make it permanent" + ;; + uninstall) + echo "Usage: $REL_NAME uninstall [VERSION]" + echo "Uninstalls a release VERSION, it will only accept" + echo "versions that are not currently in use" + ;; + upgrade) + echo "Usage: $REL_NAME upgrade [VERSION]" + echo "Upgrades the currently running release to VERSION, it assumes" + echo "that a release package tarball has already been deployed at one" + echo "of the following locations:" + echo " releases/-.tar.gz" + echo " releases/-.zip" + echo "" + echo " --no-permanent Install release package VERSION but" + echo " don't make it 
permanent" + ;; + downgrade) + echo "Usage: $REL_NAME downgrade [VERSION]" + echo "Downgrades the currently running release to VERSION, it assumes" + echo "that a release package tarball has already been deployed at one" + echo "of the following locations:" + echo " releases/-.tar.gz" + echo " releases/-.zip" + echo "" + echo " --no-permanent Install release package VERSION but" + echo " don't make it permanent" + ;; + *) + echo "Usage: $REL_NAME {start|ertspath|foreground|stop|pid|ping|console|console_clean|attach|remote_console|upgrade|downgrade|install|uninstall|versions|escript|ctl|rpc|rpcterms|eval|root_dir} " + ;; esac } +COMMAND="${1:-}" + +if [ "${2:-}" = 'help' ]; then + ## 'ctl' command has its own usage info + if [ "$COMMAND" != 'ctl' ]; then + usage "$COMMAND" + exit 0 + fi +fi + +if ! check_erlang_start >/dev/null 2>&1; then + BUILT_ON="$(head -1 "${REL_DIR}/BUILT_ON")" + ## failed to start, might be due to missing libs, try to be portable + export LD_LIBRARY_PATH="$DYNLIBS_DIR:$LD_LIBRARY_PATH" + if ! check_erlang_start; then + ## it's hopeless + echoerr "FATAL: Unable to start Erlang." + echoerr "Please make sure openssl-1.1.1 (libcrypto) and libncurses are installed." + echoerr "Also ensure it's running on the correct platform," + echoerr "this EMQ X release is built for $BUILT_ON" + exit 1 + fi + echoerr "WARNING: There seem to be missing dynamic libs from the OS. Using libs from ${DYNLIBS_DIR}" +fi + +## backward compatible +if [ -d "$ERTS_DIR/lib" ]; then + export LD_LIBRARY_PATH="$ERTS_DIR/lib:$LD_LIBRARY_PATH" +fi + # Simple way to check the correct user and fail early check_user() { # Validate that the user running the script is the owner of the @@ -145,11 +247,9 @@ if [ "$ES" -ne 0 ]; then exit $ES fi -if [ -z "$WITH_EPMD" ]; then - EPMD_ARG="-start_epmd false -epmd_module ekka_epmd -proto_dist ekka" -else - EPMD_ARG="-start_epmd true $PROTO_DIST_ARG" -fi +# EPMD_ARG="-start_epmd true $PROTO_DIST_ARG" +NO_EPMD="-start_epmd false -epmd_module ekka_epmd -proto_dist ekka" +EPMD_ARG="${EPMD_ARG:-${NO_EPMD}}" # Warn the user if ulimit -n is less than 1024 ULIMIT_F=$(ulimit -n) @@ -202,7 +302,7 @@ relx_gen_id() { # Control a node relx_nodetool() { command="$1"; shift - ERL_FLAGS="$ERL_FLAGS $EPMD_ARG" \ + ERL_FLAGS="${ERL_FLAGS:-} $EPMD_ARG" \ "$ERTS_DIR/bin/escript" "$ROOTDIR/bin/nodetool" "$NAME_TYPE" "$NAME" \ -setcookie "$COOKIE" "$command" "$@" } @@ -288,13 +388,6 @@ generate_config() { fi } -# Call bootstrapd for daemon commands like start/stop/console -bootstrapd() { - if [ -e "$RUNNER_DATA_DIR/.erlang.cookie" ]; then - chown "$RUNNER_USER" "$RUNNER_DATA_DIR"/.erlang.cookie - fi -} - # check if a PID is down is_down() { PID="$1" @@ -332,25 +425,29 @@ wait_for() { done } -# Use $CWD/etc/sys.config if exists -if [ -z "$RELX_CONFIG_PATH" ]; then - if [ -f "$RUNNER_ETC_DIR/sys.config" ]; then - RELX_CONFIG_PATH="-config $RUNNER_ETC_DIR/sys.config" +latest_vm_args() { + local hint_var_name="$1" + local vm_args_file + vm_args_file="$(find "$CONFIGS_DIR" -type f -name "vm.*.args" | sort | tail -1)" + if [ -f "$vm_args_file" ]; then + echo "$vm_args_file" else - RELX_CONFIG_PATH="" + echoerr "ERRRO: node not initialized?" 
+ echoerr "Generated config file vm.*.args is not found for command '$COMMAND'" + echoerr "in config dir: $CONFIGS_DIR" + echoerr "In case the file has been deleted while the node is running," + echoerr "set environment variable '$hint_var_name' to continue" + exit 1 fi -fi +} -IS_BOOT_COMMAND='no' -case "$1" in - start|start_boot) +## IS_BOOT_COMMAND is set for later to inspect node name and cookie from hocon config (or env variable) +case "${COMMAND}" in + start|console|console_clean|foreground) IS_BOOT_COMMAND='yes' ;; - console|console_clean|console_boot) - IS_BOOT_COMMAND='yes' - ;; - foreground) - IS_BOOT_COMMAND='yes' + *) + IS_BOOT_COMMAND='no' ;; esac @@ -365,15 +462,14 @@ if [ -z "$NAME" ]; then # for boot commands, inspect emqx.conf for node name NAME="$(call_hocon -s $SCHEMA_MOD -I "$CONFIGS_DIR/" -c "$RUNNER_ETC_DIR"/emqx.conf get node.name | tr -d \")" else - # for non-boot commands, inspect vm.