diff --git a/.ci/docker-compose-file/.env b/.ci/docker-compose-file/.env index e99a6d13f..b7033caae 100644 --- a/.ci/docker-compose-file/.env +++ b/.ci/docker-compose-file/.env @@ -10,6 +10,8 @@ CASSANDRA_TAG=3.11.6 MINIO_TAG=RELEASE.2023-03-20T20-16-18Z OPENTS_TAG=9aa7f88 KINESIS_TAG=2.1 +HSTREAMDB_TAG=v0.15.0 +HSTREAMDB_ZK_TAG=3.8.1 MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server SQLSERVER_TAG=2019-CU19-ubuntu-20.04 diff --git a/.ci/docker-compose-file/docker-compose-hstreamdb.yaml b/.ci/docker-compose-file/docker-compose-hstreamdb.yaml index f3c4dbd4c..d42fd9fa2 100644 --- a/.ci/docker-compose-file/docker-compose-hstreamdb.yaml +++ b/.ci/docker-compose-file/docker-compose-hstreamdb.yaml @@ -2,11 +2,13 @@ version: "3.5" services: hserver: - image: hstreamdb/hstream:v0.15.0 + image: hstreamdb/hstream:${HSTREAMDB_TAG} container_name: hstreamdb depends_on: - - zookeeper - - hstore + zookeeper: + condition: service_started + hstore: + condition: service_healthy # ports: # - "127.0.0.1:6570:6570" expose: @@ -37,7 +39,7 @@ services: --io-tasks-network emqx_bridge hstore: - image: hstreamdb/hstream:v0.15.0 + image: hstreamdb/hstream:${HSTREAMDB_TAG} networks: - emqx_bridge volumes: @@ -53,10 +55,17 @@ services: --use-tcp --tcp-host $$(hostname -I | awk '{print $$1}') \ --user-admin-port 6440 \ --param enable-dscp-reflection=false \ - --no-interactive + --no-interactive \ + > /data/store/hstore.log 2>&1 + healthcheck: + test: ["CMD", "grep", "LogDevice Cluster running", "/data/store/hstore.log"] + interval: 10s + timeout: 10s + retries: 60 + start_period: 60s zookeeper: - image: zookeeper + image: zookeeper:${HSTREAMDB_ZK_TAG} expose: - 2181 networks: diff --git a/.ci/docker-compose-file/docker-compose-kafka.yaml b/.ci/docker-compose-file/docker-compose-kafka.yaml index 18ef3991c..f5bdb24ec 100644 --- a/.ci/docker-compose-file/docker-compose-kafka.yaml +++ b/.ci/docker-compose-file/docker-compose-kafka.yaml @@ -18,7 +18,7 @@ services: - /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret kdc: hostname: kdc.emqx.net - image: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu20.04 + image: ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu20.04 container_name: kdc.emqx.net expose: - 88 # kdc diff --git a/.ci/docker-compose-file/docker-compose.yaml b/.ci/docker-compose-file/docker-compose.yaml index 504358419..9adbef02e 100644 --- a/.ci/docker-compose-file/docker-compose.yaml +++ b/.ci/docker-compose-file/docker-compose.yaml @@ -3,7 +3,7 @@ version: '3.9' services: erlang: container_name: erlang - image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu20.04} + image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu20.04} env_file: - conf.env environment: diff --git a/.ci/docker-compose-file/python/pytest.sh b/.ci/docker-compose-file/python/pytest.sh index 924c30212..649357cff 100755 --- a/.ci/docker-compose-file/python/pytest.sh +++ b/.ci/docker-compose-file/python/pytest.sh @@ -20,7 +20,7 @@ fi apk update && apk add git curl git clone -b develop-5.0 https://github.com/emqx/paho.mqtt.testing.git /paho.mqtt.testing -pip install pytest==7.1.2 pytest-retry +pip install pytest==7.1.2 pytest-retry==1.3.0 pytest --retries 3 -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$TARGET_HOST" RESULT=$? 
diff --git a/.github/actions/package-macos/action.yaml b/.github/actions/package-macos/action.yaml index 6b47ceafa..25edcb5f5 100644 --- a/.github/actions/package-macos/action.yaml +++ b/.github/actions/package-macos/action.yaml @@ -3,7 +3,7 @@ inputs: profile: # emqx, emqx-enterprise required: true type: string - otp: # 25.3.2-1 + otp: # 25.3.2-2 required: true type: string os: diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index d75661a8a..0c4fe2765 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,6 +1,6 @@ Fixes - + ## Summary copilot:summary diff --git a/.github/workflows/_pr_entrypoint.yaml b/.github/workflows/_pr_entrypoint.yaml index ec2bbf2e1..7de9a64fd 100644 --- a/.github/workflows/_pr_entrypoint.yaml +++ b/.github/workflows/_pr_entrypoint.yaml @@ -17,7 +17,7 @@ env: jobs: sanity-checks: runs-on: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }} - container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" + container: "ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu22.04" outputs: ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-host: ${{ steps.matrix.outputs.ct-host }} @@ -25,9 +25,9 @@ jobs: version-emqx: ${{ steps.matrix.outputs.version-emqx }} version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }} runner: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }} - builder: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" - builder_vsn: "5.1-3" - otp_vsn: "25.3.2-1" + builder: "ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu22.04" + builder_vsn: "5.1-4" + otp_vsn: "25.3.2-2" elixir_vsn: "1.14.5" steps: @@ -63,7 +63,8 @@ jobs: ./actionlint -color \ -shellcheck= \ -ignore 'label ".+" is unknown' \ - -ignore 'value "emqx-enterprise" in "exclude"' + -ignore 'value "emqx-enterprise" in "exclude"' \ + -ignore 'value "emqx-enterprise-elixir" in "exclude"' - name: Check line-break at EOF run: | ./scripts/check-nl-at-eof.sh @@ -92,13 +93,13 @@ jobs: MATRIX="$(echo "${APPS}" | jq -c ' [ (.[] | select(.profile == "emqx") | . + { - builder: "5.1-3", - otp: "25.3.2-1", + builder: "5.1-4", + otp: "25.3.2-2", elixir: "1.14.5" }), (.[] | select(.profile == "emqx-enterprise") | . 
+ { - builder: "5.1-3", - otp: ["25.3.2-1"][], + builder: "5.1-4", + otp: ["25.3.2-2"][], elixir: "1.14.5" }) ] @@ -146,6 +147,17 @@ jobs: path: ${{ matrix.profile }}.zip retention-days: 1 + run_emqx_app_tests: + needs: + - sanity-checks + - compile + uses: ./.github/workflows/run_emqx_app_tests.yaml + with: + runner: ${{ needs.sanity-checks.outputs.runner }} + builder: ${{ needs.sanity-checks.outputs.builder }} + before_ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }} + after_ref: ${{ github.sha }} + run_test_cases: needs: - sanity-checks diff --git a/.github/workflows/_push-entrypoint.yaml b/.github/workflows/_push-entrypoint.yaml index a65df1234..afdf2a050 100644 --- a/.github/workflows/_push-entrypoint.yaml +++ b/.github/workflows/_push-entrypoint.yaml @@ -12,6 +12,7 @@ on: branches: - 'master' - 'release-51' + - 'release-52' - 'ci/**' env: @@ -20,7 +21,7 @@ env: jobs: prepare: runs-on: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }} - container: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04' + container: 'ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu22.04' outputs: profile: ${{ steps.parse-git-ref.outputs.profile }} release: ${{ steps.parse-git-ref.outputs.release }} @@ -30,9 +31,9 @@ jobs: ct-host: ${{ steps.matrix.outputs.ct-host }} ct-docker: ${{ steps.matrix.outputs.ct-docker }} runner: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }} - builder: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04' - builder_vsn: '5.1-3' - otp_vsn: '25.3.2-1' + builder: 'ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu22.04' + builder_vsn: '5.1-4' + otp_vsn: '25.3.2-2' elixir_vsn: '1.14.5' steps: @@ -63,13 +64,13 @@ jobs: MATRIX="$(echo "${APPS}" | jq -c ' [ (.[] | select(.profile == "emqx") | . + { - builder: "5.1-3", - otp: "25.3.2-1", + builder: "5.1-4", + otp: "25.3.2-2", elixir: "1.14.5" }), (.[] | select(.profile == "emqx-enterprise") | . 
+ { - builder: "5.1-3", - otp: ["25.3.2-1"][], + builder: "5.1-4", + otp: ["25.3.2-2"][], elixir: "1.14.5" }) ] @@ -82,20 +83,8 @@ jobs: echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT - build_slim_packages: - if: ${{ needs.prepare.outputs.release != 'true' }} - needs: - - prepare - uses: ./.github/workflows/build_slim_packages.yaml - with: - runner: ${{ needs.prepare.outputs.runner }} - builder: ${{ needs.prepare.outputs.builder }} - builder_vsn: ${{ needs.prepare.outputs.builder_vsn }} - otp_vsn: ${{ needs.prepare.outputs.otp_vsn }} - elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }} - build_packages: - if: ${{ needs.prepare.outputs.release == 'true' }} + if: needs.prepare.outputs.release == 'true' needs: - prepare uses: ./.github/workflows/build_packages.yaml @@ -109,7 +98,7 @@ jobs: secrets: inherit build_and_push_docker_images: - if: ${{ needs.prepare.outputs.release == 'true' }} + if: needs.prepare.outputs.release == 'true' needs: - prepare uses: ./.github/workflows/build_and_push_docker_images.yaml @@ -124,7 +113,20 @@ jobs: runner: ${{ needs.prepare.outputs.runner }} secrets: inherit + build_slim_packages: + if: needs.prepare.outputs.release != 'true' + needs: + - prepare + uses: ./.github/workflows/build_slim_packages.yaml + with: + runner: ${{ needs.prepare.outputs.runner }} + builder: ${{ needs.prepare.outputs.builder }} + builder_vsn: ${{ needs.prepare.outputs.builder_vsn }} + otp_vsn: ${{ needs.prepare.outputs.otp_vsn }} + elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }} + compile: + if: needs.prepare.outputs.release != 'true' runs-on: ${{ needs.prepare.outputs.runner }} container: ${{ needs.prepare.outputs.builder }} needs: @@ -156,7 +158,19 @@ jobs: path: ${{ matrix.profile }}.zip retention-days: 1 + run_emqx_app_tests: + needs: + - prepare + - compile + uses: ./.github/workflows/run_emqx_app_tests.yaml + with: + runner: ${{ needs.prepare.outputs.runner }} + builder: ${{ needs.prepare.outputs.builder }} + before_ref: ${{ github.event.before }} + after_ref: ${{ github.sha }} + run_test_cases: + if: needs.prepare.outputs.release != 'true' needs: - prepare - compile @@ -169,6 +183,7 @@ jobs: ct-docker: ${{ needs.prepare.outputs.ct-docker }} run_conf_tests: + if: needs.prepare.outputs.release != 'true' needs: - prepare - compile @@ -178,6 +193,7 @@ jobs: builder: ${{ needs.prepare.outputs.builder }} static_checks: + if: needs.prepare.outputs.release != 'true' needs: - prepare - compile diff --git a/.github/workflows/build_and_push_docker_images.yaml b/.github/workflows/build_and_push_docker_images.yaml index b0d6aa481..3f568e430 100644 --- a/.github/workflows/build_and_push_docker_images.yaml +++ b/.github/workflows/build_and_push_docker_images.yaml @@ -61,7 +61,7 @@ on: otp_vsn: required: false type: string - default: '25.3.2-1' + default: '25.3.2-2' elixir_vsn: required: false type: string @@ -69,7 +69,7 @@ on: builder_vsn: required: false type: string - default: '5.1-3' + default: '5.1-4' runner: required: false type: string @@ -88,6 +88,11 @@ jobs: registry: - 'docker.io' - 'public.ecr.aws' + exclude: + - profile: emqx-enterprise + registry: 'public.ecr.aws' + - profile: emqx-enterprise-elixir + registry: 'public.ecr.aws' steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/build_packages.yaml b/.github/workflows/build_packages.yaml index cbe95b974..d482d2c0e 100644 --- a/.github/workflows/build_packages.yaml +++ b/.github/workflows/build_packages.yaml @@ -57,7 +57,7 @@ on: otp_vsn: required: false 
type: string - default: '25.3.2-1' + default: '25.3.2-2' elixir_vsn: required: false type: string @@ -69,7 +69,7 @@ on: builder_vsn: required: false type: string - default: '5.1-3' + default: '5.1-4' jobs: windows: @@ -264,7 +264,7 @@ jobs: path: _packages/${{ matrix.profile }}/ publish_artifacts: - runs-on: ${{ inputs.runner }} + runs-on: ubuntu-latest needs: - mac - linux @@ -280,7 +280,7 @@ jobs: name: ${{ matrix.profile }} path: packages/${{ matrix.profile }} - name: install dos2unix - run: apt-get update && apt install -y dos2unix + run: sudo apt-get update && sudo apt install -y dos2unix - name: get packages run: | set -eu @@ -300,7 +300,7 @@ jobs: env: PROFILE: ${{ matrix.profile }} run: | - set -e -u + set -eu if [ $PROFILE = 'emqx' ]; then s3dir='emqx-ce' elif [ $PROFILE = 'emqx-enterprise' ]; then diff --git a/.github/workflows/build_packages_cron.yaml b/.github/workflows/build_packages_cron.yaml index b245078da..a67ab81d2 100644 --- a/.github/workflows/build_packages_cron.yaml +++ b/.github/workflows/build_packages_cron.yaml @@ -13,8 +13,6 @@ jobs: linux: if: github.repository_owner == 'emqx' runs-on: aws-${{ matrix.arch }} - # always run in builder container because the host might have the wrong OTP version etc. - # otherwise buildx.sh does not run docker if arch and os matches the target arch and os. container: image: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os }}" @@ -24,15 +22,17 @@ jobs: profile: - ['emqx', 'master'] - ['emqx-enterprise', 'release-51'] + - ['emqx-enterprise', 'release-52'] otp: - - 25.3.2-1 + - 25.3.2-2 arch: - amd64 os: + - debian10 - ubuntu22.04 - amzn2023 builder: - - 5.1-3 + - 5.1-4 elixir: - 1.14.5 @@ -47,25 +47,32 @@ jobs: ref: ${{ matrix.profile[1] }} fetch-depth: 0 - - name: build emqx packages - env: - ELIXIR: ${{ matrix.elixir }} - PROFILE: ${{ matrix.profile[0] }} - ARCH: ${{ matrix.arch }} + - name: fix workdir run: | set -eu git config --global --add safe.directory "$GITHUB_WORKSPACE" - PKGTYPES="tgz pkg" - IS_ELIXIR="no" - for PKGTYPE in ${PKGTYPES}; - do - ./scripts/buildx.sh \ - --profile "${PROFILE}" \ - --pkgtype "${PKGTYPE}" \ - --arch "${ARCH}" \ - --elixir "${IS_ELIXIR}" \ - --builder "force_host" - done + # Align path for CMake caches + if [ ! 
"$PWD" = "/emqx" ]; then + ln -s $PWD /emqx + cd /emqx + fi + echo "pwd is $PWD" + + - name: build emqx packages + env: + PROFILE: ${{ matrix.profile[0] }} + ACLOCAL_PATH: "/usr/share/aclocal:/usr/local/share/aclocal" + run: | + set -eu + make "${PROFILE}-tgz" + make "${PROFILE}-pkg" + - name: test emqx packages + env: + PROFILE: ${{ matrix.profile[0] }} + run: | + set -eu + ./scripts/pkg-tests.sh "${PROFILE}-tgz" + ./scripts/pkg-tests.sh "${PROFILE}-pkg" - uses: actions/upload-artifact@v3 if: success() with: @@ -92,7 +99,7 @@ jobs: branch: - master otp: - - 25.3.2-1 + - 25.3.2-2 os: - macos-13 - macos-12-arm64 diff --git a/.github/workflows/build_slim_packages.yaml b/.github/workflows/build_slim_packages.yaml index b7ba78ef4..8e13ec0a1 100644 --- a/.github/workflows/build_slim_packages.yaml +++ b/.github/workflows/build_slim_packages.yaml @@ -34,15 +34,15 @@ on: builder: required: false type: string - default: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04' + default: 'ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu22.04' builder_vsn: required: false type: string - default: '5.1-3' + default: '5.1-4' otp_vsn: required: false type: string - default: '25.3.2-1' + default: '25.3.2-2' elixir_vsn: required: false type: string @@ -58,8 +58,8 @@ jobs: fail-fast: false matrix: profile: - - ["emqx", "25.3.2-1", "ubuntu20.04", "elixir"] - - ["emqx-enterprise", "25.3.2-1", "ubuntu20.04", "erlang"] + - ["emqx", "25.3.2-2", "ubuntu20.04", "elixir"] + - ["emqx-enterprise", "25.3.2-2", "ubuntu20.04", "erlang"] container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml index 6d4cc3dc4..a0b701d17 100644 --- a/.github/workflows/codeql.yaml +++ b/.github/workflows/codeql.yaml @@ -18,7 +18,7 @@ jobs: contents: read security-events: write container: - image: ghcr.io/emqx/emqx-builder/5.1-1:1.14.5-25.3.2-1-ubuntu22.04 + image: ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu22.04 strategy: fail-fast: false diff --git a/.github/workflows/performance_test.yaml b/.github/workflows/performance_test.yaml index 10b040271..224cfb0b3 100644 --- a/.github/workflows/performance_test.yaml +++ b/.github/workflows/performance_test.yaml @@ -23,7 +23,7 @@ jobs: prepare: runs-on: ubuntu-latest if: github.repository_owner == 'emqx' - container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu20.04 + container: ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu20.04 outputs: BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }} PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }} diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 1945caab0..ab145a764 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -59,7 +59,7 @@ jobs: with: asset_paths: '["packages/*"]' - name: update to emqx.io - if: startsWith(github.ref_name, 'v') && (github.event_name == 'release' || inputs.publish_release_artefacts) + if: startsWith(github.ref_name, 'v') && ((github.event_name == 'release' && !github.event.prerelease) || inputs.publish_release_artefacts) run: | set -eux curl -w %{http_code} \ @@ -70,6 +70,7 @@ jobs: -d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ github.ref_name }}\" }" \ ${{ secrets.EMQX_IO_RELEASE_API }} - name: Push to packagecloud.io + if: (github.event_name == 'release' && !github.event.prerelease) || inputs.publish_release_artefacts env: PROFILE: ${{ steps.profile.outputs.profile 
}} VERSION: ${{ steps.profile.outputs.version }} diff --git a/.github/workflows/run_emqx_app_tests.yaml b/.github/workflows/run_emqx_app_tests.yaml new file mode 100644 index 000000000..72fe2b0d5 --- /dev/null +++ b/.github/workflows/run_emqx_app_tests.yaml @@ -0,0 +1,65 @@ +name: Check emqx app standalone + +# These tests are needed because we provide the `emqx` application as a standalone +# dependency for plugins. + +concurrency: + group: test-standalone-${{ github.event_name }}-${{ github.ref }} + cancel-in-progress: true + +on: + workflow_call: + inputs: + runner: + required: true + type: string + builder: + required: true + type: string + before_ref: + required: true + type: string + after_ref: + required: true + type: string + +env: + IS_CI: "yes" + +jobs: + run_emqx_app_tests: + runs-on: ${{ inputs.runner }} + container: ${{ inputs.builder }} + + defaults: + run: + shell: bash + + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: run + env: + BEFORE_REF: ${{ inputs.before_ref }} + AFTER_REF: ${{ inputs.after_ref }} + run: | + git config --global --add safe.directory "$GITHUB_WORKSPACE" + changed_files="$(git diff --name-only ${BEFORE_REF} ${AFTER_REF} apps/emqx)" + if [ "$changed_files" = '' ]; then + echo "nothing changed in apps/emqx, ignored." + exit 0 + fi + make ensure-rebar3 + cp rebar3 apps/emqx/ + cd apps/emqx + ./rebar3 xref + ./rebar3 dialyzer + ./rebar3 eunit -v + ./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true + ./rebar3 proper -d test/props + - uses: actions/upload-artifact@v3 + if: failure() + with: + name: logs-${{ inputs.runner }} + path: apps/emqx/_build/test/logs diff --git a/.github/workflows/run_helm_tests.yaml b/.github/workflows/run_helm_tests.yaml index 1106b6057..ba3ebee5a 100644 --- a/.github/workflows/run_helm_tests.yaml +++ b/.github/workflows/run_helm_tests.yaml @@ -121,7 +121,7 @@ jobs: path: paho.mqtt.testing - name: install pytest run: | - pip install pytest==7.1.2 pytest-retry + pip install pytest==7.1.2 pytest-retry==1.3.0 echo "$HOME/.local/bin" >> $GITHUB_PATH - name: run paho test timeout-minutes: 10 diff --git a/.github/workflows/run_test_cases.yaml b/.github/workflows/run_test_cases.yaml index 48e551612..82b2bbeb9 100644 --- a/.github/workflows/run_test_cases.yaml +++ b/.github/workflows/run_test_cases.yaml @@ -29,6 +29,7 @@ env: jobs: eunit_and_proper: runs-on: ${{ inputs.runner }} + name: "eunit_and_proper (${{ matrix.profile }})" strategy: fail-fast: false matrix: @@ -69,6 +70,7 @@ jobs: ct_docker: runs-on: ${{ inputs.runner }} + name: "ct_docker (${{ matrix.app }}-${{ matrix.suitegroup }})" strategy: fail-fast: false matrix: @@ -116,6 +118,7 @@ jobs: ct: runs-on: ${{ inputs.runner }} + name: "ct (${{ matrix.app }}-${{ matrix.suitegroup }})" strategy: fail-fast: false matrix: @@ -155,6 +158,17 @@ jobs: name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }} path: _build/test/logs + tests_passed: + needs: + - eunit_and_proper + - ct + - ct_docker + runs-on: ${{ inputs.runner }} + strategy: + fail-fast: false + steps: + - run: echo "All tests passed" + make_cover: needs: - eunit_and_proper diff --git a/.github/workflows/static_checks.yaml b/.github/workflows/static_checks.yaml index 3b32a36b4..21a753a37 100644 --- a/.github/workflows/static_checks.yaml +++ b/.github/workflows/static_checks.yaml @@ -23,6 +23,7 @@ env: jobs: static_checks: runs-on: ${{ inputs.runner }} + name: "static_checks (${{ matrix.profile }})" strategy: fail-fast: false matrix: diff 
--git a/.github/workflows/upload-helm-charts.yaml b/.github/workflows/upload-helm-charts.yaml index 4b18efef8..593a78a7c 100644 --- a/.github/workflows/upload-helm-charts.yaml +++ b/.github/workflows/upload-helm-charts.yaml @@ -42,7 +42,8 @@ jobs: echo "version=$(./pkg-vsn.sh emqx-enterprise)" >> $GITHUB_OUTPUT ;; esac - - uses: emqx/push-helm-action@v1 + - uses: emqx/push-helm-action@v1.1 + if: github.event_name == 'release' && !github.event.prerelease with: charts_dir: "${{ github.workspace }}/deploy/charts/${{ steps.profile.outputs.profile }}" version: ${{ steps.profile.outputs.version }} diff --git a/.tool-versions b/.tool-versions index 3a2251dc8..a988325fa 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,2 +1,2 @@ -erlang 25.3.2-1 +erlang 25.3.2-2 elixir 1.14.5-otp-25 diff --git a/Makefile b/Makefile index 037d33cea..fc9286837 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ REBAR = $(CURDIR)/rebar3 BUILD = $(CURDIR)/build SCRIPTS = $(CURDIR)/scripts export EMQX_RELUP ?= true -export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-debian11 +export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-debian11 export EMQX_DEFAULT_RUNNER = debian:11-slim export EMQX_REL_FORM ?= tgz export QUICER_DOWNLOAD_FROM_RELEASE = 1 @@ -16,7 +16,7 @@ endif # Dashboard version # from https://github.com/emqx/emqx-dashboard5 export EMQX_DASHBOARD_VERSION ?= v1.3.2 -export EMQX_EE_DASHBOARD_VERSION ?= e1.1.1 +export EMQX_EE_DASHBOARD_VERSION ?= e1.2.0-beta.4 # `:=` should be used here, otherwise the `$(shell ...)` will be executed every time when the variable is used # In make 4.4+, for backward-compatibility the value from the original environment is used. diff --git a/README-CN.md b/README-CN.md index 314b34b9a..8c6f8d8c3 100644 --- a/README-CN.md +++ b/README-CN.md @@ -1,7 +1,7 @@ # EMQX [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) -[![Build Status](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml) +[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml) [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master) [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx) [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/) diff --git a/README-RU.md b/README-RU.md index 6baf38e2c..c22df7f3f 100644 --- a/README-RU.md +++ b/README-RU.md @@ -1,7 +1,7 @@ # Брокер EMQX [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) -[![Build Status](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml) +[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml) [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master) [![Docker 
Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx) [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/) diff --git a/README.md b/README.md index 87d611c5e..3624e90f8 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # EMQX [![GitHub Release](https://img.shields.io/github/release/emqx/emqx?color=brightgreen&label=Release)](https://github.com/emqx/emqx/releases) -[![Build Status](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/run_test_cases.yaml) +[![Build Status](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml/badge.svg)](https://github.com/emqx/emqx/actions/workflows/_push-entrypoint.yaml) [![Coverage Status](https://img.shields.io/coveralls/github/emqx/emqx/master?label=Coverage)](https://coveralls.io/github/emqx/emqx?branch=master) [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx?label=Docker%20Pulls)](https://hub.docker.com/r/emqx/emqx) [![Slack](https://img.shields.io/badge/Slack-EMQ-39AE85?logo=slack)](https://slack-invite.emqx.io/) diff --git a/apps/emqx/include/emqx_mqtt.hrl b/apps/emqx/include/emqx_mqtt.hrl index 2bd27339c..4d0188f71 100644 --- a/apps/emqx/include/emqx_mqtt.hrl +++ b/apps/emqx/include/emqx_mqtt.hrl @@ -679,4 +679,8 @@ end). -define(THROW_FRAME_ERROR(Reason), erlang:throw({?FRAME_PARSE_ERROR, Reason})). -define(THROW_SERIALIZE_ERROR(Reason), erlang:throw({?FRAME_SERIALIZE_ERROR, Reason})). +-define(MAX_PAYLOAD_FORMAT_SIZE, 1024). +-define(TRUNCATED_PAYLOAD_SIZE, 100). +-define(MAX_PAYLOAD_FORMAT_LIMIT(Bin), (byte_size(Bin) =< ?MAX_PAYLOAD_FORMAT_SIZE)). + -endif. diff --git a/apps/emqx/include/emqx_release.hrl b/apps/emqx/include/emqx_release.hrl index 1cd35ad58..d66c7982a 100644 --- a/apps/emqx/include/emqx_release.hrl +++ b/apps/emqx/include/emqx_release.hrl @@ -32,10 +32,10 @@ %% `apps/emqx/src/bpapi/README.md' %% Opensource edition --define(EMQX_RELEASE_CE, "5.1.5-patch.2"). +-define(EMQX_RELEASE_CE, "5.1.6"). %% Enterprise edition --define(EMQX_RELEASE_EE, "5.1.1"). +-define(EMQX_RELEASE_EE, "5.2.0-alpha.3"). %% The HTTP API version -define(EMQX_API_VERSION, "5.0"). diff --git a/apps/emqx/integration_test/.gitkeep b/apps/emqx/integration_test/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/apps/emqx/rebar.config b/apps/emqx/rebar.config index c2dfccad6..730155805 100644 --- a/apps/emqx/rebar.config +++ b/apps/emqx/rebar.config @@ -30,7 +30,7 @@ {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}}, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.10"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}}, - {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.14"}}}, + {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.16"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}}, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}, @@ -41,6 +41,16 @@ {extra_src_dirs, [{"etc", [recursive]}]}. 
{profiles, [ {test, [ + {deps, [ + {meck, "0.9.2"}, + {proper, "1.4.0"}, + {bbmustache, "1.10.0"}, + {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.8.6"}}} + ]}, + {extra_src_dirs, [{"test", [recursive]}, + {"integration_test", [recursive]}]} + ]}, + {standalone_test, [ {deps, [ {meck, "0.9.2"}, {proper, "1.4.0"}, diff --git a/apps/emqx/src/emqx.app.src b/apps/emqx/src/emqx.app.src index 73e390215..a80d6482a 100644 --- a/apps/emqx/src/emqx.app.src +++ b/apps/emqx/src/emqx.app.src @@ -2,7 +2,7 @@ {application, emqx, [ {id, "emqx"}, {description, "EMQX Core"}, - {vsn, "5.1.6"}, + {vsn, "5.1.8"}, {modules, []}, {registered, []}, {applications, [ @@ -14,7 +14,6 @@ esockd, cowboy, sasl, - os_mon, lc, hocon, emqx_durable_storage diff --git a/apps/emqx/src/emqx_alarm.erl b/apps/emqx/src/emqx_alarm.erl index 056f36050..8c0c35334 100644 --- a/apps/emqx/src/emqx_alarm.erl +++ b/apps/emqx/src/emqx_alarm.erl @@ -213,7 +213,7 @@ format(Node, #deactivated_alarm{ to_rfc3339(Timestamp) -> %% rfc3339 accuracy to millisecond - list_to_binary(calendar:system_time_to_rfc3339(Timestamp div 1000, [{unit, millisecond}])). + emqx_utils_calendar:epoch_to_rfc3339(Timestamp div 1000). %%-------------------------------------------------------------------- %% gen_server callbacks diff --git a/apps/emqx/src/emqx_banned.erl b/apps/emqx/src/emqx_banned.erl index a5c46da19..e246bb2c5 100644 --- a/apps/emqx/src/emqx_banned.erl +++ b/apps/emqx/src/emqx_banned.erl @@ -38,7 +38,8 @@ delete/1, info/1, format/1, - parse/1 + parse/1, + clear/0 ]). %% gen_server callbacks @@ -171,7 +172,7 @@ maybe_format_host({As, Who}) -> {As, Who}. to_rfc3339(Timestamp) -> - list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, second}])). + emqx_utils_calendar:epoch_to_rfc3339(Timestamp, second). -spec create(emqx_types:banned() | map()) -> {ok, emqx_types:banned()} | {error, {already_exist, emqx_types:banned()}}. @@ -226,6 +227,10 @@ delete(Who) -> info(InfoKey) -> mnesia:table_info(?BANNED_TAB, InfoKey). +clear() -> + _ = mria:clear_table(?BANNED_TAB), + ok. + %%-------------------------------------------------------------------- %% gen_server callbacks %%-------------------------------------------------------------------- diff --git a/apps/emqx/src/emqx_datetime.erl b/apps/emqx/src/emqx_datetime.erl deleted file mode 100644 index 60f40130b..000000000 --- a/apps/emqx/src/emqx_datetime.erl +++ /dev/null @@ -1,137 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- --module(emqx_datetime). - --include_lib("typerefl/include/types.hrl"). - -%% API --export([ - to_epoch_millisecond/1, - to_epoch_second/1, - human_readable_duration_string/1 -]). --export([ - epoch_to_rfc3339/1, - epoch_to_rfc3339/2 -]). - --reflect_type([ - epoch_millisecond/0, - epoch_second/0 -]). 
- --type epoch_second() :: non_neg_integer(). --type epoch_millisecond() :: non_neg_integer(). --typerefl_from_string({epoch_second/0, ?MODULE, to_epoch_second}). --typerefl_from_string({epoch_millisecond/0, ?MODULE, to_epoch_millisecond}). - -to_epoch_second(DateTime) -> - to_epoch(DateTime, second). - -to_epoch_millisecond(DateTime) -> - to_epoch(DateTime, millisecond). - -to_epoch(DateTime, Unit) -> - try - case string:to_integer(DateTime) of - {Epoch, []} when Epoch >= 0 -> {ok, Epoch}; - {_Epoch, []} -> {error, bad_epoch}; - _ -> {ok, calendar:rfc3339_to_system_time(DateTime, [{unit, Unit}])} - end - catch - error:_ -> - {error, bad_rfc3339_timestamp} - end. - -epoch_to_rfc3339(TimeStamp) -> - epoch_to_rfc3339(TimeStamp, millisecond). - -epoch_to_rfc3339(TimeStamp, Unit) when is_integer(TimeStamp) -> - list_to_binary(calendar:system_time_to_rfc3339(TimeStamp, [{unit, Unit}])). - --spec human_readable_duration_string(integer()) -> string(). -human_readable_duration_string(Milliseconds) -> - Seconds = Milliseconds div 1000, - {D, {H, M, S}} = calendar:seconds_to_daystime(Seconds), - L0 = [{D, " days"}, {H, " hours"}, {M, " minutes"}, {S, " seconds"}], - L1 = lists:dropwhile(fun({K, _}) -> K =:= 0 end, L0), - L2 = lists:map(fun({Time, Unit}) -> [integer_to_list(Time), Unit] end, L1), - lists:flatten(lists:join(", ", L2)). - --ifdef(TEST). --include_lib("eunit/include/eunit.hrl"). --compile(nowarn_export_all). --compile(export_all). -roots() -> [bar]. - -fields(bar) -> - [ - {second, ?MODULE:epoch_second()}, - {millisecond, ?MODULE:epoch_millisecond()} - ]. - --define(FORMAT(_Sec_, _Ms_), - lists:flatten( - io_lib:format("bar={second=~w,millisecond=~w}", [_Sec_, _Ms_]) - ) -). - -epoch_ok_test() -> - Args = [ - {0, 0, 0, 0}, - {1, 1, 1, 1}, - {"2022-01-01T08:00:00+08:00", "2022-01-01T08:00:00+08:00", 1640995200, 1640995200000} - ], - lists:foreach( - fun({Sec, Ms, EpochSec, EpochMs}) -> - check_ok(?FORMAT(Sec, Ms), EpochSec, EpochMs) - end, - Args - ), - ok. - -check_ok(Input, Sec, Ms) -> - {ok, Data} = hocon:binary(Input, #{}), - ?assertMatch( - #{bar := #{second := Sec, millisecond := Ms}}, - hocon_tconf:check_plain(?MODULE, Data, #{atom_key => true}, [bar]) - ), - ok. - -epoch_failed_test() -> - Args = [ - {-1, -1}, - {"1s", "1s"}, - {"2022-13-13T08:00:00+08:00", "2022-13-13T08:00:00+08:00"} - ], - lists:foreach( - fun({Sec, Ms}) -> - check_failed(?FORMAT(Sec, Ms)) - end, - Args - ), - ok. - -check_failed(Input) -> - {ok, Data} = hocon:binary(Input, #{}), - ?assertException( - throw, - _, - hocon_tconf:check_plain(?MODULE, Data, #{atom_key => true}, [bar]) - ), - ok. - --endif. diff --git a/apps/emqx/src/emqx_limiter/src/emqx_htb_limiter.erl b/apps/emqx/src/emqx_limiter/src/emqx_htb_limiter.erl index bcd4166af..7f50161a8 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_htb_limiter.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_htb_limiter.erl @@ -139,7 +139,7 @@ make_local_limiter(Cfg, Bucket) -> tokens => emqx_limiter_server:get_initial_val(Cfg), lasttime => ?NOW, bucket => Bucket, - capacity => emqx_limiter_schema:calc_capacity(Cfg) + capacity => emqx_limiter_utils:calc_capacity(Cfg) }. 
%%@doc create a limiter server's reference diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_decimal.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_decimal.erl index 33ba0e511..6bf4e9b20 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_decimal.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_decimal.erl @@ -24,13 +24,19 @@ sub/2, mul/2, put_to_counter/3, - floor_div/2 + floor_div/2, + precisely_add/2 ]). --export_type([decimal/0, zero_or_float/0]). +-export_type([decimal/0, zero_or_float/0, correction_value/0]). -type decimal() :: infinity | number(). -type zero_or_float() :: 0 | float(). +-type correction_value() :: #{ + correction := emqx_limiter_decimal:zero_or_float(), + any() => any() +}. + %%-------------------------------------------------------------------- %%% API %%-------------------------------------------------------------------- @@ -43,6 +49,12 @@ add(A, B) when add(A, B) -> A + B. +-spec precisely_add(number(), correction_value()) -> {integer(), correction_value()}. +precisely_add(Inc, #{correction := Correction} = Data) -> + FixedInc = Inc + Correction, + IntInc = erlang:floor(FixedInc), + {IntInc, Data#{correction := FixedInc - IntInc}}. + -spec sub(decimal(), decimal()) -> decimal(). sub(A, B) when A =:= infinity orelse diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_manager.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_manager.erl index afabc2580..91d59b3be 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_manager.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_manager.erl @@ -131,7 +131,7 @@ delete_root(Type) -> delete_bucket(?ROOT_ID, Type). post_config_update([limiter], _Config, NewConf, _OldConf, _AppEnvs) -> - Conf = emqx_limiter_schema:convert_node_opts(NewConf), + Conf = emqx_limiter_utils:convert_node_opts(NewConf), _ = [on_post_config_update(Type, Cfg) || {Type, Cfg} <- maps:to_list(Conf)], ok. diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl index e2951c302..802b29837 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl @@ -33,14 +33,7 @@ desc/1, types/0, short_paths/0, - calc_capacity/1, - extract_with_type/2, - default_client_config/0, - default_bucket_config/0, - short_paths_fields/1, - get_listener_opts/1, - get_node_opts/1, - convert_node_opts/1 + short_paths_fields/1 ]). -define(KILOBYTE, 1024). @@ -263,80 +256,6 @@ types() -> short_paths() -> [max_conn_rate, messages_rate, bytes_rate]. -calc_capacity(#{rate := infinity}) -> - infinity; -calc_capacity(#{rate := Rate, burst := Burst}) -> - erlang:floor(1000 * Rate / default_period()) + Burst. - -extract_with_type(_Type, undefined) -> - undefined; -extract_with_type(Type, #{client := ClientCfg} = BucketCfg) -> - BucketVal = maps:find(Type, BucketCfg), - ClientVal = maps:find(Type, ClientCfg), - merge_client_bucket(Type, ClientVal, BucketVal); -extract_with_type(Type, BucketCfg) -> - BucketVal = maps:find(Type, BucketCfg), - merge_client_bucket(Type, undefined, BucketVal). - -%% Since the client configuration can be absent and be a undefined value, -%% but we must need some basic settings to control the behaviour of the limiter, -%% so here add this helper function to generate a default setting. -%% This is a temporary workaround until we found a better way to simplify. 
-default_client_config() -> - #{ - rate => infinity, - initial => 0, - low_watermark => 0, - burst => 0, - divisible => true, - max_retry_time => timer:hours(1), - failure_strategy => force - }. - -default_bucket_config() -> - #{ - rate => infinity, - burst => 0, - initial => 0 - }. - -get_listener_opts(Conf) -> - Limiter = maps:get(limiter, Conf, undefined), - ShortPaths = maps:with(short_paths(), Conf), - get_listener_opts(Limiter, ShortPaths). - -get_node_opts(Type) -> - Opts = emqx:get_config([limiter, Type], default_bucket_config()), - case type_to_short_path_name(Type) of - undefined -> - Opts; - Name -> - case emqx:get_config([limiter, Name], undefined) of - undefined -> - Opts; - Rate -> - Opts#{rate := Rate} - end - end. - -convert_node_opts(Conf) -> - DefBucket = default_bucket_config(), - ShorPaths = short_paths(), - Fun = fun - %% The `client` in the node options was deprecated - (client, _Value, Acc) -> - Acc; - (Name, Value, Acc) -> - case lists:member(Name, ShorPaths) of - true -> - Type = short_path_name_to_type(Name), - Acc#{Type => DefBucket#{rate => Value}}; - _ -> - Acc#{Name => Value} - end - end, - maps:fold(Fun, #{}, Conf). - %%-------------------------------------------------------------------- %% Internal functions %%-------------------------------------------------------------------- @@ -538,51 +457,3 @@ alias_of_type(bytes) -> [bytes_in]; alias_of_type(_) -> []. - -merge_client_bucket(Type, {ok, ClientVal}, {ok, BucketVal}) -> - #{Type => BucketVal, client => #{Type => ClientVal}}; -merge_client_bucket(Type, {ok, ClientVal}, _) -> - #{client => #{Type => ClientVal}}; -merge_client_bucket(Type, _, {ok, BucketVal}) -> - #{Type => BucketVal}; -merge_client_bucket(_, _, _) -> - undefined. - -short_path_name_to_type(max_conn_rate) -> - connection; -short_path_name_to_type(messages_rate) -> - messages; -short_path_name_to_type(bytes_rate) -> - bytes. - -type_to_short_path_name(connection) -> - max_conn_rate; -type_to_short_path_name(messages) -> - messages_rate; -type_to_short_path_name(bytes) -> - bytes_rate; -type_to_short_path_name(_) -> - undefined. - -get_listener_opts(Limiter, ShortPaths) when map_size(ShortPaths) =:= 0 -> - Limiter; -get_listener_opts(undefined, ShortPaths) -> - convert_listener_short_paths(ShortPaths); -get_listener_opts(Limiter, ShortPaths) -> - Shorts = convert_listener_short_paths(ShortPaths), - emqx_utils_maps:deep_merge(Limiter, Shorts). - -convert_listener_short_paths(ShortPaths) -> - DefBucket = default_bucket_config(), - DefClient = default_client_config(), - Fun = fun(Name, Rate, Acc) -> - Type = short_path_name_to_type(Name), - case Name of - max_conn_rate -> - Acc#{Type => DefBucket#{rate => Rate}}; - _ -> - Client = maps:get(client, Acc, #{}), - Acc#{client => Client#{Type => DefClient#{rate => Rate}}} - end - end, - maps:fold(Fun, #{}, ShortPaths). 
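A minimal sketch, not part of the patch, of how the allocation helper moved to emqx_limiter_decimal:precisely_add/2 (added in the emqx_limiter_decimal.erl hunk above and now called from emqx_limiter_server.erl) behaves: fractional token allocations are carried in the `correction` field so that repeated integer counter increments do not drift. The function name precisely_add_example/0 and the bare #{correction => 0} map are assumptions for illustration; the module, function, and `correction` field come from the diff.

precisely_add_example() ->
    B0 = #{correction => 0},
    %% floor(0 + 1.4) = 1, carries ~0.4
    {1, B1} = emqx_limiter_decimal:precisely_add(1.4, B0),
    %% floor(0.4 + 1.4) = 1, carries ~0.8
    {1, B2} = emqx_limiter_decimal:precisely_add(1.4, B1),
    %% floor(0.8 + 1.4) = 2, carries ~0.2
    {2, _B3} = emqx_limiter_decimal:precisely_add(1.4, B2),
    ok.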
diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_server.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_server.erl index fcb1fd66c..00d255c9c 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_server.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_server.erl @@ -383,7 +383,7 @@ longitudinal( case lists:min([ShouldAlloc, Flow, Capacity]) of Available when Available > 0 -> - {Inc, Bucket2} = emqx_limiter_correction:add(Available, Bucket), + {Inc, Bucket2} = emqx_limiter_decimal:precisely_add(Available, Bucket), counters:add(Counter, Index, Inc), {Available, Buckets#{Name := Bucket2#{obtained := Obtained + Available}}}; @@ -419,7 +419,7 @@ maybe_adjust_root_tokens(#{root := #{rate := Rate} = Root, counter := Counter} = State; _ -> Available = erlang:min(Rate - Token, InFlow), - {Inc, Root2} = emqx_limiter_correction:add(Available, Root), + {Inc, Root2} = emqx_limiter_decimal:precisely_add(Available, Root), counters:add(Counter, ?ROOT_COUNTER_IDX, Inc), State#{root := Root2} end. @@ -473,7 +473,7 @@ dispatch_burst_to_buckets([Bucket | T], InFlow, Alloced, Buckets) -> index := Index, obtained := Obtained } = Bucket, - {Inc, Bucket2} = emqx_limiter_correction:add(InFlow, Bucket), + {Inc, Bucket2} = emqx_limiter_decimal:precisely_add(InFlow, Bucket), counters:add(Counter, Index, Inc), @@ -484,7 +484,7 @@ dispatch_burst_to_buckets([], _, Alloced, Buckets) -> -spec init_tree(emqx_limiter_schema:limiter_type()) -> state(). init_tree(Type) when is_atom(Type) -> - Cfg = emqx_limiter_schema:get_node_opts(Type), + Cfg = emqx_limiter_utils:get_node_opts(Type), init_tree(Type, Cfg). init_tree(Type, #{rate := Rate} = Cfg) -> @@ -515,7 +515,7 @@ do_add_bucket(Id, #{rate := Rate} = Cfg, #{buckets := Buckets} = State) -> undefined -> make_bucket(Id, Cfg, State); Bucket -> - Bucket2 = Bucket#{rate := Rate, capacity := emqx_limiter_schema:calc_capacity(Cfg)}, + Bucket2 = Bucket#{rate := Rate, capacity := emqx_limiter_utils:calc_capacity(Cfg)}, State#{buckets := Buckets#{Id := Bucket2}} end. @@ -536,7 +536,7 @@ make_bucket( rate => Rate, obtained => Initial, correction => 0, - capacity => emqx_limiter_schema:calc_capacity(Cfg), + capacity => emqx_limiter_utils:calc_capacity(Cfg), counter => Counter, index => NewIndex }, @@ -601,7 +601,7 @@ create_limiter_without_client(Id, Type, BucketCfg) -> false -> {ok, emqx_htb_limiter:make_infinity_limiter()}; {ok, Bucket, RefCfg} -> - ClientCfg = emqx_limiter_schema:default_client_config(), + ClientCfg = emqx_limiter_utils:default_client_config(), create_limiter_with_ref(Bucket, ClientCfg, RefCfg); Error -> Error @@ -627,7 +627,7 @@ find_referenced_bucket(Id, Type, #{rate := Rate} = Cfg) when Rate =/= infinity - end; %% this is a node-level reference find_referenced_bucket(_Id, Type, _) -> - case emqx_limiter_schema:get_node_opts(Type) of + case emqx_limiter_utils:get_node_opts(Type) of #{rate := infinity} -> false; NodeCfg -> diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_server_sup.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_server_sup.erl index be9b62d01..8f45da561 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_server_sup.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_server_sup.erl @@ -86,7 +86,7 @@ init([]) -> %% Internal functions %%--================================================================== make_child(Type) -> - Cfg = emqx_limiter_schema:get_node_opts(Type), + Cfg = emqx_limiter_utils:get_node_opts(Type), make_child(Type, Cfg). 
make_child(Type, Cfg) -> diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_utils.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_utils.erl new file mode 100644 index 000000000..6e528188b --- /dev/null +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_utils.erl @@ -0,0 +1,158 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2021-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-module(emqx_limiter_utils). + +-export([ + calc_capacity/1, + extract_with_type/2, + get_listener_opts/1, + get_node_opts/1, + convert_node_opts/1, + default_client_config/0, + default_bucket_config/0 +]). + +-import(emqx_limiter_schema, [default_period/0, short_paths/0]). + +%%-------------------------------------------------------------------- +%% Configuration-related runtime code +%%-------------------------------------------------------------------- + +calc_capacity(#{rate := infinity}) -> + infinity; +calc_capacity(#{rate := Rate, burst := Burst}) -> + erlang:floor(1000 * Rate / default_period()) + Burst. + +%% @doc extract data of a type from the nested config +extract_with_type(_Type, undefined) -> + undefined; +extract_with_type(Type, #{client := ClientCfg} = BucketCfg) -> + BucketVal = maps:find(Type, BucketCfg), + ClientVal = maps:find(Type, ClientCfg), + merge_client_bucket(Type, ClientVal, BucketVal); +extract_with_type(Type, BucketCfg) -> + BucketVal = maps:find(Type, BucketCfg), + merge_client_bucket(Type, undefined, BucketVal). + +%% @doc get the limiter configuration from the listener setting +%% and compatible with the old version limiter schema +get_listener_opts(Conf) -> + Limiter = maps:get(limiter, Conf, undefined), + ShortPaths = maps:with(short_paths(), Conf), + get_listener_opts(Limiter, ShortPaths). + +get_listener_opts(Limiter, ShortPaths) when map_size(ShortPaths) =:= 0 -> + Limiter; +get_listener_opts(undefined, ShortPaths) -> + convert_listener_short_paths(ShortPaths); +get_listener_opts(Limiter, ShortPaths) -> + Shorts = convert_listener_short_paths(ShortPaths), + emqx_utils_maps:deep_merge(Limiter, Shorts). + +convert_listener_short_paths(ShortPaths) -> + DefBucket = default_bucket_config(), + DefClient = default_client_config(), + Fun = fun(Name, Rate, Acc) -> + Type = short_path_name_to_type(Name), + case Name of + max_conn_rate -> + Acc#{Type => DefBucket#{rate => Rate}}; + _ -> + Client = maps:get(client, Acc, #{}), + Acc#{client => Client#{Type => DefClient#{rate => Rate}}} + end + end, + maps:fold(Fun, #{}, ShortPaths). 
+ +%% @doc get the node-level limiter configuration and compatible with the old version limiter schema +get_node_opts(Type) -> + Opts = emqx:get_config([limiter, Type], default_bucket_config()), + case type_to_short_path_name(Type) of + undefined -> + Opts; + Name -> + case emqx:get_config([limiter, Name], undefined) of + undefined -> + Opts; + Rate -> + Opts#{rate := Rate} + end + end. + +convert_node_opts(Conf) -> + DefBucket = default_bucket_config(), + ShorPaths = short_paths(), + Fun = fun + %% The `client` in the node options was deprecated + (client, _Value, Acc) -> + Acc; + (Name, Value, Acc) -> + case lists:member(Name, ShorPaths) of + true -> + Type = short_path_name_to_type(Name), + Acc#{Type => DefBucket#{rate => Value}}; + _ -> + Acc#{Name => Value} + end + end, + maps:fold(Fun, #{}, Conf). + +merge_client_bucket(Type, {ok, ClientVal}, {ok, BucketVal}) -> + #{Type => BucketVal, client => #{Type => ClientVal}}; +merge_client_bucket(Type, {ok, ClientVal}, _) -> + #{client => #{Type => ClientVal}}; +merge_client_bucket(Type, _, {ok, BucketVal}) -> + #{Type => BucketVal}; +merge_client_bucket(_, _, _) -> + undefined. + +short_path_name_to_type(max_conn_rate) -> + connection; +short_path_name_to_type(messages_rate) -> + messages; +short_path_name_to_type(bytes_rate) -> + bytes. + +type_to_short_path_name(connection) -> + max_conn_rate; +type_to_short_path_name(messages) -> + messages_rate; +type_to_short_path_name(bytes) -> + bytes_rate; +type_to_short_path_name(_) -> + undefined. + +%% Since the client configuration can be absent and be a undefined value, +%% but we must need some basic settings to control the behaviour of the limiter, +%% so here add this helper function to generate a default setting. +%% This is a temporary workaround until we found a better way to simplify. +default_client_config() -> + #{ + rate => infinity, + initial => 0, + low_watermark => 0, + burst => 0, + divisible => true, + max_retry_time => timer:hours(1), + failure_strategy => force + }. + +default_bucket_config() -> + #{ + rate => infinity, + burst => 0, + initial => 0 + }. diff --git a/apps/emqx/src/emqx_listeners.erl b/apps/emqx/src/emqx_listeners.erl index b1bb29159..964873e53 100644 --- a/apps/emqx/src/emqx_listeners.erl +++ b/apps/emqx/src/emqx_listeners.erl @@ -587,7 +587,7 @@ esockd_opts(ListenerId, Type, Opts0) -> Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0), Limiter = limiter(Opts0), Opts2 = - case emqx_limiter_schema:extract_with_type(connection, Limiter) of + case emqx_limiter_utils:extract_with_type(connection, Limiter) of undefined -> Opts1; BucketCfg -> @@ -732,7 +732,7 @@ zone(Opts) -> maps:get(zone, Opts, undefined). limiter(Opts) -> - emqx_limiter_schema:get_listener_opts(Opts). + emqx_limiter_utils:get_listener_opts(Opts). add_limiter_bucket(_Id, undefined) -> ok; diff --git a/apps/emqx/src/emqx_os_mon.erl b/apps/emqx/src/emqx_os_mon.erl index 144d2bfe5..4114a0b17 100644 --- a/apps/emqx/src/emqx_os_mon.erl +++ b/apps/emqx/src/emqx_os_mon.erl @@ -38,15 +38,14 @@ %% gen_server callbacks -export([ init/1, + handle_continue/2, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3 ]). --ifdef(TEST). --export([is_sysmem_check_supported/0]). --endif. +-export([is_os_check_supported/0]). -include("emqx.hrl"). @@ -56,7 +55,7 @@ start_link() -> gen_server:start_link({local, ?OS_MON}, ?MODULE, [], []). update(OS) -> - erlang:send(?MODULE, {monitor_conf_update, OS}). + gen_server:cast(?MODULE, {monitor_conf_update, OS}). 
%%-------------------------------------------------------------------- %% API @@ -83,12 +82,17 @@ current_sysmem_percent() -> %%-------------------------------------------------------------------- init([]) -> + {ok, undefined, {continue, setup}}. + +handle_continue(setup, undefined) -> + %% start os_mon temporarily + {ok, _} = application:ensure_all_started(os_mon), %% memsup is not reliable, ignore memsup:set_sysmem_high_watermark(1.0), SysHW = init_os_monitor(), MemRef = start_mem_check_timer(), CpuRef = start_cpu_check_timer(), - {ok, #{sysmem_high_watermark => SysHW, mem_time_ref => MemRef, cpu_time_ref => CpuRef}}. + {noreply, #{sysmem_high_watermark => SysHW, mem_time_ref => MemRef, cpu_time_ref => CpuRef}}. init_os_monitor() -> init_os_monitor(emqx:get_config([sysmon, os])). @@ -110,6 +114,12 @@ handle_call({set_sysmem_high_watermark, New}, _From, #{sysmem_high_watermark := handle_call(Req, _From, State) -> {reply, {error, {unexpected_call, Req}}, State}. +handle_cast({monitor_conf_update, OS}, State) -> + cancel_outdated_timer(State), + SysHW = init_os_monitor(OS), + MemRef = start_mem_check_timer(), + CpuRef = start_cpu_check_timer(), + {noreply, #{sysmem_high_watermark => SysHW, mem_time_ref => MemRef, cpu_time_ref => CpuRef}}; handle_cast(Msg, State) -> ?SLOG(error, #{msg => "unexpected_cast", cast => Msg}), {noreply, State}. @@ -151,12 +161,6 @@ handle_info({timeout, _Timer, cpu_check}, State) -> end, Ref = start_cpu_check_timer(), {noreply, State#{cpu_time_ref => Ref}}; -handle_info({monitor_conf_update, OS}, State) -> - cancel_outdated_timer(State), - SysHW = init_os_monitor(OS), - MemRef = start_mem_check_timer(), - CpuRef = start_cpu_check_timer(), - {noreply, #{sysmem_high_watermark => SysHW, mem_time_ref => MemRef, cpu_time_ref => CpuRef}}; handle_info(Info, State) -> ?SLOG(error, #{msg => "unexpected_info", info => Info}), {noreply, State}. @@ -182,12 +186,12 @@ start_cpu_check_timer() -> _ -> start_timer(Interval, cpu_check) end. -is_sysmem_check_supported() -> +is_os_check_supported() -> {unix, linux} =:= os:type(). start_mem_check_timer() -> Interval = emqx:get_config([sysmon, os, mem_check_interval]), - case is_integer(Interval) andalso is_sysmem_check_supported() of + case is_integer(Interval) andalso is_os_check_supported() of true -> start_timer(Interval, mem_check); false -> @@ -205,7 +209,7 @@ update_mem_alarm_status(HWM) when HWM > 1.0 orelse HWM < 0.0 -> <<"Deactivated mem usage alarm due to out of range threshold">> ); update_mem_alarm_status(HWM) -> - is_sysmem_check_supported() andalso + is_os_check_supported() andalso do_update_mem_alarm_status(HWM), ok. diff --git a/apps/emqx/src/emqx_packet.erl b/apps/emqx/src/emqx_packet.erl index 96eacc5a9..9cb23be2e 100644 --- a/apps/emqx/src/emqx_packet.erl +++ b/apps/emqx/src/emqx_packet.erl @@ -55,7 +55,7 @@ format/2 ]). --export([encode_hex/1]). +-export([format_truncated_payload/3]). -define(TYPE_NAMES, {'CONNECT', 'CONNACK', 'PUBLISH', 'PUBACK', 'PUBREC', 'PUBREL', 'PUBCOMP', 'SUBSCRIBE', @@ -616,9 +616,32 @@ format_password(undefined) -> ""; format_password(<<>>) -> ""; format_password(_Password) -> "******". -format_payload(Payload, text) -> ["Payload=", io_lib:format("~ts", [Payload])]; -format_payload(Payload, hex) -> ["Payload(hex)=", encode_hex(Payload)]; -format_payload(_, hidden) -> "Payload=******". 
+format_payload(_, hidden) -> + "Payload=******"; +format_payload(Payload, text) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) -> + ["Payload=", unicode:characters_to_list(Payload)]; +format_payload(Payload, hex) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) -> + ["Payload(hex)=", binary:encode_hex(Payload)]; +format_payload(<> = Payload, Type) -> + [ + "Payload=", + format_truncated_payload(Part, byte_size(Payload), Type) + ]. + +format_truncated_payload(Bin, Size, Type) -> + Bin2 = + case Type of + text -> Bin; + hex -> binary:encode_hex(Bin) + end, + unicode:characters_to_list( + [ + Bin2, + "... The ", + integer_to_list(Size - ?TRUNCATED_PAYLOAD_SIZE), + " bytes of this log are truncated" + ] + ). i(true) -> 1; i(false) -> 0; @@ -641,71 +664,3 @@ format_topic_filters(Filters) -> ), "]" ]. - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% Hex encoding functions -%% Copy from binary:encode_hex/1 (was only introduced in OTP24). -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% --define(HEX(X), (hex(X)):16). --compile({inline, [hex/1]}). --spec encode_hex(Bin) -> Bin2 when - Bin :: binary(), - Bin2 :: <<_:_*16>>. -encode_hex(Data) when byte_size(Data) rem 8 =:= 0 -> - << - <> - || <> <= Data - >>; -encode_hex(Data) when byte_size(Data) rem 7 =:= 0 -> - << - <> - || <> <= Data - >>; -encode_hex(Data) when byte_size(Data) rem 6 =:= 0 -> - <<<> || <> <= Data>>; -encode_hex(Data) when byte_size(Data) rem 5 =:= 0 -> - <<<> || <> <= Data>>; -encode_hex(Data) when byte_size(Data) rem 4 =:= 0 -> - <<<> || <> <= Data>>; -encode_hex(Data) when byte_size(Data) rem 3 =:= 0 -> - <<<> || <> <= Data>>; -encode_hex(Data) when byte_size(Data) rem 2 =:= 0 -> - <<<> || <> <= Data>>; -encode_hex(Data) when is_binary(Data) -> - <<<> || <> <= Data>>; -encode_hex(Bin) -> - erlang:error(badarg, [Bin]). - -hex(X) -> - element( - X + 1, - {16#3030, 16#3031, 16#3032, 16#3033, 16#3034, 16#3035, 16#3036, 16#3037, 16#3038, 16#3039, - 16#3041, 16#3042, 16#3043, 16#3044, 16#3045, 16#3046, 16#3130, 16#3131, 16#3132, - 16#3133, 16#3134, 16#3135, 16#3136, 16#3137, 16#3138, 16#3139, 16#3141, 16#3142, - 16#3143, 16#3144, 16#3145, 16#3146, 16#3230, 16#3231, 16#3232, 16#3233, 16#3234, - 16#3235, 16#3236, 16#3237, 16#3238, 16#3239, 16#3241, 16#3242, 16#3243, 16#3244, - 16#3245, 16#3246, 16#3330, 16#3331, 16#3332, 16#3333, 16#3334, 16#3335, 16#3336, - 16#3337, 16#3338, 16#3339, 16#3341, 16#3342, 16#3343, 16#3344, 16#3345, 16#3346, - 16#3430, 16#3431, 16#3432, 16#3433, 16#3434, 16#3435, 16#3436, 16#3437, 16#3438, - 16#3439, 16#3441, 16#3442, 16#3443, 16#3444, 16#3445, 16#3446, 16#3530, 16#3531, - 16#3532, 16#3533, 16#3534, 16#3535, 16#3536, 16#3537, 16#3538, 16#3539, 16#3541, - 16#3542, 16#3543, 16#3544, 16#3545, 16#3546, 16#3630, 16#3631, 16#3632, 16#3633, - 16#3634, 16#3635, 16#3636, 16#3637, 16#3638, 16#3639, 16#3641, 16#3642, 16#3643, - 16#3644, 16#3645, 16#3646, 16#3730, 16#3731, 16#3732, 16#3733, 16#3734, 16#3735, - 16#3736, 16#3737, 16#3738, 16#3739, 16#3741, 16#3742, 16#3743, 16#3744, 16#3745, - 16#3746, 16#3830, 16#3831, 16#3832, 16#3833, 16#3834, 16#3835, 16#3836, 16#3837, - 16#3838, 16#3839, 16#3841, 16#3842, 16#3843, 16#3844, 16#3845, 16#3846, 16#3930, - 16#3931, 16#3932, 16#3933, 16#3934, 16#3935, 16#3936, 16#3937, 16#3938, 16#3939, - 16#3941, 16#3942, 16#3943, 16#3944, 16#3945, 16#3946, 16#4130, 16#4131, 16#4132, - 16#4133, 16#4134, 16#4135, 16#4136, 16#4137, 16#4138, 16#4139, 16#4141, 16#4142, - 16#4143, 16#4144, 16#4145, 16#4146, 16#4230, 16#4231, 16#4232, 16#4233, 16#4234, - 16#4235, 16#4236, 16#4237, 16#4238, 16#4239, 16#4241, 16#4242, 16#4243, 
16#4244, - 16#4245, 16#4246, 16#4330, 16#4331, 16#4332, 16#4333, 16#4334, 16#4335, 16#4336, - 16#4337, 16#4338, 16#4339, 16#4341, 16#4342, 16#4343, 16#4344, 16#4345, 16#4346, - 16#4430, 16#4431, 16#4432, 16#4433, 16#4434, 16#4435, 16#4436, 16#4437, 16#4438, - 16#4439, 16#4441, 16#4442, 16#4443, 16#4444, 16#4445, 16#4446, 16#4530, 16#4531, - 16#4532, 16#4533, 16#4534, 16#4535, 16#4536, 16#4537, 16#4538, 16#4539, 16#4541, - 16#4542, 16#4543, 16#4544, 16#4545, 16#4546, 16#4630, 16#4631, 16#4632, 16#4633, - 16#4634, 16#4635, 16#4636, 16#4637, 16#4638, 16#4639, 16#4641, 16#4642, 16#4643, - 16#4644, 16#4645, 16#4646} - ). diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index e6bff790e..4fdec3179 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -46,7 +46,6 @@ -type timeout_duration_s() :: 0..?MAX_INT_TIMEOUT_S. -type timeout_duration_ms() :: 0..?MAX_INT_TIMEOUT_MS. -type bytesize() :: integer(). --type mqtt_max_packet_size() :: 1..?MAX_INT_MQTT_PACKET_SIZE. -type wordsize() :: bytesize(). -type percent() :: float(). -type file() :: string(). @@ -73,7 +72,6 @@ -typerefl_from_string({timeout_duration_s/0, emqx_schema, to_timeout_duration_s}). -typerefl_from_string({timeout_duration_ms/0, emqx_schema, to_timeout_duration_ms}). -typerefl_from_string({bytesize/0, emqx_schema, to_bytesize}). --typerefl_from_string({mqtt_max_packet_size/0, emqx_schema, to_bytesize}). -typerefl_from_string({wordsize/0, emqx_schema, to_wordsize}). -typerefl_from_string({percent/0, emqx_schema, to_percent}). -typerefl_from_string({comma_separated_list/0, emqx_schema, to_comma_separated_list}). @@ -93,6 +91,7 @@ -export([ validate_heap_size/1, + validate_packet_size/1, user_lookup_fun_tr/2, validate_alarm_actions/1, validate_keepalive_multiplier/1, @@ -154,7 +153,6 @@ timeout_duration_s/0, timeout_duration_ms/0, bytesize/0, - mqtt_max_packet_size/0, wordsize/0, percent/0, file/0, @@ -1584,7 +1582,7 @@ fields("sysmon_os") -> sc( hoconsc:union([disabled, duration()]), #{ - default => <<"60s">>, + default => default_mem_check_interval(), desc => ?DESC(sysmon_os_mem_check_interval) } )}, @@ -2003,8 +2001,8 @@ filter(Opts) -> %% SSL listener and client. -spec common_ssl_opts_schema(map(), server | client) -> hocon_schema:field_schema(). 
common_ssl_opts_schema(Defaults, Type) -> - D = fun(Field) -> maps:get(to_atom(Field), Defaults, undefined) end, - Df = fun(Field, Default) -> maps:get(to_atom(Field), Defaults, Default) end, + D = fun(Field) -> maps:get(Field, Defaults, undefined) end, + Df = fun(Field, Default) -> maps:get(Field, Defaults, Default) end, Collection = maps:get(versions, Defaults, tls_all_available), DefaultVersions = default_tls_vsns(Collection), [ @@ -2047,7 +2045,7 @@ common_ssl_opts_schema(Defaults, Type) -> sc( hoconsc:enum([verify_peer, verify_none]), #{ - default => Df("verify", verify_none), + default => Df(verify, verify_none), desc => ?DESC(common_ssl_opts_schema_verify) } )}, @@ -2055,7 +2053,7 @@ common_ssl_opts_schema(Defaults, Type) -> sc( boolean(), #{ - default => Df("reuse_sessions", true), + default => Df(reuse_sessions, true), desc => ?DESC(common_ssl_opts_schema_reuse_sessions) } )}, @@ -2063,7 +2061,7 @@ common_ssl_opts_schema(Defaults, Type) -> sc( non_neg_integer(), #{ - default => Df("depth", 10), + default => Df(depth, 10), desc => ?DESC(common_ssl_opts_schema_depth) } )}, @@ -2090,7 +2088,7 @@ common_ssl_opts_schema(Defaults, Type) -> validator => fun(Input) -> validate_tls_versions(Collection, Input) end } )}, - {"ciphers", ciphers_schema(D("ciphers"))}, + {"ciphers", ciphers_schema(D(ciphers))}, {"user_lookup_fun", sc( typerefl:alias("string", any()), @@ -2105,7 +2103,7 @@ common_ssl_opts_schema(Defaults, Type) -> sc( boolean(), #{ - default => Df("secure_renegotiate", true), + default => Df(secure_renegotiate, true), desc => ?DESC(common_ssl_opts_schema_secure_renegotiate) } )}, @@ -2125,7 +2123,7 @@ common_ssl_opts_schema(Defaults, Type) -> sc( duration(), #{ - default => Df("hibernate_after", <<"5s">>), + default => Df(hibernate_after, <<"5s">>), desc => ?DESC(common_ssl_opts_schema_hibernate_after) } )} @@ -2134,15 +2132,15 @@ common_ssl_opts_schema(Defaults, Type) -> %% @doc Make schema for SSL listener options. -spec server_ssl_opts_schema(map(), boolean()) -> hocon_schema:field_schema(). 
server_ssl_opts_schema(Defaults, IsRanchListener) -> - D = fun(Field) -> maps:get(to_atom(Field), Defaults, undefined) end, - Df = fun(Field, Default) -> maps:get(to_atom(Field), Defaults, Default) end, + D = fun(Field) -> maps:get(Field, Defaults, undefined) end, + Df = fun(Field, Default) -> maps:get(Field, Defaults, Default) end, common_ssl_opts_schema(Defaults, server) ++ [ {"dhfile", sc( string(), #{ - default => D("dhfile"), + default => D(dhfile), required => false, desc => ?DESC(server_ssl_opts_schema_dhfile) } @@ -2151,7 +2149,7 @@ server_ssl_opts_schema(Defaults, IsRanchListener) -> sc( boolean(), #{ - default => Df("fail_if_no_peer_cert", false), + default => Df(fail_if_no_peer_cert, false), desc => ?DESC(server_ssl_opts_schema_fail_if_no_peer_cert) } )}, @@ -2159,7 +2157,7 @@ server_ssl_opts_schema(Defaults, IsRanchListener) -> sc( boolean(), #{ - default => Df("honor_cipher_order", true), + default => Df(honor_cipher_order, true), desc => ?DESC(server_ssl_opts_schema_honor_cipher_order) } )}, @@ -2167,7 +2165,7 @@ server_ssl_opts_schema(Defaults, IsRanchListener) -> sc( boolean(), #{ - default => Df("client_renegotiation", true), + default => Df(client_renegotiation, true), desc => ?DESC(server_ssl_opts_schema_client_renegotiation) } )}, @@ -2175,7 +2173,7 @@ server_ssl_opts_schema(Defaults, IsRanchListener) -> sc( duration(), #{ - default => Df("handshake_timeout", <<"15s">>), + default => Df(handshake_timeout, <<"15s">>), desc => ?DESC(server_ssl_opts_schema_handshake_timeout) } )} @@ -2618,6 +2616,16 @@ validate_heap_size(Siz) when is_integer(Siz) -> validate_heap_size(_SizStr) -> {error, invalid_heap_size}. +validate_packet_size(Siz) when is_integer(Siz) andalso Siz < 1 -> + {error, #{reason => max_mqtt_packet_size_too_small, minimum => 1}}; +validate_packet_size(Siz) when is_integer(Siz) andalso Siz > ?MAX_INT_MQTT_PACKET_SIZE -> + Max = integer_to_list(round(?MAX_INT_MQTT_PACKET_SIZE / 1024 / 1024)) ++ "M", + {error, #{reason => max_mqtt_packet_size_too_large, maximum => Max}}; +validate_packet_size(Siz) when is_integer(Siz) -> + ok; +validate_packet_size(_SizStr) -> + {error, invalid_packet_size}. + validate_keepalive_multiplier(Multiplier) when is_number(Multiplier) andalso Multiplier >= 1.0 andalso Multiplier =< 65535.0 -> @@ -3380,9 +3388,10 @@ mqtt_general() -> )}, {"max_packet_size", sc( - mqtt_max_packet_size(), + bytesize(), #{ default => <<"1MB">>, + validator => fun ?MODULE:validate_packet_size/1, desc => ?DESC(mqtt_max_packet_size) } )}, @@ -3648,3 +3657,9 @@ shared_subscription_strategy() -> desc => ?DESC(broker_shared_subscription_strategy) } )}. + +default_mem_check_interval() -> + case emqx_os_mon:is_os_check_supported() of + true -> <<"60s">>; + false -> disabled + end. diff --git a/apps/emqx/src/emqx_stats.erl b/apps/emqx/src/emqx_stats.erl index ef9109e33..e590577da 100644 --- a/apps/emqx/src/emqx_stats.erl +++ b/apps/emqx/src/emqx_stats.erl @@ -37,7 +37,8 @@ setstat/2, setstat/3, statsfun/1, - statsfun/2 + statsfun/2, + names/0 ]). -export([ @@ -157,6 +158,28 @@ getstats() -> _ -> ets:tab2list(?TAB) end. 
+names() -> + [ + emqx_connections_count, + emqx_connections_max, + emqx_live_connections_count, + emqx_live_connections_max, + emqx_sessions_count, + emqx_sessions_max, + emqx_topics_count, + emqx_topics_max, + emqx_suboptions_count, + emqx_suboptions_max, + emqx_subscribers_count, + emqx_subscribers_max, + emqx_subscriptions_count, + emqx_subscriptions_max, + emqx_subscriptions_shared_count, + emqx_subscriptions_shared_max, + emqx_retained_count, + emqx_retained_max + ]. + %% @doc Get stats by name. -spec getstat(atom()) -> non_neg_integer(). getstat(Name) -> diff --git a/apps/emqx/src/emqx_sys_mon.erl b/apps/emqx/src/emqx_sys_mon.erl index f1190f586..1d3d32199 100644 --- a/apps/emqx/src/emqx_sys_mon.erl +++ b/apps/emqx/src/emqx_sys_mon.erl @@ -29,6 +29,7 @@ %% gen_server callbacks -export([ init/1, + handle_continue/2, handle_call/3, handle_cast/2, handle_info/2, @@ -70,11 +71,14 @@ update(VM) -> init([]) -> emqx_logger:set_proc_metadata(#{sysmon => true}), - init_system_monitor(), + {ok, undefined, {continue, setup}}. +handle_continue(setup, undefined) -> + init_system_monitor(), %% Monitor cluster partition event ekka:monitor(partition, fun handle_partition_event/1), - {ok, start_timer(#{timer => undefined, events => []})}. + NewState = start_timer(#{timer => undefined, events => []}), + {noreply, NewState, hibernate}. start_timer(State) -> State#{timer := emqx_utils:start_timer(timer:seconds(2), reset)}. diff --git a/apps/emqx/src/emqx_sys_sup.erl b/apps/emqx/src/emqx_sys_sup.erl index 8c26df020..25718ba76 100644 --- a/apps/emqx/src/emqx_sys_sup.erl +++ b/apps/emqx/src/emqx_sys_sup.erl @@ -19,21 +19,25 @@ -behaviour(supervisor). -export([start_link/0]). - -export([init/1]). start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). init([]) -> - Childs = [ - child_spec(emqx_sys), - child_spec(emqx_alarm), - child_spec(emqx_sys_mon), - child_spec(emqx_os_mon), - child_spec(emqx_vm_mon) - ], - {ok, {{one_for_one, 10, 100}, Childs}}. + OsMon = + case emqx_os_mon:is_os_check_supported() of + true -> [child_spec(emqx_os_mon)]; + false -> [] + end, + Children = + [ + child_spec(emqx_sys), + child_spec(emqx_alarm), + child_spec(emqx_sys_mon), + child_spec(emqx_vm_mon) + ] ++ OsMon, + {ok, {{one_for_one, 10, 100}, Children}}. %%-------------------------------------------------------------------- %% Internal functions diff --git a/apps/emqx/src/emqx_tls_lib.erl b/apps/emqx/src/emqx_tls_lib.erl index b5b653f56..9113bd5e6 100644 --- a/apps/emqx/src/emqx_tls_lib.erl +++ b/apps/emqx/src/emqx_tls_lib.erl @@ -62,6 +62,8 @@ [ocsp, issuer_pem] ]). +-define(ALLOW_EMPTY_PEM, [[<<"cacertfile">>], [cacertfile]]). + %% non-empty string -define(IS_STRING(L), (is_list(L) andalso L =/= [] andalso is_integer(hd(L)))). 
%% non-empty list of strings @@ -330,6 +332,13 @@ ensure_ssl_files_per_key(Dir, SSL, [KeyPath | KeyPaths], Opts) -> ensure_ssl_file(_Dir, _KeyPath, SSL, undefined, _Opts) -> {ok, SSL}; +ensure_ssl_file(_Dir, KeyPath, SSL, MaybePem, _Opts) when + MaybePem =:= "" orelse MaybePem =:= <<"">> +-> + case lists:member(KeyPath, ?ALLOW_EMPTY_PEM) of + true -> {ok, SSL}; + false -> {error, #{reason => pem_file_path_or_string_is_required}} + end; ensure_ssl_file(Dir, KeyPath, SSL, MaybePem, Opts) -> case is_valid_string(MaybePem) of true -> diff --git a/apps/emqx/src/emqx_topic_index.erl b/apps/emqx/src/emqx_topic_index.erl new file mode 100644 index 000000000..a6f662f74 --- /dev/null +++ b/apps/emqx/src/emqx_topic_index.erl @@ -0,0 +1,242 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +%% @doc Topic index for matching topics to topic filters. +%% +%% Works on top of ETS ordered_set table. Keys are tuples constructed from +%% parsed topic filters and record IDs, wrapped in a tuple to order them +%% strictly greater than unit tuple (`{}`). Existing table may be used if +%% existing keys will not collide with index keys. +%% +%% Designed to effectively answer questions like: +%% 1. Does any topic filter match given topic? +%% 2. Which records are associated with topic filters matching given topic? +%% 3. Which topic filters match given topic? +%% 4. Which record IDs are associated with topic filters matching given topic? + +-module(emqx_topic_index). + +-export([new/0]). +-export([insert/4]). +-export([delete/3]). +-export([match/2]). +-export([matches/3]). + +-export([get_id/1]). +-export([get_topic/1]). +-export([get_record/2]). + +-type word() :: binary() | '+' | '#'. +-type key(ID) :: {[word()], {ID}}. +-type match(ID) :: key(ID). + +%% @doc Create a new ETS table suitable for topic index. +%% Usable mostly for testing purposes. +-spec new() -> ets:table(). +new() -> + ets:new(?MODULE, [public, ordered_set, {read_concurrency, true}]). + +%% @doc Insert a new entry into the index that associates given topic filter to given +%% record ID, and attaches arbitrary record to the entry. This allows users to choose +%% between regular and "materialized" indexes, for example. +-spec insert(emqx_types:topic(), _ID, _Record, ets:table()) -> true. +insert(Filter, ID, Record, Tab) -> + ets:insert(Tab, {{words(Filter), {ID}}, Record}). + +%% @doc Delete an entry from the index that associates given topic filter to given +%% record ID. Deleting non-existing entry is not an error. +-spec delete(emqx_types:topic(), _ID, ets:table()) -> true. +delete(Filter, ID, Tab) -> + ets:delete(Tab, {words(Filter), {ID}}). + +%% @doc Match given topic against the index and return the first match, or `false` if +%% no match is found. +-spec match(emqx_types:topic(), ets:table()) -> match(_ID) | false. 
+match(Topic, Tab) ->
+    {Words, RPrefix} = match_init(Topic),
+    match(Words, RPrefix, Tab).
+
+match(Words, RPrefix, Tab) ->
+    Prefix = lists:reverse(RPrefix),
+    match(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Tab).
+
+match(K, Prefix, Words, RPrefix, Tab) ->
+    case match_next(Prefix, K, Words) of
+        true ->
+            K;
+        skip ->
+            match(ets:next(Tab, K), Prefix, Words, RPrefix, Tab);
+        stop ->
+            false;
+        Matched ->
+            match_rest(Matched, Words, RPrefix, Tab)
+    end.
+
+match_rest([W1 | [W2 | _] = SLast], [W1 | [W2 | _] = Rest], RPrefix, Tab) ->
+    % NOTE
+    % Fast-forward through identical words in the topic and the last key suffixes.
+    % This should save us a few redundant `ets:next` calls at the cost of slightly
+    % more complex match patterns.
+    match_rest(SLast, Rest, [W1 | RPrefix], Tab);
+match_rest(SLast, [W | Rest], RPrefix, Tab) when is_list(SLast) ->
+    match(Rest, [W | RPrefix], Tab);
+match_rest(plus, [W | Rest], RPrefix, Tab) ->
+    % NOTE
+    % There's '+' in the key suffix, meaning we should consider 2 alternatives:
+    % 1. Match the rest of the topic as if there was '+' in the current position.
+    % 2. Skip this key and try to match the topic as it is.
+    case match(Rest, ['+' | RPrefix], Tab) of
+        Match = {_, _} ->
+            Match;
+        false ->
+            match(Rest, [W | RPrefix], Tab)
+    end;
+match_rest(_, [], _RPrefix, _Tab) ->
+    false.
+
+%% @doc Match given topic against the index and return _all_ matches.
+%% If `unique` option is given, return only unique matches by record ID.
+-spec matches(emqx_types:topic(), ets:table(), _Opts :: [unique]) -> [match(_ID)].
+matches(Topic, Tab, Opts) ->
+    {Words, RPrefix} = match_init(Topic),
+    AccIn =
+        case Opts of
+            [unique | _] -> #{};
+            [] -> []
+        end,
+    Matches = matches(Words, RPrefix, AccIn, Tab),
+    case Matches of
+        #{} -> maps:values(Matches);
+        _ -> Matches
+    end.
+
+matches(Words, RPrefix, Acc, Tab) ->
+    Prefix = lists:reverse(RPrefix),
+    matches(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Acc, Tab).
+
+matches(Words, RPrefix, K = {Filter, _}, Acc, Tab) ->
+    Prefix = lists:reverse(RPrefix),
+    case Prefix > Filter of
+        true ->
+            % NOTE: Prefix already greater than the last key seen, need to `ets:next/2`.
+            matches(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Acc, Tab);
+        false ->
+            % NOTE: Prefix is still less than or equal to the last key seen, reuse it.
+            matches(K, Prefix, Words, RPrefix, Acc, Tab)
+    end.
+
+matches(K, Prefix, Words, RPrefix, Acc, Tab) ->
+    case match_next(Prefix, K, Words) of
+        true ->
+            matches(ets:next(Tab, K), Prefix, Words, RPrefix, match_add(K, Acc), Tab);
+        skip ->
+            matches(ets:next(Tab, K), Prefix, Words, RPrefix, Acc, Tab);
+        stop ->
+            Acc;
+        Matched ->
+            % NOTE: Preserve next key on the stack to save on `ets:next/2` calls.
+            matches_rest(Matched, Words, RPrefix, K, Acc, Tab)
+    end.
+
+matches_rest([W1 | [W2 | _] = SLast], [W1 | [W2 | _] = Rest], RPrefix, K, Acc, Tab) ->
+    % NOTE
+    % Fast-forward through identical words in the topic and the last key suffixes.
+    % This should save us a few redundant `ets:next` calls at the cost of slightly
+    % more complex match patterns.
+    matches_rest(SLast, Rest, [W1 | RPrefix], K, Acc, Tab);
+matches_rest(SLast, [W | Rest], RPrefix, K, Acc, Tab) when is_list(SLast) ->
+    matches(Rest, [W | RPrefix], K, Acc, Tab);
+matches_rest(plus, [W | Rest], RPrefix, K, Acc, Tab) ->
+    % NOTE
+    % There's '+' in the key suffix, meaning we should accumulate all matches from
+    % each of 2 branches:
+    % 2.
Skip this key and try to match the topic as it is. + NAcc = matches(Rest, ['+' | RPrefix], K, Acc, Tab), + matches(Rest, [W | RPrefix], K, NAcc, Tab); +matches_rest(_, [], _RPrefix, _K, Acc, _Tab) -> + Acc. + +match_add(K = {_Filter, ID}, Acc = #{}) -> + % NOTE: ensuring uniqueness by record ID + Acc#{ID => K}; +match_add(K, Acc) -> + [K | Acc]. + +match_next(Prefix, {Filter, _ID}, Suffix) -> + match_filter(Prefix, Filter, Suffix); +match_next(_, '$end_of_table', _) -> + stop. + +match_filter([], [], []) -> + % NOTE: we matched the topic exactly + true; +match_filter([], [], _Suffix) -> + % NOTE: we matched the prefix, but there may be more matches next + skip; +match_filter([], ['#'], _Suffix) -> + % NOTE: naturally, '#' < '+', so this is already optimal for `match/2` + true; +match_filter([], ['+' | _], _Suffix) -> + plus; +match_filter([], [_H | _] = Rest, _Suffix) -> + Rest; +match_filter([H | T1], [H | T2], Suffix) -> + match_filter(T1, T2, Suffix); +match_filter([H1 | _], [H2 | _], _Suffix) when H2 > H1 -> + % NOTE: we're strictly past the prefix, no need to continue + stop. + +match_init(Topic) -> + case words(Topic) of + [W = <<"$", _/bytes>> | Rest] -> + % NOTE + % This will effectively skip attempts to match special topics to `#` or `+/...`. + {Rest, [W]}; + Words -> + {Words, []} + end. + +%% @doc Extract record ID from the match. +-spec get_id(match(ID)) -> ID. +get_id({_Filter, {ID}}) -> + ID. + +%% @doc Extract topic (or topic filter) from the match. +-spec get_topic(match(_ID)) -> emqx_types:topic(). +get_topic({Filter, _ID}) -> + emqx_topic:join(Filter). + +%% @doc Fetch the record associated with the match. +%% NOTE: Only really useful for ETS tables where the record ID is the first element. +-spec get_record(match(_ID), ets:table()) -> _Record. +get_record(K, Tab) -> + ets:lookup_element(Tab, K, 2). + +%% + +-spec words(emqx_types:topic()) -> [word()]. +words(Topic) when is_binary(Topic) -> + % NOTE + % This is almost identical to `emqx_topic:words/1`, but it doesn't convert empty + % tokens to ''. This is needed to keep ordering of words consistent with what + % `match_filter/3` expects. + [word(W) || W <- emqx_topic:tokens(Topic)]. + +-spec word(binary()) -> word(). +word(<<"+">>) -> '+'; +word(<<"#">>) -> '#'; +word(Bin) -> Bin. diff --git a/apps/emqx/src/emqx_trace/emqx_trace_formatter.erl b/apps/emqx/src/emqx_trace/emqx_trace_formatter.erl index a44237bd0..ae2596808 100644 --- a/apps/emqx/src/emqx_trace/emqx_trace_formatter.erl +++ b/apps/emqx/src/emqx_trace/emqx_trace_formatter.erl @@ -14,6 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_trace_formatter). +-include("emqx_mqtt.hrl"). -export([format/2]). -export([format_meta_map/1]). @@ -27,7 +28,7 @@ format( #{level := debug, meta := Meta = #{trace_tag := Tag}, msg := Msg}, #{payload_encode := PEncode} ) -> - Time = calendar:system_time_to_rfc3339(erlang:system_time(microsecond), [{unit, microsecond}]), + Time = emqx_utils_calendar:now_to_rfc3339(microsecond), ClientId = to_iolist(maps:get(clientid, Meta, "")), Peername = maps:get(peername, Meta, ""), MetaBin = format_meta(Meta, PEncode), @@ -68,10 +69,15 @@ weight({K, _}) -> {1, K}. format_packet(undefined, _) -> ""; format_packet(Packet, Encode) -> emqx_packet:format(Packet, Encode). 
-format_payload(undefined, _) -> "";
-format_payload(Payload, text) -> io_lib:format("~ts", [Payload]);
-format_payload(Payload, hex) -> emqx_packet:encode_hex(Payload);
-format_payload(_, hidden) -> "******".
+format_payload(undefined, _) ->
+    "";
+format_payload(_, hidden) ->
+    "******";
+format_payload(Payload, text) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) ->
+    unicode:characters_to_list(Payload);
+format_payload(Payload, hex) when ?MAX_PAYLOAD_FORMAT_LIMIT(Payload) -> binary:encode_hex(Payload);
+format_payload(<<Part:?TRUNCATED_PAYLOAD_SIZE/binary, _/binary>> = Payload, Type) ->
+    emqx_packet:format_truncated_payload(Part, byte_size(Payload), Type).
 
 to_iolist(Atom) when is_atom(Atom) -> atom_to_list(Atom);
 to_iolist(Int) when is_integer(Int) -> integer_to_list(Int);
diff --git a/apps/emqx/src/emqx_vm.erl b/apps/emqx/src/emqx_vm.erl
index 0d861f671..79ad9905c 100644
--- a/apps/emqx/src/emqx_vm.erl
+++ b/apps/emqx/src/emqx_vm.erl
@@ -44,7 +44,7 @@
     get_otp_version/0
 ]).
 
--export([cpu_util/0]).
+-export([cpu_util/0, cpu_util/1]).
 
 -ifdef(TEST).
 -compile(export_all).
@@ -378,16 +378,25 @@ avg15() ->
 cpu_util() ->
     compat_windows(fun cpu_sup:util/0).
 
+cpu_util(Args) ->
+    compat_windows(fun cpu_sup:util/1, Args).
+
 compat_windows(Fun) ->
-    case os:type() of
-        {win32, nt} ->
-            0.0;
-        _Type ->
-            case catch Fun() of
-                Val when is_float(Val) -> floor(Val * 100) / 100;
-                Val when is_number(Val) -> Val;
-                _Error -> 0.0
-            end
+    case compat_windows(Fun, []) of
+        Val when is_float(Val) -> floor(Val * 100) / 100;
+        Val when is_number(Val) -> Val;
+        _ -> 0.0
+    end.
+
+compat_windows(Fun, Args) ->
+    try
+        case emqx_os_mon:is_os_check_supported() of
+            false -> 0.0;
+            true when Args =:= [] -> Fun();
+            true -> Fun(Args)
+        end
+    catch
+        _:_ -> 0.0
     end.
 
 load(Avg) ->
diff --git a/apps/emqx/test/emqx_cth_suite.erl b/apps/emqx/test/emqx_cth_suite.erl
index 1ae6ceded..9b3e58da4 100644
--- a/apps/emqx/test/emqx_cth_suite.erl
+++ b/apps/emqx/test/emqx_cth_suite.erl
@@ -55,6 +55,11 @@
 -type config() :: #{atom() => scalar() | [scalar()] | config() | [config()]}.
 -type scalar() :: atom() | number() | string() | binary().
 
+-type hookfun(R) ::
+    fun(() -> R)
+    | fun((appname()) -> R)
+    | fun((appname(), appspec_opts()) -> R).
+
 -type appspec_opts() :: #{
     %% 1. Enable loading application config
     %% If not defined or set to `false`, this step will be skipped.
@@ -70,19 +75,19 @@
     %% 3. Perform anything right before starting the application
     %% If not defined or set to `false`, this step will be skipped.
     %% Merging amounts to redefining.
-    before_start => fun(() -> _) | fun((appname()) -> _) | false,
+    before_start => hookfun(_) | false,
 
     %% 4. Starting the application
     %% If not defined or set to `true`, `application:ensure_all_started/1` is used.
     %% If custom function is used, it should return list of all applications that were started.
     %% If set to `false`, application will not be started.
     %% Merging amounts to redefining.
-    start => fun(() -> {ok, [appname()]}) | fun((appname()) -> {ok, [appname()]}) | boolean(),
+    start => hookfun({ok, [appname()]}) | boolean(),
 
     %% 5. Perform anything right after starting the application
     %% If not defined or set to `false`, this step will be skipped.
     %% Merging amounts to redefining.
-    after_start => fun(() -> _) | fun((appname()) -> _) | false
+    after_start => hookfun(_) | false
 }.
 
 %% @doc Start applications with a clean slate.
@@ -214,29 +219,30 @@ maybe_override_env(App, #{override_env := Env = [{_, _} | _]}) ->
 maybe_override_env(_App, #{}) ->
     ok.
-maybe_before_start(App, #{before_start := Fun}) when is_function(Fun, 1) -> - Fun(App); -maybe_before_start(_App, #{before_start := Fun}) when is_function(Fun, 0) -> - Fun(); +maybe_before_start(App, #{before_start := Fun} = Opts) when is_function(Fun) -> + apply_hookfun(Fun, App, Opts); maybe_before_start(_App, #{}) -> ok. maybe_start(_App, #{start := false}) -> {ok, []}; -maybe_start(_App, #{start := Fun}) when is_function(Fun, 0) -> - Fun(); -maybe_start(App, #{start := Fun}) when is_function(Fun, 1) -> - Fun(App); +maybe_start(App, #{start := Fun} = Opts) when is_function(Fun) -> + apply_hookfun(Fun, App, Opts); maybe_start(App, #{}) -> application:ensure_all_started(App). -maybe_after_start(App, #{after_start := Fun}) when is_function(Fun, 1) -> - Fun(App); -maybe_after_start(_App, #{after_start := Fun}) when is_function(Fun, 0) -> - Fun(); +maybe_after_start(App, #{after_start := Fun} = Opts) when is_function(Fun) -> + apply_hookfun(Fun, App, Opts); maybe_after_start(_App, #{}) -> ok. +apply_hookfun(Fun, _App, _Opts) when is_function(Fun, 0) -> + Fun(); +apply_hookfun(Fun, App, _Opts) when is_function(Fun, 1) -> + Fun(App); +apply_hookfun(Fun, App, Opts) when is_function(Fun, 2) -> + Fun(App, Opts). + -spec merge_appspec(appspec_opts(), appspec_opts()) -> appspec_opts(). merge_appspec(Opts1, Opts2) -> @@ -270,7 +276,11 @@ default_appspec(ekka, _SuiteOpts) -> }; default_appspec(emqx, SuiteOpts) -> #{ - override_env => [{data_dir, maps:get(work_dir, SuiteOpts, "data")}] + override_env => [{data_dir, maps:get(work_dir, SuiteOpts, "data")}], + % NOTE + % We inform `emqx` of our config loader before starting it so that it won't + % overwrite everything with a default configuration. + before_start => fun inhibit_config_loader/2 }; default_appspec(emqx_authz, _SuiteOpts) -> #{ @@ -307,9 +317,7 @@ default_appspec(emqx_conf, SuiteOpts) -> % NOTE % We inform `emqx` of our config loader before starting `emqx_conf` so that it won't % overwrite everything with a default configuration. - before_start => fun() -> - emqx_app:set_config_loader(?MODULE) - end + before_start => fun inhibit_config_loader/2 }; default_appspec(emqx_dashboard, _SuiteOpts) -> #{ @@ -329,6 +337,11 @@ start_ekka() -> ok = emqx_common_test_helpers:start_ekka(), {ok, [mnesia, ekka]}. +inhibit_config_loader(_App, #{config := Config}) when Config /= false -> + ok = emqx_app:set_config_loader(?MODULE); +inhibit_config_loader(_App, #{}) -> + ok. + %% -spec stop(_StartedApps :: [appname()]) -> diff --git a/apps/emqx/test/emqx_flapping_SUITE.erl b/apps/emqx/test/emqx_flapping_SUITE.erl index 942a262a6..6204d9b6d 100644 --- a/apps/emqx/test/emqx_flapping_SUITE.erl +++ b/apps/emqx/test/emqx_flapping_SUITE.erl @@ -20,31 +20,27 @@ -compile(nowarn_export_all). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - emqx_common_test_helpers:boot_modules(all), - emqx_common_test_helpers:start_apps([]), - %% update global default config - {ok, _} = emqx:update_config( - [flapping_detect], - #{ - <<"enable">> => true, - <<"max_count">> => 3, - % 0.1s - <<"window_time">> => <<"100ms">>, - %% 2s - <<"ban_time">> => <<"2s">> - } + Apps = emqx_cth_suite:start( + [ + {emqx, + "flapping_detect {" + "\n enable = true" + "\n max_count = 3" + "\n window_time = 100ms" + "\n ban_time = 2s" + "\n }"} + ], + #{work_dir => ?config(priv_dir, Config)} ), - Config. + [{suite_apps, Apps} | Config]. 
-end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps([]), - %% Clean emqx_banned table - mria_mnesia:delete_schema(), - ok. +end_per_suite(Config) -> + emqx_cth_suite:stop(?config(suite_apps, Config)). t_detect_check(_) -> ClientInfo = #{ diff --git a/apps/emqx/test/emqx_os_mon_SUITE.erl b/apps/emqx/test/emqx_os_mon_SUITE.erl index e76928114..1833be48e 100644 --- a/apps/emqx/test/emqx_os_mon_SUITE.erl +++ b/apps/emqx/test/emqx_os_mon_SUITE.erl @@ -39,29 +39,47 @@ init_per_testcase(t_cpu_check_alarm, Config) -> %% 200ms cpu_check_interval => 200 }), - ok = supervisor:terminate_child(emqx_sys_sup, emqx_os_mon), - {ok, _} = supervisor:restart_child(emqx_sys_sup, emqx_os_mon), + restart_os_mon(), Config; init_per_testcase(t_sys_mem_check_alarm, Config) -> - case emqx_os_mon:is_sysmem_check_supported() of + case emqx_os_mon:is_os_check_supported() of true -> SysMon = emqx_config:get([sysmon, os], #{}), emqx_config:put([sysmon, os], SysMon#{ sysmem_high_watermark => 0.51, %% 200ms mem_check_interval => 200 - }), - ok = supervisor:terminate_child(emqx_sys_sup, emqx_os_mon), - {ok, _} = supervisor:restart_child(emqx_sys_sup, emqx_os_mon), - Config; + }); false -> - Config - end; + ok + end, + restart_os_mon(), + Config; init_per_testcase(_, Config) -> - emqx_common_test_helpers:boot_modules(all), - emqx_common_test_helpers:start_apps([]), + restart_os_mon(), Config. +restart_os_mon() -> + case emqx_os_mon:is_os_check_supported() of + true -> + ok = supervisor:terminate_child(emqx_sys_sup, emqx_os_mon), + {ok, _} = supervisor:restart_child(emqx_sys_sup, emqx_os_mon); + false -> + _ = supervisor:terminate_child(emqx_sys_sup, emqx_os_mon), + _ = supervisor:delete_child(emqx_sys_sup, emqx_os_mon), + %% run test on mac/windows. + Mod = emqx_os_mon, + OsMon = #{ + id => Mod, + start => {Mod, start_link, []}, + restart => permanent, + shutdown => 5000, + type => worker, + modules => [Mod] + }, + {ok, _} = supervisor:start_child(emqx_sys_sup, OsMon) + end. + t_api(_) -> ?assertEqual(0.7, emqx_os_mon:get_sysmem_high_watermark()), ?assertEqual(ok, emqx_os_mon:set_sysmem_high_watermark(0.8)), @@ -81,7 +99,7 @@ t_api(_) -> ok. t_sys_mem_check_disable(Config) -> - case emqx_os_mon:is_sysmem_check_supported() of + case emqx_os_mon:is_os_check_supported() of true -> do_sys_mem_check_disable(Config); false -> skip end. @@ -100,7 +118,7 @@ do_sys_mem_check_disable(_Config) -> ok. t_sys_mem_check_alarm(Config) -> - case emqx_os_mon:is_sysmem_check_supported() of + case emqx_os_mon:is_os_check_supported() of true -> do_sys_mem_check_alarm(Config); false -> skip end. 
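One note on the emqx_cth_suite change earlier in this set: before_start, start and after_start now accept 0-, 1- or 2-arity hooks (the new hookfun(R) type), and the 2-arity form also receives the merged appspec options, which is what inhibit_config_loader/2 relies on. A minimal sketch of a suite using the 2-arity form; the config string and the ct:pal call are invented for illustration, not taken from the change set:

init_per_suite(Config) ->
    Apps = emqx_cth_suite:start(
        [
            {emqx, #{
                config => "sysmon.os { cpu_check_interval = 200ms }",
                before_start => fun(App, Opts) ->
                    %% 2-arity hook: sees both the app name and its appspec opts
                    ct:pal("starting ~p with opts: ~p", [App, Opts])
                end
            }}
        ],
        #{work_dir => ?config(priv_dir, Config)}
    ),
    [{suite_apps, Apps} | Config].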
@@ -167,7 +185,7 @@ t_cpu_check_alarm(_) -> util, fun() -> CpuUtil end, fun() -> - timer:sleep(500), + timer:sleep(1000), Alarms = emqx_alarm:get_alarms(activated), ?assert( emqx_vm_mon_SUITE:is_existing(high_cpu_usage, emqx_alarm:get_alarms(activated)) @@ -193,7 +211,7 @@ t_cpu_check_alarm(_) -> ?assert(is_binary(Msg)), emqx_config:put([sysmon, os, cpu_high_watermark], 1), emqx_config:put([sysmon, os, cpu_low_watermark], 0.96), - timer:sleep(500), + timer:sleep(800), ?assertNot( emqx_vm_mon_SUITE:is_existing(high_cpu_usage, emqx_alarm:get_alarms(activated)) ) diff --git a/apps/emqx/test/emqx_ratelimiter_SUITE.erl b/apps/emqx/test/emqx_ratelimiter_SUITE.erl index fc9960c81..f414c3759 100644 --- a/apps/emqx/test/emqx_ratelimiter_SUITE.erl +++ b/apps/emqx/test/emqx_ratelimiter_SUITE.erl @@ -589,11 +589,11 @@ t_extract_with_type(_) -> (Type, Cfg) -> IsOnly(Type, Cfg) end, - ?assertEqual(undefined, emqx_limiter_schema:extract_with_type(messages, undefined)), + ?assertEqual(undefined, emqx_limiter_utils:extract_with_type(messages, undefined)), ?assert( Checker( messages, - emqx_limiter_schema:extract_with_type(messages, #{ + emqx_limiter_utils:extract_with_type(messages, #{ messages => #{rate => 1}, bytes => #{rate => 1} }) ) @@ -601,7 +601,7 @@ t_extract_with_type(_) -> ?assert( Checker( messages, - emqx_limiter_schema:extract_with_type(messages, #{ + emqx_limiter_utils:extract_with_type(messages, #{ messages => #{rate => 1}, bytes => #{rate => 1}, client => #{messages => #{rate => 2}} @@ -611,7 +611,7 @@ t_extract_with_type(_) -> ?assert( Checker( messages, - emqx_limiter_schema:extract_with_type(messages, #{ + emqx_limiter_utils:extract_with_type(messages, #{ client => #{messages => #{rate => 2}, bytes => #{rate => 1}} }) ) @@ -622,7 +622,7 @@ t_add_bucket(_) -> #{buckets := Buckets} = sys:get_state(emqx_limiter_server:whereis(bytes)), ?assertEqual(Size, maps:size(Buckets), Buckets) end, - DefBucket = emqx_limiter_schema:default_bucket_config(), + DefBucket = emqx_limiter_utils:default_bucket_config(), ?assertEqual(ok, emqx_limiter_server:add_bucket(?FUNCTION_NAME, bytes, undefined)), Checker(0), ?assertEqual(ok, emqx_limiter_server:add_bucket(?FUNCTION_NAME, bytes, DefBucket)), @@ -765,7 +765,7 @@ t_esockd_htb_consume(_) -> t_node_short_paths(_) -> CfgStr = <<"limiter {max_conn_rate = \"1000\", messages_rate = \"100\", bytes_rate = \"10\"}">>, ok = emqx_common_test_helpers:load_config(emqx_limiter_schema, CfgStr), - Accessor = fun emqx_limiter_schema:get_node_opts/1, + Accessor = fun emqx_limiter_utils:get_node_opts/1, ?assertMatch(#{rate := 100.0}, Accessor(connection)), ?assertMatch(#{rate := 10.0}, Accessor(messages)), ?assertMatch(#{rate := 1.0}, Accessor(bytes)), @@ -776,7 +776,7 @@ t_compatibility_for_node_short_paths(_) -> CfgStr = <<"limiter {max_conn_rate = \"1000\", connection.rate = \"500\", bytes.rate = \"200\"}">>, ok = emqx_common_test_helpers:load_config(emqx_limiter_schema, CfgStr), - Accessor = fun emqx_limiter_schema:get_node_opts/1, + Accessor = fun emqx_limiter_utils:get_node_opts/1, ?assertMatch(#{rate := 100.0}, Accessor(connection)), ?assertMatch(#{rate := 20.0}, Accessor(bytes)). @@ -796,7 +796,7 @@ t_listener_short_paths(_) -> }, connection := #{rate := 100.0} }, - emqx_limiter_schema:get_listener_opts(ListenerOpt) + emqx_limiter_utils:get_listener_opts(ListenerOpt) ). 
t_compatibility_for_listener_short_paths(_) -> @@ -809,7 +809,7 @@ t_compatibility_for_listener_short_paths(_) -> #{ connection := #{rate := 100.0} }, - emqx_limiter_schema:get_listener_opts(ListenerOpt) + emqx_limiter_utils:get_listener_opts(ListenerOpt) ). t_no_limiter_for_listener(_) -> @@ -818,7 +818,7 @@ t_no_limiter_for_listener(_) -> ListenerOpt = emqx:get_config([listeners, tcp, default]), ?assertEqual( undefined, - emqx_limiter_schema:get_listener_opts(ListenerOpt) + emqx_limiter_utils:get_listener_opts(ListenerOpt) ). %%-------------------------------------------------------------------- @@ -1135,5 +1135,5 @@ parse_schema(ConfigString) -> ). default_client_config() -> - Conf = emqx_limiter_schema:default_client_config(), + Conf = emqx_limiter_utils:default_client_config(), Conf#{divisible := false, max_retry_time := timer:seconds(10)}. diff --git a/apps/emqx/test/emqx_tls_lib_tests.erl b/apps/emqx/test/emqx_tls_lib_tests.erl index 481b9378e..ae7caa4fb 100644 --- a/apps/emqx/test/emqx_tls_lib_tests.erl +++ b/apps/emqx/test/emqx_tls_lib_tests.erl @@ -113,11 +113,22 @@ ssl_files_failure_test_() -> }) ) end}, + {"empty_cacertfile", fun() -> + ?assertMatch( + {ok, _}, + emqx_tls_lib:ensure_ssl_files("/tmp", #{ + <<"keyfile">> => test_key(), + <<"certfile">> => test_key(), + <<"cacertfile">> => <<"">> + }) + ) + end}, {"bad_pem_string", fun() -> %% empty string ?assertMatch( {error, #{ - reason := invalid_file_path_or_pem_string, which_options := [[<<"keyfile">>]] + reason := pem_file_path_or_string_is_required, + which_options := [[<<"keyfile">>]] }}, emqx_tls_lib:ensure_ssl_files("/tmp", #{ <<"keyfile">> => <<>>, diff --git a/apps/emqx/test/emqx_topic_index_SUITE.erl b/apps/emqx/test/emqx_topic_index_SUITE.erl new file mode 100644 index 000000000..ade98acec --- /dev/null +++ b/apps/emqx/test/emqx_topic_index_SUITE.erl @@ -0,0 +1,331 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_topic_index_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("proper/include/proper.hrl"). +-include_lib("eunit/include/eunit.hrl"). + +-import(emqx_proper_types, [scaled/2]). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +t_insert(_) -> + Tab = emqx_topic_index:new(), + true = emqx_topic_index:insert(<<"sensor/1/metric/2">>, t_insert_1, <<>>, Tab), + true = emqx_topic_index:insert(<<"sensor/+/#">>, t_insert_2, <<>>, Tab), + true = emqx_topic_index:insert(<<"sensor/#">>, t_insert_3, <<>>, Tab), + ?assertEqual(<<"sensor/#">>, topic(match(<<"sensor">>, Tab))), + ?assertEqual(t_insert_3, id(match(<<"sensor">>, Tab))). 
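To make the key layout behind these tests concrete: emqx_topic_index:insert/4 stores one ETS entry per filter, keyed by the parsed filter words plus the record ID wrapped in a tuple, and lookups probe ets:next/2 with {} in the ID position because {} sorts before any {ID} wrapper. A rough sketch of that layout (illustrative only, not part of the test suite; the filter, ID and record values are invented):

%% Assumes the new emqx_topic_index module from this change set is compiled.
Tab = emqx_topic_index:new(),
true = emqx_topic_index:insert(<<"sensor/+/temp">>, id1, <<"payload">>, Tab),
%% The stored entry is {{ParsedFilterWords, {ID}}, Record}:
[{{[<<"sensor">>, '+', <<"temp">>], {id1}}, <<"payload">>}] = ets:tab2list(Tab),
%% match/2 walks the table with ets:next/2 starting from {TopicPrefix, {}}:
M = emqx_topic_index:match(<<"sensor/1/temp">>, Tab),
id1 = emqx_topic_index:get_id(M),
<<"sensor/+/temp">> = emqx_topic_index:get_topic(M),
<<"payload">> = emqx_topic_index:get_record(M, Tab).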
+
+t_match(_) ->
+    Tab = emqx_topic_index:new(),
+    true = emqx_topic_index:insert(<<"sensor/1/metric/2">>, t_match_1, <<>>, Tab),
+    true = emqx_topic_index:insert(<<"sensor/+/#">>, t_match_2, <<>>, Tab),
+    true = emqx_topic_index:insert(<<"sensor/#">>, t_match_3, <<>>, Tab),
+    ?assertMatch(
+        [<<"sensor/#">>, <<"sensor/+/#">>],
+        [topic(M) || M <- matches(<<"sensor/1">>, Tab)]
+    ).
+
+t_match2(_) ->
+    Tab = emqx_topic_index:new(),
+    true = emqx_topic_index:insert(<<"#">>, t_match2_1, <<>>, Tab),
+    true = emqx_topic_index:insert(<<"+/#">>, t_match2_2, <<>>, Tab),
+    true = emqx_topic_index:insert(<<"+/+/#">>, t_match2_3, <<>>, Tab),
+    ?assertEqual(
+        [<<"#">>, <<"+/#">>, <<"+/+/#">>],
+        [topic(M) || M <- matches(<<"a/b/c">>, Tab)]
+    ),
+    ?assertEqual(
+        false,
+        emqx_topic_index:match(<<"$SYS/broker/zenmq">>, Tab)
+    ).
+
+t_match3(_) ->
+    Tab = emqx_topic_index:new(),
+    Records = [
+        {<<"d/#">>, t_match3_1},
+        {<<"a/b/+">>, t_match3_2},
+        {<<"a/#">>, t_match3_3},
+        {<<"#">>, t_match3_4},
+        {<<"$SYS/#">>, t_match3_sys}
+    ],
+    lists:foreach(
+        fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
+        Records
+    ),
+    Matched = matches(<<"a/b/c">>, Tab),
+    case length(Matched) of
+        3 -> ok;
+        _ -> error({unexpected, Matched})
+    end,
+    ?assertEqual(
+        t_match3_sys,
+        id(match(<<"$SYS/a/b/c">>, Tab))
+    ).
+
+t_match4(_) ->
+    Tab = emqx_topic_index:new(),
+    Records = [{<<"/#">>, t_match4_1}, {<<"/+">>, t_match4_2}, {<<"/+/a/b/c">>, t_match4_3}],
+    lists:foreach(
+        fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
+        Records
+    ),
+    ?assertEqual(
+        [<<"/#">>, <<"/+">>],
+        [topic(M) || M <- matches(<<"/">>, Tab)]
+    ),
+    ?assertEqual(
+        [<<"/#">>, <<"/+/a/b/c">>],
+        [topic(M) || M <- matches(<<"/0/a/b/c">>, Tab)]
+    ).
+
+t_match5(_) ->
+    Tab = emqx_topic_index:new(),
+    T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
+    Records = [
+        {<<"#">>, t_match5_1},
+        {<<T/binary, "/#">>, t_match5_2},
+        {<<T/binary, "/+">>, t_match5_3}
+    ],
+    lists:foreach(
+        fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
+        Records
+    ),
+    ?assertEqual(
+        [<<"#">>, <<T/binary, "/#">>],
+        [topic(M) || M <- matches(T, Tab)]
+    ),
+    ?assertEqual(
+        [<<"#">>, <<T/binary, "/#">>, <<T/binary, "/+">>],
+        [topic(M) || M <- matches(<<T/binary, "/1">>, Tab)]
+    ).
+
+t_match6(_) ->
+    Tab = emqx_topic_index:new(),
+    T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
+    W = <<"+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/#">>,
+    emqx_topic_index:insert(W, ID = t_match6, <<>>, Tab),
+    ?assertEqual(ID, id(match(T, Tab))).
+
+t_match7(_) ->
+    Tab = emqx_topic_index:new(),
+    T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
+    W = <<"a/+/c/+/e/+/g/+/i/+/k/+/m/+/o/+/q/+/s/+/u/+/w/+/y/+/#">>,
+    emqx_topic_index:insert(W, t_match7, <<>>, Tab),
+    ?assertEqual(W, topic(match(T, Tab))).
+
+t_match_fast_forward(_) ->
+    Tab = emqx_topic_index:new(),
+    emqx_topic_index:insert(<<"a/b/1/2/3/4/5/6/7/8/9/#">>, id1, <<>>, Tab),
+    emqx_topic_index:insert(<<"z/y/x/+/+">>, id2, <<>>, Tab),
+    emqx_topic_index:insert(<<"a/b/c/+">>, id3, <<>>, Tab),
+    % dbg:tracer(),
+    % dbg:p(all, c),
+    % dbg:tpl({ets, next, '_'}, x),
+    ?assertEqual(id1, id(match(<<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab))),
+    ?assertEqual([id1], [id(M) || M <- matches(<<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab)]).
+
+t_match_unique(_) ->
+    Tab = emqx_topic_index:new(),
+    emqx_topic_index:insert(<<"a/b/c">>, t_match_id1, <<>>, Tab),
+    emqx_topic_index:insert(<<"a/b/+">>, t_match_id1, <<>>, Tab),
+    emqx_topic_index:insert(<<"a/b/c/+">>, t_match_id2, <<>>, Tab),
+    ?assertEqual(
+        [t_match_id1, t_match_id1],
+        [id(M) || M <- emqx_topic_index:matches(<<"a/b/c">>, Tab, [])]
+    ),
+    ?assertEqual(
+        [t_match_id1],
+        [id(M) || M <- emqx_topic_index:matches(<<"a/b/c">>, Tab, [unique])]
+    ).
+
+t_match_wildcard_edge_cases(_) ->
+    CommonTopics = [
+        <<"a/b">>,
+        <<"a/b/#">>,
+        <<"a/b/#">>,
+        <<"a/b/c">>,
+        <<"a/b/+">>,
+        <<"a/b/d">>,
+        <<"a/+/+">>,
+        <<"a/+/#">>
+    ],
+    Datasets =
+        [
+            %% Topics, TopicName, Results
+            {CommonTopics, <<"a/b/c">>, [2, 3, 4, 5, 7, 8]},
+            {CommonTopics, <<"a/b">>, [1, 2, 3, 8]},
+            {[<<"+/b/c">>, <<"/">>], <<"a/b/c">>, [1]},
+            {[<<"#">>, <<"/">>], <<"a">>, [1]},
+            {[<<"/">>, <<"+">>], <<"a">>, [2]}
+        ],
+    F = fun({Topics, TopicName, Expected}) ->
+        Tab = emqx_topic_index:new(),
+        _ = [emqx_topic_index:insert(T, N, <<>>, Tab) || {N, T} <- lists:enumerate(Topics)],
+        ?assertEqual(
+            lists:last(Expected),
+            id(emqx_topic_index:match(TopicName, Tab)),
+            #{"Base topics" => Topics, "Topic name" => TopicName}
+        ),
+        ?assertEqual(
+            Expected,
+            [id(M) || M <- emqx_topic_index:matches(TopicName, Tab, [unique])],
+            #{"Base topics" => Topics, "Topic name" => TopicName}
+        )
+    end,
+    lists:foreach(F, Datasets).
+
+t_prop_matches(_) ->
+    ?assert(
+        proper:quickcheck(
+            topic_matches_prop(),
+            [{max_size, 100}, {numtests, 100}]
+        )
+    ),
+    Statistics = [{C, account(C)} || C <- [filters, topics, matches, maxhits]],
+    ct:pal("Statistics: ~p", [maps:from_list(Statistics)]).
+
+topic_matches_prop() ->
+    ?FORALL(
+        % Generate a longer list of topics and a shorter list of topic filter patterns.
+        #{
+            topics := TTopics,
+            patterns := Pats
+        },
+        emqx_proper_types:fixedmap(#{
+            % NOTE
+            % Beware adding non-empty constraint, proper will have a hard time with `topic_t/1`
+            % for some reason.
+            topics => scaled(4, list(topic_t([1, 2, 3, 4]))),
+            patterns => list(topic_filter_pattern_t())
+        }),
+        begin
+            Tab = emqx_topic_index:new(),
+            Topics = [emqx_topic:join(T) || T <- TTopics],
+            % Produce topic filters from generated topics and patterns.
+            % Number of filters is equal to the number of patterns, most of the time.
+            Filters = lists:enumerate(mk_filters(Pats, TTopics)),
+            _ = [emqx_topic_index:insert(F, N, <<>>, Tab) || {N, F} <- Filters],
+            % Gather some basic statistics
+            _ = account(filters, length(Filters)),
+            _ = account(topics, NTopics = length(Topics)),
+            _ = account(maxhits, NTopics * NTopics),
+            % Verify that matching each topic against index returns the same results as
+            % matching it against the list of filters one by one.
+            lists:all(
+                fun(Topic) ->
+                    Ids1 = [id(M) || M <- emqx_topic_index:matches(Topic, Tab, [unique])],
+                    Ids2 = lists:filtermap(
+                        fun({N, F}) ->
+                            case emqx_topic:match(Topic, F) of
+                                true -> {true, N};
+                                false -> false
+                            end
+                        end,
+                        Filters
+                    ),
+                    % Account a number of matches to compute hitrate later
+                    _ = account(matches, length(Ids1)),
+                    case (Ids1 -- Ids2) ++ (Ids2 -- Ids1) of
+                        [] ->
+                            true;
+                        [_ | _] = _Differences ->
+                            ct:pal(
+                                "Topic name: ~p~n"
+                                "Index results: ~p~n"
+                                "Topic match results: ~p~n",
+                                [Topic, Ids1, Ids2]
+                            ),
+                            false
+                    end
+                end,
+                Topics
+            )
+        end
+    ).
+
+mk_filters([Pat | PRest], [Topic | TRest]) ->
+    [emqx_topic:join(mk_topic_filter(Pat, Topic)) | mk_filters(PRest, TRest)];
+mk_filters(_, _) ->
+    [].
+ +account(Counter, N) -> + put({?MODULE, Counter}, account(Counter) + N). + +account(Counter) -> + emqx_maybe:define(get({?MODULE, Counter}), 0). + +%% + +match(T, Tab) -> + emqx_topic_index:match(T, Tab). + +matches(T, Tab) -> + lists:sort(emqx_topic_index:matches(T, Tab, [])). + +id(Match) -> + emqx_topic_index:get_id(Match). + +topic(Match) -> + emqx_topic_index:get_topic(Match). + +%% + +topic_t(EntropyWeights) -> + EWLast = lists:last(EntropyWeights), + ?LET(L, scaled(1 / 4, list(EWLast)), begin + EWs = lists:sublist(EntropyWeights ++ L, length(L)), + ?SIZED(S, [oneof([topic_level_t(S * EW), topic_level_fixed_t()]) || EW <- EWs]) + end). + +topic_level_t(Entropy) -> + S = floor(1 + math:log2(Entropy) / 4), + ?LET(I, range(1, Entropy), iolist_to_binary(io_lib:format("~*.16.0B", [S, I]))). + +topic_level_fixed_t() -> + oneof([ + <<"foo">>, + <<"bar">>, + <<"baz">>, + <<"xyzzy">> + ]). + +topic_filter_pattern_t() -> + list(topic_level_pattern_t()). + +topic_level_pattern_t() -> + frequency([ + {5, level}, + {2, '+'}, + {1, '#'} + ]). + +mk_topic_filter([], _) -> + []; +mk_topic_filter(_, []) -> + []; +mk_topic_filter(['#' | _], _) -> + ['#']; +mk_topic_filter(['+' | Rest], [_ | Levels]) -> + ['+' | mk_topic_filter(Rest, Levels)]; +mk_topic_filter([level | Rest], [L | Levels]) -> + [L | mk_topic_filter(Rest, Levels)]. diff --git a/apps/emqx/test/emqx_trace_SUITE.erl b/apps/emqx/test/emqx_trace_SUITE.erl index 0166613a4..ce7d7e887 100644 --- a/apps/emqx/test/emqx_trace_SUITE.erl +++ b/apps/emqx/test/emqx_trace_SUITE.erl @@ -274,7 +274,6 @@ t_load_state(_Config) -> ok. t_client_event(_Config) -> - application:set_env(emqx, allow_anonymous, true), ClientId = <<"client-test">>, Now = erlang:system_time(second), Name = <<"test_client_id_event">>, @@ -312,6 +311,60 @@ t_client_event(_Config) -> ?assert(erlang:byte_size(Bin3) > 0), ok. 
+t_client_huge_payload_truncated(_Config) -> + ClientId = <<"client-truncated1">>, + Now = erlang:system_time(second), + Name = <<"test_client_id_truncated1">>, + {ok, _} = emqx_trace:create([ + {<<"name">>, Name}, + {<<"type">>, clientid}, + {<<"clientid">>, ClientId}, + {<<"start_at">>, Now} + ]), + ok = emqx_trace_handler_SUITE:filesync(Name, clientid), + {ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]), + {ok, _} = emqtt:connect(Client), + emqtt:ping(Client), + NormalPayload = iolist_to_binary(lists:duplicate(1024, "x")), + ok = emqtt:publish(Client, <<"/test">>, #{}, NormalPayload, [{qos, 0}]), + HugePayload1 = iolist_to_binary(lists:duplicate(1025, "y")), + ok = emqtt:publish(Client, <<"/test">>, #{}, HugePayload1, [{qos, 0}]), + HugePayload2 = iolist_to_binary(lists:duplicate(1024 * 10, "y")), + ok = emqtt:publish(Client, <<"/test">>, #{}, HugePayload2, [{qos, 0}]), + ok = emqx_trace_handler_SUITE:filesync(Name, clientid), + {ok, _} = emqx_trace:create([ + {<<"name">>, <<"test_topic">>}, + {<<"type">>, topic}, + {<<"topic">>, <<"/test">>}, + {<<"start_at">>, Now} + ]), + ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic), + {ok, Bin} = file:read_file(emqx_trace:log_file(Name, Now)), + ok = emqtt:publish(Client, <<"/test">>, #{}, NormalPayload, [{qos, 0}]), + ok = emqtt:publish(Client, <<"/test">>, #{}, HugePayload1, [{qos, 0}]), + ok = emqtt:publish(Client, <<"/test">>, #{}, HugePayload2, [{qos, 0}]), + ok = emqtt:disconnect(Client), + ok = emqx_trace_handler_SUITE:filesync(Name, clientid), + ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic), + {ok, Bin2} = file:read_file(emqx_trace:log_file(Name, Now)), + {ok, Bin3} = file:read_file(emqx_trace:log_file(<<"test_topic">>, Now)), + ct:pal("Bin ~p Bin2 ~p Bin3 ~p", [byte_size(Bin), byte_size(Bin2), byte_size(Bin3)]), + ?assert(erlang:byte_size(Bin) > 1024), + ?assert(erlang:byte_size(Bin) < erlang:byte_size(Bin2)), + ?assert(erlang:byte_size(Bin3) > 1024), + + %% Don't have format crash + CrashBin = <<"CRASH">>, + ?assertEqual(nomatch, binary:match(Bin, [CrashBin])), + ?assertEqual(nomatch, binary:match(Bin2, [CrashBin])), + ?assertEqual(nomatch, binary:match(Bin3, [CrashBin])), + %% have "this log are truncated" for huge payload + TruncatedLog = <<"this log are truncated">>, + ?assertNotEqual(nomatch, binary:match(Bin, [TruncatedLog])), + ?assertNotEqual(nomatch, binary:match(Bin2, [TruncatedLog])), + ?assertNotEqual(nomatch, binary:match(Bin3, [TruncatedLog])), + ok. + t_get_log_filename(_Config) -> Now = erlang:system_time(second), Name = <<"name1">>, diff --git a/apps/emqx_authn/src/emqx_authn.app.src b/apps/emqx_authn/src/emqx_authn.app.src index 4ab86ef4a..ae7bea5da 100644 --- a/apps/emqx_authn/src/emqx_authn.app.src +++ b/apps/emqx_authn/src/emqx_authn.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_authn, [ {description, "EMQX Authentication"}, - {vsn, "0.1.24"}, + {vsn, "0.1.25"}, {modules, []}, {registered, [emqx_authn_sup, emqx_authn_registry]}, {applications, [ diff --git a/apps/emqx_authn/src/emqx_authn_enterprise.erl b/apps/emqx_authn/src/emqx_authn_enterprise.erl index 029872694..69752555c 100644 --- a/apps/emqx_authn/src/emqx_authn_enterprise.erl +++ b/apps/emqx_authn/src/emqx_authn_enterprise.erl @@ -9,7 +9,10 @@ -if(?EMQX_RELEASE_EDITION == ee). providers() -> - [{{password_based, ldap}, emqx_ldap_authn}]. + [ + {{password_based, ldap}, emqx_ldap_authn}, + {gcp_device, emqx_gcp_device_authn} + ]. resource_provider() -> [emqx_ldap_authn]. 
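Tying the t_client_huge_payload_truncated test above back to the emqx_packet change: format_truncated_payload/3 keeps only the first ?TRUNCATED_PAYLOAD_SIZE bytes and appends a note about how many bytes were dropped. A hedged sketch of the resulting string; the constant's value is not part of this diff, so 1024 below is only an assumption inferred from the 1024/1025-byte payloads the test uses:

%% Assumption: ?TRUNCATED_PAYLOAD_SIZE = 1024 (the macro is defined outside this diff).
Payload = binary:copy(<<"y">>, 1030),
Part = binary:part(Payload, 0, 1024),
Formatted = emqx_packet:format_truncated_payload(Part, byte_size(Payload), text),
%% Formatted is a flat string: 1024 $y characters followed by
%% "... The 6 bytes of this log are truncated"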
diff --git a/apps/emqx_bridge_azure_event_hub/rebar.config b/apps/emqx_bridge_azure_event_hub/rebar.config index 85c39ce01..dbcc8269c 100644 --- a/apps/emqx_bridge_azure_event_hub/rebar.config +++ b/apps/emqx_bridge_azure_event_hub/rebar.config @@ -1,6 +1,6 @@ %% -*- mode: erlang; -*- {erl_opts, [debug_info]}. -{deps, [ {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.7.6"}}} +{deps, [ {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.7.7"}}} , {kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.3"}}} , {brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.0"}}} , {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.16.8"}}} diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src index 9faf65860..c7dcea5c0 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_gcp_pubsub, [ {description, "EMQX Enterprise GCP Pub/Sub Bridge"}, - {vsn, "0.1.6"}, + {vsn, "0.1.7"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl index b3792da71..d1e827d84 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl @@ -113,6 +113,22 @@ fields(connector_config) -> ]; fields(producer) -> [ + {attributes_template, + sc( + hoconsc:array(ref(key_value_pair)), + #{ + default => [], + desc => ?DESC("attributes_template") + } + )}, + {ordering_key_template, + sc( + binary(), + #{ + default => <<>>, + desc => ?DESC("ordering_key_template") + } + )}, {payload_template, sc( binary(), @@ -203,6 +219,18 @@ fields("consumer_resource_opts") -> fun({Field, _Sc}) -> lists:member(Field, SupportedFields) end, ResourceFields ); +fields(key_value_pair) -> + [ + {key, + mk(binary(), #{ + required => true, + validator => [ + emqx_resource_validator:not_empty("Key templates must not be empty") + ], + desc => ?DESC(kv_pair_key) + })}, + {value, mk(binary(), #{required => true, desc => ?DESC(kv_pair_value)})} + ]; fields("get_producer") -> emqx_bridge_schema:status_fields() ++ fields("post_producer"); fields("post_producer") -> @@ -218,6 +246,8 @@ fields("put_consumer") -> desc("config_producer") -> ?DESC("desc_config"); +desc(key_value_pair) -> + ?DESC("kv_pair_desc"); desc("config_consumer") -> ?DESC("desc_config"); desc("consumer_resource_opts") -> diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_client.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_client.erl index cb4aa853c..eeceb0c43 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_client.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_client.erl @@ -205,7 +205,7 @@ get_topic(Topic, ConnectorState) -> Path = <<"/v1/projects/", ProjectId/binary, "/topics/", Topic/binary>>, Body = <<>>, PreparedRequest = {prepared_request, {Method, Path, Body}}, - query_sync(PreparedRequest, ConnectorState). + ?MODULE:query_sync(PreparedRequest, ConnectorState). 
%%------------------------------------------------------------------------------------------------- %% Helper fns diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_consumer_worker.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_consumer_worker.erl index ddceb4a11..d984b42ed 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_consumer_worker.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_consumer_worker.erl @@ -217,7 +217,9 @@ handle_continue(?ensure_subscription, State0) -> {noreply, State0, {continue, ?ensure_subscription}}; not_found -> %% there's nothing much to do if the topic suddenly doesn't exist anymore. - {stop, {error, topic_not_found}, State0} + {stop, {error, topic_not_found}, State0}; + permission_denied -> + {stop, {error, permission_denied}, State0} end; handle_continue(?patch_subscription, State0) -> ?tp(gcp_pubsub_consumer_worker_patch_subscription_enter, #{}), @@ -291,14 +293,17 @@ handle_info(Msg, State0) -> }), {noreply, State0}. -terminate({error, topic_not_found} = _Reason, State) -> +terminate({error, Reason}, State) when + Reason =:= topic_not_found; + Reason =:= permission_denied +-> #{ instance_id := InstanceId, topic := _Topic } = State, optvar:unset(?OPTVAR_SUB_OK(self())), - emqx_bridge_gcp_pubsub_impl_consumer:mark_topic_as_nonexistent(InstanceId), - ?tp(gcp_pubsub_consumer_worker_terminate, #{reason => _Reason, topic => _Topic}), + emqx_bridge_gcp_pubsub_impl_consumer:mark_as_unhealthy(InstanceId, Reason), + ?tp(gcp_pubsub_consumer_worker_terminate, #{reason => {error, Reason}, topic => _Topic}), ok; terminate(_Reason, _State) -> optvar:unset(?OPTVAR_SUB_OK(self())), @@ -329,7 +334,8 @@ ensure_pull_timer(State = #{pull_timer := TRef}) when is_reference(TRef) -> ensure_pull_timer(State = #{pull_retry_interval := PullRetryInterval}) -> State#{pull_timer := emqx_utils:start_timer(PullRetryInterval, pull)}. --spec ensure_subscription_exists(state()) -> continue | retry | not_found | already_exists. +-spec ensure_subscription_exists(state()) -> + continue | retry | not_found | permission_denied | already_exists. ensure_subscription_exists(State) -> ?tp(gcp_pubsub_consumer_worker_create_subscription_enter, #{}), #{ @@ -367,6 +373,17 @@ ensure_subscription_exists(State) -> } ), not_found; + {error, #{status_code := 403}} -> + %% permission denied + ?tp( + warning, + "gcp_pubsub_consumer_worker_permission_denied", + #{ + instance_id => InstanceId, + topic => Topic + } + ), + permission_denied; {ok, #{status_code := 200}} -> ?tp( debug, diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl index 74ee941ec..998a95a48 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl @@ -17,9 +17,9 @@ %% health check API -export([ - mark_topic_as_nonexistent/1, - unset_nonexistent_topic/1, - is_nonexistent_topic/1 + mark_as_unhealthy/2, + clear_unhealthy/1, + check_if_unhealthy/1 ]). -include_lib("emqx/include/logger.hrl"). @@ -47,11 +47,15 @@ -define(AUTO_RECONNECT_S, 2). -define(DEFAULT_FORGET_INTERVAL, timer:seconds(60)). --define(OPTVAR_TOPIC_NOT_FOUND(INSTANCE_ID), {?MODULE, topic_not_found, INSTANCE_ID}). +-define(OPTVAR_UNHEALTHY(INSTANCE_ID), {?MODULE, topic_not_found, INSTANCE_ID}). -define(TOPIC_MESSAGE, "GCP PubSub topics are invalid. 
Please check the logs, check if the " "topics exist in GCP and if the service account has permissions to use them." ). +-define(PERMISSION_MESSAGE, + "Permission denied while verifying topic existence. Please check that the " + "provided service account has the correct permissions configured." +). %%------------------------------------------------------------------------------------------------- %% `emqx_resource' API @@ -77,7 +81,7 @@ on_start(InstanceId, Config0) -> -spec on_stop(resource_id(), state()) -> ok | {error, term()}. on_stop(InstanceId, _State) -> ?tp(gcp_pubsub_consumer_stop_enter, #{}), - unset_nonexistent_topic(InstanceId), + clear_unhealthy(InstanceId), ok = stop_consumers(InstanceId), emqx_bridge_gcp_pubsub_client:stop(InstanceId). @@ -85,10 +89,12 @@ on_stop(InstanceId, _State) -> on_get_status(InstanceId, State) -> %% We need to check this flag separately because the workers might be gone when we %% check them. - case is_nonexistent_topic(InstanceId) of - true -> + case check_if_unhealthy(InstanceId) of + {error, topic_not_found} -> {disconnected, State, {unhealthy_target, ?TOPIC_MESSAGE}}; - false -> + {error, permission_denied} -> + {disconnected, State, {unhealthy_target, ?PERMISSION_MESSAGE}}; + ok -> #{client := Client} = State, check_workers(InstanceId, Client) end. @@ -97,24 +103,24 @@ on_get_status(InstanceId, State) -> %% Health check API (signalled by consumer worker) %%------------------------------------------------------------------------------------------------- --spec mark_topic_as_nonexistent(resource_id()) -> ok. -mark_topic_as_nonexistent(InstanceId) -> - optvar:set(?OPTVAR_TOPIC_NOT_FOUND(InstanceId), true), +-spec mark_as_unhealthy(resource_id(), topic_not_found | permission_denied) -> ok. +mark_as_unhealthy(InstanceId, Reason) -> + optvar:set(?OPTVAR_UNHEALTHY(InstanceId), Reason), ok. --spec unset_nonexistent_topic(resource_id()) -> ok. -unset_nonexistent_topic(InstanceId) -> - optvar:unset(?OPTVAR_TOPIC_NOT_FOUND(InstanceId)), - ?tp(gcp_pubsub_consumer_unset_nonexistent_topic, #{}), +-spec clear_unhealthy(resource_id()) -> ok. +clear_unhealthy(InstanceId) -> + optvar:unset(?OPTVAR_UNHEALTHY(InstanceId)), + ?tp(gcp_pubsub_consumer_clear_unhealthy, #{}), ok. --spec is_nonexistent_topic(resource_id()) -> boolean(). -is_nonexistent_topic(InstanceId) -> - case optvar:peek(?OPTVAR_TOPIC_NOT_FOUND(InstanceId)) of - {ok, true} -> - true; - _ -> - false +-spec check_if_unhealthy(resource_id()) -> ok | {error, topic_not_found | permission_denied}. +check_if_unhealthy(InstanceId) -> + case optvar:peek(?OPTVAR_UNHEALTHY(InstanceId)) of + {ok, Reason} -> + {error, Reason}; + undefined -> + ok end. 
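For readers following the health-check flow: the three helpers above form a small signalling channel between the pull workers and the connector. A descriptive sketch of the round trip, with an invented instance id (both calls appear in this diff):

InstanceId = <<"connector:gcp_pubsub_consumer:example">>,
%% Worker side, after an {error, #{status_code := 403}} from the subscription API:
ok = emqx_bridge_gcp_pubsub_impl_consumer:mark_as_unhealthy(InstanceId, permission_denied),
%% Connector side, on the next on_get_status/2 round:
{error, permission_denied} = emqx_bridge_gcp_pubsub_impl_consumer:check_if_unhealthy(InstanceId),
%% ...which on_get_status/2 maps to {disconnected, State, {unhealthy_target, ?PERMISSION_MESSAGE}},
%% until on_stop/2 clears the flag again via clear_unhealthy/1.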
%%------------------------------------------------------------------------------------------------- @@ -153,6 +159,11 @@ start_consumers(InstanceId, Client, Config) -> throw( {unhealthy_target, ?TOPIC_MESSAGE} ); + {error, permission_denied} -> + _ = emqx_bridge_gcp_pubsub_client:stop(InstanceId), + throw( + {unhealthy_target, ?PERMISSION_MESSAGE} + ); {error, _} -> %% connection might be down; we'll have to check topic existence during health %% check, or the workers will kill themselves when they realized there's no @@ -229,6 +240,8 @@ check_for_topic_existence(Topic, Client) -> ok; {error, #{status_code := 404}} -> {error, not_found}; + {error, #{status_code := 403}} -> + {error, permission_denied}; {error, Reason} -> ?tp(warning, "gcp_pubsub_consumer_check_topic_error", #{reason => Reason}), {error, Reason} diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_producer.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_producer.erl index b1ded2121..dc5eb01aa 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_producer.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_producer.erl @@ -9,15 +9,20 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -type config() :: #{ + attributes_template := [#{key := binary(), value := binary()}], connect_timeout := emqx_schema:duration_ms(), max_retries := non_neg_integer(), + ordering_key_template := binary(), + payload_template := binary(), pubsub_topic := binary(), resource_opts := #{request_ttl := infinity | emqx_schema:duration_ms(), any() => term()}, service_account_json := emqx_bridge_gcp_pubsub_client:service_account_json(), any() => term() }. -type state() :: #{ + attributes_template := #{emqx_placeholder:tmpl_token() => emqx_placeholder:tmpl_token()}, client := emqx_bridge_gcp_pubsub_client:state(), + ordering_key_template := emqx_placeholder:tmpl_token(), payload_template := emqx_placeholder:tmpl_token(), project_id := emqx_bridge_gcp_pubsub_client:project_id(), pubsub_topic := binary() @@ -57,6 +62,8 @@ on_start(InstanceId, Config0) -> }), Config = maps:update_with(service_account_json, fun emqx_utils_maps:binary_key_map/1, Config0), #{ + attributes_template := AttributesTemplate, + ordering_key_template := OrderingKeyTemplate, payload_template := PayloadTemplate, pubsub_topic := PubSubTopic, service_account_json := #{<<"project_id">> := ProjectId} @@ -65,6 +72,8 @@ on_start(InstanceId, Config0) -> {ok, Client} -> State = #{ client => Client, + attributes_template => preproc_attributes(AttributesTemplate), + ordering_key_template => emqx_placeholder:preproc_tmpl(OrderingKeyTemplate), payload_template => emqx_placeholder:preproc_tmpl(PayloadTemplate), project_id => ProjectId, pubsub_topic => PubSubTopic @@ -197,14 +206,107 @@ do_send_requests_async(State, Requests, ReplyFunAndArgs0) -> Request, ReplyFunAndArgs, Client ). --spec encode_payload(state(), Selected :: map()) -> #{data := binary()}. -encode_payload(_State = #{payload_template := PayloadTemplate}, Selected) -> - Interpolated = - case PayloadTemplate of - [] -> emqx_utils_json:encode(Selected); - _ -> emqx_placeholder:proc_tmpl(PayloadTemplate, Selected) +-spec encode_payload(state(), Selected :: map()) -> + #{ + data := binary(), + attributes => #{binary() => binary()}, + 'orderingKey' => binary() + }. 
+encode_payload(State, Selected) -> + #{ + attributes_template := AttributesTemplate, + ordering_key_template := OrderingKeyTemplate, + payload_template := PayloadTemplate + } = State, + Data = render_payload(PayloadTemplate, Selected), + OrderingKey = render_key(OrderingKeyTemplate, Selected), + Attributes = proc_attributes(AttributesTemplate, Selected), + Payload0 = #{data => base64:encode(Data)}, + Payload1 = put_if(Payload0, attributes, Attributes, map_size(Attributes) > 0), + put_if(Payload1, 'orderingKey', OrderingKey, OrderingKey =/= <<>>). + +put_if(Acc, K, V, true) -> + Acc#{K => V}; +put_if(Acc, _K, _V, false) -> + Acc. + +-spec render_payload(emqx_placeholder:tmpl_token(), map()) -> binary(). +render_payload([] = _Template, Selected) -> + emqx_utils_json:encode(Selected); +render_payload(Template, Selected) -> + render_value(Template, Selected). + +render_key(Template, Selected) -> + Opts = #{ + return => full_binary, + var_trans => fun + (_Var, undefined) -> + <<>>; + (Var, X) when is_boolean(X) -> + throw({bad_value_for_key, Var, X}); + (_Var, X) when is_binary(X); is_number(X); is_atom(X) -> + emqx_utils_conv:bin(X); + (Var, X) -> + throw({bad_value_for_key, Var, X}) + end + }, + try + emqx_placeholder:proc_tmpl(Template, Selected, Opts) + catch + throw:{bad_value_for_key, Var, X} -> + ?tp( + warning, + "gcp_pubsub_producer_bad_value_for_key", + #{ + placeholder => Var, + value => X, + action => "key ignored", + hint => "only plain values like strings and numbers can be used in keys" + } + ), + <<>> + end. + +render_value(Template, Selected) -> + Opts = #{ + return => full_binary, + var_trans => fun + (undefined) -> <<>>; + (X) -> emqx_utils_conv:bin(X) + end + }, + emqx_placeholder:proc_tmpl(Template, Selected, Opts). + +-spec preproc_attributes([#{key := binary(), value := binary()}]) -> + #{emqx_placeholder:tmpl_token() => emqx_placeholder:tmpl_token()}. +preproc_attributes(AttributesTemplate) -> + lists:foldl( + fun(#{key := K, value := V}, Acc) -> + KT = emqx_placeholder:preproc_tmpl(K), + VT = emqx_placeholder:preproc_tmpl(V), + Acc#{KT => VT} end, - #{data => base64:encode(Interpolated)}. + #{}, + AttributesTemplate + ). + +-spec proc_attributes(#{emqx_placeholder:tmpl_token() => emqx_placeholder:tmpl_token()}, map()) -> + #{binary() => binary()}. +proc_attributes(AttributesTemplate, Selected) -> + maps:fold( + fun(KT, VT, Acc) -> + K = render_key(KT, Selected), + case K =:= <<>> of + true -> + Acc; + false -> + V = render_value(VT, Selected), + Acc#{K => V} + end + end, + #{}, + AttributesTemplate + ). -spec to_pubsub_request([#{data := binary()}]) -> binary(). to_pubsub_request(Payloads) -> diff --git a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl index 8cb0ef2f9..681e5fed7 100644 --- a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl +++ b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl @@ -760,6 +760,64 @@ prop_acked_ids_eventually_forgotten(Trace) -> ), ok. 
+permission_denied_response() -> + Link = + <<"https://console.developers.google.com/project/9999/apiui/credential">>, + {error, #{ + status_code => 403, + headers => + [ + {<<"vary">>, <<"X-Origin">>}, + {<<"vary">>, <<"Referer">>}, + {<<"content-type">>, <<"application/json; charset=UTF-8">>}, + {<<"date">>, <<"Tue, 15 Aug 2023 13:59:09 GMT">>}, + {<<"server">>, <<"ESF">>}, + {<<"cache-control">>, <<"private">>}, + {<<"x-xss-protection">>, <<"0">>}, + {<<"x-frame-options">>, <<"SAMEORIGIN">>}, + {<<"x-content-type-options">>, <<"nosniff">>}, + {<<"alt-svc">>, <<"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000">>}, + {<<"accept-ranges">>, <<"none">>}, + {<<"vary">>, <<"Origin,Accept-Encoding">>}, + {<<"transfer-encoding">>, <<"chunked">>} + ], + body => emqx_utils_json:encode( + #{ + <<"error">> => + #{ + <<"code">> => 403, + <<"details">> => + [ + #{ + <<"@type">> => <<"type.googleapis.com/google.rpc.Help">>, + <<"links">> => + [ + #{ + <<"description">> => + <<"Google developer console API key">>, + <<"url">> => + Link + } + ] + }, + #{ + <<"@type">> => <<"type.googleapis.com/google.rpc.ErrorInfo">>, + <<"domain">> => <<"googleapis.com">>, + <<"metadata">> => + #{ + <<"consumer">> => <<"projects/9999">>, + <<"service">> => <<"pubsub.googleapis.com">> + }, + <<"reason">> => <<"CONSUMER_INVALID">> + } + ], + <<"message">> => <<"Project #9999 has been deleted.">>, + <<"status">> => <<"PERMISSION_DENIED">> + } + } + ) + }}. + %%------------------------------------------------------------------------------ %% Testcases %%------------------------------------------------------------------------------ @@ -785,7 +843,7 @@ t_start_stop(Config) -> prop_client_stopped(), prop_workers_stopped(PubSubTopic), fun(Trace) -> - ?assertMatch([_], ?of_kind(gcp_pubsub_consumer_unset_nonexistent_topic, Trace)), + ?assertMatch([_], ?of_kind(gcp_pubsub_consumer_clear_unhealthy, Trace)), ok end ] @@ -1992,6 +2050,81 @@ t_get_subscription(Config) -> ), ok. +t_permission_denied_topic_check(Config) -> + [#{pubsub_topic := PubSubTopic}] = ?config(topic_mapping, Config), + ResourceId = resource_id(Config), + ?check_trace( + begin + %% the emulator does not check any credentials + emqx_common_test_helpers:with_mock( + emqx_bridge_gcp_pubsub_client, + query_sync, + fun(PreparedRequest = {prepared_request, {Method, Path, _Body}}, Client) -> + RE = iolist_to_binary(["/topics/", PubSubTopic, "$"]), + case {Method =:= get, re:run(Path, RE)} of + {true, {match, _}} -> + permission_denied_response(); + _ -> + meck:passthrough([PreparedRequest, Client]) + end + end, + fun() -> + {{ok, _}, {ok, _}} = + ?wait_async_action( + create_bridge(Config), + #{?snk_kind := gcp_pubsub_stop}, + 5_000 + ), + ?assertMatch( + {ok, disconnected}, + emqx_resource_manager:health_check(ResourceId) + ), + ?assertMatch( + {ok, _Group, #{error := {unhealthy_target, "Permission denied" ++ _}}}, + emqx_resource_manager:lookup_cached(ResourceId) + ), + ok + end + ), + ok + end, + [] + ), + ok. 
+ +t_permission_denied_worker(Config) -> + ?check_trace( + begin + emqx_common_test_helpers:with_mock( + emqx_bridge_gcp_pubsub_client, + query_sync, + fun(PreparedRequest = {prepared_request, {Method, _Path, _Body}}, Client) -> + case Method =:= put of + true -> + permission_denied_response(); + false -> + meck:passthrough([PreparedRequest, Client]) + end + end, + fun() -> + {{ok, _}, {ok, _}} = + ?wait_async_action( + create_bridge( + Config + ), + #{?snk_kind := gcp_pubsub_consumer_worker_terminate}, + 10_000 + ), + + ok + end + ), + ok + end, + [] + ), + ok. + t_cluster_subscription(Config) -> [ #{ diff --git a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_producer_SUITE.erl b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_producer_SUITE.erl index a9bbf6178..acfe3df8b 100644 --- a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_producer_SUITE.erl +++ b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_producer_SUITE.erl @@ -63,7 +63,9 @@ single_config_tests() -> t_get_status_down, t_get_status_no_worker, t_get_status_timeout_calling_workers, - t_on_start_ehttpc_pool_already_started + t_on_start_ehttpc_pool_already_started, + t_attributes, + t_bad_attributes ]. only_sync_tests() -> @@ -212,7 +214,9 @@ create_bridge_http(Config, GCPPubSubConfigOverrides) -> Error end, ct:pal("bridge creation result: ~p", [Res]), - ?assertEqual(element(1, ProbeResult), element(1, Res)), + ?assertEqual(element(1, ProbeResult), element(1, Res), #{ + creation_result => Res, probe_result => ProbeResult + }), case ProbeResult of {error, {{_, 500, _}, _, _}} -> error({bad_probe_result, ProbeResult}); _ -> ok @@ -456,6 +460,7 @@ assert_valid_request_headers(Headers, ServiceAccountJSON) -> assert_valid_request_body(Body) -> BodyMap = emqx_utils_json:decode(Body, [return_maps]), ?assertMatch(#{<<"messages">> := [_ | _]}, BodyMap), + ct:pal("request: ~p", [BodyMap]), #{<<"messages">> := Messages} = BodyMap, lists:map( fun(Msg) -> @@ -480,6 +485,31 @@ assert_http_request(ServiceAccountJSON) -> error({timeout, #{mailbox => Mailbox}}) end. +receive_http_requests(ServiceAccountJSON, Opts) -> + Default = #{n => 1}, + #{n := N} = maps:merge(Default, Opts), + lists:flatmap(fun(_) -> receive_http_request(ServiceAccountJSON) end, lists:seq(1, N)). + +receive_http_request(ServiceAccountJSON) -> + receive + {http, Headers, Body} -> + ct:pal("received publish:\n ~p", [#{headers => Headers, body => Body}]), + assert_valid_request_headers(Headers, ServiceAccountJSON), + #{<<"messages">> := Msgs} = emqx_utils_json:decode(Body, [return_maps]), + lists:map( + fun(Msg) -> + #{<<"data">> := Content64} = Msg, + Content = base64:decode(Content64), + Decoded = emqx_utils_json:decode(Content, [return_maps]), + Msg#{<<"data">> := Decoded} + end, + Msgs + ) + after 5_000 -> + {messages, Mailbox} = process_info(self(), messages), + error({timeout, #{mailbox => Mailbox}}) + end. + install_telemetry_handler(TestCase) -> Tid = ets:new(TestCase, [ordered_set, public]), HandlerId = TestCase, @@ -585,8 +615,8 @@ t_publish_success(Config) -> <<"topic">> := Topic, <<"payload">> := Payload, <<"metadata">> := #{<<"rule_id">> := RuleId} - } - ], + } = Msg + ] when not (is_map_key(<<"attributes">>, Msg) orelse is_map_key(<<"orderingKey">>, Msg)), DecodedMessages ), %% to avoid test flakiness @@ -1524,3 +1554,251 @@ t_query_sync(Config) -> [] ), ok. 
+ +t_attributes(Config) -> + Name = ?config(gcp_pubsub_name, Config), + ServiceAccountJSON = ?config(service_account_json, Config), + LocalTopic = <<"t/topic">>, + ?check_trace( + begin + {ok, _} = create_bridge_http( + Config, + #{ + <<"local_topic">> => LocalTopic, + <<"attributes_template">> => + [ + #{ + <<"key">> => <<"${.payload.key}">>, + <<"value">> => <<"fixed_value">> + }, + #{ + <<"key">> => <<"${.payload.key}2">>, + <<"value">> => <<"${.payload.value}">> + }, + #{ + <<"key">> => <<"fixed_key">>, + <<"value">> => <<"fixed_value">> + }, + #{ + <<"key">> => <<"fixed_key2">>, + <<"value">> => <<"${.payload.value}">> + } + ], + <<"ordering_key_template">> => <<"${.payload.ok}">> + } + ), + %% without ordering key + Payload0 = + emqx_utils_json:encode( + #{ + <<"value">> => <<"payload_value">>, + <<"key">> => <<"payload_key">> + } + ), + Message0 = emqx_message:make(LocalTopic, Payload0), + emqx:publish(Message0), + DecodedMessages0 = receive_http_request(ServiceAccountJSON), + ?assertMatch( + [ + #{ + <<"attributes">> := + #{ + <<"fixed_key">> := <<"fixed_value">>, + <<"fixed_key2">> := <<"payload_value">>, + <<"payload_key">> := <<"fixed_value">>, + <<"payload_key2">> := <<"payload_value">> + }, + <<"data">> := #{ + <<"topic">> := _, + <<"payload">> := _ + } + } = Msg + ] when not is_map_key(<<"orderingKey">>, Msg), + DecodedMessages0 + ), + %% with ordering key + Payload1 = + emqx_utils_json:encode( + #{ + <<"value">> => <<"payload_value">>, + <<"key">> => <<"payload_key">>, + <<"ok">> => <<"ordering_key">> + } + ), + Message1 = emqx_message:make(LocalTopic, Payload1), + emqx:publish(Message1), + DecodedMessages1 = receive_http_request(ServiceAccountJSON), + ?assertMatch( + [ + #{ + <<"attributes">> := + #{ + <<"fixed_key">> := <<"fixed_value">>, + <<"fixed_key2">> := <<"payload_value">>, + <<"payload_key">> := <<"fixed_value">>, + <<"payload_key2">> := <<"payload_value">> + }, + <<"orderingKey">> := <<"ordering_key">>, + <<"data">> := #{ + <<"topic">> := _, + <<"payload">> := _ + } + } + ], + DecodedMessages1 + ), + %% will result in empty key + Payload2 = + emqx_utils_json:encode( + #{ + <<"value">> => <<"payload_value">>, + <<"ok">> => <<"ordering_key">> + } + ), + Message2 = emqx_message:make(LocalTopic, Payload2), + emqx:publish(Message2), + [DecodedMessage2] = receive_http_request(ServiceAccountJSON), + ?assertEqual( + #{ + <<"fixed_key">> => <<"fixed_value">>, + <<"fixed_key2">> => <<"payload_value">>, + <<"2">> => <<"payload_value">> + }, + maps:get(<<"attributes">>, DecodedMessage2) + ), + %% ensure loading cluster override file doesn't mangle the attribute + %% placeholders... + #{<<"bridges">> := #{?BRIDGE_TYPE_BIN := #{Name := RawConf}}} = + emqx_config:read_override_conf(#{override_to => cluster}), + ?assertEqual( + [ + #{ + <<"key">> => <<"${.payload.key}">>, + <<"value">> => <<"fixed_value">> + }, + #{ + <<"key">> => <<"${.payload.key}2">>, + <<"value">> => <<"${.payload.value}">> + }, + #{ + <<"key">> => <<"fixed_key">>, + <<"value">> => <<"fixed_value">> + }, + #{ + <<"key">> => <<"fixed_key2">>, + <<"value">> => <<"${.payload.value}">> + } + ], + maps:get(<<"attributes_template">>, RawConf) + ), + ok + end, + [] + ), + ok. 
+ +t_bad_attributes(Config) -> + ServiceAccountJSON = ?config(service_account_json, Config), + LocalTopic = <<"t/topic">>, + ?check_trace( + begin + {ok, _} = create_bridge_http( + Config, + #{ + <<"local_topic">> => LocalTopic, + <<"attributes_template">> => + [ + #{ + <<"key">> => <<"${.payload.key}">>, + <<"value">> => <<"${.payload.value}">> + } + ], + <<"ordering_key_template">> => <<"${.payload.ok}">> + } + ), + %% Ok: attribute value is a map or list + lists:foreach( + fun(OkValue) -> + Payload0 = + emqx_utils_json:encode( + #{ + <<"ok">> => <<"ord_key">>, + <<"value">> => OkValue, + <<"key">> => <<"attr_key">> + } + ), + Message0 = emqx_message:make(LocalTopic, Payload0), + emqx:publish(Message0) + end, + [ + #{<<"some">> => <<"map">>}, + [1, <<"str">>, #{<<"deep">> => true}] + ] + ), + DecodedMessages0 = receive_http_requests(ServiceAccountJSON, #{n => 1}), + ?assertMatch( + [ + #{ + <<"attributes">> := + #{<<"attr_key">> := <<"{\"some\":\"map\"}">>}, + <<"orderingKey">> := <<"ord_key">> + }, + #{ + <<"attributes">> := + #{<<"attr_key">> := <<"[1,\"str\",{\"deep\":true}]">>}, + <<"orderingKey">> := <<"ord_key">> + } + ], + DecodedMessages0 + ), + %% Bad: key is not a plain value + lists:foreach( + fun(BadKey) -> + Payload1 = + emqx_utils_json:encode( + #{ + <<"value">> => <<"v">>, + <<"key">> => BadKey, + <<"ok">> => BadKey + } + ), + Message1 = emqx_message:make(LocalTopic, Payload1), + emqx:publish(Message1) + end, + [ + #{<<"some">> => <<"map">>}, + [1, <<"list">>, true], + true, + false + ] + ), + DecodedMessages1 = receive_http_request(ServiceAccountJSON), + lists:foreach( + fun(DMsg) -> + ?assertNot(is_map_key(<<"orderingKey">>, DMsg), #{decoded_message => DMsg}), + ?assertNot(is_map_key(<<"attributes">>, DMsg), #{decoded_message => DMsg}), + ok + end, + DecodedMessages1 + ), + ok + end, + fun(Trace) -> + ct:pal("trace:\n ~p", [Trace]), + ?assertMatch( + [ + #{placeholder := [<<"payload">>, <<"ok">>], value := #{}}, + #{placeholder := [<<"payload">>, <<"key">>], value := #{}}, + #{placeholder := [<<"payload">>, <<"ok">>], value := [_ | _]}, + #{placeholder := [<<"payload">>, <<"key">>], value := [_ | _]}, + #{placeholder := [<<"payload">>, <<"ok">>], value := true}, + #{placeholder := [<<"payload">>, <<"key">>], value := true}, + #{placeholder := [<<"payload">>, <<"ok">>], value := false}, + #{placeholder := [<<"payload">>, <<"key">>], value := false} + ], + ?of_kind("gcp_pubsub_producer_bad_value_for_key", Trace) + ), + ok + end + ), + ok. diff --git a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_tests.erl b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_tests.erl new file mode 100644 index 000000000..885754470 --- /dev/null +++ b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_tests.erl @@ -0,0 +1,149 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_bridge_gcp_pubsub_tests). + +-include_lib("eunit/include/eunit.hrl"). 
+ +%%=========================================================================== +%% Data section +%%=========================================================================== + +%% erlfmt-ignore +gcp_pubsub_producer_hocon() -> +""" +bridges.gcp_pubsub.my_producer { + attributes_template = [ + {key = \"${payload.key}\", value = fixed_value} + {key = \"${payload.key}2\", value = \"${.payload.value}\"} + {key = fixed_key, value = fixed_value} + {key = fixed_key2, value = \"${.payload.value}\"} + ] + connect_timeout = 15s + enable = false + local_topic = \"t/gcp/produ\" + max_retries = 2 + ordering_key_template = \"${.payload.ok}\" + payload_template = \"${.}\" + pipelining = 100 + pool_size = 8 + pubsub_topic = my-topic + resource_opts { + batch_size = 1 + batch_time = 0ms + health_check_interval = 15s + inflight_window = 100 + max_buffer_bytes = 256MB + query_mode = async + request_ttl = 15s + start_after_created = true + start_timeout = 5s + worker_pool_size = 16 + } + service_account_json { + auth_provider_x509_cert_url = \"https://www.googleapis.com/oauth2/v1/certs\" + auth_uri = \"https://accounts.google.com/o/oauth2/auth\" + client_email = \"test@myproject.iam.gserviceaccount.com\" + client_id = \"123812831923812319190\" + client_x509_cert_url = \"https://www.googleapis.com/robot/v1/metadata/x509/...\" + private_key = \"-----BEGIN PRIVATE KEY-----...\" + private_key_id = \"kid\" + project_id = myproject + token_uri = \"https://oauth2.googleapis.com/token\" + type = service_account + } +} +""". + +%%=========================================================================== +%% Helper functions +%%=========================================================================== + +parse(Hocon) -> + {ok, Conf} = hocon:binary(Hocon), + Conf. + +check(Conf) when is_map(Conf) -> + hocon_tconf:check_plain(emqx_bridge_schema, Conf). + +-define(validation_error(Reason, Value), + {emqx_bridge_schema, [ + #{ + kind := validation_error, + reason := Reason, + value := Value + } + ]} +). + +-define(ok_config(Cfg), #{ + <<"bridges">> := + #{ + <<"gcp_pubsub">> := + #{ + <<"my_producer">> := + Cfg + } + } +}). + +%%=========================================================================== +%% Test cases +%%=========================================================================== + +producer_attributes_validator_test_() -> + %% ensure this module is loaded when testing only this file + _ = emqx_bridge_enterprise:module_info(), + BaseConf = parse(gcp_pubsub_producer_hocon()), + Override = fun(Cfg) -> + emqx_utils_maps:deep_merge( + BaseConf, + #{ + <<"bridges">> => + #{ + <<"gcp_pubsub">> => + #{<<"my_producer">> => Cfg} + } + } + ) + end, + [ + {"base config", + ?_assertMatch( + ?ok_config(#{ + <<"attributes_template">> := [_, _, _, _] + }), + check(BaseConf) + )}, + {"empty key template", + ?_assertThrow( + ?validation_error("Key templates must not be empty", _), + check( + Override(#{ + <<"attributes_template">> => [ + #{ + <<"key">> => <<>>, + <<"value">> => <<"some_value">> + } + ] + }) + ) + )}, + {"empty value template", + ?_assertMatch( + ?ok_config(#{ + <<"attributes_template">> := [_] + }), + check( + Override(#{ + <<"attributes_template">> => [ + #{ + <<"key">> => <<"some_key">>, + <<"value">> => <<>> + } + ] + }) + ) + )} + ]. 
diff --git a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src index a612c225b..2a0eef72e 100644 --- a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src +++ b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_influxdb, [ {description, "EMQX Enterprise InfluxDB Bridge"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl index b178f77e0..47eeecb4e 100644 --- a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl +++ b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl @@ -168,6 +168,9 @@ write_syntax(format) -> write_syntax(_) -> undefined. +to_influx_lines(Lines = [#{} | _]) -> + %% already parsed/converted (e.g.: bridge_probe, after hocon_tconf:check_plain) + Lines; to_influx_lines(RawLines) -> try influx_lines(str(RawLines), []) diff --git a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb_connector.erl b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb_connector.erl index be5ed6b1c..b39d46b59 100644 --- a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb_connector.erl +++ b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb_connector.erl @@ -66,7 +66,9 @@ on_start(InstId, Config) -> on_stop(InstId, _State) -> case emqx_resource:get_allocated_resources(InstId) of #{?influx_client := Client} -> - influxdb:stop_client(Client); + Res = influxdb:stop_client(Client), + ?tp(influxdb_client_stopped, #{instance_id => InstId}), + Res; _ -> ok end. diff --git a/apps/emqx_bridge_influxdb/test/emqx_bridge_influxdb_SUITE.erl b/apps/emqx_bridge_influxdb/test/emqx_bridge_influxdb_SUITE.erl index 3976d187a..c0d63002b 100644 --- a/apps/emqx_bridge_influxdb/test/emqx_bridge_influxdb_SUITE.erl +++ b/apps/emqx_bridge_influxdb/test/emqx_bridge_influxdb_SUITE.erl @@ -124,6 +124,9 @@ init_per_group(InfluxDBType, Config0) when {influxdb_config, InfluxDBConfig}, {influxdb_config_string, ConfigString}, {ehttpc_pool_name, EHttpcPoolName}, + {bridge_type, influxdb_api_v1}, + {bridge_name, Name}, + {bridge_config, InfluxDBConfig}, {influxdb_name, Name} | Config ]; @@ -193,6 +196,9 @@ init_per_group(InfluxDBType, Config0) when {influxdb_config, InfluxDBConfig}, {influxdb_config_string, ConfigString}, {ehttpc_pool_name, EHttpcPoolName}, + {bridge_type, influxdb_api_v2}, + {bridge_name, Name}, + {bridge_config, InfluxDBConfig}, {influxdb_name, Name} | Config ]; @@ -570,6 +576,10 @@ t_start_ok(Config) -> ), ok. +t_start_stop(Config) -> + ok = emqx_bridge_testlib:t_start_stop(Config, influxdb_client_stopped), + ok. + t_start_already_started(Config) -> Type = influxdb_type_bin(?config(influxdb_type, Config)), Name = ?config(influxdb_name, Config), diff --git a/apps/emqx_bridge_kafka/rebar.config b/apps/emqx_bridge_kafka/rebar.config index 945ccbdba..8246fa8cf 100644 --- a/apps/emqx_bridge_kafka/rebar.config +++ b/apps/emqx_bridge_kafka/rebar.config @@ -1,6 +1,6 @@ %% -*- mode: erlang; -*- {erl_opts, [debug_info]}. 
-{deps, [ {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.7.6"}}} +{deps, [ {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.7.7"}}} , {kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.3"}}} , {brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.0"}}} , {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.16.8"}}} diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src index 3792409c6..55b02560b 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_kafka, [ {description, "EMQX Enterprise Kafka Bridge"}, - {vsn, "0.1.7"}, + {vsn, "0.1.8"}, {registered, [emqx_bridge_kafka_consumer_sup]}, {applications, [ kernel, diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl index 544c95b85..6b3f3cd64 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl @@ -268,7 +268,8 @@ fields(producer_opts) -> required => true, desc => ?DESC(producer_kafka_opts), validator => fun producer_strategy_key_validator/1 - })} + })}, + {resource_opts, mk(ref(resource_opts), #{default => #{}})} ]; fields(producer_kafka_opts) -> [ @@ -425,7 +426,8 @@ fields(consumer_opts) -> {value_encoding_mode, mk(enum([none, base64]), #{ default => none, desc => ?DESC(consumer_value_encoding_mode) - })} + })}, + {resource_opts, mk(ref(resource_opts), #{default => #{}})} ]; fields(consumer_topic_mapping) -> [ @@ -460,10 +462,16 @@ fields(consumer_kafka_opts) -> emqx_schema:timeout_duration_s(), #{default => <<"5s">>, desc => ?DESC(consumer_offset_commit_interval_seconds)} )} - ]. + ]; +fields(resource_opts) -> + SupportedFields = [health_check_interval], + CreationOpts = emqx_resource_schema:create_opts(_Overrides = []), + lists:filter(fun({Field, _}) -> lists:member(Field, SupportedFields) end, CreationOpts). 
desc("config") -> ?DESC("desc_config"); +desc(resource_opts) -> + ?DESC(emqx_resource_schema, "resource_opts"); desc("get_" ++ Type) when Type =:= "consumer"; Type =:= "producer" -> ["Configuration for Kafka using `GET` method."]; desc("put_" ++ Type) when Type =:= "consumer"; Type =:= "producer" -> diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl index 31cd4c66a..d93b6dd7d 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl @@ -596,7 +596,6 @@ t_send_message_with_headers(Config) -> }, KafkaMsg ), - ?assertMatch(#kafka_message{key = BinTime}, KafkaMsg), %% TODO: refactor those into init/end per testcase ok = ?PRODUCER:on_stop(ResourceId, State), ?assertEqual([], supervisor:which_children(wolff_client_sup)), diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl index 367423cd4..f476ded39 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl @@ -306,6 +306,9 @@ kafka_producer_new_hocon() -> " sndbuf = \"1024KB\"\n" " }\n" " ssl {enable = false, verify = \"verify_peer\"}\n" + " resource_opts {\n" + " health_check_interval = 10s\n" + " }\n" " }\n" "}\n" "". @@ -351,5 +354,8 @@ bridges.kafka_consumer.my_consumer { verify = verify_none server_name_indication = \"auto\" } + resource_opts { + health_check_interval = 10s + } } """. diff --git a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.app.src b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.app.src index 36f6c8b0b..3eb923b5d 100644 --- a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.app.src +++ b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_kinesis, [ {description, "EMQX Enterprise Amazon Kinesis Bridge"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis_connector_client.erl b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis_connector_client.erl index bb1000e5f..d9dc0220f 100644 --- a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis_connector_client.erl +++ b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis_connector_client.erl @@ -111,7 +111,14 @@ init(#{ erlcloud_config:configure( to_str(AwsAccessKey), to_str(AwsSecretAccessKey), Host, Port, Scheme, New ), - {ok, State}. + % check the connection + case erlcloud_kinesis:list_streams() of + {ok, _} -> + {ok, State}; + {error, Reason} -> + ?tp(kinesis_init_failed, #{instance_id => InstanceId, reason => Reason}), + {stop, Reason} + end. handle_call(connection_status, _From, #{stream_name := StreamName} = State) -> Status = diff --git a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis_impl_producer.erl b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis_impl_producer.erl index 7948581b5..1e07ae96e 100644 --- a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis_impl_producer.erl +++ b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis_impl_producer.erl @@ -114,7 +114,12 @@ on_get_status(_InstanceId, #{pool_name := Pool} = State) -> false -> disconnected end end; - {error, _} -> + {error, Reason} -> + ?SLOG(error, #{ + msg => "kinesis_producer_get_status_failed", + state => State, + reason => Reason + }), disconnected end. 
diff --git a/apps/emqx_bridge_kinesis/test/emqx_bridge_kinesis_impl_producer_SUITE.erl b/apps/emqx_bridge_kinesis/test/emqx_bridge_kinesis_impl_producer_SUITE.erl index 114f324a9..d0fe4a1b4 100644 --- a/apps/emqx_bridge_kinesis/test/emqx_bridge_kinesis_impl_producer_SUITE.erl +++ b/apps/emqx_bridge_kinesis/test/emqx_bridge_kinesis_impl_producer_SUITE.erl @@ -796,7 +796,9 @@ t_publish_connection_down(Config0) -> ok. t_wrong_server(Config) -> + TypeBin = ?BRIDGE_TYPE_BIN, Name = ?config(kinesis_name, Config), + KinesisConfig0 = ?config(kinesis_config, Config), ResourceId = ?config(resource_id, Config), Overrides = #{ @@ -806,12 +808,57 @@ t_wrong_server(Config) -> <<"health_check_interval">> => <<"60s">> } }, + % probe + KinesisConfig = emqx_utils_maps:deep_merge(KinesisConfig0, Overrides), + Params = KinesisConfig#{<<"type">> => TypeBin, <<"name">> => Name}, + ProbePath = emqx_mgmt_api_test_util:api_path(["bridges_probe"]), + AuthHeader = emqx_mgmt_api_test_util:auth_header_(), + ?assertMatch( + {error, {_, 400, _}}, + emqx_mgmt_api_test_util:request_api(post, ProbePath, "", AuthHeader, Params) + ), + % create ?wait_async_action( create_bridge(Config, Overrides), - #{?snk_kind := emqx_bridge_kinesis_impl_producer_start_ok}, + #{?snk_kind := start_pool_failed}, 30_000 ), - ?assertEqual({error, timeout}, emqx_resource_manager:health_check(ResourceId)), - emqx_bridge_resource:stop(?BRIDGE_TYPE, Name), - emqx_bridge_resource:remove(?BRIDGE_TYPE, Name), + ?assertMatch( + {ok, _, #{error := {start_pool_failed, ResourceId, _}}}, + emqx_resource_manager:lookup_cached(ResourceId) + ), + ok. + +t_access_denied(Config) -> + TypeBin = ?BRIDGE_TYPE_BIN, + Name = ?config(kinesis_name, Config), + KinesisConfig = ?config(kinesis_config, Config), + ResourceId = ?config(resource_id, Config), + AccessError = {<<"AccessDeniedException">>, <<>>}, + Params = KinesisConfig#{<<"type">> => TypeBin, <<"name">> => Name}, + ProbePath = emqx_mgmt_api_test_util:api_path(["bridges_probe"]), + AuthHeader = emqx_mgmt_api_test_util:auth_header_(), + emqx_common_test_helpers:with_mock( + erlcloud_kinesis, + list_streams, + fun() -> {error, AccessError} end, + fun() -> + % probe + ?assertMatch( + {error, {_, 400, _}}, + emqx_mgmt_api_test_util:request_api(post, ProbePath, "", AuthHeader, Params) + ), + % create + ?wait_async_action( + create_bridge(Config), + #{?snk_kind := kinesis_init_failed}, + 30_000 + ), + ?assertMatch( + {ok, _, #{error := {start_pool_failed, ResourceId, AccessError}}}, + emqx_resource_manager:lookup_cached(ResourceId) + ), + ok + end + ), ok. diff --git a/apps/emqx_bridge_mongodb/test/emqx_bridge_mongodb_SUITE.erl b/apps/emqx_bridge_mongodb/test/emqx_bridge_mongodb_SUITE.erl index 758124713..785afc4a0 100644 --- a/apps/emqx_bridge_mongodb/test/emqx_bridge_mongodb_SUITE.erl +++ b/apps/emqx_bridge_mongodb/test/emqx_bridge_mongodb_SUITE.erl @@ -29,7 +29,8 @@ group_tests() -> t_payload_template, t_collection_template, t_mongo_date_rule_engine_functions, - t_get_status_server_selection_too_short + t_get_status_server_selection_too_short, + t_use_legacy_protocol_option ]. 
groups() -> @@ -180,6 +181,7 @@ mongo_config(MongoHost, MongoPort0, rs = Type, Config) -> " replica_set_name = rs0\n" " servers = [~p]\n" " w_mode = safe\n" + " use_legacy_protocol = auto\n" " database = mqtt\n" " resource_opts = {\n" " query_mode = ~s\n" @@ -205,6 +207,7 @@ mongo_config(MongoHost, MongoPort0, sharded = Type, Config) -> " collection = mycol\n" " servers = [~p]\n" " w_mode = safe\n" + " use_legacy_protocol = auto\n" " database = mqtt\n" " resource_opts = {\n" " query_mode = ~s\n" @@ -230,6 +233,7 @@ mongo_config(MongoHost, MongoPort0, single = Type, Config) -> " collection = mycol\n" " server = ~p\n" " w_mode = safe\n" + " use_legacy_protocol = auto\n" " database = mqtt\n" " resource_opts = {\n" " query_mode = ~s\n" @@ -286,10 +290,8 @@ clear_db(Config) -> mongo_api:disconnect(Client). find_all(Config) -> - Type = mongo_type_bin(?config(mongo_type, Config)), - Name = ?config(mongo_name, Config), #{<<"collection">> := Collection} = ?config(mongo_config, Config), - ResourceID = emqx_bridge_resource:resource_id(Type, Name), + ResourceID = resource_id(Config), emqx_resource:simple_sync_query(ResourceID, {find, Collection, #{}, #{}}). find_all_wait_until_non_empty(Config) -> @@ -340,6 +342,27 @@ probe_bridge_api(Config, Overrides) -> ct:pal("bridge probe result: ~p", [Res]), Res. +resource_id(Config) -> + Type0 = ?config(mongo_type, Config), + Name = ?config(mongo_name, Config), + Type = mongo_type_bin(Type0), + emqx_bridge_resource:resource_id(Type, Name). + +get_worker_pids(Config) -> + ResourceID = resource_id(Config), + %% abusing health check api a bit... + GetWorkerPid = fun(TopologyPid) -> + mongoc:transaction_query(TopologyPid, fun(#{pool := WorkerPid}) -> WorkerPid end) + end, + {ok, WorkerPids = [_ | _]} = + emqx_resource_pool:health_check_workers( + ResourceID, + GetWorkerPid, + 5_000, + #{return_values => true} + ), + WorkerPids. + %%------------------------------------------------------------------------------ %% Testcases %%------------------------------------------------------------------------------ @@ -494,3 +517,30 @@ t_get_status_server_selection_too_short(Config) -> emqx_utils_json:decode(Body) ), ok. + +t_use_legacy_protocol_option(Config) -> + ResourceID = resource_id(Config), + {ok, _} = create_bridge(Config, #{<<"use_legacy_protocol">> => <<"true">>}), + ?retry( + _Interval0 = 200, + _NAttempts0 = 20, + ?assertMatch({ok, connected}, emqx_resource_manager:health_check(ResourceID)) + ), + WorkerPids0 = get_worker_pids(Config), + Expected0 = maps:from_keys(WorkerPids0, true), + LegacyOptions0 = maps:from_list([{Pid, mc_utils:use_legacy_protocol(Pid)} || Pid <- WorkerPids0]), + ?assertEqual(Expected0, LegacyOptions0), + {ok, _} = delete_bridge(Config), + + {ok, _} = create_bridge(Config, #{<<"use_legacy_protocol">> => <<"false">>}), + ?retry( + _Interval0 = 200, + _NAttempts0 = 20, + ?assertMatch({ok, connected}, emqx_resource_manager:health_check(ResourceID)) + ), + WorkerPids1 = get_worker_pids(Config), + Expected1 = maps:from_keys(WorkerPids1, false), + LegacyOptions1 = maps:from_list([{Pid, mc_utils:use_legacy_protocol(Pid)} || Pid <- WorkerPids1]), + ?assertEqual(Expected1, LegacyOptions1), + + ok. 
diff --git a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl index 38b112e99..fb358906f 100644 --- a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl +++ b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl @@ -1089,7 +1089,7 @@ t_strategy_key_validation(Config) -> #{ <<"kind">> := <<"validation_error">>, <<"reason">> := <<"Message key cannot be empty", _/binary>> - } = Msg + } }}}, probe_bridge_api( Config, @@ -1103,7 +1103,7 @@ t_strategy_key_validation(Config) -> #{ <<"kind">> := <<"validation_error">>, <<"reason">> := <<"Message key cannot be empty", _/binary>> - } = Msg + } }}}, create_bridge_api( Config, diff --git a/apps/emqx_conf/src/emqx_conf.app.src b/apps/emqx_conf/src/emqx_conf.app.src index 86fb169a6..ab65c03c8 100644 --- a/apps/emqx_conf/src/emqx_conf.app.src +++ b/apps/emqx_conf/src/emqx_conf.app.src @@ -1,6 +1,6 @@ {application, emqx_conf, [ {description, "EMQX configuration management"}, - {vsn, "0.1.25"}, + {vsn, "0.1.26"}, {registered, []}, {mod, {emqx_conf_app, []}}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index eea2bf1b8..246f36f41 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -63,6 +63,7 @@ emqx_psk_schema, emqx_limiter_schema, emqx_slow_subs_schema, + emqx_otel_schema, emqx_mgmt_api_key_schema ]). %% 1 million default ports counter diff --git a/apps/emqx_connector/rebar.config b/apps/emqx_connector/rebar.config index 132863127..78515abe6 100644 --- a/apps/emqx_connector/rebar.config +++ b/apps/emqx_connector/rebar.config @@ -9,7 +9,6 @@ {emqx, {path, "../emqx"}}, {emqx_utils, {path, "../emqx_utils"}}, {emqx_resource, {path, "../emqx_resource"}}, - {eldap2, {git, "https://github.com/emqx/eldap2", {tag, "v0.2.2"}}}, {epgsql, {git, "https://github.com/emqx/epgsql", {tag, "4.7.0.1"}}} ]}. diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index cd8ce864c..397cd0093 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_connector, [ {description, "EMQX Data Integration Connectors"}, - {vsn, "0.1.29"}, + {vsn, "0.1.30"}, {registered, []}, {mod, {emqx_connector_app, []}}, {applications, [ @@ -12,7 +12,6 @@ eredis_cluster, eredis, epgsql, - eldap2, ehttpc, jose, emqx, diff --git a/apps/emqx_dashboard/src/emqx_dashboard.app.src b/apps/emqx_dashboard/src/emqx_dashboard.app.src index ee4e60118..f8395025e 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard.app.src +++ b/apps/emqx_dashboard/src/emqx_dashboard.app.src @@ -2,7 +2,7 @@ {application, emqx_dashboard, [ {description, "EMQX Web Dashboard"}, % strict semver, bump manually! - {vsn, "5.0.25"}, + {vsn, "5.0.26"}, {modules, []}, {registered, [emqx_dashboard_sup]}, {applications, [kernel, stdlib, mnesia, minirest, emqx, emqx_ctl, emqx_bridge_http]}, @@ -12,6 +12,6 @@ {maintainers, ["EMQX Team "]}, {links, [ {"Homepage", "https://emqx.io/"}, - {"Github", "https://github.com/emqx/emqx-dashboard"} + {"Github", "https://github.com/emqx/emqx-dashboard5"} ]} ]}. 
diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index b0c78f0fe..a86c30893 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -856,8 +856,6 @@ typename_to_spec("timeout()", _Mod) -> }; typename_to_spec("bytesize()", _Mod) -> #{type => string, example => <<"32MB">>}; -typename_to_spec("mqtt_max_packet_size()", _Mod) -> - #{type => string, example => <<"32MB">>}; typename_to_spec("wordsize()", _Mod) -> #{type => string, example => <<"1024KB">>}; typename_to_spec("map()", _Mod) -> diff --git a/apps/emqx_ft/src/emqx_ft.app.src b/apps/emqx_ft/src/emqx_ft.app.src index ac498d6c6..8518958e0 100644 --- a/apps/emqx_ft/src/emqx_ft.app.src +++ b/apps/emqx_ft/src/emqx_ft.app.src @@ -1,6 +1,6 @@ {application, emqx_ft, [ {description, "EMQX file transfer over MQTT"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {mod, {emqx_ft_app, []}}, {applications, [ diff --git a/apps/emqx_ft/src/emqx_ft.erl b/apps/emqx_ft/src/emqx_ft.erl index 34dfc09a7..41046907b 100644 --- a/apps/emqx_ft/src/emqx_ft.erl +++ b/apps/emqx_ft/src/emqx_ft.erl @@ -71,7 +71,7 @@ %% the resulting file is corrupted during transmission). size => _Bytes :: non_neg_integer(), checksum => checksum(), - expire_at := emqx_datetime:epoch_second(), + expire_at := emqx_utils_calendar:epoch_second(), %% TTL of individual segments %% Somewhat confusing that we won't know it on the nodes where the filemeta %% is missing. diff --git a/apps/emqx_ft/src/emqx_ft_api.erl b/apps/emqx_ft/src/emqx_ft_api.erl index c4877fc68..be99618ca 100644 --- a/apps/emqx_ft/src/emqx_ft_api.erl +++ b/apps/emqx_ft/src/emqx_ft_api.erl @@ -278,7 +278,7 @@ format_file_info( end. format_timestamp(Timestamp) -> - iolist_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, second}])). + emqx_utils_calendar:epoch_to_rfc3339(Timestamp, second). format_name(NameBin) when is_binary(NameBin) -> NameBin; diff --git a/apps/emqx_ft/src/emqx_ft_storage.erl b/apps/emqx_ft/src/emqx_ft_storage.erl index 2d068466c..04fac3b38 100644 --- a/apps/emqx_ft/src/emqx_ft_storage.erl +++ b/apps/emqx_ft/src/emqx_ft_storage.erl @@ -68,7 +68,7 @@ transfer := emqx_ft:transfer(), name := file:name(), size := _Bytes :: non_neg_integer(), - timestamp := emqx_datetime:epoch_second(), + timestamp := emqx_utils_calendar:epoch_second(), uri => uri_string:uri_string(), meta => emqx_ft:filemeta() }. diff --git a/apps/emqx_ft/src/emqx_ft_storage_exporter_s3.erl b/apps/emqx_ft/src/emqx_ft_storage_exporter_s3.erl index ac06ab957..844896a2f 100644 --- a/apps/emqx_ft/src/emqx_ft_storage_exporter_s3.erl +++ b/apps/emqx_ft/src/emqx_ft_storage_exporter_s3.erl @@ -43,7 +43,7 @@ transfer := transfer(), name := file:name(), uri := uri_string:uri_string(), - timestamp := emqx_datetime:epoch_second(), + timestamp := emqx_utils_calendar:epoch_second(), size := _Bytes :: non_neg_integer(), filemeta => filemeta() }. diff --git a/apps/emqx_ft/src/emqx_ft_storage_fs.erl b/apps/emqx_ft/src/emqx_ft_storage_fs.erl index 1fd4d3a5d..5d0395989 100644 --- a/apps/emqx_ft/src/emqx_ft_storage_fs.erl +++ b/apps/emqx_ft/src/emqx_ft_storage_fs.erl @@ -76,7 +76,7 @@ % TODO naming -type filefrag(T) :: #{ path := file:name(), - timestamp := emqx_datetime:epoch_second(), + timestamp := emqx_utils_calendar:epoch_second(), size := _Bytes :: non_neg_integer(), fragment := T }. 
diff --git a/apps/emqx_gateway/src/emqx_gateway.app.src b/apps/emqx_gateway/src/emqx_gateway.app.src index b5fe5e100..582269ce6 100644 --- a/apps/emqx_gateway/src/emqx_gateway.app.src +++ b/apps/emqx_gateway/src/emqx_gateway.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway, [ {description, "The Gateway management application"}, - {vsn, "0.1.22"}, + {vsn, "0.1.23"}, {registered, []}, {mod, {emqx_gateway_app, []}}, {applications, [kernel, stdlib, emqx, emqx_authn, emqx_ctl]}, diff --git a/apps/emqx_gateway/src/emqx_gateway_api_clients.erl b/apps/emqx_gateway/src/emqx_gateway_api_clients.erl index 8cfcb70e6..b698446b9 100644 --- a/apps/emqx_gateway/src/emqx_gateway_api_clients.erl +++ b/apps/emqx_gateway/src/emqx_gateway_api_clients.erl @@ -397,13 +397,13 @@ format_channel_info(WhichNode, {_, Infos, Stats} = R) -> {ip_address, {peername, ConnInfo, fun peer_to_binary_addr/1}}, {port, {peername, ConnInfo, fun peer_to_port/1}}, {is_bridge, ClientInfo, false}, - {connected_at, {connected_at, ConnInfo, fun emqx_gateway_utils:unix_ts_to_rfc3339/1}}, - {disconnected_at, {disconnected_at, ConnInfo, fun emqx_gateway_utils:unix_ts_to_rfc3339/1}}, + {connected_at, {connected_at, ConnInfo, fun emqx_utils_calendar:epoch_to_rfc3339/1}}, + {disconnected_at, {disconnected_at, ConnInfo, fun emqx_utils_calendar:epoch_to_rfc3339/1}}, {connected, {conn_state, Infos, fun conn_state_to_connected/1}}, {keepalive, ClientInfo, 0}, {clean_start, ConnInfo, true}, {expiry_interval, ConnInfo, 0}, - {created_at, {created_at, SessInfo, fun emqx_gateway_utils:unix_ts_to_rfc3339/1}}, + {created_at, {created_at, SessInfo, fun emqx_utils_calendar:epoch_to_rfc3339/1}}, {subscriptions_cnt, Stats, 0}, {subscriptions_max, Stats, infinity}, {inflight_cnt, Stats, 0}, @@ -640,28 +640,28 @@ params_client_searching_in_qs() -> )}, {gte_created_at, mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), M#{ desc => ?DESC(param_gte_created_at) } )}, {lte_created_at, mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), M#{ desc => ?DESC(param_lte_created_at) } )}, {gte_connected_at, mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), M#{ desc => ?DESC(param_gte_connected_at) } )}, {lte_connected_at, mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), M#{ desc => ?DESC(param_lte_connected_at) } @@ -888,12 +888,12 @@ common_client_props() -> )}, {connected_at, mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), #{desc => ?DESC(connected_at)} )}, {disconnected_at, mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), #{ desc => ?DESC(disconnected_at) } @@ -931,7 +931,7 @@ common_client_props() -> )}, {created_at, mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), #{desc => ?DESC(created_at)} )}, {subscriptions_cnt, diff --git a/apps/emqx_gateway/src/emqx_gateway_cli.erl b/apps/emqx_gateway/src/emqx_gateway_cli.erl index fb4261065..36d61e458 100644 --- a/apps/emqx_gateway/src/emqx_gateway_cli.erl +++ b/apps/emqx_gateway/src/emqx_gateway_cli.erl @@ -313,9 +313,9 @@ format_gateway( [ Name, Status, - emqx_gateway_utils:unix_ts_to_rfc3339(CreatedAt), + emqx_utils_calendar:epoch_to_rfc3339(CreatedAt), StopOrStart, - emqx_gateway_utils:unix_ts_to_rfc3339(Timestamp), + emqx_utils_calendar:epoch_to_rfc3339(Timestamp), Config ] ). 
diff --git a/apps/emqx_gateway/src/emqx_gateway_utils.erl b/apps/emqx_gateway/src/emqx_gateway_utils.erl index eb4ce9fdf..10c71e3a7 100644 --- a/apps/emqx_gateway/src/emqx_gateway_utils.erl +++ b/apps/emqx_gateway/src/emqx_gateway_utils.erl @@ -38,7 +38,6 @@ -export([ apply/2, parse_listenon/1, - unix_ts_to_rfc3339/1, unix_ts_to_rfc3339/2, listener_id/3, parse_listener_id/1, @@ -364,14 +363,10 @@ unix_ts_to_rfc3339(Key, Map) -> Map; Ts -> Map#{ - Key => - emqx_rule_funcs:unix_ts_to_rfc3339(Ts, <<"millisecond">>) + Key => emqx_utils_calendar:epoch_to_rfc3339(Ts) } end. -unix_ts_to_rfc3339(Ts) -> - emqx_rule_funcs:unix_ts_to_rfc3339(Ts, <<"millisecond">>). - -spec stringfy(term()) -> binary(). stringfy(T) when is_list(T); is_binary(T) -> iolist_to_binary(T); diff --git a/apps/emqx_gateway_mqttsn/test/emqx_sn_protocol_SUITE.erl b/apps/emqx_gateway_mqttsn/test/emqx_sn_protocol_SUITE.erl index c3fa89c70..a0afd90c1 100644 --- a/apps/emqx_gateway_mqttsn/test/emqx_sn_protocol_SUITE.erl +++ b/apps/emqx_gateway_mqttsn/test/emqx_sn_protocol_SUITE.erl @@ -2312,9 +2312,7 @@ t_socket_passvice(_) -> ok. t_clients_api(_) -> - TsNow = emqx_gateway_utils:unix_ts_to_rfc3339( - erlang:system_time(millisecond) - ), + TsNow = emqx_utils_calendar:now_to_rfc3339(millisecond), ClientId = <<"client_id_test1">>, {ok, Socket} = gen_udp:open(0, [binary]), send_connect_msg(Socket, ClientId), diff --git a/apps/emqx_gcp_device/BSL.txt b/apps/emqx_gcp_device/BSL.txt new file mode 100644 index 000000000..0acc0e696 --- /dev/null +++ b/apps/emqx_gcp_device/BSL.txt @@ -0,0 +1,94 @@ +Business Source License 1.1 + +Licensor: Hangzhou EMQ Technologies Co., Ltd. +Licensed Work: EMQX Enterprise Edition + The Licensed Work is (c) 2023 + Hangzhou EMQ Technologies Co., Ltd. +Additional Use Grant: Students and educators are granted right to copy, + modify, and create derivative work for research + or education. +Change Date: 2027-02-01 +Change License: Apache License, Version 2.0 + +For information about alternative licensing arrangements for the Software, +please contact Licensor: https://www.emqx.com/en/contact + +Notice + +The Business Source License (this document, or the “License”) is not an Open +Source license. However, the Licensed Work will eventually be made available +under an Open Source License, as stated in this License. + +License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved. +“Business Source License” is a trademark of MariaDB Corporation Ab. + +----------------------------------------------------------------------------- + +Business Source License 1.1 + +Terms + +The Licensor hereby grants you the right to copy, modify, create derivative +works, redistribute, and make non-production use of the Licensed Work. The +Licensor may make an Additional Use Grant, above, permitting limited +production use. + +Effective on the Change Date, or the fourth anniversary of the first publicly +available distribution of a specific version of the Licensed Work under this +License, whichever comes first, the Licensor hereby grants you rights under +the terms of the Change License, and the rights granted in the paragraph +above terminate. + +If your use of the Licensed Work does not comply with the requirements +currently in effect as described in this License, you must purchase a +commercial license from the Licensor, its affiliated entities, or authorized +resellers, or you must refrain from using the Licensed Work. 
+ +All copies of the original and modified Licensed Work, and derivative works +of the Licensed Work, are subject to this License. This License applies +separately for each version of the Licensed Work and the Change Date may vary +for each version of the Licensed Work released by Licensor. + +You must conspicuously display this License on each original or modified copy +of the Licensed Work. If you receive the Licensed Work in original or +modified form from a third party, the terms and conditions set forth in this +License apply to your use of that work. + +Any use of the Licensed Work in violation of this License will automatically +terminate your rights under this License for the current and all other +versions of the Licensed Work. + +This License does not grant you any right in any trademark or logo of +Licensor or its affiliates (provided that you may use a trademark or logo of +Licensor as expressly required by this License). + +TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON +AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, +EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND +TITLE. + +MariaDB hereby grants you permission to use this License’s text to license +your works, and to refer to it using the trademark “Business Source License”, +as long as you comply with the Covenants of Licensor below. + +Covenants of Licensor + +In consideration of the right to use this License’s text and the “Business +Source License” name and trademark, Licensor covenants to MariaDB, and to all +other recipients of the licensed work to be provided by Licensor: + +1. To specify as the Change License the GPL Version 2.0 or any later version, + or a license that is compatible with GPL Version 2.0 or a later version, + where “compatible” means that software provided under the Change License can + be included in a program with software provided under GPL Version 2.0 or a + later version. Licensor may specify additional Change Licenses without + limitation. + +2. To either: (a) specify an additional grant of rights to use that does not + impose any additional restriction on the right granted in this License, as + the Additional Use Grant; or (b) insert the text “None”. + +3. To specify a Change Date. + +4. Not to modify this License in any other way. diff --git a/apps/emqx_gcp_device/README.md b/apps/emqx_gcp_device/README.md new file mode 100644 index 000000000..8e4d49050 --- /dev/null +++ b/apps/emqx_gcp_device/README.md @@ -0,0 +1,7 @@ +# emqx_gcp_device + +An application for simplified migration from Google IoT Core. + +It implements import of IoT Core device config and authentication data, +so that end devices can authenticate and obtain config as usual. + diff --git a/apps/emqx_gcp_device/rebar.config b/apps/emqx_gcp_device/rebar.config new file mode 100644 index 000000000..575a874ea --- /dev/null +++ b/apps/emqx_gcp_device/rebar.config @@ -0,0 +1,6 @@ +{erl_opts, [debug_info]}. +{deps, [ + {emqx, {path, "../emqx"}}, + {emqx_utils, {path, "../emqx_utils"}}, + {emqx_authn, {path, "../emqx_authn"}} +]}. 
diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device.app.src b/apps/emqx_gcp_device/src/emqx_gcp_device.app.src new file mode 100644 index 000000000..dc1b567ac --- /dev/null +++ b/apps/emqx_gcp_device/src/emqx_gcp_device.app.src @@ -0,0 +1,15 @@ +{application, emqx_gcp_device, [ + {description, "Application simplifying migration from GCP IoT Core"}, + {vsn, "0.1.0"}, + {registered, []}, + {mod, {emqx_gcp_device_app, []}}, + {applications, [ + kernel, + stdlib, + emqx_authn + ]}, + {env, []}, + {modules, []}, + + {links, []} +]}. diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device.appup.src b/apps/emqx_gcp_device/src/emqx_gcp_device.appup.src new file mode 100644 index 000000000..781e0767f --- /dev/null +++ b/apps/emqx_gcp_device/src/emqx_gcp_device.appup.src @@ -0,0 +1,9 @@ +%% -*- mode: erlang -*- +{VSN, + [ {<<".*">>, + []} + ], + [ {<<".*">>, + []} + ] +}. diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device.erl b/apps/emqx_gcp_device/src/emqx_gcp_device.erl new file mode 100644 index 000000000..57191b7c4 --- /dev/null +++ b/apps/emqx_gcp_device/src/emqx_gcp_device.erl @@ -0,0 +1,268 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_gcp_device). + +-include_lib("emqx_authn/include/emqx_authn.hrl"). +-include_lib("emqx/include/emqx.hrl"). +-include_lib("emqx/include/logger.hrl"). +-include_lib("stdlib/include/ms_transform.hrl"). + +%% Management +-export([put_device/1, get_device/1, remove_device/1]). +%% Management: import +-export([import_devices/1]). +%% Authentication +-export([get_device_actual_keys/1]). +%% Internal API +-export([create_table/0, clear_table/0, format_device/1]). + +-ifdef(TEST). +-export([config_topic/1]). +% to avoid test flakiness +-define(ACTIVITY, sync_dirty). +-else. +-define(ACTIVITY, async_dirty). +-endif. + +-type deviceid() :: binary(). +-type project() :: binary(). +-type location() :: binary(). +-type registry() :: binary(). +-type device_loc() :: {project(), location(), registry()}. +-type key_type() :: binary(). +-type key() :: binary(). +-type expires_at() :: pos_integer(). +-type key_record() :: {key_type(), key(), expires_at()}. +-type created_at() :: pos_integer(). +-type extra() :: map(). + +-record(emqx_gcp_device, { + id :: deviceid(), + keys :: [key_record()], + device_loc :: device_loc(), + created_at :: created_at(), + extra :: extra() +}). +-type emqx_gcp_device() :: #emqx_gcp_device{}. + +-type formatted_key() :: + #{ + key_type := key_type(), + key := key(), + expires_at := expires_at() + }. +-type encoded_config() :: binary(). +-type formatted_device() :: + #{ + deviceid := deviceid(), + keys := [formatted_key()], + config := encoded_config(), + project => project(), + location => location(), + registry => registry(), + created_at => created_at() + }. +-export_type([formatted_device/0, deviceid/0, encoded_config/0]). + +-define(TAB, ?MODULE). + +-dialyzer({nowarn_function, perform_dirty/2}). + +%%-------------------------------------------------------------------- +%% API +%%-------------------------------------------------------------------- + +-spec put_device(formatted_device()) -> ok. 
+put_device(FormattedDevice) -> + try + perform_dirty(?ACTIVITY, fun() -> put_device_no_transaction(FormattedDevice) end) + catch + _Error:Reason -> + ?SLOG(error, #{ + msg => "Failed to put device", + device => FormattedDevice, + reason => Reason + }), + {error, Reason} + end. + +-spec get_device(deviceid()) -> {ok, formatted_device()} | not_found. +get_device(DeviceId) -> + case ets:lookup(?TAB, DeviceId) of + [] -> + not_found; + [Device] -> + {ok, format_device(Device)} + end. + +-spec remove_device(deviceid()) -> ok. +remove_device(DeviceId) -> + ok = mria:dirty_delete({?TAB, DeviceId}), + ok = put_config(DeviceId, <<>>). + +-spec get_device_actual_keys(deviceid()) -> [key()] | not_found. +get_device_actual_keys(DeviceId) -> + try ets:lookup(?TAB, DeviceId) of + [] -> + not_found; + [Device] -> + actual_keys(Device) + catch + error:badarg -> + not_found + end. + +-spec import_devices([formatted_device()]) -> + {NumImported :: non_neg_integer(), NumError :: non_neg_integer()}. +import_devices(FormattedDevices) when is_list(FormattedDevices) -> + perform_dirty(fun() -> lists:foldl(fun import_device/2, {0, 0}, FormattedDevices) end). + +%%-------------------------------------------------------------------- +%% Internal API +%%-------------------------------------------------------------------- + +-spec create_table() -> ok. +create_table() -> + ok = mria:create_table(?TAB, [ + {rlog_shard, ?AUTH_SHARD}, + {type, ordered_set}, + {storage, disc_copies}, + {record_name, emqx_gcp_device}, + {attributes, record_info(fields, emqx_gcp_device)}, + {storage_properties, [{ets, [{read_concurrency, true}]}, {dets, [{auto_save, 10_000}]}]} + ]), + ok = mria:wait_for_tables([?TAB]). + +-spec clear_table() -> ok. +clear_table() -> + {atomic, ok} = mria:clear_table(?TAB), + ok. + +%%-------------------------------------------------------------------- +%% Internal functions +%%-------------------------------------------------------------------- + +-spec perform_dirty(function()) -> term(). +perform_dirty(Fun) -> + perform_dirty(?ACTIVITY, Fun). + +-spec perform_dirty(async_dirty | sync_dirty, function()) -> term(). +perform_dirty(async_dirty, Fun) -> + mria:async_dirty(?AUTH_SHARD, Fun); +perform_dirty(sync_dirty, Fun) -> + mria:sync_dirty(?AUTH_SHARD, Fun). + +-spec put_device_no_transaction(formatted_device()) -> ok. +put_device_no_transaction( + #{ + deviceid := DeviceId, + keys := Keys, + config := EncodedConfig + } = Device +) -> + DeviceLoc = + list_to_tuple([maps:get(Key, Device, <<>>) || Key <- [project, location, registry]]), + ok = put_device_no_transaction(DeviceId, DeviceLoc, Keys), + ok = put_config(DeviceId, EncodedConfig). + +-spec put_device_no_transaction(deviceid(), device_loc(), [key()]) -> ok. +put_device_no_transaction(DeviceId, DeviceLoc, Keys) -> + CreatedAt = erlang:system_time(second), + Extra = #{}, + Device = + #emqx_gcp_device{ + id = DeviceId, + keys = formatted_keys_to_records(Keys), + device_loc = DeviceLoc, + created_at = CreatedAt, + extra = Extra + }, + mnesia:write(Device). + +-spec formatted_keys_to_records([formatted_key()]) -> [key_record()]. +formatted_keys_to_records(Keys) -> + lists:map(fun formatted_key_to_record/1, Keys). + +-spec formatted_key_to_record(formatted_key()) -> key_record(). +formatted_key_to_record(#{ + key_type := KeyType, + key := Key, + expires_at := ExpiresAt +}) -> + {KeyType, Key, ExpiresAt}. + +-spec format_device(emqx_gcp_device()) -> formatted_device(). 
+format_device(#emqx_gcp_device{ + id = DeviceId, + device_loc = {Project, Location, Registry}, + keys = Keys, + created_at = CreatedAt +}) -> + #{ + deviceid => DeviceId, + project => Project, + location => Location, + registry => Registry, + keys => lists:map(fun format_key/1, Keys), + created_at => CreatedAt, + config => base64:encode(get_device_config(DeviceId)) + }. + +-spec format_key(key_record()) -> formatted_key(). +format_key({KeyType, Key, ExpiresAt}) -> + #{ + key_type => KeyType, + key => Key, + expires_at => ExpiresAt + }. + +-spec put_config(deviceid(), encoded_config()) -> ok. +put_config(DeviceId, EncodedConfig) -> + Config = base64:decode(EncodedConfig), + Topic = config_topic(DeviceId), + Message = emqx_message:make(DeviceId, 1, Topic, Config, #{retain => true}, #{}), + _ = emqx_broker:publish(Message), + ok. + +-spec get_device_config(deviceid()) -> emqx_types:payload(). +get_device_config(DeviceId) -> + Topic = config_topic(DeviceId), + get_retained_payload(Topic). + +-spec actual_keys(emqx_gcp_device()) -> [key()]. +actual_keys(#emqx_gcp_device{keys = Keys}) -> + Now = erlang:system_time(second), + [Key || {_KeyType, Key, ExpiresAt} <- Keys, ExpiresAt == 0 orelse ExpiresAt >= Now]. + +-spec import_device(formatted_device(), { + NumImported :: non_neg_integer(), NumError :: non_neg_integer() +}) -> {NumImported :: non_neg_integer(), NumError :: non_neg_integer()}. +import_device(Device, {NumImported, NumError}) -> + try + ok = put_device_no_transaction(Device), + {NumImported + 1, NumError} + catch + Error:Reason:Stacktrace -> + ?SLOG(error, #{ + msg => "Failed to import device", + exception => Error, + reason => Reason, + stacktrace => Stacktrace + }), + {NumImported, NumError + 1} + end. + +-spec get_retained_payload(binary()) -> emqx_types:payload(). +get_retained_payload(Topic) -> + case emqx_retainer:read_message(Topic) of + {ok, []} -> + <<>>; + {ok, [Message]} -> + Message#message.payload + end. + +-spec config_topic(deviceid()) -> binary(). +config_topic(DeviceId) -> + <<"/devices/", DeviceId/binary, "/config">>. diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device_api.erl b/apps/emqx_gcp_device/src/emqx_gcp_device_api.erl new file mode 100644 index 000000000..a08e0af24 --- /dev/null +++ b/apps/emqx_gcp_device/src/emqx_gcp_device_api.erl @@ -0,0 +1,456 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_gcp_device_api). + +-behaviour(minirest_api). + +-include_lib("hocon/include/hoconsc.hrl"). +-include_lib("stdlib/include/qlc.hrl"). +-include_lib("stdlib/include/ms_transform.hrl"). +-include_lib("emqx/include/logger.hrl"). + +-define(TAGS, [<<"GCP Devices">>]). +-define(TAB, emqx_gcp_device). +-define(FORMAT_FUN, {emqx_gcp_device, format_device}). + +-export([import_devices/1]). +-export([get_device/1, update_device/1, remove_device/1]). + +-export([ + api_spec/0, + paths/0, + schema/1, + fields/1 +]). + +-export([ + '/gcp_devices'/2, + '/gcp_devices/:deviceid'/2 +]). + +-type deviceid() :: emqx_gcp_device:deviceid(). +-type formatted_device() :: emqx_gcp_device:formatted_device(). +-type base64_encoded_config() :: emqx_gcp_device:encoded_config(). +-type imported_key() :: #{ + binary() := binary() | non_neg_integer() + % #{ + % <<"key">> => binary(), + % <<"key_type">> => binary(), + % <<"expires_at">> => non_neg_integer() + % }. +}. 
+-type key_fields() :: key | key_type | expires_at. +-type imported_device() :: #{ + binary() := deviceid() | binary() | [imported_key()] | base64_encoded_config() | boolean() + % #{ + % <<"deviceid">> => deviceid(), + % <<"project">> => binary(), + % <<"location">> => binary(), + % <<"registry">> => binary(), + % <<"keys">> => [imported_key()], + % <<"config">> => base64_encoded_config(), + % <<"blocked">> => boolean(), + % }. +}. +-type device_fields() :: deviceid | project | location | registry | keys | config. +-type checked_device_fields() :: device_fields() | key_fields(). +-type validated_device() :: #{checked_device_fields() := term()}. + +%%------------------------------------------------------------------------------------------------- +%% `minirest' and `minirest_trails' API +%%------------------------------------------------------------------------------------------------- + +api_spec() -> + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). + +paths() -> + [ + "/gcp_devices", + "/gcp_devices/:deviceid" + ]. + +schema("/gcp_devices") -> + #{ + 'operationId' => '/gcp_devices', + get => #{ + description => ?DESC(gcp_devices_get), + tags => ?TAGS, + parameters => [ + hoconsc:ref(emqx_dashboard_swagger, page), + hoconsc:ref(emqx_dashboard_swagger, limit) + ], + responses => #{ + 200 => [ + {data, hoconsc:mk(hoconsc:array(hoconsc:ref(gcp_device_all_info)), #{})}, + {meta, hoconsc:mk(hoconsc:ref(emqx_dashboard_swagger, meta), #{})} + ] + } + }, + post => #{ + description => ?DESC(gcp_devices_post), + tags => ?TAGS, + 'requestBody' => hoconsc:mk(hoconsc:array(?R_REF(gcp_exported_device)), #{}), + responses => + #{ + 200 => hoconsc:ref(import_result), + 400 => emqx_dashboard_swagger:error_codes( + ['BAD_REQUEST'], + <<"Bad Request">> + ) + } + } + }; +schema("/gcp_devices/:deviceid") -> + #{ + 'operationId' => '/gcp_devices/:deviceid', + get => + #{ + description => ?DESC(gcp_device_get), + tags => ?TAGS, + parameters => [deviceid(#{in => path})], + responses => + #{ + 200 => hoconsc:mk( + hoconsc:ref(gcp_device_all_info), + #{ + desc => ?DESC(gcp_device_all_info) + } + ), + 404 => emqx_dashboard_swagger:error_codes( + ['NOT_FOUND'], + ?DESC(gcp_device_response404) + ) + } + }, + put => + #{ + description => ?DESC(gcp_device_put), + tags => ?TAGS, + parameters => [deviceid(#{in => path})], + 'requestBody' => hoconsc:ref(gcp_device), + responses => + #{ + 200 => hoconsc:mk( + hoconsc:ref(gcp_device_info), + #{ + desc => ?DESC(gcp_device_info) + } + ), + 400 => emqx_dashboard_swagger:error_codes( + ['BAD_REQUEST'], + <<"Bad Request">> + ) + } + }, + delete => #{ + description => ?DESC(gcp_device_delete), + tags => ?TAGS, + parameters => [deviceid(#{in => path})], + responses => #{ + 204 => <<"GCP device deleted">> + } + } + }. 
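+%% Illustrative request body for `PUT /gcp_devices/:deviceid`, assembled from the
+%% field examples declared below (values are hypothetical and shown only as
+%% documentation; the device id itself is taken from the path, not the body):
+%%   {
+%%     "project": "iot-export",
+%%     "location": "europe-west1",
+%%     "registry": "my-registry",
+%%     "keys": [
+%%       {"key": "-----BEGIN PUBLIC KEY-----\n...", "key_type": "ES256_PEM", "expires_at": 0}
+%%     ],
+%%     "config": "bXktY29uZmln"
+%%   }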
+ +fields(gcp_device) -> + [ + {registry, + hoconsc:mk( + binary(), + #{ + desc => ?DESC(registry), + default => <<>>, + example => <<"my-registry">> + } + )}, + {project, + hoconsc:mk( + binary(), + #{ + desc => ?DESC(project), + default => <<>>, + example => <<"iot-export">> + } + )}, + {location, + hoconsc:mk( + binary(), + #{ + desc => ?DESC(location), + default => <<>>, + example => <<"europe-west1">> + } + )}, + {keys, + hoconsc:mk( + ?ARRAY(hoconsc:ref(key)), + #{ + desc => ?DESC(keys), + default => [] + } + )}, + {config, + hoconsc:mk( + binary(), + #{ + desc => ?DESC(config), + required => true, + example => <<"bXktY29uZmln">> + } + )} + ]; +fields(gcp_device_info) -> + fields(deviceid) ++ fields(gcp_device); +fields(gcp_device_all_info) -> + [ + {created_at, + hoconsc:mk( + non_neg_integer(), + #{ + desc => ?DESC(created_at), + required => true, + example => 1690484400 + } + )} + ] ++ fields(gcp_device_info); +fields(gcp_exported_device) -> + [ + {blocked, + hoconsc:mk( + boolean(), + #{ + desc => ?DESC(blocked), + required => true, + example => false + } + )} + ] ++ fields(deviceid) ++ fields(gcp_device); +fields(import_result) -> + [ + {errors, + hoconsc:mk( + non_neg_integer(), + #{ + desc => ?DESC(imported_counter_errors), + required => true, + example => 0 + } + )}, + {imported, + hoconsc:mk( + non_neg_integer(), + #{ + desc => ?DESC(imported_counter), + required => true, + example => 14 + } + )} + ]; +fields(key) -> + [ + {key, + hoconsc:mk( + binary(), + #{ + desc => ?DESC(key), + required => true, + example => <<"">> + } + )}, + {key_type, + hoconsc:mk( + binary(), + #{ + desc => ?DESC(key_type), + required => true, + example => <<"ES256_PEM">> + } + )}, + {expires_at, + hoconsc:mk( + non_neg_integer(), + #{ + desc => ?DESC(expires_at), + required => true, + example => 1706738400 + } + )} + ]; +fields(deviceid) -> + [ + deviceid() + ]. + +'/gcp_devices'(get, #{query_string := Params}) -> + Response = emqx_mgmt_api:paginate(?TAB, Params, ?FORMAT_FUN), + {200, Response}; +'/gcp_devices'(post, #{body := Body}) -> + import_devices(Body). + +'/gcp_devices/:deviceid'(get, #{bindings := #{deviceid := DeviceId}}) -> + get_device(DeviceId); +'/gcp_devices/:deviceid'(put, #{bindings := #{deviceid := DeviceId}, body := Body}) -> + update_device(maps:merge(Body, #{<<"deviceid">> => DeviceId})); +'/gcp_devices/:deviceid'(delete, #{bindings := #{deviceid := DeviceId}}) -> + remove_device(DeviceId). + +%%------------------------------------------------------------------------------ +%% Handlers +%%------------------------------------------------------------------------------ + +-spec import_devices([imported_device()]) -> + {200, #{imported := non_neg_integer(), errors := non_neg_integer()}} + | {400, #{message := binary()}}. +import_devices(Devices) -> + case validate_devices(Devices) of + {ok, FormattedDevices} -> + {NumImported, NumErrors} = emqx_gcp_device:import_devices(FormattedDevices), + {200, #{imported => NumImported, errors => NumErrors}}; + {error, Reason} -> + {400, #{message => Reason}} + end. + +-spec get_device(deviceid()) -> {200, formatted_device()} | {404, 'NOT_FOUND', binary()}. +get_device(DeviceId) -> + case emqx_gcp_device:get_device(DeviceId) of + {ok, Device} -> + {200, Device}; + not_found -> + Message = list_to_binary(io_lib:format("device not found: ~s", [DeviceId])), + {404, 'NOT_FOUND', Message} + end. + +-spec update_device(imported_device()) -> {200, formatted_device()} | {400, binary()}. 
+update_device(Device) -> + case validate_device(Device) of + {ok, ValidatedDevice} -> + ok = emqx_gcp_device:put_device(ValidatedDevice), + {200, ValidatedDevice}; + {error, Reason} -> + {400, Reason} + end. + +-spec remove_device(deviceid()) -> {204}. +remove_device(DeviceId) -> + ok = emqx_gcp_device:remove_device(DeviceId), + {204}. + +%%------------------------------------------------------------------------------ +%% Internal functions +%%------------------------------------------------------------------------------ + +-define(KEY_TYPES, [<<"RSA_PEM">>, <<"RSA_X509_PEM">>, <<"ES256_PEM">>, <<"ES256_X509_PEM">>]). + +-spec deviceid() -> tuple(). +deviceid() -> + deviceid(#{}). + +-spec deviceid(map()) -> tuple(). +deviceid(Override) -> + {deviceid, + hoconsc:mk( + binary(), + maps:merge( + #{ + desc => ?DESC(deviceid), + required => true, + example => <<"c2-ec-x509">> + }, + Override + ) + )}. + +-spec validate_devices([imported_device()]) -> {ok, [validated_device()]} | {error, binary()}. +validate_devices(Devices) -> + validate_devices(Devices, []). + +-spec validate_devices([imported_device()], [validated_device()]) -> + {ok, [validated_device()]} | {error, binary()}. +validate_devices([], Validated) -> + {ok, lists:reverse(Validated)}; +validate_devices([Device | Devices], Validated) -> + case validate_device(Device) of + {ok, ValidatedDevice} -> + validate_devices(Devices, [ValidatedDevice | Validated]); + {error, _} = Error -> + Error + end. + +-spec validate_device(imported_device()) -> {ok, validated_device()} | {error, binary()}. +validate_device(Device) -> + validate([deviceid, project, location, registry, keys, config], Device). + +-spec validate([checked_device_fields()], imported_device()) -> + {ok, validated_device()} | {error, binary()}. +validate(Fields, Device) -> + validate(Fields, Device, #{}). + +-spec validate([checked_device_fields()], imported_device(), validated_device()) -> + {ok, validated_device()} | {error, binary()}. 
+validate([], _Device, Validated) -> + {ok, Validated}; +validate([key_type | Fields], #{<<"key_type">> := KeyType} = Device, Validated) -> + case lists:member(KeyType, ?KEY_TYPES) of + true -> + validate(Fields, Device, Validated#{key_type => KeyType}); + false -> + {error, <<"invalid key_type">>} + end; +validate([key | Fields], #{<<"key">> := Key} = Device, Validated) -> + validate(Fields, Device, Validated#{key => Key}); +validate([expires_at | Fields], #{<<"expires_at">> := Expire} = Device, Validated) when + is_integer(Expire) +-> + validate(Fields, Device, Validated#{expires_at => Expire}); +validate([expires_at | _Fields], #{<<"expires_at">> := _}, _Validated) -> + {error, <<"invalid expires_at">>}; +validate([expires_at | Fields], Device, Validated) -> + validate(Fields, Device, Validated#{expires_at => 0}); +validate([Field | Fields], Device, Validated) when Field =:= deviceid; Field =:= key -> + FieldBin = atom_to_binary(Field), + case maps:find(FieldBin, Device) of + {ok, Value} when is_binary(Value) -> + validate(Fields, Device, Validated#{Field => Value}); + _ -> + {error, <<"invalid or missing field: ", FieldBin/binary>>} + end; +validate([Field | Fields], Device, Validated) when + Field =:= project; Field =:= location; Field =:= registry; Field =:= config +-> + FieldBin = atom_to_binary(Field), + case maps:find(FieldBin, Device) of + {ok, Value} when is_binary(Value) -> + validate(Fields, Device, Validated#{Field => Value}); + error -> + validate(Fields, Device, Validated#{Field => <<>>}); + _ -> + {error, <<"invalid field: ", FieldBin/binary>>} + end; +validate([keys | Fields], #{<<"keys">> := Keys} = Device, Validated) when is_list(Keys) -> + case validate_keys(Keys) of + {ok, ValidatedKeys} -> + validate(Fields, Device, Validated#{keys => ValidatedKeys}); + {error, _} = Error -> + Error + end; +validate([Field | _Fields], _Device, _Validated) -> + {error, <<"invalid or missing field: ", (atom_to_binary(Field))/binary>>}. + +-spec validate_keys([imported_key()]) -> + {ok, [validated_device()]} | {error, binary()}. +validate_keys(Keys) -> + validate_keys(Keys, []). + +-spec validate_keys([imported_key()], [validated_device()]) -> + {ok, [validated_device()]} | {error, binary()}. +validate_keys([], Validated) -> + {ok, lists:reverse(Validated)}; +validate_keys([Key | Keys], Validated) -> + case validate([key, key_type, expires_at], Key) of + {ok, ValidatedKey} -> + validate_keys(Keys, [ValidatedKey | Validated]); + {error, _} = Error -> + Error + end. diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device_app.erl b/apps/emqx_gcp_device/src/emqx_gcp_device_app.erl new file mode 100644 index 000000000..a3d80b0a8 --- /dev/null +++ b/apps/emqx_gcp_device/src/emqx_gcp_device_app.erl @@ -0,0 +1,21 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_gcp_device_app). + +-behaviour(application). + +-emqx_plugin(?MODULE). + +-export([ + start/2, + stop/1 +]). + +start(_StartType, _StartArgs) -> + emqx_gcp_device:create_table(), + emqx_gcp_device_sup:start_link(). + +stop(_State) -> + ok. 
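+
+%% Starting the application only creates the device table and an (empty)
+%% supervision tree. To actually authenticate devices, a `gcp_device`
+%% authenticator still has to be created, for example (this mirrors what the
+%% test suites below do; `?GLOBAL` is the global authentication chain name
+%% from emqx_authn.hrl):
+%%
+%%   {ok, _} = emqx:update_config(
+%%       [authentication],
+%%       {create_authenticator, ?GLOBAL,
+%%           #{<<"mechanism">> => <<"gcp_device">>, <<"enable">> => <<"true">>}}
+%%   ).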
diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device_authn.erl b/apps/emqx_gcp_device/src/emqx_gcp_device_authn.erl new file mode 100644 index 000000000..956545c95 --- /dev/null +++ b/apps/emqx_gcp_device/src/emqx_gcp_device_authn.erl @@ -0,0 +1,213 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_gcp_device_authn). + +-include_lib("emqx_authn/include/emqx_authn.hrl"). +-include_lib("emqx/include/logger.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). +-include_lib("jose/include/jose_jwt.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). + +-behaviour(hocon_schema). + +-export([ + namespace/0, + tags/0, + roots/0, + fields/1, + desc/1 +]). + +-export([ + refs/0, + create/2, + update/2, + authenticate/2, + destroy/1 +]). + +%%------------------------------------------------------------------------------ +%% Hocon Schema +%%------------------------------------------------------------------------------ + +namespace() -> "authn". + +tags() -> + [<<"Authentication">>]. + +%% used for config check when the schema module is resolved +roots() -> + [{?CONF_NS, hoconsc:mk(hoconsc:ref(gcp_device))}]. + +fields(gcp_device) -> + common_fields(). + +desc(gcp_device) -> + ?DESC(emqx_gcp_device_api, gcp_device); +desc(_) -> + undefined. + +%%------------------------------------------------------------------------------ +%% APIs +%%------------------------------------------------------------------------------ + +refs() -> + [ + hoconsc:ref(?MODULE, gcp_device) + ]. + +create(_AuthenticatorID, _Config) -> + {ok, #{}}. + +update( + _Config, + State +) -> + {ok, State}. + +authenticate(#{auth_method := _}, _) -> + ignore; +authenticate(Credential, _State) -> + check(Credential). + +destroy(_State) -> + emqx_gcp_device:clear_table(), + ok. + +%%-------------------------------------------------------------------- +%% Internal functions +%%-------------------------------------------------------------------- + +common_fields() -> + [ + {mechanism, emqx_authn_schema:mechanism('gcp_device')} + ] ++ emqx_authn_schema:common_fields(). + +% The check logic is the following: +%% 1. If clientid is not GCP-like or password is not a JWT, the result is ignore +%% 2. If clientid is GCP-like and password is a JWT, but expired, the result is password_error +%% 3. 
If clientid is GCP-like and password is a valid and not expired JWT: +%% 3.1 If there are no keys for the client, the result is ignore +%% 3.2 If there are some keys for the client: +%% 3.2.1 If there are no actual (not expired keys), the result is password_error +%% 3.2.2 If there are some actual keys and one of them matches the JWT, the result is success +%% 3.2.3 If there are some actual keys and none of them matches the JWT, the result is password_error +check(#{password := Password} = ClientInfo) -> + case gcp_deviceid_from_clientid(ClientInfo) of + {ok, DeviceId} -> + case is_valid_jwt(Password) of + true -> + check_jwt(ClientInfo, DeviceId); + {false, not_a_jwt} -> + ?tp(authn_gcp_device_check, #{ + result => ignore, reason => "not a JWT", client => ClientInfo + }), + ?TRACE_AUTHN_PROVIDER(debug, "auth_ignored", #{ + reason => "not a JWT", + client => ClientInfo + }), + ignore; + {false, expired} -> + ?tp(authn_gcp_device_check, #{ + result => not_authorized, reason => "expired JWT", client => ClientInfo + }), + ?TRACE_AUTHN_PROVIDER(info, "auth_failed", #{ + reason => "expired JWT", + client => ClientInfo + }), + {error, not_authorized} + end; + not_a_gcp_clientid -> + ?tp(authn_gcp_device_check, #{ + result => ignore, reason => "not a GCP ClientId", client => ClientInfo + }), + ?TRACE_AUTHN_PROVIDER(debug, "auth_ignored", #{ + reason => "not a GCP ClientId", + client => ClientInfo + }), + ignore + end. + +check_jwt(ClientInfo, DeviceId) -> + case emqx_gcp_device:get_device_actual_keys(DeviceId) of + not_found -> + ?tp(authn_gcp_device_check, #{ + result => ignore, reason => "key not found", client => ClientInfo + }), + ?TRACE_AUTHN_PROVIDER(debug, "auth_ignored", #{ + reason => "key not found", + client => ClientInfo + }), + ignore; + Keys -> + case any_key_matches(Keys, ClientInfo) of + true -> + ?tp(authn_gcp_device_check, #{ + result => ok, reason => "auth success", client => ClientInfo + }), + ?TRACE_AUTHN_PROVIDER(debug, "auth_success", #{ + reason => "auth success", + client => ClientInfo + }), + ok; + false -> + ?tp(authn_gcp_device_check, #{ + result => {error, bad_username_or_password}, + reason => "no matching or valid keys", + client => ClientInfo + }), + ?TRACE_AUTHN_PROVIDER(info, "auth_failed", #{ + reason => "no matching or valid keys", + client => ClientInfo + }), + {error, bad_username_or_password} + end + end. + +any_key_matches(Keys, ClientInfo) -> + lists:any(fun(Key) -> key_matches(Key, ClientInfo) end, Keys). + +key_matches(KeyRaw, #{password := Jwt} = _ClientInfo) -> + Jwk = jose_jwk:from_pem(KeyRaw), + case jose_jws:verify(Jwk, Jwt) of + {true, _, _} -> + true; + {false, _, _} -> + false + end. + +gcp_deviceid_from_clientid(#{clientid := <<"projects/", RestClientId/binary>>}) -> + case binary:split(RestClientId, <<"/">>, [global]) of + [ + _Project, + <<"locations">>, + _Location, + <<"registries">>, + _Registry, + <<"devices">>, + DeviceId + ] -> + {ok, DeviceId}; + _ -> + not_a_gcp_clientid + end; +gcp_deviceid_from_clientid(_ClientInfo) -> + not_a_gcp_clientid. + +is_valid_jwt(Password) -> + Now = erlang:system_time(second), + try jose_jwt:peek(Password) of + #jose_jwt{fields = #{<<"exp">> := Exp}} when is_integer(Exp) andalso Exp >= Now -> + true; + #jose_jwt{fields = #{<<"exp">> := _Exp}} -> + {false, expired}; + #jose_jwt{} -> + true; + _ -> + {false, not_a_jwt} + catch + _:_ -> + {false, not_a_jwt} + end. 
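+
+%% A minimal sketch of how a migrated device can authenticate against this
+%% provider, assuming the `jose` and `emqtt` applications and one of the test
+%% keys shipped with this application (all values are illustrative):
+%%
+%%   JWK = jose_jwk:from_pem_file("c1_ec_private.pem"),
+%%   Signed = jose_jwt:sign(JWK, #{<<"alg">> => <<"ES256">>},
+%%                          #{<<"exp">> => erlang:system_time(second) + 3600}),
+%%   {_, Token} = jose_jws:compact(Signed),
+%%   ClientId = <<"projects/iot-export/locations/europe-west1/"
+%%                "registries/my-registry/devices/c1-ec">>,
+%%   {ok, C} = emqtt:start_link([{clientid, ClientId}, {password, Token}]),
+%%   {ok, _} = emqtt:connect(C).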
diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device_sup.erl b/apps/emqx_gcp_device/src/emqx_gcp_device_sup.erl new file mode 100644 index 000000000..e40be256a --- /dev/null +++ b/apps/emqx_gcp_device/src/emqx_gcp_device_sup.erl @@ -0,0 +1,25 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_gcp_device_sup). + +-behaviour(supervisor). + +-export([start_link/0]). +-export([init/1]). + +-define(SERVER, ?MODULE). + +start_link() -> + supervisor:start_link({local, ?SERVER}, ?MODULE, []). + +init([]) -> + SupFlags = + #{ + strategy => one_for_all, + intensity => 0, + period => 1 + }, + ChildSpecs = [], + {ok, {SupFlags, ChildSpecs}}. diff --git a/apps/emqx_gcp_device/test/data/gcp-data.json b/apps/emqx_gcp_device/test/data/gcp-data.json new file mode 100644 index 000000000..91eace670 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/gcp-data.json @@ -0,0 +1,210 @@ +[ + { + "deviceid": "c1-c3-two-keys", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4tqkxsDZ1tZPhtLcCi5BdhT0idF5\nwqP9I2ITa7trw+n6YRsrqnbr+sklCPN6tySLRrGT8IpFlLo0xJFRmuAyLw==\n-----END PUBLIC KEY-----\n", + "key_type": "ES256_PEM", + "expires_at": 0 + }, + { + "key": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzc27HX3t3tBA31VE+kHV\nhPloAVBpvCSHR+HfEOI++qCUiO+nU1dAIKsSWu4ipbwCl57oetQwmeBnR49Ra0B6\ns5UyOssNw9aiRVUFZdVKOifoaXIZy1NTfG6tgp2Wq8fL5KyA5Sq+PzFkfyD9axYQ\nC5jbF+nJ78OHg0/3EYQhN7NvCipTgxCcW/oIGG6v0N6V5W7x+7ixJWbLPyZYM0vE\nTIN0BIxbx1R+fGkyUWAqvNfveTyN5wq7MY9915BSLyGUprsq9n5DJmiC44RJVau2\nMfH3mKQxkn8c/2L0hZzqK6swj1EdE/BAiA+t+67mOVMLoGrOqfO16Y3f7Sv5D7Xc\njwIDAQAB\n-----END PUBLIC KEY-----\n", + "key_type": "RSA_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "" + }, + { + "deviceid": "2852899269094682", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4tqkxsDZ1tZPhtLcCi5BdhT0idF5\nwqP9I2ITa7trw+n6YRsrqnbr+sklCPN6tySLRrGT8IpFlLo0xJFRmuAyLw==\n-----END PUBLIC KEY-----\n", + "key_type": "ES256_PEM", + "expires_at": 0 + }, + { + "key": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzc27HX3t3tBA31VE+kHV\nhPloAVBpvCSHR+HfEOI++qCUiO+nU1dAIKsSWu4ipbwCl57oetQwmeBnR49Ra0B6\ns5UyOssNw9aiRVUFZdVKOifoaXIZy1NTfG6tgp2Wq8fL5KyA5Sq+PzFkfyD9axYQ\nC5jbF+nJ78OHg0/3EYQhN7NvCipTgxCcW/oIGG6v0N6V5W7x+7ixJWbLPyZYM0vE\nTIN0BIxbx1R+fGkyUWAqvNfveTyN5wq7MY9915BSLyGUprsq9n5DJmiC44RJVau2\nMfH3mKQxkn8c/2L0hZzqK6swj1EdE/BAiA+t+67mOVMLoGrOqfO16Y3f7Sv5D7Xc\njwIDAQAB\n-----END PUBLIC KEY-----\n", + "key_type": "RSA_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "" + }, + { + "deviceid": "c1-ec", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4tqkxsDZ1tZPhtLcCi5BdhT0idF5\nwqP9I2ITa7trw+n6YRsrqnbr+sklCPN6tySLRrGT8IpFlLo0xJFRmuAyLw==\n-----END PUBLIC KEY-----\n", + "key_type": "ES256_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "eyJteSI6IFsianNvbiIsICJjb25maWciXX0=" + }, + { + "deviceid": "3058444082630640", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": 
[ + { + "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4tqkxsDZ1tZPhtLcCi5BdhT0idF5\nwqP9I2ITa7trw+n6YRsrqnbr+sklCPN6tySLRrGT8IpFlLo0xJFRmuAyLw==\n-----END PUBLIC KEY-----\n", + "key_type": "ES256_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "eyJteSI6IFsianNvbiIsICJjb25maWciXX0=" + }, + { + "deviceid": "c2-ec-x509", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN CERTIFICATE-----\nMIIBEjCBuAIJAPKVZoroXatKMAoGCCqGSM49BAMCMBExDzANBgNVBAMMBnVudXNl\nZDAeFw0yMzA0MTIxMzQ2NTJaFw0yMzA1MTIxMzQ2NTJaMBExDzANBgNVBAMMBnVu\ndXNlZDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABAugsuay/y2SpGEVDKfiVw9q\nVHGdZHvLXDqxj9XndUi6LEpA209ZfaC1eJ+mZiW3zBC94AdqVu+QLzS7rPT72jkw\nCgYIKoZIzj0EAwIDSQAwRgIhAMBp+1S5w0UJDuylI1TJS8vXjWOhgluUdZfFtxES\nE85SAiEAvKIAhjRhuIxanhqyv3HwOAL/zRAcv6iHsPMKYBt1dOs=\n-----END CERTIFICATE-----\n", + "key_type": "ES256_X509_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==" + }, + { + "deviceid": "2928540609735937", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN CERTIFICATE-----\nMIIBEjCBuAIJAPKVZoroXatKMAoGCCqGSM49BAMCMBExDzANBgNVBAMMBnVudXNl\nZDAeFw0yMzA0MTIxMzQ2NTJaFw0yMzA1MTIxMzQ2NTJaMBExDzANBgNVBAMMBnVu\ndXNlZDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABAugsuay/y2SpGEVDKfiVw9q\nVHGdZHvLXDqxj9XndUi6LEpA209ZfaC1eJ+mZiW3zBC94AdqVu+QLzS7rPT72jkw\nCgYIKoZIzj0EAwIDSQAwRgIhAMBp+1S5w0UJDuylI1TJS8vXjWOhgluUdZfFtxES\nE85SAiEAvKIAhjRhuIxanhqyv3HwOAL/zRAcv6iHsPMKYBt1dOs=\n-----END CERTIFICATE-----\n", + "key_type": "ES256_X509_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w==" + }, + { + "deviceid": "c3-rsa", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzc27HX3t3tBA31VE+kHV\nhPloAVBpvCSHR+HfEOI++qCUiO+nU1dAIKsSWu4ipbwCl57oetQwmeBnR49Ra0B6\ns5UyOssNw9aiRVUFZdVKOifoaXIZy1NTfG6tgp2Wq8fL5KyA5Sq+PzFkfyD9axYQ\nC5jbF+nJ78OHg0/3EYQhN7NvCipTgxCcW/oIGG6v0N6V5W7x+7ixJWbLPyZYM0vE\nTIN0BIxbx1R+fGkyUWAqvNfveTyN5wq7MY9915BSLyGUprsq9n5DJmiC44RJVau2\nMfH3mKQxkn8c/2L0hZzqK6swj1EdE/BAiA+t+67mOVMLoGrOqfO16Y3f7Sv5D7Xc\njwIDAQAB\n-----END PUBLIC KEY-----\n", + "key_type": "RSA_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "" + }, + { + "deviceid": "2956940137919694", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzc27HX3t3tBA31VE+kHV\nhPloAVBpvCSHR+HfEOI++qCUiO+nU1dAIKsSWu4ipbwCl57oetQwmeBnR49Ra0B6\ns5UyOssNw9aiRVUFZdVKOifoaXIZy1NTfG6tgp2Wq8fL5KyA5Sq+PzFkfyD9axYQ\nC5jbF+nJ78OHg0/3EYQhN7NvCipTgxCcW/oIGG6v0N6V5W7x+7ixJWbLPyZYM0vE\nTIN0BIxbx1R+fGkyUWAqvNfveTyN5wq7MY9915BSLyGUprsq9n5DJmiC44RJVau2\nMfH3mKQxkn8c/2L0hZzqK6swj1EdE/BAiA+t+67mOVMLoGrOqfO16Y3f7Sv5D7Xc\njwIDAQAB\n-----END PUBLIC KEY-----\n", + "key_type": "RSA_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "" + }, + { + "deviceid": "c4-rsa-x509", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN CERTIFICATE-----\nMIICnjCCAYYCCQCh+b8WxXjihDANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDDAZ1\nbnVzZWQwHhcNMjMwNDEyMTM0NjUyWhcNMjMwNTEyMTM0NjUyWjARMQ8wDQYDVQQD\nDAZ1bnVzZWQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDOtvDuketC\n56nvrZw61UyP+MJikYbqqxIqIqwyih2KDCzlF6gTBI6vbFNwZx1b366VOfDhuj6j\n44+cN44AoVKtqSzpsDjdlIRClcBIv4k2ndXjr6yV1cJ9lrMB9vPbr8fiQOxr31Cf\nZUk0OZPppdsC5iqYpUeOdrSttOgBRIaTohBUXMatICxhc+9gC5yj9mQJuwckx6fE\nb+gJ9JrZ1/0wSW1EZNfS9hlOhA0nRUnty5wyqrpxdX4UL/G86SFl7njW9S1PBuPe\nHK7AdHZ6C3FAMfqpnETiWV149k/DR4UQQ7a23QsbgVJOM/7R9IAyln9LARhF9Bpp\ny/W2HPpBn8JHAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAGFl+G3yk/BfELjX1mT6\n4mrGlJq3I6vXLN4ICSTmI4YZQgMmudIHEd6o/cZHJq8HOOqQ5SfFhQI7tBXZpXSG\ndybOStl+GnfyIQFjsNzFXJEiaHoBPP1ccpZyCW/IBkXX39h9N/Pq0XB+xDurXpOD\nVE8nICTATe1Th11rs8j6qwFCkaoQwrzg+JWOKvFnRTPPDNg21fNRRTS+SE27asF2\nPhBWZOD4G2g6WD73SHUs+prR/q4foSVXt63Ih8uQIQJllRtpI4ZkpwSXDH9DUZSY\nWyFtYkD0EAV/FaRuALZQzxX7wda4xwBhvDL8Wua1WENTGZq7ssRHldAdFrz8NENC\nHqk=\n-----END CERTIFICATE-----\n", + "key_type": "RSA_X509_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "" + }, + { + "deviceid": "2820826361193805", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN CERTIFICATE-----\nMIICnjCCAYYCCQCh+b8WxXjihDANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDDAZ1\nbnVzZWQwHhcNMjMwNDEyMTM0NjUyWhcNMjMwNTEyMTM0NjUyWjARMQ8wDQYDVQQD\nDAZ1bnVzZWQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDOtvDuketC\n56nvrZw61UyP+MJikYbqqxIqIqwyih2KDCzlF6gTBI6vbFNwZx1b366VOfDhuj6j\n44+cN44AoVKtqSzpsDjdlIRClcBIv4k2ndXjr6yV1cJ9lrMB9vPbr8fiQOxr31Cf\nZUk0OZPppdsC5iqYpUeOdrSttOgBRIaTohBUXMatICxhc+9gC5yj9mQJuwckx6fE\nb+gJ9JrZ1/0wSW1EZNfS9hlOhA0nRUnty5wyqrpxdX4UL/G86SFl7njW9S1PBuPe\nHK7AdHZ6C3FAMfqpnETiWV149k/DR4UQQ7a23QsbgVJOM/7R9IAyln9LARhF9Bpp\ny/W2HPpBn8JHAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAGFl+G3yk/BfELjX1mT6\n4mrGlJq3I6vXLN4ICSTmI4YZQgMmudIHEd6o/cZHJq8HOOqQ5SfFhQI7tBXZpXSG\ndybOStl+GnfyIQFjsNzFXJEiaHoBPP1ccpZyCW/IBkXX39h9N/Pq0XB+xDurXpOD\nVE8nICTATe1Th11rs8j6qwFCkaoQwrzg+JWOKvFnRTPPDNg21fNRRTS+SE27asF2\nPhBWZOD4G2g6WD73SHUs+prR/q4foSVXt63Ih8uQIQJllRtpI4ZkpwSXDH9DUZSY\nWyFtYkD0EAV/FaRuALZQzxX7wda4xwBhvDL8Wua1WENTGZq7ssRHldAdFrz8NENC\nHqk=\n-----END CERTIFICATE-----\n", + "key_type": "RSA_X509_PEM", + "expires_at": 0 + } + ], + "blocked": false, + "config": "" + }, + { + "deviceid": "c5-rsa-expire", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0D86zcmGLdWjV2S1wLqu\n4tajeUxH/qSnd556Z4PKR9yXnl2YcQBZZh6gf9Y5RuLzsi+EN08NuyrWjscON16Y\nRmJYOJaH4vEOjts0EbWl/ekl/uaH2VaMByTCOXZH9oaI1hoYrr9YFyAxJlrSPc36\nD+Js3WTyjF6mr+VCZPM1MrZT97Hic/vJ12U/YSDqk6AYPdZG7dbalWR4NLWim7l7\nEnwHi2KwDLUewoGX8O/WDpkePD8ydixzqgMMgje5EMlotdeMSE5aKbSSWQWJIPyp\nNtm0FicpSMahksMG3GzZzGCe9CGvDWW82+6iP2A2/mpsaCe4PIA1sgDXqG3UoIVO\nMwIDAQAB\n-----END PUBLIC KEY-----\n", + "key_type": "RSA_PEM", + "expires_at": 1706738400 + } + ], + "blocked": false, + "config": "" + }, + { + "deviceid": "3036091876233443", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [ + { + "key": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0D86zcmGLdWjV2S1wLqu\n4tajeUxH/qSnd556Z4PKR9yXnl2YcQBZZh6gf9Y5RuLzsi+EN08NuyrWjscON16Y\nRmJYOJaH4vEOjts0EbWl/ekl/uaH2VaMByTCOXZH9oaI1hoYrr9YFyAxJlrSPc36\nD+Js3WTyjF6mr+VCZPM1MrZT97Hic/vJ12U/YSDqk6AYPdZG7dbalWR4NLWim7l7\nEnwHi2KwDLUewoGX8O/WDpkePD8ydixzqgMMgje5EMlotdeMSE5aKbSSWQWJIPyp\nNtm0FicpSMahksMG3GzZzGCe9CGvDWW82+6iP2A2/mpsaCe4PIA1sgDXqG3UoIVO\nMwIDAQAB\n-----END PUBLIC KEY-----\n", + "key_type": "RSA_PEM", + "expires_at": 1706738400 + } + ], + "blocked": false, + "config": "" + }, + { + "deviceid": "c6-nokey", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [], + "blocked": false, + "config": "" + }, + { + "deviceid": "3005440763942212", + "project": "iot-export", + "location": "europe-west1", + "registry": "my-registry", + "keys": [], + "blocked": false, + "config": "" + } +] diff --git a/apps/emqx_gcp_device/test/data/keys/c1_ec_private.pem b/apps/emqx_gcp_device/test/data/keys/c1_ec_private.pem new file mode 100644 index 000000000..2078c4eb1 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c1_ec_private.pem @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIGN8JyB8C3vW+SKTj5JcOeFdU9zM4mV35o+JumELI/w+oAoGCCqGSM49 +AwEHoUQDQgAE4tqkxsDZ1tZPhtLcCi5BdhT0idF5wqP9I2ITa7trw+n6YRsrqnbr ++sklCPN6tySLRrGT8IpFlLo0xJFRmuAyLw== +-----END EC PRIVATE KEY----- diff --git a/apps/emqx_gcp_device/test/data/keys/c1_ec_public.pem b/apps/emqx_gcp_device/test/data/keys/c1_ec_public.pem new file mode 100644 index 000000000..a13588b28 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c1_ec_public.pem @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE4tqkxsDZ1tZPhtLcCi5BdhT0idF5 +wqP9I2ITa7trw+n6YRsrqnbr+sklCPN6tySLRrGT8IpFlLo0xJFRmuAyLw== +-----END PUBLIC KEY----- diff --git a/apps/emqx_gcp_device/test/data/keys/c2_ec_cert.pem b/apps/emqx_gcp_device/test/data/keys/c2_ec_cert.pem new file mode 100644 index 000000000..1067e1520 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c2_ec_cert.pem @@ -0,0 +1,8 @@ +-----BEGIN CERTIFICATE----- +MIIBEjCBuAIJAPKVZoroXatKMAoGCCqGSM49BAMCMBExDzANBgNVBAMMBnVudXNl +ZDAeFw0yMzA0MTIxMzQ2NTJaFw0yMzA1MTIxMzQ2NTJaMBExDzANBgNVBAMMBnVu +dXNlZDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABAugsuay/y2SpGEVDKfiVw9q +VHGdZHvLXDqxj9XndUi6LEpA209ZfaC1eJ+mZiW3zBC94AdqVu+QLzS7rPT72jkw +CgYIKoZIzj0EAwIDSQAwRgIhAMBp+1S5w0UJDuylI1TJS8vXjWOhgluUdZfFtxES +E85SAiEAvKIAhjRhuIxanhqyv3HwOAL/zRAcv6iHsPMKYBt1dOs= +-----END CERTIFICATE----- diff --git a/apps/emqx_gcp_device/test/data/keys/c2_ec_private.pem b/apps/emqx_gcp_device/test/data/keys/c2_ec_private.pem new file mode 100644 index 000000000..7eb91c315 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c2_ec_private.pem @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- 
+MHcCAQEEIECpfvahaDpwOVSqQmf//F9nzK6W5m9BQklpx8DbAHscoAoGCCqGSM49 +AwEHoUQDQgAEC6Cy5rL/LZKkYRUMp+JXD2pUcZ1ke8tcOrGP1ed1SLosSkDbT1l9 +oLV4n6ZmJbfMEL3gB2pW75AvNLus9PvaOQ== +-----END EC PRIVATE KEY----- diff --git a/apps/emqx_gcp_device/test/data/keys/c3_rsa_private.pem b/apps/emqx_gcp_device/test/data/keys/c3_rsa_private.pem new file mode 100644 index 000000000..e837578b9 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c3_rsa_private.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDNzbsdfe3e0EDf +VUT6QdWE+WgBUGm8JIdH4d8Q4j76oJSI76dTV0AgqxJa7iKlvAKXnuh61DCZ4GdH +j1FrQHqzlTI6yw3D1qJFVQVl1Uo6J+hpchnLU1N8bq2CnZarx8vkrIDlKr4/MWR/ +IP1rFhALmNsX6cnvw4eDT/cRhCE3s28KKlODEJxb+ggYbq/Q3pXlbvH7uLElZss/ +JlgzS8RMg3QEjFvHVH58aTJRYCq81+95PI3nCrsxj33XkFIvIZSmuyr2fkMmaILj +hElVq7Yx8feYpDGSfxz/YvSFnOorqzCPUR0T8ECID637ruY5Uwugas6p87Xpjd/t +K/kPtdyPAgMBAAECggEAU3PcL05UOai61ZUPHme5vG0iFn5UEd3CGYzm1kLYBOs+ +r/R2Jl5X+6dDDypHVGtTpcXjQYNvncYYOzVLb7E60D1sm9ig4UvUi0a5pJyDt+dc +3/1Lpl5ImUmMBE4AvfGLpVOqBMN7V8agmMh42oacxQcbuKutnhLsjXvMlQa+LYZT ++FQV8kQV8D4GgjmP2jl0/Y2M6BjKEK2Ih7qPvo46L439vk2JGF8N+NtGjCKy6Wra +X9uFA3+RjsqcN6mPa77OEDmN9HjpSPraJowPlZR+xrJjbekIri/uyNWMZ6BCmkPx +0kRkScUmZMfq+SIIdsMszp8P549nwmBNCgFgcOJTYQKBgQDt1ZZzA7r07lhF9T9W +0bfzbg230v03LiPGHMsjerZfWCMMs+RgBkkgLPG4XyMKZNCUsj5Pt5WyVBXaaWY4 +LrE5kLdpIn/oRykaK1i+AGkXHhIWAlvqsWWg+R2sLCwaIiolGuc1b+ZERS+5VMrf +c71t/i8OB22uCPrRShIIQqrGsQKBgQDdhdeQ8ZoumNFFcapN0I/sKNhuuvw1mtOI +tduNkOyf68XCpM7yDe86DV8cPbFNHhGMZnhpSxu0yyHQLuL9Nwv9gAB66yIzvk+N +iv+WTIqgIDQN26Ljz2q4hc9SpT8zLRLrDAIJBxAti37xZTs6sj6fjXwlEE8l2RRM ++FTECIonPwKBgFBZkXuH7hijkWUJJv3w2kG+k5ngCTYkO2fKAIMbCRQLFcRL3kLm +vLvHE17jnVX8m08xLMYH0uYtbDie1S7z72HwV1aIlkfmCqfRryh5wQdTXG7dGyqe +BiStJO4u+jNWCYEBps0x4cx8x1PIpsV5N606a7FEpzRdykb8zDzIMSPxAoGAGHLK +HMwdaSEij5iA5D+tcrH7WRU3+q6QxBjWF2S0SN4boGTSFjLlgTGymopQhCNaanVw +uqY4c5arr69NDAdEQoEbDHXg+3b4jrWVib/+2LdVJ2ZjLuNYcu8Jt6RXOk2yNdDI +dLib13r60qeKhurfMHrMBccsBRBVRj1uFYifvr8CgYEAynbD898pShniuKii5c4i +3RrzhK/V6XGLfOJzDtjZ/uRcv8nt42kdbU3z+M87GE6hXn0rm6AIgVQKtSoaUHWH +oTVOtmdctkx8GmcdhSX5fs2wzVxvVsqyf1wjo6UG/90k9nxY+AjMU144ZpuRYuKQ +pWtPdQWBlw58XRAHW8r9Zxs= +-----END PRIVATE KEY----- diff --git a/apps/emqx_gcp_device/test/data/keys/c3_rsa_public.pem b/apps/emqx_gcp_device/test/data/keys/c3_rsa_public.pem new file mode 100644 index 000000000..1757bcc88 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c3_rsa_public.pem @@ -0,0 +1,9 @@ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzc27HX3t3tBA31VE+kHV +hPloAVBpvCSHR+HfEOI++qCUiO+nU1dAIKsSWu4ipbwCl57oetQwmeBnR49Ra0B6 +s5UyOssNw9aiRVUFZdVKOifoaXIZy1NTfG6tgp2Wq8fL5KyA5Sq+PzFkfyD9axYQ +C5jbF+nJ78OHg0/3EYQhN7NvCipTgxCcW/oIGG6v0N6V5W7x+7ixJWbLPyZYM0vE +TIN0BIxbx1R+fGkyUWAqvNfveTyN5wq7MY9915BSLyGUprsq9n5DJmiC44RJVau2 +MfH3mKQxkn8c/2L0hZzqK6swj1EdE/BAiA+t+67mOVMLoGrOqfO16Y3f7Sv5D7Xc +jwIDAQAB +-----END PUBLIC KEY----- diff --git a/apps/emqx_gcp_device/test/data/keys/c4_rsa_cert.pem b/apps/emqx_gcp_device/test/data/keys/c4_rsa_cert.pem new file mode 100644 index 000000000..95bbba107 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c4_rsa_cert.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICnjCCAYYCCQCh+b8WxXjihDANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDDAZ1 +bnVzZWQwHhcNMjMwNDEyMTM0NjUyWhcNMjMwNTEyMTM0NjUyWjARMQ8wDQYDVQQD +DAZ1bnVzZWQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDOtvDuketC +56nvrZw61UyP+MJikYbqqxIqIqwyih2KDCzlF6gTBI6vbFNwZx1b366VOfDhuj6j +44+cN44AoVKtqSzpsDjdlIRClcBIv4k2ndXjr6yV1cJ9lrMB9vPbr8fiQOxr31Cf 
+ZUk0OZPppdsC5iqYpUeOdrSttOgBRIaTohBUXMatICxhc+9gC5yj9mQJuwckx6fE +b+gJ9JrZ1/0wSW1EZNfS9hlOhA0nRUnty5wyqrpxdX4UL/G86SFl7njW9S1PBuPe +HK7AdHZ6C3FAMfqpnETiWV149k/DR4UQQ7a23QsbgVJOM/7R9IAyln9LARhF9Bpp +y/W2HPpBn8JHAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAGFl+G3yk/BfELjX1mT6 +4mrGlJq3I6vXLN4ICSTmI4YZQgMmudIHEd6o/cZHJq8HOOqQ5SfFhQI7tBXZpXSG +dybOStl+GnfyIQFjsNzFXJEiaHoBPP1ccpZyCW/IBkXX39h9N/Pq0XB+xDurXpOD +VE8nICTATe1Th11rs8j6qwFCkaoQwrzg+JWOKvFnRTPPDNg21fNRRTS+SE27asF2 +PhBWZOD4G2g6WD73SHUs+prR/q4foSVXt63Ih8uQIQJllRtpI4ZkpwSXDH9DUZSY +WyFtYkD0EAV/FaRuALZQzxX7wda4xwBhvDL8Wua1WENTGZq7ssRHldAdFrz8NENC +Hqk= +-----END CERTIFICATE----- diff --git a/apps/emqx_gcp_device/test/data/keys/c4_rsa_private.pem b/apps/emqx_gcp_device/test/data/keys/c4_rsa_private.pem new file mode 100644 index 000000000..232e5ca99 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c4_rsa_private.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDOtvDuketC56nv +rZw61UyP+MJikYbqqxIqIqwyih2KDCzlF6gTBI6vbFNwZx1b366VOfDhuj6j44+c +N44AoVKtqSzpsDjdlIRClcBIv4k2ndXjr6yV1cJ9lrMB9vPbr8fiQOxr31CfZUk0 +OZPppdsC5iqYpUeOdrSttOgBRIaTohBUXMatICxhc+9gC5yj9mQJuwckx6fEb+gJ +9JrZ1/0wSW1EZNfS9hlOhA0nRUnty5wyqrpxdX4UL/G86SFl7njW9S1PBuPeHK7A +dHZ6C3FAMfqpnETiWV149k/DR4UQQ7a23QsbgVJOM/7R9IAyln9LARhF9Bppy/W2 +HPpBn8JHAgMBAAECggEBALKiEM55Nq7Yd1fx1UJaNRFtTL3VOJvuPYI/+EKsbB5x +qxJGQS4+D/e0St6lnQ9Z2wqFyY2nXp5N9jpvH72Xq1T7Dx7a9Ck3QJwxwLqdGjwi +ZUWe+Ct7T9krs4GNIOrFmpwAss39azRzWLFS2GletEZrFIBYw99u4XADF0KRLyK/ +qno/gnYWqrhc0NVG3OR2n+AruJF+EElbOBCmzPzgVgNYinLXpvpttBtlRIS3XIPD +UhdP9O33oTAyUGxRUcqbnwWLMPa3mQijT8fwIMvmeK94RYWsGz5r3+GQRC8ieeVy +4MjJLSwGLk2apxiuEWQzCwjnda0T9OwuIzJM0uT1CyECgYEA5njmwMQOqBsFresn +AdGLWlMKA2sM9sl/A+I6d/+B1NtAvpcq4UHQDfOSbthiOiU4/uIx/ZP4wmB7Smk/ +WB7NfuXZySpJTEWn0fwEaKcXIksqumQ2Lwom0QCV1m5nSnVdw+VLdWVIngqqG+Id +c6Rh0F96KpT8MalyxR1TsgP0jRkCgYEA5Zxirqc9SYQm/jaBfjGW9teunY/zQj7m +lCEUEp4aS9zfwcOS973sU80HXsfU1dsbQy15whvozqbTQMAYoaKz54DCebuPkW3I +o4tY6oCuFEHlOiait0KnRPG8ZiHZKeO3TGcLajQWGssNLbbDFlhby8S9thlJ1+GT +ldSW0AxhVl8CgYAy+zGIGJZpZzjVZPwG8fRScaX4ZZjDioT3NfbbDoEItctXnZbV +pzo/q86LiIAJ/qvh7eVDA5V2YeND7Y4ejwnD9VI8pob6QTpDP+01vShn5Jq6CmrV +8vftKaT7fwaIOPgZ2kHb4SC0HQXODzGWoBkm/8fFXZl/3szNf5RA/5D8GQKBgQDX +Y9pWiF+/pQ6HDk5vOMmrCSyudaj2jdbzQgx4YoO8gpgMRhCKAkm9Wun9CWwoqP9s +By7e3huIL4qghRMWHXCyTGEinMXS4K+Ea2WfpdKnAiGsaS3ex9HtpO7cyAfVed4q +98cHe5D41V2pcnaTcZO7FPX56sMQlnVB6kkHJXXx9QKBgHQPmp1uT+MCYOd+HLqo +b2tDxSukm/qe5MioiAKx4MhO8ZI/4BFDvlIEfcjWLCfvjXjZRIreYPys2idq8kX5 +Sb2n8ikw+YO79QfRuKmjtvXp/Ur+FROGIxb+/+OVzcZKF/An6p7oKmG4ACaBG6DP +LOJcBiQ8TVXz9f0V7jRko1kK +-----END PRIVATE KEY----- diff --git a/apps/emqx_gcp_device/test/data/keys/c5_rsa_private.pem b/apps/emqx_gcp_device/test/data/keys/c5_rsa_private.pem new file mode 100644 index 000000000..f260ba307 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c5_rsa_private.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDQPzrNyYYt1aNX +ZLXAuq7i1qN5TEf+pKd3nnpng8pH3JeeXZhxAFlmHqB/1jlG4vOyL4Q3Tw27KtaO +xw43XphGYlg4lofi8Q6O2zQRtaX96SX+5ofZVowHJMI5dkf2hojWGhiuv1gXIDEm +WtI9zfoP4mzdZPKMXqav5UJk8zUytlP3seJz+8nXZT9hIOqToBg91kbt1tqVZHg0 +taKbuXsSfAeLYrAMtR7CgZfw79YOmR48PzJ2LHOqAwyCN7kQyWi114xITloptJJZ +BYkg/Kk22bQWJylIxqGSwwbcbNnMYJ70Ia8NZbzb7qI/YDb+amxoJ7g8gDWyANeo +bdSghU4zAgMBAAECggEAE4Q5gJvIZXdGLaSUnBFi3oN7Ip0Rij3oK//APP9O79ku +pHrlFIIR3s40AIcVKx2N9T8axwwznzzuiscBABNvdfk1h2gkKBKraJwGjzpU6iz1 +kKQOS0IfMXQyd6wsJmCJZndfpNDt8ozjzlJorb4mF2MDDOSvDpS4TnfP9yIL9EqS 
+pcHWsLQkqab5WjC7bwXvgFOIMwE32UhX/M6U3nAi8UuAanWVI2bXowywdK5f9HYU +2TOw4TK+S773savQhczC7BAzBlNeOKguLQsO8St+4aLs/1k60qST/mcoFYgjhkXT +iMMFrTp4kQNBfNto7LHOwLEXlT6rHGNMlYWJXzkvgQKBgQD//EDc3rMSudEKJxrU +gzZ9D4ji+Rloa5lc4Qdg0Mxm2e2hrEgJqgPhBFO2v86t84NqtzQ+3Iu+j7o4Idor +feEPx/74NztjQRDdU06kMGHHE6jNC1f+V0NmMgbvR26PqtZImI0FM10KGpPDjl/W +t7w+D+XLBkjRysekkf1kYsX20wKBgQDQQkcSpevgzebGubp+cQ24mKCP8q4nyOul +0vLK9iX05q2A4cWOQNlLVcxeV5uA2Y/aZUKMsjwcyF8xi/vDW4CzQej0fi0zQrxD +hImhUzPDqejaRtG+qdj7u6IQN7QjWetLKbU9OPmzsZt0EZQu7B7S9ftkZzjK5Inv +crbXPjlvIQKBgCt7MZlSyqAXqAZNdiU61HqRtPK41TQDct1v68zqKo4d3ltj5Cig +FGCYV4/nLLgncN8jl2BGHgaUa1E1jtVsYFpJ4mlPGGtXlgHCMM162mDyWe3aS2wM +bophXQQv4fvNTPCv2ORVQSyCLy88c9MJCpSQJrxBqQTZqOevVJdEn9O5AoGAIULk +nQrY8G+SMx0ItxcRTPE7e6ITxJDnafWWB2pmx4VsIpBsf/rFea27VToCwQJ+YjAX +/+abiTFLWttzm1Dq7jZRoXLhfzViYhox7Q0f0Fk7sljrONthp1rhWFu9LoQ2+ysv +IhcOcm+kV1ZTZ2cYyTK2MuP1gxobGZ4lq5zpiWECgYB86qlXEAZP2YinZfuPETII +RPPfTHESserJmikGxAxDk00yfWtoW8kJePKvNIPuDCe0NsMVp8PFaN08stD8Xj4k +8gZTkasoH8kbcZXjUDRbNOM0oHWlIYLaRTfdknyh27HRbDHPukXJV/IxQGqahmBs +K0Yh5NkZp9Rxn7iQtojCvQ== +-----END PRIVATE KEY----- diff --git a/apps/emqx_gcp_device/test/data/keys/c5_rsa_public.pem b/apps/emqx_gcp_device/test/data/keys/c5_rsa_public.pem new file mode 100644 index 000000000..a0ce58a93 --- /dev/null +++ b/apps/emqx_gcp_device/test/data/keys/c5_rsa_public.pem @@ -0,0 +1,9 @@ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0D86zcmGLdWjV2S1wLqu +4tajeUxH/qSnd556Z4PKR9yXnl2YcQBZZh6gf9Y5RuLzsi+EN08NuyrWjscON16Y +RmJYOJaH4vEOjts0EbWl/ekl/uaH2VaMByTCOXZH9oaI1hoYrr9YFyAxJlrSPc36 +D+Js3WTyjF6mr+VCZPM1MrZT97Hic/vJ12U/YSDqk6AYPdZG7dbalWR4NLWim7l7 +EnwHi2KwDLUewoGX8O/WDpkePD8ydixzqgMMgje5EMlotdeMSE5aKbSSWQWJIPyp +Ntm0FicpSMahksMG3GzZzGCe9CGvDWW82+6iP2A2/mpsaCe4PIA1sgDXqG3UoIVO +MwIDAQAB +-----END PUBLIC KEY----- diff --git a/apps/emqx_gcp_device/test/emqx_gcp_device_SUITE.erl b/apps/emqx_gcp_device/test/emqx_gcp_device_SUITE.erl new file mode 100644 index 000000000..5f286d629 --- /dev/null +++ b/apps/emqx_gcp_device/test/emqx_gcp_device_SUITE.erl @@ -0,0 +1,390 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_gcp_device_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include_lib("emqx_authn/include/emqx_authn.hrl"). +-include_lib("emqx/include/emqx.hrl"). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + ok = emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn, emqx_retainer, emqx_gcp_device]), + Config. + +end_per_suite(Config) -> + _ = emqx_common_test_helpers:stop_apps([emqx_authn, emqx_retainer, emqx_gcp_device]), + Config. + +init_per_testcase(_TestCase, Config) -> + {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ), + clear_data(), + Config. + +end_per_testcase(_TestCase, Config) -> + clear_data(), + Config. 
+ +%%-------------------------------------------------------------------- +%% Tests +%%-------------------------------------------------------------------- + +t_ignore_non_jwt(_Config) -> + ClientId = gcp_client_id(<<"clientid">>), + ClientInfo = client_info(ClientId, <<"non_jwt_password">>), + ?check_trace( + ?assertEqual( + ignore, + emqx_gcp_device_authn:authenticate(ClientInfo, #{}) + ), + fun(Trace) -> + ?assertMatch( + [#{result := ignore, reason := "not a JWT"}], + ?of_kind(authn_gcp_device_check, Trace) + ) + end + ), + ok. + +t_ignore_non_gcp_clientid(_Config) -> + % GCP Client pattern: + % projects//locations//registries//devices/ + NonGCPClientIdList = [ + <<"non_gcp_clientid">>, + <<"projects/non_gcp_client">>, + <<"projects/proj/locations/non_gcp_client">>, + <<"projects/proj/locations/loc/registries/non_gcp_client">>, + <<"projects/proj/locations/loc/registries/reg/device/non_gcp_client">> + ], + [{_DeviceId, KeyType, PrivateKeyName, _PublicKey} | _] = keys(), + Payload = #{<<"exp">> => 0}, + JWT = generate_jws(Payload, KeyType, PrivateKeyName), + lists:foreach( + fun(ClientId) -> + ClientInfo = client_info(ClientId, JWT), + ?check_trace( + ?assertEqual( + ignore, + emqx_gcp_device_authn:authenticate(ClientInfo, #{}), + ClientId + ), + fun(Trace) -> + ?assertMatch( + [#{result := ignore, reason := "not a GCP ClientId"}], + ?of_kind(authn_gcp_device_check, Trace), + ClientId + ) + end + ) + end, + NonGCPClientIdList + ), + ok. + +t_deny_expired_jwt(_Config) -> + lists:foreach( + fun({DeviceId, KeyType, PrivateKeyName, _PublicKey}) -> + ClientId = gcp_client_id(DeviceId), + Payload = #{<<"exp">> => 0}, + JWT = generate_jws(Payload, KeyType, PrivateKeyName), + ClientInfo = client_info(ClientId, JWT), + ?check_trace( + ?assertMatch( + {error, _}, + emqx_gcp_device_authn:authenticate(ClientInfo, #{}), + DeviceId + ), + fun(Trace) -> + ?assertMatch( + [#{result := not_authorized, reason := "expired JWT"}], + ?of_kind(authn_gcp_device_check, Trace), + DeviceId + ) + end + ) + end, + keys() + ), + ok. + +t_no_keys(_Config) -> + lists:foreach( + fun({DeviceId, KeyType, PrivateKeyName, _PublicKey}) -> + ClientId = gcp_client_id(DeviceId), + Payload = #{<<"exp">> => erlang:system_time(second) + 3600}, + JWT = generate_jws(Payload, KeyType, PrivateKeyName), + ClientInfo = client_info(ClientId, JWT), + ?check_trace( + ?assertMatch( + ignore, + emqx_gcp_device_authn:authenticate(ClientInfo, #{}), + DeviceId + ), + fun(Trace) -> + ?assertMatch( + [#{result := ignore, reason := "key not found"}], + ?of_kind(authn_gcp_device_check, Trace), + DeviceId + ) + end + ) + end, + keys() + ), + ok. + +t_expired_keys(_Config) -> + lists:foreach( + fun({DeviceId, KeyType, PrivateKeyName, PublicKey}) -> + ClientId = gcp_client_id(DeviceId), + Device = #{ + deviceid => DeviceId, + config => <<>>, + keys => + [ + #{ + key_type => KeyType, + key => key_data(PublicKey), + expires_at => erlang:system_time(second) - 3600 + } + ] + }, + ok = emqx_gcp_device:put_device(Device), + Payload = #{<<"exp">> => erlang:system_time(second) + 3600}, + JWT = generate_jws(Payload, KeyType, PrivateKeyName), + ClientInfo = client_info(ClientId, JWT), + ?check_trace( + ?assertMatch( + {error, _}, + emqx_gcp_device_authn:authenticate(ClientInfo, #{}), + DeviceId + ), + fun(Trace) -> + ?assertMatch( + [ + #{ + result := {error, bad_username_or_password}, + reason := "no matching or valid keys" + } + ], + ?of_kind(authn_gcp_device_check, Trace), + DeviceId + ) + end + ) + end, + keys() + ), + ok. 
+ +t_valid_keys(_Config) -> + [ + {DeviceId, KeyType0, PrivateKeyName0, PublicKey0}, + {_DeviceId1, KeyType1, PrivateKeyName1, PublicKey1}, + {_DeviceId2, KeyType2, PrivateKeyName2, _PublicKey} + | _ + ] = keys(), + Device = #{ + deviceid => DeviceId, + config => <<>>, + keys => + [ + #{ + key_type => KeyType0, + key => key_data(PublicKey0), + expires_at => erlang:system_time(second) + 3600 + }, + #{ + key_type => KeyType1, + key => key_data(PublicKey1), + expires_at => erlang:system_time(second) + 3600 + } + ] + }, + ok = emqx_gcp_device:put_device(Device), + Payload = #{<<"exp">> => erlang:system_time(second) + 3600}, + JWT0 = generate_jws(Payload, KeyType0, PrivateKeyName0), + JWT1 = generate_jws(Payload, KeyType1, PrivateKeyName1), + JWT2 = generate_jws(Payload, KeyType2, PrivateKeyName2), + ClientId = gcp_client_id(DeviceId), + lists:foreach( + fun(JWT) -> + ?check_trace( + begin + ClientInfo = client_info(ClientId, JWT), + ?assertMatch( + ok, + emqx_gcp_device_authn:authenticate(ClientInfo, #{}) + ) + end, + fun(Trace) -> + ?assertMatch( + [#{result := ok, reason := "auth success"}], + ?of_kind(authn_gcp_device_check, Trace) + ) + end + ) + end, + [JWT0, JWT1] + ), + ?check_trace( + begin + ClientInfo = client_info(ClientId, JWT2), + ?assertMatch( + {error, bad_username_or_password}, + emqx_gcp_device_authn:authenticate(ClientInfo, #{}) + ) + end, + fun(Trace) -> + ?assertMatch( + [ + #{ + result := {error, bad_username_or_password}, + reason := "no matching or valid keys" + } + ], + ?of_kind(authn_gcp_device_check, Trace) + ) + end + ), + ok. + +t_all_key_types(_Config) -> + lists:foreach( + fun({DeviceId, KeyType, _PrivateKeyName, PublicKey}) -> + Device = #{ + deviceid => DeviceId, + config => <<>>, + keys => + [ + #{ + key_type => KeyType, + key => key_data(PublicKey), + expires_at => 0 + } + ] + }, + ok = emqx_gcp_device:put_device(Device) + end, + keys() + ), + Payload = #{<<"exp">> => erlang:system_time(second) + 3600}, + lists:foreach( + fun({DeviceId, KeyType, PrivateKeyName, _PublicKey}) -> + ClientId = gcp_client_id(DeviceId), + JWT = generate_jws(Payload, KeyType, PrivateKeyName), + ClientInfo = client_info(ClientId, JWT), + ?check_trace( + ?assertMatch( + ok, + emqx_gcp_device_authn:authenticate(ClientInfo, #{}) + ), + fun(Trace) -> + ?assertMatch( + [#{result := ok, reason := "auth success"}], + ?of_kind(authn_gcp_device_check, Trace) + ) + end + ) + end, + keys() + ), + ok. + +t_config(_Config) -> + Device = #{ + deviceid => <<"t">>, + config => base64:encode(<<"myconf">>), + keys => [] + }, + ok = emqx_gcp_device:put_device(Device), + + {ok, Pid} = emqtt:start_link(), + {ok, _} = emqtt:connect(Pid), + {ok, _, _} = emqtt:subscribe(Pid, <<"/devices/t/config">>, 0), + + receive + {publish, #{payload := <<"myconf">>}} -> + ok + after 1000 -> + ct:fail("No config received") + end, + emqtt:stop(Pid), + ok. + +t_wrong_device(_Config) -> + Device = #{wrong_field => wrong_value}, + ?assertMatch( + {error, {function_clause, _}}, + emqx_gcp_device:put_device(Device) + ), + ok. 
+ +t_import_wrong_devices(_Config) -> + InvalidDevices = [ + #{wrong_field => wrong_value}, + #{another_wrong_field => another_wrong_value}, + #{yet_another_wrong_field => yet_another_wrong_value} + ], + ValidDevices = [ + #{ + deviceid => gcp_client_id(<<"valid_device_1">>), + config => <<>>, + keys => [] + }, + #{ + deviceid => gcp_client_id(<<"valid_device_2">>), + config => <<>>, + keys => [] + } + ], + Devices = InvalidDevices ++ ValidDevices, + InvalidDevicesLength = length(InvalidDevices), + ValidDevicesLength = length(ValidDevices), + ?assertMatch( + {ValidDevicesLength, InvalidDevicesLength}, + emqx_gcp_device:import_devices(Devices) + ), + ok. + +%%-------------------------------------------------------------------- +%% Helpers +%%-------------------------------------------------------------------- + +client_info(ClientId, Password) -> + emqx_gcp_device_test_helpers:client_info(ClientId, Password). + +device_loc(DeviceId) -> + {<<"iot-export">>, <<"europe-west1">>, <<"my-registry">>, DeviceId}. + +gcp_client_id(DeviceId) -> + emqx_gcp_device_test_helpers:client_id(DeviceId). + +keys() -> + emqx_gcp_device_test_helpers:keys(). + +key_data(Filename) -> + emqx_gcp_device_test_helpers:key(Filename). + +generate_jws(Payload, KeyType, PrivateKeyName) -> + emqx_gcp_device_test_helpers:generate_jws(Payload, KeyType, PrivateKeyName). + +clear_data() -> + emqx_gcp_device_test_helpers:clear_data(), + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ), + ok. diff --git a/apps/emqx_gcp_device/test/emqx_gcp_device_api_SUITE.erl b/apps/emqx_gcp_device/test/emqx_gcp_device_api_SUITE.erl new file mode 100644 index 000000000..238f99445 --- /dev/null +++ b/apps/emqx_gcp_device/test/emqx_gcp_device_api_SUITE.erl @@ -0,0 +1,327 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_gcp_device_api_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include_lib("emqx_authn/include/emqx_authn.hrl"). +-include_lib("emqx/include/emqx.hrl"). + +-define(PATH, [authentication]). +-define(BASE_CONF, << + "" + "\n" + "retainer {\n" + " enable = true\n" + "}" + "" +>>). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + ok = emqx_config:init_load(emqx_retainer_schema, ?BASE_CONF), + ok = emqx_common_test_helpers:start_apps([emqx_gcp_device, emqx_authn, emqx_conf, emqx_retainer]), + emqx_dashboard_api_test_helpers:set_default_config(), + emqx_mgmt_api_test_util:init_suite(), + Config. + +end_per_suite(Config) -> + emqx_mgmt_api_test_util:end_suite(), + _ = emqx_common_test_helpers:stop_apps([emqx_authn, emqx_retainer, emqx_gcp_device]), + Config. + +init_per_testcase(_TestCase, Config) -> + {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ), + clear_data(), + Config. + +end_per_testcase(_TestCase, Config) -> + clear_data(), + Config. 
+ +%%-------------------------------------------------------------------- +%% Tests +%%-------------------------------------------------------------------- + +t_import(_Config) -> + ?assertMatch( + {ok, #{<<"errors">> := 0, <<"imported">> := 14}}, + api(post, ["gcp_devices"], emqx_gcp_device_test_helpers:exported_data()) + ), + + InvalidData = + [ + #{<<"deviceid">> => <<"device1">>, <<"device_numid">> => <<"device1">>}, + #{<<"name">> => []} + ], + ?assertMatch({error, {_, 400, _}}, api(post, ["gcp_devices"], InvalidData)), + + ?assertMatch( + {ok, #{<<"meta">> := #{<<"count">> := 14}}}, + api(get, ["gcp_devices"]) + ), + + ?assertMatch( + {ok, #{ + <<"meta">> := + #{ + <<"count">> := 14, + <<"page">> := 2, + <<"limit">> := 3 + } + }}, + api(get, ["gcp_devices"], [{"limit", "3"}, {"page", "2"}]) + ). + +t_device_crud_ok(_Config) -> + AuthConfig = raw_config(), + DeviceId = <<"my device">>, + DeviceIdReq = emqx_http_lib:uri_encode(DeviceId), + ConfigTopic = emqx_gcp_device:config_topic(DeviceId), + DeviceConfig = <<"myconfig">>, + EncodedConfig = base64:encode(DeviceConfig), + {ok, _} = emqx:update_config(?PATH, {create_authenticator, ?GLOBAL, AuthConfig}), + + Payload = #{<<"exp">> => erlang:system_time(second) + 3600}, + JWT = generate_jws(Payload, <<"ES256_PEM">>, "c1_ec_private.pem"), + ClientInfo = client_info(client_id(DeviceId), JWT), + ?assertMatch( + {error, _}, + emqx_access_control:authenticate(ClientInfo) + ), + Device0 = + #{ + <<"project">> => <<"iot-export">>, + <<"location">> => <<"europe-west1">>, + <<"registry">> => <<"my-registry">>, + <<"keys">> => + [ + #{ + <<"key">> => emqx_gcp_device_test_helpers:key("c1_ec_public.pem"), + <<"key_type">> => <<"ES256_PEM">>, + <<"expires_at">> => 0 + }, + #{ + <<"key">> => emqx_gcp_device_test_helpers:key("c1_ec_public.pem"), + <<"key_type">> => <<"ES256_PEM">>, + <<"expires_at">> => 0 + } + ], + <<"config">> => EncodedConfig + }, + ?assertMatch( + {ok, #{<<"deviceid">> := DeviceId}}, + api(put, ["gcp_devices", DeviceIdReq], Device0) + ), + ?assertMatch( + {ok, _}, + emqx_access_control:authenticate(ClientInfo) + ), + + ?retry( + _Sleep = 100, + _Attempts = 10, + ?assertMatch( + {ok, [#message{payload = DeviceConfig}]}, + emqx_retainer:read_message(ConfigTopic) + ) + ), + ?assertMatch( + {ok, #{ + <<"project">> := <<"iot-export">>, + <<"location">> := <<"europe-west1">>, + <<"registry">> := <<"my-registry">>, + <<"keys">> := + [ + #{ + <<"key">> := _, + <<"key_type">> := <<"ES256_PEM">>, + <<"expires_at">> := 0 + }, + #{ + <<"key">> := _, + <<"key_type">> := <<"ES256_PEM">>, + <<"expires_at">> := 0 + } + ], + <<"config">> := EncodedConfig + }}, + api(get, ["gcp_devices", DeviceIdReq]) + ), + + Device1 = maps:without([<<"project">>, <<"location">>, <<"registry">>], Device0), + ?assertMatch( + {ok, #{<<"deviceid">> := DeviceId}}, + api(put, ["gcp_devices", DeviceIdReq], Device1) + ), + + ?assertMatch( + {ok, #{ + <<"project">> := <<>>, + <<"location">> := <<>>, + <<"registry">> := <<>> + }}, + api(get, ["gcp_devices", DeviceIdReq]) + ), + ?assertMatch({ok, {{_, 204, _}, _, _}}, api(delete, ["gcp_devices", DeviceIdReq])), + + ?retry( + _Sleep = 100, + _Attempts = 10, + ?assertNotMatch( + {ok, [#message{payload = DeviceConfig}]}, + emqx_retainer:read_message(ConfigTopic) + ) + ), + ?assertMatch({error, {_, 404, _}}, api(get, ["gcp_devices", DeviceIdReq])). 
+ +t_device_crud_nok(_Config) -> + DeviceId = <<"my device">>, + DeviceIdReq = emqx_http_lib:uri_encode(DeviceId), + Config = <<"myconfig">>, + EncodedConfig = base64:encode(Config), + + BadDevices = + [ + #{ + <<"project">> => 5, + <<"keys">> => [], + <<"config">> => EncodedConfig + }, + #{ + <<"keys">> => <<"keys">>, + <<"config">> => EncodedConfig + }, + #{ + <<"keys">> => [<<"key">>], + <<"config">> => EncodedConfig + }, + #{ + <<"keys">> => [#{<<"key">> => <<"key">>}], + <<"config">> => EncodedConfig + }, + #{ + <<"keys">> => [#{<<"key_type">> => <<"ES256_PEM">>}], + <<"config">> => EncodedConfig + }, + #{ + <<"keys">> => + [ + #{ + <<"key">> => <<"key">>, + <<"key_type">> => <<"ES256_PEM">>, + <<"expires_at">> => <<"123">> + } + ], + <<"config">> => EncodedConfig + } + ], + + lists:foreach( + fun(BadDevice) -> + ?assertMatch( + {error, {_, 400, _}}, + api(put, ["gcp_devices", DeviceIdReq], BadDevice) + ) + end, + BadDevices + ). + +%%-------------------------------------------------------------------- +%% Helpers +%%-------------------------------------------------------------------- + +assert_no_retained(ConfigTopic) -> + {ok, Pid} = emqtt:start_link(), + {ok, _} = emqtt:connect(Pid), + {ok, _, _} = emqtt:subscribe(Pid, ConfigTopic, 0), + + receive + {publish, #{payload := Config}} -> + ct:fail("Unexpected config received: ~p", [Config]) + after 100 -> + ok + end, + + _ = emqtt:stop(Pid). + +api(get, Path) -> + api(get, Path, ""); +api(delete, Path) -> + api(delete, Path, []). + +api(get, Path, Query) -> + maybe_decode_response( + emqx_mgmt_api_test_util:request_api( + get, + emqx_mgmt_api_test_util:api_path(Path), + uri_string:compose_query(Query), + emqx_mgmt_api_test_util:auth_header_() + ) + ); +api(delete, Path, Query) -> + emqx_mgmt_api_test_util:request_api( + delete, + emqx_mgmt_api_test_util:api_path(Path), + uri_string:compose_query(Query), + emqx_mgmt_api_test_util:auth_header_(), + [], + #{return_all => true} + ); +api(Method, Path, Data) when + Method =:= put orelse Method =:= post +-> + api(Method, Path, [], Data). + +api(Method, Path, Query, Data) when + Method =:= put orelse Method =:= post +-> + maybe_decode_response( + emqx_mgmt_api_test_util:request_api( + Method, + emqx_mgmt_api_test_util:api_path(Path), + uri_string:compose_query(Query), + emqx_mgmt_api_test_util:auth_header_(), + Data + ) + ). + +maybe_decode_response({ok, ResponseBody}) -> + {ok, jiffy:decode(list_to_binary(ResponseBody), [return_maps])}; +maybe_decode_response({error, _} = Error) -> + Error. + +generate_jws(Payload, KeyType, PrivateKeyName) -> + emqx_gcp_device_test_helpers:generate_jws(Payload, KeyType, PrivateKeyName). + +client_info(ClientId, Password) -> + emqx_gcp_device_test_helpers:client_info(ClientId, Password). + +client_id(DeviceId) -> + emqx_gcp_device_test_helpers:client_id(DeviceId). + +raw_config() -> + #{ + <<"mechanism">> => <<"gcp_device">>, + <<"enable">> => <<"true">> + }. + +clear_data() -> + emqx_gcp_device_test_helpers:clear_data(), + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ), + ok. diff --git a/apps/emqx_gcp_device/test/emqx_gcp_device_authn_SUITE.erl b/apps/emqx_gcp_device/test/emqx_gcp_device_authn_SUITE.erl new file mode 100644 index 000000000..8c3f8e0fa --- /dev/null +++ b/apps/emqx_gcp_device/test/emqx_gcp_device_authn_SUITE.erl @@ -0,0 +1,175 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
+%%-------------------------------------------------------------------- + +-module(emqx_gcp_device_authn_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include_lib("emqx_authn/include/emqx_authn.hrl"). + +-define(PATH, [authentication]). +-define(DEVICE_ID, <<"test-device">>). +-define(PROJECT, <<"iot-export">>). +-define(LOCATION, <<"europe-west1">>). +-define(REGISTRY, <<"my-registry">>). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config0) -> + ok = snabbkaffe:start_trace(), + emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn, emqx_gcp_device]), + ValidExpirationTime = erlang:system_time(second) + 3600, + ValidJWT = generate_jws(ValidExpirationTime), + ExpiredJWT = generate_jws(0), + ValidClient = generate_client(ValidExpirationTime), + ExpiredClient = generate_client(0), + [ + {device_id, ?DEVICE_ID}, + {client_id, client_id()}, + {valid_jwt, ValidJWT}, + {expired_jwt, ExpiredJWT}, + {valid_client, ValidClient}, + {expired_client, ExpiredClient} + | Config0 + ]. + +end_per_suite(_) -> + _ = emqx_common_test_helpers:stop_apps([emqx_authn, emqx_gcp_device]), + ok. + +init_per_testcase(_, Config) -> + {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), + Config. + +end_per_testcase(_Case, Config) -> + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ), + Config. + +%%------------------------------------------------------------------------------ +%% Tests +%%------------------------------------------------------------------------------ + +t_create(_Config) -> + AuthConfig = raw_config(), + {ok, _} = emqx:update_config(?PATH, {create_authenticator, ?GLOBAL, AuthConfig}), + ?assertMatch( + {ok, [#{provider := emqx_gcp_device_authn}]}, + emqx_authentication:list_authenticators(?GLOBAL) + ). + +t_destroy(Config) -> + ClientId = ?config(client_id, Config), + JWT = ?config(valid_jwt, Config), + Credential = credential(ClientId, JWT), + Client = ?config(valid_client, Config), + AuthConfig = raw_config(), + {ok, _} = emqx:update_config(?PATH, {create_authenticator, ?GLOBAL, AuthConfig}), + ok = emqx_gcp_device:put_device(Client), + ?assertMatch( + {ok, _}, + emqx_access_control:authenticate(Credential) + ), + emqx_authn_test_lib:delete_authenticators([authentication], ?GLOBAL), + ?assertMatch( + ignore, + emqx_gcp_device_authn:authenticate(Credential, #{}) + ). + +t_expired_client(Config) -> + ClientId = ?config(client_id, Config), + JWT = ?config(expired_jwt, Config), + Credential = credential(ClientId, JWT), + Client = ?config(expired_client, Config), + AuthConfig = raw_config(), + {ok, _} = emqx:update_config(?PATH, {create_authenticator, ?GLOBAL, AuthConfig}), + ?assertMatch( + {ok, [#{provider := emqx_gcp_device_authn}]}, + emqx_authentication:list_authenticators(?GLOBAL) + ), + ok = emqx_gcp_device:put_device(Client), + ?assertMatch( + {error, not_authorized}, + emqx_access_control:authenticate(Credential) + ). + +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ + +raw_config() -> + #{ + <<"mechanism">> => <<"gcp_device">>, + <<"enable">> => <<"true">> + }. + +generate_client(ExpirationTime) -> + generate_client(?DEVICE_ID, ExpirationTime). 
+ +generate_client(ClientId, ExpirationTime) -> + #{ + deviceid => ClientId, + project => ?PROJECT, + location => ?LOCATION, + registry => ?REGISTRY, + config => <<>>, + keys => + [ + #{ + key_type => <<"RSA_PEM">>, + key => public_key(), + expires_at => ExpirationTime + } + ] + }. + +client_id() -> + client_id(?DEVICE_ID). + +client_id(DeviceId) -> + <<"projects/", ?PROJECT/binary, "/locations/", ?LOCATION/binary, "/registries/", + ?REGISTRY/binary, "/devices/", DeviceId/binary>>. + +generate_jws(ExpirationTime) -> + Payload = #{<<"exp">> => ExpirationTime}, + JWK = jose_jwk:from_pem_file(test_rsa_key(private)), + Header = #{<<"alg">> => <<"RS256">>, <<"typ">> => <<"JWT">>}, + Signed = jose_jwt:sign(JWK, Header, Payload), + {_, JWS} = jose_jws:compact(Signed), + JWS. + +public_key() -> + {ok, Data} = file:read_file(test_rsa_key(public)), + Data. + +private_key() -> + {ok, Data} = file:read_file(test_rsa_key(private)), + Data. + +test_rsa_key(public) -> + data_file("public_key.pem"); +test_rsa_key(private) -> + data_file("private_key.pem"). + +data_file(Name) -> + Dir = code:lib_dir(emqx_authn, test), + list_to_binary(filename:join([Dir, "data", Name])). + +credential(ClientId, JWT) -> + #{ + listener => 'tcp:default', + protocol => mqtt, + clientid => ClientId, + password => JWT + }. + +check(Module, HoconConf) -> + emqx_hocon:check(Module, ["authentication= ", HoconConf]). diff --git a/apps/emqx_gcp_device/test/emqx_gcp_device_test_helpers.erl b/apps/emqx_gcp_device/test/emqx_gcp_device_test_helpers.erl new file mode 100644 index 000000000..3e961a168 --- /dev/null +++ b/apps/emqx_gcp_device/test/emqx_gcp_device_test_helpers.erl @@ -0,0 +1,66 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_gcp_device_test_helpers). + +-compile(export_all). +-compile(nowarn_export_all). + +-define(KEYS, [ + {<<"c1-ec">>, <<"ES256_PEM">>, <<"c1_ec_private.pem">>, <<"c1_ec_public.pem">>}, + {<<"c2-ec-x509">>, <<"ES256_X509_PEM">>, <<"c2_ec_private.pem">>, <<"c2_ec_cert.pem">>}, + {<<"c3-rsa">>, <<"RSA_PEM">>, <<"c3_rsa_private.pem">>, <<"c3_rsa_public.pem">>}, + {<<"c4-rsa-x509">>, <<"RSA_X509_PEM">>, <<"c4_rsa_private.pem">>, <<"c4_rsa_cert.pem">>} +]). + +exported_data() -> + FileName = + filename:join([code:lib_dir(emqx_gcp_device), "test", "data", "gcp-data.json"]), + {ok, Data} = file:read_file(FileName), + jiffy:decode(Data, [return_maps]). + +key(Name) -> + {ok, Data} = file:read_file(key_path(Name)), + Data. + +key_path(Name) -> + filename:join([code:lib_dir(emqx_gcp_device), "test", "data", "keys", Name]). + +clear_data() -> + {atomic, ok} = mria:clear_table(emqx_gcp_device), + ok = emqx_retainer:clean(), + ok. + +keys() -> + ?KEYS. + +client_id(DeviceId) -> + <<"projects/iot-export/locations/europe-west1/registries/my-registry/devices/", + DeviceId/binary>>. + +generate_jws(Payload, KeyType, PrivateKeyName) -> + JWK = jose_jwk:from_pem_file( + emqx_gcp_device_test_helpers:key_path(PrivateKeyName) + ), + Header = #{<<"alg">> => alg(KeyType), <<"typ">> => <<"JWT">>}, + Signed = jose_jwt:sign(JWK, Header, Payload), + {_, JWS} = jose_jws:compact(Signed), + JWS. + +alg(<<"ES256_PEM">>) -> + <<"ES256">>; +alg(<<"ES256_X509_PEM">>) -> + <<"ES256">>; +alg(<<"RSA_PEM">>) -> + <<"RS256">>; +alg(<<"RSA_X509_PEM">>) -> + <<"RS256">>. 
+ +client_info(ClientId, JWT) -> + #{ + listener => 'tcp:default', + protocol => mqtt, + clientid => ClientId, + password => JWT + }. diff --git a/apps/emqx_ldap/src/emqx_ldap.app.src b/apps/emqx_ldap/src/emqx_ldap.app.src index bdc9493c7..7a252dd33 100644 --- a/apps/emqx_ldap/src/emqx_ldap.app.src +++ b/apps/emqx_ldap/src/emqx_ldap.app.src @@ -1,10 +1,11 @@ {application, emqx_ldap, [ {description, "EMQX LDAP Connector"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {applications, [ kernel, stdlib, + eldap, emqx_authn, emqx_authz ]}, diff --git a/apps/emqx_machine/priv/reboot_lists.eterm b/apps/emqx_machine/priv/reboot_lists.eterm index 51c2d2274..0e2ecb799 100644 --- a/apps/emqx_machine/priv/reboot_lists.eterm +++ b/apps/emqx_machine/priv/reboot_lists.eterm @@ -17,21 +17,26 @@ asn1, syntax_tools, ssl, - os_mon, + %% started temporary in emqx to prevent crash vm when permanent. + {os_mon, load}, inets, compiler, runtime_tools, redbug, xmerl, {hocon, load}, - telemetry + telemetry, + {opentelemetry, load}, + {opentelemetry_api, load}, + {opentelemetry_experimental, load}, + {opentelemetry_api_experimental, load}, + {opentelemetry_exporter, load} ], %% must always be of type `load' common_business_apps => [ emqx, emqx_conf, - esasl, observer_cli, tools, @@ -68,6 +73,7 @@ emqx_redis, emqx_mysql, emqx_plugins, + emqx_opentelemetry, quicer, bcrypt, jq, @@ -107,7 +113,8 @@ emqx_eviction_agent, emqx_node_rebalance, emqx_ft, - emqx_ldap + emqx_ldap, + emqx_gcp_device ], %% must always be of type `load' ce_business_apps => diff --git a/apps/emqx_machine/src/emqx_machine.app.src b/apps/emqx_machine/src/emqx_machine.app.src index bdd1db76e..dd1915cfb 100644 --- a/apps/emqx_machine/src/emqx_machine.app.src +++ b/apps/emqx_machine/src/emqx_machine.app.src @@ -3,7 +3,7 @@ {id, "emqx_machine"}, {description, "The EMQX Machine"}, % strict semver, bump manually! - {vsn, "0.2.10"}, + {vsn, "0.2.12"}, {modules, []}, {registered, []}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_machine/src/emqx_machine_boot.erl b/apps/emqx_machine/src/emqx_machine_boot.erl index 82b909b4f..481927765 100644 --- a/apps/emqx_machine/src/emqx_machine_boot.erl +++ b/apps/emqx_machine/src/emqx_machine_boot.erl @@ -30,12 +30,19 @@ -export([sorted_reboot_apps/1, reboot_apps/0]). -endif. -%% these apps are always (re)started by emqx_machine +%% These apps are always (re)started by emqx_machine: -define(BASIC_REBOOT_APPS, [gproc, esockd, ranch, cowboy, emqx]). -%% If any of these applications crash, the entire EMQX node shuts down +%% If any of these applications crash, the entire EMQX node shuts down: -define(BASIC_PERMANENT_APPS, [mria, ekka, esockd, emqx]). +%% These apps should NOT be (re)started automatically: +-define(EXCLUDED_APPS, [system_monitor, observer_cli, jq]). + +%% These apps are optional, they may or may not be present in the +%% release, depending on the build flags: +-define(OPTIONAL_APPS, [bcrypt, observer]). + post_boot() -> ok = ensure_apps_started(), ok = print_vsn(), @@ -148,9 +155,9 @@ basic_reboot_apps() -> ?BASIC_REBOOT_APPS ++ (BusinessApps -- excluded_apps()). excluded_apps() -> - OptionalApps = [bcrypt, jq, observer], - [system_monitor, observer_cli] ++ - [App || App <- OptionalApps, not is_app(App)]. + %% Optional apps _should_ be (re)started automatically, but only + %% when they are found in the release: + ?EXCLUDED_APPS ++ [App || App <- ?OPTIONAL_APPS, not is_app(App)]. 
is_app(Name) -> case application:load(Name) of diff --git a/apps/emqx_management/src/emqx_management.app.src b/apps/emqx_management/src/emqx_management.app.src index 9e22cd375..e1056ab0c 100644 --- a/apps/emqx_management/src/emqx_management.app.src +++ b/apps/emqx_management/src/emqx_management.app.src @@ -2,7 +2,7 @@ {application, emqx_management, [ {description, "EMQX Management API and CLI"}, % strict semver, bump manually! - {vsn, "5.0.26"}, + {vsn, "5.0.28"}, {modules, []}, {registered, [emqx_management_sup]}, {applications, [kernel, stdlib, emqx_plugins, minirest, emqx, emqx_ctl, emqx_bridge_http]}, diff --git a/apps/emqx_management/src/emqx_mgmt.erl b/apps/emqx_management/src/emqx_mgmt.erl index 2f261c0d5..9d4ad8521 100644 --- a/apps/emqx_management/src/emqx_mgmt.erl +++ b/apps/emqx_management/src/emqx_mgmt.erl @@ -107,7 +107,8 @@ %% Common Table API -export([ default_row_limit/0, - vm_stats/0 + vm_stats/0, + vm_stats/1 ]). -elvis([{elvis_style, god_modules, disable}]). @@ -184,22 +185,47 @@ node_info(Nodes) -> stopped_node_info(Node) -> {Node, #{node => Node, node_status => 'stopped', role => core}}. +%% Hide cpu stats if os_check is not supported. vm_stats() -> - Idle = - case cpu_sup:util([detailed]) of - %% Not support for Windows - {_, 0, 0, _} -> 0; - {_Num, _Use, IdleList, _} -> proplists:get_value(idle, IdleList, 0) - end, - RunQueue = erlang:statistics(run_queue), {MemUsedRatio, MemTotal} = get_sys_memory(), - [ - {run_queue, RunQueue}, - {cpu_idle, Idle}, - {cpu_use, 100 - Idle}, - {total_memory, MemTotal}, - {used_memory, erlang:round(MemTotal * MemUsedRatio)} - ]. + cpu_stats() ++ + [ + {run_queue, vm_stats('run.queue')}, + {total_memory, MemTotal}, + {used_memory, erlang:round(MemTotal * MemUsedRatio)} + ]. + +cpu_stats() -> + case emqx_os_mon:is_os_check_supported() of + false -> + []; + true -> + Idle = vm_stats('cpu.idle'), + [ + {cpu_idle, Idle}, + {cpu_use, 100 - Idle} + ] + end. + +vm_stats('cpu.idle') -> + case emqx_vm:cpu_util([detailed]) of + {_Num, _Use, List, _} when is_list(List) -> proplists:get_value(idle, List, 0); + %% return {all, 0, 0, []} when cpu_sup is not started + _ -> 0 + end; +vm_stats('cpu.use') -> + case vm_stats('cpu.idle') of + 0 -> 0; + Idle -> 100 - Idle + end; +vm_stats('total.memory') -> + {_, MemTotal} = get_sys_memory(), + MemTotal; +vm_stats('used.memory') -> + {MemUsedRatio, MemTotal} = get_sys_memory(), + erlang:round(MemTotal * MemUsedRatio); +vm_stats('run.queue') -> + erlang:statistics(run_queue). %%-------------------------------------------------------------------- %% Brokers @@ -218,7 +244,7 @@ broker_info() -> Info#{node => node(), otp_release => otp_rel(), node_status => 'running'}. convert_broker_info({uptime, Uptime}, M) -> - M#{uptime => emqx_datetime:human_readable_duration_string(Uptime)}; + M#{uptime => emqx_utils_calendar:human_readable_duration_string(Uptime)}; convert_broker_info({K, V}, M) -> M#{K => iolist_to_binary(V)}. diff --git a/apps/emqx_management/src/emqx_mgmt_api.erl b/apps/emqx_management/src/emqx_mgmt_api.erl index dd82cffec..0acffbe4a 100644 --- a/apps/emqx_management/src/emqx_mgmt_api.erl +++ b/apps/emqx_management/src/emqx_mgmt_api.erl @@ -460,7 +460,11 @@ finalize_query(Result = #{overflow := Overflow}, QueryState = #{complete := Comp maybe_accumulate_totals(Result#{hasnext => HasNext}, QueryState). 
maybe_accumulate_totals(Result, #{total := TotalAcc}) -> - QueryTotal = maps:fold(fun(_Node, T, N) -> N + T end, 0, TotalAcc), + AccFun = fun + (_Node, NodeTotal, AccIn) when is_number(NodeTotal) -> AccIn + NodeTotal; + (_Node, _, AccIn) -> AccIn + end, + QueryTotal = maps:fold(AccFun, 0, TotalAcc), Result#{total => QueryTotal}; maybe_accumulate_totals(Result, _QueryState) -> Result. diff --git a/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl b/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl index 432734688..78bbef540 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl @@ -127,7 +127,7 @@ fields(app) -> )}, {expired_at, hoconsc:mk( - hoconsc:union([infinity, emqx_datetime:epoch_second()]), + hoconsc:union([infinity, emqx_utils_calendar:epoch_second()]), #{ desc => "No longer valid datetime", example => <<"2021-12-05T02:01:34.186Z">>, @@ -137,7 +137,7 @@ fields(app) -> )}, {created_at, hoconsc:mk( - emqx_datetime:epoch_second(), + emqx_utils_calendar:epoch_second(), #{ desc => "ApiKey create datetime", example => <<"2021-12-01T00:00:00.000Z">> diff --git a/apps/emqx_management/src/emqx_mgmt_api_banned.erl b/apps/emqx_management/src/emqx_mgmt_api_banned.erl index 508cf7d07..6c1d407b5 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_banned.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_banned.erl @@ -79,6 +79,13 @@ schema("/banned") -> ?DESC(create_banned_api_response400) ) } + }, + delete => #{ + description => ?DESC(clear_banned_api), + tags => ?TAGS, + parameters => [], + 'requestBody' => [], + responses => #{204 => <<"No Content">>} } }; schema("/banned/:as/:who") -> @@ -140,13 +147,13 @@ fields(ban) -> example => <<"Too many requests">> })}, {at, - hoconsc:mk(emqx_datetime:epoch_second(), #{ + hoconsc:mk(emqx_utils_calendar:epoch_second(), #{ desc => ?DESC(at), required => false, example => <<"2021-10-25T21:48:47+08:00">> })}, {until, - hoconsc:mk(emqx_datetime:epoch_second(), #{ + hoconsc:mk(emqx_utils_calendar:epoch_second(), #{ desc => ?DESC(until), required => false, example => <<"2021-10-25T21:53:47+08:00">> @@ -168,7 +175,10 @@ banned(post, #{body := Body}) -> OldBannedFormat = emqx_utils_json:encode(format(Old)), {400, 'ALREADY_EXISTS', OldBannedFormat} end - end. + end; +banned(delete, _) -> + emqx_banned:clear(), + {204}. 
delete_banned(delete, #{bindings := Params}) -> case emqx_banned:look_up(Params) of diff --git a/apps/emqx_management/src/emqx_mgmt_api_clients.erl b/apps/emqx_management/src/emqx_mgmt_api_clients.erl index 2b47fdb11..18ac65ae6 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_clients.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_clients.erl @@ -161,7 +161,7 @@ schema("/clients") -> desc => <<"Fuzzy search `username` as substring">> })}, {gte_created_at, - hoconsc:mk(emqx_datetime:epoch_millisecond(), #{ + hoconsc:mk(emqx_utils_calendar:epoch_millisecond(), #{ in => query, required => false, desc => @@ -169,7 +169,7 @@ schema("/clients") -> " than or equal method, rfc3339 or timestamp(millisecond)">> })}, {lte_created_at, - hoconsc:mk(emqx_datetime:epoch_millisecond(), #{ + hoconsc:mk(emqx_utils_calendar:epoch_millisecond(), #{ in => query, required => false, desc => @@ -177,7 +177,7 @@ schema("/clients") -> " than or equal method, rfc3339 or timestamp(millisecond)">> })}, {gte_connected_at, - hoconsc:mk(emqx_datetime:epoch_millisecond(), #{ + hoconsc:mk(emqx_utils_calendar:epoch_millisecond(), #{ in => query, required => false, desc => << @@ -186,7 +186,7 @@ schema("/clients") -> >> })}, {lte_connected_at, - hoconsc:mk(emqx_datetime:epoch_millisecond(), #{ + hoconsc:mk(emqx_utils_calendar:epoch_millisecond(), #{ in => query, required => false, desc => << @@ -399,16 +399,16 @@ fields(client) -> {connected, hoconsc:mk(boolean(), #{desc => <<"Whether the client is connected">>})}, {connected_at, hoconsc:mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), #{desc => <<"Client connection time, rfc3339 or timestamp(millisecond)">>} )}, {created_at, hoconsc:mk( - emqx_datetime:epoch_millisecond(), + emqx_utils_calendar:epoch_millisecond(), #{desc => <<"Session creation time, rfc3339 or timestamp(millisecond)">>} )}, {disconnected_at, - hoconsc:mk(emqx_datetime:epoch_millisecond(), #{ + hoconsc:mk(emqx_utils_calendar:epoch_millisecond(), #{ desc => << "Client offline time." 
@@ -950,7 +950,7 @@ result_format_time_fun(Key, NClientInfoMap) ->
     case NClientInfoMap of
         #{Key := TimeStamp} ->
             NClientInfoMap#{
-                Key => emqx_datetime:epoch_to_rfc3339(TimeStamp)
+                Key => emqx_utils_calendar:epoch_to_rfc3339(TimeStamp)
             };
         #{} ->
             NClientInfoMap
diff --git a/apps/emqx_management/src/emqx_mgmt_api_trace.erl b/apps/emqx_management/src/emqx_mgmt_api_trace.erl
index 27789fff9..17adf7460 100644
--- a/apps/emqx_management/src/emqx_mgmt_api_trace.erl
+++ b/apps/emqx_management/src/emqx_mgmt_api_trace.erl
@@ -281,7 +281,7 @@ fields(trace) ->
                 })},
         {start_at,
             hoconsc:mk(
-                emqx_datetime:epoch_second(),
+                emqx_utils_calendar:epoch_second(),
                 #{
                     description => ?DESC(time_format),
                     required => false,
@@ -290,7 +290,7 @@ fields(trace) ->
             )},
         {end_at,
             hoconsc:mk(
-                emqx_datetime:epoch_second(),
+                emqx_utils_calendar:epoch_second(),
                 #{
                     description => ?DESC(time_format),
                     required => false,
@@ -410,8 +410,8 @@ trace(get, _Params) ->
                 Trace0#{
                     log_size => LogSize,
                     Type => iolist_to_binary(Filter),
-                    start_at => list_to_binary(calendar:system_time_to_rfc3339(Start)),
-                    end_at => list_to_binary(calendar:system_time_to_rfc3339(End)),
+                    start_at => emqx_utils_calendar:epoch_to_rfc3339(Start, second),
+                    end_at => emqx_utils_calendar:epoch_to_rfc3339(End, second),
                     status => status(Enable, Start, End, Now)
                 }
             end,
@@ -468,8 +468,8 @@ format_trace(Trace0) ->
     Trace2#{
         log_size => LogSize,
         Type => iolist_to_binary(Filter),
-        start_at => list_to_binary(calendar:system_time_to_rfc3339(Start)),
-        end_at => list_to_binary(calendar:system_time_to_rfc3339(End)),
+        start_at => emqx_utils_calendar:epoch_to_rfc3339(Start, second),
+        end_at => emqx_utils_calendar:epoch_to_rfc3339(End, second),
         status => status(Enable, Start, End, Now)
     }.
diff --git a/apps/emqx_management/src/emqx_mgmt_auth.erl b/apps/emqx_management/src/emqx_mgmt_auth.erl
index 4fe47cf93..ace4c155a 100644
--- a/apps/emqx_management/src/emqx_mgmt_auth.erl
+++ b/apps/emqx_management/src/emqx_mgmt_auth.erl
@@ -142,11 +142,11 @@ format(App = #{expired_at := ExpiredAt0, created_at := CreateAt}) ->
     ExpiredAt =
         case ExpiredAt0 of
             infinity -> <<"infinity">>;
-            _ -> list_to_binary(calendar:system_time_to_rfc3339(ExpiredAt0))
+            _ -> emqx_utils_calendar:epoch_to_rfc3339(ExpiredAt0, second)
         end,
     App#{
         expired_at => ExpiredAt,
-        created_at => list_to_binary(calendar:system_time_to_rfc3339(CreateAt))
+        created_at => emqx_utils_calendar:epoch_to_rfc3339(CreateAt, second)
     }.
 
 list() ->
diff --git a/apps/emqx_management/src/emqx_mgmt_cli.erl b/apps/emqx_management/src/emqx_mgmt_cli.erl
index 9692441a6..aeed5b922 100644
--- a/apps/emqx_management/src/emqx_mgmt_cli.erl
+++ b/apps/emqx_management/src/emqx_mgmt_cli.erl
@@ -87,7 +87,7 @@ broker([]) ->
     Funs = [sysdescr, version, datetime],
     [emqx_ctl:print("~-10s: ~ts~n", [Fun, emqx_sys:Fun()]) || Fun <- Funs],
     emqx_ctl:print("~-10s: ~ts~n", [
-        uptime, emqx_datetime:human_readable_duration_string(emqx_sys:uptime())
+        uptime, emqx_utils_calendar:human_readable_duration_string(emqx_sys:uptime())
    ]);
 broker(["stats"]) ->
    [
diff --git a/apps/emqx_management/test/emqx_mgmt_api_banned_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_banned_SUITE.erl
index 9f1b560f7..3167a5621 100644
--- a/apps/emqx_management/test/emqx_mgmt_api_banned_SUITE.erl
+++ b/apps/emqx_management/test/emqx_mgmt_api_banned_SUITE.erl
@@ -157,6 +157,30 @@ t_delete(_Config) ->
     ),
     ok.
+t_clear(_Config) -> + Now = erlang:system_time(second), + At = emqx_banned:to_rfc3339(Now), + Until = emqx_banned:to_rfc3339(Now + 3), + Who = <<"TestClient-"/utf8>>, + By = <<"banned suite 中"/utf8>>, + Reason = <<"test测试"/utf8>>, + As = <<"clientid">>, + Banned = #{ + as => clientid, + who => Who, + by => By, + reason => Reason, + at => At, + until => Until + }, + {ok, _} = create_banned(Banned), + ?assertMatch({ok, _}, clear_banned()), + ?assertMatch( + {error, {"HTTP/1.1", 404, "Not Found"}}, + delete_banned(binary_to_list(As), binary_to_list(Who)) + ), + ok. + list_banned() -> Path = emqx_mgmt_api_test_util:api_path(["banned"]), case emqx_mgmt_api_test_util:request_api(get, Path) of @@ -176,5 +200,9 @@ delete_banned(As, Who) -> DeletePath = emqx_mgmt_api_test_util:api_path(["banned", As, Who]), emqx_mgmt_api_test_util:request_api(delete, DeletePath). +clear_banned() -> + ClearPath = emqx_mgmt_api_test_util:api_path(["banned"]), + emqx_mgmt_api_test_util:request_api(delete, ClearPath). + to_rfc3339(Sec) -> list_to_binary(calendar:system_time_to_rfc3339(Sec)). diff --git a/apps/emqx_management/test/emqx_mgmt_api_clients_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_clients_SUITE.erl index efdaa9c96..f428009cb 100644 --- a/apps/emqx_management/test/emqx_mgmt_api_clients_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_api_clients_SUITE.erl @@ -260,7 +260,7 @@ t_query_clients_with_time(_) -> %% Do not uri_encode `=` to `%3D` Rfc3339String = emqx_http_lib:uri_encode( binary:bin_to_list( - emqx_datetime:epoch_to_rfc3339(NowTimeStampInt) + emqx_utils_calendar:epoch_to_rfc3339(NowTimeStampInt) ) ), TimeStampString = emqx_http_lib:uri_encode(integer_to_list(NowTimeStampInt)), diff --git a/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl index 0102eb56c..8f9a4a5ca 100644 --- a/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl @@ -269,7 +269,6 @@ create_trace(Name, ClientId, Start) -> ). t_stream_log(_Config) -> - application:set_env(emqx, allow_anonymous, true), emqx_trace:clear(), load(), ClientId = <<"client-stream">>, diff --git a/apps/emqx_modules/src/emqx_delayed.erl b/apps/emqx_modules/src/emqx_delayed.erl index 32219a139..559648bdd 100644 --- a/apps/emqx_modules/src/emqx_delayed.erl +++ b/apps/emqx_modules/src/emqx_delayed.erl @@ -208,8 +208,8 @@ format_delayed( }, WithPayload ) -> - PublishTime = to_rfc3339(PublishTimeStamp div 1000), - ExpectTime = to_rfc3339(ExpectTimeStamp div 1000), + PublishTime = emqx_utils_calendar:epoch_to_rfc3339(PublishTimeStamp), + ExpectTime = emqx_utils_calendar:epoch_to_rfc3339(ExpectTimeStamp), RemainingTime = ExpectTimeStamp - ?NOW, Result = #{ msgid => emqx_guid:to_hexstr(Id), @@ -230,9 +230,6 @@ format_delayed( Result end. -to_rfc3339(Timestamp) -> - list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, second}])). - -spec get_delayed_message(binary()) -> with_id_return(map()). 
get_delayed_message(Id) -> case ets:select(?TAB, ?QUERY_MS(Id)) of diff --git a/apps/emqx_modules/src/emqx_modules.app.src b/apps/emqx_modules/src/emqx_modules.app.src index b7a9d7f4d..cd2f6c8b9 100644 --- a/apps/emqx_modules/src/emqx_modules.app.src +++ b/apps/emqx_modules/src/emqx_modules.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_modules, [ {description, "EMQX Modules"}, - {vsn, "5.0.19"}, + {vsn, "5.0.20"}, {modules, []}, {applications, [kernel, stdlib, emqx, emqx_ctl]}, {mod, {emqx_modules_app, []}}, diff --git a/apps/emqx_modules/src/emqx_topic_metrics.erl b/apps/emqx_modules/src/emqx_topic_metrics.erl index efe309b9e..987b0b69b 100644 --- a/apps/emqx_modules/src/emqx_topic_metrics.erl +++ b/apps/emqx_modules/src/emqx_topic_metrics.erl @@ -295,7 +295,7 @@ terminate(_Reason, _State) -> reset_topic({Topic, Data}, Speeds) -> CRef = maps:get(counter_ref, Data), ok = reset_counter(CRef), - ResetTime = emqx_rule_funcs:now_rfc3339(), + ResetTime = emqx_utils_calendar:now_to_rfc3339(), true = ets:insert(?TAB, {Topic, Data#{reset_time => ResetTime}}), Fun = fun(Metric, CurrentSpeeds) -> diff --git a/apps/emqx_modules/src/emqx_topic_metrics_api.erl b/apps/emqx_modules/src/emqx_topic_metrics_api.erl index 50b586228..49b3071e0 100644 --- a/apps/emqx_modules/src/emqx_topic_metrics_api.erl +++ b/apps/emqx_modules/src/emqx_topic_metrics_api.erl @@ -183,7 +183,7 @@ fields(topic_metrics) -> )}, {create_time, mk( - emqx_datetime:epoch_second(), + emqx_utils_calendar:epoch_second(), #{ desc => ?DESC(create_time), required => true, @@ -192,7 +192,7 @@ fields(topic_metrics) -> )}, {reset_time, mk( - emqx_datetime:epoch_second(), + emqx_utils_calendar:epoch_second(), #{ desc => ?DESC(reset_time), required => false, diff --git a/apps/emqx_mongodb/rebar.config b/apps/emqx_mongodb/rebar.config index cfd7dc9be..577dee8b8 100644 --- a/apps/emqx_mongodb/rebar.config +++ b/apps/emqx_mongodb/rebar.config @@ -3,5 +3,5 @@ {erl_opts, [debug_info]}. {deps, [ {emqx_connector, {path, "../../apps/emqx_connector"}} , {emqx_resource, {path, "../../apps/emqx_resource"}} - , {mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.20"}}} + , {mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.21"}}} ]}. 
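Note on the emqx_datetime to emqx_utils_calendar migration applied across the management, modules and retainer apps in this change set: the helper is called in two shapes. A minimal sketch of both, where the epoch units are assumptions read from the call sites above (millisecond for the 1-arity form, an explicit unit atom for the 2-arity form), not from the module itself:

%% Sketch only: both call shapes appear verbatim in the hunks above.
%% 1-arity: millisecond epoch, as in emqx_delayed:format_delayed/2 above.
PublishAt = emqx_utils_calendar:epoch_to_rfc3339(erlang:system_time(millisecond)),
%% 2-arity: epoch plus an explicit unit, as in emqx_mgmt_auth:format/1 above.
ExpiredAt = emqx_utils_calendar:epoch_to_rfc3339(erlang:system_time(second), second).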
diff --git a/apps/emqx_mongodb/src/emqx_mongodb.app.src b/apps/emqx_mongodb/src/emqx_mongodb.app.src index 00dcb0cfb..eb846a7ab 100644 --- a/apps/emqx_mongodb/src/emqx_mongodb.app.src +++ b/apps/emqx_mongodb/src/emqx_mongodb.app.src @@ -1,6 +1,6 @@ {application, emqx_mongodb, [ {description, "EMQX MongoDB Connector"}, - {vsn, "0.1.1"}, + {vsn, "0.1.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_mongodb/src/emqx_mongodb.erl b/apps/emqx_mongodb/src/emqx_mongodb.erl index dfa732a7b..77161911a 100644 --- a/apps/emqx_mongodb/src/emqx_mongodb.erl +++ b/apps/emqx_mongodb/src/emqx_mongodb.erl @@ -141,6 +141,11 @@ mongo_fields() -> {pool_size, fun emqx_connector_schema_lib:pool_size/1}, {username, fun emqx_connector_schema_lib:username/1}, {password, fun emqx_connector_schema_lib:password/1}, + {use_legacy_protocol, + hoconsc:mk(hoconsc:enum([auto, true, false]), #{ + default => auto, + desc => ?DESC("use_legacy_protocol") + })}, {auth_source, #{ type => binary(), required => false, @@ -429,6 +434,8 @@ init_worker_options([{w_mode, V} | R], Acc) -> init_worker_options(R, [{w_mode, V} | Acc]); init_worker_options([{r_mode, V} | R], Acc) -> init_worker_options(R, [{r_mode, V} | Acc]); +init_worker_options([{use_legacy_protocol, V} | R], Acc) -> + init_worker_options(R, [{use_legacy_protocol, V} | Acc]); init_worker_options([_ | R], Acc) -> init_worker_options(R, Acc); init_worker_options([], Acc) -> diff --git a/apps/emqx_opentelemetry/README.md b/apps/emqx_opentelemetry/README.md new file mode 100644 index 000000000..d5d0b97ea --- /dev/null +++ b/apps/emqx_opentelemetry/README.md @@ -0,0 +1,4 @@ +emqx_opentelemetry +===== + +OpenTelemetry metric log trace framework for EMQX. diff --git a/apps/emqx_opentelemetry/etc/emqx_otel.conf b/apps/emqx_opentelemetry/etc/emqx_otel.conf new file mode 100644 index 000000000..e69de29bb diff --git a/apps/emqx_opentelemetry/rebar.config b/apps/emqx_opentelemetry/rebar.config new file mode 100644 index 000000000..7086a2f29 --- /dev/null +++ b/apps/emqx_opentelemetry/rebar.config @@ -0,0 +1,29 @@ +%% -*- mode: erlang -*- + +{deps, [ + {emqx, {path, "../emqx"}} +]}. + +{edoc_opts, [{preprocess, true}]}. +{erl_opts, [ + warn_unused_vars, + warn_shadow_vars, + warn_unused_import, + warn_obsolete_guard, + debug_info, + {parse_transform} +]}. + +{xref_checks, [ + undefined_function_calls, + undefined_functions, + locals_not_used, + deprecated_function_calls, + warnings_as_errors, + deprecated_functions +]}. +{cover_enabled, true}. +{cover_opts, [verbose]}. +{cover_export_enabled, true}. + +{project_plugins, [erlfmt]}. diff --git a/apps/emqx_opentelemetry/src/emqx_opentelemetry.app.src b/apps/emqx_opentelemetry/src/emqx_opentelemetry.app.src new file mode 100644 index 000000000..7202b24c8 --- /dev/null +++ b/apps/emqx_opentelemetry/src/emqx_opentelemetry.app.src @@ -0,0 +1,15 @@ +{application, emqx_opentelemetry, [ + {description, "OpenTelemetry for EMQX Broker"}, + {vsn, "0.1.1"}, + {registered, []}, + {mod, {emqx_otel_app, []}}, + {applications, [kernel, stdlib, emqx]}, + {env, []}, + {modules, []}, + {licenses, ["Apache 2.0"]}, + {maintainers, ["EMQX Team "]}, + {links, [ + {"Homepage", "https://emqx.io/"}, + {"Github", "https://github.com/emqx/emqx"} + ]} +]}. 
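The `use_legacy_protocol` field added to the MongoDB connector schema above accepts auto | true | false and is passed straight through to the driver worker options by the new init_worker_options clause. A minimal sketch of a connector config map carrying it; every field except use_legacy_protocol is an assumption about the surrounding schema fields, not something this diff defines:

%% Sketch only: mongo_type, server, database and pool_size are assumed field
%% names; use_legacy_protocol and its enum values come from the schema change above.
Conf = #{
    mongo_type => single,
    server => <<"127.0.0.1:27017">>,
    database => <<"mqtt">>,
    pool_size => 8,
    %% auto | true | false; `auto` presumably lets the driver pick per server.
    use_legacy_protocol => auto
}.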
diff --git a/apps/emqx_opentelemetry/src/emqx_otel.erl b/apps/emqx_opentelemetry/src/emqx_otel.erl
new file mode 100644
index 000000000..0b0e16cab
--- /dev/null
+++ b/apps/emqx_opentelemetry/src/emqx_otel.erl
@@ -0,0 +1,207 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_otel).
+-include_lib("emqx/include/logger.hrl").
+
+-export([start_link/1]).
+-export([get_cluster_gauge/1, get_stats_gauge/1, get_vm_gauge/1, get_metric_counter/1]).
+-export([init/1, handle_continue/2, handle_call/3, handle_cast/2, handle_info/2, terminate/2]).
+
+start_link(Conf) ->
+    gen_server:start_link({local, ?MODULE}, ?MODULE, Conf, []).
+
+init(Conf) ->
+    erlang:process_flag(trap_exit, true),
+    {ok, #{}, {continue, {setup, Conf}}}.
+
+handle_continue({setup, Conf}, State) ->
+    setup(Conf),
+    {noreply, State, hibernate}.
+
+handle_call(_Msg, _From, State) ->
+    {reply, ok, State}.
+
+handle_cast(_Msg, State) ->
+    {noreply, State}.
+
+handle_info(_Msg, State) ->
+    {noreply, State}.
+
+terminate(_Reason, _State) ->
+    cleanup(),
+    ok.
+
+setup(Conf = #{enable := true}) ->
+    ensure_apps(Conf),
+    create_metric_views();
+setup(_Conf) ->
+    cleanup(),
+    ok.
+
+ensure_apps(Conf) ->
+    #{exporter := #{interval := ExporterInterval}} = Conf,
+    {ok, _} = application:ensure_all_started(opentelemetry_exporter),
+    _ = application:stop(opentelemetry_experimental),
+    ok = application:set_env(
+        opentelemetry_experimental,
+        readers,
+        [
+            #{
+                module => otel_metric_reader,
+                config => #{
+                    exporter => {opentelemetry_exporter, #{}},
+                    export_interval_ms => ExporterInterval
+                }
+            }
+        ]
+    ),
+    {ok, _} = application:ensure_all_started(opentelemetry_experimental),
+    {ok, _} = application:ensure_all_started(opentelemetry_api_experimental),
+    ok.
+
+cleanup() ->
+    _ = application:stop(opentelemetry_experimental),
+    _ = application:stop(opentelemetry_api_experimental),
+    _ = application:stop(opentelemetry_exporter),
+    ok.
+
+create_metric_views() ->
+    Meter = opentelemetry_experimental:get_meter(),
+    StatsGauge = emqx_stats:getstats(),
+    create_gauge(Meter, StatsGauge, fun ?MODULE:get_stats_gauge/1),
+    VmGauge = lists:map(fun({K, V}) -> {normalize_name(K), V} end, emqx_mgmt:vm_stats()),
+    create_gauge(Meter, VmGauge, fun ?MODULE:get_vm_gauge/1),
+    ClusterGauge = [{'node.running', 0}, {'node.stopped', 0}],
+    create_gauge(Meter, ClusterGauge, fun ?MODULE:get_cluster_gauge/1),
+    Metrics = lists:map(fun({K, V}) -> {K, V, unit(K)} end, emqx_metrics:all()),
+    create_counter(Meter, Metrics, fun ?MODULE:get_metric_counter/1),
+    ok.
+
+unit(K) ->
+    case lists:member(K, bytes_metrics()) of
+        true -> kb;
+        false -> '1'
+    end.
+ +bytes_metrics() -> + [ + 'bytes.received', + 'bytes.sent', + 'packets.received', + 'packets.sent', + 'packets.connect', + 'packets.connack.sent', + 'packets.connack.error', + 'packets.connack.auth_error', + 'packets.publish.received', + 'packets.publish.sent', + 'packets.publish.inuse', + 'packets.publish.error', + 'packets.publish.auth_error', + 'packets.publish.dropped', + 'packets.puback.received', + 'packets.puback.sent', + 'packets.puback.inuse', + 'packets.puback.missed', + 'packets.pubrec.received', + 'packets.pubrec.sent', + 'packets.pubrec.inuse', + 'packets.pubrec.missed', + 'packets.pubrel.received', + 'packets.pubrel.sent', + 'packets.pubrel.missed', + 'packets.pubcomp.received', + 'packets.pubcomp.sent', + 'packets.pubcomp.inuse', + 'packets.pubcomp.missed', + 'packets.subscribe.received', + 'packets.subscribe.error', + 'packets.subscribe.auth_error', + 'packets.suback.sent', + 'packets.unsubscribe.received', + 'packets.unsubscribe.error', + 'packets.unsuback.sent', + 'packets.pingreq.received', + 'packets.pingresp.sent', + 'packets.disconnect.received', + 'packets.disconnect.sent', + 'packets.auth.received', + 'packets.auth.sent' + ]. + +get_stats_gauge(Name) -> + [{emqx_stats:getstat(Name), #{}}]. + +get_vm_gauge(Name) -> + [{emqx_mgmt:vm_stats(Name), #{}}]. + +get_cluster_gauge('node.running') -> + [{length(emqx:cluster_nodes(running)), #{}}]; +get_cluster_gauge('node.stopped') -> + [{length(emqx:cluster_nodes(stopped)), #{}}]. + +get_metric_counter(Name) -> + [{emqx_metrics:val(Name), #{}}]. + +create_gauge(Meter, Names, CallBack) -> + lists:foreach( + fun({Name, _}) -> + true = otel_meter_server:add_view( + #{instrument_name => Name}, + #{aggregation_module => otel_aggregation_last_value} + ), + otel_meter:create_observable_gauge( + Meter, + Name, + CallBack, + Name, + #{ + description => iolist_to_binary([ + <<"observable ">>, atom_to_binary(Name), <<" gauge">> + ]), + unit => '1' + } + ) + end, + Names + ). + +create_counter(Meter, Counters, CallBack) -> + lists:foreach( + fun({Name, _, Unit}) -> + true = otel_meter_server:add_view( + #{instrument_name => Name}, + #{aggregation_module => otel_aggregation_sum} + ), + otel_meter:create_observable_counter( + Meter, + Name, + CallBack, + Name, + #{ + description => iolist_to_binary([ + <<"observable ">>, atom_to_binary(Name), <<" counter">> + ]), + unit => Unit + } + ) + end, + Counters + ). + +normalize_name(Name) -> + list_to_existing_atom(lists:flatten(string:replace(atom_to_list(Name), "_", ".", all))). diff --git a/apps/emqx_opentelemetry/src/emqx_otel_api.erl b/apps/emqx_opentelemetry/src/emqx_otel_api.erl new file mode 100644 index 000000000..7478859f8 --- /dev/null +++ b/apps/emqx_opentelemetry/src/emqx_otel_api.erl @@ -0,0 +1,112 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. 
+%%--------------------------------------------------------------------
+
+-module(emqx_otel_api).
+
+-behaviour(minirest_api).
+
+-include_lib("hocon/include/hoconsc.hrl").
+-include_lib("emqx/include/http_api.hrl").
+
+-import(hoconsc, [ref/2]).
+
+-export([
+    api_spec/0,
+    paths/0,
+    schema/1
+]).
+
+-export([config/2]).
+
+-define(TAGS, [<<"Monitor">>]).
+
+api_spec() ->
+    emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
+
+paths() ->
+    [
+        "/opentelemetry"
+    ].
+
+schema("/opentelemetry") ->
+    #{
+        'operationId' => config,
+        get =>
+            #{
+                description => "Get opentelemetry configuration",
+                tags => ?TAGS,
+                responses =>
+                    #{200 => otel_config_schema()}
+            },
+        put =>
+            #{
+                description => "Update opentelemetry configuration",
+                tags => ?TAGS,
+                'requestBody' => otel_config_schema(),
+                responses =>
+                    #{
+                        200 => otel_config_schema(),
+                        400 =>
+                            emqx_dashboard_swagger:error_codes(
+                                [?BAD_REQUEST], <<"Update Config Failed">>
+                            )
+                    }
+            }
+    }.
+
+%%--------------------------------------------------------------------
+%% API Handler funcs
+%%--------------------------------------------------------------------
+
+config(get, _Params) ->
+    {200, get_raw()};
+config(put, #{body := Body}) ->
+    case emqx_otel_config:update(Body) of
+        {ok, NewConfig} ->
+            {200, NewConfig};
+        {error, Reason} ->
+            Message = list_to_binary(io_lib:format("Update config failed ~p", [Reason])),
+            {400, ?BAD_REQUEST, Message}
+    end.
+
+%%--------------------------------------------------------------------
+%% Internal funcs
+%%--------------------------------------------------------------------
+
+get_raw() ->
+    Path = <<"opentelemetry">>,
+    #{Path := Conf} =
+        emqx_config:fill_defaults(
+            #{Path => emqx_conf:get_raw([Path])},
+            #{obfuscate_sensitive_values => true}
+        ),
+    Conf.
+
+otel_config_schema() ->
+    emqx_dashboard_swagger:schema_with_example(
+        ref(emqx_otel_schema, "opentelemetry"),
+        otel_config_example()
+    ).
+
+otel_config_example() ->
+    #{
+        enable => true,
+        exporter =>
+            #{
+                endpoint => "http://localhost:4317",
+                interval => "10s"
+            }
+    }.
diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_correction.erl b/apps/emqx_opentelemetry/src/emqx_otel_app.erl
similarity index 51%
rename from apps/emqx/src/emqx_limiter/src/emqx_limiter_correction.erl
rename to apps/emqx_opentelemetry/src/emqx_otel_app.erl
index 013c23e61..f028a000a 100644
--- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_correction.erl
+++ b/apps/emqx_opentelemetry/src/emqx_otel_app.erl
@@ -1,5 +1,5 @@
 %%--------------------------------------------------------------------
-%% Copyright (c) 2019-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
 %%
 %% Licensed under the Apache License, Version 2.0 (the "License");
 %% you may not use this file except in compliance with the License.
@@ -14,23 +14,16 @@
 %% limitations under the License.
 %%--------------------------------------------------------------------
 
--module(emqx_limiter_correction).
+-module(emqx_otel_app).
 
-%% API
--export([add/2]).
+-behaviour(application).
 
--type correction_value() :: #{
-    correction := emqx_limiter_decimal:zero_or_float(),
-    any() => any()
-}.
+-export([start/2, stop/1]).
 
--export_type([correction_value/0]).
+start(_StartType, _StartArgs) ->
+    emqx_otel_config:add_handler(),
+    emqx_otel_sup:start_link().
-%%-------------------------------------------------------------------- -%%% API -%%-------------------------------------------------------------------- --spec add(number(), correction_value()) -> {integer(), correction_value()}. -add(Inc, #{correction := Correction} = Data) -> - FixedInc = Inc + Correction, - IntInc = erlang:floor(FixedInc), - {IntInc, Data#{correction := FixedInc - IntInc}}. +stop(_State) -> + emqx_otel_config:remove_handler(), + ok. diff --git a/apps/emqx_opentelemetry/src/emqx_otel_config.erl b/apps/emqx_opentelemetry/src/emqx_otel_config.erl new file mode 100644 index 000000000..3df535890 --- /dev/null +++ b/apps/emqx_opentelemetry/src/emqx_otel_config.erl @@ -0,0 +1,58 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-module(emqx_otel_config). + +-behaviour(emqx_config_handler). + +-define(OPTL, [opentelemetry]). + +-export([add_handler/0, remove_handler/0]). +-export([post_config_update/5]). +-export([update/1]). + +update(Config) -> + case + emqx_conf:update( + ?OPTL, + Config, + #{rawconf_with_defaults => true, override_to => cluster} + ) + of + {ok, #{raw_config := NewConfigRows}} -> + {ok, NewConfigRows}; + {error, Reason} -> + {error, Reason} + end. + +add_handler() -> + ok = emqx_config_handler:add_handler(?OPTL, ?MODULE), + ok. + +remove_handler() -> + ok = emqx_config_handler:remove_handler(?OPTL), + ok. + +post_config_update(?OPTL, _Req, New, _Old, AppEnvs) -> + application:set_env(AppEnvs), + ensure_otel(New); +post_config_update(_ConfPath, _Req, _NewConf, _OldConf, _AppEnvs) -> + ok. + +ensure_otel(#{enable := true} = Conf) -> + _ = emqx_otel_sup:stop_otel(), + emqx_otel_sup:start_otel(Conf); +ensure_otel(#{enable := false}) -> + emqx_otel_sup:stop_otel(). diff --git a/apps/emqx_opentelemetry/src/emqx_otel_schema.erl b/apps/emqx_opentelemetry/src/emqx_otel_schema.erl new file mode 100644 index 000000000..1479009a2 --- /dev/null +++ b/apps/emqx_opentelemetry/src/emqx_otel_schema.erl @@ -0,0 +1,82 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-module(emqx_otel_schema). 
+
+-include_lib("hocon/include/hoconsc.hrl").
+
+-export([
+    roots/0,
+    fields/1,
+    namespace/0,
+    desc/1
+]).
+
+namespace() -> opentelemetry.
+roots() -> ["opentelemetry"].
+
+fields("opentelemetry") ->
+    [
+        {exporter,
+            ?HOCON(
+                ?R_REF("exporter"),
+                #{desc => ?DESC(exporter)}
+            )},
+        {enable,
+            ?HOCON(
+                boolean(),
+                #{
+                    default => false,
+                    required => true,
+                    desc => ?DESC(enable)
+                }
+            )}
+    ];
+fields("exporter") ->
+    [
+        {"protocol",
+            ?HOCON(
+                %% http_protobuf is not supported for metrics yet.
+                ?ENUM([grpc]),
+                #{
+                    mapping => "opentelemetry_exporter.otlp_protocol",
+                    desc => ?DESC(protocol),
+                    default => grpc,
+                    importance => ?IMPORTANCE_HIDDEN
+                }
+            )},
+        {"endpoint",
+            ?HOCON(
+                emqx_schema:url(),
+                #{
+                    mapping => "opentelemetry_exporter.otlp_endpoint",
+                    default => "http://localhost:4317",
+                    desc => ?DESC(endpoint)
+                }
+            )},
+        {"interval",
+            ?HOCON(
+                emqx_schema:timeout_duration_ms(),
+                #{
+                    default => <<"10s">>,
+                    required => true,
+                    desc => ?DESC(interval)
+                }
+            )}
+    ].
+
+desc("opentelemetry") -> ?DESC(opentelemetry);
+desc("exporter") -> ?DESC(exporter);
+desc(_) -> undefined.
diff --git a/apps/emqx_opentelemetry/src/emqx_otel_sup.erl b/apps/emqx_opentelemetry/src/emqx_otel_sup.erl
new file mode 100644
index 000000000..2240cca03
--- /dev/null
+++ b/apps/emqx_opentelemetry/src/emqx_otel_sup.erl
@@ -0,0 +1,67 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+-module(emqx_otel_sup).
+
+-behaviour(supervisor).
+
+-export([start_link/0]).
+-export([init/1]).
+-export([start_otel/1]).
+-export([stop_otel/0]).
+
+-define(CHILD(Mod, Opts), #{
+    id => Mod,
+    start => {Mod, start_link, [Opts]},
+    restart => permanent,
+    shutdown => 5000,
+    type => worker,
+    modules => [Mod]
+}).
+
+-define(WORKER, emqx_otel).
+
+start_link() ->
+    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+-spec start_otel(map()) -> ok | {error, term()}.
+start_otel(Conf) ->
+    assert_started(supervisor:start_child(?MODULE, ?CHILD(?WORKER, Conf))).
+
+-spec stop_otel() -> ok | {error, term()}.
+stop_otel() ->
+    case supervisor:terminate_child(?MODULE, ?WORKER) of
+        ok -> supervisor:delete_child(?MODULE, ?WORKER);
+        {error, not_found} -> ok;
+        Error -> Error
+    end.
+
+init([]) ->
+    SupFlags = #{
+        strategy => one_for_one,
+        intensity => 10,
+        period => 512
+    },
+    Children =
+        case emqx_conf:get([opentelemetry]) of
+            #{enable := false} -> [];
+            #{enable := true} = Conf -> [?CHILD(?WORKER, Conf)]
+        end,
+    {ok, {SupFlags, Children}}.
+
+assert_started({ok, _Pid}) -> ok;
+assert_started({ok, _Pid, _Info}) -> ok;
+assert_started({error, {already_started, _Pid}}) -> ok;
+assert_started({error, Reason}) -> {error, Reason}.
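With the schema, config handler and supervisor above in place, the metrics exporter can be toggled at runtime through emqx_otel_config:update/1, the same entry point the REST handler uses. A minimal sketch reusing the values from otel_config_example/0; the binary key/value shapes are an assumption about how raw config is submitted:

%% Sketch only: update/1 wraps emqx_conf:update/3 and returns the new raw
%% config on success, as shown in emqx_otel_config above.
Conf = #{
    <<"enable">> => true,
    <<"exporter">> => #{
        <<"endpoint">> => <<"http://localhost:4317">>,
        <<"interval">> => <<"10s">>
    }
},
{ok, _NewRawConf} = emqx_otel_config:update(Conf).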
diff --git a/apps/emqx_prometheus/src/emqx_prometheus.app.src b/apps/emqx_prometheus/src/emqx_prometheus.app.src index e6ee145ff..10fd75e98 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.app.src +++ b/apps/emqx_prometheus/src/emqx_prometheus.app.src @@ -2,7 +2,7 @@ {application, emqx_prometheus, [ {description, "Prometheus for EMQX"}, % strict semver, bump manually! - {vsn, "5.0.14"}, + {vsn, "5.0.15"}, {modules, []}, {registered, [emqx_prometheus_sup]}, {applications, [kernel, stdlib, prometheus, emqx, emqx_management]}, diff --git a/apps/emqx_prometheus/src/emqx_prometheus.erl b/apps/emqx_prometheus/src/emqx_prometheus.erl index d999f294e..ac902ca55 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus.erl @@ -160,7 +160,7 @@ collect_mf(_Registry, Callback) -> Stats = emqx_stats:getstats(), VMData = emqx_vm_data(), ClusterData = emqx_cluster_data(), - _ = [add_collect_family(Name, Stats, Callback, gauge) || Name <- emqx_stats()], + _ = [add_collect_family(Name, Stats, Callback, gauge) || Name <- emqx_stats:names()], _ = [add_collect_family(Name, VMData, Callback, gauge) || Name <- emqx_vm()], _ = [add_collect_family(Name, ClusterData, Callback, gauge) || Name <- emqx_cluster()], _ = [add_collect_family(Name, Metrics, Callback, counter) || Name <- emqx_metrics_packets()], @@ -176,7 +176,7 @@ collect(<<"json">>) -> Stats = emqx_stats:getstats(), VMData = emqx_vm_data(), #{ - stats => maps:from_list([collect_stats(Name, Stats) || Name <- emqx_stats()]), + stats => maps:from_list([collect_stats(Name, Stats) || Name <- emqx_stats:names()]), metrics => maps:from_list([collect_stats(Name, VMData) || Name <- emqx_vm()]), packets => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_packets()]), messages => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_messages()]), @@ -460,28 +460,6 @@ emqx_collect(emqx_cluster_nodes_stopped, ClusterData) -> %% Indicators %%-------------------------------------------------------------------- -emqx_stats() -> - [ - emqx_connections_count, - emqx_connections_max, - emqx_live_connections_count, - emqx_live_connections_max, - emqx_sessions_count, - emqx_sessions_max, - emqx_topics_count, - emqx_topics_max, - emqx_suboptions_count, - emqx_suboptions_max, - emqx_subscribers_count, - emqx_subscribers_max, - emqx_subscriptions_count, - emqx_subscriptions_max, - emqx_subscriptions_shared_count, - emqx_subscriptions_shared_max, - emqx_retained_count, - emqx_retained_max - ]. - emqx_metrics_packets() -> [ emqx_bytes_received, diff --git a/apps/emqx_retainer/src/emqx_retainer.app.src b/apps/emqx_retainer/src/emqx_retainer.app.src index 5238328f0..8f7c9aa17 100644 --- a/apps/emqx_retainer/src/emqx_retainer.app.src +++ b/apps/emqx_retainer/src/emqx_retainer.app.src @@ -2,7 +2,7 @@ {application, emqx_retainer, [ {description, "EMQX Retainer"}, % strict semver, bump manually! - {vsn, "5.0.16"}, + {vsn, "5.0.17"}, {modules, []}, {registered, [emqx_retainer_sup]}, {applications, [kernel, stdlib, emqx, emqx_ctl]}, diff --git a/apps/emqx_retainer/src/emqx_retainer.erl b/apps/emqx_retainer/src/emqx_retainer.erl index b9a608f62..4976e2400 100644 --- a/apps/emqx_retainer/src/emqx_retainer.erl +++ b/apps/emqx_retainer/src/emqx_retainer.erl @@ -40,6 +40,7 @@ update_config/1, clean/0, delete/1, + read_message/1, page_read/3, post_config_update/5, stats_fun/0, @@ -157,6 +158,9 @@ delete(Topic) -> retained_count() -> call(?FUNCTION_NAME). 
+read_message(Topic) -> + call({?FUNCTION_NAME, Topic}). + page_read(Topic, Page, Limit) -> call({?FUNCTION_NAME, Topic, Page, Limit}). @@ -210,6 +214,10 @@ handle_call(clean, _, #{context := Context} = State) -> handle_call({delete, Topic}, _, #{context := Context} = State) -> delete_message(Context, Topic), {reply, ok, State}; +handle_call({read_message, Topic}, _, #{context := Context} = State) -> + Mod = get_backend_module(), + Result = Mod:read_message(Context, Topic), + {reply, Result, State}; handle_call({page_read, Topic, Page, Limit}, _, #{context := Context} = State) -> Mod = get_backend_module(), Result = Mod:page_read(Context, Topic, Page, Limit), diff --git a/apps/emqx_retainer/src/emqx_retainer_api.erl b/apps/emqx_retainer/src/emqx_retainer_api.erl index 3274f0e4c..446679325 100644 --- a/apps/emqx_retainer/src/emqx_retainer_api.erl +++ b/apps/emqx_retainer/src/emqx_retainer_api.erl @@ -211,11 +211,8 @@ format_message(#message{ msgid => emqx_guid:to_hexstr(ID), qos => Qos, topic => Topic, - publish_at => list_to_binary( - calendar:system_time_to_rfc3339( - Timestamp, [{unit, millisecond}] - ) - ), + publish_at => + emqx_utils_calendar:epoch_to_rfc3339(Timestamp), from_clientid => to_bin_string(From), from_username => maps:get(username, Headers, <<>>) }. diff --git a/apps/emqx_retainer/test/emqx_retainer_SUITE.erl b/apps/emqx_retainer/test/emqx_retainer_SUITE.erl index d51045cd8..d75e2ca07 100644 --- a/apps/emqx_retainer/test/emqx_retainer_SUITE.erl +++ b/apps/emqx_retainer/test/emqx_retainer_SUITE.erl @@ -135,9 +135,17 @@ t_store_and_clean(_) -> {ok, List} = emqx_retainer:page_read(<<"retained">>, 1, 10), ?assertEqual(1, length(List)), + ?assertMatch( + {ok, [#message{payload = <<"this is a retained message">>}]}, + emqx_retainer:read_message(<<"retained">>) + ), {ok, #{}, [0]} = emqtt:subscribe(C1, <<"retained">>, [{qos, 0}, {rh, 0}]), ?assertEqual(1, length(receive_messages(1))), + ?assertMatch( + {ok, [#message{payload = <<"this is a retained message">>}]}, + emqx_retainer:read_message(<<"retained">>) + ), {ok, #{}, [0]} = emqtt:unsubscribe(C1, <<"retained">>), @@ -145,10 +153,18 @@ t_store_and_clean(_) -> timer:sleep(100), {ok, #{}, [0]} = emqtt:subscribe(C1, <<"retained">>, [{qos, 0}, {rh, 0}]), ?assertEqual(0, length(receive_messages(1))), + ?assertMatch( + {ok, []}, + emqx_retainer:read_message(<<"retained">>) + ), ok = emqx_retainer:clean(), {ok, List2} = emqx_retainer:page_read(<<"retained">>, 1, 10), ?assertEqual(0, length(List2)), + ?assertMatch( + {ok, []}, + emqx_retainer:read_message(<<"retained">>) + ), ok = emqtt:disconnect(C1). diff --git a/apps/emqx_rule_engine/include/rule_engine.hrl b/apps/emqx_rule_engine/include/rule_engine.hrl index b2a6a549e..7df5d9941 100644 --- a/apps/emqx_rule_engine/include/rule_engine.hrl +++ b/apps/emqx_rule_engine/include/rule_engine.hrl @@ -109,6 +109,7 @@ %% Tables -define(RULE_TAB, emqx_rule_engine). +-define(RULE_TOPIC_INDEX, emqx_rule_engine_topic_index). %% Allowed sql function provider modules -define(DEFAULT_SQL_FUNC_PROVIDER, emqx_rule_funcs). diff --git a/apps/emqx_rule_engine/src/emqx_rule_date.erl b/apps/emqx_rule_engine/src/emqx_rule_date.erl deleted file mode 100644 index aeb5d7a1b..000000000 --- a/apps/emqx_rule_engine/src/emqx_rule_date.erl +++ /dev/null @@ -1,270 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
-%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- - --module(emqx_rule_date). - --export([date/3, date/4, parse_date/4]). - --export([ - is_int_char/1, - is_symbol_char/1, - is_m_char/1 -]). - --record(result, { - %%year() - year = "1970" :: string(), - %%month() - month = "1" :: string(), - %%day() - day = "1" :: string(), - %%hour() - hour = "0" :: string(), - %%minute() %% epoch in millisecond precision - minute = "0" :: string(), - %%second() %% epoch in millisecond precision - second = "0" :: string(), - %%integer() %% zone maybe some value - zone = "+00:00" :: string() -}). - -%% -type time_unit() :: 'microsecond' -%% | 'millisecond' -%% | 'nanosecond' -%% | 'second'. -%% -type offset() :: [byte()] | (Time :: integer()). -date(TimeUnit, Offset, FormatString) -> - date(TimeUnit, Offset, FormatString, erlang:system_time(TimeUnit)). - -date(TimeUnit, Offset, FormatString, TimeEpoch) -> - [Head | Other] = string:split(FormatString, "%", all), - R = create_tag([{st, Head}], Other), - Res = lists:map( - fun(Expr) -> - eval_tag(rmap(make_time(TimeUnit, Offset, TimeEpoch)), Expr) - end, - R - ), - lists:concat(Res). - -parse_date(TimeUnit, Offset, FormatString, InputString) -> - [Head | Other] = string:split(FormatString, "%", all), - R = create_tag([{st, Head}], Other), - IsZ = fun(V) -> - case V of - {tag, $Z} -> true; - _ -> false - end - end, - R1 = lists:filter(IsZ, R), - IfFun = fun(Con, A, B) -> - case Con of - [] -> A; - _ -> B - end - end, - Res = parse_input(FormatString, InputString), - Str = - Res#result.year ++ "-" ++ - Res#result.month ++ "-" ++ - Res#result.day ++ "T" ++ - Res#result.hour ++ ":" ++ - Res#result.minute ++ ":" ++ - Res#result.second ++ - IfFun(R1, Offset, Res#result.zone), - calendar:rfc3339_to_system_time(Str, [{unit, TimeUnit}]). - -mlist(R) -> - %% %H Shows hour in 24-hour format [15] - [ - {$H, R#result.hour}, - %% %M Displays minutes [00-59] - {$M, R#result.minute}, - %% %S Displays seconds [00-59] - {$S, R#result.second}, - %% %y Displays year YYYY [2021] - {$y, R#result.year}, - %% %m Displays the number of the month [01-12] - {$m, R#result.month}, - %% %d Displays the number of the month [01-12] - {$d, R#result.day}, - %% %Z Displays Time zone - {$Z, R#result.zone} - ]. - -rmap(Result) -> - maps:from_list(mlist(Result)). - -support_char() -> "HMSymdZ". - -create_tag(Head, []) -> - Head; -create_tag(Head, [Val1 | RVal]) -> - case Val1 of - [] -> - create_tag(Head ++ [{st, [$%]}], RVal); - [H | Other] -> - case lists:member(H, support_char()) of - true -> create_tag(Head ++ [{tag, H}, {st, Other}], RVal); - false -> create_tag(Head ++ [{st, [$% | Val1]}], RVal) - end - end. - -eval_tag(_, {st, Str}) -> - Str; -eval_tag(Map, {tag, Char}) -> - maps:get(Char, Map, "undefined"). - -%% make_time(TimeUnit, Offset) -> -%% make_time(TimeUnit, Offset, erlang:system_time(TimeUnit)). 
-make_time(TimeUnit, Offset, TimeEpoch) -> - Res = calendar:system_time_to_rfc3339( - TimeEpoch, - [{unit, TimeUnit}, {offset, Offset}] - ), - [ - Y1, - Y2, - Y3, - Y4, - $-, - Mon1, - Mon2, - $-, - D1, - D2, - _T, - H1, - H2, - $:, - Min1, - Min2, - $:, - S1, - S2 - | TimeStr - ] = Res, - IsFractionChar = fun(C) -> C >= $0 andalso C =< $9 orelse C =:= $. end, - {FractionStr, UtcOffset} = lists:splitwith(IsFractionChar, TimeStr), - #result{ - year = [Y1, Y2, Y3, Y4], - month = [Mon1, Mon2], - day = [D1, D2], - hour = [H1, H2], - minute = [Min1, Min2], - second = [S1, S2] ++ FractionStr, - zone = UtcOffset - }. - -is_int_char(C) -> - C >= $0 andalso C =< $9. -is_symbol_char(C) -> - C =:= $- orelse C =:= $+. -is_m_char(C) -> - C =:= $:. - -parse_char_with_fun(_, []) -> - error(null_input); -parse_char_with_fun(ValidFun, [C | Other]) -> - Res = - case erlang:is_function(ValidFun) of - true -> ValidFun(C); - false -> erlang:apply(emqx_rule_date, ValidFun, [C]) - end, - case Res of - true -> {C, Other}; - false -> error({unexpected, [C | Other]}) - end. -parse_string([], Input) -> - {[], Input}; -parse_string([C | Other], Input) -> - {C1, Input1} = parse_char_with_fun(fun(V) -> V =:= C end, Input), - {Res, Input2} = parse_string(Other, Input1), - {[C1 | Res], Input2}. - -parse_times(0, _, Input) -> - {[], Input}; -parse_times(Times, Fun, Input) -> - {C1, Input1} = parse_char_with_fun(Fun, Input), - {Res, Input2} = parse_times((Times - 1), Fun, Input1), - {[C1 | Res], Input2}. - -parse_int_times(Times, Input) -> - parse_times(Times, is_int_char, Input). - -parse_fraction(Input) -> - IsFractionChar = fun(C) -> C >= $0 andalso C =< $9 orelse C =:= $. end, - lists:splitwith(IsFractionChar, Input). - -parse_second(Input) -> - {M, Input1} = parse_int_times(2, Input), - {M1, Input2} = parse_fraction(Input1), - {M ++ M1, Input2}. - -parse_zone(Input) -> - {S, Input1} = parse_char_with_fun(is_symbol_char, Input), - {M, Input2} = parse_int_times(2, Input1), - {C, Input3} = parse_char_with_fun(is_m_char, Input2), - {V, Input4} = parse_int_times(2, Input3), - {[S | M ++ [C | V]], Input4}. - -mlist1() -> - maps:from_list( - %% %H Shows hour in 24-hour format [15] - [ - {$H, fun(Input) -> parse_int_times(2, Input) end}, - %% %M Displays minutes [00-59] - {$M, fun(Input) -> parse_int_times(2, Input) end}, - %% %S Displays seconds [00-59] - {$S, fun(Input) -> parse_second(Input) end}, - %% %y Displays year YYYY [2021] - {$y, fun(Input) -> parse_int_times(4, Input) end}, - %% %m Displays the number of the month [01-12] - {$m, fun(Input) -> parse_int_times(2, Input) end}, - %% %d Displays the number of the month [01-12] - {$d, fun(Input) -> parse_int_times(2, Input) end}, - %% %Z Displays Time zone - {$Z, fun(Input) -> parse_zone(Input) end} - ] - ). - -update_result($H, Res, Str) -> Res#result{hour = Str}; -update_result($M, Res, Str) -> Res#result{minute = Str}; -update_result($S, Res, Str) -> Res#result{second = Str}; -update_result($y, Res, Str) -> Res#result{year = Str}; -update_result($m, Res, Str) -> Res#result{month = Str}; -update_result($d, Res, Str) -> Res#result{day = Str}; -update_result($Z, Res, Str) -> Res#result{zone = Str}. - -parse_tag(Res, {st, St}, InputString) -> - {_A, B} = parse_string(St, InputString), - {Res, B}; -parse_tag(Res, {tag, St}, InputString) -> - Fun = maps:get(St, mlist1()), - {A, B} = Fun(InputString), - NRes = update_result(St, Res, A), - {NRes, B}. 
- -parse_tags(Res, [], _) -> - Res; -parse_tags(Res, [Tag | Others], InputString) -> - {NRes, B} = parse_tag(Res, Tag, InputString), - parse_tags(NRes, Others, B). - -parse_input(FormatString, InputString) -> - [Head | Other] = string:split(FormatString, "%", all), - R = create_tag([{st, Head}], Other), - parse_tags(#result{}, R, InputString). diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src index 09d57a4f9..e6d00bcae 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src @@ -2,7 +2,7 @@ {application, emqx_rule_engine, [ {description, "EMQX Rule Engine"}, % strict semver, bump manually! - {vsn, "5.0.22"}, + {vsn, "5.0.23"}, {modules, []}, {registered, [emqx_rule_engine_sup, emqx_rule_engine]}, {applications, [kernel, stdlib, rulesql, getopt, emqx_ctl, uuid]}, diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.erl b/apps/emqx_rule_engine/src/emqx_rule_engine.erl index 66c82d3a1..41d1ed433 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.erl @@ -176,7 +176,7 @@ create_rule(Params) -> create_rule(Params = #{id := RuleId}, CreatedAt) when is_binary(RuleId) -> case get_rule(RuleId) of - not_found -> parse_and_insert(Params, CreatedAt); + not_found -> with_parsed_rule(Params, CreatedAt, fun insert_rule/1); {ok, _} -> {error, already_exists} end. @@ -185,18 +185,27 @@ update_rule(Params = #{id := RuleId}) when is_binary(RuleId) -> case get_rule(RuleId) of not_found -> {error, not_found}; - {ok, #{created_at := CreatedAt}} -> - parse_and_insert(Params, CreatedAt) + {ok, RulePrev = #{created_at := CreatedAt}} -> + with_parsed_rule(Params, CreatedAt, fun(Rule) -> update_rule(Rule, RulePrev) end) end. -spec delete_rule(RuleId :: rule_id()) -> ok. delete_rule(RuleId) when is_binary(RuleId) -> - gen_server:call(?RULE_ENGINE, {delete_rule, RuleId}, ?T_CALL). + case get_rule(RuleId) of + not_found -> + ok; + {ok, Rule} -> + gen_server:call(?RULE_ENGINE, {delete_rule, Rule}, ?T_CALL) + end. -spec insert_rule(Rule :: rule()) -> ok. insert_rule(Rule) -> gen_server:call(?RULE_ENGINE, {insert_rule, Rule}, ?T_CALL). +-spec update_rule(Rule :: rule(), RulePrev :: rule()) -> ok. +update_rule(Rule, RulePrev) -> + gen_server:call(?RULE_ENGINE, {update_rule, Rule, RulePrev}, ?T_CALL). + %%---------------------------------------------------------------------------------------- %% Rule Management %%---------------------------------------------------------------------------------------- @@ -216,9 +225,8 @@ get_rules_ordered_by_ts() -> -spec get_rules_for_topic(Topic :: binary()) -> [rule()]. get_rules_for_topic(Topic) -> [ - Rule - || Rule = #{from := From} <- get_rules(), - emqx_topic:match_any(Topic, From) + emqx_topic_index:get_record(M, ?RULE_TOPIC_INDEX) + || M <- emqx_topic_index:matches(Topic, ?RULE_TOPIC_INDEX, [unique]) ]. -spec get_rules_with_same_event(Topic :: binary()) -> [rule()]. @@ -411,10 +419,17 @@ init([]) -> {ok, #{}}. 
handle_call({insert_rule, Rule}, _From, State) -> - do_insert_rule(Rule), + ok = do_insert_rule(Rule), + ok = do_update_rule_index(Rule), + {reply, ok, State}; +handle_call({update_rule, Rule, RulePrev}, _From, State) -> + ok = do_delete_rule_index(RulePrev), + ok = do_insert_rule(Rule), + ok = do_update_rule_index(Rule), {reply, ok, State}; handle_call({delete_rule, Rule}, _From, State) -> - do_delete_rule(Rule), + ok = do_delete_rule_index(Rule), + ok = do_delete_rule(Rule), {reply, ok, State}; handle_call(Req, _From, State) -> ?SLOG(error, #{msg => "unexpected_call", request => Req}), @@ -438,7 +453,7 @@ code_change(_OldVsn, State, _Extra) -> %% Internal Functions %%---------------------------------------------------------------------------------------- -parse_and_insert(Params = #{id := RuleId, sql := Sql, actions := Actions}, CreatedAt) -> +with_parsed_rule(Params = #{id := RuleId, sql := Sql, actions := Actions}, CreatedAt, Fun) -> case emqx_rule_sqlparser:parse(Sql) of {ok, Select} -> Rule = #{ @@ -459,7 +474,7 @@ parse_and_insert(Params = #{id := RuleId, sql := Sql, actions := Actions}, Creat conditions => emqx_rule_sqlparser:select_where(Select) %% -- calculated fields end }, - ok = insert_rule(Rule), + ok = Fun(Rule), {ok, Rule}; {error, Reason} -> {error, Reason} @@ -471,16 +486,27 @@ do_insert_rule(#{id := Id} = Rule) -> true = ets:insert(?RULE_TAB, {Id, maps:remove(id, Rule)}), ok. -do_delete_rule(RuleId) -> - case get_rule(RuleId) of - {ok, Rule} -> - ok = unload_hooks_for_rule(Rule), - ok = clear_metrics_for_rule(RuleId), - true = ets:delete(?RULE_TAB, RuleId), - ok; - not_found -> - ok - end. +do_delete_rule(#{id := Id} = Rule) -> + ok = unload_hooks_for_rule(Rule), + ok = clear_metrics_for_rule(Id), + true = ets:delete(?RULE_TAB, Id), + ok. + +do_update_rule_index(#{id := Id, from := From} = Rule) -> + ok = lists:foreach( + fun(Topic) -> + true = emqx_topic_index:insert(Topic, Id, Rule, ?RULE_TOPIC_INDEX) + end, + From + ). + +do_delete_rule_index(#{id := Id, from := From}) -> + ok = lists:foreach( + fun(Topic) -> + true = emqx_topic_index:delete(Topic, Id, ?RULE_TOPIC_INDEX) + end, + From + ). parse_actions(Actions) -> [do_parse_action(Act) || Act <- Actions]. diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl index 2e6952920..79be197aa 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl @@ -514,7 +514,7 @@ format_rule_engine_resp(Config) -> maps:remove(rules, Config). format_datetime(Timestamp, Unit) -> - list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, Unit}])). + emqx_utils_calendar:epoch_to_rfc3339(Timestamp, Unit). format_action(Actions) -> [do_format_action(Act) || Act <- Actions]. 
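A brief sketch of the lookup path this hunk introduces, using only the `emqx_topic_index` calls shown above (`insert/4`, `matches/3`, `get_record/2`); the table name and the rule map are made-up examples:

    %% Illustrative only: index a rule under each of its `from` topic filters,
    %% then resolve a publish topic to matching rules without scanning ?RULE_TAB.
    Tab = ets:new(demo_rule_topic_index, [public, ordered_set]),
    Rule = #{id => <<"demo_rule">>, from => [<<"t/#">>, <<"sensor/+/temp">>]},
    ok = lists:foreach(
        fun(Filter) ->
            true = emqx_topic_index:insert(Filter, maps:get(id, Rule), Rule, Tab)
        end,
        maps:get(from, Rule)
    ),
    %% <<"sensor/room1/temp">> matches only the second filter; [unique] is the
    %% same option that get_rules_for_topic/1 passes above.
    [Rule] = [
        emqx_topic_index:get_record(M, Tab)
     || M <- emqx_topic_index:matches(<<"sensor/room1/temp">>, Tab, [unique])
    ].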
diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_app.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_app.erl index d8b031bdd..28515cb1a 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_app.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_app.erl @@ -26,6 +26,7 @@ start(_Type, _Args) -> _ = ets:new(?RULE_TAB, [named_table, public, ordered_set, {read_concurrency, true}]), + _ = ets:new(?RULE_TOPIC_INDEX, [named_table, public, ordered_set, {read_concurrency, true}]), ok = emqx_rule_events:reload(), SupRet = emqx_rule_engine_sup:start_link(), ok = emqx_rule_engine:load_rules(), diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_cli.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_cli.erl index 7f4e06252..1ba924864 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_cli.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_cli.erl @@ -74,8 +74,8 @@ pretty_print_rule(ID) -> "Updated at:\n ~ts\n" "Actions:\n ~s\n" ,[Id, Name, left_pad(Descr), Enable, left_pad(SQL), - calendar:system_time_to_rfc3339(CreatedAt, [{unit, millisecond}]), - calendar:system_time_to_rfc3339(UpdatedAt, [{unit, millisecond}]), + emqx_utils_calendar:epoch_to_rfc3339(CreatedAt, millisecond), + emqx_utils_calendar:epoch_to_rfc3339(UpdatedAt, millisecond), [left_pad(format_action(A)) || A <- Actions] ] ); diff --git a/apps/emqx_rule_engine/src/emqx_rule_funcs.erl b/apps/emqx_rule_engine/src/emqx_rule_funcs.erl index 64522ee60..edaa5f25f 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_funcs.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_funcs.erl @@ -276,6 +276,8 @@ ]} ). +-import(emqx_utils_calendar, [time_unit/1, now_to_rfc3339/0, now_to_rfc3339/1, epoch_to_rfc3339/2]). + %% @doc "msgid()" Func msgid() -> fun @@ -1077,23 +1079,19 @@ kv_store_del(Key) -> %%-------------------------------------------------------------------- now_rfc3339() -> - now_rfc3339(<<"second">>). + now_to_rfc3339(). now_rfc3339(Unit) -> - unix_ts_to_rfc3339(now_timestamp(Unit), Unit). + now_to_rfc3339(time_unit(Unit)). unix_ts_to_rfc3339(Epoch) -> - unix_ts_to_rfc3339(Epoch, <<"second">>). + epoch_to_rfc3339(Epoch, second). unix_ts_to_rfc3339(Epoch, Unit) when is_integer(Epoch) -> - emqx_utils_conv:bin( - calendar:system_time_to_rfc3339( - Epoch, [{unit, time_unit(Unit)}] - ) - ). + epoch_to_rfc3339(Epoch, time_unit(Unit)). rfc3339_to_unix_ts(DateTime) -> - rfc3339_to_unix_ts(DateTime, <<"second">>). + rfc3339_to_unix_ts(DateTime, second). rfc3339_to_unix_ts(DateTime, Unit) when is_binary(DateTime) -> calendar:rfc3339_to_system_time( @@ -1107,15 +1105,6 @@ now_timestamp() -> now_timestamp(Unit) -> erlang:system_time(time_unit(Unit)). -time_unit(<<"second">>) -> second; -time_unit(<<"millisecond">>) -> millisecond; -time_unit(<<"microsecond">>) -> microsecond; -time_unit(<<"nanosecond">>) -> nanosecond; -time_unit(second) -> second; -time_unit(millisecond) -> millisecond; -time_unit(microsecond) -> microsecond; -time_unit(nanosecond) -> nanosecond. - format_date(TimeUnit, Offset, FormatString) -> Unit = time_unit(TimeUnit), TimeEpoch = erlang:system_time(Unit), @@ -1125,17 +1114,17 @@ format_date(TimeUnit, Offset, FormatString, TimeEpoch) -> Unit = time_unit(TimeUnit), emqx_utils_conv:bin( lists:concat( - emqx_calendar:format(TimeEpoch, Unit, Offset, FormatString) + emqx_utils_calendar:format(TimeEpoch, Unit, Offset, FormatString) ) ). date_to_unix_ts(TimeUnit, FormatString, InputString) -> Unit = time_unit(TimeUnit), - emqx_calendar:parse(InputString, Unit, FormatString). 
+ emqx_utils_calendar:parse(InputString, Unit, FormatString). date_to_unix_ts(TimeUnit, Offset, FormatString, InputString) -> Unit = time_unit(TimeUnit), - OffsetSecond = emqx_calendar:offset_second(Offset), + OffsetSecond = emqx_utils_calendar:offset_second(Offset), OffsetDelta = erlang:convert_time_unit(OffsetSecond, second, Unit), date_to_unix_ts(Unit, FormatString, InputString) - OffsetDelta. @@ -1143,7 +1132,7 @@ timezone_to_second(TimeZone) -> timezone_to_offset_seconds(TimeZone). timezone_to_offset_seconds(TimeZone) -> - emqx_calendar:offset_second(TimeZone). + emqx_utils_calendar:offset_second(TimeZone). '$handle_undefined_function'(sprintf, [Format | Args]) -> erlang:apply(fun sprintf_s/2, [Format, Args]); @@ -1185,15 +1174,30 @@ function_literal(Fun, Args) -> {invalid_func, {Fun, Args}}. mongo_date() -> - erlang:timestamp(). + maybe_isodate_format(erlang:timestamp()). mongo_date(MillisecondsTimestamp) -> - convert_timestamp(MillisecondsTimestamp). + maybe_isodate_format(convert_timestamp(MillisecondsTimestamp)). mongo_date(Timestamp, Unit) -> InsertedTimeUnit = time_unit(Unit), ScaledEpoch = erlang:convert_time_unit(Timestamp, InsertedTimeUnit, millisecond), - convert_timestamp(ScaledEpoch). + mongo_date(ScaledEpoch). + +maybe_isodate_format(ErlTimestamp) -> + case emqx_rule_sqltester:is_test_runtime_env() of + false -> + ErlTimestamp; + true -> + %% if this is called from sqltest, we need to convert it to the ISODate() format, + %% so that it can be correctly converted into a JSON string. + isodate_format(ErlTimestamp) + end. + +isodate_format({MegaSecs, Secs, MicroSecs}) -> + SystemTimeMs = (MegaSecs * 1000_000_000_000 + Secs * 1000_000 + MicroSecs) div 1000, + Ts3339Str = calendar:system_time_to_rfc3339(SystemTimeMs, [{unit, millisecond}, {offset, "Z"}]), + iolist_to_binary(["ISODate(", Ts3339Str, ")"]). convert_timestamp(MillisecondsTimestamp) -> MicroTimestamp = MillisecondsTimestamp * 1000, diff --git a/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl b/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl index f3b4e2790..867fffcc1 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl @@ -18,7 +18,9 @@ -export([ test/1, - get_selected_data/3 + get_selected_data/3, + %% Some SQL functions return different results in the test environment + is_test_runtime_env/0 ]). -spec test(#{sql := binary(), context := map()}) -> {ok, map() | list()} | {error, term()}. @@ -63,12 +65,14 @@ test_rule(Sql, Select, Context, EventTopics) -> created_at => erlang:system_time(millisecond) }, FullContext = fill_default_values(hd(EventTopics), emqx_rule_maps:atom_key_map(Context)), + set_is_test_runtime_env(), try emqx_rule_runtime:apply_rule(Rule, FullContext, #{}) of {ok, Data} -> {ok, flatten(Data)}; {error, Reason} -> {error, Reason} after + unset_is_test_runtime_env(), ok = emqx_rule_engine:clear_metrics_for_rule(RuleId) end. @@ -97,3 +101,20 @@ envs_examp(EventTopic) -> emqx_rule_events:columns_with_exam(EventName) ) ). + +is_test_runtime_env_atom() -> + 'emqx_rule_sqltester:is_test_runtime_env'. + +set_is_test_runtime_env() -> + erlang:put(is_test_runtime_env_atom(), true), + ok. + +unset_is_test_runtime_env() -> + erlang:erase(is_test_runtime_env_atom()), + ok. + +is_test_runtime_env() -> + case erlang:get(is_test_runtime_env_atom()) of + true -> true; + _ -> false + end. 
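To make the behavioural change concrete, a small sketch of what `mongo_date/0` now returns inside and outside the SQL tester; the rendered string below is an example of the expected shape only, not an exact value:

    %% Illustrative only. Outside the dashboard SQL tester the raw Erlang
    %% timestamp tuple is returned, exactly as before:
    {_MegaSecs, _Secs, _MicroSecs} = emqx_rule_funcs:mongo_date().
    %% Inside emqx_rule_sqltester:test_rule/4 the process flag set by
    %% set_is_test_runtime_env/0 makes the same call render a MongoDB-style
    %% binary such as <<"ISODate(2023-08-28T10:00:00.500Z)">>, which can then
    %% be JSON-encoded for the dashboard response.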
diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_api_rule_test_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_api_rule_test_SUITE.erl index 575d35238..0c772958e 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_api_rule_test_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_api_rule_test_SUITE.erl @@ -214,6 +214,27 @@ t_ctx_delivery_dropped(_) -> Expected = check_result([from_clientid, from_username, reason, qos, topic], [], Context), do_test(SQL, Context, Expected). +t_mongo_date_function_should_return_string_in_test_env(_) -> + SQL = + <<"SELECT mongo_date() as mongo_date FROM \"t/1\"">>, + Context = + #{ + action => <<"publish">>, + clientid => <<"c_emqx">>, + event_type => client_check_authz_complete, + result => <<"allow">>, + topic => <<"t/1">>, + username => <<"u_emqx">> + }, + CheckFunction = fun(Result) -> + MongoDate = maps:get(mongo_date, Result), + %% Use regex to match the expected string + MatchResult = re:run(MongoDate, <<"ISODate\\([0-9]{4}-[0-9]{2}-[0-9]{2}T.*\\)">>), + ?assertMatch({match, _}, MatchResult), + ok + end, + do_test(SQL, Context, CheckFunction). + do_test(SQL, Context, Expected0) -> Res = emqx_rule_engine_api:'/rule_test'( post, diff --git a/apps/emqx_utils/src/emqx_placeholder.erl b/apps/emqx_utils/src/emqx_placeholder.erl index edf4123e4..0f677236d 100644 --- a/apps/emqx_utils/src/emqx_placeholder.erl +++ b/apps/emqx_utils/src/emqx_placeholder.erl @@ -48,9 +48,13 @@ -define(PH_VAR_THIS, '$this'). --define(EX_PLACE_HOLDER, "(\\$\\{[a-zA-Z0-9\\._]+\\})"). +%% To match any pattern starts with '$' and followed by '{', and closed by a '}' char: +%% e.g. for string "a${abc}bb", "${abc}" will be matched. +%% Note this is non-greedy matching +%% e.g. if "${{abc}}" is given, the "${{abc}" should be matched, NOT "${{abc}}". +-define(EX_PLACE_HOLDER, "(\\$\\{[^}]+\\})"). --define(EX_PLACE_HOLDER_DOUBLE_QUOTE, "(\\$\\{[a-zA-Z0-9\\._]+\\}|\"\\$\\{[a-zA-Z0-9\\._]+\\}\")"). +-define(EX_PLACE_HOLDER_DOUBLE_QUOTE, "(\\$\\{[^}]+\\}|\"\\$\\{[^}]+\\}\")"). %% Space and CRLF -define(EX_WITHE_CHARS, "\\s"). diff --git a/apps/emqx_utils/src/emqx_utils.app.src b/apps/emqx_utils/src/emqx_utils.app.src index 5900514dc..539bfd3b7 100644 --- a/apps/emqx_utils/src/emqx_utils.app.src +++ b/apps/emqx_utils/src/emqx_utils.app.src @@ -2,7 +2,7 @@ {application, emqx_utils, [ {description, "Miscellaneous utilities for EMQX apps"}, % strict semver, bump manually! - {vsn, "5.0.5"}, + {vsn, "5.0.7"}, {modules, [ emqx_utils, emqx_utils_api, diff --git a/apps/emqx/src/emqx_calendar.erl b/apps/emqx_utils/src/emqx_utils_calendar.erl similarity index 75% rename from apps/emqx/src/emqx_calendar.erl rename to apps/emqx_utils/src/emqx_utils_calendar.erl index 8a424ac2b..a42b8d0ca 100644 --- a/apps/emqx/src/emqx_calendar.erl +++ b/apps/emqx_utils/src/emqx_utils_calendar.erl @@ -1,5 +1,5 @@ %%-------------------------------------------------------------------- -%% Copyright (c) 2019-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -14,7 +14,32 @@ %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_calendar). +-module(emqx_utils_calendar). + +-include_lib("typerefl/include/types.hrl"). + +-export([ + formatter/1, + format/3, + format/4, + parse/3, + offset_second/1 +]). 
+ +%% API +-export([ + to_epoch_millisecond/1, + to_epoch_second/1, + human_readable_duration_string/1 +]). +-export([ + epoch_to_rfc3339/1, + epoch_to_rfc3339/2, + now_to_rfc3339/0, + now_to_rfc3339/1 +]). + +-export([time_unit/1]). -define(SECONDS_PER_MINUTE, 60). -define(SECONDS_PER_HOUR, 3600). @@ -24,13 +49,11 @@ -define(DAYS_FROM_0_TO_1970, 719528). -define(SECONDS_FROM_0_TO_1970, (?DAYS_FROM_0_TO_1970 * ?SECONDS_PER_DAY)). --export([ - formatter/1, - format/3, - format/4, - parse/3, - offset_second/1 -]). +%% the maximum value is the SECONDS_FROM_0_TO_10000 in the calendar.erl, +%% here minus SECONDS_PER_DAY to tolerate timezone time offset, +%% so the maximum date can reach 9999-12-31 which is ample. +-define(MAXIMUM_EPOCH, 253402214400). +-define(MAXIMUM_EPOCH_MILLI, 253402214400_000). -define(DATE_PART, [ year, @@ -50,6 +73,72 @@ timezone2 ]). +-reflect_type([ + epoch_millisecond/0, + epoch_second/0 +]). + +-type epoch_second() :: non_neg_integer(). +-type epoch_millisecond() :: non_neg_integer(). +-typerefl_from_string({epoch_second/0, ?MODULE, to_epoch_second}). +-typerefl_from_string({epoch_millisecond/0, ?MODULE, to_epoch_millisecond}). + +%%-------------------------------------------------------------------- +%% Epoch <-> RFC 3339 +%%-------------------------------------------------------------------- + +to_epoch_second(DateTime) -> + to_epoch(DateTime, second). + +to_epoch_millisecond(DateTime) -> + to_epoch(DateTime, millisecond). + +to_epoch(DateTime, Unit) -> + try + case string:to_integer(DateTime) of + {Epoch, []} -> validate_epoch(Epoch, Unit); + _ -> {ok, calendar:rfc3339_to_system_time(DateTime, [{unit, Unit}])} + end + catch + error:_ -> + {error, bad_rfc3339_timestamp} + end. + +epoch_to_rfc3339(Timestamp) -> + epoch_to_rfc3339(Timestamp, millisecond). + +epoch_to_rfc3339(Timestamp, Unit) when is_integer(Timestamp) -> + list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, Unit}])). + +now_to_rfc3339() -> + now_to_rfc3339(second). + +now_to_rfc3339(Unit) -> + epoch_to_rfc3339(erlang:system_time(Unit), Unit). + +-spec human_readable_duration_string(integer()) -> string(). +human_readable_duration_string(Milliseconds) -> + Seconds = Milliseconds div 1000, + {D, {H, M, S}} = calendar:seconds_to_daystime(Seconds), + L0 = [{D, " days"}, {H, " hours"}, {M, " minutes"}, {S, " seconds"}], + L1 = lists:dropwhile(fun({K, _}) -> K =:= 0 end, L0), + L2 = lists:map(fun({Time, Unit}) -> [integer_to_list(Time), Unit] end, L1), + lists:flatten(lists:join(", ", L2)). + +validate_epoch(Epoch, _Unit) when Epoch < 0 -> + {error, bad_epoch}; +validate_epoch(Epoch, second) when Epoch =< ?MAXIMUM_EPOCH -> + {ok, Epoch}; +validate_epoch(Epoch, millisecond) when Epoch =< ?MAXIMUM_EPOCH_MILLI -> + {ok, Epoch}; +validate_epoch(_Epoch, _Unit) -> + {error, bad_epoch}. + +%%-------------------------------------------------------------------- +%% Timestamp <-> any format date string +%% Timestamp treat as a superset for epoch, it can be any positive integer +%%-------------------------------------------------------------------- + formatter(FormatterStr) when is_list(FormatterStr) -> formatter(list_to_binary(FormatterStr)); formatter(FormatterBin) when is_binary(FormatterBin) -> @@ -70,8 +159,10 @@ parse(DateStr, Unit, FormatterBin) when is_binary(FormatterBin) -> parse(DateStr, Unit, formatter(FormatterBin)); parse(DateStr, Unit, Formatter) -> do_parse(DateStr, Unit, Formatter). 
-%% ------------------------------------------------------------------------------------------------- -%% internal + +%%-------------------------------------------------------------------- +%% Time unit +%%-------------------------------------------------------------------- time_unit(second) -> second; time_unit(millisecond) -> millisecond; @@ -84,10 +175,12 @@ time_unit("nanosecond") -> nanosecond; time_unit(<<"second">>) -> second; time_unit(<<"millisecond">>) -> millisecond; time_unit(<<"microsecond">>) -> microsecond; -time_unit(<<"nanosecond">>) -> nanosecond. +time_unit(<<"nanosecond">>) -> nanosecond; +time_unit(Any) -> error({invalid_time_unit, Any}). -%% ------------------------------------------------------------------------------------------------- +%%-------------------------------------------------------------------- %% internal: format part +%%-------------------------------------------------------------------- do_formatter(<<>>, Formatter) -> lists:reverse(Formatter); @@ -357,9 +450,9 @@ padding(Data, Len) when Len > 0 andalso erlang:length(Data) < Len -> padding(Data, _Len) -> Data. -%% ------------------------------------------------------------------------------------------------- -%% internal -%% parse part +%%-------------------------------------------------------------------- +%% internal: parse part +%%-------------------------------------------------------------------- do_parse(DateStr, Unit, Formatter) -> DateInfo = do_parse_date_str(DateStr, Formatter, #{}), @@ -476,3 +569,77 @@ str_to_int_or_error(Str, Error) -> _ -> error(Error) end. + +%%-------------------------------------------------------------------- +%% Unit Test +%%-------------------------------------------------------------------- + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). +-compile(nowarn_export_all). +-compile(export_all). +roots() -> [bar]. + +fields(bar) -> + [ + {second, ?MODULE:epoch_second()}, + {millisecond, ?MODULE:epoch_millisecond()} + ]. + +-define(FORMAT(_Sec_, _Ms_), + lists:flatten( + io_lib:format("bar={second=~w,millisecond=~w}", [_Sec_, _Ms_]) + ) +). + +epoch_ok_test() -> + BigStamp = 1 bsl 37, + Args = [ + {0, 0, 0, 0}, + {1, 1, 1, 1}, + {BigStamp, BigStamp * 1000, BigStamp, BigStamp * 1000}, + {"2022-01-01T08:00:00+08:00", "2022-01-01T08:00:00+08:00", 1640995200, 1640995200000} + ], + lists:foreach( + fun({Sec, Ms, EpochSec, EpochMs}) -> + check_ok(?FORMAT(Sec, Ms), EpochSec, EpochMs) + end, + Args + ), + ok. + +check_ok(Input, Sec, Ms) -> + {ok, Data} = hocon:binary(Input, #{}), + ?assertMatch( + #{bar := #{second := Sec, millisecond := Ms}}, + hocon_tconf:check_plain(?MODULE, Data, #{atom_key => true}, [bar]) + ), + ok. + +epoch_failed_test() -> + BigStamp = 1 bsl 38, + Args = [ + {-1, -1}, + {"1s", "1s"}, + {BigStamp, 0}, + {0, BigStamp * 1000}, + {"2022-13-13T08:00:00+08:00", "2022-13-13T08:00:00+08:00"} + ], + lists:foreach( + fun({Sec, Ms}) -> + check_failed(?FORMAT(Sec, Ms)) + end, + Args + ), + ok. + +check_failed(Input) -> + {ok, Data} = hocon:binary(Input, #{}), + ?assertException( + throw, + _, + hocon_tconf:check_plain(?MODULE, Data, #{atom_key => true}, [bar]) + ), + ok. + +-endif. 
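A short usage sketch of the relocated helpers, grounded in the unit tests above; `epoch_to_rfc3339/1` renders the node's local UTC offset, so only the round trip is asserted here:

    %% Illustrative only: epoch <-> RFC 3339 helpers now in emqx_utils_calendar.
    {ok, 1640995200} = emqx_utils_calendar:to_epoch_second("2022-01-01T08:00:00+08:00"),
    %% default unit is millisecond; the exact string depends on the local time zone
    Rfc3339 = emqx_utils_calendar:epoch_to_rfc3339(1640995200000),
    1640995200000 =
        calendar:rfc3339_to_system_time(binary_to_list(Rfc3339), [{unit, millisecond}]),
    %% 1 day + 2 h + 3 min + 4 s, expressed in milliseconds
    "1 days, 2 hours, 3 minutes, 4 seconds" =
        emqx_utils_calendar:human_readable_duration_string(93784000).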
diff --git a/apps/emqx_utils/test/emqx_placeholder_SUITE.erl b/apps/emqx_utils/test/emqx_placeholder_SUITE.erl index 81bf0853a..f813656f2 100644 --- a/apps/emqx_utils/test/emqx_placeholder_SUITE.erl +++ b/apps/emqx_utils/test/emqx_placeholder_SUITE.erl @@ -206,3 +206,53 @@ t_preproc_tmpl_deep(_) -> #{<<"${a}">> => [<<"1">>, "c", 2, 3.0, '${d}', {[<<"1.0">>], 0}]}, emqx_placeholder:proc_tmpl_deep(Tmpl1, Selected) ). + +t_proc_tmpl_arbitrary_var_name(_) -> + Selected = #{ + <<"中"/utf8>> => <<"1">>, + <<"中-1"/utf8>> => <<"1-1">>, + <<"-_+=<>,/?:;\"'\\[]|">> => 1, + <<"-_+=<>,">> => #{<<"/?:;\"'\\[]|">> => 2}, + <<"!@#$%^&*()">> => 1.0, + <<"d">> => #{ + <<"$ff">> => <<"oo">>, + <<"${f">> => <<"hi">>, + <<"${f}">> => <<"qq">> + } + }, + Tks = emqx_placeholder:preproc_tmpl( + << + "a:${中},a:${中-1},b:${-_+=<>,/?:;\"'\\[]|}," + "b:${-_+=<>,./?:;\"'\\[]|},c:${!@#$%^&*()},d:${d.$ff},d1:${d.${f}}"/utf8 + >> + ), + ?assertEqual( + <<"a:1,a:1-1,b:1,b:2,c:1.0,d:oo,d1:hi}">>, + emqx_placeholder:proc_tmpl(Tks, Selected) + ). + +t_proc_tmpl_arbitrary_var_name_double_quote(_) -> + Selected = #{ + <<"中"/utf8>> => <<"1">>, + <<"中-1"/utf8>> => <<"1-1">>, + <<"-_+=<>,/?:;\"'\\[]|">> => 1, + <<"-_+=<>,">> => #{<<"/?:;\"'\\[]|">> => 2}, + <<"!@#$%^&*()">> => 1.0, + <<"d">> => #{ + <<"$ff">> => <<"oo">>, + <<"${f">> => <<"hi">>, + <<"${f}">> => <<"qq">> + } + }, + Tks = emqx_placeholder:preproc_tmpl( + << + "a:\"${中}\",a:\"${中-1}\",b:\"${-_+=<>,/?:;\"'\\[]|}\"," + "b:\"${-_+=<>,./?:;\"'\\[]|}\",c:\"${!@#$%^&*()}\",d:\"${d.$ff}\",d1:\"${d.${f}\"}"/utf8 + >>, + #{strip_double_quote => true} + ), + ct:print("TKs:~p~n", [Tks]), + ?assertEqual( + <<"a:1,a:1-1,b:1,b:2,c:1.0,d:oo,d1:hi}">>, + emqx_placeholder:proc_tmpl(Tks, Selected) + ). diff --git a/build b/build index 50b3fd861..874e4088c 100755 --- a/build +++ b/build @@ -369,20 +369,20 @@ docker_cleanup() { ## Build the default docker image based on debian 11. make_docker() { - local EMQX_BUILDER_VERSION="${EMQX_BUILDER_VERSION:-5.1-3}" + local EMQX_BUILDER_VERSION="${EMQX_BUILDER_VERSION:-5.1-4}" local EMQX_BUILDER_PLATFORM="${EMQX_BUILDER_PLATFORM:-debian11}" - local EMQX_BUILDER_OTP="${EMQX_BUILDER_OTP:-25.3.2-1}" + local EMQX_BUILDER_OTP="${EMQX_BUILDER_OTP:-25.3.2-2}" local EMQX_BUILDER_ELIXIR="${EMQX_BUILDER_ELIXIR:-1.14.5}" local EMQX_BUILDER=${EMQX_BUILDER:-ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VERSION}:${EMQX_BUILDER_ELIXIR}-${EMQX_BUILDER_OTP}-${EMQX_BUILDER_PLATFORM}} local EMQX_RUNNER="${EMQX_RUNNER:-${EMQX_DEFAULT_RUNNER}}" local EMQX_DOCKERFILE="${EMQX_DOCKERFILE:-deploy/docker/Dockerfile}" local PKG_VSN="${PKG_VSN:-$(./pkg-vsn.sh)}" # shellcheck disable=SC2155 - local VSN_MAJOR="$(echo "$PKG_VSN" | cut -d . -f 1)" + local VSN_MAJOR="$(scripts/semver.sh "$PKG_VSN" --major)" # shellcheck disable=SC2155 - local VSN_MINOR="$(echo "$PKG_VSN" | cut -d . -f 2)" + local VSN_MINOR="$(scripts/semver.sh "$PKG_VSN" --minor)" # shellcheck disable=SC2155 - local VSN_PATCH="$(echo "$PKG_VSN" | cut -d . 
-f 3)" + local VSN_PATCH="$(scripts/semver.sh "$PKG_VSN" --patch)" local SUFFIX='' if [[ "$PROFILE" = *-elixir ]]; then SUFFIX="-elixir" @@ -430,8 +430,6 @@ make_docker() { --label org.opencontainers.image.licenses="${LICENSE}" \ --label org.opencontainers.image.otp.version="${EMQX_BUILDER_OTP}" \ --tag "${EMQX_IMAGE_TAG}" \ - --tag "${EMQX_BASE_DOCKER_TAG}:${VSN_MAJOR}.${VSN_MINOR}${SUFFIX}" \ - --tag "${EMQX_BASE_DOCKER_TAG}:${VSN_MAJOR}.${VSN_MINOR}.${VSN_PATCH}${SUFFIX}" \ --provenance false \ --pull ) @@ -442,7 +440,9 @@ make_docker() { DOCKER_BUILDX_ARGS+=(--label org.opencontainers.image.elixir.version="${EMQX_BUILDER_ELIXIR}") fi if [ "${DOCKER_LATEST:-false}" = true ]; then - DOCKER_BUILDX_ARGS+=(--tag "${DOCKER_REGISTRY}/${DOCKER_ORG}/${PROFILE}:latest${SUFFIX}") + DOCKER_BUILDX_ARGS+=(--tag "${EMQX_BASE_DOCKER_TAG}:latest${SUFFIX}") + DOCKER_BUILDX_ARGS+=(--tag "${EMQX_BASE_DOCKER_TAG}:${VSN_MAJOR}.${VSN_MINOR}${SUFFIX}") + DOCKER_BUILDX_ARGS+=(--tag "${EMQX_BASE_DOCKER_TAG}:${VSN_MAJOR}.${VSN_MINOR}.${VSN_PATCH}${SUFFIX}") fi if [ "${DOCKER_PLATFORMS:-default}" != 'default' ]; then DOCKER_BUILDX_ARGS+=(--platform "${DOCKER_PLATFORMS}") diff --git a/changes/ce/feat-11429.en.md b/changes/ce/feat-11429.en.md new file mode 100644 index 000000000..5c0028774 --- /dev/null +++ b/changes/ce/feat-11429.en.md @@ -0,0 +1 @@ +Added option to configure detection of legacy protocol in MongoDB connectors and bridges. diff --git a/changes/ce/feat-11436.en.md b/changes/ce/feat-11436.en.md new file mode 100644 index 000000000..e4e53f19d --- /dev/null +++ b/changes/ce/feat-11436.en.md @@ -0,0 +1 @@ +Add a new API endpoint `DELETE /banned` to clear all `banned` data. diff --git a/changes/ce/feat-11438.en.md b/changes/ce/feat-11438.en.md new file mode 100644 index 000000000..65cab5494 --- /dev/null +++ b/changes/ce/feat-11438.en.md @@ -0,0 +1,2 @@ +Changed the type of the `mqtt.max_packet_size` from string to byteSize to better represent the valid numeric range. +Strings will still be accepted for backwards compatibility. diff --git a/changes/ce/feat-11446.en.md b/changes/ce/feat-11446.en.md new file mode 100644 index 000000000..aa420136c --- /dev/null +++ b/changes/ce/feat-11446.en.md @@ -0,0 +1 @@ +Refactored datetime-related modules and functions to simplify the code. diff --git a/changes/ce/fix-11279.en.md b/changes/ce/fix-11279.en.md new file mode 100644 index 000000000..9f56bf543 --- /dev/null +++ b/changes/ce/fix-11279.en.md @@ -0,0 +1 @@ +Prevent clients from being disconnected when sending large payloads while debug/trace logging is enabled. diff --git a/changes/ce/fix-11424.en.md b/changes/ce/fix-11424.en.md new file mode 100644 index 000000000..1d44d9745 --- /dev/null +++ b/changes/ce/fix-11424.en.md @@ -0,0 +1 @@ +Add a check for the maximum value of the timestamp in the API to ensure it is a valid Unix timestamp. diff --git a/changes/ce/fix-11445.en.md b/changes/ce/fix-11445.en.md new file mode 100644 index 000000000..589846db2 --- /dev/null +++ b/changes/ce/fix-11445.en.md @@ -0,0 +1,2 @@ +Removed os_mon application monitor support on Windows platforms to prevent VM crashes. +Functionality remains on non-Windows platforms.
diff --git a/changes/ce/fix-11454.en.md b/changes/ce/fix-11454.en.md new file mode 100644 index 000000000..50e7fe826 --- /dev/null +++ b/changes/ce/fix-11454.en.md @@ -0,0 +1 @@ +Fixed crashing when debugging/tracing with large payloads (introduced in [#11279](https://github.com/emqx/emqx/pull/11279)). diff --git a/changes/ce/fix-11456.en.md b/changes/ce/fix-11456.en.md new file mode 100644 index 000000000..8ace3f88a --- /dev/null +++ b/changes/ce/fix-11456.en.md @@ -0,0 +1,2 @@ +Removed validation that enforced non-empty PEM for CA cert file. +CA certificate file PEM can now be empty. diff --git a/changes/ce/fix-11499.en.md b/changes/ce/fix-11499.en.md new file mode 100644 index 000000000..3ed4d1e15 --- /dev/null +++ b/changes/ce/fix-11499.en.md @@ -0,0 +1,3 @@ +Upgrade Erlang/OTP to 25.3.2-2 + +Erlang/OTP 25.3.2-2 excludes sensitive data from mnesia_hook log message. diff --git a/changes/ce/perf-11396.en.md b/changes/ce/perf-11396.en.md new file mode 100644 index 000000000..fd8df9a9d --- /dev/null +++ b/changes/ce/perf-11396.en.md @@ -0,0 +1 @@ +Introduce topic index for the rule engine runtime that significantly improves the performance of EMQX with a non-trivial number of rules consuming messages matching different topic filters. diff --git a/changes/ce/perf-11399.en.md b/changes/ce/perf-11399.en.md new file mode 100644 index 000000000..42dac80bc --- /dev/null +++ b/changes/ce/perf-11399.en.md @@ -0,0 +1,8 @@ +Improved the placeholder syntax of rule engine. + +The parameters of actions support using placeholder syntax to +dynamically fill in the content of strings. The format of the +placeholder syntax is `${key}`. +Before this improvement, the `key` in `${key}` could only contain +letters, numbers, and underscores. Now the `key` supports any UTF8 +characters. diff --git a/changes/ee/feat-11367.en.md b/changes/ee/feat-11367.en.md new file mode 100644 index 000000000..ee60b7cd9 --- /dev/null +++ b/changes/ee/feat-11367.en.md @@ -0,0 +1 @@ +Ported GCP IoT Hub authentication support. diff --git a/changes/ee/feat-11403.en.md b/changes/ee/feat-11403.en.md new file mode 100644 index 000000000..9942a2490 --- /dev/null +++ b/changes/ee/feat-11403.en.md @@ -0,0 +1,3 @@ +Added support for defining message attributes and ordering key templates for GCP PubSub Producer bridge. + +Also updated our HOCON library to fix an issue where objects in an array were being concatenated even if they lay on different lines. diff --git a/changes/ee/feat-11459.en.md b/changes/ee/feat-11459.en.md new file mode 100644 index 000000000..88b2047c4 --- /dev/null +++ b/changes/ee/feat-11459.en.md @@ -0,0 +1 @@ +Added the option to configure health check interval for Kafka bridges. diff --git a/changes/ee/fix-11394.en.md b/changes/ee/fix-11394.en.md new file mode 100644 index 000000000..ace678ecc --- /dev/null +++ b/changes/ee/fix-11394.en.md @@ -0,0 +1,2 @@ +Upgrade Kafka producer client `wolff` from 1.7.6 to 1.7.7. +This fixes a potential race condition which may cause all Kafka producers to crash if some failed to initialize. diff --git a/changes/ee/fix-11401.en.md b/changes/ee/fix-11401.en.md new file mode 100644 index 000000000..2bce7170a --- /dev/null +++ b/changes/ee/fix-11401.en.md @@ -0,0 +1 @@ +When running one of the rule engine SQL `mongo_date` functions in the EMQX dashboard test interface, the resulting date is formatted as `ISODate(*)`, where * is the date in ISO date format instead of only the ISO date string. This is the format used by MongoDB to store dates.
diff --git a/changes/ee/fix-11444.en.md b/changes/ee/fix-11444.en.md new file mode 100644 index 000000000..c8e80946d --- /dev/null +++ b/changes/ee/fix-11444.en.md @@ -0,0 +1 @@ +Fixed error information when Kinesis bridge fails to connect to endpoint. diff --git a/changes/ee/fix-11453.en.md b/changes/ee/fix-11453.en.md new file mode 100644 index 000000000..428f51d5b --- /dev/null +++ b/changes/ee/fix-11453.en.md @@ -0,0 +1 @@ +Fixed an issue which would yield false negatives when testing the connectivity of InfluxDB bridges. diff --git a/changes/v5.1.5-build.3.en.md b/changes/v5.1.5-build.3.en.md new file mode 100644 index 000000000..f8317a3d8 --- /dev/null +++ b/changes/v5.1.5-build.3.en.md @@ -0,0 +1,37 @@ +# v5.1.5-build.3 + +## Enhancements + +- [#10697](https://github.com/emqx/emqx/pull/10697) This change allows setting the minReadySeconds for the StatefulSet. This allows adding a gap between the restarts of each pod during an upgrade or restart command. + +- [#11390](https://github.com/emqx/emqx/pull/11390) Add `node.broker_pool_size`, `node.generic_pool_size`, `node.channel_cleanup_batch_size` options to EMQX configuration. + + Tuning these options can significantly improve performance if cluster interconnect network latency is high. + +- [#11389](https://github.com/emqx/emqx/pull/11389) Improved retained message publishing latency by consolidating multiple index update operations into a single mnesia activity, leveraging the new APIs introduced in mria 0.6.0. + +- [#11399](https://github.com/emqx/emqx/pull/11399) Improved the placeholder syntax of rule engine. + + The parameters of actions support using placeholder syntax to + dynamically fill in the content of strings. The format of the + placeholder syntax is `${key}`. + Before this improvement, the `key` in `${key}` could only contain + letters, numbers, and underscores. Now the `key` supports any UTF8 + characters. + +- [#11405](https://github.com/emqx/emqx/pull/11405) Improve the error reason of `date_to_unix_ts` to make it more meaningful. + +## Bug Fixes + +- [#11279](https://github.com/emqx/emqx/pull/11279) Prevent clients from being disconnected when sending large payloads while debug/trace logging is enabled. + +- [#11388](https://github.com/emqx/emqx/pull/11388) Increase `emqx_router_sup` restart intensity. + + The goal is to tolerate occasional crashes that can happen under relatively normal conditions + and do not seem critical enough to shut down the whole app (emqx). + For example, a mria write/delete call delegated from a replicant to a core node by `emqx_router_helper` may fail + if the core node is being stopped / restarted / not ready. + +- [#11410](https://github.com/emqx/emqx/pull/11410) Reintroduce `cacerts` TLS client option as a deprecated option. + + This fixes issues found when trying to upgrade from 5.1.3 where that option is set in the configuration files or persisted in EMQX Operator settings. diff --git a/changes/v5.1.6.en.md b/changes/v5.1.6.en.md new file mode 100644 index 000000000..3c393c55b --- /dev/null +++ b/changes/v5.1.6.en.md @@ -0,0 +1,30 @@ +# v5.1.6 + +## Enhancements + +- [#11429](https://github.com/emqx/emqx/pull/11429) Added option to configure detection of legacy protocol in MongoDB connectors and bridges. + +- [#11436](https://github.com/emqx/emqx/pull/11436) Add a new API endpoint `DELETE /banned` to clear all `banned` data. + +- [#11438](https://github.com/emqx/emqx/pull/11438) Changed the type of the `mqtt.max_packet_size` from string to byteSize to better represent the valid numeric range.
+ Strings will still be accepted for backwards compatibility. + +- [#11446](https://github.com/emqx/emqx/pull/11446) Refactored datetime-related modules and functions to simplify the code. + +- [#11396](https://github.com/emqx/emqx/pull/11396) Introduce topic index for the rule engine runtime that significantly improves the performance of EMQX with a non-trivial number of rules consuming messages matching different topic filters. + +## Bug Fixes + +- [#11424](https://github.com/emqx/emqx/pull/11424) Add a check for the maximum value of the timestamp in the API to ensure it is a valid Unix timestamp. + +- [#11445](https://github.com/emqx/emqx/pull/11445) Removed os_mon application monitor support on Windows platforms to prevent VM crashes. + Functionality remains on non-Windows platforms. + +- [#11454](https://github.com/emqx/emqx/pull/11454) Fixed crashing when debugging/tracing with large payloads (introduced in [#11279](https://github.com/emqx/emqx/pull/11279)). + +- [#11456](https://github.com/emqx/emqx/pull/11456) Removed validation that enforced non-empty PEM for CA cert file. + CA certificate file PEM can now be empty. + +- [#11499](https://github.com/emqx/emqx/pull/11499) Upgrade Erlang/OTP to 25.3.2-2 + + Erlang/OTP 25.3.2-2 excludes sensitive data from mnesia_hook log message. diff --git a/deploy/charts/emqx-enterprise/Chart.yaml b/deploy/charts/emqx-enterprise/Chart.yaml index 626436517..575c6b354 100644 --- a/deploy/charts/emqx-enterprise/Chart.yaml +++ b/deploy/charts/emqx-enterprise/Chart.yaml @@ -14,8 +14,8 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 5.1.1 +version: 5.2.0-alpha.3 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. -appVersion: 5.1.1 +appVersion: 5.2.0-alpha.3 diff --git a/deploy/charts/emqx/Chart.yaml b/deploy/charts/emqx/Chart.yaml index f1e4fe822..f8cd69735 100644 --- a/deploy/charts/emqx/Chart.yaml +++ b/deploy/charts/emqx/Chart.yaml @@ -14,8 +14,8 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 5.1.5 +version: 5.1.6 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application.
-appVersion: 5.1.5 +appVersion: 5.1.6 diff --git a/deploy/docker/Dockerfile b/deploy/docker/Dockerfile index 61a143cae..76ded75eb 100644 --- a/deploy/docker/Dockerfile +++ b/deploy/docker/Dockerfile @@ -1,4 +1,4 @@ -ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-debian11 +ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-debian11 ARG RUN_FROM=debian:11-slim FROM ${BUILD_FROM} AS builder diff --git a/mix.exs b/mix.exs index 00d190136..d57cedc2b 100644 --- a/mix.exs +++ b/mix.exs @@ -72,7 +72,7 @@ defmodule EMQXUmbrella.MixProject do # in conflict by emqtt and hocon {:getopt, "1.0.2", override: true}, {:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "1.0.8", override: true}, - {:hocon, github: "emqx/hocon", tag: "0.39.14", override: true}, + {:hocon, github: "emqx/hocon", tag: "0.39.16", override: true}, {:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.2", override: true}, {:esasl, github: "emqx/esasl", tag: "0.2.0"}, {:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"}, @@ -98,7 +98,32 @@ defmodule EMQXUmbrella.MixProject do # set by hackney (dependency) {:ssl_verify_fun, "1.1.6", override: true}, {:uuid, github: "okeuday/uuid", tag: "v2.0.6", override: true}, - {:quickrand, github: "okeuday/quickrand", tag: "v2.0.6", override: true} + {:quickrand, github: "okeuday/quickrand", tag: "v2.0.6", override: true}, + {:opentelemetry_api, + github: "emqx/opentelemetry-erlang", + sparse: "apps/opentelemetry_api", + override: true, + runtime: false}, + {:opentelemetry, + github: "emqx/opentelemetry-erlang", + sparse: "apps/opentelemetry", + override: true, + runtime: false}, + {:opentelemetry_api_experimental, + github: "emqx/opentelemetry-erlang", + sparse: "apps/opentelemetry_api_experimental", + override: true, + runtime: false}, + {:opentelemetry_experimental, + github: "emqx/opentelemetry-erlang", + sparse: "apps/opentelemetry_experimental", + override: true, + runtime: false}, + {:opentelemetry_exporter, + github: "emqx/opentelemetry-erlang", + sparse: "apps/opentelemetry_exporter", + override: true, + runtime: false} ] ++ emqx_apps(profile_info, version) ++ enterprise_deps(profile_info) ++ bcrypt_dep() ++ jq_dep() ++ quicer_dep() @@ -195,7 +220,8 @@ defmodule EMQXUmbrella.MixProject do :emqx_enterprise, :emqx_bridge_kinesis, :emqx_bridge_azure_event_hub, - :emqx_ldap + :emqx_ldap, + :emqx_gcp_device ]) end @@ -203,7 +229,7 @@ defmodule EMQXUmbrella.MixProject do [ {:hstreamdb_erl, github: "hstreamdb/hstreamdb_erl", tag: "0.3.1+v0.12.0"}, {:influxdb, github: "emqx/influxdb-client-erl", tag: "1.1.11", override: true}, - {:wolff, github: "kafka4beam/wolff", tag: "1.7.6"}, + {:wolff, github: "kafka4beam/wolff", tag: "1.7.7"}, {:kafka_protocol, github: "kafka4beam/kafka_protocol", tag: "4.1.3", override: true}, {:brod_gssapi, github: "kafka4beam/brod_gssapi", tag: "v0.1.0"}, {:brod, github: "kafka4beam/brod", tag: "3.16.8"}, @@ -324,6 +350,7 @@ defmodule EMQXUmbrella.MixProject do :emqx_plugins, :emqx_ft, :emqx_s3, + :emqx_opentelemetry, :emqx_durable_storage, :rabbit_common ], @@ -376,7 +403,8 @@ defmodule EMQXUmbrella.MixProject do quicer: enable_quicer?(), bcrypt: enable_bcrypt?(), jq: enable_jq?(), - observer: is_app?(:observer) + observer: is_app?(:observer), + os_mon: enable_os_mon?() } |> Enum.reject(&elem(&1, 1)) |> Enum.map(&elem(&1, 0)) @@ -808,6 +836,10 @@ defmodule EMQXUmbrella.MixProject do not win32?() end + defp enable_os_mon?() do + not win32?() + end + defp enable_jq?() do not Enum.any?([ build_without_jq?(), diff --git a/pkg-vsn.sh 
b/pkg-vsn.sh index 99524dd82..661aefc4b 100755 --- a/pkg-vsn.sh +++ b/pkg-vsn.sh @@ -11,6 +11,8 @@ help() { echo "$0 PROFILE [options]" echo echo "-h|--help: To display this usage information" + echo "--release: Print release version from emqx_release.hrl" + echo echo "--long: Print long vsn number. e.g. 5.0.0-ubuntu20.04-amd64" echo " Otherwise short e.g. 5.0.0" echo "--elixir: Include elixir version in the long version string" @@ -33,6 +35,10 @@ while [ "$#" -gt 0 ]; do help exit 0 ;; + --release) + RELEASE_VERSION='yes' + shift 1 + ;; --long) LONG_VERSION='yes' shift 1 @@ -88,6 +94,11 @@ esac ## emqx_release.hrl is the single source of truth for release version RELEASE="$(grep -E "define.+${RELEASE_EDITION}" apps/emqx/include/emqx_release.hrl | cut -d '"' -f2)" +if [ "${RELEASE_VERSION:-}" = 'yes' ]; then + echo "$RELEASE" + exit 0 +fi + git_exact_vsn() { local tag tag="$(git describe --tags --match "${GIT_TAG_PREFIX}*" --exact 2>/dev/null)" diff --git a/rebar.config b/rebar.config index 131149f47..2d605c18f 100644 --- a/rebar.config +++ b/rebar.config @@ -75,7 +75,7 @@ , {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.3"}}} , {getopt, "1.0.2"} , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.8"}}} - , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.14"}}} + , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.16"}}} , {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}} , {esasl, {git, "https://github.com/emqx/esasl", {tag, "0.2.0"}}} , {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}} @@ -84,6 +84,14 @@ %% in conflict by erlavro and rocketmq , {jsone, {git, "https://github.com/emqx/jsone.git", {tag, "1.7.1"}}} , {uuid, {git, "https://github.com/okeuday/uuid.git", {tag, "v2.0.6"}}} +%% trace + , {opentelemetry_api, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry_api"}} + , {opentelemetry, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry"}} + %% log metrics + , {opentelemetry_experimental, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry_experimental"}} + , {opentelemetry_api_experimental, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry_api_experimental"}} + %% export + , {opentelemetry_exporter, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry_exporter"}} ]}. {xref_ignores, diff --git a/rebar.config.erl b/rebar.config.erl index b45516d2b..ad6f425a0 100644 --- a/rebar.config.erl +++ b/rebar.config.erl @@ -107,6 +107,7 @@ is_community_umbrella_app("apps/emqx_enterprise") -> false; is_community_umbrella_app("apps/emqx_bridge_kinesis") -> false; is_community_umbrella_app("apps/emqx_bridge_azure_event_hub") -> false; is_community_umbrella_app("apps/emqx_ldap") -> false; +is_community_umbrella_app("apps/emqx_gcp_device") -> false; is_community_umbrella_app(_) -> true. is_jq_supported() -> @@ -404,12 +405,13 @@ relx_apps(ReleaseType, Edition) -> ce -> CEBusinessApps end, BusinessApps = CommonBusinessApps ++ EditionSpecificApps, - ExcludedApps = excluded_apps(ReleaseType), - SystemApps ++ - %% EMQX starts the DB and the business applications: - [{App, load} || App <- (DBApps -- ExcludedApps)] ++ - [emqx_machine] ++ - [{App, load} || App <- (BusinessApps -- ExcludedApps)]. 
+ Apps = + (SystemApps ++ + %% EMQX starts the DB and the business applications: + [{App, load} || App <- DBApps] ++ + [emqx_machine] ++ + [{App, load} || App <- BusinessApps]), + lists:foldl(fun proplists:delete/2, Apps, excluded_apps(ReleaseType)). excluded_apps(ReleaseType) -> OptionalApps = [ @@ -417,7 +419,8 @@ excluded_apps(ReleaseType) -> {bcrypt, provide_bcrypt_release(ReleaseType)}, {jq, is_jq_supported()}, {observer, is_app(observer)}, - {mnesia_rocksdb, is_rocksdb_supported()} + {mnesia_rocksdb, is_rocksdb_supported()}, + {os_mon, provide_os_mon_release()} ], [App || {App, false} <- OptionalApps]. @@ -523,6 +526,9 @@ is_debug(VarName) -> provide_bcrypt_dep() -> not is_win32(). +provide_os_mon_release() -> + not is_win32(). + provide_bcrypt_release(ReleaseType) -> provide_bcrypt_dep() andalso ReleaseType =:= cloud. diff --git a/rel/config/examples/rate_limit.conf.example b/rel/config/examples/rate_limit.conf.example new file mode 100644 index 000000000..d62c3cd8c --- /dev/null +++ b/rel/config/examples/rate_limit.conf.example @@ -0,0 +1,14 @@ +## Rate Limit + +## Rate limiting is applied on the listener +listeners.tcp.my_tcp_listener_name { + + ## Connections per second per listener + max_conn_rate = "1000/s" + + ## Incoming messages per second per client + messages_rate = "1000/s" + + ## Incoming message size in bytes per second per client + bytes_rate = "1MB/s" +} diff --git a/rel/i18n/emqx_bridge_gcp_pubsub.hocon b/rel/i18n/emqx_bridge_gcp_pubsub.hocon index 39c4b7417..b5dffec1f 100644 --- a/rel/i18n/emqx_bridge_gcp_pubsub.hocon +++ b/rel/i18n/emqx_bridge_gcp_pubsub.hocon @@ -46,6 +46,18 @@ payload_template.desc: payload_template.label: """Payload template""" +attributes_template.desc: +"""The template for formatting the outgoing message attributes. Undefined values will be rendered as empty string values. Empty keys are removed from the attribute map.""" + +attributes_template.label: +"""Attributes template""" + +ordering_key_template.desc: +"""The template for formatting the outgoing message ordering key. Undefined values will be rendered as empty string values. This value will not be added to the message if it's empty.""" + +ordering_key_template.label: +"""Ordering Key template""" + pipelining.desc: """A positive integer. Whether to send HTTP requests continuously, when set to 1, it means that after each HTTP request is sent, you need to wait for the server to return and then continue to send the next request.""" @@ -64,6 +76,36 @@ pubsub_topic.desc: pubsub_topic.label: """GCP PubSub Topic""" +producer_attributes.desc: +"""List of key-value pairs representing templates to construct the attributes for a given GCP PubSub message. Both keys and values support the placeholder `${var_name}` notation. Keys that are undefined or resolve to an empty string are omitted from the attribute map.""" + +producer_attributes.label: +"""Attributes Template""" + +producer_ordering_key.desc: +"""Template for the Ordering Key of a given GCP PubSub message. If the resolved value is undefined or an empty string, the ordering key property is omitted from the message.""" + +producer_ordering_key.label: +"""Ordering Key Template""" + +kv_pair_desc.desc: +"""Key-value pair.""" + +kv_pair_desc.label: +"""Key-value pair""" + +kv_pair_key.desc: +"""Key""" + +kv_pair_key.label: +"""Key""" + +kv_pair_value.desc: +"""Value""" + +kv_pair_value.label: +"""Value""" + service_account_json.desc: """JSON containing the GCP Service Account credentials to be used with PubSub. 
When a GCP Service Account is created (as described in https://developers.google.com/identity/protocols/oauth2/service-account#creatinganaccount), you have the option of downloading the credentials in JSON form. That's the file needed.""" diff --git a/rel/i18n/emqx_gcp_device_api.hocon b/rel/i18n/emqx_gcp_device_api.hocon new file mode 100644 index 000000000..2ae7dc8e1 --- /dev/null +++ b/rel/i18n/emqx_gcp_device_api.hocon @@ -0,0 +1,95 @@ +emqx_gcp_device_api { + +gcp_device.desc: +"""Configuration of authenticator using GCP Device as authentication data source.""" + +gcp_devices_get.desc: +"""List all devices imported from GCP IoT Core""" +gcp_devices_get.label: +"""List all GCP devices""" + +gcp_devices_post.desc: +"""Import authentication and config data for devices from GCP IoT Core""" +gcp_devices_post.label: +"""Import GCP devices""" + +gcp_device_get.desc: +"""Get a device imported from GCP IoT Core""" +gcp_device_get.label: +"""Get GCP device""" + +gcp_device_put.desc: +"""Update a device imported from GCP IoT Core""" +gcp_device_put.label: +"""Update GCP device""" + +gcp_device_delete.desc: +"""Remove a device imported from GCP IoT Core""" +gcp_device_delete.label: +"""Remove GCP device""" + +project.desc: +"""Cloud project identifier""" +project.label: +"""Project""" + +location.desc: +"""Cloud region""" +location.label: +"""Region""" + +registry.desc: +"""Device registry identifier""" +registry.label: +"""Registry""" + +deviceid.label: +"""Device identifier""" +deviceid.desc: +"""Device identifier""" + +keys.desc: +"""Public keys associated to GCP device""" +keys.label: +"""Public keys""" + +key.desc: +"""Public key""" +key.label: +"""Public key""" + +key_type.desc: +"""Public key type""" +key_type.label: +"""Public key type""" + +expires_at.desc: +"""Public key expiration time""" +expires_at.label: +"""Expiration time""" + +created_at.desc: +"""Time when GCP device was imported""" +created_at.label: +"""Creation time""" + +config.label: +"""Device configuration""" +config.desc: +"""Configuration""" + +blocked.label: +"""If device is blocked from communicating to GCP IoT Core""" +blocked.desc: +"""Blocked""" + +gcp_device_response404.desc: +"""The GCP device was not found""" + +imported_counter.desc: +"""Number of successfully imported GCP devices""" + +imported_counter_errors.desc: +"""Number of GCP devices not imported due to some error""" + +} diff --git a/rel/i18n/emqx_mgmt_api_banned.hocon b/rel/i18n/emqx_mgmt_api_banned.hocon index 0a5439402..4bf72103f 100644 --- a/rel/i18n/emqx_mgmt_api_banned.hocon +++ b/rel/i18n/emqx_mgmt_api_banned.hocon @@ -57,4 +57,9 @@ who.desc: who.label: """Ban Object""" +clear_banned_api.desc: +"""Clear all banned data.""" +clear_banned_api.label: +"""Clear""" + } diff --git a/rel/i18n/emqx_mongodb.hocon b/rel/i18n/emqx_mongodb.hocon index b1830868d..162460281 100644 --- a/rel/i18n/emqx_mongodb.hocon +++ b/rel/i18n/emqx_mongodb.hocon @@ -149,4 +149,10 @@ wait_queue_timeout.desc: wait_queue_timeout.label: """Wait Queue Timeout""" +use_legacy_protocol.desc: +"""Whether to use MongoDB's legacy protocol for communicating with the database. 
The default is to attempt to automatically determine if the newer protocol is supported.""" + +use_legacy_protocol.label: +"""Use legacy protocol""" + } diff --git a/rel/i18n/emqx_otel_schema.hocon b/rel/i18n/emqx_otel_schema.hocon new file mode 100644 index 000000000..f662598b9 --- /dev/null +++ b/rel/i18n/emqx_otel_schema.hocon @@ -0,0 +1,15 @@ +emqx_otel_schema { + +opentelemetry.desc: "Open Telemetry Toolkit configuration" + +exporter.desc: "Open Telemetry Exporter" + +enable.desc: "Enable or disable open telemetry metrics" + +protocol.desc: "Open Telemetry Exporter Protocol" + +endpoint.desc: "Open Telemetry Exporter Endpoint" + +interval.desc: "The interval of sending metrics to Open Telemetry Endpoint" + +} diff --git a/rel/i18n/emqx_schema.hocon b/rel/i18n/emqx_schema.hocon index 64de73b24..251dcdcb9 100644 --- a/rel/i18n/emqx_schema.hocon +++ b/rel/i18n/emqx_schema.hocon @@ -156,7 +156,7 @@ persistent_session_builtin_messages_table.label: sysmon_os_cpu_low_watermark.desc: """The threshold, as percentage of system CPU load, - for how much system cpu can be used before the corresponding alarm is cleared.""" + for how much system cpu can be used before the corresponding alarm is cleared. Disabled on Windows platform""" sysmon_os_cpu_low_watermark.label: """CPU low watermark""" @@ -278,7 +278,7 @@ fields_ws_opts_mqtt_path.label: sysmon_os_procmem_high_watermark.desc: """The threshold, as percentage of system memory, for how much system memory can be allocated by one Erlang process before - the corresponding alarm is raised.""" + the corresponding alarm is raised. Disabled on Windows platform.""" sysmon_os_procmem_high_watermark.label: """ProcMem high wartermark""" @@ -389,7 +389,7 @@ fields_tcp_opts_sndbuf.label: """TCP send buffer""" sysmon_os_mem_check_interval.desc: -"""The time interval for the periodic memory check.""" +"""The time interval for the periodic memory check. Disabled on Windows platform.""" sysmon_os_mem_check_interval.label: """Mem check interval""" @@ -742,7 +742,7 @@ common_ssl_opts_schema_keyfile.label: sysmon_os_cpu_high_watermark.desc: """The threshold, as percentage of system CPU load, - for how much system cpu can be used before the corresponding alarm is raised.""" + for how much system cpu can be used before the corresponding alarm is raised. Disabled on Windows platform""" sysmon_os_cpu_high_watermark.label: """CPU high watermark""" @@ -798,7 +798,7 @@ fields_ws_opts_proxy_address_header.label: sysmon_os_sysmem_high_watermark.desc: """The threshold, as percentage of system memory, - for how much system memory can be allocated before the corresponding alarm is raised.""" + for how much system memory can be allocated before the corresponding alarm is raised. Disabled on Windows platform""" sysmon_os_sysmem_high_watermark.label: """SysMem high wartermark""" @@ -1369,7 +1369,7 @@ sysmon_vm_process_low_watermark.label: """Process low watermark""" mqtt_max_packet_size.desc: -"""Maximum MQTT packet size allowed.""" +"""Maximum MQTT packet size allowed. Default: 1 MB, Maximum: 256 MB""" mqtt_max_packet_size.label: """Max Packet Size""" @@ -1521,7 +1521,7 @@ fields_tcp_opts_send_timeout_close.label: """TCP send timeout close""" sysmon_os_cpu_check_interval.desc: -"""The time interval for the periodic CPU check.""" +"""The time interval for the periodic CPU check. 
Disabled on Windows platform.""" sysmon_os_cpu_check_interval.label: """The time interval for the periodic CPU check.""" diff --git a/scripts/buildx.sh b/scripts/buildx.sh index 462ab6612..662a7233c 100755 --- a/scripts/buildx.sh +++ b/scripts/buildx.sh @@ -9,7 +9,7 @@ ## example: ## ./scripts/buildx.sh --profile emqx --pkgtype tgz --arch arm64 \ -## --builder ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-debian11 +## --builder ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-debian11 set -euo pipefail @@ -24,7 +24,7 @@ help() { echo "--arch amd64|arm64: Target arch to build the EMQX package for" echo "--src_dir : EMQX source code in this dir, default to PWD" echo "--builder : Builder image to pull" - echo " E.g. ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-debian11" + echo " E.g. ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-debian11" } die() { diff --git a/scripts/check_missing_reboot_apps.exs b/scripts/check_missing_reboot_apps.exs index d9933e099..91d4b39ea 100755 --- a/scripts/check_missing_reboot_apps.exs +++ b/scripts/check_missing_reboot_apps.exs @@ -24,10 +24,10 @@ apps = :xref.start(:xref) :xref.set_default(:xref, warnings: false) -rel_dir = '_build/#{profile}/lib/' +rel_dir = ~c"_build/#{profile}/lib/" :xref.add_release(:xref, rel_dir) -{:ok, calls} = :xref.q(:xref, '(App) (XC | [#{Enum.join(apps, ",")}] || mria:create_table/_)') +{:ok, calls} = :xref.q(:xref, ~c"(App) (XC | [#{Enum.join(apps, ",")}] || mria:create_table/_)") emqx_calls = calls diff --git a/scripts/find-suites.sh b/scripts/find-suites.sh index 685ab5ec8..47799f885 100755 --- a/scripts/find-suites.sh +++ b/scripts/find-suites.sh @@ -19,8 +19,14 @@ if [ -n "${EMQX_CT_SUITES:-}" ]; then fi TESTDIR="$1/test" +INTEGRATION_TESTDIR="$1/integration_test" # Get the output of the find command IFS=$'\n' read -r -d '' -a FILES < <(find "${TESTDIR}" -name "*_SUITE.erl" 2>/dev/null | sort && printf '\0') +if [[ -d "${INTEGRATION_TESTDIR}" ]]; then + IFS=$'\n' read -r -d '' -a FILES_INTEGRATION < <(find "${INTEGRATION_TESTDIR}" -name "*_SUITE.erl" 2>/dev/null | sort && printf '\0') +fi +# shellcheck disable=SC2206 +FILES+=(${FILES_INTEGRATION:-}) SUITEGROUP_RAW="${SUITEGROUP:-1_1}" SUITEGROUP="$(echo "$SUITEGROUP_RAW" | cut -d '_' -f1)" diff --git a/scripts/parse-git-ref.sh b/scripts/parse-git-ref.sh index 4cb138212..b0d01d2d3 100755 --- a/scripts/parse-git-ref.sh +++ b/scripts/parse-git-ref.sh @@ -20,7 +20,7 @@ if [[ $1 =~ ^refs/tags/v[5-9]+\.[0-9]+\.[0-9]+$ ]]; then PROFILE=emqx RELEASE=true LATEST=$(is_latest "$1") -elif [[ $1 =~ ^refs/tags/v[5-9]+\.[0-9]+\.[0-9]+-patch\.[0-9]+$ ]]; then +elif [[ $1 =~ ^refs/tags/v[5-9]+\.[0-9]+\.[0-9]+-build\.[0-9]+$ ]]; then PROFILE=emqx RELEASE=true LATEST=$(is_latest "$1") @@ -28,7 +28,7 @@ elif [[ $1 =~ ^refs/tags/e[5-9]+\.[0-9]+\.[0-9]+$ ]]; then PROFILE=emqx-enterprise RELEASE=true LATEST=$(is_latest "$1") -elif [[ $1 =~ ^refs/tags/e[5-9]+\.[0-9]+\.[0-9]+-patch\.[0-9]+$ ]]; then +elif [[ $1 =~ ^refs/tags/e[5-9]+\.[0-9]+\.[0-9]+-build\.[0-9]+$ ]]; then PROFILE=emqx-enterprise RELEASE=true LATEST=$(is_latest "$1") diff --git a/scripts/pr-sanity-checks.sh b/scripts/pr-sanity-checks.sh index 6b193b74e..19321230b 100755 --- a/scripts/pr-sanity-checks.sh +++ b/scripts/pr-sanity-checks.sh @@ -12,8 +12,8 @@ if ! 
type "yq" > /dev/null; then exit 1 fi -EMQX_BUILDER_VERSION=${EMQX_BUILDER_VERSION:-5.1-3} -EMQX_BUILDER_OTP=${EMQX_BUILDER_OTP:-25.3.2-1} +EMQX_BUILDER_VERSION=${EMQX_BUILDER_VERSION:-5.1-4} +EMQX_BUILDER_OTP=${EMQX_BUILDER_OTP:-25.3.2-2} EMQX_BUILDER_ELIXIR=${EMQX_BUILDER_ELIXIR:-1.14.5} EMQX_BUILDER_PLATFORM=${EMQX_BUILDER_PLATFORM:-ubuntu22.04} EMQX_BUILDER=${EMQX_BUILDER:-ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VERSION}:${EMQX_BUILDER_ELIXIR}-${EMQX_BUILDER_OTP}-${EMQX_BUILDER_PLATFORM}} diff --git a/scripts/rel/check-chart-vsn.sh b/scripts/rel/check-chart-vsn.sh index 81ba32413..7bc9b8029 100755 --- a/scripts/rel/check-chart-vsn.sh +++ b/scripts/rel/check-chart-vsn.sh @@ -25,11 +25,11 @@ if [ "$CHART_VSN" != "$APP_VSN" ]; then exit 2 fi -PKG_VSN="$(./pkg-vsn.sh "$PROFILE" | cut -d '-' -f 1)" +RELEASE_VSN="$(./pkg-vsn.sh "$PROFILE" --release)" -if [ "$CHART_VSN" != "$PKG_VSN" ]; then +if [ "$CHART_VSN" != "$RELEASE_VSN" ]; then echo "Chart version in $CHART_FILE is not in sync with release version." echo "Chart version: $CHART_VSN" - echo "Release version: $PKG_VSN" + echo "Release version: $RELEASE_VSN" exit 3 fi diff --git a/scripts/rel/cut.sh b/scripts/rel/cut.sh index 613fd05f0..f779a12b9 100755 --- a/scripts/rel/cut.sh +++ b/scripts/rel/cut.sh @@ -21,6 +21,7 @@ options: -b|--base: Specify the current release base branch, can be one of release-51 + release-52 NOTE: this option should be used when --dryrun. --dryrun: Do not actually create the git tag. @@ -31,14 +32,16 @@ options: --prev-tag : Provide the prev tag to automatically generate changelogs If this option is absent, the tag found by git describe will be used - --docker-latest: Set this option to assign :latest tag on the corresponding docker image - in addition to regular : one - -NOTE: For 5.1 series the current working branch must be 'release-51' +For 5.1 series the current working branch must be 'release-51' --.--[ master ]---------------------------.-----------.--- \\ / \`---[release-51]----(v5.1.1 | e5.1.1) + +For 5.2 series the current working branch must be 'release-52' + --.--[ master ]---------------------------.-----------.--- + \\ / + \`---[release-52]----(v5.2.1 | e5.2.1) EOF } @@ -55,21 +58,18 @@ logmsg() { } TAG="${1:-}" -DOCKER_LATEST_TAG= case "$TAG" in v*) TAG_PREFIX='v' PROFILE='emqx' SKIP_APPUP='yes' - DOCKER_LATEST_TAG='docker-latest-ce' ;; e*) TAG_PREFIX='e' PROFILE='emqx-enterprise' #TODO change to no when we are ready to support hot-upgrade SKIP_APPUP='yes' - DOCKER_LATEST_TAG='docker-latest-ee' ;; -h|--help) usage @@ -85,7 +85,6 @@ esac shift 1 DRYRUN='no' -DOCKER_LATEST='no' while [ "$#" -gt 0 ]; do case $1 in -h|--help) @@ -113,10 +112,6 @@ while [ "$#" -gt 0 ]; do PREV_TAG="$1" shift ;; - --docker-latest) - DOCKER_LATEST='yes' - shift - ;; *) logerr "Unknown option $1" exit 1 @@ -133,6 +128,12 @@ rel_branch() { e5.1.*) echo 'release-51' ;; + v5.2.*) + echo 'release-52' + ;; + e5.2.*) + echo 'release-52' + ;; *) logerr "Unsupported version tag $TAG" exit 1 @@ -182,15 +183,13 @@ assert_tag_absent() { } assert_tag_absent "$TAG" -PKG_VSN=$(./pkg-vsn.sh "$PROFILE") +RELEASE_VSN=$(./pkg-vsn.sh "$PROFILE" --release) ## Assert package version is updated to the tag which is being created assert_release_version() { local tag="$1" - # shellcheck disable=SC2001 - pkg_vsn="$(echo "$PKG_VSN" | sed 's/-g[0-9a-f]\{8\}$//g')" - if [ "${TAG_PREFIX}${pkg_vsn}" != "${tag}" ]; then - logerr "The release version ($pkg_vsn) is different from the desired git tag." 
+ if [ "${TAG_PREFIX}${RELEASE_VSN}" != "${tag}" ]; then + logerr "The release version ($RELEASE_VSN) is different from the desired git tag." logerr "Update the release version in emqx_release.hrl" exit 1 fi @@ -220,7 +219,7 @@ fi ## Ensure relup paths are updated ## TODO: add relup path db -#./scripts/relup-base-vsns.escript check-vsn-db "$PKG_VSN" "$RELUP_PATHS" +#./scripts/relup-base-vsns.escript check-vsn-db "$RELEASE_VSN" "$RELUP_PATHS" ## Run some additional checks (e.g. some for enterprise edition only) CHECKS_DIR="./scripts/rel/checks" @@ -256,9 +255,6 @@ generate_changelog () { if [ "$DRYRUN" = 'yes' ]; then logmsg "Release tag is ready to be created with command: git tag $TAG" - if [ "$DOCKER_LATEST" = 'yes' ]; then - logmsg "Docker latest tag is ready to be created with command: git tag --force $DOCKER_LATEST_TAG" - fi else case "$TAG" in *rc*) @@ -276,14 +272,6 @@ else esac git tag "$TAG" logmsg "$TAG is created OK." - if [ "$DOCKER_LATEST" = 'yes' ]; then - git tag --force "$DOCKER_LATEST_TAG" - logmsg "$DOCKER_LATEST_TAG is created OK." - fi - logwarn "Don't forget to push the tags!" - if [ "$DOCKER_LATEST" = 'yes' ]; then - echo "git push --atomic --force origin $TAG $DOCKER_LATEST_TAG" - else - echo "git push origin $TAG" - fi + logwarn "Don't forget to push the tag!" + echo "git push origin $TAG" fi diff --git a/scripts/rel/sync-remotes.sh b/scripts/rel/sync-remotes.sh index f4cbadfa1..dddc10638 100755 --- a/scripts/rel/sync-remotes.sh +++ b/scripts/rel/sync-remotes.sh @@ -5,7 +5,7 @@ set -euo pipefail # ensure dir cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/../.." -BASE_BRANCHES=( 'release-51' 'master' ) +BASE_BRANCHES=( 'release-52' 'release-51' 'master' ) usage() { cat <>>= 0 -./parse-git-ref.sh refs/tags/v5.1.5-patch.1 +./parse-git-ref.sh refs/tags/v5.1.5-build.1 >>> {"profile": "emqx", "release": true, "latest": false} >>>= 0 @@ -53,7 +53,7 @@ Unrecognized tag: refs/tags/v5.2.0-alpha-1 {"profile": "emqx-enterprise", "release": true, "latest": false} >>>= 0 -./parse-git-ref.sh refs/tags/e5.1.5-patch.1 +./parse-git-ref.sh refs/tags/e5.1.5-build.1 >>> {"profile": "emqx-enterprise", "release": true, "latest": false} >>>= 0 @@ -93,6 +93,11 @@ Unrecognized tag: refs/tags/v5.2.0-alpha-1 {"profile": "emqx-enterprise", "release": false, "latest": false} >>>= 0 +./parse-git-ref.sh refs/heads/release-52 +>>> +{"profile": "emqx-enterprise", "release": false, "latest": false} +>>>= 0 + ./parse-git-ref.sh refs/heads/ci/foobar >>> {"profile": "emqx", "release": false, "latest": false} diff --git a/scripts/shelltest/run_tests.sh b/scripts/shelltest/run_tests.sh index 11caa6cac..e265782f2 100755 --- a/scripts/shelltest/run_tests.sh +++ b/scripts/shelltest/run_tests.sh @@ -7,13 +7,13 @@ exit_code=0 for test in shelltest/*.test; do echo "Running $test" - /bin/sh "${test%.test}.setup" + [ -f "${test%.test}.setup" ] && /bin/sh "${test%.test}.setup" shelltest -c --diff --all --precise -- "$test" # shellcheck disable=SC2181 if [ $? 
-ne 0 ]; then exit_code=1 fi - /bin/sh "${test%.test}.cleanup" + [ -f "${test%.test}.cleanup" ] && /bin/sh "${test%.test}.cleanup" done exit $exit_code diff --git a/scripts/shelltest/semver.test b/scripts/shelltest/semver.test new file mode 100644 index 000000000..b04543cad --- /dev/null +++ b/scripts/shelltest/semver.test @@ -0,0 +1,32 @@ +./semver.sh 5.2.0.1 +>>>= 1 + +./semver.sh 5.1.0 +>>> +{"major": 5, "minor": 1, "patch": 0, "build": ""} +>>>= 0 + +./semver.sh 5.1.0-build.3 +>>> +{"major": 5, "minor": 1, "patch": 0, "build": "build.3"} +>>>= 0 + +./semver.sh 5.1.0-build.3 --major +>>> +5 +>>>= 0 + +./semver.sh 5.1.0-build.3 --minor +>>> +1 +>>>= 0 + +./semver.sh 5.1.0-build.3 --patch +>>> +0 +>>>= 0 + +./semver.sh 5.1.0-build.3 --build +>>> +build.3 +>>>= 0 diff --git a/scripts/ui-tests/dashboard_test.py b/scripts/ui-tests/dashboard_test.py index 4b93262b1..91a7264ec 100644 --- a/scripts/ui-tests/dashboard_test.py +++ b/scripts/ui-tests/dashboard_test.py @@ -62,12 +62,13 @@ def test_log(driver, login, dashboard_url): ensure_current_url(driver, dest_url) title = wait_title(driver) assert "Logging" == title.text - label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[./label/span[text()='Enable Log Handler']]") + + label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[contains(., 'Enable Log Handler')]") assert driver.find_elements(By.ID, label.get_attribute("for")) - label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[./label/span[text()='Log Level']]") + label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[contains(., 'Log Level')]") assert driver.find_elements(By.ID, label.get_attribute("for")) - label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[./label/span[text()='Log Formatter']]") + label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[contains(., 'Log Formatter')]") assert driver.find_elements(By.ID, label.get_attribute("for")) - label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[./label/span[text()='Time Offset']]") + label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[contains(., 'Time Offset')]") assert driver.find_elements(By.ID, label.get_attribute("for"))
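
On pkg-vsn.sh: the new --release option makes the script print only the version read from apps/emqx/include/emqx_release.hrl and exit, skipping the git-describe suffix logic. scripts/rel/check-chart-vsn.sh and scripts/rel/cut.sh now compare against this value instead of trimming the long package version. A minimal usage sketch; the printed versions are illustrative and depend on what emqx_release.hrl declares:

    # release version only, as read from emqx_release.hrl
    ./pkg-vsn.sh emqx --release        # e.g. 5.2.0

    # the long package version still carries platform and arch, per the help text
    ./pkg-vsn.sh emqx --long           # e.g. 5.0.0-ubuntu20.04-amd64

    # the same value the chart check now uses
    RELEASE_VSN="$(./pkg-vsn.sh emqx --release)"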
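
On rebar.config.erl: relx_apps/2 now builds the full application list first and then strips excluded applications with lists:foldl(fun proplists:delete/2, Apps, excluded_apps(ReleaseType)). proplists:delete/2 removes entries by key, so it drops both bare atoms (such as emqx_machine) and {App, load} tuples in a single pass. A self-contained check runnable from a shell with Erlang installed; the application names below are illustrative only:

    # both tuple entries and the bare atom are removed by key
    erl -noshell -eval '
      Apps = [kernel, {mnesia_rocksdb, load}, observer, emqx_machine, {os_mon, load}],
      Excluded = [os_mon, mnesia_rocksdb, observer],
      io:format("~p~n", [lists:foldl(fun proplists:delete/2, Apps, Excluded)]),
      halt().'
    # prints [kernel,emqx_machine]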
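
On scripts/parse-git-ref.sh and its shelltest: the pre-release tag suffix recognized for release builds is renamed from -patch.N to -build.N, for both the v (emqx) and e (emqx-enterprise) tag prefixes. The same pattern can be exercised directly in bash; the ref below is illustrative:

    ref='refs/tags/v5.1.5-build.1'
    if [[ $ref =~ ^refs/tags/v[5-9]+\.[0-9]+\.[0-9]+-build\.[0-9]+$ ]]; then
      echo "treated as a release tag"
    fi
    # a -patch.N suffix no longer matches this branch of the script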
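
On scripts/find-suites.sh: in addition to <app>/test, common-test suites found under an optional <app>/integration_test directory are now appended to the same FILES array before the SUITEGROUP partitioning. A sketch of the effect with a hypothetical app path (emqx_example is not a real app):

    # apps/emqx_example/test/foo_SUITE.erl
    # apps/emqx_example/integration_test/bar_SUITE.erl
    ./scripts/find-suites.sh apps/emqx_example
    # the discovered set now contains bar_SUITE.erl as well as foo_SUITE.erl,
    # unless EMQX_CT_SUITES is already set (the script honors that override first)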
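
On scripts/check_missing_reboot_apps.exs: the single-quoted charlist literals passed to :xref are rewritten with the ~c sigil. Both spellings build an Erlang charlist (a list of character codes); the sigil form is the spelling recent Elixir versions and mix format prefer, which appears to be the motivation here. A quick check, assuming elixir is on PATH:

    elixir -e 'IO.inspect(~c"_build/emqx/lib/")'
    # prints the charlist (recent Elixir renders it as ~c"_build/emqx/lib/",
    # older versions render the same list as '_build/emqx/lib/')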