diff --git a/.ci/docker-compose-file/docker-compose-kafka.yaml b/.ci/docker-compose-file/docker-compose-kafka.yaml index 9a2ca988c..89479dee9 100644 --- a/.ci/docker-compose-file/docker-compose-kafka.yaml +++ b/.ci/docker-compose-file/docker-compose-kafka.yaml @@ -18,7 +18,7 @@ services: - /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret kdc: hostname: kdc.emqx.net - image: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04 + image: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04 container_name: kdc.emqx.net expose: - 88 # kdc diff --git a/.ci/docker-compose-file/docker-compose.yaml b/.ci/docker-compose-file/docker-compose.yaml index 2366f383a..c88613d12 100644 --- a/.ci/docker-compose-file/docker-compose.yaml +++ b/.ci/docker-compose-file/docker-compose.yaml @@ -3,7 +3,7 @@ version: '3.9' services: erlang: container_name: erlang - image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04} + image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04} env_file: - credentials.env - conf.env diff --git a/.github/actions/prepare-jmeter/action.yaml b/.github/actions/prepare-jmeter/action.yaml index 52a96fa6c..da24fbaa1 100644 --- a/.github/actions/prepare-jmeter/action.yaml +++ b/.github/actions/prepare-jmeter/action.yaml @@ -1,24 +1,8 @@ name: 'Prepare jmeter' -inputs: - version-emqx: - required: true - type: string - runs: using: composite steps: - - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 - with: - name: emqx-docker - path: /tmp - - name: load docker image - shell: bash - env: - PKG_VSN: ${{ inputs.version-emqx }} - run: | - EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g') - echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 with: repository: emqx/emqx-fvt diff --git 
a/.github/workflows/_pr_entrypoint.yaml b/.github/workflows/_pr_entrypoint.yaml index edf7b110a..7c9b70031 100644 --- a/.github/workflows/_pr_entrypoint.yaml +++ b/.github/workflows/_pr_entrypoint.yaml @@ -11,23 +11,42 @@ on: ref: required: false +defaults: + run: + shell: bash + env: IS_CI: "yes" jobs: + init: + runs-on: ubuntu-22.04 + outputs: + BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }} + OTP_VSN: ${{ steps.env.outputs.OTP_VSN }} + ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }} + BUILDER: ${{ steps.env.outputs.BUILDER }} + steps: + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + with: + ref: ${{ github.event.inputs.ref }} + - name: Set up environment + id: env + run: | + source ./env.sh + echo "BUILDER_VSN=$EMQX_BUILDER_VSN" | tee -a "$GITHUB_OUTPUT" + echo "OTP_VSN=$OTP_VSN" | tee -a "$GITHUB_OUTPUT" + echo "ELIXIR_VSN=$ELIXIR_VSN" | tee -a "$GITHUB_OUTPUT" + echo "BUILDER=$EMQX_BUILDER" | tee -a "$GITHUB_OUTPUT" + sanity-checks: runs-on: ubuntu-22.04 - container: "ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04" + needs: init + container: ${{ needs.init.outputs.BUILDER }} outputs: ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-host: ${{ steps.matrix.outputs.ct-host }} ct-docker: ${{ steps.matrix.outputs.ct-docker }} - version-emqx: ${{ steps.matrix.outputs.version-emqx }} - version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }} - builder: "ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04" - builder_vsn: "5.3-8" - otp_vsn: "26.2.5-2" - elixir_vsn: "1.15.7" permissions: contents: read @@ -92,35 +111,20 @@ jobs: - name: Generate CT Matrix id: matrix run: | - APPS="$(./scripts/find-apps.sh --ci)" - MATRIX="$(echo "${APPS}" | jq -c ' - [ - (.[] | select(.profile == "emqx") | . + { - builder: "5.3-8", - otp: "26.2.5-2", - elixir: "1.15.7" - }), - (.[] | select(.profile == "emqx-enterprise") | . 
+ { - builder: "5.3-8", - otp: ["26.2.5-2"][], - elixir: "1.15.7" - }) - ] - ')" + MATRIX="$(./scripts/find-apps.sh --ci)" echo "${MATRIX}" | jq - CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')" + CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')" CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT - echo "version-emqx=$(./pkg-vsn.sh emqx)" | tee -a $GITHUB_OUTPUT - echo "version-emqx-enterprise=$(./pkg-vsn.sh emqx-enterprise)" | tee -a $GITHUB_OUTPUT compile: runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral-xl","linux","x64"]') }} - container: ${{ needs.sanity-checks.outputs.builder }} + container: ${{ needs.init.outputs.BUILDER }} needs: + - init - sanity-checks strategy: matrix: @@ -156,53 +160,47 @@ jobs: run_emqx_app_tests: needs: + - init - sanity-checks - compile uses: ./.github/workflows/run_emqx_app_tests.yaml with: - builder: ${{ needs.sanity-checks.outputs.builder }} + builder: ${{ needs.init.outputs.BUILDER }} before_ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }} after_ref: ${{ github.sha }} run_test_cases: needs: + - init - sanity-checks - compile uses: ./.github/workflows/run_test_cases.yaml with: - builder: ${{ needs.sanity-checks.outputs.builder }} + builder: ${{ needs.init.outputs.BUILDER }} ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }} ct-host: ${{ needs.sanity-checks.outputs.ct-host }} ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }} static_checks: needs: + - init - sanity-checks - compile uses: ./.github/workflows/static_checks.yaml with: - builder: ${{ needs.sanity-checks.outputs.builder }} + builder: ${{ 
needs.init.outputs.BUILDER }} ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }} build_slim_packages: needs: - sanity-checks uses: ./.github/workflows/build_slim_packages.yaml - with: - builder: ${{ needs.sanity-checks.outputs.builder }} - builder_vsn: ${{ needs.sanity-checks.outputs.builder_vsn }} - otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }} - elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }} build_docker_for_test: needs: + - init - sanity-checks uses: ./.github/workflows/build_docker_for_test.yaml - with: - otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }} - elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }} - version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} - version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }} spellcheck: needs: @@ -212,41 +210,35 @@ jobs: run_conf_tests: needs: + - init - sanity-checks - compile uses: ./.github/workflows/run_conf_tests.yaml with: - builder: ${{ needs.sanity-checks.outputs.builder }} + builder: ${{ needs.init.outputs.BUILDER }} check_deps_integrity: needs: + - init - sanity-checks uses: ./.github/workflows/check_deps_integrity.yaml with: - builder: ${{ needs.sanity-checks.outputs.builder }} + builder: ${{ needs.init.outputs.BUILDER }} run_jmeter_tests: needs: - sanity-checks - build_docker_for_test uses: ./.github/workflows/run_jmeter_tests.yaml - with: - version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} run_docker_tests: needs: - sanity-checks - build_docker_for_test uses: ./.github/workflows/run_docker_tests.yaml - with: - version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} - version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }} run_helm_tests: needs: - sanity-checks - build_docker_for_test uses: ./.github/workflows/run_helm_tests.yaml - with: - version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} - version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }} diff --git 
a/.github/workflows/_push-entrypoint.yaml b/.github/workflows/_push-entrypoint.yaml index 935b7b6ac..8a1d2b699 100644 --- a/.github/workflows/_push-entrypoint.yaml +++ b/.github/workflows/_push-entrypoint.yaml @@ -8,7 +8,6 @@ on: push: tags: - 'v*' - - 'e*' branches: - 'master' - 'release-5[0-9]' @@ -18,13 +17,42 @@ on: ref: required: false +defaults: + run: + shell: bash + env: IS_CI: 'yes' jobs: + init: + runs-on: ubuntu-22.04 + outputs: + BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }} + OTP_VSN: ${{ steps.env.outputs.OTP_VSN }} + ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }} + BUILDER: ${{ steps.env.outputs.BUILDER }} + BUILD_FROM: ${{ steps.env.outputs.BUILD_FROM }} + RUN_FROM: ${{ steps.env.outputs.RUN_FROM }} + steps: + - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 + with: + ref: ${{ github.event.inputs.ref }} + - name: Set up environment + id: env + run: | + source env.sh + echo "BUILDER_VSN=$EMQX_BUILDER_VSN" >> "$GITHUB_OUTPUT" + echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT" + echo "ELIXIR_VSN=$ELIXIR_VSN" >> "$GITHUB_OUTPUT" + echo "BUILDER=$EMQX_BUILDER" >> "$GITHUB_OUTPUT" + echo "BUILD_FROM=$EMQX_DOCKER_BUILD_FROM" >> "$GITHUB_OUTPUT" + echo "RUN_FROM=$EMQX_DOCKER_RUN_FROM" >> "$GITHUB_OUTPUT" + prepare: runs-on: ubuntu-22.04 - container: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04' + needs: init + container: ${{ needs.init.outputs.BUILDER }} outputs: profile: ${{ steps.parse-git-ref.outputs.profile }} release: ${{ steps.parse-git-ref.outputs.release }} @@ -32,10 +60,6 @@ jobs: ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-host: ${{ steps.matrix.outputs.ct-host }} ct-docker: ${{ steps.matrix.outputs.ct-docker }} - builder: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04' - builder_vsn: '5.3-8' - otp_vsn: '26.2.5-2' - elixir_vsn: '1.15.7' permissions: contents: read @@ -62,23 +86,9 @@ jobs: - name: Build matrix id: matrix run: | - APPS="$(./scripts/find-apps.sh --ci)" - MATRIX="$(echo 
"${APPS}" | jq -c ' - [ - (.[] | select(.profile == "emqx") | . + { - builder: "5.3-8", - otp: "26.2.5-2", - elixir: "1.15.7" - }), - (.[] | select(.profile == "emqx-enterprise") | . + { - builder: "5.3-8", - otp: ["26.2.5-2"][], - elixir: "1.15.7" - }) - ] - ')" + MATRIX="$(./scripts/find-apps.sh --ci)" echo "${MATRIX}" | jq - CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')" + CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')" CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT @@ -88,46 +98,44 @@ jobs: build_packages: if: needs.prepare.outputs.release == 'true' needs: + - init - prepare uses: ./.github/workflows/build_packages.yaml with: profile: ${{ needs.prepare.outputs.profile }} publish: true - otp_vsn: ${{ needs.prepare.outputs.otp_vsn }} - elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }} - builder_vsn: ${{ needs.prepare.outputs.builder_vsn }} + otp_vsn: ${{ needs.init.outputs.OTP_VSN }} + elixir_vsn: ${{ needs.init.outputs.ELIXIR_VSN }} + builder_vsn: ${{ needs.init.outputs.BUILDER_VSN }} secrets: inherit build_and_push_docker_images: if: needs.prepare.outputs.release == 'true' needs: + - init - prepare uses: ./.github/workflows/build_and_push_docker_images.yaml with: profile: ${{ needs.prepare.outputs.profile }} publish: true latest: ${{ needs.prepare.outputs.latest }} - otp_vsn: ${{ needs.prepare.outputs.otp_vsn }} - elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }} - builder_vsn: ${{ needs.prepare.outputs.builder_vsn }} + build_from: ${{ needs.init.outputs.BUILD_FROM }} + run_from: ${{ needs.init.outputs.RUN_FROM }} secrets: inherit build_slim_packages: if: needs.prepare.outputs.release != 'true' needs: + - init - prepare uses: ./.github/workflows/build_slim_packages.yaml - with: - builder: ${{ needs.prepare.outputs.builder }} - builder_vsn: ${{ 
needs.prepare.outputs.builder_vsn }} - otp_vsn: ${{ needs.prepare.outputs.otp_vsn }} - elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }} compile: if: needs.prepare.outputs.release != 'true' runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} - container: ${{ needs.prepare.outputs.builder }} + container: ${{ needs.init.outputs.BUILDER }} needs: + - init - prepare strategy: matrix: @@ -163,22 +171,23 @@ jobs: run_emqx_app_tests: needs: - - prepare + - init - compile uses: ./.github/workflows/run_emqx_app_tests.yaml with: - builder: ${{ needs.prepare.outputs.builder }} + builder: ${{ needs.init.outputs.BUILDER }} before_ref: ${{ github.event.before }} after_ref: ${{ github.sha }} run_test_cases: if: needs.prepare.outputs.release != 'true' needs: + - init - prepare - compile uses: ./.github/workflows/run_test_cases.yaml with: - builder: ${{ needs.prepare.outputs.builder }} + builder: ${{ needs.init.outputs.BUILDER }} ct-matrix: ${{ needs.prepare.outputs.ct-matrix }} ct-host: ${{ needs.prepare.outputs.ct-host }} ct-docker: ${{ needs.prepare.outputs.ct-docker }} @@ -186,18 +195,20 @@ jobs: run_conf_tests: if: needs.prepare.outputs.release != 'true' needs: + - init - prepare - compile uses: ./.github/workflows/run_conf_tests.yaml with: - builder: ${{ needs.prepare.outputs.builder }} + builder: ${{ needs.init.outputs.BUILDER }} static_checks: if: needs.prepare.outputs.release != 'true' needs: + - init - prepare - compile uses: ./.github/workflows/static_checks.yaml with: - builder: ${{ needs.prepare.outputs.builder }} + builder: ${{ needs.init.outputs.BUILDER }} ct-matrix: ${{ needs.prepare.outputs.ct-matrix }} diff --git a/.github/workflows/build_and_push_docker_images.yaml b/.github/workflows/build_and_push_docker_images.yaml index acf3962f6..1ffd54144 100644 --- a/.github/workflows/build_and_push_docker_images.yaml +++ b/.github/workflows/build_and_push_docker_images.yaml @@ -16,13 +16,10 @@ 
on: publish: required: true type: boolean - otp_vsn: + build_from: required: true type: string - elixir_vsn: - required: true - type: string - builder_vsn: + run_from: required: true type: string secrets: @@ -50,18 +47,12 @@ on: required: false type: boolean default: false - otp_vsn: + build_from: required: false type: string - default: '26.2.5-2' - elixir_vsn: - required: false - type: string - default: '1.15.7' - builder_vsn: - required: false - type: string - default: '5.3-8' + default: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-debian12 + run_from: + default: public.ecr.aws/debian/debian:stable-20240612-slim permissions: contents: read @@ -69,7 +60,7 @@ permissions: jobs: build: runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.arch)) || 'ubuntu-22.04' }} - container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ inputs.otp_vsn }}-debian12" + container: ${{ inputs.build_from }} outputs: PKG_VSN: ${{ steps.build.outputs.PKG_VSN }} @@ -164,13 +155,9 @@ jobs: DOCKER_LATEST: ${{ inputs.latest }} DOCKER_PUSH: false DOCKER_BUILD_NOCACHE: true - DOCKER_LOAD: true - EMQX_RUNNER: 'public.ecr.aws/debian/debian:stable-20240612-slim' - EMQX_DOCKERFILE: 'deploy/docker/Dockerfile' + BUILD_FROM: ${{ inputs.build_from }} + RUN_FROM: ${{ inputs.run_from }} PKG_VSN: ${{ needs.build.outputs.PKG_VSN }} - EMQX_BUILDER_VERSION: ${{ inputs.builder_vsn }} - OTP_VSN: ${{ inputs.otp_vsn }} - ELIXIR_VSN: ${{ inputs.elixir_vsn }} EMQX_SOURCE_TYPE: tgz run: | ./build ${PROFILE} docker @@ -184,7 +171,7 @@ jobs: timeout-minutes: 1 run: | for tag in $(cat .emqx_docker_image_tags); do - CID=$(docker run -d -P $tag) + CID=$(docker run -d -p 18083:18083 $tag) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID) ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT docker rm -f $CID @@ -214,12 +201,9 @@ jobs: DOCKER_BUILD_NOCACHE: 
false DOCKER_PLATFORMS: linux/amd64,linux/arm64 DOCKER_LOAD: false - EMQX_RUNNER: 'public.ecr.aws/debian/debian:stable-20240612-slim' - EMQX_DOCKERFILE: 'deploy/docker/Dockerfile' + BUILD_FROM: ${{ inputs.build_from }} + RUN_FROM: ${{ inputs.run_from }} PKG_VSN: ${{ needs.build.outputs.PKG_VSN }} - EMQX_BUILDER_VERSION: ${{ inputs.builder_vsn }} - OTP_VSN: ${{ inputs.otp_vsn }} - ELIXIR_VSN: ${{ inputs.elixir_vsn }} EMQX_SOURCE_TYPE: tgz run: | ./build ${PROFILE} docker diff --git a/.github/workflows/build_docker_for_test.yaml b/.github/workflows/build_docker_for_test.yaml index 16483b97c..774e2861f 100644 --- a/.github/workflows/build_docker_for_test.yaml +++ b/.github/workflows/build_docker_for_test.yaml @@ -6,19 +6,6 @@ concurrency: on: workflow_call: - inputs: - otp_vsn: - required: true - type: string - elixir_vsn: - required: true - type: string - version-emqx: - required: true - type: string - version-emqx-enterprise: - required: true - type: string permissions: contents: read @@ -28,9 +15,6 @@ jobs: runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} env: EMQX_NAME: ${{ matrix.profile }} - PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} - OTP_VSN: ${{ inputs.otp_vsn }} - ELIXIR_VSN: ${{ inputs.elixir_vsn }} strategy: fail-fast: false @@ -43,6 +27,12 @@ jobs: steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - name: Set up environment + id: env + run: | + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME") + echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV" - name: build and export to Docker id: build run: | @@ -52,9 +42,13 @@ jobs: run: | CID=$(docker run -d --rm -P $_EMQX_DOCKER_IMAGE_TAG) HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID) - 
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT + ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT || { + docker logs $CID + exit 1 + } docker stop $CID - name: export docker image + if: always() run: | docker save $_EMQX_DOCKER_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 diff --git a/.github/workflows/build_packages.yaml b/.github/workflows/build_packages.yaml index 99d3ad063..9cfa60dcf 100644 --- a/.github/workflows/build_packages.yaml +++ b/.github/workflows/build_packages.yaml @@ -55,7 +55,7 @@ on: otp_vsn: required: false type: string - default: '26.2.5-2' + default: '26.2.5-3' elixir_vsn: required: false type: string @@ -63,7 +63,7 @@ on: builder_vsn: required: false type: string - default: '5.3-8' + default: '5.3-9' permissions: contents: read diff --git a/.github/workflows/build_packages_cron.yaml b/.github/workflows/build_packages_cron.yaml index 42198d081..d081eea76 100644 --- a/.github/workflows/build_packages_cron.yaml +++ b/.github/workflows/build_packages_cron.yaml @@ -16,19 +16,22 @@ jobs: linux: if: github.repository_owner == 'emqx' runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }} - container: - image: "ghcr.io/emqx/emqx-builder/${{ matrix.profile[2] }}-${{ matrix.os }}" strategy: fail-fast: false matrix: profile: - - ['emqx', 'master', '5.3-8:1.15.7-26.2.5-2'] - - ['emqx', 'release-57', '5.3-8:1.15.7-26.2.5-2'] + - ['emqx', 'master'] + - ['emqx', 'release-57'] os: - ubuntu22.04 - amzn2023 + env: + PROFILE: ${{ matrix.profile[0] }} + OS: ${{ matrix.os }} + BUILDER_SYSTEM: force_docker + defaults: run: shell: bash @@ -38,33 +41,18 @@ jobs: with: ref: ${{ matrix.profile[1] }} fetch-depth: 0 - - - name: fix workdir + - name: Set up environment + id: env run: | - set -eu - git config --global --add safe.directory "$GITHUB_WORKSPACE" - # Align path for CMake caches - if [ ! 
"$PWD" = "/emqx" ]; then - ln -s $PWD /emqx - cd /emqx - fi - echo "pwd is $PWD" - - - name: build emqx packages - env: - PROFILE: ${{ matrix.profile[0] }} - ACLOCAL_PATH: "/usr/share/aclocal:/usr/local/share/aclocal" + source env.sh + BUILDER="ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VSN}:${ELIXIR_VSN}-${OTP_VSN}-${OS}" + echo "BUILDER=$BUILDER" >> "$GITHUB_ENV" + - name: build tgz run: | - set -eu - make "${PROFILE}-tgz" - make "${PROFILE}-pkg" - - name: test emqx packages - env: - PROFILE: ${{ matrix.profile[0] }} + ./scripts/buildx.sh --profile "$PROFILE" --pkgtype tgz --builder "$BUILDER" + - name: build pkg run: | - set -eu - ./scripts/pkg-tests.sh "${PROFILE}-tgz" - ./scripts/pkg-tests.sh "${PROFILE}-pkg" + ./scripts/buildx.sh --profile "$PROFILE" --pkgtype pkg --builder "$BUILDER" - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: success() with: @@ -91,20 +79,23 @@ jobs: - emqx branch: - master - otp: - - 26.2.5-2 os: - - macos-12-arm64 + - macos-14-arm64 steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: ref: ${{ matrix.branch }} fetch-depth: 0 + - name: Set up environment + id: env + run: | + source env.sh + echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT" - uses: ./.github/actions/package-macos with: profile: ${{ matrix.profile }} - otp: ${{ matrix.otp }} + otp: ${{ steps.env.outputs.OTP_VSN }} os: ${{ matrix.os }} apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }} apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }} diff --git a/.github/workflows/build_slim_packages.yaml b/.github/workflows/build_slim_packages.yaml index 9dd9bcd2e..d36fc377a 100644 --- a/.github/workflows/build_slim_packages.yaml +++ b/.github/workflows/build_slim_packages.yaml @@ -6,97 +6,50 @@ concurrency: on: workflow_call: - inputs: - builder: - required: true - type: string - builder_vsn: - required: true - type: string - otp_vsn: - required: true - type: string - elixir_vsn: - required: true - type: 
string - workflow_dispatch: inputs: ref: required: false - builder: - required: false - type: string - default: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04' - builder_vsn: - required: false - type: string - default: '5.3-8' - otp_vsn: - required: false - type: string - default: '26.2.5-2' - elixir_vsn: - required: false - type: string - default: '1.15.7' permissions: contents: read jobs: linux: - runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[4])) || 'ubuntu-22.04' }} + runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[2])) || 'ubuntu-22.04' }} env: - EMQX_NAME: ${{ matrix.profile[0] }} + PROFILE: ${{ matrix.profile[0] }} + ELIXIR: ${{ matrix.profile[1] == 'elixir' && 'yes' || 'no' }} + ARCH: ${{ matrix.profile[2] == 'x64' && 'amd64' || 'arm64' }} + BUILDER_SYSTEM: force_docker strategy: fail-fast: false matrix: profile: - - ["emqx", "26.2.5-2", "ubuntu22.04", "elixir", "x64"] - - ["emqx", "26.2.5-2", "ubuntu22.04", "elixir", "arm64"] - - ["emqx-enterprise", "26.2.5-2", "ubuntu22.04", "erlang", "x64"] - - container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" + - ["emqx", "elixir", "x64"] + - ["emqx", "elixir", "arm64"] + - ["emqx-enterprise", "erlang", "x64"] steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: fetch-depth: 0 - - name: Work around https://github.com/actions/checkout/issues/766 + - name: build tgz run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV - - name: build and test tgz package - if: matrix.profile[3] == 'erlang' + ./scripts/buildx.sh --profile $PROFILE --pkgtype tgz --elixir $ELIXIR --arch $ARCH + - name: build pkg run: | - make ${EMQX_NAME}-tgz - ./scripts/pkg-tests.sh 
${EMQX_NAME}-tgz - - name: build and test deb/rpm packages - if: matrix.profile[3] == 'erlang' - run: | - make ${EMQX_NAME}-pkg - ./scripts/pkg-tests.sh ${EMQX_NAME}-pkg - - name: build and test tgz package (Elixir) - if: matrix.profile[3] == 'elixir' - run: | - make ${EMQX_NAME}-elixir-tgz - ./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-tgz - - name: build and test deb/rpm packages (Elixir) - if: matrix.profile[3] == 'elixir' - run: | - make ${EMQX_NAME}-elixir-pkg - ./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-pkg + ./scripts/buildx.sh --profile $PROFILE --pkgtype pkg --elixir $ELIXIR --arch $ARCH - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: - name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}-${{ matrix.profile[3] }}-${{ matrix.profile[4] }}" + name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" path: _packages/${{ matrix.profile[0] }}/* retention-days: 7 compression-level: 0 - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: - name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}-${{ matrix.profile[3] }}-${{ matrix.profile[4] }}" + name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" path: | scripts/spellcheck _build/docgen/${{ matrix.profile[0] }}/schema-en.json @@ -108,10 +61,8 @@ jobs: matrix: profile: - emqx - otp: - - ${{ inputs.otp_vsn }} os: - - macos-14 + - macos-14-arm64 runs-on: ${{ matrix.os }} env: @@ -119,10 +70,15 @@ jobs: steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - name: Set up environment + id: env + run: | + source env.sh + echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT" - uses: ./.github/actions/package-macos with: profile: ${{ matrix.profile }} - otp: ${{ matrix.otp }} + otp: ${{ steps.env.outputs.OTP_VSN }} os: ${{ matrix.os }} apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }} 
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }} diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml index 707f03401..213fa03ed 100644 --- a/.github/workflows/codeql.yaml +++ b/.github/workflows/codeql.yaml @@ -17,8 +17,6 @@ jobs: permissions: actions: read security-events: write - container: - image: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04 strategy: fail-fast: false @@ -36,11 +34,6 @@ jobs: with: ref: ${{ matrix.branch }} - - name: Ensure git safe dir - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - make ensure-rebar3 - - name: Initialize CodeQL uses: github/codeql-action/init@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5 with: @@ -51,14 +44,7 @@ jobs: env: PROFILE: emqx-enterprise run: | - make emqx-enterprise-compile - - - name: Fetch deps - if: matrix.language == 'python' - env: - PROFILE: emqx-enterprise - run: | - make deps-emqx-enterprise + ./scripts/buildx.sh --profile emqx-enterprise --pkgtype rel - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5 diff --git a/.github/workflows/performance_test.yaml b/.github/workflows/performance_test.yaml index 3adb39cea..1c247a5aa 100644 --- a/.github/workflows/performance_test.yaml +++ b/.github/workflows/performance_test.yaml @@ -26,7 +26,7 @@ jobs: prepare: runs-on: ubuntu-latest if: github.repository_owner == 'emqx' - container: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu20.04 + container: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu20.04 outputs: BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }} PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }} diff --git a/.github/workflows/run_docker_tests.yaml b/.github/workflows/run_docker_tests.yaml index b559464b9..1dc4e9e33 100644 --- a/.github/workflows/run_docker_tests.yaml +++ b/.github/workflows/run_docker_tests.yaml @@ -6,13 +6,6 @@ concurrency: on: workflow_call: - inputs: - 
version-emqx: required: true type: string version-emqx-enterprise: required: true type: string permissions: contents: read @@ -32,11 +25,16 @@ jobs: env: EMQX_NAME: ${{ matrix.profile[0] }} - PKG_VSN: ${{ matrix.profile[0] == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }} steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - name: Set up environment + id: env + run: | + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME") + echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV" - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 with: name: ${{ env.EMQX_NAME }}-docker @@ -52,9 +50,11 @@ jobs: docker compose up --abort-on-container-exit --exit-code-from selenium - name: test two nodes cluster with proto_dist=inet_tls in docker run: | - ./scripts/test/start-two-nodes-in-docker.sh -P $_EMQX_DOCKER_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG + ## -d 1 means only put node 1 (latest version) behind haproxy + ./scripts/test/start-two-nodes-in-docker.sh -d 1 -P $_EMQX_DOCKER_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy) ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT + ## -c means 'cleanup' ./scripts/test/start-two-nodes-in-docker.sh -c - name: cleanup if: always() @@ -69,7 +69,6 @@ jobs: shell: bash env: EMQX_NAME: ${{ matrix.profile }} - PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} _EMQX_TEST_DB_BACKEND: ${{ matrix.cluster_db_backend }} strategy: @@ -84,6 +83,12 @@ jobs: - rlog steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + - name: Set up environment + id: env + run: | + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) 
"$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME") + echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV" - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 with: name: ${{ env.EMQX_NAME }}-docker diff --git a/.github/workflows/run_helm_tests.yaml b/.github/workflows/run_helm_tests.yaml index 617b01bb1..14e6404f1 100644 --- a/.github/workflows/run_helm_tests.yaml +++ b/.github/workflows/run_helm_tests.yaml @@ -6,13 +6,6 @@ concurrency: on: workflow_call: - inputs: - version-emqx: - required: true - type: string - version-emqx-enterprise: - required: true - type: string permissions: contents: read @@ -25,7 +18,6 @@ jobs: shell: bash env: EMQX_NAME: ${{ matrix.profile }} - EMQX_TAG: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} REPOSITORY: "emqx/${{ matrix.profile }}" strategy: @@ -45,6 +37,13 @@ jobs: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 with: path: source + - name: Set up environment + id: env + run: | + cd source + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME") + echo "EMQX_TAG=$PKG_VSN" >> "$GITHUB_ENV" - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 with: name: "${{ env.EMQX_NAME }}-docker" diff --git a/.github/workflows/run_jmeter_tests.yaml b/.github/workflows/run_jmeter_tests.yaml index 0f62715e0..ef9923243 100644 --- a/.github/workflows/run_jmeter_tests.yaml +++ b/.github/workflows/run_jmeter_tests.yaml @@ -2,10 +2,6 @@ name: JMeter integration tests on: workflow_call: - inputs: - version-emqx: - required: true - type: string permissions: contents: read @@ -56,9 +52,22 @@ jobs: needs: jmeter_artifact steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - - uses: ./.github/actions/prepare-jmeter + - name: Set up environment + id: env + run: | + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u 
$(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx) + echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV" + - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 with: - version-emqx: ${{ inputs.version-emqx }} + name: emqx-docker + path: /tmp + - name: load docker image + shell: bash + run: | + EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g') + echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV + - uses: ./.github/actions/prepare-jmeter - name: docker compose up timeout-minutes: 5 run: | @@ -112,9 +121,22 @@ jobs: needs: jmeter_artifact steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - - uses: ./.github/actions/prepare-jmeter + - name: Set up environment + id: env + run: | + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx) + echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV" + - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 with: - version-emqx: ${{ inputs.version-emqx }} + name: emqx-docker + path: /tmp + - name: load docker image + shell: bash + run: | + EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g') + echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV + - uses: ./.github/actions/prepare-jmeter - name: docker compose up timeout-minutes: 5 env: @@ -176,9 +198,22 @@ jobs: needs: jmeter_artifact steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - - uses: ./.github/actions/prepare-jmeter + - name: Set up environment + id: env + run: | + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx) + echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV" + - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 with: - version-emqx: ${{ inputs.version-emqx }} + name: emqx-docker + path: /tmp + 
- name: load docker image + shell: bash + run: | + EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g') + echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV + - uses: ./.github/actions/prepare-jmeter - name: docker compose up timeout-minutes: 5 env: @@ -232,9 +267,22 @@ jobs: needs: jmeter_artifact steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - - uses: ./.github/actions/prepare-jmeter + - name: Set up environment + id: env + run: | + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx) + echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV" + - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 with: - version-emqx: ${{ inputs.version-emqx }} + name: emqx-docker + path: /tmp + - name: load docker image + shell: bash + run: | + EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g') + echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV + - uses: ./.github/actions/prepare-jmeter - name: docker compose up timeout-minutes: 5 run: | @@ -285,9 +333,22 @@ jobs: needs: jmeter_artifact steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - - uses: ./.github/actions/prepare-jmeter + - name: Set up environment + id: env + run: | + source env.sh + PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx) + echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV" + - uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4 with: - version-emqx: ${{ inputs.version-emqx }} + name: emqx-docker + path: /tmp + - name: load docker image + shell: bash + run: | + EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g') + echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV + - uses: 
./.github/actions/prepare-jmeter - name: docker compose up timeout-minutes: 5 run: | diff --git a/.github/workflows/run_test_cases.yaml b/.github/workflows/run_test_cases.yaml index d884af81f..ed48877b6 100644 --- a/.github/workflows/run_test_cases.yaml +++ b/.github/workflows/run_test_cases.yaml @@ -35,12 +35,12 @@ jobs: defaults: run: shell: bash - container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" + container: ${{ inputs.builder }} env: PROFILE: ${{ matrix.profile }} ENABLE_COVER_COMPILE: 1 - CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }} + CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }} permissions: contents: read @@ -100,7 +100,7 @@ jobs: # produces $PROFILE---sg.coverdata - name: run common tests env: - DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" + DOCKER_CT_RUNNER_IMAGE: ${{ inputs.builder }} MONGO_TAG: "5" MYSQL_TAG: "8" PGSQL_TAG: "13" @@ -111,7 +111,7 @@ jobs: MINIO_TAG: "RELEASE.2023-03-20T20-16-18Z" SUITEGROUP: ${{ matrix.suitegroup }} ENABLE_COVER_COMPILE: 1 - CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }} + CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }} --keep-up @@ -136,7 +136,7 @@ jobs: - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: failure() with: - name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }} + name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }} path: logs.tar.gz compression-level: 0 retention-days: 7 @@ -149,7 +149,7 @@ jobs: matrix: include: ${{ fromJson(inputs.ct-host) }} - container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" + container: ${{ 
inputs.builder }} defaults: run: shell: bash @@ -161,7 +161,7 @@ jobs: PROFILE: ${{ matrix.profile }} SUITEGROUP: ${{ matrix.suitegroup }} ENABLE_COVER_COMPILE: 1 - CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }} + CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }} steps: - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 @@ -196,7 +196,7 @@ jobs: - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: failure() with: - name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }} + name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }} path: logs.tar.gz compression-level: 0 retention-days: 7 diff --git a/.github/workflows/static_checks.yaml b/.github/workflows/static_checks.yaml index 6e3e4cb78..e7f1ffb4e 100644 --- a/.github/workflows/static_checks.yaml +++ b/.github/workflows/static_checks.yaml @@ -28,7 +28,7 @@ jobs: fail-fast: false matrix: include: ${{ fromJson(inputs.ct-matrix) }} - container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" + container: "${{ inputs.builder }}" steps: - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 with: @@ -39,10 +39,10 @@ jobs: git config --global --add safe.directory "$GITHUB_WORKSPACE" - uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2 with: - path: "emqx_dialyzer_${{ matrix.otp }}_plt" - key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }} + path: "emqx_dialyzer_${{ matrix.profile }}_plt" + key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }} restore-keys: | - rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}- + rebar3-dialyzer-plt-${{ matrix.profile }}- - run: cat .env | tee -a $GITHUB_ENV - name: run static checks 
run: make static_checks diff --git a/.tool-versions b/.tool-versions index 688b1e2da..26230041c 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,2 +1,2 @@ -erlang 26.2.5-2 +erlang 26.2.5-3 elixir 1.15.7-otp-26 diff --git a/Makefile b/Makefile index c372e27d7..ae92feced 100644 --- a/Makefile +++ b/Makefile @@ -6,22 +6,15 @@ endif REBAR = $(CURDIR)/rebar3 BUILD = $(CURDIR)/build SCRIPTS = $(CURDIR)/scripts -export EMQX_RELUP ?= true -export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12 -export EMQX_DEFAULT_RUNNER = public.ecr.aws/debian/debian:stable-20240612-slim -export EMQX_REL_FORM ?= tgz -export QUICER_DOWNLOAD_FROM_RELEASE = 1 -ifeq ($(OS),Windows_NT) - export REBAR_COLOR=none - FIND=/usr/bin/find -else - FIND=find -endif +include env.sh # Dashboard version # from https://github.com/emqx/emqx-dashboard5 -export EMQX_DASHBOARD_VERSION ?= v1.9.1-beta.1 -export EMQX_EE_DASHBOARD_VERSION ?= e1.7.1-beta.1 +export EMQX_DASHBOARD_VERSION ?= v1.9.1 +export EMQX_EE_DASHBOARD_VERSION ?= e1.7.1 + +export EMQX_RELUP ?= true +export EMQX_REL_FORM ?= tgz -include default-profile.mk PROFILE ?= emqx @@ -196,8 +189,8 @@ $(PROFILES:%=clean-%): @if [ -d _build/$(@:clean-%=%) ]; then \ rm -f rebar.lock; \ rm -rf _build/$(@:clean-%=%)/rel; \ - $(FIND) _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \ - $(FIND) _build/$(@:clean-%=%) -type l -delete; \ + find _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \ + find _build/$(@:clean-%=%) -type l -delete; \ fi .PHONY: clean-all @@ -317,12 +310,12 @@ $(foreach tt,$(ALL_ELIXIR_TGZS),$(eval $(call gen-elixir-tgz-target,$(tt)))) .PHONY: fmt fmt: $(REBAR) - @$(FIND) . 
\( -name '*.app.src' -o \ - -name '*.erl' -o \ - -name '*.hrl' -o \ - -name 'rebar.config' -o \ - -name '*.eterm' -o \ - -name '*.escript' \) \ + @find . \( -name '*.app.src' -o \ + -name '*.erl' -o \ + -name '*.hrl' -o \ + -name 'rebar.config' -o \ + -name '*.eterm' -o \ + -name '*.escript' \) \ -not -path '*/_build/*' \ -not -path '*/deps/*' \ -not -path '*/_checkouts/*' \ diff --git a/apps/emqx/include/emqx_release.hrl b/apps/emqx/include/emqx_release.hrl index 46fb16ec9..c8e84ea06 100644 --- a/apps/emqx/include/emqx_release.hrl +++ b/apps/emqx/include/emqx_release.hrl @@ -32,7 +32,7 @@ %% `apps/emqx/src/bpapi/README.md' %% Opensource edition --define(EMQX_RELEASE_CE, "5.7.1-alpha.1"). +-define(EMQX_RELEASE_CE, "5.7.1"). %% Enterprise edition --define(EMQX_RELEASE_EE, "5.7.1-alpha.1"). +-define(EMQX_RELEASE_EE, "5.7.1"). diff --git a/apps/emqx/rebar.config b/apps/emqx/rebar.config index df9f69f87..e71b3b9f4 100644 --- a/apps/emqx/rebar.config +++ b/apps/emqx/rebar.config @@ -29,7 +29,7 @@ {gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}}, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}}, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.11.2"}}}, - {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.4"}}}, + {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.5"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}}, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.42.2"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}}, diff --git a/apps/emqx/src/emqx.app.src b/apps/emqx/src/emqx.app.src index 3d26c63ed..20b1445c9 100644 --- a/apps/emqx/src/emqx.app.src +++ b/apps/emqx/src/emqx.app.src @@ -2,7 +2,7 @@ {application, emqx, [ {id, "emqx"}, {description, "EMQX Core"}, - {vsn, "5.3.1"}, + {vsn, "5.3.3"}, {modules, []}, {registered, []}, {applications, [ diff --git a/apps/emqx/src/emqx_channel.erl b/apps/emqx/src/emqx_channel.erl index 
3177c1c11..958e8acaf 100644 --- a/apps/emqx/src/emqx_channel.erl +++ b/apps/emqx/src/emqx_channel.erl @@ -545,8 +545,10 @@ handle_in( {error, ReasonCode} -> handle_out(disconnect, ReasonCode, Channel) end; -handle_in(?PACKET(?PINGREQ), Channel) -> - {ok, ?PACKET(?PINGRESP), Channel}; +handle_in(?PACKET(?PINGREQ), Channel = #channel{keepalive = Keepalive}) -> + {ok, NKeepalive} = emqx_keepalive:check(Keepalive), + NChannel = Channel#channel{keepalive = NKeepalive}, + {ok, ?PACKET(?PINGRESP), reset_timer(keepalive, NChannel)}; handle_in( ?DISCONNECT_PACKET(ReasonCode, Properties), Channel = #channel{conninfo = ConnInfo} @@ -1230,11 +1232,12 @@ handle_call( {keepalive, Interval}, Channel = #channel{ keepalive = KeepAlive, - conninfo = ConnInfo + conninfo = ConnInfo, + clientinfo = #{zone := Zone} } ) -> ClientId = info(clientid, Channel), - NKeepalive = emqx_keepalive:update(timer:seconds(Interval), KeepAlive), + NKeepalive = emqx_keepalive:update(Zone, Interval, KeepAlive), NConnInfo = maps:put(keepalive, Interval, ConnInfo), NChannel = Channel#channel{keepalive = NKeepalive, conninfo = NConnInfo}, SockInfo = maps:get(sockinfo, emqx_cm:get_chan_info(ClientId), #{}), @@ -1337,22 +1340,22 @@ die_if_test_compiled() -> | {shutdown, Reason :: term(), channel()}. handle_timeout( _TRef, - {keepalive, _StatVal}, + keepalive, Channel = #channel{keepalive = undefined} ) -> {ok, Channel}; handle_timeout( _TRef, - {keepalive, _StatVal}, + keepalive, Channel = #channel{conn_state = disconnected} ) -> {ok, Channel}; handle_timeout( _TRef, - {keepalive, StatVal}, + keepalive, Channel = #channel{keepalive = Keepalive} ) -> - case emqx_keepalive:check(StatVal, Keepalive) of + case emqx_keepalive:check(Keepalive) of {ok, NKeepalive} -> NChannel = Channel#channel{keepalive = NKeepalive}, {ok, reset_timer(keepalive, NChannel)}; @@ -1463,10 +1466,16 @@ reset_timer(Name, Time, Channel) -> ensure_timer(Name, Time, clean_timer(Name, Channel)). 
clean_timer(Name, Channel = #channel{timers = Timers}) -> - Channel#channel{timers = maps:remove(Name, Timers)}. + case maps:take(Name, Timers) of + error -> + Channel; + {TRef, NTimers} -> + ok = emqx_utils:cancel_timer(TRef), + Channel#channel{timers = NTimers} + end. interval(keepalive, #channel{keepalive = KeepAlive}) -> - emqx_keepalive:info(interval, KeepAlive); + emqx_keepalive:info(check_interval, KeepAlive); interval(retry_delivery, #channel{session = Session}) -> emqx_session:info(retry_interval, Session); interval(expire_awaiting_rel, #channel{session = Session}) -> @@ -2324,9 +2333,7 @@ ensure_keepalive_timer(0, Channel) -> ensure_keepalive_timer(disabled, Channel) -> Channel; ensure_keepalive_timer(Interval, Channel = #channel{clientinfo = #{zone := Zone}}) -> - Multiplier = get_mqtt_conf(Zone, keepalive_multiplier), - RecvCnt = emqx_pd:get_counter(recv_pkt), - Keepalive = emqx_keepalive:init(RecvCnt, round(timer:seconds(Interval) * Multiplier)), + Keepalive = emqx_keepalive:init(Zone, Interval), ensure_timer(keepalive, Channel#channel{keepalive = Keepalive}). clear_keepalive(Channel = #channel{timers = Timers}) -> diff --git a/apps/emqx/src/emqx_connection.erl b/apps/emqx/src/emqx_connection.erl index a570303ed..c23c8cc59 100644 --- a/apps/emqx/src/emqx_connection.erl +++ b/apps/emqx/src/emqx_connection.erl @@ -727,9 +727,7 @@ handle_timeout( disconnected -> {ok, State}; _ -> - %% recv_pkt: valid MQTT message - RecvCnt = emqx_pd:get_counter(recv_pkt), - handle_timeout(TRef, {keepalive, RecvCnt}, State) + with_channel(handle_timeout, [TRef, keepalive], State) end; handle_timeout(TRef, Msg, State) -> with_channel(handle_timeout, [TRef, Msg], State). diff --git a/apps/emqx/src/emqx_frame.erl b/apps/emqx/src/emqx_frame.erl index 661fc7861..41fc00f4c 100644 --- a/apps/emqx/src/emqx_frame.erl +++ b/apps/emqx/src/emqx_frame.erl @@ -285,17 +285,24 @@ parse_connect(FrameBin, StrictMode) -> end, parse_connect2(ProtoName, Rest, StrictMode). 
-% Note: return malformed if reserved flag is not 0. parse_connect2( ProtoName, - <>, + <>, StrictMode ) -> - case Reserved of - 0 -> ok; - 1 -> ?PARSE_ERR(reserved_connect_flag) - end, + _ = validate_connect_reserved(Reserved), + _ = validate_connect_will( + WillFlag = bool(WillFlagB), + WillRetain = bool(WillRetainB), + WillQoS + ), + _ = validate_connect_password_flag( + StrictMode, + ProtoVer, + UsernameFlag = bool(UsernameFlagB), + PasswordFlag = bool(PasswordFlagB) + ), {Properties, Rest3} = parse_properties(Rest2, ProtoVer, StrictMode), {ClientId, Rest4} = parse_utf8_string_with_cause(Rest3, StrictMode, invalid_clientid), ConnPacket = #mqtt_packet_connect{ @@ -305,9 +312,9 @@ parse_connect2( %% Invented by mosquitto, named 'try_private': https://mosquitto.org/man/mosquitto-conf-5.html is_bridge = (BridgeTag =:= 8), clean_start = bool(CleanStart), - will_flag = bool(WillFlag), + will_flag = WillFlag, will_qos = WillQoS, - will_retain = bool(WillRetain), + will_retain = WillRetain, keepalive = KeepAlive, properties = Properties, clientid = ClientId @@ -318,14 +325,14 @@ parse_connect2( fun(Bin) -> parse_utf8_string_with_cause(Bin, StrictMode, invalid_username) end, - bool(UsernameFlag) + UsernameFlag ), {Password, Rest7} = parse_optional( Rest6, fun(Bin) -> parse_utf8_string_with_cause(Bin, StrictMode, invalid_password) end, - bool(PasswordFlag) + PasswordFlag ), case Rest7 of <<>> -> @@ -1150,6 +1157,32 @@ validate_subqos([3 | _]) -> ?PARSE_ERR(bad_subqos); validate_subqos([_ | T]) -> validate_subqos(T); validate_subqos([]) -> ok. +%% MQTT-v3.1.1-[MQTT-3.1.2-3], MQTT-v5.0-[MQTT-3.1.2-3] +validate_connect_reserved(0) -> ok; +validate_connect_reserved(1) -> ?PARSE_ERR(reserved_connect_flag). 
+ +%% MQTT-v3.1.1-[MQTT-3.1.2-13], MQTT-v5.0-[MQTT-3.1.2-11] +validate_connect_will(false, _, WillQos) when WillQos > 0 -> ?PARSE_ERR(invalid_will_qos); +%% MQTT-v3.1.1-[MQTT-3.1.2-14], MQTT-v5.0-[MQTT-3.1.2-12] +validate_connect_will(true, _, WillQoS) when WillQoS > 2 -> ?PARSE_ERR(invalid_will_qos); +%% MQTT-v3.1.1-[MQTT-3.1.2-15], MQTT-v5.0-[MQTT-3.1.2-13] +validate_connect_will(false, WillRetain, _) when WillRetain -> ?PARSE_ERR(invalid_will_retain); +validate_connect_will(_, _, _) -> ok. + +%% MQTT-v3.1 +%% Username flag and password flag are not strongly related +%% https://public.dhe.ibm.com/software/dw/webservices/ws-mqtt/mqtt-v3r1.html#connect +validate_connect_password_flag(true, ?MQTT_PROTO_V3, _, _) -> + ok; +%% MQTT-v3.1.1-[MQTT-3.1.2-22] +validate_connect_password_flag(true, ?MQTT_PROTO_V4, UsernameFlag, PasswordFlag) -> + %% BUG-FOR-BUG compatible, only check when `strict-mode` + UsernameFlag orelse PasswordFlag andalso ?PARSE_ERR(invalid_password_flag); +validate_connect_password_flag(true, ?MQTT_PROTO_V5, _, _) -> + ok; +validate_connect_password_flag(_, _, _, _) -> + ok. + bool(0) -> false; bool(1) -> true. diff --git a/apps/emqx/src/emqx_keepalive.erl b/apps/emqx/src/emqx_keepalive.erl index 8ed685db2..785893d2d 100644 --- a/apps/emqx/src/emqx_keepalive.erl +++ b/apps/emqx/src/emqx_keepalive.erl @@ -19,10 +19,12 @@ -export([ init/1, init/2, + init/3, info/1, info/2, + check/1, check/2, - update/2 + update/3 ]). -elvis([{elvis_style, no_if_expression, disable}]). @@ -30,8 +32,12 @@ -export_type([keepalive/0]). -record(keepalive, { - interval :: pos_integer(), - statval :: non_neg_integer() + check_interval :: pos_integer(), + %% the received packets since last keepalive check + statval :: non_neg_integer(), + %% The number of idle intervals allowed before disconnecting the client. + idle_milliseconds = 0 :: non_neg_integer(), + max_idle_millisecond :: pos_integer() }). -opaque keepalive() :: #keepalive{}. @@ -39,7 +45,11 @@ %% @doc Init keepalive. 
-spec init(Interval :: non_neg_integer()) -> keepalive(). -init(Interval) -> init(0, Interval). +init(Interval) -> init(default, 0, Interval). + +init(Zone, Interval) -> + RecvCnt = emqx_pd:get_counter(recv_pkt), + init(Zone, RecvCnt, Interval). %% from mqtt-v3.1.1 specific %% A Keep Alive value of zero (0) has the effect of turning off the keep alive mechanism. @@ -53,42 +63,88 @@ init(Interval) -> init(0, Interval). %% typically this is a few minutes. %% The maximum value is (65535s) 18 hours 12 minutes and 15 seconds. %% @doc Init keepalive. --spec init(StatVal :: non_neg_integer(), Interval :: non_neg_integer()) -> keepalive() | undefined. -init(StatVal, Interval) when Interval > 0 andalso Interval =< ?MAX_INTERVAL -> - #keepalive{interval = Interval, statval = StatVal}; -init(_, 0) -> +-spec init( + Zone :: atom(), + StatVal :: non_neg_integer(), + Second :: non_neg_integer() +) -> keepalive() | undefined. +init(Zone, StatVal, Second) when Second > 0 andalso Second =< ?MAX_INTERVAL -> + #{keepalive_multiplier := Mul, keepalive_check_interval := CheckInterval} = + emqx_config:get_zone_conf(Zone, [mqtt]), + MilliSeconds = timer:seconds(Second), + Interval = emqx_utils:clamp(CheckInterval, 1000, max(MilliSeconds div 2, 1000)), + MaxIdleMs = ceil(MilliSeconds * Mul), + #keepalive{ + check_interval = Interval, + statval = StatVal, + idle_milliseconds = 0, + max_idle_millisecond = MaxIdleMs + }; +init(_Zone, _, 0) -> undefined; -init(StatVal, Interval) when Interval > ?MAX_INTERVAL -> init(StatVal, ?MAX_INTERVAL). +init(Zone, StatVal, Interval) when Interval > ?MAX_INTERVAL -> init(Zone, StatVal, ?MAX_INTERVAL). %% @doc Get Info of the keepalive. -spec info(keepalive()) -> emqx_types:infos(). 
info(#keepalive{ - interval = Interval, - statval = StatVal + check_interval = Interval, + statval = StatVal, + idle_milliseconds = IdleIntervals, + max_idle_millisecond = MaxMs }) -> #{ - interval => Interval, - statval => StatVal + check_interval => Interval, + statval => StatVal, + idle_milliseconds => IdleIntervals, + max_idle_millisecond => MaxMs }. --spec info(interval | statval, keepalive()) -> +-spec info(check_interval | statval | idle_milliseconds, keepalive()) -> non_neg_integer(). -info(interval, #keepalive{interval = Interval}) -> +info(check_interval, #keepalive{check_interval = Interval}) -> Interval; info(statval, #keepalive{statval = StatVal}) -> StatVal; -info(interval, undefined) -> +info(idle_milliseconds, #keepalive{idle_milliseconds = Val}) -> + Val; +info(check_interval, undefined) -> 0. +check(Keepalive = #keepalive{}) -> + RecvCnt = emqx_pd:get_counter(recv_pkt), + check(RecvCnt, Keepalive); +check(Keepalive) -> + {ok, Keepalive}. + %% @doc Check keepalive. -spec check(non_neg_integer(), keepalive()) -> {ok, keepalive()} | {error, timeout}. -check(Val, #keepalive{statval = Val}) -> {error, timeout}; -check(Val, KeepAlive) -> {ok, KeepAlive#keepalive{statval = Val}}. + +check( + NewVal, + #keepalive{ + statval = NewVal, + idle_milliseconds = IdleAcc, + check_interval = Interval, + max_idle_millisecond = Max + } +) when IdleAcc + Interval >= Max -> + {error, timeout}; +check( + NewVal, + #keepalive{ + statval = NewVal, + idle_milliseconds = IdleAcc, + check_interval = Interval + } = KeepAlive +) -> + {ok, KeepAlive#keepalive{statval = NewVal, idle_milliseconds = IdleAcc + Interval}}; +check(NewVal, #keepalive{} = KeepAlive) -> + {ok, KeepAlive#keepalive{statval = NewVal, idle_milliseconds = 0}}. %% @doc Update keepalive. %% The statval of the previous keepalive will be used, %% and normal checks will begin from the next cycle. --spec update(non_neg_integer(), keepalive() | undefined) -> keepalive() | undefined. 
-update(Interval, undefined) -> init(0, Interval); -update(Interval, #keepalive{statval = StatVal}) -> init(StatVal, Interval). +-spec update(atom(), non_neg_integer(), keepalive() | undefined) -> keepalive() | undefined. +update(Zone, Interval, undefined) -> init(Zone, 0, Interval); +update(Zone, Interval, #keepalive{statval = StatVal}) -> init(Zone, StatVal, Interval). diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 46bde617b..9dfd6a17f 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -3487,6 +3487,7 @@ mqtt_general() -> )}, {"max_clientid_len", sc( + %% MQTT-v3.1.1-[MQTT-3.1.3-5], MQTT-v5.0-[MQTT-3.1.3-5] range(23, 65535), #{ default => 65535, @@ -3608,9 +3609,17 @@ mqtt_general() -> desc => ?DESC(mqtt_keepalive_multiplier) } )}, + {"keepalive_check_interval", + sc( + timeout_duration(), + #{ + default => <<"30s">>, + desc => ?DESC(mqtt_keepalive_check_interval) + } + )}, {"retry_interval", sc( - hoconsc:union([infinity, duration()]), + hoconsc:union([infinity, timeout_duration()]), #{ default => infinity, desc => ?DESC(mqtt_retry_interval) diff --git a/apps/emqx/src/emqx_ws_connection.erl b/apps/emqx/src/emqx_ws_connection.erl index 2559452bf..8518aead9 100644 --- a/apps/emqx/src/emqx_ws_connection.erl +++ b/apps/emqx/src/emqx_ws_connection.erl @@ -555,8 +555,7 @@ handle_info(Info, State) -> handle_timeout(TRef, idle_timeout, State = #state{idle_timer = TRef}) -> shutdown(idle_timeout, State); handle_timeout(TRef, keepalive, State) when is_reference(TRef) -> - RecvOct = emqx_pd:get_counter(recv_oct), - handle_timeout(TRef, {keepalive, RecvOct}, State); + with_channel(handle_timeout, [TRef, keepalive], State); handle_timeout( TRef, emit_stats, diff --git a/apps/emqx/test/emqx_config_SUITE.erl b/apps/emqx/test/emqx_config_SUITE.erl index 2acc69c1d..7341a5c9b 100644 --- a/apps/emqx/test/emqx_config_SUITE.erl +++ b/apps/emqx/test/emqx_config_SUITE.erl @@ -428,6 +428,7 @@ zone_global_defaults() -> 
ignore_loop_deliver => false, keepalive_backoff => 0.75, keepalive_multiplier => 1.5, + keepalive_check_interval => 30000, max_awaiting_rel => 100, max_clientid_len => 65535, max_inflight => 32, diff --git a/apps/emqx/test/emqx_frame_SUITE.erl b/apps/emqx/test/emqx_frame_SUITE.erl index 8193f9c31..9c8a99547 100644 --- a/apps/emqx/test/emqx_frame_SUITE.erl +++ b/apps/emqx/test/emqx_frame_SUITE.erl @@ -64,7 +64,10 @@ groups() -> t_malformed_connect_header, t_malformed_connect_data, t_reserved_connect_flag, - t_invalid_clientid + t_invalid_clientid, + t_undefined_password, + t_invalid_will_retain, + t_invalid_will_qos ]}, {connack, [parallel], [ t_serialize_parse_connack, @@ -703,9 +706,15 @@ t_invalid_clientid(_) -> ). %% for regression: `password` must be `undefined` +%% BUG-FOR-BUG compatible t_undefined_password(_) -> - Payload = <<16, 19, 0, 4, 77, 81, 84, 84, 4, 130, 0, 60, 0, 2, 97, 49, 0, 3, 97, 97, 97>>, - {ok, Packet, <<>>, {none, _}} = emqx_frame:parse(Payload), + %% Username Flag = true + %% Password Flag = false + %% Clean Session = true + ConnectFlags = <<2#1000:4, 2#0010:4>>, + ConnBin = + <<16, 17, 0, 4, 77, 81, 84, 84, 4, ConnectFlags/binary, 0, 60, 0, 2, 97, 49, 0, 1, 97>>, + {ok, Packet, <<>>, {none, _}} = emqx_frame:parse(ConnBin), Password = undefined, ?assertEqual( #mqtt_packet{ @@ -729,7 +738,7 @@ t_undefined_password(_) -> will_props = #{}, will_topic = undefined, will_payload = undefined, - username = <<"aaa">>, + username = <<"a">>, password = Password }, payload = undefined @@ -738,6 +747,75 @@ t_undefined_password(_) -> ), ok. 
+t_invalid_password_flag(_) -> + %% Username Flag = false + %% Password Flag = true + %% Clean Session = true + ConnectFlags = <<2#0100:4, 2#0010:4>>, + ConnectBin = + <<16, 17, 0, 4, 77, 81, 84, 84, 4, ConnectFlags/binary, 0, 60, 0, 2, 97, 49, 0, 1, 97>>, + ?assertMatch( + {ok, _, _, _}, + emqx_frame:parse(ConnectBin) + ), + + StrictModeParseState = emqx_frame:initial_parse_state(#{strict_mode => true}), + ?assertException( + throw, + {frame_parse_error, invalid_password_flag}, + emqx_frame:parse(ConnectBin, StrictModeParseState) + ). + +t_invalid_will_retain(_) -> + ConnectFlags = <<2#01100000>>, + ConnectBin = + <<16, 51, 0, 4, 77, 81, 84, 84, 5, ConnectFlags/binary, 174, 157, 24, 38, 0, 14, 98, 55, + 122, 51, 83, 73, 89, 50, 54, 79, 77, 73, 65, 86, 0, 5, 66, 117, 53, 57, 66, 0, 6, 84, + 54, 75, 78, 112, 57, 0, 6, 68, 103, 55, 87, 87, 87>>, + ?assertException( + throw, + {frame_parse_error, invalid_will_retain}, + emqx_frame:parse(ConnectBin) + ), + ok. + +t_invalid_will_qos(_) -> + Will_F_WillQoS0 = <<2#010:3, 2#00:2, 2#000:3>>, + Will_F_WillQoS1 = <<2#010:3, 2#01:2, 2#000:3>>, + Will_F_WillQoS2 = <<2#010:3, 2#10:2, 2#000:3>>, + Will_F_WillQoS3 = <<2#010:3, 2#11:2, 2#000:3>>, + Will_T_WillQoS3 = <<2#011:3, 2#11:2, 2#000:3>>, + ConnectBinFun = fun(ConnectFlags) -> + <<16, 51, 0, 4, 77, 81, 84, 84, 5, ConnectFlags/binary, 174, 157, 24, 38, 0, 14, 98, 55, + 122, 51, 83, 73, 89, 50, 54, 79, 77, 73, 65, 86, 0, 5, 66, 117, 53, 57, 66, 0, 6, 84, + 54, 75, 78, 112, 57, 0, 6, 68, 103, 55, 87, 87, 87>> + end, + ?assertMatch( + {ok, _, _, _}, + emqx_frame:parse(ConnectBinFun(Will_F_WillQoS0)) + ), + ?assertException( + throw, + {frame_parse_error, invalid_will_qos}, + emqx_frame:parse(ConnectBinFun(Will_F_WillQoS1)) + ), + ?assertException( + throw, + {frame_parse_error, invalid_will_qos}, + emqx_frame:parse(ConnectBinFun(Will_F_WillQoS2)) + ), + ?assertException( + throw, + {frame_parse_error, invalid_will_qos}, + emqx_frame:parse(ConnectBinFun(Will_F_WillQoS3)) + ), + 
?assertException( + throw, + {frame_parse_error, invalid_will_qos}, + emqx_frame:parse(ConnectBinFun(Will_T_WillQoS3)) + ), + ok. + parse_serialize(Packet) -> parse_serialize(Packet, #{strict_mode => true}). diff --git a/apps/emqx/test/emqx_keepalive_SUITE.erl b/apps/emqx/test/emqx_keepalive_SUITE.erl index 7773774a7..84f66b3a5 100644 --- a/apps/emqx/test/emqx_keepalive_SUITE.erl +++ b/apps/emqx/test/emqx_keepalive_SUITE.erl @@ -19,22 +19,180 @@ -compile(export_all). -compile(nowarn_export_all). +-include_lib("emqx/include/emqx.hrl"). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). all() -> emqx_common_test_helpers:all(?MODULE). +init_per_suite(Config) -> + Apps = emqx_cth_suite:start( + [ + {emqx, + "listeners {" + "tcp.default.bind = 1883," + "ssl.default = marked_for_deletion," + "quic.default = marked_for_deletion," + "ws.default = marked_for_deletion," + "wss.default = marked_for_deletion" + "}"} + ], + #{work_dir => emqx_cth_suite:work_dir(Config)} + ), + [{apps, Apps} | Config]. + +end_per_suite(Config) -> + emqx_cth_suite:stop(?config(apps, Config)). 
+ +t_check_keepalive_default_timeout(_) -> + emqx_config:put_zone_conf(default, [mqtt, keepalive_multiplier], 1.5), + emqx_config:put_zone_conf(default, [mqtt, keepalive_check_interval], 30000), + erlang:process_flag(trap_exit, true), + ClientID = <<"default">>, + KeepaliveSec = 10, + {ok, C} = emqtt:start_link([ + {keepalive, KeepaliveSec}, + {clientid, binary_to_list(ClientID)} + ]), + {ok, _} = emqtt:connect(C), + emqtt:pause(C), + [ChannelPid] = emqx_cm:lookup_channels(ClientID), + erlang:link(ChannelPid), + CheckInterval = emqx_utils:clamp(keepalive_check_interval(), 1000, 5000), + ?assertMatch(5000, CheckInterval), + %% when keepalive_check_interval is 30s and keepalive_multiplier is 1.5 + %% connect T0(packet = 1, idle_milliseconds = 0) + %% check1 T1(packet = 1, idle_milliseconds = 1 * CheckInterval = 5000) + %% check2 T2(packet = 1, idle_milliseconds = 2 * CheckInterval = 10000) + %% check2 T3(packet = 1, idle_milliseconds = 3 * CheckInterval = 15000) -> timeout + Timeout = CheckInterval * 3, + %% connector but not send a packet. + ?assertMatch( + no_keepalive_timeout_received, + receive_msg_in_time(ChannelPid, C, Timeout - 200), + Timeout - 200 + ), + ?assertMatch(ok, receive_msg_in_time(ChannelPid, C, 1200)). 
+ +t_check_keepalive_other_timeout(_) -> + emqx_config:put_zone_conf(default, [mqtt, keepalive_multiplier], 1.5), + emqx_config:put_zone_conf(default, [mqtt, keepalive_check_interval], 2000), + erlang:process_flag(trap_exit, true), + ClientID = <<"other">>, + KeepaliveSec = 10, + {ok, C} = emqtt:start_link([ + {keepalive, KeepaliveSec}, + {clientid, binary_to_list(ClientID)} + ]), + {ok, _} = emqtt:connect(C), + emqtt:pause(C), + {ok, _, [0]} = emqtt:subscribe(C, <<"mytopic">>, []), + [ChannelPid] = emqx_cm:lookup_channels(ClientID), + erlang:link(ChannelPid), + %%CheckInterval = ceil(keepalive_check_factor() * KeepaliveSec * 1000), + CheckInterval = emqx_utils:clamp(keepalive_check_interval(), 1000, 5000), + ?assertMatch(2000, CheckInterval), + %% when keepalive_check_interval is 2s and keepalive_multiplier is 1.5 + %% connect T0(packet = 1, idle_milliseconds = 0) + %% subscribe T1(packet = 2, idle_milliseconds = 0) + %% check1 T2(packet = 2, idle_milliseconds = 1 * CheckInterval = 2000) + %% check2 T3(packet = 2, idle_milliseconds = 2 * CheckInterval = 4000) + %% check3 T4(packet = 2, idle_milliseconds = 3 * CheckInterval = 6000) + %% check4 T5(packet = 2, idle_milliseconds = 4 * CheckInterval = 8000) + %% check4 T6(packet = 2, idle_milliseconds = 5 * CheckInterval = 10000) + %% check4 T7(packet = 2, idle_milliseconds = 6 * CheckInterval = 12000) + %% check4 T8(packet = 2, idle_milliseconds = 7 * CheckInterval = 14000) + %% check4 T9(packet = 2, idle_milliseconds = 8 * CheckInterval = 16000) > 15000 timeout + Timeout = CheckInterval * 9, + ?assertMatch( + no_keepalive_timeout_received, + receive_msg_in_time(ChannelPid, C, Timeout - 200), + Timeout - 200 + ), + ?assertMatch(ok, receive_msg_in_time(ChannelPid, C, 1200), Timeout). 
+ +t_check_keepalive_ping_reset_timer(_) -> + emqx_config:put_zone_conf(default, [mqtt, keepalive_multiplier], 1.5), + emqx_config:put_zone_conf(default, [mqtt, keepalive_check_interval], 100000), + erlang:process_flag(trap_exit, true), + ClientID = <<"ping_reset">>, + KeepaliveSec = 10, + {ok, C} = emqtt:start_link([ + {keepalive, KeepaliveSec}, + {clientid, binary_to_list(ClientID)} + ]), + {ok, _} = emqtt:connect(C), + emqtt:pause(C), + ct:sleep(1000), + emqtt:resume(C), + pong = emqtt:ping(C), + emqtt:pause(C), + [ChannelPid] = emqx_cm:lookup_channels(ClientID), + erlang:link(ChannelPid), + CheckInterval = emqx_utils:clamp(keepalive_check_interval(), 1000, 5000), + ?assertMatch(5000, CheckInterval), + %% when keepalive_check_interval is 30s and keepalive_multiplier is 1.5 + %% connect T0(packet = 1, idle_milliseconds = 0) + %% sleep 1000ms + %% ping (packet = 2, idle_milliseconds = 0) restart timer + %% check1 T1(packet = 1, idle_milliseconds = 1 * CheckInterval = 5000) + %% check2 T2(packet = 1, idle_milliseconds = 2 * CheckInterval = 10000) + %% check2 T3(packet = 1, idle_milliseconds = 3 * CheckInterval = 15000) -> timeout + Timeout = CheckInterval * 3, + ?assertMatch( + no_keepalive_timeout_received, + receive_msg_in_time(ChannelPid, C, Timeout - 200), + Timeout - 200 + ), + ?assertMatch(ok, receive_msg_in_time(ChannelPid, C, 1200)). 
+ t_check(_) -> + emqx_config:put_zone_conf(default, [mqtt, keepalive_multiplier], 1.5), + emqx_config:put_zone_conf(default, [mqtt, keepalive_check_interval], 30000), Keepalive = emqx_keepalive:init(60), - ?assertEqual(60, emqx_keepalive:info(interval, Keepalive)), + ?assertEqual(30000, emqx_keepalive:info(check_interval, Keepalive)), ?assertEqual(0, emqx_keepalive:info(statval, Keepalive)), Info = emqx_keepalive:info(Keepalive), ?assertEqual( #{ - interval => 60, - statval => 0 + check_interval => 30000, + statval => 0, + idle_milliseconds => 0, + %% 60 * 1.5 * 1000 + max_idle_millisecond => 90000 }, Info ), {ok, Keepalive1} = emqx_keepalive:check(1, Keepalive), ?assertEqual(1, emqx_keepalive:info(statval, Keepalive1)), - ?assertEqual({error, timeout}, emqx_keepalive:check(1, Keepalive1)). + {ok, Keepalive2} = emqx_keepalive:check(1, Keepalive1), + ?assertEqual(1, emqx_keepalive:info(statval, Keepalive2)), + {ok, Keepalive3} = emqx_keepalive:check(1, Keepalive2), + ?assertEqual(1, emqx_keepalive:info(statval, Keepalive3)), + ?assertEqual({error, timeout}, emqx_keepalive:check(1, Keepalive3)), + + Keepalive4 = emqx_keepalive:init(90), + ?assertEqual(30000, emqx_keepalive:info(check_interval, Keepalive4)), + + Keepalive5 = emqx_keepalive:init(1), + ?assertEqual(1000, emqx_keepalive:info(check_interval, Keepalive5)), + ok. + +keepalive_multiplier() -> + emqx_config:get_zone_conf(default, [mqtt, keepalive_multiplier]). + +keepalive_check_interval() -> + emqx_config:get_zone_conf(default, [mqtt, keepalive_check_interval]). + +receive_msg_in_time(ChannelPid, C, Timeout) -> + receive + {'EXIT', ChannelPid, {shutdown, keepalive_timeout}} -> + receive + {'EXIT', C, {shutdown, tcp_closed}} -> + ok + after 500 -> + throw(no_tcp_closed_from_mqtt_client) + end + after Timeout -> + no_keepalive_timeout_received + end. 
diff --git a/apps/emqx_auth/src/emqx_auth.app.src b/apps/emqx_auth/src/emqx_auth.app.src index 6db2d6213..d61ba281b 100644 --- a/apps/emqx_auth/src/emqx_auth.app.src +++ b/apps/emqx_auth/src/emqx_auth.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth, [ {description, "EMQX Authentication and authorization"}, - {vsn, "0.3.1"}, + {vsn, "0.3.3"}, {modules, []}, {registered, [emqx_auth_sup]}, {applications, [ diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz.erl index b6ece054b..6bae7c3df 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz.erl @@ -408,7 +408,7 @@ init_metrics(Source) -> emqx_metrics_worker:create_metrics( authz_metrics, TypeName, - [total, allow, deny, nomatch], + [total, allow, deny, nomatch, ignore], [total] ) end. @@ -510,8 +510,8 @@ do_authorize( }), do_authorize(Client, PubSub, Topic, Tail); ignore -> - ?TRACE("AUTHZ", "authorization_ignore", #{ - authorize_type => Type, + emqx_metrics_worker:inc(authz_metrics, Type, ignore), + ?TRACE("AUTHZ", "authorization_module_ignore", #{ module => Module, username => Username, topic => Topic, diff --git a/apps/emqx_auth_ext/src/emqx_auth_ext_tls_const_v1.erl b/apps/emqx_auth_ext/src/emqx_auth_ext_tls_const_v1.erl index ed95b8270..1abc5c2a4 100644 --- a/apps/emqx_auth_ext/src/emqx_auth_ext_tls_const_v1.erl +++ b/apps/emqx_auth_ext/src/emqx_auth_ext_tls_const_v1.erl @@ -10,7 +10,12 @@ make_tls_verify_fun/2 ]). +-export([default_root_fun/1]). + -include_lib("public_key/include/public_key.hrl"). + +-define(unknown_ca, unknown_ca). + %% @doc Build a root fun for verify TLS partial_chain. %% The `InputChain' is composed by OTP SSL with local cert store %% AND the cert (chain if any) from the client. @@ -109,3 +114,8 @@ ext_key_opts(Str) -> end, Usages ). + +%% @doc default root fun for partial_chain 'false' +-spec default_root_fun(_) -> ?unknown_ca. +default_root_fun(_) -> + ?unknown_ca. 
diff --git a/apps/emqx_auth_ext/src/emqx_auth_ext_tls_lib.erl b/apps/emqx_auth_ext/src/emqx_auth_ext_tls_lib.erl index e858920e7..a0ddba8fd 100644 --- a/apps/emqx_auth_ext/src/emqx_auth_ext_tls_lib.erl +++ b/apps/emqx_auth_ext/src/emqx_auth_ext_tls_lib.erl @@ -13,10 +13,12 @@ -include_lib("emqx/include/logger.hrl"). -define(CONST_MOD_V1, emqx_auth_ext_tls_const_v1). -%% @doc enable TLS partial_chain validation if set. +%% @doc enable TLS partial_chain validation -spec opt_partial_chain(SslOpts :: map()) -> NewSslOpts :: map(). opt_partial_chain(#{partial_chain := false} = SslOpts) -> - maps:remove(partial_chain, SslOpts); + %% For config update scenario, we must set it to override + %% the 'existing' partial_chain in the listener + SslOpts#{partial_chain := fun ?CONST_MOD_V1:default_root_fun/1}; opt_partial_chain(#{partial_chain := true} = SslOpts) -> SslOpts#{partial_chain := rootfun_trusted_ca_from_cacertfile(1, SslOpts)}; opt_partial_chain(#{partial_chain := cacert_from_cacertfile} = SslOpts) -> diff --git a/apps/emqx_auth_ext/test/emqx_auth_ext_schema_SUITE.erl b/apps/emqx_auth_ext/test/emqx_auth_ext_schema_SUITE.erl index b47f5fa39..4461fada0 100644 --- a/apps/emqx_auth_ext/test/emqx_auth_ext_schema_SUITE.erl +++ b/apps/emqx_auth_ext/test/emqx_auth_ext_schema_SUITE.erl @@ -24,7 +24,7 @@ "\n" " listeners.ssl.auth_ext.bind = 28883\n" " listeners.ssl.auth_ext.enable = true\n" - " listeners.ssl.auth_ext.ssl_options.partial_chain = true\n" + " listeners.ssl.auth_ext.ssl_options.partial_chain = false\n" " listeners.ssl.auth_ext.ssl_options.verify = verify_peer\n" " listeners.ssl.auth_ext.ssl_options.verify_peer_ext_key_usage = \"clientAuth\"\n" " " @@ -62,5 +62,6 @@ t_conf_check_default(_Config) -> t_conf_check_auth_ext(_Config) -> Opts = esockd:get_options({'ssl:auth_ext', 28883}), SSLOpts = proplists:get_value(ssl_options, Opts), + %% Even when partial_chain is set to `false` ?assertMatch(Fun when is_function(Fun), proplists:get_value(partial_chain, SSLOpts)), 
?assertMatch({Fun, _} when is_function(Fun), proplists:get_value(verify_fun, SSLOpts)). diff --git a/apps/emqx_auth_http/test/emqx_authz_http_SUITE.erl b/apps/emqx_auth_http/test/emqx_authz_http_SUITE.erl index d165d288d..e07a9f69e 100644 --- a/apps/emqx_auth_http/test/emqx_authz_http_SUITE.erl +++ b/apps/emqx_auth_http/test/emqx_authz_http_SUITE.erl @@ -529,6 +529,68 @@ t_bad_response_content_type(_Config) -> end ). +%% Checks that we bump the correct metrics when we receive an error response +t_bad_response(_Config) -> + ok = setup_handler_and_config( + fun(Req0, State) -> + ?assertEqual( + <<"/authz/users/">>, + cowboy_req:path(Req0) + ), + + {ok, _PostVars, Req1} = cowboy_req:read_urlencoded_body(Req0), + + Req = cowboy_req:reply( + 400, + #{<<"content-type">> => <<"application/json">>}, + "{\"error\":true}", + Req1 + ), + {ok, Req, State} + end, + #{ + <<"method">> => <<"post">>, + <<"body">> => #{ + <<"username">> => <<"${username}">> + }, + <<"headers">> => #{} + } + ), + + ClientInfo = #{ + clientid => <<"client id">>, + username => <<"user name">>, + peerhost => {127, 0, 0, 1}, + protocol => <<"MQTT">>, + mountpoint => <<"MOUNTPOINT">>, + zone => default, + listener => {tcp, default}, + cn => ?PH_CERT_CN_NAME, + dn => ?PH_CERT_SUBJECT + }, + + ?assertEqual( + deny, + emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>) + ), + ?assertMatch( + #{ + counters := #{ + total := 1, + ignore := 1, + nomatch := 0, + allow := 0, + deny := 0 + }, + 'authorization.superuser' := 0, + 'authorization.matched.allow' := 0, + 'authorization.matched.deny' := 0, + 'authorization.nomatch' := 1 + }, + get_metrics() + ), + ok. + t_no_value_for_placeholder(_Config) -> ok = setup_handler_and_config( fun(Req0, State) -> @@ -729,3 +791,18 @@ start_apps(Apps) -> stop_apps(Apps) -> lists:foreach(fun application:stop/1, Apps). 
+ +get_metrics() -> + Metrics = emqx_metrics_worker:get_metrics(authz_metrics, http), + lists:foldl( + fun(Name, Acc) -> + Acc#{Name => emqx_metrics:val(Name)} + end, + Metrics, + [ + 'authorization.superuser', + 'authorization.matched.allow', + 'authorization.matched.deny', + 'authorization.nomatch' + ] + ). diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl index 8a50bd19f..86b302d57 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl @@ -116,7 +116,7 @@ create( user_id_type := Type, password_hash_algorithm := Algorithm, user_group := UserGroup - } + } = Config ) -> ok = emqx_authn_password_hashing:init(Algorithm), State = #{ @@ -124,6 +124,7 @@ create( user_id_type => Type, password_hash_algorithm => Algorithm }, + ok = boostrap_user_from_file(Config, State), {ok, State}. update(Config, _State) -> @@ -338,8 +339,24 @@ run_fuzzy_filter( %%------------------------------------------------------------------------------ insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) -> - UserInfoRecord = user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser), - insert_user(UserInfoRecord). + UserInfoRecord = + #user_info{user_id = DBUserID} = + user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser), + case mnesia:read(?TAB, DBUserID, write) of + [] -> + insert_user(UserInfoRecord); + [UserInfoRecord] -> + ok; + [_] -> + ?SLOG(warning, #{ + msg => "bootstrap_authentication_overridden_in_the_built_in_database", + user_id => UserID, + group_id => UserGroup, + suggestion => + "If you have made changes in other way, remove the user_id from the bootstrap file." + }), + insert_user(UserInfoRecord) + end. insert_user(#user_info{} = UserInfoRecord) -> mnesia:write(?TAB, UserInfoRecord, write). 
@@ -531,3 +548,25 @@ find_password_hash(_, _, _) -> is_superuser(#{<<"is_superuser">> := <<"true">>}) -> true; is_superuser(#{<<"is_superuser">> := true}) -> true; is_superuser(_) -> false. + +boostrap_user_from_file(Config, State) -> + case maps:get(boostrap_file, Config, <<>>) of + <<>> -> + ok; + FileName0 -> + #{boostrap_type := Type} = Config, + FileName = emqx_schema:naive_env_interpolation(FileName0), + case file:read_file(FileName) of + {ok, FileData} -> + %% if there is a key conflict, override with the key which from the bootstrap file + _ = import_users({Type, FileName, FileData}, State), + ok; + {error, Reason} -> + ?SLOG(warning, #{ + msg => "boostrap_authn_built_in_database_failed", + boostrap_file => FileName, + boostrap_type => Type, + reason => emqx_utils:explain_posix(Reason) + }) + end + end. diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl index 7ad6616d4..6544874dc 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl @@ -46,7 +46,7 @@ select_union_member(_Kind, _Value) -> fields(builtin_db) -> [ {password_hash_algorithm, fun emqx_authn_password_hashing:type_rw/1} - ] ++ common_fields(); + ] ++ common_fields() ++ bootstrap_fields(); fields(builtin_db_api) -> [ {password_hash_algorithm, fun emqx_authn_password_hashing:type_rw_api/1} @@ -69,3 +69,24 @@ common_fields() -> {backend, emqx_authn_schema:backend(?AUTHN_BACKEND)}, {user_id_type, fun user_id_type/1} ] ++ emqx_authn_schema:common_fields(). + +bootstrap_fields() -> + [ + {bootstrap_file, + ?HOCON( + binary(), + #{ + desc => ?DESC(bootstrap_file), + required => false, + default => <<>> + } + )}, + {bootstrap_type, + ?HOCON( + ?ENUM([hash, plain]), #{ + desc => ?DESC(bootstrap_type), + required => false, + default => <<"plain">> + } + )} + ]. 
diff --git a/apps/emqx_auth_mnesia/test/emqx_authn_mnesia_SUITE.erl b/apps/emqx_auth_mnesia/test/emqx_authn_mnesia_SUITE.erl index 54409a73f..666b4a628 100644 --- a/apps/emqx_auth_mnesia/test/emqx_authn_mnesia_SUITE.erl +++ b/apps/emqx_auth_mnesia/test/emqx_authn_mnesia_SUITE.erl @@ -54,7 +54,74 @@ t_create(_) -> {ok, _} = emqx_authn_mnesia:create(?AUTHN_ID, Config0), Config1 = Config0#{password_hash_algorithm => #{name => sha256}}, - {ok, _} = emqx_authn_mnesia:create(?AUTHN_ID, Config1). + {ok, _} = emqx_authn_mnesia:create(?AUTHN_ID, Config1), + ok. +t_bootstrap_file(_) -> + Config = config(), + %% hash to hash + HashConfig = Config#{password_hash_algorithm => #{name => sha256, salt_position => suffix}}, + ?assertMatch( + [ + {user_info, {_, <<"myuser1">>}, _, _, true}, + {user_info, {_, <<"myuser2">>}, _, _, false} + ], + test_bootstrap_file(HashConfig, hash, <<"user-credentials.json">>) + ), + ?assertMatch( + [ + {user_info, {_, <<"myuser3">>}, _, _, true}, + {user_info, {_, <<"myuser4">>}, _, _, false} + ], + test_bootstrap_file(HashConfig, hash, <<"user-credentials.csv">>) + ), + + %% plain to plain + PlainConfig = Config#{ + password_hash_algorithm => + #{name => plain, salt_position => disable} + }, + ?assertMatch( + [ + {user_info, {_, <<"myuser1">>}, <<"password1">>, _, true}, + {user_info, {_, <<"myuser2">>}, <<"password2">>, _, false} + ], + test_bootstrap_file(PlainConfig, plain, <<"user-credentials-plain.json">>) + ), + ?assertMatch( + [ + {user_info, {_, <<"myuser3">>}, <<"password3">>, _, true}, + {user_info, {_, <<"myuser4">>}, <<"password4">>, _, false} + ], + test_bootstrap_file(PlainConfig, plain, <<"user-credentials-plain.csv">>) + ), + %% plain to hash + ?assertMatch( + [ + {user_info, {_, <<"myuser1">>}, _, _, true}, + {user_info, {_, <<"myuser2">>}, _, _, false} + ], + test_bootstrap_file(HashConfig, plain, <<"user-credentials-plain.json">>) + ), + ?assertMatch( + [ + {user_info, {_, <<"myuser3">>}, _, _, true}, + {user_info, {_, 
<<"myuser4">>}, _, _, false} + ], + test_bootstrap_file(HashConfig, plain, <<"user-credentials-plain.csv">>) + ), + ok. + +test_bootstrap_file(Config0, Type, File) -> + {Type, Filename, _FileData} = sample_filename_and_data(Type, File), + Config2 = Config0#{ + boostrap_file => Filename, + boostrap_type => Type + }, + {ok, State0} = emqx_authn_mnesia:create(?AUTHN_ID, Config2), + Result = ets:tab2list(emqx_authn_mnesia), + ok = emqx_authn_mnesia:destroy(State0), + ?assertMatch([], ets:tab2list(emqx_authn_mnesia)), + Result. t_update(_) -> Config0 = config(), diff --git a/apps/emqx_bridge/src/emqx_bridge.app.src b/apps/emqx_bridge/src/emqx_bridge.app.src index 8abfb075e..30930c494 100644 --- a/apps/emqx_bridge/src/emqx_bridge.app.src +++ b/apps/emqx_bridge/src/emqx_bridge.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge, [ {description, "EMQX bridges"}, - {vsn, "0.2.1"}, + {vsn, "0.2.3"}, {registered, [emqx_bridge_sup]}, {mod, {emqx_bridge_app, []}}, {applications, [ diff --git a/apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl b/apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl index 5e7f22db8..8cebb9a3a 100644 --- a/apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl +++ b/apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl @@ -288,6 +288,14 @@ request(Method, Path, Params) -> Error end. +simplify_result(Res) -> + case Res of + {error, {{_, Status, _}, _, Body}} -> + {Status, Body}; + {ok, {{_, Status, _}, _, Body}} -> + {Status, Body} + end. + list_bridges_api() -> Params = [], Path = emqx_mgmt_api_test_util:api_path(["actions"]), @@ -321,7 +329,7 @@ get_bridge_api(BridgeKind, BridgeType, BridgeName) -> Path = emqx_mgmt_api_test_util:api_path([Root, BridgeId]), ct:pal("get bridge ~p (via http)", [{BridgeKind, BridgeType, BridgeName}]), Res = request(get, Path, Params), - ct:pal("get bridge ~p result: ~p", [{BridgeKind, BridgeType, BridgeName}, Res]), + ct:pal("get bridge ~p result:\n ~p", [{BridgeKind, BridgeType, BridgeName}, Res]), Res. 
create_bridge_api(Config) -> @@ -349,6 +357,26 @@ create_kind_api(Config, Overrides) -> ct:pal("bridge create (~s, http) result:\n ~p", [Kind, Res]), Res. +enable_kind_api(Kind, ConnectorType, ConnectorName) -> + do_enable_disable_kind_api(Kind, ConnectorType, ConnectorName, enable). + +disable_kind_api(Kind, ConnectorType, ConnectorName) -> + do_enable_disable_kind_api(Kind, ConnectorType, ConnectorName, disable). + +do_enable_disable_kind_api(Kind, Type, Name, Op) -> + BridgeId = emqx_bridge_resource:bridge_id(Type, Name), + RootBin = api_path_root(Kind), + {OpPath, OpStr} = + case Op of + enable -> {"true", "enable"}; + disable -> {"false", "disable"} + end, + Path = emqx_mgmt_api_test_util:api_path([RootBin, BridgeId, "enable", OpPath]), + ct:pal(OpStr ++ " ~s ~s (http)", [Kind, BridgeId]), + Res = request(put, Path, []), + ct:pal(OpStr ++ " ~s ~s (http) result:\n ~p", [Kind, BridgeId, Res]), + simplify_result(Res). + create_connector_api(Config) -> create_connector_api(Config, _Overrides = #{}). @@ -453,6 +481,15 @@ update_bridge_api(Config, Overrides) -> ct:pal("update bridge (~s, http) result:\n ~p", [Kind, Res]), Res. +delete_kind_api(Kind, Type, Name) -> + BridgeId = emqx_bridge_resource:bridge_id(Type, Name), + PathRoot = api_path_root(Kind), + Path = emqx_mgmt_api_test_util:api_path([PathRoot, BridgeId]), + ct:pal("deleting bridge (~s, http)", [Kind]), + Res = request(delete, Path, _Params = []), + ct:pal("delete bridge (~s, http) result:\n ~p", [Kind, Res]), + simplify_result(Res). + op_bridge_api(Op, BridgeType, BridgeName) -> op_bridge_api(_Kind = action, Op, BridgeType, BridgeName). 
@@ -1054,6 +1091,7 @@ t_on_get_status(Config, Opts) -> ProxyHost = ?config(proxy_host, Config), ProxyName = ?config(proxy_name, Config), FailureStatus = maps:get(failure_status, Opts, disconnected), + NormalStatus = maps:get(normal_status, Opts, connected), ?assertMatch({ok, _}, create_bridge_api(Config)), ResourceId = resource_id(Config), %% Since the connection process is async, we give it some time to @@ -1061,7 +1099,7 @@ t_on_get_status(Config, Opts) -> ?retry( _Sleep = 1_000, _Attempts = 20, - ?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceId)) + ?assertEqual({ok, NormalStatus}, emqx_resource_manager:health_check(ResourceId)) ), case ProxyHost of undefined -> @@ -1080,7 +1118,7 @@ t_on_get_status(Config, Opts) -> ?retry( _Sleep = 1_000, _Attempts = 20, - ?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceId)) + ?assertEqual({ok, NormalStatus}, emqx_resource_manager:health_check(ResourceId)) ) end, ok. diff --git a/apps/emqx_bridge_azure_event_hub/rebar.config b/apps/emqx_bridge_azure_event_hub/rebar.config index a0cc8def3..76ea7fa6c 100644 --- a/apps/emqx_bridge_azure_event_hub/rebar.config +++ b/apps/emqx_bridge_azure_event_hub/rebar.config @@ -2,7 +2,7 @@ {erl_opts, [debug_info]}. 
{deps, [ - {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.10.5"}}}, + {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "2.0.0"}}}, {kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.5"}}}, {brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.1"}}}, {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}}, diff --git a/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_v2_SUITE.erl b/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_v2_SUITE.erl index bc09a5509..661b8819c 100644 --- a/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_v2_SUITE.erl +++ b/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_v2_SUITE.erl @@ -40,6 +40,8 @@ init_per_suite(Config) -> emqx, emqx_management, emqx_resource, + %% Just for test helpers + brod, emqx_bridge_azure_event_hub, emqx_bridge, emqx_rule_engine, @@ -93,6 +95,9 @@ common_init_per_testcase(TestCase, Config) -> {connector_type, ?CONNECTOR_TYPE}, {connector_name, Name}, {connector_config, ConnectorConfig}, + {action_type, ?BRIDGE_TYPE}, + {action_name, Name}, + {action_config, BridgeConfig}, {bridge_type, ?BRIDGE_TYPE}, {bridge_name, Name}, {bridge_config, BridgeConfig} @@ -100,18 +105,13 @@ common_init_per_testcase(TestCase, Config) -> ]. end_per_testcase(_Testcase, Config) -> - case proplists:get_bool(skip_does_not_apply, Config) of - true -> - ok; - false -> - ProxyHost = ?config(proxy_host, Config), - ProxyPort = ?config(proxy_port, Config), - emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort), - emqx_bridge_v2_testlib:delete_all_bridges_and_connectors(), - emqx_common_test_helpers:call_janitor(60_000), - ok = snabbkaffe:stop(), - ok - end. 
+ ProxyHost = ?config(proxy_host, Config), + ProxyPort = ?config(proxy_port, Config), + emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort), + emqx_bridge_v2_testlib:delete_all_bridges_and_connectors(), + emqx_common_test_helpers:call_janitor(60_000), + ok = snabbkaffe:stop(), + ok. %%------------------------------------------------------------------------------ %% Helper fns @@ -172,7 +172,7 @@ bridge_config(Name, ConnectorId, KafkaTopic) -> #{ <<"enable">> => true, <<"connector">> => ConnectorId, - <<"kafka">> => + <<"parameters">> => #{ <<"buffer">> => #{ @@ -322,7 +322,7 @@ t_same_name_azure_kafka_bridges(Config) -> ), %% then create a Kafka bridge with same name and delete it after creation - ConfigKafka0 = lists:keyreplace(bridge_type, 1, Config, {bridge_type, ?KAFKA_BRIDGE_TYPE}), + ConfigKafka0 = lists:keyreplace(action_type, 1, Config, {action_type, ?KAFKA_BRIDGE_TYPE}), ConfigKafka = lists:keyreplace( connector_type, 1, ConfigKafka0, {connector_type, ?KAFKA_BRIDGE_TYPE} ), @@ -374,3 +374,20 @@ t_http_api_get(Config) -> emqx_bridge_testlib:list_bridges_api() ), ok. + +t_multiple_actions_sharing_topic(Config) -> + ActionConfig0 = ?config(action_config, Config), + ActionConfig = + emqx_utils_maps:deep_merge( + ActionConfig0, + #{<<"parameters">> => #{<<"query_mode">> => <<"sync">>}} + ), + ok = emqx_bridge_v2_kafka_producer_SUITE:t_multiple_actions_sharing_topic( + [ + {type, ?BRIDGE_TYPE_BIN}, + {connector_name, ?config(connector_name, Config)}, + {connector_config, ?config(connector_config, Config)}, + {action_config, ActionConfig} + ] + ), + ok. 
diff --git a/apps/emqx_bridge_cassandra/src/emqx_bridge_cassandra.app.src b/apps/emqx_bridge_cassandra/src/emqx_bridge_cassandra.app.src index 84087d715..946ca591a 100644 --- a/apps/emqx_bridge_cassandra/src/emqx_bridge_cassandra.app.src +++ b/apps/emqx_bridge_cassandra/src/emqx_bridge_cassandra.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_cassandra, [ {description, "EMQX Enterprise Cassandra Bridge"}, - {vsn, "0.3.0"}, + {vsn, "0.3.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_clickhouse/src/emqx_bridge_clickhouse.app.src b/apps/emqx_bridge_clickhouse/src/emqx_bridge_clickhouse.app.src index df74e07ed..f38036b83 100644 --- a/apps/emqx_bridge_clickhouse/src/emqx_bridge_clickhouse.app.src +++ b/apps/emqx_bridge_clickhouse/src/emqx_bridge_clickhouse.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_clickhouse, [ {description, "EMQX Enterprise ClickHouse Bridge"}, - {vsn, "0.4.0"}, + {vsn, "0.4.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_confluent/rebar.config b/apps/emqx_bridge_confluent/rebar.config index f518c8d4f..1a91f501d 100644 --- a/apps/emqx_bridge_confluent/rebar.config +++ b/apps/emqx_bridge_confluent/rebar.config @@ -2,7 +2,7 @@ {erl_opts, [debug_info]}. 
{deps, [ - {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.10.5"}}}, + {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "2.0.0"}}}, {kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.5"}}}, {brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.1"}}}, {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}}, diff --git a/apps/emqx_bridge_confluent/test/emqx_bridge_confluent_producer_SUITE.erl b/apps/emqx_bridge_confluent/test/emqx_bridge_confluent_producer_SUITE.erl index 724365c58..0b3a22a99 100644 --- a/apps/emqx_bridge_confluent/test/emqx_bridge_confluent_producer_SUITE.erl +++ b/apps/emqx_bridge_confluent/test/emqx_bridge_confluent_producer_SUITE.erl @@ -40,6 +40,8 @@ init_per_suite(Config) -> emqx, emqx_management, emqx_resource, + %% Just for test helpers + brod, emqx_bridge_confluent, emqx_bridge, emqx_rule_engine, @@ -93,6 +95,9 @@ common_init_per_testcase(TestCase, Config) -> {connector_type, ?CONNECTOR_TYPE}, {connector_name, Name}, {connector_config, ConnectorConfig}, + {action_type, ?ACTION_TYPE}, + {action_name, Name}, + {action_config, BridgeConfig}, {bridge_type, ?ACTION_TYPE}, {bridge_name, Name}, {bridge_config, BridgeConfig} @@ -306,7 +311,7 @@ t_same_name_confluent_kafka_bridges(Config) -> ), %% then create a Kafka bridge with same name and delete it after creation - ConfigKafka0 = lists:keyreplace(bridge_type, 1, Config, {bridge_type, ?KAFKA_BRIDGE_TYPE}), + ConfigKafka0 = lists:keyreplace(action_type, 1, Config, {action_type, ?KAFKA_BRIDGE_TYPE}), ConfigKafka = lists:keyreplace( connector_type, 1, ConfigKafka0, {connector_type, ?KAFKA_BRIDGE_TYPE} ), @@ -378,3 +383,20 @@ t_list_v1_bridges(Config) -> [] ), ok. 
+ +t_multiple_actions_sharing_topic(Config) -> + ActionConfig0 = ?config(action_config, Config), + ActionConfig = + emqx_utils_maps:deep_merge( + ActionConfig0, + #{<<"parameters">> => #{<<"query_mode">> => <<"sync">>}} + ), + ok = emqx_bridge_v2_kafka_producer_SUITE:t_multiple_actions_sharing_topic( + [ + {type, ?ACTION_TYPE_BIN}, + {connector_name, ?config(connector_name, Config)}, + {connector_config, ?config(connector_config, Config)}, + {action_config, ActionConfig} + ] + ), + ok. diff --git a/apps/emqx_bridge_dynamo/src/emqx_bridge_dynamo.app.src b/apps/emqx_bridge_dynamo/src/emqx_bridge_dynamo.app.src index 0ffd143dc..ac71e04e7 100644 --- a/apps/emqx_bridge_dynamo/src/emqx_bridge_dynamo.app.src +++ b/apps/emqx_bridge_dynamo/src/emqx_bridge_dynamo.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_dynamo, [ {description, "EMQX Enterprise Dynamo Bridge"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_es/src/emqx_bridge_es.app.src b/apps/emqx_bridge_es/src/emqx_bridge_es.app.src index 7e5f6203b..262ac84bd 100644 --- a/apps/emqx_bridge_es/src/emqx_bridge_es.app.src +++ b/apps/emqx_bridge_es/src/emqx_bridge_es.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_es, [ {description, "EMQX Enterprise Elastic Search Bridge"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {modules, [ emqx_bridge_es, emqx_bridge_es_connector diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src index 8890e5570..d98355a90 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_gcp_pubsub, [ {description, "EMQX Enterprise GCP Pub/Sub Bridge"}, - {vsn, "0.3.0"}, + {vsn, "0.3.1"}, {registered, []}, {applications, [ kernel, diff --git 
a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl index c96eeeccf..9450d02f0 100644 --- a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl +++ b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl @@ -1448,7 +1448,10 @@ t_connection_down_before_starting(Config) -> ), {ok, _} = create_bridge(Config), {ok, _} = snabbkaffe:receive_events(SRef0), - ?assertMatch({ok, connecting}, health_check(Config)), + ?assertMatch( + {ok, Status} when Status =:= connecting orelse Status =:= disconnected, + health_check(Config) + ), emqx_common_test_helpers:heal_failure(down, ProxyName, ProxyHost, ProxyPort), ?retry( diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src index 8c3223e8b..8ab084323 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_greptimedb, [ {description, "EMQX GreptimeDB Bridge"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl index be52f4469..e4cc0aa31 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl @@ -363,7 +363,7 @@ do_start_client( {error, Reason} end. -grpc_config() -> +grpc_opts() -> #{ sync_start => true, connect_timeout => ?CONNECT_TIMEOUT @@ -382,7 +382,7 @@ client_config( {pool, InstId}, {pool_type, random}, {auto_reconnect, ?AUTO_RECONNECT_S}, - {gprc_options, grpc_config()} + {grpc_opts, grpc_opts()} ] ++ protocol_config(Config). 
protocol_config( diff --git a/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb.app.src b/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb.app.src index 93ac355e3..af232accc 100644 --- a/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb.app.src +++ b/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_hstreamdb, [ {description, "EMQX Enterprise HStreamDB Bridge"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http.app.src b/apps/emqx_bridge_http/src/emqx_bridge_http.app.src index 57fceae74..5a52cc047 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http.app.src +++ b/apps/emqx_bridge_http/src/emqx_bridge_http.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_http, [ {description, "EMQX HTTP Bridge and Connector Application"}, - {vsn, "0.3.1"}, + {vsn, "0.3.2"}, {registered, []}, {applications, [kernel, stdlib, emqx_resource, ehttpc]}, {env, [ diff --git a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src index e211498ac..a8314541a 100644 --- a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src +++ b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_influxdb, [ {description, "EMQX Enterprise InfluxDB Bridge"}, - {vsn, "0.2.2"}, + {vsn, "0.2.3"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb.app.src b/apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb.app.src index b3c5767db..691778cfd 100644 --- a/apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb.app.src +++ b/apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_iotdb, [ {description, "EMQX Enterprise Apache IoTDB Bridge"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {modules, [ emqx_bridge_iotdb, emqx_bridge_iotdb_connector diff --git 
a/apps/emqx_bridge_kafka/rebar.config b/apps/emqx_bridge_kafka/rebar.config index 500c5a394..b89c9190f 100644 --- a/apps/emqx_bridge_kafka/rebar.config +++ b/apps/emqx_bridge_kafka/rebar.config @@ -2,7 +2,7 @@ {erl_opts, [debug_info]}. {deps, [ - {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.10.5"}}}, + {wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "2.0.0"}}}, {kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.5"}}}, {brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.1"}}}, {brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}}, diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src index ea7bfaba8..0e906203d 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_kafka, [ {description, "EMQX Enterprise Kafka Bridge"}, - {vsn, "0.3.1"}, + {vsn, "0.3.3"}, {registered, [emqx_bridge_kafka_consumer_sup]}, {applications, [ kernel, diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl index c266f14c2..8395251e8 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl @@ -327,6 +327,12 @@ on_query( }), do_send_msg(sync, KafkaMessage, Producers, SyncTimeout) catch + error:{invalid_partition_count, Count, _Partitioner} -> + ?tp("kafka_producer_invalid_partition_count", #{ + action_id => MessageTag, + query_mode => sync + }), + {error, {unrecoverable_error, {invalid_partition_count, Count}}}; throw:{bad_kafka_header, _} = Error -> ?tp( emqx_bridge_kafka_impl_producer_sync_query_failed, @@ -387,8 +393,12 @@ on_query_async( }), do_send_msg(async, KafkaMessage, Producers, AsyncReplyFn) catch 
- error:{invalid_partition_count, _Count, _Partitioner} -> - {error, invalid_partition_count}; + error:{invalid_partition_count, Count, _Partitioner} -> + ?tp("kafka_producer_invalid_partition_count", #{ + action_id => MessageTag, + query_mode => async + }), + {error, {unrecoverable_error, {invalid_partition_count, Count}}}; throw:{bad_kafka_header, _} = Error -> ?tp( emqx_bridge_kafka_impl_producer_async_query_failed, @@ -711,6 +721,7 @@ producers_config(BridgeType, BridgeName, Input, IsDryRun, BridgeV2Id) -> max_batch_bytes => MaxBatchBytes, max_send_ahead => MaxInflight - 1, compression => Compression, + alias => BridgeV2Id, telemetry_meta_data => #{bridge_id => BridgeV2Id}, max_partitions => MaxPartitions }. diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl index 3caa712ed..c876869b1 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl @@ -142,6 +142,9 @@ check_send_message_with_bridge(BridgeName) -> check_kafka_message_payload(Offset, Payload). send_message(ActionName) -> + send_message(?TYPE, ActionName). + +send_message(Type, ActionName) -> %% ###################################### %% Create Kafka message %% ###################################### @@ -157,8 +160,8 @@ send_message(ActionName) -> %% ###################################### %% Send message %% ###################################### - emqx_bridge_v2:send_message(?TYPE, ActionName, Msg, #{}), - #{offset => Offset, payload => Payload}. + Res = emqx_bridge_v2:send_message(Type, ActionName, Msg, #{}), + #{offset => Offset, payload => Payload, result => Res}. resolve_kafka_offset() -> KafkaTopic = emqx_bridge_kafka_impl_producer_SUITE:test_topic_one_partition(), @@ -285,6 +288,21 @@ action_api_spec_props_for_get() -> emqx_bridge_v2_testlib:actions_api_spec_schemas(), Props. 
+assert_status_api(Line, Type, Name, Status) -> + ?assertMatch( + {ok, + {{_, 200, _}, _, #{ + <<"status">> := Status, + <<"node_status">> := [#{<<"status">> := Status}] + }}}, + emqx_bridge_v2_testlib:get_bridge_api(Type, Name), + #{line => Line, name => Name, expected_status => Status} + ). +-define(assertStatusAPI(TYPE, NAME, STATUS), assert_status_api(?LINE, TYPE, NAME, STATUS)). + +get_rule_metrics(RuleId) -> + emqx_metrics_worker:get_metrics(rule_metrics, RuleId). + %%------------------------------------------------------------------------------ %% Testcases %%------------------------------------------------------------------------------ @@ -702,3 +720,204 @@ t_connector_health_check_topic(_Config) -> [] ), ok. + +%% Checks that, if Kafka raises `invalid_partition_count' error, we bump the corresponding +%% failure rule action metric. +t_invalid_partition_count_metrics(Config) -> + Type = proplists:get_value(type, Config, ?TYPE), + ConnectorName = proplists:get_value(connector_name, Config, <<"c">>), + ConnectorConfig = proplists:get_value(connector_config, Config, connector_config()), + ActionConfig1 = proplists:get_value(action_config, Config, action_config(ConnectorName)), + ?check_trace( + #{timetrap => 10_000}, + begin + ConnectorParams = [ + {connector_config, ConnectorConfig}, + {connector_name, ConnectorName}, + {connector_type, Type} + ], + ActionName = <<"a">>, + ActionParams = [ + {action_config, ActionConfig1}, + {action_name, ActionName}, + {action_type, Type} + ], + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_connector_api(ConnectorParams), + + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_action_api(ActionParams), + RuleTopic = <<"t/a">>, + {ok, #{<<"id">> := RuleId}} = + emqx_bridge_v2_testlib:create_rule_and_action_http(Type, RuleTopic, [ + {bridge_name, ActionName} + ]), + + {ok, C} = emqtt:start_link([]), + {ok, _} = emqtt:connect(C), + + %%-------------------------------------------- + ?tp(notice, "sync", 
#{}), + %%-------------------------------------------- + %% Artificially force sync query to be used; otherwise, it's only used when the + %% resource is blocked and retrying. + ok = meck:new(emqx_bridge_kafka_impl_producer, [passthrough, no_history]), + on_exit(fun() -> catch meck:unload() end), + ok = meck:expect(emqx_bridge_kafka_impl_producer, query_mode, 1, simple_sync), + + %% Simulate `invalid_partition_count' + emqx_common_test_helpers:with_mock( + wolff, + send_sync, + fun(_Producers, _Msgs, _Timeout) -> + error({invalid_partition_count, 0, partitioner}) + end, + fun() -> + {{ok, _}, {ok, _}} = + ?wait_async_action( + emqtt:publish(C, RuleTopic, <<"hi">>, 2), + #{ + ?snk_kind := "kafka_producer_invalid_partition_count", + query_mode := sync + } + ), + ?assertMatch( + #{ + counters := #{ + 'actions.total' := 1, + 'actions.failed' := 1 + } + }, + get_rule_metrics(RuleId) + ), + ok + end + ), + + %%-------------------------------------------- + %% Same thing, but async call + ?tp(notice, "async", #{}), + %%-------------------------------------------- + ok = meck:expect( + emqx_bridge_kafka_impl_producer, + query_mode, + fun(Conf) -> meck:passthrough([Conf]) end + ), + ok = emqx_bridge_v2:remove(actions, Type, ActionName), + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_action_api( + ActionParams, + #{<<"parameters">> => #{<<"query_mode">> => <<"async">>}} + ), + + %% Simulate `invalid_partition_count' + emqx_common_test_helpers:with_mock( + wolff, + send, + fun(_Producers, _Msgs, _Timeout) -> + error({invalid_partition_count, 0, partitioner}) + end, + fun() -> + {{ok, _}, {ok, _}} = + ?wait_async_action( + emqtt:publish(C, RuleTopic, <<"hi">>, 2), + #{?snk_kind := "rule_engine_applied_all_rules"} + ), + ?assertMatch( + #{ + counters := #{ + 'actions.total' := 2, + 'actions.failed' := 2 + } + }, + get_rule_metrics(RuleId) + ), + ok + end + ), + + ok + end, + fun(Trace) -> + ?assertMatch( + [#{query_mode := sync}, #{query_mode := async} | _], + 
?of_kind("kafka_producer_invalid_partition_count", Trace) + ), + ok + end + ), + ok. + +%% Tests that deleting/disabling an action that share the same Kafka topic with other +%% actions do not disturb the latter. +t_multiple_actions_sharing_topic(Config) -> + Type = proplists:get_value(type, Config, ?TYPE), + ConnectorName = proplists:get_value(connector_name, Config, <<"c">>), + ConnectorConfig = proplists:get_value(connector_config, Config, connector_config()), + ActionConfig = proplists:get_value(action_config, Config, action_config(ConnectorName)), + ?check_trace( + begin + ConnectorParams = [ + {connector_config, ConnectorConfig}, + {connector_name, ConnectorName}, + {connector_type, Type} + ], + ActionName1 = <<"a1">>, + ActionParams1 = [ + {action_config, ActionConfig}, + {action_name, ActionName1}, + {action_type, Type} + ], + ActionName2 = <<"a2">>, + ActionParams2 = [ + {action_config, ActionConfig}, + {action_name, ActionName2}, + {action_type, Type} + ], + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_connector_api(ConnectorParams), + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_action_api(ActionParams1), + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_action_api(ActionParams2), + RuleTopic = <<"t/a2">>, + {ok, _} = emqx_bridge_v2_testlib:create_rule_and_action_http(Type, RuleTopic, Config), + + ?assertStatusAPI(Type, ActionName1, <<"connected">>), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + + %% Disabling a1 shouldn't disturb a2. 
+ ?assertMatch( + {204, _}, emqx_bridge_v2_testlib:disable_kind_api(action, Type, ActionName1) + ), + + ?assertStatusAPI(Type, ActionName1, <<"disconnected">>), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + + ?assertMatch(#{result := ok}, send_message(Type, ActionName2)), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + + ?assertMatch( + {204, _}, + emqx_bridge_v2_testlib:enable_kind_api(action, Type, ActionName1) + ), + ?assertStatusAPI(Type, ActionName1, <<"connected">>), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + ?assertMatch(#{result := ok}, send_message(Type, ActionName2)), + + %% Deleting also shouldn't disrupt a2. + ?assertMatch( + {204, _}, + emqx_bridge_v2_testlib:delete_kind_api(action, Type, ActionName1) + ), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + ?assertMatch(#{result := ok}, send_message(Type, ActionName2)), + + ok + end, + fun(Trace) -> + ?assertEqual([], ?of_kind("kafka_producer_invalid_partition_count", Trace)), + ok + end + ), + ok. 
diff --git a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.app.src b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.app.src index d1273e923..f411b95fb 100644 --- a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.app.src +++ b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_kinesis, [ {description, "EMQX Enterprise Amazon Kinesis Bridge"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_mongodb/src/emqx_bridge_mongodb.app.src b/apps/emqx_bridge_mongodb/src/emqx_bridge_mongodb.app.src index a36253c34..df9935dbb 100644 --- a/apps/emqx_bridge_mongodb/src/emqx_bridge_mongodb.app.src +++ b/apps/emqx_bridge_mongodb/src/emqx_bridge_mongodb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_mongodb, [ {description, "EMQX Enterprise MongoDB Bridge"}, - {vsn, "0.3.1"}, + {vsn, "0.3.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src index e66d97a07..26b8967f0 100644 --- a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src +++ b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_mqtt, [ {description, "EMQX MQTT Broker Bridge"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_mysql/src/emqx_bridge_mysql.app.src b/apps/emqx_bridge_mysql/src/emqx_bridge_mysql.app.src index a7ec0c31e..63bc61e62 100644 --- a/apps/emqx_bridge_mysql/src/emqx_bridge_mysql.app.src +++ b/apps/emqx_bridge_mysql/src/emqx_bridge_mysql.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_mysql, [ {description, "EMQX Enterprise MySQL Bridge"}, - {vsn, "0.1.6"}, + {vsn, "0.1.7"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_opents/src/emqx_bridge_opents.app.src b/apps/emqx_bridge_opents/src/emqx_bridge_opents.app.src index 
808db650f..65cc97e4c 100644 --- a/apps/emqx_bridge_opents/src/emqx_bridge_opents.app.src +++ b/apps/emqx_bridge_opents/src/emqx_bridge_opents.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_opents, [ {description, "EMQX Enterprise OpenTSDB Bridge"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.app.src b/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.app.src index 7973ecf95..a706d8e2f 100644 --- a/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.app.src +++ b/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_oracle, [ {description, "EMQX Enterprise Oracle Database Bridge"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.app.src b/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.app.src index d223a2488..64ac592a5 100644 --- a/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.app.src +++ b/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_pgsql, [ {description, "EMQX Enterprise PostgreSQL Bridge"}, - {vsn, "0.1.7"}, + {vsn, "0.1.8"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src b/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src index be0f39002..a8eeba483 100644 --- a/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src +++ b/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_pulsar, [ {description, "EMQX Pulsar Bridge"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_v2_SUITE.erl b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_v2_SUITE.erl index 0636806de..11caa15c6 100644 --- a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_v2_SUITE.erl +++ 
b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_v2_SUITE.erl @@ -127,23 +127,18 @@ init_per_testcase(TestCase, Config) -> common_init_per_testcase(TestCase, Config). end_per_testcase(_Testcase, Config) -> - case proplists:get_bool(skip_does_not_apply, Config) of - true -> - ok; - false -> - ok = emqx_config:delete_override_conf_files(), - ProxyHost = ?config(proxy_host, Config), - ProxyPort = ?config(proxy_port, Config), - emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort), - emqx_bridge_v2_testlib:delete_all_bridges(), - stop_consumer(Config), - %% in CI, apparently this needs more time since the - %% machines struggle with all the containers running... - emqx_common_test_helpers:call_janitor(60_000), - ok = snabbkaffe:stop(), - flush_consumed(), - ok - end. + ok = emqx_config:delete_override_conf_files(), + ProxyHost = ?config(proxy_host, Config), + ProxyPort = ?config(proxy_port, Config), + emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort), + emqx_bridge_v2_testlib:delete_all_bridges(), + stop_consumer(Config), + %% in CI, apparently this needs more time since the + %% machines struggle with all the containers running... + emqx_common_test_helpers:call_janitor(60_000), + ok = snabbkaffe:stop(), + flush_consumed(), + ok. common_init_per_testcase(TestCase, Config0) -> ct:timetrap(timer:seconds(60)), @@ -160,6 +155,10 @@ common_init_per_testcase(TestCase, Config0) -> ok = snabbkaffe:start_trace(), Config. +%%------------------------------------------------------------------------------ +%% Helper fns +%%------------------------------------------------------------------------------ + create_connector(Name, Config) -> Connector = pulsar_connector(Config), {ok, _} = emqx_connector:create(?TYPE, Name, Connector). @@ -174,69 +173,6 @@ create_action(Name, Config) -> delete_action(Name) -> ok = emqx_bridge_v2:remove(actions, ?TYPE, Name). 
-%%------------------------------------------------------------------------------ -%% Testcases -%%------------------------------------------------------------------------------ - -t_action_probe(Config) -> - Name = atom_to_binary(?FUNCTION_NAME), - Action = pulsar_action(Config), - {ok, Res0} = emqx_bridge_v2_testlib:probe_bridge_api(action, ?TYPE, Name, Action), - ?assertMatch({{_, 204, _}, _, _}, Res0), - ok. - -t_action(Config) -> - Name = atom_to_binary(?FUNCTION_NAME), - create_action(Name, Config), - Actions = emqx_bridge_v2:list(actions), - Any = fun(#{name := BName}) -> BName =:= Name end, - ?assert(lists:any(Any, Actions), Actions), - Topic = <<"lkadfdaction">>, - {ok, #{id := RuleId}} = emqx_rule_engine:create_rule( - #{ - sql => <<"select * from \"", Topic/binary, "\"">>, - id => atom_to_binary(?FUNCTION_NAME), - actions => [<<"pulsar:", Name/binary>>], - description => <<"bridge_v2 send msg to pulsar action">> - } - ), - on_exit(fun() -> emqx_rule_engine:delete_rule(RuleId) end), - MQTTClientID = <<"pulsar_mqtt_clientid">>, - {ok, C1} = emqtt:start_link([{clean_start, true}, {clientid, MQTTClientID}]), - {ok, _} = emqtt:connect(C1), - ReqPayload = payload(), - ReqPayloadBin = emqx_utils_json:encode(ReqPayload), - {ok, _} = emqtt:publish(C1, Topic, #{}, ReqPayloadBin, [{qos, 1}, {retain, false}]), - [#{<<"clientid">> := ClientID, <<"payload">> := RespPayload}] = receive_consumed(5000), - ?assertEqual(MQTTClientID, ClientID), - ?assertEqual(ReqPayload, emqx_utils_json:decode(RespPayload)), - ok = emqtt:disconnect(C1), - InstanceId = instance_id(actions, Name), - ?retry( - 100, - 20, - ?assertMatch( - #{ - counters := #{ - dropped := 0, - success := 1, - matched := 1, - failed := 0, - received := 0 - } - }, - emqx_resource:get_metrics(InstanceId) - ) - ), - ok = delete_action(Name), - ActionsAfterDelete = emqx_bridge_v2:list(actions), - ?assertNot(lists:any(Any, ActionsAfterDelete), ActionsAfterDelete), - ok. 
- -%%------------------------------------------------------------------------------ -%% Helper fns -%%------------------------------------------------------------------------------ - pulsar_connector(Config) -> PulsarHost = ?config(pulsar_host, Config), PulsarPort = ?config(pulsar_port, Config), @@ -455,3 +391,158 @@ maybe_skip_without_ci() -> _ -> {skip, no_pulsar} end. + +assert_status_api(Line, Type, Name, Status) -> + ?assertMatch( + {ok, + {{_, 200, _}, _, #{ + <<"status">> := Status, + <<"node_status">> := [#{<<"status">> := Status}] + }}}, + emqx_bridge_v2_testlib:get_bridge_api(Type, Name), + #{line => Line, name => Name, expected_status => Status} + ). +-define(assertStatusAPI(TYPE, NAME, STATUS), assert_status_api(?LINE, TYPE, NAME, STATUS)). + +%%------------------------------------------------------------------------------ +%% Testcases +%%------------------------------------------------------------------------------ + +t_action_probe(Config) -> + Name = atom_to_binary(?FUNCTION_NAME), + Action = pulsar_action(Config), + {ok, Res0} = emqx_bridge_v2_testlib:probe_bridge_api(action, ?TYPE, Name, Action), + ?assertMatch({{_, 204, _}, _, _}, Res0), + ok. 
+ +t_action(Config) -> + Name = atom_to_binary(?FUNCTION_NAME), + create_action(Name, Config), + Actions = emqx_bridge_v2:list(actions), + Any = fun(#{name := BName}) -> BName =:= Name end, + ?assert(lists:any(Any, Actions), Actions), + Topic = <<"lkadfdaction">>, + {ok, #{id := RuleId}} = emqx_rule_engine:create_rule( + #{ + sql => <<"select * from \"", Topic/binary, "\"">>, + id => atom_to_binary(?FUNCTION_NAME), + actions => [<<"pulsar:", Name/binary>>], + description => <<"bridge_v2 send msg to pulsar action">> + } + ), + on_exit(fun() -> emqx_rule_engine:delete_rule(RuleId) end), + MQTTClientID = <<"pulsar_mqtt_clientid">>, + {ok, C1} = emqtt:start_link([{clean_start, true}, {clientid, MQTTClientID}]), + {ok, _} = emqtt:connect(C1), + ReqPayload = payload(), + ReqPayloadBin = emqx_utils_json:encode(ReqPayload), + {ok, _} = emqtt:publish(C1, Topic, #{}, ReqPayloadBin, [{qos, 1}, {retain, false}]), + [#{<<"clientid">> := ClientID, <<"payload">> := RespPayload}] = receive_consumed(5000), + ?assertEqual(MQTTClientID, ClientID), + ?assertEqual(ReqPayload, emqx_utils_json:decode(RespPayload)), + ok = emqtt:disconnect(C1), + InstanceId = instance_id(actions, Name), + ?retry( + 100, + 20, + ?assertMatch( + #{ + counters := #{ + dropped := 0, + success := 1, + matched := 1, + failed := 0, + received := 0 + } + }, + emqx_resource:get_metrics(InstanceId) + ) + ), + ok = delete_action(Name), + ActionsAfterDelete = emqx_bridge_v2:list(actions), + ?assertNot(lists:any(Any, ActionsAfterDelete), ActionsAfterDelete), + ok. + +%% Tests that deleting/disabling an action that share the same Pulsar topic with other +%% actions do not disturb the latter. 
+t_multiple_actions_sharing_topic(Config) -> + Type = ?TYPE, + ConnectorName = <<"c">>, + ConnectorConfig = pulsar_connector(Config), + ActionConfig = pulsar_action(Config), + ?check_trace( + begin + ConnectorParams = [ + {connector_config, ConnectorConfig}, + {connector_name, ConnectorName}, + {connector_type, Type} + ], + ActionName1 = <<"a1">>, + ActionParams1 = [ + {action_config, ActionConfig}, + {action_name, ActionName1}, + {action_type, Type} + ], + ActionName2 = <<"a2">>, + ActionParams2 = [ + {action_config, ActionConfig}, + {action_name, ActionName2}, + {action_type, Type} + ], + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_connector_api(ConnectorParams), + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_action_api(ActionParams1), + {ok, {{_, 201, _}, _, #{}}} = + emqx_bridge_v2_testlib:create_action_api(ActionParams2), + + ?assertStatusAPI(Type, ActionName1, <<"connected">>), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + + RuleTopic = <<"t/a2">>, + {ok, _} = emqx_bridge_v2_testlib:create_rule_and_action_http(Type, RuleTopic, [ + {bridge_name, ActionName2} + ]), + {ok, C} = emqtt:start_link([]), + {ok, _} = emqtt:connect(C), + SendMessage = fun() -> + ReqPayload = payload(), + ReqPayloadBin = emqx_utils_json:encode(ReqPayload), + {ok, _} = emqtt:publish(C, RuleTopic, #{}, ReqPayloadBin, [ + {qos, 1}, {retain, false} + ]), + ok + end, + + %% Disabling a1 shouldn't disturb a2. 
+ ?assertMatch( + {204, _}, emqx_bridge_v2_testlib:disable_kind_api(action, Type, ActionName1) + ), + + ?assertStatusAPI(Type, ActionName1, <<"disconnected">>), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + + ?assertMatch(ok, SendMessage()), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + + ?assertMatch( + {204, _}, + emqx_bridge_v2_testlib:enable_kind_api(action, Type, ActionName1) + ), + ?assertStatusAPI(Type, ActionName1, <<"connected">>), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + ?assertMatch(ok, SendMessage()), + + %% Deleting also shouldn't disrupt a2. + ?assertMatch( + {204, _}, + emqx_bridge_v2_testlib:delete_kind_api(action, Type, ActionName1) + ), + ?assertStatusAPI(Type, ActionName2, <<"connected">>), + ?assertMatch(ok, SendMessage()), + + ok + end, + [] + ), + ok. diff --git a/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src b/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src index fd6d82115..27a4fedc4 100644 --- a/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src +++ b/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_rabbitmq, [ {description, "EMQX Enterprise RabbitMQ Bridge"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {mod, {emqx_bridge_rabbitmq_app, []}}, {applications, [ diff --git a/apps/emqx_bridge_redis/src/emqx_bridge_redis.app.src b/apps/emqx_bridge_redis/src/emqx_bridge_redis.app.src index acd1837ba..2cd037ed5 100644 --- a/apps/emqx_bridge_redis/src/emqx_bridge_redis.app.src +++ b/apps/emqx_bridge_redis/src/emqx_bridge_redis.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_redis, [ {description, "EMQX Enterprise Redis Bridge"}, - {vsn, "0.1.7"}, + {vsn, "0.1.8"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_redis/test/emqx_bridge_v2_redis_SUITE.erl b/apps/emqx_bridge_redis/test/emqx_bridge_v2_redis_SUITE.erl index 7d3003bfa..725d24a88 100644 --- 
a/apps/emqx_bridge_redis/test/emqx_bridge_v2_redis_SUITE.erl +++ b/apps/emqx_bridge_redis/test/emqx_bridge_v2_redis_SUITE.erl @@ -19,6 +19,7 @@ -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). -define(BRIDGE_TYPE, redis). -define(BRIDGE_TYPE_BIN, <<"redis">>). @@ -46,6 +47,7 @@ matrix_testcases() -> t_start_stop, t_create_via_http, t_on_get_status, + t_on_get_status_no_username_pass, t_sync_query, t_map_to_redis_hset_args ]. @@ -325,6 +327,43 @@ t_on_get_status(Config) when is_list(Config) -> emqx_bridge_v2_testlib:t_on_get_status(Config, #{failure_status => connecting}), ok. +t_on_get_status_no_username_pass(matrix) -> + {on_get_status, [ + [single, tcp], + [cluster, tcp], + [sentinel, tcp] + ]}; +t_on_get_status_no_username_pass(Config0) when is_list(Config0) -> + ConnectorConfig0 = ?config(connector_config, Config0), + ConnectorConfig1 = emqx_utils_maps:deep_put( + [<<"parameters">>, <<"password">>], ConnectorConfig0, <<"">> + ), + ConnectorConfig2 = emqx_utils_maps:deep_put( + [<<"parameters">>, <<"username">>], ConnectorConfig1, <<"">> + ), + Config1 = proplists:delete(connector_config, Config0), + Config2 = [{connector_config, ConnectorConfig2} | Config1], + ?check_trace( + emqx_bridge_v2_testlib:t_on_get_status( + Config2, + #{ + failure_status => disconnected, + normal_status => disconnected + } + ), + fun(ok, Trace) -> + case ?config(redis_type, Config2) of + single -> + ?assertMatch([_ | _], ?of_kind(emqx_redis_auth_required_error, Trace)); + sentinel -> + ?assertMatch([_ | _], ?of_kind(emqx_redis_auth_required_error, Trace)); + cluster -> + ok + end + end + ), + ok. 
+ t_sync_query(matrix) -> {sync_query, [ [single, tcp], diff --git a/apps/emqx_bridge_rocketmq/src/emqx_bridge_rocketmq.app.src b/apps/emqx_bridge_rocketmq/src/emqx_bridge_rocketmq.app.src index 4b52f8dfc..fc59aeeca 100644 --- a/apps/emqx_bridge_rocketmq/src/emqx_bridge_rocketmq.app.src +++ b/apps/emqx_bridge_rocketmq/src/emqx_bridge_rocketmq.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_rocketmq, [ {description, "EMQX Enterprise RocketMQ Bridge"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [kernel, stdlib, emqx_resource, rocketmq]}, {env, [ diff --git a/apps/emqx_bridge_s3/src/emqx_bridge_s3.app.src b/apps/emqx_bridge_s3/src/emqx_bridge_s3.app.src index eea4ff89e..da9cd1a96 100644 --- a/apps/emqx_bridge_s3/src/emqx_bridge_s3.app.src +++ b/apps/emqx_bridge_s3/src/emqx_bridge_s3.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_s3, [ {description, "EMQX Enterprise S3 Bridge"}, - {vsn, "0.1.2"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_s3/src/emqx_bridge_s3_connector.erl b/apps/emqx_bridge_s3/src/emqx_bridge_s3_connector.erl index 768495382..e90016927 100644 --- a/apps/emqx_bridge_s3/src/emqx_bridge_s3_connector.erl +++ b/apps/emqx_bridge_s3/src/emqx_bridge_s3_connector.erl @@ -146,29 +146,22 @@ on_stop(InstId, _State = #{pool_name := PoolName}) -> on_get_status(_InstId, State = #{client_config := Config}) -> case emqx_s3_client:aws_config(Config) of {error, Reason} -> - {?status_disconnected, State, Reason}; + {?status_disconnected, State, map_error_details(Reason)}; AWSConfig -> try erlcloud_s3:list_buckets(AWSConfig) of Props when is_list(Props) -> ?status_connected catch - error:{aws_error, {http_error, _Code, _, Reason}} -> - {?status_disconnected, State, Reason}; - error:{aws_error, {socket_error, Reason}} -> - {?status_disconnected, State, Reason} + error:Error -> + {?status_disconnected, State, map_error_details(Error)} end end. 
-spec on_add_channel(_InstanceId :: resource_id(), state(), channel_id(), channel_config()) -> {ok, state()} | {error, _Reason}. on_add_channel(_InstId, State = #{channels := Channels}, ChannelId, Config) -> - try - ChannelState = start_channel(State, Config), - {ok, State#{channels => Channels#{ChannelId => ChannelState}}} - catch - throw:Reason -> - {error, Reason} - end. + ChannelState = start_channel(State, Config), + {ok, State#{channels => Channels#{ChannelId => ChannelState}}}. -spec on_remove_channel(_InstanceId :: resource_id(), state(), channel_id()) -> {ok, state()}. @@ -217,7 +210,8 @@ start_channel(State, #{ max_records := MaxRecords }, container := Container, - bucket := Bucket + bucket := Bucket, + key := Key } }) -> AggregId = {Type, Name}, @@ -226,7 +220,7 @@ start_channel(State, #{ max_records => MaxRecords, work_dir => work_dir(Type, Name) }, - Template = ensure_ok(emqx_bridge_s3_upload:mk_key_template(Parameters)), + Template = emqx_bridge_s3_upload:mk_key_template(Key), DeliveryOpts = #{ bucket => Bucket, key => Template, @@ -253,11 +247,6 @@ start_channel(State, #{ on_stop => fun() -> ?AGGREG_SUP:delete_child(AggregId) end }. -ensure_ok({ok, V}) -> - V; -ensure_ok({error, Reason}) -> - throw(Reason). - upload_options(Parameters) -> #{acl => maps:get(acl, Parameters, undefined)}. 
@@ -285,7 +274,7 @@ channel_status(#{mode := aggregated, aggreg_id := AggregId, bucket := Bucket}, S check_bucket_accessible(Bucket, #{client_config := Config}) -> case emqx_s3_client:aws_config(Config) of {error, Reason} -> - throw({unhealthy_target, Reason}); + throw({unhealthy_target, map_error_details(Reason)}); AWSConfig -> try erlcloud_s3:list_objects(Bucket, [{max_keys, 1}], AWSConfig) of Props when is_list(Props) -> @@ -293,8 +282,8 @@ check_bucket_accessible(Bucket, #{client_config := Config}) -> catch error:{aws_error, {http_error, 404, _, _Reason}} -> throw({unhealthy_target, "Bucket does not exist"}); - error:{aws_error, {socket_error, Reason}} -> - throw({unhealthy_target, emqx_utils:format(Reason)}) + error:Error -> + throw({unhealthy_target, map_error_details(Error)}) end end. @@ -304,8 +293,7 @@ check_aggreg_upload_errors(AggregId) -> %% TODO %% This approach means that, for example, 3 upload failures will cause %% the channel to be marked as unhealthy for 3 consecutive health checks. - ErrorMessage = emqx_utils:format(Error), - throw({unhealthy_target, ErrorMessage}); + throw({unhealthy_target, map_error_details(Error)}); [] -> ok end. @@ -384,16 +372,38 @@ run_aggregated_upload(InstId, ChannelID, Records, #{aggreg_id := AggregId}) -> ?tp(s3_bridge_aggreg_push_ok, #{instance_id => InstId, name => AggregId}), ok; {error, Reason} -> - {error, {unrecoverable_error, Reason}} + {error, {unrecoverable_error, emqx_utils:explain_posix(Reason)}} end. -map_error({socket_error, _} = Reason) -> - {recoverable_error, Reason}; -map_error(Reason = {aws_error, Status, _, _Body}) when Status >= 500 -> +map_error(Error) -> + {map_error_class(Error), map_error_details(Error)}. 
+ +map_error_class({s3_error, _, _}) -> + unrecoverable_error; +map_error_class({aws_error, Error}) -> + map_error_class(Error); +map_error_class({socket_error, _}) -> + recoverable_error; +map_error_class({http_error, Status, _, _}) when Status >= 500 -> %% https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#ErrorCodeList - {recoverable_error, Reason}; -map_error(Reason) -> - {unrecoverable_error, Reason}. + recoverable_error; +map_error_class(_Error) -> + unrecoverable_error. + +map_error_details({s3_error, Code, Message}) -> + emqx_utils:format("S3 error: ~s ~s", [Code, Message]); +map_error_details({aws_error, Error}) -> + map_error_details(Error); +map_error_details({socket_error, Reason}) -> + emqx_utils:format("Socket error: ~s", [emqx_utils:readable_error_msg(Reason)]); +map_error_details({http_error, _, _, _} = Error) -> + emqx_utils:format("AWS error: ~s", [map_aws_error_details(Error)]); +map_error_details({failed_to_obtain_credentials, Error}) -> + emqx_utils:format("Unable to obtain AWS credentials: ~s", [map_error_details(Error)]); +map_error_details({upload_failed, Error}) -> + map_error_details(Error); +map_error_details(Error) -> + Error. render_bucket(Template, Data) -> case emqx_template:render(Template, {emqx_jsonish, Data}) of @@ -416,6 +426,32 @@ render_content(Template, Data) -> iolist_to_string(IOList) -> unicode:characters_to_list(IOList). +%% + +-include_lib("xmerl/include/xmerl.hrl"). + +-spec map_aws_error_details(_AWSError) -> + unicode:chardata(). 
+map_aws_error_details({http_error, _Status, _, Body}) -> + try xmerl_scan:string(unicode:characters_to_list(Body), [{quiet, true}]) of + {Error = #xmlElement{name = 'Error'}, _} -> + map_aws_error_details(Error); + _ -> + Body + catch + exit:_ -> + Body + end; +map_aws_error_details(#xmlElement{content = Content}) -> + Code = extract_xml_text(lists:keyfind('Code', #xmlElement.name, Content)), + Message = extract_xml_text(lists:keyfind('Message', #xmlElement.name, Content)), + [Code, $:, $\s | Message]. + +extract_xml_text(#xmlElement{content = Content}) -> + [Fragment || #xmlText{value = Fragment} <- Content]; +extract_xml_text(false) -> + []. + %% `emqx_connector_aggreg_delivery` APIs -spec init_transfer_state(buffer(), map()) -> emqx_s3_upload:t(). diff --git a/apps/emqx_bridge_s3/src/emqx_bridge_s3_upload.erl b/apps/emqx_bridge_s3/src/emqx_bridge_s3_upload.erl index 2bf12f24b..bedefc7c5 100644 --- a/apps/emqx_bridge_s3/src/emqx_bridge_s3_upload.erl +++ b/apps/emqx_bridge_s3/src/emqx_bridge_s3_upload.erl @@ -29,7 +29,10 @@ ]). %% Internal exports --export([convert_actions/2]). +-export([ + convert_actions/2, + validate_key_template/1 +]). -define(DEFAULT_AGGREG_BATCH_SIZE, 100). -define(DEFAULT_AGGREG_BATCH_TIME, <<"10ms">>). @@ -137,7 +140,10 @@ fields(s3_aggregated_upload_parameters) -> )} ], emqx_resource_schema:override(emqx_s3_schema:fields(s3_upload), [ - {key, #{desc => ?DESC(s3_aggregated_upload_key)}} + {key, #{ + desc => ?DESC(s3_aggregated_upload_key), + validator => fun ?MODULE:validate_key_template/1 + }} ]), emqx_s3_schema:fields(s3_uploader) ]); @@ -246,23 +252,13 @@ convert_action(Conf = #{<<"parameters">> := Params, <<"resource_opts">> := Resou Conf#{<<"resource_opts">> := NResourceOpts} end. -%% Interpreting options - --spec mk_key_template(_Parameters :: map()) -> - {ok, emqx_template:str()} | {error, _Reason}. 
-mk_key_template(#{key := Key}) -> - Template = emqx_template:parse(Key), +validate_key_template(Conf) -> + Template = emqx_template:parse(Conf), case validate_bindings(emqx_template:placeholders(Template)) of - UsedBindings when is_list(UsedBindings) -> - SuffixTemplate = mk_suffix_template(UsedBindings), - case emqx_template:is_const(SuffixTemplate) of - true -> - {ok, Template}; - false -> - {ok, Template ++ SuffixTemplate} - end; - Error = {error, _} -> - Error + Bindings when is_list(Bindings) -> + ok; + {error, {disallowed_placeholders, Disallowed}} -> + {error, emqx_utils:format("Template placeholders are disallowed: ~p", [Disallowed])} end. validate_bindings(Bindings) -> @@ -276,7 +272,22 @@ validate_bindings(Bindings) -> [] -> Bindings; Disallowed -> - {error, {invalid_key_template, {disallowed_placeholders, Disallowed}}} + {error, {disallowed_placeholders, Disallowed}} + end. + +%% Interpreting options + +-spec mk_key_template(unicode:chardata()) -> + emqx_template:str(). +mk_key_template(Key) -> + Template = emqx_template:parse(Key), + UsedBindings = emqx_template:placeholders(Template), + SuffixTemplate = mk_suffix_template(UsedBindings), + case emqx_template:is_const(SuffixTemplate) of + true -> + Template; + false -> + Template ++ SuffixTemplate end. mk_suffix_template(UsedBindings) -> diff --git a/apps/emqx_bridge_s3/test/emqx_bridge_s3_SUITE.erl b/apps/emqx_bridge_s3/test/emqx_bridge_s3_SUITE.erl index f8eaa1b3a..ea69a346f 100644 --- a/apps/emqx_bridge_s3/test/emqx_bridge_s3_SUITE.erl +++ b/apps/emqx_bridge_s3/test/emqx_bridge_s3_SUITE.erl @@ -134,6 +134,22 @@ action_config(Name, ConnectorId) -> t_start_stop(Config) -> emqx_bridge_v2_testlib:t_start_stop(Config, s3_bridge_stopped). 
+t_create_unavailable_credentials(Config) -> + ConnectorName = ?config(connector_name, Config), + ConnectorType = ?config(connector_type, Config), + ConnectorConfig = maps:without( + [<<"access_key_id">>, <<"secret_access_key">>], + ?config(connector_config, Config) + ), + ?assertMatch( + {ok, + {{_HTTP, 201, _}, _, #{ + <<"status_reason">> := + <<"Unable to obtain AWS credentials:", _/bytes>> + }}}, + emqx_bridge_v2_testlib:create_connector_api(ConnectorName, ConnectorType, ConnectorConfig) + ). + t_ignore_batch_opts(Config) -> {ok, {_Status, _, Bridge}} = emqx_bridge_v2_testlib:create_bridge_api(Config), ?assertMatch( @@ -159,6 +175,13 @@ t_start_broken_update_restart(Config) -> _Attempts = 20, ?assertEqual({ok, disconnected}, emqx_resource_manager:health_check(ConnectorId)) ), + ?assertMatch( + {ok, + {{_HTTP, 200, _}, _, #{ + <<"status_reason">> := <<"AWS error: SignatureDoesNotMatch:", _/bytes>> + }}}, + emqx_bridge_v2_testlib:get_connector_api(Type, Name) + ), ?assertMatch( {ok, {{_HTTP, 200, _}, _, _}}, emqx_bridge_v2_testlib:update_connector_api(Name, Type, ConnectorConf) diff --git a/apps/emqx_bridge_s3/test/emqx_bridge_s3_aggreg_upload_SUITE.erl b/apps/emqx_bridge_s3/test/emqx_bridge_s3_aggreg_upload_SUITE.erl index b7c17bbaa..345c2e9aa 100644 --- a/apps/emqx_bridge_s3/test/emqx_bridge_s3_aggreg_upload_SUITE.erl +++ b/apps/emqx_bridge_s3/test/emqx_bridge_s3_aggreg_upload_SUITE.erl @@ -177,6 +177,27 @@ t_create_invalid_config(Config) -> ) ). +t_create_invalid_config_key_template(Config) -> + ?assertMatch( + {error, + {_Status, _, #{ + <<"code">> := <<"BAD_REQUEST">>, + <<"message">> := #{ + <<"kind">> := <<"validation_error">>, + <<"reason">> := <<"Template placeholders are disallowed:", _/bytes>>, + <<"path">> := <<"root.parameters.key">> + } + }}}, + emqx_bridge_v2_testlib:create_bridge_api( + Config, + _Overrides = #{ + <<"parameters">> => #{ + <<"key">> => <<"${action}/${foo}:${bar.rfc3339}">> + } + } + ) + ). 
+ t_update_invalid_config(Config) -> ?assertMatch({ok, _Bridge}, emqx_bridge_v2_testlib:create_bridge(Config)), ?assertMatch( diff --git a/apps/emqx_bridge_sqlserver/src/emqx_bridge_sqlserver.app.src b/apps/emqx_bridge_sqlserver/src/emqx_bridge_sqlserver.app.src index 4b3135f5f..3bc62734c 100644 --- a/apps/emqx_bridge_sqlserver/src/emqx_bridge_sqlserver.app.src +++ b/apps/emqx_bridge_sqlserver/src/emqx_bridge_sqlserver.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_sqlserver, [ {description, "EMQX Enterprise SQL Server Bridge"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [kernel, stdlib, emqx_resource, odbc]}, {env, [ diff --git a/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper.app.src b/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper.app.src index 4f461ab4e..5ae95ca67 100644 --- a/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper.app.src +++ b/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_syskeeper, [ {description, "EMQX Enterprise Data bridge for Syskeeper"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_tdengine/src/emqx_bridge_tdengine.app.src b/apps/emqx_bridge_tdengine/src/emqx_bridge_tdengine.app.src index ab89e8ee6..d358ba8fa 100644 --- a/apps/emqx_bridge_tdengine/src/emqx_bridge_tdengine.app.src +++ b/apps/emqx_bridge_tdengine/src/emqx_bridge_tdengine.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_tdengine, [ {description, "EMQX Enterprise TDEngine Bridge"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_conf/src/emqx_conf.app.src b/apps/emqx_conf/src/emqx_conf.app.src index 1c2fbc77a..dc406b735 100644 --- a/apps/emqx_conf/src/emqx_conf.app.src +++ b/apps/emqx_conf/src/emqx_conf.app.src @@ -1,6 +1,6 @@ {application, emqx_conf, [ {description, "EMQX configuration management"}, - {vsn, "0.2.1"}, + {vsn, "0.2.3"}, {registered, []}, {mod, 
{emqx_conf_app, []}}, {applications, [kernel, stdlib]}, diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index 9ccb88c7a..9ac4298bb 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -163,8 +163,13 @@ dump_schema(Dir, SchemaModule) -> ), emqx_dashboard:save_dispatch_eterm(SchemaModule). -load(emqx_enterprise_schema, emqx_telemetry) -> ignore; -load(_, Lib) -> ok = application:load(Lib). +load(emqx_enterprise_schema, emqx_telemetry) -> + ignore; +load(_, Lib) -> + case application:load(Lib) of + ok -> ok; + {error, {already_loaded, _}} -> ok + end. %% for scripts/spellcheck. gen_schema_json(Dir, SchemaModule, Lang) -> diff --git a/apps/emqx_conf/test/emqx_cluster_rpc_SUITE.erl b/apps/emqx_conf/test/emqx_cluster_rpc_SUITE.erl index b054988be..cfdc5820e 100644 --- a/apps/emqx_conf/test/emqx_cluster_rpc_SUITE.erl +++ b/apps/emqx_conf/test/emqx_cluster_rpc_SUITE.erl @@ -74,13 +74,14 @@ end_per_testcase(_Config) -> t_base_test(_Config) -> ?assertEqual(emqx_cluster_rpc:status(), {atomic, []}), Pid = self(), - MFA = {M, F, A} = {?MODULE, echo, [Pid, test]}, + Msg = ?FUNCTION_NAME, + MFA = {M, F, A} = {?MODULE, echo, [Pid, Msg]}, {ok, TnxId, ok} = multicall(M, F, A), {atomic, Query} = emqx_cluster_rpc:query(TnxId), ?assertEqual(MFA, maps:get(mfa, Query)), ?assertEqual(node(), maps:get(initiator, Query)), ?assert(maps:is_key(created_at, Query)), - ?assertEqual(ok, receive_msg(3, test)), + ?assertEqual(ok, receive_msg(3, Msg)), ?assertEqual({ok, 2, ok}, multicall(M, F, A)), {atomic, Status} = emqx_cluster_rpc:status(), case length(Status) =:= 3 of @@ -118,9 +119,10 @@ t_commit_ok_but_apply_fail_on_other_node(_Config) -> emqx_cluster_rpc:reset(), {atomic, []} = emqx_cluster_rpc:status(), Pid = self(), - {BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, test]}, + Msg = ?FUNCTION_NAME, + {BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, Msg]}, {ok, _TnxId, ok} = multicall(BaseM, BaseF, BaseA), - ?assertEqual(ok, 
receive_msg(3, test)), + ?assertEqual(ok, receive_msg(3, Msg)), {M, F, A} = {?MODULE, failed_on_node, [erlang:whereis(?NODE1)]}, {ok, _, ok} = multicall(M, F, A, 1, 1000), @@ -154,9 +156,10 @@ t_commit_ok_but_apply_fail_on_other_node(_Config) -> t_commit_concurrency(_Config) -> {atomic, []} = emqx_cluster_rpc:status(), Pid = self(), - {BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, test]}, - {ok, _TnxId, ok} = multicall(BaseM, BaseF, BaseA), - ?assertEqual(ok, receive_msg(3, test)), + Msg = ?FUNCTION_NAME, + {BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, Msg]}, + ?assertEqual({ok, 1, ok}, multicall(BaseM, BaseF, BaseA)), + ?assertEqual(ok, receive_msg(3, Msg)), %% call concurrently without stale tnx_id error Workers = lists:seq(1, 256), @@ -231,23 +234,24 @@ t_commit_ok_apply_fail_on_other_node_then_recover(_Config) -> {atomic, [_Status | L]} = emqx_cluster_rpc:status(), ?assertEqual([], L), ets:insert(test, {other_mfa_result, ok}), - {ok, 2, ok} = multicall(io, format, ["test"], 1, 1000), + {ok, 2, ok} = multicall(io, format, ["format:~p~n", [?FUNCTION_NAME]], 1, 1000), ct:sleep(1000), {atomic, NewStatus} = emqx_cluster_rpc:status(), ?assertEqual(3, length(NewStatus)), Pid = self(), - MFAEcho = {M1, F1, A1} = {?MODULE, echo, [Pid, test]}, + Msg = ?FUNCTION_NAME, + MFAEcho = {M1, F1, A1} = {?MODULE, echo, [Pid, Msg]}, {ok, TnxId, ok} = multicall(M1, F1, A1), {atomic, Query} = emqx_cluster_rpc:query(TnxId), ?assertEqual(MFAEcho, maps:get(mfa, Query)), ?assertEqual(node(), maps:get(initiator, Query)), ?assert(maps:is_key(created_at, Query)), - ?assertEqual(ok, receive_msg(3, test)), + ?assertEqual(ok, receive_msg(3, Msg)), ok. 
t_del_stale_mfa(_Config) -> {atomic, []} = emqx_cluster_rpc:status(), - MFA = {M, F, A} = {io, format, ["test"]}, + MFA = {M, F, A} = {io, format, ["format:~p~n", [?FUNCTION_NAME]]}, Keys = lists:seq(1, 50), Keys2 = lists:seq(51, 150), Ids = @@ -288,7 +292,7 @@ t_del_stale_mfa(_Config) -> t_skip_failed_commit(_Config) -> {atomic, []} = emqx_cluster_rpc:status(), - {ok, 1, ok} = multicall(io, format, ["test~n"], all, 1000), + {ok, 1, ok} = multicall(io, format, ["format:~p~n", [?FUNCTION_NAME]], all, 1000), ct:sleep(180), {atomic, List1} = emqx_cluster_rpc:status(), Node = node(), @@ -308,7 +312,7 @@ t_skip_failed_commit(_Config) -> t_fast_forward_commit(_Config) -> {atomic, []} = emqx_cluster_rpc:status(), - {ok, 1, ok} = multicall(io, format, ["test~n"], all, 1000), + {ok, 1, ok} = multicall(io, format, ["format:~p~n", [?FUNCTION_NAME]], all, 1000), ct:sleep(180), {atomic, List1} = emqx_cluster_rpc:status(), Node = node(), @@ -356,7 +360,11 @@ tnx_ids(Status) -> start() -> {ok, _Pid2} = emqx_cluster_rpc:start_link({node(), ?NODE2}, ?NODE2, 500), {ok, _Pid3} = emqx_cluster_rpc:start_link({node(), ?NODE3}, ?NODE3, 500), + ok = emqx_cluster_rpc:wait_for_cluster_rpc(), ok = emqx_cluster_rpc:reset(), + %% Ensure all processes are idle status. + ok = gen_server:call(?NODE2, test), + ok = gen_server:call(?NODE3, test), ok. stop() -> @@ -366,6 +374,7 @@ stop() -> undefined -> ok; P -> + erlang:unregister(N), erlang:unlink(P), erlang:exit(P, kill) end @@ -379,8 +388,9 @@ receive_msg(Count, Msg) when Count > 0 -> receive Msg -> receive_msg(Count - 1, Msg) - after 1000 -> - timeout + after 1300 -> + Msg = iolist_to_binary(io_lib:format("There's still ~w messages to be received", [Count])), + {Msg, flush_msg([])} end. echo(Pid, Msg) -> @@ -425,3 +435,11 @@ multicall(M, F, A, N, T) -> multicall(M, F, A) -> multicall(M, F, A, all, timer:minutes(2)). + +flush_msg(Acc) -> + receive + Msg -> + flush_msg([Msg | Acc]) + after 10 -> + Acc + end. 
diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index ec57f2c85..ca973c862 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_connector, [ {description, "EMQX Data Integration Connectors"}, - {vsn, "0.3.1"}, + {vsn, "0.3.2"}, {registered, []}, {mod, {emqx_connector_app, []}}, {applications, [ diff --git a/apps/emqx_dashboard/src/emqx_dashboard.app.src b/apps/emqx_dashboard/src/emqx_dashboard.app.src index 427f2958a..a4a133df1 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard.app.src +++ b/apps/emqx_dashboard/src/emqx_dashboard.app.src @@ -2,7 +2,7 @@ {application, emqx_dashboard, [ {description, "EMQX Web Dashboard"}, % strict semver, bump manually! - {vsn, "5.1.1"}, + {vsn, "5.1.2"}, {modules, []}, {registered, [emqx_dashboard_sup]}, {applications, [ diff --git a/apps/emqx_dashboard_sso/rebar.config b/apps/emqx_dashboard_sso/rebar.config index 90172af3d..ae4125add 100644 --- a/apps/emqx_dashboard_sso/rebar.config +++ b/apps/emqx_dashboard_sso/rebar.config @@ -4,5 +4,6 @@ {deps, [ {emqx_ldap, {path, "../../apps/emqx_ldap"}}, {emqx_dashboard, {path, "../../apps/emqx_dashboard"}}, - {esaml, {git, "https://github.com/emqx/esaml", {tag, "v1.1.3"}}} + {esaml, {git, "https://github.com/emqx/esaml", {tag, "v1.1.3"}}}, + {oidcc, {git, "https://github.com/emqx/oidcc.git", {tag, "v3.2.0-1"}}} ]}. 
diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src index 0ed5d8025..95d49a150 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src @@ -7,7 +7,8 @@ stdlib, emqx_dashboard, emqx_ldap, - esaml + esaml, + oidcc ]}, {mod, {emqx_dashboard_sso_app, []}}, {env, []}, diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.erl index 2750fc528..599a4ad8f 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.erl +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.erl @@ -92,7 +92,8 @@ provider(Backend) -> backends() -> #{ ldap => emqx_dashboard_sso_ldap, - saml => emqx_dashboard_sso_saml + saml => emqx_dashboard_sso_saml, + oidc => emqx_dashboard_sso_oidc }. format(Args) -> diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_api.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_api.erl index 20bc99de1..81abc8f19 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_api.erl +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_api.erl @@ -33,7 +33,7 @@ backend/2 ]). --export([sso_parameters/1, login_meta/3]). +-export([sso_parameters/1, login_meta/4]). -define(REDIRECT, 'REDIRECT'). -define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD'). @@ -168,7 +168,7 @@ login(post, #{bindings := #{backend := Backend}, body := Body} = Request) -> request => emqx_utils:redact(Request) }), Username = maps:get(<<"username">>, Body), - {200, login_meta(Username, Role, Token)}; + {200, login_meta(Username, Role, Token, Backend)}; {redirect, Redirect} -> ?SLOG(info, #{ msg => "dashboard_sso_login_redirect", @@ -286,11 +286,12 @@ to_redacted_json(Data) -> end ). 
-login_meta(Username, Role, Token) -> +login_meta(Username, Role, Token, Backend) -> #{ username => Username, role => Role, token => Token, version => iolist_to_binary(proplists:get_value(version, emqx_sys:info())), - license => #{edition => emqx_release:edition()} + license => #{edition => emqx_release:edition()}, + backend => Backend }. diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_manager.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_manager.erl index 20289c2b3..6834da9e9 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_manager.erl +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_manager.erl @@ -17,6 +17,7 @@ handle_call/3, handle_cast/2, handle_info/2, + handle_continue/2, terminate/2, code_change/3, format_status/2 @@ -106,7 +107,14 @@ get_backend_status(Backend, _) -> end. update(Backend, Config) -> - update_config(Backend, {?FUNCTION_NAME, Backend, Config}). + UpdateConf = + case emqx:get_raw_config(?MOD_KEY_PATH(Backend), #{}) of + RawConf when is_map(RawConf) -> + emqx_utils:deobfuscate(Config, RawConf); + null -> + Config + end, + update_config(Backend, {?FUNCTION_NAME, Backend, UpdateConf}). delete(Backend) -> update_config(Backend, {?FUNCTION_NAME, Backend}). @@ -154,8 +162,7 @@ init([]) -> {read_concurrency, true} ] ), - start_backend_services(), - {ok, #{}}. + {ok, #{}, {continue, start_backend_services}}. handle_call(_Request, _From, State) -> Reply = ok, @@ -167,6 +174,12 @@ handle_cast(_Request, State) -> handle_info(_Info, State) -> {noreply, State}. +handle_continue(start_backend_services, State) -> + start_backend_services(), + {noreply, State}; +handle_continue(_Info, State) -> + {noreply, State}. + terminate(_Reason, _State) -> remove_handler(), ok. 
diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc.erl new file mode 100644 index 000000000..dbc0d7f0b --- /dev/null +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc.erl @@ -0,0 +1,294 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_dashboard_sso_oidc). + +-include_lib("emqx_dashboard/include/emqx_dashboard.hrl"). +-include_lib("emqx/include/logger.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +-behaviour(emqx_dashboard_sso). + +-export([ + namespace/0, + fields/1, + desc/1 +]). + +-export([ + hocon_ref/0, + login_ref/0, + login/2, + create/1, + update/2, + destroy/1, + convert_certs/2 +]). + +-define(PROVIDER_SVR_NAME, ?MODULE). +-define(RESPHEADERS, #{ + <<"cache-control">> => <<"no-cache">>, + <<"pragma">> => <<"no-cache">>, + <<"content-type">> => <<"text/plain">> +}). +-define(REDIRECT_BODY, <<"Redirecting...">>). +-define(PKCE_VERIFIER_LEN, 60). + +%%------------------------------------------------------------------------------ +%% Hocon Schema +%%------------------------------------------------------------------------------ + +namespace() -> + "sso". + +hocon_ref() -> + hoconsc:ref(?MODULE, oidc). + +login_ref() -> + hoconsc:ref(?MODULE, login). 
+ +fields(oidc) -> + emqx_dashboard_sso_schema:common_backend_schema([oidc]) ++ + [ + {issuer, + ?HOCON( + binary(), + #{desc => ?DESC(issuer), required => true} + )}, + {clientid, + ?HOCON( + binary(), + #{desc => ?DESC(clientid), required => true} + )}, + {secret, + emqx_schema_secret:mk( + maps:merge(#{desc => ?DESC(secret), required => true}, #{}) + )}, + {scopes, + ?HOCON( + ?ARRAY(binary()), + #{desc => ?DESC(scopes), default => [<<"openid">>]} + )}, + {name_var, + ?HOCON( + binary(), + #{desc => ?DESC(name_var), default => <<"${sub}">>} + )}, + {dashboard_addr, + ?HOCON(binary(), #{ + desc => ?DESC(dashboard_addr), + default => <<"http://127.0.0.1:18083">> + })}, + {session_expiry, + ?HOCON(emqx_schema:timeout_duration_s(), #{ + desc => ?DESC(session_expiry), + default => <<"30s">> + })}, + {require_pkce, + ?HOCON(boolean(), #{ + desc => ?DESC(require_pkce), + default => false + })}, + {preferred_auth_methods, + ?HOCON( + ?ARRAY( + ?ENUM([ + private_key_jwt, + client_secret_jwt, + client_secret_post, + client_secret_basic, + none + ]) + ), + #{ + desc => ?DESC(preferred_auth_methods), + default => [ + client_secret_post, + client_secret_basic, + none + ] + } + )}, + {provider, + ?HOCON(?ENUM([okta, generic]), #{ + mapping => "oidcc.provider", + desc => ?DESC(provider), + default => generic + })}, + {fallback_methods, + ?HOCON(?ARRAY(binary()), #{ + mapping => "oidcc.fallback_methods", + desc => ?DESC(fallback_methods), + default => [<<"RS256">>] + })}, + {client_jwks, + %% TODO: add url JWKS + ?HOCON(?UNION([none, ?R_REF(client_file_jwks)]), #{ + desc => ?DESC(client_jwks), + default => none + })} + ]; +fields(client_file_jwks) -> + [ + {type, + ?HOCON(?ENUM([file]), #{ + desc => ?DESC(client_file_jwks_type), + required => true + })}, + {file, + ?HOCON(binary(), #{ + desc => ?DESC(client_file_jwks_file), + required => true + })} + ]; +fields(login) -> + [ + emqx_dashboard_sso_schema:backend_schema([oidc]) + ]. 
+ +desc(oidc) -> + "OIDC"; +desc(client_file_jwks) -> + ?DESC(client_file_jwks); +desc(_) -> + undefined. + +%%------------------------------------------------------------------------------ +%% APIs +%%------------------------------------------------------------------------------ + +create(#{name_var := NameVar} = Config) -> + case + emqx_dashboard_sso_oidc_session:start( + ?PROVIDER_SVR_NAME, + Config + ) + of + {error, _} = Error -> + Error; + _ -> + %% Note: the oidcc maintains an ETS with the same name of the provider gen_server, + %% we should use this name in each API calls not the PID, + %% or it would backoff to sync calls to the gen_server + ClientJwks = init_client_jwks(Config), + {ok, #{ + name => ?PROVIDER_SVR_NAME, + config => Config, + client_jwks => ClientJwks, + name_tokens => emqx_placeholder:preproc_tmpl(NameVar) + }} + end. + +update(Config, State) -> + destroy(State), + create(Config). + +destroy(State) -> + emqx_dashboard_sso_oidc_session:stop(), + try_delete_jwks_file(State). + +-dialyzer({nowarn_function, login/2}). 
+login( + _Req, + #{ + client_jwks := ClientJwks, + config := #{ + clientid := ClientId, + secret := Secret, + scopes := Scopes, + require_pkce := RequirePKCE, + preferred_auth_methods := AuthMethods + } + } = Cfg +) -> + Nonce = emqx_dashboard_sso_oidc_session:random_bin(), + Opts = maybe_require_pkce(RequirePKCE, #{ + scopes => Scopes, + nonce => Nonce, + redirect_uri => emqx_dashboard_sso_oidc_api:make_callback_url(Cfg) + }), + + Data = maps:with([nonce, require_pkce, pkce_verifier], Opts), + State = emqx_dashboard_sso_oidc_session:new(Data), + + case + oidcc:create_redirect_url( + ?PROVIDER_SVR_NAME, + ClientId, + emqx_secret:unwrap(Secret), + Opts#{ + state => State, + client_jwks => ClientJwks, + preferred_auth_methods => AuthMethods + } + ) + of + {ok, [Base, Delimiter, Params]} -> + RedirectUri = <>, + Redirect = {302, ?RESPHEADERS#{<<"location">> => RedirectUri}, ?REDIRECT_BODY}, + {redirect, Redirect}; + {error, _Reason} = Error -> + Error + end. + +convert_certs( + Dir, + #{ + <<"client_jwks">> := #{ + <<"type">> := file, + <<"file">> := Content + } = Jwks + } = Conf +) -> + case save_jwks_file(Dir, Content) of + {ok, Path} -> + Conf#{<<"client_jwks">> := Jwks#{<<"file">> := Path}}; + {error, Reason} -> + ?SLOG(error, #{msg => "failed_to_save_client_jwks", reason => Reason}), + throw("Failed to save client jwks") + end; +convert_certs(_Dir, Conf) -> + Conf. + +%%------------------------------------------------------------------------------ +%% Internal functions +%%------------------------------------------------------------------------------ + +save_jwks_file(Dir, Content) -> + Path = filename:join([emqx_tls_lib:pem_dir(Dir), "client_jwks"]), + case filelib:ensure_dir(Path) of + ok -> + case file:write_file(Path, Content) of + ok -> + {ok, Path}; + {error, Reason} -> + {error, #{failed_to_write_file => Reason, file_path => Path}} + end; + {error, Reason} -> + {error, #{failed_to_create_dir_for => Path, reason => Reason}} + end. 
+ +try_delete_jwks_file(#{config := #{client_jwks := #{type := file, file := File}}}) -> + _ = file:delete(File), + ok; +try_delete_jwks_file(_) -> + ok. + +maybe_require_pkce(false, Opts) -> + Opts; +maybe_require_pkce(true, Opts) -> + Opts#{ + require_pkce => true, + pkce_verifier => emqx_dashboard_sso_oidc_session:random_bin(?PKCE_VERIFIER_LEN) + }. + +init_client_jwks(#{client_jwks := #{type := file, file := File}}) -> + case jose_jwk:from_file(File) of + {error, _} -> + none; + Jwks -> + Jwks + end; +init_client_jwks(_) -> + none. diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc_api.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc_api.erl new file mode 100644 index 000000000..3514b4fbb --- /dev/null +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_oidc_api.erl @@ -0,0 +1,214 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_dashboard_sso_oidc_api). + +-behaviour(minirest_api). + +-include_lib("hocon/include/hoconsc.hrl"). +-include_lib("emqx/include/logger.hrl"). +-include_lib("emqx_dashboard/include/emqx_dashboard.hrl"). + +-import(hoconsc, [ + mk/2, + array/1, + enum/1, + ref/1 +]). + +-import(emqx_dashboard_sso_api, [login_meta/3]). + +-export([ + api_spec/0, + paths/0, + schema/1, + namespace/0 +]). + +-export([code_callback/2, make_callback_url/1]). + +-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD'). +-define(BACKEND_NOT_FOUND, 'BACKEND_NOT_FOUND'). + +-define(RESPHEADERS, #{ + <<"cache-control">> => <<"no-cache">>, + <<"pragma">> => <<"no-cache">>, + <<"content-type">> => <<"text/plain">> +}). +-define(REDIRECT_BODY, <<"Redirecting...">>). + +-define(TAGS, <<"Dashboard Single Sign-On">>). +-define(BACKEND, oidc). +-define(BASE_PATH, "/api/v5"). +-define(CALLBACK_PATH, "/sso/oidc/callback"). + +namespace() -> "dashboard_sso". 
+ +api_spec() -> + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => false, translate_body => false}). + +paths() -> + [ + ?CALLBACK_PATH + ]. + +%% Handles Authorization Code callback from the OP. +schema("/sso/oidc/callback") -> + #{ + 'operationId' => code_callback, + get => #{ + tags => [?TAGS], + desc => ?DESC(code_callback), + responses => #{ + 200 => emqx_dashboard_api:fields([token, version, license]), + 401 => response_schema(401), + 404 => response_schema(404) + }, + security => [] + } + }. + +%%-------------------------------------------------------------------- +%% API +%%-------------------------------------------------------------------- +code_callback(get, #{query_string := QS}) -> + case ensure_sso_state(QS) of + {ok, Target} -> + ?SLOG(info, #{ + msg => "dashboard_sso_login_successful" + }), + + {302, ?RESPHEADERS#{<<"location">> => Target}, ?REDIRECT_BODY}; + {error, invalid_backend} -> + {404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}}; + {error, Reason} -> + ?SLOG(info, #{ + msg => "dashboard_sso_login_failed", + reason => emqx_utils:redact(Reason) + }), + {401, #{code => ?BAD_USERNAME_OR_PWD, message => reason_to_message(Reason)}} + end. + +%%-------------------------------------------------------------------- +%% internal +%%-------------------------------------------------------------------- + +response_schema(401) -> + emqx_dashboard_swagger:error_codes([?BAD_USERNAME_OR_PWD], ?DESC(login_failed401)); +response_schema(404) -> + emqx_dashboard_swagger:error_codes([?BACKEND_NOT_FOUND], ?DESC(backend_not_found)). + +reason_to_message(Bin) when is_binary(Bin) -> + Bin; +reason_to_message(Term) -> + erlang:iolist_to_binary(io_lib:format("~p", [Term])). + +ensure_sso_state(QS) -> + case emqx_dashboard_sso_manager:lookup_state(?BACKEND) of + undefined -> + {error, invalid_backend}; + Cfg -> + ensure_oidc_state(QS, Cfg) + end. 
+ +ensure_oidc_state(#{<<"state">> := State} = QS, Cfg) -> + case emqx_dashboard_sso_oidc_session:lookup(State) of + {ok, Data} -> + emqx_dashboard_sso_oidc_session:delete(State), + retrieve_token(QS, Cfg, Data); + _ -> + {error, session_not_exists} + end. + +retrieve_token( + #{<<"code">> := Code}, + #{ + name := Name, + client_jwks := ClientJwks, + config := #{ + clientid := ClientId, + secret := Secret, + preferred_auth_methods := AuthMethods + } + } = Cfg, + Data +) -> + case + oidcc:retrieve_token( + Code, + Name, + ClientId, + emqx_secret:unwrap(Secret), + Data#{ + redirect_uri => make_callback_url(Cfg), + client_jwks => ClientJwks, + preferred_auth_methods => AuthMethods + } + ) + of + {ok, Token} -> + retrieve_userinfo(Token, Cfg); + {error, _Reason} = Error -> + Error + end. + +retrieve_userinfo( + Token, + #{ + name := Name, + client_jwks := ClientJwks, + config := #{clientid := ClientId, secret := Secret}, + name_tokens := NameTks + } = Cfg +) -> + case + oidcc:retrieve_userinfo( + Token, + Name, + ClientId, + emqx_secret:unwrap(Secret), + #{client_jwks => ClientJwks} + ) + of + {ok, UserInfo} -> + ?SLOG(debug, #{ + msg => "sso_oidc_login_user_info", + user_info => UserInfo + }), + Username = emqx_placeholder:proc_tmpl(NameTks, UserInfo), + ensure_user_exists(Cfg, Username); + {error, _Reason} = Error -> + Error + end. + +-dialyzer({nowarn_function, ensure_user_exists/2}). 
+ensure_user_exists(_Cfg, <<>>) -> + {error, <<"Username can not be empty">>}; +ensure_user_exists(_Cfg, <<"undefined">>) -> + {error, <<"Username can not be undefined">>}; +ensure_user_exists(Cfg, Username) -> + case emqx_dashboard_admin:lookup_user(?BACKEND, Username) of + [User] -> + case emqx_dashboard_token:sign(User, <<>>) of + {ok, Role, Token} -> + {ok, login_redirect_target(Cfg, Username, Role, Token)}; + Error -> + Error + end; + [] -> + case emqx_dashboard_admin:add_sso_user(?BACKEND, Username, ?ROLE_VIEWER, <<>>) of + {ok, _} -> + ensure_user_exists(Cfg, Username); + Error -> + Error + end + end. + +make_callback_url(#{config := #{dashboard_addr := Addr}}) -> + list_to_binary(binary_to_list(Addr) ++ ?BASE_PATH ++ ?CALLBACK_PATH). + +login_redirect_target(#{config := #{dashboard_addr := Addr}}, Username, Role, Token) -> + LoginMeta = emqx_dashboard_sso_api:login_meta(Username, Role, Token, oidc), + MetaBin = base64:encode(emqx_utils_json:encode(LoginMeta)), + < + gen_server:start_link({local, ?MODULE}, ?MODULE, Cfg, []). + +start(Name, #{issuer := Issuer, session_expiry := SessionExpiry0}) -> + case + emqx_dashboard_sso_sup:start_child( + oidcc_provider_configuration_worker, + [ + #{ + issuer => Issuer, + name => {local, Name} + } + ] + ) + of + {error, _} = Error -> + Error; + _ -> + SessionExpiry = timer:seconds(SessionExpiry0), + emqx_dashboard_sso_sup:start_child(?MODULE, [SessionExpiry]) + end. + +stop() -> + _ = emqx_dashboard_sso_sup:stop_child(oidcc_provider_configuration_worker), + _ = emqx_dashboard_sso_sup:stop_child(?MODULE), + ok. + +new(Data) -> + State = new_state(), + ets:insert( + ?TAB, + #?TAB{ + state = State, + created_at = ?NOW, + data = Data + } + ), + State. + +delete(State) -> + ets:delete(?TAB, State). + +lookup(State) -> + case ets:lookup(?TAB, State) of + [#?TAB{data = Data}] -> + {ok, Data}; + _ -> + undefined + end. + +random_bin() -> + random_bin(?DEFAULT_RANDOM_LEN). 
+ +random_bin(Len) -> + emqx_utils_conv:bin(emqx_utils:gen_id(Len)). + +%%------------------------------------------------------------------------------ +%% gen_server callbacks +%%------------------------------------------------------------------------------ +init(SessionExpiry) -> + process_flag(trap_exit, true), + emqx_utils_ets:new( + ?TAB, + [ + ordered_set, + public, + named_table, + {keypos, #?TAB.state}, + {read_concurrency, true} + ] + ), + State = #{session_expiry => SessionExpiry}, + tick_session_expiry(State), + {ok, State}. + +handle_call(_Request, _From, State) -> + Reply = ok, + {reply, Reply, State}. + +handle_cast(_Request, State) -> + {noreply, State}. + +handle_info(tick_session_expiry, #{session_expiry := SessionExpiry} = State) -> + Now = ?NOW, + Spec = ets:fun2ms(fun(#?TAB{created_at = CreatedAt}) -> + Now - CreatedAt >= SessionExpiry + end), + _ = ets:select_delete(?TAB, Spec), + tick_session_expiry(State), + {noreply, State}; +handle_info(_Info, State) -> + {noreply, State}. + +terminate(_Reason, _State) -> + ok. + +code_change(_OldVsn, State, _Extra) -> + {ok, State}. + +format_status(_Opt, Status) -> + Status. + +%%------------------------------------------------------------------------------ +%% Internal functions +%%------------------------------------------------------------------------------ +new_state() -> + State = random_bin(), + case ets:lookup(?TAB, State) of + [] -> + State; + _ -> + new_state() + end. + +tick_session_expiry(#{session_expiry := SessionExpiry}) -> + erlang:send_after(SessionExpiry, self(), tick_session_expiry). diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_saml.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_saml.erl index eaa550d64..9efd2effd 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_saml.erl +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_saml.erl @@ -273,7 +273,7 @@ is_msie(Headers) -> not (binary:match(UA, <<"MSIE">>) =:= nomatch). 
login_redirect_target(DashboardAddr, Username, Role, Token) -> - LoginMeta = emqx_dashboard_sso_api:login_meta(Username, Role, Token), + LoginMeta = emqx_dashboard_sso_api:login_meta(Username, Role, Token, saml), < Apps = emqx_cth_suite:start( [ {emqx_conf, ?CONF_DEFAULT}, - emqx_gateway, - emqx_auth, - emqx_auth_redis, - emqx_auth_mnesia + emqx_gateway ], #{work_dir => emqx_cth_suite:work_dir(Config)} ), @@ -50,25 +47,6 @@ end_per_suite(Config) -> emqx_config:delete_override_conf_files(), ok. -init_per_testcase(t_get_basic_usage_info_2, Config) -> - DataDir = ?config(data_dir, Config), - application:stop(emqx_gateway), - ok = setup_fake_usage_data(DataDir), - Config; -init_per_testcase(_TestCase, Config) -> - Config. - -end_per_testcase(t_get_basic_usage_info_2, _Config) -> - emqx_gateway_cm:unregister_channel(lwm2m, <<"client_id">>), - emqx_config:put([gateway], #{}), - emqx_common_test_helpers:stop_apps([emqx_gateway]), - emqx_config:erase(gateway), - emqx_common_test_helpers:load_config(emqx_gateway_schema, ?CONF_DEFAULT), - emqx_common_test_helpers:start_apps([emqx_gateway]), - ok; -end_per_testcase(_TestCase, _Config) -> - ok. - %%-------------------------------------------------------------------- %% cases %%-------------------------------------------------------------------- @@ -160,27 +138,8 @@ t_get_basic_usage_info_1(_Config) -> } }, emqx_gateway:get_basic_usage_info() - ). - -t_get_basic_usage_info_2(_Config) -> - ?assertEqual( - #{ - lwm2m => - #{ - authn => <<"password_based:redis">>, - listeners => - [ - #{ - authn => - <<"password_based:built_in_database">>, - type => udp - } - ], - num_clients => 1 - } - }, - emqx_gateway:get_basic_usage_info() - ). + ), + ok = emqx_gateway:unload(?GWNAME). 
%%-------------------------------------------------------------------- %% helper functions diff --git a/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl b/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl index 6e2afe105..e74b25a06 100644 --- a/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl @@ -48,8 +48,6 @@ init_per_suite(Conf) -> Apps = emqx_cth_suite:start( [ emqx_conf, - emqx_auth, - emqx_auth_mnesia, emqx_management, {emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}, {emqx_gateway, ?CONF_DEFAULT} @@ -62,7 +60,8 @@ init_per_suite(Conf) -> end_per_suite(Conf) -> _ = emqx_common_test_http:delete_default_app(), - ok = emqx_cth_suite:stop(proplists:get_value(suite_apps, Conf)). + ok = emqx_cth_suite:stop(proplists:get_value(suite_apps, Conf)), + emqx_config:delete_override_conf_files(). init_per_testcase(t_gateway_fail, Config) -> meck:expect( diff --git a/apps/emqx_gateway/test/emqx_gateway_authn_SUITE.erl b/apps/emqx_gateway/test/emqx_gateway_authn_SUITE.erl index 1489f8a55..936856c2e 100644 --- a/apps/emqx_gateway/test/emqx_gateway_authn_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_gateway_authn_SUITE.erl @@ -57,8 +57,6 @@ init_per_group(AuthName, Conf) -> Apps = emqx_cth_suite:start( [ emqx_conf, - emqx_auth, - emqx_auth_http, emqx_management, {emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}, {emqx_gateway, emqx_gateway_auth_ct:list_gateway_conf()} @@ -73,6 +71,7 @@ init_per_group(AuthName, Conf) -> end_per_group(AuthName, Conf) -> ok = emqx_gateway_auth_ct:stop_auth(AuthName), _ = emqx_common_test_http:delete_default_app(), + emqx_config:delete_override_conf_files(), ok = emqx_cth_suite:stop(?config(group_apps, Conf)), Conf. 
diff --git a/apps/emqx_gateway/test/emqx_gateway_authz_SUITE.erl b/apps/emqx_gateway/test/emqx_gateway_authz_SUITE.erl index 45f716a9e..5b262158f 100644 --- a/apps/emqx_gateway/test/emqx_gateway_authz_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_gateway_authz_SUITE.erl @@ -57,8 +57,6 @@ init_per_group(AuthName, Conf) -> Apps = emqx_cth_suite:start( [ {emqx_conf, "authorization { no_match = deny, cache { enable = false } }"}, - emqx_auth, - emqx_auth_http, {emqx_gateway, emqx_gateway_auth_ct:list_gateway_conf()} | emqx_gateway_test_utils:all_gateway_apps() ], diff --git a/apps/emqx_gateway/test/emqx_gateway_cli_SUITE.erl b/apps/emqx_gateway/test/emqx_gateway_cli_SUITE.erl index 2281262df..5ec956f31 100644 --- a/apps/emqx_gateway/test/emqx_gateway_cli_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_gateway_cli_SUITE.erl @@ -20,6 +20,7 @@ -compile(nowarn_export_all). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). -define(GP(S), begin S, @@ -29,13 +30,6 @@ end end). -%% this parses to #{}, will not cause config cleanup -%% so we will need call emqx_config:erase --define(CONF_DEFAULT, << - "\n" - "gateway {}\n" ->>). - %% The config with json format for mqtt-sn gateway -define(CONF_MQTTSN, "\n" @@ -65,13 +59,18 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Conf) -> emqx_config:erase(gateway), emqx_gateway_test_utils:load_all_gateway_apps(), - emqx_common_test_helpers:load_config(emqx_gateway_schema, ?CONF_DEFAULT), - emqx_mgmt_api_test_util:init_suite([emqx_conf, emqx_auth, emqx_gateway]), - Conf. + Apps = emqx_cth_suite:start( + [ + {emqx_conf, <<"gateway {}">>}, + emqx_gateway + ], + #{work_dir => emqx_cth_suite:work_dir(Conf)} + ), + [{suite_apps, Apps} | Conf]. end_per_suite(Conf) -> - emqx_mgmt_api_test_util:end_suite([emqx_gateway, emqx_auth, emqx_conf]), - Conf. + emqx_cth_suite:stop(?config(suite_apps, Conf)), + emqx_config:delete_override_conf_files(). 
init_per_testcase(_, Conf) -> Self = self(), diff --git a/apps/emqx_gateway/test/emqx_gateway_conf_SUITE.erl b/apps/emqx_gateway/test/emqx_gateway_conf_SUITE.erl index a600d7915..e084214bf 100644 --- a/apps/emqx_gateway/test/emqx_gateway_conf_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_gateway_conf_SUITE.erl @@ -28,6 +28,7 @@ ). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). %%-------------------------------------------------------------------- %% Setups @@ -38,12 +39,17 @@ all() -> init_per_suite(Conf) -> emqx_gateway_test_utils:load_all_gateway_apps(), - emqx_common_test_helpers:load_config(emqx_gateway_schema, <<"gateway {}">>), - emqx_common_test_helpers:start_apps([emqx_conf, emqx_auth, emqx_auth_mnesia, emqx_gateway]), - Conf. + Apps = emqx_cth_suite:start( + [ + {emqx_conf, <<"gateway {}">>}, + emqx_gateway + ], + #{work_dir => emqx_cth_suite:work_dir(Conf)} + ), + [{suite_apps, Apps} | Conf]. -end_per_suite(_Conf) -> - emqx_common_test_helpers:stop_apps([emqx_gateway, emqx_auth, emqx_auth_mnesia, emqx_conf]), +end_per_suite(Conf) -> + emqx_cth_suite:stop(?config(suite_apps, Conf)), emqx_config:delete_override_conf_files(). init_per_testcase(_CaseName, Conf) -> diff --git a/apps/emqx_gateway_coap/src/emqx_coap_channel.erl b/apps/emqx_gateway_coap/src/emqx_coap_channel.erl index fbab1ff14..844677d12 100644 --- a/apps/emqx_gateway_coap/src/emqx_coap_channel.erl +++ b/apps/emqx_gateway_coap/src/emqx_coap_channel.erl @@ -85,7 +85,7 @@ -define(INFO_KEYS, [conninfo, conn_state, clientinfo, session]). --define(DEF_IDLE_TIME, timer:seconds(30)). +-define(DEF_IDLE_SECONDS, 30). -import(emqx_coap_medium, [reply/2, reply/3, reply/4, iter/3, iter/4]). 
@@ -149,7 +149,7 @@ init( mountpoint => Mountpoint } ), - Heartbeat = maps:get(heartbeat, Config, ?DEF_IDLE_TIME), + Heartbeat = maps:get(heartbeat, Config, ?DEF_IDLE_SECONDS), #channel{ ctx = Ctx, conninfo = ConnInfo, @@ -378,7 +378,7 @@ ensure_keepalive_timer(Channel) -> ensure_keepalive_timer(fun ensure_timer/4, Channel). ensure_keepalive_timer(Fun, #channel{keepalive = KeepAlive} = Channel) -> - Heartbeat = emqx_keepalive:info(interval, KeepAlive), + Heartbeat = emqx_keepalive:info(check_interval, KeepAlive), Fun(keepalive, Heartbeat, keepalive, Channel). check_auth_state(Msg, #channel{connection_required = false} = Channel) -> @@ -495,7 +495,7 @@ enrich_conninfo( ) -> case Queries of #{<<"clientid">> := ClientId} -> - Interval = maps:get(interval, emqx_keepalive:info(KeepAlive)), + Interval = emqx_keepalive:info(check_interval, KeepAlive), NConnInfo = ConnInfo#{ clientid => ClientId, proto_name => <<"CoAP">>, diff --git a/apps/emqx_gateway_coap/src/emqx_coap_schema.erl b/apps/emqx_gateway_coap/src/emqx_coap_schema.erl index 7d38a2bb6..61d4b7376 100644 --- a/apps/emqx_gateway_coap/src/emqx_coap_schema.erl +++ b/apps/emqx_gateway_coap/src/emqx_coap_schema.erl @@ -19,12 +19,6 @@ -include_lib("hocon/include/hoconsc.hrl"). -include_lib("typerefl/include/types.hrl"). --type duration() :: non_neg_integer(). - --typerefl_from_string({duration/0, emqx_schema, to_duration}). - --reflect_type([duration/0]). - %% config schema provides -export([namespace/0, fields/1, desc/1]). 
@@ -34,7 +28,7 @@ fields(coap) -> [ {heartbeat, sc( - duration(), + emqx_schema:duration_s(), #{ default => <<"30s">>, desc => ?DESC(coap_heartbeat) diff --git a/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src b/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src index 3a715eac4..e9c1f2b4a 100644 --- a/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src +++ b/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway_coap, [ {description, "CoAP Gateway"}, - {vsn, "0.1.8"}, + {vsn, "0.1.9"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl b/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl index 3201d5dbf..bd403a463 100644 --- a/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl +++ b/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl @@ -100,7 +100,7 @@ init_per_testcase(t_heartbeat, Config) -> OldConf = emqx:get_raw_config([gateway, coap]), {ok, _} = emqx_gateway_conf:update_gateway( coap, - OldConf#{<<"heartbeat">> => <<"800ms">>} + OldConf#{<<"heartbeat">> => <<"1s">>} ), [ {old_conf, OldConf}, @@ -216,8 +216,9 @@ t_heartbeat(Config) -> [], emqx_gateway_cm_registry:lookup_channels(coap, <<"client1">>) ), - - timer:sleep(Heartbeat * 2), + %% The minimum timeout time is 1 second. 
+ %% 1.5 * Heartbeat + 0.5 * Heartbeat(< 1s) = 1.5 * 1 + 1 = 2.5 + timer:sleep(Heartbeat * 2 + 1000), ?assertEqual( [], emqx_gateway_cm_registry:lookup_channels(coap, <<"client1">>) diff --git a/apps/emqx_gateway_exproto/src/emqx_exproto_channel.erl b/apps/emqx_gateway_exproto/src/emqx_exproto_channel.erl index c145506c9..93646acbf 100644 --- a/apps/emqx_gateway_exproto/src/emqx_exproto_channel.erl +++ b/apps/emqx_gateway_exproto/src/emqx_exproto_channel.erl @@ -715,7 +715,7 @@ ensure_keepalive_timer(Interval, Channel) when Interval =< 0 -> Channel; ensure_keepalive_timer(Interval, Channel) -> StatVal = emqx_gateway_conn:keepalive_stats(recv), - Keepalive = emqx_keepalive:init(StatVal, timer:seconds(Interval)), + Keepalive = emqx_keepalive:init(default, StatVal, Interval), ensure_timer(keepalive, Channel#channel{keepalive = Keepalive}). ensure_timer(Name, Channel = #channel{timers = Timers}) -> @@ -746,7 +746,7 @@ interval(force_close_idle, #channel{conninfo = #{idle_timeout := IdleTimeout}}) interval(force_close, _) -> 15000; interval(keepalive, #channel{keepalive = Keepalive}) -> - emqx_keepalive:info(interval, Keepalive). + emqx_keepalive:info(check_interval, Keepalive). 
%%-------------------------------------------------------------------- %% Dispatch diff --git a/apps/emqx_gateway_exproto/src/emqx_gateway_exproto.app.src b/apps/emqx_gateway_exproto/src/emqx_gateway_exproto.app.src index 34fcca216..1d5cb85b8 100644 --- a/apps/emqx_gateway_exproto/src/emqx_gateway_exproto.app.src +++ b/apps/emqx_gateway_exproto/src/emqx_gateway_exproto.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway_exproto, [ {description, "ExProto Gateway"}, - {vsn, "0.1.10"}, + {vsn, "0.1.12"}, {registered, []}, {applications, [kernel, stdlib, grpc, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gateway_gbt32960/src/emqx_gateway_gbt32960.app.src b/apps/emqx_gateway_gbt32960/src/emqx_gateway_gbt32960.app.src index 123b60203..f96d112e9 100644 --- a/apps/emqx_gateway_gbt32960/src/emqx_gateway_gbt32960.app.src +++ b/apps/emqx_gateway_gbt32960/src/emqx_gateway_gbt32960.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway_gbt32960, [ {description, "GBT32960 Gateway"}, - {vsn, "0.1.2"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gateway_gbt32960/src/emqx_gbt32960_channel.erl b/apps/emqx_gateway_gbt32960/src/emqx_gbt32960_channel.erl index 9652290d3..809a79f7d 100644 --- a/apps/emqx_gateway_gbt32960/src/emqx_gbt32960_channel.erl +++ b/apps/emqx_gateway_gbt32960/src/emqx_gbt32960_channel.erl @@ -506,7 +506,7 @@ clean_timer(Name, Channel = #channel{timers = Timers}) -> Channel#channel{timers = maps:remove(Name, Timers)}. interval(alive_timer, #channel{keepalive = KeepAlive}) -> - emqx_keepalive:info(interval, KeepAlive); + emqx_keepalive:info(check_interval, KeepAlive); interval(retry_timer, #channel{retx_interval = RetxIntv}) -> RetxIntv. 
diff --git a/apps/emqx_gateway_jt808/src/emqx_gateway_jt808.app.src b/apps/emqx_gateway_jt808/src/emqx_gateway_jt808.app.src index 8e5157695..8d1e33f74 100644 --- a/apps/emqx_gateway_jt808/src/emqx_gateway_jt808.app.src +++ b/apps/emqx_gateway_jt808/src/emqx_gateway_jt808.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway_jt808, [ {description, "JT/T 808 Gateway"}, - {vsn, "0.0.3"}, + {vsn, "0.1.0"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gateway_jt808/src/emqx_jt808_channel.erl b/apps/emqx_gateway_jt808/src/emqx_jt808_channel.erl index 876f623e9..a74214a1c 100644 --- a/apps/emqx_gateway_jt808/src/emqx_jt808_channel.erl +++ b/apps/emqx_gateway_jt808/src/emqx_jt808_channel.erl @@ -616,7 +616,7 @@ clean_timer(Name, Channel = #channel{timers = Timers}) -> Channel#channel{timers = maps:remove(Name, Timers)}. interval(alive_timer, #channel{keepalive = KeepAlive}) -> - emqx_keepalive:info(interval, KeepAlive); + emqx_keepalive:info(check_interval, KeepAlive); interval(retry_timer, #channel{retx_interval = RetxIntv}) -> RetxIntv. 
diff --git a/apps/emqx_gateway_mqttsn/src/emqx_gateway_mqttsn.app.src b/apps/emqx_gateway_mqttsn/src/emqx_gateway_mqttsn.app.src index 45a1d5da7..1dc3f6939 100644 --- a/apps/emqx_gateway_mqttsn/src/emqx_gateway_mqttsn.app.src +++ b/apps/emqx_gateway_mqttsn/src/emqx_gateway_mqttsn.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway_mqttsn, [ {description, "MQTT-SN Gateway"}, - {vsn, "0.2.0"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_channel.erl b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_channel.erl index 501308ea0..c9e109c3f 100644 --- a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_channel.erl +++ b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_channel.erl @@ -430,7 +430,7 @@ ensure_keepalive(Channel = #channel{conninfo = ConnInfo}) -> ensure_keepalive_timer(0, Channel) -> Channel; ensure_keepalive_timer(Interval, Channel) -> - Keepalive = emqx_keepalive:init(round(timer:seconds(Interval))), + Keepalive = emqx_keepalive:init(Interval), ensure_timer(keepalive, Channel#channel{keepalive = Keepalive}). %%-------------------------------------------------------------------- @@ -2245,7 +2245,7 @@ clean_timer(Name, Channel = #channel{timers = Timers}) -> Channel#channel{timers = maps:remove(Name, Timers)}. 
interval(keepalive, #channel{keepalive = KeepAlive}) -> - emqx_keepalive:info(interval, KeepAlive); + emqx_keepalive:info(check_interval, KeepAlive); interval(retry_delivery, #channel{session = Session}) -> emqx_mqttsn_session:info(retry_interval, Session); interval(expire_awaiting_rel, #channel{session = Session}) -> diff --git a/apps/emqx_machine/src/emqx_machine.app.src b/apps/emqx_machine/src/emqx_machine.app.src index cf556a2d1..ab907d657 100644 --- a/apps/emqx_machine/src/emqx_machine.app.src +++ b/apps/emqx_machine/src/emqx_machine.app.src @@ -3,7 +3,7 @@ {id, "emqx_machine"}, {description, "The EMQX Machine"}, % strict semver, bump manually! - {vsn, "0.3.1"}, + {vsn, "0.3.2"}, {modules, []}, {registered, []}, {applications, [kernel, stdlib, emqx_ctl, redbug]}, diff --git a/apps/emqx_management/src/emqx_management.app.src b/apps/emqx_management/src/emqx_management.app.src index 7c99b70cd..c22793cf0 100644 --- a/apps/emqx_management/src/emqx_management.app.src +++ b/apps/emqx_management/src/emqx_management.app.src @@ -2,7 +2,7 @@ {application, emqx_management, [ {description, "EMQX Management API and CLI"}, % strict semver, bump manually! - {vsn, "5.2.1"}, + {vsn, "5.2.3"}, {modules, []}, {registered, [emqx_management_sup]}, {applications, [ diff --git a/apps/emqx_management/src/emqx_mgmt.erl b/apps/emqx_management/src/emqx_mgmt.erl index 95303a1e6..7631426c5 100644 --- a/apps/emqx_management/src/emqx_mgmt.erl +++ b/apps/emqx_management/src/emqx_mgmt.erl @@ -567,7 +567,18 @@ list_subscriptions_via_topic(Node, Topic, _FormatFun = {M, F}) -> %%-------------------------------------------------------------------- subscribe(ClientId, TopicTables) -> - subscribe(emqx:running_nodes(), ClientId, TopicTables). + case emqx_cm_registry:is_enabled() of + false -> + subscribe(emqx:running_nodes(), ClientId, TopicTables); + true -> + with_client_node( + ClientId, + {error, channel_not_found}, + fun(Node) -> + subscribe([Node], ClientId, TopicTables) + end + ) + end. 
subscribe([Node | Nodes], ClientId, TopicTables) -> case unwrap_rpc(emqx_management_proto_v5:subscribe(Node, ClientId, TopicTables)) of @@ -615,7 +626,18 @@ do_unsubscribe(ClientId, Topic) -> -spec unsubscribe_batch(emqx_types:clientid(), [emqx_types:topic()]) -> {unsubscribe, _} | {error, channel_not_found}. unsubscribe_batch(ClientId, Topics) -> - unsubscribe_batch(emqx:running_nodes(), ClientId, Topics). + case emqx_cm_registry:is_enabled() of + false -> + unsubscribe_batch(emqx:running_nodes(), ClientId, Topics); + true -> + with_client_node( + ClientId, + {error, channel_not_found}, + fun(Node) -> + unsubscribe_batch([Node], ClientId, Topics) + end + ) + end. -spec unsubscribe_batch([node()], emqx_types:clientid(), [emqx_types:topic()]) -> {unsubscribe_batch, _} | {error, channel_not_found}. @@ -691,15 +713,33 @@ delete_banned(Who) -> %%-------------------------------------------------------------------- lookup_running_client(ClientId, FormatFun) -> - lists:append([ - lookup_client(Node, {clientid, ClientId}, FormatFun) - || Node <- emqx:running_nodes() - ]). + case emqx_cm_registry:is_enabled() of + false -> + lists:append([ + lookup_client(Node, {clientid, ClientId}, FormatFun) + || Node <- emqx:running_nodes() + ]); + true -> + with_client_node( + ClientId, + _WhenNotFound = [], + fun(Node) -> lookup_client(Node, {clientid, ClientId}, FormatFun) end + ) + end. %%-------------------------------------------------------------------- %% Internal Functions. %%-------------------------------------------------------------------- +with_client_node(ClientId, WhenNotFound, Fn) -> + case emqx_cm_registry:lookup_channels(ClientId) of + [ChanPid | _] -> + Node = node(ChanPid), + Fn(Node); + [] -> + WhenNotFound + end. 
+ unwrap_rpc({badrpc, Reason}) -> {error, Reason}; unwrap_rpc(Res) -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_clients.erl b/apps/emqx_management/src/emqx_mgmt_api_clients.erl index 547324925..810ccf08f 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_clients.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_clients.erl @@ -1224,10 +1224,20 @@ subscribe(#{clientid := ClientID, topic := Topic} = Sub) -> end. subscribe_batch(#{clientid := ClientID, topics := Topics}) -> - %% We use emqx_channel instead of emqx_channel_info (used by the emqx_mgmt:lookup_client/2), - %% as the emqx_channel_info table will only be populated after the hook `client.connected` - %% has returned. So if one want to subscribe topics in this hook, it will fail. - case ets:lookup(?CHAN_TAB, ClientID) of + %% On the one hand, we first try to use `emqx_channel' instead of `emqx_channel_info' + %% (used by the `emqx_mgmt:lookup_client/2'), as the `emqx_channel_info' table will + %% only be populated after the hook `client.connected' has returned. So if one want to + %% subscribe topics in this hook, it will fail. + %% ... On the other hand, using only `emqx_channel' would render this API unusable if + %% called from a node that doesn't have hold the targeted client connection, so we + %% fall back to `emqx_mgmt:lookup_client/2', which consults the global registry. + Result1 = ets:lookup(?CHAN_TAB, ClientID), + Result = + case Result1 of + [] -> emqx_mgmt:lookup_client({clientid, ClientID}, _FormatFn = undefined); + _ -> Result1 + end, + case Result of [] -> {404, ?CLIENTID_NOT_FOUND}; _ -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_plugins.erl b/apps/emqx_management/src/emqx_mgmt_api_plugins.erl index a60131cfc..6c7f9783c 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_plugins.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_plugins.erl @@ -59,7 +59,6 @@ -define(VSN_WILDCARD, "-*.tar.gz"). -define(CONTENT_PLUGIN, plugin). --define(CONTENT_CONFIG, config). 
namespace() -> "plugins". @@ -411,7 +410,7 @@ upload_install(post, #{body := #{<<"plugin">> := Plugin}}) when is_map(Plugin) - %% File bin is too large, we use rpc:multicall instead of cluster_rpc:multicall NameVsn = string:trim(FileName, trailing, ".tar.gz"), case emqx_plugins:describe(NameVsn) of - {error, #{error_msg := "bad_info_file", reason := {enoent, _}}} -> + {error, #{msg := "bad_info_file", reason := {enoent, _Path}}} -> case emqx_plugins:parse_name_vsn(FileName) of {ok, AppName, _Vsn} -> AppDir = filename:join(emqx_plugins:install_dir(), AppName), @@ -467,12 +466,12 @@ do_install_package(FileName, Bin) -> ), Reason = case hd(Filtered) of - {error, #{error_msg := Reason0}} -> Reason0; + {error, #{msg := Reason0}} -> Reason0; {error, #{reason := Reason0}} -> Reason0 end, {400, #{ code => 'BAD_PLUGIN_INFO', - message => iolist_to_binary([Reason, ": ", FileName]) + message => iolist_to_binary([bin(Reason), ": ", FileName]) }} end. @@ -565,8 +564,8 @@ install_package(FileName, Bin) -> ok = filelib:ensure_dir(File), ok = file:write_file(File, Bin), PackageName = string:trim(FileName, trailing, ".tar.gz"), - case emqx_plugins:ensure_installed(PackageName) of - {error, #{reason := not_found}} = NotFound -> + case emqx_plugins:ensure_installed(PackageName, ?fresh_install) of + {error, #{reason := plugin_not_found}} = NotFound -> NotFound; {error, Reason} = Error -> ?SLOG(error, Reason#{msg => "failed_to_install_plugin"}), @@ -597,6 +596,9 @@ delete_package(Name) -> end. 
%% for RPC plugin update +%% TODO: catch thrown error to return 400 +%% - plugin_not_found +%% - otp vsn assertion failed ensure_action(Name, start) -> _ = emqx_plugins:ensure_started(Name), _ = emqx_plugins:ensure_enabled(Name), @@ -625,10 +627,9 @@ do_update_plugin_config(NameVsn, AvroJsonMap, AvroValue) -> return(Code, ok) -> {Code}; -return(_, {error, #{error_msg := "bad_info_file", reason := {enoent, _} = Reason}}) -> - {404, #{code => 'NOT_FOUND', message => readable_error_msg(Reason)}}; -return(_, {error, #{error_msg := "bad_avro_config_file", reason := {enoent, _} = Reason}}) -> - {404, #{code => 'NOT_FOUND', message => readable_error_msg(Reason)}}; +return(_, {error, #{msg := Msg, reason := {enoent, Path} = Reason}}) -> + ?SLOG(error, #{msg => Msg, reason => Reason}), + {404, #{code => 'NOT_FOUND', message => iolist_to_binary([Path, " does not exist"])}}; return(_, {error, Reason}) -> {400, #{code => 'PARAM_ERROR', message => readable_error_msg(Reason)}}. @@ -728,6 +729,10 @@ format_plugin_avsc_and_i18n(_NameVsn) -> #{avsc => null, i18n => null}. -endif. +bin(A) when is_atom(A) -> atom_to_binary(A, utf8); +bin(L) when is_list(L) -> list_to_binary(L); +bin(B) when is_binary(B) -> B. + % running_status: running loaded, stopped %% config_status: not_configured disable enable plugin_status(#{running_status := running}) -> running; diff --git a/apps/emqx_management/test/emqx_mgmt_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_SUITE.erl index 0cde87465..7c34ddc0b 100644 --- a/apps/emqx_management/test/emqx_mgmt_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_SUITE.erl @@ -185,9 +185,14 @@ t_lookup_client(_Config) -> ), ?assertEqual([], emqx_mgmt:lookup_client({clientid, <<"notfound">>}, ?FORMATFUN)), meck:expect(emqx, running_nodes, 0, [node(), 'fake@nonode']), - ?assertMatch( - [_ | {error, nodedown}], emqx_mgmt:lookup_client({clientid, <<"client1">>}, ?FORMATFUN) - ). 
+ try + emqx:update_config([broker, enable_session_registry], false), + ?assertMatch( + [_ | {error, nodedown}], emqx_mgmt:lookup_client({clientid, <<"client1">>}, ?FORMATFUN) + ) + after + emqx:update_config([broker, enable_session_registry], true) + end. t_kickout_client(init, Config) -> process_flag(trap_exit, true), diff --git a/apps/emqx_management/test/emqx_mgmt_api_clients_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_clients_SUITE.erl index be827510e..e565b13b2 100644 --- a/apps/emqx_management/test/emqx_mgmt_api_clients_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_api_clients_SUITE.erl @@ -34,12 +34,16 @@ all() -> [ {group, general}, - {group, persistent_sessions} + {group, persistent_sessions}, + {group, non_persistent_cluster} ]. groups() -> AllTCs = emqx_common_test_helpers:all(?MODULE), - GeneralTCs = AllTCs -- (persistent_session_testcases() ++ client_msgs_testcases()), + GeneralTCs = + AllTCs -- + (persistent_session_testcases() ++ + non_persistent_cluster_testcases() ++ client_msgs_testcases()), [ {general, [ {group, msgs_base64_encoding}, @@ -47,6 +51,7 @@ groups() -> | GeneralTCs ]}, {persistent_sessions, persistent_session_testcases()}, + {non_persistent_cluster, non_persistent_cluster_testcases()}, {msgs_base64_encoding, client_msgs_testcases()}, {msgs_plain_encoding, client_msgs_testcases()} ]. @@ -62,7 +67,10 @@ persistent_session_testcases() -> t_persistent_sessions_subscriptions1, t_list_clients_v2 ]. - +non_persistent_cluster_testcases() -> + [ + t_bulk_subscribe + ]. 
client_msgs_testcases() -> [ t_inflight_messages, @@ -112,6 +120,27 @@ init_per_group(persistent_sessions, Config) -> {api_auth_header, erpc:call(N1, emqx_mgmt_api_test_util, auth_header_, [])} | Config ]; +init_per_group(non_persistent_cluster, Config) -> + AppSpecs = [ + emqx, + emqx_conf, + emqx_management + ], + Dashboard = emqx_mgmt_api_test_util:emqx_dashboard(), + Cluster = [ + {mgmt_api_clients_SUITE1, #{role => core, apps => AppSpecs ++ [Dashboard]}}, + {mgmt_api_clients_SUITE2, #{role => core, apps => AppSpecs}} + ], + Nodes = + [N1 | _] = emqx_cth_cluster:start( + Cluster, + #{work_dir => emqx_cth_suite:work_dir(Config)} + ), + [ + {nodes, Nodes}, + {api_auth_header, erpc:call(N1, emqx_mgmt_api_test_util, auth_header_, [])} + | Config + ]; init_per_group(msgs_base64_encoding, Config) -> [{payload_encoding, base64} | Config]; init_per_group(msgs_plain_encoding, Config) -> @@ -122,7 +151,10 @@ init_per_group(_Group, Config) -> end_per_group(general, Config) -> Apps = ?config(apps, Config), ok = emqx_cth_suite:stop(Apps); -end_per_group(persistent_sessions, Config) -> +end_per_group(Group, Config) when + Group =:= persistent_sessions; + Group =:= non_persistent_cluster +-> Nodes = ?config(nodes, Config), ok = emqx_cth_cluster:stop(Nodes); end_per_group(_Group, _Config) -> @@ -1071,10 +1103,7 @@ t_keepalive(Config) -> [Pid] = emqx_cm:lookup_channels(list_to_binary(ClientId)), %% will reset to max keepalive if keepalive > max keepalive #{conninfo := #{keepalive := InitKeepalive}} = emqx_connection:info(Pid), - ?assertMatch( - #{interval := 65535000}, - emqx_connection:info({channel, keepalive}, sys:get_state(Pid)) - ), + ?assertMatch({keepalive, _, _, _, 65536500}, element(5, element(9, sys:get_state(Pid)))), ?assertMatch( {ok, {?HTTP200, _, #{<<"keepalive">> := 11}}}, @@ -1518,6 +1547,41 @@ t_subscribe_shared_topic_nl(Config) -> request(post, Path, #{topic => Topic, qos => 1, nl => 1, rh => 1}, Config) ). 
+%% Checks that we can use the bulk subscribe API on a different node than the one a client +%% is connected to. +t_bulk_subscribe(Config) -> + [N1, N2] = ?config(nodes, Config), + Port1 = get_mqtt_port(N1, tcp), + Port2 = get_mqtt_port(N2, tcp), + ?check_trace( + begin + ClientId1 = <<"bulk-sub1">>, + _C1 = connect_client(#{port => Port2, clientid => ClientId1, clean_start => true}), + ClientId2 = <<"bulk-sub2">>, + C2 = connect_client(#{port => Port1, clientid => ClientId2, clean_start => true}), + Topic = <<"testtopic">>, + BulkSub = [#{topic => Topic, qos => 1, nl => 1, rh => 1}], + ?assertMatch({200, [_]}, bulk_subscribe_request(ClientId1, Config, BulkSub)), + ?assertMatch( + {200, [_]}, + get_subscriptions_request(ClientId1, Config, #{simplify_result => true}) + ), + {ok, _} = emqtt:publish(C2, Topic, <<"hi1">>, [{qos, 1}]), + ?assertReceive({publish, #{topic := Topic, payload := <<"hi1">>}}), + BulkUnsub = [#{topic => Topic}], + ?assertMatch({204, _}, bulk_unsubscribe_request(ClientId1, Config, BulkUnsub)), + ?assertMatch( + {200, []}, + get_subscriptions_request(ClientId1, Config, #{simplify_result => true}) + ), + {ok, _} = emqtt:publish(C2, Topic, <<"hi2">>, [{qos, 1}]), + ?assertNotReceive({publish, _}), + ok + end, + [] + ), + ok. + t_list_clients_v2(Config) -> [N1, N2] = ?config(nodes, Config), Port1 = get_mqtt_port(N1, tcp), @@ -1883,8 +1947,16 @@ maybe_json_decode(X) -> end. get_subscriptions_request(ClientId, Config) -> + get_subscriptions_request(ClientId, Config, _Opts = #{}). + +get_subscriptions_request(ClientId, Config, Opts) -> Path = emqx_mgmt_api_test_util:api_path(["clients", ClientId, "subscriptions"]), - request(get, Path, [], Config). + Res = request(get, Path, [], Config), + Simplify = maps:get(simplify_result, Opts, false), + case Simplify of + true -> simplify_result(Res); + false -> Res + end. 
get_client_request(ClientId, Config) -> Path = emqx_mgmt_api_test_util:api_path(["clients", ClientId]), @@ -1897,7 +1969,23 @@ list_request(QueryParams, Config) -> Path = emqx_mgmt_api_test_util:api_path(["clients"]), request(get, Path, [], compose_query_string(QueryParams), Config). -list_v2_request(QueryParams, Config) -> +bulk_subscribe_request(ClientId, Config, Body) -> + Path = emqx_mgmt_api_test_util:api_path(["clients", ClientId, "subscribe", "bulk"]), + simplify_result(request(post, Path, Body, Config)). + +bulk_unsubscribe_request(ClientId, Config, Body) -> + Path = emqx_mgmt_api_test_util:api_path(["clients", ClientId, "unsubscribe", "bulk"]), + simplify_result(request(post, Path, Body, Config)). + +simplify_result(Res) -> + case Res of + {error, {{_, Status, _}, _, Body}} -> + {Status, Body}; + {ok, {{_, Status, _}, _, Body}} -> + {Status, Body} + end. + +list_v2_request(QueryParams = #{}, Config) -> Path = emqx_mgmt_api_test_util:api_path(["clients_v2"]), request(get, Path, [], compose_query_string(QueryParams), Config). 
diff --git a/apps/emqx_opentelemetry/src/emqx_opentelemetry.app.src b/apps/emqx_opentelemetry/src/emqx_opentelemetry.app.src index cb7c7d32a..6a84ae043 100644 --- a/apps/emqx_opentelemetry/src/emqx_opentelemetry.app.src +++ b/apps/emqx_opentelemetry/src/emqx_opentelemetry.app.src @@ -1,6 +1,6 @@ {application, emqx_opentelemetry, [ {description, "OpenTelemetry for EMQX Broker"}, - {vsn, "0.2.5"}, + {vsn, "0.2.6"}, {registered, []}, {mod, {emqx_otel_app, []}}, {applications, [ diff --git a/apps/emqx_oracle/src/emqx_oracle.app.src b/apps/emqx_oracle/src/emqx_oracle.app.src index 09aa4e589..3f238ae9c 100644 --- a/apps/emqx_oracle/src/emqx_oracle.app.src +++ b/apps/emqx_oracle/src/emqx_oracle.app.src @@ -1,6 +1,6 @@ {application, emqx_oracle, [ {description, "EMQX Enterprise Oracle Database Connector"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_plugins/include/emqx_plugins.hrl b/apps/emqx_plugins/include/emqx_plugins.hrl index 3c7621ca7..2097c04bd 100644 --- a/apps/emqx_plugins/include/emqx_plugins.hrl +++ b/apps/emqx_plugins/include/emqx_plugins.hrl @@ -26,6 +26,8 @@ -define(plugin_conf_not_found, plugin_conf_not_found). -define(plugin_without_config_schema, plugin_without_config_schema). +-define(fresh_install, fresh_install). +-define(normal, normal). -type schema_name() :: binary(). -type avsc_path() :: string(). 
diff --git a/apps/emqx_plugins/src/emqx_plugins.erl b/apps/emqx_plugins/src/emqx_plugins.erl index de42b22ad..7d685256f 100644 --- a/apps/emqx_plugins/src/emqx_plugins.erl +++ b/apps/emqx_plugins/src/emqx_plugins.erl @@ -39,6 +39,7 @@ -export([ ensure_installed/0, ensure_installed/1, + ensure_installed/2, ensure_uninstalled/1, ensure_enabled/1, ensure_enabled/2, @@ -169,19 +170,27 @@ ensure_installed(NameVsn) -> case read_plugin_info(NameVsn, #{}) of {ok, _} -> ok, - _ = maybe_ensure_plugin_config(NameVsn); + _ = maybe_ensure_plugin_config(NameVsn, ?normal); {error, _} -> ok = purge(NameVsn), case ensure_exists_and_installed(NameVsn) of ok -> - maybe_post_op_after_installed(NameVsn), - _ = maybe_ensure_plugin_config(NameVsn), + maybe_post_op_after_installed(NameVsn, ?normal), ok; {error, _Reason} = Err -> Err end end. +ensure_installed(NameVsn, ?fresh_install = Mode) -> + case ensure_exists_and_installed(NameVsn) of + ok -> + maybe_post_op_after_installed(NameVsn, Mode), + ok; + {error, _Reason} = Err -> + Err + end. + %% @doc Ensure files and directories for the given plugin are being deleted. %% If a plugin is running, or enabled, an error is returned. -spec ensure_uninstalled(name_vsn()) -> ok | {error, any()}. @@ -189,12 +198,12 @@ ensure_uninstalled(NameVsn) -> case read_plugin_info(NameVsn, #{}) of {ok, #{running_status := RunningSt}} when RunningSt =/= stopped -> {error, #{ - error_msg => "bad_plugin_running_status", + msg => "bad_plugin_running_status", hint => "stop_the_plugin_first" }}; {ok, #{config_status := enabled}} -> {error, #{ - error_msg => "bad_plugin_config_status", + msg => "bad_plugin_config_status", hint => "disable_the_plugin_first" }}; _ -> @@ -279,9 +288,9 @@ ensure_started(NameVsn) -> case do_ensure_started(NameVsn) of ok -> ok; - {error, Reason} -> - ?SLOG(alert, Reason#{msg => "failed_to_start_plugin"}), - {error, Reason} + {error, ReasonMap} -> + ?SLOG(error, ReasonMap#{msg => "failed_to_start_plugin"}), + {error, ReasonMap} end. 
%% @doc Stop all plugins before broker stops. @@ -374,7 +383,7 @@ list() -> {ok, Info} -> {true, Info}; {error, Reason} -> - ?SLOG(warning, Reason), + ?SLOG(warning, Reason#{msg => "failed_to_read_plugin_info"}), false end end, @@ -402,7 +411,10 @@ decode_plugin_config_map(NameVsn, AvroJsonMap) -> do_decode_plugin_config_map(NameVsn, AvroJsonMap) end; false -> - ?SLOG(debug, #{name_vsn => NameVsn, plugin_with_avro_schema => false}), + ?SLOG(debug, #{ + msg => "plugin_without_config_schema", + name_vsn => NameVsn + }), {ok, ?plugin_without_config_schema} end. @@ -537,13 +549,13 @@ do_ensure_installed(NameVsn) -> end; {error, {_, enoent}} -> {error, #{ - error_msg => "failed_to_extract_plugin_package", + msg => "failed_to_extract_plugin_package", path => TarGz, - reason => not_found + reason => plugin_tarball_not_found }}; {error, Reason} -> {error, #{ - error_msg => "bad_plugin_package", + msg => "bad_plugin_package", path => TarGz, reason => Reason }} @@ -600,7 +612,7 @@ add_new_configured(Configured, {Action, NameVsn}, Item) -> {Front, Rear} = lists:splitwith(SplitFun, Configured), Rear =:= [] andalso throw(#{ - error_msg => "position_anchor_plugin_not_configured", + msg => "position_anchor_plugin_not_configured", hint => "maybe_install_and_configure", name_vsn => NameVsn }), @@ -664,12 +676,13 @@ do_ensure_started(NameVsn) -> ok -> Plugin = do_read_plugin(NameVsn), ok = load_code_start_apps(NameVsn, Plugin); - {error, plugin_not_found} -> + {error, #{reason := Reason} = ReasonMap} -> ?SLOG(error, #{ - error_msg => "plugin_not_found", - name_vsn => NameVsn + msg => "failed_to_start_plugin", + name_vsn => NameVsn, + reason => Reason }), - ok + {error, ReasonMap} end end ). 
@@ -682,10 +695,12 @@ tryit(WhichOp, F) -> try F() catch - throw:ReasonMap -> + throw:ReasonMap when is_map(ReasonMap) -> %% thrown exceptions are known errors %% translate to a return value without stacktrace {error, ReasonMap}; + throw:Reason -> + {error, #{reason => Reason}}; error:Reason:Stacktrace -> %% unexpected errors, log stacktrace ?SLOG(warning, #{ @@ -769,18 +784,18 @@ do_get_from_cluster(NameVsn) -> ok = do_ensure_installed(NameVsn); {error, NodeErrors} when Nodes =/= [] -> ErrMeta = #{ - error_msg => "failed_to_copy_plugin_from_other_nodes", + msg => "failed_to_copy_plugin_from_other_nodes", name_vsn => NameVsn, node_errors => NodeErrors, - reason => not_found + reason => plugin_not_found }, ?SLOG(error, ErrMeta), {error, ErrMeta}; {error, _} -> ErrMeta = #{ - error_msg => "no_nodes_to_copy_plugin_from", + msg => "no_nodes_to_copy_plugin_from", name_vsn => NameVsn, - reason => not_found + reason => plugin_not_found }, ?SLOG(error, ErrMeta), {error, ErrMeta} @@ -791,6 +806,11 @@ get_plugin_tar_from_any_node([], _NameVsn, Errors) -> get_plugin_tar_from_any_node([Node | T], NameVsn, Errors) -> case emqx_plugins_proto_v1:get_tar(Node, NameVsn, infinity) of {ok, _} = Res -> + ?SLOG(debug, #{ + msg => "get_plugin_tar_from_cluster_successfully", + node => Node, + name_vsn => NameVsn + }), Res; Err -> get_plugin_tar_from_any_node(T, NameVsn, [{Node, Err} | Errors]) @@ -805,6 +825,11 @@ get_plugin_config_from_any_node([Node | T], NameVsn, Errors) -> ) of {ok, _} = Res -> + ?SLOG(debug, #{ + msg => "get_plugin_config_from_cluster_successfully", + node => Node, + name_vsn => NameVsn + }), Res; Err -> get_plugin_config_from_any_node(T, NameVsn, [{Node, Err} | Errors]) @@ -870,7 +895,7 @@ check_plugin( catch _:_ -> throw(#{ - error_msg => "bad_rel_apps", + msg => "bad_rel_apps", rel_apps => Apps, hint => "A non-empty string list of app_name-app_vsn format" }) @@ -878,7 +903,7 @@ check_plugin( Info; false -> throw(#{ - error_msg => "name_vsn_mismatch", + msg => 
"name_vsn_mismatch", name_vsn => NameVsn, path => FilePath, name => Name, @@ -887,7 +912,7 @@ check_plugin( end; check_plugin(_What, NameVsn, File) -> throw(#{ - error_msg => "bad_info_file_content", + msg => "bad_info_file_content", mandatory_fields => [rel_vsn, name, rel_apps, description], name_vsn => NameVsn, path => File @@ -943,7 +968,7 @@ do_load_plugin_app(AppName, Ebin) -> ok; {error, Reason} -> throw(#{ - error_msg => "failed_to_load_plugin_beam", + msg => "failed_to_load_plugin_beam", path => BeamFile, reason => Reason }) @@ -958,7 +983,7 @@ do_load_plugin_app(AppName, Ebin) -> ok; {error, Reason} -> throw(#{ - error_msg => "failed_to_load_plugin_app", + msg => "failed_to_load_plugin_app", name => AppName, reason => Reason }) @@ -975,7 +1000,7 @@ start_app(App) -> ok; {error, {ErrApp, Reason}} -> throw(#{ - error_msg => "failed_to_start_plugin_app", + msg => "failed_to_start_plugin_app", app => App, err_app => ErrApp, reason => Reason @@ -1057,7 +1082,7 @@ stop_app(App) -> ?SLOG(debug, #{msg => "plugin_not_started", app => App}), ok = unload_moudle_and_app(App); {error, Reason} -> - throw(#{error_msg => "failed_to_stop_app", app => App, reason => Reason}) + throw(#{msg => "failed_to_stop_app", app => App, reason => Reason}) end. unload_moudle_and_app(App) -> @@ -1152,13 +1177,13 @@ for_plugins(ActionFun) -> for_plugins_action_error_occurred, ErrMeta ), - ?SLOG(error, ErrMeta), + ?SLOG(error, ErrMeta#{msg => "for_plugins_action_error_occurred"}), ok end. -maybe_post_op_after_installed(NameVsn0) -> +maybe_post_op_after_installed(NameVsn0, Mode) -> NameVsn = wrap_to_list(NameVsn0), - _ = maybe_load_config_schema(NameVsn), + _ = maybe_load_config_schema(NameVsn, Mode), ok = maybe_ensure_state(NameVsn). maybe_ensure_state(NameVsn) -> @@ -1183,13 +1208,13 @@ maybe_ensure_state(NameVsn) -> end, ok. 
-maybe_load_config_schema(NameVsn) -> +maybe_load_config_schema(NameVsn, Mode) -> AvscPath = avsc_file_path(NameVsn), _ = with_plugin_avsc(NameVsn) andalso filelib:is_regular(AvscPath) andalso do_load_config_schema(NameVsn, AvscPath), - _ = maybe_create_config_dir(NameVsn). + _ = maybe_create_config_dir(NameVsn, Mode). do_load_config_schema(NameVsn, AvscPath) -> case emqx_plugins_serde:add_schema(bin(NameVsn), AvscPath) of @@ -1198,11 +1223,11 @@ do_load_config_schema(NameVsn, AvscPath) -> {error, _Reason} -> ok end. -maybe_create_config_dir(NameVsn) -> +maybe_create_config_dir(NameVsn, Mode) -> with_plugin_avsc(NameVsn) andalso - do_create_config_dir(NameVsn). + do_create_config_dir(NameVsn, Mode). -do_create_config_dir(NameVsn) -> +do_create_config_dir(NameVsn, Mode) -> case plugin_config_dir(NameVsn) of {error, Reason} -> {error, {gen_config_dir_failed, Reason}}; @@ -1210,7 +1235,7 @@ do_create_config_dir(NameVsn) -> case filelib:ensure_path(ConfigDir) of ok -> %% get config from other nodes or get from tarball - _ = maybe_ensure_plugin_config(NameVsn), + _ = maybe_ensure_plugin_config(NameVsn, Mode), ok; {error, Reason} -> ?SLOG(warning, #{ @@ -1222,20 +1247,25 @@ do_create_config_dir(NameVsn) -> end end. --spec maybe_ensure_plugin_config(name_vsn()) -> ok. -maybe_ensure_plugin_config(NameVsn) -> +-spec maybe_ensure_plugin_config(name_vsn(), ?fresh_install | ?normal) -> ok. +maybe_ensure_plugin_config(NameVsn, Mode) -> maybe true ?= with_plugin_avsc(NameVsn), - _ = ensure_plugin_config(NameVsn) + _ = ensure_plugin_config({NameVsn, Mode}) else _ -> ok end. --spec ensure_plugin_config(name_vsn()) -> ok. -ensure_plugin_config(NameVsn) -> - %% fetch plugin hocon config from cluster - Nodes = [N || N <- mria:running_nodes(), N /= node()], - ensure_plugin_config(NameVsn, Nodes). +-spec ensure_plugin_config({name_vsn(), ?fresh_install | ?normal}) -> ok. 
+ensure_plugin_config({NameVsn, ?normal}) -> + ensure_plugin_config(NameVsn, [N || N <- mria:running_nodes(), N /= node()]); +ensure_plugin_config({NameVsn, ?fresh_install}) -> + ?SLOG(debug, #{ + msg => "default_plugin_config_used", + name_vsn => NameVsn, + hint => "fresh_install" + }), + cp_default_config_file(NameVsn). -spec ensure_plugin_config(name_vsn(), list()) -> ok. ensure_plugin_config(NameVsn, []) -> @@ -1255,8 +1285,6 @@ ensure_plugin_config(NameVsn, Nodes) -> ensure_config_map(NameVsn); _ -> ?SLOG(error, #{msg => "config_not_found_from_cluster", name_vsn => NameVsn}), - %% otherwise cp default hocon file - %% i.e. Clean installation cp_default_config_file(NameVsn) end. @@ -1292,6 +1320,11 @@ ensure_config_map(NameVsn) -> true -> do_ensure_config_map(NameVsn, ConfigJsonMap); false -> + ?SLOG(debug, #{ + msg => "put_plugin_config_directly", + hint => "plugin_without_config_schema", + name_vsn => NameVsn + }), put_config(NameVsn, ConfigJsonMap, ?plugin_without_config_schema) end; _ -> @@ -1376,23 +1409,23 @@ prune_backup_files(Path) -> Deletes ). -read_file_fun(Path, ErrMsg, #{read_mode := ?RAW_BIN}) -> +read_file_fun(Path, Msg, #{read_mode := ?RAW_BIN}) -> fun() -> case file:read_file(Path) of {ok, Bin} -> {ok, Bin}; {error, Reason} -> - ErrMeta = #{error_msg => ErrMsg, reason => Reason}, + ErrMeta = #{msg => Msg, reason => Reason}, throw(ErrMeta) end end; -read_file_fun(Path, ErrMsg, #{read_mode := ?JSON_MAP}) -> +read_file_fun(Path, Msg, #{read_mode := ?JSON_MAP}) -> fun() -> case hocon:load(Path, #{format => richmap}) of {ok, RichMap} -> {ok, hocon_maps:ensure_plain(RichMap)}; {error, Reason} -> - ErrMeta = #{error_msg => ErrMsg, reason => Reason}, + ErrMeta = #{msg => Msg, reason => Reason}, throw(ErrMeta) end end. 
diff --git a/apps/emqx_plugins/test/emqx_plugins_SUITE.erl b/apps/emqx_plugins/test/emqx_plugins_SUITE.erl index 5c8c51f1e..e960763da 100644 --- a/apps/emqx_plugins/test/emqx_plugins_SUITE.erl +++ b/apps/emqx_plugins/test/emqx_plugins_SUITE.erl @@ -353,7 +353,7 @@ t_enable_disable(Config) -> ?assertEqual([#{name_vsn => NameVsn, enable => true}], emqx_plugins:configured()), ?assertMatch( {error, #{ - error_msg := "bad_plugin_config_status", + msg := "bad_plugin_config_status", hint := "disable_the_plugin_first" }}, emqx_plugins:ensure_uninstalled(NameVsn) @@ -381,7 +381,7 @@ t_bad_tar_gz(Config) -> ok = file:write_file(FakeTarTz, "a\n"), ?assertMatch( {error, #{ - error_msg := "bad_plugin_package", + msg := "bad_plugin_package", reason := eof }}, emqx_plugins:ensure_installed("fake-vsn") @@ -389,8 +389,8 @@ t_bad_tar_gz(Config) -> %% the plugin tarball can not be found on any nodes ?assertMatch( {error, #{ - error_msg := "no_nodes_to_copy_plugin_from", - reason := not_found + msg := "no_nodes_to_copy_plugin_from", + reason := plugin_not_found }}, emqx_plugins:ensure_installed("nonexisting") ), @@ -463,7 +463,7 @@ t_bad_info_json(Config) -> ok = write_info_file(Config, NameVsn, "bad-syntax"), ?assertMatch( {error, #{ - error_msg := "bad_info_file", + msg := "bad_info_file", reason := {parse_error, _} }}, emqx_plugins:describe(NameVsn) @@ -471,7 +471,7 @@ t_bad_info_json(Config) -> ok = write_info_file(Config, NameVsn, "{\"bad\": \"obj\"}"), ?assertMatch( {error, #{ - error_msg := "bad_info_file_content", + msg := "bad_info_file_content", mandatory_fields := _ }}, emqx_plugins:describe(NameVsn) diff --git a/apps/emqx_plugins/test/emqx_plugins_tests.erl b/apps/emqx_plugins/test/emqx_plugins_tests.erl index 1ae0bcef3..84a1ba677 100644 --- a/apps/emqx_plugins/test/emqx_plugins_tests.erl +++ b/apps/emqx_plugins/test/emqx_plugins_tests.erl @@ -57,7 +57,7 @@ read_plugin_test() -> try ok = write_file(InfoFile, FakeInfo), ?assertMatch( - {error, #{error_msg := 
"bad_rel_apps"}}, + {error, #{msg := "bad_rel_apps"}}, emqx_plugins:read_plugin_info(NameVsn, #{}) ) after diff --git a/apps/emqx_postgresql/src/emqx_postgresql.app.src b/apps/emqx_postgresql/src/emqx_postgresql.app.src index 2cf3392bf..7aaf42e71 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.app.src +++ b/apps/emqx_postgresql/src/emqx_postgresql.app.src @@ -1,6 +1,6 @@ {application, emqx_postgresql, [ {description, "EMQX PostgreSQL Database Connector"}, - {vsn, "0.2.1"}, + {vsn, "0.2.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_prometheus/src/emqx_prometheus.app.src b/apps/emqx_prometheus/src/emqx_prometheus.app.src index f571dcce6..713a3e511 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.app.src +++ b/apps/emqx_prometheus/src/emqx_prometheus.app.src @@ -2,7 +2,7 @@ {application, emqx_prometheus, [ {description, "Prometheus for EMQX"}, % strict semver, bump manually! - {vsn, "5.2.1"}, + {vsn, "5.2.2"}, {modules, []}, {registered, [emqx_prometheus_sup]}, {applications, [kernel, stdlib, prometheus, emqx, emqx_auth, emqx_resource, emqx_management]}, diff --git a/apps/emqx_redis/src/emqx_redis.app.src b/apps/emqx_redis/src/emqx_redis.app.src index 660c490e6..02a251637 100644 --- a/apps/emqx_redis/src/emqx_redis.app.src +++ b/apps/emqx_redis/src/emqx_redis.app.src @@ -1,6 +1,6 @@ {application, emqx_redis, [ {description, "EMQX Redis Database Connector"}, - {vsn, "0.1.5"}, + {vsn, "0.1.6"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_redis/src/emqx_redis.erl b/apps/emqx_redis/src/emqx_redis.erl index 17a0ede49..059e9aa23 100644 --- a/apps/emqx_redis/src/emqx_redis.erl +++ b/apps/emqx_redis/src/emqx_redis.erl @@ -19,6 +19,8 @@ -include_lib("typerefl/include/types.hrl"). -include_lib("hocon/include/hoconsc.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include_lib("emqx_resource/include/emqx_resource.hrl"). 
-export([namespace/0, roots/0, fields/1, redis_fields/0, desc/1]). @@ -231,7 +233,7 @@ is_unrecoverable_error({error, invalid_cluster_command}) -> is_unrecoverable_error(_) -> false. -on_get_status(_InstId, #{type := cluster, pool_name := PoolName}) -> +on_get_status(_InstId, #{type := cluster, pool_name := PoolName} = State) -> case eredis_cluster:pool_exists(PoolName) of true -> %% eredis_cluster has null slot even pool_exists when emqx start before redis cluster. @@ -240,26 +242,51 @@ on_get_status(_InstId, #{type := cluster, pool_name := PoolName}) -> %% In this case, we can directly consider it as a disconnect and then proceed to reconnect. case eredis_cluster_monitor:get_all_pools(PoolName) of [] -> - disconnected; + ?status_disconnected; [_ | _] -> - Health = eredis_cluster:ping_all(PoolName), - status_result(Health) + do_cluster_status_check(PoolName, State) end; false -> - disconnected + ?status_disconnected end; -on_get_status(_InstId, #{pool_name := PoolName}) -> - Health = emqx_resource_pool:health_check_workers(PoolName, fun ?MODULE:do_get_status/1), - status_result(Health). - -do_get_status(Conn) -> - case eredis:q(Conn, ["PING"]) of - {ok, _} -> true; - _ -> false +on_get_status(_InstId, #{pool_name := PoolName} = State) -> + HealthCheckResoults = emqx_resource_pool:health_check_workers( + PoolName, + fun ?MODULE:do_get_status/1, + emqx_resource_pool:health_check_timeout(), + #{return_values => true} + ), + case HealthCheckResoults of + {ok, Results} -> + sum_worker_results(Results, State); + Error -> + {?status_disconnected, State, Error} end. -status_result(_Status = true) -> connected; -status_result(_Status = false) -> connecting. +do_cluster_status_check(Pool, State) -> + Pongs = eredis_cluster:qa(Pool, [<<"PING">>]), + sum_worker_results(Pongs, State). + +do_get_status(Conn) -> + eredis:q(Conn, ["PING"]). 
+ +sum_worker_results([], _State) -> + ?status_connected; +sum_worker_results([{error, <<"NOAUTH Authentication required.">>} = Error | _Rest], State) -> + ?tp(emqx_redis_auth_required_error, #{}), + %% This requires user action to fix so we set the status to disconnected + {?status_disconnected, State, {unhealthy_target, Error}}; +sum_worker_results([{ok, _} | Rest], State) -> + sum_worker_results(Rest, State); +sum_worker_results([Error | _Rest], State) -> + ?SLOG( + warning, + #{ + msg => "emqx_redis_check_status_error", + error => Error + } + ), + {?status_connecting, State, Error}. do_cmd(PoolName, cluster, {cmd, Command}) -> eredis_cluster:q(PoolName, Command); diff --git a/apps/emqx_resource/src/emqx_resource.app.src b/apps/emqx_resource/src/emqx_resource.app.src index 39b8ec8d1..6e35949a9 100644 --- a/apps/emqx_resource/src/emqx_resource.app.src +++ b/apps/emqx_resource/src/emqx_resource.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_resource, [ {description, "Manager for all external resources"}, - {vsn, "0.1.30"}, + {vsn, "0.1.32"}, {registered, []}, {mod, {emqx_resource_app, []}}, {applications, [ diff --git a/apps/emqx_resource/src/emqx_resource_buffer_worker.erl b/apps/emqx_resource/src/emqx_resource_buffer_worker.erl index b0db3ca20..516795f39 100644 --- a/apps/emqx_resource/src/emqx_resource_buffer_worker.erl +++ b/apps/emqx_resource/src/emqx_resource_buffer_worker.erl @@ -1401,16 +1401,26 @@ apply_query_fun( query_opts => QueryOpts, min_query => minimize(Query) }, + IsSimpleQuery = maps:get(simple_query, QueryOpts, false), IsRetriable = false, AsyncWorkerMRef = undefined, InflightItem = ?INFLIGHT_ITEM(Ref, Query, IsRetriable, AsyncWorkerMRef), ok = inflight_append(InflightTID, InflightItem), case pre_query_channel_check(Request, Channels, QueryOpts) of ok -> - Result = Mod:on_query_async( - extract_connector_id(Id), Request, {ReplyFun, [ReplyContext]}, ResSt - ), - {async_return, Result}; + case + Mod:on_query_async( + 
extract_connector_id(Id), Request, {ReplyFun, [ReplyContext]}, ResSt + ) + of + {error, _} = Error when IsSimpleQuery -> + %% If this callback returns error, we assume it won't reply + %% anything else and won't retry. + maybe_reply_to(Error, QueryOpts), + Error; + Result -> + {async_return, Result} + end; Error -> maybe_reply_to(Error, QueryOpts) end @@ -1480,16 +1490,26 @@ apply_query_fun( Requests = lists:map( fun(?QUERY(_ReplyTo, Request, _, _ExpireAt, _TraceCtx)) -> Request end, Batch ), + IsSimpleQuery = maps:get(simple_query, QueryOpts, false), IsRetriable = false, AsyncWorkerMRef = undefined, InflightItem = ?INFLIGHT_ITEM(Ref, Batch, IsRetriable, AsyncWorkerMRef), ok = inflight_append(InflightTID, InflightItem), case pre_query_channel_check(FirstRequest, Channels, QueryOpts) of ok -> - Result = Mod:on_batch_query_async( - extract_connector_id(Id), Requests, {ReplyFun, [ReplyContext]}, ResSt - ), - {async_return, Result}; + case + Mod:on_batch_query_async( + extract_connector_id(Id), Requests, {ReplyFun, [ReplyContext]}, ResSt + ) + of + {error, _} = Error when IsSimpleQuery -> + %% If this callback returns error, we assume it won't reply + %% anything else and won't retry. + maybe_reply_to(Error, QueryOpts), + Error; + Result -> + {async_return, Result} + end; Error -> maybe_reply_to(Error, QueryOpts) end diff --git a/apps/emqx_resource/test/emqx_resource_SUITE.erl b/apps/emqx_resource/test/emqx_resource_SUITE.erl index 764c65e6f..af9abe95b 100644 --- a/apps/emqx_resource/test/emqx_resource_SUITE.erl +++ b/apps/emqx_resource/test/emqx_resource_SUITE.erl @@ -29,6 +29,9 @@ -define(RESOURCE_ERROR(REASON), {error, {resource_error, #{reason := REASON}}}). -define(TRACE_OPTS, #{timetrap => 10000, timeout => 1000}). -define(TELEMETRY_PREFIX, emqx, resource). +-define(QUERY(FROM, REQUEST, SENT, EXPIRE_AT, TRACE_CTX), + {query, FROM, REQUEST, SENT, EXPIRE_AT, TRACE_CTX} +). -import(emqx_common_test_helpers, [on_exit/1]). 
@@ -2494,7 +2497,7 @@ t_expiration_retry(_Config) -> resume_interval => 300 } ), - do_t_expiration_retry(). + do_t_expiration_retry(#{is_batch => false}). t_expiration_retry_batch(_Config) -> emqx_connector_demo:set_callback_mode(always_sync), @@ -2511,20 +2514,17 @@ t_expiration_retry_batch(_Config) -> resume_interval => 300 } ), - do_t_expiration_retry(). + do_t_expiration_retry(#{is_batch => true}). -do_t_expiration_retry() -> +do_t_expiration_retry(Context) -> + IsBatch = maps:get(is_batch, Context), ResumeInterval = 300, ?check_trace( + #{timetrap => 10_000}, begin ok = emqx_resource:simple_sync_query(?ID, block), - {ok, SRef0} = snabbkaffe:subscribe( - ?match_event(#{?snk_kind := buffer_worker_flush_nack}), - 1, - 200 - ), - TimeoutMS = 100, + TimeoutMS = 200, %% the request that expires must be first, so it's the %% head of the inflight table (and retriable). {ok, SRef1} = snabbkaffe:subscribe( @@ -2542,6 +2542,8 @@ do_t_expiration_retry() -> ) ) end), + %% This second message must be enqueued while the resource is blocked by the + %% previous message. Pid1 = spawn_link(fun() -> receive @@ -2556,22 +2558,33 @@ do_t_expiration_retry() -> ) ) end), + ?tp("waiting for first message to be appended to the queue", #{}), {ok, _} = snabbkaffe:receive_events(SRef1), + + ?tp("waiting for first message to expire during blocked retries", #{}), + {ok, _} = ?block_until(#{?snk_kind := buffer_worker_retry_expired}), + + %% Now we wait until the worker tries the second message at least once before + %% unblocking it. Pid1 ! 
go, - {ok, _} = snabbkaffe:receive_events(SRef0), + ?tp("waiting for second message to be retried and be nacked while blocked", #{}), + case IsBatch of + false -> + {ok, _} = ?block_until(#{ + ?snk_kind := buffer_worker_flush_nack, + batch_or_query := ?QUERY(_, {inc_counter, 2}, _, _, _) + }); + true -> + {ok, _} = ?block_until(#{ + ?snk_kind := buffer_worker_flush_nack, + batch_or_query := [?QUERY(_, {inc_counter, 2}, _, _, _) | _] + }) + end, - {ok, _} = - ?block_until( - #{?snk_kind := buffer_worker_retry_expired}, - ResumeInterval * 10 - ), - - {ok, {ok, _}} = - ?wait_async_action( - emqx_resource:simple_sync_query(?ID, resume), - #{?snk_kind := buffer_worker_retry_inflight_succeeded}, - ResumeInterval * 5 - ), + %% Bypass the buffer worker and unblock the resource. + ok = emqx_resource:simple_sync_query(?ID, resume), + ?tp("waiting for second message to be retried and be acked, unblocking", #{}), + {ok, _} = ?block_until(#{?snk_kind := buffer_worker_retry_inflight_succeeded}), ok end, diff --git a/apps/emqx_retainer/src/emqx_retainer.app.src b/apps/emqx_retainer/src/emqx_retainer.app.src index 4f778769d..21fbd5412 100644 --- a/apps/emqx_retainer/src/emqx_retainer.app.src +++ b/apps/emqx_retainer/src/emqx_retainer.app.src @@ -2,7 +2,7 @@ {application, emqx_retainer, [ {description, "EMQX Retainer"}, % strict semver, bump manually! - {vsn, "5.0.24"}, + {vsn, "5.0.25"}, {modules, []}, {registered, [emqx_retainer_sup]}, {applications, [kernel, stdlib, emqx, emqx_ctl]}, diff --git a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl index 21a42c283..0d2b353b1 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl @@ -20,6 +20,7 @@ -include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_trace.hrl"). -include_lib("emqx_resource/include/emqx_resource_errors.hrl"). +-include_lib("snabbkaffe/include/trace.hrl"). 
-export([ apply_rule/3, @@ -58,6 +59,7 @@ %%------------------------------------------------------------------------------ -spec apply_rules(list(rule()), columns(), envs()) -> ok. apply_rules([], _Columns, _Envs) -> + ?tp("rule_engine_applied_all_rules", #{}), ok; apply_rules([#{enable := false} | More], Columns, Envs) -> apply_rules(More, Columns, Envs); diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl index 680aac759..da1df58ea 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl @@ -216,10 +216,8 @@ init_per_group(metrics_fail_simple, Config) -> (_) -> simple_async end), meck:expect(?BRIDGE_IMPL, on_query, 3, {error, {unrecoverable_error, mecked_failure}}), - meck:expect(?BRIDGE_IMPL, on_query_async, fun(_, _, {ReplyFun, Args}, _) -> - Result = {error, {unrecoverable_error, mecked_failure}}, - erlang:apply(ReplyFun, Args ++ [Result]), - Result + meck:expect(?BRIDGE_IMPL, on_query_async, fun(_, _, {_ReplyFun, _Args}, _) -> + {error, {unrecoverable_error, mecked_failure}} end), [{mecked, [?BRIDGE_IMPL]} | Config]; init_per_group(_Groupname, Config) -> diff --git a/apps/emqx_schema_registry/src/emqx_schema_registry.app.src b/apps/emqx_schema_registry/src/emqx_schema_registry.app.src index 7577f8aeb..4879b9c60 100644 --- a/apps/emqx_schema_registry/src/emqx_schema_registry.app.src +++ b/apps/emqx_schema_registry/src/emqx_schema_registry.app.src @@ -1,6 +1,6 @@ {application, emqx_schema_registry, [ {description, "EMQX Schema Registry"}, - {vsn, "0.3.1"}, + {vsn, "0.3.2"}, {registered, [emqx_schema_registry_sup]}, {mod, {emqx_schema_registry_app, []}}, {included_applications, [ diff --git a/apps/emqx_schema_registry/src/emqx_schema_registry_http_api.erl b/apps/emqx_schema_registry/src/emqx_schema_registry_http_api.erl index 3034687b9..6cd9cac33 100644 --- a/apps/emqx_schema_registry/src/emqx_schema_registry_http_api.erl 
+++ b/apps/emqx_schema_registry/src/emqx_schema_registry_http_api.erl @@ -142,18 +142,26 @@ schema("/schema_registry/:name") -> ), ?OK(Response); '/schema_registry'(post, #{body := Params0 = #{<<"name">> := Name}}) -> - Params = maps:without([<<"name">>], Params0), - case emqx_schema_registry:get_schema(Name) of - {error, not_found} -> - case emqx_schema_registry:add_schema(Name, Params) of - ok -> - {ok, Res} = emqx_schema_registry:get_schema(Name), - {201, Res#{name => Name}}; - {error, Error} -> - ?BAD_REQUEST(Error) - end; - {ok, _} -> - ?BAD_REQUEST('ALREADY_EXISTS', <<"Schema already exists">>) + try + ok = emqx_resource:validate_name(Name), + Params = maps:without([<<"name">>], Params0), + case emqx_schema_registry:get_schema(Name) of + {error, not_found} -> + case emqx_schema_registry:add_schema(Name, Params) of + ok -> + {ok, Res} = emqx_schema_registry:get_schema(Name), + {201, Res#{name => Name}}; + {error, Error} -> + ?BAD_REQUEST(Error) + end; + {ok, _} -> + ?BAD_REQUEST('ALREADY_EXISTS', <<"Schema already exists">>) + end + catch + throw:#{kind := Kind, reason := Reason} -> + Msg0 = ?ERROR_MSG('BAD_REQUEST', Reason), + Msg = Msg0#{kind => Kind}, + {400, Msg} end. '/schema_registry/:name'(get, #{bindings := #{name := Name}}) -> diff --git a/apps/emqx_schema_registry/test/emqx_schema_registry_http_api_SUITE.erl b/apps/emqx_schema_registry/test/emqx_schema_registry_http_api_SUITE.erl index b7a86e4f4..fd2b51c7c 100644 --- a/apps/emqx_schema_registry/test/emqx_schema_registry_http_api_SUITE.erl +++ b/apps/emqx_schema_registry/test/emqx_schema_registry_http_api_SUITE.erl @@ -399,3 +399,26 @@ t_empty_sparkplug(_Config) -> [] ), ok. + +%% Tests that we can't create names that are too long and get a decent error message. +t_name_too_long(Config) -> + SerdeType = ?config(serde_type, Config), + SourceBin = ?config(schema_source, Config), + SerdeTypeBin = atom_to_binary(SerdeType), + %% Too long! 
+ SchemaName = binary:copy(<<"a">>, 256), + Params = #{ + <<"type">> => SerdeTypeBin, + <<"source">> => SourceBin, + <<"name">> => SchemaName, + <<"description">> => <<"My schema">> + }, + ?assertMatch( + {ok, 400, #{ + <<"code">> := <<"BAD_REQUEST">>, + <<"kind">> := <<"validation_error">>, + <<"message">> := <<"Name length must be less than 255">> + }}, + request({post, Params}) + ), + ok. diff --git a/apps/emqx_utils/src/emqx_utils.app.src b/apps/emqx_utils/src/emqx_utils.app.src index 959a3a37a..b2ec221e3 100644 --- a/apps/emqx_utils/src/emqx_utils.app.src +++ b/apps/emqx_utils/src/emqx_utils.app.src @@ -2,7 +2,7 @@ {application, emqx_utils, [ {description, "Miscellaneous utilities for EMQX apps"}, % strict semver, bump manually! - {vsn, "5.2.1"}, + {vsn, "5.2.3"}, {modules, [ emqx_utils, emqx_utils_api, diff --git a/apps/emqx_utils/src/emqx_variform_bif.erl b/apps/emqx_utils/src/emqx_variform_bif.erl index ed9dfb851..f30db8f7a 100644 --- a/apps/emqx_utils/src/emqx_variform_bif.erl +++ b/apps/emqx_utils/src/emqx_variform_bif.erl @@ -199,20 +199,24 @@ regex_match(Str, RE) -> regex_replace(SrcStr, RE, RepStr) -> re:replace(SrcStr, RE, RepStr, [global, {return, binary}]). -%% @doc Searches the string Str for patterns specified by Regexp. +%% @doc Non-global search for specified regular expression pattern in the given string. %% If matches are found, it returns a list of all captured groups from these matches. %% If no matches are found or there are no groups captured, it returns an empty list. %% This function can be used to extract parts of a string based on a regular expression, %% excluding the complete match itself. 
+%% %% Examples: %% ("Number: 12345", "(\\d+)") -> [<<"12345">>] -%% ("Hello, world!", "(\\w+)") -> [<<"Hello">>, <<"world">>] +%% ("Hello, world!", "(\\w+).*\s(\\w+)") -> [<<"Hello">>, <<"world">>] %% ("No numbers here!", "(\\d+)") -> [] %% ("Date: 2021-05-20", "(\\d{4})-(\\d{2})-(\\d{2})") -> [<<"2021">>, <<"05">>, <<"20">>] +-spec regex_extract(string() | binary(), string() | binary()) -> [binary()]. regex_extract(Str, Regexp) -> - case re:run(Str, Regexp, [{capture, all_but_first, list}]) of - {match, [_ | _] = L} -> lists:map(fun erlang:iolist_to_binary/1, L); - _ -> [] + case re:run(Str, Regexp, [{capture, all_but_first, binary}]) of + {match, CapturedGroups} -> + CapturedGroups; + _ -> + [] end. ascii(Char) when is_binary(Char) -> diff --git a/build b/build index cfecd9eeb..fd01de2f4 100755 --- a/build +++ b/build @@ -397,12 +397,14 @@ function is_ecr_and_enterprise() { ## Build the default docker image based on debian 12. make_docker() { - local EMQX_BUILDER_VERSION="${EMQX_BUILDER_VERSION:-5.3-8}" - local EMQX_BUILDER_PLATFORM="${EMQX_BUILDER_PLATFORM:-debian12}" - local OTP_VSN="${OTP_VSN:-26.2.5-2}" - local ELIXIR_VSN="${ELIXIR_VSN:-1.15.7}" - local EMQX_BUILDER=${EMQX_BUILDER:-ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VERSION}:${ELIXIR_VSN}-${OTP_VSN}-${EMQX_BUILDER_PLATFORM}} - local EMQX_RUNNER="${EMQX_RUNNER:-${EMQX_DEFAULT_RUNNER}}" + # shellcheck disable=SC1091 + source ./env.sh + local BUILD_FROM="${BUILD_FROM:-${EMQX_DOCKER_BUILD_FROM}}" + # shellcheck disable=SC2155 + local OTP_VSN="$(docker run --rm "${BUILD_FROM}" erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' 
-noshell)" + # shellcheck disable=SC2155 + local ELIXIR_VSN="$(docker run --rm "${BUILD_FROM}" elixir --short-version)" + local RUN_FROM="${RUN_FROM:-${EMQX_DOCKER_RUN_FROM}}" local EMQX_DOCKERFILE="${EMQX_DOCKERFILE:-deploy/docker/Dockerfile}" local EMQX_SOURCE_TYPE="${EMQX_SOURCE_TYPE:-src}" # shellcheck disable=SC2155 @@ -446,8 +448,8 @@ make_docker() { GIT_REVISION="$(git rev-parse HEAD)" export BUILDX_NO_DEFAULT_ATTESTATIONS=1 local DOCKER_BUILDX_ARGS=( - --build-arg BUILD_FROM="${EMQX_BUILDER}" \ - --build-arg RUN_FROM="${EMQX_RUNNER}" \ + --build-arg BUILD_FROM="${BUILD_FROM}" \ + --build-arg RUN_FROM="${RUN_FROM}" \ --build-arg SOURCE_TYPE="${EMQX_SOURCE_TYPE}" \ --build-arg PROFILE="${PROFILE%%-elixir}" \ --build-arg IS_ELIXIR="$([[ "$PROFILE" = *-elixir ]] && echo yes || echo no)" \ diff --git a/changes/ce/feat-13317.en.md b/changes/ce/feat-13317.en.md new file mode 100644 index 000000000..cf77d2f62 --- /dev/null +++ b/changes/ce/feat-13317.en.md @@ -0,0 +1 @@ +Added a new per-authorization source metric type: `ignore`. The meaning of this counter is that it's increased whenever the authorization source attempts to authorize a request, but either it's not applicable, or an error was encountered and the result is undecidable. diff --git a/changes/ce/feat-13336.en.md b/changes/ce/feat-13336.en.md new file mode 100644 index 000000000..ff09f624b --- /dev/null +++ b/changes/ce/feat-13336.en.md @@ -0,0 +1 @@ +Added new configs `bootstrap_file` and `bootstrap_type` for built-in database for authentication to support bootstrapping the table with csv and json file. diff --git a/changes/ce/fix-13222.en.md b/changes/ce/fix-13222.en.md new file mode 100644 index 000000000..0fc7a40ac --- /dev/null +++ b/changes/ce/fix-13222.en.md @@ -0,0 +1,5 @@ +Fix the flags check and error handling related to the Will message in the `CONNECT` packet. 
+See also: +- MQTT-v3.1.1-[MQTT-3.1.2-13], MQTT-v5.0-[MQTT-3.1.2-11] +- MQTT-v3.1.1-[MQTT-3.1.2-14], MQTT-v5.0-[MQTT-3.1.2-12] +- MQTT-v3.1.1-[MQTT-3.1.2-15], MQTT-v5.0-[MQTT-3.1.2-13] diff --git a/changes/ce/fix-13258.en.md b/changes/ce/fix-13258.en.md new file mode 100644 index 000000000..4084ed6f1 --- /dev/null +++ b/changes/ce/fix-13258.en.md @@ -0,0 +1 @@ +Fix an issue where the MQTT-SN gateway would not restart correctly due to incorrect startup order of gateway dependencies. diff --git a/changes/ce/fix-13307.en.md b/changes/ce/fix-13307.en.md new file mode 100644 index 000000000..d15732586 --- /dev/null +++ b/changes/ce/fix-13307.en.md @@ -0,0 +1,7 @@ +Upgrade ekka lib to 0.19.5 + +ekka 0.19.5 uses mria 0.8.8 that improves auto-heal functionality. +Previously, the auto-heal worked only when all core nodes were reachable again. +This update allows to apply auto-heal once the majority of core nodes are alive. + +[Mria PR](https://github.com/emqx/mria/pull/180) diff --git a/changes/ce/fix-13334.en.md b/changes/ce/fix-13334.en.md new file mode 100644 index 000000000..e638be9ce --- /dev/null +++ b/changes/ce/fix-13334.en.md @@ -0,0 +1,4 @@ +Check the `PasswordFlag` of the MQTT v3.1.1 CONNECT packet in strict mode to comply with the protocol. + +> [!NOTE] +> To ensure BUG-TO-BUG compatibility, this check is performed only in strict mode. diff --git a/changes/ce/fix-13344.en.md b/changes/ce/fix-13344.en.md new file mode 100644 index 000000000..58882651f --- /dev/null +++ b/changes/ce/fix-13344.en.md @@ -0,0 +1 @@ +Fixed an issue that prevented the `POST /clients/:clientid/subscribe/bulk` API from working properly if the node receiving the API request did not hold the connection to the targeted clientid. 
diff --git a/changes/e5.7.1.en.md b/changes/e5.7.1.en.md new file mode 100644 index 000000000..12ba03c2d --- /dev/null +++ b/changes/e5.7.1.en.md @@ -0,0 +1,111 @@ +## 5.7.1 + +*Release Date: 2024-06-26* + +### Enhancements + +- [#12983](https://github.com/emqx/emqx/pull/12983) Add new rule engine event `$events/client_check_authn_complete` for authentication completion event. + +- [#13175](https://github.com/emqx/emqx/pull/13175) Added the `disable_prepared_statements` option for Postgres-based connectors. + + This option is to be used with endpoints that do not support the prepared statements session feature, such as PGBouncer and Supabase in Transaction mode. + +- [#13180](https://github.com/emqx/emqx/pull/13180) Improved client message handling performance when EMQX is running on Erlang/OTP 26 and increased message throughput by 10% in fan-in mode. + +- [#13191](https://github.com/emqx/emqx/pull/13191) Upgraded EMQX Docker images to run on Erlang/OTP 26. + + EMQX had been running on Erlang/OTP 26 since v5.5 except for docker images which were on Erlang/OTP 25. Now all releases are on Erlang/OTP 26. This upgrade fixed the following known issue: + + When an older version of EMQX joins a cluster with newer version nodes, the Schema Registry of the older version node may encounter an issue, emitting logs like the following: + + ``` + Error loading module '$schema_parser___CiYAWBja87PleCyKZ58h__SparkPlug_B_BUILT-IN':, + This BEAM file was compiled for a later version of the runtime system than the current (Erlang/OTP 25). + ``` + + This issue is fixed in the newer version. However, for older versions, a manual step is required. Execute the following command on one of the clustered nodes before the older version EMQX joins the cluster: + + ```shell + emqx eval 'lists:foreach(fun(Key) -> mnesia:dirty_delete(emqx_ee_schema_registry_protobuf_cache_tab, Key) end, mnesia:dirty_all_keys(emqx_ee_schema_registry_protobuf_cache_tab)).' 
+ ``` + + If the older version of EMQX is already in the cluster, execute the above command and restart the affected node. + +- [#13242](https://github.com/emqx/emqx/pull/13242) Significantly increased the startup speed of EMQX Dashboard listener. + +- [#13172](https://github.com/emqx/emqx/pull/13172) Added a rule function `map_to_redis_hset_args` to help preparing redis HSET (or HMSET) multi-fields values. + + For example, if `payload.value` is a map of multiple data fields, + this rule `SELECT map_to_redis_hset_args(payload.value) as hset_fields FROM "t/#"` can prepare `hset_fields` + for redis action to render the command template like `HMSET name1 ${hset_fields}`. + +- [#13210](https://github.com/emqx/emqx/pull/13210) EMQX now validates that referenced schemas and message types exist in the Schema Registry when inserting or updating a Schema Validation. + +- [#13211](https://github.com/emqx/emqx/pull/13211) Enhanced TLS listener to support more flexible TLS verifications. + + - `partial_chain` support: If the option `partial_chain` is set to `true`, connections with incomplete certificate chains are allowed. Check the [Configuration Manual](https://docs.emqx.com/en/enterprise/v@EE_VERSION@/hocon/) for more details. + + - Certificate Key Usage validation: Added support for required Extended Key Usage as defined in + [rfc5280](https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.12). A new option (`verify_peer_ext_key_usage`) has been introduced to enforce specific key usages (such as "serverAuth") in peer certificates during the TLS handshake. This enhances security by ensuring certificates are used for their intended purposes, for example, "serverAuth,OID:1.3.6.1.5.5.7.3.2". Check the [Configuration Manual](https://docs.emqx.com/en/enterprise/v@EE_VERSION@/hocon/) for more details. + +- [#13274](https://github.com/emqx/emqx/pull/13274) The RocketMQ connector now supports configuring SSL settings. 
+ +### Bug Fixes + +- [#13156](https://github.com/emqx/emqx/pull/13156) Resolved an issue where the Dashboard Monitoring pages would crash following the update to EMQX v5.7.0. + +- [#13164](https://github.com/emqx/emqx/pull/13164) Fixed HTTP authorization request body encoding. + + Before this fix, the HTTP authorization request body encoding format was taken from the `accept` header. The fix is to respect the `content-type` header instead. Also added `access` templating variable for v4 compatibility. The access code of SUBSCRIBE action is `1` and PUBLISH action is `2`. + +- [#13238](https://github.com/emqx/emqx/pull/13238) Improved the logged error messages when an HTTP authorization request with an unsupported content-type header is returned. + +- [#13258](https://github.com/emqx/emqx/pull/13258) Fixed an issue where the MQTT-SN gateway would not restart correctly due to incorrect startup order of gateway dependencies. + +- [#13273](https://github.com/emqx/emqx/pull/13273) Fixed and improved handling of URIs in several configurations. The fix includes the following improvement details: + + * Authentication and authorization configurations: Corrected a previous error where valid pathless URIs such as `https://example.com?q=x` were mistakenly rejected. These URIs are now properly recognized as valid. + * Connector configurations: Enhanced checks to ensure that URIs with potentially problematic components, such as user info or fragment parts, are no longer erroneously accepted. + +- [#13276](https://github.com/emqx/emqx/pull/13276) Fixed an issue in the durable message storage mechanism where parts of the internal storage state were not correctly persisted during the setup of new storage generations. The concept of "generation" is used internally and is crucial for managing message expiration and cleanup. This could have manifested as messages being lost after a restart of EMQX. 
+ +- [#13291](https://github.com/emqx/emqx/pull/13291) Fixed an issue where durable storage sites that were down being reported as up. + +- [#13290](https://github.com/emqx/emqx/pull/13290) Fixed an issue where the command `$ bin/emqx ctl rules show rule_0hyd` would produce no output when used to display rules with a data integration action attached. + +- [#13293](https://github.com/emqx/emqx/pull/13293) Improved the restoration process from data backups by automating the re-indexing of imported retained messages. Previously, re-indexing required manual intervention using the `emqx ctl retainer reindex start` CLI command after importing a data backup file. + + This fix also extended the functionality to allow exporting retained messages to a backup file when the `retainer.backend.storage_type` is configured as `ram`. Previously, only setups with `disc` as the storage type supported exporting retained messages. + +- [#13147](https://github.com/emqx/emqx/pull/13147) Improved the error messages for decoding failures in the rule engine protobuf decode functions by adding clear descriptions to indicate what went wrong when message decoding failed. + +- [#13140](https://github.com/emqx/emqx/pull/13140) Fixed an issue that caused text traces for the republish action to crash and not display correctly. + +- [#13148](https://github.com/emqx/emqx/pull/13148) Fixed an issue where a 500 HTTP status code could be returned by `/connectors/:connector-id/start` when there is a timeout waiting for the resource to be connected. + +- [#13181](https://github.com/emqx/emqx/pull/13181) EMQX now forcefully shut down the connector process when attempting to stop a connector, if such operation times out. This fix also improved the clarity of error messages when disabling an action or source fails due to an unresponsive underlying connector. + +- [#13216](https://github.com/emqx/emqx/pull/13216) Respect `clientid_prefix` config for MQTT bridges. 
Since EMQX v5.4.1, the MQTT client IDs are restricted to a maximum of 23 bytes. Previously, the system factored the `clientid_prefix` into the hash of the original, longer client ID, affecting the final shortened ID. The fix includes the following change details: + + - Without Prefix: The behavior remains unchanged. EMQX hashes the long client IDs (exceeding 23 bytes) to fit within the 23-byte limit. + - With Prefix: + - Prefix ≤ 19 bytes: The prefix is retained, and the remaining portion of the client ID is hashed into a 4-byte space, ensuring the total length does not exceed 23 bytes. + - Prefix ≥ 20 bytes: EMQX will not attempt to shorten the client ID, fully preserving the configured prefix regardless of length. + +- [#13189](https://github.com/emqx/emqx/pull/13189) Fixed an issue where the data integration with Microsoft SQL Server or MySQL could not use SQL templates with substring `values` in table name or column name. + +- [#13070](https://github.com/emqx/emqx/pull/13070) Improved Kafka connector error logs to provide more diagnostic information by capturing specific error details, such as unreachable advertised listeners. To manage log verbosity, only the first occurrence of an error is logged, accompanied by the total count of similar errors. + +- [#13093](https://github.com/emqx/emqx/pull/13093) Improved Kafka consumer group stability. Before this change, the Kafka consumer group sometimes needs to rebalance twice after the Kafka group coordinator restarted. + +- [#13277](https://github.com/emqx/emqx/pull/13277) Refined the error handling for Kafka producers when encountering the `message_too_large` error. Previously, Kafka producers would repeatedly attempt to resend oversized message batches, hoping for a server-side adjustment in `max.message.bytes`. + + Now, oversized messages are automatically split into single-message batches for retry. If a message still exceeds size limits, it will be dropped to maintain data flow. 
+ +- [#13130](https://github.com/emqx/emqx/pull/13130) Improved the trace message formatting for Redis action batch requests. Spaces are now added between components of commands and semicolons are added between commands to make the trace message easier to read. + +- [#13136](https://github.com/emqx/emqx/pull/13136) Improved the template-rendered traces for Oracle actions for better readability. + +- [#13197](https://github.com/emqx/emqx/pull/13197) Fixed an issue in AWS S3 data integration that prevented automatic saving of TLS certificates and key files to the file system when they are supplied through the Dashboard UI or Connector API. + +- [#13227](https://github.com/emqx/emqx/pull/13227) Fixed an issue in AWS S3 Sink running in aggregated mode. Before the fix, an invalid key template in the configuration was reported as an error during the Sink setup, but instead caused a storm of hard-to-recover crashes later. diff --git a/changes/ee/breaking-13327.en.md b/changes/ee/breaking-13327.en.md new file mode 100644 index 000000000..28d57d788 --- /dev/null +++ b/changes/ee/breaking-13327.en.md @@ -0,0 +1,3 @@ +The directory path scheme for on-disk Kafka/Confluent/Azure Event Hub buffers has changed. It now uses the Action name instead of the topic name. + +Upgrading to this version will invalidate (not use) old buffer files, and will require manual cleanup of the old directories. diff --git a/changes/ee/breaking-13332.en.md b/changes/ee/breaking-13332.en.md new file mode 100644 index 000000000..0b5bf5896 --- /dev/null +++ b/changes/ee/breaking-13332.en.md @@ -0,0 +1,4 @@ +When an S3 Bridge is improperly configured, error messages now contain more informative and easy to read details. + +## Breaking changes +* S3 Bridge configuration with invalid aggregated upload key template will no longer work. Before this change, such configuration was considered valid but the bridge would never work anyway. 
diff --git a/changes/ee/fix-13305.en.md b/changes/ee/fix-13305.en.md new file mode 100644 index 000000000..1936a49e3 --- /dev/null +++ b/changes/ee/fix-13305.en.md @@ -0,0 +1 @@ +Improved error handling for Redis connectors. Previously, Redis connectors of type single or sentinel would always encounter a timeout error during the connector test in the dashboard if no username or password was provided. This update ensures that users now receive an informative error message in such scenarios. Additionally, more detailed error information has been added for all Redis connector types to enhance diagnostics and troubleshooting. diff --git a/changes/ee/fix-13327.en.md b/changes/ee/fix-13327.en.md new file mode 100644 index 000000000..e81490c5c --- /dev/null +++ b/changes/ee/fix-13327.en.md @@ -0,0 +1 @@ +Fixed an issue with Kafka, Confluent and Azure Event Hub bridges where different actions targeting the same topic could break one another when being deleted or disabled. diff --git a/changes/ee/fix-13345.en.md b/changes/ee/fix-13345.en.md new file mode 100644 index 000000000..1a70e55cf --- /dev/null +++ b/changes/ee/fix-13345.en.md @@ -0,0 +1 @@ +Improved error message when creating a schema in Schema Registry whose name is too long or has invalid format. diff --git a/changes/v5.7.1.en.md b/changes/v5.7.1.en.md new file mode 100644 index 000000000..c0c8de5f3 --- /dev/null +++ b/changes/v5.7.1.en.md @@ -0,0 +1,55 @@ +## 5.7.1 + +*Release Date: 2024-06-26* + +### Enhancements + +- [#12983](https://github.com/emqx/emqx/pull/12983) Add new rule engine event `$events/client_check_authn_complete` for authentication completion event. + +- [#13180](https://github.com/emqx/emqx/pull/13180) Improved client message handling performance when EMQX is running on Erlang/OTP 26 and increased message throughput by 10% in fan-in mode. + +- [#13191](https://github.com/emqx/emqx/pull/13191) Upgraded EMQX Docker images to run on Erlang/OTP 26. 
+ + EMQX had been running on Erlang/OTP 26 since v5.5 except for docker images which were on Erlang/OTP 25. Now all releases are on Erlang/OTP 26. + +- [#13242](https://github.com/emqx/emqx/pull/13242) Significantly increased the startup speed of EMQX dashboard listener. + +### Bug Fixes + +- [#13156](https://github.com/emqx/emqx/pull/13156) Resolved an issue where the Dashboard Monitoring pages would crash following the update to EMQX v5.7.0. + +- [#13164](https://github.com/emqx/emqx/pull/13164) Fixed HTTP authorization request body encoding. + + Before this fix, the HTTP authorization request body encoding format was taken from the `accept` header. The fix is to respect the `content-type` header instead. Also added `access` templating variable for v4 compatibility. The access code of SUBSCRIBE action is `1` and PUBLISH action is `2`. + +- [#13238](https://github.com/emqx/emqx/pull/13238) Improved the logged error messages when an HTTP authorization request with an unsupported content-type header is returned. + +- [#13258](https://github.com/emqx/emqx/pull/13258) Fix an issue where the MQTT-SN gateway would not restart correctly due to incorrect startup order of gateway dependencies. + +- [#13273](https://github.com/emqx/emqx/pull/13273) Fixed and improved handling of URIs in several configurations. The fix includes the following improvement details: + + * Authentication and authorization configurations: Corrected a previous error where valid pathless URIs such as `https://example.com?q=x` were mistakenly rejected. These URIs are now properly recognized as valid. + * Connector configurations: Enhanced checks to ensure that URIs with potentially problematic components, such as user info or fragment parts, are no longer erroneously accepted. + +- [#13276](https://github.com/emqx/emqx/pull/13276) Fixed an issue in the durable message storage mechanism where parts of the internal storage state were not correctly persisted during the setup of new storage generations. 
The concept of "generation" is used internally and is crucial for managing message expiration and cleanup. This could have manifested as messages being lost after a restart of EMQX. + +- [#13291](https://github.com/emqx/emqx/pull/13291) Fixed an issue where durable storage sites that were down being reported as up. + +- [#13290](https://github.com/emqx/emqx/pull/13290) Fixed an issue where the command `$ bin/emqx ctl rules show rule_0hyd` would produce no output when used to display rules with a data integration action attached. + +- [#13293](https://github.com/emqx/emqx/pull/13293) Improved the restoration process from data backups by automating the re-indexing of imported retained messages. Previously, re-indexing required manual intervention using the `emqx ctl retainer reindex start` CLI command after importing a data backup file. + + This fix also extended the functionality to allow exporting retained messages to a backup file when the `retainer.backend.storage_type` is configured as `ram`. Previously, only setups with `disc` as the storage type supported exporting retained messages. + +- [#13140](https://github.com/emqx/emqx/pull/13140) Fixed an issue that caused text traces for the republish action to crash and not display correctly. + +- [#13148](https://github.com/emqx/emqx/pull/13148) Fixed an issue where a 500 HTTP status code could be returned by `/connectors/:connector-id/start` when there is a timeout waiting for the resource to be connected. + +- [#13181](https://github.com/emqx/emqx/pull/13181) EMQX now forcefully shut down the connector process when attempting to stop a connector, if such operation times out. This fix also improved the clarity of error messages when disabling an action or source fails due to an unresponsive underlying connector. + +- [#13216](https://github.com/emqx/emqx/pull/13216) Respect `clientid_prefix` config for MQTT bridges. Since EMQX v5.4.1, the MQTT client IDs are restricted to a maximum of 23 bytes. 
Previously, the system factored the `clientid_prefix` into the hash of the original, longer client ID, affecting the final shortened ID. The fix includes the following change details: + + - Without Prefix: The behavior remains unchanged. EMQX hashes the long client IDs (exceeding 23 bytes) to fit within the 23-byte limit. + - With Prefix: + - Prefix ≤ 19 bytes: The prefix is retained, and the remaining portion of the client ID is hashed into a 4-byte space, ensuring the total length does not exceed 23 bytes. + - Prefix ≥ 20 bytes: EMQX will not attempt to shorten the client ID, fully preserving the configured prefix regardless of length. diff --git a/deploy/charts/emqx-enterprise/Chart.yaml b/deploy/charts/emqx-enterprise/Chart.yaml index a2327ebc5..cd795d4f4 100644 --- a/deploy/charts/emqx-enterprise/Chart.yaml +++ b/deploy/charts/emqx-enterprise/Chart.yaml @@ -14,8 +14,8 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 5.7.0 +version: 5.7.1 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. -appVersion: 5.7.0 +appVersion: 5.7.1 diff --git a/deploy/charts/emqx/Chart.yaml b/deploy/charts/emqx/Chart.yaml index 0bc96e822..d31648f2f 100644 --- a/deploy/charts/emqx/Chart.yaml +++ b/deploy/charts/emqx/Chart.yaml @@ -14,8 +14,8 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 5.7.1-alpha.1 +version: 5.7.1 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
-appVersion: 5.7.1-alpha.1 +appVersion: 5.7.1 diff --git a/deploy/docker/Dockerfile b/deploy/docker/Dockerfile index 7377709a2..4f068fab6 100644 --- a/deploy/docker/Dockerfile +++ b/deploy/docker/Dockerfile @@ -1,4 +1,4 @@ -ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12 +ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-debian12 ARG RUN_FROM=public.ecr.aws/debian/debian:stable-20240612-slim ARG SOURCE_TYPE=src # tgz diff --git a/env.sh b/env.sh new file mode 100644 index 000000000..c71402edb --- /dev/null +++ b/env.sh @@ -0,0 +1,8 @@ +# https://github.com/emqx/emqx-builder +export EMQX_BUILDER_VSN=5.3-9 +export OTP_VSN=26.2.5-3 +export ELIXIR_VSN=1.15.7 +export EMQX_BUILDER=ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VSN}:${ELIXIR_VSN}-${OTP_VSN}-ubuntu22.04 +export EMQX_DOCKER_BUILD_FROM=ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VSN}:${ELIXIR_VSN}-${OTP_VSN}-debian12 +export EMQX_DOCKER_RUN_FROM=public.ecr.aws/debian/debian:stable-20240612-slim +export QUICER_DOWNLOAD_FROM_RELEASE=1 diff --git a/mix.exs b/mix.exs index cc8f1682e..ed2cc0f75 100644 --- a/mix.exs +++ b/mix.exs @@ -55,7 +55,7 @@ defmodule EMQXUmbrella.MixProject do {:cowboy, github: "emqx/cowboy", tag: "2.9.2", override: true}, {:esockd, github: "emqx/esockd", tag: "5.11.2", override: true}, {:rocksdb, github: "emqx/erlang-rocksdb", tag: "1.8.0-emqx-6", override: true}, - {:ekka, github: "emqx/ekka", tag: "0.19.4", override: true}, + {:ekka, github: "emqx/ekka", tag: "0.19.5", override: true}, {:gen_rpc, github: "emqx/gen_rpc", tag: "3.3.1", override: true}, {:grpc, github: "emqx/grpc-erl", tag: "0.6.12", override: true}, {:minirest, github: "emqx/minirest", tag: "1.4.3", override: true}, @@ -68,14 +68,14 @@ defmodule EMQXUmbrella.MixProject do {:rulesql, github: "emqx/rulesql", tag: "0.2.1"}, {:observer_cli, "1.7.1"}, {:system_monitor, github: "ieQu1/system_monitor", tag: "3.0.5"}, - {:telemetry, "1.1.0"}, + {:telemetry, "1.1.0", override: true}, # in conflict by 
emqtt and hocon {:getopt, "1.0.2", override: true}, {:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "1.0.10", override: true}, {:hocon, github: "emqx/hocon", tag: "0.42.2", override: true}, {:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.3", override: true}, {:esasl, github: "emqx/esasl", tag: "0.2.1"}, - {:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"}, + {:jose, github: "potatosalad/erlang-jose", tag: "1.11.2", override: true}, # in conflict by ehttpc and emqtt {:gun, github: "emqx/gun", tag: "1.3.11", override: true}, # in conflict by emqx_connector and system_monitor @@ -215,7 +215,7 @@ defmodule EMQXUmbrella.MixProject do {:hstreamdb_erl, github: "hstreamdb/hstreamdb_erl", tag: "0.5.18+v0.18.1+ezstd-v1.0.5-emqx1"}, {:influxdb, github: "emqx/influxdb-client-erl", tag: "1.1.13", override: true}, - {:wolff, github: "kafka4beam/wolff", tag: "1.10.5"}, + {:wolff, github: "kafka4beam/wolff", tag: "2.0.0"}, {:kafka_protocol, github: "kafka4beam/kafka_protocol", tag: "4.1.5", override: true}, {:brod_gssapi, github: "kafka4beam/brod_gssapi", tag: "v0.1.1"}, {:brod, github: "kafka4beam/brod", tag: "3.18.0"}, diff --git a/rebar.config b/rebar.config index 55ea83fdd..b537b23ab 100644 --- a/rebar.config +++ b/rebar.config @@ -83,7 +83,7 @@ {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}}, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.11.2"}}}, {rocksdb, {git, "https://github.com/emqx/erlang-rocksdb", {tag, "1.8.0-emqx-6"}}}, - {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.4"}}}, + {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.5"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}}, {grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.12"}}}, {minirest, {git, "https://github.com/emqx/minirest", {tag, "1.4.3"}}}, diff --git a/rel/i18n/emqx_authn_mnesia_schema.hocon b/rel/i18n/emqx_authn_mnesia_schema.hocon index b0d1a8517..3c9a24d2c 100644 --- 
a/rel/i18n/emqx_authn_mnesia_schema.hocon +++ b/rel/i18n/emqx_authn_mnesia_schema.hocon @@ -9,4 +9,34 @@ user_id_type.desc: user_id_type.label: """Authentication ID Type""" +bootstrap_file.desc: +"""The bootstrap file imports users into the built-in database. +The file content format is determined by `bootstrap_type`. +Remove the item from the bootstrap file when you have made changes in other way, +otherwise, after restarting, the bootstrap item will be overridden again.""" + +bootstrap_file.label: +"""Bootstrap File Path""" + +bootstrap_type.desc: +"""Specify which type of content the bootstrap file has. + +- **`plain`**: + - Expected data fields: `user_id`, `password`, `is_superuser` + - `user_id`: Can be Client ID or username, depending on built-in database authentication's `user_id_type` config. + - `password`: User's plaintext password. + - `is_superuser`: Boolean, user's administrative status. + +- **`hash`**: + - Expected data fields: `user_id`,`password_hash`,`salt`,`is_superuser` + - Definitions similar to `plain` type, with `password_hash` and `salt` added for security. + +The content can be either in CSV, or JSON format. + +Here is a CSV example: `user_id,password_hash,salt,is_superuser\nmy_user,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235,true` + +And JSON content should be decoded into an array of objects, for example: `[{"user_id": "my_user","password": "s3cr3tp@ssw0rd","is_superuser": true}]`. + +The hash string for `password_hash` depends on how `password_hash_algorithm` is configured for the built-in database authentication mechanism. For example, if it's configured as `password_hash_algorithm {name = sha256, salt_position = suffix}`, then the salt is appended to the password before hashed. 
Here is the equivalent Python expression: `hashlib.sha256(password + salt).hexdigest()`.""" + } diff --git a/rel/i18n/emqx_dashboard_sso_oidc.hocon b/rel/i18n/emqx_dashboard_sso_oidc.hocon new file mode 100644 index 000000000..7dd6cf497 --- /dev/null +++ b/rel/i18n/emqx_dashboard_sso_oidc.hocon @@ -0,0 +1,48 @@ +emqx_dashboard_sso_oidc { + +issuer.desc: +"""The URL of the OIDC issuer.""" + +clientid.desc: +"""The clientId for this backend.""" + +secret.desc: +"""The client secret.""" + +scopes.desc: +"""The scopes, its default value is `["openid"]`.""" + +name_var.desc: +"""A template to map OIDC user information to a Dashboard name, its default value is `${sub}`.""" + +dashboard_addr.desc: +"""The address of the EMQX Dashboard.""" + +session_expiry.desc: +"""The valid time span for an OIDC `state`, the default is `30s`, if the code response returned by the authorization server exceeds this time span, it will be treated as invalid.""" + +require_pkce.desc: +"""Whether to require PKCE when getting the token.""" + +client_jwks.desc: +"""Set JWK or JWKS here to enable the `private_key_jwt` authorization or the `DPoP` extension.""" + +client_file_jwks_type.desc: +"""The JWKS source type.""" + +client_file_jwks.desc: +"""Set JWKS from file.""" + +client_file_jwks_file.desc: +"""The content of the JWKS.""" + +preferred_auth_methods.desc: +"""Set the valid authentication methods and their priority.""" + +provider.desc: +"""The OIDC provider.""" + +fallback_methods.desc: +"""Some providers do not provide all the method items in the provider configuration, set this value as a fallback for those items.""" + +} diff --git a/rel/i18n/emqx_dashboard_sso_oidc_api.hocon b/rel/i18n/emqx_dashboard_sso_oidc_api.hocon new file mode 100644 index 000000000..b164d3db4 --- /dev/null +++ b/rel/i18n/emqx_dashboard_sso_oidc_api.hocon @@ -0,0 +1,6 @@ +emqx_dashboard_sso_oidc_api { + +code_callback.desc: +"""The callback path for the OIDC authorization server.""" + +} diff --git 
a/rel/i18n/emqx_schema.hocon b/rel/i18n/emqx_schema.hocon index e80f36817..f9978fe6f 100644 --- a/rel/i18n/emqx_schema.hocon +++ b/rel/i18n/emqx_schema.hocon @@ -855,6 +855,15 @@ The default value 1.5 is following the MQTT 5.0 specification. This multiplier i mqtt_keepalive_multiplier.label: """Keep Alive Multiplier""" +mqtt_keepalive_check_interval.desc: +"""The frequency of checking for incoming MQTT packets determines how often the server will check for new MQTT packets. +If a certain amount of time passes without any packets being sent from the client, this time will be added up. +Once the accumulated time exceeds `keepalive-interval * keepalive-multiplier`, the connection will be terminated. +The default is set to 30 seconds, with a minimum value of 1 second and a maximum value of `keepalive-interval / 2`.""" + +mqtt_keepalive_check_interval.label: +"""Keep Alive Check Interval""" + force_gc_bytes.desc: """GC the process after specified number of bytes have passed through.""" diff --git a/scripts/apps-version-check.sh b/scripts/apps-version-check.sh index e6d13d964..51f7a81ac 100755 --- a/scripts/apps-version-check.sh +++ b/scripts/apps-version-check.sh @@ -27,21 +27,12 @@ is_allowed_non_strict() { local src_file="$1" local from="$2" local to="$3" - case "$(basename "${src_file}" '.app.src')" in - emqx_auth_http) - case "${from}-${to}" in - '0.1.4-0.2.1') - return 0 - ;; - *) - return 1 - ;; - esac - ;; - *) - return 1 - ;; - esac + if [ -f .emqx-platform ]; then + log_red "ERROR: $src_file vsn bump from $from to $to" + return 1 + fi + log_red "WARN: $src_file vsn bump from $from to $to" + return 0 } APPS="$(./scripts/find-apps.sh)" diff --git a/scripts/buildx.sh b/scripts/buildx.sh index 1013a529a..1fa7793ec 100755 --- a/scripts/buildx.sh +++ b/scripts/buildx.sh @@ -8,8 +8,7 @@ ## i.e. 
will not work if docker command has to be executed with sudo ## example: -## ./scripts/buildx.sh --profile emqx --pkgtype tgz --arch arm64 \ -## --builder ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12 +## ./scripts/buildx.sh --profile emqx --pkgtype tgz set -euo pipefail @@ -17,13 +16,13 @@ help() { echo echo "-h|--help: To display this usage information" echo "--profile : EMQX profile to build (emqx|emqx-enterprise)" - echo "--pkgtype tgz|pkg: Specify which package to build, tgz for .tar.gz," - echo " pkg for .rpm or .deb" + echo "--pkgtype tgz|pkg|rel: Specify which package to build, tgz for .tar.gz," + echo " pkg for .rpm or .deb, rel for release only" echo "--elixir: Specify if the release should be built with Elixir, " echo " defaults to 'no'." echo "--arch amd64|arm64: Target arch to build the EMQX package for" echo "--src_dir : EMQX source code in this dir, default to PWD" - echo "--builder : Builder image to pull" + echo "--builder : Docker image to use for building" echo " E.g. ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12" } @@ -34,6 +33,11 @@ die() { exit 1 } +# ensure dir +cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/.." +# shellcheck disable=SC1091 +source ./env.sh + while [ "$#" -gt 0 ]; do case $1 in -h|--help) @@ -53,7 +57,7 @@ while [ "$#" -gt 0 ]; do shift 2 ;; --builder) - BUILDER="$2" + EMQX_BUILDER="$2" shift 2 ;; --arch) @@ -103,24 +107,21 @@ ARCH="${ARCH:-${NATIVE_ARCH:-}}" [ -z "${PROFILE:-}" ] && die "missing --profile" [ -z "${PKGTYPE:-}" ] && die "missing --pkgtype" -[ -z "${BUILDER:-}" ] && die "missing --builder" +[ -z "${EMQX_BUILDER:-}" ] && die "missing --builder" [ -z "${ARCH:-}" ] && die "missing --arch" -# ensure dir -cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/.." 
- set -x if [ -z "${IS_ELIXIR:-}" ]; then IS_ELIXIR=no fi -case "$PKGTYPE" in - tgz|pkg) +case "${PKGTYPE:-}" in + tgz|pkg|rel) true ;; *) - echo "Bad --pkgtype option, should be tgz or pkg" + echo "Bad --pkgtype option, should be tgz, pkg or rel" exit 1 ;; esac @@ -135,9 +136,13 @@ else fi HOST_SYSTEM="$(./scripts/get-distro.sh)" -BUILDER_SYSTEM="${BUILDER_SYSTEM:-$(echo "$BUILDER" | awk -F'-' '{print $NF}')}" +BUILDER_SYSTEM="${BUILDER_SYSTEM:-$(echo "$EMQX_BUILDER" | awk -F'-' '{print $NF}')}" -CMD_RUN="make ${MAKE_TARGET} && ./scripts/pkg-tests.sh ${MAKE_TARGET}" +if [ "${PKGTYPE}" != 'rel' ]; then + CMD_RUN="make ${MAKE_TARGET} && ./scripts/pkg-tests.sh ${MAKE_TARGET}" +else + CMD_RUN="make ${MAKE_TARGET}" +fi IS_NATIVE_SYSTEM='no' if [[ "$BUILDER_SYSTEM" != "force_docker" ]]; then @@ -163,7 +168,7 @@ elif docker info; then --workdir /emqx \ --platform="linux/$ARCH" \ --env ACLOCAL_PATH="/usr/share/aclocal:/usr/local/share/aclocal" \ - "$BUILDER" \ + "$EMQX_BUILDER" \ bash -euc "git config --global --add safe.directory /emqx && $CMD_RUN" else echo "Error: Docker not available on unsupported platform" diff --git a/scripts/pr-sanity-checks.sh b/scripts/pr-sanity-checks.sh index 3d687e6f5..a12dcb326 100755 --- a/scripts/pr-sanity-checks.sh +++ b/scripts/pr-sanity-checks.sh @@ -12,11 +12,9 @@ if ! type "yq" > /dev/null; then exit 1 fi -EMQX_BUILDER_VERSION=${EMQX_BUILDER_VERSION:-5.3-8} -OTP_VSN=${OTP_VSN:-26.2.5-2} -ELIXIR_VSN=${ELIXIR_VSN:-1.15.7} -EMQX_BUILDER_PLATFORM=${EMQX_BUILDER_PLATFORM:-ubuntu22.04} -EMQX_BUILDER=${EMQX_BUILDER:-ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VERSION}:${ELIXIR_VSN}-${OTP_VSN}-${EMQX_BUILDER_PLATFORM}} +cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/.." 
+# shellcheck disable=SC1091 +source ./env.sh commands=$(yq ".jobs.sanity-checks.steps[].run" .github/workflows/_pr_entrypoint.yaml | grep -v null) diff --git a/scripts/rel/cut.sh b/scripts/rel/cut.sh index 724b0cab2..1affd48bf 100755 --- a/scripts/rel/cut.sh +++ b/scripts/rel/cut.sh @@ -142,6 +142,21 @@ rel_branch() { esac } +assert_profile() { + local tag="$1" + local allowed_prefix + if [ -f .emqx-platform ]; then + allowed_prefix='e' + else + allowed_prefix='v' + fi + if [[ "${tag}" != "${allowed_prefix}"* ]]; then + logerr "Expecting a '${allowed_prefix}' tag on this commit" + exit 1 + fi +} +assert_profile "$TAG" + ## Ensure the current work branch assert_work_branch() { local tag="$1" @@ -283,6 +298,10 @@ if [ "$DRYRUN" = 'yes' ]; then else git tag "$TAG" logmsg "$TAG is created OK." - logwarn "Don't forget to push the tag!" + PUSH_TO="both emqx.git and emqx-platform.git!" + if [ -f .emqx-platform ]; then + PUSH_TO="emqx-platform.git but NOT emqx.git!" + fi + logwarn "Don't forget to push the tag to ${PUSH_TO}" echo "git push origin $TAG" fi diff --git a/scripts/relup-test/start-relup-test-cluster.sh b/scripts/relup-test/start-relup-test-cluster.sh index 796ffdaa1..71adca4a8 100755 --- a/scripts/relup-test/start-relup-test-cluster.sh +++ b/scripts/relup-test/start-relup-test-cluster.sh @@ -9,6 +9,8 @@ set -euo pipefail # ensure dir cd -P -- "$(dirname -- "$0")/../.." 
+# shellcheck disable=SC1091 +source ./env.sh set -x @@ -22,7 +24,7 @@ WEBHOOK="webhook.$NET" BENCH="bench.$NET" COOKIE='this-is-a-secret' ## Erlang image is needed to run webhook server and emqtt-bench -ERLANG_IMAGE="ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04" +ERLANG_IMAGE="${EMQX_BUILDER}" # builder has emqtt-bench installed BENCH_IMAGE="$ERLANG_IMAGE" diff --git a/scripts/spellcheck/dicts/emqx.txt b/scripts/spellcheck/dicts/emqx.txt index ce08d0f6b..347020b63 100644 --- a/scripts/spellcheck/dicts/emqx.txt +++ b/scripts/spellcheck/dicts/emqx.txt @@ -311,3 +311,4 @@ doc_as_upsert upsert aliyun OID +PKCE diff --git a/scripts/test/start-two-nodes-in-docker.sh b/scripts/test/start-two-nodes-in-docker.sh index c2cc30d5e..1a4af8c5e 100755 --- a/scripts/test/start-two-nodes-in-docker.sh +++ b/scripts/test/start-two-nodes-in-docker.sh @@ -17,6 +17,7 @@ NODE1="node1.$NET" NODE2="node2.$NET" COOKIE='this-is-a-secret' IPV6=0 +DASHBOARD_NODES='both' cleanup() { docker rm -f haproxy >/dev/null 2>&1 || true @@ -32,13 +33,19 @@ show_help() { echo "EMQX_IMAGE2 is the same as EMQX_IMAGE1 if not set" echo "" echo "Options:" - echo " -h, --help Show this help message and exit." - echo " -P Add -p options for docker run to expose more HAProxy container ports." - echo " -6 Test with IPv6" - echo " -c Cleanup: delete docker network, force delete the containers." + echo " -h, --help: Show this help message and exit." + echo " -P: Add -p options for docker run to expose more HAProxy container ports." + echo " -6: Test with IPv6" + echo " -c: Cleanup: delete docker network, force delete the containers." + echo " -d: '1', '2', or 'both' (default = 'both')" + echo " 1: Only put node 1 behind haproxy" + echo " 2: Only put node 2 behind haproxy" + echo " both: This is the default value, which means both nodes serve dashboard" + echo " This is often needed for tests which want to check one dashboard version" + echo " when starting two different versions of EMQX." 
} -while getopts "hc6P:" opt +while getopts "hc6P:d:" opt do case $opt in # -P option is treated similarly to docker run -P: @@ -47,6 +54,7 @@ do c) cleanup; exit 0;; h) show_help; exit 0;; 6) IPV6=1;; + d) DASHBOARD_NODES="$OPTARG";; *) ;; esac done @@ -55,6 +63,19 @@ shift $((OPTIND - 1)) IMAGE1="${1:-}" IMAGE2="${2:-${IMAGE1}}" +DASHBOARD_BACKEND1="server emqx-1 $NODE1:18083" +DASHBOARD_BACKEND2="server emqx-2 $NODE2:18083" +case "${DASHBOARD_NODES}" in + 1) + DASHBOARD_BACKEND2="" + ;; + 2) + DASHBOARD_BACKEND1="" + ;; + both) + ;; +esac + if [ -z "${IMAGE1:-}" ] || [ -z "${IMAGE2:-}" ]; then show_help exit 1 @@ -146,8 +167,8 @@ backend emqx_dashboard_back # load randomly will cause the browser fail to GET some chunks (or get bad chunks if names clash) balance source mode http - server emqx-1 $NODE1:18083 - server emqx-2 $NODE2:18083 + ${DASHBOARD_BACKEND1} + ${DASHBOARD_BACKEND2} ##---------------------------------------------------------------- ## TLS diff --git a/scripts/ui-tests/dashboard_test.py b/scripts/ui-tests/dashboard_test.py index 85d40cec8..6005a9403 100644 --- a/scripts/ui-tests/dashboard_test.py +++ b/scripts/ui-tests/dashboard_test.py @@ -118,7 +118,7 @@ def test_docs_link(driver, login, dashboard_url): if prefix == 'e': docs_base_url = "https://docs.emqx.com/en/enterprise" else: - docs_base_url = "https://www.emqx.io/docs/en" + docs_base_url = "https://docs.emqx.com/en/emqx" docs_url = f"{docs_base_url}/{emqx_version}" xpath = f"//div[@id='app']//div[@class='nav-header']//a[@href[starts-with(.,'{docs_url}')]]" diff --git a/scripts/ui-tests/docker-compose.yaml b/scripts/ui-tests/docker-compose.yaml index f5a66ab33..952b0f382 100644 --- a/scripts/ui-tests/docker-compose.yaml +++ b/scripts/ui-tests/docker-compose.yaml @@ -8,7 +8,7 @@ services: selenium: shm_size: '2gb' - image: ghcr.io/emqx/selenium-chrome:latest + image: ghcr.io/emqx/selenium-chrome:1.0.0 volumes: - ./:/app depends_on: