diff --git a/.ci/docker-compose-file/docker-compose-emqx-cluster.yaml b/.ci/docker-compose-file/docker-compose-emqx-cluster.yaml index 65bf4faf8..5745dcf5b 100644 --- a/.ci/docker-compose-file/docker-compose-emqx-cluster.yaml +++ b/.ci/docker-compose-file/docker-compose-emqx-cluster.yaml @@ -19,7 +19,9 @@ services: - emqx2 volumes: - ./haproxy/haproxy.cfg:/usr/local/etc/haproxy/haproxy.cfg - - ../../apps/emqx/etc/certs:/usr/local/etc/haproxy/certs + - ../../apps/emqx/etc/certs/cert.pem:/usr/local/etc/haproxy/certs/cert.pem + - ../../apps/emqx/etc/certs/key.pem:/usr/local/etc/haproxy/certs/key.pem + - ../../apps/emqx/etc/certs/cacert.pem:/usr/local/etc/haproxy/certs/cacert.pem ports: - "18083:18083" # - "1883:1883" @@ -34,7 +36,7 @@ services: - -c - | set -x - cat /usr/local/etc/haproxy/certs/cert.pem /usr/local/etc/haproxy/certs/key.pem > /tmp/emqx.pem + cat /usr/local/etc/haproxy/certs/cert.pem /usr/local/etc/haproxy/certs/key.pem > /var/lib/haproxy/emqx.pem haproxy -f /usr/local/etc/haproxy/haproxy.cfg emqx1: diff --git a/.ci/docker-compose-file/docker-compose-ldap-tcp.yaml b/.ci/docker-compose-file/docker-compose-ldap.yaml similarity index 80% rename from .ci/docker-compose-file/docker-compose-ldap-tcp.yaml rename to .ci/docker-compose-file/docker-compose-ldap.yaml index 61eab91ec..e6c8ba2d8 100644 --- a/.ci/docker-compose-file/docker-compose-ldap-tcp.yaml +++ b/.ci/docker-compose-file/docker-compose-ldap.yaml @@ -6,11 +6,11 @@ services: build: context: ../.. dockerfile: .ci/docker-compose-file/openldap/Dockerfile - args: + args: LDAP_TAG: ${LDAP_TAG} - image: openldap - ports: - - 389:389 + image: openldap + #ports: + # - 389:389 restart: always networks: - emqx_bridge diff --git a/.ci/docker-compose-file/haproxy/haproxy.cfg b/.ci/docker-compose-file/haproxy/haproxy.cfg index 1f10c4f9e..8a863c2d3 100644 --- a/.ci/docker-compose-file/haproxy/haproxy.cfg +++ b/.ci/docker-compose-file/haproxy/haproxy.cfg @@ -83,13 +83,13 @@ backend emqx_ws_back frontend emqx_ssl mode tcp option tcplog - bind *:8883 ssl crt /tmp/emqx.pem ca-file /usr/local/etc/haproxy/certs/cacert.pem verify required no-sslv3 + bind *:8883 ssl crt /var/lib/haproxy/emqx.pem ca-file /usr/local/etc/haproxy/certs/cacert.pem verify required no-sslv3 default_backend emqx_ssl_back frontend emqx_wss mode tcp option tcplog - bind *:8084 ssl crt /tmp/emqx.pem ca-file /usr/local/etc/haproxy/certs/cacert.pem verify required no-sslv3 + bind *:8084 ssl crt /var/lib/haproxy/emqx.pem ca-file /usr/local/etc/haproxy/certs/cacert.pem verify required no-sslv3 default_backend emqx_wss_back backend emqx_ssl_back diff --git a/.ci/docker-compose-file/openldap/Dockerfile b/.ci/docker-compose-file/openldap/Dockerfile index 88a096066..dd0114b64 100644 --- a/.ci/docker-compose-file/openldap/Dockerfile +++ b/.ci/docker-compose-file/openldap/Dockerfile @@ -1,18 +1,20 @@ -FROM buildpack-deps:stretch +FROM buildpack-deps:bookworm -ARG LDAP_TAG=2.4.50 +ARG LDAP_TAG=2.5.16 RUN apt-get update && apt-get install -y groff groff-base -RUN wget ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-${LDAP_TAG}.tgz \ - && gunzip -c openldap-${LDAP_TAG}.tgz | tar xvfB - \ +RUN wget https://www.openldap.org/software/download/OpenLDAP/openldap-release/openldap-${LDAP_TAG}.tgz \ + && tar xvzf openldap-${LDAP_TAG}.tgz \ && cd openldap-${LDAP_TAG} \ && ./configure && make depend && make && make install \ && cd .. 
&& rm -rf openldap-${LDAP_TAG} COPY .ci/docker-compose-file/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf -COPY apps/emqx_authn/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif -COPY apps/emqx_authn/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema -COPY apps/emqx_authn/test/data/certs/*.pem /usr/local/etc/openldap/ +COPY apps/emqx_ldap/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif +COPY apps/emqx_ldap/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema +COPY .ci/docker-compose-file/certs/ca.crt /usr/local/etc/openldap/cacert.pem +COPY .ci/docker-compose-file/certs/server.crt /usr/local/etc/openldap/cert.pem +COPY .ci/docker-compose-file/certs/server.key /usr/local/etc/openldap/key.pem RUN mkdir -p /usr/local/etc/openldap/data \ && slapadd -l /usr/local/etc/openldap/schema/emqx.io.ldif -f /usr/local/etc/openldap/slapd.conf diff --git a/.github/actions/prepare-jmeter/action.yaml b/.github/actions/prepare-jmeter/action.yaml new file mode 100644 index 000000000..e3d30af1a --- /dev/null +++ b/.github/actions/prepare-jmeter/action.yaml @@ -0,0 +1,49 @@ +name: 'Prepare jmeter' + +inputs: + version-emqx: + required: true + type: string + +runs: + using: composite + steps: + - uses: actions/download-artifact@v3 + with: + name: emqx-docker + path: /tmp + - name: load docker image + shell: bash + env: + PKG_VSN: ${{ inputs.version-emqx }} + run: | + EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g') + echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV + - uses: actions/checkout@v3 + with: + repository: emqx/emqx-fvt + ref: broker-autotest-v5 + path: scripts + - uses: actions/setup-java@v3 + with: + java-version: '8.0.282' # The JDK version to make available on the path. + java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk + architecture: x64 # (x64 or x86) - defaults to x64 + # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md + distribution: 'zulu' + - uses: actions/download-artifact@v3 + with: + name: apache-jmeter.tgz + - name: install jmeter + shell: bash + env: + JMETER_VERSION: 5.4.3 + run: | + tar -xf apache-jmeter.tgz + ln -s apache-jmeter-$JMETER_VERSION jmeter + echo "jmeter.save.saveservice.output_format=xml" >> jmeter/user.properties + echo "jmeter.save.saveservice.response_data.on_error=true" >> jmeter/user.properties + cd jmeter/lib/ext + wget --no-verbose https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar + wget --no-verbose https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar + wget --no-verbose https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.18/postgresql-42.2.18.jar diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index a12aeb012..d75661a8a 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -9,10 +9,11 @@ copilot:summary Please convert it to a draft if any of the following conditions are not met. 
Reviewers may skip over until all the items are checked: - [ ] Added tests for the changes +- [ ] Added property-based tests for code which performs user input validation - [ ] Changed lines covered in coverage report - [ ] Change log has been added to `changes/(ce|ee)/(feat|perf|fix)-.en.md` files - [ ] For internal contributor: there is a jira ticket to track this change -- [ ] If there should be document changes, a PR to emqx-docs.git is sent, or a jira ticket is created to follow up +- [ ] Created PR to [emqx-docs](https://github.com/emqx/emqx-docs) if documentation update is required, or link to a follow-up jira ticket - [ ] Schema changes are backward compatible ## Checklist for CI (.github/workflows) changes diff --git a/.github/workflows/_pr_entrypoint.yaml b/.github/workflows/_pr_entrypoint.yaml new file mode 100644 index 000000000..ec2bbf2e1 --- /dev/null +++ b/.github/workflows/_pr_entrypoint.yaml @@ -0,0 +1,242 @@ +name: PR Entrypoint + +concurrency: + group: pr-entrypoint-${{ github.event_name }}-${{ github.ref }} + cancel-in-progress: true + +on: + pull_request: + workflow_dispatch: + inputs: + ref: + required: false + +env: + IS_CI: "yes" + +jobs: + sanity-checks: + runs-on: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }} + container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" + outputs: + ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} + ct-host: ${{ steps.matrix.outputs.ct-host }} + ct-docker: ${{ steps.matrix.outputs.ct-docker }} + version-emqx: ${{ steps.matrix.outputs.version-emqx }} + version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }} + runner: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }} + builder: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" + builder_vsn: "5.1-3" + otp_vsn: "25.3.2-1" + elixir_vsn: "1.14.5" + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.inputs.ref }} + fetch-depth: 0 + - name: Work around https://github.com/actions/checkout/issues/766 + run: | + git config --global --add safe.directory "$GITHUB_WORKSPACE" + - name: Run gitlint + env: + BEFORE_REF: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }} + AFTER_REF: ${{ github.sha }} + run: | + pip install gitlint + gitlint --commits $BEFORE_REF..$AFTER_REF --config .github/workflows/.gitlint + - name: Run shellcheck + run: | + DEBIAN_FRONTEND=noninteractive apt-get update -qy && apt-get install -qy shellcheck + ./scripts/shellcheck.sh + - name: Run shell tests + run: | + DEBIAN_FRONTEND=noninteractive apt-get update -qy && apt-get install -qy shelltestrunner + scripts/shelltest/run_tests.sh + - name: Check workflow files + env: + ACTIONLINT_VSN: 1.6.25 + run: | + wget https://github.com/rhysd/actionlint/releases/download/v${ACTIONLINT_VSN}/actionlint_${ACTIONLINT_VSN}_linux_amd64.tar.gz + tar zxf actionlint_${ACTIONLINT_VSN}_linux_amd64.tar.gz actionlint + # TODO: enable shellcheck when all the current issues are fixed + ./actionlint -color \ + -shellcheck= \ + -ignore 'label ".+" is unknown' \ + -ignore 'value "emqx-enterprise" in "exclude"' + - name: Check line-break at EOF + run: | + ./scripts/check-nl-at-eof.sh + - name: Check apps version + run: | + ./scripts/apps-version-check.sh + - name: Setup mix + env: + MIX_ENV: emqx-enterprise + PROFILE: emqx-enterprise + run: | + mix local.hex --force --if-missing && mix local.rebar --force --if-missing + - name: Check formatting + env: + MIX_ENV: emqx-enterprise + 
PROFILE: emqx-enterprise + run: | + ./scripts/check-format.sh + - name: Run elvis check + run: | + ./scripts/elvis-check.sh $GITHUB_BASE_REF + - name: Generate CT Matrix + id: matrix + run: | + APPS="$(./scripts/find-apps.sh --ci)" + MATRIX="$(echo "${APPS}" | jq -c ' + [ + (.[] | select(.profile == "emqx") | . + { + builder: "5.1-3", + otp: "25.3.2-1", + elixir: "1.14.5" + }), + (.[] | select(.profile == "emqx-enterprise") | . + { + builder: "5.1-3", + otp: ["25.3.2-1"][], + elixir: "1.14.5" + }) + ] + ')" + echo "${MATRIX}" | jq + CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')" + CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" + CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" + echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT + echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT + echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT + echo "version-emqx=$(./pkg-vsn.sh emqx)" | tee -a $GITHUB_OUTPUT + echo "version-emqx-enterprise=$(./pkg-vsn.sh emqx-enterprise)" | tee -a $GITHUB_OUTPUT + + compile: + runs-on: ${{ needs.sanity-checks.outputs.runner }} + container: ${{ needs.sanity-checks.outputs.builder }} + needs: + - sanity-checks + strategy: + matrix: + profile: + - emqx + - emqx-enterprise + + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Work around https://github.com/actions/checkout/issues/766 + run: | + git config --global --add safe.directory "$GITHUB_WORKSPACE" + - id: compile + env: + PROFILE: ${{ matrix.profile }} + ENABLE_COVER_COMPILE: 1 + run: | + make ensure-rebar3 + make ${PROFILE} + make test-compile + zip -ryq $PROFILE.zip . + - uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.profile }} + path: ${{ matrix.profile }}.zip + retention-days: 1 + + run_test_cases: + needs: + - sanity-checks + - compile + uses: ./.github/workflows/run_test_cases.yaml + with: + runner: ${{ needs.sanity-checks.outputs.runner }} + builder: ${{ needs.sanity-checks.outputs.builder }} + ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }} + ct-host: ${{ needs.sanity-checks.outputs.ct-host }} + ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }} + + static_checks: + needs: + - sanity-checks + - compile + uses: ./.github/workflows/static_checks.yaml + with: + runner: ${{ needs.sanity-checks.outputs.runner }} + builder: ${{ needs.sanity-checks.outputs.builder }} + ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }} + + build_slim_packages: + needs: + - sanity-checks + uses: ./.github/workflows/build_slim_packages.yaml + with: + runner: ${{ needs.sanity-checks.outputs.runner }} + builder: ${{ needs.sanity-checks.outputs.builder }} + builder_vsn: ${{ needs.sanity-checks.outputs.builder_vsn }} + otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }} + elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }} + + build_docker_for_test: + needs: + - sanity-checks + uses: ./.github/workflows/build_docker_for_test.yaml + with: + otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }} + elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }} + version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} + version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }} + + spellcheck: + needs: + - sanity-checks + - build_slim_packages + uses: ./.github/workflows/spellcheck.yaml + with: + runner: ${{ needs.sanity-checks.outputs.runner }} + + run_conf_tests: + needs: + - sanity-checks + - compile + uses: ./.github/workflows/run_conf_tests.yaml + with: + 
runner: ${{ needs.sanity-checks.outputs.runner }} + builder: ${{ needs.sanity-checks.outputs.builder }} + + check_deps_integrity: + needs: + - sanity-checks + uses: ./.github/workflows/check_deps_integrity.yaml + with: + runner: ${{ needs.sanity-checks.outputs.runner }} + builder: ${{ needs.sanity-checks.outputs.builder }} + + run_jmeter_tests: + needs: + - sanity-checks + - build_docker_for_test + uses: ./.github/workflows/run_jmeter_tests.yaml + with: + version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} + + run_docker_tests: + needs: + - sanity-checks + - build_docker_for_test + uses: ./.github/workflows/run_docker_tests.yaml + with: + runner: ${{ needs.sanity-checks.outputs.runner }} + version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} + version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }} + + run_helm_tests: + needs: + - sanity-checks + - build_docker_for_test + uses: ./.github/workflows/run_helm_tests.yaml + with: + version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} + version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }} diff --git a/.github/workflows/_push-entrypoint.yaml b/.github/workflows/_push-entrypoint.yaml new file mode 100644 index 000000000..9faceebbc --- /dev/null +++ b/.github/workflows/_push-entrypoint.yaml @@ -0,0 +1,191 @@ +name: Push Entrypoint + +concurrency: + group: push-entrypoint-${{ github.event_name }}-${{ github.ref }} + cancel-in-progress: true + +on: + push: + tags: + - 'v*' + - 'e*' + branches: + - 'master' + - 'release-51' + - 'ci/**' + +env: + IS_CI: 'yes' + +jobs: + prepare: + runs-on: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }} + container: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04' + outputs: + profile: ${{ steps.parse-git-ref.outputs.profile }} + edition: ${{ steps.parse-git-ref.outputs.edition }} + release: ${{ steps.parse-git-ref.outputs.release }} + latest: ${{ steps.parse-git-ref.outputs.latest }} + version: ${{ steps.parse-git-ref.outputs.version }} + ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} + ct-host: ${{ steps.matrix.outputs.ct-host }} + ct-docker: ${{ steps.matrix.outputs.ct-docker }} + runner: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }} + builder: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04' + builder_vsn: '5.1-3' + otp_vsn: '25.3.2-1' + elixir_vsn: '1.14.5' + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.inputs.ref }} + fetch-depth: 0 + - name: Work around https://github.com/actions/checkout/issues/766 + shell: bash + run: | + git config --global --add safe.directory "$GITHUB_WORKSPACE" + - name: Detect emqx profile and version + id: parse-git-ref + run: | + JSON="$(./scripts/parse-git-ref.sh $GITHUB_REF)" + PROFILE=$(echo "$JSON" | jq -cr '.profile') + EDITION=$(echo "$JSON" | jq -cr '.edition') + RELEASE=$(echo "$JSON" | jq -cr '.release') + LATEST=$(echo "$JSON" | jq -cr '.latest') + VERSION="$(./pkg-vsn.sh "$PROFILE")" + echo "profile=$PROFILE" | tee -a $GITHUB_OUTPUT + echo "edition=$EDITION" | tee -a $GITHUB_OUTPUT + echo "release=$RELEASE" | tee -a $GITHUB_OUTPUT + echo "latest=$LATEST" | tee -a $GITHUB_OUTPUT + echo "version=$VERSION" | tee -a $GITHUB_OUTPUT + - name: Build matrix + id: matrix + run: | + APPS="$(./scripts/find-apps.sh --ci)" + MATRIX="$(echo "${APPS}" | jq -c ' + [ + (.[] | select(.profile == "emqx") | . 
+ { + builder: "5.1-3", + otp: "25.3.2-1", + elixir: "1.14.5" + }), + (.[] | select(.profile == "emqx-enterprise") | . + { + builder: "5.1-3", + otp: ["25.3.2-1"][], + elixir: "1.14.5" + }) + ] + ')" + echo "${MATRIX}" | jq + CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')" + CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" + CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" + echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT + echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT + echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT + + build_slim_packages: + if: ${{ needs.prepare.outputs.release != 'true' }} + needs: + - prepare + uses: ./.github/workflows/build_slim_packages.yaml + with: + runner: ${{ needs.prepare.outputs.runner }} + builder: ${{ needs.prepare.outputs.builder }} + builder_vsn: ${{ needs.prepare.outputs.builder_vsn }} + otp_vsn: ${{ needs.prepare.outputs.otp_vsn }} + elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }} + + build_packages: + if: ${{ needs.prepare.outputs.release == 'true' }} + needs: + - prepare + uses: ./.github/workflows/build_packages.yaml + with: + profile: ${{ needs.prepare.outputs.profile }} + publish: ${{ needs.prepare.outputs.release }} + otp_vsn: ${{ needs.prepare.outputs.otp_vsn }} + elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }} + runner: ${{ needs.prepare.outputs.runner }} + builder_vsn: ${{ needs.prepare.outputs.builder_vsn }} + + build_and_push_docker_images: + if: ${{ needs.prepare.outputs.release == 'true' }} + needs: + - prepare + uses: ./.github/workflows/build_and_push_docker_images.yaml + with: + profile: ${{ needs.prepare.outputs.profile }} + edition: ${{ needs.prepare.outputs.edition }} + version: ${{ needs.prepare.outputs.version }} + latest: ${{ needs.prepare.outputs.latest }} + publish: ${{ needs.prepare.outputs.release }} + otp_vsn: ${{ needs.prepare.outputs.otp_vsn }} + elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }} + runner: ${{ needs.prepare.outputs.runner }} + builder_vsn: ${{ needs.prepare.outputs.builder_vsn }} + + compile: + runs-on: ${{ needs.prepare.outputs.runner }} + container: ${{ needs.prepare.outputs.builder }} + needs: + - prepare + strategy: + matrix: + profile: + - emqx + - emqx-enterprise + + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.inputs.ref }} + fetch-depth: 0 + - name: Work around https://github.com/actions/checkout/issues/766 + run: | + git config --global --add safe.directory "$GITHUB_WORKSPACE" + - id: compile + env: + PROFILE: ${{ matrix.profile }} + ENABLE_COVER_COMPILE: 1 + run: | + make $PROFILE + zip -ryq $PROFILE.zip . 
+ - uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.profile }} + path: ${{ matrix.profile }}.zip + retention-days: 1 + + run_test_cases: + needs: + - prepare + - compile + uses: ./.github/workflows/run_test_cases.yaml + with: + runner: ${{ needs.prepare.outputs.runner }} + builder: ${{ needs.prepare.outputs.builder }} + ct-matrix: ${{ needs.prepare.outputs.ct-matrix }} + ct-host: ${{ needs.prepare.outputs.ct-host }} + ct-docker: ${{ needs.prepare.outputs.ct-docker }} + + run_conf_tests: + needs: + - prepare + - compile + uses: ./.github/workflows/run_conf_tests.yaml + with: + runner: ${{ needs.prepare.outputs.runner }} + builder: ${{ needs.prepare.outputs.builder }} + + static_checks: + needs: + - prepare + - compile + uses: ./.github/workflows/static_checks.yaml + with: + runner: ${{ needs.prepare.outputs.runner }} + builder: ${{ needs.prepare.outputs.builder }} + ct-matrix: ${{ needs.prepare.outputs.ct-matrix }} + diff --git a/.github/workflows/apps_version_check.yaml b/.github/workflows/apps_version_check.yaml deleted file mode 100644 index 52c467786..000000000 --- a/.github/workflows/apps_version_check.yaml +++ /dev/null @@ -1,14 +0,0 @@ -name: Check Apps Version - -on: [pull_request] - -jobs: - check_apps_version: - runs-on: ubuntu-22.04 - - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Check apps version - run: ./scripts/apps-version-check.sh diff --git a/.github/workflows/build_and_push_docker_images.yaml b/.github/workflows/build_and_push_docker_images.yaml index e8066b8bc..33128e40d 100644 --- a/.github/workflows/build_and_push_docker_images.yaml +++ b/.github/workflows/build_and_push_docker_images.yaml @@ -5,138 +5,102 @@ concurrency: cancel-in-progress: true on: - push: - tags: - - v* - - e* - - docker-latest-* + workflow_call: + inputs: + profile: + required: true + type: string + edition: + required: true + type: string + version: + required: true + type: string + latest: + required: true + type: string + publish: + required: true + type: string + otp_vsn: + required: true + type: string + elixir_vsn: + required: true + type: string + runner: + required: true + type: string + builder_vsn: + required: true + type: string workflow_dispatch: inputs: - branch_or_tag: + ref: required: false + version: + required: true + type: string profile: required: false + type: string default: 'emqx' - is_latest: + edition: required: false + type: string + default: 'Opensource' + latest: + required: false + type: boolean default: false + publish: + required: false + type: boolean + default: false + otp_vsn: + required: false + type: string + default: '25.3.2-1' + elixir_vsn: + required: false + type: string + default: '1.14.5' + runner: + required: false + type: string + default: 'ubuntu-22.04' + builder_vsn: + required: false + type: string + default: '5.1-3' jobs: - prepare: - runs-on: ubuntu-22.04 - # prepare source with any OTP version, no need for a matrix - container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" - - outputs: - PROFILE: ${{ steps.get_profile.outputs.PROFILE }} - EDITION: ${{ steps.get_profile.outputs.EDITION }} - IS_LATEST: ${{ steps.get_profile.outputs.IS_LATEST }} - IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }} - VERSION: ${{ steps.get_profile.outputs.VERSION }} - - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.inputs.branch_or_tag }} # when input is not given, the event tag is used - path: source - fetch-depth: 0 - - - name: Get profiles to build - id: get_profile - env: - 
INPUTS_PROFILE: ${{ github.event.inputs.profile }} - run: | - cd source - # tag docker-latest-ce or docker-latest-ee - if git describe --tags --exact --match 'docker-latest-*' 2>/dev/null; then - echo 'is_latest=true due to docker-latest-* tag' - is_latest=true - elif [ "${{ inputs.is_latest }}" = "true" ]; then - echo 'is_latest=true due to manual input from workflow_dispatch' - is_latest=true - else - echo 'is_latest=false' - is_latest=false - fi - # resolve profile - if git describe --tags --match "v*" --exact; then - echo "This is an exact git tag, will publish images" - is_exact='true' - PROFILE=emqx - elif git describe --tags --match "e*" --exact; then - echo "This is an exact git tag, will publish images" - is_exact='true' - PROFILE=emqx-enterprise - else - echo "This is NOT an exact git tag, will not publish images" - is_exact='false' - fi - - case "${PROFILE:-$INPUTS_PROFILE}" in - emqx) - EDITION='Opensource' - ;; - emqx-enterprise) - EDITION='Enterprise' - ;; - *) - echo "ERROR: Failed to resolve build profile" - exit 1 - ;; - esac - - VSN="$(./pkg-vsn.sh "$PROFILE")" - echo "Building emqx/$PROFILE:$VSN image (latest=$is_latest)" - echo "Push = $is_exact" - echo "IS_LATEST=$is_latest" >> $GITHUB_OUTPUT - echo "IS_EXACT_TAG=$is_exact" >> $GITHUB_OUTPUT - echo "PROFILE=$PROFILE" >> $GITHUB_OUTPUT - echo "EDITION=$EDITION" >> $GITHUB_OUTPUT - echo "VERSION=$VSN" >> $GITHUB_OUTPUT - - name: get_all_deps - env: - PROFILE: ${{ steps.get_profile.outputs.PROFILE }} - run: | - zip -ryq source.zip source/* source/.[^.]* - - uses: actions/upload-artifact@v3 - with: - name: source - path: source.zip - docker: - runs-on: ubuntu-22.04 - needs: prepare + runs-on: ${{ inputs.runner }} strategy: fail-fast: false matrix: profile: - - "${{ needs.prepare.outputs.PROFILE }}" + - ${{ inputs.profile }} registry: - 'docker.io' - 'public.ecr.aws' os: - [debian11, "debian:11-slim", "deploy/docker/Dockerfile"] - # NOTE: 'otp' and 'elixir' are to configure emqx-builder image - # only support latest otp and elixir, not a matrix builder: - - 5.1-3 # update to latest + - ${{ inputs.builder_vsn }} otp: - - 25.3.2-1 + - ${{ inputs.otp_vsn }} elixir: - 'no_elixir' - - '1.14.5' # update to latest - exclude: # TODO: publish enterprise to ecr too? - - registry: 'public.ecr.aws' - profile: emqx-enterprise + - ${{ inputs.elixir_vsn }} steps: - - uses: actions/download-artifact@v3 + - uses: actions/checkout@v3 with: - name: source - path: . 
- - name: unzip source code - run: unzip -q source.zip + ref: ${{ github.event.inputs.ref }} + fetch-depth: 0 - uses: docker/setup-qemu-action@v2 - uses: docker/setup-buildx-action@v2 @@ -185,18 +149,18 @@ jobs: latest=${{ matrix.elixir == 'no_elixir' }} suffix=${{ steps.pre-meta.outputs.img_suffix }} tags: | - type=semver,pattern={{major}}.{{minor}},value=${{ needs.prepare.outputs.VERSION }} - type=semver,pattern={{version}},value=${{ needs.prepare.outputs.VERSION }} - type=raw,value=${{ needs.prepare.outputs.VERSION }} - type=raw,value=latest,enable=${{ needs.prepare.outputs.IS_LATEST }} + type=semver,pattern={{major}}.{{minor}},value=${{ inputs.version }} + type=semver,pattern={{version}},value=${{ inputs.version }} + type=raw,value=${{ inputs.version }} + type=raw,value=latest,enable=${{ inputs.latest }} labels: | org.opencontainers.image.otp.version=${{ matrix.otp }} - org.opencontainers.image.edition=${{ needs.prepare.outputs.EDITION }} + org.opencontainers.image.edition=${{ inputs.edition }} ${{ steps.pre-meta.outputs.extra_labels }} - uses: docker/build-push-action@v3 with: - push: ${{ needs.prepare.outputs.IS_EXACT_TAG == 'true' || github.repository_owner != 'emqx' }} + push: ${{ inputs.publish == 'true' || github.repository_owner != 'emqx' }} pull: true no-cache: true platforms: linux/amd64,linux/arm64 @@ -206,4 +170,4 @@ jobs: EMQX_NAME=${{ matrix.profile }}${{ steps.pre-meta.outputs.img_suffix }} EXTRA_DEPS=${{ steps.pre-meta.outputs.extra_deps }} file: source/${{ matrix.os[2] }} - context: source + diff --git a/.github/workflows/build_docker_for_test.yaml b/.github/workflows/build_docker_for_test.yaml new file mode 100644 index 000000000..548d5e2cd --- /dev/null +++ b/.github/workflows/build_docker_for_test.yaml @@ -0,0 +1,61 @@ +name: Build docker image for test + +concurrency: + group: docker-test-build-${{ github.event_name }}-${{ github.ref }} + cancel-in-progress: true + +on: + workflow_call: + inputs: + otp_vsn: + required: true + type: string + elixir_vsn: + required: true + type: string + version-emqx: + required: true + type: string + version-emqx-enterprise: + required: true + type: string + +jobs: + docker: + runs-on: ubuntu-latest + env: + EMQX_NAME: ${{ matrix.profile }} + PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} + OTP_VSN: ${{ inputs.otp_vsn }} + ELIXIR_VSN: ${{ inputs.elixir_vsn }} + + strategy: + fail-fast: false + matrix: + profile: + - emqx + - emqx-enterprise + - emqx-elixir + + steps: + - uses: actions/checkout@v3 + - name: build and export to Docker + id: build + run: | + make ${EMQX_NAME}-docker + echo "EMQX_IMAGE_TAG=$(cat .docker_image_tag)" >> $GITHUB_ENV + - name: smoke test + run: | + CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG) + HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID) + ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT + docker stop $CID + - name: export docker image + run: | + docker save $EMQX_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz + - uses: actions/upload-artifact@v3 + with: + name: "${{ env.EMQX_NAME }}-docker" + path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz" + retention-days: 3 + diff --git a/.github/workflows/build_packages.yaml b/.github/workflows/build_packages.yaml index bb6b46612..94841edc8 100644 --- a/.github/workflows/build_packages.yaml +++ b/.github/workflows/build_packages.yaml @@ -1,81 +1,61 @@ name: Cross build packages concurrency: - group: build-${{ 
github.event_name }}-${{ github.ref }} + group: build-packages-${{ github.event_name }}-${{ github.ref }} cancel-in-progress: true on: - push: - branches: - - 'ci/**' - tags: - - v* - - e* + workflow_call: + inputs: + profile: + required: true + type: string + publish: + required: true + type: string + otp_vsn: + required: true + type: string + elixir_vsn: + required: true + type: string + runner: + required: true + type: string + builder_vsn: + required: true + type: string workflow_dispatch: inputs: - branch_or_tag: + ref: required: false profile: required: false + publish: + required: false + type: boolean + default: false + otp_vsn: + required: false + type: string + default: '25.3.2-1' + elixir_vsn: + required: false + type: string + default: '1.14.5' + runner: + required: false + type: string + default: 'ubuntu-22.04' + builder_vsn: + required: false + type: string + default: '5.1-3' jobs: - prepare: - runs-on: ubuntu-22.04 - container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04 - outputs: - BUILD_PROFILE: ${{ steps.get_profile.outputs.BUILD_PROFILE }} - IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }} - VERSION: ${{ steps.get_profile.outputs.VERSION }} - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.inputs.branch_or_tag }} # when input is not given, the event tag is used - fetch-depth: 0 - - - name: Get profile to build - id: get_profile - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - tag=${{ github.ref }} - if git describe --tags --match "[v|e]*" --exact; then - echo "WARN: This is an exact git tag, will publish release" - is_exact_tag='true' - else - echo "WARN: This is NOT an exact git tag, will not publish release" - is_exact_tag='false' - fi - echo "IS_EXACT_TAG=${is_exact_tag}" >> $GITHUB_OUTPUT - case $tag in - refs/tags/v*) - PROFILE='emqx' - ;; - refs/tags/e*) - PROFILE=emqx-enterprise - ;; - *) - PROFILE=${{ github.event.inputs.profile }} - case "$PROFILE" in - emqx) - true - ;; - emqx-enterprise) - true - ;; - *) - # maybe triggered from schedule - echo "WARN: \"$PROFILE\" is not a valid profile." - echo "building the default profile 'emqx' instead" - PROFILE='emqx' - ;; - esac - ;; - esac - echo "BUILD_PROFILE=$PROFILE" >> $GITHUB_OUTPUT - echo "VERSION=$(./pkg-vsn.sh $PROFILE)" >> $GITHUB_OUTPUT - windows: runs-on: windows-2019 - if: startsWith(github.ref_name, 'v') + if: inputs.profile == 'emqx' strategy: fail-fast: false matrix: @@ -84,11 +64,11 @@ jobs: steps: - uses: actions/checkout@v3 with: - ref: ${{ github.event.inputs.branch_or_tag }} + ref: ${{ github.event.inputs.ref }} fetch-depth: 0 - uses: ilammy/msvc-dev-cmd@v1.12.0 - - uses: erlef/setup-beam@v1.15.4 + - uses: erlef/setup-beam@v1.16.0 with: otp-version: 25.3.2 - name: build @@ -125,14 +105,13 @@ jobs: path: _packages/${{ matrix.profile }}/ mac: - needs: prepare strategy: fail-fast: false matrix: profile: - - ${{ needs.prepare.outputs.BUILD_PROFILE }} + - ${{ inputs.profile }} otp: - - 25.3.2-1 + - ${{ inputs.otp_vsn }} os: - macos-11 - macos-12 @@ -142,7 +121,7 @@ jobs: - uses: emqx/self-hosted-cleanup-action@v1.0.3 - uses: actions/checkout@v3 with: - ref: ${{ github.event.inputs.branch_or_tag }} + ref: ${{ github.event.inputs.ref }} fetch-depth: 0 - uses: ./.github/actions/package-macos with: @@ -160,7 +139,6 @@ jobs: path: _packages/${{ matrix.profile }}/ linux: - needs: prepare runs-on: ${{ matrix.build_machine }} # always run in builder container because the host might have the wrong OTP version etc. 
# otherwise buildx.sh does not run docker if arch and os matches the target arch and os. @@ -171,9 +149,9 @@ jobs: fail-fast: false matrix: profile: - - ${{ needs.prepare.outputs.BUILD_PROFILE }} + - ${{ inputs.profile }} otp: - - 25.3.2-1 + - ${{ inputs.otp_vsn }} arch: - amd64 - arm64 @@ -193,9 +171,9 @@ jobs: - aws-arm64 - aws-amd64 builder: - - 5.1-3 + - ${{ inputs.builder_vsn }} elixir: - - 1.14.5 + - ${{ inputs.elixir_vsn }} with_elixir: - 'no' exclude: @@ -205,12 +183,12 @@ jobs: build_machine: aws-arm64 include: - profile: emqx - otp: 25.3.2-1 + otp: ${{ inputs.otp_vsn }} arch: amd64 os: ubuntu22.04 build_machine: aws-amd64 - builder: 5.1-3 - elixir: 1.14.5 + builder: ${{ inputs.builder_vsn }} + elixir: ${{ inputs.elixir_vsn }} with_elixir: 'yes' defaults: @@ -222,7 +200,7 @@ jobs: - uses: actions/checkout@v3 with: - ref: ${{ github.event.inputs.branch_or_tag }} + ref: ${{ github.event.inputs.ref }} fetch-depth: 0 - name: fix workdir @@ -267,14 +245,16 @@ jobs: path: _packages/${{ matrix.profile }}/ publish_artifacts: - runs-on: ubuntu-22.04 - needs: [prepare, mac, linux] - if: needs.prepare.outputs.IS_EXACT_TAG == 'true' + runs-on: ${{ inputs.runner }} + needs: + - mac + - linux + if: ${{ inputs.publish == 'true' }} strategy: fail-fast: false matrix: profile: - - ${{ needs.prepare.outputs.BUILD_PROFILE }} + - ${{ inputs.profile }} steps: - uses: actions/download-artifact@v3 with: @@ -284,7 +264,7 @@ jobs: run: sudo apt-get update && sudo apt install -y dos2unix - name: get packages run: | - set -e -u + set -eu cd packages/${{ matrix.profile }} # fix the .sha256 file format for var in $(ls | grep emqx | grep -v sha256); do diff --git a/.github/workflows/build_packages_cron.yaml b/.github/workflows/build_packages_cron.yaml index 09f68c256..b245078da 100644 --- a/.github/workflows/build_packages_cron.yaml +++ b/.github/workflows/build_packages_cron.yaml @@ -29,7 +29,7 @@ jobs: arch: - amd64 os: - - debian10 + - ubuntu22.04 - amzn2023 builder: - 5.1-3 @@ -94,7 +94,7 @@ jobs: otp: - 25.3.2-1 os: - - macos-12 + - macos-13 - macos-12-arm64 steps: @@ -117,6 +117,7 @@ jobs: with: name: ${{ matrix.profile }} path: _packages/${{ matrix.profile }}/ + retention-days: 7 - name: Send notification to Slack uses: slackapi/slack-github-action@v1.23.0 if: failure() @@ -125,3 +126,59 @@ jobs: with: payload: | {"text": "Scheduled build of ${{ matrix.profile }} package for ${{ matrix.os }} failed: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"} + + windows: + if: github.repository_owner == 'emqx' + runs-on: windows-2019 + strategy: + fail-fast: false + matrix: + profile: + - emqx + otp: + - 25.3.2 + steps: + - uses: actions/checkout@v3 + - uses: ilammy/msvc-dev-cmd@v1.12.0 + - uses: erlef/setup-beam@v1.16.0 + with: + otp-version: ${{ matrix.otp }} + - name: build + env: + PYTHON: python + DIAGNOSTIC: 1 + run: | + # ensure crypto app (openssl) + erl -eval "erlang:display(crypto:info_lib())" -s init stop + make ${{ matrix.profile }}-tgz + - name: run emqx + timeout-minutes: 5 + run: | + ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start + Start-Sleep -s 10 + $pingOutput = ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx ping + if ($pingOutput = 'pong') { + echo "EMQX started OK" + } else { + echo "Failed to ping EMQX $pingOutput" + Exit 1 + } + ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop + echo "EMQX stopped" + ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx install + echo "EMQX installed" + ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx uninstall + echo 
"EMQX uninstalled" + - uses: actions/upload-artifact@v3 + with: + name: windows + path: _packages/${{ matrix.profile }}/* + retention-days: 7 + - name: Send notification to Slack + uses: slackapi/slack-github-action@v1.23.0 + if: failure() + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + with: + payload: | + {"text": "Scheduled build of ${{ matrix.profile }} package for Windows failed: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"} diff --git a/.github/workflows/build_slim_packages.yaml b/.github/workflows/build_slim_packages.yaml index a955b4a9b..b7ba78ef4 100644 --- a/.github/workflows/build_slim_packages.yaml +++ b/.github/workflows/build_slim_packages.yaml @@ -5,54 +5,73 @@ concurrency: cancel-in-progress: true on: - push: - branches: - - master - - release-51 - pull_request: - # GitHub pull_request action is by default triggered when - # opened reopened or synchronize, - # we add labeled and unlabeled to the list because - # the mac job dpends on the PR having a 'Mac' label - types: - - labeled - - unlabeled - - opened - - reopened - - synchronize + workflow_call: + inputs: + runner: + required: true + type: string + builder: + required: true + type: string + builder_vsn: + required: true + type: string + otp_vsn: + required: true + type: string + elixir_vsn: + required: true + type: string + workflow_dispatch: + inputs: + ref: + required: false + runner: + required: false + type: string + default: 'ubuntu-22.04' + builder: + required: false + type: string + default: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04' + builder_vsn: + required: false + type: string + default: '5.1-3' + otp_vsn: + required: false + type: string + default: '25.3.2-1' + elixir_vsn: + required: false + type: string + default: '1.14.5' jobs: linux: - runs-on: aws-amd64 + runs-on: ${{ inputs.runner }} + env: + EMQX_NAME: ${{ matrix.profile[0] }} strategy: fail-fast: false matrix: profile: - - ["emqx", "25.3.2-1", "el7", "erlang"] - - ["emqx", "25.3.2-1", "ubuntu22.04", "elixir"] - - ["emqx-enterprise", "25.3.2-1", "amzn2023", "erlang"] + - ["emqx", "25.3.2-1", "ubuntu20.04", "elixir"] - ["emqx-enterprise", "25.3.2-1", "ubuntu20.04", "erlang"] - builder: - - 5.1-3 - elixir: - - '1.14.5' - container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" + container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" steps: - uses: AutoModality/action-clean@v1 - uses: actions/checkout@v3 with: fetch-depth: 0 - - name: prepare - run: | - echo "EMQX_NAME=${{ matrix.profile[0] }}" >> $GITHUB_ENV - echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV - name: Work around https://github.com/actions/checkout/issues/766 run: | git config --global --add safe.directory "$GITHUB_WORKSPACE" + echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV - name: build and test tgz package if: matrix.profile[3] == 'erlang' run: | @@ -77,58 +96,14 @@ jobs: with: name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" path: _packages/${{ matrix.profile[0] }}/* + retention-days: 7 - uses: actions/upload-artifact@v3 with: name: "${{ matrix.profile[0] }}_schema_dump" path: | scripts/spellcheck _build/docgen/${{ matrix.profile[0] }}/schema-en.json - - windows: - runs-on: windows-2019 - strategy: - fail-fast: false - matrix: - profile: - - emqx - otp: - - 25.3.2 - steps: - - uses: actions/checkout@v3 - - uses: 
ilammy/msvc-dev-cmd@v1.12.0 - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: ${{ matrix.otp }} - - name: build - env: - PYTHON: python - DIAGNOSTIC: 1 - run: | - # ensure crypto app (openssl) - erl -eval "erlang:display(crypto:info_lib())" -s init stop - make ${{ matrix.profile }}-tgz - - name: run emqx - timeout-minutes: 5 - run: | - ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start - Start-Sleep -s 10 - $pingOutput = ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx ping - if ($pingOutput = 'pong') { - echo "EMQX started OK" - } else { - echo "Failed to ping EMQX $pingOutput" - Exit 1 - } - ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop - echo "EMQX stopped" - ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx install - echo "EMQX installed" - ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx uninstall - echo "EMQX uninstalled" - - uses: actions/upload-artifact@v3 - with: - name: windows - path: _packages/${{ matrix.profile }}/* + retention-days: 7 mac: strategy: @@ -136,20 +111,18 @@ jobs: matrix: profile: - emqx - - emqx-enterprise otp: - - 25.3.2-1 + - ${{ inputs.otp_vsn }} os: - macos-11 - macos-12-arm64 runs-on: ${{ matrix.os }} + env: + EMQX_NAME: ${{ matrix.profile }} steps: - uses: actions/checkout@v3 - - name: prepare - run: | - echo "EMQX_NAME=${{ matrix.profile }}" >> $GITHUB_ENV - uses: ./.github/actions/package-macos with: profile: ${{ matrix.profile }} @@ -163,84 +136,4 @@ jobs: with: name: ${{ matrix.os }} path: _packages/**/* - - docker: - runs-on: aws-amd64 - - strategy: - fail-fast: false - matrix: - profile: - - ["emqx", "5.0.16"] - - ["emqx-enterprise", "5.0.1"] - - steps: - - uses: actions/checkout@v3 - - name: prepare - run: | - EMQX_NAME=${{ matrix.profile[0] }} - PKG_VSN=${PKG_VSN:-$(./pkg-vsn.sh $EMQX_NAME)} - EMQX_IMAGE_TAG=emqx/$EMQX_NAME:test - EMQX_IMAGE_OLD_VERSION_TAG=emqx/$EMQX_NAME:${{ matrix.profile[1] }} - echo "EMQX_NAME=$EMQX_NAME" >> $GITHUB_ENV - echo "PKG_VSN=$PKG_VSN" >> $GITHUB_ENV - echo "EMQX_IMAGE_TAG=$EMQX_IMAGE_TAG" >> $GITHUB_ENV - echo "EMQX_IMAGE_OLD_VERSION_TAG=$EMQX_IMAGE_OLD_VERSION_TAG" >> $GITHUB_ENV - - uses: docker/setup-buildx-action@v2 - - name: build and export to Docker - uses: docker/build-push-action@v4 - with: - context: . 
- file: ./deploy/docker/Dockerfile - load: true - tags: ${{ env.EMQX_IMAGE_TAG }} - build-args: | - EMQX_NAME=${{ env.EMQX_NAME }} - - name: smoke test - run: | - CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG) - HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID) - ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT - docker stop $CID - - name: dashboard tests - working-directory: ./scripts/ui-tests - run: | - set -eu - docker compose up --abort-on-container-exit --exit-code-from selenium - - name: test two nodes cluster with proto_dist=inet_tls in docker - run: | - ./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG - HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy) - ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT - # cleanup - ./scripts/test/start-two-nodes-in-docker.sh -c - - name: export docker image - run: | - docker save $EMQX_IMAGE_TAG | gzip > $EMQX_NAME-$PKG_VSN.tar.gz - - uses: actions/upload-artifact@v3 - with: - name: "${{ matrix.profile[0] }}-docker" - path: "${{ env.EMQX_NAME }}-${{ env.PKG_VSN }}.tar.gz" - - name: cleanup - if: always() - working-directory: ./scripts/ui-tests - run: | - docker compose rm -fs - - spellcheck: - needs: linux - strategy: - matrix: - profile: - - emqx - - emqx-enterprise - runs-on: aws-amd64 - steps: - - uses: actions/download-artifact@v3 - name: Download schema dump - with: - name: "${{ matrix.profile }}_schema_dump" - path: /tmp/ - - name: Run spellcheck - run: | - bash /tmp/scripts/spellcheck/spellcheck.sh /tmp/_build/docgen/${{ matrix.profile }}/schema-en.json + retention-days: 7 diff --git a/.github/workflows/check_deps_integrity.yaml b/.github/workflows/check_deps_integrity.yaml index 4a079c570..8ece0746b 100644 --- a/.github/workflows/check_deps_integrity.yaml +++ b/.github/workflows/check_deps_integrity.yaml @@ -1,14 +1,46 @@ -name: Check Rebar Dependencies +name: Check integrity of rebar and mix dependencies on: - pull_request: + workflow_call: + inputs: + runner: + required: true + type: string + builder: + required: true + type: string jobs: check_deps_integrity: - runs-on: ubuntu-22.04 - container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04 - + runs-on: ${{ inputs.runner }} + container: ${{ inputs.builder }} steps: - uses: actions/checkout@v3 - - name: Run check-deps-integrity.escript - run: ./scripts/check-deps-integrity.escript + - run: git config --global --add safe.directory "$GITHUB_WORKSPACE" + - run: make ensure-rebar3 + - run: ./scripts/check-deps-integrity.escript + - name: Setup mix + env: + MIX_ENV: emqx-enterprise + PROFILE: emqx-enterprise + run: | + mix local.hex --force + mix local.rebar --force + mix deps.get + - run: ./scripts/check-elixir-deps-discrepancies.exs + env: + MIX_ENV: emqx-enterprise + PROFILE: emqx-enterprise + - run: ./scripts/check-elixir-applications.exs + env: + MIX_ENV: emqx-enterprise + PROFILE: emqx-enterprise + - name: Upload produced lock files + uses: actions/upload-artifact@v3 + if: failure() + with: + name: produced_lock_files + path: | + mix.lock + rebar.lock + retention-days: 1 diff --git a/.github/workflows/code_style_check.yaml b/.github/workflows/code_style_check.yaml deleted file mode 100644 index ff1043b81..000000000 --- a/.github/workflows/code_style_check.yaml +++ /dev/null @@ -1,29 +0,0 @@ -name: Code style check - -on: [pull_request] - -jobs: - code_style_check: - runs-on: ubuntu-22.04 - container: 
"ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 1000 - - name: Work around https://github.com/actions/checkout/issues/766 - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - - name: Check line-break at EOF - run: | - ./scripts/check-nl-at-eof.sh - - name: Check Elixir code formatting - run: | - mix format --check-formatted - - - name: Check Erlang code formatting - run: | - ./scripts/check-format.sh - - - name: Run elvis check - run: | - ./scripts/elvis-check.sh $GITHUB_BASE_REF diff --git a/.github/workflows/elixir_apps_check.yaml b/.github/workflows/elixir_apps_check.yaml deleted file mode 100644 index 31f70690e..000000000 --- a/.github/workflows/elixir_apps_check.yaml +++ /dev/null @@ -1,45 +0,0 @@ ---- - -name: Check Elixir Release Applications - -on: - pull_request: - -jobs: - elixir_apps_check: - runs-on: ubuntu-22.04 - # just use the latest builder - container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" - - strategy: - fail-fast: false - matrix: - profile: - - emqx - - emqx-enterprise - - emqx-pkg - - emqx-enterprise-pkg - - steps: - - name: fix_git_permission - run: git config --global --add safe.directory '/__w/emqx/emqx' - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: ensure rebar - run: ./scripts/ensure-rebar3.sh - - name: Work around https://github.com/actions/checkout/issues/766 - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - - name: check applications - run: ./scripts/check-elixir-applications.exs - env: - MIX_ENV: ${{ matrix.profile }} - PROFILE: ${{ matrix.profile }} -# - name: check applications started with emqx_machine -# run: ./scripts/check-elixir-emqx-machine-boot-discrepancies.exs -# env: -# MIX_ENV: ${{ matrix.profile }} - -... diff --git a/.github/workflows/elixir_deps_check.yaml b/.github/workflows/elixir_deps_check.yaml deleted file mode 100644 index a7e086bb1..000000000 --- a/.github/workflows/elixir_deps_check.yaml +++ /dev/null @@ -1,49 +0,0 @@ ---- - -name: Elixir Dependency Version Check - -on: - pull_request: - -jobs: - elixir_deps_check: - runs-on: ubuntu-22.04 - container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04 - - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: ensure rebar - run: ./scripts/ensure-rebar3.sh - - name: Work around https://github.com/actions/checkout/issues/766 - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - - name: setup mix - run: | - mix local.hex --force - mix local.rebar --force - mix deps.get - # we check only enterprise because `rebar3 tree`, even if an - # enterprise app is excluded from `project_app_dirs` in - # `rebar.config.erl`, will still list dependencies from it. - # Since the enterprise profile is a superset of the - # community one and thus more complete, we use the former. - env: - MIX_ENV: emqx-enterprise - PROFILE: emqx-enterprise - - name: check elixir deps - run: ./scripts/check-elixir-deps-discrepancies.exs - env: - MIX_ENV: emqx-enterprise - PROFILE: emqx-enterprise - - name: produced lock files - uses: actions/upload-artifact@v3 - if: failure() - with: - name: produced_lock_files - path: | - mix.lock - rebar.lock - retention-days: 1 - -... 
diff --git a/.github/workflows/elixir_release.yml b/.github/workflows/elixir_release.yml deleted file mode 100644 index 73807bfa0..000000000 --- a/.github/workflows/elixir_release.yml +++ /dev/null @@ -1,41 +0,0 @@ -# FIXME: temporary workflow for testing; remove later -name: Elixir Build (temporary) - -concurrency: - group: mix-${{ github.event_name }}-${{ github.ref }} - cancel-in-progress: true - -on: - pull_request: - workflow_dispatch: - -jobs: - elixir_release_build: - runs-on: ubuntu-22.04 - strategy: - matrix: - profile: - - emqx - - emqx-enterprise - container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04 - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: install tools - run: apt update && apt install netcat-openbsd - - name: Work around https://github.com/actions/checkout/issues/766 - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - - name: elixir release - run: make ${{ matrix.profile }}-elixir - - name: start release - run: | - cd _build/${{ matrix.profile }}/rel/emqx - bin/emqx start - - name: check if started - run: | - sleep 10 - nc -zv localhost 1883 - cd _build/${{ matrix.profile }}/rel/emqx - bin/emqx ping - bin/emqx ctl status diff --git a/.github/workflows/run_conf_tests.yaml b/.github/workflows/run_conf_tests.yaml index 80fe53133..cf696e0c6 100644 --- a/.github/workflows/run_conf_tests.yaml +++ b/.github/workflows/run_conf_tests.yaml @@ -5,49 +5,45 @@ concurrency: cancel-in-progress: true on: - push: - branches: - - master - - 'ci/**' - tags: - - v* - - e* - pull_request: - -env: - IS_CI: "yes" + workflow_call: + inputs: + runner: + required: true + type: string + builder: + required: true + type: string jobs: run_conf_tests: - runs-on: ubuntu-22.04 + runs-on: ${{ inputs.runner }} + container: ${{ inputs.builder }} + env: + PROFILE: ${{ matrix.profile }} strategy: fail-fast: false matrix: profile: - emqx - emqx-enterprise - container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" steps: - uses: AutoModality/action-clean@v1 - - uses: actions/checkout@v3 + - uses: actions/download-artifact@v3 with: - path: source - - name: build_package - working-directory: source + name: ${{ matrix.profile }} + - name: extract artifact run: | - make ${{ matrix.profile }} - - name: run_tests - working-directory: source - env: - PROFILE: ${{ matrix.profile }} - run: | - ./scripts/conf-test/run.sh - - name: print_erlang_log + unzip -o -q ${{ matrix.profile }}.zip + git config --global --add safe.directory "$GITHUB_WORKSPACE" + - run: ./scripts/test/check-example-configs.sh + - run: ./scripts/conf-test/run.sh + - name: print erlang log if: failure() run: | - cat source/_build/${{ matrix.profile }}/rel/emqx/logs/erlang.log.* + cat _build/${{ matrix.profile }}/rel/emqx/logs/erlang.log.* - uses: actions/upload-artifact@v3 if: failure() with: name: logs-${{ matrix.profile }} - path: source/_build/${{ matrix.profile }}/rel/emqx/logs + path: _build/${{ matrix.profile }}/rel/emqx/logs + diff --git a/.github/workflows/run_docker_tests.yaml b/.github/workflows/run_docker_tests.yaml new file mode 100644 index 000000000..4188378d0 --- /dev/null +++ b/.github/workflows/run_docker_tests.yaml @@ -0,0 +1,118 @@ +name: Docker image tests + +concurrency: + group: docker-tests-${{ github.event_name }}-${{ github.ref }} + cancel-in-progress: true + +on: + workflow_call: + inputs: + runner: + required: true + type: string + version-emqx: + required: true + type: string + version-emqx-enterprise: + required: true + type: string + +jobs: + 
basic-tests: + runs-on: ${{ inputs.runner }} + defaults: + run: + shell: bash + strategy: + fail-fast: false + matrix: + profile: + - ["emqx", "emqx/emqx:5.0.16"] + - ["emqx-enterprise", "emqx/emqx-enterprise:5.0.1"] + + env: + EMQX_NAME: ${{ matrix.profile[0] }} + PKG_VSN: ${{ matrix.profile[0] == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} + EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }} + + steps: + - uses: actions/checkout@v3 + - uses: actions/download-artifact@v3 + with: + name: ${{ env.EMQX_NAME }}-docker + path: /tmp + - name: load docker image + run: | + EMQX_IMAGE_TAG=$(docker load < /tmp/${EMQX_NAME}-docker-${PKG_VSN}.tar.gz 2>/dev/null | sed 's/Loaded image: //g') + echo "EMQX_IMAGE_TAG=$EMQX_IMAGE_TAG" >> $GITHUB_ENV + - name: dashboard tests + working-directory: ./scripts/ui-tests + run: | + set -eu + docker compose up --abort-on-container-exit --exit-code-from selenium + - name: test two nodes cluster with proto_dist=inet_tls in docker + run: | + ./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG + HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy) + ./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT + ./scripts/test/start-two-nodes-in-docker.sh -c + - name: cleanup + if: always() + working-directory: ./scripts/ui-tests + run: | + docker compose rm -fs + + paho-mqtt-testing: + runs-on: ${{ inputs.runner }} + defaults: + run: + shell: bash + env: + EMQX_NAME: ${{ matrix.profile }} + PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} + _EMQX_TEST_DB_BACKEND: ${{ matrix.cluster_db_backend }} + + strategy: + fail-fast: false + matrix: + profile: + - emqx + - emqx-enterprise + - emqx-elixir + cluster_db_backend: + - mnesia + - rlog + steps: + - uses: AutoModality/action-clean@v1 + - uses: actions/checkout@v3 + - uses: actions/download-artifact@v3 + with: + name: ${{ env.EMQX_NAME }}-docker + path: /tmp + - name: load docker image + run: | + EMQX_IMAGE_TAG=$(docker load < /tmp/${EMQX_NAME}-docker-${PKG_VSN}.tar.gz 2>/dev/null | sed 's/Loaded image: //g') + echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_IMAGE_TAG" >> $GITHUB_ENV + - name: run emqx + timeout-minutes: 5 + run: | + ./.ci/docker-compose-file/scripts/run-emqx.sh $_EMQX_DOCKER_IMAGE_TAG $_EMQX_TEST_DB_BACKEND + - name: make paho tests + run: | + if ! 
docker exec -i python /scripts/pytest.sh "$_EMQX_TEST_DB_BACKEND"; then + echo "DUMP_CONTAINER_LOGS_BGN" + echo "============== haproxy ==============" + docker logs haproxy + echo "============== node1 ==============" + docker logs node1.emqx.io + echo "============== node2 ==============" + docker logs node2.emqx.io + echo "DUMP_CONTAINER_LOGS_END" + exit 1 + fi + # node_dump requires netstat, which is not available in the container + # simple smoke test for node_dump + # - name: test node_dump + # run: | + # docker exec -u root node1.emqx.io apt update && apt install -y net-tools + # docker exec node1.emqx.io node_dump diff --git a/.github/workflows/run_emqx_app_tests.yaml b/.github/workflows/run_emqx_app_tests.yaml deleted file mode 100644 index 40a630e76..000000000 --- a/.github/workflows/run_emqx_app_tests.yaml +++ /dev/null @@ -1,80 +0,0 @@ -name: Check emqx app standalone - -## apps/emqx can be used as a rebar/mix dependency -## in other project, so we need to make sure apps/emqx -## as an Erlang/Elixir app works standalone - -on: - pull_request: - -jobs: - run_emqx_app_tests: - strategy: - matrix: - builder: - - 5.1-3 - otp: - - 25.3.2-1 - # no need to use more than 1 version of Elixir, since tests - # run using only Erlang code. This is needed just to specify - # the base image. - elixir: - - 1.14.5 - os: - - ubuntu22.04 - arch: - - amd64 - runs-on: - - aws-amd64 - - ubuntu-22.04 - use-self-hosted: - - ${{ github.repository_owner == 'emqx' }} - exclude: - - runs-on: ubuntu-22.04 - use-self-hosted: true - - runs-on: aws-amd64 - use-self-hosted: false - - runs-on: ${{ matrix.runs-on }} - container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir}}-${{ matrix.otp }}-${{ matrix.os }}" - - defaults: - run: - shell: bash - - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: run - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - echo "git diff base: $GITHUB_BASE_REF" - if [[ "$GITHUB_BASE_REF" =~ [0-9a-f]{8,40} ]]; then - # base is a commit sha1 - compare_base="$GITHUB_BASE_REF" - else - repo="${GITHUB_REPOSITORY}" - git remote -v - remote="$(git remote -v | grep -E "github\.com(:|/)$repo((\.git)|(\s))" | grep fetch | awk '{print $1}')" - git fetch "$remote" "$GITHUB_BASE_REF" - compare_base="$remote/$GITHUB_BASE_REF" - fi - changed_files="$(git diff --name-only ${compare_base} HEAD apps/emqx)" - if [ "$changed_files" = '' ]; then - echo "nothing changed in apps/emqx, ignored." 
- exit 0 - fi - make ensure-rebar3 - cp rebar3 apps/emqx/ - cd apps/emqx - ./rebar3 xref - ./rebar3 dialyzer - ./rebar3 eunit -v - ./rebar3 ct --name 'test@127.0.0.1' -v --readable=true - ./rebar3 proper -d test/props - - uses: actions/upload-artifact@v3 - if: failure() - with: - name: logs-${{ matrix.runs-on }} - path: apps/emqx/_build/test/logs diff --git a/.github/workflows/run_fvt_tests.yaml b/.github/workflows/run_fvt_tests.yaml deleted file mode 100644 index 0bcdee93a..000000000 --- a/.github/workflows/run_fvt_tests.yaml +++ /dev/null @@ -1,255 +0,0 @@ -name: Functional Verification Tests - -concurrency: - group: fvt-${{ github.event_name }}-${{ github.ref }} - cancel-in-progress: true - -on: - push: - branches: - - master - - 'ci/**' - tags: - - v* - pull_request: - -jobs: - prepare: - runs-on: ubuntu-22.04 - # prepare source with any OTP version, no need for a matrix - container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-debian11 - - steps: - - uses: actions/checkout@v3 - with: - path: source - fetch-depth: 0 - - name: get deps - run: | - make -C source deps-all - zip -ryq source.zip source/* source/.[^.]* - - uses: actions/upload-artifact@v3 - with: - name: source - path: source.zip - - docker_test: - runs-on: ubuntu-22.04 - needs: prepare - - strategy: - fail-fast: false - matrix: - profile: - - emqx - - emqx-enterprise - - emqx-elixir - cluster_db_backend: - - mnesia - - rlog - os: - - ["debian11", "debian:11-slim"] - builder: - - 5.1-3 - otp: - - 25.3.2-1 - elixir: - - 1.14.5 - arch: - - amd64 - steps: - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: 25.3.2 - - uses: actions/download-artifact@v3 - with: - name: source - path: . - - name: unzip source code - run: unzip -q source.zip - - - name: make docker image - working-directory: source - env: - EMQX_BUILDER: ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }} - EMQX_RUNNER: ${{ matrix.os[1] }} - run: | - make ${{ matrix.profile }}-docker - - name: run emqx - timeout-minutes: 5 - working-directory: source - run: | - set -x - if [[ "${{ matrix.profile }}" = *-elixir ]] - then - export IS_ELIXIR=yes - PROFILE=$(echo ${{ matrix.profile }} | sed -e "s/-elixir//g") - IMAGE=emqx/$PROFILE:$(./pkg-vsn.sh ${{ matrix.profile }})-elixir - else - IMAGE=emqx/${{ matrix.profile }}:$(./pkg-vsn.sh ${{ matrix.profile }}) - fi - ./.ci/docker-compose-file/scripts/run-emqx.sh $IMAGE ${{ matrix.cluster_db_backend }} - - name: make paho tests - run: | - if ! docker exec -i python /scripts/pytest.sh "${{ matrix.cluster_db_backend }}"; then - echo "DUMP_CONTAINER_LOGS_BGN" - echo "============== haproxy ==============" - docker logs haproxy - echo "============== node1 ==============" - docker logs node1.emqx.io - echo "============== node2 ==============" - docker logs node2.emqx.io - echo "DUMP_CONTAINER_LOGS_END" - exit 1 - fi - # simple smoke test for node_dump - - name: test node_dump - run: | - docker exec node1.emqx.io node_dump - - helm_test: - runs-on: ubuntu-22.04 - needs: prepare - - strategy: - fail-fast: false - matrix: - discovery: - - k8s - - dns - profile: - - emqx - - emqx-enterprise - os: - - ["debian11", "debian:11-slim"] - builder: - - 5.1-3 - otp: - - 25.3.2-1 - elixir: - - 1.14.5 - arch: - - amd64 - # - emqx-enterprise # TODO test enterprise - - steps: - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: 25.3.2 - - uses: actions/download-artifact@v3 - with: - name: source - path: . 
- - name: unzip source code - run: unzip -q source.zip - - - name: make docker image - working-directory: source - env: - EMQX_BUILDER: ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }} - EMQX_RUNNER: ${{ matrix.os[1] }} - run: | - make ${{ matrix.profile }}-docker - echo "TARGET=emqx/${{ matrix.profile }}" >> $GITHUB_ENV - echo "EMQX_TAG=$(./pkg-vsn.sh ${{ matrix.profile }})" >> $GITHUB_ENV - - run: minikube start - - run: minikube image load $TARGET:$EMQX_TAG - - name: run emqx on chart - working-directory: source - if: matrix.discovery == 'k8s' - run: | - helm install ${{ matrix.profile }} \ - --set emqxConfig.EMQX_CLUSTER__DISCOVERY_STRATEGY="k8s" \ - --set emqxConfig.EMQX_CLUSTER__K8S__APISERVER="https://kubernetes.default.svc:443" \ - --set emqxConfig.EMQX_CLUSTER__K8S__SERVICE_NAME="${{ matrix.profile }}-headless" \ - --set emqxConfig.EMQX_CLUSTER__K8S__NAMESPACE="default" \ - --set image.repository=$TARGET \ - --set image.pullPolicy=Never \ - --set image.tag=$EMQX_TAG \ - --set emqxAclConfig="" \ - --set emqxConfig.EMQX_MQTT__RETRY_INTERVAL=2s \ - --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \ - --set emqxConfig.EMQX_AUTHORIZATION__SOURCES=[] \ - --set emqxConfig.EMQX_AUTHORIZATION__NO_MATCH=allow \ - deploy/charts/${{ matrix.profile }} \ - --debug - - name: run emqx on chart - working-directory: source - if: matrix.discovery == 'dns' - run: | - helm install ${{ matrix.profile }} \ - --set emqxConfig.EMQX_CLUSTER__DISCOVERY_STRATEGY="dns" \ - --set emqxConfig.EMQX_CLUSTER__DNS__RECORD_TYPE="srv" \ - --set emqxConfig.EMQX_CLUSTER__DNS__NAME="${{ matrix.profile }}-headless.default.svc.cluster.local" \ - --set image.repository=$TARGET \ - --set image.pullPolicy=Never \ - --set image.tag=$EMQX_TAG \ - --set emqxAclConfig="" \ - --set emqxConfig.EMQX_MQTT__RETRY_INTERVAL=2s \ - --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \ - --set emqxConfig.EMQX_AUTHORIZATION__SOURCES=[] \ - --set emqxConfig.EMQX_AUTHORIZATION__NO_MATCH=allow \ - deploy/charts/${{ matrix.profile }} \ - --debug - - name: waiting emqx started - timeout-minutes: 10 - run: | - while [ "$(kubectl get StatefulSet -l app.kubernetes.io/instance=${{ matrix.profile }} -o jsonpath='{.items[0].status.replicas}')" \ - != "$(kubectl get StatefulSet -l app.kubernetes.io/instance=${{ matrix.profile }} -o jsonpath='{.items[0].status.readyReplicas}')" ]; do - echo "=============================="; - kubectl get pods; - echo "=============================="; - echo "waiting emqx started"; - sleep 10; - done - - name: Get Token - timeout-minutes: 1 - run: | - kubectl port-forward service/${{ matrix.profile }} 18083:18083 > /dev/null & - - while - [ "$(curl --silent -X 'GET' 'http://127.0.0.1:18083/api/v5/status' | tail -n1)" != "emqx is running" ] - do - echo "waiting emqx" - sleep 1 - done - - echo "TOKEN=$(curl --silent -X 'POST' 'http://127.0.0.1:18083/api/v5/login' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"username": "admin","password": "public"}' | jq -r ".token")" >> $GITHUB_ENV - - - name: Check cluster - timeout-minutes: 10 - run: | - while - [ "$(curl --silent -H "Authorization: Bearer $TOKEN" -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" != "3" ]; - do - echo "waiting ${{ matrix.profile }} cluster scale" - sleep 1 - done - - uses: actions/checkout@v3 - with: - repository: emqx/paho.mqtt.testing - ref: develop-5.0 - path: paho.mqtt.testing - - name: install pytest - run: | - pip install pytest==7.1.2 pytest-retry - 
echo "$HOME/.local/bin" >> $GITHUB_PATH - - name: run paho test - timeout-minutes: 10 - run: | - port_connected () { - local server="$1" - local port="$2" - echo > /dev/tcp/${server}/${port} 2>/dev/null - } - - kubectl port-forward service/${{ matrix.profile }} 1883:1883 > /dev/null & - - while ! port_connected localhost 1883; do - echo server not listening yet... - sleep 10 - done - - pytest --retries 3 -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "127.0.0.1" - - if: failure() - run: kubectl logs -l "app.kubernetes.io/instance=${{ matrix.profile }}" -c emqx --tail=1000 diff --git a/.github/workflows/run_gitlint.yaml b/.github/workflows/run_gitlint.yaml deleted file mode 100644 index 52082c56e..000000000 --- a/.github/workflows/run_gitlint.yaml +++ /dev/null @@ -1,16 +0,0 @@ -name: Run gitlint - -on: [pull_request] - -jobs: - run_gitlint: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Run gitlint - shell: bash - run: | - set -ex - docker run --ulimit nofile=1024 -v $(pwd):/repo -w /repo ghcr.io/emqx/gitlint --commits ${{ github.event.pull_request.base.sha }}..$GITHUB_SHA --config .github/workflows/.gitlint diff --git a/.github/workflows/run_helm_tests.yaml b/.github/workflows/run_helm_tests.yaml new file mode 100644 index 000000000..1106b6057 --- /dev/null +++ b/.github/workflows/run_helm_tests.yaml @@ -0,0 +1,144 @@ +name: Helm tests + +concurrency: + group: helm-${{ github.event_name }}-${{ github.ref }} + cancel-in-progress: true + +on: + workflow_call: + inputs: + version-emqx: + required: true + type: string + version-emqx-enterprise: + required: true + type: string + +jobs: + helm_test: + runs-on: ubuntu-22.04 + defaults: + run: + shell: bash + env: + EMQX_NAME: ${{ matrix.profile }} + EMQX_TAG: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} + REPOSITORY: "emqx/${{ matrix.profile }}" + + strategy: + fail-fast: false + matrix: + discovery: + - k8s + - dns + profile: + - emqx + - emqx-enterprise + + steps: + - uses: actions/checkout@v3 + with: + path: source + - uses: actions/download-artifact@v3 + with: + name: "${{ env.EMQX_NAME }}-docker" + path: /tmp + - run: minikube start + - run: | + img="/tmp/${EMQX_NAME}-docker-${EMQX_TAG}.tar.gz" + if stderr=$(minikube image load "${img}" 2>&1 >/dev/null) && test -n "$stderr"; then + echo "${stderr}"; + exit 1; + fi + - name: run emqx on chart (k8s) + if: matrix.discovery == 'k8s' + working-directory: source + run: | + helm install ${EMQX_NAME} \ + --set emqxConfig.EMQX_CLUSTER__DISCOVERY_STRATEGY="k8s" \ + --set emqxConfig.EMQX_CLUSTER__K8S__APISERVER="https://kubernetes.default.svc:443" \ + --set emqxConfig.EMQX_CLUSTER__K8S__SERVICE_NAME="${EMQX_NAME}-headless" \ + --set emqxConfig.EMQX_CLUSTER__K8S__NAMESPACE="default" \ + --set image.repository=$REPOSITORY \ + --set image.pullPolicy=Never \ + --set image.tag=$EMQX_TAG \ + --set emqxAclConfig="" \ + --set emqxConfig.EMQX_MQTT__RETRY_INTERVAL=2s \ + --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \ + --set emqxConfig.EMQX_AUTHORIZATION__SOURCES=[] \ + --set emqxConfig.EMQX_AUTHORIZATION__NO_MATCH=allow \ + deploy/charts/${EMQX_NAME} \ + --debug + - name: run emqx on chart (dns) + if: matrix.discovery == 'dns' + working-directory: source + run: | + helm install ${EMQX_NAME} \ + --set emqxConfig.EMQX_CLUSTER__DISCOVERY_STRATEGY="dns" \ + --set emqxConfig.EMQX_CLUSTER__DNS__RECORD_TYPE="srv" \ + --set 
emqxConfig.EMQX_CLUSTER__DNS__NAME="${EMQX_NAME}-headless.default.svc.cluster.local" \ + --set image.repository=$REPOSITORY \ + --set image.pullPolicy=Never \ + --set image.tag=$EMQX_TAG \ + --set emqxAclConfig="" \ + --set emqxConfig.EMQX_MQTT__RETRY_INTERVAL=2s \ + --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \ + --set emqxConfig.EMQX_AUTHORIZATION__SOURCES=[] \ + --set emqxConfig.EMQX_AUTHORIZATION__NO_MATCH=allow \ + deploy/charts/${EMQX_NAME} \ + --debug + - name: waiting emqx started + timeout-minutes: 5 + run: | + while [ "$(kubectl get StatefulSet -l app.kubernetes.io/instance=${EMQX_NAME} -o jsonpath='{.items[0].status.replicas}')" \ + != "$(kubectl get StatefulSet -l app.kubernetes.io/instance=${EMQX_NAME} -o jsonpath='{.items[0].status.readyReplicas}')" ]; do + echo "=============================="; + kubectl get pods; + echo "=============================="; + echo "waiting emqx started"; + sleep 10; + done + - name: Get Token + run: | + kubectl port-forward service/${EMQX_NAME} 18083:18083 > /dev/null & + curl --head -X GET --retry 10 --retry-connrefused --retry-delay 6 http://localhost:18083/status + echo "TOKEN=$(curl --silent -X 'POST' 'http://127.0.0.1:18083/api/v5/login' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"username": "admin","password": "public"}' | jq -r ".token")" >> $GITHUB_ENV + + - name: Check cluster + timeout-minutes: 1 + run: | + while + nodes_length="$(curl --silent -H "Authorization: Bearer $TOKEN" -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" + [ $nodes_length != "3" ] + do + echo "waiting ${EMQX_NAME} cluster scale. Current live nodes: $nodes_length." + sleep 1 + done + - uses: actions/checkout@v3 + with: + repository: emqx/paho.mqtt.testing + ref: develop-5.0 + path: paho.mqtt.testing + - name: install pytest + run: | + pip install pytest==7.1.2 pytest-retry + echo "$HOME/.local/bin" >> $GITHUB_PATH + - name: run paho test + timeout-minutes: 10 + run: | + port_connected () { + local server="$1" + local port="$2" + echo > /dev/tcp/${server}/${port} 2>/dev/null + } + + kubectl port-forward service/${EMQX_NAME} 1883:1883 > /dev/null & + + while ! port_connected localhost 1883; do + echo server not listening yet... 
+ sleep 10 + done + + pytest --retries 3 -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "127.0.0.1" + - if: failure() + run: kubectl logs -l "app.kubernetes.io/instance=${EMQX_NAME}" -c emqx --tail=1000 diff --git a/.github/workflows/run_jmeter_tests.yaml b/.github/workflows/run_jmeter_tests.yaml index 04866c7a9..d45b66324 100644 --- a/.github/workflows/run_jmeter_tests.yaml +++ b/.github/workflows/run_jmeter_tests.yaml @@ -1,22 +1,16 @@ name: JMeter integration tests on: - push: - tags: - - "v5.*" - pull_request: - branches: - - "master" + workflow_call: + inputs: + version-emqx: + required: true + type: string jobs: - build_emqx_for_jmeter_tests: + jmeter_artifact: runs-on: ubuntu-22.04 - outputs: - version: ${{ steps.build_docker.outputs.version}} steps: - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: 25.3.2 - name: Cache Jmeter id: cache-jmeter uses: actions/cache@v3 @@ -42,21 +36,6 @@ jobs: with: name: apache-jmeter.tgz path: /tmp/apache-jmeter.tgz - - uses: actions/checkout@v3 - - name: zip emqx docker image - id: build_docker - if: endsWith(github.repository, 'emqx') - run: | - ## TODO: make profile a matrix dimension - PROFILE='emqx' - make "${PROFILE}-docker" - VSN="$(./pkg-vsn.sh $PROFILE)" - echo "version=${VSN}" >> $GITHUB_OUTPUT - docker save -o emqx.tar emqx/emqx:${VSN} - - uses: actions/upload-artifact@v3 - with: - name: emqx.tar - path: ./emqx.tar advanced_feat: runs-on: ubuntu-22.04 @@ -70,69 +49,28 @@ jobs: - mqtt_topic_rewrite # - mqtt_retainer - needs: build_emqx_for_jmeter_tests + needs: jmeter_artifact steps: - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: 25.3.2 - uses: actions/checkout@v3 - - uses: actions/download-artifact@v3 + - uses: ./.github/actions/prepare-jmeter with: - name: emqx.tar - path: /tmp - - name: load docker image - run: | - docker load < /tmp/emqx.tar + version-emqx: ${{ inputs.version-emqx }} - name: docker compose up timeout-minutes: 5 - env: - _EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }} run: | - docker-compose \ + docker compose \ -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \ - up -d --build - - name: wait docker compose up - timeout-minutes: 5 - run: | - while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do - echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx"; - sleep 5; - done - while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \ - != $(docker ps -a --filter name=client | wc -l) ]; do - sleep 1 - done - docker ps -a + up --wait --build echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV - - uses: actions/checkout@v3 - with: - repository: emqx/emqx-fvt - ref: broker-autotest-v5 - path: scripts - - uses: actions/setup-java@v3 - with: - java-version: '8.0.282' # The JDK version to make available on the path. 
- java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk - architecture: x64 # (x64 or x86) - defaults to x64 - # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md - distribution: 'zulu' - - uses: actions/download-artifact@v3 - with: - name: apache-jmeter.tgz - path: /tmp - - name: install jmeter - timeout-minutes: 10 - env: - JMETER_VERSION: 5.4.3 + - name: show logs + if: failure() run: | - cd /tmp && tar -xvf apache-jmeter.tgz - echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar - ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter + docker compose \ + -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \ + logs - name: run jmeter run: | - /opt/jmeter/bin/jmeter.sh \ + jmeter/bin/jmeter.sh \ -Jjmeter.save.saveservice.output_format=xml -n \ -t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \ -Demqx_ip=$HAPROXY_IP \ @@ -152,8 +90,6 @@ jobs: pgsql_authn_authz: runs-on: ubuntu-22.04 - env: - _EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }} strategy: fail-fast: false @@ -168,72 +104,26 @@ jobs: - pgsql_authn - pgsql_authz - needs: build_emqx_for_jmeter_tests + needs: jmeter_artifact steps: - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: 25.3.2 - uses: actions/checkout@v3 - - uses: actions/download-artifact@v3 + - uses: ./.github/actions/prepare-jmeter with: - name: emqx.tar - path: /tmp - - name: load docker image - run: | - docker load < /tmp/emqx.tar + version-emqx: ${{ inputs.version-emqx }} - name: docker compose up timeout-minutes: 5 env: PGSQL_TAG: ${{ matrix.pgsql_tag }} run: | - docker-compose \ + docker compose \ -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \ -f .ci/docker-compose-file/docker-compose-pgsql-tls.yaml \ - up -d --build - - name: wait docker compose up - timeout-minutes: 5 - run: | - while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do - echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx"; - sleep 5; - done - while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \ - != $(docker ps -a --filter name=client | wc -l) ]; do - sleep 1 - done - docker ps -a + up --wait --build echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV echo PGSQL_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' pgsql-tls) >> $GITHUB_ENV - - uses: actions/checkout@v3 - with: - repository: emqx/emqx-fvt - ref: broker-autotest-v5 - path: scripts - - uses: actions/setup-java@v3 - with: - java-version: '8.0.282' # The JDK version to make available on the path. 
- java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk - architecture: x64 # (x64 or x86) - defaults to x64 - # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md - distribution: 'zulu' - - uses: actions/download-artifact@v3 - with: - name: apache-jmeter.tgz - path: /tmp - - name: install jmeter - timeout-minutes: 10 - env: - JMETER_VERSION: 5.4.3 - run: | - cd /tmp && tar -xvf apache-jmeter.tgz - echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar - wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/postgresql-42.2.18.jar https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.18/postgresql-42.2.18.jar - ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter - name: run jmeter run: | - /opt/jmeter/bin/jmeter.sh \ + jmeter/bin/jmeter.sh \ -Jjmeter.save.saveservice.output_format=xml -n \ -t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \ -Demqx_ip=$HAPROXY_IP \ @@ -257,7 +147,7 @@ jobs: - name: dump docker compose logs if: failure() run: | - docker-compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log + docker compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log - uses: actions/upload-artifact@v3 if: always() with: @@ -277,73 +167,26 @@ jobs: - mysql_authn - mysql_authz - needs: build_emqx_for_jmeter_tests + needs: jmeter_artifact steps: - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: 25.3.2 - uses: actions/checkout@v3 - - uses: actions/download-artifact@v3 + - uses: ./.github/actions/prepare-jmeter with: - name: emqx.tar - path: /tmp - - name: load docker image - run: | - docker load < /tmp/emqx.tar + version-emqx: ${{ inputs.version-emqx }} - name: docker compose up timeout-minutes: 5 env: - _EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }} PGSQL_TAG: ${{ matrix.mysql_tag }} run: | - docker-compose \ + docker compose \ -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \ -f .ci/docker-compose-file/docker-compose-mysql-tls.yaml \ - up -d --build - - name: wait docker compose up - timeout-minutes: 5 - run: | - while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do - echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx"; - sleep 5; - done - while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \ - != $(docker ps -a --filter name=client | wc -l) ]; do - sleep 1 - done - docker ps -a + up --wait --build echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV echo MYSQL_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' mysql-tls) >> $GITHUB_ENV - - uses: actions/checkout@v3 - with: - repository: emqx/emqx-fvt - ref: broker-autotest-v5 - path: scripts - - uses: actions/setup-java@v3 - with: - java-version: '8.0.282' # The JDK version to make available on the path. 
- java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk - architecture: x64 # (x64 or x86) - defaults to x64 - # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md - distribution: 'zulu' - - uses: actions/download-artifact@v3 - with: - name: apache-jmeter.tgz - path: /tmp - - name: install jmeter - timeout-minutes: 10 - env: - JMETER_VERSION: 5.4.3 - run: | - cd /tmp && tar -xvf apache-jmeter.tgz - echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar - wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/mysql-connector-java-8.0.16.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar - ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter - name: run jmeter run: | - /opt/jmeter/bin/jmeter.sh \ + jmeter/bin/jmeter.sh \ -Jjmeter.save.saveservice.output_format=xml -n \ -t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \ -Demqx_ip=$HAPROXY_IP \ @@ -379,45 +222,19 @@ jobs: scripts_type: - jwt_authn - needs: build_emqx_for_jmeter_tests + needs: jmeter_artifact steps: - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: 25.3.2 - uses: actions/checkout@v3 - - uses: actions/download-artifact@v3 + - uses: ./.github/actions/prepare-jmeter with: - name: emqx.tar - path: /tmp - - name: load docker image - run: | - docker load < /tmp/emqx.tar + version-emqx: ${{ inputs.version-emqx }} - name: docker compose up timeout-minutes: 5 - env: - _EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }} run: | - docker-compose \ + docker compose \ -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \ - up -d --build - - name: wait docker compose up - timeout-minutes: 5 - run: | - while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do - echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx"; - sleep 5; - done - while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \ - != $(docker ps -a --filter name=client | wc -l) ]; do - sleep 1 - done - docker ps -a + up --wait --build echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV - - uses: actions/checkout@v3 - with: - repository: emqx/emqx-fvt - ref: broker-autotest-v5 - path: scripts - name: run jwks_server timeout-minutes: 10 run: | @@ -426,30 +243,9 @@ jobs: cd target docker run --name jwks_server --network emqx_bridge --ip 172.100.239.88 -d -v $(pwd)/jwkserver-0.0.1.jar:/jwks_server/jwkserver-0.0.1.jar --workdir /jwks_server openjdk:8-jdk bash \ -c "java -jar jwkserver-0.0.1.jar" - - uses: actions/setup-java@v3 - with: - java-version: '8.0.282' # The JDK version to make available on the path. 
- java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk - architecture: x64 # (x64 or x86) - defaults to x64 - # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md - distribution: 'zulu' - - uses: actions/download-artifact@v3 - with: - name: apache-jmeter.tgz - path: /tmp - - name: install jmeter - timeout-minutes: 10 - env: - JMETER_VERSION: 5.4.3 - run: | - cd /tmp && tar -xvf apache-jmeter.tgz - echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar - ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter - name: run jmeter run: | - /opt/jmeter/bin/jmeter.sh \ + jmeter/bin/jmeter.sh \ -Jjmeter.save.saveservice.output_format=xml -n \ -t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \ -Demqx_ip=$HAPROXY_IP \ @@ -478,79 +274,30 @@ jobs: - built_in_database_authn - built_in_database_authz - needs: build_emqx_for_jmeter_tests + needs: jmeter_artifact steps: - - uses: erlef/setup-beam@v1.15.4 - with: - otp-version: 25.3.2 - uses: actions/checkout@v3 - - uses: actions/download-artifact@v3 + - uses: ./.github/actions/prepare-jmeter with: - name: emqx.tar - path: /tmp - - name: load docker image - run: | - docker load < /tmp/emqx.tar + version-emqx: ${{ inputs.version-emqx }} - name: docker compose up timeout-minutes: 5 - env: - _EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }} - PGSQL_TAG: ${{ matrix.mysql_tag }} run: | - docker-compose \ + docker compose \ -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \ - up -d --build - - name: wait docker compose up - timeout-minutes: 5 - run: | - while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do - echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx"; - sleep 5; - done - while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \ - != $(docker ps -a --filter name=client | wc -l) ]; do - sleep 1 - done - docker ps -a + up --wait --build echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV - - uses: actions/checkout@v3 - with: - repository: emqx/emqx-fvt - ref: broker-autotest-v5 - path: scripts - - uses: actions/setup-java@v3 - with: - java-version: '8.0.282' # The JDK version to make available on the path. 
- java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk - architecture: x64 # (x64 or x86) - defaults to x64 - # https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md - distribution: 'zulu' - - uses: actions/download-artifact@v3 - with: - name: apache-jmeter.tgz - path: /tmp - - name: install jmeter - timeout-minutes: 10 - env: - JMETER_VERSION: 5.4.3 - run: | - cd /tmp && tar -xvf apache-jmeter.tgz - echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties - wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar - wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/mysql-connector-java-8.0.16.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar - ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter - name: run jmeter run: | - /opt/jmeter/bin/jmeter.sh \ + jmeter/bin/jmeter.sh \ -Jjmeter.save.saveservice.output_format=xml -n \ -t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \ -Demqx_ip=$HAPROXY_IP \ - -l jmeter_logs/${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}.jtl \ - -j jmeter_logs/logs/${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}.log + -l jmeter_logs/${{ matrix.scripts_type }}.jtl \ + -j jmeter_logs/logs/${{ matrix.scripts_type }}.log - name: check logs run: | - if cat jmeter_logs/${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}.jtl | grep -e 'true' > /dev/null 2>&1; then + if cat jmeter_logs/${{ matrix.scripts_type }}.jtl | grep -e 'true' > /dev/null 2>&1; then echo "check logs failed" exit 1 fi @@ -559,11 +306,3 @@ jobs: with: name: jmeter_logs path: ./jmeter_logs - - delete-artifact: - runs-on: ubuntu-22.04 - needs: [advanced_feat,pgsql_authn_authz,JWT_authn,mysql_authn_authz,built_in_database_authn_authz] - steps: - - uses: geekyeggo/delete-artifact@v2 - with: - name: emqx.tar diff --git a/.github/workflows/run_relup_tests.yaml b/.github/workflows/run_relup_tests.yaml index 0400d0502..3958d5b00 100644 --- a/.github/workflows/run_relup_tests.yaml +++ b/.github/workflows/run_relup_tests.yaml @@ -4,18 +4,20 @@ concurrency: group: relup-${{ github.event_name }}-${{ github.ref }} cancel-in-progress: true -# on: -# push: -# branches: -# - '**' -# tags: -# - e* -# pull_request: +on: + workflow_call: + inputs: + runner: + required: true + type: string + builder: + required: true + type: string jobs: relup_test_plan: - runs-on: ubuntu-22.04 - container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" + runs-on: ${{ inputs.runner }} + container: ${{ inputs.builder }} outputs: CUR_EE_VSN: ${{ steps.find-versions.outputs.CUR_EE_VSN }} OLD_VERSIONS: ${{ steps.find-versions.outputs.OLD_VERSIONS }} @@ -23,16 +25,18 @@ jobs: run: shell: bash steps: - - uses: actions/checkout@v3 - name: Checkout + - uses: AutoModality/action-clean@v1 + - uses: actions/download-artifact@v3 with: - path: emqx - fetch-depth: 0 + name: emqx-enterprise + - name: extract artifact + run: | + unzip -o -q emqx-enterprise.zip + git config --global --add safe.directory "$GITHUB_WORKSPACE" - name: Find versions id: find-versions run: | set -x - cd emqx ee_vsn="$(./pkg-vsn.sh enterprise)" old_ee_vsns="$(./scripts/relup-build/base-vsns.sh enterprise | xargs)" 
old_vsns=$(echo -n "${old_ee_vsns}" | sed 's/ $//g' | jq -R -s -c 'split(" ")') @@ -40,8 +44,6 @@ jobs: echo "OLD_VERSIONS=$old_vsns" >> $GITHUB_OUTPUT - name: build emqx run: | - set -x - cd emqx export PROFILE='emqx-enterprise' make emqx-enterprise-tgz - uses: actions/upload-artifact@v3 @@ -49,10 +51,10 @@ jobs: with: name: emqx_built path: | - emqx/_upgrade_base - emqx/_packages - emqx/scripts - emqx/.ci + _upgrade_base + _packages + scripts + .ci relup_test_run: needs: @@ -70,8 +72,7 @@ jobs: run: shell: bash steps: - # setup Erlang to run lux - - uses: erlef/setup-beam@v1.15.4 + - uses: erlef/setup-beam@v1.16.0 with: otp-version: 25.3.2 - uses: actions/checkout@v3 @@ -81,7 +82,7 @@ jobs: path: lux - name: Install lux run: | - set -e -u -x + set -eu cd lux autoconf ./configure @@ -94,10 +95,7 @@ jobs: path: . - name: run relup test run: | - set -e -x -u - chmod a+x scripts/**/*.sh - ls -l scripts - ls -l scripts/relup-test + set -eux case "$OLD_VSN" in e*) export CUR_VSN="$CUR_EE_VSN" diff --git a/.github/workflows/run_test_cases.yaml b/.github/workflows/run_test_cases.yaml index d43079d61..48e551612 100644 --- a/.github/workflows/run_test_cases.yaml +++ b/.github/workflows/run_test_cases.yaml @@ -5,152 +5,34 @@ concurrency: cancel-in-progress: true on: - push: - branches: - - master - - 'ci/**' - tags: - - v* - - e* - pull_request: + workflow_call: + inputs: + runner: + required: true + type: string + builder: + required: true + type: string + ct-matrix: + required: true + type: string + ct-host: + required: true + type: string + ct-docker: + required: true + type: string env: IS_CI: "yes" jobs: - build-matrix: - runs-on: ubuntu-22.04 - outputs: - prepare: ${{ steps.matrix.outputs.prepare }} - host: ${{ steps.matrix.outputs.host }} - docker: ${{ steps.matrix.outputs.docker }} - runs-on: ${{ steps.runner.outputs.runs-on }} - steps: - - uses: actions/checkout@v3 - - name: Build matrix - id: matrix - run: | - APPS="$(./scripts/find-apps.sh --ci)" - MATRIX="$(echo "${APPS}" | jq -c ' - [ - (.[] | select(.profile == "emqx") | . + { - builder: "5.1-3", - otp: "25.3.2-1", - elixir: "1.14.5" - }), - (.[] | select(.profile == "emqx-enterprise") | . + { - builder: "5.1-3", - otp: ["25.3.2-1"][], - elixir: "1.14.5" - }) - ] - ')" - echo "${MATRIX}" | jq - MATRIX_PREPARE="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')" - MATRIX_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')" - MATRIX_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')" - echo "prepare=${MATRIX_PREPARE}" | tee -a $GITHUB_OUTPUT - echo "host=${MATRIX_HOST}" | tee -a $GITHUB_OUTPUT - echo "docker=${MATRIX_DOCKER}" | tee -a $GITHUB_OUTPUT - - name: Choose runner host - id: runner - run: | - RUNS_ON="ubuntu-22.04" - ${{ github.repository_owner == 'emqx' }} && RUNS_ON="aws-amd64" - echo "runs-on=${RUNS_ON}" | tee -a $GITHUB_OUTPUT - - prepare: - runs-on: ${{ needs.build-matrix.outputs.runs-on }} - needs: [build-matrix] - strategy: - fail-fast: false - matrix: - include: ${{ fromJson(needs.build-matrix.outputs.prepare) }} - container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" - steps: - - uses: AutoModality/action-clean@v1 - - uses: actions/checkout@v3 - with: - path: source - - name: get_all_deps - working-directory: source - env: - PROFILE: ${{ matrix.profile }} - run: | - make ensure-rebar3 - # fetch all deps and compile - make ${{ matrix.profile }}-compile - make test-compile - cd .. 
- zip -ryq source.zip source/* source/.[^.]* - - uses: actions/upload-artifact@v3 - with: - name: source-${{ matrix.profile }}-${{ matrix.otp }} - path: source.zip - - check_examples: - needs: - - build-matrix - - prepare - runs-on: ${{ needs.build-matrix.outputs.runs-on }} - strategy: - fail-fast: false - matrix: - include: ${{ fromJson(needs.build-matrix.outputs.prepare) }} - container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" - steps: - - uses: AutoModality/action-clean@v1 - - uses: actions/download-artifact@v3 - with: - name: source-${{ matrix.profile }}-${{ matrix.otp }} - path: . - - name: unzip source code - run: unzip -o -q source.zip - - name: check example config files - env: - PROFILE: ${{ matrix.profile }} - working-directory: source - run: ./scripts/test/check-example-configs.sh - - static_checks: - needs: - - build-matrix - - prepare - runs-on: ${{ needs.build-matrix.outputs.runs-on }} - strategy: - fail-fast: false - matrix: - include: ${{ fromJson(needs.build-matrix.outputs.prepare) }} - container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" - steps: - - uses: AutoModality/action-clean@v1 - - uses: actions/download-artifact@v3 - with: - name: source-${{ matrix.profile }}-${{ matrix.otp }} - path: . - - name: unzip source code - run: unzip -o -q source.zip - - uses: actions/cache@v3 - with: - path: "source/emqx_dialyzer_${{ matrix.otp }}_plt" - key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('source/rebar.*', 'source/apps/*/rebar.*', 'source/lib-ee/*/rebar.*') }} - restore-keys: | - rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}- - - name: run static checks - env: - PROFILE: ${{ matrix.profile }} - working-directory: source - run: make static_checks - eunit_and_proper: - needs: - - build-matrix - - prepare - runs-on: ${{ needs.build-matrix.outputs.runs-on }} + runs-on: ${{ inputs.runner }} strategy: fail-fast: false matrix: - include: ${{ fromJson(needs.build-matrix.outputs.prepare) }} + include: ${{ fromJson(inputs.ct-matrix) }} defaults: run: @@ -161,16 +43,16 @@ jobs: - uses: AutoModality/action-clean@v1 - uses: actions/download-artifact@v3 with: - name: source-${{ matrix.profile }}-${{ matrix.otp }} - path: . - - name: unzip source code - run: unzip -o -q source.zip + name: ${{ matrix.profile }} + - name: extract artifact + run: | + unzip -o -q ${{ matrix.profile }}.zip + git config --global --add safe.directory "$GITHUB_WORKSPACE" # produces eunit.coverdata - name: eunit env: PROFILE: ${{ matrix.profile }} CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }} - working-directory: source run: make eunit # produces proper.coverdata @@ -178,23 +60,19 @@ jobs: env: PROFILE: ${{ matrix.profile }} CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }} - working-directory: source run: make proper - uses: actions/upload-artifact@v3 with: name: coverdata - path: source/_build/test/cover + path: _build/test/cover ct_docker: - needs: - - build-matrix - - prepare - runs-on: ${{ needs.build-matrix.outputs.runs-on }} + runs-on: ${{ inputs.runner }} strategy: fail-fast: false matrix: - include: ${{ fromJson(needs.build-matrix.outputs.docker) }} + include: ${{ fromJson(inputs.ct-docker) }} defaults: run: @@ -204,14 +82,14 @@ jobs: - uses: AutoModality/action-clean@v1 - uses: actions/download-artifact@v3 with: - name: source-${{ matrix.profile }}-${{ matrix.otp }} - path: . 
- - name: unzip source code - run: unzip -q source.zip + name: ${{ matrix.profile }} + - name: extract artifact + run: | + unzip -o -q ${{ matrix.profile }}.zip + git config --global --add safe.directory "$GITHUB_WORKSPACE" # produces $PROFILE---sg.coverdata - name: run common tests - working-directory: source env: DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" MONGO_TAG: "5" @@ -229,22 +107,19 @@ jobs: - uses: actions/upload-artifact@v3 with: name: coverdata - path: source/_build/test/cover + path: _build/test/cover - uses: actions/upload-artifact@v3 if: failure() with: name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }} - path: source/_build/test/logs + path: _build/test/logs ct: - needs: - - build-matrix - - prepare - runs-on: ${{ needs.build-matrix.outputs.runs-on }} + runs-on: ${{ inputs.runner }} strategy: fail-fast: false matrix: - include: ${{ fromJson(needs.build-matrix.outputs.host) }} + include: ${{ fromJson(inputs.ct-host) }} container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" defaults: @@ -255,14 +130,14 @@ jobs: - uses: AutoModality/action-clean@v1 - uses: actions/download-artifact@v3 with: - name: source-${{ matrix.profile }}-${{ matrix.otp }} - path: . - - name: unzip source code - run: unzip -q source.zip + name: ${{ matrix.profile }} + - name: extract artifact + run: | + unzip -o -q ${{ matrix.profile }}.zip + git config --global --add safe.directory "$GITHUB_WORKSPACE" # produces $PROFILE---sg.coverdata - name: run common tests - working-directory: source env: PROFILE: ${{ matrix.profile }} SUITEGROUP: ${{ matrix.suitegroup }} @@ -272,58 +147,61 @@ jobs: - uses: actions/upload-artifact@v3 with: name: coverdata - path: source/_build/test/cover + path: _build/test/cover if-no-files-found: warn # do not fail if no coverdata found - uses: actions/upload-artifact@v3 if: failure() with: name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }} - path: source/_build/test/logs + path: _build/test/logs make_cover: needs: - eunit_and_proper - ct - ct_docker - runs-on: ubuntu-22.04 - container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04" + runs-on: ${{ inputs.runner }} + container: ${{ inputs.builder }} + strategy: + fail-fast: false + matrix: + profile: + - emqx-enterprise steps: - uses: AutoModality/action-clean@v1 - uses: actions/download-artifact@v3 with: - name: source-emqx-enterprise-25.3.2-1 - path: . 
- - name: unzip source code - run: unzip -q source.zip + name: ${{ matrix.profile }} + - name: extract artifact + run: | + unzip -o -q ${{ matrix.profile }}.zip + git config --global --add safe.directory "$GITHUB_WORKSPACE" - uses: actions/download-artifact@v3 name: download coverdata with: name: coverdata - path: source/_build/test/cover + path: _build/test/cover - name: make cover - working-directory: source env: PROFILE: emqx-enterprise run: make cover - name: send to coveralls - working-directory: source env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} PROFILE: emqx-enterprise run: make coveralls - name: get coveralls logs - working-directory: source if: failure() run: cat rebar3.crashdump # do this in a separate job upload_coverdata: needs: make_cover - runs-on: ubuntu-22.04 + runs-on: ${{ inputs.runner }} steps: - name: Coveralls Finished env: diff --git a/.github/workflows/shellcheck.yaml b/.github/workflows/shellcheck.yaml deleted file mode 100644 index 7f29572b9..000000000 --- a/.github/workflows/shellcheck.yaml +++ /dev/null @@ -1,19 +0,0 @@ -name: Shellcheck - -on: - pull_request: - -jobs: - shellcheck: - runs-on: ubuntu-22.04 - steps: - - name: Checkout source code - uses: actions/checkout@v3 - - name: Install shellcheck - run: | - sudo apt-get update - sudo apt install shellcheck - - name: Run shellcheck - run: | - ./scripts/shellcheck.sh - echo "success" diff --git a/.github/workflows/spellcheck.yaml b/.github/workflows/spellcheck.yaml new file mode 100644 index 000000000..42a464ee8 --- /dev/null +++ b/.github/workflows/spellcheck.yaml @@ -0,0 +1,29 @@ +name: Spellcheck + +concurrency: + group: spellcheck-${{ github.event_name }}-${{ github.ref }} + cancel-in-progress: true + +on: + workflow_call: + inputs: + runner: + required: true + type: string + +jobs: + spellcheck: + strategy: + matrix: + profile: + - emqx + - emqx-enterprise + runs-on: ${{ inputs.runner }} + steps: + - uses: actions/download-artifact@v3 + with: + name: "${{ matrix.profile }}_schema_dump" + path: /tmp/ + - name: Run spellcheck + run: | + bash /tmp/scripts/spellcheck/spellcheck.sh /tmp/_build/docgen/${{ matrix.profile }}/schema-en.json diff --git a/.github/workflows/static_checks.yaml b/.github/workflows/static_checks.yaml new file mode 100644 index 000000000..3b32a36b4 --- /dev/null +++ b/.github/workflows/static_checks.yaml @@ -0,0 +1,49 @@ +name: Static checks + +concurrency: + group: static-checks-${{ github.event_name }}-${{ github.ref }} + cancel-in-progress: true + +on: + workflow_call: + inputs: + runner: + required: true + type: string + builder: + required: true + type: string + ct-matrix: + required: true + type: string + +env: + IS_CI: "yes" + +jobs: + static_checks: + runs-on: ${{ inputs.runner }} + strategy: + fail-fast: false + matrix: + include: ${{ fromJson(inputs.ct-matrix) }} + container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04" + steps: + - uses: AutoModality/action-clean@v1 + - uses: actions/download-artifact@v3 + with: + name: ${{ matrix.profile }} + - name: extract artifact + run: | + unzip -o -q ${{ matrix.profile }}.zip + git config --global --add safe.directory "$GITHUB_WORKSPACE" + - uses: actions/cache@v3 + with: + path: "emqx_dialyzer_${{ matrix.otp }}_plt" + key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*', 'lib-ee/*/rebar.*') }} + restore-keys: | + rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}- + - name: run static checks + env: + PROFILE: ${{ 
matrix.profile }} + run: make static_checks diff --git a/Makefile b/Makefile index db00303d6..037d33cea 100644 --- a/Makefile +++ b/Makefile @@ -143,7 +143,7 @@ endif .PHONY: cover cover: $(REBAR) - @ENABLE_COVER_COMPILE=1 $(REBAR) cover + @ENABLE_COVER_COMPILE=1 $(REBAR) as test cover .PHONY: coveralls coveralls: $(REBAR) diff --git a/apps/emqx/include/emqx_release.hrl b/apps/emqx/include/emqx_release.hrl index 4cd4ee382..5238c38b0 100644 --- a/apps/emqx/include/emqx_release.hrl +++ b/apps/emqx/include/emqx_release.hrl @@ -32,7 +32,7 @@ %% `apps/emqx/src/bpapi/README.md' %% Opensource edition --define(EMQX_RELEASE_CE, "5.1.3"). +-define(EMQX_RELEASE_CE, "5.1.5"). %% Enterprise edition -define(EMQX_RELEASE_EE, "5.1.1"). diff --git a/apps/emqx/rebar.config b/apps/emqx/rebar.config index d24999972..c2dfccad6 100644 --- a/apps/emqx/rebar.config +++ b/apps/emqx/rebar.config @@ -28,7 +28,7 @@ {gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}}, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}}, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}}, - {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.9"}}}, + {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.10"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}}, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.14"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}}, diff --git a/apps/emqx/src/emqx.app.src b/apps/emqx/src/emqx.app.src index cff8cf35b..d9598ee1b 100644 --- a/apps/emqx/src/emqx.app.src +++ b/apps/emqx/src/emqx.app.src @@ -2,7 +2,7 @@ {application, emqx, [ {id, "emqx"}, {description, "EMQX Core"}, - {vsn, "5.1.4"}, + {vsn, "5.1.5"}, {modules, []}, {registered, []}, {applications, [ diff --git a/apps/emqx/src/emqx_broker_sup.erl b/apps/emqx/src/emqx_broker_sup.erl index a43ee771f..ac2fe587c 100644 --- a/apps/emqx/src/emqx_broker_sup.erl +++ b/apps/emqx/src/emqx_broker_sup.erl @@ -31,7 +31,7 @@ start_link() -> init([]) -> %% Broker pool - PoolSize = emqx_vm:schedulers() * 2, + PoolSize = emqx:get_config([node, broker_pool_size], emqx_vm:schedulers() * 2), BrokerPool = emqx_pool_sup:spec([ broker_pool, hash, diff --git a/apps/emqx/src/emqx_cm.erl b/apps/emqx/src/emqx_cm.erl index c9fb93ceb..c680560fb 100644 --- a/apps/emqx/src/emqx_cm.erl +++ b/apps/emqx/src/emqx_cm.erl @@ -685,7 +685,8 @@ handle_cast(Msg, State) -> handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) -> ?tp(emqx_cm_process_down, #{stale_pid => Pid, reason => _Reason}), - ChanPids = [Pid | emqx_utils:drain_down(?BATCH_SIZE)], + BatchSize = emqx:get_config([node, channel_cleanup_batch_size], ?BATCH_SIZE), + ChanPids = [Pid | emqx_utils:drain_down(BatchSize)], {Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon), lists:foreach(fun mark_channel_disconnected/1, ChanPids), ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]), diff --git a/apps/emqx/src/emqx_kernel_sup.erl b/apps/emqx/src/emqx_kernel_sup.erl index 45451084a..85724b9b4 100644 --- a/apps/emqx/src/emqx_kernel_sup.erl +++ b/apps/emqx/src/emqx_kernel_sup.erl @@ -31,7 +31,9 @@ init([]) -> %% always start emqx_config_handler first to load the emqx.conf to emqx_config [ child_spec(emqx_config_handler, worker), - child_spec(emqx_pool_sup, supervisor), + child_spec(emqx_pool_sup, supervisor, [ + emqx:get_config([node, generic_pool_size], emqx_vm:schedulers()) + ]), child_spec(emqx_hooks, worker), child_spec(emqx_stats, worker), 
child_spec(emqx_metrics, worker), diff --git a/apps/emqx/src/emqx_passwd.erl b/apps/emqx/src/emqx_passwd.erl index dc940645b..c68a146ed 100644 --- a/apps/emqx/src/emqx_passwd.erl +++ b/apps/emqx/src/emqx_passwd.erl @@ -19,7 +19,8 @@ -export([ hash/2, hash_data/2, - check_pass/3 + check_pass/3, + compare_secure/2 ]). -export_type([ diff --git a/apps/emqx/src/emqx_pool_sup.erl b/apps/emqx/src/emqx_pool_sup.erl index aadd1895a..7c4f68d03 100644 --- a/apps/emqx/src/emqx_pool_sup.erl +++ b/apps/emqx/src/emqx_pool_sup.erl @@ -24,6 +24,7 @@ -export([ start_link/0, + start_link/1, start_link/3, start_link/4 ]). @@ -51,6 +52,9 @@ spec(ChildId, Args) -> start_link() -> start_link(?POOL, random, {?POOL, start_link, []}). +start_link(PoolSize) -> + start_link(?POOL, random, PoolSize, {?POOL, start_link, []}). + -spec start_link(atom() | tuple(), atom(), mfargs()) -> {ok, pid()} | {error, term()}. start_link(Pool, Type, MFA) -> diff --git a/apps/emqx/src/emqx_router_helper.erl b/apps/emqx/src/emqx_router_helper.erl index 78cf62d6c..8d96bf81d 100644 --- a/apps/emqx/src/emqx_router_helper.erl +++ b/apps/emqx/src/emqx_router_helper.erl @@ -146,13 +146,18 @@ handle_info({mnesia_table_event, Event}, State) -> ?SLOG(debug, #{msg => "unexpected_mnesia_table_event", event => Event}), {noreply, State}; handle_info({nodedown, Node}, State = #{nodes := Nodes}) -> - global:trans( - {?LOCK, self()}, - fun() -> - mria:transaction(?ROUTE_SHARD, fun ?MODULE:cleanup_routes/1, [Node]) - end - ), - ok = mria:dirty_delete(?ROUTING_NODE, Node), + case mria_rlog:role() of + core -> + global:trans( + {?LOCK, self()}, + fun() -> + mria:transaction(?ROUTE_SHARD, fun ?MODULE:cleanup_routes/1, [Node]) + end + ), + ok = mria:dirty_delete(?ROUTING_NODE, Node); + replicant -> + ok + end, ?tp(emqx_router_helper_cleanup_done, #{node => Node}), {noreply, State#{nodes := lists:delete(Node, Nodes)}, hibernate}; handle_info({membership, {mnesia, down, Node}}, State) -> diff --git a/apps/emqx/src/emqx_router_sup.erl b/apps/emqx/src/emqx_router_sup.erl index d0e5ea05a..0fa48d9d2 100644 --- a/apps/emqx/src/emqx_router_sup.erl +++ b/apps/emqx/src/emqx_router_sup.erl @@ -41,4 +41,9 @@ init([]) -> hash, {emqx_router, start_link, []} ]), - {ok, {{one_for_all, 0, 1}, [Helper, RouterPool]}}. + SupFlags = #{ + strategy => one_for_one, + intensity => 10, + period => 100 + }, + {ok, {SupFlags, [Helper, RouterPool]}}. 
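Note on the emqx_broker_sup, emqx_cm and emqx_kernel_sup hunks above: they all apply the same pattern, where a value that used to be fixed at start-up is now read from node-level configuration, with a built-in fallback so existing deployments need not set anything. A minimal sketch of that pattern, reusing only the emqx:get_config/2 and emqx_vm:schedulers/0 calls that already appear in those hunks (the wrapper function names here are hypothetical, not part of the patch):

    %% Illustrative sketch, not part of the patch: emqx:get_config/2 returns its
    %% second argument when the key is absent from the node configuration, so
    %% nodes that do not set these options keep a schedulers-based default.
    -spec broker_pool_size() -> pos_integer().
    broker_pool_size() ->
        emqx:get_config([node, broker_pool_size], emqx_vm:schedulers() * 2).

    -spec generic_pool_size() -> pos_integer().
    generic_pool_size() ->
        emqx:get_config([node, generic_pool_size], emqx_vm:schedulers()).

Presumably these surface as node.broker_pool_size and node.generic_pool_size in emqx.conf, alongside the node.channel_cleanup_batch_size override used in emqx_cm above.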
diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 5cab3cbc5..e6bff790e 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -2017,6 +2017,14 @@ common_ssl_opts_schema(Defaults, Type) -> desc => ?DESC(common_ssl_opts_schema_cacertfile) } )}, + {"cacerts", + sc( + boolean(), + #{ + default => false, + deprecated => {since, "5.1.4"} + } + )}, {"certfile", sc( binary(), diff --git a/apps/emqx_authn/src/emqx_authn.app.src b/apps/emqx_authn/src/emqx_authn.app.src index c4cacca80..4ab86ef4a 100644 --- a/apps/emqx_authn/src/emqx_authn.app.src +++ b/apps/emqx_authn/src/emqx_authn.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_authn, [ {description, "EMQX Authentication"}, - {vsn, "0.1.23"}, + {vsn, "0.1.24"}, {modules, []}, {registered, [emqx_authn_sup, emqx_authn_registry]}, {applications, [ diff --git a/apps/emqx_authn/src/emqx_authn.erl b/apps/emqx_authn/src/emqx_authn.erl index 50287941e..ed6f0a095 100644 --- a/apps/emqx_authn/src/emqx_authn.erl +++ b/apps/emqx_authn/src/emqx_authn.erl @@ -40,7 +40,8 @@ providers() -> {{password_based, http}, emqx_authn_http}, {jwt, emqx_authn_jwt}, {{scram, built_in_database}, emqx_enhanced_authn_scram_mnesia} - ]. + ] ++ + emqx_authn_enterprise:providers(). check_config(Config) -> check_config(Config, #{}). diff --git a/apps/emqx_authn/src/emqx_authn_api.erl b/apps/emqx_authn/src/emqx_authn_api.erl index 35d2caa96..fa9f6c820 100644 --- a/apps/emqx_authn/src/emqx_authn_api.erl +++ b/apps/emqx_authn/src/emqx_authn_api.erl @@ -876,7 +876,8 @@ resource_provider() -> emqx_authn_mongodb, emqx_authn_redis, emqx_authn_http - ]. + ] ++ + emqx_authn_enterprise:resource_provider(). lookup_from_local_node(ChainName, AuthenticatorID) -> NodeId = node(self()), diff --git a/apps/emqx_authn/src/emqx_authn_enterprise.erl b/apps/emqx_authn/src/emqx_authn_enterprise.erl new file mode 100644 index 000000000..029872694 --- /dev/null +++ b/apps/emqx_authn/src/emqx_authn_enterprise.erl @@ -0,0 +1,24 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_authn_enterprise). + +-export([providers/0, resource_provider/0]). + +-if(?EMQX_RELEASE_EDITION == ee). + +providers() -> + [{{password_based, ldap}, emqx_ldap_authn}]. + +resource_provider() -> + [emqx_ldap_authn]. + +-else. + +providers() -> + []. + +resource_provider() -> + []. +-endif. diff --git a/apps/emqx_authn/src/emqx_authn_utils.erl b/apps/emqx_authn/src/emqx_authn_utils.erl index 8e168bb5d..7fc20995a 100644 --- a/apps/emqx_authn/src/emqx_authn_utils.erl +++ b/apps/emqx_authn/src/emqx_authn_utils.erl @@ -35,7 +35,8 @@ ensure_apps_started/1, cleanup_resources/0, make_resource_id/1, - without_password/1 + without_password/1, + to_bool/1 ]). -define(AUTHN_PLACEHOLDERS, [ @@ -144,47 +145,8 @@ render_sql_params(ParamList, Credential) -> #{return => rawlist, var_trans => fun handle_sql_var/2} ). 
-%% true -is_superuser(#{<<"is_superuser">> := <<"true">>}) -> - #{is_superuser => true}; -is_superuser(#{<<"is_superuser">> := true}) -> - #{is_superuser => true}; -is_superuser(#{<<"is_superuser">> := <<"1">>}) -> - #{is_superuser => true}; -is_superuser(#{<<"is_superuser">> := I}) when - is_integer(I) andalso I >= 1 --> - #{is_superuser => true}; -%% false -is_superuser(#{<<"is_superuser">> := <<"">>}) -> - #{is_superuser => false}; -is_superuser(#{<<"is_superuser">> := <<"0">>}) -> - #{is_superuser => false}; -is_superuser(#{<<"is_superuser">> := 0}) -> - #{is_superuser => false}; -is_superuser(#{<<"is_superuser">> := null}) -> - #{is_superuser => false}; -is_superuser(#{<<"is_superuser">> := undefined}) -> - #{is_superuser => false}; -is_superuser(#{<<"is_superuser">> := <<"false">>}) -> - #{is_superuser => false}; -is_superuser(#{<<"is_superuser">> := false}) -> - #{is_superuser => false}; -is_superuser(#{<<"is_superuser">> := MaybeBinInt}) when - is_binary(MaybeBinInt) --> - try binary_to_integer(MaybeBinInt) of - Int when Int >= 1 -> - #{is_superuser => true}; - Int when Int =< 0 -> - #{is_superuser => false} - catch - error:badarg -> - #{is_superuser => false} - end; -%% fallback to default -is_superuser(#{<<"is_superuser">> := _}) -> - #{is_superuser => false}; +is_superuser(#{<<"is_superuser">> := Value}) -> + #{is_superuser => to_bool(Value)}; is_superuser(#{}) -> #{is_superuser => false}. @@ -211,6 +173,40 @@ make_resource_id(Name) -> without_password(Credential) -> without_password(Credential, [password, <<"password">>]). +to_bool(<<"true">>) -> + true; +to_bool(true) -> + true; +to_bool(<<"1">>) -> + true; +to_bool(I) when is_integer(I) andalso I >= 1 -> + true; +%% false +to_bool(<<"">>) -> + false; +to_bool(<<"0">>) -> + false; +to_bool(0) -> + false; +to_bool(null) -> + false; +to_bool(undefined) -> + false; +to_bool(<<"false">>) -> + false; +to_bool(false) -> + false; +to_bool(MaybeBinInt) when is_binary(MaybeBinInt) -> + try + binary_to_integer(MaybeBinInt) >= 1 + catch + error:badarg -> + false + end; +%% fallback to default +to_bool(_) -> + false. + %%-------------------------------------------------------------------- %% Internal functions %%-------------------------------------------------------------------- diff --git a/apps/emqx_authz/src/emqx_authz.app.src b/apps/emqx_authz/src/emqx_authz.app.src index 3311d5983..9de573795 100644 --- a/apps/emqx_authz/src/emqx_authz.app.src +++ b/apps/emqx_authz/src/emqx_authz.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_authz, [ {description, "An OTP application"}, - {vsn, "0.1.24"}, + {vsn, "0.1.25"}, {registered, []}, {mod, {emqx_authz_app, []}}, {applications, [ diff --git a/apps/emqx_authz/src/emqx_authz.erl b/apps/emqx_authz/src/emqx_authz.erl index 0419bcf72..1398ef8e9 100644 --- a/apps/emqx_authz/src/emqx_authz.erl +++ b/apps/emqx_authz/src/emqx_authz.erl @@ -19,6 +19,8 @@ -behaviour(emqx_config_handler). -behaviour(emqx_config_backup). +-dialyzer({nowarn_function, [authz_module/1]}). + -include("emqx_authz.hrl"). -include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_hooks.hrl"). @@ -571,7 +573,12 @@ find_action_in_hooks() -> authz_module(built_in_database) -> emqx_authz_mnesia; authz_module(Type) -> - list_to_existing_atom("emqx_authz_" ++ atom_to_list(Type)). + case emqx_authz_enterprise:is_enterprise_module(Type) of + {ok, Module} -> + Module; + _ -> + list_to_existing_atom("emqx_authz_" ++ atom_to_list(Type)) + end. 
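    %% --- Illustrative sketch, not part of the patch ---------------------------
    %% Expected truth table of the emqx_authn_utils:to_bool/1 helper factored out
    %% of is_superuser/1 earlier in this patch, written as a eunit case; the
    %% module name below is hypothetical.
    -module(emqx_authn_to_bool_example_tests).
    -include_lib("eunit/include/eunit.hrl").

    to_bool_test() ->
        ?assert(emqx_authn_utils:to_bool(<<"true">>)),
        ?assert(emqx_authn_utils:to_bool(1)),
        %% numeric binaries >= 1 are truthy
        ?assert(emqx_authn_utils:to_bool(<<"2">>)),
        ?assertNot(emqx_authn_utils:to_bool(<<"0">>)),
        ?assertNot(emqx_authn_utils:to_bool(<<>>)),
        ?assertNot(emqx_authn_utils:to_bool(null)),
        ?assertNot(emqx_authn_utils:to_bool(undefined)),
        %% non-integer binaries fall back to false
        ?assertNot(emqx_authn_utils:to_bool(<<"oops">>)).
    %% ---------------------------------------------------------------------------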
type(#{type := Type}) -> type(Type); type(#{<<"type">> := Type}) -> type(Type); @@ -591,8 +598,7 @@ type(built_in_database) -> built_in_database; type(<<"built_in_database">>) -> built_in_database; type(client_info) -> client_info; type(<<"client_info">>) -> client_info; -%% should never happen if the input is type-checked by hocon schema -type(Unknown) -> throw({unknown_authz_source_type, Unknown}). +type(MaybeEnterprise) -> emqx_authz_enterprise:type(MaybeEnterprise). maybe_write_files(#{<<"type">> := <<"file">>} = Source) -> write_acl_file(Source); diff --git a/apps/emqx_authz/src/emqx_authz_api_schema.erl b/apps/emqx_authz/src/emqx_authz_api_schema.erl index 29433e421..7aa34c266 100644 --- a/apps/emqx_authz/src/emqx_authz_api_schema.erl +++ b/apps/emqx_authz/src/emqx_authz_api_schema.erl @@ -95,7 +95,9 @@ fields(position) -> in => body } )} - ]. + ]; +fields(MaybeEnterprise) -> + emqx_authz_enterprise:fields(MaybeEnterprise). %%------------------------------------------------------------------------------ %% http type funcs @@ -283,7 +285,7 @@ authz_sources_types(Type) -> mysql, postgresql, file - ]. + ] ++ emqx_authz_enterprise:authz_sources_types(). to_list(A) when is_atom(A) -> atom_to_list(A); diff --git a/apps/emqx_authz/src/emqx_authz_enterprise.erl b/apps/emqx_authz/src/emqx_authz_enterprise.erl new file mode 100644 index 000000000..7362a003a --- /dev/null +++ b/apps/emqx_authz/src/emqx_authz_enterprise.erl @@ -0,0 +1,66 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_authz_enterprise). + +-export([ + type_names/0, + fields/1, + is_enterprise_module/1, + authz_sources_types/0, + type/1, + desc/1 +]). + +-if(?EMQX_RELEASE_EDITION == ee). + +%% type name set +type_names() -> + [ldap]. + +%% type -> type schema +fields(ldap) -> + emqx_ldap_authz:fields(config). + +%% type -> type module +is_enterprise_module(ldap) -> + {ok, emqx_ldap_authz}; +is_enterprise_module(_) -> + false. + +%% api sources set +authz_sources_types() -> + [ldap]. + +%% atom-able name -> type +type(<<"ldap">>) -> ldap; +type(ldap) -> ldap; +type(Unknown) -> throw({unknown_authz_source_type, Unknown}). + +desc(ldap) -> + emqx_ldap_authz:description(); +desc(_) -> + undefined. + +-else. + +-dialyzer({nowarn_function, [fields/1, type/1, desc/1]}). + +type_names() -> + []. + +fields(Any) -> + error({invalid_field, Any}). + +is_enterprise_module(_) -> + false. + +authz_sources_types() -> + []. + +%% should never happen if the input is type-checked by hocon schema +type(Unknown) -> throw({unknown_authz_source_type, Unknown}). + +desc(_) -> + undefined. +-endif. diff --git a/apps/emqx_authz/src/emqx_authz_schema.erl b/apps/emqx_authz/src/emqx_authz_schema.erl index 9e02e8a32..6aa04cdc1 100644 --- a/apps/emqx_authz/src/emqx_authz_schema.erl +++ b/apps/emqx_authz/src/emqx_authz_schema.erl @@ -43,7 +43,8 @@ -export([ headers_no_content_type/1, headers/1, - default_authz/0 + default_authz/0, + authz_common_fields/1 ]). %%-------------------------------------------------------------------- @@ -64,7 +65,8 @@ type_names() -> redis_single, redis_sentinel, redis_cluster - ]. + ] ++ + emqx_authz_enterprise:type_names(). namespace() -> authz. @@ -176,7 +178,9 @@ fields("node_error") -> [ node_name(), {"error", ?HOCON(string(), #{desc => ?DESC("node_error")})} - ]. + ]; +fields(MaybeEnterprise) -> + emqx_authz_enterprise:fields(MaybeEnterprise). 
common_field() -> [ @@ -220,8 +224,8 @@ desc(redis_sentinel) -> ?DESC(redis_sentinel); desc(redis_cluster) -> ?DESC(redis_cluster); -desc(_) -> - undefined. +desc(MaybeEnterprise) -> + emqx_authz_enterprise:desc(MaybeEnterprise). authz_common_fields(Type) -> [ diff --git a/apps/emqx_bridge/src/emqx_bridge.app.src b/apps/emqx_bridge/src/emqx_bridge.app.src index c9ea7d6bc..96d953e34 100644 --- a/apps/emqx_bridge/src/emqx_bridge.app.src +++ b/apps/emqx_bridge/src/emqx_bridge.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge, [ {description, "EMQX bridges"}, - {vsn, "0.1.25"}, + {vsn, "0.1.26"}, {registered, [emqx_bridge_sup]}, {mod, {emqx_bridge_app, []}}, {applications, [ diff --git a/apps/emqx_bridge/src/emqx_bridge_api.erl b/apps/emqx_bridge/src/emqx_bridge_api.erl index e1c3ee987..3190a2ef9 100644 --- a/apps/emqx_bridge/src/emqx_bridge_api.erl +++ b/apps/emqx_bridge/src/emqx_bridge_api.erl @@ -544,18 +544,20 @@ schema("/bridges_probe") -> case emqx_bridge_resource:create_dry_run(ConnType, maps:remove(<<"type">>, Params1)) of ok -> ?NO_CONTENT; - {error, #{kind := validation_error} = Reason} -> + {error, #{kind := validation_error} = Reason0} -> + Reason = redact(Reason0), ?BAD_REQUEST('TEST_FAILED', map_to_json(Reason)); {error, Reason0} when not is_tuple(Reason0); element(1, Reason0) =/= 'exit' -> - Reason = + Reason1 = case Reason0 of {unhealthy_target, Message} -> Message; _ -> Reason0 end, + Reason = redact(Reason1), ?BAD_REQUEST('TEST_FAILED', Reason) end; BadRequest -> - BadRequest + redact(BadRequest) end. maybe_deobfuscate_bridge_probe(#{<<"type">> := BridgeType, <<"name">> := BridgeName} = Params) -> @@ -608,7 +610,7 @@ create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) -> {ok, _} -> lookup_from_all_nodes(BridgeType, BridgeName, HttpStatusCode); {error, Reason} when is_map(Reason) -> - ?BAD_REQUEST(map_to_json(emqx_utils:redact(Reason))) + ?BAD_REQUEST(map_to_json(redact(Reason))) end. get_metrics_from_local_node(BridgeType, BridgeName) -> @@ -990,7 +992,9 @@ call_operation(NodeOrAll, OperFunc, Args = [_Nodes, BridgeType, BridgeName]) -> {error, timeout} -> ?BAD_REQUEST(<<"Request timeout">>); {error, {start_pool_failed, Name, Reason}} -> - Msg = bin(io_lib:format("Failed to start ~p pool for reason ~p", [Name, Reason])), + Msg = bin( + io_lib:format("Failed to start ~p pool for reason ~p", [Name, redact(Reason)]) + ), ?BAD_REQUEST(Msg); {error, not_found} -> BridgeId = emqx_bridge_resource:bridge_id(BridgeType, BridgeName), @@ -1007,7 +1011,7 @@ call_operation(NodeOrAll, OperFunc, Args = [_Nodes, BridgeType, BridgeName]) -> {error, {unhealthy_target, Message}} -> ?BAD_REQUEST(Message); {error, Reason} when not is_tuple(Reason); element(1, Reason) =/= 'exit' -> - ?BAD_REQUEST(Reason) + ?BAD_REQUEST(redact(Reason)) end. maybe_try_restart(all, start_bridges_to_all_nodes, Args) -> @@ -1071,7 +1075,15 @@ deobfuscate(NewConf, OldConf) -> NewConf ). -map_to_json(M) -> - emqx_utils_json:encode( - emqx_utils_maps:jsonable_map(M, fun(K, V) -> {K, emqx_utils_maps:binary_string(V)} end) - ). +map_to_json(M0) -> + %% When dealing with Hocon validation errors, `value' might contain non-serializable + %% values (e.g.: user_lookup_fun), so we try again without that key if serialization + %% fails as a best effort. + M1 = emqx_utils_maps:jsonable_map(M0, fun(K, V) -> {K, emqx_utils_maps:binary_string(V)} end), + try + emqx_utils_json:encode(M1) + catch + error:_ -> + M2 = maps:without([value, <<"value">>], M1), + emqx_utils_json:encode(M2) + end. 
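The best-effort retry in map_to_json/1 above can be pictured with a small sketch (illustration only; the anonymous fun stands in for a non-serializable field such as a user_lookup_fun):

%% A Hocon validation error may carry a term the JSON encoder rejects:
Reason = #{kind => validation_error, value => fun(_) -> ok end},
%% emqx_utils_json:encode(Reason) fails on the fun, so the fallback drops the
%% offending key and encodes the rest of the error instead:
emqx_utils_json:encode(maps:without([value, <<"value">>], Reason)).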
diff --git a/apps/emqx_bridge/src/emqx_bridge_resource.erl b/apps/emqx_bridge/src/emqx_bridge_resource.erl index cd5fd2d24..a48d0294e 100644 --- a/apps/emqx_bridge/src/emqx_bridge_resource.erl +++ b/apps/emqx_bridge/src/emqx_bridge_resource.erl @@ -261,21 +261,31 @@ recreate(Type, Name, Conf, Opts) -> create_dry_run(Type, Conf0) -> TmpName = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]), TmpPath = emqx_utils:safe_filename(TmpName), - Conf = emqx_utils_maps:safe_atom_key_map(Conf0), - case emqx_connector_ssl:convert_certs(TmpPath, Conf) of - {error, Reason} -> - {error, Reason}; - {ok, ConfNew} -> - try + %% Already typechecked, no need to catch errors + TypeBin = bin(Type), + TypeAtom = safe_atom(Type), + Conf1 = maps:without([<<"name">>], Conf0), + RawConf = #{<<"bridges">> => #{TypeBin => #{<<"temp_name">> => Conf1}}}, + try + #{bridges := #{TypeAtom := #{temp_name := Conf}}} = + hocon_tconf:check_plain( + emqx_bridge_schema, + RawConf, + #{atom_key => true, required => false} + ), + case emqx_connector_ssl:convert_certs(TmpPath, Conf) of + {error, Reason} -> + {error, Reason}; + {ok, ConfNew} -> ParseConf = parse_confs(bin(Type), TmpName, ConfNew), emqx_resource:create_dry_run_local(bridge_to_resource_type(Type), ParseConf) - catch - %% validation errors - throw:Reason -> - {error, Reason} - after - _ = file:del_dir_r(emqx_tls_lib:pem_dir(TmpPath)) - end + end + catch + %% validation errors + throw:Reason1 -> + {error, Reason1} + after + _ = file:del_dir_r(emqx_tls_lib:pem_dir(TmpPath)) end. remove(BridgeId) -> @@ -415,6 +425,9 @@ bin(Bin) when is_binary(Bin) -> Bin; bin(Str) when is_list(Str) -> list_to_binary(Str); bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8). +safe_atom(Bin) when is_binary(Bin) -> binary_to_existing_atom(Bin, utf8); +safe_atom(Atom) when is_atom(Atom) -> Atom. + parse_opts(Conf, Opts0) -> override_start_after_created(Conf, Opts0). diff --git a/apps/emqx_bridge/test/emqx_bridge_testlib.erl b/apps/emqx_bridge/test/emqx_bridge_testlib.erl index fc35449a7..1c0a3957a 100644 --- a/apps/emqx_bridge/test/emqx_bridge_testlib.erl +++ b/apps/emqx_bridge/test/emqx_bridge_testlib.erl @@ -212,6 +212,19 @@ probe_bridge_api(BridgeType, BridgeName, BridgeConfig) -> ct:pal("bridge probe result: ~p", [Res]), Res. +try_decode_error(Body0) -> + case emqx_utils_json:safe_decode(Body0, [return_maps]) of + {ok, #{<<"message">> := Msg0} = Body1} -> + case emqx_utils_json:safe_decode(Msg0, [return_maps]) of + {ok, Msg1} -> Body1#{<<"message">> := Msg1}; + {error, _} -> Body1 + end; + {ok, Body1} -> + Body1; + {error, _} -> + Body0 + end. + create_rule_and_action_http(BridgeType, RuleTopic, Config) -> create_rule_and_action_http(BridgeType, RuleTopic, Config, _Opts = #{}). 
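The reworked create_dry_run/2 above now type-checks the raw probe payload against the full bridge schema before converting certificates and running the dry run. A rough sketch of that step (the webhook type and URL are invented for the example; any type known to emqx_bridge_schema would do):

%% Probe payloads arrive with binary keys and no name, so they are wrapped
%% under a temporary bridge name and checked with atom keys:
RawConf = #{
    <<"bridges">> =>
        #{<<"webhook">> => #{<<"temp_name">> => #{<<"url">> => <<"http://127.0.0.1:4000/">>}}}
},
#{bridges := #{webhook := #{temp_name := CheckedConf}}} =
    hocon_tconf:check_plain(emqx_bridge_schema, RawConf, #{atom_key => true, required => false}),
%% CheckedConf now has atom keys and schema defaults filled in, which is the
%% shape parse_confs/3 and emqx_resource:create_dry_run_local/2 expect.

Validation errors are thrown by check_plain and surface as {error, Reason}, which the API layer above now redacts before returning.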
diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src index ba3e86eac..9faf65860 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_gcp_pubsub, [ {description, "EMQX Enterprise GCP Pub/Sub Bridge"}, - {vsn, "0.1.5"}, + {vsn, "0.1.6"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl index 8ef369068..b3792da71 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl @@ -363,9 +363,9 @@ service_account_json_validator(Map) -> {[], <<"service_account">>} -> ok; {[], Type} -> - {error, {wrong_type, Type}}; + {error, #{wrong_type => Type}}; {_, _} -> - {error, {missing_keys, MissingKeys}} + {error, #{missing_keys => MissingKeys}} end. service_account_json_converter(Map) when is_map(Map) -> @@ -382,7 +382,8 @@ service_account_json_converter(Val) -> consumer_topic_mapping_validator(_TopicMapping = []) -> {error, "There must be at least one GCP PubSub-MQTT topic mapping"}; -consumer_topic_mapping_validator(TopicMapping = [_ | _]) -> +consumer_topic_mapping_validator(TopicMapping0 = [_ | _]) -> + TopicMapping = [emqx_utils_maps:binary_key_map(TM) || TM <- TopicMapping0], NumEntries = length(TopicMapping), PubSubTopics = [KT || #{<<"pubsub_topic">> := KT} <- TopicMapping], DistinctPubSubTopics = length(lists:usort(PubSubTopics)), diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_client.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_client.erl index 80283ee73..cb4aa853c 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_client.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_client.erl @@ -220,10 +220,10 @@ parse_jwt_config(ResourceId, #{ service_account_json := ServiceAccountJSON }) -> #{ - project_id := ProjectId, - private_key_id := KId, - private_key := PrivateKeyPEM, - client_email := ServiceAccountEmail + <<"project_id">> := ProjectId, + <<"private_key_id">> := KId, + <<"private_key">> := PrivateKeyPEM, + <<"client_email">> := ServiceAccountEmail } = ServiceAccountJSON, %% fixed for pubsub; trailing slash is important. Aud = <<"https://pubsub.googleapis.com/">>, diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl index 8f67d2678..74ee941ec 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl @@ -64,7 +64,9 @@ callback_mode() -> async_if_possible. query_mode(_Config) -> no_queries. -spec on_start(resource_id(), config()) -> {ok, state()} | {error, term()}. 
-on_start(InstanceId, Config) -> +on_start(InstanceId, Config0) -> + %% ensure it's a binary key map + Config = maps:update_with(service_account_json, fun emqx_utils_maps:binary_key_map/1, Config0), case emqx_bridge_gcp_pubsub_client:start(InstanceId, Config) of {ok, Client} -> start_consumers(InstanceId, Client, Config); @@ -125,7 +127,7 @@ start_consumers(InstanceId, Client, Config) -> consumer := ConsumerConfig0, hookpoint := Hookpoint, resource_opts := #{request_ttl := RequestTTL}, - service_account_json := #{project_id := ProjectId} + service_account_json := #{<<"project_id">> := ProjectId} } = Config, ConsumerConfig1 = maps:update_with(topic_mapping, fun convert_topic_mapping/1, ConsumerConfig0), TopicMapping = maps:get(topic_mapping, ConsumerConfig1), diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_producer.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_producer.erl index 1f87d8343..b1ded2121 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_producer.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_producer.erl @@ -50,15 +50,16 @@ callback_mode() -> async_if_possible. query_mode(_Config) -> async. -spec on_start(resource_id(), config()) -> {ok, state()} | {error, term()}. -on_start(InstanceId, Config) -> +on_start(InstanceId, Config0) -> ?SLOG(info, #{ msg => "starting_gcp_pubsub_bridge", - config => Config + config => Config0 }), + Config = maps:update_with(service_account_json, fun emqx_utils_maps:binary_key_map/1, Config0), #{ payload_template := PayloadTemplate, pubsub_topic := PubSubTopic, - service_account_json := #{project_id := ProjectId} + service_account_json := #{<<"project_id">> := ProjectId} } = Config, case emqx_bridge_gcp_pubsub_client:start(InstanceId, Config) of {ok, Client} -> diff --git a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl index 7d50304b1..8cb0ef2f9 100644 --- a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl +++ b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl @@ -275,14 +275,13 @@ ensure_topic(Config, Topic) -> start_control_client() -> RawServiceAccount = emqx_bridge_gcp_pubsub_utils:generate_service_account_json(), - ServiceAccount = emqx_utils_maps:unsafe_atom_key_map(RawServiceAccount), ConnectorConfig = #{ connect_timeout => 5_000, max_retries => 0, pool_size => 1, resource_opts => #{request_ttl => 5_000}, - service_account_json => ServiceAccount + service_account_json => RawServiceAccount }, PoolName = <<"control_connector">>, {ok, Client} = emqx_bridge_gcp_pubsub_client:start(PoolName, ConnectorConfig), diff --git a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_producer_SUITE.erl b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_producer_SUITE.erl index cf992bb23..a9bbf6178 100644 --- a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_producer_SUITE.erl +++ b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_producer_SUITE.erl @@ -196,16 +196,27 @@ create_bridge_http(Config, GCPPubSubConfigOverrides) -> Path = emqx_mgmt_api_test_util:api_path(["bridges"]), AuthHeader = emqx_mgmt_api_test_util:auth_header_(), ProbePath = emqx_mgmt_api_test_util:api_path(["bridges_probe"]), - ProbeResult = emqx_mgmt_api_test_util:request_api(post, ProbePath, "", AuthHeader, Params), + Opts = #{return_all => true}, + ProbeResult = emqx_mgmt_api_test_util:request_api( + post, ProbePath, "", 
AuthHeader, Params, Opts + ), ct:pal("creating bridge (via http): ~p", [Params]), ct:pal("probe result: ~p", [ProbeResult]), Res = - case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params) of - {ok, Res0} -> {ok, emqx_utils_json:decode(Res0, [return_maps])}; - Error -> Error + case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of + {ok, {Status, Headhers, Res0}} -> + {ok, {Status, Headhers, emqx_utils_json:decode(Res0, [return_maps])}}; + {error, {Status, Headers, Body0}} -> + {error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}}; + Error -> + Error end, ct:pal("bridge creation result: ~p", [Res]), ?assertEqual(element(1, ProbeResult), element(1, Res)), + case ProbeResult of + {error, {{_, 500, _}, _, _}} -> error({bad_probe_result, ProbeResult}); + _ -> ok + end, Res. create_rule_and_action_http(Config) -> @@ -821,7 +832,7 @@ t_not_of_service_account_type(Config) -> ?assertMatch( {error, #{ kind := validation_error, - reason := {wrong_type, <<"not a service account">>}, + reason := #{wrong_type := <<"not a service account">>}, %% should be censored as it contains secrets value := <<"******">> }}, @@ -832,6 +843,23 @@ t_not_of_service_account_type(Config) -> } ) ), + ?assertMatch( + {error, + {{_, 400, _}, _, #{ + <<"message">> := #{ + <<"kind">> := <<"validation_error">>, + <<"reason">> := #{<<"wrong_type">> := <<"not a service account">>}, + %% should be censored as it contains secrets + <<"value">> := <<"******">> + } + }}}, + create_bridge_http( + Config, + #{ + <<"service_account_json">> => #{<<"type">> => <<"not a service account">>} + } + ) + ), ok. t_json_missing_fields(Config) -> @@ -840,13 +868,15 @@ t_json_missing_fields(Config) -> {error, #{ kind := validation_error, reason := - {missing_keys, [ - <<"client_email">>, - <<"private_key">>, - <<"private_key_id">>, - <<"project_id">>, - <<"type">> - ]}, + #{ + missing_keys := [ + <<"client_email">>, + <<"private_key">>, + <<"private_key_id">>, + <<"project_id">>, + <<"type">> + ] + }, %% should be censored as it contains secrets value := <<"******">> }}, @@ -855,6 +885,30 @@ t_json_missing_fields(Config) -> | Config ]) ), + ?assertMatch( + {error, + {{_, 400, _}, _, #{ + <<"message">> := #{ + <<"kind">> := <<"validation_error">>, + <<"reason">> := + #{ + <<"missing_keys">> := [ + <<"client_email">>, + <<"private_key">>, + <<"private_key_id">>, + <<"project_id">>, + <<"type">> + ] + }, + %% should be censored as it contains secrets + <<"value">> := <<"******">> + } + }}}, + create_bridge_http([ + {gcp_pubsub_config, GCPPubSubConfig0#{<<"service_account_json">> := #{}}} + | Config + ]) + ), ok. 
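The gcp_pubsub source changes earlier treat service_account_json uniformly as a binary-keyed map, which is why on_start/2 now calls emqx_utils_maps:binary_key_map/1 and parse_jwt_config/1 matches <<"project_id">> instead of project_id. A one-line sketch (the project id value is made up):

%% Atom-keyed input (stored config) and binary-keyed input (HTTP probe
%% payload) both end up in the same shape after normalization:
#{<<"project_id">> := <<"my-project">>} =
    emqx_utils_maps:binary_key_map(#{project_id => <<"my-project">>}).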
t_invalid_private_key(Config) -> diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src index c048a0d0c..fa7e0d4af 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_greptimedb, [ {description, "EMQX GreptimeDB Bridge"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl index 4be100594..89fad78d2 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl @@ -53,6 +53,8 @@ -define(AUTO_RECONNECT_S, 1). +-define(CONNECT_TIMEOUT, 5_000). + %% ------------------------------------------------------------------------------------------------- %% resource callback callback_mode() -> always_sync. @@ -251,6 +253,12 @@ do_start_client( {error, Reason} end. +grpc_config() -> + #{ + sync_start => true, + connect_timeout => ?CONNECT_TIMEOUT + }. + client_config( InstId, Config = #{ @@ -264,6 +272,7 @@ client_config( {pool, InstId}, {pool_type, random}, {auto_reconnect, ?AUTO_RECONNECT_S}, + {gprc_options, grpc_config()}, {timeunit, maps:get(precision, Config, ms)} ] ++ protocol_config(Config). diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http.app.src b/apps/emqx_bridge_http/src/emqx_bridge_http.app.src index 859f80f53..3849747c7 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http.app.src +++ b/apps/emqx_bridge_http/src/emqx_bridge_http.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_http, [ {description, "EMQX HTTP Bridge and Connector Application"}, - {vsn, "0.1.1"}, + {vsn, "0.1.2"}, {registered, []}, {applications, [kernel, stdlib, emqx_connector, emqx_resource, ehttpc]}, {env, []}, diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl index 6e58505c4..42eddbeef 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl +++ b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl @@ -155,7 +155,16 @@ desc("request") -> desc(_) -> undefined. 
-validate_method(M) when M =:= <<"post">>; M =:= <<"put">>; M =:= <<"get">>; M =:= <<"delete">> -> +validate_method(M) when + M =:= <<"post">>; + M =:= <<"put">>; + M =:= <<"get">>; + M =:= <<"delete">>; + M =:= post; + M =:= put; + M =:= get; + M =:= delete +-> ok; validate_method(M) -> case string:find(M, "${") of diff --git a/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl b/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl index 3f3a3f62e..5395460b8 100644 --- a/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl +++ b/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl @@ -82,6 +82,14 @@ init_per_testcase(t_rule_action_expired, Config) -> {bridge_name, ?BRIDGE_NAME} | Config ]; +init_per_testcase(t_bridge_probes_header_atoms, Config) -> + HTTPPath = <<"/path">>, + ServerSSLOpts = false, + {ok, {HTTPPort, _Pid}} = emqx_bridge_http_connector_test_server:start_link( + _Port = random, HTTPPath, ServerSSLOpts + ), + ok = emqx_bridge_http_connector_test_server:set_handler(success_http_handler()), + [{http_server, #{port => HTTPPort, path => HTTPPath}} | Config]; init_per_testcase(_TestCase, Config) -> Server = start_http_server(#{response_delay_ms => 0}), [{http_server, Server} | Config]. @@ -89,7 +97,8 @@ init_per_testcase(_TestCase, Config) -> end_per_testcase(TestCase, _Config) when TestCase =:= t_path_not_found; TestCase =:= t_too_many_requests; - TestCase =:= t_rule_action_expired + TestCase =:= t_rule_action_expired; + TestCase =:= t_bridge_probes_header_atoms -> ok = emqx_bridge_http_connector_test_server:stop(), persistent_term:erase({?MODULE, times_called}), @@ -292,6 +301,22 @@ make_bridge(Config) -> ), emqx_bridge_resource:bridge_id(Type, Name). +success_http_handler() -> + TestPid = self(), + fun(Req0, State) -> + {ok, Body, Req} = cowboy_req:read_body(Req0), + Headers = cowboy_req:headers(Req), + ct:pal("http request received: ~p", [#{body => Body, headers => Headers}]), + TestPid ! {http, Headers, Body}, + Rep = cowboy_req:reply( + 200, + #{<<"content-type">> => <<"text/plain">>}, + <<"hello">>, + Req + ), + {ok, Rep, State} + end. + not_found_http_handler() -> TestPid = self(), fun(Req0, State) -> @@ -613,6 +638,55 @@ t_rule_action_expired(Config) -> ), ok. +t_bridge_probes_header_atoms(Config) -> + #{port := Port, path := Path} = ?config(http_server, Config), + ?check_trace( + begin + LocalTopic = <<"t/local/topic">>, + BridgeConfig0 = bridge_async_config(#{ + type => ?BRIDGE_TYPE, + name => ?BRIDGE_NAME, + port => Port, + path => Path, + resume_interval => "100ms", + connect_timeout => "1s", + request_timeout => "100ms", + resource_request_ttl => "100ms", + local_topic => LocalTopic + }), + BridgeConfig = BridgeConfig0#{ + <<"headers">> => #{ + <<"some-non-existent-atom">> => <<"x">> + } + }, + ?assertMatch( + {ok, {{_, 204, _}, _Headers, _Body}}, + probe_bridge_api(BridgeConfig) + ), + ?assertMatch( + {ok, {{_, 201, _}, _Headers, _Body}}, + emqx_bridge_testlib:create_bridge_api( + ?BRIDGE_TYPE, + ?BRIDGE_NAME, + BridgeConfig + ) + ), + Msg = emqx_message:make(LocalTopic, <<"hi">>), + emqx:publish(Msg), + receive + {http, Headers, _Body} -> + ?assertMatch(#{<<"some-non-existent-atom">> := <<"x">>}, Headers), + ok + after 5_000 -> + ct:pal("mailbox: ~p", [process_info(self(), messages)]), + ct:fail("request not made") + end, + ok + end, + [] + ), + ok. 
+ %% helpers do_t_async_retries(TestContext, Error, Fn) -> #{error_attempts := ErrorAttempts} = TestContext, @@ -659,3 +733,17 @@ remove_message_id(MessageIDs, #{body := IDBin}) -> ID = erlang:binary_to_integer(IDBin), %% It is acceptable to get the same message more than once maps:without([ID], MessageIDs). + +probe_bridge_api(BridgeConfig) -> + Params = BridgeConfig#{<<"type">> => ?BRIDGE_TYPE, <<"name">> => ?BRIDGE_NAME}, + Path = emqx_mgmt_api_test_util:api_path(["bridges_probe"]), + AuthHeader = emqx_mgmt_api_test_util:auth_header_(), + Opts = #{return_all => true}, + ct:pal("probing bridge (via http): ~p", [Params]), + Res = + case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of + {ok, {{_, 204, _}, _Headers, _Body0} = Res0} -> {ok, Res0}; + Error -> Error + end, + ct:pal("bridge probe result: ~p", [Res]), + Res. diff --git a/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl b/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl index f5b6b1f46..6b5c2b0cd 100644 --- a/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl +++ b/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl @@ -91,3 +91,121 @@ is_unwrapped_headers(Headers) -> is_unwrapped_header({_, V}) when is_function(V) -> false; is_unwrapped_header({_, [{str, _V}]}) -> throw(unexpected_tmpl_token); is_unwrapped_header(_) -> true. + +method_validator_test() -> + Conf0 = parse(webhook_config_hocon()), + ?assertMatch( + #{<<"method">> := _}, + emqx_utils_maps:deep_get([<<"bridges">>, <<"webhook">>, <<"a">>], Conf0) + ), + lists:foreach( + fun(Method) -> + Conf1 = emqx_utils_maps:deep_put( + [<<"bridges">>, <<"webhook">>, <<"a">>, <<"method">>], + Conf0, + Method + ), + ?assertMatch( + #{}, + check(Conf1), + #{method => Method} + ), + ?assertMatch( + #{}, + check_atom_key(Conf1), + #{method => Method} + ), + ok + end, + [<<"post">>, <<"put">>, <<"get">>, <<"delete">>] + ), + lists:foreach( + fun(Method) -> + Conf1 = emqx_utils_maps:deep_put( + [<<"bridges">>, <<"webhook">>, <<"a">>, <<"method">>], + Conf0, + Method + ), + ?assertThrow( + {_, [ + #{ + kind := validation_error, + reason := not_a_enum_symbol + } + ]}, + check(Conf1), + #{method => Method} + ), + ?assertThrow( + {_, [ + #{ + kind := validation_error, + reason := not_a_enum_symbol + } + ]}, + check_atom_key(Conf1), + #{method => Method} + ), + ok + end, + [<<"x">>, <<"patch">>, <<"options">>] + ), + ok. + +%%=========================================================================== +%% Helper functions +%%=========================================================================== + +parse(Hocon) -> + {ok, Conf} = hocon:binary(Hocon), + Conf. + +%% what bridge creation does +check(Conf) when is_map(Conf) -> + hocon_tconf:check_plain(emqx_bridge_schema, Conf). + +%% what bridge probe does +check_atom_key(Conf) when is_map(Conf) -> + hocon_tconf:check_plain(emqx_bridge_schema, Conf, #{atom_key => true, required => false}). 
+ +%%=========================================================================== +%% Data section +%%=========================================================================== + +%% erlfmt-ignore +webhook_config_hocon() -> +""" +bridges.webhook.a { + body = \"${.}\" + connect_timeout = 15s + enable = false + enable_pipelining = 100 + headers {content-type = \"application/json\", jjjjjjjjjjjjjjjjjjj = jjjjjjj} + max_retries = 2 + method = post + pool_size = 8 + pool_type = random + resource_opts { + health_check_interval = 15s + inflight_window = 100 + max_buffer_bytes = 1GB + query_mode = async + request_ttl = 45s + start_after_created = true + start_timeout = 5s + worker_pool_size = 4 + } + ssl { + ciphers = [] + depth = 10 + enable = false + hibernate_after = 5s + log_level = notice + reuse_sessions = true + secure_renegotiate = true + verify = verify_peer + versions = [tlsv1.3, tlsv1.2] + } + url = \"http://some.host:4000/api/echo\" +} +""". diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src index 7157a1580..3792409c6 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_kafka, [ {description, "EMQX Enterprise Kafka Bridge"}, - {vsn, "0.1.6"}, + {vsn, "0.1.7"}, {registered, [emqx_bridge_kafka_consumer_sup]}, {applications, [ kernel, diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl index eeaa7d4b7..544c95b85 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl @@ -125,7 +125,7 @@ values(consumer) -> topic_mapping => [ #{ kafka_topic => <<"kafka-topic-1">>, - mqtt_topic => <<"mqtt/topic/1">>, + mqtt_topic => <<"mqtt/topic/${.offset}">>, qos => 1, payload_template => <<"${.}">> }, @@ -528,7 +528,8 @@ kafka_producer_converter(Config, _HoconOpts) -> consumer_topic_mapping_validator(_TopicMapping = []) -> {error, "There must be at least one Kafka-MQTT topic mapping"}; -consumer_topic_mapping_validator(TopicMapping = [_ | _]) -> +consumer_topic_mapping_validator(TopicMapping0 = [_ | _]) -> + TopicMapping = [emqx_utils_maps:binary_key_map(TM) || TM <- TopicMapping0], NumEntries = length(TopicMapping), KafkaTopics = [KT || #{<<"kafka_topic">> := KT} <- TopicMapping], DistinctKafkaTopics = length(lists:usort(KafkaTopics)), @@ -539,6 +540,13 @@ consumer_topic_mapping_validator(TopicMapping = [_ | _]) -> {error, "Kafka topics must not be repeated in a bridge"} end. 
+producer_strategy_key_validator( + #{ + partition_strategy := _, + message := #{key := _} + } = Conf +) -> + producer_strategy_key_validator(emqx_utils_maps:binary_key_map(Conf)); producer_strategy_key_validator(#{ <<"partition_strategy">> := key_dispatch, <<"message">> := #{<<"key">> := ""} diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl index e18bf7e29..b8abb928c 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl @@ -69,7 +69,7 @@ topic_mapping := #{ kafka_topic() := #{ payload_template := emqx_placeholder:tmpl_token(), - mqtt_topic => emqx_types:topic(), + mqtt_topic_template => emqx_placeholder:tmpl_token(), qos => emqx_types:qos() } }, @@ -83,7 +83,7 @@ topic_mapping := #{ kafka_topic() := #{ payload_template := emqx_placeholder:tmpl_token(), - mqtt_topic => emqx_types:topic(), + mqtt_topic_template => emqx_placeholder:tmpl_token(), qos => emqx_types:qos() } }, @@ -235,7 +235,7 @@ do_handle_message(Message, State) -> value_encoding_mode := ValueEncodingMode } = State, #{ - mqtt_topic := MQTTTopic, + mqtt_topic_template := MQTTTopicTemplate, qos := MQTTQoS, payload_template := PayloadTemplate } = maps:get(KafkaTopic, TopicMapping), @@ -249,6 +249,7 @@ do_handle_message(Message, State) -> value => encode(Message#kafka_message.value, ValueEncodingMode) }, Payload = render(FullMessage, PayloadTemplate), + MQTTTopic = render(FullMessage, MQTTTopicTemplate), MQTTMessage = emqx_message:make(ResourceId, MQTTQoS, MQTTTopic, Payload), _ = emqx:publish(MQTTMessage), emqx:run_hook(Hookpoint, [FullMessage]), @@ -533,15 +534,16 @@ convert_topic_mapping(TopicMappingList) -> fun(Fields, Acc) -> #{ kafka_topic := KafkaTopic, - mqtt_topic := MQTTTopic, + mqtt_topic := MQTTTopicTemplate0, qos := QoS, payload_template := PayloadTemplate0 } = Fields, PayloadTemplate = emqx_placeholder:preproc_tmpl(PayloadTemplate0), + MQTTTopicTemplate = emqx_placeholder:preproc_tmpl(MQTTTopicTemplate0), Acc#{ KafkaTopic => #{ payload_template => PayloadTemplate, - mqtt_topic => MQTTTopic, + mqtt_topic_template => MQTTTopicTemplate, qos => QoS } } diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl index f1f2ce362..2d8355e8e 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl @@ -60,6 +60,7 @@ only_once_tests() -> t_node_joins_existing_cluster, t_cluster_node_down, t_multiple_topic_mappings, + t_dynamic_mqtt_topic, t_resource_manager_crash_after_subscriber_started, t_resource_manager_crash_before_subscriber_started ]. 
@@ -329,6 +330,23 @@ init_per_testcase(t_multiple_topic_mappings = TestCase, Config0) -> ], Config = [{topic_mapping, TopicMapping} | Config0], common_init_per_testcase(TestCase, Config); +init_per_testcase(t_dynamic_mqtt_topic = TestCase, Config0) -> + KafkaTopic = + << + (atom_to_binary(TestCase))/binary, + (integer_to_binary(erlang:unique_integer()))/binary + >>, + TopicMapping = + [ + #{ + kafka_topic => KafkaTopic, + mqtt_topic => <<"${.topic}/${.value}/${.headers.hkey}">>, + qos => 1, + payload_template => <<"${.}">> + } + ], + Config = [{kafka_topic, KafkaTopic}, {topic_mapping, TopicMapping} | Config0], + common_init_per_testcase(TestCase, Config); init_per_testcase(TestCase, Config) -> common_init_per_testcase(TestCase, Config). @@ -336,11 +354,12 @@ common_init_per_testcase(TestCase, Config0) -> ct:timetrap(timer:seconds(60)), delete_all_bridges(), emqx_config:delete_override_conf_files(), - KafkaTopic = + KafkaTopic0 = << (atom_to_binary(TestCase))/binary, (integer_to_binary(erlang:unique_integer()))/binary >>, + KafkaTopic = proplists:get_value(kafka_topic, Config0, KafkaTopic0), KafkaType = ?config(kafka_type, Config0), UniqueNum = integer_to_binary(erlang:unique_integer()), MQTTTopic = proplists:get_value(mqtt_topic, Config0, <<"mqtt/topic/", UniqueNum/binary>>), @@ -1674,6 +1693,78 @@ t_bridge_rule_action_source(Config) -> ), ok. +t_dynamic_mqtt_topic(Config) -> + KafkaTopic = ?config(kafka_topic, Config), + NPartitions = ?config(num_partitions, Config), + ResourceId = resource_id(Config), + Payload = emqx_guid:to_hexstr(emqx_guid:gen()), + MQTTTopic = emqx_topic:join([KafkaTopic, '#']), + ?check_trace( + begin + ?assertMatch( + {ok, _}, + create_bridge(Config) + ), + wait_until_subscribers_are_ready(NPartitions, 40_000), + {ok, C} = emqtt:start_link(), + on_exit(fun() -> emqtt:stop(C) end), + {ok, _} = emqtt:connect(C), + {ok, _, [0]} = emqtt:subscribe(C, MQTTTopic), + ct:pal("subscribed to ~p", [MQTTTopic]), + + {ok, SRef0} = snabbkaffe:subscribe( + ?match_event(#{ + ?snk_kind := kafka_consumer_handle_message, ?snk_span := {complete, _} + }), + _NumMsgs = 3, + 20_000 + ), + {_Partition, _OffsetReply} = + publish(Config, [ + %% this will have the last segment defined + #{ + key => <<"mykey">>, + value => Payload, + headers => [{<<"hkey">>, <<"hvalue">>}] + }, + %% this will not + #{ + key => <<"mykey">>, + value => Payload + }, + %% will inject an invalid topic segment + #{ + key => <<"mykey">>, + value => <<"+">> + } + ]), + {ok, _} = snabbkaffe:receive_events(SRef0), + ok + end, + fun(Trace) -> + ?assertMatch([_Enter, _Complete | _], ?of_kind(kafka_consumer_handle_message, Trace)), + %% the message with invalid topic will fail to be published + Published = receive_published(#{n => 2}), + ExpectedMQTTTopic0 = emqx_topic:join([KafkaTopic, Payload, <<"hvalue">>]), + ExpectedMQTTTopic1 = emqx_topic:join([KafkaTopic, Payload, <<>>]), + ?assertMatch( + [ + #{ + topic := ExpectedMQTTTopic0 + }, + #{ + topic := ExpectedMQTTTopic1 + } + ], + Published + ), + ?assertEqual(3, emqx_resource_metrics:received_get(ResourceId)), + ?assertError({timeout, _}, receive_published(#{timeout => 500})), + ok + end + ), + ok. + %% checks that an existing cluster can be configured with a kafka %% consumer bridge and that the consumers will distribute over the two %% nodes. 
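The consumer now renders the configured mqtt_topic per Kafka message instead of using it verbatim, as the t_dynamic_mqtt_topic case above exercises. A small sketch of the flow (field values are invented; at runtime do_handle_message/2 renders the template against the same map it already uses for the payload template):

%% The configured topic is pre-processed once per topic mapping...
MQTTTopicTemplate = emqx_placeholder:preproc_tmpl(<<"${.topic}/${.value}/${.headers.hkey}">>),
%% ...and rendered against the full Kafka message on every delivery:
FullMessage = #{
    topic => <<"kafka-topic-1">>,
    value => <<"payload">>,
    headers => #{<<"hkey">> => <<"hvalue">>}
},
%% With these values the bridge publishes to <<"kafka-topic-1/payload/hvalue">>.
%% A message whose rendered topic is invalid (e.g. contains '+') fails to
%% publish, which is what the third produced message in the test checks.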
diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl index 3b558200c..367423cd4 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl @@ -166,11 +166,24 @@ message_key_dispatch_validations_test() -> ]}, check(Conf) ), + %% ensure atoms exist + _ = [myproducer], + ?assertThrow( + {_, [ + #{ + path := "bridges.kafka.myproducer.kafka", + reason := "Message key cannot be empty when `key_dispatch` strategy is used" + } + ]}, + check_atom_key(Conf) + ), ok. tcp_keepalive_validation_test_() -> ProducerConf = parse(kafka_producer_new_hocon()), ConsumerConf = parse(kafka_consumer_hocon()), + %% ensure atoms exist + _ = [my_producer, my_consumer], test_keepalive_validation([<<"kafka">>, <<"myproducer">>], ProducerConf) ++ test_keepalive_validation([<<"kafka_consumer">>, <<"my_consumer">>], ConsumerConf). @@ -184,7 +197,9 @@ test_keepalive_validation(Name, Conf) -> InvalidConf2 = emqx_utils_maps:deep_force_put(Path, Conf, <<"5,6,1000">>), InvalidConfs = [InvalidConf, InvalidConf1, InvalidConf2], [?_assertMatch(#{<<"bridges">> := _}, check(C)) || C <- ValidConfs] ++ - [?_assertThrow(_, check(C)) || C <- InvalidConfs]. + [?_assertMatch(#{bridges := _}, check_atom_key(C)) || C <- ValidConfs] ++ + [?_assertThrow(_, check(C)) || C <- InvalidConfs] ++ + [?_assertThrow(_, check_atom_key(C)) || C <- InvalidConfs]. %%=========================================================================== %% Helper functions @@ -194,9 +209,14 @@ parse(Hocon) -> {ok, Conf} = hocon:binary(Hocon), Conf. +%% what bridge creation does check(Conf) when is_map(Conf) -> hocon_tconf:check_plain(emqx_bridge_schema, Conf). +%% what bridge probe does +check_atom_key(Conf) when is_map(Conf) -> + hocon_tconf:check_plain(emqx_bridge_schema, Conf, #{atom_key => true, required => false}). + %%=========================================================================== %% Data section %%=========================================================================== diff --git a/apps/emqx_bridge_oracle/README.md b/apps/emqx_bridge_oracle/README.md index d2974b722..7aea14b43 100644 --- a/apps/emqx_bridge_oracle/README.md +++ b/apps/emqx_bridge_oracle/README.md @@ -6,6 +6,9 @@ It implements the data bridge APIs for interacting with an Oracle Database Bridg # Documentation +- Refer to [Ingest data into Oracle DB](https://docs.emqx.com/en/enterprise/v5.0/data-integration/data-bridge-oracle.html) + for how to use EMQX dashboard to ingest IoT data into Oracle Database. + - Refer to [EMQX Rules](https://docs.emqx.com/en/enterprise/v5.0/data-integration/rules.html) for the EMQX rules engine introduction. 
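The "%% ensure atoms exist" lines added to the Kafka unit tests above (and to the Pulsar tests further down) look odd in isolation; they appear to be needed because the new check_atom_key/1 helper runs hocon_tconf:check_plain with atom_key => true, and that conversion relies on the bridge-name atoms already being known to the VM (compare safe_atom/1 in emqx_bridge_resource above). A tiny sketch of the underlying behaviour (the atom name is arbitrary):

%% Fails with badarg unless the atom already exists somewhere in loaded code:
%%   binary_to_existing_atom(<<"myproducer">>, utf8)
%% Mentioning the atom in a discarded literal is enough to create it:
_ = [myproducer],
myproducer = binary_to_existing_atom(<<"myproducer">>, utf8).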
diff --git a/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.app.src b/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.app.src index 4f46ce464..d68c6ca9a 100644 --- a/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.app.src +++ b/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_oracle, [ {description, "EMQX Enterprise Oracle Database Bridge"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.erl b/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.erl index 46e118c69..15b2be575 100644 --- a/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.erl +++ b/apps/emqx_bridge_oracle/src/emqx_bridge_oracle.erl @@ -108,6 +108,8 @@ type_field(Type) -> name_field() -> {name, hoconsc:mk(binary(), #{required => true, desc => ?DESC("desc_name")})}. +config_validator(#{server := _} = Config) -> + config_validator(emqx_utils_maps:binary_key_map(Config)); config_validator(#{<<"server">> := Server} = Config) when not is_map(Server) andalso not is_map_key(<<"sid">>, Config) andalso diff --git a/apps/emqx_bridge_oracle/test/emqx_bridge_oracle_SUITE.erl b/apps/emqx_bridge_oracle/test/emqx_bridge_oracle_SUITE.erl index bd3ac289c..6b949b047 100644 --- a/apps/emqx_bridge_oracle/test/emqx_bridge_oracle_SUITE.erl +++ b/apps/emqx_bridge_oracle/test/emqx_bridge_oracle_SUITE.erl @@ -305,6 +305,8 @@ create_bridge_api(Config, Overrides) -> case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of {ok, {Status, Headers, Body0}} -> {ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}}; + {error, {Status, Headers, Body0}} -> + {error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}}; Error -> Error end, @@ -348,8 +350,12 @@ probe_bridge_api(Config, Overrides) -> ct:pal("probing bridge (via http): ~p", [Params]), Res = case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of - {ok, {{_, 204, _}, _Headers, _Body0} = Res0} -> {ok, Res0}; - Error -> Error + {ok, {{_, 204, _}, _Headers, _Body0} = Res0} -> + {ok, Res0}; + {error, {Status, Headers, Body0}} -> + {error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}}; + Error -> + Error end, ct:pal("bridge probe result: ~p", [Res]), Res. @@ -630,6 +636,30 @@ t_no_sid_nor_service_name(Config0) -> {error, #{kind := validation_error, reason := "neither SID nor Service Name was set"}}, create_bridge(Config) ), + ?assertMatch( + {error, + {{_, 400, _}, _, #{ + <<"message">> := #{ + <<"kind">> := <<"validation_error">>, + <<"reason">> := <<"neither SID nor Service Name was set">>, + %% should be censored as it contains secrets + <<"value">> := #{<<"password">> := <<"******">>} + } + }}}, + create_bridge_api(Config) + ), + ?assertMatch( + {error, + {{_, 400, _}, _, #{ + <<"message">> := #{ + <<"kind">> := <<"validation_error">>, + <<"reason">> := <<"neither SID nor Service Name was set">>, + %% should be censored as it contains secrets + <<"value">> := #{<<"password">> := <<"******">>} + } + }}}, + probe_bridge_api(Config) + ), ok. t_missing_table(Config) -> diff --git a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl index 89b1c58e0..cd79db43d 100644 --- a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl +++ b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl @@ -10,6 +10,7 @@ -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). 
-include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include("emqx_resource_errors.hrl"). % SQL definitions -define(SQL_BRIDGE, @@ -690,10 +691,14 @@ t_table_removed(Config) -> connect_and_drop_table(Config), Val = integer_to_binary(erlang:unique_integer()), SentData = #{payload => Val, timestamp => 1668602148000}, - ?assertMatch( - {error, {unrecoverable_error, {error, error, <<"42P01">>, undefined_table, _, _}}}, - query_resource_sync(Config, {send_message, SentData, []}) - ), + case query_resource_sync(Config, {send_message, SentData, []}) of + {error, {unrecoverable_error, {error, error, <<"42P01">>, undefined_table, _, _}}} -> + ok; + ?RESOURCE_ERROR_M(not_connected, _) -> + ok; + Res -> + ct:fail("unexpected result: ~p", [Res]) + end, ok end, [] diff --git a/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src b/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src index ed468a833..16c9ce59f 100644 --- a/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src +++ b/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_pulsar, [ {description, "EMQX Pulsar Bridge"}, - {vsn, "0.1.5"}, + {vsn, "0.1.6"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.erl b/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.erl index 2fa5d70cf..beb8452b2 100644 --- a/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.erl +++ b/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.erl @@ -220,6 +220,13 @@ conn_bridge_examples(_Method) -> } ]. +producer_strategy_key_validator( + #{ + strategy := _, + message := #{key := _} + } = Conf +) -> + producer_strategy_key_validator(emqx_utils_maps:binary_key_map(Conf)); producer_strategy_key_validator(#{ <<"strategy">> := key_dispatch, <<"message">> := #{<<"key">> := ""} @@ -257,7 +264,12 @@ override_default(OriginalFn, NewDefault) -> auth_union_member_selector(all_union_members) -> [none, ref(auth_basic), ref(auth_token)]; -auth_union_member_selector({value, V}) -> +auth_union_member_selector({value, V0}) -> + V = + case is_map(V0) of + true -> emqx_utils_maps:binary_key_map(V0); + false -> V0 + end, case V of #{<<"password">> := _} -> [ref(auth_basic)]; @@ -265,6 +277,8 @@ auth_union_member_selector({value, V}) -> [ref(auth_token)]; <<"none">> -> [none]; + none -> + [none]; _ -> Expected = "none | basic | token", throw(#{ diff --git a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl index 4f0f73732..38b112e99 100644 --- a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl +++ b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl @@ -40,6 +40,7 @@ groups() -> only_once_tests() -> [ t_create_via_http, + t_strategy_key_validation, t_start_when_down, t_send_when_down, t_send_when_timeout, @@ -313,6 +314,8 @@ create_bridge_api(Config, Overrides) -> case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of {ok, {Status, Headers, Body0}} -> {ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}}; + {error, {Status, Headers, Body0}} -> + {error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}}; Error -> Error end, @@ -356,8 +359,12 @@ probe_bridge_api(Config, Overrides) -> ct:pal("probing bridge (via http): ~p", [Params]), Res = case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of - {ok, {{_, 204, _}, _Headers, _Body0} = Res0} -> {ok, Res0}; - Error -> Error + {ok, 
{{_, 204, _}, _Headers, _Body0} = Res0} -> + {ok, Res0}; + {error, {Status, Headers, Body0}} -> + {error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}}; + Error -> + Error end, ct:pal("bridge probe result: ~p", [Res]), Res. @@ -1074,6 +1081,37 @@ t_resource_manager_crash_before_producers_started(Config) -> ), ok. +t_strategy_key_validation(Config) -> + ?assertMatch( + {error, + {{_, 400, _}, _, #{ + <<"message">> := + #{ + <<"kind">> := <<"validation_error">>, + <<"reason">> := <<"Message key cannot be empty", _/binary>> + } = Msg + }}}, + probe_bridge_api( + Config, + #{<<"strategy">> => <<"key_dispatch">>, <<"message">> => #{<<"key">> => <<>>}} + ) + ), + ?assertMatch( + {error, + {{_, 400, _}, _, #{ + <<"message">> := + #{ + <<"kind">> := <<"validation_error">>, + <<"reason">> := <<"Message key cannot be empty", _/binary>> + } = Msg + }}}, + create_bridge_api( + Config, + #{<<"strategy">> => <<"key_dispatch">>, <<"message">> => #{<<"key">> => <<>>}} + ) + ), + ok. + t_cluster(Config0) -> ct:timetrap({seconds, 120}), ?retrying(Config0, 3, fun do_t_cluster/1). diff --git a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl index d46f2af6f..031767063 100644 --- a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl +++ b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl @@ -35,6 +35,17 @@ pulsar_producer_validations_test() -> ]}, check(Conf) ), + %% ensure atoms exist + _ = [my_producer], + ?assertThrow( + {_, [ + #{ + path := "bridges.pulsar_producer.my_producer", + reason := "Message key cannot be empty when `key_dispatch` strategy is used" + } + ]}, + check_atom_key(Conf) + ), ok. @@ -46,9 +57,14 @@ parse(Hocon) -> {ok, Conf} = hocon:binary(Hocon), Conf. +%% what bridge creation does check(Conf) when is_map(Conf) -> hocon_tconf:check_plain(emqx_bridge_schema, Conf). +%% what bridge probe does +check_atom_key(Conf) when is_map(Conf) -> + hocon_tconf:check_plain(emqx_bridge_schema, Conf, #{atom_key => true, required => false}). 
+ %%=========================================================================== %% Data section %%=========================================================================== diff --git a/apps/emqx_conf/src/emqx_conf.app.src b/apps/emqx_conf/src/emqx_conf.app.src index 3c1e5592f..86fb169a6 100644 --- a/apps/emqx_conf/src/emqx_conf.app.src +++ b/apps/emqx_conf/src/emqx_conf.app.src @@ -1,6 +1,6 @@ {application, emqx_conf, [ {description, "EMQX configuration management"}, - {vsn, "0.1.24"}, + {vsn, "0.1.25"}, {registered, []}, {mod, {emqx_conf_app, []}}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index 22c8c3c26..eea2bf1b8 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -648,6 +648,7 @@ fields("node") -> mapping => "mria.tlog_push_mode", default => async, 'readOnly' => true, + deprecated => {since, "5.2.0"}, importance => ?IMPORTANCE_HIDDEN, desc => ?DESC(db_tlog_push_mode) } @@ -671,6 +672,35 @@ fields("node") -> mapping => "emqx_machine.custom_shard_transports", default => #{} } + )}, + {"broker_pool_size", + sc( + pos_integer(), + #{ + importance => ?IMPORTANCE_HIDDEN, + default => emqx_vm:schedulers() * 2, + 'readOnly' => true, + desc => ?DESC(node_broker_pool_size) + } + )}, + {"generic_pool_size", + sc( + pos_integer(), + #{ + importance => ?IMPORTANCE_HIDDEN, + default => emqx_vm:schedulers(), + 'readOnly' => true, + desc => ?DESC(node_generic_pool_size) + } + )}, + {"channel_cleanup_batch_size", + sc( + pos_integer(), + #{ + importance => ?IMPORTANCE_HIDDEN, + default => 100_000, + desc => ?DESC(node_channel_cleanup_batch_size) + } )} ]; fields("cluster_call") -> diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index 7614ddac3..cd8ce864c 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_connector, [ {description, "EMQX Data Integration Connectors"}, - {vsn, "0.1.28"}, + {vsn, "0.1.29"}, {registered, []}, {mod, {emqx_connector_app, []}}, {applications, [ diff --git a/apps/emqx_connector/src/emqx_connector_ldap.erl b/apps/emqx_connector/src/emqx_connector_ldap.erl deleted file mode 100644 index c8c134f55..000000000 --- a/apps/emqx_connector/src/emqx_connector_ldap.erl +++ /dev/null @@ -1,199 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- --module(emqx_connector_ldap). - --include("emqx_connector.hrl"). --include_lib("typerefl/include/types.hrl"). --include_lib("emqx/include/logger.hrl"). - --export([roots/0, fields/1]). - --behaviour(emqx_resource). 
- -%% callbacks of behaviour emqx_resource --export([ - callback_mode/0, - on_start/2, - on_stop/2, - on_query/3, - on_get_status/2 -]). - --export([connect/1]). - --export([search/4]). - -%% port is not expected from configuration because -%% all servers expected to use the same port number --define(LDAP_HOST_OPTIONS, #{no_port => true}). - -%%===================================================================== -roots() -> - ldap_fields() ++ emqx_connector_schema_lib:ssl_fields(). - -%% this schema has no sub-structs -fields(_) -> []. - -%% =================================================================== -callback_mode() -> always_sync. - -on_start( - InstId, - #{ - servers := Servers0, - port := Port, - bind_dn := BindDn, - bind_password := BindPassword, - timeout := Timeout, - pool_size := PoolSize, - ssl := SSL - } = Config -) -> - ?SLOG(info, #{ - msg => "starting_ldap_connector", - connector => InstId, - config => emqx_utils:redact(Config) - }), - Servers1 = emqx_schema:parse_servers(Servers0, ?LDAP_HOST_OPTIONS), - Servers = - lists:map( - fun - (#{hostname := Host, port := Port0}) -> - {Host, Port0}; - (#{hostname := Host}) -> - Host - end, - Servers1 - ), - SslOpts = - case maps:get(enable, SSL) of - true -> - [ - {ssl, true}, - {sslopts, emqx_tls_lib:to_client_opts(SSL)} - ]; - false -> - [{ssl, false}] - end, - Opts = [ - {servers, Servers}, - {port, Port}, - {bind_dn, BindDn}, - {bind_password, BindPassword}, - {timeout, Timeout}, - {pool_size, PoolSize}, - {auto_reconnect, ?AUTO_RECONNECT_INTERVAL} - ], - case emqx_resource_pool:start(InstId, ?MODULE, Opts ++ SslOpts) of - ok -> {ok, #{pool_name => InstId}}; - {error, Reason} -> {error, Reason} - end. - -on_stop(InstId, _State) -> - ?SLOG(info, #{ - msg => "stopping_ldap_connector", - connector => InstId - }), - emqx_resource_pool:stop(InstId). - -on_query(InstId, {search, Base, Filter, Attributes}, #{pool_name := PoolName} = State) -> - Request = {Base, Filter, Attributes}, - ?TRACE( - "QUERY", - "ldap_connector_received", - #{request => Request, connector => InstId, state => State} - ), - case - Result = ecpool:pick_and_do( - PoolName, - {?MODULE, search, [Base, Filter, Attributes]}, - no_handover - ) - of - {error, Reason} -> - ?SLOG(error, #{ - msg => "ldap_connector_do_request_failed", - request => Request, - connector => InstId, - reason => Reason - }), - case Reason of - ecpool_empty -> - {error, {recoverable_error, Reason}}; - _ -> - Result - end; - _ -> - Result - end. - -on_get_status(_InstId, _State) -> connected. - -search(Conn, Base, Filter, Attributes) -> - eldap2:search(Conn, [ - {base, Base}, - {filter, Filter}, - {attributes, Attributes}, - {deref, eldap2:'derefFindingBaseObj'()} - ]). - -%% =================================================================== -connect(Opts) -> - Servers = proplists:get_value(servers, Opts, ["localhost"]), - Port = proplists:get_value(port, Opts, 389), - Timeout = proplists:get_value(timeout, Opts, 30), - BindDn = proplists:get_value(bind_dn, Opts), - BindPassword = proplists:get_value(bind_password, Opts), - SslOpts = - case proplists:get_value(ssl, Opts, false) of - true -> - [{sslopts, proplists:get_value(sslopts, Opts, [])}, {ssl, true}]; - false -> - [{ssl, false}] - end, - LdapOpts = - [ - {port, Port}, - {timeout, Timeout} - ] ++ SslOpts, - {ok, LDAP} = eldap2:open(Servers, LdapOpts), - ok = eldap2:simple_bind(LDAP, BindDn, BindPassword), - {ok, LDAP}. 
- -ldap_fields() -> - [ - {servers, servers()}, - {port, fun port/1}, - {pool_size, fun emqx_connector_schema_lib:pool_size/1}, - {bind_dn, fun bind_dn/1}, - {bind_password, fun emqx_connector_schema_lib:password/1}, - {timeout, fun duration/1}, - {auto_reconnect, fun emqx_connector_schema_lib:auto_reconnect/1} - ]. - -servers() -> - emqx_schema:servers_sc(#{}, ?LDAP_HOST_OPTIONS). - -bind_dn(type) -> binary(); -bind_dn(default) -> 0; -bind_dn(_) -> undefined. - -port(type) -> integer(); -port(default) -> 389; -port(_) -> undefined. - -duration(type) -> emqx_schema:timeout_duration_ms(); -duration(_) -> undefined. diff --git a/apps/emqx_eviction_agent/test/emqx_eviction_agent_SUITE.erl b/apps/emqx_eviction_agent/test/emqx_eviction_agent_SUITE.erl index 65df26387..3bbdcd707 100644 --- a/apps/emqx_eviction_agent/test/emqx_eviction_agent_SUITE.erl +++ b/apps/emqx_eviction_agent/test/emqx_eviction_agent_SUITE.erl @@ -278,13 +278,14 @@ t_session_serialization(_Config) -> emqx_eviction_agent:session_count() ), + [ChanPid0] = emqx_cm:lookup_channels(<<"client_with_session">>), + MRef0 = erlang:monitor(process, ChanPid0), + %% Evacuate to the same node - ?assertWaitEvent( - emqx_eviction_agent:evict_sessions(1, node()), - #{?snk_kind := emqx_channel_takeover_end, clientid := <<"client_with_session">>}, - 1000 - ), + _ = emqx_eviction_agent:evict_sessions(1, node()), + + ?assertReceive({'DOWN', MRef0, process, ChanPid0, _}), ok = emqx_eviction_agent:disable(test_eviction), diff --git a/apps/emqx_ldap/.gitignore b/apps/emqx_ldap/.gitignore new file mode 100644 index 000000000..3b0d6b553 --- /dev/null +++ b/apps/emqx_ldap/.gitignore @@ -0,0 +1,2 @@ +src/emqx_ldap_filter_lexer.erl +src/emqx_ldap_filter_parser.erl diff --git a/apps/emqx_ldap/BSL.txt b/apps/emqx_ldap/BSL.txt new file mode 100644 index 000000000..0acc0e696 --- /dev/null +++ b/apps/emqx_ldap/BSL.txt @@ -0,0 +1,94 @@ +Business Source License 1.1 + +Licensor: Hangzhou EMQ Technologies Co., Ltd. +Licensed Work: EMQX Enterprise Edition + The Licensed Work is (c) 2023 + Hangzhou EMQ Technologies Co., Ltd. +Additional Use Grant: Students and educators are granted right to copy, + modify, and create derivative work for research + or education. +Change Date: 2027-02-01 +Change License: Apache License, Version 2.0 + +For information about alternative licensing arrangements for the Software, +please contact Licensor: https://www.emqx.com/en/contact + +Notice + +The Business Source License (this document, or the “License”) is not an Open +Source license. However, the Licensed Work will eventually be made available +under an Open Source License, as stated in this License. + +License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved. +“Business Source License” is a trademark of MariaDB Corporation Ab. + +----------------------------------------------------------------------------- + +Business Source License 1.1 + +Terms + +The Licensor hereby grants you the right to copy, modify, create derivative +works, redistribute, and make non-production use of the Licensed Work. The +Licensor may make an Additional Use Grant, above, permitting limited +production use. + +Effective on the Change Date, or the fourth anniversary of the first publicly +available distribution of a specific version of the Licensed Work under this +License, whichever comes first, the Licensor hereby grants you rights under +the terms of the Change License, and the rights granted in the paragraph +above terminate. 
+ +If your use of the Licensed Work does not comply with the requirements +currently in effect as described in this License, you must purchase a +commercial license from the Licensor, its affiliated entities, or authorized +resellers, or you must refrain from using the Licensed Work. + +All copies of the original and modified Licensed Work, and derivative works +of the Licensed Work, are subject to this License. This License applies +separately for each version of the Licensed Work and the Change Date may vary +for each version of the Licensed Work released by Licensor. + +You must conspicuously display this License on each original or modified copy +of the Licensed Work. If you receive the Licensed Work in original or +modified form from a third party, the terms and conditions set forth in this +License apply to your use of that work. + +Any use of the Licensed Work in violation of this License will automatically +terminate your rights under this License for the current and all other +versions of the Licensed Work. + +This License does not grant you any right in any trademark or logo of +Licensor or its affiliates (provided that you may use a trademark or logo of +Licensor as expressly required by this License). + +TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON +AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, +EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND +TITLE. + +MariaDB hereby grants you permission to use this License’s text to license +your works, and to refer to it using the trademark “Business Source License”, +as long as you comply with the Covenants of Licensor below. + +Covenants of Licensor + +In consideration of the right to use this License’s text and the “Business +Source License” name and trademark, Licensor covenants to MariaDB, and to all +other recipients of the licensed work to be provided by Licensor: + +1. To specify as the Change License the GPL Version 2.0 or any later version, + or a license that is compatible with GPL Version 2.0 or a later version, + where “compatible” means that software provided under the Change License can + be included in a program with software provided under GPL Version 2.0 or a + later version. Licensor may specify additional Change Licenses without + limitation. + +2. To either: (a) specify an additional grant of rights to use that does not + impose any additional restriction on the right granted in this License, as + the Additional Use Grant; or (b) insert the text “None”. + +3. To specify a Change Date. + +4. Not to modify this License in any other way. diff --git a/apps/emqx_ldap/README.md b/apps/emqx_ldap/README.md new file mode 100644 index 000000000..5923a10d7 --- /dev/null +++ b/apps/emqx_ldap/README.md @@ -0,0 +1,14 @@ +# LDAP Connector + +This application houses the LDAP connector. +It provides the APIs to connect to the LDAP service. + +It is used by the emqx_authz and emqx_authn applications to check user permissions. + +## Contributing + +Please see our [contributing.md](../../CONTRIBUTING.md). + +## License + +See [APL](../../APL.txt). 
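For orientation while reading the new application below: downstream apps drive this connector through `emqx_resource` with `{query, Data}`, `{query, Data, Attrs}` or `{query, Data, Attrs, Timeout}` requests, as the `on_query/3` clauses and the test suites further down show. A minimal sketch (the resource id and username are made up for illustration; this is not part of the PR):

```erlang
-module(ldap_query_example).
-export([lookup_account/1]).

-include_lib("eldap/include/eldap.hrl").

%% Sketch only: assumes an emqx_ldap resource named <<"ldap_example">> has
%% already been created (e.g. via emqx_resource:create_local/5, as the test
%% suites in this PR do); the id and attribute are hypothetical.
lookup_account(Username) ->
    Data = #{username => Username},
    %% Same request shapes accepted by the new on_query/3 clauses:
    %% {query, Data} | {query, Data, Attrs} | {query, Data, Attrs, Timeout}.
    case emqx_resource:query(<<"ldap_example">>, {query, Data, ["mqttAccountName"], 5000}) of
        {ok, Entries} ->
            %% Entries is a list of #eldap_entry{} records from eldap:search/2.
            [E#eldap_entry.object_name || E <- Entries];
        {error, _} = Error ->
            Error
    end.
```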
diff --git a/apps/emqx_ldap/docker-ct b/apps/emqx_ldap/docker-ct new file mode 100644 index 000000000..c1142c3c5 --- /dev/null +++ b/apps/emqx_ldap/docker-ct @@ -0,0 +1 @@ +ldap diff --git a/apps/emqx_ldap/rebar.config b/apps/emqx_ldap/rebar.config new file mode 100644 index 000000000..abf0d192f --- /dev/null +++ b/apps/emqx_ldap/rebar.config @@ -0,0 +1,9 @@ +%% -*- mode: erlang; -*- + +{erl_opts, [debug_info]}. +{deps, [ + {emqx_connector, {path, "../../apps/emqx_connector"}}, + {emqx_resource, {path, "../../apps/emqx_resource"}}, + {emqx_authn, {path, "../../apps/emqx_authn"}}, + {emqx_authz, {path, "../../apps/emqx_authz"}} +]}. diff --git a/apps/emqx_ldap/src/emqx_ldap.app.src b/apps/emqx_ldap/src/emqx_ldap.app.src new file mode 100644 index 000000000..bdc9493c7 --- /dev/null +++ b/apps/emqx_ldap/src/emqx_ldap.app.src @@ -0,0 +1,15 @@ +{application, emqx_ldap, [ + {description, "EMQX LDAP Connector"}, + {vsn, "0.1.0"}, + {registered, []}, + {applications, [ + kernel, + stdlib, + emqx_authn, + emqx_authz + ]}, + {env, []}, + {modules, []}, + + {links, []} +]}. diff --git a/apps/emqx_ldap/src/emqx_ldap.erl b/apps/emqx_ldap/src/emqx_ldap.erl new file mode 100644 index 000000000..d505f92d0 --- /dev/null +++ b/apps/emqx_ldap/src/emqx_ldap.erl @@ -0,0 +1,239 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_ldap). + +-include_lib("emqx_connector/include/emqx_connector.hrl"). +-include_lib("typerefl/include/types.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). +-include_lib("emqx/include/logger.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include_lib("eldap/include/eldap.hrl"). + +-behaviour(emqx_resource). + +%% callbacks of behaviour emqx_resource +-export([ + callback_mode/0, + on_start/2, + on_stop/2, + on_query/3, + on_get_status/2 +]). + +%% ecpool connect & reconnect +-export([connect/1]). + +-export([roots/0, fields/1]). + +-export([do_get_status/1]). + +-define(LDAP_HOST_OPTIONS, #{ + default_port => 389 +}). + +-type params_tokens() :: #{atom() => list()}. +-type state() :: + #{ + pool_name := binary(), + base_tokens := params_tokens(), + filter_tokens := params_tokens() + }. + +-define(ECS, emqx_connector_schema_lib). + +%%===================================================================== +%% Hocon schema +roots() -> + [{config, #{type => hoconsc:ref(?MODULE, config)}}]. + +fields(config) -> + [ + {server, server()}, + {pool_size, fun ?ECS:pool_size/1}, + {username, fun ensure_username/1}, + {password, fun ?ECS:password/1}, + {base_object, + ?HOCON(binary(), #{ + desc => ?DESC(base_object), + required => true, + validator => fun emqx_schema:non_empty_string/1 + })}, + {filter, + ?HOCON( + binary(), + #{ + desc => ?DESC(filter), + default => <<"(objectClass=mqttUser)">>, + validator => fun emqx_schema:non_empty_string/1 + } + )} + ] ++ emqx_connector_schema_lib:ssl_fields(). + +server() -> + Meta = #{desc => ?DESC("server")}, + emqx_schema:servers_sc(Meta, ?LDAP_HOST_OPTIONS). + +ensure_username(required) -> + true; +ensure_username(Field) -> + ?ECS:username(Field). + +%% =================================================================== +callback_mode() -> always_sync. + +-spec on_start(binary(), hoconsc:config()) -> {ok, state()} | {error, _}. 
+on_start( + InstId, + #{ + server := Server, + pool_size := PoolSize, + ssl := SSL + } = Config +) -> + HostPort = emqx_schema:parse_server(Server, ?LDAP_HOST_OPTIONS), + ?SLOG(info, #{ + msg => "starting_ldap_connector", + connector => InstId, + config => emqx_utils:redact(Config) + }), + + Config2 = maps:merge(Config, HostPort), + Config3 = + case maps:get(enable, SSL) of + true -> + Config2#{sslopts => emqx_tls_lib:to_client_opts(SSL)}; + false -> + Config2 + end, + Options = [ + {pool_size, PoolSize}, + {auto_reconnect, ?AUTO_RECONNECT_INTERVAL}, + {options, Config3} + ], + + case emqx_resource_pool:start(InstId, ?MODULE, Options) of + ok -> + {ok, prepare_template(Config, #{pool_name => InstId})}; + {error, Reason} -> + ?tp( + ldap_connector_start_failed, + #{error => Reason} + ), + {error, Reason} + end. + +on_stop(InstId, _State) -> + ?SLOG(info, #{ + msg => "stopping_ldap_connector", + connector => InstId + }), + emqx_resource_pool:stop(InstId). + +on_query(InstId, {query, Data}, State) -> + on_query(InstId, {query, Data}, [], State); +on_query(InstId, {query, Data, Attrs}, State) -> + on_query(InstId, {query, Data}, [{attributes, Attrs}], State); +on_query(InstId, {query, Data, Attrs, Timeout}, State) -> + on_query(InstId, {query, Data}, [{attributes, Attrs}, {timeout, Timeout}], State). + +on_get_status(_InstId, #{pool_name := PoolName} = _State) -> + case emqx_resource_pool:health_check_workers(PoolName, fun ?MODULE:do_get_status/1) of + true -> + connected; + false -> + connecting + end. + +do_get_status(Conn) -> + erlang:is_process_alive(Conn). + +%% =================================================================== + +connect(Options) -> + #{hostname := Host, username := Username, password := Password} = + Conf = proplists:get_value(options, Options), + OpenOpts = maps:to_list(maps:with([port, sslopts], Conf)), + case eldap:open([Host], [{log, fun log/3} | OpenOpts]) of + {ok, Handle} = Ret -> + case eldap:simple_bind(Handle, Username, Password) of + ok -> Ret; + Error -> Error + end; + Error -> + Error + end. + +on_query( + InstId, + {query, Data}, + SearchOptions, + #{base_tokens := BaseTks, filter_tokens := FilterTks} = State +) -> + Base = emqx_placeholder:proc_tmpl(BaseTks, Data), + FilterBin = emqx_placeholder:proc_tmpl(FilterTks, Data), + case emqx_ldap_filter_parser:scan_and_parse(FilterBin) of + {ok, Filter} -> + do_ldap_query( + InstId, + [{base, Base}, {filter, Filter} | SearchOptions], + State + ); + {error, Reason} = Error -> + ?SLOG(error, #{ + msg => "filter_parse_failed", + filter => FilterBin, + reason => Reason + }), + Error + end. + +do_ldap_query( + InstId, + SearchOptions, + #{pool_name := PoolName} = State +) -> + LogMeta = #{connector => InstId, search => SearchOptions, state => State}, + ?TRACE("QUERY", "ldap_connector_received", LogMeta), + case + ecpool:pick_and_do( + PoolName, + {eldap, search, [SearchOptions]}, + handover + ) + of + {ok, Result} -> + ?tp( + ldap_connector_query_return, + #{result => Result} + ), + {ok, Result#eldap_search_result.entries}; + {error, 'noSuchObject'} -> + {ok, []}; + {error, Reason} -> + ?SLOG( + error, + LogMeta#{msg => "ldap_connector_do_sql_query_failed", reason => Reason} + ), + {error, {unrecoverable_error, Reason}} + end. + +log(Level, Format, Args) -> + ?SLOG( + Level, + #{ + msg => "ldap_log", + log => io_lib:format(Format, Args) + } + ). + +prepare_template(Config, State) -> + do_prepare_template(maps:to_list(maps:with([base_object, filter], Config)), State). 
+ +do_prepare_template([{base_object, V} | T], State) -> + do_prepare_template(T, State#{base_tokens => emqx_placeholder:preproc_tmpl(V)}); +do_prepare_template([{filter, V} | T], State) -> + do_prepare_template(T, State#{filter_tokens => emqx_placeholder:preproc_tmpl(V)}); +do_prepare_template([], State) -> + State. diff --git a/apps/emqx_ldap/src/emqx_ldap_authn.erl b/apps/emqx_ldap/src/emqx_ldap_authn.erl new file mode 100644 index 000000000..d814e2aae --- /dev/null +++ b/apps/emqx_ldap/src/emqx_ldap_authn.erl @@ -0,0 +1,285 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_ldap_authn). + +-include_lib("emqx_authn/include/emqx_authn.hrl"). +-include_lib("emqx/include/logger.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). +-include_lib("eldap/include/eldap.hrl"). + +-behaviour(hocon_schema). +-behaviour(emqx_authentication). + +%% a compatible attribute for version 4.x +-define(ISENABLED_ATTR, "isEnabled"). +-define(VALID_ALGORITHMS, [md5, ssha, sha, sha256, sha384, sha512]). +%% TODO +%% 1. Supports more salt algorithms, SMD5 SSHA 256/384/512 +%% 2. Supports https://datatracker.ietf.org/doc/html/rfc3112 + +-export([ + namespace/0, + tags/0, + roots/0, + fields/1, + desc/1 +]). + +-export([ + refs/0, + create/2, + update/2, + authenticate/2, + destroy/1 +]). + +-import(proplists, [get_value/2, get_value/3]). +%%------------------------------------------------------------------------------ +%% Hocon Schema +%%------------------------------------------------------------------------------ + +namespace() -> "authn". + +tags() -> + [<<"Authentication">>]. + +%% used for config check when the schema module is resolved +roots() -> + [{?CONF_NS, hoconsc:mk(hoconsc:ref(?MODULE, ldap))}]. + +fields(ldap) -> + [ + {mechanism, emqx_authn_schema:mechanism(password_based)}, + {backend, emqx_authn_schema:backend(ldap)}, + {password_attribute, fun password_attribute/1}, + {is_superuser_attribute, fun is_superuser_attribute/1}, + {query_timeout, fun query_timeout/1} + ] ++ emqx_authn_schema:common_fields() ++ emqx_ldap:fields(config). + +desc(ldap) -> + ?DESC(ldap); +desc(_) -> + undefined. + +password_attribute(type) -> string(); +password_attribute(desc) -> ?DESC(?FUNCTION_NAME); +password_attribute(default) -> <<"userPassword">>; +password_attribute(_) -> undefined. + +is_superuser_attribute(type) -> string(); +is_superuser_attribute(desc) -> ?DESC(?FUNCTION_NAME); +is_superuser_attribute(default) -> <<"isSuperuser">>; +is_superuser_attribute(_) -> undefined. + +query_timeout(type) -> emqx_schema:timeout_duration_ms(); +query_timeout(desc) -> ?DESC(?FUNCTION_NAME); +query_timeout(default) -> <<"5s">>; +query_timeout(_) -> undefined. + +%%------------------------------------------------------------------------------ +%% APIs +%%------------------------------------------------------------------------------ + +refs() -> + [hoconsc:ref(?MODULE, ldap)]. + +create(_AuthenticatorID, Config) -> + create(Config). + +create(Config0) -> + ResourceId = emqx_authn_utils:make_resource_id(?MODULE), + {Config, State} = parse_config(Config0), + {ok, _Data} = emqx_authn_utils:create_resource(ResourceId, emqx_ldap, Config), + {ok, State#{resource_id => ResourceId}}. 
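A quick aside on the `${username}` placeholders used in `base_object` and `filter`: the `prepare_template`/`do_prepare_template` helpers above pre-tokenize them, and at query time they are rendered against the client credential map. A sketch of that round trip (template and credential values are illustrative only; run e.g. in an attached Erlang shell):

```erlang
%% Sketch only: mirrors what base_tokens/filter_tokens are used for above.
Template = <<"uid=${username},ou=testdevice,dc=emqx,dc=io">>,
Tokens = emqx_placeholder:preproc_tmpl(Template),
Rendered = emqx_placeholder:proc_tmpl(Tokens, #{username => <<"mqttuser0001">>}),
%% Rendered should now read "uid=mqttuser0001,ou=testdevice,dc=emqx,dc=io",
%% which becomes the search base passed to eldap:search/2.
Rendered.
```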
+ +update(Config0, #{resource_id := ResourceId} = _State) -> + {Config, NState} = parse_config(Config0), + case emqx_authn_utils:update_resource(emqx_ldap, Config, ResourceId) of + {error, Reason} -> + error({load_config_error, Reason}); + {ok, _} -> + {ok, NState#{resource_id => ResourceId}} + end. + +destroy(#{resource_id := ResourceId}) -> + _ = emqx_resource:remove_local(ResourceId), + ok. + +authenticate(#{auth_method := _}, _) -> + ignore; +authenticate( + #{password := Password} = Credential, + #{ + password_attribute := PasswordAttr, + is_superuser_attribute := IsSuperuserAttr, + query_timeout := Timeout, + resource_id := ResourceId + } = State +) -> + case + emqx_resource:simple_sync_query( + ResourceId, + {query, Credential, [PasswordAttr, IsSuperuserAttr, ?ISENABLED_ATTR], Timeout} + ) + of + {ok, []} -> + ignore; + {ok, [Entry | _]} -> + is_enabled(Password, Entry, State); + {error, Reason} -> + ?TRACE_AUTHN_PROVIDER(error, "ldap_query_failed", #{ + resource => ResourceId, + timeout => Timeout, + reason => Reason + }), + ignore + end. + +parse_config(Config) -> + State = lists:foldl( + fun(Key, Acc) -> + Value = + case maps:get(Key, Config) of + Bin when is_binary(Bin) -> + erlang:binary_to_list(Bin); + Any -> + Any + end, + Acc#{Key => Value} + end, + #{}, + [password_attribute, is_superuser_attribute, query_timeout] + ), + {Config, State}. + +%% To compatible v4.x +is_enabled(Password, #eldap_entry{attributes = Attributes} = Entry, State) -> + IsEnabled = get_lower_bin_value(?ISENABLED_ATTR, Attributes, "true"), + case emqx_authn_utils:to_bool(IsEnabled) of + true -> + ensure_password(Password, Entry, State); + _ -> + {error, user_disabled} + end. + +ensure_password( + Password, + #eldap_entry{attributes = Attributes} = Entry, + #{password_attribute := PasswordAttr} = State +) -> + case get_value(PasswordAttr, Attributes) of + undefined -> + {error, no_password}; + [LDAPPassword | _] -> + extract_hash_algorithm(LDAPPassword, Password, fun try_decode_password/4, Entry, State) + end. + +%% RFC 2307 format password +%% https://datatracker.ietf.org/doc/html/rfc2307 +extract_hash_algorithm(LDAPPassword, Password, OnFail, Entry, State) -> + case + re:run( + LDAPPassword, + "{([^{}]+)}(.+)", + [{capture, all_but_first, list}, global] + ) + of + {match, [[HashTypeStr, PasswordHashStr]]} -> + case emqx_utils:safe_to_existing_atom(string:to_lower(HashTypeStr)) of + {ok, HashType} -> + PasswordHash = to_binary(PasswordHashStr), + is_valid_algorithm(HashType, PasswordHash, Password, Entry, State); + _Error -> + {error, invalid_hash_type} + end; + _ -> + OnFail(LDAPPassword, Password, Entry, State) + end. + +is_valid_algorithm(HashType, PasswordHash, Password, Entry, State) -> + case lists:member(HashType, ?VALID_ALGORITHMS) of + true -> + verify_password(HashType, PasswordHash, Password, Entry, State); + _ -> + {error, {invalid_hash_type, HashType}} + end. + +%% this password is in LDIF format which is base64 encoding +try_decode_password(LDAPPassword, Password, Entry, State) -> + case safe_base64_decode(LDAPPassword) of + {ok, Decode} -> + extract_hash_algorithm( + Decode, + Password, + fun(_, _, _, _) -> + {error, invalid_password} + end, + Entry, + State + ); + {error, Reason} -> + {error, {invalid_password, Reason}} + end. 
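An aside on the RFC 2307 handling above and the salted-SHA branch that follows: a hedged sketch of how a matching `{SSHA}` `userPassword` value is constructed (the password and 4-byte salt are made up), which is the inverse of what `verify_password/8` checks:

```erlang
%% Sketch only (e.g. in an attached Erlang shell):
%% SSHA = "{SSHA}" ++ base64(SHA-1(Password ++ Salt) ++ Salt).
Password = <<"mqttuser0001">>,
Salt = <<1, 2, 3, 4>>,
Digest = crypto:hash(sha, <<Password/binary, Salt/binary>>),   %% 20 bytes
UserPassword = iolist_to_binary(["{SSHA}", base64:encode(<<Digest/binary, Salt/binary>>)]),
%% verify_password(ssha, ...) below base64-decodes the part after "{SSHA}",
%% splits it into the 20-byte digest and the trailing salt, re-hashes the
%% candidate password with the salt appended, and compares the digests.
UserPassword.
```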
+
+%% sha with salt
+%% https://www.openldap.org/faq/data/cache/347.html
+verify_password(ssha, PasswordData, Password, Entry, State) ->
+    case safe_base64_decode(PasswordData) of
+        {ok, <<PasswordHash:20/binary, Salt/binary>>} ->
+            verify_password(sha, hash, PasswordHash, Salt, suffix, Password, Entry, State);
+        {ok, _} ->
+            {error, invalid_ssha_password};
+        {error, Reason} ->
+            {error, {invalid_password, Reason}}
+    end;
+verify_password(
+    Algorithm,
+    Base64HashData,
+    Password,
+    Entry,
+    State
+) ->
+    verify_password(Algorithm, base64, Base64HashData, <<>>, disable, Password, Entry, State).
+
+verify_password(Algorithm, LDAPPasswordType, LDAPPassword, Salt, Position, Password, Entry, State) ->
+    PasswordHash = hash_password(Algorithm, Salt, Position, Password),
+    case compare_password(LDAPPasswordType, LDAPPassword, PasswordHash) of
+        true ->
+            {ok, is_superuser(Entry, State)};
+        _ ->
+            {error, invalid_password}
+    end.
+
+is_superuser(Entry, #{is_superuser_attribute := Attr} = _State) ->
+    Value = get_lower_bin_value(Attr, Entry#eldap_entry.attributes, "false"),
+    #{is_superuser => emqx_authn_utils:to_bool(Value)}.
+
+safe_base64_decode(Data) ->
+    try
+        {ok, base64:decode(Data)}
+    catch
+        _:Reason ->
+            {error, {invalid_base64_data, Reason}}
+    end.
+
+get_lower_bin_value(Key, Proplists, Default) ->
+    [Value | _] = get_value(Key, Proplists, [Default]),
+    to_binary(string:to_lower(Value)).
+
+to_binary(Value) ->
+    erlang:list_to_binary(Value).
+
+hash_password(Algorithm, _Salt, disable, Password) ->
+    hash_password(Algorithm, Password);
+hash_password(Algorithm, Salt, suffix, Password) ->
+    hash_password(Algorithm, <<Password/binary, Salt/binary>>).
+
+hash_password(Algorithm, Data) ->
+    crypto:hash(Algorithm, Data).
+
+compare_password(hash, LDAPPasswordHash, PasswordHash) ->
+    emqx_passwd:compare_secure(LDAPPasswordHash, PasswordHash);
+compare_password(base64, Base64HashData, PasswordHash) ->
+    emqx_passwd:compare_secure(Base64HashData, base64:encode(PasswordHash)).
diff --git a/apps/emqx_ldap/src/emqx_ldap_authz.erl b/apps/emqx_ldap/src/emqx_ldap_authz.erl
new file mode 100644
index 000000000..f5ef38c01
--- /dev/null
+++ b/apps/emqx_ldap/src/emqx_ldap_authz.erl
@@ -0,0 +1,164 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_ldap_authz).
+
+-include_lib("emqx_authz/include/emqx_authz.hrl").
+-include_lib("emqx/include/emqx.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+-include_lib("emqx/include/logger.hrl").
+-include_lib("emqx/include/emqx_placeholder.hrl").
+-include_lib("eldap/include/eldap.hrl").
+
+-behaviour(emqx_authz).
+
+-define(PREPARE_KEY, ?MODULE).
+
+%% AuthZ Callbacks
+-export([
+    description/0,
+    create/1,
+    update/1,
+    destroy/1,
+    authorize/4
+]).
+
+-export([fields/1]).
+
+-ifdef(TEST).
+-compile(export_all).
+-compile(nowarn_export_all).
+-endif.
+ +%%------------------------------------------------------------------------------ +%% Hocon Schema +%%------------------------------------------------------------------------------ + +fields(config) -> + emqx_authz_schema:authz_common_fields(ldap) ++ + [ + {publish_attribute, attribute_meta(publish_attribute, <<"mqttPublishTopic">>)}, + {subscribe_attribute, attribute_meta(subscribe_attribute, <<"mqttSubscriptionTopic">>)}, + {all_attribute, attribute_meta(all_attribute, <<"mqttPubSubTopic">>)}, + {query_timeout, + ?HOCON( + emqx_schema:timeout_duration_ms(), + #{ + desc => ?DESC(query_timeout), + default => <<"5s">> + } + )} + ] ++ + emqx_ldap:fields(config). + +attribute_meta(Name, Default) -> + ?HOCON( + string(), + #{ + default => Default, + desc => ?DESC(Name) + } + ). + +%%------------------------------------------------------------------------------ +%% AuthZ Callbacks +%%------------------------------------------------------------------------------ + +description() -> + "AuthZ with LDAP". + +create(Source) -> + ResourceId = emqx_authz_utils:make_resource_id(?MODULE), + {ok, _Data} = emqx_authz_utils:create_resource(ResourceId, emqx_ldap, Source), + Annotations = new_annotations(#{id => ResourceId}, Source), + Source#{annotations => Annotations}. + +update(Source) -> + case emqx_authz_utils:update_resource(emqx_ldap, Source) of + {error, Reason} -> + error({load_config_error, Reason}); + {ok, Id} -> + Annotations = new_annotations(#{id => Id}, Source), + Source#{annotations => Annotations} + end. + +destroy(#{annotations := #{id := Id}}) -> + ok = emqx_resource:remove_local(Id). + +authorize( + Client, + Action, + Topic, + #{ + query_timeout := QueryTimeout, + annotations := #{id := ResourceID} = Annotations + } +) -> + Attrs = select_attrs(Action, Annotations), + case emqx_resource:simple_sync_query(ResourceID, {query, Client, Attrs, QueryTimeout}) of + {ok, []} -> + nomatch; + {ok, [Entry | _]} -> + do_authorize(Action, Topic, Attrs, Entry); + {error, Reason} -> + ?SLOG(error, #{ + msg => "query_ldap_error", + reason => Reason, + resource_id => ResourceID + }), + nomatch + end. + +do_authorize(Action, Topic, [Attr | T], Entry) -> + Topics = proplists:get_value(Attr, Entry#eldap_entry.attributes, []), + case match_topic(Topic, Topics) of + true -> + {matched, allow}; + false -> + do_authorize(Action, Topic, T, Entry) + end; +do_authorize(_Action, _Topic, [], _Entry) -> + nomatch. + +new_annotations(Init, Source) -> + lists:foldl( + fun(Attr, Acc) -> + Acc#{ + Attr => + case maps:get(Attr, Source) of + Value when is_binary(Value) -> + erlang:binary_to_list(Value); + Value -> + Value + end + } + end, + Init, + [publish_attribute, subscribe_attribute, all_attribute] + ). + +select_attrs(#{action_type := publish}, #{publish_attribute := Pub, all_attribute := All}) -> + [Pub, All]; +select_attrs(_, #{subscribe_attribute := Sub, all_attribute := All}) -> + [Sub, All]. + +match_topic(Target, Topics) -> + lists:any( + fun(Topic) -> + emqx_topic:match(Target, erlang:list_to_binary(Topic)) + end, + Topics + ). diff --git a/apps/emqx_ldap/src/emqx_ldap_filter_lexer.xrl b/apps/emqx_ldap/src/emqx_ldap_filter_lexer.xrl new file mode 100644 index 000000000..a82a3ee3e --- /dev/null +++ b/apps/emqx_ldap/src/emqx_ldap_filter_lexer.xrl @@ -0,0 +1,31 @@ +Definitions. + +Control = [()&|!=~><:*] +White = [\s\t\n\r]+ +NonString = [^()&|!=~><:*\s\t\n\r] +String = {NonString}+ + +Rules. + +\( : {token, {lparen, TokenLine}}. +\) : {token, {rparen, TokenLine}}. +\& : {token, {'and', TokenLine}}. 
+\| : {token, {'or', TokenLine}}. +\! : {token, {'not', TokenLine}}. += : {token, {equal, TokenLine}}. +~= : {token, {approx, TokenLine}}. +>= : {token, {greaterOrEqual, TokenLine}}. +<= : {token, {lessOrEqual, TokenLine}}. +\* : {token, {asterisk, TokenLine}}. +\: : {token, {colon, TokenLine}}. +dn : {token, {dn, TokenLine}}. +{White} : skip_token. +{String} : {token, {string, TokenLine, TokenChars}}. +%% Leex will hang if a composite operation is missing a character +{Control} : {error, lists:flatten(io_lib:format("Unexpected Tokens:~ts", [TokenChars]))}. + +Erlang code. + +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- diff --git a/apps/emqx_ldap/src/emqx_ldap_filter_parser.yrl b/apps/emqx_ldap/src/emqx_ldap_filter_parser.yrl new file mode 100644 index 000000000..e1b1ed98e --- /dev/null +++ b/apps/emqx_ldap/src/emqx_ldap_filter_parser.yrl @@ -0,0 +1,149 @@ +Header "%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%--------------------------------------------------------------------". + +Nonterminals +filter filtercomp filterlist item simple present substring initial any final extensible attr value type dnattrs matchingrule. + +Terminals +lparen rparen 'and' 'or' 'not' equal approx greaterOrEqual lessOrEqual asterisk colon dn string. + +Rootsymbol filter. +Left 100 present. +Left 500 substring. + +filter -> + lparen filtercomp rparen : '$2'. + +filtercomp -> + 'and' filterlist: 'and'('$2'). +filtercomp -> + 'or' filterlist: 'or'('$2'). +filtercomp -> + 'not' filter: 'not'('$2'). +filtercomp -> + item: '$1'. + +filterlist -> + filter: ['$1']. +filterlist -> + filter filterlist: ['$1' | '$2']. + +item -> + simple: '$1'. +item -> + present: '$1'. +item -> + substring: '$1'. +item-> + extensible: '$1'. + +simple -> + attr equal value: equal('$1', '$3'). +simple -> + attr approx value: approx('$1', '$3'). +simple -> + attr greaterOrEqual value: greaterOrEqual('$1', '$3'). +simple -> + attr lessOrEqual value: lessOrEqual('$1', '$3'). + +present -> + attr equal asterisk: present('$1'). + +substring -> + attr equal initial asterisk any final: substrings('$1', ['$3', '$5', '$6']). +substring -> + attr equal asterisk any final: substrings('$1', ['$4', '$5']). +substring -> + attr equal initial asterisk any: substrings('$1', ['$3', '$5']). +substring -> + attr equal asterisk any: substrings('$1', ['$4']). + +initial -> + value: {initial, '$1'}. + +final -> + value: {final, '$1'}. + +any -> any value asterisk: 'any'('$1', '$2'). +any -> '$empty': []. + +extensible -> + type dnattrs matchingrule colon equal value : extensible('$6', ['$1', '$2', '$3']). +extensible -> + type dnattrs colon equal value: extensible('$5', ['$1', '$2']). +extensible -> + type matchingrule colon equal value: extensible('$5', ['$1', '$2']). +extensible -> + type colon equal value: extensible('$4', ['$1']). + +extensible -> + dnattrs matchingrule colon equal value: extensible('$5', ['$1', '$2']). +extensible -> + matchingrule colon equal value: extensible('$4', ['$1']). + +attr -> + string: get_value('$1'). + +value -> + string: get_value('$1'). + +type -> + value: {type, '$1'}. + +dnattrs -> + colon dn: {dnAttributes, true}. + +matchingrule -> + colon value: {matchingRule, '$2'}. + +Erlang code. +-export([scan_and_parse/1]). +-ignore_xref({return_error, 2}). 
+ +'and'(Value) -> + eldap:'and'(Value). + +'or'(Value) -> + eldap:'or'(Value). + +'not'(Value) -> + eldap:'not'(Value). + +equal(Attr, Value) -> + eldap:equalityMatch(Attr, Value). + +approx(Attr, Value) -> + eldap:approxMatch(Attr, Value). + +greaterOrEqual(Attr, Value) -> + eldap:greaterOrEqual(Attr, Value). + +lessOrEqual(Attr, Value) -> + eldap:lessOrEqual(Attr, Value). + +present(Value) -> + eldap:present(Value). + +substrings(Attr, List) -> + eldap:substrings(Attr, flatten(List)). + +'any'(List, Item) -> + [List, {any, Item}]. + +extensible(Value, Opts) -> eldap:extensibleMatch(Value, Opts). + +flatten(List) -> lists:flatten(List). + +get_value({_Token, _Line, Value}) -> + Value. + +scan_and_parse(Bin) when is_binary(Bin) -> + scan_and_parse(erlang:binary_to_list(Bin)); +scan_and_parse(String) -> + case emqx_ldap_filter_lexer:string(String) of + {ok, Tokens, _} -> + parse(Tokens); + {error, Reason, _} -> + {error, Reason} + end. diff --git a/apps/emqx_authn/test/data/emqx.io.ldif b/apps/emqx_ldap/test/data/emqx.io.ldif similarity index 90% rename from apps/emqx_authn/test/data/emqx.io.ldif rename to apps/emqx_ldap/test/data/emqx.io.ldif index 4675717ec..138651958 100644 --- a/apps/emqx_authn/test/data/emqx.io.ldif +++ b/apps/emqx_ldap/test/data/emqx.io.ldif @@ -132,3 +132,21 @@ mqttPubSubTopic: mqttuser0005/pubsub/1 mqttPubSubTopic: mqttuser0005/pubsub/+ mqttPubSubTopic: mqttuser0005/pubsub/# userPassword: {SHA}jKnxeEDGR14kE8AR7yuVFOelhz4= + +objectClass: top +dn:uid=mqttuser0006,ou=testdevice,dc=emqx,dc=io +objectClass: mqttUser +objectClass: mqttDevice +objectClass: mqttSecurity +uid: mqttuser0006 +isEnabled: FALSE +userPassword: {SHA}AlNm2FUO8G5BK5pCggfrPauRqN0= + +objectClass: top +dn:uid=mqttuser0007,ou=testdevice,dc=emqx,dc=io +objectClass: mqttUser +objectClass: mqttDevice +objectClass: mqttSecurity +uid: mqttuser0007 +isSuperuser: TRUE +userPassword: {SHA}axpQGbl00j3jvOG058y313ocnBk= diff --git a/apps/emqx_authn/test/data/emqx.schema b/apps/emqx_ldap/test/data/emqx.schema similarity index 86% rename from apps/emqx_authn/test/data/emqx.schema rename to apps/emqx_ldap/test/data/emqx.schema index 55f92269b..d08548272 100644 --- a/apps/emqx_authn/test/data/emqx.schema +++ b/apps/emqx_ldap/test/data/emqx.schema @@ -8,6 +8,12 @@ attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.1.3 NAME 'isEnabled' SINGLE-VALUE USAGE userApplications ) +attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.1.4 NAME 'isSuperuser' + EQUALITY booleanMatch + SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 + SINGLE-VALUE + USAGE userApplications ) + attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.4.1 NAME ( 'mqttPublishTopic' 'mpt' ) EQUALITY caseIgnoreMatch SUBSTR caseIgnoreSubstringsMatch @@ -32,7 +38,7 @@ attributetype ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.4.4 NAME ( 'mqttAccountName' 'ma objectclass ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.4 NAME 'mqttUser' AUXILIARY - MAY ( mqttPublishTopic $ mqttSubscriptionTopic $ mqttPubSubTopic $ mqttAccountName) ) + MAY ( mqttPublishTopic $ mqttSubscriptionTopic $ mqttPubSubTopic $ mqttAccountName $ isSuperuser) ) objectclass ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.2 NAME 'mqttDevice' SUP top @@ -43,4 +49,4 @@ objectclass ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.2 NAME 'mqttDevice' objectclass ( 1.3.6.1.4.1.11.2.53.2.2.3.1.2.3.3 NAME 'mqttSecurity' SUP top AUXILIARY - MAY ( userPassword $ userPKCS12 $ pwdAttribute $ pwdLockout ) ) + MUST ( userPassword ) ) diff --git a/apps/emqx_ldap/test/emqx_ldap_SUITE.erl b/apps/emqx_ldap/test/emqx_ldap_SUITE.erl new file mode 100644 index 000000000..a191da3bd --- 
/dev/null +++ b/apps/emqx_ldap/test/emqx_ldap_SUITE.erl @@ -0,0 +1,194 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_ldap_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include_lib("emqx_connector/include/emqx_connector.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("emqx/include/emqx.hrl"). +-include_lib("stdlib/include/assert.hrl"). +-include_lib("eldap/include/eldap.hrl"). + +-define(LDAP_HOST, "ldap"). +-define(LDAP_RESOURCE_MOD, emqx_ldap). + +all() -> + [ + {group, tcp}, + {group, ssl} + ]. + +groups() -> + Cases = emqx_common_test_helpers:all(?MODULE), + [ + {tcp, Cases}, + {ssl, Cases} + ]. + +init_per_group(Group, Config) -> + [{group, Group} | Config]. + +end_per_group(_, Config) -> + proplists:delete(group, Config). + +init_per_suite(Config) -> + Port = port(tcp), + case emqx_common_test_helpers:is_tcp_server_available(?LDAP_HOST, Port) of + true -> + ok = emqx_common_test_helpers:start_apps([emqx_conf]), + ok = emqx_connector_test_helpers:start_apps([emqx_resource]), + {ok, _} = application:ensure_all_started(emqx_connector), + Config; + false -> + {skip, no_ldap} + end. + +end_per_suite(_Config) -> + ok = emqx_common_test_helpers:stop_apps([emqx_conf]), + ok = emqx_connector_test_helpers:stop_apps([emqx_resource]), + _ = application:stop(emqx_connector). + +init_per_testcase(_, Config) -> + Config. + +end_per_testcase(_, _Config) -> + ok. + +% %%------------------------------------------------------------------------------ +% %% Testcases +% %%------------------------------------------------------------------------------ + +t_lifecycle(Config) -> + perform_lifecycle_check( + <<"emqx_ldap_SUITE">>, + ldap_config(Config) + ). + +perform_lifecycle_check(ResourceId, InitialConfig) -> + {ok, #{config := CheckedConfig}} = + emqx_resource:check_config(?LDAP_RESOURCE_MOD, InitialConfig), + {ok, #{ + state := #{pool_name := PoolName} = State, + status := InitialStatus + }} = emqx_resource:create_local( + ResourceId, + ?CONNECTOR_RESOURCE_GROUP, + ?LDAP_RESOURCE_MOD, + CheckedConfig, + #{} + ), + ?assertEqual(InitialStatus, connected), + % Instance should match the state and status of the just started resource + {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := InitialStatus + }} = + emqx_resource:get_instance(ResourceId), + ?assertEqual({ok, connected}, emqx_resource:health_check(ResourceId)), + % % Perform query as further check that the resource is working as expected + ?assertMatch( + {ok, [#eldap_entry{attributes = [_, _ | _]}]}, + emqx_resource:query(ResourceId, test_query_no_attr()) + ), + ?assertMatch( + {ok, [#eldap_entry{attributes = [{"mqttAccountName", _}]}]}, + emqx_resource:query(ResourceId, test_query_with_attr()) + ), + ?assertMatch( + {ok, _}, + emqx_resource:query( + ResourceId, + test_query_with_attr_and_timeout() + ) + ), + ?assertMatch({ok, []}, emqx_resource:query(ResourceId, test_query_not_exists())), + ?assertEqual(ok, emqx_resource:stop(ResourceId)), + % Resource will be listed still, but state will be changed and healthcheck will fail + % as the worker no longer exists. 
+ {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := StoppedStatus + }} = + emqx_resource:get_instance(ResourceId), + ?assertEqual(stopped, StoppedStatus), + ?assertEqual({error, resource_is_stopped}, emqx_resource:health_check(ResourceId)), + % Resource healthcheck shortcuts things by checking ets. Go deeper by checking pool itself. + ?assertEqual({error, not_found}, ecpool:stop_sup_pool(PoolName)), + % Can call stop/1 again on an already stopped instance + ?assertEqual(ok, emqx_resource:stop(ResourceId)), + % Make sure it can be restarted and the healthchecks and queries work properly + ?assertEqual(ok, emqx_resource:restart(ResourceId)), + % async restart, need to wait resource + timer:sleep(500), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{status := InitialStatus}} = + emqx_resource:get_instance(ResourceId), + ?assertEqual({ok, connected}, emqx_resource:health_check(ResourceId)), + ?assertMatch({ok, _}, emqx_resource:query(ResourceId, test_query_no_attr())), + ?assertMatch({ok, _}, emqx_resource:query(ResourceId, test_query_with_attr())), + ?assertMatch( + {ok, _}, + emqx_resource:query( + ResourceId, + test_query_with_attr_and_timeout() + ) + ), + % Stop and remove the resource in one go. + ?assertEqual(ok, emqx_resource:remove_local(ResourceId)), + ?assertEqual({error, not_found}, ecpool:stop_sup_pool(PoolName)), + % Should not even be able to get the resource data out of ets now unlike just stopping. + ?assertEqual({error, not_found}, emqx_resource:get_instance(ResourceId)). + +% %%------------------------------------------------------------------------------ +% %% Helpers +% %%------------------------------------------------------------------------------ +ldap_config(Config) -> + RawConfig = list_to_binary( + io_lib:format( + "" + "\n" + " username= \"cn=root,dc=emqx,dc=io\"\n" + " password = public\n" + " pool_size = 8\n" + " server = \"~s:~b\"\n" + " base_object=\"uid=${username},ou=testdevice,dc=emqx,dc=io\"\n" + " filter =\"(objectClass=mqttUser)\"\n" + " ~ts\n" + "", + [?LDAP_HOST, port(Config), ssl(Config)] + ) + ), + + {ok, LDConfig} = hocon:binary(RawConfig), + #{<<"config">> => LDConfig}. + +test_query_no_attr() -> + {query, data()}. + +test_query_with_attr() -> + {query, data(), ["mqttAccountName"]}. + +test_query_with_attr_and_timeout() -> + {query, data(), ["mqttAccountName"], 5000}. + +test_query_not_exists() -> + {query, #{username => <<"not_exists">>}}. + +data() -> + #{username => <<"mqttuser0001">>}. + +port(tcp) -> 389; +port(ssl) -> 636; +port(Config) -> port(proplists:get_value(group, Config, tcp)). + +ssl(Config) -> + case proplists:get_value(group, Config, tcp) of + tcp -> + "ssl.enable=false"; + ssl -> + "ssl.enable=true\n" + "ssl.cacertfile=\"etc/openldap/cacert.pem\"" + end. diff --git a/apps/emqx_ldap/test/emqx_ldap_authn_SUITE.erl b/apps/emqx_ldap/test/emqx_ldap_authn_SUITE.erl new file mode 100644 index 000000000..fb3b9fc36 --- /dev/null +++ b/apps/emqx_ldap/test/emqx_ldap_authn_SUITE.erl @@ -0,0 +1,258 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_ldap_authn_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include_lib("emqx_authn/include/emqx_authn.hrl"). +-include_lib("eunit/include/eunit.hrl"). + +-define(LDAP_HOST, "ldap"). +-define(LDAP_DEFAULT_PORT, 389). +-define(LDAP_RESOURCE, <<"emqx_authn_ldap_SUITE">>). 
+ +-define(PATH, [authentication]). +-define(ResourceID, <<"password_based:ldap">>). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +init_per_testcase(_, Config) -> + {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), + emqx_authentication:initialize_authentication(?GLOBAL, []), + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ), + Config. + +init_per_suite(Config) -> + _ = application:load(emqx_conf), + case emqx_common_test_helpers:is_tcp_server_available(?LDAP_HOST, ?LDAP_DEFAULT_PORT) of + true -> + ok = emqx_common_test_helpers:start_apps([emqx_authn]), + ok = start_apps([emqx_resource]), + {ok, _} = emqx_resource:create_local( + ?LDAP_RESOURCE, + ?RESOURCE_GROUP, + emqx_ldap, + ldap_config(), + #{} + ), + Config; + false -> + {skip, no_ldap} + end. + +end_per_suite(_Config) -> + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ), + ok = emqx_resource:remove_local(?LDAP_RESOURCE), + ok = stop_apps([emqx_resource]), + ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + +%%------------------------------------------------------------------------------ +%% Tests +%%------------------------------------------------------------------------------ + +t_create(_Config) -> + AuthConfig = raw_ldap_auth_config(), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig} + ), + + {ok, [#{provider := emqx_ldap_authn}]} = emqx_authentication:list_authenticators(?GLOBAL), + emqx_authn_test_lib:delete_config(?ResourceID). + +t_create_invalid(_Config) -> + AuthConfig = raw_ldap_auth_config(), + + InvalidConfigs = + [ + AuthConfig#{<<"server">> => <<"unknownhost:3333">>}, + AuthConfig#{<<"password">> => <<"wrongpass">>} + ], + + lists:foreach( + fun(Config) -> + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, Config} + ), + emqx_authn_test_lib:delete_config(?ResourceID), + ?assertEqual( + {error, {not_found, {chain, ?GLOBAL}}}, + emqx_authentication:list_authenticators(?GLOBAL) + ) + end, + InvalidConfigs + ). + +t_authenticate(_Config) -> + ok = lists:foreach( + fun(Sample) -> + ct:pal("test_user_auth sample: ~p", [Sample]), + test_user_auth(Sample) + end, + user_seeds() + ). + +test_user_auth(#{ + credentials := Credentials0, + config_params := SpecificConfigParams, + result := Result +}) -> + AuthConfig = maps:merge(raw_ldap_auth_config(), SpecificConfigParams), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig} + ), + + Credentials = Credentials0#{ + listener => 'tcp:default', + protocol => mqtt + }, + + ?assertEqual(Result, emqx_access_control:authenticate(Credentials)), + + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ). + +t_destroy(_Config) -> + AuthConfig = raw_ldap_auth_config(), + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, AuthConfig} + ), + + {ok, [#{provider := emqx_ldap_authn, state := State}]} = + emqx_authentication:list_authenticators(?GLOBAL), + + {ok, _} = emqx_ldap_authn:authenticate( + #{ + username => <<"mqttuser0001">>, + password => <<"mqttuser0001">> + }, + State + ), + + emqx_authn_test_lib:delete_authenticators( + [authentication], + ?GLOBAL + ), + + % Authenticator should not be usable anymore + ?assertMatch( + ignore, + emqx_ldap_authn:authenticate( + #{ + username => <<"mqttuser0001">>, + password => <<"mqttuser0001">> + }, + State + ) + ). 
+ +t_update(_Config) -> + CorrectConfig = raw_ldap_auth_config(), + IncorrectConfig = + CorrectConfig#{ + <<"base_object">> => <<"ou=testdevice,dc=emqx,dc=io">> + }, + + {ok, _} = emqx:update_config( + ?PATH, + {create_authenticator, ?GLOBAL, IncorrectConfig} + ), + + {error, _} = emqx_access_control:authenticate( + #{ + username => <<"mqttuser0001">>, + password => <<"mqttuser0001">>, + listener => 'tcp:default', + protocol => mqtt + } + ), + + % We update with config with correct query, provider should update and work properly + {ok, _} = emqx:update_config( + ?PATH, + {update_authenticator, ?GLOBAL, <<"password_based:ldap">>, CorrectConfig} + ), + + {ok, _} = emqx_access_control:authenticate( + #{ + username => <<"mqttuser0001">>, + password => <<"mqttuser0001">>, + listener => 'tcp:default', + protocol => mqtt + } + ). + +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ + +raw_ldap_auth_config() -> + #{ + <<"mechanism">> => <<"password_based">>, + <<"backend">> => <<"ldap">>, + <<"server">> => ldap_server(), + <<"base_object">> => <<"uid=${username},ou=testdevice,dc=emqx,dc=io">>, + <<"username">> => <<"cn=root,dc=emqx,dc=io">>, + <<"password">> => <<"public">>, + <<"pool_size">> => 8 + }. + +user_seeds() -> + New = fun(Username, Password, Result) -> + #{ + credentials => #{ + username => Username, + password => Password + }, + config_params => #{}, + result => Result + } + end, + Valid = + lists:map( + fun(Idx) -> + Username = erlang:iolist_to_binary(io_lib:format("mqttuser000~b", [Idx])), + New(Username, Username, {ok, #{is_superuser => false}}) + end, + lists:seq(1, 5) + ), + [ + %% Not exists + New(<<"notexists">>, <<"notexists">>, {error, not_authorized}), + %% Wrong Password + New(<<"mqttuser0001">>, <<"wrongpassword">>, {error, invalid_password}), + %% Disabled + New(<<"mqttuser0006">>, <<"mqttuser0006">>, {error, user_disabled}), + %% IsSuperuser + New(<<"mqttuser0007">>, <<"mqttuser0007">>, {ok, #{is_superuser => true}}) + | Valid + ]. + +ldap_server() -> + iolist_to_binary(io_lib:format("~s:~B", [?LDAP_HOST, ?LDAP_DEFAULT_PORT])). + +ldap_config() -> + emqx_ldap_SUITE:ldap_config([]). + +start_apps(Apps) -> + lists:foreach(fun application:ensure_all_started/1, Apps). + +stop_apps(Apps) -> + lists:foreach(fun application:stop/1, Apps). diff --git a/apps/emqx_ldap/test/emqx_ldap_authz_SUITE.erl b/apps/emqx_ldap/test/emqx_ldap_authz_SUITE.erl new file mode 100644 index 000000000..de037ddf1 --- /dev/null +++ b/apps/emqx_ldap/test/emqx_ldap_authz_SUITE.erl @@ -0,0 +1,173 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_ldap_authz_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include("emqx_authz.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). + +-define(LDAP_HOST, "ldap"). +-define(LDAP_DEFAULT_PORT, 389). +-define(LDAP_RESOURCE, <<"emqx_ldap_authz_SUITE">>). + +all() -> + emqx_authz_test_lib:all_with_table_case(?MODULE, t_run_case, cases()). + +groups() -> + emqx_authz_test_lib:table_groups(t_run_case, cases()). 
+ +init_per_suite(Config) -> + ok = stop_apps([emqx_resource]), + case emqx_common_test_helpers:is_tcp_server_available(?LDAP_HOST, ?LDAP_DEFAULT_PORT) of + true -> + ok = emqx_common_test_helpers:start_apps( + [emqx_conf, emqx_authz], + fun set_special_configs/1 + ), + ok = start_apps([emqx_resource]), + ok = create_ldap_resource(), + Config; + false -> + {skip, no_ldap} + end. + +end_per_suite(_Config) -> + ok = emqx_authz_test_lib:restore_authorizers(), + ok = emqx_resource:remove_local(?LDAP_RESOURCE), + ok = stop_apps([emqx_resource]), + ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_authz]). + +init_per_group(Group, Config) -> + [{test_case, emqx_authz_test_lib:get_case(Group, cases())} | Config]. +end_per_group(_Group, _Config) -> + ok. + +init_per_testcase(_TestCase, Config) -> + ok = emqx_authz_test_lib:reset_authorizers(), + Config. +end_per_testcase(_TestCase, _Config) -> + _ = emqx_authz:set_feature_available(rich_actions, true), + ok. + +set_special_configs(emqx_authz) -> + ok = emqx_authz_test_lib:reset_authorizers(); +set_special_configs(_) -> + ok. + +%%------------------------------------------------------------------------------ +%% Testcases +%%------------------------------------------------------------------------------ + +t_run_case(Config) -> + Case = ?config(test_case, Config), + ok = setup_authz_source(), + ok = emqx_authz_test_lib:run_checks(Case). + +t_create_invalid(_Config) -> + ok = setup_authz_source(), + BadConfig = maps:merge( + raw_ldap_authz_config(), + #{<<"server">> => <<"255.255.255.255:33333">>} + ), + {ok, _} = emqx_authz:update(?CMD_REPLACE, [BadConfig]), + + [_] = emqx_authz:lookup(). + +%%------------------------------------------------------------------------------ +%% Case +%%------------------------------------------------------------------------------ +cases() -> + [ + #{ + name => simpe_publish, + client_info => #{username => <<"mqttuser0001">>}, + checks => [ + {allow, ?AUTHZ_PUBLISH, <<"mqttuser0001/pub/1">>}, + {allow, ?AUTHZ_PUBLISH, <<"mqttuser0001/pub/+">>}, + {allow, ?AUTHZ_PUBLISH, <<"mqttuser0001/pub/#">>} + ] + }, + #{ + name => simpe_subscribe, + client_info => #{username => <<"mqttuser0001">>}, + checks => [ + {allow, ?AUTHZ_SUBSCRIBE, <<"mqttuser0001/sub/1">>}, + {allow, ?AUTHZ_SUBSCRIBE, <<"mqttuser0001/sub/+">>}, + {allow, ?AUTHZ_SUBSCRIBE, <<"mqttuser0001/sub/#">>} + ] + }, + + #{ + name => simpe_pubsub, + client_info => #{username => <<"mqttuser0001">>}, + checks => [ + {allow, ?AUTHZ_PUBLISH, <<"mqttuser0001/pubsub/1">>}, + {allow, ?AUTHZ_PUBLISH, <<"mqttuser0001/pubsub/+">>}, + {allow, ?AUTHZ_PUBLISH, <<"mqttuser0001/pubsub/#">>}, + + {allow, ?AUTHZ_SUBSCRIBE, <<"mqttuser0001/pubsub/1">>}, + {allow, ?AUTHZ_SUBSCRIBE, <<"mqttuser0001/pubsub/+">>}, + {allow, ?AUTHZ_SUBSCRIBE, <<"mqttuser0001/pubsub/#">>} + ] + }, + + #{ + name => simpe_unmatched, + client_info => #{username => <<"mqttuser0001">>}, + checks => [ + {deny, ?AUTHZ_PUBLISH, <<"mqttuser0001/req/mqttuser0001/+">>}, + {deny, ?AUTHZ_PUBLISH, <<"mqttuser0001/req/mqttuser0002/+">>}, + {deny, ?AUTHZ_SUBSCRIBE, <<"mqttuser0001/req/+/mqttuser0002">>} + ] + } + ]. + +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ + +setup_authz_source() -> + setup_config(#{}). 
+ +raw_ldap_authz_config() -> + #{ + <<"enable">> => <<"true">>, + <<"type">> => <<"ldap">>, + <<"server">> => ldap_server(), + <<"base_object">> => <<"uid=${username},ou=testdevice,dc=emqx,dc=io">>, + <<"username">> => <<"cn=root,dc=emqx,dc=io">>, + <<"password">> => <<"public">>, + <<"pool_size">> => 8 + }. + +setup_config(SpecialParams) -> + emqx_authz_test_lib:setup_config( + raw_ldap_authz_config(), + SpecialParams + ). + +ldap_server() -> + iolist_to_binary(io_lib:format("~s:~B", [?LDAP_HOST, ?LDAP_DEFAULT_PORT])). + +ldap_config() -> + emqx_ldap_SUITE:ldap_config([]). + +start_apps(Apps) -> + lists:foreach(fun application:ensure_all_started/1, Apps). + +stop_apps(Apps) -> + lists:foreach(fun application:stop/1, Apps). + +create_ldap_resource() -> + {ok, _} = emqx_resource:create_local( + ?LDAP_RESOURCE, + ?RESOURCE_GROUP, + emqx_ldap, + ldap_config(), + #{} + ), + ok. diff --git a/apps/emqx_ldap/test/emqx_ldap_filter_SUITE.erl b/apps/emqx_ldap/test/emqx_ldap_filter_SUITE.erl new file mode 100644 index 000000000..1a7e970a8 --- /dev/null +++ b/apps/emqx_ldap/test/emqx_ldap_filter_SUITE.erl @@ -0,0 +1,234 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_ldap_filter_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("stdlib/include/assert.hrl"). + +-import(eldap, [ + 'and'/1, + 'or'/1, + 'not'/1, + equalityMatch/2, + substrings/2, + present/1, + greaterOrEqual/2, + lessOrEqual/2, + approxMatch/2, + extensibleMatch/2 +]). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +groups() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + _ = application:stop(emqx_connector). + +% %%------------------------------------------------------------------------------ +% %% Testcases +% %%------------------------------------------------------------------------------ + +t_and(_Config) -> + ?assertEqual('and'([equalityMatch("a", "1")]), parse("(&(a=1))")), + ?assertEqual( + 'and'([equalityMatch("a", "1"), (equalityMatch("b", "2"))]), + parse("(&(a=1)(b=2))") + ), + ?assertMatch({error, _}, scan_and_parse("(&)")). + +t_or(_Config) -> + ?assertEqual('or'([equalityMatch("a", "1")]), parse("(|(a=1))")), + ?assertEqual( + 'or'([equalityMatch("a", "1"), (equalityMatch("b", "2"))]), + parse("(|(a=1)(b=2))") + ), + ?assertMatch({error, _}, scan_and_parse("(|)")). + +t_not(_Config) -> + ?assertEqual('not'(equalityMatch("a", "1")), parse("(!(a=1))")), + ?assertMatch({error, _}, scan_and_parse("(!)")), + ?assertMatch({error, _}, scan_and_parse("(!(a=1)(b=1))")). + +t_equalityMatch(_Config) -> + ?assertEqual(equalityMatch("attr", "value"), parse("(attr=value)")), + ?assertEqual(equalityMatch("attr", "value"), parse("(attr = value)")), + ?assertMatch({error, _}, scan_and_parse("(attr=)")), + ?assertMatch({error, _}, scan_and_parse("(=)")), + ?assertMatch({error, _}, scan_and_parse("(=value)")). + +t_substrings_initial(_Config) -> + ?assertEqual(substrings("attr", [{initial, "initial"}]), parse("(attr=initial*)")), + ?assertEqual( + substrings("attr", [{initial, "initial"}, {any, "a"}]), + parse("(attr=initial*a*)") + ), + ?assertEqual( + substrings("attr", [{initial, "initial"}, {any, "a"}, {any, "b"}]), + parse("(attr=initial*a*b*)") + ). 
+ +t_substrings_final(_Config) -> + ?assertEqual(substrings("attr", [{final, "final"}]), parse("(attr=*final)")), + ?assertEqual( + substrings("attr", [{any, "a"}, {final, "final"}]), + parse("(attr=*a*final)") + ), + ?assertEqual( + substrings("attr", [{any, "a"}, {any, "b"}, {final, "final"}]), + parse("(attr=*a*b*final)") + ). + +t_substrings_initial_final(_Config) -> + ?assertEqual( + substrings("attr", [{initial, "initial"}, {final, "final"}]), + parse("(attr=initial*final)") + ), + ?assertEqual( + substrings("attr", [{initial, "initial"}, {any, "a"}, {final, "final"}]), + parse("(attr=initial*a*final)") + ), + ?assertEqual( + substrings( + "attr", + [{initial, "initial"}, {any, "a"}, {any, "b"}, {final, "final"}] + ), + parse("(attr=initial*a*b*final)") + ). + +t_substrings_only_any(_Config) -> + ?assertEqual(present("attr"), parse("(attr=*)")), + ?assertEqual(substrings("attr", [{any, "a"}]), parse("(attr=*a*)")), + ?assertEqual( + substrings("attr", [{any, "a"}, {any, "b"}]), + parse("(attr=*a*b*)") + ). + +t_greaterOrEqual(_Config) -> + ?assertEqual(greaterOrEqual("attr", "value"), parse("(attr>=value)")), + ?assertEqual(greaterOrEqual("attr", "value"), parse("(attr >= value )")), + ?assertMatch({error, _}, scan_and_parse("(attr>=)")), + ?assertMatch({error, _}, scan_and_parse("(>=)")), + ?assertMatch({error, _}, scan_and_parse("(>=value)")). + +t_lessOrEqual(_Config) -> + ?assertEqual(lessOrEqual("attr", "value"), parse("(attr<=value)")), + ?assertEqual(lessOrEqual("attr", "value"), parse("( attr <= value )")), + ?assertMatch({error, _}, scan_and_parse("(attr<=)")), + ?assertMatch({error, _}, scan_and_parse("(<=)")), + ?assertMatch({error, _}, scan_and_parse("(<=value)")). + +t_present(_Config) -> + ?assertEqual(present("attr"), parse("(attr=*)")), + ?assertEqual(present("attr"), parse("( attr = * )")). + +t_approxMatch(_Config) -> + ?assertEqual(approxMatch("attr", "value"), parse("(attr~=value)")), + ?assertEqual(approxMatch("attr", "value"), parse("( attr ~= value )")), + ?assertMatch({error, _}, scan_and_parse("(attr~=)")), + ?assertMatch({error, _}, scan_and_parse("(~=)")), + ?assertMatch({error, _}, scan_and_parse("(~=value)")). + +t_extensibleMatch_dn(_Config) -> + ?assertEqual( + extensibleMatch("value", [{type, "attr"}, {dnAttributes, true}]), parse("(attr:dn:=value)") + ), + ?assertEqual( + extensibleMatch("value", [{type, "attr"}, {dnAttributes, true}]), + parse("( attr:dn := value )") + ). + +t_extensibleMatch_rule(_Config) -> + ?assertEqual( + extensibleMatch("value", [{type, "attr"}, {matchingRule, "objectClass"}]), + parse("(attr:objectClass:=value)") + ), + ?assertEqual( + extensibleMatch("value", [{type, "attr"}, {matchingRule, "objectClass"}]), + parse("( attr:objectClass := value )") + ). + +t_extensibleMatch_dn_rule(_Config) -> + ?assertEqual( + extensibleMatch( + "value", + [ + {type, "attr"}, + {dnAttributes, true}, + {matchingRule, "objectClass"} + ] + ), + parse("(attr:dn:objectClass:=value)") + ), + ?assertEqual( + extensibleMatch( + "value", + [ + {type, "attr"}, + {dnAttributes, true}, + {matchingRule, "objectClass"} + ] + ), + parse("( attr:dn:objectClass :=value)") + ). + +t_extensibleMatch_no_dn_rule(_Config) -> + ?assertEqual(extensibleMatch("value", [{type, "attr"}]), parse("(attr:=value)")), + ?assertEqual(extensibleMatch("value", [{type, "attr"}]), parse("( attr := value )")). 
+ +t_extensibleMatch_no_type_dn(_Config) -> + ?assertEqual( + extensibleMatch("value", [{matchingRule, "objectClass"}]), + parse("(:objectClass:=value)") + ), + ?assertEqual( + extensibleMatch("value", [{matchingRule, "objectClass"}]), + parse("( :objectClass := value )") + ). + +t_extensibleMatch_no_type_no_dn(_Config) -> + ?assertEqual( + extensibleMatch( + "value", + [{dnAttributes, true}, {matchingRule, "objectClass"}] + ), + parse("(:dn:objectClass:=value)") + ), + ?assertEqual( + extensibleMatch( + "value", + [{dnAttributes, true}, {matchingRule, "objectClass"}] + ), + parse("( :dn:objectClass :=value)") + ). + +t_extensibleMatch_error(_Config) -> + ?assertMatch({error, _}, scan_and_parse("(:dn:=value)")), + ?assertMatch({error, _}, scan_and_parse("(::=value)")), + ?assertMatch({error, _}, scan_and_parse("(:=)")), + ?assertMatch({error, _}, scan_and_parse("(attr:=)")). + +t_error(_Config) -> + ?assertMatch({error, _}, scan_and_parse("(attr=value")), + ?assertMatch({error, _}, scan_and_parse("attr=value")), + ?assertMatch({error, _}, scan_and_parse("(a=b)(c=d)")). + +% %%------------------------------------------------------------------------------ +% %% Helpers +% %%------------------------------------------------------------------------------ +parse(Str) -> + {ok, Res} = scan_and_parse(Str), + Res. + +scan_and_parse(Str) -> + emqx_ldap_filter_parser:scan_and_parse(Str). diff --git a/apps/emqx_machine/priv/reboot_lists.eterm b/apps/emqx_machine/priv/reboot_lists.eterm index 03dc8618a..51c2d2274 100644 --- a/apps/emqx_machine/priv/reboot_lists.eterm +++ b/apps/emqx_machine/priv/reboot_lists.eterm @@ -106,7 +106,8 @@ emqx_schema_registry, emqx_eviction_agent, emqx_node_rebalance, - emqx_ft + emqx_ft, + emqx_ldap ], %% must always be of type `load' ce_business_apps => diff --git a/apps/emqx_machine/src/emqx_machine.app.src b/apps/emqx_machine/src/emqx_machine.app.src index 9a9dedc28..bdd1db76e 100644 --- a/apps/emqx_machine/src/emqx_machine.app.src +++ b/apps/emqx_machine/src/emqx_machine.app.src @@ -3,7 +3,7 @@ {id, "emqx_machine"}, {description, "The EMQX Machine"}, % strict semver, bump manually! - {vsn, "0.2.9"}, + {vsn, "0.2.10"}, {modules, []}, {registered, []}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_retainer/src/emqx_retainer.app.src b/apps/emqx_retainer/src/emqx_retainer.app.src index f117fda05..5238328f0 100644 --- a/apps/emqx_retainer/src/emqx_retainer.app.src +++ b/apps/emqx_retainer/src/emqx_retainer.app.src @@ -2,7 +2,7 @@ {application, emqx_retainer, [ {description, "EMQX Retainer"}, % strict semver, bump manually! - {vsn, "5.0.15"}, + {vsn, "5.0.16"}, {modules, []}, {registered, [emqx_retainer_sup]}, {applications, [kernel, stdlib, emqx, emqx_ctl]}, diff --git a/apps/emqx_retainer/src/emqx_retainer_mnesia.erl b/apps/emqx_retainer/src/emqx_retainer_mnesia.erl index 0152c240e..73c86fe04 100644 --- a/apps/emqx_retainer/src/emqx_retainer_mnesia.erl +++ b/apps/emqx_retainer/src/emqx_retainer_mnesia.erl @@ -153,6 +153,14 @@ store_retained(_, Msg = #message{topic = Topic}) -> end. clear_expired(_) -> + case mria_rlog:role() of + core -> + clear_expired(); + _ -> + ok + end. + +clear_expired() -> NowMs = erlang:system_time(millisecond), QH = qlc:q([ RetainedMsg @@ -263,12 +271,22 @@ reindex_status() -> do_store_retained(Msg, TopicTokens, ExpiryTime) -> %% Retained message is stored syncronously on all core nodes + %% + %% No transaction, meaning that concurrent writes in the cluster may + %% lead to inconsistent replicas. 
This could manifest in two clients
+    %% getting different retained messages for the same topic, depending
+    %% on which node they are connected to. We tolerate that.
     ok = do_store_retained_message(Msg, TopicTokens, ExpiryTime),
     %% Since retained message was stored syncronously on all core nodes,
     %% now we are sure that
     %% * either we will write correct indices
     %% * or if we a replicant with outdated write indices due to reindexing,
     %% the correct indices will be added by reindexing
+    %%
+    %% No transaction as well, meaning that concurrent writes in the cluster
+    %% may lead to inconsistent index replicas. This essentially allows for
+    %% inconsistent query results, where an index entry has a different expiry time
+    %% than the message it points to.
     ok = do_store_retained_indices(TopicTokens, ExpiryTime).
 
 do_store_retained_message(Msg, TopicTokens, ExpiryTime) ->
@@ -281,18 +299,20 @@ do_store_retained_message(Msg, TopicTokens, ExpiryTime) ->
 
 do_store_retained_indices(TopicTokens, ExpiryTime) ->
     Indices = dirty_indices(write),
-    ok = emqx_retainer_index:foreach_index_key(
-        fun(Key) -> do_store_retained_index(Key, ExpiryTime) end,
-        Indices,
-        TopicTokens
-    ).
+    ok = mria:async_dirty(?RETAINER_SHARD, fun() ->
+        emqx_retainer_index:foreach_index_key(
+            fun(Key) -> do_store_retained_index(Key, ExpiryTime) end,
+            Indices,
+            TopicTokens
+        )
+    end).
 
 do_store_retained_index(Key, ExpiryTime) ->
     RetainedIndex = #retained_index{
         key = Key,
         expiry_time = ExpiryTime
     },
-    mria:dirty_write(?TAB_INDEX, RetainedIndex).
+    mnesia:write(?TAB_INDEX, RetainedIndex, write).
 
 msg_table(SearchTable) ->
     qlc:q([
diff --git a/build b/build
index 1ca324120..fb0d213f2 100755
--- a/build
+++ b/build
@@ -361,10 +361,10 @@ make_tgz() {
     log "Archive sha256sum: $(cat "${target}.sha256")"
 }
 
-trap docker_cleanup EXIT
-
 docker_cleanup() {
     rm -f ./.dockerignore >/dev/null
+    # shellcheck disable=SC2015
+    [ -f ./.dockerignore.bak ] && mv ./.dockerignore.bak ./.dockerignore >/dev/null || true
 }
 
 ## Build the default docker image based on debian 11.
@@ -384,7 +384,14 @@ make_docker() {
     if [[ "$PROFILE" = *enterprise* ]]; then
         extra_deps='libsasl2-2,libsasl2-modules-gssapi-mit'
     fi
-    echo '_build' >> ./.dockerignore
+    # shellcheck disable=SC2015
+    [ -f ./.dockerignore ] && mv ./.dockerignore ./.dockerignore.bak || true
+    trap docker_cleanup EXIT
+    {
+        echo '/_build'
+        echo '/deps'
+        echo '/*.lock'
+    } >> ./.dockerignore
     set -x
     docker build --no-cache --pull \
         --build-arg BUILD_FROM="${EMQX_BUILDER}" \
@@ -394,6 +401,7 @@ make_docker() {
         --tag "${EMQX_IMAGE_TAG}" \
         -f "${EMQX_DOCKERFILE}" .
     [[ "${DEBUG:-}" -eq 1 ]] || set +x
+    echo "${EMQX_IMAGE_TAG}" > ./.docker_image_tag
 }
 
 function join {
diff --git a/changes/ce/feat-10697.en.md b/changes/ce/feat-10697.en.md
new file mode 100644
index 000000000..4361c04e8
--- /dev/null
+++ b/changes/ce/feat-10697.en.md
@@ -0,0 +1 @@
+This change allows setting `minReadySeconds` for the StatefulSet, adding a gap between the restarts of individual pods triggered by an upgrade or restart command.
diff --git a/changes/ce/feat-11390.en.md b/changes/ce/feat-11390.en.md
new file mode 100644
index 000000000..e0fa9a212
--- /dev/null
+++ b/changes/ce/feat-11390.en.md
@@ -0,0 +1,3 @@
+Add `node.broker_pool_size`, `node.generic_pool_size`, `node.channel_cleanup_batch_size` options to EMQX configuration.
+
+Tuning these options can significantly improve performance if cluster interconnect network latency is high.
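For context on the new `node.*` options above, a minimal, hypothetical sketch of overriding them at startup via EMQX's environment-variable config mapping (the same `EMQX_..__..` convention used by `EMQX_NODE__COOKIE` in the `dev` script later in this diff); the values shown are placeholders, not tuning recommendations:

```bash
# Hypothetical usage sketch (not part of this change): override the new
# node.* options via environment variables before starting the node.
# The double underscore stands for a dot in the config path.
export EMQX_NODE__BROKER_POOL_SIZE=32               # default: schedulers * 2
export EMQX_NODE__GENERIC_POOL_SIZE=16              # default: number of schedulers
export EMQX_NODE__CHANNEL_CLEANUP_BATCH_SIZE=50000  # default: 100000
./bin/emqx start
```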
diff --git a/changes/ce/fix-11388.en.md b/changes/ce/fix-11388.en.md
new file mode 100644
index 000000000..835155585
--- /dev/null
+++ b/changes/ce/fix-11388.en.md
@@ -0,0 +1,6 @@
+Increase `emqx_router_sup` restart intensity.
+
+The goal is to tolerate occasional crashes that can happen under relatively normal conditions
+and don't seem critical enough to shut down the whole app (emqx).
+For example, a mria write/delete call delegated from a replicant to a core node by `emqx_router_helper` may fail
+if the core node is being stopped / restarted / not ready.
diff --git a/changes/ce/fix-11410.en.md b/changes/ce/fix-11410.en.md
new file mode 100644
index 000000000..f0c144652
--- /dev/null
+++ b/changes/ce/fix-11410.en.md
@@ -0,0 +1,3 @@
+Reintroduce `cacerts` TLS client option as a deprecated option.
+
+This fixes issues found when trying to upgrade from 5.1.3 where that option is set in the configuration files or persisted in EMQX Operator settings.
diff --git a/changes/ce/perf-11389.en.md b/changes/ce/perf-11389.en.md
new file mode 100644
index 000000000..053f7f58f
--- /dev/null
+++ b/changes/ce/perf-11389.en.md
@@ -0,0 +1 @@
+Improved retained message publishing latency by consolidating multiple index update operations into a single mnesia activity, leveraging the new APIs introduced in mria 0.6.0.
diff --git a/changes/ee/feat-11386.en.md b/changes/ee/feat-11386.en.md
new file mode 100644
index 000000000..740d8f3bf
--- /dev/null
+++ b/changes/ee/feat-11386.en.md
@@ -0,0 +1 @@
+Integrated LDAP as a new authenticator.
diff --git a/changes/ee/feat-11392.en.md b/changes/ee/feat-11392.en.md
new file mode 100644
index 000000000..6ac14abff
--- /dev/null
+++ b/changes/ee/feat-11392.en.md
@@ -0,0 +1 @@
+Integrated LDAP as an authorization source.
diff --git a/changes/ee/feat-11402.en.md b/changes/ee/feat-11402.en.md
new file mode 100644
index 000000000..7d6090b58
--- /dev/null
+++ b/changes/ee/feat-11402.en.md
@@ -0,0 +1 @@
+Added support for using placeholders to define the MQTT topic in Kafka Consumer bridge topic mappings.
diff --git a/changes/v5.1.4.en.md b/changes/v5.1.4.en.md
new file mode 100644
index 000000000..5004ee867
--- /dev/null
+++ b/changes/v5.1.4.en.md
@@ -0,0 +1,11 @@
+# v5.1.4
+
+## Enhancements
+
+- [#11185](https://github.com/emqx/emqx/pull/11185) Add support for [topologySpreadConstraints](https://kubernetes.io/docs/concepts/scheduling-eviction/topology-spread-constraints/) in the Helm chart.
+
+## Bug Fixes
+
+- [#11347](https://github.com/emqx/emqx/pull/11347) Ensure that OCSP request path is properly URL encoded.
+
+- [#11372](https://github.com/emqx/emqx/pull/11372) Removed the recently introduced `cacerts` option from TLS client schema due to incompatibilities with some cluster discovery mechanisms.
diff --git a/changes/v5.1.5.en.md b/changes/v5.1.5.en.md
new file mode 100644
index 000000000..69377f157
--- /dev/null
+++ b/changes/v5.1.5.en.md
@@ -0,0 +1,24 @@
+# v5.1.5
+
+## Enhancements
+
+- [#10697](https://github.com/emqx/emqx/pull/10697) This change allows setting `minReadySeconds` for the StatefulSet, adding a gap between the restarts of individual pods triggered by an upgrade or restart command.
+
+- [#11390](https://github.com/emqx/emqx/pull/11390) Add `node.broker_pool_size`, `node.generic_pool_size`, `node.channel_cleanup_batch_size` options to EMQX configuration.
+
+  Tuning these options can significantly improve performance if cluster interconnect network latency is high.
+
+- [#11389](https://github.com/emqx/emqx/pull/11389) Improved retained message publishing latency by consolidating multiple index update operations into a single mnesia activity, leveraging the new APIs introduced in mria 0.6.0.
+
+## Bug Fixes
+
+- [#11388](https://github.com/emqx/emqx/pull/11388) Increase `emqx_router_sup` restart intensity.
+
+  The goal is to tolerate occasional crashes that can happen under relatively normal conditions
+  and don't seem critical enough to shut down the whole app (emqx).
+  For example, a mria write/delete call delegated from a replicant to a core node by `emqx_router_helper` may fail
+  if the core node is being stopped / restarted / not ready.
+
+- [#11410](https://github.com/emqx/emqx/pull/11410) Reintroduce `cacerts` TLS client option as a deprecated option.
+
+  This fixes issues found when trying to upgrade from 5.1.3 where that option is set in the configuration files or persisted in EMQX Operator settings.
diff --git a/deploy/charts/emqx-enterprise/templates/StatefulSet.yaml b/deploy/charts/emqx-enterprise/templates/StatefulSet.yaml
index eb5192c5a..7f909cc79 100644
--- a/deploy/charts/emqx-enterprise/templates/StatefulSet.yaml
+++ b/deploy/charts/emqx-enterprise/templates/StatefulSet.yaml
@@ -32,6 +32,9 @@ spec:
   {{- end }}
   updateStrategy:
     type: RollingUpdate
+  {{- if .Values.minReadySeconds }}
+  minReadySeconds: {{ .Values.minReadySeconds }}
+  {{- end }}
   replicas: {{ .Values.replicaCount }}
   selector:
     matchLabels:
diff --git a/deploy/charts/emqx-enterprise/values.yaml b/deploy/charts/emqx-enterprise/values.yaml
index d527bf057..37fa56348 100644
--- a/deploy/charts/emqx-enterprise/values.yaml
+++ b/deploy/charts/emqx-enterprise/values.yaml
@@ -35,6 +35,9 @@ serviceAccount:
 ## Forces the recreation of pods during helm upgrades. This can be useful to update configuration values even if the container image did not change.
 recreatePods: false
 
+## Sets the minReadySeconds parameter on the StatefulSet. This can be used to add a delay between restarts/updates of individual pods.
+minReadySeconds:
+
 clusterDomain: cluster.local
 
 podAnnotations: {}
diff --git a/deploy/charts/emqx/Chart.yaml b/deploy/charts/emqx/Chart.yaml
index 0f85cf8ab..f1e4fe822 100644
--- a/deploy/charts/emqx/Chart.yaml
+++ b/deploy/charts/emqx/Chart.yaml
@@ -14,8 +14,8 @@ type: application
 
 # This is the chart version. This version number should be incremented each time you make changes
 # to the chart and its templates, including the app version.
-version: 5.1.3
+version: 5.1.5
 
 # This is the version number of the application being deployed. This version number should be
 # incremented each time you make changes to the application.
-appVersion: 5.1.3
+appVersion: 5.1.5
diff --git a/deploy/charts/emqx/templates/StatefulSet.yaml b/deploy/charts/emqx/templates/StatefulSet.yaml
index 9f8e64f9c..430260585 100644
--- a/deploy/charts/emqx/templates/StatefulSet.yaml
+++ b/deploy/charts/emqx/templates/StatefulSet.yaml
@@ -32,6 +32,9 @@ spec:
   {{- end }}
   updateStrategy:
     type: RollingUpdate
+  {{- if .Values.minReadySeconds }}
+  minReadySeconds: {{ .Values.minReadySeconds }}
+  {{- end }}
   replicas: {{ .Values.replicaCount }}
   selector:
     matchLabels:
diff --git a/deploy/charts/emqx/values.yaml b/deploy/charts/emqx/values.yaml
index ff28d71cd..791db5812 100644
--- a/deploy/charts/emqx/values.yaml
+++ b/deploy/charts/emqx/values.yaml
@@ -35,6 +35,9 @@ serviceAccount:
 ## Forces the recreation of pods during helm upgrades. This can be useful to update configuration values even if the container image did not change.
 recreatePods: false
 
+## Sets the minReadySeconds parameter on the StatefulSet. This can be used to add a delay between restarts/updates of individual pods.
+minReadySeconds:
+
 clusterDomain: cluster.local
 
 podAnnotations: {}
diff --git a/dev b/dev
index 1db84f7c2..20cd66569 100755
--- a/dev
+++ b/dev
@@ -155,10 +155,11 @@ BASE_DIR="_build/dev-run/$PROFILE"
 export EMQX_ETC_DIR="$BASE_DIR/etc"
 export EMQX_DATA_DIR="$BASE_DIR/data"
 export EMQX_LOG_DIR="$BASE_DIR/log"
+export EMQX_PLUGINS__INSTALL_DIR="${EMQX_PLUGINS__INSTALL_DIR:-$BASE_DIR/plugins}"
 CONFIGS_DIR="$EMQX_DATA_DIR/configs"
 # Use your cookie so your IDE can connect to it.
 COOKIE="${EMQX_NODE__COOKIE:-${EMQX_NODE_COOKIE:-$(cat ~/.erlang.cookie || echo 'emqxsecretcookie')}}"
-mkdir -p "$EMQX_ETC_DIR" "$EMQX_DATA_DIR/patches" "$EMQX_DATA_DIR/certs" "$EMQX_LOG_DIR" "$CONFIGS_DIR"
+mkdir -p "$EMQX_ETC_DIR" "$EMQX_DATA_DIR/patches" "$EMQX_DATA_DIR/plugins" "$EMQX_DATA_DIR/certs" "$EMQX_LOG_DIR" "$CONFIGS_DIR"
 if [ $EKKA_EPMD -eq 1 ]; then
     EPMD_ARGS='-start_epmd false -epmd_module ekka_epmd'
 else
diff --git a/mix.exs b/mix.exs
index c145c4a66..00d190136 100644
--- a/mix.exs
+++ b/mix.exs
@@ -55,7 +55,7 @@ defmodule EMQXUmbrella.MixProject do
       {:cowboy, github: "emqx/cowboy", tag: "2.9.2", override: true},
       {:esockd, github: "emqx/esockd", tag: "5.9.6", override: true},
       {:rocksdb, github: "emqx/erlang-rocksdb", tag: "1.8.0-emqx-1", override: true},
-      {:ekka, github: "emqx/ekka", tag: "0.15.9", override: true},
+      {:ekka, github: "emqx/ekka", tag: "0.15.10", override: true},
       {:gen_rpc, github: "emqx/gen_rpc", tag: "2.8.1", override: true},
       {:grpc, github: "emqx/grpc-erl", tag: "0.6.8", override: true},
       {:minirest, github: "emqx/minirest", tag: "1.3.11", override: true},
@@ -194,7 +194,8 @@ defmodule EMQXUmbrella.MixProject do
         :emqx_schema_registry,
         :emqx_enterprise,
         :emqx_bridge_kinesis,
-        :emqx_bridge_azure_event_hub
+        :emqx_bridge_azure_event_hub,
+        :emqx_ldap
       ])
     end
diff --git a/rebar.config b/rebar.config
index 7a8e0bb02..131149f47 100644
--- a/rebar.config
+++ b/rebar.config
@@ -62,7 +62,7 @@
  , {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}}
  , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}}
  , {rocksdb, {git, "https://github.com/emqx/erlang-rocksdb", {tag, "1.8.0-emqx-1"}}}
- , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.9"}}}
+ , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.10"}}}
  , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}}
  , {grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.8"}}}
  , {minirest, {git, "https://github.com/emqx/minirest", {tag, "1.3.11"}}}
diff --git a/rebar.config.erl b/rebar.config.erl
index f679bc2bb..b45516d2b 100644
--- a/rebar.config.erl
+++ b/rebar.config.erl
@@ -106,6 +106,7 @@ is_community_umbrella_app("apps/emqx_schema_registry") -> false;
 is_community_umbrella_app("apps/emqx_enterprise") -> false;
 is_community_umbrella_app("apps/emqx_bridge_kinesis") -> false;
 is_community_umbrella_app("apps/emqx_bridge_azure_event_hub") -> false;
+is_community_umbrella_app("apps/emqx_ldap") -> false;
 is_community_umbrella_app(_) -> true.
 
 is_jq_supported() ->
diff --git a/rel/config/examples/retainer.conf.example b/rel/config/examples/retainer.conf.example
index d33037938..d78119ec2 100644
--- a/rel/config/examples/retainer.conf.example
+++ b/rel/config/examples/retainer.conf.example
@@ -8,6 +8,9 @@ retainer {
     ## set to false to disable retainer
     enable = true
 
+    ## Message retention time. The default value 0 means messages never expire.
+    msg_expiry_interval = 5s
+
     ## Maximum message size allowed
     max_payload_size = 1MB
 
diff --git a/rel/i18n/emqx_bridge_dynamo.hocon b/rel/i18n/emqx_bridge_dynamo.hocon
index 0b49c6e2f..417b43c0c 100644
--- a/rel/i18n/emqx_bridge_dynamo.hocon
+++ b/rel/i18n/emqx_bridge_dynamo.hocon
@@ -37,8 +37,7 @@ local_topic.label:
 template.desc:
 """Template, the default value is empty. When this value is empty the whole message will be stored in the database.
The template can be any valid JSON with placeholders and make sure all keys for table are here, example:
- {"id" : "${id}", "clientid" : "${clientid}", "data" : "${payload.data}"} -""" + {"id" : "${id}", "clientid" : "${clientid}", "data" : "${payload.data}"}""" template.label: """Template""" diff --git a/rel/i18n/emqx_conf_schema.hocon b/rel/i18n/emqx_conf_schema.hocon index bc45fa009..442df1fa8 100644 --- a/rel/i18n/emqx_conf_schema.hocon +++ b/rel/i18n/emqx_conf_schema.hocon @@ -776,4 +776,29 @@ the default is to use the value set in db.default_shard_transport." db_shard_transports.label: """Shard Transports""" +node_broker_pool_size.desc: +"""The number of workers in emqx_broker pool. Increasing this value may improve performance +by enhancing parallelism, especially when EMQX cluster interconnect network latency is high. +Defaults to the number of Erlang schedulers (CPU cores) * 2.""" + +node_broker_pool_size.label: +"""Node Broker Pool Size""" + +node_generic_pool_size.desc: +"""The number of workers in emqx_pool. Increasing this value may improve performance +by enhancing parallelism, especially when EMQX cluster interconnect network latency is high. +Defaults to the number of Erlang schedulers (CPU cores).""" + +node_generic_pool_size.label: +"""Node Generic Pool Size""" + +node_channel_cleanup_batch_size.desc: +"""The size of the channel cleanup batch. if EMQX cluster interconnect network latency is high, +reducing this value together with increasing node.generic_pool_size may improve performance +during an abrupt disconnect of a large numbers of clients. +Defaults to 100000.""" + +node_channel_cleanup_batch_size.label: +"""Node Channel Cleanup Batch Size""" + } diff --git a/rel/i18n/emqx_connector_ldap.hocon b/rel/i18n/emqx_connector_ldap.hocon deleted file mode 100644 index 64a953816..000000000 --- a/rel/i18n/emqx_connector_ldap.hocon +++ /dev/null @@ -1,21 +0,0 @@ -emqx_connector_ldap { - -bind_dn.desc: -"""LDAP's Binding Distinguished Name (DN)""" - -bind_dn.label: -"""Bind DN""" - -port.desc: -"""LDAP Port""" - -port.label: -"""Port""" - -timeout.desc: -"""LDAP's query timeout""" - -timeout.label: -"""timeout""" - -} diff --git a/rel/i18n/emqx_dashboard_schema.hocon b/rel/i18n/emqx_dashboard_schema.hocon index 0559af2c5..524e633aa 100644 --- a/rel/i18n/emqx_dashboard_schema.hocon +++ b/rel/i18n/emqx_dashboard_schema.hocon @@ -8,8 +8,7 @@ backlog.label: bind.desc: """Port without IP(18083) or port with specified IP(127.0.0.1:18083). -Disabled when setting bind to `0`. -""" +Disabled when setting bind to `0`.""" bind.label: """Bind""" diff --git a/rel/i18n/emqx_ldap.hocon b/rel/i18n/emqx_ldap.hocon new file mode 100644 index 000000000..cd2865d85 --- /dev/null +++ b/rel/i18n/emqx_ldap.hocon @@ -0,0 +1,25 @@ +emqx_ldap { + +server.desc: +"""The IPv4 or IPv6 address or the hostname to connect to.
+A host entry has the following form: `Host[:Port]`.
+The LDAP default port 389 is used if `[:Port]` is not specified."""
+
+server.label:
+"""Server Host"""
+
+base_object.desc:
+"""The name of the base object entry (or possibly the root) relative to
+which the Search is to be performed."""
+
+base_object.label:
+"""Base Object"""
+
+filter.desc:
+"""The filter that defines the conditions that must be fulfilled in order
+for the Search to match a given entry."""
+
+filter.label:
+"""Filter"""
+
+}
diff --git a/rel/i18n/emqx_ldap_authn.hocon b/rel/i18n/emqx_ldap_authn.hocon
new file mode 100644
index 000000000..04dc88e83
--- /dev/null
+++ b/rel/i18n/emqx_ldap_authn.hocon
@@ -0,0 +1,24 @@
+emqx_ldap_authn {
+
+ldap.desc:
+"""Configuration of an authenticator that uses LDAP as the authentication data source."""
+
+password_attribute.desc:
+"""Indicates which attribute is used to represent the user's password."""
+
+password_attribute.label:
+"""Password Attribute"""
+
+is_superuser_attribute.desc:
+"""Indicates which attribute is used to represent whether the user is a superuser."""
+
+is_superuser_attribute.label:
+"""IsSuperuser Attribute"""
+
+query_timeout.desc:
+"""Timeout for the LDAP query."""
+
+query_timeout.label:
+"""Query Timeout"""
+
+}
diff --git a/rel/i18n/emqx_ldap_authz.hocon b/rel/i18n/emqx_ldap_authz.hocon
new file mode 100644
index 000000000..1ccb085f1
--- /dev/null
+++ b/rel/i18n/emqx_ldap_authz.hocon
@@ -0,0 +1,27 @@
+emqx_ldap_authz {
+
+publish_attribute.desc:
+"""Indicates which attribute is used to represent the allowed topics list for `publish`."""
+
+publish_attribute.label:
+"""Publish Attribute"""
+
+subscribe_attribute.desc:
+"""Indicates which attribute is used to represent the allowed topics list for `subscribe`."""
+
+subscribe_attribute.label:
+"""Subscribe Attribute"""
+
+all_attribute.desc:
+"""Indicates which attribute is used to represent the allowed topics list for both `publish` and `subscribe`."""
+
+all_attribute.label:
+"""All Attribute"""
+
+query_timeout.desc:
+"""Timeout for the LDAP query."""
+
+query_timeout.label:
+"""Query Timeout"""
+
+}
diff --git a/rel/i18n/emqx_mgmt_api_key_schema.hocon b/rel/i18n/emqx_mgmt_api_key_schema.hocon
index c217dc2db..811ab8a98 100644
--- a/rel/i18n/emqx_mgmt_api_key_schema.hocon
+++ b/rel/i18n/emqx_mgmt_api_key_schema.hocon
@@ -10,8 +10,7 @@ bootstrap_file.desc:
 """The bootstrap file provides API keys for EMQX.
 EMQX will load these keys on startup to authorize API requests.
 It contains key-value pairs in the format:`api_key:api_secret`.
-Each line specifies an API key and its associated secret.
-"""
+Each line specifies an API key and its associated secret."""
 
 bootstrap_file.label:
 """Initialize api_key file."""
diff --git a/scripts/check-i18n-style.escript b/scripts/check-i18n-style.escript
index b8b6bdac7..f48e5a513 100755
--- a/scripts/check-i18n-style.escript
+++ b/scripts/check-i18n-style.escript
@@ -16,9 +16,9 @@ main([Files0]) ->
     ok = lists:foreach(fun check/1, Files),
     case get(errors) of
         1 ->
-            logerr("1 error found~n", []);
+            die("1 error found~n", []);
         N when is_integer(N) andalso N > 1 ->
-            logerr("~p errors found~n", [N]);
+            die("~p errors found~n", [N]);
         _ ->
             io:format(user, "~nOK~n", [])
     end.
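To make the `base_object` / `filter` / attribute settings documented above more concrete, here is a hedged sketch of the equivalent standalone OpenLDAP query; the URL, bind DN, credentials, filter, and attribute names are invented placeholders rather than values taken from this change:

```bash
# Hedged sketch: the kind of LDAP search the authenticator/authorizer settings
# describe. -b corresponds to base_object, the filter string to filter, and the
# trailing attribute names stand in for the password / superuser / publish /
# subscribe attributes. Every concrete value here is a placeholder.
ldapsearch -x \
  -H "ldap://localhost:389" \
  -D "cn=root,dc=emqx,dc=io" \
  -w "public" \
  -b "ou=testdevice,dc=emqx,dc=io" \
  "(&(objectClass=mqttUser)(uid=someclient))" \
  userPassword isSuperuser mqttPublishTopic mqttSubscriptionTopic
```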
diff --git a/scripts/ct/run.sh b/scripts/ct/run.sh index 578b9c4de..5ad289303 100755 --- a/scripts/ct/run.sh +++ b/scripts/ct/run.sh @@ -225,6 +225,9 @@ for dep in ${CT_DEPS}; do greptimedb) FILES+=( '.ci/docker-compose-file/docker-compose-greptimedb.yaml' ) ;; + ldap) + FILES+=( '.ci/docker-compose-file/docker-compose-ldap.yaml' ) + ;; *) echo "unknown_ct_dependency $dep" exit 1 diff --git a/scripts/parse-git-ref.sh b/scripts/parse-git-ref.sh new file mode 100755 index 000000000..a486f2589 --- /dev/null +++ b/scripts/parse-git-ref.sh @@ -0,0 +1,74 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# $1 is fully qualified git ref name, e.g. refs/tags/v5.1.0 or refs/heads/master + +is_latest() { + ref_name=$(basename "$1") + # shellcheck disable=SC2046 + for t in $(git tag --points-at $(git rev-list --tags --max-count=1)); do + if [[ "$t" == "$ref_name" ]]; then + echo true; + return; + fi + done + echo false +} + +if [[ $1 =~ ^refs/tags/v[5-9]+\.[0-9]+\.[0-9]+$ ]]; then + PROFILE=emqx + EDITION=Opensource + RELEASE=true + LATEST=$(is_latest "$1") +elif [[ $1 =~ ^refs/tags/v[5-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + PROFILE=emqx + EDITION=Opensource + RELEASE=true + LATEST=$(is_latest "$1") +elif [[ $1 =~ ^refs/tags/e[5-9]+\.[0-9]+\.[0-9]+$ ]]; then + PROFILE=emqx-enterprise + EDITION=Enterprise + RELEASE=true + LATEST=$(is_latest "$1") +elif [[ $1 =~ ^refs/tags/e[5-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + PROFILE=emqx-enterprise + EDITION=Enterprise + RELEASE=true + LATEST=$(is_latest "$1") +elif [[ $1 =~ ^refs/tags/v[5-9]+\.[0-9]+\.[0-9]+-(alpha|beta|rc)\.[0-9]+$ ]]; then + PROFILE=emqx + EDITION=Opensource + RELEASE=true + LATEST=false +elif [[ $1 =~ ^refs/tags/e[5-9]+\.[0-9]+\.[0-9]+-(alpha|beta|rc)\.[0-9]+$ ]]; then + PROFILE=emqx-enterprise + EDITION=Enterprise + RELEASE=true + LATEST=false +elif [[ $1 =~ ^refs/tags/.+ ]]; then + echo "Unrecognized tag: $1" 1>&2 + exit 1 +elif [[ $1 =~ ^refs/heads/master$ ]]; then + PROFILE=emqx + EDITION=Opensource + RELEASE=false + LATEST=false +elif [[ $1 =~ ^refs/heads/release-[5-9][0-9]+$ ]]; then + PROFILE=emqx-enterprise + EDITION=Enterprise + RELEASE=false + LATEST=false +elif [[ $1 =~ ^refs/heads/ci/.* ]]; then + PROFILE=emqx + EDITION=Opensource + RELEASE=false + LATEST=false +else + echo "Unrecognized git ref: $1" 1>&2 + exit 1 +fi + +cat < /dev/null; then + echo "docker is not installed" + exit 1 +fi + +if ! 
type "yq" > /dev/null; then + echo "yq is not installed" + exit 1 +fi + +EMQX_BUILDER_VERSION=${EMQX_BUILDER_VERSION:-5.1-3} +EMQX_BUILDER_OTP=${EMQX_BUILDER_OTP:-25.3.2-1} +EMQX_BUILDER_ELIXIR=${EMQX_BUILDER_ELIXIR:-1.14.5} +EMQX_BUILDER_PLATFORM=${EMQX_BUILDER_PLATFORM:-ubuntu22.04} +EMQX_BUILDER=${EMQX_BUILDER:-ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VERSION}:${EMQX_BUILDER_ELIXIR}-${EMQX_BUILDER_OTP}-${EMQX_BUILDER_PLATFORM}} + +commands=$(yq ".jobs.sanity-checks.steps[].run" .github/workflows/_pr_entrypoint.yaml | grep -v null) + +BEFORE_REF=${BEFORE_REF:-$(git rev-parse master)} +AFTER_REF=${AFTER_REF:-$(git rev-parse HEAD)} +docker run --rm -it -v "$(pwd):/emqx" -w /emqx \ + -e GITHUB_WORKSPACE=/emqx \ + -e BEFORE_REF="$BEFORE_REF" \ + -e AFTER_REF="$AFTER_REF" \ + -e GITHUB_BASE_REF="$BEFORE_REF" \ + -e MIX_ENV=emqx-enterprise \ + -e PROFILE=emqx-enterprise \ + -e ACTIONLINT_VSN=1.6.25 \ + "${EMQX_BUILDER}" /bin/bash -c "git config --global --add safe.directory /emqx; ${commands}" diff --git a/scripts/shelltest/parse-git-ref.cleanup b/scripts/shelltest/parse-git-ref.cleanup new file mode 100644 index 000000000..8c6dcc027 --- /dev/null +++ b/scripts/shelltest/parse-git-ref.cleanup @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +git tag -d v5.1.99 >/dev/null +git tag -d e5.1.99 >/dev/null diff --git a/scripts/shelltest/parse-git-ref.setup b/scripts/shelltest/parse-git-ref.setup new file mode 100644 index 000000000..f8938787f --- /dev/null +++ b/scripts/shelltest/parse-git-ref.setup @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +git tag v5.1.99 +git tag e5.1.99 diff --git a/scripts/shelltest/parse-git-ref.test b/scripts/shelltest/parse-git-ref.test new file mode 100644 index 000000000..3f2ede0d1 --- /dev/null +++ b/scripts/shelltest/parse-git-ref.test @@ -0,0 +1,94 @@ +./parse-git-ref.sh refs/tags/v5.2.0-foobar.1 +>>>2 +Unrecognized tag: refs/tags/v5.2.0-foobar.1 +>>>= 1 + +./parse-git-ref.sh v5.2.0 +>>>2 +Unrecognized git ref: v5.2.0 +>>>= 1 + +./parse-git-ref.sh refs/tags/v5.1.0 +>>> +{"profile": "emqx", "edition": "Opensource", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/v5.1.5.1 +>>> +{"profile": "emqx", "edition": "Opensource", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/v5.2.0-alpha.1 +>>> +{"profile": "emqx", "edition": "Opensource", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/v5.2.0-alpha-1 +>>>2 +Unrecognized tag: refs/tags/v5.2.0-alpha-1 +>>>= 1 + +./parse-git-ref.sh refs/tags/v5.2.0-beta.1 +>>> +{"profile": "emqx", "edition": "Opensource", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/v5.2.0-rc.1 +>>> +{"profile": "emqx", "edition": "Opensource", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/e5.1.0 +>>> +{"profile": "emqx-enterprise", "edition": "Enterprise", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/e5.1.5.1 +>>> +{"profile": "emqx-enterprise", "edition": "Enterprise", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/e5.2.0-alpha.1 +>>> +{"profile": "emqx-enterprise", "edition": "Enterprise", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/e5.2.0-beta.1 +>>> +{"profile": "emqx-enterprise", "edition": "Enterprise", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/e5.2.0-rc.1 +>>> +{"profile": "emqx-enterprise", "edition": "Enterprise", "release": true, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/tags/e5.1.99 +>>> +{"profile": 
"emqx-enterprise", "edition": "Enterprise", "release": true, "latest": true} +>>>= 0 + +./parse-git-ref.sh refs/tags/v5.1.99 +>>> +{"profile": "emqx", "edition": "Opensource", "release": true, "latest": true} +>>>= 0 + +./parse-git-ref.sh refs/heads/master +>>> +{"profile": "emqx", "edition": "Opensource", "release": false, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/heads/release-51 +>>> +{"profile": "emqx-enterprise", "edition": "Enterprise", "release": false, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/heads/ci/foobar +>>> +{"profile": "emqx", "edition": "Opensource", "release": false, "latest": false} +>>>= 0 + +./parse-git-ref.sh refs/heads/release-44 +>>>2 +Unrecognized git ref: refs/heads/release-44 +>>>= 1 diff --git a/scripts/shelltest/run_tests.sh b/scripts/shelltest/run_tests.sh new file mode 100755 index 000000000..11caa6cac --- /dev/null +++ b/scripts/shelltest/run_tests.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +# shellcheck disable=SC2164 +cd -P -- "$(dirname -- "$0")/.." + +exit_code=0 + +for test in shelltest/*.test; do + echo "Running $test" + /bin/sh "${test%.test}.setup" + shelltest -c --diff --all --precise -- "$test" + # shellcheck disable=SC2181 + if [ $? -ne 0 ]; then + exit_code=1 + fi + /bin/sh "${test%.test}.cleanup" +done + +exit $exit_code