Merge pull request #11410 from thalesmg/prepare-v515-20230808
prepare to tag `v5.1.5`
This commit is contained in:
commit
6a40a9fc8e
|
@ -19,7 +19,9 @@ services:
|
|||
- emqx2
|
||||
volumes:
|
||||
- ./haproxy/haproxy.cfg:/usr/local/etc/haproxy/haproxy.cfg
|
||||
- ../../apps/emqx/etc/certs:/usr/local/etc/haproxy/certs
|
||||
- ../../apps/emqx/etc/certs/cert.pem:/usr/local/etc/haproxy/certs/cert.pem
|
||||
- ../../apps/emqx/etc/certs/key.pem:/usr/local/etc/haproxy/certs/key.pem
|
||||
- ../../apps/emqx/etc/certs/cacert.pem:/usr/local/etc/haproxy/certs/cacert.pem
|
||||
ports:
|
||||
- "18083:18083"
|
||||
# - "1883:1883"
|
||||
|
@ -34,7 +36,7 @@ services:
|
|||
- -c
|
||||
- |
|
||||
set -x
|
||||
cat /usr/local/etc/haproxy/certs/cert.pem /usr/local/etc/haproxy/certs/key.pem > /tmp/emqx.pem
|
||||
cat /usr/local/etc/haproxy/certs/cert.pem /usr/local/etc/haproxy/certs/key.pem > /var/lib/haproxy/emqx.pem
|
||||
haproxy -f /usr/local/etc/haproxy/haproxy.cfg
|
||||
|
||||
emqx1:
|
||||
|
|
|
@ -6,11 +6,11 @@ services:
|
|||
build:
|
||||
context: ../..
|
||||
dockerfile: .ci/docker-compose-file/openldap/Dockerfile
|
||||
args:
|
||||
args:
|
||||
LDAP_TAG: ${LDAP_TAG}
|
||||
image: openldap
|
||||
ports:
|
||||
- 389:389
|
||||
image: openldap
|
||||
#ports:
|
||||
# - 389:389
|
||||
restart: always
|
||||
networks:
|
||||
- emqx_bridge
|
|
@ -83,13 +83,13 @@ backend emqx_ws_back
|
|||
frontend emqx_ssl
|
||||
mode tcp
|
||||
option tcplog
|
||||
bind *:8883 ssl crt /tmp/emqx.pem ca-file /usr/local/etc/haproxy/certs/cacert.pem verify required no-sslv3
|
||||
bind *:8883 ssl crt /var/lib/haproxy/emqx.pem ca-file /usr/local/etc/haproxy/certs/cacert.pem verify required no-sslv3
|
||||
default_backend emqx_ssl_back
|
||||
|
||||
frontend emqx_wss
|
||||
mode tcp
|
||||
option tcplog
|
||||
bind *:8084 ssl crt /tmp/emqx.pem ca-file /usr/local/etc/haproxy/certs/cacert.pem verify required no-sslv3
|
||||
bind *:8084 ssl crt /var/lib/haproxy/emqx.pem ca-file /usr/local/etc/haproxy/certs/cacert.pem verify required no-sslv3
|
||||
default_backend emqx_wss_back
|
||||
|
||||
backend emqx_ssl_back
|
||||
|
|
|
@ -1,18 +1,20 @@
|
|||
FROM buildpack-deps:stretch
|
||||
FROM buildpack-deps:bookworm
|
||||
|
||||
ARG LDAP_TAG=2.4.50
|
||||
ARG LDAP_TAG=2.5.16
|
||||
|
||||
RUN apt-get update && apt-get install -y groff groff-base
|
||||
RUN wget ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-${LDAP_TAG}.tgz \
|
||||
&& gunzip -c openldap-${LDAP_TAG}.tgz | tar xvfB - \
|
||||
RUN wget https://www.openldap.org/software/download/OpenLDAP/openldap-release/openldap-${LDAP_TAG}.tgz \
|
||||
&& tar xvzf openldap-${LDAP_TAG}.tgz \
|
||||
&& cd openldap-${LDAP_TAG} \
|
||||
&& ./configure && make depend && make && make install \
|
||||
&& cd .. && rm -rf openldap-${LDAP_TAG}
|
||||
|
||||
COPY .ci/docker-compose-file/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf
|
||||
COPY apps/emqx_authn/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
|
||||
COPY apps/emqx_authn/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
|
||||
COPY apps/emqx_authn/test/data/certs/*.pem /usr/local/etc/openldap/
|
||||
COPY apps/emqx_ldap/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
|
||||
COPY apps/emqx_ldap/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
|
||||
COPY .ci/docker-compose-file/certs/ca.crt /usr/local/etc/openldap/cacert.pem
|
||||
COPY .ci/docker-compose-file/certs/server.crt /usr/local/etc/openldap/cert.pem
|
||||
COPY .ci/docker-compose-file/certs/server.key /usr/local/etc/openldap/key.pem
|
||||
|
||||
RUN mkdir -p /usr/local/etc/openldap/data \
|
||||
&& slapadd -l /usr/local/etc/openldap/schema/emqx.io.ldif -f /usr/local/etc/openldap/slapd.conf
|
||||
|
|
|
@ -0,0 +1,49 @@
|
|||
name: 'Prepare jmeter'
|
||||
|
||||
inputs:
|
||||
version-emqx:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: emqx-docker
|
||||
path: /tmp
|
||||
- name: load docker image
|
||||
shell: bash
|
||||
env:
|
||||
PKG_VSN: ${{ inputs.version-emqx }}
|
||||
run: |
|
||||
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
|
||||
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: emqx/emqx-fvt
|
||||
ref: broker-autotest-v5
|
||||
path: scripts
|
||||
- uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||
architecture: x64 # (x64 or x86) - defaults to x64
|
||||
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
|
||||
distribution: 'zulu'
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: apache-jmeter.tgz
|
||||
- name: install jmeter
|
||||
shell: bash
|
||||
env:
|
||||
JMETER_VERSION: 5.4.3
|
||||
run: |
|
||||
tar -xf apache-jmeter.tgz
|
||||
ln -s apache-jmeter-$JMETER_VERSION jmeter
|
||||
echo "jmeter.save.saveservice.output_format=xml" >> jmeter/user.properties
|
||||
echo "jmeter.save.saveservice.response_data.on_error=true" >> jmeter/user.properties
|
||||
cd jmeter/lib/ext
|
||||
wget --no-verbose https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar
|
||||
wget --no-verbose https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar
|
||||
wget --no-verbose https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.18/postgresql-42.2.18.jar
|
|
@ -9,10 +9,11 @@ copilot:summary
|
|||
Please convert it to a draft if any of the following conditions are not met. Reviewers may skip over until all the items are checked:
|
||||
|
||||
- [ ] Added tests for the changes
|
||||
- [ ] Added property-based tests for code which performs user input validation
|
||||
- [ ] Changed lines covered in coverage report
|
||||
- [ ] Change log has been added to `changes/(ce|ee)/(feat|perf|fix)-<PR-id>.en.md` files
|
||||
- [ ] For internal contributor: there is a jira ticket to track this change
|
||||
- [ ] If there should be document changes, a PR to emqx-docs.git is sent, or a jira ticket is created to follow up
|
||||
- [ ] Created PR to [emqx-docs](https://github.com/emqx/emqx-docs) if documentation update is required, or link to a follow-up jira ticket
|
||||
- [ ] Schema changes are backward compatible
|
||||
|
||||
## Checklist for CI (.github/workflows) changes
|
||||
|
|
|
@ -0,0 +1,242 @@
|
|||
name: PR Entrypoint
|
||||
|
||||
concurrency:
|
||||
group: pr-entrypoint-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
required: false
|
||||
|
||||
env:
|
||||
IS_CI: "yes"
|
||||
|
||||
jobs:
|
||||
sanity-checks:
|
||||
runs-on: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }}
|
||||
container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04"
|
||||
outputs:
|
||||
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
|
||||
ct-host: ${{ steps.matrix.outputs.ct-host }}
|
||||
ct-docker: ${{ steps.matrix.outputs.ct-docker }}
|
||||
version-emqx: ${{ steps.matrix.outputs.version-emqx }}
|
||||
version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }}
|
||||
runner: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }}
|
||||
builder: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04"
|
||||
builder_vsn: "5.1-3"
|
||||
otp_vsn: "25.3.2-1"
|
||||
elixir_vsn: "1.14.5"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
fetch-depth: 0
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- name: Run gitlint
|
||||
env:
|
||||
BEFORE_REF: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
|
||||
AFTER_REF: ${{ github.sha }}
|
||||
run: |
|
||||
pip install gitlint
|
||||
gitlint --commits $BEFORE_REF..$AFTER_REF --config .github/workflows/.gitlint
|
||||
- name: Run shellcheck
|
||||
run: |
|
||||
DEBIAN_FRONTEND=noninteractive apt-get update -qy && apt-get install -qy shellcheck
|
||||
./scripts/shellcheck.sh
|
||||
- name: Run shell tests
|
||||
run: |
|
||||
DEBIAN_FRONTEND=noninteractive apt-get update -qy && apt-get install -qy shelltestrunner
|
||||
scripts/shelltest/run_tests.sh
|
||||
- name: Check workflow files
|
||||
env:
|
||||
ACTIONLINT_VSN: 1.6.25
|
||||
run: |
|
||||
wget https://github.com/rhysd/actionlint/releases/download/v${ACTIONLINT_VSN}/actionlint_${ACTIONLINT_VSN}_linux_amd64.tar.gz
|
||||
tar zxf actionlint_${ACTIONLINT_VSN}_linux_amd64.tar.gz actionlint
|
||||
# TODO: enable shellcheck when all the current issues are fixed
|
||||
./actionlint -color \
|
||||
-shellcheck= \
|
||||
-ignore 'label ".+" is unknown' \
|
||||
-ignore 'value "emqx-enterprise" in "exclude"'
|
||||
- name: Check line-break at EOF
|
||||
run: |
|
||||
./scripts/check-nl-at-eof.sh
|
||||
- name: Check apps version
|
||||
run: |
|
||||
./scripts/apps-version-check.sh
|
||||
- name: Setup mix
|
||||
env:
|
||||
MIX_ENV: emqx-enterprise
|
||||
PROFILE: emqx-enterprise
|
||||
run: |
|
||||
mix local.hex --force --if-missing && mix local.rebar --force --if-missing
|
||||
- name: Check formatting
|
||||
env:
|
||||
MIX_ENV: emqx-enterprise
|
||||
PROFILE: emqx-enterprise
|
||||
run: |
|
||||
./scripts/check-format.sh
|
||||
- name: Run elvis check
|
||||
run: |
|
||||
./scripts/elvis-check.sh $GITHUB_BASE_REF
|
||||
- name: Generate CT Matrix
|
||||
id: matrix
|
||||
run: |
|
||||
APPS="$(./scripts/find-apps.sh --ci)"
|
||||
MATRIX="$(echo "${APPS}" | jq -c '
|
||||
[
|
||||
(.[] | select(.profile == "emqx") | . + {
|
||||
builder: "5.1-3",
|
||||
otp: "25.3.2-1",
|
||||
elixir: "1.14.5"
|
||||
}),
|
||||
(.[] | select(.profile == "emqx-enterprise") | . + {
|
||||
builder: "5.1-3",
|
||||
otp: ["25.3.2-1"][],
|
||||
elixir: "1.14.5"
|
||||
})
|
||||
]
|
||||
')"
|
||||
echo "${MATRIX}" | jq
|
||||
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
|
||||
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
|
||||
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
|
||||
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
|
||||
echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT
|
||||
echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT
|
||||
echo "version-emqx=$(./pkg-vsn.sh emqx)" | tee -a $GITHUB_OUTPUT
|
||||
echo "version-emqx-enterprise=$(./pkg-vsn.sh emqx-enterprise)" | tee -a $GITHUB_OUTPUT
|
||||
|
||||
compile:
|
||||
runs-on: ${{ needs.sanity-checks.outputs.runner }}
|
||||
container: ${{ needs.sanity-checks.outputs.builder }}
|
||||
needs:
|
||||
- sanity-checks
|
||||
strategy:
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- id: compile
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
ENABLE_COVER_COMPILE: 1
|
||||
run: |
|
||||
make ensure-rebar3
|
||||
make ${PROFILE}
|
||||
make test-compile
|
||||
zip -ryq $PROFILE.zip .
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.profile }}
|
||||
path: ${{ matrix.profile }}.zip
|
||||
retention-days: 1
|
||||
|
||||
run_test_cases:
|
||||
needs:
|
||||
- sanity-checks
|
||||
- compile
|
||||
uses: ./.github/workflows/run_test_cases.yaml
|
||||
with:
|
||||
runner: ${{ needs.sanity-checks.outputs.runner }}
|
||||
builder: ${{ needs.sanity-checks.outputs.builder }}
|
||||
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
|
||||
ct-host: ${{ needs.sanity-checks.outputs.ct-host }}
|
||||
ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }}
|
||||
|
||||
static_checks:
|
||||
needs:
|
||||
- sanity-checks
|
||||
- compile
|
||||
uses: ./.github/workflows/static_checks.yaml
|
||||
with:
|
||||
runner: ${{ needs.sanity-checks.outputs.runner }}
|
||||
builder: ${{ needs.sanity-checks.outputs.builder }}
|
||||
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
|
||||
|
||||
build_slim_packages:
|
||||
needs:
|
||||
- sanity-checks
|
||||
uses: ./.github/workflows/build_slim_packages.yaml
|
||||
with:
|
||||
runner: ${{ needs.sanity-checks.outputs.runner }}
|
||||
builder: ${{ needs.sanity-checks.outputs.builder }}
|
||||
builder_vsn: ${{ needs.sanity-checks.outputs.builder_vsn }}
|
||||
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
|
||||
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
|
||||
|
||||
build_docker_for_test:
|
||||
needs:
|
||||
- sanity-checks
|
||||
uses: ./.github/workflows/build_docker_for_test.yaml
|
||||
with:
|
||||
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
|
||||
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
|
||||
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
|
||||
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
|
||||
|
||||
spellcheck:
|
||||
needs:
|
||||
- sanity-checks
|
||||
- build_slim_packages
|
||||
uses: ./.github/workflows/spellcheck.yaml
|
||||
with:
|
||||
runner: ${{ needs.sanity-checks.outputs.runner }}
|
||||
|
||||
run_conf_tests:
|
||||
needs:
|
||||
- sanity-checks
|
||||
- compile
|
||||
uses: ./.github/workflows/run_conf_tests.yaml
|
||||
with:
|
||||
runner: ${{ needs.sanity-checks.outputs.runner }}
|
||||
builder: ${{ needs.sanity-checks.outputs.builder }}
|
||||
|
||||
check_deps_integrity:
|
||||
needs:
|
||||
- sanity-checks
|
||||
uses: ./.github/workflows/check_deps_integrity.yaml
|
||||
with:
|
||||
runner: ${{ needs.sanity-checks.outputs.runner }}
|
||||
builder: ${{ needs.sanity-checks.outputs.builder }}
|
||||
|
||||
run_jmeter_tests:
|
||||
needs:
|
||||
- sanity-checks
|
||||
- build_docker_for_test
|
||||
uses: ./.github/workflows/run_jmeter_tests.yaml
|
||||
with:
|
||||
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
|
||||
|
||||
run_docker_tests:
|
||||
needs:
|
||||
- sanity-checks
|
||||
- build_docker_for_test
|
||||
uses: ./.github/workflows/run_docker_tests.yaml
|
||||
with:
|
||||
runner: ${{ needs.sanity-checks.outputs.runner }}
|
||||
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
|
||||
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
|
||||
|
||||
run_helm_tests:
|
||||
needs:
|
||||
- sanity-checks
|
||||
- build_docker_for_test
|
||||
uses: ./.github/workflows/run_helm_tests.yaml
|
||||
with:
|
||||
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
|
||||
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
|
|
@ -0,0 +1,191 @@
|
|||
name: Push Entrypoint
|
||||
|
||||
concurrency:
|
||||
group: push-entrypoint-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
- 'e*'
|
||||
branches:
|
||||
- 'master'
|
||||
- 'release-51'
|
||||
- 'ci/**'
|
||||
|
||||
env:
|
||||
IS_CI: 'yes'
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }}
|
||||
container: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04'
|
||||
outputs:
|
||||
profile: ${{ steps.parse-git-ref.outputs.profile }}
|
||||
edition: ${{ steps.parse-git-ref.outputs.edition }}
|
||||
release: ${{ steps.parse-git-ref.outputs.release }}
|
||||
latest: ${{ steps.parse-git-ref.outputs.latest }}
|
||||
version: ${{ steps.parse-git-ref.outputs.version }}
|
||||
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
|
||||
ct-host: ${{ steps.matrix.outputs.ct-host }}
|
||||
ct-docker: ${{ steps.matrix.outputs.ct-docker }}
|
||||
runner: ${{ github.repository_owner == 'emqx' && 'aws-amd64' || 'ubuntu-22.04' }}
|
||||
builder: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04'
|
||||
builder_vsn: '5.1-3'
|
||||
otp_vsn: '25.3.2-1'
|
||||
elixir_vsn: '1.14.5'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
fetch-depth: 0
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
shell: bash
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- name: Detect emqx profile and version
|
||||
id: parse-git-ref
|
||||
run: |
|
||||
JSON="$(./scripts/parse-git-ref.sh $GITHUB_REF)"
|
||||
PROFILE=$(echo "$JSON" | jq -cr '.profile')
|
||||
EDITION=$(echo "$JSON" | jq -cr '.edition')
|
||||
RELEASE=$(echo "$JSON" | jq -cr '.release')
|
||||
LATEST=$(echo "$JSON" | jq -cr '.latest')
|
||||
VERSION="$(./pkg-vsn.sh "$PROFILE")"
|
||||
echo "profile=$PROFILE" | tee -a $GITHUB_OUTPUT
|
||||
echo "edition=$EDITION" | tee -a $GITHUB_OUTPUT
|
||||
echo "release=$RELEASE" | tee -a $GITHUB_OUTPUT
|
||||
echo "latest=$LATEST" | tee -a $GITHUB_OUTPUT
|
||||
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
|
||||
- name: Build matrix
|
||||
id: matrix
|
||||
run: |
|
||||
APPS="$(./scripts/find-apps.sh --ci)"
|
||||
MATRIX="$(echo "${APPS}" | jq -c '
|
||||
[
|
||||
(.[] | select(.profile == "emqx") | . + {
|
||||
builder: "5.1-3",
|
||||
otp: "25.3.2-1",
|
||||
elixir: "1.14.5"
|
||||
}),
|
||||
(.[] | select(.profile == "emqx-enterprise") | . + {
|
||||
builder: "5.1-3",
|
||||
otp: ["25.3.2-1"][],
|
||||
elixir: "1.14.5"
|
||||
})
|
||||
]
|
||||
')"
|
||||
echo "${MATRIX}" | jq
|
||||
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
|
||||
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
|
||||
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
|
||||
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
|
||||
echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT
|
||||
echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT
|
||||
|
||||
build_slim_packages:
|
||||
if: ${{ needs.prepare.outputs.release != 'true' }}
|
||||
needs:
|
||||
- prepare
|
||||
uses: ./.github/workflows/build_slim_packages.yaml
|
||||
with:
|
||||
runner: ${{ needs.prepare.outputs.runner }}
|
||||
builder: ${{ needs.prepare.outputs.builder }}
|
||||
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
|
||||
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
|
||||
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
|
||||
|
||||
build_packages:
|
||||
if: ${{ needs.prepare.outputs.release == 'true' }}
|
||||
needs:
|
||||
- prepare
|
||||
uses: ./.github/workflows/build_packages.yaml
|
||||
with:
|
||||
profile: ${{ needs.prepare.outputs.profile }}
|
||||
publish: ${{ needs.prepare.outputs.release }}
|
||||
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
|
||||
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
|
||||
runner: ${{ needs.prepare.outputs.runner }}
|
||||
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
|
||||
|
||||
build_and_push_docker_images:
|
||||
if: ${{ needs.prepare.outputs.release == 'true' }}
|
||||
needs:
|
||||
- prepare
|
||||
uses: ./.github/workflows/build_and_push_docker_images.yaml
|
||||
with:
|
||||
profile: ${{ needs.prepare.outputs.profile }}
|
||||
edition: ${{ needs.prepare.outputs.edition }}
|
||||
version: ${{ needs.prepare.outputs.version }}
|
||||
latest: ${{ needs.prepare.outputs.latest }}
|
||||
publish: ${{ needs.prepare.outputs.release }}
|
||||
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
|
||||
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
|
||||
runner: ${{ needs.prepare.outputs.runner }}
|
||||
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
|
||||
|
||||
compile:
|
||||
runs-on: ${{ needs.prepare.outputs.runner }}
|
||||
container: ${{ needs.prepare.outputs.builder }}
|
||||
needs:
|
||||
- prepare
|
||||
strategy:
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
fetch-depth: 0
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- id: compile
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
ENABLE_COVER_COMPILE: 1
|
||||
run: |
|
||||
make $PROFILE
|
||||
zip -ryq $PROFILE.zip .
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.profile }}
|
||||
path: ${{ matrix.profile }}.zip
|
||||
retention-days: 1
|
||||
|
||||
run_test_cases:
|
||||
needs:
|
||||
- prepare
|
||||
- compile
|
||||
uses: ./.github/workflows/run_test_cases.yaml
|
||||
with:
|
||||
runner: ${{ needs.prepare.outputs.runner }}
|
||||
builder: ${{ needs.prepare.outputs.builder }}
|
||||
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}
|
||||
ct-host: ${{ needs.prepare.outputs.ct-host }}
|
||||
ct-docker: ${{ needs.prepare.outputs.ct-docker }}
|
||||
|
||||
run_conf_tests:
|
||||
needs:
|
||||
- prepare
|
||||
- compile
|
||||
uses: ./.github/workflows/run_conf_tests.yaml
|
||||
with:
|
||||
runner: ${{ needs.prepare.outputs.runner }}
|
||||
builder: ${{ needs.prepare.outputs.builder }}
|
||||
|
||||
static_checks:
|
||||
needs:
|
||||
- prepare
|
||||
- compile
|
||||
uses: ./.github/workflows/static_checks.yaml
|
||||
with:
|
||||
runner: ${{ needs.prepare.outputs.runner }}
|
||||
builder: ${{ needs.prepare.outputs.builder }}
|
||||
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
name: Check Apps Version
|
||||
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
check_apps_version:
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Check apps version
|
||||
run: ./scripts/apps-version-check.sh
|
|
@ -5,138 +5,102 @@ concurrency:
|
|||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
- e*
|
||||
- docker-latest-*
|
||||
workflow_call:
|
||||
inputs:
|
||||
profile:
|
||||
required: true
|
||||
type: string
|
||||
edition:
|
||||
required: true
|
||||
type: string
|
||||
version:
|
||||
required: true
|
||||
type: string
|
||||
latest:
|
||||
required: true
|
||||
type: string
|
||||
publish:
|
||||
required: true
|
||||
type: string
|
||||
otp_vsn:
|
||||
required: true
|
||||
type: string
|
||||
elixir_vsn:
|
||||
required: true
|
||||
type: string
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
builder_vsn:
|
||||
required: true
|
||||
type: string
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch_or_tag:
|
||||
ref:
|
||||
required: false
|
||||
version:
|
||||
required: true
|
||||
type: string
|
||||
profile:
|
||||
required: false
|
||||
type: string
|
||||
default: 'emqx'
|
||||
is_latest:
|
||||
edition:
|
||||
required: false
|
||||
type: string
|
||||
default: 'Opensource'
|
||||
latest:
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
publish:
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
otp_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '25.3.2-1'
|
||||
elixir_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '1.14.5'
|
||||
runner:
|
||||
required: false
|
||||
type: string
|
||||
default: 'ubuntu-22.04'
|
||||
builder_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '5.1-3'
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-22.04
|
||||
# prepare source with any OTP version, no need for a matrix
|
||||
container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04"
|
||||
|
||||
outputs:
|
||||
PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
|
||||
EDITION: ${{ steps.get_profile.outputs.EDITION }}
|
||||
IS_LATEST: ${{ steps.get_profile.outputs.IS_LATEST }}
|
||||
IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }}
|
||||
VERSION: ${{ steps.get_profile.outputs.VERSION }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch_or_tag }} # when input is not given, the event tag is used
|
||||
path: source
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get profiles to build
|
||||
id: get_profile
|
||||
env:
|
||||
INPUTS_PROFILE: ${{ github.event.inputs.profile }}
|
||||
run: |
|
||||
cd source
|
||||
# tag docker-latest-ce or docker-latest-ee
|
||||
if git describe --tags --exact --match 'docker-latest-*' 2>/dev/null; then
|
||||
echo 'is_latest=true due to docker-latest-* tag'
|
||||
is_latest=true
|
||||
elif [ "${{ inputs.is_latest }}" = "true" ]; then
|
||||
echo 'is_latest=true due to manual input from workflow_dispatch'
|
||||
is_latest=true
|
||||
else
|
||||
echo 'is_latest=false'
|
||||
is_latest=false
|
||||
fi
|
||||
# resolve profile
|
||||
if git describe --tags --match "v*" --exact; then
|
||||
echo "This is an exact git tag, will publish images"
|
||||
is_exact='true'
|
||||
PROFILE=emqx
|
||||
elif git describe --tags --match "e*" --exact; then
|
||||
echo "This is an exact git tag, will publish images"
|
||||
is_exact='true'
|
||||
PROFILE=emqx-enterprise
|
||||
else
|
||||
echo "This is NOT an exact git tag, will not publish images"
|
||||
is_exact='false'
|
||||
fi
|
||||
|
||||
case "${PROFILE:-$INPUTS_PROFILE}" in
|
||||
emqx)
|
||||
EDITION='Opensource'
|
||||
;;
|
||||
emqx-enterprise)
|
||||
EDITION='Enterprise'
|
||||
;;
|
||||
*)
|
||||
echo "ERROR: Failed to resolve build profile"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
VSN="$(./pkg-vsn.sh "$PROFILE")"
|
||||
echo "Building emqx/$PROFILE:$VSN image (latest=$is_latest)"
|
||||
echo "Push = $is_exact"
|
||||
echo "IS_LATEST=$is_latest" >> $GITHUB_OUTPUT
|
||||
echo "IS_EXACT_TAG=$is_exact" >> $GITHUB_OUTPUT
|
||||
echo "PROFILE=$PROFILE" >> $GITHUB_OUTPUT
|
||||
echo "EDITION=$EDITION" >> $GITHUB_OUTPUT
|
||||
echo "VERSION=$VSN" >> $GITHUB_OUTPUT
|
||||
- name: get_all_deps
|
||||
env:
|
||||
PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
|
||||
run: |
|
||||
zip -ryq source.zip source/* source/.[^.]*
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: source
|
||||
path: source.zip
|
||||
|
||||
docker:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: prepare
|
||||
runs-on: ${{ inputs.runner }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- "${{ needs.prepare.outputs.PROFILE }}"
|
||||
- ${{ inputs.profile }}
|
||||
registry:
|
||||
- 'docker.io'
|
||||
- 'public.ecr.aws'
|
||||
os:
|
||||
- [debian11, "debian:11-slim", "deploy/docker/Dockerfile"]
|
||||
# NOTE: 'otp' and 'elixir' are to configure emqx-builder image
|
||||
# only support latest otp and elixir, not a matrix
|
||||
builder:
|
||||
- 5.1-3 # update to latest
|
||||
- ${{ inputs.builder_vsn }}
|
||||
otp:
|
||||
- 25.3.2-1
|
||||
- ${{ inputs.otp_vsn }}
|
||||
elixir:
|
||||
- 'no_elixir'
|
||||
- '1.14.5' # update to latest
|
||||
exclude: # TODO: publish enterprise to ecr too?
|
||||
- registry: 'public.ecr.aws'
|
||||
profile: emqx-enterprise
|
||||
- ${{ inputs.elixir_vsn }}
|
||||
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
name: source
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: docker/setup-qemu-action@v2
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
|
@ -185,18 +149,18 @@ jobs:
|
|||
latest=${{ matrix.elixir == 'no_elixir' }}
|
||||
suffix=${{ steps.pre-meta.outputs.img_suffix }}
|
||||
tags: |
|
||||
type=semver,pattern={{major}}.{{minor}},value=${{ needs.prepare.outputs.VERSION }}
|
||||
type=semver,pattern={{version}},value=${{ needs.prepare.outputs.VERSION }}
|
||||
type=raw,value=${{ needs.prepare.outputs.VERSION }}
|
||||
type=raw,value=latest,enable=${{ needs.prepare.outputs.IS_LATEST }}
|
||||
type=semver,pattern={{major}}.{{minor}},value=${{ inputs.version }}
|
||||
type=semver,pattern={{version}},value=${{ inputs.version }}
|
||||
type=raw,value=${{ inputs.version }}
|
||||
type=raw,value=latest,enable=${{ inputs.latest }}
|
||||
labels: |
|
||||
org.opencontainers.image.otp.version=${{ matrix.otp }}
|
||||
org.opencontainers.image.edition=${{ needs.prepare.outputs.EDITION }}
|
||||
org.opencontainers.image.edition=${{ inputs.edition }}
|
||||
${{ steps.pre-meta.outputs.extra_labels }}
|
||||
|
||||
- uses: docker/build-push-action@v3
|
||||
with:
|
||||
push: ${{ needs.prepare.outputs.IS_EXACT_TAG == 'true' || github.repository_owner != 'emqx' }}
|
||||
push: ${{ inputs.publish == 'true' || github.repository_owner != 'emqx' }}
|
||||
pull: true
|
||||
no-cache: true
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
@ -206,4 +170,4 @@ jobs:
|
|||
EMQX_NAME=${{ matrix.profile }}${{ steps.pre-meta.outputs.img_suffix }}
|
||||
EXTRA_DEPS=${{ steps.pre-meta.outputs.extra_deps }}
|
||||
file: source/${{ matrix.os[2] }}
|
||||
context: source
|
||||
|
||||
|
|
|
@ -0,0 +1,61 @@
|
|||
name: Build docker image for test
|
||||
|
||||
concurrency:
|
||||
group: docker-test-build-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
otp_vsn:
|
||||
required: true
|
||||
type: string
|
||||
elixir_vsn:
|
||||
required: true
|
||||
type: string
|
||||
version-emqx:
|
||||
required: true
|
||||
type: string
|
||||
version-emqx-enterprise:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
EMQX_NAME: ${{ matrix.profile }}
|
||||
PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
|
||||
OTP_VSN: ${{ inputs.otp_vsn }}
|
||||
ELIXIR_VSN: ${{ inputs.elixir_vsn }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
- emqx-elixir
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: build and export to Docker
|
||||
id: build
|
||||
run: |
|
||||
make ${EMQX_NAME}-docker
|
||||
echo "EMQX_IMAGE_TAG=$(cat .docker_image_tag)" >> $GITHUB_ENV
|
||||
- name: smoke test
|
||||
run: |
|
||||
CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG)
|
||||
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
|
||||
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
|
||||
docker stop $CID
|
||||
- name: export docker image
|
||||
run: |
|
||||
docker save $EMQX_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: "${{ env.EMQX_NAME }}-docker"
|
||||
path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz"
|
||||
retention-days: 3
|
||||
|
|
@ -1,81 +1,61 @@
|
|||
name: Cross build packages
|
||||
|
||||
concurrency:
|
||||
group: build-${{ github.event_name }}-${{ github.ref }}
|
||||
group: build-packages-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'ci/**'
|
||||
tags:
|
||||
- v*
|
||||
- e*
|
||||
workflow_call:
|
||||
inputs:
|
||||
profile:
|
||||
required: true
|
||||
type: string
|
||||
publish:
|
||||
required: true
|
||||
type: string
|
||||
otp_vsn:
|
||||
required: true
|
||||
type: string
|
||||
elixir_vsn:
|
||||
required: true
|
||||
type: string
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
builder_vsn:
|
||||
required: true
|
||||
type: string
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch_or_tag:
|
||||
ref:
|
||||
required: false
|
||||
profile:
|
||||
required: false
|
||||
publish:
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
otp_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '25.3.2-1'
|
||||
elixir_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '1.14.5'
|
||||
runner:
|
||||
required: false
|
||||
type: string
|
||||
default: 'ubuntu-22.04'
|
||||
builder_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '5.1-3'
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-22.04
|
||||
container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04
|
||||
outputs:
|
||||
BUILD_PROFILE: ${{ steps.get_profile.outputs.BUILD_PROFILE }}
|
||||
IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }}
|
||||
VERSION: ${{ steps.get_profile.outputs.VERSION }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch_or_tag }} # when input is not given, the event tag is used
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get profile to build
|
||||
id: get_profile
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
tag=${{ github.ref }}
|
||||
if git describe --tags --match "[v|e]*" --exact; then
|
||||
echo "WARN: This is an exact git tag, will publish release"
|
||||
is_exact_tag='true'
|
||||
else
|
||||
echo "WARN: This is NOT an exact git tag, will not publish release"
|
||||
is_exact_tag='false'
|
||||
fi
|
||||
echo "IS_EXACT_TAG=${is_exact_tag}" >> $GITHUB_OUTPUT
|
||||
case $tag in
|
||||
refs/tags/v*)
|
||||
PROFILE='emqx'
|
||||
;;
|
||||
refs/tags/e*)
|
||||
PROFILE=emqx-enterprise
|
||||
;;
|
||||
*)
|
||||
PROFILE=${{ github.event.inputs.profile }}
|
||||
case "$PROFILE" in
|
||||
emqx)
|
||||
true
|
||||
;;
|
||||
emqx-enterprise)
|
||||
true
|
||||
;;
|
||||
*)
|
||||
# maybe triggered from schedule
|
||||
echo "WARN: \"$PROFILE\" is not a valid profile."
|
||||
echo "building the default profile 'emqx' instead"
|
||||
PROFILE='emqx'
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
echo "BUILD_PROFILE=$PROFILE" >> $GITHUB_OUTPUT
|
||||
echo "VERSION=$(./pkg-vsn.sh $PROFILE)" >> $GITHUB_OUTPUT
|
||||
|
||||
windows:
|
||||
runs-on: windows-2019
|
||||
if: startsWith(github.ref_name, 'v')
|
||||
if: inputs.profile == 'emqx'
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
|
@ -84,11 +64,11 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch_or_tag }}
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: ilammy/msvc-dev-cmd@v1.12.0
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
- uses: erlef/setup-beam@v1.16.0
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- name: build
|
||||
|
@ -125,14 +105,13 @@ jobs:
|
|||
path: _packages/${{ matrix.profile }}/
|
||||
|
||||
mac:
|
||||
needs: prepare
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- ${{ needs.prepare.outputs.BUILD_PROFILE }}
|
||||
- ${{ inputs.profile }}
|
||||
otp:
|
||||
- 25.3.2-1
|
||||
- ${{ inputs.otp_vsn }}
|
||||
os:
|
||||
- macos-11
|
||||
- macos-12
|
||||
|
@ -142,7 +121,7 @@ jobs:
|
|||
- uses: emqx/self-hosted-cleanup-action@v1.0.3
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch_or_tag }}
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
fetch-depth: 0
|
||||
- uses: ./.github/actions/package-macos
|
||||
with:
|
||||
|
@ -160,7 +139,6 @@ jobs:
|
|||
path: _packages/${{ matrix.profile }}/
|
||||
|
||||
linux:
|
||||
needs: prepare
|
||||
runs-on: ${{ matrix.build_machine }}
|
||||
# always run in builder container because the host might have the wrong OTP version etc.
|
||||
# otherwise buildx.sh does not run docker if arch and os matches the target arch and os.
|
||||
|
@ -171,9 +149,9 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- ${{ needs.prepare.outputs.BUILD_PROFILE }}
|
||||
- ${{ inputs.profile }}
|
||||
otp:
|
||||
- 25.3.2-1
|
||||
- ${{ inputs.otp_vsn }}
|
||||
arch:
|
||||
- amd64
|
||||
- arm64
|
||||
|
@ -193,9 +171,9 @@ jobs:
|
|||
- aws-arm64
|
||||
- aws-amd64
|
||||
builder:
|
||||
- 5.1-3
|
||||
- ${{ inputs.builder_vsn }}
|
||||
elixir:
|
||||
- 1.14.5
|
||||
- ${{ inputs.elixir_vsn }}
|
||||
with_elixir:
|
||||
- 'no'
|
||||
exclude:
|
||||
|
@ -205,12 +183,12 @@ jobs:
|
|||
build_machine: aws-arm64
|
||||
include:
|
||||
- profile: emqx
|
||||
otp: 25.3.2-1
|
||||
otp: ${{ inputs.otp_vsn }}
|
||||
arch: amd64
|
||||
os: ubuntu22.04
|
||||
build_machine: aws-amd64
|
||||
builder: 5.1-3
|
||||
elixir: 1.14.5
|
||||
builder: ${{ inputs.builder_vsn }}
|
||||
elixir: ${{ inputs.elixir_vsn }}
|
||||
with_elixir: 'yes'
|
||||
|
||||
defaults:
|
||||
|
@ -222,7 +200,7 @@ jobs:
|
|||
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch_or_tag }}
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: fix workdir
|
||||
|
@ -267,14 +245,16 @@ jobs:
|
|||
path: _packages/${{ matrix.profile }}/
|
||||
|
||||
publish_artifacts:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [prepare, mac, linux]
|
||||
if: needs.prepare.outputs.IS_EXACT_TAG == 'true'
|
||||
runs-on: ${{ inputs.runner }}
|
||||
needs:
|
||||
- mac
|
||||
- linux
|
||||
if: ${{ inputs.publish == 'true' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- ${{ needs.prepare.outputs.BUILD_PROFILE }}
|
||||
- ${{ inputs.profile }}
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
|
@ -284,7 +264,7 @@ jobs:
|
|||
run: sudo apt-get update && sudo apt install -y dos2unix
|
||||
- name: get packages
|
||||
run: |
|
||||
set -e -u
|
||||
set -eu
|
||||
cd packages/${{ matrix.profile }}
|
||||
# fix the .sha256 file format
|
||||
for var in $(ls | grep emqx | grep -v sha256); do
|
||||
|
|
|
@ -29,7 +29,7 @@ jobs:
|
|||
arch:
|
||||
- amd64
|
||||
os:
|
||||
- debian10
|
||||
- ubuntu22.04
|
||||
- amzn2023
|
||||
builder:
|
||||
- 5.1-3
|
||||
|
@ -94,7 +94,7 @@ jobs:
|
|||
otp:
|
||||
- 25.3.2-1
|
||||
os:
|
||||
- macos-12
|
||||
- macos-13
|
||||
- macos-12-arm64
|
||||
|
||||
steps:
|
||||
|
@ -117,6 +117,7 @@ jobs:
|
|||
with:
|
||||
name: ${{ matrix.profile }}
|
||||
path: _packages/${{ matrix.profile }}/
|
||||
retention-days: 7
|
||||
- name: Send notification to Slack
|
||||
uses: slackapi/slack-github-action@v1.23.0
|
||||
if: failure()
|
||||
|
@ -125,3 +126,59 @@ jobs:
|
|||
with:
|
||||
payload: |
|
||||
{"text": "Scheduled build of ${{ matrix.profile }} package for ${{ matrix.os }} failed: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"}
|
||||
|
||||
windows:
|
||||
if: github.repository_owner == 'emqx'
|
||||
runs-on: windows-2019
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
otp:
|
||||
- 25.3.2
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ilammy/msvc-dev-cmd@v1.12.0
|
||||
- uses: erlef/setup-beam@v1.16.0
|
||||
with:
|
||||
otp-version: ${{ matrix.otp }}
|
||||
- name: build
|
||||
env:
|
||||
PYTHON: python
|
||||
DIAGNOSTIC: 1
|
||||
run: |
|
||||
# ensure crypto app (openssl)
|
||||
erl -eval "erlang:display(crypto:info_lib())" -s init stop
|
||||
make ${{ matrix.profile }}-tgz
|
||||
- name: run emqx
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start
|
||||
Start-Sleep -s 10
|
||||
$pingOutput = ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx ping
|
||||
if ($pingOutput = 'pong') {
|
||||
echo "EMQX started OK"
|
||||
} else {
|
||||
echo "Failed to ping EMQX $pingOutput"
|
||||
Exit 1
|
||||
}
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop
|
||||
echo "EMQX stopped"
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx install
|
||||
echo "EMQX installed"
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx uninstall
|
||||
echo "EMQX uninstalled"
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: windows
|
||||
path: _packages/${{ matrix.profile }}/*
|
||||
retention-days: 7
|
||||
- name: Send notification to Slack
|
||||
uses: slackapi/slack-github-action@v1.23.0
|
||||
if: failure()
|
||||
env:
|
||||
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
with:
|
||||
payload: |
|
||||
{"text": "Scheduled build of ${{ matrix.profile }} package for Windows failed: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"}
|
||||
|
|
|
@ -5,54 +5,73 @@ concurrency:
|
|||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- release-51
|
||||
pull_request:
|
||||
# GitHub pull_request action is by default triggered when
|
||||
# opened reopened or synchronize,
|
||||
# we add labeled and unlabeled to the list because
|
||||
# the mac job dpends on the PR having a 'Mac' label
|
||||
types:
|
||||
- labeled
|
||||
- unlabeled
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
workflow_call:
|
||||
inputs:
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
builder:
|
||||
required: true
|
||||
type: string
|
||||
builder_vsn:
|
||||
required: true
|
||||
type: string
|
||||
otp_vsn:
|
||||
required: true
|
||||
type: string
|
||||
elixir_vsn:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
required: false
|
||||
runner:
|
||||
required: false
|
||||
type: string
|
||||
default: 'ubuntu-22.04'
|
||||
builder:
|
||||
required: false
|
||||
type: string
|
||||
default: 'ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04'
|
||||
builder_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '5.1-3'
|
||||
otp_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '25.3.2-1'
|
||||
elixir_vsn:
|
||||
required: false
|
||||
type: string
|
||||
default: '1.14.5'
|
||||
|
||||
jobs:
|
||||
linux:
|
||||
runs-on: aws-amd64
|
||||
runs-on: ${{ inputs.runner }}
|
||||
env:
|
||||
EMQX_NAME: ${{ matrix.profile[0] }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- ["emqx", "25.3.2-1", "el7", "erlang"]
|
||||
- ["emqx", "25.3.2-1", "ubuntu22.04", "elixir"]
|
||||
- ["emqx-enterprise", "25.3.2-1", "amzn2023", "erlang"]
|
||||
- ["emqx", "25.3.2-1", "ubuntu20.04", "elixir"]
|
||||
- ["emqx-enterprise", "25.3.2-1", "ubuntu20.04", "erlang"]
|
||||
builder:
|
||||
- 5.1-3
|
||||
elixir:
|
||||
- '1.14.5'
|
||||
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
|
||||
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: prepare
|
||||
run: |
|
||||
echo "EMQX_NAME=${{ matrix.profile[0] }}" >> $GITHUB_ENV
|
||||
echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
|
||||
- name: build and test tgz package
|
||||
if: matrix.profile[3] == 'erlang'
|
||||
run: |
|
||||
|
@ -77,58 +96,14 @@ jobs:
|
|||
with:
|
||||
name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
|
||||
path: _packages/${{ matrix.profile[0] }}/*
|
||||
retention-days: 7
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: "${{ matrix.profile[0] }}_schema_dump"
|
||||
path: |
|
||||
scripts/spellcheck
|
||||
_build/docgen/${{ matrix.profile[0] }}/schema-en.json
|
||||
|
||||
windows:
|
||||
runs-on: windows-2019
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
otp:
|
||||
- 25.3.2
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ilammy/msvc-dev-cmd@v1.12.0
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: ${{ matrix.otp }}
|
||||
- name: build
|
||||
env:
|
||||
PYTHON: python
|
||||
DIAGNOSTIC: 1
|
||||
run: |
|
||||
# ensure crypto app (openssl)
|
||||
erl -eval "erlang:display(crypto:info_lib())" -s init stop
|
||||
make ${{ matrix.profile }}-tgz
|
||||
- name: run emqx
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start
|
||||
Start-Sleep -s 10
|
||||
$pingOutput = ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx ping
|
||||
if ($pingOutput = 'pong') {
|
||||
echo "EMQX started OK"
|
||||
} else {
|
||||
echo "Failed to ping EMQX $pingOutput"
|
||||
Exit 1
|
||||
}
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop
|
||||
echo "EMQX stopped"
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx install
|
||||
echo "EMQX installed"
|
||||
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx uninstall
|
||||
echo "EMQX uninstalled"
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: windows
|
||||
path: _packages/${{ matrix.profile }}/*
|
||||
retention-days: 7
|
||||
|
||||
mac:
|
||||
strategy:
|
||||
|
@ -136,20 +111,18 @@ jobs:
|
|||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
otp:
|
||||
- 25.3.2-1
|
||||
- ${{ inputs.otp_vsn }}
|
||||
os:
|
||||
- macos-11
|
||||
- macos-12-arm64
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
EMQX_NAME: ${{ matrix.profile }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: prepare
|
||||
run: |
|
||||
echo "EMQX_NAME=${{ matrix.profile }}" >> $GITHUB_ENV
|
||||
- uses: ./.github/actions/package-macos
|
||||
with:
|
||||
profile: ${{ matrix.profile }}
|
||||
|
@ -163,84 +136,4 @@ jobs:
|
|||
with:
|
||||
name: ${{ matrix.os }}
|
||||
path: _packages/**/*
|
||||
|
||||
docker:
|
||||
runs-on: aws-amd64
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- ["emqx", "5.0.16"]
|
||||
- ["emqx-enterprise", "5.0.1"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: prepare
|
||||
run: |
|
||||
EMQX_NAME=${{ matrix.profile[0] }}
|
||||
PKG_VSN=${PKG_VSN:-$(./pkg-vsn.sh $EMQX_NAME)}
|
||||
EMQX_IMAGE_TAG=emqx/$EMQX_NAME:test
|
||||
EMQX_IMAGE_OLD_VERSION_TAG=emqx/$EMQX_NAME:${{ matrix.profile[1] }}
|
||||
echo "EMQX_NAME=$EMQX_NAME" >> $GITHUB_ENV
|
||||
echo "PKG_VSN=$PKG_VSN" >> $GITHUB_ENV
|
||||
echo "EMQX_IMAGE_TAG=$EMQX_IMAGE_TAG" >> $GITHUB_ENV
|
||||
echo "EMQX_IMAGE_OLD_VERSION_TAG=$EMQX_IMAGE_OLD_VERSION_TAG" >> $GITHUB_ENV
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
- name: build and export to Docker
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
file: ./deploy/docker/Dockerfile
|
||||
load: true
|
||||
tags: ${{ env.EMQX_IMAGE_TAG }}
|
||||
build-args: |
|
||||
EMQX_NAME=${{ env.EMQX_NAME }}
|
||||
- name: smoke test
|
||||
run: |
|
||||
CID=$(docker run -d --rm -P $EMQX_IMAGE_TAG)
|
||||
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
|
||||
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
|
||||
docker stop $CID
|
||||
- name: dashboard tests
|
||||
working-directory: ./scripts/ui-tests
|
||||
run: |
|
||||
set -eu
|
||||
docker compose up --abort-on-container-exit --exit-code-from selenium
|
||||
- name: test two nodes cluster with proto_dist=inet_tls in docker
|
||||
run: |
|
||||
./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
|
||||
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy)
|
||||
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
|
||||
# cleanup
|
||||
./scripts/test/start-two-nodes-in-docker.sh -c
|
||||
- name: export docker image
|
||||
run: |
|
||||
docker save $EMQX_IMAGE_TAG | gzip > $EMQX_NAME-$PKG_VSN.tar.gz
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: "${{ matrix.profile[0] }}-docker"
|
||||
path: "${{ env.EMQX_NAME }}-${{ env.PKG_VSN }}.tar.gz"
|
||||
- name: cleanup
|
||||
if: always()
|
||||
working-directory: ./scripts/ui-tests
|
||||
run: |
|
||||
docker compose rm -fs
|
||||
|
||||
spellcheck:
|
||||
needs: linux
|
||||
strategy:
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
runs-on: aws-amd64
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
name: Download schema dump
|
||||
with:
|
||||
name: "${{ matrix.profile }}_schema_dump"
|
||||
path: /tmp/
|
||||
- name: Run spellcheck
|
||||
run: |
|
||||
bash /tmp/scripts/spellcheck/spellcheck.sh /tmp/_build/docgen/${{ matrix.profile }}/schema-en.json
|
||||
retention-days: 7
|
||||
|
|
|
@ -1,14 +1,46 @@
|
|||
name: Check Rebar Dependencies
|
||||
name: Check integrity of rebar and mix dependencies
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
builder:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
check_deps_integrity:
|
||||
runs-on: ubuntu-22.04
|
||||
container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04
|
||||
|
||||
runs-on: ${{ inputs.runner }}
|
||||
container: ${{ inputs.builder }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run check-deps-integrity.escript
|
||||
run: ./scripts/check-deps-integrity.escript
|
||||
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- run: make ensure-rebar3
|
||||
- run: ./scripts/check-deps-integrity.escript
|
||||
- name: Setup mix
|
||||
env:
|
||||
MIX_ENV: emqx-enterprise
|
||||
PROFILE: emqx-enterprise
|
||||
run: |
|
||||
mix local.hex --force
|
||||
mix local.rebar --force
|
||||
mix deps.get
|
||||
- run: ./scripts/check-elixir-deps-discrepancies.exs
|
||||
env:
|
||||
MIX_ENV: emqx-enterprise
|
||||
PROFILE: emqx-enterprise
|
||||
- run: ./scripts/check-elixir-applications.exs
|
||||
env:
|
||||
MIX_ENV: emqx-enterprise
|
||||
PROFILE: emqx-enterprise
|
||||
- name: Upload produced lock files
|
||||
uses: actions/upload-artifact@v3
|
||||
if: failure()
|
||||
with:
|
||||
name: produced_lock_files
|
||||
path: |
|
||||
mix.lock
|
||||
rebar.lock
|
||||
retention-days: 1
|
||||
|
|
|
@ -1,29 +0,0 @@
|
|||
name: Code style check
|
||||
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
code_style_check:
|
||||
runs-on: ubuntu-22.04
|
||||
container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04"
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 1000
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- name: Check line-break at EOF
|
||||
run: |
|
||||
./scripts/check-nl-at-eof.sh
|
||||
- name: Check Elixir code formatting
|
||||
run: |
|
||||
mix format --check-formatted
|
||||
|
||||
- name: Check Erlang code formatting
|
||||
run: |
|
||||
./scripts/check-format.sh
|
||||
|
||||
- name: Run elvis check
|
||||
run: |
|
||||
./scripts/elvis-check.sh $GITHUB_BASE_REF
|
|
@ -1,45 +0,0 @@
|
|||
---
|
||||
|
||||
name: Check Elixir Release Applications
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
elixir_apps_check:
|
||||
runs-on: ubuntu-22.04
|
||||
# just use the latest builder
|
||||
container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04"
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
- emqx-pkg
|
||||
- emqx-enterprise-pkg
|
||||
|
||||
steps:
|
||||
- name: fix_git_permission
|
||||
run: git config --global --add safe.directory '/__w/emqx/emqx'
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: ensure rebar
|
||||
run: ./scripts/ensure-rebar3.sh
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- name: check applications
|
||||
run: ./scripts/check-elixir-applications.exs
|
||||
env:
|
||||
MIX_ENV: ${{ matrix.profile }}
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
# - name: check applications started with emqx_machine
|
||||
# run: ./scripts/check-elixir-emqx-machine-boot-discrepancies.exs
|
||||
# env:
|
||||
# MIX_ENV: ${{ matrix.profile }}
|
||||
|
||||
...
|
|
@ -1,49 +0,0 @@
|
|||
---
|
||||
|
||||
name: Elixir Dependency Version Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
elixir_deps_check:
|
||||
runs-on: ubuntu-22.04
|
||||
container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: ensure rebar
|
||||
run: ./scripts/ensure-rebar3.sh
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- name: setup mix
|
||||
run: |
|
||||
mix local.hex --force
|
||||
mix local.rebar --force
|
||||
mix deps.get
|
||||
# we check only enterprise because `rebar3 tree`, even if an
|
||||
# enterprise app is excluded from `project_app_dirs` in
|
||||
# `rebar.config.erl`, will still list dependencies from it.
|
||||
# Since the enterprise profile is a superset of the
|
||||
# community one and thus more complete, we use the former.
|
||||
env:
|
||||
MIX_ENV: emqx-enterprise
|
||||
PROFILE: emqx-enterprise
|
||||
- name: check elixir deps
|
||||
run: ./scripts/check-elixir-deps-discrepancies.exs
|
||||
env:
|
||||
MIX_ENV: emqx-enterprise
|
||||
PROFILE: emqx-enterprise
|
||||
- name: produced lock files
|
||||
uses: actions/upload-artifact@v3
|
||||
if: failure()
|
||||
with:
|
||||
name: produced_lock_files
|
||||
path: |
|
||||
mix.lock
|
||||
rebar.lock
|
||||
retention-days: 1
|
||||
|
||||
...
|
|
@ -1,41 +0,0 @@
|
|||
# FIXME: temporary workflow for testing; remove later
|
||||
name: Elixir Build (temporary)
|
||||
|
||||
concurrency:
|
||||
group: mix-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
elixir_release_build:
|
||||
runs-on: ubuntu-22.04
|
||||
strategy:
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: install tools
|
||||
run: apt update && apt install netcat-openbsd
|
||||
- name: Work around https://github.com/actions/checkout/issues/766
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- name: elixir release
|
||||
run: make ${{ matrix.profile }}-elixir
|
||||
- name: start release
|
||||
run: |
|
||||
cd _build/${{ matrix.profile }}/rel/emqx
|
||||
bin/emqx start
|
||||
- name: check if started
|
||||
run: |
|
||||
sleep 10
|
||||
nc -zv localhost 1883
|
||||
cd _build/${{ matrix.profile }}/rel/emqx
|
||||
bin/emqx ping
|
||||
bin/emqx ctl status
|
|
@ -5,49 +5,45 @@ concurrency:
|
|||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- 'ci/**'
|
||||
tags:
|
||||
- v*
|
||||
- e*
|
||||
pull_request:
|
||||
|
||||
env:
|
||||
IS_CI: "yes"
|
||||
workflow_call:
|
||||
inputs:
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
builder:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
run_conf_tests:
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ${{ inputs.runner }}
|
||||
container: ${{ inputs.builder }}
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04"
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
path: source
|
||||
- name: build_package
|
||||
working-directory: source
|
||||
name: ${{ matrix.profile }}
|
||||
- name: extract artifact
|
||||
run: |
|
||||
make ${{ matrix.profile }}
|
||||
- name: run_tests
|
||||
working-directory: source
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
run: |
|
||||
./scripts/conf-test/run.sh
|
||||
- name: print_erlang_log
|
||||
unzip -o -q ${{ matrix.profile }}.zip
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- run: ./scripts/test/check-example-configs.sh
|
||||
- run: ./scripts/conf-test/run.sh
|
||||
- name: print erlang log
|
||||
if: failure()
|
||||
run: |
|
||||
cat source/_build/${{ matrix.profile }}/rel/emqx/logs/erlang.log.*
|
||||
cat _build/${{ matrix.profile }}/rel/emqx/logs/erlang.log.*
|
||||
- uses: actions/upload-artifact@v3
|
||||
if: failure()
|
||||
with:
|
||||
name: logs-${{ matrix.profile }}
|
||||
path: source/_build/${{ matrix.profile }}/rel/emqx/logs
|
||||
path: _build/${{ matrix.profile }}/rel/emqx/logs
|
||||
|
||||
|
|
|
@ -0,0 +1,118 @@
|
|||
name: Docker image tests
|
||||
|
||||
concurrency:
|
||||
group: docker-tests-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
version-emqx:
|
||||
required: true
|
||||
type: string
|
||||
version-emqx-enterprise:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
basic-tests:
|
||||
runs-on: ${{ inputs.runner }}
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- ["emqx", "emqx/emqx:5.0.16"]
|
||||
- ["emqx-enterprise", "emqx/emqx-enterprise:5.0.1"]
|
||||
|
||||
env:
|
||||
EMQX_NAME: ${{ matrix.profile[0] }}
|
||||
PKG_VSN: ${{ matrix.profile[0] == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
|
||||
EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.EMQX_NAME }}-docker
|
||||
path: /tmp
|
||||
- name: load docker image
|
||||
run: |
|
||||
EMQX_IMAGE_TAG=$(docker load < /tmp/${EMQX_NAME}-docker-${PKG_VSN}.tar.gz 2>/dev/null | sed 's/Loaded image: //g')
|
||||
echo "EMQX_IMAGE_TAG=$EMQX_IMAGE_TAG" >> $GITHUB_ENV
|
||||
- name: dashboard tests
|
||||
working-directory: ./scripts/ui-tests
|
||||
run: |
|
||||
set -eu
|
||||
docker compose up --abort-on-container-exit --exit-code-from selenium
|
||||
- name: test two nodes cluster with proto_dist=inet_tls in docker
|
||||
run: |
|
||||
./scripts/test/start-two-nodes-in-docker.sh -P $EMQX_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
|
||||
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy)
|
||||
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
|
||||
./scripts/test/start-two-nodes-in-docker.sh -c
|
||||
- name: cleanup
|
||||
if: always()
|
||||
working-directory: ./scripts/ui-tests
|
||||
run: |
|
||||
docker compose rm -fs
|
||||
|
||||
paho-mqtt-testing:
|
||||
runs-on: ${{ inputs.runner }}
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
env:
|
||||
EMQX_NAME: ${{ matrix.profile }}
|
||||
PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
|
||||
_EMQX_TEST_DB_BACKEND: ${{ matrix.cluster_db_backend }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
- emqx-elixir
|
||||
cluster_db_backend:
|
||||
- mnesia
|
||||
- rlog
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.EMQX_NAME }}-docker
|
||||
path: /tmp
|
||||
- name: load docker image
|
||||
run: |
|
||||
EMQX_IMAGE_TAG=$(docker load < /tmp/${EMQX_NAME}-docker-${PKG_VSN}.tar.gz 2>/dev/null | sed 's/Loaded image: //g')
|
||||
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_IMAGE_TAG" >> $GITHUB_ENV
|
||||
- name: run emqx
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
./.ci/docker-compose-file/scripts/run-emqx.sh $_EMQX_DOCKER_IMAGE_TAG $_EMQX_TEST_DB_BACKEND
|
||||
- name: make paho tests
|
||||
run: |
|
||||
if ! docker exec -i python /scripts/pytest.sh "$_EMQX_TEST_DB_BACKEND"; then
|
||||
echo "DUMP_CONTAINER_LOGS_BGN"
|
||||
echo "============== haproxy =============="
|
||||
docker logs haproxy
|
||||
echo "============== node1 =============="
|
||||
docker logs node1.emqx.io
|
||||
echo "============== node2 =============="
|
||||
docker logs node2.emqx.io
|
||||
echo "DUMP_CONTAINER_LOGS_END"
|
||||
exit 1
|
||||
fi
|
||||
# node_dump requires netstat, which is not available in the container
|
||||
# simple smoke test for node_dump
|
||||
# - name: test node_dump
|
||||
# run: |
|
||||
# docker exec -u root node1.emqx.io apt update && apt install -y net-tools
|
||||
# docker exec node1.emqx.io node_dump
|
|
@ -1,80 +0,0 @@
|
|||
name: Check emqx app standalone
|
||||
|
||||
## apps/emqx can be used as a rebar/mix dependency
|
||||
## in other project, so we need to make sure apps/emqx
|
||||
## as an Erlang/Elixir app works standalone
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
run_emqx_app_tests:
|
||||
strategy:
|
||||
matrix:
|
||||
builder:
|
||||
- 5.1-3
|
||||
otp:
|
||||
- 25.3.2-1
|
||||
# no need to use more than 1 version of Elixir, since tests
|
||||
# run using only Erlang code. This is needed just to specify
|
||||
# the base image.
|
||||
elixir:
|
||||
- 1.14.5
|
||||
os:
|
||||
- ubuntu22.04
|
||||
arch:
|
||||
- amd64
|
||||
runs-on:
|
||||
- aws-amd64
|
||||
- ubuntu-22.04
|
||||
use-self-hosted:
|
||||
- ${{ github.repository_owner == 'emqx' }}
|
||||
exclude:
|
||||
- runs-on: ubuntu-22.04
|
||||
use-self-hosted: true
|
||||
- runs-on: aws-amd64
|
||||
use-self-hosted: false
|
||||
|
||||
runs-on: ${{ matrix.runs-on }}
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir}}-${{ matrix.otp }}-${{ matrix.os }}"
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: run
|
||||
run: |
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
echo "git diff base: $GITHUB_BASE_REF"
|
||||
if [[ "$GITHUB_BASE_REF" =~ [0-9a-f]{8,40} ]]; then
|
||||
# base is a commit sha1
|
||||
compare_base="$GITHUB_BASE_REF"
|
||||
else
|
||||
repo="${GITHUB_REPOSITORY}"
|
||||
git remote -v
|
||||
remote="$(git remote -v | grep -E "github\.com(:|/)$repo((\.git)|(\s))" | grep fetch | awk '{print $1}')"
|
||||
git fetch "$remote" "$GITHUB_BASE_REF"
|
||||
compare_base="$remote/$GITHUB_BASE_REF"
|
||||
fi
|
||||
changed_files="$(git diff --name-only ${compare_base} HEAD apps/emqx)"
|
||||
if [ "$changed_files" = '' ]; then
|
||||
echo "nothing changed in apps/emqx, ignored."
|
||||
exit 0
|
||||
fi
|
||||
make ensure-rebar3
|
||||
cp rebar3 apps/emqx/
|
||||
cd apps/emqx
|
||||
./rebar3 xref
|
||||
./rebar3 dialyzer
|
||||
./rebar3 eunit -v
|
||||
./rebar3 ct --name 'test@127.0.0.1' -v --readable=true
|
||||
./rebar3 proper -d test/props
|
||||
- uses: actions/upload-artifact@v3
|
||||
if: failure()
|
||||
with:
|
||||
name: logs-${{ matrix.runs-on }}
|
||||
path: apps/emqx/_build/test/logs
|
|
@ -1,255 +0,0 @@
|
|||
name: Functional Verification Tests
|
||||
|
||||
concurrency:
|
||||
group: fvt-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- 'ci/**'
|
||||
tags:
|
||||
- v*
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-22.04
|
||||
# prepare source with any OTP version, no need for a matrix
|
||||
container: ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-debian11
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
path: source
|
||||
fetch-depth: 0
|
||||
- name: get deps
|
||||
run: |
|
||||
make -C source deps-all
|
||||
zip -ryq source.zip source/* source/.[^.]*
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: source
|
||||
path: source.zip
|
||||
|
||||
docker_test:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: prepare
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
- emqx-elixir
|
||||
cluster_db_backend:
|
||||
- mnesia
|
||||
- rlog
|
||||
os:
|
||||
- ["debian11", "debian:11-slim"]
|
||||
builder:
|
||||
- 5.1-3
|
||||
otp:
|
||||
- 25.3.2-1
|
||||
elixir:
|
||||
- 1.14.5
|
||||
arch:
|
||||
- amd64
|
||||
steps:
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: source
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
|
||||
- name: make docker image
|
||||
working-directory: source
|
||||
env:
|
||||
EMQX_BUILDER: ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }}
|
||||
EMQX_RUNNER: ${{ matrix.os[1] }}
|
||||
run: |
|
||||
make ${{ matrix.profile }}-docker
|
||||
- name: run emqx
|
||||
timeout-minutes: 5
|
||||
working-directory: source
|
||||
run: |
|
||||
set -x
|
||||
if [[ "${{ matrix.profile }}" = *-elixir ]]
|
||||
then
|
||||
export IS_ELIXIR=yes
|
||||
PROFILE=$(echo ${{ matrix.profile }} | sed -e "s/-elixir//g")
|
||||
IMAGE=emqx/$PROFILE:$(./pkg-vsn.sh ${{ matrix.profile }})-elixir
|
||||
else
|
||||
IMAGE=emqx/${{ matrix.profile }}:$(./pkg-vsn.sh ${{ matrix.profile }})
|
||||
fi
|
||||
./.ci/docker-compose-file/scripts/run-emqx.sh $IMAGE ${{ matrix.cluster_db_backend }}
|
||||
- name: make paho tests
|
||||
run: |
|
||||
if ! docker exec -i python /scripts/pytest.sh "${{ matrix.cluster_db_backend }}"; then
|
||||
echo "DUMP_CONTAINER_LOGS_BGN"
|
||||
echo "============== haproxy =============="
|
||||
docker logs haproxy
|
||||
echo "============== node1 =============="
|
||||
docker logs node1.emqx.io
|
||||
echo "============== node2 =============="
|
||||
docker logs node2.emqx.io
|
||||
echo "DUMP_CONTAINER_LOGS_END"
|
||||
exit 1
|
||||
fi
|
||||
# simple smoke test for node_dump
|
||||
- name: test node_dump
|
||||
run: |
|
||||
docker exec node1.emqx.io node_dump
|
||||
|
||||
helm_test:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: prepare
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
discovery:
|
||||
- k8s
|
||||
- dns
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
os:
|
||||
- ["debian11", "debian:11-slim"]
|
||||
builder:
|
||||
- 5.1-3
|
||||
otp:
|
||||
- 25.3.2-1
|
||||
elixir:
|
||||
- 1.14.5
|
||||
arch:
|
||||
- amd64
|
||||
# - emqx-enterprise # TODO test enterprise
|
||||
|
||||
steps:
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: source
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
|
||||
- name: make docker image
|
||||
working-directory: source
|
||||
env:
|
||||
EMQX_BUILDER: ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-${{ matrix.os[0] }}
|
||||
EMQX_RUNNER: ${{ matrix.os[1] }}
|
||||
run: |
|
||||
make ${{ matrix.profile }}-docker
|
||||
echo "TARGET=emqx/${{ matrix.profile }}" >> $GITHUB_ENV
|
||||
echo "EMQX_TAG=$(./pkg-vsn.sh ${{ matrix.profile }})" >> $GITHUB_ENV
|
||||
- run: minikube start
|
||||
- run: minikube image load $TARGET:$EMQX_TAG
|
||||
- name: run emqx on chart
|
||||
working-directory: source
|
||||
if: matrix.discovery == 'k8s'
|
||||
run: |
|
||||
helm install ${{ matrix.profile }} \
|
||||
--set emqxConfig.EMQX_CLUSTER__DISCOVERY_STRATEGY="k8s" \
|
||||
--set emqxConfig.EMQX_CLUSTER__K8S__APISERVER="https://kubernetes.default.svc:443" \
|
||||
--set emqxConfig.EMQX_CLUSTER__K8S__SERVICE_NAME="${{ matrix.profile }}-headless" \
|
||||
--set emqxConfig.EMQX_CLUSTER__K8S__NAMESPACE="default" \
|
||||
--set image.repository=$TARGET \
|
||||
--set image.pullPolicy=Never \
|
||||
--set image.tag=$EMQX_TAG \
|
||||
--set emqxAclConfig="" \
|
||||
--set emqxConfig.EMQX_MQTT__RETRY_INTERVAL=2s \
|
||||
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
|
||||
--set emqxConfig.EMQX_AUTHORIZATION__SOURCES=[] \
|
||||
--set emqxConfig.EMQX_AUTHORIZATION__NO_MATCH=allow \
|
||||
deploy/charts/${{ matrix.profile }} \
|
||||
--debug
|
||||
- name: run emqx on chart
|
||||
working-directory: source
|
||||
if: matrix.discovery == 'dns'
|
||||
run: |
|
||||
helm install ${{ matrix.profile }} \
|
||||
--set emqxConfig.EMQX_CLUSTER__DISCOVERY_STRATEGY="dns" \
|
||||
--set emqxConfig.EMQX_CLUSTER__DNS__RECORD_TYPE="srv" \
|
||||
--set emqxConfig.EMQX_CLUSTER__DNS__NAME="${{ matrix.profile }}-headless.default.svc.cluster.local" \
|
||||
--set image.repository=$TARGET \
|
||||
--set image.pullPolicy=Never \
|
||||
--set image.tag=$EMQX_TAG \
|
||||
--set emqxAclConfig="" \
|
||||
--set emqxConfig.EMQX_MQTT__RETRY_INTERVAL=2s \
|
||||
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
|
||||
--set emqxConfig.EMQX_AUTHORIZATION__SOURCES=[] \
|
||||
--set emqxConfig.EMQX_AUTHORIZATION__NO_MATCH=allow \
|
||||
deploy/charts/${{ matrix.profile }} \
|
||||
--debug
|
||||
- name: waiting emqx started
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
while [ "$(kubectl get StatefulSet -l app.kubernetes.io/instance=${{ matrix.profile }} -o jsonpath='{.items[0].status.replicas}')" \
|
||||
!= "$(kubectl get StatefulSet -l app.kubernetes.io/instance=${{ matrix.profile }} -o jsonpath='{.items[0].status.readyReplicas}')" ]; do
|
||||
echo "==============================";
|
||||
kubectl get pods;
|
||||
echo "==============================";
|
||||
echo "waiting emqx started";
|
||||
sleep 10;
|
||||
done
|
||||
- name: Get Token
|
||||
timeout-minutes: 1
|
||||
run: |
|
||||
kubectl port-forward service/${{ matrix.profile }} 18083:18083 > /dev/null &
|
||||
|
||||
while
|
||||
[ "$(curl --silent -X 'GET' 'http://127.0.0.1:18083/api/v5/status' | tail -n1)" != "emqx is running" ]
|
||||
do
|
||||
echo "waiting emqx"
|
||||
sleep 1
|
||||
done
|
||||
|
||||
echo "TOKEN=$(curl --silent -X 'POST' 'http://127.0.0.1:18083/api/v5/login' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"username": "admin","password": "public"}' | jq -r ".token")" >> $GITHUB_ENV
|
||||
|
||||
- name: Check cluster
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
while
|
||||
[ "$(curl --silent -H "Authorization: Bearer $TOKEN" -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')" != "3" ];
|
||||
do
|
||||
echo "waiting ${{ matrix.profile }} cluster scale"
|
||||
sleep 1
|
||||
done
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: emqx/paho.mqtt.testing
|
||||
ref: develop-5.0
|
||||
path: paho.mqtt.testing
|
||||
- name: install pytest
|
||||
run: |
|
||||
pip install pytest==7.1.2 pytest-retry
|
||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||
- name: run paho test
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
port_connected () {
|
||||
local server="$1"
|
||||
local port="$2"
|
||||
echo > /dev/tcp/${server}/${port} 2>/dev/null
|
||||
}
|
||||
|
||||
kubectl port-forward service/${{ matrix.profile }} 1883:1883 > /dev/null &
|
||||
|
||||
while ! port_connected localhost 1883; do
|
||||
echo server not listening yet...
|
||||
sleep 10
|
||||
done
|
||||
|
||||
pytest --retries 3 -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "127.0.0.1"
|
||||
- if: failure()
|
||||
run: kubectl logs -l "app.kubernetes.io/instance=${{ matrix.profile }}" -c emqx --tail=1000
|
|
@ -1,16 +0,0 @@
|
|||
name: Run gitlint
|
||||
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
run_gitlint:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Run gitlint
|
||||
shell: bash
|
||||
run: |
|
||||
set -ex
|
||||
docker run --ulimit nofile=1024 -v $(pwd):/repo -w /repo ghcr.io/emqx/gitlint --commits ${{ github.event.pull_request.base.sha }}..$GITHUB_SHA --config .github/workflows/.gitlint
|
|
@ -0,0 +1,144 @@
|
|||
name: Helm tests
|
||||
|
||||
concurrency:
|
||||
group: helm-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version-emqx:
|
||||
required: true
|
||||
type: string
|
||||
version-emqx-enterprise:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
helm_test:
|
||||
runs-on: ubuntu-22.04
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
env:
|
||||
EMQX_NAME: ${{ matrix.profile }}
|
||||
EMQX_TAG: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
|
||||
REPOSITORY: "emqx/${{ matrix.profile }}"
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
discovery:
|
||||
- k8s
|
||||
- dns
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
path: source
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: "${{ env.EMQX_NAME }}-docker"
|
||||
path: /tmp
|
||||
- run: minikube start
|
||||
- run: |
|
||||
img="/tmp/${EMQX_NAME}-docker-${EMQX_TAG}.tar.gz"
|
||||
if stderr=$(minikube image load "${img}" 2>&1 >/dev/null) && test -n "$stderr"; then
|
||||
echo "${stderr}";
|
||||
exit 1;
|
||||
fi
|
||||
- name: run emqx on chart (k8s)
|
||||
if: matrix.discovery == 'k8s'
|
||||
working-directory: source
|
||||
run: |
|
||||
helm install ${EMQX_NAME} \
|
||||
--set emqxConfig.EMQX_CLUSTER__DISCOVERY_STRATEGY="k8s" \
|
||||
--set emqxConfig.EMQX_CLUSTER__K8S__APISERVER="https://kubernetes.default.svc:443" \
|
||||
--set emqxConfig.EMQX_CLUSTER__K8S__SERVICE_NAME="${EMQX_NAME}-headless" \
|
||||
--set emqxConfig.EMQX_CLUSTER__K8S__NAMESPACE="default" \
|
||||
--set image.repository=$REPOSITORY \
|
||||
--set image.pullPolicy=Never \
|
||||
--set image.tag=$EMQX_TAG \
|
||||
--set emqxAclConfig="" \
|
||||
--set emqxConfig.EMQX_MQTT__RETRY_INTERVAL=2s \
|
||||
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
|
||||
--set emqxConfig.EMQX_AUTHORIZATION__SOURCES=[] \
|
||||
--set emqxConfig.EMQX_AUTHORIZATION__NO_MATCH=allow \
|
||||
deploy/charts/${EMQX_NAME} \
|
||||
--debug
|
||||
- name: run emqx on chart (dns)
|
||||
if: matrix.discovery == 'dns'
|
||||
working-directory: source
|
||||
run: |
|
||||
helm install ${EMQX_NAME} \
|
||||
--set emqxConfig.EMQX_CLUSTER__DISCOVERY_STRATEGY="dns" \
|
||||
--set emqxConfig.EMQX_CLUSTER__DNS__RECORD_TYPE="srv" \
|
||||
--set emqxConfig.EMQX_CLUSTER__DNS__NAME="${EMQX_NAME}-headless.default.svc.cluster.local" \
|
||||
--set image.repository=$REPOSITORY \
|
||||
--set image.pullPolicy=Never \
|
||||
--set image.tag=$EMQX_TAG \
|
||||
--set emqxAclConfig="" \
|
||||
--set emqxConfig.EMQX_MQTT__RETRY_INTERVAL=2s \
|
||||
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
|
||||
--set emqxConfig.EMQX_AUTHORIZATION__SOURCES=[] \
|
||||
--set emqxConfig.EMQX_AUTHORIZATION__NO_MATCH=allow \
|
||||
deploy/charts/${EMQX_NAME} \
|
||||
--debug
|
||||
- name: waiting emqx started
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
while [ "$(kubectl get StatefulSet -l app.kubernetes.io/instance=${EMQX_NAME} -o jsonpath='{.items[0].status.replicas}')" \
|
||||
!= "$(kubectl get StatefulSet -l app.kubernetes.io/instance=${EMQX_NAME} -o jsonpath='{.items[0].status.readyReplicas}')" ]; do
|
||||
echo "==============================";
|
||||
kubectl get pods;
|
||||
echo "==============================";
|
||||
echo "waiting emqx started";
|
||||
sleep 10;
|
||||
done
|
||||
- name: Get Token
|
||||
run: |
|
||||
kubectl port-forward service/${EMQX_NAME} 18083:18083 > /dev/null &
|
||||
curl --head -X GET --retry 10 --retry-connrefused --retry-delay 6 http://localhost:18083/status
|
||||
echo "TOKEN=$(curl --silent -X 'POST' 'http://127.0.0.1:18083/api/v5/login' -H 'accept: application/json' -H 'Content-Type: application/json' -d '{"username": "admin","password": "public"}' | jq -r ".token")" >> $GITHUB_ENV
|
||||
|
||||
- name: Check cluster
|
||||
timeout-minutes: 1
|
||||
run: |
|
||||
while
|
||||
nodes_length="$(curl --silent -H "Authorization: Bearer $TOKEN" -X GET http://127.0.0.1:18083/api/v5/cluster| jq '.nodes|length')"
|
||||
[ $nodes_length != "3" ]
|
||||
do
|
||||
echo "waiting ${EMQX_NAME} cluster scale. Current live nodes: $nodes_length."
|
||||
sleep 1
|
||||
done
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: emqx/paho.mqtt.testing
|
||||
ref: develop-5.0
|
||||
path: paho.mqtt.testing
|
||||
- name: install pytest
|
||||
run: |
|
||||
pip install pytest==7.1.2 pytest-retry
|
||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||
- name: run paho test
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
port_connected () {
|
||||
local server="$1"
|
||||
local port="$2"
|
||||
echo > /dev/tcp/${server}/${port} 2>/dev/null
|
||||
}
|
||||
|
||||
kubectl port-forward service/${EMQX_NAME} 1883:1883 > /dev/null &
|
||||
|
||||
while ! port_connected localhost 1883; do
|
||||
echo server not listening yet...
|
||||
sleep 10
|
||||
done
|
||||
|
||||
pytest --retries 3 -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "127.0.0.1"
|
||||
- if: failure()
|
||||
run: kubectl logs -l "app.kubernetes.io/instance=${EMQX_NAME}" -c emqx --tail=1000
|
|
@ -1,22 +1,16 @@
|
|||
name: JMeter integration tests
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v5.*"
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
workflow_call:
|
||||
inputs:
|
||||
version-emqx:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
build_emqx_for_jmeter_tests:
|
||||
jmeter_artifact:
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
version: ${{ steps.build_docker.outputs.version}}
|
||||
steps:
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- name: Cache Jmeter
|
||||
id: cache-jmeter
|
||||
uses: actions/cache@v3
|
||||
|
@ -42,21 +36,6 @@ jobs:
|
|||
with:
|
||||
name: apache-jmeter.tgz
|
||||
path: /tmp/apache-jmeter.tgz
|
||||
- uses: actions/checkout@v3
|
||||
- name: zip emqx docker image
|
||||
id: build_docker
|
||||
if: endsWith(github.repository, 'emqx')
|
||||
run: |
|
||||
## TODO: make profile a matrix dimension
|
||||
PROFILE='emqx'
|
||||
make "${PROFILE}-docker"
|
||||
VSN="$(./pkg-vsn.sh $PROFILE)"
|
||||
echo "version=${VSN}" >> $GITHUB_OUTPUT
|
||||
docker save -o emqx.tar emqx/emqx:${VSN}
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: emqx.tar
|
||||
path: ./emqx.tar
|
||||
|
||||
advanced_feat:
|
||||
runs-on: ubuntu-22.04
|
||||
|
@ -70,69 +49,28 @@ jobs:
|
|||
- mqtt_topic_rewrite
|
||||
# - mqtt_retainer
|
||||
|
||||
needs: build_emqx_for_jmeter_tests
|
||||
needs: jmeter_artifact
|
||||
steps:
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/download-artifact@v3
|
||||
- uses: ./.github/actions/prepare-jmeter
|
||||
with:
|
||||
name: emqx.tar
|
||||
path: /tmp
|
||||
- name: load docker image
|
||||
run: |
|
||||
docker load < /tmp/emqx.tar
|
||||
version-emqx: ${{ inputs.version-emqx }}
|
||||
- name: docker compose up
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
_EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }}
|
||||
run: |
|
||||
docker-compose \
|
||||
docker compose \
|
||||
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||
up -d --build
|
||||
- name: wait docker compose up
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||
sleep 5;
|
||||
done
|
||||
while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \
|
||||
!= $(docker ps -a --filter name=client | wc -l) ]; do
|
||||
sleep 1
|
||||
done
|
||||
docker ps -a
|
||||
up --wait --build
|
||||
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: emqx/emqx-fvt
|
||||
ref: broker-autotest-v5
|
||||
path: scripts
|
||||
- uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||
architecture: x64 # (x64 or x86) - defaults to x64
|
||||
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
|
||||
distribution: 'zulu'
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: apache-jmeter.tgz
|
||||
path: /tmp
|
||||
- name: install jmeter
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
JMETER_VERSION: 5.4.3
|
||||
- name: show logs
|
||||
if: failure()
|
||||
run: |
|
||||
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar
|
||||
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||
docker compose \
|
||||
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||
logs
|
||||
- name: run jmeter
|
||||
run: |
|
||||
/opt/jmeter/bin/jmeter.sh \
|
||||
jmeter/bin/jmeter.sh \
|
||||
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||
-t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \
|
||||
-Demqx_ip=$HAPROXY_IP \
|
||||
|
@ -152,8 +90,6 @@ jobs:
|
|||
|
||||
pgsql_authn_authz:
|
||||
runs-on: ubuntu-22.04
|
||||
env:
|
||||
_EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
@ -168,72 +104,26 @@ jobs:
|
|||
- pgsql_authn
|
||||
- pgsql_authz
|
||||
|
||||
needs: build_emqx_for_jmeter_tests
|
||||
needs: jmeter_artifact
|
||||
steps:
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/download-artifact@v3
|
||||
- uses: ./.github/actions/prepare-jmeter
|
||||
with:
|
||||
name: emqx.tar
|
||||
path: /tmp
|
||||
- name: load docker image
|
||||
run: |
|
||||
docker load < /tmp/emqx.tar
|
||||
version-emqx: ${{ inputs.version-emqx }}
|
||||
- name: docker compose up
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
PGSQL_TAG: ${{ matrix.pgsql_tag }}
|
||||
run: |
|
||||
docker-compose \
|
||||
docker compose \
|
||||
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||
-f .ci/docker-compose-file/docker-compose-pgsql-tls.yaml \
|
||||
up -d --build
|
||||
- name: wait docker compose up
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||
sleep 5;
|
||||
done
|
||||
while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \
|
||||
!= $(docker ps -a --filter name=client | wc -l) ]; do
|
||||
sleep 1
|
||||
done
|
||||
docker ps -a
|
||||
up --wait --build
|
||||
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||
echo PGSQL_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' pgsql-tls) >> $GITHUB_ENV
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: emqx/emqx-fvt
|
||||
ref: broker-autotest-v5
|
||||
path: scripts
|
||||
- uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||
architecture: x64 # (x64 or x86) - defaults to x64
|
||||
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
|
||||
distribution: 'zulu'
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: apache-jmeter.tgz
|
||||
path: /tmp
|
||||
- name: install jmeter
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
JMETER_VERSION: 5.4.3
|
||||
run: |
|
||||
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/postgresql-42.2.18.jar https://repo1.maven.org/maven2/org/postgresql/postgresql/42.2.18/postgresql-42.2.18.jar
|
||||
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||
- name: run jmeter
|
||||
run: |
|
||||
/opt/jmeter/bin/jmeter.sh \
|
||||
jmeter/bin/jmeter.sh \
|
||||
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||
-t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \
|
||||
-Demqx_ip=$HAPROXY_IP \
|
||||
|
@ -257,7 +147,7 @@ jobs:
|
|||
- name: dump docker compose logs
|
||||
if: failure()
|
||||
run: |
|
||||
docker-compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log
|
||||
docker compose -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml logs --no-color > ./jmeter_logs/emqx.log
|
||||
- uses: actions/upload-artifact@v3
|
||||
if: always()
|
||||
with:
|
||||
|
@ -277,73 +167,26 @@ jobs:
|
|||
- mysql_authn
|
||||
- mysql_authz
|
||||
|
||||
needs: build_emqx_for_jmeter_tests
|
||||
needs: jmeter_artifact
|
||||
steps:
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/download-artifact@v3
|
||||
- uses: ./.github/actions/prepare-jmeter
|
||||
with:
|
||||
name: emqx.tar
|
||||
path: /tmp
|
||||
- name: load docker image
|
||||
run: |
|
||||
docker load < /tmp/emqx.tar
|
||||
version-emqx: ${{ inputs.version-emqx }}
|
||||
- name: docker compose up
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
_EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }}
|
||||
PGSQL_TAG: ${{ matrix.mysql_tag }}
|
||||
run: |
|
||||
docker-compose \
|
||||
docker compose \
|
||||
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||
-f .ci/docker-compose-file/docker-compose-mysql-tls.yaml \
|
||||
up -d --build
|
||||
- name: wait docker compose up
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||
sleep 5;
|
||||
done
|
||||
while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \
|
||||
!= $(docker ps -a --filter name=client | wc -l) ]; do
|
||||
sleep 1
|
||||
done
|
||||
docker ps -a
|
||||
up --wait --build
|
||||
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||
echo MYSQL_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' mysql-tls) >> $GITHUB_ENV
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: emqx/emqx-fvt
|
||||
ref: broker-autotest-v5
|
||||
path: scripts
|
||||
- uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||
architecture: x64 # (x64 or x86) - defaults to x64
|
||||
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
|
||||
distribution: 'zulu'
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: apache-jmeter.tgz
|
||||
path: /tmp
|
||||
- name: install jmeter
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
JMETER_VERSION: 5.4.3
|
||||
run: |
|
||||
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/mysql-connector-java-8.0.16.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar
|
||||
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||
- name: run jmeter
|
||||
run: |
|
||||
/opt/jmeter/bin/jmeter.sh \
|
||||
jmeter/bin/jmeter.sh \
|
||||
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||
-t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \
|
||||
-Demqx_ip=$HAPROXY_IP \
|
||||
|
@ -379,45 +222,19 @@ jobs:
|
|||
scripts_type:
|
||||
- jwt_authn
|
||||
|
||||
needs: build_emqx_for_jmeter_tests
|
||||
needs: jmeter_artifact
|
||||
steps:
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/download-artifact@v3
|
||||
- uses: ./.github/actions/prepare-jmeter
|
||||
with:
|
||||
name: emqx.tar
|
||||
path: /tmp
|
||||
- name: load docker image
|
||||
run: |
|
||||
docker load < /tmp/emqx.tar
|
||||
version-emqx: ${{ inputs.version-emqx }}
|
||||
- name: docker compose up
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
_EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }}
|
||||
run: |
|
||||
docker-compose \
|
||||
docker compose \
|
||||
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||
up -d --build
|
||||
- name: wait docker compose up
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||
sleep 5;
|
||||
done
|
||||
while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \
|
||||
!= $(docker ps -a --filter name=client | wc -l) ]; do
|
||||
sleep 1
|
||||
done
|
||||
docker ps -a
|
||||
up --wait --build
|
||||
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: emqx/emqx-fvt
|
||||
ref: broker-autotest-v5
|
||||
path: scripts
|
||||
- name: run jwks_server
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
|
@ -426,30 +243,9 @@ jobs:
|
|||
cd target
|
||||
docker run --name jwks_server --network emqx_bridge --ip 172.100.239.88 -d -v $(pwd)/jwkserver-0.0.1.jar:/jwks_server/jwkserver-0.0.1.jar --workdir /jwks_server openjdk:8-jdk bash \
|
||||
-c "java -jar jwkserver-0.0.1.jar"
|
||||
- uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||
architecture: x64 # (x64 or x86) - defaults to x64
|
||||
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
|
||||
distribution: 'zulu'
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: apache-jmeter.tgz
|
||||
path: /tmp
|
||||
- name: install jmeter
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
JMETER_VERSION: 5.4.3
|
||||
run: |
|
||||
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar
|
||||
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||
- name: run jmeter
|
||||
run: |
|
||||
/opt/jmeter/bin/jmeter.sh \
|
||||
jmeter/bin/jmeter.sh \
|
||||
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||
-t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \
|
||||
-Demqx_ip=$HAPROXY_IP \
|
||||
|
@ -478,79 +274,30 @@ jobs:
|
|||
- built_in_database_authn
|
||||
- built_in_database_authz
|
||||
|
||||
needs: build_emqx_for_jmeter_tests
|
||||
needs: jmeter_artifact
|
||||
steps:
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/download-artifact@v3
|
||||
- uses: ./.github/actions/prepare-jmeter
|
||||
with:
|
||||
name: emqx.tar
|
||||
path: /tmp
|
||||
- name: load docker image
|
||||
run: |
|
||||
docker load < /tmp/emqx.tar
|
||||
version-emqx: ${{ inputs.version-emqx }}
|
||||
- name: docker compose up
|
||||
timeout-minutes: 5
|
||||
env:
|
||||
_EMQX_DOCKER_IMAGE_TAG: emqx/emqx:${{ needs.build_emqx_for_jmeter_tests.outputs.version }}
|
||||
PGSQL_TAG: ${{ matrix.mysql_tag }}
|
||||
run: |
|
||||
docker-compose \
|
||||
docker compose \
|
||||
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
|
||||
up -d --build
|
||||
- name: wait docker compose up
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
while [ "$(docker inspect -f '{{ .State.Health.Status}}' node1.emqx.io)" != "healthy" ] || [ "$(docker inspect -f '{{ .State.Health.Status}}' node2.emqx.io)" != "healthy" ]; do
|
||||
echo "['$(date -u +"%y-%m-%dt%h:%m:%sz")']:waiting emqx";
|
||||
sleep 5;
|
||||
done
|
||||
while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \
|
||||
!= $(docker ps -a --filter name=client | wc -l) ]; do
|
||||
sleep 1
|
||||
done
|
||||
docker ps -a
|
||||
up --wait --build
|
||||
echo HAPROXY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' haproxy) >> $GITHUB_ENV
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
repository: emqx/emqx-fvt
|
||||
ref: broker-autotest-v5
|
||||
path: scripts
|
||||
- uses: actions/setup-java@v3
|
||||
with:
|
||||
java-version: '8.0.282' # The JDK version to make available on the path.
|
||||
java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
|
||||
architecture: x64 # (x64 or x86) - defaults to x64
|
||||
# https://github.com/actions/setup-java/blob/main/docs/switching-to-v2.md
|
||||
distribution: 'zulu'
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: apache-jmeter.tgz
|
||||
path: /tmp
|
||||
- name: install jmeter
|
||||
timeout-minutes: 10
|
||||
env:
|
||||
JMETER_VERSION: 5.4.3
|
||||
run: |
|
||||
cd /tmp && tar -xvf apache-jmeter.tgz
|
||||
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar
|
||||
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/mysql-connector-java-8.0.16.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.16/mysql-connector-java-8.0.16.jar
|
||||
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
|
||||
- name: run jmeter
|
||||
run: |
|
||||
/opt/jmeter/bin/jmeter.sh \
|
||||
jmeter/bin/jmeter.sh \
|
||||
-Jjmeter.save.saveservice.output_format=xml -n \
|
||||
-t scripts/broker-autotest-suite/${{ matrix.scripts_type }}.jmx \
|
||||
-Demqx_ip=$HAPROXY_IP \
|
||||
-l jmeter_logs/${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}.jtl \
|
||||
-j jmeter_logs/logs/${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}.log
|
||||
-l jmeter_logs/${{ matrix.scripts_type }}.jtl \
|
||||
-j jmeter_logs/logs/${{ matrix.scripts_type }}.log
|
||||
- name: check logs
|
||||
run: |
|
||||
if cat jmeter_logs/${{ matrix.scripts_type }}_${{ matrix.mysql_tag }}.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
|
||||
if cat jmeter_logs/${{ matrix.scripts_type }}.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
|
||||
echo "check logs failed"
|
||||
exit 1
|
||||
fi
|
||||
|
@ -559,11 +306,3 @@ jobs:
|
|||
with:
|
||||
name: jmeter_logs
|
||||
path: ./jmeter_logs
|
||||
|
||||
delete-artifact:
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [advanced_feat,pgsql_authn_authz,JWT_authn,mysql_authn_authz,built_in_database_authn_authz]
|
||||
steps:
|
||||
- uses: geekyeggo/delete-artifact@v2
|
||||
with:
|
||||
name: emqx.tar
|
||||
|
|
|
@ -4,18 +4,20 @@ concurrency:
|
|||
group: relup-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
# on:
|
||||
# push:
|
||||
# branches:
|
||||
# - '**'
|
||||
# tags:
|
||||
# - e*
|
||||
# pull_request:
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
builder:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
relup_test_plan:
|
||||
runs-on: ubuntu-22.04
|
||||
container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04"
|
||||
runs-on: ${{ inputs.runner }}
|
||||
container: ${{ inputs.builder }}
|
||||
outputs:
|
||||
CUR_EE_VSN: ${{ steps.find-versions.outputs.CUR_EE_VSN }}
|
||||
OLD_VERSIONS: ${{ steps.find-versions.outputs.OLD_VERSIONS }}
|
||||
|
@ -23,16 +25,18 @@ jobs:
|
|||
run:
|
||||
shell: bash
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
name: Checkout
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
path: emqx
|
||||
fetch-depth: 0
|
||||
name: emqx-enterprise
|
||||
- name: extract artifact
|
||||
run: |
|
||||
unzip -o -q emqx-enterprise.zip
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- name: Find versions
|
||||
id: find-versions
|
||||
run: |
|
||||
set -x
|
||||
cd emqx
|
||||
ee_vsn="$(./pkg-vsn.sh enterprise)"
|
||||
old_ee_vsns="$(./scripts/relup-build/base-vsns.sh enterprise | xargs)"
|
||||
old_vsns=$(echo -n "${old_ee_vsns}" | sed 's/ $//g' | jq -R -s -c 'split(" ")')
|
||||
|
@ -40,8 +44,6 @@ jobs:
|
|||
echo "OLD_VERSIONS=$old_vsns" >> $GITHUB_OUTPUT
|
||||
- name: build emqx
|
||||
run: |
|
||||
set -x
|
||||
cd emqx
|
||||
export PROFILE='emqx-enterprise'
|
||||
make emqx-enterprise-tgz
|
||||
- uses: actions/upload-artifact@v3
|
||||
|
@ -49,10 +51,10 @@ jobs:
|
|||
with:
|
||||
name: emqx_built
|
||||
path: |
|
||||
emqx/_upgrade_base
|
||||
emqx/_packages
|
||||
emqx/scripts
|
||||
emqx/.ci
|
||||
_upgrade_base
|
||||
_packages
|
||||
scripts
|
||||
.ci
|
||||
|
||||
relup_test_run:
|
||||
needs:
|
||||
|
@ -70,8 +72,7 @@ jobs:
|
|||
run:
|
||||
shell: bash
|
||||
steps:
|
||||
# setup Erlang to run lux
|
||||
- uses: erlef/setup-beam@v1.15.4
|
||||
- uses: erlef/setup-beam@v1.16.0
|
||||
with:
|
||||
otp-version: 25.3.2
|
||||
- uses: actions/checkout@v3
|
||||
|
@ -81,7 +82,7 @@ jobs:
|
|||
path: lux
|
||||
- name: Install lux
|
||||
run: |
|
||||
set -e -u -x
|
||||
set -eu
|
||||
cd lux
|
||||
autoconf
|
||||
./configure
|
||||
|
@ -94,10 +95,7 @@ jobs:
|
|||
path: .
|
||||
- name: run relup test
|
||||
run: |
|
||||
set -e -x -u
|
||||
chmod a+x scripts/**/*.sh
|
||||
ls -l scripts
|
||||
ls -l scripts/relup-test
|
||||
set -eux
|
||||
case "$OLD_VSN" in
|
||||
e*)
|
||||
export CUR_VSN="$CUR_EE_VSN"
|
||||
|
|
|
@ -5,152 +5,34 @@ concurrency:
|
|||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- 'ci/**'
|
||||
tags:
|
||||
- v*
|
||||
- e*
|
||||
pull_request:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
builder:
|
||||
required: true
|
||||
type: string
|
||||
ct-matrix:
|
||||
required: true
|
||||
type: string
|
||||
ct-host:
|
||||
required: true
|
||||
type: string
|
||||
ct-docker:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
env:
|
||||
IS_CI: "yes"
|
||||
|
||||
jobs:
|
||||
build-matrix:
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
prepare: ${{ steps.matrix.outputs.prepare }}
|
||||
host: ${{ steps.matrix.outputs.host }}
|
||||
docker: ${{ steps.matrix.outputs.docker }}
|
||||
runs-on: ${{ steps.runner.outputs.runs-on }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Build matrix
|
||||
id: matrix
|
||||
run: |
|
||||
APPS="$(./scripts/find-apps.sh --ci)"
|
||||
MATRIX="$(echo "${APPS}" | jq -c '
|
||||
[
|
||||
(.[] | select(.profile == "emqx") | . + {
|
||||
builder: "5.1-3",
|
||||
otp: "25.3.2-1",
|
||||
elixir: "1.14.5"
|
||||
}),
|
||||
(.[] | select(.profile == "emqx-enterprise") | . + {
|
||||
builder: "5.1-3",
|
||||
otp: ["25.3.2-1"][],
|
||||
elixir: "1.14.5"
|
||||
})
|
||||
]
|
||||
')"
|
||||
echo "${MATRIX}" | jq
|
||||
MATRIX_PREPARE="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
|
||||
MATRIX_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
|
||||
MATRIX_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
|
||||
echo "prepare=${MATRIX_PREPARE}" | tee -a $GITHUB_OUTPUT
|
||||
echo "host=${MATRIX_HOST}" | tee -a $GITHUB_OUTPUT
|
||||
echo "docker=${MATRIX_DOCKER}" | tee -a $GITHUB_OUTPUT
|
||||
- name: Choose runner host
|
||||
id: runner
|
||||
run: |
|
||||
RUNS_ON="ubuntu-22.04"
|
||||
${{ github.repository_owner == 'emqx' }} && RUNS_ON="aws-amd64"
|
||||
echo "runs-on=${RUNS_ON}" | tee -a $GITHUB_OUTPUT
|
||||
|
||||
prepare:
|
||||
runs-on: ${{ needs.build-matrix.outputs.runs-on }}
|
||||
needs: [build-matrix]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.build-matrix.outputs.prepare) }}
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
path: source
|
||||
- name: get_all_deps
|
||||
working-directory: source
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
run: |
|
||||
make ensure-rebar3
|
||||
# fetch all deps and compile
|
||||
make ${{ matrix.profile }}-compile
|
||||
make test-compile
|
||||
cd ..
|
||||
zip -ryq source.zip source/* source/.[^.]*
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: source-${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: source.zip
|
||||
|
||||
check_examples:
|
||||
needs:
|
||||
- build-matrix
|
||||
- prepare
|
||||
runs-on: ${{ needs.build-matrix.outputs.runs-on }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.build-matrix.outputs.prepare) }}
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: source-${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -o -q source.zip
|
||||
- name: check example config files
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
working-directory: source
|
||||
run: ./scripts/test/check-example-configs.sh
|
||||
|
||||
static_checks:
|
||||
needs:
|
||||
- build-matrix
|
||||
- prepare
|
||||
runs-on: ${{ needs.build-matrix.outputs.runs-on }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.build-matrix.outputs.prepare) }}
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: source-${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -o -q source.zip
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: "source/emqx_dialyzer_${{ matrix.otp }}_plt"
|
||||
key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('source/rebar.*', 'source/apps/*/rebar.*', 'source/lib-ee/*/rebar.*') }}
|
||||
restore-keys: |
|
||||
rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-
|
||||
- name: run static checks
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
working-directory: source
|
||||
run: make static_checks
|
||||
|
||||
eunit_and_proper:
|
||||
needs:
|
||||
- build-matrix
|
||||
- prepare
|
||||
runs-on: ${{ needs.build-matrix.outputs.runs-on }}
|
||||
runs-on: ${{ inputs.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.build-matrix.outputs.prepare) }}
|
||||
include: ${{ fromJson(inputs.ct-matrix) }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
|
@ -161,16 +43,16 @@ jobs:
|
|||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: source-${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -o -q source.zip
|
||||
name: ${{ matrix.profile }}
|
||||
- name: extract artifact
|
||||
run: |
|
||||
unzip -o -q ${{ matrix.profile }}.zip
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
# produces eunit.coverdata
|
||||
- name: eunit
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
|
||||
working-directory: source
|
||||
run: make eunit
|
||||
|
||||
# produces proper.coverdata
|
||||
|
@ -178,23 +60,19 @@ jobs:
|
|||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
|
||||
working-directory: source
|
||||
run: make proper
|
||||
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: coverdata
|
||||
path: source/_build/test/cover
|
||||
path: _build/test/cover
|
||||
|
||||
ct_docker:
|
||||
needs:
|
||||
- build-matrix
|
||||
- prepare
|
||||
runs-on: ${{ needs.build-matrix.outputs.runs-on }}
|
||||
runs-on: ${{ inputs.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.build-matrix.outputs.docker) }}
|
||||
include: ${{ fromJson(inputs.ct-docker) }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
|
@ -204,14 +82,14 @@ jobs:
|
|||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: source-${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
name: ${{ matrix.profile }}
|
||||
- name: extract artifact
|
||||
run: |
|
||||
unzip -o -q ${{ matrix.profile }}.zip
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
|
||||
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
|
||||
- name: run common tests
|
||||
working-directory: source
|
||||
env:
|
||||
DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
MONGO_TAG: "5"
|
||||
|
@ -229,22 +107,19 @@ jobs:
|
|||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: coverdata
|
||||
path: source/_build/test/cover
|
||||
path: _build/test/cover
|
||||
- uses: actions/upload-artifact@v3
|
||||
if: failure()
|
||||
with:
|
||||
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
|
||||
path: source/_build/test/logs
|
||||
path: _build/test/logs
|
||||
|
||||
ct:
|
||||
needs:
|
||||
- build-matrix
|
||||
- prepare
|
||||
runs-on: ${{ needs.build-matrix.outputs.runs-on }}
|
||||
runs-on: ${{ inputs.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.build-matrix.outputs.host) }}
|
||||
include: ${{ fromJson(inputs.ct-host) }}
|
||||
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
defaults:
|
||||
|
@ -255,14 +130,14 @@ jobs:
|
|||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: source-${{ matrix.profile }}-${{ matrix.otp }}
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
name: ${{ matrix.profile }}
|
||||
- name: extract artifact
|
||||
run: |
|
||||
unzip -o -q ${{ matrix.profile }}.zip
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
|
||||
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
|
||||
- name: run common tests
|
||||
working-directory: source
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
SUITEGROUP: ${{ matrix.suitegroup }}
|
||||
|
@ -272,58 +147,61 @@ jobs:
|
|||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: coverdata
|
||||
path: source/_build/test/cover
|
||||
path: _build/test/cover
|
||||
if-no-files-found: warn # do not fail if no coverdata found
|
||||
- uses: actions/upload-artifact@v3
|
||||
if: failure()
|
||||
with:
|
||||
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
|
||||
path: source/_build/test/logs
|
||||
path: _build/test/logs
|
||||
|
||||
make_cover:
|
||||
needs:
|
||||
- eunit_and_proper
|
||||
- ct
|
||||
- ct_docker
|
||||
runs-on: ubuntu-22.04
|
||||
container: "ghcr.io/emqx/emqx-builder/5.1-3:1.14.5-25.3.2-1-ubuntu22.04"
|
||||
runs-on: ${{ inputs.runner }}
|
||||
container: ${{ inputs.builder }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
profile:
|
||||
- emqx-enterprise
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: source-emqx-enterprise-25.3.2-1
|
||||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
name: ${{ matrix.profile }}
|
||||
- name: extract artifact
|
||||
run: |
|
||||
unzip -o -q ${{ matrix.profile }}.zip
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
|
||||
- uses: actions/download-artifact@v3
|
||||
name: download coverdata
|
||||
with:
|
||||
name: coverdata
|
||||
path: source/_build/test/cover
|
||||
path: _build/test/cover
|
||||
|
||||
- name: make cover
|
||||
working-directory: source
|
||||
env:
|
||||
PROFILE: emqx-enterprise
|
||||
run: make cover
|
||||
|
||||
- name: send to coveralls
|
||||
working-directory: source
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PROFILE: emqx-enterprise
|
||||
run: make coveralls
|
||||
|
||||
- name: get coveralls logs
|
||||
working-directory: source
|
||||
if: failure()
|
||||
run: cat rebar3.crashdump
|
||||
|
||||
# do this in a separate job
|
||||
upload_coverdata:
|
||||
needs: make_cover
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ${{ inputs.runner }}
|
||||
steps:
|
||||
- name: Coveralls Finished
|
||||
env:
|
||||
|
|
|
@ -1,19 +0,0 @@
|
|||
name: Shellcheck
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
shellcheck:
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v3
|
||||
- name: Install shellcheck
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt install shellcheck
|
||||
- name: Run shellcheck
|
||||
run: |
|
||||
./scripts/shellcheck.sh
|
||||
echo "success"
|
|
@ -0,0 +1,29 @@
|
|||
name: Spellcheck
|
||||
|
||||
concurrency:
|
||||
group: spellcheck-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
spellcheck:
|
||||
strategy:
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
runs-on: ${{ inputs.runner }}
|
||||
steps:
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: "${{ matrix.profile }}_schema_dump"
|
||||
path: /tmp/
|
||||
- name: Run spellcheck
|
||||
run: |
|
||||
bash /tmp/scripts/spellcheck/spellcheck.sh /tmp/_build/docgen/${{ matrix.profile }}/schema-en.json
|
|
@ -0,0 +1,49 @@
|
|||
name: Static checks
|
||||
|
||||
concurrency:
|
||||
group: static-checks-${{ github.event_name }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runner:
|
||||
required: true
|
||||
type: string
|
||||
builder:
|
||||
required: true
|
||||
type: string
|
||||
ct-matrix:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
env:
|
||||
IS_CI: "yes"
|
||||
|
||||
jobs:
|
||||
static_checks:
|
||||
runs-on: ${{ inputs.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(inputs.ct-matrix) }}
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.profile }}
|
||||
- name: extract artifact
|
||||
run: |
|
||||
unzip -o -q ${{ matrix.profile }}.zip
|
||||
git config --global --add safe.directory "$GITHUB_WORKSPACE"
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: "emqx_dialyzer_${{ matrix.otp }}_plt"
|
||||
key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*', 'lib-ee/*/rebar.*') }}
|
||||
restore-keys: |
|
||||
rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-
|
||||
- name: run static checks
|
||||
env:
|
||||
PROFILE: ${{ matrix.profile }}
|
||||
run: make static_checks
|
2
Makefile
2
Makefile
|
@ -143,7 +143,7 @@ endif
|
|||
|
||||
.PHONY: cover
|
||||
cover: $(REBAR)
|
||||
@ENABLE_COVER_COMPILE=1 $(REBAR) cover
|
||||
@ENABLE_COVER_COMPILE=1 $(REBAR) as test cover
|
||||
|
||||
.PHONY: coveralls
|
||||
coveralls: $(REBAR)
|
||||
|
|
|
@ -32,7 +32,7 @@
|
|||
%% `apps/emqx/src/bpapi/README.md'
|
||||
|
||||
%% Opensource edition
|
||||
-define(EMQX_RELEASE_CE, "5.1.3").
|
||||
-define(EMQX_RELEASE_CE, "5.1.5").
|
||||
|
||||
%% Enterprise edition
|
||||
-define(EMQX_RELEASE_EE, "5.1.1").
|
||||
|
|
|
@ -28,7 +28,7 @@
|
|||
{gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}},
|
||||
{cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}},
|
||||
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}},
|
||||
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.9"}}},
|
||||
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.10"}}},
|
||||
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
|
||||
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.14"}}},
|
||||
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}},
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
{application, emqx, [
|
||||
{id, "emqx"},
|
||||
{description, "EMQX Core"},
|
||||
{vsn, "5.1.4"},
|
||||
{vsn, "5.1.5"},
|
||||
{modules, []},
|
||||
{registered, []},
|
||||
{applications, [
|
||||
|
|
|
@ -31,7 +31,7 @@ start_link() ->
|
|||
|
||||
init([]) ->
|
||||
%% Broker pool
|
||||
PoolSize = emqx_vm:schedulers() * 2,
|
||||
PoolSize = emqx:get_config([node, broker_pool_size], emqx_vm:schedulers() * 2),
|
||||
BrokerPool = emqx_pool_sup:spec([
|
||||
broker_pool,
|
||||
hash,
|
||||
|
|
|
@ -685,7 +685,8 @@ handle_cast(Msg, State) ->
|
|||
|
||||
handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
|
||||
?tp(emqx_cm_process_down, #{stale_pid => Pid, reason => _Reason}),
|
||||
ChanPids = [Pid | emqx_utils:drain_down(?BATCH_SIZE)],
|
||||
BatchSize = emqx:get_config([node, channel_cleanup_batch_size], ?BATCH_SIZE),
|
||||
ChanPids = [Pid | emqx_utils:drain_down(BatchSize)],
|
||||
{Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon),
|
||||
lists:foreach(fun mark_channel_disconnected/1, ChanPids),
|
||||
ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]),
|
||||
|
|
|
@ -31,7 +31,9 @@ init([]) ->
|
|||
%% always start emqx_config_handler first to load the emqx.conf to emqx_config
|
||||
[
|
||||
child_spec(emqx_config_handler, worker),
|
||||
child_spec(emqx_pool_sup, supervisor),
|
||||
child_spec(emqx_pool_sup, supervisor, [
|
||||
emqx:get_config([node, generic_pool_size], emqx_vm:schedulers())
|
||||
]),
|
||||
child_spec(emqx_hooks, worker),
|
||||
child_spec(emqx_stats, worker),
|
||||
child_spec(emqx_metrics, worker),
|
||||
|
|
|
@ -19,7 +19,8 @@
|
|||
-export([
|
||||
hash/2,
|
||||
hash_data/2,
|
||||
check_pass/3
|
||||
check_pass/3,
|
||||
compare_secure/2
|
||||
]).
|
||||
|
||||
-export_type([
|
||||
|
|
|
@ -24,6 +24,7 @@
|
|||
|
||||
-export([
|
||||
start_link/0,
|
||||
start_link/1,
|
||||
start_link/3,
|
||||
start_link/4
|
||||
]).
|
||||
|
@ -51,6 +52,9 @@ spec(ChildId, Args) ->
|
|||
start_link() ->
|
||||
start_link(?POOL, random, {?POOL, start_link, []}).
|
||||
|
||||
start_link(PoolSize) ->
|
||||
start_link(?POOL, random, PoolSize, {?POOL, start_link, []}).
|
||||
|
||||
-spec start_link(atom() | tuple(), atom(), mfargs()) ->
|
||||
{ok, pid()} | {error, term()}.
|
||||
start_link(Pool, Type, MFA) ->
|
||||
|
|
|
@ -146,13 +146,18 @@ handle_info({mnesia_table_event, Event}, State) ->
|
|||
?SLOG(debug, #{msg => "unexpected_mnesia_table_event", event => Event}),
|
||||
{noreply, State};
|
||||
handle_info({nodedown, Node}, State = #{nodes := Nodes}) ->
|
||||
global:trans(
|
||||
{?LOCK, self()},
|
||||
fun() ->
|
||||
mria:transaction(?ROUTE_SHARD, fun ?MODULE:cleanup_routes/1, [Node])
|
||||
end
|
||||
),
|
||||
ok = mria:dirty_delete(?ROUTING_NODE, Node),
|
||||
case mria_rlog:role() of
|
||||
core ->
|
||||
global:trans(
|
||||
{?LOCK, self()},
|
||||
fun() ->
|
||||
mria:transaction(?ROUTE_SHARD, fun ?MODULE:cleanup_routes/1, [Node])
|
||||
end
|
||||
),
|
||||
ok = mria:dirty_delete(?ROUTING_NODE, Node);
|
||||
replicant ->
|
||||
ok
|
||||
end,
|
||||
?tp(emqx_router_helper_cleanup_done, #{node => Node}),
|
||||
{noreply, State#{nodes := lists:delete(Node, Nodes)}, hibernate};
|
||||
handle_info({membership, {mnesia, down, Node}}, State) ->
|
||||
|
|
|
@ -41,4 +41,9 @@ init([]) ->
|
|||
hash,
|
||||
{emqx_router, start_link, []}
|
||||
]),
|
||||
{ok, {{one_for_all, 0, 1}, [Helper, RouterPool]}}.
|
||||
SupFlags = #{
|
||||
strategy => one_for_one,
|
||||
intensity => 10,
|
||||
period => 100
|
||||
},
|
||||
{ok, {SupFlags, [Helper, RouterPool]}}.
|
||||
|
|
|
@ -2017,6 +2017,14 @@ common_ssl_opts_schema(Defaults, Type) ->
|
|||
desc => ?DESC(common_ssl_opts_schema_cacertfile)
|
||||
}
|
||||
)},
|
||||
{"cacerts",
|
||||
sc(
|
||||
boolean(),
|
||||
#{
|
||||
default => false,
|
||||
deprecated => {since, "5.1.4"}
|
||||
}
|
||||
)},
|
||||
{"certfile",
|
||||
sc(
|
||||
binary(),
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
%% -*- mode: erlang -*-
|
||||
{application, emqx_authn, [
|
||||
{description, "EMQX Authentication"},
|
||||
{vsn, "0.1.23"},
|
||||
{vsn, "0.1.24"},
|
||||
{modules, []},
|
||||
{registered, [emqx_authn_sup, emqx_authn_registry]},
|
||||
{applications, [
|
||||
|
|
|
@ -40,7 +40,8 @@ providers() ->
|
|||
{{password_based, http}, emqx_authn_http},
|
||||
{jwt, emqx_authn_jwt},
|
||||
{{scram, built_in_database}, emqx_enhanced_authn_scram_mnesia}
|
||||
].
|
||||
] ++
|
||||
emqx_authn_enterprise:providers().
|
||||
|
||||
check_config(Config) ->
|
||||
check_config(Config, #{}).
|
||||
|
|
|
@ -876,7 +876,8 @@ resource_provider() ->
|
|||
emqx_authn_mongodb,
|
||||
emqx_authn_redis,
|
||||
emqx_authn_http
|
||||
].
|
||||
] ++
|
||||
emqx_authn_enterprise:resource_provider().
|
||||
|
||||
lookup_from_local_node(ChainName, AuthenticatorID) ->
|
||||
NodeId = node(self()),
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_authn_enterprise).
|
||||
|
||||
-export([providers/0, resource_provider/0]).
|
||||
|
||||
-if(?EMQX_RELEASE_EDITION == ee).
|
||||
|
||||
providers() ->
|
||||
[{{password_based, ldap}, emqx_ldap_authn}].
|
||||
|
||||
resource_provider() ->
|
||||
[emqx_ldap_authn].
|
||||
|
||||
-else.
|
||||
|
||||
providers() ->
|
||||
[].
|
||||
|
||||
resource_provider() ->
|
||||
[].
|
||||
-endif.
|
|
@ -35,7 +35,8 @@
|
|||
ensure_apps_started/1,
|
||||
cleanup_resources/0,
|
||||
make_resource_id/1,
|
||||
without_password/1
|
||||
without_password/1,
|
||||
to_bool/1
|
||||
]).
|
||||
|
||||
-define(AUTHN_PLACEHOLDERS, [
|
||||
|
@ -144,47 +145,8 @@ render_sql_params(ParamList, Credential) ->
|
|||
#{return => rawlist, var_trans => fun handle_sql_var/2}
|
||||
).
|
||||
|
||||
%% true
|
||||
is_superuser(#{<<"is_superuser">> := <<"true">>}) ->
|
||||
#{is_superuser => true};
|
||||
is_superuser(#{<<"is_superuser">> := true}) ->
|
||||
#{is_superuser => true};
|
||||
is_superuser(#{<<"is_superuser">> := <<"1">>}) ->
|
||||
#{is_superuser => true};
|
||||
is_superuser(#{<<"is_superuser">> := I}) when
|
||||
is_integer(I) andalso I >= 1
|
||||
->
|
||||
#{is_superuser => true};
|
||||
%% false
|
||||
is_superuser(#{<<"is_superuser">> := <<"">>}) ->
|
||||
#{is_superuser => false};
|
||||
is_superuser(#{<<"is_superuser">> := <<"0">>}) ->
|
||||
#{is_superuser => false};
|
||||
is_superuser(#{<<"is_superuser">> := 0}) ->
|
||||
#{is_superuser => false};
|
||||
is_superuser(#{<<"is_superuser">> := null}) ->
|
||||
#{is_superuser => false};
|
||||
is_superuser(#{<<"is_superuser">> := undefined}) ->
|
||||
#{is_superuser => false};
|
||||
is_superuser(#{<<"is_superuser">> := <<"false">>}) ->
|
||||
#{is_superuser => false};
|
||||
is_superuser(#{<<"is_superuser">> := false}) ->
|
||||
#{is_superuser => false};
|
||||
is_superuser(#{<<"is_superuser">> := MaybeBinInt}) when
|
||||
is_binary(MaybeBinInt)
|
||||
->
|
||||
try binary_to_integer(MaybeBinInt) of
|
||||
Int when Int >= 1 ->
|
||||
#{is_superuser => true};
|
||||
Int when Int =< 0 ->
|
||||
#{is_superuser => false}
|
||||
catch
|
||||
error:badarg ->
|
||||
#{is_superuser => false}
|
||||
end;
|
||||
%% fallback to default
|
||||
is_superuser(#{<<"is_superuser">> := _}) ->
|
||||
#{is_superuser => false};
|
||||
is_superuser(#{<<"is_superuser">> := Value}) ->
|
||||
#{is_superuser => to_bool(Value)};
|
||||
is_superuser(#{}) ->
|
||||
#{is_superuser => false}.
|
||||
|
||||
|
@ -211,6 +173,40 @@ make_resource_id(Name) ->
|
|||
without_password(Credential) ->
|
||||
without_password(Credential, [password, <<"password">>]).
|
||||
|
||||
to_bool(<<"true">>) ->
|
||||
true;
|
||||
to_bool(true) ->
|
||||
true;
|
||||
to_bool(<<"1">>) ->
|
||||
true;
|
||||
to_bool(I) when is_integer(I) andalso I >= 1 ->
|
||||
true;
|
||||
%% false
|
||||
to_bool(<<"">>) ->
|
||||
false;
|
||||
to_bool(<<"0">>) ->
|
||||
false;
|
||||
to_bool(0) ->
|
||||
false;
|
||||
to_bool(null) ->
|
||||
false;
|
||||
to_bool(undefined) ->
|
||||
false;
|
||||
to_bool(<<"false">>) ->
|
||||
false;
|
||||
to_bool(false) ->
|
||||
false;
|
||||
to_bool(MaybeBinInt) when is_binary(MaybeBinInt) ->
|
||||
try
|
||||
binary_to_integer(MaybeBinInt) >= 1
|
||||
catch
|
||||
error:badarg ->
|
||||
false
|
||||
end;
|
||||
%% fallback to default
|
||||
to_bool(_) ->
|
||||
false.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
%% -*- mode: erlang -*-
|
||||
{application, emqx_authz, [
|
||||
{description, "An OTP application"},
|
||||
{vsn, "0.1.24"},
|
||||
{vsn, "0.1.25"},
|
||||
{registered, []},
|
||||
{mod, {emqx_authz_app, []}},
|
||||
{applications, [
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
-behaviour(emqx_config_handler).
|
||||
-behaviour(emqx_config_backup).
|
||||
|
||||
-dialyzer({nowarn_function, [authz_module/1]}).
|
||||
|
||||
-include("emqx_authz.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
-include_lib("emqx/include/emqx_hooks.hrl").
|
||||
|
@ -571,7 +573,12 @@ find_action_in_hooks() ->
|
|||
authz_module(built_in_database) ->
|
||||
emqx_authz_mnesia;
|
||||
authz_module(Type) ->
|
||||
list_to_existing_atom("emqx_authz_" ++ atom_to_list(Type)).
|
||||
case emqx_authz_enterprise:is_enterprise_module(Type) of
|
||||
{ok, Module} ->
|
||||
Module;
|
||||
_ ->
|
||||
list_to_existing_atom("emqx_authz_" ++ atom_to_list(Type))
|
||||
end.
|
||||
|
||||
type(#{type := Type}) -> type(Type);
|
||||
type(#{<<"type">> := Type}) -> type(Type);
|
||||
|
@ -591,8 +598,7 @@ type(built_in_database) -> built_in_database;
|
|||
type(<<"built_in_database">>) -> built_in_database;
|
||||
type(client_info) -> client_info;
|
||||
type(<<"client_info">>) -> client_info;
|
||||
%% should never happen if the input is type-checked by hocon schema
|
||||
type(Unknown) -> throw({unknown_authz_source_type, Unknown}).
|
||||
type(MaybeEnterprise) -> emqx_authz_enterprise:type(MaybeEnterprise).
|
||||
|
||||
maybe_write_files(#{<<"type">> := <<"file">>} = Source) ->
|
||||
write_acl_file(Source);
|
||||
|
|
|
@ -95,7 +95,9 @@ fields(position) ->
|
|||
in => body
|
||||
}
|
||||
)}
|
||||
].
|
||||
];
|
||||
fields(MaybeEnterprise) ->
|
||||
emqx_authz_enterprise:fields(MaybeEnterprise).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% http type funcs
|
||||
|
@ -283,7 +285,7 @@ authz_sources_types(Type) ->
|
|||
mysql,
|
||||
postgresql,
|
||||
file
|
||||
].
|
||||
] ++ emqx_authz_enterprise:authz_sources_types().
|
||||
|
||||
to_list(A) when is_atom(A) ->
|
||||
atom_to_list(A);
|
||||
|
|
|
@ -0,0 +1,66 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_authz_enterprise).
|
||||
|
||||
-export([
|
||||
type_names/0,
|
||||
fields/1,
|
||||
is_enterprise_module/1,
|
||||
authz_sources_types/0,
|
||||
type/1,
|
||||
desc/1
|
||||
]).
|
||||
|
||||
-if(?EMQX_RELEASE_EDITION == ee).
|
||||
|
||||
%% type name set
|
||||
type_names() ->
|
||||
[ldap].
|
||||
|
||||
%% type -> type schema
|
||||
fields(ldap) ->
|
||||
emqx_ldap_authz:fields(config).
|
||||
|
||||
%% type -> type module
|
||||
is_enterprise_module(ldap) ->
|
||||
{ok, emqx_ldap_authz};
|
||||
is_enterprise_module(_) ->
|
||||
false.
|
||||
|
||||
%% api sources set
|
||||
authz_sources_types() ->
|
||||
[ldap].
|
||||
|
||||
%% atom-able name -> type
|
||||
type(<<"ldap">>) -> ldap;
|
||||
type(ldap) -> ldap;
|
||||
type(Unknown) -> throw({unknown_authz_source_type, Unknown}).
|
||||
|
||||
desc(ldap) ->
|
||||
emqx_ldap_authz:description();
|
||||
desc(_) ->
|
||||
undefined.
|
||||
|
||||
-else.
|
||||
|
||||
-dialyzer({nowarn_function, [fields/1, type/1, desc/1]}).
|
||||
|
||||
type_names() ->
|
||||
[].
|
||||
|
||||
fields(Any) ->
|
||||
error({invalid_field, Any}).
|
||||
|
||||
is_enterprise_module(_) ->
|
||||
false.
|
||||
|
||||
authz_sources_types() ->
|
||||
[].
|
||||
|
||||
%% should never happen if the input is type-checked by hocon schema
|
||||
type(Unknown) -> throw({unknown_authz_source_type, Unknown}).
|
||||
|
||||
desc(_) ->
|
||||
undefined.
|
||||
-endif.
|
|
@ -43,7 +43,8 @@
|
|||
-export([
|
||||
headers_no_content_type/1,
|
||||
headers/1,
|
||||
default_authz/0
|
||||
default_authz/0,
|
||||
authz_common_fields/1
|
||||
]).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
|
@ -64,7 +65,8 @@ type_names() ->
|
|||
redis_single,
|
||||
redis_sentinel,
|
||||
redis_cluster
|
||||
].
|
||||
] ++
|
||||
emqx_authz_enterprise:type_names().
|
||||
|
||||
namespace() -> authz.
|
||||
|
||||
|
@ -176,7 +178,9 @@ fields("node_error") ->
|
|||
[
|
||||
node_name(),
|
||||
{"error", ?HOCON(string(), #{desc => ?DESC("node_error")})}
|
||||
].
|
||||
];
|
||||
fields(MaybeEnterprise) ->
|
||||
emqx_authz_enterprise:fields(MaybeEnterprise).
|
||||
|
||||
common_field() ->
|
||||
[
|
||||
|
@ -220,8 +224,8 @@ desc(redis_sentinel) ->
|
|||
?DESC(redis_sentinel);
|
||||
desc(redis_cluster) ->
|
||||
?DESC(redis_cluster);
|
||||
desc(_) ->
|
||||
undefined.
|
||||
desc(MaybeEnterprise) ->
|
||||
emqx_authz_enterprise:desc(MaybeEnterprise).
|
||||
|
||||
authz_common_fields(Type) ->
|
||||
[
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
%% -*- mode: erlang -*-
|
||||
{application, emqx_bridge, [
|
||||
{description, "EMQX bridges"},
|
||||
{vsn, "0.1.25"},
|
||||
{vsn, "0.1.26"},
|
||||
{registered, [emqx_bridge_sup]},
|
||||
{mod, {emqx_bridge_app, []}},
|
||||
{applications, [
|
||||
|
|
|
@ -544,18 +544,20 @@ schema("/bridges_probe") ->
|
|||
case emqx_bridge_resource:create_dry_run(ConnType, maps:remove(<<"type">>, Params1)) of
|
||||
ok ->
|
||||
?NO_CONTENT;
|
||||
{error, #{kind := validation_error} = Reason} ->
|
||||
{error, #{kind := validation_error} = Reason0} ->
|
||||
Reason = redact(Reason0),
|
||||
?BAD_REQUEST('TEST_FAILED', map_to_json(Reason));
|
||||
{error, Reason0} when not is_tuple(Reason0); element(1, Reason0) =/= 'exit' ->
|
||||
Reason =
|
||||
Reason1 =
|
||||
case Reason0 of
|
||||
{unhealthy_target, Message} -> Message;
|
||||
_ -> Reason0
|
||||
end,
|
||||
Reason = redact(Reason1),
|
||||
?BAD_REQUEST('TEST_FAILED', Reason)
|
||||
end;
|
||||
BadRequest ->
|
||||
BadRequest
|
||||
redact(BadRequest)
|
||||
end.
|
||||
|
||||
maybe_deobfuscate_bridge_probe(#{<<"type">> := BridgeType, <<"name">> := BridgeName} = Params) ->
|
||||
|
@ -608,7 +610,7 @@ create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) ->
|
|||
{ok, _} ->
|
||||
lookup_from_all_nodes(BridgeType, BridgeName, HttpStatusCode);
|
||||
{error, Reason} when is_map(Reason) ->
|
||||
?BAD_REQUEST(map_to_json(emqx_utils:redact(Reason)))
|
||||
?BAD_REQUEST(map_to_json(redact(Reason)))
|
||||
end.
|
||||
|
||||
get_metrics_from_local_node(BridgeType, BridgeName) ->
|
||||
|
@ -990,7 +992,9 @@ call_operation(NodeOrAll, OperFunc, Args = [_Nodes, BridgeType, BridgeName]) ->
|
|||
{error, timeout} ->
|
||||
?BAD_REQUEST(<<"Request timeout">>);
|
||||
{error, {start_pool_failed, Name, Reason}} ->
|
||||
Msg = bin(io_lib:format("Failed to start ~p pool for reason ~p", [Name, Reason])),
|
||||
Msg = bin(
|
||||
io_lib:format("Failed to start ~p pool for reason ~p", [Name, redact(Reason)])
|
||||
),
|
||||
?BAD_REQUEST(Msg);
|
||||
{error, not_found} ->
|
||||
BridgeId = emqx_bridge_resource:bridge_id(BridgeType, BridgeName),
|
||||
|
@ -1007,7 +1011,7 @@ call_operation(NodeOrAll, OperFunc, Args = [_Nodes, BridgeType, BridgeName]) ->
|
|||
{error, {unhealthy_target, Message}} ->
|
||||
?BAD_REQUEST(Message);
|
||||
{error, Reason} when not is_tuple(Reason); element(1, Reason) =/= 'exit' ->
|
||||
?BAD_REQUEST(Reason)
|
||||
?BAD_REQUEST(redact(Reason))
|
||||
end.
|
||||
|
||||
maybe_try_restart(all, start_bridges_to_all_nodes, Args) ->
|
||||
|
@ -1071,7 +1075,15 @@ deobfuscate(NewConf, OldConf) ->
|
|||
NewConf
|
||||
).
|
||||
|
||||
map_to_json(M) ->
|
||||
emqx_utils_json:encode(
|
||||
emqx_utils_maps:jsonable_map(M, fun(K, V) -> {K, emqx_utils_maps:binary_string(V)} end)
|
||||
).
|
||||
map_to_json(M0) ->
|
||||
%% When dealing with Hocon validation errors, `value' might contain non-serializable
|
||||
%% values (e.g.: user_lookup_fun), so we try again without that key if serialization
|
||||
%% fails as a best effort.
|
||||
M1 = emqx_utils_maps:jsonable_map(M0, fun(K, V) -> {K, emqx_utils_maps:binary_string(V)} end),
|
||||
try
|
||||
emqx_utils_json:encode(M1)
|
||||
catch
|
||||
error:_ ->
|
||||
M2 = maps:without([value, <<"value">>], M1),
|
||||
emqx_utils_json:encode(M2)
|
||||
end.
|
||||
|
|
|
@ -261,21 +261,31 @@ recreate(Type, Name, Conf, Opts) ->
|
|||
create_dry_run(Type, Conf0) ->
|
||||
TmpName = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]),
|
||||
TmpPath = emqx_utils:safe_filename(TmpName),
|
||||
Conf = emqx_utils_maps:safe_atom_key_map(Conf0),
|
||||
case emqx_connector_ssl:convert_certs(TmpPath, Conf) of
|
||||
{error, Reason} ->
|
||||
{error, Reason};
|
||||
{ok, ConfNew} ->
|
||||
try
|
||||
%% Already typechecked, no need to catch errors
|
||||
TypeBin = bin(Type),
|
||||
TypeAtom = safe_atom(Type),
|
||||
Conf1 = maps:without([<<"name">>], Conf0),
|
||||
RawConf = #{<<"bridges">> => #{TypeBin => #{<<"temp_name">> => Conf1}}},
|
||||
try
|
||||
#{bridges := #{TypeAtom := #{temp_name := Conf}}} =
|
||||
hocon_tconf:check_plain(
|
||||
emqx_bridge_schema,
|
||||
RawConf,
|
||||
#{atom_key => true, required => false}
|
||||
),
|
||||
case emqx_connector_ssl:convert_certs(TmpPath, Conf) of
|
||||
{error, Reason} ->
|
||||
{error, Reason};
|
||||
{ok, ConfNew} ->
|
||||
ParseConf = parse_confs(bin(Type), TmpName, ConfNew),
|
||||
emqx_resource:create_dry_run_local(bridge_to_resource_type(Type), ParseConf)
|
||||
catch
|
||||
%% validation errors
|
||||
throw:Reason ->
|
||||
{error, Reason}
|
||||
after
|
||||
_ = file:del_dir_r(emqx_tls_lib:pem_dir(TmpPath))
|
||||
end
|
||||
end
|
||||
catch
|
||||
%% validation errors
|
||||
throw:Reason1 ->
|
||||
{error, Reason1}
|
||||
after
|
||||
_ = file:del_dir_r(emqx_tls_lib:pem_dir(TmpPath))
|
||||
end.
|
||||
|
||||
remove(BridgeId) ->
|
||||
|
@ -415,6 +425,9 @@ bin(Bin) when is_binary(Bin) -> Bin;
|
|||
bin(Str) when is_list(Str) -> list_to_binary(Str);
|
||||
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
|
||||
|
||||
safe_atom(Bin) when is_binary(Bin) -> binary_to_existing_atom(Bin, utf8);
|
||||
safe_atom(Atom) when is_atom(Atom) -> Atom.
|
||||
|
||||
parse_opts(Conf, Opts0) ->
|
||||
override_start_after_created(Conf, Opts0).
|
||||
|
||||
|
|
|
@ -212,6 +212,19 @@ probe_bridge_api(BridgeType, BridgeName, BridgeConfig) ->
|
|||
ct:pal("bridge probe result: ~p", [Res]),
|
||||
Res.
|
||||
|
||||
try_decode_error(Body0) ->
|
||||
case emqx_utils_json:safe_decode(Body0, [return_maps]) of
|
||||
{ok, #{<<"message">> := Msg0} = Body1} ->
|
||||
case emqx_utils_json:safe_decode(Msg0, [return_maps]) of
|
||||
{ok, Msg1} -> Body1#{<<"message">> := Msg1};
|
||||
{error, _} -> Body1
|
||||
end;
|
||||
{ok, Body1} ->
|
||||
Body1;
|
||||
{error, _} ->
|
||||
Body0
|
||||
end.
|
||||
|
||||
create_rule_and_action_http(BridgeType, RuleTopic, Config) ->
|
||||
create_rule_and_action_http(BridgeType, RuleTopic, Config, _Opts = #{}).
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{application, emqx_bridge_gcp_pubsub, [
|
||||
{description, "EMQX Enterprise GCP Pub/Sub Bridge"},
|
||||
{vsn, "0.1.5"},
|
||||
{vsn, "0.1.6"},
|
||||
{registered, []},
|
||||
{applications, [
|
||||
kernel,
|
||||
|
|
|
@ -363,9 +363,9 @@ service_account_json_validator(Map) ->
|
|||
{[], <<"service_account">>} ->
|
||||
ok;
|
||||
{[], Type} ->
|
||||
{error, {wrong_type, Type}};
|
||||
{error, #{wrong_type => Type}};
|
||||
{_, _} ->
|
||||
{error, {missing_keys, MissingKeys}}
|
||||
{error, #{missing_keys => MissingKeys}}
|
||||
end.
|
||||
|
||||
service_account_json_converter(Map) when is_map(Map) ->
|
||||
|
@ -382,7 +382,8 @@ service_account_json_converter(Val) ->
|
|||
|
||||
consumer_topic_mapping_validator(_TopicMapping = []) ->
|
||||
{error, "There must be at least one GCP PubSub-MQTT topic mapping"};
|
||||
consumer_topic_mapping_validator(TopicMapping = [_ | _]) ->
|
||||
consumer_topic_mapping_validator(TopicMapping0 = [_ | _]) ->
|
||||
TopicMapping = [emqx_utils_maps:binary_key_map(TM) || TM <- TopicMapping0],
|
||||
NumEntries = length(TopicMapping),
|
||||
PubSubTopics = [KT || #{<<"pubsub_topic">> := KT} <- TopicMapping],
|
||||
DistinctPubSubTopics = length(lists:usort(PubSubTopics)),
|
||||
|
|
|
@ -220,10 +220,10 @@ parse_jwt_config(ResourceId, #{
|
|||
service_account_json := ServiceAccountJSON
|
||||
}) ->
|
||||
#{
|
||||
project_id := ProjectId,
|
||||
private_key_id := KId,
|
||||
private_key := PrivateKeyPEM,
|
||||
client_email := ServiceAccountEmail
|
||||
<<"project_id">> := ProjectId,
|
||||
<<"private_key_id">> := KId,
|
||||
<<"private_key">> := PrivateKeyPEM,
|
||||
<<"client_email">> := ServiceAccountEmail
|
||||
} = ServiceAccountJSON,
|
||||
%% fixed for pubsub; trailing slash is important.
|
||||
Aud = <<"https://pubsub.googleapis.com/">>,
|
||||
|
|
|
@ -64,7 +64,9 @@ callback_mode() -> async_if_possible.
|
|||
query_mode(_Config) -> no_queries.
|
||||
|
||||
-spec on_start(resource_id(), config()) -> {ok, state()} | {error, term()}.
|
||||
on_start(InstanceId, Config) ->
|
||||
on_start(InstanceId, Config0) ->
|
||||
%% ensure it's a binary key map
|
||||
Config = maps:update_with(service_account_json, fun emqx_utils_maps:binary_key_map/1, Config0),
|
||||
case emqx_bridge_gcp_pubsub_client:start(InstanceId, Config) of
|
||||
{ok, Client} ->
|
||||
start_consumers(InstanceId, Client, Config);
|
||||
|
@ -125,7 +127,7 @@ start_consumers(InstanceId, Client, Config) ->
|
|||
consumer := ConsumerConfig0,
|
||||
hookpoint := Hookpoint,
|
||||
resource_opts := #{request_ttl := RequestTTL},
|
||||
service_account_json := #{project_id := ProjectId}
|
||||
service_account_json := #{<<"project_id">> := ProjectId}
|
||||
} = Config,
|
||||
ConsumerConfig1 = maps:update_with(topic_mapping, fun convert_topic_mapping/1, ConsumerConfig0),
|
||||
TopicMapping = maps:get(topic_mapping, ConsumerConfig1),
|
||||
|
|
|
@ -50,15 +50,16 @@ callback_mode() -> async_if_possible.
|
|||
query_mode(_Config) -> async.
|
||||
|
||||
-spec on_start(resource_id(), config()) -> {ok, state()} | {error, term()}.
|
||||
on_start(InstanceId, Config) ->
|
||||
on_start(InstanceId, Config0) ->
|
||||
?SLOG(info, #{
|
||||
msg => "starting_gcp_pubsub_bridge",
|
||||
config => Config
|
||||
config => Config0
|
||||
}),
|
||||
Config = maps:update_with(service_account_json, fun emqx_utils_maps:binary_key_map/1, Config0),
|
||||
#{
|
||||
payload_template := PayloadTemplate,
|
||||
pubsub_topic := PubSubTopic,
|
||||
service_account_json := #{project_id := ProjectId}
|
||||
service_account_json := #{<<"project_id">> := ProjectId}
|
||||
} = Config,
|
||||
case emqx_bridge_gcp_pubsub_client:start(InstanceId, Config) of
|
||||
{ok, Client} ->
|
||||
|
|
|
@ -275,14 +275,13 @@ ensure_topic(Config, Topic) ->
|
|||
|
||||
start_control_client() ->
|
||||
RawServiceAccount = emqx_bridge_gcp_pubsub_utils:generate_service_account_json(),
|
||||
ServiceAccount = emqx_utils_maps:unsafe_atom_key_map(RawServiceAccount),
|
||||
ConnectorConfig =
|
||||
#{
|
||||
connect_timeout => 5_000,
|
||||
max_retries => 0,
|
||||
pool_size => 1,
|
||||
resource_opts => #{request_ttl => 5_000},
|
||||
service_account_json => ServiceAccount
|
||||
service_account_json => RawServiceAccount
|
||||
},
|
||||
PoolName = <<"control_connector">>,
|
||||
{ok, Client} = emqx_bridge_gcp_pubsub_client:start(PoolName, ConnectorConfig),
|
||||
|
|
|
@ -196,16 +196,27 @@ create_bridge_http(Config, GCPPubSubConfigOverrides) ->
|
|||
Path = emqx_mgmt_api_test_util:api_path(["bridges"]),
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
ProbePath = emqx_mgmt_api_test_util:api_path(["bridges_probe"]),
|
||||
ProbeResult = emqx_mgmt_api_test_util:request_api(post, ProbePath, "", AuthHeader, Params),
|
||||
Opts = #{return_all => true},
|
||||
ProbeResult = emqx_mgmt_api_test_util:request_api(
|
||||
post, ProbePath, "", AuthHeader, Params, Opts
|
||||
),
|
||||
ct:pal("creating bridge (via http): ~p", [Params]),
|
||||
ct:pal("probe result: ~p", [ProbeResult]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params) of
|
||||
{ok, Res0} -> {ok, emqx_utils_json:decode(Res0, [return_maps])};
|
||||
Error -> Error
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {Status, Headhers, Res0}} ->
|
||||
{ok, {Status, Headhers, emqx_utils_json:decode(Res0, [return_maps])}};
|
||||
{error, {Status, Headers, Body0}} ->
|
||||
{error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
ct:pal("bridge creation result: ~p", [Res]),
|
||||
?assertEqual(element(1, ProbeResult), element(1, Res)),
|
||||
case ProbeResult of
|
||||
{error, {{_, 500, _}, _, _}} -> error({bad_probe_result, ProbeResult});
|
||||
_ -> ok
|
||||
end,
|
||||
Res.
|
||||
|
||||
create_rule_and_action_http(Config) ->
|
||||
|
@ -821,7 +832,7 @@ t_not_of_service_account_type(Config) ->
|
|||
?assertMatch(
|
||||
{error, #{
|
||||
kind := validation_error,
|
||||
reason := {wrong_type, <<"not a service account">>},
|
||||
reason := #{wrong_type := <<"not a service account">>},
|
||||
%% should be censored as it contains secrets
|
||||
value := <<"******">>
|
||||
}},
|
||||
|
@ -832,6 +843,23 @@ t_not_of_service_account_type(Config) ->
|
|||
}
|
||||
)
|
||||
),
|
||||
?assertMatch(
|
||||
{error,
|
||||
{{_, 400, _}, _, #{
|
||||
<<"message">> := #{
|
||||
<<"kind">> := <<"validation_error">>,
|
||||
<<"reason">> := #{<<"wrong_type">> := <<"not a service account">>},
|
||||
%% should be censored as it contains secrets
|
||||
<<"value">> := <<"******">>
|
||||
}
|
||||
}}},
|
||||
create_bridge_http(
|
||||
Config,
|
||||
#{
|
||||
<<"service_account_json">> => #{<<"type">> => <<"not a service account">>}
|
||||
}
|
||||
)
|
||||
),
|
||||
ok.
|
||||
|
||||
t_json_missing_fields(Config) ->
|
||||
|
@ -840,13 +868,15 @@ t_json_missing_fields(Config) ->
|
|||
{error, #{
|
||||
kind := validation_error,
|
||||
reason :=
|
||||
{missing_keys, [
|
||||
<<"client_email">>,
|
||||
<<"private_key">>,
|
||||
<<"private_key_id">>,
|
||||
<<"project_id">>,
|
||||
<<"type">>
|
||||
]},
|
||||
#{
|
||||
missing_keys := [
|
||||
<<"client_email">>,
|
||||
<<"private_key">>,
|
||||
<<"private_key_id">>,
|
||||
<<"project_id">>,
|
||||
<<"type">>
|
||||
]
|
||||
},
|
||||
%% should be censored as it contains secrets
|
||||
value := <<"******">>
|
||||
}},
|
||||
|
@ -855,6 +885,30 @@ t_json_missing_fields(Config) ->
|
|||
| Config
|
||||
])
|
||||
),
|
||||
?assertMatch(
|
||||
{error,
|
||||
{{_, 400, _}, _, #{
|
||||
<<"message">> := #{
|
||||
<<"kind">> := <<"validation_error">>,
|
||||
<<"reason">> :=
|
||||
#{
|
||||
<<"missing_keys">> := [
|
||||
<<"client_email">>,
|
||||
<<"private_key">>,
|
||||
<<"private_key_id">>,
|
||||
<<"project_id">>,
|
||||
<<"type">>
|
||||
]
|
||||
},
|
||||
%% should be censored as it contains secrets
|
||||
<<"value">> := <<"******">>
|
||||
}
|
||||
}}},
|
||||
create_bridge_http([
|
||||
{gcp_pubsub_config, GCPPubSubConfig0#{<<"service_account_json">> := #{}}}
|
||||
| Config
|
||||
])
|
||||
),
|
||||
ok.
|
||||
|
||||
t_invalid_private_key(Config) ->
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{application, emqx_bridge_greptimedb, [
|
||||
{description, "EMQX GreptimeDB Bridge"},
|
||||
{vsn, "0.1.0"},
|
||||
{vsn, "0.1.1"},
|
||||
{registered, []},
|
||||
{applications, [
|
||||
kernel,
|
||||
|
|
|
@ -53,6 +53,8 @@
|
|||
|
||||
-define(AUTO_RECONNECT_S, 1).
|
||||
|
||||
-define(CONNECT_TIMEOUT, 5_000).
|
||||
|
||||
%% -------------------------------------------------------------------------------------------------
|
||||
%% resource callback
|
||||
callback_mode() -> always_sync.
|
||||
|
@ -251,6 +253,12 @@ do_start_client(
|
|||
{error, Reason}
|
||||
end.
|
||||
|
||||
grpc_config() ->
|
||||
#{
|
||||
sync_start => true,
|
||||
connect_timeout => ?CONNECT_TIMEOUT
|
||||
}.
|
||||
|
||||
client_config(
|
||||
InstId,
|
||||
Config = #{
|
||||
|
@ -264,6 +272,7 @@ client_config(
|
|||
{pool, InstId},
|
||||
{pool_type, random},
|
||||
{auto_reconnect, ?AUTO_RECONNECT_S},
|
||||
{gprc_options, grpc_config()},
|
||||
{timeunit, maps:get(precision, Config, ms)}
|
||||
] ++ protocol_config(Config).
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{application, emqx_bridge_http, [
|
||||
{description, "EMQX HTTP Bridge and Connector Application"},
|
||||
{vsn, "0.1.1"},
|
||||
{vsn, "0.1.2"},
|
||||
{registered, []},
|
||||
{applications, [kernel, stdlib, emqx_connector, emqx_resource, ehttpc]},
|
||||
{env, []},
|
||||
|
|
|
@ -155,7 +155,16 @@ desc("request") ->
|
|||
desc(_) ->
|
||||
undefined.
|
||||
|
||||
validate_method(M) when M =:= <<"post">>; M =:= <<"put">>; M =:= <<"get">>; M =:= <<"delete">> ->
|
||||
validate_method(M) when
|
||||
M =:= <<"post">>;
|
||||
M =:= <<"put">>;
|
||||
M =:= <<"get">>;
|
||||
M =:= <<"delete">>;
|
||||
M =:= post;
|
||||
M =:= put;
|
||||
M =:= get;
|
||||
M =:= delete
|
||||
->
|
||||
ok;
|
||||
validate_method(M) ->
|
||||
case string:find(M, "${") of
|
||||
|
|
|
@ -82,6 +82,14 @@ init_per_testcase(t_rule_action_expired, Config) ->
|
|||
{bridge_name, ?BRIDGE_NAME}
|
||||
| Config
|
||||
];
|
||||
init_per_testcase(t_bridge_probes_header_atoms, Config) ->
|
||||
HTTPPath = <<"/path">>,
|
||||
ServerSSLOpts = false,
|
||||
{ok, {HTTPPort, _Pid}} = emqx_bridge_http_connector_test_server:start_link(
|
||||
_Port = random, HTTPPath, ServerSSLOpts
|
||||
),
|
||||
ok = emqx_bridge_http_connector_test_server:set_handler(success_http_handler()),
|
||||
[{http_server, #{port => HTTPPort, path => HTTPPath}} | Config];
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
Server = start_http_server(#{response_delay_ms => 0}),
|
||||
[{http_server, Server} | Config].
|
||||
|
@ -89,7 +97,8 @@ init_per_testcase(_TestCase, Config) ->
|
|||
end_per_testcase(TestCase, _Config) when
|
||||
TestCase =:= t_path_not_found;
|
||||
TestCase =:= t_too_many_requests;
|
||||
TestCase =:= t_rule_action_expired
|
||||
TestCase =:= t_rule_action_expired;
|
||||
TestCase =:= t_bridge_probes_header_atoms
|
||||
->
|
||||
ok = emqx_bridge_http_connector_test_server:stop(),
|
||||
persistent_term:erase({?MODULE, times_called}),
|
||||
|
@ -292,6 +301,22 @@ make_bridge(Config) ->
|
|||
),
|
||||
emqx_bridge_resource:bridge_id(Type, Name).
|
||||
|
||||
success_http_handler() ->
|
||||
TestPid = self(),
|
||||
fun(Req0, State) ->
|
||||
{ok, Body, Req} = cowboy_req:read_body(Req0),
|
||||
Headers = cowboy_req:headers(Req),
|
||||
ct:pal("http request received: ~p", [#{body => Body, headers => Headers}]),
|
||||
TestPid ! {http, Headers, Body},
|
||||
Rep = cowboy_req:reply(
|
||||
200,
|
||||
#{<<"content-type">> => <<"text/plain">>},
|
||||
<<"hello">>,
|
||||
Req
|
||||
),
|
||||
{ok, Rep, State}
|
||||
end.
|
||||
|
||||
not_found_http_handler() ->
|
||||
TestPid = self(),
|
||||
fun(Req0, State) ->
|
||||
|
@ -613,6 +638,55 @@ t_rule_action_expired(Config) ->
|
|||
),
|
||||
ok.
|
||||
|
||||
t_bridge_probes_header_atoms(Config) ->
|
||||
#{port := Port, path := Path} = ?config(http_server, Config),
|
||||
?check_trace(
|
||||
begin
|
||||
LocalTopic = <<"t/local/topic">>,
|
||||
BridgeConfig0 = bridge_async_config(#{
|
||||
type => ?BRIDGE_TYPE,
|
||||
name => ?BRIDGE_NAME,
|
||||
port => Port,
|
||||
path => Path,
|
||||
resume_interval => "100ms",
|
||||
connect_timeout => "1s",
|
||||
request_timeout => "100ms",
|
||||
resource_request_ttl => "100ms",
|
||||
local_topic => LocalTopic
|
||||
}),
|
||||
BridgeConfig = BridgeConfig0#{
|
||||
<<"headers">> => #{
|
||||
<<"some-non-existent-atom">> => <<"x">>
|
||||
}
|
||||
},
|
||||
?assertMatch(
|
||||
{ok, {{_, 204, _}, _Headers, _Body}},
|
||||
probe_bridge_api(BridgeConfig)
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, {{_, 201, _}, _Headers, _Body}},
|
||||
emqx_bridge_testlib:create_bridge_api(
|
||||
?BRIDGE_TYPE,
|
||||
?BRIDGE_NAME,
|
||||
BridgeConfig
|
||||
)
|
||||
),
|
||||
Msg = emqx_message:make(LocalTopic, <<"hi">>),
|
||||
emqx:publish(Msg),
|
||||
receive
|
||||
{http, Headers, _Body} ->
|
||||
?assertMatch(#{<<"some-non-existent-atom">> := <<"x">>}, Headers),
|
||||
ok
|
||||
after 5_000 ->
|
||||
ct:pal("mailbox: ~p", [process_info(self(), messages)]),
|
||||
ct:fail("request not made")
|
||||
end,
|
||||
ok
|
||||
end,
|
||||
[]
|
||||
),
|
||||
ok.
|
||||
|
||||
%% helpers
|
||||
do_t_async_retries(TestContext, Error, Fn) ->
|
||||
#{error_attempts := ErrorAttempts} = TestContext,
|
||||
|
@ -659,3 +733,17 @@ remove_message_id(MessageIDs, #{body := IDBin}) ->
|
|||
ID = erlang:binary_to_integer(IDBin),
|
||||
%% It is acceptable to get the same message more than once
|
||||
maps:without([ID], MessageIDs).
|
||||
|
||||
probe_bridge_api(BridgeConfig) ->
|
||||
Params = BridgeConfig#{<<"type">> => ?BRIDGE_TYPE, <<"name">> => ?BRIDGE_NAME},
|
||||
Path = emqx_mgmt_api_test_util:api_path(["bridges_probe"]),
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
Opts = #{return_all => true},
|
||||
ct:pal("probing bridge (via http): ~p", [Params]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {{_, 204, _}, _Headers, _Body0} = Res0} -> {ok, Res0};
|
||||
Error -> Error
|
||||
end,
|
||||
ct:pal("bridge probe result: ~p", [Res]),
|
||||
Res.
|
||||
|
|
|
@ -91,3 +91,121 @@ is_unwrapped_headers(Headers) ->
|
|||
is_unwrapped_header({_, V}) when is_function(V) -> false;
|
||||
is_unwrapped_header({_, [{str, _V}]}) -> throw(unexpected_tmpl_token);
|
||||
is_unwrapped_header(_) -> true.
|
||||
|
||||
method_validator_test() ->
|
||||
Conf0 = parse(webhook_config_hocon()),
|
||||
?assertMatch(
|
||||
#{<<"method">> := _},
|
||||
emqx_utils_maps:deep_get([<<"bridges">>, <<"webhook">>, <<"a">>], Conf0)
|
||||
),
|
||||
lists:foreach(
|
||||
fun(Method) ->
|
||||
Conf1 = emqx_utils_maps:deep_put(
|
||||
[<<"bridges">>, <<"webhook">>, <<"a">>, <<"method">>],
|
||||
Conf0,
|
||||
Method
|
||||
),
|
||||
?assertMatch(
|
||||
#{},
|
||||
check(Conf1),
|
||||
#{method => Method}
|
||||
),
|
||||
?assertMatch(
|
||||
#{},
|
||||
check_atom_key(Conf1),
|
||||
#{method => Method}
|
||||
),
|
||||
ok
|
||||
end,
|
||||
[<<"post">>, <<"put">>, <<"get">>, <<"delete">>]
|
||||
),
|
||||
lists:foreach(
|
||||
fun(Method) ->
|
||||
Conf1 = emqx_utils_maps:deep_put(
|
||||
[<<"bridges">>, <<"webhook">>, <<"a">>, <<"method">>],
|
||||
Conf0,
|
||||
Method
|
||||
),
|
||||
?assertThrow(
|
||||
{_, [
|
||||
#{
|
||||
kind := validation_error,
|
||||
reason := not_a_enum_symbol
|
||||
}
|
||||
]},
|
||||
check(Conf1),
|
||||
#{method => Method}
|
||||
),
|
||||
?assertThrow(
|
||||
{_, [
|
||||
#{
|
||||
kind := validation_error,
|
||||
reason := not_a_enum_symbol
|
||||
}
|
||||
]},
|
||||
check_atom_key(Conf1),
|
||||
#{method => Method}
|
||||
),
|
||||
ok
|
||||
end,
|
||||
[<<"x">>, <<"patch">>, <<"options">>]
|
||||
),
|
||||
ok.
|
||||
|
||||
%%===========================================================================
|
||||
%% Helper functions
|
||||
%%===========================================================================
|
||||
|
||||
parse(Hocon) ->
|
||||
{ok, Conf} = hocon:binary(Hocon),
|
||||
Conf.
|
||||
|
||||
%% what bridge creation does
|
||||
check(Conf) when is_map(Conf) ->
|
||||
hocon_tconf:check_plain(emqx_bridge_schema, Conf).
|
||||
|
||||
%% what bridge probe does
|
||||
check_atom_key(Conf) when is_map(Conf) ->
|
||||
hocon_tconf:check_plain(emqx_bridge_schema, Conf, #{atom_key => true, required => false}).
|
||||
|
||||
%%===========================================================================
|
||||
%% Data section
|
||||
%%===========================================================================
|
||||
|
||||
%% erlfmt-ignore
|
||||
webhook_config_hocon() ->
|
||||
"""
|
||||
bridges.webhook.a {
|
||||
body = \"${.}\"
|
||||
connect_timeout = 15s
|
||||
enable = false
|
||||
enable_pipelining = 100
|
||||
headers {content-type = \"application/json\", jjjjjjjjjjjjjjjjjjj = jjjjjjj}
|
||||
max_retries = 2
|
||||
method = post
|
||||
pool_size = 8
|
||||
pool_type = random
|
||||
resource_opts {
|
||||
health_check_interval = 15s
|
||||
inflight_window = 100
|
||||
max_buffer_bytes = 1GB
|
||||
query_mode = async
|
||||
request_ttl = 45s
|
||||
start_after_created = true
|
||||
start_timeout = 5s
|
||||
worker_pool_size = 4
|
||||
}
|
||||
ssl {
|
||||
ciphers = []
|
||||
depth = 10
|
||||
enable = false
|
||||
hibernate_after = 5s
|
||||
log_level = notice
|
||||
reuse_sessions = true
|
||||
secure_renegotiate = true
|
||||
verify = verify_peer
|
||||
versions = [tlsv1.3, tlsv1.2]
|
||||
}
|
||||
url = \"http://some.host:4000/api/echo\"
|
||||
}
|
||||
""".
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
%% -*- mode: erlang -*-
|
||||
{application, emqx_bridge_kafka, [
|
||||
{description, "EMQX Enterprise Kafka Bridge"},
|
||||
{vsn, "0.1.6"},
|
||||
{vsn, "0.1.7"},
|
||||
{registered, [emqx_bridge_kafka_consumer_sup]},
|
||||
{applications, [
|
||||
kernel,
|
||||
|
|
|
@ -125,7 +125,7 @@ values(consumer) ->
|
|||
topic_mapping => [
|
||||
#{
|
||||
kafka_topic => <<"kafka-topic-1">>,
|
||||
mqtt_topic => <<"mqtt/topic/1">>,
|
||||
mqtt_topic => <<"mqtt/topic/${.offset}">>,
|
||||
qos => 1,
|
||||
payload_template => <<"${.}">>
|
||||
},
|
||||
|
@ -528,7 +528,8 @@ kafka_producer_converter(Config, _HoconOpts) ->
|
|||
|
||||
consumer_topic_mapping_validator(_TopicMapping = []) ->
|
||||
{error, "There must be at least one Kafka-MQTT topic mapping"};
|
||||
consumer_topic_mapping_validator(TopicMapping = [_ | _]) ->
|
||||
consumer_topic_mapping_validator(TopicMapping0 = [_ | _]) ->
|
||||
TopicMapping = [emqx_utils_maps:binary_key_map(TM) || TM <- TopicMapping0],
|
||||
NumEntries = length(TopicMapping),
|
||||
KafkaTopics = [KT || #{<<"kafka_topic">> := KT} <- TopicMapping],
|
||||
DistinctKafkaTopics = length(lists:usort(KafkaTopics)),
|
||||
|
@ -539,6 +540,13 @@ consumer_topic_mapping_validator(TopicMapping = [_ | _]) ->
|
|||
{error, "Kafka topics must not be repeated in a bridge"}
|
||||
end.
|
||||
|
||||
producer_strategy_key_validator(
|
||||
#{
|
||||
partition_strategy := _,
|
||||
message := #{key := _}
|
||||
} = Conf
|
||||
) ->
|
||||
producer_strategy_key_validator(emqx_utils_maps:binary_key_map(Conf));
|
||||
producer_strategy_key_validator(#{
|
||||
<<"partition_strategy">> := key_dispatch,
|
||||
<<"message">> := #{<<"key">> := ""}
|
||||
|
|
|
@ -69,7 +69,7 @@
|
|||
topic_mapping := #{
|
||||
kafka_topic() := #{
|
||||
payload_template := emqx_placeholder:tmpl_token(),
|
||||
mqtt_topic => emqx_types:topic(),
|
||||
mqtt_topic_template => emqx_placeholder:tmpl_token(),
|
||||
qos => emqx_types:qos()
|
||||
}
|
||||
},
|
||||
|
@ -83,7 +83,7 @@
|
|||
topic_mapping := #{
|
||||
kafka_topic() := #{
|
||||
payload_template := emqx_placeholder:tmpl_token(),
|
||||
mqtt_topic => emqx_types:topic(),
|
||||
mqtt_topic_template => emqx_placeholder:tmpl_token(),
|
||||
qos => emqx_types:qos()
|
||||
}
|
||||
},
|
||||
|
@ -235,7 +235,7 @@ do_handle_message(Message, State) ->
|
|||
value_encoding_mode := ValueEncodingMode
|
||||
} = State,
|
||||
#{
|
||||
mqtt_topic := MQTTTopic,
|
||||
mqtt_topic_template := MQTTTopicTemplate,
|
||||
qos := MQTTQoS,
|
||||
payload_template := PayloadTemplate
|
||||
} = maps:get(KafkaTopic, TopicMapping),
|
||||
|
@ -249,6 +249,7 @@ do_handle_message(Message, State) ->
|
|||
value => encode(Message#kafka_message.value, ValueEncodingMode)
|
||||
},
|
||||
Payload = render(FullMessage, PayloadTemplate),
|
||||
MQTTTopic = render(FullMessage, MQTTTopicTemplate),
|
||||
MQTTMessage = emqx_message:make(ResourceId, MQTTQoS, MQTTTopic, Payload),
|
||||
_ = emqx:publish(MQTTMessage),
|
||||
emqx:run_hook(Hookpoint, [FullMessage]),
|
||||
|
@ -533,15 +534,16 @@ convert_topic_mapping(TopicMappingList) ->
|
|||
fun(Fields, Acc) ->
|
||||
#{
|
||||
kafka_topic := KafkaTopic,
|
||||
mqtt_topic := MQTTTopic,
|
||||
mqtt_topic := MQTTTopicTemplate0,
|
||||
qos := QoS,
|
||||
payload_template := PayloadTemplate0
|
||||
} = Fields,
|
||||
PayloadTemplate = emqx_placeholder:preproc_tmpl(PayloadTemplate0),
|
||||
MQTTTopicTemplate = emqx_placeholder:preproc_tmpl(MQTTTopicTemplate0),
|
||||
Acc#{
|
||||
KafkaTopic => #{
|
||||
payload_template => PayloadTemplate,
|
||||
mqtt_topic => MQTTTopic,
|
||||
mqtt_topic_template => MQTTTopicTemplate,
|
||||
qos => QoS
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,6 +60,7 @@ only_once_tests() ->
|
|||
t_node_joins_existing_cluster,
|
||||
t_cluster_node_down,
|
||||
t_multiple_topic_mappings,
|
||||
t_dynamic_mqtt_topic,
|
||||
t_resource_manager_crash_after_subscriber_started,
|
||||
t_resource_manager_crash_before_subscriber_started
|
||||
].
|
||||
|
@ -329,6 +330,23 @@ init_per_testcase(t_multiple_topic_mappings = TestCase, Config0) ->
|
|||
],
|
||||
Config = [{topic_mapping, TopicMapping} | Config0],
|
||||
common_init_per_testcase(TestCase, Config);
|
||||
init_per_testcase(t_dynamic_mqtt_topic = TestCase, Config0) ->
|
||||
KafkaTopic =
|
||||
<<
|
||||
(atom_to_binary(TestCase))/binary,
|
||||
(integer_to_binary(erlang:unique_integer()))/binary
|
||||
>>,
|
||||
TopicMapping =
|
||||
[
|
||||
#{
|
||||
kafka_topic => KafkaTopic,
|
||||
mqtt_topic => <<"${.topic}/${.value}/${.headers.hkey}">>,
|
||||
qos => 1,
|
||||
payload_template => <<"${.}">>
|
||||
}
|
||||
],
|
||||
Config = [{kafka_topic, KafkaTopic}, {topic_mapping, TopicMapping} | Config0],
|
||||
common_init_per_testcase(TestCase, Config);
|
||||
init_per_testcase(TestCase, Config) ->
|
||||
common_init_per_testcase(TestCase, Config).
|
||||
|
||||
|
@ -336,11 +354,12 @@ common_init_per_testcase(TestCase, Config0) ->
|
|||
ct:timetrap(timer:seconds(60)),
|
||||
delete_all_bridges(),
|
||||
emqx_config:delete_override_conf_files(),
|
||||
KafkaTopic =
|
||||
KafkaTopic0 =
|
||||
<<
|
||||
(atom_to_binary(TestCase))/binary,
|
||||
(integer_to_binary(erlang:unique_integer()))/binary
|
||||
>>,
|
||||
KafkaTopic = proplists:get_value(kafka_topic, Config0, KafkaTopic0),
|
||||
KafkaType = ?config(kafka_type, Config0),
|
||||
UniqueNum = integer_to_binary(erlang:unique_integer()),
|
||||
MQTTTopic = proplists:get_value(mqtt_topic, Config0, <<"mqtt/topic/", UniqueNum/binary>>),
|
||||
|
@ -1674,6 +1693,78 @@ t_bridge_rule_action_source(Config) ->
|
|||
),
|
||||
ok.
|
||||
|
||||
t_dynamic_mqtt_topic(Config) ->
|
||||
KafkaTopic = ?config(kafka_topic, Config),
|
||||
NPartitions = ?config(num_partitions, Config),
|
||||
ResourceId = resource_id(Config),
|
||||
Payload = emqx_guid:to_hexstr(emqx_guid:gen()),
|
||||
MQTTTopic = emqx_topic:join([KafkaTopic, '#']),
|
||||
?check_trace(
|
||||
begin
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
create_bridge(Config)
|
||||
),
|
||||
wait_until_subscribers_are_ready(NPartitions, 40_000),
|
||||
{ok, C} = emqtt:start_link(),
|
||||
on_exit(fun() -> emqtt:stop(C) end),
|
||||
{ok, _} = emqtt:connect(C),
|
||||
{ok, _, [0]} = emqtt:subscribe(C, MQTTTopic),
|
||||
ct:pal("subscribed to ~p", [MQTTTopic]),
|
||||
|
||||
{ok, SRef0} = snabbkaffe:subscribe(
|
||||
?match_event(#{
|
||||
?snk_kind := kafka_consumer_handle_message, ?snk_span := {complete, _}
|
||||
}),
|
||||
_NumMsgs = 3,
|
||||
20_000
|
||||
),
|
||||
{_Partition, _OffsetReply} =
|
||||
publish(Config, [
|
||||
%% this will have the last segment defined
|
||||
#{
|
||||
key => <<"mykey">>,
|
||||
value => Payload,
|
||||
headers => [{<<"hkey">>, <<"hvalue">>}]
|
||||
},
|
||||
%% this will not
|
||||
#{
|
||||
key => <<"mykey">>,
|
||||
value => Payload
|
||||
},
|
||||
%% will inject an invalid topic segment
|
||||
#{
|
||||
key => <<"mykey">>,
|
||||
value => <<"+">>
|
||||
}
|
||||
]),
|
||||
{ok, _} = snabbkaffe:receive_events(SRef0),
|
||||
ok
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertMatch([_Enter, _Complete | _], ?of_kind(kafka_consumer_handle_message, Trace)),
|
||||
%% the message with invalid topic will fail to be published
|
||||
Published = receive_published(#{n => 2}),
|
||||
ExpectedMQTTTopic0 = emqx_topic:join([KafkaTopic, Payload, <<"hvalue">>]),
|
||||
ExpectedMQTTTopic1 = emqx_topic:join([KafkaTopic, Payload, <<>>]),
|
||||
?assertMatch(
|
||||
[
|
||||
#{
|
||||
topic := ExpectedMQTTTopic0
|
||||
},
|
||||
#{
|
||||
topic := ExpectedMQTTTopic1
|
||||
}
|
||||
],
|
||||
Published
|
||||
),
|
||||
?assertEqual(3, emqx_resource_metrics:received_get(ResourceId)),
|
||||
?assertError({timeout, _}, receive_published(#{timeout => 500})),
|
||||
ok
|
||||
end
|
||||
),
|
||||
ok.
|
||||
|
||||
%% checks that an existing cluster can be configured with a kafka
|
||||
%% consumer bridge and that the consumers will distribute over the two
|
||||
%% nodes.
|
||||
|
|
|
@ -166,11 +166,24 @@ message_key_dispatch_validations_test() ->
|
|||
]},
|
||||
check(Conf)
|
||||
),
|
||||
%% ensure atoms exist
|
||||
_ = [myproducer],
|
||||
?assertThrow(
|
||||
{_, [
|
||||
#{
|
||||
path := "bridges.kafka.myproducer.kafka",
|
||||
reason := "Message key cannot be empty when `key_dispatch` strategy is used"
|
||||
}
|
||||
]},
|
||||
check_atom_key(Conf)
|
||||
),
|
||||
ok.
|
||||
|
||||
tcp_keepalive_validation_test_() ->
|
||||
ProducerConf = parse(kafka_producer_new_hocon()),
|
||||
ConsumerConf = parse(kafka_consumer_hocon()),
|
||||
%% ensure atoms exist
|
||||
_ = [my_producer, my_consumer],
|
||||
test_keepalive_validation([<<"kafka">>, <<"myproducer">>], ProducerConf) ++
|
||||
test_keepalive_validation([<<"kafka_consumer">>, <<"my_consumer">>], ConsumerConf).
|
||||
|
||||
|
@ -184,7 +197,9 @@ test_keepalive_validation(Name, Conf) ->
|
|||
InvalidConf2 = emqx_utils_maps:deep_force_put(Path, Conf, <<"5,6,1000">>),
|
||||
InvalidConfs = [InvalidConf, InvalidConf1, InvalidConf2],
|
||||
[?_assertMatch(#{<<"bridges">> := _}, check(C)) || C <- ValidConfs] ++
|
||||
[?_assertThrow(_, check(C)) || C <- InvalidConfs].
|
||||
[?_assertMatch(#{bridges := _}, check_atom_key(C)) || C <- ValidConfs] ++
|
||||
[?_assertThrow(_, check(C)) || C <- InvalidConfs] ++
|
||||
[?_assertThrow(_, check_atom_key(C)) || C <- InvalidConfs].
|
||||
|
||||
%%===========================================================================
|
||||
%% Helper functions
|
||||
|
@ -194,9 +209,14 @@ parse(Hocon) ->
|
|||
{ok, Conf} = hocon:binary(Hocon),
|
||||
Conf.
|
||||
|
||||
%% what bridge creation does
|
||||
check(Conf) when is_map(Conf) ->
|
||||
hocon_tconf:check_plain(emqx_bridge_schema, Conf).
|
||||
|
||||
%% what bridge probe does
|
||||
check_atom_key(Conf) when is_map(Conf) ->
|
||||
hocon_tconf:check_plain(emqx_bridge_schema, Conf, #{atom_key => true, required => false}).
|
||||
|
||||
%%===========================================================================
|
||||
%% Data section
|
||||
%%===========================================================================
|
||||
|
|
|
@ -6,6 +6,9 @@ It implements the data bridge APIs for interacting with an Oracle Database Bridg
|
|||
|
||||
# Documentation
|
||||
|
||||
- Refer to [Ingest data into Oracle DB](https://docs.emqx.com/en/enterprise/v5.0/data-integration/data-bridge-oracle.html)
|
||||
for how to use EMQX dashboard to ingest IoT data into Oracle Database.
|
||||
|
||||
- Refer to [EMQX Rules](https://docs.emqx.com/en/enterprise/v5.0/data-integration/rules.html)
|
||||
for the EMQX rules engine introduction.
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{application, emqx_bridge_oracle, [
|
||||
{description, "EMQX Enterprise Oracle Database Bridge"},
|
||||
{vsn, "0.1.3"},
|
||||
{vsn, "0.1.4"},
|
||||
{registered, []},
|
||||
{applications, [
|
||||
kernel,
|
||||
|
|
|
@ -108,6 +108,8 @@ type_field(Type) ->
|
|||
name_field() ->
|
||||
{name, hoconsc:mk(binary(), #{required => true, desc => ?DESC("desc_name")})}.
|
||||
|
||||
config_validator(#{server := _} = Config) ->
|
||||
config_validator(emqx_utils_maps:binary_key_map(Config));
|
||||
config_validator(#{<<"server">> := Server} = Config) when
|
||||
not is_map(Server) andalso
|
||||
not is_map_key(<<"sid">>, Config) andalso
|
||||
|
|
|
@ -305,6 +305,8 @@ create_bridge_api(Config, Overrides) ->
|
|||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {Status, Headers, Body0}} ->
|
||||
{ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}};
|
||||
{error, {Status, Headers, Body0}} ->
|
||||
{error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
|
@ -348,8 +350,12 @@ probe_bridge_api(Config, Overrides) ->
|
|||
ct:pal("probing bridge (via http): ~p", [Params]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {{_, 204, _}, _Headers, _Body0} = Res0} -> {ok, Res0};
|
||||
Error -> Error
|
||||
{ok, {{_, 204, _}, _Headers, _Body0} = Res0} ->
|
||||
{ok, Res0};
|
||||
{error, {Status, Headers, Body0}} ->
|
||||
{error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
ct:pal("bridge probe result: ~p", [Res]),
|
||||
Res.
|
||||
|
@ -630,6 +636,30 @@ t_no_sid_nor_service_name(Config0) ->
|
|||
{error, #{kind := validation_error, reason := "neither SID nor Service Name was set"}},
|
||||
create_bridge(Config)
|
||||
),
|
||||
?assertMatch(
|
||||
{error,
|
||||
{{_, 400, _}, _, #{
|
||||
<<"message">> := #{
|
||||
<<"kind">> := <<"validation_error">>,
|
||||
<<"reason">> := <<"neither SID nor Service Name was set">>,
|
||||
%% should be censored as it contains secrets
|
||||
<<"value">> := #{<<"password">> := <<"******">>}
|
||||
}
|
||||
}}},
|
||||
create_bridge_api(Config)
|
||||
),
|
||||
?assertMatch(
|
||||
{error,
|
||||
{{_, 400, _}, _, #{
|
||||
<<"message">> := #{
|
||||
<<"kind">> := <<"validation_error">>,
|
||||
<<"reason">> := <<"neither SID nor Service Name was set">>,
|
||||
%% should be censored as it contains secrets
|
||||
<<"value">> := #{<<"password">> := <<"******">>}
|
||||
}
|
||||
}}},
|
||||
probe_bridge_api(Config)
|
||||
),
|
||||
ok.
|
||||
|
||||
t_missing_table(Config) ->
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
-include_lib("eunit/include/eunit.hrl").
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
-include("emqx_resource_errors.hrl").
|
||||
|
||||
% SQL definitions
|
||||
-define(SQL_BRIDGE,
|
||||
|
@ -690,10 +691,14 @@ t_table_removed(Config) ->
|
|||
connect_and_drop_table(Config),
|
||||
Val = integer_to_binary(erlang:unique_integer()),
|
||||
SentData = #{payload => Val, timestamp => 1668602148000},
|
||||
?assertMatch(
|
||||
{error, {unrecoverable_error, {error, error, <<"42P01">>, undefined_table, _, _}}},
|
||||
query_resource_sync(Config, {send_message, SentData, []})
|
||||
),
|
||||
case query_resource_sync(Config, {send_message, SentData, []}) of
|
||||
{error, {unrecoverable_error, {error, error, <<"42P01">>, undefined_table, _, _}}} ->
|
||||
ok;
|
||||
?RESOURCE_ERROR_M(not_connected, _) ->
|
||||
ok;
|
||||
Res ->
|
||||
ct:fail("unexpected result: ~p", [Res])
|
||||
end,
|
||||
ok
|
||||
end,
|
||||
[]
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{application, emqx_bridge_pulsar, [
|
||||
{description, "EMQX Pulsar Bridge"},
|
||||
{vsn, "0.1.5"},
|
||||
{vsn, "0.1.6"},
|
||||
{registered, []},
|
||||
{applications, [
|
||||
kernel,
|
||||
|
|
|
@ -220,6 +220,13 @@ conn_bridge_examples(_Method) ->
|
|||
}
|
||||
].
|
||||
|
||||
producer_strategy_key_validator(
|
||||
#{
|
||||
strategy := _,
|
||||
message := #{key := _}
|
||||
} = Conf
|
||||
) ->
|
||||
producer_strategy_key_validator(emqx_utils_maps:binary_key_map(Conf));
|
||||
producer_strategy_key_validator(#{
|
||||
<<"strategy">> := key_dispatch,
|
||||
<<"message">> := #{<<"key">> := ""}
|
||||
|
@ -257,7 +264,12 @@ override_default(OriginalFn, NewDefault) ->
|
|||
|
||||
auth_union_member_selector(all_union_members) ->
|
||||
[none, ref(auth_basic), ref(auth_token)];
|
||||
auth_union_member_selector({value, V}) ->
|
||||
auth_union_member_selector({value, V0}) ->
|
||||
V =
|
||||
case is_map(V0) of
|
||||
true -> emqx_utils_maps:binary_key_map(V0);
|
||||
false -> V0
|
||||
end,
|
||||
case V of
|
||||
#{<<"password">> := _} ->
|
||||
[ref(auth_basic)];
|
||||
|
@ -265,6 +277,8 @@ auth_union_member_selector({value, V}) ->
|
|||
[ref(auth_token)];
|
||||
<<"none">> ->
|
||||
[none];
|
||||
none ->
|
||||
[none];
|
||||
_ ->
|
||||
Expected = "none | basic | token",
|
||||
throw(#{
|
||||
|
|
|
@ -40,6 +40,7 @@ groups() ->
|
|||
only_once_tests() ->
|
||||
[
|
||||
t_create_via_http,
|
||||
t_strategy_key_validation,
|
||||
t_start_when_down,
|
||||
t_send_when_down,
|
||||
t_send_when_timeout,
|
||||
|
@ -313,6 +314,8 @@ create_bridge_api(Config, Overrides) ->
|
|||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {Status, Headers, Body0}} ->
|
||||
{ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}};
|
||||
{error, {Status, Headers, Body0}} ->
|
||||
{error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
|
@ -356,8 +359,12 @@ probe_bridge_api(Config, Overrides) ->
|
|||
ct:pal("probing bridge (via http): ~p", [Params]),
|
||||
Res =
|
||||
case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params, Opts) of
|
||||
{ok, {{_, 204, _}, _Headers, _Body0} = Res0} -> {ok, Res0};
|
||||
Error -> Error
|
||||
{ok, {{_, 204, _}, _Headers, _Body0} = Res0} ->
|
||||
{ok, Res0};
|
||||
{error, {Status, Headers, Body0}} ->
|
||||
{error, {Status, Headers, emqx_bridge_testlib:try_decode_error(Body0)}};
|
||||
Error ->
|
||||
Error
|
||||
end,
|
||||
ct:pal("bridge probe result: ~p", [Res]),
|
||||
Res.
|
||||
|
@ -1074,6 +1081,37 @@ t_resource_manager_crash_before_producers_started(Config) ->
|
|||
),
|
||||
ok.
|
||||
|
||||
t_strategy_key_validation(Config) ->
|
||||
?assertMatch(
|
||||
{error,
|
||||
{{_, 400, _}, _, #{
|
||||
<<"message">> :=
|
||||
#{
|
||||
<<"kind">> := <<"validation_error">>,
|
||||
<<"reason">> := <<"Message key cannot be empty", _/binary>>
|
||||
} = Msg
|
||||
}}},
|
||||
probe_bridge_api(
|
||||
Config,
|
||||
#{<<"strategy">> => <<"key_dispatch">>, <<"message">> => #{<<"key">> => <<>>}}
|
||||
)
|
||||
),
|
||||
?assertMatch(
|
||||
{error,
|
||||
{{_, 400, _}, _, #{
|
||||
<<"message">> :=
|
||||
#{
|
||||
<<"kind">> := <<"validation_error">>,
|
||||
<<"reason">> := <<"Message key cannot be empty", _/binary>>
|
||||
} = Msg
|
||||
}}},
|
||||
create_bridge_api(
|
||||
Config,
|
||||
#{<<"strategy">> => <<"key_dispatch">>, <<"message">> => #{<<"key">> => <<>>}}
|
||||
)
|
||||
),
|
||||
ok.
|
||||
|
||||
t_cluster(Config0) ->
|
||||
ct:timetrap({seconds, 120}),
|
||||
?retrying(Config0, 3, fun do_t_cluster/1).
|
||||
|
|
|
@ -35,6 +35,17 @@ pulsar_producer_validations_test() ->
|
|||
]},
|
||||
check(Conf)
|
||||
),
|
||||
%% ensure atoms exist
|
||||
_ = [my_producer],
|
||||
?assertThrow(
|
||||
{_, [
|
||||
#{
|
||||
path := "bridges.pulsar_producer.my_producer",
|
||||
reason := "Message key cannot be empty when `key_dispatch` strategy is used"
|
||||
}
|
||||
]},
|
||||
check_atom_key(Conf)
|
||||
),
|
||||
|
||||
ok.
|
||||
|
||||
|
@ -46,9 +57,14 @@ parse(Hocon) ->
|
|||
{ok, Conf} = hocon:binary(Hocon),
|
||||
Conf.
|
||||
|
||||
%% what bridge creation does
|
||||
check(Conf) when is_map(Conf) ->
|
||||
hocon_tconf:check_plain(emqx_bridge_schema, Conf).
|
||||
|
||||
%% what bridge probe does
|
||||
check_atom_key(Conf) when is_map(Conf) ->
|
||||
hocon_tconf:check_plain(emqx_bridge_schema, Conf, #{atom_key => true, required => false}).
|
||||
|
||||
%%===========================================================================
|
||||
%% Data section
|
||||
%%===========================================================================
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{application, emqx_conf, [
|
||||
{description, "EMQX configuration management"},
|
||||
{vsn, "0.1.24"},
|
||||
{vsn, "0.1.25"},
|
||||
{registered, []},
|
||||
{mod, {emqx_conf_app, []}},
|
||||
{applications, [kernel, stdlib, emqx_ctl]},
|
||||
|
|
|
@ -648,6 +648,7 @@ fields("node") ->
|
|||
mapping => "mria.tlog_push_mode",
|
||||
default => async,
|
||||
'readOnly' => true,
|
||||
deprecated => {since, "5.2.0"},
|
||||
importance => ?IMPORTANCE_HIDDEN,
|
||||
desc => ?DESC(db_tlog_push_mode)
|
||||
}
|
||||
|
@ -671,6 +672,35 @@ fields("node") ->
|
|||
mapping => "emqx_machine.custom_shard_transports",
|
||||
default => #{}
|
||||
}
|
||||
)},
|
||||
{"broker_pool_size",
|
||||
sc(
|
||||
pos_integer(),
|
||||
#{
|
||||
importance => ?IMPORTANCE_HIDDEN,
|
||||
default => emqx_vm:schedulers() * 2,
|
||||
'readOnly' => true,
|
||||
desc => ?DESC(node_broker_pool_size)
|
||||
}
|
||||
)},
|
||||
{"generic_pool_size",
|
||||
sc(
|
||||
pos_integer(),
|
||||
#{
|
||||
importance => ?IMPORTANCE_HIDDEN,
|
||||
default => emqx_vm:schedulers(),
|
||||
'readOnly' => true,
|
||||
desc => ?DESC(node_generic_pool_size)
|
||||
}
|
||||
)},
|
||||
{"channel_cleanup_batch_size",
|
||||
sc(
|
||||
pos_integer(),
|
||||
#{
|
||||
importance => ?IMPORTANCE_HIDDEN,
|
||||
default => 100_000,
|
||||
desc => ?DESC(node_channel_cleanup_batch_size)
|
||||
}
|
||||
)}
|
||||
];
|
||||
fields("cluster_call") ->
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
%% -*- mode: erlang -*-
|
||||
{application, emqx_connector, [
|
||||
{description, "EMQX Data Integration Connectors"},
|
||||
{vsn, "0.1.28"},
|
||||
{vsn, "0.1.29"},
|
||||
{registered, []},
|
||||
{mod, {emqx_connector_app, []}},
|
||||
{applications, [
|
||||
|
|
|
@ -1,199 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_connector_ldap).
|
||||
|
||||
-include("emqx_connector.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
|
||||
-export([roots/0, fields/1]).
|
||||
|
||||
-behaviour(emqx_resource).
|
||||
|
||||
%% callbacks of behaviour emqx_resource
|
||||
-export([
|
||||
callback_mode/0,
|
||||
on_start/2,
|
||||
on_stop/2,
|
||||
on_query/3,
|
||||
on_get_status/2
|
||||
]).
|
||||
|
||||
-export([connect/1]).
|
||||
|
||||
-export([search/4]).
|
||||
|
||||
%% port is not expected from configuration because
|
||||
%% all servers expected to use the same port number
|
||||
-define(LDAP_HOST_OPTIONS, #{no_port => true}).
|
||||
|
||||
%%=====================================================================
|
||||
roots() ->
|
||||
ldap_fields() ++ emqx_connector_schema_lib:ssl_fields().
|
||||
|
||||
%% this schema has no sub-structs
|
||||
fields(_) -> [].
|
||||
|
||||
%% ===================================================================
|
||||
callback_mode() -> always_sync.
|
||||
|
||||
on_start(
|
||||
InstId,
|
||||
#{
|
||||
servers := Servers0,
|
||||
port := Port,
|
||||
bind_dn := BindDn,
|
||||
bind_password := BindPassword,
|
||||
timeout := Timeout,
|
||||
pool_size := PoolSize,
|
||||
ssl := SSL
|
||||
} = Config
|
||||
) ->
|
||||
?SLOG(info, #{
|
||||
msg => "starting_ldap_connector",
|
||||
connector => InstId,
|
||||
config => emqx_utils:redact(Config)
|
||||
}),
|
||||
Servers1 = emqx_schema:parse_servers(Servers0, ?LDAP_HOST_OPTIONS),
|
||||
Servers =
|
||||
lists:map(
|
||||
fun
|
||||
(#{hostname := Host, port := Port0}) ->
|
||||
{Host, Port0};
|
||||
(#{hostname := Host}) ->
|
||||
Host
|
||||
end,
|
||||
Servers1
|
||||
),
|
||||
SslOpts =
|
||||
case maps:get(enable, SSL) of
|
||||
true ->
|
||||
[
|
||||
{ssl, true},
|
||||
{sslopts, emqx_tls_lib:to_client_opts(SSL)}
|
||||
];
|
||||
false ->
|
||||
[{ssl, false}]
|
||||
end,
|
||||
Opts = [
|
||||
{servers, Servers},
|
||||
{port, Port},
|
||||
{bind_dn, BindDn},
|
||||
{bind_password, BindPassword},
|
||||
{timeout, Timeout},
|
||||
{pool_size, PoolSize},
|
||||
{auto_reconnect, ?AUTO_RECONNECT_INTERVAL}
|
||||
],
|
||||
case emqx_resource_pool:start(InstId, ?MODULE, Opts ++ SslOpts) of
|
||||
ok -> {ok, #{pool_name => InstId}};
|
||||
{error, Reason} -> {error, Reason}
|
||||
end.
|
||||
|
||||
on_stop(InstId, _State) ->
|
||||
?SLOG(info, #{
|
||||
msg => "stopping_ldap_connector",
|
||||
connector => InstId
|
||||
}),
|
||||
emqx_resource_pool:stop(InstId).
|
||||
|
||||
on_query(InstId, {search, Base, Filter, Attributes}, #{pool_name := PoolName} = State) ->
|
||||
Request = {Base, Filter, Attributes},
|
||||
?TRACE(
|
||||
"QUERY",
|
||||
"ldap_connector_received",
|
||||
#{request => Request, connector => InstId, state => State}
|
||||
),
|
||||
case
|
||||
Result = ecpool:pick_and_do(
|
||||
PoolName,
|
||||
{?MODULE, search, [Base, Filter, Attributes]},
|
||||
no_handover
|
||||
)
|
||||
of
|
||||
{error, Reason} ->
|
||||
?SLOG(error, #{
|
||||
msg => "ldap_connector_do_request_failed",
|
||||
request => Request,
|
||||
connector => InstId,
|
||||
reason => Reason
|
||||
}),
|
||||
case Reason of
|
||||
ecpool_empty ->
|
||||
{error, {recoverable_error, Reason}};
|
||||
_ ->
|
||||
Result
|
||||
end;
|
||||
_ ->
|
||||
Result
|
||||
end.
|
||||
|
||||
on_get_status(_InstId, _State) -> connected.
|
||||
|
||||
search(Conn, Base, Filter, Attributes) ->
|
||||
eldap2:search(Conn, [
|
||||
{base, Base},
|
||||
{filter, Filter},
|
||||
{attributes, Attributes},
|
||||
{deref, eldap2:'derefFindingBaseObj'()}
|
||||
]).
|
||||
|
||||
%% ===================================================================
|
||||
connect(Opts) ->
|
||||
Servers = proplists:get_value(servers, Opts, ["localhost"]),
|
||||
Port = proplists:get_value(port, Opts, 389),
|
||||
Timeout = proplists:get_value(timeout, Opts, 30),
|
||||
BindDn = proplists:get_value(bind_dn, Opts),
|
||||
BindPassword = proplists:get_value(bind_password, Opts),
|
||||
SslOpts =
|
||||
case proplists:get_value(ssl, Opts, false) of
|
||||
true ->
|
||||
[{sslopts, proplists:get_value(sslopts, Opts, [])}, {ssl, true}];
|
||||
false ->
|
||||
[{ssl, false}]
|
||||
end,
|
||||
LdapOpts =
|
||||
[
|
||||
{port, Port},
|
||||
{timeout, Timeout}
|
||||
] ++ SslOpts,
|
||||
{ok, LDAP} = eldap2:open(Servers, LdapOpts),
|
||||
ok = eldap2:simple_bind(LDAP, BindDn, BindPassword),
|
||||
{ok, LDAP}.
|
||||
|
||||
ldap_fields() ->
|
||||
[
|
||||
{servers, servers()},
|
||||
{port, fun port/1},
|
||||
{pool_size, fun emqx_connector_schema_lib:pool_size/1},
|
||||
{bind_dn, fun bind_dn/1},
|
||||
{bind_password, fun emqx_connector_schema_lib:password/1},
|
||||
{timeout, fun duration/1},
|
||||
{auto_reconnect, fun emqx_connector_schema_lib:auto_reconnect/1}
|
||||
].
|
||||
|
||||
servers() ->
|
||||
emqx_schema:servers_sc(#{}, ?LDAP_HOST_OPTIONS).
|
||||
|
||||
bind_dn(type) -> binary();
|
||||
bind_dn(default) -> 0;
|
||||
bind_dn(_) -> undefined.
|
||||
|
||||
port(type) -> integer();
|
||||
port(default) -> 389;
|
||||
port(_) -> undefined.
|
||||
|
||||
duration(type) -> emqx_schema:timeout_duration_ms();
|
||||
duration(_) -> undefined.
|
|
@ -278,13 +278,14 @@ t_session_serialization(_Config) ->
|
|||
emqx_eviction_agent:session_count()
|
||||
),
|
||||
|
||||
[ChanPid0] = emqx_cm:lookup_channels(<<"client_with_session">>),
|
||||
MRef0 = erlang:monitor(process, ChanPid0),
|
||||
|
||||
%% Evacuate to the same node
|
||||
|
||||
?assertWaitEvent(
|
||||
emqx_eviction_agent:evict_sessions(1, node()),
|
||||
#{?snk_kind := emqx_channel_takeover_end, clientid := <<"client_with_session">>},
|
||||
1000
|
||||
),
|
||||
_ = emqx_eviction_agent:evict_sessions(1, node()),
|
||||
|
||||
?assertReceive({'DOWN', MRef0, process, ChanPid0, _}),
|
||||
|
||||
ok = emqx_eviction_agent:disable(test_eviction),
|
||||
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
src/emqx_ldap_filter_lexer.erl
|
||||
src/emqx_ldap_filter_parser.erl
|
|
@ -0,0 +1,94 @@
|
|||
Business Source License 1.1
|
||||
|
||||
Licensor: Hangzhou EMQ Technologies Co., Ltd.
|
||||
Licensed Work: EMQX Enterprise Edition
|
||||
The Licensed Work is (c) 2023
|
||||
Hangzhou EMQ Technologies Co., Ltd.
|
||||
Additional Use Grant: Students and educators are granted right to copy,
|
||||
modify, and create derivative work for research
|
||||
or education.
|
||||
Change Date: 2027-02-01
|
||||
Change License: Apache License, Version 2.0
|
||||
|
||||
For information about alternative licensing arrangements for the Software,
|
||||
please contact Licensor: https://www.emqx.com/en/contact
|
||||
|
||||
Notice
|
||||
|
||||
The Business Source License (this document, or the “License”) is not an Open
|
||||
Source license. However, the Licensed Work will eventually be made available
|
||||
under an Open Source License, as stated in this License.
|
||||
|
||||
License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved.
|
||||
“Business Source License” is a trademark of MariaDB Corporation Ab.
|
||||
|
||||
-----------------------------------------------------------------------------
|
||||
|
||||
Business Source License 1.1
|
||||
|
||||
Terms
|
||||
|
||||
The Licensor hereby grants you the right to copy, modify, create derivative
|
||||
works, redistribute, and make non-production use of the Licensed Work. The
|
||||
Licensor may make an Additional Use Grant, above, permitting limited
|
||||
production use.
|
||||
|
||||
Effective on the Change Date, or the fourth anniversary of the first publicly
|
||||
available distribution of a specific version of the Licensed Work under this
|
||||
License, whichever comes first, the Licensor hereby grants you rights under
|
||||
the terms of the Change License, and the rights granted in the paragraph
|
||||
above terminate.
|
||||
|
||||
If your use of the Licensed Work does not comply with the requirements
|
||||
currently in effect as described in this License, you must purchase a
|
||||
commercial license from the Licensor, its affiliated entities, or authorized
|
||||
resellers, or you must refrain from using the Licensed Work.
|
||||
|
||||
All copies of the original and modified Licensed Work, and derivative works
|
||||
of the Licensed Work, are subject to this License. This License applies
|
||||
separately for each version of the Licensed Work and the Change Date may vary
|
||||
for each version of the Licensed Work released by Licensor.
|
||||
|
||||
You must conspicuously display this License on each original or modified copy
|
||||
of the Licensed Work. If you receive the Licensed Work in original or
|
||||
modified form from a third party, the terms and conditions set forth in this
|
||||
License apply to your use of that work.
|
||||
|
||||
Any use of the Licensed Work in violation of this License will automatically
|
||||
terminate your rights under this License for the current and all other
|
||||
versions of the Licensed Work.
|
||||
|
||||
This License does not grant you any right in any trademark or logo of
|
||||
Licensor or its affiliates (provided that you may use a trademark or logo of
|
||||
Licensor as expressly required by this License).
|
||||
|
||||
TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
|
||||
AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
|
||||
EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
|
||||
TITLE.
|
||||
|
||||
MariaDB hereby grants you permission to use this License’s text to license
|
||||
your works, and to refer to it using the trademark “Business Source License”,
|
||||
as long as you comply with the Covenants of Licensor below.
|
||||
|
||||
Covenants of Licensor
|
||||
|
||||
In consideration of the right to use this License’s text and the “Business
|
||||
Source License” name and trademark, Licensor covenants to MariaDB, and to all
|
||||
other recipients of the licensed work to be provided by Licensor:
|
||||
|
||||
1. To specify as the Change License the GPL Version 2.0 or any later version,
|
||||
or a license that is compatible with GPL Version 2.0 or a later version,
|
||||
where “compatible” means that software provided under the Change License can
|
||||
be included in a program with software provided under GPL Version 2.0 or a
|
||||
later version. Licensor may specify additional Change Licenses without
|
||||
limitation.
|
||||
|
||||
2. To either: (a) specify an additional grant of rights to use that does not
|
||||
impose any additional restriction on the right granted in this License, as
|
||||
the Additional Use Grant; or (b) insert the text “None”.
|
||||
|
||||
3. To specify a Change Date.
|
||||
|
||||
4. Not to modify this License in any other way.
|
|
@ -0,0 +1,14 @@
|
|||
# LDAP Connector
|
||||
|
||||
This application houses the LDAP connector.
|
||||
It provides the APIs to connect to the LDAP service.
|
||||
|
||||
It is used by the emqx_authz and emqx_authn applications to check user permissions.
|
||||
|
||||
## Contributing
|
||||
|
||||
Please see our [contributing.md](../../CONTRIBUTING.md).
|
||||
|
||||
## License
|
||||
|
||||
See [APL](../../APL.txt).
|
|
@ -0,0 +1 @@
|
|||
ldap
|
|
@ -0,0 +1,9 @@
|
|||
%% -*- mode: erlang; -*-
|
||||
|
||||
{erl_opts, [debug_info]}.
|
||||
{deps, [
|
||||
{emqx_connector, {path, "../../apps/emqx_connector"}},
|
||||
{emqx_resource, {path, "../../apps/emqx_resource"}},
|
||||
{emqx_authn, {path, "../../apps/emqx_authn"}},
|
||||
{emqx_authz, {path, "../../apps/emqx_authz"}}
|
||||
]}.
|
|
@ -0,0 +1,15 @@
|
|||
{application, emqx_ldap, [
|
||||
{description, "EMQX LDAP Connector"},
|
||||
{vsn, "0.1.0"},
|
||||
{registered, []},
|
||||
{applications, [
|
||||
kernel,
|
||||
stdlib,
|
||||
emqx_authn,
|
||||
emqx_authz
|
||||
]},
|
||||
{env, []},
|
||||
{modules, []},
|
||||
|
||||
{links, []}
|
||||
]}.
|
|
@ -0,0 +1,239 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_ldap).
|
||||
|
||||
-include_lib("emqx_connector/include/emqx_connector.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
-include_lib("hocon/include/hoconsc.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
-include_lib("eldap/include/eldap.hrl").
|
||||
|
||||
-behaviour(emqx_resource).
|
||||
|
||||
%% callbacks of behaviour emqx_resource
|
||||
-export([
|
||||
callback_mode/0,
|
||||
on_start/2,
|
||||
on_stop/2,
|
||||
on_query/3,
|
||||
on_get_status/2
|
||||
]).
|
||||
|
||||
%% ecpool connect & reconnect
|
||||
-export([connect/1]).
|
||||
|
||||
-export([roots/0, fields/1]).
|
||||
|
||||
-export([do_get_status/1]).
|
||||
|
||||
-define(LDAP_HOST_OPTIONS, #{
|
||||
default_port => 389
|
||||
}).
|
||||
|
||||
-type params_tokens() :: #{atom() => list()}.
|
||||
-type state() ::
|
||||
#{
|
||||
pool_name := binary(),
|
||||
base_tokens := params_tokens(),
|
||||
filter_tokens := params_tokens()
|
||||
}.
|
||||
|
||||
-define(ECS, emqx_connector_schema_lib).
|
||||
|
||||
%%=====================================================================
|
||||
%% Hocon schema
|
||||
roots() ->
|
||||
[{config, #{type => hoconsc:ref(?MODULE, config)}}].
|
||||
|
||||
fields(config) ->
|
||||
[
|
||||
{server, server()},
|
||||
{pool_size, fun ?ECS:pool_size/1},
|
||||
{username, fun ensure_username/1},
|
||||
{password, fun ?ECS:password/1},
|
||||
{base_object,
|
||||
?HOCON(binary(), #{
|
||||
desc => ?DESC(base_object),
|
||||
required => true,
|
||||
validator => fun emqx_schema:non_empty_string/1
|
||||
})},
|
||||
{filter,
|
||||
?HOCON(
|
||||
binary(),
|
||||
#{
|
||||
desc => ?DESC(filter),
|
||||
default => <<"(objectClass=mqttUser)">>,
|
||||
validator => fun emqx_schema:non_empty_string/1
|
||||
}
|
||||
)}
|
||||
] ++ emqx_connector_schema_lib:ssl_fields().
|
||||
|
||||
server() ->
|
||||
Meta = #{desc => ?DESC("server")},
|
||||
emqx_schema:servers_sc(Meta, ?LDAP_HOST_OPTIONS).
|
||||
|
||||
ensure_username(required) ->
|
||||
true;
|
||||
ensure_username(Field) ->
|
||||
?ECS:username(Field).
|
||||
|
||||
%% ===================================================================
|
||||
callback_mode() -> always_sync.
|
||||
|
||||
-spec on_start(binary(), hoconsc:config()) -> {ok, state()} | {error, _}.
|
||||
on_start(
|
||||
InstId,
|
||||
#{
|
||||
server := Server,
|
||||
pool_size := PoolSize,
|
||||
ssl := SSL
|
||||
} = Config
|
||||
) ->
|
||||
HostPort = emqx_schema:parse_server(Server, ?LDAP_HOST_OPTIONS),
|
||||
?SLOG(info, #{
|
||||
msg => "starting_ldap_connector",
|
||||
connector => InstId,
|
||||
config => emqx_utils:redact(Config)
|
||||
}),
|
||||
|
||||
Config2 = maps:merge(Config, HostPort),
|
||||
Config3 =
|
||||
case maps:get(enable, SSL) of
|
||||
true ->
|
||||
Config2#{sslopts => emqx_tls_lib:to_client_opts(SSL)};
|
||||
false ->
|
||||
Config2
|
||||
end,
|
||||
Options = [
|
||||
{pool_size, PoolSize},
|
||||
{auto_reconnect, ?AUTO_RECONNECT_INTERVAL},
|
||||
{options, Config3}
|
||||
],
|
||||
|
||||
case emqx_resource_pool:start(InstId, ?MODULE, Options) of
|
||||
ok ->
|
||||
{ok, prepare_template(Config, #{pool_name => InstId})};
|
||||
{error, Reason} ->
|
||||
?tp(
|
||||
ldap_connector_start_failed,
|
||||
#{error => Reason}
|
||||
),
|
||||
{error, Reason}
|
||||
end.
|
||||
|
||||
on_stop(InstId, _State) ->
|
||||
?SLOG(info, #{
|
||||
msg => "stopping_ldap_connector",
|
||||
connector => InstId
|
||||
}),
|
||||
emqx_resource_pool:stop(InstId).
|
||||
|
||||
on_query(InstId, {query, Data}, State) ->
|
||||
on_query(InstId, {query, Data}, [], State);
|
||||
on_query(InstId, {query, Data, Attrs}, State) ->
|
||||
on_query(InstId, {query, Data}, [{attributes, Attrs}], State);
|
||||
on_query(InstId, {query, Data, Attrs, Timeout}, State) ->
|
||||
on_query(InstId, {query, Data}, [{attributes, Attrs}, {timeout, Timeout}], State).
|
||||
|
||||
on_get_status(_InstId, #{pool_name := PoolName} = _State) ->
|
||||
case emqx_resource_pool:health_check_workers(PoolName, fun ?MODULE:do_get_status/1) of
|
||||
true ->
|
||||
connected;
|
||||
false ->
|
||||
connecting
|
||||
end.
|
||||
|
||||
do_get_status(Conn) ->
|
||||
erlang:is_process_alive(Conn).
|
||||
|
||||
%% ===================================================================
|
||||
|
||||
connect(Options) ->
|
||||
#{hostname := Host, username := Username, password := Password} =
|
||||
Conf = proplists:get_value(options, Options),
|
||||
OpenOpts = maps:to_list(maps:with([port, sslopts], Conf)),
|
||||
case eldap:open([Host], [{log, fun log/3} | OpenOpts]) of
|
||||
{ok, Handle} = Ret ->
|
||||
case eldap:simple_bind(Handle, Username, Password) of
|
||||
ok -> Ret;
|
||||
Error -> Error
|
||||
end;
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
on_query(
|
||||
InstId,
|
||||
{query, Data},
|
||||
SearchOptions,
|
||||
#{base_tokens := BaseTks, filter_tokens := FilterTks} = State
|
||||
) ->
|
||||
Base = emqx_placeholder:proc_tmpl(BaseTks, Data),
|
||||
FilterBin = emqx_placeholder:proc_tmpl(FilterTks, Data),
|
||||
case emqx_ldap_filter_parser:scan_and_parse(FilterBin) of
|
||||
{ok, Filter} ->
|
||||
do_ldap_query(
|
||||
InstId,
|
||||
[{base, Base}, {filter, Filter} | SearchOptions],
|
||||
State
|
||||
);
|
||||
{error, Reason} = Error ->
|
||||
?SLOG(error, #{
|
||||
msg => "filter_parse_failed",
|
||||
filter => FilterBin,
|
||||
reason => Reason
|
||||
}),
|
||||
Error
|
||||
end.
|
||||
|
||||
do_ldap_query(
|
||||
InstId,
|
||||
SearchOptions,
|
||||
#{pool_name := PoolName} = State
|
||||
) ->
|
||||
LogMeta = #{connector => InstId, search => SearchOptions, state => State},
|
||||
?TRACE("QUERY", "ldap_connector_received", LogMeta),
|
||||
case
|
||||
ecpool:pick_and_do(
|
||||
PoolName,
|
||||
{eldap, search, [SearchOptions]},
|
||||
handover
|
||||
)
|
||||
of
|
||||
{ok, Result} ->
|
||||
?tp(
|
||||
ldap_connector_query_return,
|
||||
#{result => Result}
|
||||
),
|
||||
{ok, Result#eldap_search_result.entries};
|
||||
{error, 'noSuchObject'} ->
|
||||
{ok, []};
|
||||
{error, Reason} ->
|
||||
?SLOG(
|
||||
error,
|
||||
LogMeta#{msg => "ldap_connector_do_sql_query_failed", reason => Reason}
|
||||
),
|
||||
{error, {unrecoverable_error, Reason}}
|
||||
end.
|
||||
|
||||
log(Level, Format, Args) ->
|
||||
?SLOG(
|
||||
Level,
|
||||
#{
|
||||
msg => "ldap_log",
|
||||
log => io_lib:format(Format, Args)
|
||||
}
|
||||
).
|
||||
|
||||
prepare_template(Config, State) ->
|
||||
do_prepare_template(maps:to_list(maps:with([base_object, filter], Config)), State).
|
||||
|
||||
do_prepare_template([{base_object, V} | T], State) ->
|
||||
do_prepare_template(T, State#{base_tokens => emqx_placeholder:preproc_tmpl(V)});
|
||||
do_prepare_template([{filter, V} | T], State) ->
|
||||
do_prepare_template(T, State#{filter_tokens => emqx_placeholder:preproc_tmpl(V)});
|
||||
do_prepare_template([], State) ->
|
||||
State.
|
|
@ -0,0 +1,285 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_ldap_authn).
|
||||
|
||||
-include_lib("emqx_authn/include/emqx_authn.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
-include_lib("hocon/include/hoconsc.hrl").
|
||||
-include_lib("eldap/include/eldap.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
%% a compatible attribute for version 4.x
|
||||
-define(ISENABLED_ATTR, "isEnabled").
|
||||
-define(VALID_ALGORITHMS, [md5, ssha, sha, sha256, sha384, sha512]).
|
||||
%% TODO
|
||||
%% 1. Supports more salt algorithms, SMD5 SSHA 256/384/512
|
||||
%% 2. Supports https://datatracker.ietf.org/doc/html/rfc3112
|
||||
|
||||
-export([
|
||||
namespace/0,
|
||||
tags/0,
|
||||
roots/0,
|
||||
fields/1,
|
||||
desc/1
|
||||
]).
|
||||
|
||||
-export([
|
||||
refs/0,
|
||||
create/2,
|
||||
update/2,
|
||||
authenticate/2,
|
||||
destroy/1
|
||||
]).
|
||||
|
||||
-import(proplists, [get_value/2, get_value/3]).
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
%% Root namespace of this authenticator's configuration schema.
namespace() -> "authn".

%% Tags used to group this schema in the generated documentation.
tags() ->
    [<<"Authentication">>].

%% used for config check when the schema module is resolved
roots() ->
    [{?CONF_NS, hoconsc:mk(hoconsc:ref(?MODULE, ldap))}].

%% Schema fields of the `ldap' authenticator: mechanism/backend markers,
%% the LDAP attribute names to read, the query timeout, plus the common
%% authenticator fields and the shared LDAP connector options.
fields(ldap) ->
    [
        {mechanism, emqx_authn_schema:mechanism(password_based)},
        {backend, emqx_authn_schema:backend(ldap)},
        {password_attribute, fun password_attribute/1},
        {is_superuser_attribute, fun is_superuser_attribute/1},
        {query_timeout, fun query_timeout/1}
    ] ++ emqx_authn_schema:common_fields() ++ emqx_ldap:fields(config).

desc(ldap) ->
    ?DESC(ldap);
desc(_) ->
    undefined.

%% LDAP attribute holding the (possibly RFC-2307 "{scheme}"-prefixed)
%% password value.
password_attribute(type) -> string();
password_attribute(desc) -> ?DESC(?FUNCTION_NAME);
password_attribute(default) -> <<"userPassword">>;
password_attribute(_) -> undefined.

%% LDAP attribute marking the client as a superuser.
is_superuser_attribute(type) -> string();
is_superuser_attribute(desc) -> ?DESC(?FUNCTION_NAME);
is_superuser_attribute(default) -> <<"isSuperuser">>;
is_superuser_attribute(_) -> undefined.

%% Timeout applied to each LDAP search issued during authentication.
query_timeout(type) -> emqx_schema:timeout_duration_ms();
query_timeout(desc) -> ?DESC(?FUNCTION_NAME);
query_timeout(default) -> <<"5s">>;
query_timeout(_) -> undefined.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
%% Config schema references selectable for this provider.
refs() ->
    [hoconsc:ref(?MODULE, ldap)].

%% emqx_authentication callback; the authenticator id is not needed here.
create(_AuthenticatorID, Config) ->
    create(Config).

%% Allocate an LDAP resource for this authenticator and keep the parsed
%% attribute/timeout settings together with the resource id in the state.
create(Config0) ->
    ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
    {Config, State} = parse_config(Config0),
    {ok, _Data} = emqx_authn_utils:create_resource(ResourceId, emqx_ldap, Config),
    {ok, State#{resource_id => ResourceId}}.
|
||||
|
||||
%% Re-create the underlying LDAP resource with the new configuration,
%% keeping the same resource id. Crashes with {load_config_error, _}
%% if the resource rejects the new config.
update(Config0, #{resource_id := ResourceId} = _State) ->
    {Config, NState} = parse_config(Config0),
    case emqx_authn_utils:update_resource(emqx_ldap, Config, ResourceId) of
        {error, Reason} ->
            error({load_config_error, Reason});
        {ok, _} ->
            {ok, NState#{resource_id => ResourceId}}
    end.

%% Tear down the LDAP resource; removal errors are deliberately ignored.
destroy(#{resource_id := ResourceId}) ->
    _ = emqx_resource:remove_local(ResourceId),
    ok.
|
||||
|
||||
%% Enhanced (auth_method-based) authentication is not supported by this
%% provider; let the authentication chain try the next one.
authenticate(#{auth_method := _}, _) ->
    ignore;
%% Look the client up in LDAP, then check the compatibility "isEnabled"
%% flag and verify the supplied password against the stored value.
%% Returns `ignore' (no such user, or query failure) so the chain can
%% continue, otherwise {ok, #{is_superuser := _}} or {error, _}.
authenticate(
    #{password := Password} = Credential,
    #{
        password_attribute := PasswordAttr,
        is_superuser_attribute := IsSuperuserAttr,
        query_timeout := Timeout,
        resource_id := ResourceId
    } = State
) ->
    case
        emqx_resource:simple_sync_query(
            ResourceId,
            {query, Credential, [PasswordAttr, IsSuperuserAttr, ?ISENABLED_ATTR], Timeout}
        )
    of
        {ok, []} ->
            %% user not found: not an error, defer to other providers
            ignore;
        {ok, [Entry | _]} ->
            %% only the first returned entry is considered
            is_enabled(Password, Entry, State);
        {error, Reason} ->
            ?TRACE_AUTHN_PROVIDER(error, "ldap_query_failed", #{
                resource => ResourceId,
                timeout => Timeout,
                reason => Reason
            }),
            ignore
    end.
|
||||
|
||||
%% Split the raw config into {Config, State}: State carries the three
%% authentication settings with binary values converted to charlists;
%% Config itself is passed through untouched. Crashes (badkey) if any
%% of the three keys is missing, as in the original contract.
parse_config(Config) ->
    ToCharlist = fun
        (Bin) when is_binary(Bin) -> erlang:binary_to_list(Bin);
        (Other) -> Other
    end,
    Keys = [password_attribute, is_superuser_attribute, query_timeout],
    State = maps:from_list([{Key, ToCharlist(maps:get(Key, Config))} || Key <- Keys]),
    {Config, State}.
|
||||
|
||||
%% Compatibility with EMQX v4.x: honor the "isEnabled" attribute when the
%% entry carries one; an absent attribute defaults to "true" (enabled).
is_enabled(Password, #eldap_entry{attributes = Attributes} = Entry, State) ->
    IsEnabled = get_lower_bin_value(?ISENABLED_ATTR, Attributes, "true"),
    case emqx_authn_utils:to_bool(IsEnabled) of
        true ->
            ensure_password(Password, Entry, State);
        _ ->
            {error, user_disabled}
    end.
|
||||
|
||||
%% Fetch the stored password value from the entry; fail with
%% {error, no_password} when the configured attribute is missing.
%% Only the first value of a multi-valued attribute is used.
ensure_password(
    Password,
    #eldap_entry{attributes = Attributes} = Entry,
    #{password_attribute := PasswordAttr} = State
) ->
    case get_value(PasswordAttr, Attributes) of
        undefined ->
            {error, no_password};
        [LDAPPassword | _] ->
            %% on scheme-extraction failure, retry after base64-decoding
            extract_hash_algorithm(LDAPPassword, Password, fun try_decode_password/4, Entry, State)
    end.
|
||||
|
||||
%% RFC 2307 format password
%% https://datatracker.ietf.org/doc/html/rfc2307
%%
%% Try to split the stored value as "{scheme}rest". On a match, resolve
%% the scheme to an EXISTING atom only (never creating atoms from LDAP
%% data) and continue verification; otherwise hand the raw value to
%% OnFail — the caller uses that to retry after base64 decoding.
extract_hash_algorithm(LDAPPassword, Password, OnFail, Entry, State) ->
    case
        re:run(
            LDAPPassword,
            "{([^{}]+)}(.+)",
            [{capture, all_but_first, list}, global]
        )
    of
        {match, [[HashTypeStr, PasswordHashStr]]} ->
            case emqx_utils:safe_to_existing_atom(string:to_lower(HashTypeStr)) of
                {ok, HashType} ->
                    PasswordHash = to_binary(PasswordHashStr),
                    is_valid_algorithm(HashType, PasswordHash, Password, Entry, State);
                _Error ->
                    {error, invalid_hash_type}
            end;
        _ ->
            OnFail(LDAPPassword, Password, Entry, State)
    end.
|
||||
|
||||
%% Accept only hash algorithms from the supported whitelist before
%% attempting verification; anything else is rejected explicitly.
is_valid_algorithm(HashType, PasswordHash, Password, Entry, State) ->
    case lists:member(HashType, ?VALID_ALGORITHMS) of
        false ->
            {error, {invalid_hash_type, HashType}};
        true ->
            verify_password(HashType, PasswordHash, Password, Entry, State)
    end.
|
||||
|
||||
%% this password is in LDIF format which is base64 encoding
%% Base64-decode the stored value and retry scheme extraction once; a
%% second failure (the nested OnFail) means the value is unusable.
try_decode_password(LDAPPassword, Password, Entry, State) ->
    case safe_base64_decode(LDAPPassword) of
        {ok, Decode} ->
            extract_hash_algorithm(
                Decode,
                Password,
                %% no further fallback: give up after one decode attempt
                fun(_, _, _, _) ->
                    {error, invalid_password}
                end,
                Entry,
                State
            );
        {error, Reason} ->
            {error, {invalid_password, Reason}}
    end.
|
||||
|
||||
%% sha with salt
%% https://www.openldap.org/faq/data/cache/347.html
%% {SSHA} layout: base64(SHA1(password ++ salt) ++ salt). A SHA-1 digest
%% is exactly 20 bytes, so everything after the first 20 is the salt.
verify_password(ssha, PasswordData, Password, Entry, State) ->
    case safe_base64_decode(PasswordData) of
        {ok, <<PasswordHash:20/binary, Salt/binary>>} ->
            verify_password(sha, hash, PasswordHash, Salt, suffix, Password, Entry, State);
        {ok, _} ->
            %% decoded but shorter than a SHA-1 digest: malformed value
            {error, invalid_ssha_password};
        {error, Reason} ->
            {error, {invalid_password, Reason}}
    end;
%% All other schemes: unsalted; the stored value is the base64 text of
%% the digest, compared in encoded form.
verify_password(
    Algorithm,
    Base64HashData,
    Password,
    Entry,
    State
) ->
    verify_password(Algorithm, base64, Base64HashData, <<>>, disable, Password, Entry, State).
|
||||
|
||||
%% Hash the client-supplied password with the same algorithm/salt layout
%% as the stored value and compare. On success returns the final
%% authentication payload {ok, #{is_superuser := _}}.
verify_password(Algorithm, LDAPPasswordType, LDAPPassword, Salt, Position, Password, Entry, State) ->
    PasswordHash = hash_password(Algorithm, Salt, Position, Password),
    case compare_password(LDAPPasswordType, LDAPPassword, PasswordHash) of
        true ->
            {ok, is_superuser(Entry, State)};
        _ ->
            {error, invalid_password}
    end.
|
||||
|
||||
%% Read the configured superuser attribute (defaulting to "false") and
%% normalize it into the boolean `is_superuser' flag of the auth result.
is_superuser(Entry, #{is_superuser_attribute := Attr} = _State) ->
    Value = get_lower_bin_value(Attr, Entry#eldap_entry.attributes, "false"),
    #{is_superuser => emqx_authn_utils:to_bool(Value)}.
|
||||
|
||||
%% Decode base64 without letting a malformed value crash the caller:
%% returns {ok, Binary} or {error, {invalid_base64_data, Reason}}.
safe_base64_decode(Data) ->
    try base64:decode(Data) of
        Decoded -> {ok, Decoded}
    catch
        _:Reason -> {error, {invalid_base64_data, Reason}}
    end.
|
||||
|
||||
%% Fetch the first value of Key from the attribute proplist (falling
%% back to Default), lower-case it and return it as a binary.
get_lower_bin_value(Key, Proplists, Default) ->
    [Value | _] = get_value(Key, Proplists, [Default]),
    to_binary(string:to_lower(Value)).
|
||||
|
||||
%% Convert a charlist to a binary.
to_binary(Chars) ->
    erlang:list_to_binary(Chars).
|
||||
|
||||
%% Hash the candidate password, optionally mixing in the salt:
%% `disable' uses no salt; `suffix' appends the salt to the password
%% before hashing (the {SSHA} layout).
hash_password(Algorithm, _Salt, disable, Password) ->
    hash_password(Algorithm, Password);
hash_password(Algorithm, Salt, suffix, Password) ->
    hash_password(Algorithm, <<Password/binary, Salt/binary>>).

%% One-shot digest of Data with the given crypto algorithm.
hash_password(Algorithm, Data) ->
    crypto:hash(Algorithm, Data).
|
||||
|
||||
%% Timing-safe comparison of the stored value with the computed hash.
%% `hash': both sides are raw digests; `base64': the stored value is the
%% base64 text, so encode our digest before comparing.
compare_password(hash, LDAPPasswordHash, PasswordHash) ->
    emqx_passwd:compare_secure(LDAPPasswordHash, PasswordHash);
compare_password(base64, Base64HashData, PasswordHash) ->
    emqx_passwd:compare_secure(Base64HashData, base64:encode(PasswordHash)).
|
|
@ -0,0 +1,164 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_ldap_authz).
|
||||
|
||||
-include_lib("emqx_authz/include/emqx_authz.hrl").
|
||||
-include_lib("emqx/include/emqx.hrl").
|
||||
-include_lib("hocon/include/hoconsc.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
-include_lib("emqx/include/emqx_placeholder.hrl").
|
||||
-include_lib("eldap/include/eldap.hrl").
|
||||
|
||||
-behaviour(emqx_authz).
|
||||
|
||||
-define(PREPARE_KEY, ?MODULE).
|
||||
|
||||
%% AuthZ Callbacks
|
||||
-export([
|
||||
description/0,
|
||||
create/1,
|
||||
update/1,
|
||||
destroy/1,
|
||||
authorize/4
|
||||
]).
|
||||
|
||||
-export([fields/1]).
|
||||
|
||||
-ifdef(TEST).
|
||||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
-endif.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
%% Schema of the `ldap' authorization source: the common authz fields,
%% the three LDAP attributes listing permitted topics per action, the
%% query timeout, and the shared LDAP connector options.
fields(config) ->
    emqx_authz_schema:authz_common_fields(ldap) ++
        [
            {publish_attribute, attribute_meta(publish_attribute, <<"mqttPublishTopic">>)},
            {subscribe_attribute, attribute_meta(subscribe_attribute, <<"mqttSubscriptionTopic">>)},
            {all_attribute, attribute_meta(all_attribute, <<"mqttPubSubTopic">>)},
            {query_timeout,
                ?HOCON(
                    emqx_schema:timeout_duration_ms(),
                    #{
                        desc => ?DESC(query_timeout),
                        default => <<"5s">>
                    }
                )}
        ] ++
        emqx_ldap:fields(config).
|
||||
|
||||
%% Build the schema entry for one topic-attribute field with its
%% default LDAP attribute name.
attribute_meta(Name, Default) ->
    ?HOCON(
        string(),
        #{
            default => Default,
            desc => ?DESC(Name)
        }
    ).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% AuthZ Callbacks
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
%% Human-readable description shown for this authorization backend.
description() ->
    "AuthZ with LDAP".
|
||||
|
||||
%% Allocate the LDAP resource for this source and cache the normalized
%% attribute names in the source's annotations.
create(Source) ->
    ResourceId = emqx_authz_utils:make_resource_id(?MODULE),
    {ok, _Data} = emqx_authz_utils:create_resource(ResourceId, emqx_ldap, Source),
    Annotations = new_annotations(#{id => ResourceId}, Source),
    Source#{annotations => Annotations}.
|
||||
|
||||
%% Update the LDAP resource in place; crash with {load_config_error, _}
%% when the new configuration is rejected.
update(Source) ->
    case emqx_authz_utils:update_resource(emqx_ldap, Source) of
        {error, Reason} ->
            error({load_config_error, Reason});
        {ok, Id} ->
            Annotations = new_annotations(#{id => Id}, Source),
            Source#{annotations => Annotations}
    end.
|
||||
|
||||
%% Remove the LDAP resource backing this source; asserts success.
destroy(#{annotations := #{id := Id}}) ->
    ok = emqx_resource:remove_local(Id).
|
||||
|
||||
%% emqx_authz callback: look the client up in LDAP and check whether the
%% requested topic matches one of the topics listed in the attributes
%% relevant for the action. Returns {matched, allow} or `nomatch' —
%% which also covers "user not found" and query errors, deferring the
%% decision to the next authorization source.
authorize(
    Client,
    Action,
    Topic,
    #{
        query_timeout := QueryTimeout,
        annotations := #{id := ResourceID} = Annotations
    }
) ->
    Attrs = select_attrs(Action, Annotations),
    case emqx_resource:simple_sync_query(ResourceID, {query, Client, Attrs, QueryTimeout}) of
        {ok, []} ->
            nomatch;
        {ok, [Entry | _]} ->
            %% only the first returned LDAP entry is considered
            do_authorize(Action, Topic, Attrs, Entry);
        {error, Reason} ->
            ?SLOG(error, #{
                msg => "query_ldap_error",
                reason => Reason,
                resource_id => ResourceID
            }),
            nomatch
    end.
|
||||
|
||||
%% Scan the candidate attributes in order; the first one whose topic
%% list matches the requested topic grants access.
do_authorize(_Action, _Topic, [], _Entry) ->
    nomatch;
do_authorize(Action, Topic, [Attr | Rest], Entry) ->
    Topics = proplists:get_value(Attr, Entry#eldap_entry.attributes, []),
    case match_topic(Topic, Topics) of
        true -> {matched, allow};
        false -> do_authorize(Action, Topic, Rest, Entry)
    end.
|
||||
|
||||
%% Copy the three topic-attribute names from the source config into the
%% annotations map, converting binaries to charlists (presumably to
%% match eldap's charlist attribute names — confirm against the query).
%% Crashes (badkey) if any of the three keys is missing from Source.
new_annotations(Init, Source) ->
    Normalize = fun
        (Bin) when is_binary(Bin) -> erlang:binary_to_list(Bin);
        (Other) -> Other
    end,
    lists:foldl(
        fun(Attr, Acc) -> Acc#{Attr => Normalize(maps:get(Attr, Source))} end,
        Init,
        [publish_attribute, subscribe_attribute, all_attribute]
    ).
|
||||
|
||||
%% Pick which LDAP attributes to consult for the given action: publish
%% actions use the publish-topic attribute, everything else the
%% subscribe one; the "all" attribute applies in both cases.
select_attrs(#{action_type := publish}, #{publish_attribute := Publish, all_attribute := All}) ->
    [Publish, All];
select_attrs(_Action, #{subscribe_attribute := Subscribe, all_attribute := All}) ->
    [Subscribe, All].
|
||||
|
||||
%% True when the requested topic matches any of the topic filters stored
%% in LDAP. Values come back from eldap as charlists; emqx_topic:match/2
%% is given binaries, hence the per-value conversion.
match_topic(Target, Topics) ->
    lists:any(
        fun(Topic) ->
            emqx_topic:match(Target, erlang:list_to_binary(Topic))
        end,
        Topics
    ).
|
|
@ -0,0 +1,31 @@
|
|||
Definitions.

%% Characters with syntactic meaning in an LDAP filter string.
Control = [()&|!=~><:*]
White = [\s\t\n\r]+
%% Any run of characters that is neither an operator nor whitespace.
NonString = [^()&|!=~><:*\s\t\n\r]
String = {NonString}+

Rules.

\( : {token, {lparen, TokenLine}}.
\) : {token, {rparen, TokenLine}}.
\& : {token, {'and', TokenLine}}.
\| : {token, {'or', TokenLine}}.
\! : {token, {'not', TokenLine}}.
= : {token, {equal, TokenLine}}.
~= : {token, {approx, TokenLine}}.
>= : {token, {greaterOrEqual, TokenLine}}.
<= : {token, {lessOrEqual, TokenLine}}.
\* : {token, {asterisk, TokenLine}}.
\: : {token, {colon, TokenLine}}.
dn : {token, {dn, TokenLine}}.
{White} : skip_token.
{String} : {token, {string, TokenLine, TokenChars}}.
%% Leex will hang if a composite operation is missing a character
{Control} : {error, lists:flatten(io_lib:format("Unexpected Tokens:~ts", [TokenChars]))}.

Erlang code.

%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------
|
|
@ -0,0 +1,149 @@
|
|||
Header "%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------".

%% Grammar for LDAP search filter strings in the RFC 4515 string
%% representation; semantic actions delegate to the eldap filter
%% constructors (see the Erlang code section).

Nonterminals
filter filtercomp filterlist item simple present substring initial any final extensible attr value type dnattrs matchingrule.

Terminals
lparen rparen 'and' 'or' 'not' equal approx greaterOrEqual lessOrEqual asterisk colon dn string.

Rootsymbol filter.
Left 100 present.
Left 500 substring.

%% A filter is always parenthesized: "(<filtercomp>)".
filter ->
    lparen filtercomp rparen : '$2'.

%% Composite filters: &(..)(..), |(..)(..), !(..), or a bare item.
filtercomp ->
    'and' filterlist: 'and'('$2').
filtercomp ->
    'or' filterlist: 'or'('$2').
filtercomp ->
    'not' filter: 'not'('$2').
filtercomp ->
    item: '$1'.

filterlist ->
    filter: ['$1'].
filterlist ->
    filter filterlist: ['$1' | '$2'].

item ->
    simple: '$1'.
item ->
    present: '$1'.
item ->
    substring: '$1'.
item->
    extensible: '$1'.

%% Simple comparisons: =, ~=, >=, <=.
simple ->
    attr equal value: equal('$1', '$3').
simple ->
    attr approx value: approx('$1', '$3').
simple ->
    attr greaterOrEqual value: greaterOrEqual('$1', '$3').
simple ->
    attr lessOrEqual value: lessOrEqual('$1', '$3').

%% Presence test: "(attr=*)".
present ->
    attr equal asterisk: present('$1').

%% Substring match: initial*any*...*final, each part optional.
substring ->
    attr equal initial asterisk any final: substrings('$1', ['$3', '$5', '$6']).
substring ->
    attr equal asterisk any final: substrings('$1', ['$4', '$5']).
substring ->
    attr equal initial asterisk any: substrings('$1', ['$3', '$5']).
substring ->
    attr equal asterisk any: substrings('$1', ['$4']).

initial ->
    value: {initial, '$1'}.

final ->
    value: {final, '$1'}.

%% Zero or more "*"-separated middle parts.
any -> any value asterisk: 'any'('$1', '$2').
any -> '$empty': [].

%% Extensible match: [type] [":dn"] [":rule"] ":=" value.
extensible ->
    type dnattrs matchingrule colon equal value : extensible('$6', ['$1', '$2', '$3']).
extensible ->
    type dnattrs colon equal value: extensible('$5', ['$1', '$2']).
extensible ->
    type matchingrule colon equal value: extensible('$5', ['$1', '$2']).
extensible ->
    type colon equal value: extensible('$4', ['$1']).

extensible ->
    dnattrs matchingrule colon equal value: extensible('$5', ['$1', '$2']).
extensible ->
    matchingrule colon equal value: extensible('$4', ['$1']).

attr ->
    string: get_value('$1').

value ->
    string: get_value('$1').

type ->
    value: {type, '$1'}.

dnattrs ->
    colon dn: {dnAttributes, true}.

matchingrule ->
    colon value: {matchingRule, '$2'}.
|
||||
|
||||
Erlang code.
|
||||
-export([scan_and_parse/1]).
|
||||
-ignore_xref({return_error, 2}).
|
||||
|
||||
%% Thin wrappers over the eldap filter constructors, referenced from the
%% grammar's semantic actions; quoted names mirror the eldap API.

'and'(Value) ->
    eldap:'and'(Value).

'or'(Value) ->
    eldap:'or'(Value).

'not'(Value) ->
    eldap:'not'(Value).

equal(Attr, Value) ->
    eldap:equalityMatch(Attr, Value).

approx(Attr, Value) ->
    eldap:approxMatch(Attr, Value).

greaterOrEqual(Attr, Value) ->
    eldap:greaterOrEqual(Attr, Value).

lessOrEqual(Attr, Value) ->
    eldap:lessOrEqual(Attr, Value).

present(Value) ->
    eldap:present(Value).

%% The `any' parts accumulate as a nested list during parsing; flatten
%% before handing the substring part list to eldap.
substrings(Attr, List) ->
    eldap:substrings(Attr, flatten(List)).

'any'(List, Item) ->
    [List, {any, Item}].

extensible(Value, Opts) -> eldap:extensibleMatch(Value, Opts).

flatten(List) -> lists:flatten(List).

%% Extract the character payload from a {Category, Line, Chars} token.
get_value({_Token, _Line, Value}) ->
    Value.
|
||||
|
||||
%% Public entry point: tokenize an LDAP filter string (binary or
%% charlist) and parse it into an eldap filter term. Returns the
%% generated parser's {ok, Filter} / {error, Reason}; lexer errors are
%% rewrapped as {error, Reason}.
scan_and_parse(Bin) when is_binary(Bin) ->
    scan_and_parse(erlang:binary_to_list(Bin));
scan_and_parse(String) ->
    case emqx_ldap_filter_lexer:string(String) of
        {ok, Tokens, _} ->
            %% parse/1 is generated by yecc for this module
            parse(Tokens);
        {error, Reason, _} ->
            {error, Reason}
    end.
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue