Merge remote-tracking branch 'upstream/master' into cassa
commit 12942b676d
@@ -10,32 +10,34 @@ services:
networks:
emqx_bridge:
ssl_cert_gen:
image: fredrikhgrelland/alpine-jdk11-openssl
# see https://github.com/emqx/docker-images
image: ghcr.io/emqx/certgen:latest
container_name: ssl_cert_gen
user: "${DOCKER_USER:-root}"
volumes:
- emqx-shared-secret:/var/lib/secret
- ./kafka/generate-certs.sh:/bin/generate-certs.sh
entrypoint: /bin/sh
command: /bin/generate-certs.sh
- /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
kdc:
hostname: kdc.emqx.net
image: ghcr.io/emqx/emqx-builder/5.0-28:1.13.4-24.3.4.2-2-ubuntu20.04
container_name: kdc.emqx.net
expose:
- 88 # kdc
- 749 # admin server
# ports:
# - 88:88
# - 749:749
networks:
emqx_bridge:
volumes:
- emqx-shared-secret:/var/lib/secret
- /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
- ./kerberos/krb5.conf:/etc/kdc/krb5.conf
- ./kerberos/krb5.conf:/etc/krb5.conf
- ./kerberos/run.sh:/usr/bin/run.sh
command: run.sh
kafka_1:
image: wurstmeister/kafka:2.13-2.7.0
ports:
- "9092:9092"
- "9093:9093"
- "9094:9094"
- "9095:9095"
image: wurstmeister/kafka:2.13-2.8.1
# ports:
# - "9192-9195:9192-9195"
container_name: kafka-1.emqx.net
hostname: kafka-1.emqx.net
depends_on:
@@ -48,9 +50,9 @@ services:
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_LISTENERS: PLAINTEXT://:9092,SASL_PLAINTEXT://:9093,SSL://:9094,SASL_SSL://:9095
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-1.emqx.net:9092,SASL_PLAINTEXT://kafka-1.emqx.net:9093,SSL://kafka-1.emqx.net:9094,SASL_SSL://kafka-1.emqx.net:9095
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,SSL:SSL,SASL_SSL:SASL_SSL
KAFKA_LISTENERS: PLAINTEXT://:9092,SASL_PLAINTEXT://:9093,SSL://:9094,SASL_SSL://:9095,LOCAL_PLAINTEXT://:9192,LOCAL_SASL_PLAINTEXT://:9193,LOCAL_SSL://:9194,LOCAL_SASL_SSL://:9195,TOXIPROXY_PLAINTEXT://:9292,TOXIPROXY_SASL_PLAINTEXT://:9293,TOXIPROXY_SSL://:9294,TOXIPROXY_SASL_SSL://:9295
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-1.emqx.net:9092,SASL_PLAINTEXT://kafka-1.emqx.net:9093,SSL://kafka-1.emqx.net:9094,SASL_SSL://kafka-1.emqx.net:9095,LOCAL_PLAINTEXT://localhost:9192,LOCAL_SASL_PLAINTEXT://localhost:9193,LOCAL_SSL://localhost:9194,LOCAL_SASL_SSL://localhost:9195,TOXIPROXY_PLAINTEXT://toxiproxy.emqx.net:9292,TOXIPROXY_SASL_PLAINTEXT://toxiproxy.emqx.net:9293,TOXIPROXY_SSL://toxiproxy.emqx.net:9294,TOXIPROXY_SASL_SSL://toxiproxy.emqx.net:9295
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,SSL:SSL,SASL_SSL:SASL_SSL,LOCAL_PLAINTEXT:PLAINTEXT,LOCAL_SASL_PLAINTEXT:SASL_PLAINTEXT,LOCAL_SSL:SSL,LOCAL_SASL_SSL:SASL_SSL,TOXIPROXY_PLAINTEXT:PLAINTEXT,TOXIPROXY_SASL_PLAINTEXT:SASL_PLAINTEXT,TOXIPROXY_SSL:SSL,TOXIPROXY_SASL_SSL:SASL_SSL
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_SASL_ENABLED_MECHANISMS: PLAIN,SCRAM-SHA-256,SCRAM-SHA-512,GSSAPI
KAFKA_SASL_KERBEROS_SERVICE_NAME: kafka

@@ -58,6 +60,7 @@ services:
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/jaas.conf"
KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
KAFKA_CREATE_TOPICS_NG: test-topic-one-partition:1:1,test-topic-two-partitions:2:1,test-topic-three-partitions:3:1,
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.auth.SimpleAclAuthorizer
KAFKA_SSL_TRUSTSTORE_LOCATION: /var/lib/secret/kafka.truststore.jks
KAFKA_SSL_TRUSTSTORE_PASSWORD: password

@@ -67,7 +70,7 @@ services:
networks:
emqx_bridge:
volumes:
- emqx-shared-secret:/var/lib/secret
- /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
- ./kafka/jaas.conf:/etc/kafka/jaas.conf
- ./kafka/kafka-entrypoint.sh:/bin/kafka-entrypoint.sh
- ./kerberos/krb5.conf:/etc/kdc/krb5.conf
@@ -6,7 +6,10 @@ services:
image: ghcr.io/shopify/toxiproxy:2.5.0
restart: always
networks:
- emqx_bridge
emqx_bridge:
aliases:
- toxiproxy
- toxiproxy.emqx.net
volumes:
- "./toxiproxy.json:/config/toxiproxy.json"
ports:

@@ -18,12 +18,12 @@ services:
- emqx_bridge
volumes:
- ../..:/emqx
- emqx-shared-secret:/var/lib/secret
- /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
- ./kerberos/krb5.conf:/etc/kdc/krb5.conf
- ./kerberos/krb5.conf:/etc/krb5.conf
working_dir: /emqx
tty: true
user: "${UID_GID}"
user: "${DOCKER_USER:-root}"

networks:
emqx_bridge:

@@ -37,6 +37,3 @@ networks:
gateway: 172.100.239.1
- subnet: 2001:3200:3200::/64
gateway: 2001:3200:3200::1

volumes: # add this section
emqx-shared-secret: # does not need anything underneath this
@@ -1,46 +0,0 @@
#!/usr/bin/bash

set -euo pipefail

set -x

# Source https://github.com/zmstone/docker-kafka/blob/master/generate-certs.sh

HOST="*."
DAYS=3650
PASS="password"

cd /var/lib/secret/

# Delete old files
(rm ca.key ca.crt server.key server.csr server.crt client.key client.csr client.crt server.p12 kafka.keystore.jks kafka.truststore.jks 2>/dev/null || true)

ls

echo '== Generate self-signed server and client certificates'
echo '= generate CA'
openssl req -new -x509 -keyout ca.key -out ca.crt -days $DAYS -nodes -subj "/C=SE/ST=Stockholm/L=Stockholm/O=brod/OU=test/CN=$HOST"

echo '= generate server certificate request'
openssl req -newkey rsa:2048 -sha256 -keyout server.key -out server.csr -days "$DAYS" -nodes -subj "/C=SE/ST=Stockholm/L=Stockholm/O=brod/OU=test/CN=$HOST"

echo '= sign server certificate'
openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days "$DAYS" -CAcreateserial

echo '= generate client certificate request'
openssl req -newkey rsa:2048 -sha256 -keyout client.key -out client.csr -days "$DAYS" -nodes -subj "/C=SE/ST=Stockholm/L=Stockholm/O=brod/OU=test/CN=$HOST"

echo '== sign client certificate'
openssl x509 -req -CA ca.crt -CAkey ca.key -in client.csr -out client.crt -days $DAYS -CAserial ca.srl

echo '= Convert self-signed certificate to PKCS#12 format'
openssl pkcs12 -export -name "$HOST" -in server.crt -inkey server.key -out server.p12 -CAfile ca.crt -passout pass:"$PASS"

echo '= Import PKCS#12 into a java keystore'

echo $PASS | keytool -importkeystore -destkeystore kafka.keystore.jks -srckeystore server.p12 -srcstoretype pkcs12 -alias "$HOST" -storepass "$PASS"


echo '= Import CA into java truststore'

echo yes | keytool -keystore kafka.truststore.jks -alias CARoot -import -file ca.crt -storepass "$PASS"
@@ -17,6 +17,7 @@ timeout $TIMEOUT bash -c 'until [ -f /var/lib/secret/kafka.keytab ]; do sleep 1;
echo "+++++++ Wait until SSL certs are generated ++++++++"

timeout $TIMEOUT bash -c 'until [ -f /var/lib/secret/kafka.truststore.jks ]; do sleep 1; done'
keytool -list -v -keystore /var/lib/secret/kafka.keystore.jks -storepass password

sleep 3
@@ -54,6 +54,30 @@
"upstream": "dynamo:8000",
"enabled": true
},
{
"name": "kafka_plain",
"listen": "0.0.0.0:9292",
"upstream": "kafka-1.emqx.net:9292",
"enabled": true
},
{
"name": "kafka_sasl_plain",
"listen": "0.0.0.0:9293",
"upstream": "kafka-1.emqx.net:9293",
"enabled": true
},
{
"name": "kafka_ssl",
"listen": "0.0.0.0:9294",
"upstream": "kafka-1.emqx.net:9294",
"enabled": true
},
{
"name": "kafka_sasl_ssl",
"listen": "0.0.0.0:9295",
"upstream": "kafka-1.emqx.net:9295",
"enabled": true
},
{
"name": "cassa_tcp",
"listen": "0.0.0.0:9042",
@@ -1,7 +0,0 @@
Fixes <issue-or-jira-number>

## PR Checklist
Please convert it to a draft if any of the following conditions are not met. Reviewers may skip over until all the items are checked:

- [ ] If changed package build ci, pass [this action](https://github.com/emqx/emqx/actions/workflows/build_packages.yaml) (manual trigger)
- [ ] Change log has been added to `changes/` dir for user-facing artifacts update

@@ -1 +0,0 @@
Fixes <issue-or-jira-number>

@@ -1,12 +0,0 @@
Fixes <issue-or-jira-number>

## PR Checklist
Please convert it to a draft if any of the following conditions are not met. Reviewers may skip over until all the items are checked:

- [ ] Added tests for the changes
- [ ] Changed lines covered in coverage report
- [ ] Change log has been added to `changes/` dir
- [ ] `appup` files updated (execute `scripts/update-appup.sh emqx`)
- [ ] For internal contributor: there is a jira ticket to track this change
- [ ] If there should be document changes, a PR to emqx-docs.git is sent, or a jira ticket is created to follow up
- [ ] In case of non-backward compatible changes, reviewer should check this item as a write-off, and add details in **Backward Compatibility** section

@@ -9,3 +9,8 @@ Please convert it to a draft if any of the following conditions are not met. Rev
- [ ] For internal contributor: there is a jira ticket to track this change
- [ ] If there should be document changes, a PR to emqx-docs.git is sent, or a jira ticket is created to follow up
- [ ] Schema changes are backward compatible

## Checklist for CI (.github/workflows) changes

- [ ] If changed package build workflow, pass [this action](https://github.com/emqx/emqx/actions/workflows/build_packages.yaml) (manual trigger)
- [ ] Change log has been added to `changes/` dir for user-facing artifacts update
@ -4,7 +4,7 @@ on: [pull_request]
|
|||
|
||||
jobs:
|
||||
check_apps_version:
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
|
|
@ -25,7 +25,7 @@ jobs:
|
|||
prepare:
|
||||
runs-on: ubuntu-22.04
|
||||
# prepare source with any OTP version, no need for a matrix
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-24.3.4.2-2-ubuntu22.04"
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-24.3.4.2-2-ubuntu22.04"
|
||||
|
||||
outputs:
|
||||
PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
|
||||
|
@ -121,7 +121,7 @@ jobs:
|
|||
# NOTE: 'otp' and 'elixir' are to configure emqx-builder image
|
||||
# only support latest otp and elixir, not a matrix
|
||||
builder:
|
||||
- 5.0-29 # update to latest
|
||||
- 5.0-32 # update to latest
|
||||
otp:
|
||||
- 24.3.4.2-2 # switch to 25 once ready to release 5.1
|
||||
elixir:
|
||||
|
|
|
@ -24,7 +24,7 @@ jobs:
|
|||
prepare:
|
||||
runs-on: ubuntu-22.04
|
||||
if: (github.repository_owner == 'emqx' && github.event_name == 'schedule') || github.event_name != 'schedule'
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-24.3.4.2-2-ubuntu22.04
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-24.3.4.2-2-ubuntu22.04
|
||||
outputs:
|
||||
BUILD_PROFILE: ${{ steps.get_profile.outputs.BUILD_PROFILE }}
|
||||
IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }}
|
||||
|
@ -213,8 +213,6 @@ jobs:
|
|||
- ubuntu18.04
|
||||
- debian11
|
||||
- debian10
|
||||
- raspbian10
|
||||
- raspbian9
|
||||
- el9
|
||||
- el8
|
||||
- el7
|
||||
|
@ -223,7 +221,7 @@ jobs:
|
|||
- aws-arm64
|
||||
- ubuntu-22.04
|
||||
builder:
|
||||
- 5.0-29
|
||||
- 5.0-32
|
||||
elixir:
|
||||
- 1.13.4
|
||||
exclude:
|
||||
|
@ -231,17 +229,13 @@ jobs:
|
|||
build_machine: ubuntu-22.04
|
||||
- arch: amd64
|
||||
build_machine: aws-arm64
|
||||
- arch: amd64
|
||||
os: raspbian9
|
||||
- arch: amd64
|
||||
os: raspbian10
|
||||
include:
|
||||
- profile: emqx
|
||||
otp: 25.1.2-2
|
||||
arch: amd64
|
||||
os: ubuntu22.04
|
||||
build_machine: ubuntu-22.04
|
||||
builder: 5.0-29
|
||||
builder: 5.0-32
|
||||
elixir: 1.13.4
|
||||
release_with: elixir
|
||||
- profile: emqx
|
||||
|
@ -249,7 +243,7 @@ jobs:
|
|||
arch: amd64
|
||||
os: amzn2
|
||||
build_machine: ubuntu-22.04
|
||||
builder: 5.0-29
|
||||
builder: 5.0-32
|
||||
elixir: 1.13.4
|
||||
release_with: elixir
|
||||
|
||||
|
@ -266,6 +260,11 @@ jobs:
|
|||
path: .
|
||||
- name: unzip source code
|
||||
run: unzip -q source.zip
|
||||
- name: tmp fix for el9
|
||||
if: matrix.os == 'el9'
|
||||
run: |
|
||||
set -eu
|
||||
dnf install -y krb5-devel
|
||||
- name: build emqx packages
|
||||
working-directory: source
|
||||
env:
|
||||
|
@ -340,7 +339,7 @@ jobs:
|
|||
echo "$(cat $var.sha256) $var" | sha256sum -c || exit 1
|
||||
done
|
||||
cd -
|
||||
- uses: aws-actions/configure-aws-credentials@v1-node16
|
||||
- uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
@ -378,8 +377,6 @@ jobs:
|
|||
push "debian/buster" "packages/$PROFILE/$PROFILE-$VERSION-debian10-arm64.deb"
|
||||
push "debian/bullseye" "packages/$PROFILE/$PROFILE-$VERSION-debian11-amd64.deb"
|
||||
push "debian/bullseye" "packages/$PROFILE/$PROFILE-$VERSION-debian11-arm64.deb"
|
||||
push "raspbian/stretch" "packages/$PROFILE/$PROFILE-$VERSION-raspbian9-arm64.deb"
|
||||
push "raspbian/buster" "packages/$PROFILE/$PROFILE-$VERSION-raspbian10-arm64.deb"
|
||||
push "ubuntu/bionic" "packages/$PROFILE/$PROFILE-$VERSION-ubuntu18.04-amd64.deb"
|
||||
push "ubuntu/bionic" "packages/$PROFILE/$PROFILE-$VERSION-ubuntu18.04-arm64.deb"
|
||||
push "ubuntu/focal" "packages/$PROFILE/$PROFILE-$VERSION-ubuntu20.04-amd64.deb"
|
||||
|
|
|
@ -35,7 +35,7 @@ jobs:
|
|||
- ["emqx-enterprise", "24.3.4.2-2", "amzn2", "erlang"]
|
||||
- ["emqx-enterprise", "25.1.2-2", "ubuntu20.04", "erlang"]
|
||||
builder:
|
||||
- 5.0-29
|
||||
- 5.0-32
|
||||
elixir:
|
||||
- '1.13.4'
|
||||
|
||||
|
@ -82,7 +82,7 @@ jobs:
|
|||
name: "${{ matrix.profile[0] }}_schema_dump"
|
||||
path: |
|
||||
scripts/spellcheck
|
||||
_build/docgen/${{ matrix.profile[0] }}/schema.json
|
||||
_build/docgen/${{ matrix.profile[0] }}/schema-en.json
|
||||
|
||||
windows:
|
||||
runs-on: windows-2019
|
||||
|
@ -218,4 +218,4 @@ jobs:
|
|||
path: /tmp/
|
||||
- name: Run spellcheck
|
||||
run: |
|
||||
bash /tmp/scripts/spellcheck/spellcheck.sh /tmp/_build/docgen/${{ matrix.profile }}/schema.json
|
||||
bash /tmp/scripts/spellcheck/spellcheck.sh /tmp/_build/docgen/${{ matrix.profile }}/schema-en.json
|
||||
|
|
|
@ -5,8 +5,8 @@ on:
|
|||
|
||||
jobs:
|
||||
check_deps_integrity:
|
||||
runs-on: ubuntu-latest
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-25.1.2-2-ubuntu22.04
|
||||
runs-on: ubuntu-22.04
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-25.1.2-2-ubuntu22.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
|
|
@ -5,7 +5,7 @@ on: [pull_request]
|
|||
jobs:
|
||||
code_style_check:
|
||||
runs-on: ubuntu-22.04
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-25.1.2-2-ubuntu22.04"
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-25.1.2-2-ubuntu22.04"
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
|
|
|
@ -7,9 +7,9 @@ on:
|
|||
|
||||
jobs:
|
||||
elixir_apps_check:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
# just use the latest builder
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-25.1.2-2-ubuntu22.04"
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-25.1.2-2-ubuntu22.04"
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
|
|
@ -7,8 +7,8 @@ on:
|
|||
|
||||
jobs:
|
||||
elixir_deps_check:
|
||||
runs-on: ubuntu-latest
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-25.1.2-2-ubuntu22.04
|
||||
runs-on: ubuntu-22.04
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-25.1.2-2-ubuntu22.04
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
|
|
|
@ -11,13 +11,13 @@ on:
|
|||
|
||||
jobs:
|
||||
elixir_release_build:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
strategy:
|
||||
matrix:
|
||||
profile:
|
||||
- emqx
|
||||
- emqx-enterprise
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-25.1.2-2-ubuntu22.04
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-25.1.2-2-ubuntu22.04
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
|
|
@ -19,7 +19,7 @@ jobs:
|
|||
strategy:
|
||||
fail-fast: false
|
||||
steps:
|
||||
- uses: aws-actions/configure-aws-credentials@v1-node16
|
||||
- uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
@ -69,9 +69,9 @@ jobs:
|
|||
with:
|
||||
asset_paths: '["packages/*"]'
|
||||
- name: update to emqx.io
|
||||
if: github.event_name == 'release' || inputs.publish_release_artefacts
|
||||
if: startsWith(github.ref_name, 'v') && (github.event_name == 'release' || inputs.publish_release_artefacts)
|
||||
run: |
|
||||
set -e -x -u
|
||||
set -eux
|
||||
curl -w %{http_code} \
|
||||
--insecure \
|
||||
-H "Content-Type: application/json" \
|
||||
|
|
|
@ -12,7 +12,7 @@ jobs:
|
|||
strategy:
|
||||
matrix:
|
||||
builder:
|
||||
- 5.0-29
|
||||
- 5.0-32
|
||||
otp:
|
||||
- 24.3.4.2-2
|
||||
- 25.1.2-2
|
||||
|
|
|
@ -15,9 +15,9 @@ on:
|
|||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
# prepare source with any OTP version, no need for a matrix
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-24.3.4.2-2-debian11
|
||||
container: ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-24.3.4.2-2-debian11
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
@ -50,7 +50,7 @@ jobs:
|
|||
os:
|
||||
- ["debian11", "debian:11-slim"]
|
||||
builder:
|
||||
- 5.0-29
|
||||
- 5.0-32
|
||||
otp:
|
||||
- 24.3.4.2-2
|
||||
elixir:
|
||||
|
@ -123,7 +123,7 @@ jobs:
|
|||
os:
|
||||
- ["debian11", "debian:11-slim"]
|
||||
builder:
|
||||
- 5.0-29
|
||||
- 5.0-32
|
||||
otp:
|
||||
- 24.3.4.2-2
|
||||
elixir:
|
||||
|
|
|
@ -4,41 +4,13 @@ on: [pull_request]
|
|||
|
||||
jobs:
|
||||
run_gitlint:
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v3
|
||||
- name: Install gitlint
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt install gitlint
|
||||
- name: Set auth header
|
||||
if: endsWith(github.repository, 'enterprise')
|
||||
run: |
|
||||
echo 'AUTH_HEADER<<EOF' >> $GITHUB_ENV
|
||||
echo "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" >> $GITHUB_ENV
|
||||
echo 'EOF' >> $GITHUB_ENV
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Run gitlint
|
||||
shell: bash
|
||||
run: |
|
||||
pr_number=$(echo $GITHUB_REF | awk 'BEGIN { FS = "/" } ; { print $3 }')
|
||||
messages="$(curl --silent --show-error \
|
||||
--header "${{ env.AUTH_HEADER }}" \
|
||||
--header "Accept: application/vnd.github.v3+json" \
|
||||
"https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${pr_number}/commits")"
|
||||
len=$(echo $messages | jq length)
|
||||
result=true
|
||||
for i in $( seq 0 $(($len - 1)) ); do
|
||||
message=$(echo $messages | jq -r .[$i].commit.message)
|
||||
echo "commit message: $message"
|
||||
status=0
|
||||
echo $message | gitlint -C ./.github/workflows/.gitlint || status=$?
|
||||
if [ $status -ne 0 ]; then
|
||||
result=false
|
||||
fi
|
||||
done
|
||||
if ! ${result} ; then
|
||||
echo "Some of the commit messages are not structured as The Conventional Commits specification. Please check CONTRIBUTING.md for our process on PR."
|
||||
exit 1
|
||||
fi
|
||||
echo "success"
|
||||
set -ex
|
||||
docker run --ulimit nofile=1024 -v $(pwd):/repo -w /repo ghcr.io/emqx/gitlint --commits ${{ github.event.pull_request.base.sha }}..$GITHUB_SHA --config .github/workflows/.gitlint
|
||||
|
|
|
@ -10,7 +10,7 @@ on:
|
|||
|
||||
jobs:
|
||||
build_emqx_for_jmeter_tests:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
version: ${{ steps.build_docker.outputs.version}}
|
||||
steps:
|
||||
|
@ -44,7 +44,7 @@ jobs:
|
|||
path: ./emqx.tar
|
||||
|
||||
advanced_feat:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
@ -136,7 +136,7 @@ jobs:
|
|||
path: ./jmeter_logs
|
||||
|
||||
pgsql_authn_authz:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
@ -245,7 +245,7 @@ jobs:
|
|||
path: ./jmeter_logs
|
||||
|
||||
mysql_authn_authz:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
@ -351,7 +351,7 @@ jobs:
|
|||
path: ./jmeter_logs
|
||||
|
||||
JWT_authn:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
@ -449,7 +449,7 @@ jobs:
|
|||
path: ./jmeter_logs
|
||||
|
||||
built_in_database_authn_authz:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
|
@ -541,7 +541,7 @@ jobs:
|
|||
path: ./jmeter_logs
|
||||
|
||||
delete-artifact:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [advanced_feat,pgsql_authn_authz,JWT_authn,mysql_authn_authz,built_in_database_authn_authz]
|
||||
steps:
|
||||
- uses: geekyeggo/delete-artifact@v2
|
||||
|
|
|
@ -15,7 +15,7 @@ concurrency:
|
|||
jobs:
|
||||
relup_test_plan:
|
||||
runs-on: ubuntu-22.04
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-24.3.4.2-2-ubuntu22.04"
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-24.3.4.2-2-ubuntu22.04"
|
||||
outputs:
|
||||
CUR_EE_VSN: ${{ steps.find-versions.outputs.CUR_EE_VSN }}
|
||||
OLD_VERSIONS: ${{ steps.find-versions.outputs.OLD_VERSIONS }}
|
||||
|
@ -58,7 +58,7 @@ jobs:
|
|||
needs:
|
||||
- relup_test_plan
|
||||
if: needs.relup_test_plan.outputs.OLD_VERSIONS != '[]'
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-22.04
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
|
|
|
@ -16,7 +16,7 @@ on:
|
|||
|
||||
jobs:
|
||||
build-matrix:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
prepare: ${{ steps.matrix.outputs.prepare }}
|
||||
host: ${{ steps.matrix.outputs.host }}
|
||||
|
@ -31,12 +31,12 @@ jobs:
|
|||
MATRIX="$(echo "${APPS}" | jq -c '
|
||||
[
|
||||
(.[] | select(.profile == "emqx") | . + {
|
||||
builder: "5.0-29",
|
||||
builder: "5.0-32",
|
||||
otp: "25.1.2-2",
|
||||
elixir: "1.13.4"
|
||||
}),
|
||||
(.[] | select(.profile == "emqx-enterprise") | . + {
|
||||
builder: "5.0-29",
|
||||
builder: "5.0-32",
|
||||
otp: ["24.3.4.2-2", "25.1.2-2"][],
|
||||
elixir: "1.13.4"
|
||||
})
|
||||
|
@ -63,12 +63,17 @@ jobs:
|
|||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.build-matrix.outputs.prepare) }}
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu20.04"
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
path: source
|
||||
- uses: actions/cache@v3
|
||||
id: cache
|
||||
with:
|
||||
path: "$HOME/.cache/rebar3/rebar3_${{ matrix.otp }}_plt"
|
||||
key: rebar3-dialyzer-plt-${{ matrix.otp }}
|
||||
- name: get_all_deps
|
||||
working-directory: source
|
||||
env:
|
||||
|
@ -100,7 +105,7 @@ jobs:
|
|||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu20.04"
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
|
@ -156,7 +161,7 @@ jobs:
|
|||
- name: run tests
|
||||
working-directory: source
|
||||
env:
|
||||
DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu20.04"
|
||||
DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
MONGO_TAG: "5"
|
||||
MYSQL_TAG: "8"
|
||||
PGSQL_TAG: "13"
|
||||
|
@ -186,7 +191,7 @@ jobs:
|
|||
matrix:
|
||||
include: ${{ fromJson(needs.build-matrix.outputs.host) }}
|
||||
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu20.04"
|
||||
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
@ -225,7 +230,7 @@ jobs:
|
|||
- ct
|
||||
- ct_docker
|
||||
runs-on: ubuntu-22.04
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-29:1.13.4-24.3.4.2-2-ubuntu22.04"
|
||||
container: "ghcr.io/emqx/emqx-builder/5.0-32:1.13.4-24.3.4.2-2-ubuntu22.04"
|
||||
steps:
|
||||
- uses: AutoModality/action-clean@v1
|
||||
- uses: actions/download-artifact@v3
|
||||
|
@ -262,7 +267,7 @@ jobs:
|
|||
# do this in a separate job
|
||||
upload_coverdata:
|
||||
needs: make_cover
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Coveralls Finished
|
||||
env:
|
||||
|
|
|
@ -5,7 +5,7 @@ on:
|
|||
|
||||
jobs:
|
||||
shellcheck:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v3
|
||||
|
|
|
@ -10,7 +10,7 @@ on:
|
|||
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-22.04
|
||||
if: github.repository_owner == 'emqx'
|
||||
permissions:
|
||||
issues: write
|
||||
|
|
|
@ -11,11 +11,11 @@ on:
|
|||
|
||||
jobs:
|
||||
upload:
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-22.04
|
||||
strategy:
|
||||
fail-fast: false
|
||||
steps:
|
||||
- uses: aws-actions/configure-aws-credentials@v1-node16
|
||||
- uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
|
8
Makefile
8
Makefile
|
@@ -6,8 +6,8 @@ export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.0-28:1.13.4-24.3.4.2-2
export EMQX_DEFAULT_RUNNER = debian:11-slim
export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
export ELIXIR_VSN ?= $(shell $(CURDIR)/scripts/get-elixir-vsn.sh)
export EMQX_DASHBOARD_VERSION ?= v1.1.8
export EMQX_EE_DASHBOARD_VERSION ?= e1.0.4
export EMQX_DASHBOARD_VERSION ?= v1.1.9
export EMQX_EE_DASHBOARD_VERSION ?= e1.0.5-beta.1
export EMQX_REL_FORM ?= tgz
export QUICER_DOWNLOAD_FROM_RELEASE = 1
ifeq ($(OS),Windows_NT)

@@ -80,7 +80,7 @@ ct: $(REBAR) merge-config
## only check bpapi for enterprise profile because it's a super-set.
.PHONY: static_checks
static_checks:
@$(REBAR) as check do dialyzer, xref
@$(REBAR) as check do xref, dialyzer
@if [ "$${PROFILE}" = 'emqx-enterprise' ]; then $(REBAR) ct --suite apps/emqx/test/emqx_static_checks --readable $(CT_READABLE); fi
@if [ "$${PROFILE}" = 'emqx-enterprise' ]; then ./scripts/check-i18n-style.sh; fi

@@ -107,7 +107,7 @@ endef
$(foreach app,$(APPS),$(eval $(call gen-app-prop-target,$(app))))

.PHONY: ct-suite
ct-suite: $(REBAR)
ct-suite: $(REBAR) merge-config
ifneq ($(TESTCASE),)
ifneq ($(GROUP),)
$(REBAR) ct -v --readable=$(CT_READABLE) --name $(CT_NODE_NAME) --suite $(SUITE) --case $(TESTCASE) --group $(GROUP)
@@ -1079,11 +1079,11 @@ Supported configurations are the following:
zh: """共享订阅消息派发策略。
- `random`:随机挑选一个共享订阅者派发;
- `round_robin`:使用 round-robin 策略派发;
- `round_robin_per_group`: 在共享组内循环选择下一个成员;
- `local`: 选择随机的本地成员,否则选择随机的集群范围内成员;
- `round_robin_per_group`:在共享组内循环选择下一个成员;
- `local`:选择随机的本地成员,否则选择随机的集群范围内成员;
- `sticky`:总是使用上次选中的订阅者派发,直到它断开连接;
- `hash_clientid`:使用发送者的 Client ID 进行 Hash 来选择订阅者;
- `hash_topic`: 使用源主题进行 Hash 来选择订阅者。"""
- `hash_topic`:使用源主题进行 Hash 来选择订阅者。"""
}
}

@@ -1095,7 +1095,7 @@ This should allow messages to be dispatched to a different subscriber in the gro

zh: """该配置项已废弃,会在 5.1 中移除。
启用/禁用 QoS 1 和 QoS 2 消息的共享派发确认。
开启后,允许将消息从未及时回复 ACK 的订阅者 (例如,客户端离线)重新派发给另外一个订阅者。"""
开启后,允许将消息从未及时回复 ACK 的订阅者 (例如,客户端离线) 重新派发给另外一个订阅者。"""
}
}

@@ -1753,6 +1753,63 @@ server_ssl_opts_schema_gc_after_handshake {
}
}

server_ssl_opts_schema_enable_ocsp_stapling {
desc {
en: "Whether to enable Online Certificate Status Protocol (OCSP) stapling for the listener."
" If set to true, requires defining the OCSP responder URL and issuer PEM path."
zh: "是否为监听器启用 OCSP Stapling 功能。 如果设置为 true,"
"需要定义 OCSP Responder 的 URL 和证书签发者的 PEM 文件路径。"
}
label: {
en: "Enable OCSP Stapling"
zh: "启用 OCSP Stapling"
}
}

server_ssl_opts_schema_ocsp_responder_url {
desc {
en: "URL for the OCSP responder to check the server certificate against."
zh: "用于检查服务器证书的 OCSP Responder 的 URL。"
}
label: {
en: "OCSP Responder URL"
zh: "OCSP Responder 的 URL"
}
}

server_ssl_opts_schema_ocsp_issuer_pem {
desc {
en: "PEM-encoded certificate of the OCSP issuer for the server certificate."
zh: "服务器证书的 OCSP 签发者的 PEM 编码证书。"
}
label: {
en: "OCSP Issuer Certificate"
zh: "OCSP 签发者证书"
}
}

server_ssl_opts_schema_ocsp_refresh_interval {
desc {
en: "The period to refresh the OCSP response for the server."
zh: "为服务器刷新OCSP响应的周期。"
}
label: {
en: "OCSP Refresh Interval"
zh: "OCSP 刷新间隔"
}
}

server_ssl_opts_schema_ocsp_refresh_http_timeout {
desc {
en: "The timeout for the HTTP request when checking OCSP responses."
zh: "检查 OCSP 响应时,HTTP 请求的超时。"
}
label: {
en: "OCSP Refresh HTTP Timeout"
zh: "OCSP 刷新 HTTP 超时"
}
}

fields_listeners_tcp {
desc {
en: """TCP listeners."""
@@ -35,7 +35,7 @@
-define(EMQX_RELEASE_CE, "5.0.20").

%% Enterprise edition
-define(EMQX_RELEASE_EE, "5.0.1").
-define(EMQX_RELEASE_EE, "5.0.2-alpha.1").

%% the HTTP API version
-define(EMQX_API_VERSION, "5.0").
@@ -4,6 +4,7 @@
{emqx_authz,1}.
{emqx_bridge,1}.
{emqx_bridge,2}.
{emqx_bridge,3}.
{emqx_broker,1}.
{emqx_cm,1}.
{emqx_conf,1}.
@@ -27,12 +27,13 @@
{jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}},
{cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}},
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.4"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.5"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.37.0"}}},
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}},
{pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
{recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.0"}}}
{snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.7"}}}
]}.

{plugins, [{rebar3_proper, "0.12.1"}]}.

@@ -43,7 +44,7 @@
{meck, "0.9.2"},
{proper, "1.4.0"},
{bbmustache, "1.10.0"},
{emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.8.2"}}}
{emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.8.5"}}}
]},
{extra_src_dirs, [{"test", [recursive]}]}
]}
@@ -87,6 +87,10 @@
remove_handlers/0
]).

-ifdef(TEST).
-export([erase_schema_mod_and_names/0]).
-endif.

-include("logger.hrl").
-include_lib("hocon/include/hoconsc.hrl").

@@ -501,6 +505,11 @@ save_schema_mod_and_names(SchemaMod) ->
names => lists:usort(OldNames ++ RootNames)
}).

-ifdef(TEST).
erase_schema_mod_and_names() ->
persistent_term:erase(?PERSIS_SCHEMA_MODS).
-endif.

-spec get_schema_mod() -> #{binary() => atom()}.
get_schema_mod() ->
maps:get(mods, persistent_term:get(?PERSIS_SCHEMA_MODS, #{mods => #{}})).
@@ -0,0 +1,24 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc Never update this module, create a v2 instead.
%%--------------------------------------------------------------------

-module(emqx_const_v1).

-export([make_sni_fun/1]).

make_sni_fun(ListenerID) ->
fun(SN) -> emqx_ocsp_cache:sni_fun(SN, ListenerID) end.
@@ -35,7 +35,8 @@ init([]) ->
child_spec(emqx_hooks, worker),
child_spec(emqx_stats, worker),
child_spec(emqx_metrics, worker),
child_spec(emqx_authn_authz_metrics_sup, supervisor)
child_spec(emqx_authn_authz_metrics_sup, supervisor),
child_spec(emqx_ocsp_cache, worker)
]
}}.
@@ -2,7 +2,7 @@
{application, emqx_limiter, [
{description, "EMQX Hierarchical Limiter"},
% strict semver, bump manually!
{vsn, "1.0.0"},
{vsn, "1.0.1"},
{modules, []},
{registered, [emqx_limiter_sup]},
{applications, [kernel, stdlib, emqx]},
@ -35,6 +35,12 @@
|
|||
]).
|
||||
|
||||
-define(KILOBYTE, 1024).
|
||||
-define(BUCKET_KEYS, [
|
||||
{bytes_in, bucket_infinity},
|
||||
{message_in, bucket_infinity},
|
||||
{connection, bucket_limit},
|
||||
{message_routing, bucket_infinity}
|
||||
]).
|
||||
|
||||
-type limiter_type() ::
|
||||
bytes_in
|
||||
|
@ -126,12 +132,18 @@ fields(client_fields) ->
|
|||
})}
|
||||
|| Type <- types()
|
||||
];
|
||||
fields(bucket_opts) ->
|
||||
fields(bucket_infinity) ->
|
||||
[
|
||||
{rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => <<"infinity">>})},
|
||||
{capacity, ?HOCON(capacity(), #{desc => ?DESC(capacity), default => <<"infinity">>})},
|
||||
{initial, ?HOCON(initial(), #{default => <<"0">>, desc => ?DESC(initial)})}
|
||||
];
|
||||
fields(bucket_limit) ->
|
||||
[
|
||||
{rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => <<"1000/s">>})},
|
||||
{capacity, ?HOCON(capacity(), #{desc => ?DESC(capacity), default => <<"1000">>})},
|
||||
{initial, ?HOCON(initial(), #{default => <<"0">>, desc => ?DESC(initial)})}
|
||||
];
|
||||
fields(client_opts) ->
|
||||
[
|
||||
{rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => ?DESC(rate)})},
|
||||
|
@ -179,9 +191,9 @@ fields(client_opts) ->
|
|||
)}
|
||||
];
|
||||
fields(listener_fields) ->
|
||||
bucket_fields([bytes_in, message_in, connection, message_routing], listener_client_fields);
|
||||
bucket_fields(?BUCKET_KEYS, listener_client_fields);
|
||||
fields(listener_client_fields) ->
|
||||
client_fields([bytes_in, message_in, connection, message_routing]);
|
||||
client_fields(?BUCKET_KEYS);
|
||||
fields(Type) ->
|
||||
bucket_field(Type).
|
||||
|
||||
|
@ -189,8 +201,10 @@ desc(limiter) ->
|
|||
"Settings for the rate limiter.";
|
||||
desc(node_opts) ->
|
||||
"Settings for the limiter of the node level.";
|
||||
desc(bucket_opts) ->
|
||||
desc(bucket_infinity) ->
|
||||
"Settings for the bucket.";
|
||||
desc(bucket_limit) ->
|
||||
desc(bucket_infinity);
|
||||
desc(client_opts) ->
|
||||
"Settings for the client in bucket level.";
|
||||
desc(client_fields) ->
|
||||
|
@ -337,7 +351,7 @@ apply_unit("gb", Val) -> Val * ?KILOBYTE * ?KILOBYTE * ?KILOBYTE;
|
|||
apply_unit(Unit, _) -> throw("invalid unit:" ++ Unit).
|
||||
|
||||
bucket_field(Type) when is_atom(Type) ->
|
||||
fields(bucket_opts) ++
|
||||
fields(bucket_infinity) ++
|
||||
[
|
||||
{client,
|
||||
?HOCON(
|
||||
|
@ -351,11 +365,11 @@ bucket_field(Type) when is_atom(Type) ->
|
|||
bucket_fields(Types, ClientRef) ->
|
||||
[
|
||||
{Type,
|
||||
?HOCON(?R_REF(?MODULE, bucket_opts), #{
|
||||
?HOCON(?R_REF(?MODULE, Opts), #{
|
||||
desc => ?DESC(?MODULE, Type),
|
||||
required => false
|
||||
})}
|
||||
|| Type <- Types
|
||||
|| {Type, Opts} <- Types
|
||||
] ++
|
||||
[
|
||||
{client,
|
||||
|
@ -375,5 +389,5 @@ client_fields(Types) ->
|
|||
desc => ?DESC(Type),
|
||||
required => false
|
||||
})}
|
||||
|| Type <- Types
|
||||
|| {Type, _} <- Types
|
||||
].
|
||||
|
|
|
@ -484,8 +484,12 @@ esockd_opts(ListenerId, Type, Opts0) ->
|
|||
},
|
||||
maps:to_list(
|
||||
case Type of
|
||||
tcp -> Opts3#{tcp_options => tcp_opts(Opts0)};
|
||||
ssl -> Opts3#{ssl_options => ssl_opts(Opts0), tcp_options => tcp_opts(Opts0)}
|
||||
tcp ->
|
||||
Opts3#{tcp_options => tcp_opts(Opts0)};
|
||||
ssl ->
|
||||
OptsWithSNI = inject_sni_fun(ListenerId, Opts0),
|
||||
SSLOpts = ssl_opts(OptsWithSNI),
|
||||
Opts3#{ssl_options => SSLOpts, tcp_options => tcp_opts(Opts0)}
|
||||
end
|
||||
).
|
||||
|
||||
|
@ -785,3 +789,8 @@ quic_listener_optional_settings() ->
|
|||
max_binding_stateless_operations,
|
||||
stateless_operation_expiration_ms
|
||||
].
|
||||
|
||||
inject_sni_fun(ListenerId, Conf = #{ssl_options := #{ocsp := #{enable_ocsp_stapling := true}}}) ->
|
||||
emqx_ocsp_cache:inject_sni_fun(ListenerId, Conf);
|
||||
inject_sni_fun(_ListenerId, Conf) ->
|
||||
Conf.
|
||||
|
|
|
@ -0,0 +1,532 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%
|
||||
%% @doc EMQX OCSP cache.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_ocsp_cache).
|
||||
|
||||
-include("logger.hrl").
|
||||
-include_lib("public_key/include/public_key.hrl").
|
||||
-include_lib("ssl/src/ssl_handshake.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
-export([
|
||||
start_link/0,
|
||||
sni_fun/2,
|
||||
fetch_response/1,
|
||||
register_listener/2,
|
||||
inject_sni_fun/2
|
||||
]).
|
||||
|
||||
%% gen_server API
|
||||
-export([
|
||||
init/1,
|
||||
handle_call/3,
|
||||
handle_cast/2,
|
||||
handle_info/2,
|
||||
code_change/3
|
||||
]).
|
||||
|
||||
%% internal export; only for mocking in tests
|
||||
-export([http_get/2]).
|
||||
|
||||
-define(CACHE_TAB, ?MODULE).
|
||||
-define(CALL_TIMEOUT, 20_000).
|
||||
-define(RETRY_TIMEOUT, 5_000).
|
||||
-define(REFRESH_TIMER(LID), {refresh_timer, LID}).
|
||||
-ifdef(TEST).
|
||||
-define(MIN_REFRESH_INTERVAL, timer:seconds(5)).
|
||||
-else.
|
||||
-define(MIN_REFRESH_INTERVAL, timer:minutes(1)).
|
||||
-endif.
|
||||
|
||||
%% Allow usage of OTP certificate record fields (camelCase).
|
||||
-elvis([
|
||||
{elvis_style, atom_naming_convention, #{
|
||||
regex => "^([a-z][a-z0-9]*_?)([a-zA-Z0-9]*_?)*$",
|
||||
enclosed_atoms => ".*"
|
||||
}}
|
||||
]).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% API
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
start_link() ->
|
||||
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
|
||||
|
||||
sni_fun(_ServerName, ListenerID) ->
|
||||
Res =
|
||||
try
|
||||
fetch_response(ListenerID)
|
||||
catch
|
||||
_:_ -> error
|
||||
end,
|
||||
case Res of
|
||||
{ok, Response} ->
|
||||
[
|
||||
{certificate_status, #certificate_status{
|
||||
status_type = ?CERTIFICATE_STATUS_TYPE_OCSP,
|
||||
response = Response
|
||||
}}
|
||||
];
|
||||
error ->
|
||||
[]
|
||||
end.
|
||||
|
||||
fetch_response(ListenerID) ->
|
||||
case do_lookup(ListenerID) of
|
||||
{ok, DERResponse} ->
|
||||
{ok, DERResponse};
|
||||
{error, invalid_listener_id} ->
|
||||
error;
|
||||
{error, not_cached} ->
|
||||
?tp(ocsp_cache_miss, #{listener_id => ListenerID}),
|
||||
?SLOG(debug, #{
|
||||
msg => "fetching_new_ocsp_response",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
http_fetch(ListenerID)
|
||||
end.
|
||||
|
||||
register_listener(ListenerID, Opts) ->
|
||||
gen_server:call(?MODULE, {register_listener, ListenerID, Opts}, ?CALL_TIMEOUT).
|
||||
|
||||
-spec inject_sni_fun(emqx_listeners:listener_id(), map()) -> map().
|
||||
inject_sni_fun(ListenerID, Conf0) ->
|
||||
SNIFun = emqx_const_v1:make_sni_fun(ListenerID),
|
||||
Conf = emqx_map_lib:deep_merge(Conf0, #{ssl_options => #{sni_fun => SNIFun}}),
|
||||
ok = ?MODULE:register_listener(ListenerID, Conf),
|
||||
Conf.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% gen_server behaviour
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
init(_Args) ->
|
||||
logger:set_process_metadata(#{domain => [emqx, ocsp, cache]}),
|
||||
emqx_tables:new(?CACHE_TAB, [
|
||||
named_table,
|
||||
public,
|
||||
{heir, whereis(emqx_kernel_sup), none},
|
||||
{read_concurrency, true}
|
||||
]),
|
||||
?tp(ocsp_cache_init, #{}),
|
||||
{ok, #{}}.
|
||||
|
||||
handle_call({http_fetch, ListenerID}, _From, State) ->
|
||||
case do_lookup(ListenerID) of
|
||||
{ok, DERResponse} ->
|
||||
{reply, {ok, DERResponse}, State};
|
||||
{error, invalid_listener_id} ->
|
||||
{reply, error, State};
|
||||
{error, not_cached} ->
|
||||
Conf = undefined,
|
||||
with_refresh_params(ListenerID, Conf, {reply, error, State}, fun(Params) ->
|
||||
case do_http_fetch_and_cache(ListenerID, Params) of
|
||||
error -> {reply, error, ensure_timer(ListenerID, State, ?RETRY_TIMEOUT)};
|
||||
{ok, Response} -> {reply, {ok, Response}, ensure_timer(ListenerID, State)}
|
||||
end
|
||||
end)
|
||||
end;
|
||||
handle_call({register_listener, ListenerID, Conf}, _From, State0) ->
|
||||
?SLOG(debug, #{
|
||||
msg => "registering_ocsp_cache",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
RefreshInterval0 = emqx_map_lib:deep_get([ssl_options, ocsp, refresh_interval], Conf),
|
||||
RefreshInterval = max(RefreshInterval0, ?MIN_REFRESH_INTERVAL),
|
||||
State = State0#{{refresh_interval, ListenerID} => RefreshInterval},
|
||||
%% we need to pass the config along because this might be called
|
||||
%% during the listener's `post_config_update', hence the config is
|
||||
%% not yet "commited" and accessible when we need it.
|
||||
Message = {refresh, ListenerID, Conf},
|
||||
{reply, ok, ensure_timer(ListenerID, Message, State, 0)};
|
||||
handle_call(Call, _From, State) ->
|
||||
{reply, {error, {unknown_call, Call}}, State}.
|
||||
|
||||
handle_cast(_Cast, State) ->
|
||||
{noreply, State}.
|
||||
|
||||
handle_info({timeout, TRef, {refresh, ListenerID}}, State0) ->
|
||||
case maps:get(?REFRESH_TIMER(ListenerID), State0, undefined) of
|
||||
TRef ->
|
||||
?tp(ocsp_refresh_timer, #{listener_id => ListenerID}),
|
||||
?SLOG(debug, #{
|
||||
msg => "refreshing_ocsp_response",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
Conf = undefined,
|
||||
handle_refresh(ListenerID, Conf, State0);
|
||||
_ ->
|
||||
{noreply, State0}
|
||||
end;
|
||||
handle_info({timeout, TRef, {refresh, ListenerID, Conf}}, State0) ->
|
||||
case maps:get(?REFRESH_TIMER(ListenerID), State0, undefined) of
|
||||
TRef ->
|
||||
?tp(ocsp_refresh_timer, #{listener_id => ListenerID}),
|
||||
?SLOG(debug, #{
|
||||
msg => "refreshing_ocsp_response",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
handle_refresh(ListenerID, Conf, State0);
|
||||
_ ->
|
||||
{noreply, State0}
|
||||
end;
|
||||
handle_info(_Info, State) ->
|
||||
{noreply, State}.
|
||||
|
||||
code_change(_Vsn, State, _Extra) ->
|
||||
{ok, State}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
http_fetch(ListenerID) ->
|
||||
%% TODO: configurable call timeout?
|
||||
gen_server:call(?MODULE, {http_fetch, ListenerID}, ?CALL_TIMEOUT).
|
||||
|
||||
with_listener_config(ListenerID, ConfPath, ErrorResp, Fn) ->
|
||||
case emqx_listeners:parse_listener_id(ListenerID) of
|
||||
{ok, #{type := Type, name := Name}} ->
|
||||
case emqx_config:get_listener_conf(Type, Name, ConfPath, not_found) of
|
||||
not_found ->
|
||||
?SLOG(error, #{
|
||||
msg => "listener_config_missing",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
ErrorResp;
|
||||
Config ->
|
||||
Fn(Config)
|
||||
end;
|
||||
_Err ->
|
||||
?SLOG(error, #{
|
||||
msg => "listener_id_not_found",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
ErrorResp
|
||||
end.
|
||||
|
||||
cache_key(ListenerID) ->
|
||||
with_listener_config(ListenerID, [ssl_options], error, fun
|
||||
(#{certfile := ServerCertPemPath}) ->
|
||||
#'Certificate'{
|
||||
tbsCertificate =
|
||||
#'TBSCertificate'{
|
||||
signature = Signature
|
||||
}
|
||||
} = read_server_cert(ServerCertPemPath),
|
||||
{ok, {ocsp_response, Signature}};
|
||||
(OtherConfig) ->
|
||||
?SLOG(error, #{
|
||||
msg => "listener_config_inconsistent",
|
||||
listener_id => ListenerID,
|
||||
config => OtherConfig
|
||||
}),
|
||||
error
|
||||
end).
|
||||
|
||||
do_lookup(ListenerID) ->
|
||||
CacheKey = cache_key(ListenerID),
|
||||
case CacheKey of
|
||||
error ->
|
||||
{error, invalid_listener_id};
|
||||
{ok, Key} ->
|
||||
%% Respond immediately if a concurrent call already fetched it.
|
||||
case ets:lookup(?CACHE_TAB, Key) of
|
||||
[{_, DERResponse}] ->
|
||||
?tp(ocsp_cache_hit, #{listener_id => ListenerID}),
|
||||
{ok, DERResponse};
|
||||
[] ->
|
||||
{error, not_cached}
|
||||
end
|
||||
end.
|
||||
|
||||
read_server_cert(ServerCertPemPath0) ->
|
||||
ServerCertPemPath = to_bin(ServerCertPemPath0),
|
||||
case ets:lookup(ssl_pem_cache, ServerCertPemPath) of
|
||||
[{_, [{'Certificate', ServerCertDer, _} | _]}] ->
|
||||
public_key:der_decode('Certificate', ServerCertDer);
|
||||
[] ->
|
||||
case file:read_file(ServerCertPemPath) of
|
||||
{ok, ServerCertPem} ->
|
||||
[{'Certificate', ServerCertDer, _} | _] =
|
||||
public_key:pem_decode(ServerCertPem),
|
||||
public_key:der_decode('Certificate', ServerCertDer);
|
||||
{error, Error1} ->
|
||||
error({bad_server_cert_file, Error1})
|
||||
end
|
||||
end.
|
||||
|
||||
handle_refresh(ListenerID, Conf, State0) ->
|
||||
%% no point in retrying if the config is inconsistent or non
|
||||
%% existent.
|
||||
State1 = maps:without([{refresh_interval, ListenerID}, ?REFRESH_TIMER(ListenerID)], State0),
|
||||
with_refresh_params(ListenerID, Conf, {noreply, State1}, fun(Params) ->
|
||||
case do_http_fetch_and_cache(ListenerID, Params) of
|
||||
error ->
|
||||
?SLOG(debug, #{
|
||||
msg => "failed_to_fetch_ocsp_response",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
{noreply, ensure_timer(ListenerID, State0, ?RETRY_TIMEOUT)};
|
||||
{ok, _Response} ->
|
||||
?SLOG(debug, #{
|
||||
msg => "fetched_ocsp_response",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
{noreply, ensure_timer(ListenerID, State0)}
|
||||
end
|
||||
end).
|
||||
|
||||
with_refresh_params(ListenerID, Conf, ErrorRet, Fn) ->
|
||||
case get_refresh_params(ListenerID, Conf) of
|
||||
error ->
|
||||
ErrorRet;
|
||||
{ok, Params} ->
|
||||
try
|
||||
Fn(Params)
|
||||
catch
|
||||
Kind:Error ->
|
||||
?SLOG(error, #{
|
||||
msg => "error_fetching_ocsp_response",
|
||||
listener_id => ListenerID,
|
||||
error => {Kind, Error}
|
||||
}),
|
||||
ErrorRet
|
||||
end
|
||||
end.
|
||||
|
||||
get_refresh_params(ListenerID, undefined = _Conf) ->
|
||||
%% during normal periodic refreshes, we read from the emqx config.
|
||||
with_listener_config(ListenerID, [ssl_options], error, fun
|
||||
(
|
||||
#{
|
||||
ocsp := #{
|
||||
issuer_pem := IssuerPemPath,
|
||||
responder_url := ResponderURL,
|
||||
refresh_http_timeout := HTTPTimeout
|
||||
},
|
||||
certfile := ServerCertPemPath
|
||||
}
|
||||
) ->
|
||||
{ok, #{
|
||||
issuer_pem => IssuerPemPath,
|
||||
responder_url => ResponderURL,
|
||||
refresh_http_timeout => HTTPTimeout,
|
||||
server_certfile => ServerCertPemPath
|
||||
}};
|
||||
(OtherConfig) ->
|
||||
?SLOG(error, #{
|
||||
msg => "listener_config_inconsistent",
|
||||
listener_id => ListenerID,
|
||||
config => OtherConfig
|
||||
}),
|
||||
error
|
||||
end);
|
||||
get_refresh_params(_ListenerID, #{
|
||||
ssl_options := #{
|
||||
ocsp := #{
|
||||
issuer_pem := IssuerPemPath,
|
||||
responder_url := ResponderURL,
|
||||
refresh_http_timeout := HTTPTimeout
|
||||
},
|
||||
certfile := ServerCertPemPath
|
||||
}
|
||||
}) ->
|
||||
{ok, #{
|
||||
issuer_pem => IssuerPemPath,
|
||||
responder_url => ResponderURL,
|
||||
refresh_http_timeout => HTTPTimeout,
|
||||
server_certfile => ServerCertPemPath
|
||||
}};
|
||||
get_refresh_params(_ListenerID, _Conf) ->
|
||||
error.
|
||||
|
||||
do_http_fetch_and_cache(ListenerID, Params) ->
|
||||
#{
|
||||
issuer_pem := IssuerPemPath,
|
||||
responder_url := ResponderURL,
|
||||
refresh_http_timeout := HTTPTimeout,
|
||||
server_certfile := ServerCertPemPath
|
||||
} = Params,
|
||||
IssuerPem =
|
||||
case file:read_file(IssuerPemPath) of
|
||||
{ok, IssuerPem0} -> IssuerPem0;
|
||||
{error, Error0} -> error({bad_issuer_pem_file, Error0})
|
||||
end,
|
||||
ServerCert = read_server_cert(ServerCertPemPath),
|
||||
Request = build_ocsp_request(IssuerPem, ServerCert),
|
||||
?tp(ocsp_http_fetch, #{
|
||||
listener_id => ListenerID,
|
||||
responder_url => ResponderURL,
|
||||
timeout => HTTPTimeout
|
||||
}),
|
||||
RequestURI = iolist_to_binary([ResponderURL, Request]),
|
||||
Resp = ?MODULE:http_get(RequestURI, HTTPTimeout),
|
||||
case Resp of
|
||||
{ok, {{_, 200, _}, _, Body}} ->
|
||||
?SLOG(debug, #{
|
||||
msg => "caching_ocsp_response",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
%% if we got this far, the certfile is correct.
|
||||
{ok, CacheKey} = cache_key(ListenerID),
|
||||
true = ets:insert(?CACHE_TAB, {CacheKey, Body}),
|
||||
?tp(ocsp_http_fetch_and_cache, #{
|
||||
listener_id => ListenerID,
|
||||
headers => true
|
||||
}),
|
||||
{ok, Body};
|
||||
{ok, {200, Body}} ->
|
||||
?SLOG(debug, #{
|
||||
msg => "caching_ocsp_response",
|
||||
listener_id => ListenerID
|
||||
}),
|
||||
%% if we got this far, the certfile is correct.
|
||||
{ok, CacheKey} = cache_key(ListenerID),
|
||||
true = ets:insert(?CACHE_TAB, {CacheKey, Body}),
|
||||
?tp(ocsp_http_fetch_and_cache, #{
|
||||
listener_id => ListenerID,
|
||||
headers => false
|
||||
}),
|
||||
{ok, Body};
|
||||
{ok, {{_, Code, _}, _, Body}} ->
|
||||
?tp(
|
||||
error,
|
||||
ocsp_http_fetch_bad_code,
|
||||
#{
|
||||
listener_id => ListenerID,
|
||||
body => Body,
|
||||
code => Code,
|
||||
headers => true
|
||||
}
|
||||
),
|
||||
?SLOG(error, #{
|
||||
msg => "error_fetching_ocsp_response",
|
||||
listener_id => ListenerID,
|
||||
code => Code,
|
||||
body => Body
|
||||
}),
|
||||
error;
|
||||
{ok, {Code, Body}} ->
|
||||
?tp(
|
||||
error,
|
||||
ocsp_http_fetch_bad_code,
|
||||
#{
|
||||
listener_id => ListenerID,
|
||||
body => Body,
|
||||
code => Code,
|
||||
headers => false
|
||||
}
|
||||
),
|
||||
?SLOG(error, #{
|
||||
msg => "error_fetching_ocsp_response",
|
||||
listener_id => ListenerID,
|
||||
code => Code,
|
||||
body => Body
|
||||
}),
|
||||
error;
|
||||
{error, Error} ->
|
||||
?tp(
|
||||
error,
|
||||
ocsp_http_fetch_error,
|
||||
#{
|
||||
listener_id => ListenerID,
|
||||
error => Error
|
||||
}
|
||||
),
|
||||
?SLOG(error, #{
|
||||
msg => "error_fetching_ocsp_response",
|
||||
listener_id => ListenerID,
|
||||
error => Error
|
||||
}),
|
||||
error
|
||||
end.
|
||||
|
||||
http_get(URL, HTTPTimeout) ->
|
||||
httpc:request(
|
||||
get,
|
||||
{URL, [{"connection", "close"}]},
|
||||
[{timeout, HTTPTimeout}],
|
||||
[{body_format, binary}]
|
||||
).
|
||||
|
||||
ensure_timer(ListenerID, State) ->
|
||||
Timeout = maps:get({refresh_interval, ListenerID}, State, timer:minutes(5)),
|
||||
ensure_timer(ListenerID, State, Timeout).
|
||||
|
||||
ensure_timer(ListenerID, State, Timeout) ->
|
||||
ensure_timer(ListenerID, {refresh, ListenerID}, State, Timeout).
|
||||
|
||||
ensure_timer(ListenerID, Message, State, Timeout) ->
|
||||
emqx_misc:cancel_timer(maps:get(?REFRESH_TIMER(ListenerID), State, undefined)),
|
||||
State#{
|
||||
?REFRESH_TIMER(ListenerID) => emqx_misc:start_timer(
|
||||
Timeout,
|
||||
Message
|
||||
)
|
||||
}.
|
||||
|
||||
build_ocsp_request(IssuerPem, ServerCert) ->
|
||||
[{'Certificate', IssuerDer, _} | _] = public_key:pem_decode(IssuerPem),
|
||||
#'Certificate'{
|
||||
tbsCertificate =
|
||||
#'TBSCertificate'{
|
||||
serialNumber = SerialNumber,
|
||||
issuer = Issuer
|
||||
}
|
||||
} = ServerCert,
|
||||
#'Certificate'{
|
||||
tbsCertificate =
|
||||
#'TBSCertificate'{
|
||||
subjectPublicKeyInfo =
|
||||
#'SubjectPublicKeyInfo'{subjectPublicKey = IssuerPublicKeyDer}
|
||||
}
|
||||
} = public_key:der_decode('Certificate', IssuerDer),
|
||||
IssuerDNHash = crypto:hash(sha, public_key:der_encode('Name', Issuer)),
|
||||
IssuerPKHash = crypto:hash(sha, IssuerPublicKeyDer),
|
||||
Req = #'OCSPRequest'{
|
||||
tbsRequest =
|
||||
#'TBSRequest'{
|
||||
version = 0,
|
||||
requestList =
|
||||
[
|
||||
#'Request'{
|
||||
reqCert =
|
||||
#'CertID'{
|
||||
hashAlgorithm =
|
||||
#'AlgorithmIdentifier'{
|
||||
algorithm = ?'id-sha1',
|
||||
%% ???
|
||||
parameters = <<5, 0>>
|
||||
},
|
||||
issuerNameHash = IssuerDNHash,
|
||||
issuerKeyHash = IssuerPKHash,
|
||||
serialNumber = SerialNumber
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
ReqDer = public_key:der_encode('OCSPRequest', Req),
|
||||
base64:encode_to_string(ReqDer).
|
||||
|
||||
to_bin(Str) when is_list(Str) -> list_to_binary(Str);
|
||||
to_bin(Bin) when is_binary(Bin) -> Bin.
|
|
@ -43,6 +43,7 @@
-type cipher() :: map().
-type port_number() :: 1..65536.
-type server_parse_option() :: #{default_port => port_number(), no_port => boolean()}.
-type url() :: binary().

-typerefl_from_string({duration/0, emqx_schema, to_duration}).
-typerefl_from_string({duration_s/0, emqx_schema, to_duration_s}).
@ -56,6 +57,7 @@
-typerefl_from_string({ip_port/0, emqx_schema, to_ip_port}).
-typerefl_from_string({cipher/0, emqx_schema, to_erl_cipher_suite}).
-typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}).
-typerefl_from_string({url/0, emqx_schema, to_url}).

-export([
    validate_heap_size/1,
@ -81,7 +83,8 @@
    to_bar_separated_list/1,
    to_ip_port/1,
    to_erl_cipher_suite/1,
    to_comma_separated_atoms/1
    to_comma_separated_atoms/1,
    to_url/1
]).

-export([
@ -108,7 +111,8 @@
    bar_separated_list/0,
    ip_port/0,
    cipher/0,
    comma_separated_atoms/0
    comma_separated_atoms/0,
    url/0
]).

-export([namespace/0, roots/0, roots/1, fields/1, desc/1, tags/0]).
@ -810,7 +814,7 @@ fields("mqtt_ssl_listener") ->
        {"ssl_options",
            sc(
                ref("listener_ssl_opts"),
                #{}
                #{validator => fun mqtt_ssl_listener_ssl_options_validator/1}
            )}
    ];
fields("mqtt_ws_listener") ->
@ -1294,6 +1298,49 @@ fields("listener_quic_ssl_opts") ->
    );
fields("ssl_client_opts") ->
    client_ssl_opts_schema(#{});
fields("ocsp") ->
    [
        {"enable_ocsp_stapling",
            sc(
                boolean(),
                #{
                    default => false,
                    desc => ?DESC("server_ssl_opts_schema_enable_ocsp_stapling")
                }
            )},
        {"responder_url",
            sc(
                url(),
                #{
                    required => false,
                    desc => ?DESC("server_ssl_opts_schema_ocsp_responder_url")
                }
            )},
        {"issuer_pem",
            sc(
                binary(),
                #{
                    required => false,
                    desc => ?DESC("server_ssl_opts_schema_ocsp_issuer_pem")
                }
            )},
        {"refresh_interval",
            sc(
                duration(),
                #{
                    default => <<"5m">>,
                    desc => ?DESC("server_ssl_opts_schema_ocsp_refresh_interval")
                }
            )},
        {"refresh_http_timeout",
            sc(
                duration(),
                #{
                    default => <<"15s">>,
                    desc => ?DESC("server_ssl_opts_schema_ocsp_refresh_http_timeout")
                }
            )}
    ];
fields("deflate_opts") ->
    [
        {"level",
@ -1856,10 +1903,7 @@ base_listener(Bind) ->
            listener_fields
        ),
        #{
            desc => ?DESC(base_listener_limiter),
            default => #{
                <<"connection">> => #{<<"rate">> => <<"1000/s">>, <<"capacity">> => 1000}
            }
            desc => ?DESC(base_listener_limiter)
        }
    )},
    {"enable_authn",
@ -2017,6 +2061,8 @@ desc("trace") ->
    "Real-time filtering logs for the ClientID or Topic or IP for debugging.";
desc("shared_subscription_group") ->
    "Per group dispatch strategy for shared subscription";
desc("ocsp") ->
    "Per listener OCSP Stapling configuration.";
desc(_) ->
    undefined.
@ -2199,14 +2245,62 @@ server_ssl_opts_schema(Defaults, IsRanchListener) ->
            )}
    ] ++
        [
            {"gc_after_handshake",
                sc(boolean(), #{
                    default => false,
                    desc => ?DESC(server_ssl_opts_schema_gc_after_handshake)
                })}
         || not IsRanchListener
            Field
         || not IsRanchListener,
            Field <- [
                {"gc_after_handshake",
                    sc(boolean(), #{
                        default => false,
                        desc => ?DESC(server_ssl_opts_schema_gc_after_handshake)
                    })},
                {"ocsp",
                    sc(
                        ref("ocsp"),
                        #{
                            required => false,
                            validator => fun ocsp_inner_validator/1
                        }
                    )}
            ]
        ].

mqtt_ssl_listener_ssl_options_validator(Conf) ->
    Checks = [
        fun ocsp_outer_validator/1
    ],
    case emqx_misc:pipeline(Checks, Conf, not_used) of
        {ok, _, _} ->
            ok;
        {error, Reason, _NotUsed} ->
            {error, Reason}
    end.

ocsp_outer_validator(#{<<"ocsp">> := #{<<"enable_ocsp_stapling">> := true}} = Conf) ->
    %% outer mqtt listener ssl server config
    ServerCertPemPath = maps:get(<<"certfile">>, Conf, undefined),
    case ServerCertPemPath of
        undefined ->
            {error, "Server certificate must be defined when using OCSP stapling"};
        _ ->
            %% check if issuer pem is readable and/or valid?
            ok
    end;
ocsp_outer_validator(_Conf) ->
    ok.

ocsp_inner_validator(#{enable_ocsp_stapling := _} = Conf) ->
    ocsp_inner_validator(emqx_map_lib:binary_key_map(Conf));
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := false} = _Conf) ->
    ok;
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := true} = Conf) ->
    assert_required_field(
        Conf, <<"responder_url">>, "The responder URL is required for OCSP stapling"
    ),
    assert_required_field(
        Conf, <<"issuer_pem">>, "The issuer PEM path is required for OCSP stapling"
    ),
    ok.

%% @doc Make schema for SSL client.
-spec client_ssl_opts_schema(map()) -> hocon_schema:field_schema().
client_ssl_opts_schema(Defaults) ->
@ -2408,6 +2502,15 @@ to_comma_separated_binary(Str) ->
to_comma_separated_atoms(Str) ->
    {ok, lists:map(fun to_atom/1, string:tokens(Str, ", "))}.

to_url(Str) ->
    case emqx_http_lib:uri_parse(Str) of
        {ok, URIMap} ->
            URIString = emqx_http_lib:normalize(URIMap),
            {ok, iolist_to_binary(URIString)};
        Error ->
            Error
    end.

to_bar_separated_list(Str) ->
    {ok, string:tokens(Str, "| ")}.
@ -2865,3 +2968,11 @@ is_quic_ssl_opts(Name) ->
        %% , "handshake_timeout"
        %% , "gc_after_handshake"
    ]).

assert_required_field(Conf, Key, ErrorMessage) ->
    case maps:get(Key, Conf, undefined) of
        undefined ->
            throw(ErrorMessage);
        _ ->
            ok
    end.
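To make the behaviour of the validators above concrete, here is a small illustrative snippet (not part of the patch); `ocsp_validator_example/0` and the issuer PEM path are hypothetical:

%% Hypothetical helper, for illustration only.
ocsp_validator_example() ->
    %% stapling enabled with both required fields present: accepted.
    ok = ocsp_inner_validator(#{
        <<"enable_ocsp_stapling">> => true,
        <<"responder_url">> => <<"http://localhost:9877">>,
        <<"issuer_pem">> => <<"etc/certs/ocsp-issuer.pem">>
    }),
    %% stapling enabled without a responder URL: the validator throws the
    %% human-readable message that later surfaces through the listeners API.
    try
        ocsp_inner_validator(#{<<"enable_ocsp_stapling">> => true})
    catch
        throw:Msg -> {throws, Msg}
    end.
%% => {throws, "The responder URL is required for OCSP stapling"}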
@ -47,8 +47,18 @@
-define(IS_TRUE(Val), ((Val =:= true) orelse (Val =:= <<"true">>))).
-define(IS_FALSE(Val), ((Val =:= false) orelse (Val =:= <<"false">>))).

-define(SSL_FILE_OPT_NAMES, [<<"keyfile">>, <<"certfile">>, <<"cacertfile">>]).
-define(SSL_FILE_OPT_NAMES_A, [keyfile, certfile, cacertfile]).
-define(SSL_FILE_OPT_PATHS, [
    [<<"keyfile">>],
    [<<"certfile">>],
    [<<"cacertfile">>],
    [<<"ocsp">>, <<"issuer_pem">>]
]).
-define(SSL_FILE_OPT_PATHS_A, [
    [keyfile],
    [certfile],
    [cacertfile],
    [ocsp, issuer_pem]
]).

%% non-empty string
-define(IS_STRING(L), (is_list(L) andalso L =/= [] andalso is_integer(hd(L)))).
@ -298,20 +308,20 @@ ensure_ssl_files(Dir, SSL, Opts) ->
|
|||
RequiredKeys = maps:get(required_keys, Opts, []),
|
||||
case ensure_ssl_file_key(SSL, RequiredKeys) of
|
||||
ok ->
|
||||
Keys = ?SSL_FILE_OPT_NAMES ++ ?SSL_FILE_OPT_NAMES_A,
|
||||
ensure_ssl_files(Dir, SSL, Keys, Opts);
|
||||
KeyPaths = ?SSL_FILE_OPT_PATHS ++ ?SSL_FILE_OPT_PATHS_A,
|
||||
ensure_ssl_files(Dir, SSL, KeyPaths, Opts);
|
||||
{error, _} = Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
ensure_ssl_files(_Dir, SSL, [], _Opts) ->
|
||||
{ok, SSL};
|
||||
ensure_ssl_files(Dir, SSL, [Key | Keys], Opts) ->
|
||||
case ensure_ssl_file(Dir, Key, SSL, maps:get(Key, SSL, undefined), Opts) of
|
||||
ensure_ssl_files(Dir, SSL, [KeyPath | KeyPaths], Opts) ->
|
||||
case ensure_ssl_file(Dir, KeyPath, SSL, emqx_map_lib:deep_get(KeyPath, SSL, undefined), Opts) of
|
||||
{ok, NewSSL} ->
|
||||
ensure_ssl_files(Dir, NewSSL, Keys, Opts);
|
||||
ensure_ssl_files(Dir, NewSSL, KeyPaths, Opts);
|
||||
{error, Reason} ->
|
||||
{error, Reason#{which_options => [Key]}}
|
||||
{error, Reason#{which_options => [KeyPath]}}
|
||||
end.
|
||||
|
||||
%% @doc Compare old and new config, delete the ones in old but not in new.
|
||||
|
@ -321,12 +331,12 @@ delete_ssl_files(Dir, NewOpts0, OldOpts0) ->
|
|||
{ok, NewOpts} = ensure_ssl_files(Dir, NewOpts0, #{dry_run => DryRun}),
|
||||
{ok, OldOpts} = ensure_ssl_files(Dir, OldOpts0, #{dry_run => DryRun}),
|
||||
Get = fun
|
||||
(_K, undefined) -> undefined;
|
||||
(K, Opts) -> maps:get(K, Opts, undefined)
|
||||
(_KP, undefined) -> undefined;
|
||||
(KP, Opts) -> emqx_map_lib:deep_get(KP, Opts, undefined)
|
||||
end,
|
||||
lists:foreach(
|
||||
fun(Key) -> delete_old_file(Get(Key, NewOpts), Get(Key, OldOpts)) end,
|
||||
?SSL_FILE_OPT_NAMES ++ ?SSL_FILE_OPT_NAMES_A
|
||||
fun(KeyPath) -> delete_old_file(Get(KeyPath, NewOpts), Get(KeyPath, OldOpts)) end,
|
||||
?SSL_FILE_OPT_PATHS ++ ?SSL_FILE_OPT_PATHS_A
|
||||
),
|
||||
%% try to delete the dir if it is empty
|
||||
_ = file:del_dir(pem_dir(Dir)),
|
||||
|
@ -346,29 +356,33 @@ delete_old_file(_New, Old) ->
|
|||
?SLOG(error, #{msg => "failed_to_delete_ssl_file", file_path => Old, reason => Reason})
|
||||
end.
|
||||
|
||||
ensure_ssl_file(_Dir, _Key, SSL, undefined, _Opts) ->
|
||||
ensure_ssl_file(_Dir, _KeyPath, SSL, undefined, _Opts) ->
|
||||
{ok, SSL};
|
||||
ensure_ssl_file(Dir, Key, SSL, MaybePem, Opts) ->
|
||||
ensure_ssl_file(Dir, KeyPath, SSL, MaybePem, Opts) ->
|
||||
case is_valid_string(MaybePem) of
|
||||
true ->
|
||||
DryRun = maps:get(dry_run, Opts, false),
|
||||
do_ensure_ssl_file(Dir, Key, SSL, MaybePem, DryRun);
|
||||
do_ensure_ssl_file(Dir, KeyPath, SSL, MaybePem, DryRun);
|
||||
false ->
|
||||
{error, #{reason => invalid_file_path_or_pem_string}}
|
||||
end.
|
||||
|
||||
do_ensure_ssl_file(Dir, Key, SSL, MaybePem, DryRun) ->
|
||||
do_ensure_ssl_file(Dir, KeyPath, SSL, MaybePem, DryRun) ->
|
||||
case is_pem(MaybePem) of
|
||||
true ->
|
||||
case save_pem_file(Dir, Key, MaybePem, DryRun) of
|
||||
{ok, Path} -> {ok, SSL#{Key => Path}};
|
||||
{error, Reason} -> {error, Reason}
|
||||
case save_pem_file(Dir, KeyPath, MaybePem, DryRun) of
|
||||
{ok, Path} ->
|
||||
NewSSL = emqx_map_lib:deep_put(KeyPath, SSL, Path),
|
||||
{ok, NewSSL};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
false ->
|
||||
case is_valid_pem_file(MaybePem) of
|
||||
true ->
|
||||
{ok, SSL};
|
||||
{error, enoent} when DryRun -> {ok, SSL};
|
||||
{error, enoent} when DryRun ->
|
||||
{ok, SSL};
|
||||
{error, Reason} ->
|
||||
{error, #{
|
||||
pem_check => invalid_pem,
|
||||
|
@ -398,8 +412,8 @@ is_pem(MaybePem) ->
|
|||
%% To make it simple, the file is always overwritten.
|
||||
%% Also a potentially half-written PEM file (e.g. due to power outage)
|
||||
%% can be corrected with an overwrite.
|
||||
save_pem_file(Dir, Key, Pem, DryRun) ->
|
||||
Path = pem_file_name(Dir, Key, Pem),
|
||||
save_pem_file(Dir, KeyPath, Pem, DryRun) ->
|
||||
Path = pem_file_name(Dir, KeyPath, Pem),
|
||||
case filelib:ensure_dir(Path) of
|
||||
ok when DryRun ->
|
||||
{ok, Path};
|
||||
|
@ -422,11 +436,14 @@ is_generated_file(Filename) ->
|
|||
_ -> false
|
||||
end.
|
||||
|
||||
pem_file_name(Dir, Key, Pem) ->
|
||||
pem_file_name(Dir, KeyPath, Pem) ->
|
||||
<<CK:8/binary, _/binary>> = crypto:hash(md5, Pem),
|
||||
Suffix = hex_str(CK),
|
||||
FileName = binary:replace(ensure_bin(Key), <<"file">>, <<"-", Suffix/binary>>),
|
||||
filename:join([pem_dir(Dir), FileName]).
|
||||
Segments = lists:map(fun ensure_bin/1, KeyPath),
|
||||
Filename0 = iolist_to_binary(lists:join(<<"_">>, Segments)),
|
||||
Filename1 = binary:replace(Filename0, <<"file">>, <<>>),
|
||||
Filename = <<Filename1/binary, "-", Suffix/binary>>,
|
||||
filename:join([pem_dir(Dir), Filename]).
|
||||
|
||||
pem_dir(Dir) ->
|
||||
filename:join([emqx:mutable_certs_dir(), Dir]).
|
||||
|
@ -465,24 +482,26 @@ is_valid_pem_file(Path) ->
|
|||
%% so they are forced to upload a cert file, or use an existing file path.
|
||||
-spec drop_invalid_certs(map()) -> map().
|
||||
drop_invalid_certs(#{enable := False} = SSL) when ?IS_FALSE(False) ->
|
||||
maps:without(?SSL_FILE_OPT_NAMES_A, SSL);
|
||||
lists:foldl(fun emqx_map_lib:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS_A);
|
||||
drop_invalid_certs(#{<<"enable">> := False} = SSL) when ?IS_FALSE(False) ->
|
||||
maps:without(?SSL_FILE_OPT_NAMES, SSL);
|
||||
lists:foldl(fun emqx_map_lib:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS);
|
||||
drop_invalid_certs(#{enable := True} = SSL) when ?IS_TRUE(True) ->
|
||||
do_drop_invalid_certs(?SSL_FILE_OPT_NAMES_A, SSL);
|
||||
do_drop_invalid_certs(?SSL_FILE_OPT_PATHS_A, SSL);
|
||||
drop_invalid_certs(#{<<"enable">> := True} = SSL) when ?IS_TRUE(True) ->
|
||||
do_drop_invalid_certs(?SSL_FILE_OPT_NAMES, SSL).
|
||||
do_drop_invalid_certs(?SSL_FILE_OPT_PATHS, SSL).
|
||||
|
||||
do_drop_invalid_certs([], SSL) ->
|
||||
SSL;
|
||||
do_drop_invalid_certs([Key | Keys], SSL) ->
|
||||
case maps:get(Key, SSL, undefined) of
|
||||
do_drop_invalid_certs([KeyPath | KeyPaths], SSL) ->
|
||||
case emqx_map_lib:deep_get(KeyPath, SSL, undefined) of
|
||||
undefined ->
|
||||
do_drop_invalid_certs(Keys, SSL);
|
||||
do_drop_invalid_certs(KeyPaths, SSL);
|
||||
PemOrPath ->
|
||||
case is_pem(PemOrPath) orelse is_valid_pem_file(PemOrPath) of
|
||||
true -> do_drop_invalid_certs(Keys, SSL);
|
||||
{error, _} -> do_drop_invalid_certs(Keys, maps:without([Key], SSL))
|
||||
true ->
|
||||
do_drop_invalid_certs(KeyPaths, SSL);
|
||||
{error, _} ->
|
||||
do_drop_invalid_certs(KeyPaths, emqx_map_lib:deep_remove(KeyPath, SSL))
|
||||
end
|
||||
end.
|
||||
|
||||
|
@ -565,9 +584,10 @@ ensure_bin(A) when is_atom(A) -> atom_to_binary(A, utf8).
|
|||
|
||||
ensure_ssl_file_key(_SSL, []) ->
|
||||
ok;
|
||||
ensure_ssl_file_key(SSL, RequiredKeys) ->
|
||||
Filter = fun(Key) -> not maps:is_key(Key, SSL) end,
|
||||
case lists:filter(Filter, RequiredKeys) of
|
||||
ensure_ssl_file_key(SSL, RequiredKeyPaths) ->
|
||||
NotFoundRef = make_ref(),
|
||||
Filter = fun(KeyPath) -> NotFoundRef =:= emqx_map_lib:deep_get(KeyPath, SSL, NotFoundRef) end,
|
||||
case lists:filter(Filter, RequiredKeyPaths) of
|
||||
[] -> ok;
|
||||
Miss -> {error, #{reason => ssl_file_option_not_found, which_options => Miss}}
|
||||
end.
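The switch from flat option names to key paths in this module exists because the OCSP issuer PEM sits one level deeper than keyfile/certfile/cacertfile. A short sketch (not part of the patch) of the nested get/put this enables; the PEM contents and the target file path are placeholders:

key_path_example() ->
    SSL0 = #{
        <<"certfile">> => <<"-----BEGIN CERTIFICATE-----\n...">>,
        <<"ocsp">> => #{<<"issuer_pem">> => <<"-----BEGIN CERTIFICATE-----\n...">>}
    },
    %% read the nested option...
    _Pem = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL0, undefined),
    %% ...and, once the PEM has been written to disk, substitute the saved
    %% file path in place of the inline content.
    emqx_map_lib:deep_put(
        [<<"ocsp">>, <<"issuer_pem">>], SSL0, <<"data/certs/ssl/ocsp_issuer_pem-1a2b3c4d">>
    ).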
@ -26,6 +26,7 @@
|
|||
all() -> emqx_common_test_helpers:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
emqx_common_test_helpers:boot_modules(all),
|
||||
emqx_common_test_helpers:start_apps([]),
|
||||
Config.
|
||||
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
|
||||
-module(emqx_common_test_helpers).
|
||||
|
||||
-include("emqx_authentication.hrl").
|
||||
|
||||
-type special_config_handler() :: fun().
|
||||
|
||||
-type apps() :: list(atom()).
|
||||
|
@ -27,6 +29,7 @@
|
|||
boot_modules/1,
|
||||
start_apps/1,
|
||||
start_apps/2,
|
||||
start_apps/3,
|
||||
stop_apps/1,
|
||||
reload/2,
|
||||
app_path/2,
|
||||
|
@ -34,7 +37,9 @@
|
|||
deps_path/2,
|
||||
flush/0,
|
||||
flush/1,
|
||||
render_and_load_app_config/1
|
||||
load/1,
|
||||
render_and_load_app_config/1,
|
||||
render_and_load_app_config/2
|
||||
]).
|
||||
|
||||
-export([
|
||||
|
@ -62,14 +67,16 @@
|
|||
emqx_cluster/2,
|
||||
start_epmd/0,
|
||||
start_slave/2,
|
||||
stop_slave/1
|
||||
stop_slave/1,
|
||||
listener_port/2
|
||||
]).
|
||||
|
||||
-export([clear_screen/0]).
|
||||
-export([with_mock/4]).
|
||||
-export([
|
||||
on_exit/1,
|
||||
call_janitor/0
|
||||
call_janitor/0,
|
||||
call_janitor/1
|
||||
]).
|
||||
|
||||
%% Toxiproxy API
|
||||
|
@ -183,17 +190,21 @@ start_apps(Apps) ->
|
|||
application:set_env(system_monitor, db_hostname, ""),
|
||||
ok
|
||||
end,
|
||||
start_apps(Apps, DefaultHandler).
|
||||
start_apps(Apps, DefaultHandler, #{}).
|
||||
|
||||
-spec start_apps(Apps :: apps(), Handler :: special_config_handler()) -> ok.
|
||||
start_apps(Apps, SpecAppConfig) when is_function(SpecAppConfig) ->
|
||||
start_apps(Apps, SpecAppConfig, #{}).
|
||||
|
||||
-spec start_apps(Apps :: apps(), Handler :: special_config_handler(), map()) -> ok.
|
||||
start_apps(Apps, SpecAppConfig, Opts) when is_function(SpecAppConfig) ->
|
||||
%% Load all application code to beam vm first
|
||||
%% Because, minirest, ekka etc.. application will scan these modules
|
||||
lists:foreach(fun load/1, [emqx | Apps]),
|
||||
ok = start_ekka(),
|
||||
mnesia:clear_table(emqx_admin),
|
||||
ok = emqx_ratelimiter_SUITE:load_conf(),
|
||||
lists:foreach(fun(App) -> start_app(App, SpecAppConfig) end, [emqx | Apps]).
|
||||
lists:foreach(fun(App) -> start_app(App, SpecAppConfig, Opts) end, [emqx | Apps]).
|
||||
|
||||
load(App) ->
|
||||
case application:load(App) of
|
||||
|
@ -203,27 +214,31 @@ load(App) ->
|
|||
end.
|
||||
|
||||
render_and_load_app_config(App) ->
|
||||
render_and_load_app_config(App, #{}).
|
||||
|
||||
render_and_load_app_config(App, Opts) ->
|
||||
load(App),
|
||||
Schema = app_schema(App),
|
||||
Conf = app_path(App, filename:join(["etc", app_conf_file(App)])),
|
||||
ConfFilePath = maps:get(conf_file_path, Opts, filename:join(["etc", app_conf_file(App)])),
|
||||
Conf = app_path(App, ConfFilePath),
|
||||
try
|
||||
do_render_app_config(App, Schema, Conf)
|
||||
do_render_app_config(App, Schema, Conf, Opts)
|
||||
catch
|
||||
throw:E:St ->
|
||||
%% turn throw into error
|
||||
error({Conf, E, St})
|
||||
end.
|
||||
|
||||
do_render_app_config(App, Schema, ConfigFile) ->
|
||||
Vars = mustache_vars(App),
|
||||
do_render_app_config(App, Schema, ConfigFile, Opts) ->
|
||||
Vars = mustache_vars(App, Opts),
|
||||
RenderedConfigFile = render_config_file(ConfigFile, Vars),
|
||||
read_schema_configs(Schema, RenderedConfigFile),
|
||||
force_set_config_file_paths(App, [RenderedConfigFile]),
|
||||
copy_certs(App, RenderedConfigFile),
|
||||
ok.
|
||||
|
||||
start_app(App, SpecAppConfig) ->
|
||||
render_and_load_app_config(App),
|
||||
start_app(App, SpecAppConfig, Opts) ->
|
||||
render_and_load_app_config(App, Opts),
|
||||
SpecAppConfig(App),
|
||||
case application:ensure_all_started(App) of
|
||||
{ok, _} ->
|
||||
|
@ -246,12 +261,13 @@ app_schema(App) ->
|
|||
no_schema
|
||||
end.
|
||||
|
||||
mustache_vars(App) ->
|
||||
mustache_vars(App, Opts) ->
|
||||
ExtraMustacheVars = maps:get(extra_mustache_vars, Opts, []),
|
||||
[
|
||||
{platform_data_dir, app_path(App, "data")},
|
||||
{platform_etc_dir, app_path(App, "etc")},
|
||||
{platform_log_dir, app_path(App, "log")}
|
||||
].
|
||||
] ++ ExtraMustacheVars.
|
||||
|
||||
render_config_file(ConfigFile, Vars0) ->
|
||||
Temp =
|
||||
|
@ -283,6 +299,14 @@ generate_config(SchemaModule, ConfigFile) when is_atom(SchemaModule) ->
|
|||
-spec stop_apps(list()) -> ok.
|
||||
stop_apps(Apps) ->
|
||||
[application:stop(App) || App <- Apps ++ [emqx, ekka, mria, mnesia]],
|
||||
%% to avoid inter-suite flakiness
|
||||
application:unset_env(emqx, init_config_load_done),
|
||||
persistent_term:erase(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY),
|
||||
emqx_config:erase_schema_mod_and_names(),
|
||||
ok = emqx_config:delete_override_conf_files(),
|
||||
application:unset_env(emqx, local_override_conf_file),
|
||||
application:unset_env(emqx, cluster_override_conf_file),
|
||||
application:unset_env(gen_rpc, port_discovery),
|
||||
ok.
|
||||
|
||||
proj_root() ->
|
||||
|
@ -327,7 +351,7 @@ safe_relative_path_2(Path) ->
|
|||
-spec reload(App :: atom(), SpecAppConfig :: special_config_handler()) -> ok.
|
||||
reload(App, SpecAppConfigHandler) ->
|
||||
application:stop(App),
|
||||
start_app(App, SpecAppConfigHandler),
|
||||
start_app(App, SpecAppConfigHandler, #{}),
|
||||
application:start(App).
|
||||
|
||||
ensure_mnesia_stopped() ->
|
||||
|
@ -469,7 +493,7 @@ is_all_tcp_servers_available(Servers) ->
|
|||
{_, []} ->
|
||||
true;
|
||||
{_, Unavail} ->
|
||||
ct:print("Unavailable servers: ~p", [Unavail]),
|
||||
ct:pal("Unavailable servers: ~p", [Unavail]),
|
||||
false
|
||||
end.
|
||||
|
||||
|
@ -566,6 +590,12 @@ ensure_quic_listener(Name, UdpPort, ExtraSettings) ->
|
|||
%% Whether to execute `emqx_config:init_load(SchemaMod)`
|
||||
%% default: true
|
||||
load_schema => boolean(),
|
||||
%% If we want to exercise the scenario where a node joins an
|
||||
%% existing cluster where there has already been some
|
||||
%% configuration changes (via cluster rpc), then we need to enable
|
||||
%% autocluster so that the joining node will restart the
|
||||
%% `emqx_conf' app and correctly catch up the config.
|
||||
start_autocluster => boolean(),
|
||||
%% Eval by emqx_config:put/2
|
||||
conf => [{KeyPath :: list(), Val :: term()}],
|
||||
%% Fast option to config listener port
|
||||
|
@ -616,25 +646,53 @@ emqx_cluster(Specs0, CommonOpts) ->
|
|||
%% Lower level starting API
|
||||
|
||||
-spec start_slave(shortname(), node_opts()) -> nodename().
|
||||
start_slave(Name, Opts) ->
|
||||
{ok, Node} = ct_slave:start(
|
||||
list_to_atom(atom_to_list(Name) ++ "@" ++ host()),
|
||||
[
|
||||
{kill_if_fail, true},
|
||||
{monitor_master, true},
|
||||
{init_timeout, 10000},
|
||||
{startup_timeout, 10000},
|
||||
{erl_flags, erl_flags()}
|
||||
]
|
||||
),
|
||||
|
||||
start_slave(Name, Opts) when is_list(Opts) ->
|
||||
start_slave(Name, maps:from_list(Opts));
|
||||
start_slave(Name, Opts) when is_map(Opts) ->
|
||||
SlaveMod = maps:get(peer_mod, Opts, ct_slave),
|
||||
Node = node_name(Name),
|
||||
DoStart =
|
||||
fun() ->
|
||||
case SlaveMod of
|
||||
ct_slave ->
|
||||
ct_slave:start(
|
||||
Node,
|
||||
[
|
||||
{kill_if_fail, true},
|
||||
{monitor_master, true},
|
||||
{init_timeout, 10000},
|
||||
{startup_timeout, 10000},
|
||||
{erl_flags, erl_flags()}
|
||||
]
|
||||
);
|
||||
slave ->
|
||||
slave:start_link(host(), Name, ebin_path())
|
||||
end
|
||||
end,
|
||||
case DoStart() of
|
||||
{ok, _} ->
|
||||
ok;
|
||||
{error, started_not_connected, _} ->
|
||||
ok;
|
||||
Other ->
|
||||
throw(Other)
|
||||
end,
|
||||
pong = net_adm:ping(Node),
|
||||
put_peer_mod(Node, SlaveMod),
|
||||
setup_node(Node, Opts),
|
||||
ok = snabbkaffe:forward_trace(Node),
|
||||
Node.
|
||||
|
||||
%% Node stopping
|
||||
stop_slave(Node) ->
|
||||
ct_slave:stop(Node).
|
||||
stop_slave(Node0) ->
|
||||
Node = node_name(Node0),
|
||||
SlaveMod = get_peer_mod(Node),
|
||||
erase_peer_mod(Node),
|
||||
case SlaveMod:stop(Node) of
|
||||
ok -> ok;
|
||||
{ok, _} -> ok;
|
||||
{error, not_started, _} -> ok
|
||||
end.
|
||||
|
||||
%% EPMD starting
|
||||
start_epmd() ->
|
||||
|
@ -672,9 +730,27 @@ setup_node(Node, Opts) when is_map(Opts) ->
|
|||
{Type, listener_port(BasePort, Type)}
|
||||
|| Type <- [tcp, ssl, ws, wss]
|
||||
]),
|
||||
%% we need a fresh data dir for each peer node to avoid unintended
|
||||
%% successes due to sharing of data in the cluster.
|
||||
PrivDataDir = maps:get(priv_data_dir, Opts, "/tmp"),
|
||||
%% If we want to exercise the scenario where a node joins an
|
||||
%% existing cluster where there has already been some
|
||||
%% configuration changes (via cluster rpc), then we need to enable
|
||||
%% autocluster so that the joining node will restart the
|
||||
%% `emqx_conf' app and correctly catch up the config.
|
||||
StartAutocluster = maps:get(start_autocluster, Opts, false),
|
||||
|
||||
%% Load env before doing anything to avoid overriding
|
||||
[ok = rpc:call(Node, application, load, [App]) || App <- LoadApps],
|
||||
lists:foreach(fun(App) -> rpc:call(Node, ?MODULE, load, [App]) end, LoadApps),
|
||||
%% Ensure a clean mnesia directory for each run to avoid
|
||||
%% inter-test flakiness.
|
||||
MnesiaDataDir = filename:join([
|
||||
PrivDataDir,
|
||||
node(),
|
||||
integer_to_list(erlang:unique_integer()),
|
||||
"mnesia"
|
||||
]),
|
||||
erpc:call(Node, application, set_env, [mnesia, dir, MnesiaDataDir]),
|
||||
|
||||
%% Needs to be set explicitly because ekka:start() (which calls `gen`) is called without Handler
|
||||
%% in emqx_common_test_helpers:start_apps(...)
|
||||
|
@ -700,7 +776,19 @@ setup_node(Node, Opts) when is_map(Opts) ->
|
|||
%% Otherwise, configuration gets loaded and all preset env in EnvHandler is lost
|
||||
LoadSchema andalso
|
||||
begin
|
||||
%% to avoid sharing data between executions and/or
|
||||
%% nodes. these variables might notbe in the
|
||||
%% config file (e.g.: emqx_ee_conf_schema).
|
||||
NodeDataDir = filename:join([
|
||||
PrivDataDir,
|
||||
node(),
|
||||
integer_to_list(erlang:unique_integer())
|
||||
]),
|
||||
os:putenv("EMQX_NODE__DATA_DIR", NodeDataDir),
|
||||
os:putenv("EMQX_NODE__COOKIE", atom_to_list(erlang:get_cookie())),
|
||||
emqx_config:init_load(SchemaMod),
|
||||
os:unsetenv("EMQX_NODE__DATA_DIR"),
|
||||
os:unsetenv("EMQX_NODE__COOKIE"),
|
||||
application:set_env(emqx, init_config_load_done, true)
|
||||
end,
|
||||
|
||||
|
@ -727,6 +815,8 @@ setup_node(Node, Opts) when is_map(Opts) ->
|
|||
undefined ->
|
||||
ok;
|
||||
_ ->
|
||||
StartAutocluster andalso
|
||||
(ok = rpc:call(Node, emqx_machine_boot, start_autocluster, [])),
|
||||
case rpc:call(Node, ekka, join, [JoinTo]) of
|
||||
ok ->
|
||||
ok;
|
||||
|
@ -741,8 +831,27 @@ setup_node(Node, Opts) when is_map(Opts) ->
|
|||
|
||||
%% Helpers
|
||||
|
||||
put_peer_mod(Node, SlaveMod) ->
|
||||
put({?MODULE, Node}, SlaveMod),
|
||||
ok.
|
||||
|
||||
get_peer_mod(Node) ->
|
||||
case get({?MODULE, Node}) of
|
||||
undefined -> ct_slave;
|
||||
SlaveMod -> SlaveMod
|
||||
end.
|
||||
|
||||
erase_peer_mod(Node) ->
|
||||
erase({?MODULE, Node}).
|
||||
|
||||
node_name(Name) ->
|
||||
list_to_atom(lists:concat([Name, "@", host()])).
|
||||
case string:tokens(atom_to_list(Name), "@") of
|
||||
[_Name, _Host] ->
|
||||
%% the name already has a @
|
||||
Name;
|
||||
_ ->
|
||||
list_to_atom(atom_to_list(Name) ++ "@" ++ host())
|
||||
end.
|
||||
|
||||
gen_node_name(Num) ->
|
||||
list_to_atom("autocluster_node" ++ integer_to_list(Num)).
|
||||
|
@ -783,6 +892,9 @@ base_port(Number) ->
|
|||
gen_rpc_port(BasePort) ->
|
||||
BasePort - 1.
|
||||
|
||||
listener_port(Opts, Type) when is_map(Opts) ->
|
||||
BasePort = maps:get(base_port, Opts),
|
||||
listener_port(BasePort, Type);
|
||||
listener_port(BasePort, tcp) ->
|
||||
BasePort;
|
||||
listener_port(BasePort, ssl) ->
|
||||
|
@ -967,8 +1079,11 @@ latency_up_proxy(off, Name, ProxyHost, ProxyPort) ->
|
|||
%% stop the janitor gracefully to ensure proper cleanup order and less
|
||||
%% noise in the logs.
|
||||
call_janitor() ->
|
||||
call_janitor(15_000).
|
||||
|
||||
call_janitor(Timeout) ->
|
||||
Janitor = get_or_spawn_janitor(),
|
||||
exit(Janitor, normal),
|
||||
ok = emqx_test_janitor:stop(Janitor, Timeout),
|
||||
ok.
|
||||
|
||||
get_or_spawn_janitor() ->
|
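The new `start_apps/3`, `render_and_load_app_config/2` and extra mustache-variable plumbing above let a suite point EMQX at its own bundled config file. A hedged sketch of the call shape (it mirrors how the OCSP suite below uses it; `start_with_custom_conf_example/1` and the file name are assumptions):

start_with_custom_conf_example(DataDir) ->
    Handler = fun(_App) -> ok end,
    emqx_common_test_helpers:start_apps(
        [],
        Handler,
        #{
            %% made available to the config template as an extra mustache variable
            extra_mustache_vars => [{test_data_dir, DataDir}],
            conf_file_path => filename:join([DataDir, "openssl_listeners.conf"])
        }
    ).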
@ -0,0 +1,944 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_ocsp_cache_SUITE).
|
||||
|
||||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
|
||||
-include_lib("ssl/src/ssl_handshake.hrl").
|
||||
|
||||
-define(CACHE_TAB, emqx_ocsp_cache).
|
||||
|
||||
all() ->
|
||||
[{group, openssl}] ++ tests().
|
||||
|
||||
tests() ->
|
||||
emqx_common_test_helpers:all(?MODULE) -- openssl_tests().
|
||||
|
||||
openssl_tests() ->
|
||||
[t_openssl_client].
|
||||
|
||||
groups() ->
|
||||
OpensslTests = openssl_tests(),
|
||||
[
|
||||
{openssl, [
|
||||
{group, tls12},
|
||||
{group, tls13}
|
||||
]},
|
||||
{tls12, [
|
||||
{group, with_status_request},
|
||||
{group, without_status_request}
|
||||
]},
|
||||
{tls13, [
|
||||
{group, with_status_request},
|
||||
{group, without_status_request}
|
||||
]},
|
||||
{with_status_request, [], OpensslTests},
|
||||
{without_status_request, [], OpensslTests}
|
||||
].
|
||||
|
||||
init_per_suite(Config) ->
|
||||
application:load(emqx),
|
||||
emqx_config:save_schema_mod_and_names(emqx_schema),
|
||||
emqx_common_test_helpers:boot_modules(all),
|
||||
Config.
|
||||
|
||||
end_per_suite(_Config) ->
|
||||
ok.
|
||||
|
||||
init_per_group(tls12, Config) ->
|
||||
[{tls_vsn, "-tls1_2"} | Config];
|
||||
init_per_group(tls13, Config) ->
|
||||
[{tls_vsn, "-tls1_3"} | Config];
|
||||
init_per_group(with_status_request, Config) ->
|
||||
[{status_request, true} | Config];
|
||||
init_per_group(without_status_request, Config) ->
|
||||
[{status_request, false} | Config];
|
||||
init_per_group(_Group, Config) ->
|
||||
Config.
|
||||
|
||||
end_per_group(_Group, _Config) ->
|
||||
ok.
|
||||
|
||||
init_per_testcase(t_openssl_client, Config) ->
|
||||
ct:timetrap({seconds, 30}),
|
||||
DataDir = ?config(data_dir, Config),
|
||||
Handler = fun(_) -> ok end,
|
||||
{OCSPResponderPort, OCSPOSPid} = setup_openssl_ocsp(Config),
|
||||
ConfFilePath = filename:join([DataDir, "openssl_listeners.conf"]),
|
||||
emqx_common_test_helpers:start_apps(
|
||||
[],
|
||||
Handler,
|
||||
#{
|
||||
extra_mustache_vars => [{test_data_dir, DataDir}],
|
||||
conf_file_path => ConfFilePath
|
||||
}
|
||||
),
|
||||
ct:sleep(1_000),
|
||||
[
|
||||
{ocsp_responder_port, OCSPResponderPort},
|
||||
{ocsp_responder_os_pid, OCSPOSPid}
|
||||
| Config
|
||||
];
|
||||
init_per_testcase(TestCase, Config) when
|
||||
TestCase =:= t_update_listener;
|
||||
TestCase =:= t_validations
|
||||
->
|
||||
%% when running emqx standalone tests, we can't use those
|
||||
%% features.
|
||||
case does_module_exist(emqx_mgmt_api_test_util) of
|
||||
true ->
|
||||
ct:timetrap({seconds, 30}),
|
||||
%% start the listener with the default (non-ocsp) config
|
||||
TestPid = self(),
|
||||
ok = meck:new(emqx_ocsp_cache, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(
|
||||
emqx_ocsp_cache,
|
||||
http_get,
|
||||
fun(URL, _HTTPTimeout) ->
|
||||
ct:pal("ocsp http request ~p", [URL]),
|
||||
TestPid ! {http_get, URL},
|
||||
{ok, {{"HTTP/1.0", 200, 'OK'}, [], <<"ocsp response">>}}
|
||||
end
|
||||
),
|
||||
emqx_mgmt_api_test_util:init_suite([emqx_conf]),
|
||||
snabbkaffe:start_trace(),
|
||||
Config;
|
||||
false ->
|
||||
[{skip_does_not_apply, true} | Config]
|
||||
end;
|
||||
init_per_testcase(t_ocsp_responder_error_responses, Config) ->
|
||||
ct:timetrap({seconds, 30}),
|
||||
TestPid = self(),
|
||||
ok = meck:new(emqx_ocsp_cache, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(
|
||||
emqx_ocsp_cache,
|
||||
http_get,
|
||||
fun(URL, _HTTPTimeout) ->
|
||||
ct:pal("ocsp http request ~p", [URL]),
|
||||
TestPid ! {http_get, URL},
|
||||
persistent_term:get({?MODULE, http_response})
|
||||
end
|
||||
),
|
||||
DataDir = ?config(data_dir, Config),
|
||||
Type = ssl,
|
||||
Name = test_ocsp,
|
||||
ListenerOpts = #{
|
||||
ssl_options =>
|
||||
#{
|
||||
certfile => filename:join(DataDir, "server.pem"),
|
||||
ocsp => #{
|
||||
enable_ocsp_stapling => true,
|
||||
responder_url => <<"http://localhost:9877/">>,
|
||||
issuer_pem => filename:join(DataDir, "ocsp-issuer.pem"),
|
||||
refresh_http_timeout => 15_000,
|
||||
refresh_interval => 1_000
|
||||
}
|
||||
}
|
||||
},
|
||||
Conf = #{listeners => #{Type => #{Name => ListenerOpts}}},
|
||||
ConfBin = emqx_map_lib:binary_key_map(Conf),
|
||||
hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}),
|
||||
emqx_config:put_listener_conf(Type, Name, [], ListenerOpts),
|
||||
snabbkaffe:start_trace(),
|
||||
_Heir = spawn_dummy_heir(),
|
||||
{ok, CachePid} = emqx_ocsp_cache:start_link(),
|
||||
[
|
||||
{cache_pid, CachePid}
|
||||
| Config
|
||||
];
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
ct:timetrap({seconds, 10}),
|
||||
TestPid = self(),
|
||||
ok = meck:new(emqx_ocsp_cache, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(
|
||||
emqx_ocsp_cache,
|
||||
http_get,
|
||||
fun(URL, _HTTPTimeout) ->
|
||||
TestPid ! {http_get, URL},
|
||||
{ok, {{"HTTP/1.0", 200, 'OK'}, [], <<"ocsp response">>}}
|
||||
end
|
||||
),
|
||||
_Heir = spawn_dummy_heir(),
|
||||
{ok, CachePid} = emqx_ocsp_cache:start_link(),
|
||||
DataDir = ?config(data_dir, Config),
|
||||
Type = ssl,
|
||||
Name = test_ocsp,
|
||||
ListenerOpts = #{
|
||||
ssl_options =>
|
||||
#{
|
||||
certfile => filename:join(DataDir, "server.pem"),
|
||||
ocsp => #{
|
||||
enable_ocsp_stapling => true,
|
||||
responder_url => <<"http://localhost:9877/">>,
|
||||
issuer_pem => filename:join(DataDir, "ocsp-issuer.pem"),
|
||||
refresh_http_timeout => 15_000,
|
||||
refresh_interval => 1_000
|
||||
}
|
||||
}
|
||||
},
|
||||
Conf = #{listeners => #{Type => #{Name => ListenerOpts}}},
|
||||
ConfBin = emqx_map_lib:binary_key_map(Conf),
|
||||
hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}),
|
||||
emqx_config:put_listener_conf(Type, Name, [], ListenerOpts),
|
||||
snabbkaffe:start_trace(),
|
||||
[
|
||||
{cache_pid, CachePid}
|
||||
| Config
|
||||
].
|
||||
|
||||
end_per_testcase(t_openssl_client, Config) ->
|
||||
OCSPResponderOSPid = ?config(ocsp_responder_os_pid, Config),
|
||||
catch kill_pid(OCSPResponderOSPid),
|
||||
emqx_common_test_helpers:stop_apps([]),
|
||||
ok;
|
||||
end_per_testcase(TestCase, Config) when
|
||||
TestCase =:= t_update_listener;
|
||||
TestCase =:= t_validations
|
||||
->
|
||||
Skip = proplists:get_bool(skip_does_not_apply, Config),
|
||||
case Skip of
|
||||
true ->
|
||||
ok;
|
||||
false ->
|
||||
emqx_mgmt_api_test_util:end_suite([emqx_conf]),
|
||||
meck:unload([emqx_ocsp_cache]),
|
||||
ok
|
||||
end;
|
||||
end_per_testcase(t_ocsp_responder_error_responses, Config) ->
|
||||
CachePid = ?config(cache_pid, Config),
|
||||
catch gen_server:stop(CachePid),
|
||||
meck:unload([emqx_ocsp_cache]),
|
||||
persistent_term:erase({?MODULE, http_response}),
|
||||
ok;
|
||||
end_per_testcase(_TestCase, Config) ->
|
||||
CachePid = ?config(cache_pid, Config),
|
||||
catch gen_server:stop(CachePid),
|
||||
meck:unload([emqx_ocsp_cache]),
|
||||
ok.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Helper functions
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
%% The real cache makes `emqx_kernel_sup' the heir to its ETS table.
|
||||
%% In some tests, we don't start the full supervision tree, so we need
|
||||
%% this dummy process.
|
||||
spawn_dummy_heir() ->
|
||||
spawn_link(fun() ->
|
||||
true = register(emqx_kernel_sup, self()),
|
||||
receive
|
||||
stop -> ok
|
||||
end
|
||||
end).
|
||||
|
||||
does_module_exist(Mod) ->
|
||||
case erlang:module_loaded(Mod) of
|
||||
true ->
|
||||
true;
|
||||
false ->
|
||||
case code:ensure_loaded(Mod) of
|
||||
ok ->
|
||||
true;
|
||||
{module, Mod} ->
|
||||
true;
|
||||
_ ->
|
||||
false
|
||||
end
|
||||
end.
|
||||
|
||||
assert_no_http_get() ->
|
||||
receive
|
||||
{http_get, _URL} ->
|
||||
error(should_be_cached)
|
||||
after 0 ->
|
||||
ok
|
||||
end.
|
||||
|
||||
assert_http_get(N) ->
|
||||
assert_http_get(N, 0).
|
||||
|
||||
assert_http_get(0, _Timeout) ->
|
||||
ok;
|
||||
assert_http_get(N, Timeout) when N > 0 ->
|
||||
receive
|
||||
{http_get, URL} ->
|
||||
?assertMatch(<<"http://localhost:9877/", _Request64/binary>>, URL),
|
||||
ok
|
||||
after Timeout ->
|
||||
error({no_http_get, #{mailbox => process_info(self(), messages)}})
|
||||
end,
|
||||
assert_http_get(N - 1, Timeout).
|
||||
|
||||
openssl_client_command(TLSVsn, RequestStatus, Config) ->
|
||||
DataDir = ?config(data_dir, Config),
|
||||
ClientCert = filename:join([DataDir, "client.pem"]),
|
||||
ClientKey = filename:join([DataDir, "client.key"]),
|
||||
Cacert = filename:join([DataDir, "ca.pem"]),
|
||||
Openssl = os:find_executable("openssl"),
|
||||
StatusOpt =
|
||||
case RequestStatus of
|
||||
true -> ["-status"];
|
||||
false -> []
|
||||
end,
|
||||
[
|
||||
Openssl,
|
||||
"s_client",
|
||||
"-connect",
|
||||
"localhost:8883",
|
||||
%% needed to trigger `sni_fun'
|
||||
"-servername",
|
||||
"localhost",
|
||||
TLSVsn,
|
||||
"-CAfile",
|
||||
Cacert,
|
||||
"-cert",
|
||||
ClientCert,
|
||||
"-key",
|
||||
ClientKey
|
||||
] ++ StatusOpt.
|
||||
|
||||
run_openssl_client(TLSVsn, RequestStatus, Config) ->
|
||||
Command0 = openssl_client_command(TLSVsn, RequestStatus, Config),
|
||||
Command = lists:flatten(lists:join(" ", Command0)),
|
||||
os:cmd(Command).
|
||||
|
||||
%% fixme: for some reason, the port program doesn't return any output
|
||||
%% when running in OTP 25 using `open_port`, but the `os:cmd` version
|
||||
%% works fine.
|
||||
%% the `open_port' version works fine in OTP 24 for some reason.
|
||||
spawn_openssl_client(TLSVsn, RequestStatus, Config) ->
|
||||
[Openssl | Args] = openssl_client_command(TLSVsn, RequestStatus, Config),
|
||||
open_port(
|
||||
{spawn_executable, Openssl},
|
||||
[
|
||||
{args, Args},
|
||||
binary,
|
||||
stderr_to_stdout
|
||||
]
|
||||
).
|
||||
|
||||
spawn_openssl_ocsp_responder(Config) ->
|
||||
DataDir = ?config(data_dir, Config),
|
||||
IssuerCert = filename:join([DataDir, "ocsp-issuer.pem"]),
|
||||
IssuerKey = filename:join([DataDir, "ocsp-issuer.key"]),
|
||||
Cacert = filename:join([DataDir, "ca.pem"]),
|
||||
Index = filename:join([DataDir, "index.txt"]),
|
||||
Openssl = os:find_executable("openssl"),
|
||||
open_port(
|
||||
{spawn_executable, Openssl},
|
||||
[
|
||||
{args, [
|
||||
"ocsp",
|
||||
"-ignore_err",
|
||||
"-port",
|
||||
"9877",
|
||||
"-CA",
|
||||
Cacert,
|
||||
"-rkey",
|
||||
IssuerKey,
|
||||
"-rsigner",
|
||||
IssuerCert,
|
||||
"-index",
|
||||
Index
|
||||
]},
|
||||
binary,
|
||||
stderr_to_stdout
|
||||
]
|
||||
).
|
||||
|
||||
kill_pid(OSPid) ->
|
||||
os:cmd("kill -9 " ++ integer_to_list(OSPid)).
|
||||
|
||||
test_ocsp_connection(TLSVsn, WithRequestStatus = true, Config) ->
|
||||
OCSPOutput = run_openssl_client(TLSVsn, WithRequestStatus, Config),
|
||||
?assertMatch(
|
||||
{match, _},
|
||||
re:run(OCSPOutput, "OCSP Response Status: successful"),
|
||||
#{mailbox => process_info(self(), messages)}
|
||||
),
|
||||
?assertMatch(
|
||||
{match, _},
|
||||
re:run(OCSPOutput, "Cert Status: good"),
|
||||
#{mailbox => process_info(self(), messages)}
|
||||
),
|
||||
ok;
|
||||
test_ocsp_connection(TLSVsn, WithRequestStatus = false, Config) ->
|
||||
OCSPOutput = run_openssl_client(TLSVsn, WithRequestStatus, Config),
|
||||
?assertMatch(
|
||||
nomatch,
|
||||
re:run(OCSPOutput, "Cert Status: good", [{capture, none}]),
|
||||
#{mailbox => process_info(self(), messages)}
|
||||
),
|
||||
ok.
|
||||
|
||||
ensure_port_open(Port) ->
|
||||
do_ensure_port_open(Port, 10).
|
||||
|
||||
do_ensure_port_open(Port, 0) ->
|
||||
error({port_not_open, Port});
|
||||
do_ensure_port_open(Port, N) when N > 0 ->
|
||||
Timeout = 1_000,
|
||||
case gen_tcp:connect("localhost", Port, [], Timeout) of
|
||||
{ok, Sock} ->
|
||||
gen_tcp:close(Sock),
|
||||
ok;
|
||||
{error, _} ->
|
||||
ct:sleep(500),
|
||||
do_ensure_port_open(Port, N - 1)
|
||||
end.
|
||||
|
||||
get_sni_fun(ListenerID) ->
|
||||
#{opts := Opts} = emqx_listeners:find_by_id(ListenerID),
|
||||
SSLOpts = proplists:get_value(ssl_options, Opts),
|
||||
proplists:get_value(sni_fun, SSLOpts).
|
||||
|
||||
openssl_version() ->
|
||||
Res0 = string:trim(os:cmd("openssl version"), trailing),
|
||||
[_, Res] = string:split(Res0, " "),
|
||||
{match, [Version]} = re:run(Res, "^([^ ]+)", [{capture, first, list}]),
|
||||
Version.
|
||||
|
||||
setup_openssl_ocsp(Config) ->
|
||||
OCSPResponderPort = spawn_openssl_ocsp_responder(Config),
|
||||
{os_pid, OCSPOSPid} = erlang:port_info(OCSPResponderPort, os_pid),
|
||||
%%%%%%%% Warning!!!
|
||||
%% Apparently, openssl 3.0.7 introduced a bug in the responder
|
||||
%% that makes it hang forever if one probes the port with
|
||||
%% `gen_tcp:open' / `gen_tcp:close'... Comment this out if
|
||||
%% openssl gets updated in CI or in your local machine.
|
||||
OpenSSLVersion = openssl_version(),
|
||||
ct:pal("openssl version: ~p", [OpenSSLVersion]),
|
||||
case OpenSSLVersion of
|
||||
"3." ++ _ ->
|
||||
%% hope that the responder has started...
|
||||
ok;
|
||||
_ ->
|
||||
ensure_port_open(9877)
|
||||
end,
|
||||
ct:sleep(1_000),
|
||||
{OCSPResponderPort, OCSPOSPid}.
|
||||
|
||||
request(Method, Url, QueryParams, Body) ->
|
||||
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||
Opts = #{return_all => true},
|
||||
case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of
|
||||
{ok, {Reason, Headers, BodyR}} ->
|
||||
{ok, {Reason, Headers, emqx_json:decode(BodyR, [return_maps])}};
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
get_listener_via_api(ListenerId) ->
|
||||
Path = emqx_mgmt_api_test_util:api_path(["listeners", ListenerId]),
|
||||
request(get, Path, [], []).
|
||||
|
||||
update_listener_via_api(ListenerId, NewConfig) ->
|
||||
Path = emqx_mgmt_api_test_util:api_path(["listeners", ListenerId]),
|
||||
request(put, Path, [], NewConfig).
|
||||
|
||||
put_http_response(Response) ->
|
||||
persistent_term:put({?MODULE, http_response}, Response).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Test cases
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
t_request_ocsp_response(_Config) ->
|
||||
?check_trace(
|
||||
begin
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
%% not yet cached.
|
||||
?assertEqual([], ets:tab2list(?CACHE_TAB)),
|
||||
?assertEqual(
|
||||
{ok, <<"ocsp response">>},
|
||||
emqx_ocsp_cache:fetch_response(ListenerID)
|
||||
),
|
||||
assert_http_get(1),
|
||||
?assertMatch([{_, <<"ocsp response">>}], ets:tab2list(?CACHE_TAB)),
|
||||
%% already cached; should not perform request again.
|
||||
?assertEqual(
|
||||
{ok, <<"ocsp response">>},
|
||||
emqx_ocsp_cache:fetch_response(ListenerID)
|
||||
),
|
||||
assert_no_http_get(),
|
||||
ok
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assert(
|
||||
?strict_causality(
|
||||
#{?snk_kind := ocsp_cache_miss, listener_id := _ListenerID},
|
||||
#{?snk_kind := ocsp_http_fetch_and_cache, listener_id := _ListenerID},
|
||||
Trace
|
||||
)
|
||||
),
|
||||
?assertMatch(
|
||||
[_],
|
||||
?of_kind(ocsp_cache_miss, Trace)
|
||||
),
|
||||
?assertMatch(
|
||||
[_],
|
||||
?of_kind(ocsp_http_fetch_and_cache, Trace)
|
||||
),
|
||||
?assertMatch(
|
||||
[_],
|
||||
?of_kind(ocsp_cache_hit, Trace)
|
||||
),
|
||||
ok
|
||||
end
|
||||
).
|
||||
|
||||
t_request_ocsp_response_restart_cache(Config) ->
|
||||
process_flag(trap_exit, true),
|
||||
CachePid = ?config(cache_pid, Config),
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
?check_trace(
|
||||
begin
|
||||
[] = ets:tab2list(?CACHE_TAB),
|
||||
{ok, _} = emqx_ocsp_cache:fetch_response(ListenerID),
|
||||
?wait_async_action(
|
||||
begin
|
||||
Ref = monitor(process, CachePid),
|
||||
exit(CachePid, kill),
|
||||
receive
|
||||
{'DOWN', Ref, process, CachePid, killed} ->
|
||||
ok
|
||||
after 1_000 ->
|
||||
error(cache_not_killed)
|
||||
end,
|
||||
{ok, _} = emqx_ocsp_cache:start_link(),
|
||||
ok
|
||||
end,
|
||||
#{?snk_kind := ocsp_cache_init}
|
||||
),
|
||||
{ok, _} = emqx_ocsp_cache:fetch_response(ListenerID),
|
||||
ok
|
||||
end,
|
||||
fun(Trace) ->
|
||||
%% Only one fetch because the cache table was preserved by
|
||||
%% its heir ("emqx_kernel_sup").
|
||||
?assertMatch(
|
||||
[_],
|
||||
?of_kind(ocsp_http_fetch_and_cache, Trace)
|
||||
),
|
||||
assert_http_get(1),
|
||||
ok
|
||||
end
|
||||
).
|
||||
|
||||
t_request_ocsp_response_bad_http_status(_Config) ->
|
||||
TestPid = self(),
|
||||
meck:expect(
|
||||
emqx_ocsp_cache,
|
||||
http_get,
|
||||
fun(URL, _HTTPTimeout) ->
|
||||
TestPid ! {http_get, URL},
|
||||
{ok, {{"HTTP/1.0", 404, 'Not Found'}, [], <<"not found">>}}
|
||||
end
|
||||
),
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
%% not yet cached.
|
||||
?assertEqual([], ets:tab2list(?CACHE_TAB)),
|
||||
?assertEqual(
|
||||
error,
|
||||
emqx_ocsp_cache:fetch_response(ListenerID)
|
||||
),
|
||||
assert_http_get(1),
|
||||
?assertEqual([], ets:tab2list(?CACHE_TAB)),
|
||||
ok.
|
||||
|
||||
t_request_ocsp_response_timeout(_Config) ->
|
||||
TestPid = self(),
|
||||
meck:expect(
|
||||
emqx_ocsp_cache,
|
||||
http_get,
|
||||
fun(URL, _HTTPTimeout) ->
|
||||
TestPid ! {http_get, URL},
|
||||
{error, timeout}
|
||||
end
|
||||
),
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
%% not yet cached.
|
||||
?assertEqual([], ets:tab2list(?CACHE_TAB)),
|
||||
?assertEqual(
|
||||
error,
|
||||
emqx_ocsp_cache:fetch_response(ListenerID)
|
||||
),
|
||||
assert_http_get(1),
|
||||
?assertEqual([], ets:tab2list(?CACHE_TAB)),
|
||||
ok.
|
||||
|
||||
t_register_listener(_Config) ->
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
Conf = emqx_config:get_listener_conf(ssl, test_ocsp, []),
|
||||
%% should fetch and cache immediately
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
emqx_ocsp_cache:register_listener(ListenerID, Conf),
|
||||
#{?snk_kind := ocsp_http_fetch_and_cache, listener_id := ListenerID}
|
||||
),
|
||||
assert_http_get(1),
|
||||
?assertMatch([{_, <<"ocsp response">>}], ets:tab2list(?CACHE_TAB)),
|
||||
ok.
|
||||
|
||||
t_register_twice(_Config) ->
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
Conf = emqx_config:get_listener_conf(ssl, test_ocsp, []),
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
emqx_ocsp_cache:register_listener(ListenerID, Conf),
|
||||
#{?snk_kind := ocsp_http_fetch_and_cache, listener_id := ListenerID}
|
||||
),
|
||||
assert_http_get(1),
|
||||
?assertMatch([{_, <<"ocsp response">>}], ets:tab2list(?CACHE_TAB)),
|
||||
%% should have no problem in registering the same listener again.
|
||||
%% this prompts an immediate refresh.
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
emqx_ocsp_cache:register_listener(ListenerID, Conf),
|
||||
#{?snk_kind := ocsp_http_fetch_and_cache, listener_id := ListenerID}
|
||||
),
|
||||
ok.
|
||||
|
||||
t_refresh_periodically(_Config) ->
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
Conf = emqx_config:get_listener_conf(ssl, test_ocsp, []),
|
||||
%% should refresh periodically
|
||||
{ok, SubRef} =
|
||||
snabbkaffe:subscribe(
|
||||
fun
|
||||
(#{?snk_kind := ocsp_http_fetch_and_cache, listener_id := ListenerID0}) ->
|
||||
ListenerID0 =:= ListenerID;
|
||||
(_) ->
|
||||
false
|
||||
end,
|
||||
_NEvents = 2,
|
||||
_Timeout = 10_000
|
||||
),
|
||||
ok = emqx_ocsp_cache:register_listener(ListenerID, Conf),
|
||||
?assertMatch({ok, [_, _]}, snabbkaffe:receive_events(SubRef)),
|
||||
assert_http_get(2),
|
||||
ok.
|
||||
|
||||
t_sni_fun_success(_Config) ->
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
ServerName = "localhost",
|
||||
?assertEqual(
|
||||
[
|
||||
{certificate_status, #certificate_status{
|
||||
status_type = ?CERTIFICATE_STATUS_TYPE_OCSP,
|
||||
response = <<"ocsp response">>
|
||||
}}
|
||||
],
|
||||
emqx_ocsp_cache:sni_fun(ServerName, ListenerID)
|
||||
),
|
||||
ok.
|
||||
|
||||
t_sni_fun_http_error(_Config) ->
|
||||
meck:expect(
|
||||
emqx_ocsp_cache,
|
||||
http_get,
|
||||
fun(_URL, _HTTPTimeout) ->
|
||||
{error, timeout}
|
||||
end
|
||||
),
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
ServerName = "localhost",
|
||||
?assertEqual(
|
||||
[],
|
||||
emqx_ocsp_cache:sni_fun(ServerName, ListenerID)
|
||||
),
|
||||
ok.
|
||||
|
||||
%% check that we can start with a non-ocsp stapling listener and
|
||||
%% restart it with the new ocsp config.
|
||||
t_update_listener(Config) ->
|
||||
case proplists:get_bool(skip_does_not_apply, Config) of
|
||||
true ->
|
||||
ok;
|
||||
false ->
|
||||
do_t_update_listener(Config)
|
||||
end.
|
||||
|
||||
do_t_update_listener(Config) ->
|
||||
DataDir = ?config(data_dir, Config),
|
||||
Keyfile = filename:join([DataDir, "server.key"]),
|
||||
Certfile = filename:join([DataDir, "server.pem"]),
|
||||
Cacertfile = filename:join([DataDir, "ca.pem"]),
|
||||
IssuerPemPath = filename:join([DataDir, "ocsp-issuer.pem"]),
|
||||
{ok, IssuerPem} = file:read_file(IssuerPemPath),
|
||||
|
||||
%% no ocsp at first
|
||||
ListenerId = "ssl:default",
|
||||
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"ssl_options">> :=
|
||||
#{
|
||||
<<"ocsp">> :=
|
||||
#{<<"enable_ocsp_stapling">> := false}
|
||||
}
|
||||
},
|
||||
ListenerData0
|
||||
),
|
||||
assert_no_http_get(),
|
||||
|
||||
%% configure ocsp
|
||||
OCSPConfig =
|
||||
#{
|
||||
<<"ssl_options">> =>
|
||||
#{
|
||||
<<"keyfile">> => Keyfile,
|
||||
<<"certfile">> => Certfile,
|
||||
<<"cacertfile">> => Cacertfile,
|
||||
<<"ocsp">> =>
|
||||
#{
|
||||
<<"enable_ocsp_stapling">> => true,
|
||||
%% we use the file contents to check that
|
||||
%% the API converts that to an internally
|
||||
%% managed file
|
||||
<<"issuer_pem">> => IssuerPem,
|
||||
<<"responder_url">> => <<"http://localhost:9877">>
|
||||
}
|
||||
}
|
||||
},
|
||||
ListenerData1 = emqx_map_lib:deep_merge(ListenerData0, OCSPConfig),
|
||||
{ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"ssl_options">> :=
|
||||
#{
|
||||
<<"ocsp">> :=
|
||||
#{
|
||||
<<"enable_ocsp_stapling">> := true,
|
||||
<<"issuer_pem">> := _,
|
||||
<<"responder_url">> := _
|
||||
}
|
||||
}
|
||||
},
|
||||
ListenerData2
|
||||
),
|
||||
%% issuer pem should have been uploaded and saved to a new
|
||||
%% location
|
||||
?assertNotEqual(
|
||||
IssuerPemPath,
|
||||
emqx_map_lib:deep_get(
|
||||
[<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>],
|
||||
ListenerData2
|
||||
)
|
||||
),
|
||||
?assertNotEqual(
|
||||
IssuerPem,
|
||||
emqx_map_lib:deep_get(
|
||||
[<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>],
|
||||
ListenerData2
|
||||
)
|
||||
),
|
||||
assert_http_get(1, 5_000),
|
||||
ok.
|
||||
|
||||
t_ocsp_responder_error_responses(_Config) ->
|
||||
ListenerId = <<"ssl:test_ocsp">>,
|
||||
Conf = emqx_config:get_listener_conf(ssl, test_ocsp, []),
|
||||
?check_trace(
|
||||
begin
|
||||
%% successful response without headers
|
||||
put_http_response({ok, {200, <<"ocsp_response">>}}),
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
emqx_ocsp_cache:register_listener(ListenerId, Conf),
|
||||
#{?snk_kind := ocsp_http_fetch_and_cache, headers := false},
|
||||
1_000
|
||||
),
|
||||
|
||||
%% error response with headers
|
||||
put_http_response({ok, {{"HTTP/1.0", 500, "Internal Server Error"}, [], <<"error">>}}),
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
emqx_ocsp_cache:register_listener(ListenerId, Conf),
|
||||
#{?snk_kind := ocsp_http_fetch_bad_code, code := 500, headers := true},
|
||||
1_000
|
||||
),
|
||||
|
||||
%% error response without headers
|
||||
put_http_response({ok, {500, <<"error">>}}),
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
emqx_ocsp_cache:register_listener(ListenerId, Conf),
|
||||
#{?snk_kind := ocsp_http_fetch_bad_code, code := 500, headers := false},
|
||||
1_000
|
||||
),
|
||||
|
||||
%% econnrefused
|
||||
put_http_response(
|
||||
{error,
|
||||
{failed_connect, [
|
||||
{to_address, {"localhost", 9877}},
|
||||
{inet, [inet], econnrefused}
|
||||
]}}
|
||||
),
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
emqx_ocsp_cache:register_listener(ListenerId, Conf),
|
||||
#{?snk_kind := ocsp_http_fetch_error, error := {failed_connect, _}},
|
||||
1_000
|
||||
),
|
||||
|
||||
%% timeout
|
||||
put_http_response({error, timeout}),
|
||||
{ok, {ok, _}} =
|
||||
?wait_async_action(
|
||||
emqx_ocsp_cache:register_listener(ListenerId, Conf),
|
||||
#{?snk_kind := ocsp_http_fetch_error, error := timeout},
|
||||
1_000
|
||||
),
|
||||
|
||||
ok
|
||||
end,
|
||||
[]
|
||||
),
|
||||
ok.
|
||||
|
||||
t_unknown_requests(_Config) ->
|
||||
emqx_ocsp_cache ! unknown,
|
||||
?assertEqual(ok, gen_server:cast(emqx_ocsp_cache, unknown)),
|
||||
?assertEqual({error, {unknown_call, unknown}}, gen_server:call(emqx_ocsp_cache, unknown)),
|
||||
ok.
|
||||
|
||||
t_validations(Config) ->
|
||||
case proplists:get_bool(skip_does_not_apply, Config) of
|
||||
true ->
|
||||
ok;
|
||||
false ->
|
||||
do_t_validations(Config)
|
||||
end.
|
||||
|
||||
do_t_validations(_Config) ->
|
||||
ListenerId = <<"ssl:default">>,
|
||||
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
|
||||
|
||||
ListenerData1 =
|
||||
emqx_map_lib:deep_merge(
|
||||
ListenerData0,
|
||||
#{
|
||||
<<"ssl_options">> =>
|
||||
#{<<"ocsp">> => #{<<"enable_ocsp_stapling">> => true}}
|
||||
}
|
||||
),
|
||||
{error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1),
|
||||
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} =
|
||||
emqx_json:decode(ResRaw1, [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"mismatches">> :=
|
||||
#{
|
||||
<<"listeners:ssl_not_required_bind">> :=
|
||||
#{
|
||||
<<"reason">> :=
|
||||
<<"The responder URL is required for OCSP stapling">>
|
||||
}
|
||||
}
|
||||
},
|
||||
emqx_json:decode(MsgRaw1, [return_maps])
|
||||
),
|
||||
|
||||
ListenerData2 =
|
||||
emqx_map_lib:deep_merge(
|
||||
ListenerData0,
|
||||
#{
|
||||
<<"ssl_options">> =>
|
||||
#{
|
||||
<<"ocsp">> => #{
|
||||
<<"enable_ocsp_stapling">> => true,
|
||||
<<"responder_url">> => <<"http://localhost:9877">>
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
{error, {_, _, ResRaw2}} = update_listener_via_api(ListenerId, ListenerData2),
|
||||
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw2} =
|
||||
emqx_json:decode(ResRaw2, [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"mismatches">> :=
|
||||
#{
|
||||
<<"listeners:ssl_not_required_bind">> :=
|
||||
#{
|
||||
<<"reason">> :=
|
||||
<<"The issuer PEM path is required for OCSP stapling">>
|
||||
}
|
||||
}
|
||||
},
|
||||
emqx_json:decode(MsgRaw2, [return_maps])
|
||||
),
|
||||
|
||||
ListenerData3a =
|
||||
emqx_map_lib:deep_merge(
|
||||
ListenerData0,
|
||||
#{
|
||||
<<"ssl_options">> =>
|
||||
#{
|
||||
<<"ocsp">> => #{
|
||||
<<"enable_ocsp_stapling">> => true,
|
||||
<<"responder_url">> => <<"http://localhost:9877">>,
|
||||
<<"issuer_pem">> => <<"some_file">>
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
ListenerData3 = emqx_map_lib:deep_remove([<<"ssl_options">>, <<"certfile">>], ListenerData3a),
|
||||
{error, {_, _, ResRaw3}} = update_listener_via_api(ListenerId, ListenerData3),
|
||||
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw3} =
|
||||
emqx_json:decode(ResRaw3, [return_maps]),
|
||||
?assertMatch(
|
||||
#{
|
||||
<<"mismatches">> :=
|
||||
#{
|
||||
<<"listeners:ssl_not_required_bind">> :=
|
||||
#{
|
||||
<<"reason">> :=
|
||||
<<"Server certificate must be defined when using OCSP stapling">>
|
||||
}
|
||||
}
|
||||
},
|
||||
emqx_json:decode(MsgRaw3, [return_maps])
|
||||
),
|
||||
|
||||
ok.
|
||||
|
||||
t_unknown_error_fetching_ocsp_response(_Config) ->
|
||||
ListenerID = <<"ssl:test_ocsp">>,
|
||||
TestPid = self(),
|
||||
ok = meck:expect(
|
||||
emqx_ocsp_cache,
|
||||
http_get,
|
||||
fun(_RequestURI, _HTTPTimeout) ->
|
||||
TestPid ! error_raised,
|
||||
meck:exception(error, something_went_wrong)
|
||||
end
|
||||
),
|
||||
?assertEqual(error, emqx_ocsp_cache:fetch_response(ListenerID)),
|
||||
receive
|
||||
error_raised -> ok
|
||||
after 200 -> ct:fail("should have tried to fetch ocsp response")
|
||||
end,
|
||||
ok.
|
||||
|
||||
t_openssl_client(Config) ->
|
||||
TLSVsn = ?config(tls_vsn, Config),
|
||||
WithStatusRequest = ?config(status_request, Config),
|
||||
%% ensure ocsp response is already cached.
|
||||
ListenerID = <<"ssl:default">>,
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_ocsp_cache:fetch_response(ListenerID),
|
||||
#{msgs => process_info(self(), messages)}
|
||||
),
|
||||
timer:sleep(500),
|
||||
test_ocsp_connection(TLSVsn, WithStatusRequest, Config).
|
|
@ -0,0 +1,68 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIF+zCCA+OgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwbzELMAkGA1UEBhMCU0Ux
|
||||
EjAQBgNVBAgMCVN0b2NraG9sbTESMBAGA1UEBwwJU3RvY2tob2xtMRIwEAYDVQQK
|
||||
DAlNeU9yZ05hbWUxETAPBgNVBAsMCE15Um9vdENBMREwDwYDVQQDDAhNeVJvb3RD
|
||||
QTAeFw0yMzAxMTIxMzA4MTZaFw0zMzAxMDkxMzA4MTZaMGsxCzAJBgNVBAYTAlNF
|
||||
MRIwEAYDVQQIDAlTdG9ja2hvbG0xEjAQBgNVBAoMCU15T3JnTmFtZTEZMBcGA1UE
|
||||
CwwQTXlJbnRlcm1lZGlhdGVDQTEZMBcGA1UEAwwQTXlJbnRlcm1lZGlhdGVDQTCC
|
||||
AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALQG7dMeU/y9HDNHzhydR0bm
|
||||
wN9UGplqJOJPwqJRaZZcrn9umgJ9SU2il2ceEVxMDwzBWCRKJO5/H9A9k13SqsXM
|
||||
2c2c9xXfIF1kb820lCm1Uow5hZ/auDjxliNk9kNJDigCRi3QoIs/dVeWzFsgEC2l
|
||||
gxRqauN2eNFb6/yXY788YALHBsCRV2NFOFXxtPsvLXpD9Q/8EqYsSMuLARRdHVNU
|
||||
ryaEF5lhShpcuz0TlIuTy2TiuXJUtJ+p7a4Z7friZ6JsrmQWsVQBj44F8TJRHWzW
|
||||
C7vm9c+dzEX9eqbr5iPL+L4ctMW9Lz6ePcYfIXne6CElusRUf8G+xM1uwovF9bpV
|
||||
+9IqY7tAu9G1iY9iNtJgNNDKOCcOGKcZCx6Cg1XYOEKReNnUMazvYeqRrrjV5WQ0
|
||||
vOcD5zcBRNTXCddCLa7U0guXP9mQrfuk4NTH1Bt77JieTJ8cfDXHwtaKf6aGbmZP
|
||||
wl1Xi/GuXNUP/xeog78RKyFwBmjt2JKwvWzMpfmH4mEkG9moh2alva+aEz6LIJuP
|
||||
16g6s0Q6c793/OvUtpNcewHw4Vjn39LD9o6VLp854G4n8dVpUWSbWS+sXD1ZE69H
|
||||
g/sMNMyq+09ufkbewY8xoCm/rQ1pqDZAVMWsstJEaYu7b/eb7R+RGOj1YECCV/Yp
|
||||
EZPdDotbSNRkIi2d/a1NAgMBAAGjgaQwgaEwHQYDVR0OBBYEFExwhjsVUom6tQ+S
|
||||
qq6xMUETvnPzMB8GA1UdIwQYMBaAFD90kfU5pc5l48THu0Ayj9SNpHuhMBIGA1Ud
|
||||
EwEB/wQIMAYBAf8CAQAwDgYDVR0PAQH/BAQDAgGGMDsGA1UdHwQ0MDIwMKAuoCyG
|
||||
Kmh0dHA6Ly9sb2NhbGhvc3Q6OTg3OC9pbnRlcm1lZGlhdGUuY3JsLnBlbTANBgkq
|
||||
hkiG9w0BAQsFAAOCAgEAK6NgdWQYtPNKQNBGjsgtgqTRh+k30iqSO6Y3yE1KGABO
|
||||
EuQdVqkC2qUIbCB0M0qoV0ab50KNLfU6cbshggW4LDpcMpoQpI05fukNh1jm3ZuZ
|
||||
0xsB7vlmlsv00tpqmfIl/zykPDynHKOmFh/hJP/KetMy4+wDv4/+xP31UdEj5XvG
|
||||
HvMtuqOS23A+H6WPU7ol7KzKBnU2zz/xekvPbUD3JqV+ynP5bgbIZHAndd0o9T8e
|
||||
NFX23Us4cTenU2/ZlOq694bRzGaK+n3Ksz995Nbtzv5fbUgqmf7Mcq4iHGRVtV11
|
||||
MRyBrsXZp2vbF63c4hrf2Zd6SWRoaDKRhP2DMhajpH9zZASSTlfejg/ZRO2s+Clh
|
||||
YrSTkeMAdnRt6i/q4QRcOTCfsX75RFM5v67njvTXsSaSTnAwaPi78tRtf+WSh0EP
|
||||
VVPzy++BszBVlJ1VAf7soWZHCjZxZ8ZPqVTy5okoHwWQ09WmYe8GfulDh1oj0wbK
|
||||
3FjN7bODWHJN+bFf5aQfK+tumYKoPG8RXL6QxpEzjFWjxhIMJHHMKfDWnAV1o1+7
|
||||
/1/aDzq7MzEYBbrgQR7oE5ZHtyqhCf9LUgw0Kr7/8QWuNAdeDCJzjXRROU0hJczp
|
||||
dOyfRlLbHmLLmGOnROlx6LsGNQ17zuz6SPi7ei8/ylhykawDOAGkM1+xFakmQhM=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFzzCCA7egAwIBAgIUYjc7hD7/UJ0/VPADfNfp/WpOwRowDQYJKoZIhvcNAQEL
|
||||
BQAwbzELMAkGA1UEBhMCU0UxEjAQBgNVBAgMCVN0b2NraG9sbTESMBAGA1UEBwwJ
|
||||
U3RvY2tob2xtMRIwEAYDVQQKDAlNeU9yZ05hbWUxETAPBgNVBAsMCE15Um9vdENB
|
||||
MREwDwYDVQQDDAhNeVJvb3RDQTAeFw0yMzAxMTIxMzA4MTRaFw00MzAxMDcxMzA4
|
||||
MTRaMG8xCzAJBgNVBAYTAlNFMRIwEAYDVQQIDAlTdG9ja2hvbG0xEjAQBgNVBAcM
|
||||
CVN0b2NraG9sbTESMBAGA1UECgwJTXlPcmdOYW1lMREwDwYDVQQLDAhNeVJvb3RD
|
||||
QTERMA8GA1UEAwwITXlSb290Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
|
||||
AoICAQCnBwSOYVJw47IoMHMXTVDtOYvUt3rqsurEhFcB4O8xmf2mmwr6m7s8A5Ft
|
||||
AvAehg1GvnXT3t/KiyU7BK+acTwcErGyZwS2wvdB0lpHWSpOn/u5y+4ZETvQefcj
|
||||
ZTdDOM9VN5nutpitgNb+1yL8sqSexfVbY7DnYYvFjOVBYoP/SGvM9jVjCad+0WL3
|
||||
FhuD+L8QAxzCieX3n9UMymlFwINQuEc+TDjuNcEqt+0J5EgS1fwzxb2RCVL0TNv4
|
||||
9a71hFGCNRj20AeZm99hbdufm7+0AFO7ocV5q43rLrWFUoBzqKPYIjga/cv/UdWZ
|
||||
c5RLRXw3JDSrCqkf/mOlaEhNPlmWRF9MSus5Da3wuwgGCaVzmrf30rWR5aHHcscG
|
||||
e+AOgJ4HayvBUQeb6ZlRXc0YlACiLToMKxuyxDyUcDfVEXpUIsDILF8dkiVQxEU3
|
||||
j9g6qjXiqPVdNiwpqXfBKObj8vNCzORnoHYs8cCgib3RgDVWeqkDmlSwlZE7CvQh
|
||||
U4Loj4l7813xxzYEKkVaT1JdXPWu42CG/b4Y/+f4V+3rkJkYzUwndX6kZNksIBai
|
||||
phmtvKt+CTdP1eAbT+C9AWWF3PT31+BIhuT0u9tR8BVSkXdQB8dG4M/AAJcTo640
|
||||
0mdYYOXT153gEKHJuUBm750ZTy+r6NjNvpw8VrMAakJwHqnIdQIDAQABo2MwYTAd
|
||||
BgNVHQ4EFgQUP3SR9TmlzmXjxMe7QDKP1I2ke6EwHwYDVR0jBBgwFoAUP3SR9Tml
|
||||
zmXjxMe7QDKP1I2ke6EwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
|
||||
DQYJKoZIhvcNAQELBQADggIBAFMFv4C+I0+xOAb9v6G/IOpfPBZ1ez31EXKJJBra
|
||||
lulP4nRHQMeb310JS8BIeQ3dl+7+PkSxPABZSwc3jkxdSMvhc+Z4MQtTgos+Qsjs
|
||||
gH7sTqwWeeQ0lHYxWmkXijrh5OPRZwTKzYQlkcn85BCUXl2KDuNEdiqPbDTao+lc
|
||||
lA0/UAvC6NCyFKq/jqf4CmW5Kx6yG1v1LaE+IXn7cbIXj+DaehocVXi0wsXqj03Q
|
||||
DDUHuLHZP+LBsg4e91/0Jy2ekNRTYJifSqr+9ufHl0ZX1pFDZyf396IgZ5CQZ0PJ
|
||||
nRxZHlCfsxWxmxxdy3FQSE6YwXhdTjjoAa1ApZcKkkt1beJa6/oRLze/ux5x+5q+
|
||||
4QczufHd6rjoKBi6BM3FgFQ8As5iNohHXlMHd/xITo1Go3CWw2j9TGH5vzksOElK
|
||||
B0mcwwt2zwNEjvfytc+tI5jcfGN3tiT5fVHS8hw9dWKevypLL+55Ua9G8ZgDHasT
|
||||
XFRJHgmnbyFcaAe26D2dSKmhC9u2mHBH+MaI8dj3e7wNBfpxNgp41aFIk+QTmiFW
|
||||
VXFED6DHQ/Mxq93ACalHdYg18PlIYClbT6Pf2xXBnn33YPhn5xzoTZ+cDH/RpaQp
|
||||
s0UUTSJT1UTXgtXPnZWQfvKlMjJEIiVFiLEC0sgZRlWuZDRAY0CdZJJxvQp59lqu
|
||||
cbTm
|
||||
-----END CERTIFICATE-----
|
|
@ -0,0 +1,52 @@
|
|||
-----BEGIN PRIVATE KEY-----
|
||||
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCmfZmBAOZJ8xjP
|
||||
YkpyQxTGZ40vIwOuylwSow12idWN6jcW9g5aIip+B2oKrfzR7PYsxbDodcj/KOpQ
|
||||
GwCFAujSYgYviiOsmATQ1meNocnnWjAsybw+dSXK/ZjfrVgIaJF7RHaLiDtq5TI4
|
||||
b4KjUFyh5NILIc+zfZqoNU6khUF0bcOBAG2BFaBzRf+a/hgZXEPyEnoqFK5J5k+D
|
||||
DSlKXDbOTEHhXG4QFT1hZataxptD1nTEFRYuzfmh/g4RDvWtawm9YU3j/V0Un7t/
|
||||
Taj0fAXNi30TzKOVaVcDrkVtDFHe2hX3lOJd53I5NpS7asaq+aTNytz+I3Bf/a4v
|
||||
khEgrKpjBSXXm/+Vw5NzsXNwKddSUGywmIbV2YBYnK+0DwhOXLsTPh3pv6931NVx
|
||||
pifW0nM4Ur6XCDHOPVX/jIZZ819bzAlZZ3BgMTz7pqT9906lmNRQBgSgr+Zaw9gj
|
||||
VhLg1VDfwF85eanhbzk5ITnffR+s2conZr2g+LEDsq2dJv/sEbYuHBNBkDthn439
|
||||
MgNq1nr3PV0hn8pNcgS5ZFUw+fN8403RY9TYLssB/FFYREDCax0j75qL3E7LbZK8
|
||||
JfsP8uh1e3PdR64TgtoYoTKuwtIqelmh+ryAWFjaXLPoP/AqYk1VcRCevOXUKw6L
|
||||
iskdukplk9cy2cPLcm+EP+2Js3B28QIDAQABAoICABxBnVOcZjk/QaLy1N07HtPE
|
||||
f9zz5Zxc+k7sbuzDHGQzT8m9FXb9LPaKRhhNaqbrP2WeYLW3RdduZ4QUbRxl/8Mz
|
||||
AUdAu+i/PTP/a4BJaOWztBDp5SG5iqI+s5skxZfZvXUtC6yHQMRV5VXYMRUMHsiY
|
||||
OADNKn3VT7IEKBZ6ij8bIO7sNmmN1NczllvFC6yEMQDs22B4dZMTvENq8KrO5ztQ
|
||||
jG7V29Utcact1Oz5X6EeDN+5j3P+n8M7RcJl5lLaI4NJeCl9VvaY3H7Q3J+vy+FU
|
||||
bvQ1Cz9gqzSz91L4YA3BODC2i0uyK/vjVE9Roimi6HJH34VfWONlv9IRiYgg3eLd
|
||||
xrWe/qZkxcfrHmgyH0a6fxwpT58T3d6WH0I/HwSbJuVvm2AhLy+7zXdLNRLrlE+n
|
||||
UfrJDgTwiTPlJA5JzSVGKVBSOVQs9G52aZ0IAvgN9uHHFhhqeJ3naax1q/JtRfDo
|
||||
O0w5Ga2KjAJDcAQj/Cq5+LMSI1Bxl46db17EFnA//X3Oxhv93CvsTULPiOJ7fdYC
|
||||
3X7YCJ33a7w4B8+FxmiTYLe+aR6CC8fsu4qYccCctPUje1MzUkw6gvbWSyxkbmW7
|
||||
kGTWKx4E/SL4cc+DjoC1h37RtqghDDxtYhA42wWiocDXoKPlWJoIkG1UUO5f6/2N
|
||||
cKPzQx1f23UTvIRkMYe1AoIBAQDR94YzLncfuY4DhHpqJRjv8xXfOif+ARWicnma
|
||||
CwePpv80YoQvc7B9rbPA9qZ5EG9eQF62FkTrvCwbAhA5L11aJsXxnSvZREQcdteO
|
||||
kQPnKXAJbHYh5yto/HhezdtIMmoZCGpHLmsiK20QnRyA0InKsFCKBpi20gFzOKMx
|
||||
DwuQEoANHIwUscHnansM958eKAolujfjjOeFiK+j4Vd6P0neV8EQTl6A0+R/l5td
|
||||
l69wySW7tB4xfOon5Y0D+AfGMH3alZs3ymAjBNKZIk+2hKvhDRa7IqwlckwQq6by
|
||||
Ku25LKeRVt3wOkfJitSDgiEsNA5oJQ90A4ny6hIOAvLWir6tAoIBAQDK/fPVaT7r
|
||||
7tNjzaMgeQ/VKGUauCMbPC7ST2cEvZMp9YFhdKbl/TwhC8lpJqrsKhXyKNz20FOL
|
||||
7m8XjHu4mdSs6zaPvkMnUboge9pcnIKeS5nRVsW0CRuSc4A3qhrvBp9av77gIjnr
|
||||
XJ6RyFihDji1P6RVoylyyR8k/qiZupMg7UK3vbuTpJqARObfaaprOwqVItkJX2vf
|
||||
XF7qfBCnik1jlZKWZq+9dbhz8KP4KWpKINrwIuvlAQnTJpc15beHxMEt73hxAY3A
|
||||
n3Iydtm5zsBcOLyLLgySUOsp0zlcAv0iHP3ShsFP2WeQLKR9Qapc58kkJ1lmlu71
|
||||
QdahwonpXjXVAoIBAEQnfYc1iPNiTsezg+zad9rDZBEeloaroXMmh3RKKj0l7ub5
|
||||
J4Ejo2FYNeXn6ieX/x5v9I5UcjC21vY5WDzHtBykQ1JnOyl+MEGxDc04IzUwzS4x
|
||||
57KfkAa3FPdpCMnJm4jeo2jRl3Ly96cR6IOjrWZ+jtYOyBln15KoCsjM4mr0pl4b
|
||||
Kxk4jgFpHeIaqqqmQoz2gle5kBlXQfQHHFcRHhAvGfsKBUD6Bsyn0IWzy/3nPPlN
|
||||
wRM9QeCLcZedNiDN8rw2HbkhVs1nLlkIuyk6rXQSxJMf8RMCo9Axd7JZ3uphpU7X
|
||||
DJmCwXSZPNwnLE9l4ltJ1FdLIscX1Z54tIyRYs0CggEBAIVPgnMFS21myy0gP6Fz
|
||||
4BH9FWkWxPd97sHvo5hZZ+yGbxGxqmoghPyu4PdNjbLLcN44N+Vfq36aeBrfB+GU
|
||||
JTfqwUpliXSpF7N9o0pu/tk2jS4N7ojt8k2bzPjBni6cCstuYcyQrbkEep8DFDGx
|
||||
RUzDHwmevfnEW8/P7qoG/dkB+G7zC91KnKzgkz7mBiWmAK0w1ZhyMkXeQ/d6wvVE
|
||||
vs5HzJ05kvC5/wklYIn5qPRF34MVbBZZODqTfXrIAmAHt1aTjmWov49hJ348z4BX
|
||||
Z70pBanh9B+jRM2TCniC/fsJTyiTlyD5hioJJ32bQmcBUfeMYAof1Y78ThityiSY
|
||||
2oECggEAYdkz6z+1hIMI2nIMtei1n5bLV4bWmS1nkZ3pBSMkbS7VJFAxZ53xJi0S
|
||||
StSs/bka+akvnYEoFAGhVtiaz4497qnUiquf/aBs4TUHfNGn22/LN5b8vs51ugil
|
||||
RXejaJjPLqL6jmXz5T4+TJGcH5kL6NDtYkT3IEtv5uWkQkBs0Z1Juf34nVjMbozC
|
||||
bohyOyCMOLt7HqcUpUtevSK7SXmyU4yd2UyRqFMFPi4RJjxQWFZmNFC5S1PsZBh+
|
||||
OOMNAJ1F2h2fC7KdNVBpdoNsOAPxdCNxbwGKiNHwnukvF9uvaDIw3jqKJU3g/Z6j
|
||||
rkE8Bz5a/iwO+QwdO5Q2cp5+0nm41A==
|
||||
-----END PRIVATE KEY-----
|
|
@ -0,0 +1,38 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIGmjCCBIKgAwIBAgICEAYwDQYJKoZIhvcNAQELBQAwazELMAkGA1UEBhMCU0Ux
|
||||
EjAQBgNVBAgMCVN0b2NraG9sbTESMBAGA1UECgwJTXlPcmdOYW1lMRkwFwYDVQQL
|
||||
DBBNeUludGVybWVkaWF0ZUNBMRkwFwYDVQQDDBBNeUludGVybWVkaWF0ZUNBMB4X
|
||||
DTIzMDMwNjE5NTA0N1oXDTMzMDYxMTE5NTA0N1owezELMAkGA1UEBhMCU0UxEjAQ
|
||||
BgNVBAgMCVN0b2NraG9sbTESMBAGA1UEBwwJU3RvY2tob2xtMRIwEAYDVQQKDAlN
|
||||
eU9yZ05hbWUxGTAXBgNVBAsMEE15SW50ZXJtZWRpYXRlQ0ExFTATBgNVBAMMDG9j
|
||||
c3AuY2xpZW50MjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKZ9mYEA
|
||||
5knzGM9iSnJDFMZnjS8jA67KXBKjDXaJ1Y3qNxb2DloiKn4Hagqt/NHs9izFsOh1
|
||||
yP8o6lAbAIUC6NJiBi+KI6yYBNDWZ42hyedaMCzJvD51Jcr9mN+tWAhokXtEdouI
|
||||
O2rlMjhvgqNQXKHk0gshz7N9mqg1TqSFQXRtw4EAbYEVoHNF/5r+GBlcQ/ISeioU
|
||||
rknmT4MNKUpcNs5MQeFcbhAVPWFlq1rGm0PWdMQVFi7N+aH+DhEO9a1rCb1hTeP9
|
||||
XRSfu39NqPR8Bc2LfRPMo5VpVwOuRW0MUd7aFfeU4l3ncjk2lLtqxqr5pM3K3P4j
|
||||
cF/9ri+SESCsqmMFJdeb/5XDk3Oxc3Ap11JQbLCYhtXZgFicr7QPCE5cuxM+Hem/
|
||||
r3fU1XGmJ9bSczhSvpcIMc49Vf+MhlnzX1vMCVlncGAxPPumpP33TqWY1FAGBKCv
|
||||
5lrD2CNWEuDVUN/AXzl5qeFvOTkhOd99H6zZyidmvaD4sQOyrZ0m/+wRti4cE0GQ
|
||||
O2Gfjf0yA2rWevc9XSGfyk1yBLlkVTD583zjTdFj1NguywH8UVhEQMJrHSPvmovc
|
||||
Tsttkrwl+w/y6HV7c91HrhOC2hihMq7C0ip6WaH6vIBYWNpcs+g/8CpiTVVxEJ68
|
||||
5dQrDouKyR26SmWT1zLZw8tyb4Q/7YmzcHbxAgMBAAGjggE2MIIBMjAJBgNVHRME
|
||||
AjAAMBEGCWCGSAGG+EIBAQQEAwIFoDAzBglghkgBhvhCAQ0EJhYkT3BlblNTTCBH
|
||||
ZW5lcmF0ZWQgQ2xpZW50IENlcnRpZmljYXRlMB0GA1UdDgQWBBSJ/yia067wCafe
|
||||
kDCgk+e8PJTCUDAfBgNVHSMEGDAWgBRMcIY7FVKJurUPkqqusTFBE75z8zAOBgNV
|
||||
HQ8BAf8EBAMCBeAwHQYDVR0lBBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMEMDsGA1Ud
|
||||
HwQ0MDIwMKAuoCyGKmh0dHA6Ly9sb2NhbGhvc3Q6OTg3OC9pbnRlcm1lZGlhdGUu
|
||||
Y3JsLnBlbTAxBggrBgEFBQcBAQQlMCMwIQYIKwYBBQUHMAGGFWh0dHA6Ly9sb2Nh
|
||||
bGhvc3Q6OTg3NzANBgkqhkiG9w0BAQsFAAOCAgEAN2XfYgbrjxC6OWh9UoMLQaDD
|
||||
59JPxAUBxlRtWzTWqxY2jfT+OwJfDP4e+ef2G1YEG+qyt57ddlm/EwX9IvAvG0D4
|
||||
wd4tfItG88IJWKDM3wpT5KYrUsu+PlQTFmGmaWlORK/mRKlmfjbP5CIAcUedvCS9
|
||||
j9PkCrbbkklAmp0ULLSLUkYajmfFOkQ+VdGhQ6nAamTeyh2Z2S4dVjsKc8yBViMo
|
||||
/V6HP56rOvUqiVTcvhZtH7QDptMSTzuJ+AsmreYjwIiTGzYS/i8QVAFuPfXJKEOB
|
||||
jD5WhUaP/8Snbuft4MxssPAph8okcmxLfb55nw+soNc2oS1wWwKMe7igRelq8vtg
|
||||
bu00QSEGiY1eq/vFgZh0+Wohy/YeYzhO4Jq40FFpKiVbkLzexpNH/Afj2QrHuZ7y
|
||||
259uGGfv5tGA+TW6PsckCQknEb5V4V35ZZlbWVRKpuADeNPoDuoYPtc5eOomIkmw
|
||||
rFz/gPZWSA+4pYEgXgqcaM8+KP0i53eTbWqwy5DVgXiuaTYWU4m1FTsIZ+/nGIqW
|
||||
Dsgqd/D6jivf9Yvm+VFYTZsxIfq5sMdjxSuMBo0nZrzFDpqc6m6fVVoHv5R9Yliw
|
||||
MbxgmFQ84CKLy7iNKGSGVN2SIr1obMQ0e/t3NiCHib3WKzmZFoNoFCtVzAgsxGmF
|
||||
Q6rY83JdIPPW4LqZNcE=
|
||||
-----END CERTIFICATE-----
|
|
@ -0,0 +1,6 @@
|
|||
V 330419130816Z 1000 unknown /C=SE/ST=Stockholm/L=Stockholm/O=MyOrgName/OU=MyIntermediateCA/CN=localhost
|
||||
V 330419130816Z 1001 unknown /C=SE/ST=Stockholm/L=Stockholm/O=MyOrgName/OU=MyIntermediateCA/CN=MyClient
|
||||
R 330419130816Z 230112130816Z 1002 unknown /C=SE/ST=Stockholm/L=Stockholm/O=MyOrgName/OU=MyIntermediateCA/CN=client-revoked
|
||||
V 330419130816Z 1003 unknown /C=SE/ST=Stockholm/L=Stockholm/O=MyOrgName/OU=MyIntermediateCA/CN=ocsp.server
|
||||
V 330419130816Z 1004 unknown /C=SE/ST=Stockholm/L=Stockholm/O=MyOrgName/OU=MyIntermediateCA/CN=ocsp.client
|
||||
V 330425123656Z 1005 unknown /C=SE/ST=Stockholm/L=Stockholm/O=MyOrgName/OU=MyIntermediateCA/CN=client-no-dist-points
|
|
@ -0,0 +1,52 @@
|
|||
-----BEGIN PRIVATE KEY-----
|
||||
MIIJQQIBADANBgkqhkiG9w0BAQEFAASCCSswggknAgEAAoICAQC0Bu3THlP8vRwz
|
||||
R84cnUdG5sDfVBqZaiTiT8KiUWmWXK5/bpoCfUlNopdnHhFcTA8MwVgkSiTufx/Q
|
||||
PZNd0qrFzNnNnPcV3yBdZG/NtJQptVKMOYWf2rg48ZYjZPZDSQ4oAkYt0KCLP3VX
|
||||
lsxbIBAtpYMUamrjdnjRW+v8l2O/PGACxwbAkVdjRThV8bT7Ly16Q/UP/BKmLEjL
|
||||
iwEUXR1TVK8mhBeZYUoaXLs9E5SLk8tk4rlyVLSfqe2uGe364meibK5kFrFUAY+O
|
||||
BfEyUR1s1gu75vXPncxF/Xqm6+Yjy/i+HLTFvS8+nj3GHyF53ughJbrEVH/BvsTN
|
||||
bsKLxfW6VfvSKmO7QLvRtYmPYjbSYDTQyjgnDhinGQsegoNV2DhCkXjZ1DGs72Hq
|
||||
ka641eVkNLznA+c3AUTU1wnXQi2u1NILlz/ZkK37pODUx9Qbe+yYnkyfHHw1x8LW
|
||||
in+mhm5mT8JdV4vxrlzVD/8XqIO/ESshcAZo7diSsL1szKX5h+JhJBvZqIdmpb2v
|
||||
mhM+iyCbj9eoOrNEOnO/d/zr1LaTXHsB8OFY59/Sw/aOlS6fOeBuJ/HVaVFkm1kv
|
||||
rFw9WROvR4P7DDTMqvtPbn5G3sGPMaApv60Naag2QFTFrLLSRGmLu2/3m+0fkRjo
|
||||
9WBAglf2KRGT3Q6LW0jUZCItnf2tTQIDAQABAoICAAVlH8Nv6TxtvmabBEY/QF+T
|
||||
krwenR1z3N8bXM3Yer2S0XfoLJ1ee8/jy32/nO2TKfBL6wRLZIfxL1biQYRSR+Pd
|
||||
m7lZtt3k7edelysm+jm1wV+KacK8n0C1nLY61FZ33gC88LV2xxjlMfMKBd3FPDbh
|
||||
+ueluMZQSpablprfPpIAkTAEHuOud1v2OxX4RGAyrb44QyPTfguU0CmpZMLjd3mD
|
||||
1CvnUX27OKlJliLib1UvfKztTnlqqG8QfJr3E/asykZH04IUXAQUd+TdsLi9TZBx
|
||||
abCb30n1hKWkTwSplSAFgNLRsWkrnjrWKyvAyxQH5hT4OHyhu6JmwScW5qWhrRd3
|
||||
ld+pMaKQlOmtrTiRzSeFD2pOHFHvZ3N/1BhH5TGfnTIXKuEja3xdOArCHTBkh/9S
|
||||
kEZegVIAjoFW+t3gfbz12JzNmDUUX+sWfadBBiwYepTUr2aZQehZM8+dzdSwQeh4
|
||||
XcAUC55YgaC2oFCfcc8rD5o+57nlR+7xAjZ/Z61SuUJHrKSRzB6w2PARiEIuYotK
|
||||
E/CsQfL9tgjoc0aN0uVl8SH+GvKvRWM6LV711ep8w2XoPIAxId3ne/Ktw+wKCrqC
|
||||
CJsHXIGOi8n0YZLZ6vz/6WrjmY1GdJc1aywQvr5eDFP5g0j3e+WzGBxoCKX8Gah5
|
||||
KpA4fcN44s2umsu7WcoBAoIBAQDZyGhtu9rbm3JMJrA9Eyq97niv6axwbhocE/bU
|
||||
tPwdeWbPdprkK4aQ9UqJwHmVHkAUrGFRsY2iPJFLvdRwvixFYVAf/WLlAepd+HFz
|
||||
Xit1oX5ouzbcjq2+13zUQpfjXFqfLqVYcu/sW7UFaD3yJEstkhI+ZM6Ci+kLWXN5
|
||||
+KOXASGzO8p7WBHFABRMH0bUjRnZy8xX3wdOhAKRFaCalxABodH9wz/cMunzrmEa
|
||||
uHRsNWIIdWIVle4ZX4QTcsDgJSf5LeDaLtrpMu2AnFafQ2VCAb/jdKdighBsZG3H
|
||||
Pu6e1fJzSKZEUtWSLMzBoB6R/oNDW9cPhcXWXlNc8QsZ7DAtAoIBAQDTnmUqf8Lo
|
||||
lWPEQCrfkgQm2Gom/75uj5TnHsQYf2xk3vZNF5UwErD3Ixzh4F1E5ewA1Xvy5t3J
|
||||
VCOLypiKDlfcZnsMPncdubGMrT575mkpZgsvR/w8u8pd4mFSdyCc/y5TeyfcNFQe
|
||||
0Ho1NXMH6czutQs3oX+yfaTUr6Oa3brG1SAJQpG53nQI74pMWKHcivI/ytlA26Ki
|
||||
zxIVzeAzJ/ToVc6MzbObkXjFxrnVlvjsLyGMJEfW2lmny4Gpx1xpc2j3YW8vehfx
|
||||
DalWOJai1mtAo8ieo7CVw+kV2CqL7gJOJ2iNmCKT+IFk4LRtfJxd4wUJz6A/+vWp
|
||||
o0LMvApAnIWhAoIBAER1S+Zaq9Rmi8pGSxYXxVLI+KULhkodQhXbbLa2YZ3+QIQs
|
||||
m0noKLe+c3zTxSRLywb0nO7qKkR6V44AkRwTm6T/jwlPRFwKexqo8zi5vF2Qs0TG
|
||||
vNsd+p3H7RRoDojIyi/JoO4pyyN4PHIDr51DLWKYzSVR2NyOkGYh6zvHHd1k3KwT
|
||||
unWFXKiZesfm+QPtite8yXJByHE06/2hV8fgfoaU0Ia9boCQfJw+D4Yvv2EYcsWH
|
||||
6JoydBMDxGe8pcaPx337nvfWzLeLa78G5e/QZq8WD7S3Qbqkefcopp2AOdAyHrGA
|
||||
f8twYnQ9ouumopVv9OEiqHrXqTXWlsvbdYrjhM0CggEABOEHBhbSAJjJJxIvqt3r
|
||||
+JVOxT1qP5RR445DCSmO7zhwx1A+4U/dAqWtmcuZeuguK8rAQ9Zs0KJ++08dezlf
|
||||
bzZxqdOa3XWVkV/BLAwg6pJuuZVYTHIr9UQt6D/U4anEgKo7Pgl60wcNekKUN199
|
||||
mRdVfd/cWNoqvbia9gwcrU7moTAGuhlV5YrYTnBQswwFD9F2dtdZhZVunlAT1joa
|
||||
nGy2CWsItBKDjVPKnxEPBisEA/4mJd786DB5+dcd21SM2/9EF/0hpi4hdFpzpqd4
|
||||
65GbI4U0og9VRWqpeHZxWSnxcCpMycqV+SRxJIEV/dgpGpPN5wu7NEEOXjgLqHez
|
||||
YQKCAQBjwMVQUgn2KZK6Q9Lwe09ZpWTxGMh9mevU3eMA/6awajkE4UVgV8hSVvcG
|
||||
i3Otn9UMnMhYu+HuU9O9W4zzncH0nRoiwjQr3X0MTT3Lc0rSJNPb/a6pcvysBuvB
|
||||
wvhQ/dRXbCtmK9VE9ctPa9EO9f9SQRZF2NQsTOkyILdsgISm4zXSBhyT8KkQbiTe
|
||||
0ToI7qMM73HqLHKOkjA+8jYkE5MTVQaaRXx2JlCeHEsIpH/2Nj1OsmUfn3paL6ZN
|
||||
3loKhFfGy4onSOJOxoYaI3r6aykTFm7Qyg1xrG+8uFhK/qTOCB22I63LmSLZ1wlY
|
||||
xBO4CmF79pAcAXvDoRB619Flx5/G
|
||||
-----END PRIVATE KEY-----
|
|
@ -0,0 +1,34 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIF+zCCA+OgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwbzELMAkGA1UEBhMCU0Ux
|
||||
EjAQBgNVBAgMCVN0b2NraG9sbTESMBAGA1UEBwwJU3RvY2tob2xtMRIwEAYDVQQK
|
||||
DAlNeU9yZ05hbWUxETAPBgNVBAsMCE15Um9vdENBMREwDwYDVQQDDAhNeVJvb3RD
|
||||
QTAeFw0yMzAxMTIxMzA4MTZaFw0zMzAxMDkxMzA4MTZaMGsxCzAJBgNVBAYTAlNF
|
||||
MRIwEAYDVQQIDAlTdG9ja2hvbG0xEjAQBgNVBAoMCU15T3JnTmFtZTEZMBcGA1UE
|
||||
CwwQTXlJbnRlcm1lZGlhdGVDQTEZMBcGA1UEAwwQTXlJbnRlcm1lZGlhdGVDQTCC
|
||||
AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALQG7dMeU/y9HDNHzhydR0bm
|
||||
wN9UGplqJOJPwqJRaZZcrn9umgJ9SU2il2ceEVxMDwzBWCRKJO5/H9A9k13SqsXM
|
||||
2c2c9xXfIF1kb820lCm1Uow5hZ/auDjxliNk9kNJDigCRi3QoIs/dVeWzFsgEC2l
|
||||
gxRqauN2eNFb6/yXY788YALHBsCRV2NFOFXxtPsvLXpD9Q/8EqYsSMuLARRdHVNU
|
||||
ryaEF5lhShpcuz0TlIuTy2TiuXJUtJ+p7a4Z7friZ6JsrmQWsVQBj44F8TJRHWzW
|
||||
C7vm9c+dzEX9eqbr5iPL+L4ctMW9Lz6ePcYfIXne6CElusRUf8G+xM1uwovF9bpV
|
||||
+9IqY7tAu9G1iY9iNtJgNNDKOCcOGKcZCx6Cg1XYOEKReNnUMazvYeqRrrjV5WQ0
|
||||
vOcD5zcBRNTXCddCLa7U0guXP9mQrfuk4NTH1Bt77JieTJ8cfDXHwtaKf6aGbmZP
|
||||
wl1Xi/GuXNUP/xeog78RKyFwBmjt2JKwvWzMpfmH4mEkG9moh2alva+aEz6LIJuP
|
||||
16g6s0Q6c793/OvUtpNcewHw4Vjn39LD9o6VLp854G4n8dVpUWSbWS+sXD1ZE69H
|
||||
g/sMNMyq+09ufkbewY8xoCm/rQ1pqDZAVMWsstJEaYu7b/eb7R+RGOj1YECCV/Yp
|
||||
EZPdDotbSNRkIi2d/a1NAgMBAAGjgaQwgaEwHQYDVR0OBBYEFExwhjsVUom6tQ+S
|
||||
qq6xMUETvnPzMB8GA1UdIwQYMBaAFD90kfU5pc5l48THu0Ayj9SNpHuhMBIGA1Ud
|
||||
EwEB/wQIMAYBAf8CAQAwDgYDVR0PAQH/BAQDAgGGMDsGA1UdHwQ0MDIwMKAuoCyG
|
||||
Kmh0dHA6Ly9sb2NhbGhvc3Q6OTg3OC9pbnRlcm1lZGlhdGUuY3JsLnBlbTANBgkq
|
||||
hkiG9w0BAQsFAAOCAgEAK6NgdWQYtPNKQNBGjsgtgqTRh+k30iqSO6Y3yE1KGABO
|
||||
EuQdVqkC2qUIbCB0M0qoV0ab50KNLfU6cbshggW4LDpcMpoQpI05fukNh1jm3ZuZ
|
||||
0xsB7vlmlsv00tpqmfIl/zykPDynHKOmFh/hJP/KetMy4+wDv4/+xP31UdEj5XvG
|
||||
HvMtuqOS23A+H6WPU7ol7KzKBnU2zz/xekvPbUD3JqV+ynP5bgbIZHAndd0o9T8e
|
||||
NFX23Us4cTenU2/ZlOq694bRzGaK+n3Ksz995Nbtzv5fbUgqmf7Mcq4iHGRVtV11
|
||||
MRyBrsXZp2vbF63c4hrf2Zd6SWRoaDKRhP2DMhajpH9zZASSTlfejg/ZRO2s+Clh
|
||||
YrSTkeMAdnRt6i/q4QRcOTCfsX75RFM5v67njvTXsSaSTnAwaPi78tRtf+WSh0EP
|
||||
VVPzy++BszBVlJ1VAf7soWZHCjZxZ8ZPqVTy5okoHwWQ09WmYe8GfulDh1oj0wbK
|
||||
3FjN7bODWHJN+bFf5aQfK+tumYKoPG8RXL6QxpEzjFWjxhIMJHHMKfDWnAV1o1+7
|
||||
/1/aDzq7MzEYBbrgQR7oE5ZHtyqhCf9LUgw0Kr7/8QWuNAdeDCJzjXRROU0hJczp
|
||||
dOyfRlLbHmLLmGOnROlx6LsGNQ17zuz6SPi7ei8/ylhykawDOAGkM1+xFakmQhM=
|
||||
-----END CERTIFICATE-----
|
|
@ -0,0 +1,14 @@
|
|||
listeners.ssl.default {
|
||||
bind = "0.0.0.0:8883"
|
||||
max_connections = 512000
|
||||
ssl_options {
|
||||
keyfile = "{{ test_data_dir }}/server.key"
|
||||
certfile = "{{ test_data_dir }}/server.pem"
|
||||
cacertfile = "{{ test_data_dir }}/ca.pem"
|
||||
ocsp {
|
||||
enable_ocsp_stapling = true
|
||||
issuer_pem = "{{ test_data_dir }}/ocsp-issuer.pem"
|
||||
responder_url = "http://127.0.0.1:9877"
|
||||
}
|
||||
}
|
||||
}
|
|
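This test config enables OCSP stapling on the default SSL listener, pointing at the OCSP responder and issuer PEM shipped with the suite. A small sketch of reading the same values back at runtime is given below; it assumes the standard emqx:get_config/1 accessor and atom config paths.

-module(ocsp_config_read_example).
-export([ocsp_settings/0]).

%% Sketch, assuming emqx:get_config/1: returns the OCSP sub-map of the
%% default SSL listener configured above (enable_ocsp_stapling,
%% issuer_pem, responder_url, ...).
ocsp_settings() ->
    emqx:get_config([listeners, ssl, default, ssl_options, ocsp]).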
@ -0,0 +1,28 @@
|
|||
-----BEGIN PRIVATE KEY-----
|
||||
MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCnVPRWgP59GU15
|
||||
HddFwPZflFfcSkeuWU8tgKQhZcNoBli4lIfemuoV/hkGRVFexAiAw3/u5wvOaMaN
|
||||
V8n9KxxgAUNLh5YaknpnNdhfQDyM0S5UJIbVeLzAQWxkBXpI3uBfW4WPSULRnVyR
|
||||
psLEfl1qOklGOyuZfRbkkkkVwtJEmGEH0kz0fy6xenn3R3/mTeIbj+5TNqiBXWn1
|
||||
/qgTiNf2Ni7SE6Nk2lP4V8iofcBIrsp6KtEWdipGEJZeXCg/X0g/qVt15tF1l00M
|
||||
uEWRHt1qGBELJJTcNzQvdqHAPz0AfQRjTtXyocw5+pFth8Q8a7gyjrjv5nhnpAKQ
|
||||
msrt3vyNAgMBAAECggEABnWvIQ/Fw0qQxRYz00uJt1LguW5cqgxklBsdOvTUwFVO
|
||||
Y4HIZP2R/9tZV/ahF4l10pK5g52DxSoiUB6Ne6qIY+RolqfbUZdKBmX7vmGadM02
|
||||
fqUSV3dbwghEiO/1Mo74FnZQB6IKZFEw26aWakN+k7VAUufB3SEJGzXSgHaO63ru
|
||||
dFGSiYI8U+q+YnhUJjCnmI12fycNfy451TdUQtGZb6pNmm5HRUF6hpAV8Le9LojP
|
||||
Ql9eacPpsrzU15X5ElCQZ/f9iNh1bplcISuhrULgKUKOvAVrBlEK67uRVy6g98xA
|
||||
c/rgNLkbL/jZEsAc3/vHAyFgd3lABfwpBGLHej3QgQKBgQDFNYmfBNQr89HC5Zc+
|
||||
M6jXcAT/R+0GNczBTfC4iyNemwqsumSSRelNZ748UefKuS3F6Mvb2CBqE2LbB61G
|
||||
hrnCffG2pARjZ491SefRwghhWWVGLP1p8KliLgOGBehA1REgJb+XULncjuHZuh4O
|
||||
LVn3HVnWGxeBGg+yKa6Z4YQi3QKBgQDZN0O8ZcZY74lRJ0UjscD9mJ1yHlsssZag
|
||||
njkX/f0GR/iVpfaIxQNC3gvWUy2LsU0He9sidcB0cfej0j/qZObQyFsCB0+utOgy
|
||||
+hX7gokV2pes27WICbNWE2lJL4QZRJgvf82OaEy57kfDrm+eK1XaSZTZ10P82C9u
|
||||
gAmMnontcQKBgGu29lhY9tqa7jOZ26Yp6Uri8JfO3XPK5u+edqEVvlfqL0Zw+IW8
|
||||
kdWpmIqx4f0kcA/tO4v03J+TvycLZmVjKQtGZ0PvCkaRRhY2K9yyMomZnmtaH4BB
|
||||
5wKtR1do2pauyg/ZDnDDswD5OfsGYWw08TK8YVlEqu3lIjWZ9rguKVIxAoGAZYUk
|
||||
zVqr10ks3pcCA2rCjkPT4lA5wKvHgI4ylPoKVfMxRY/pp4acvZXV5ne9o7pcDBFh
|
||||
G7v5FPNnEFPlt4EtN4tMragJH9hBZgHoYEJkG6islweg0lHmVWaBIMlqbfzXO+v5
|
||||
gINSyNuLAvP2CvCqEXmubhnkFrpbgMOqsuQuBqECgYB3ss2PDhBF+5qoWgqymFof
|
||||
1ovRPuQ9sPjWBn5IrCdoYITDnbBzBZERx7GLs6A/PUlWgST7jkb1PY/TxYSUfXzJ
|
||||
SNd47q0mCQ+IUdqUbHgpK9b1ncwLMsnexpYZdHJWRLgnUhOx7OMjJc/4iLCAFCoN
|
||||
3KJ7/V1keo7GBHOwnsFcCA==
|
||||
-----END PRIVATE KEY-----
|
|
@ -0,0 +1,35 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIGCTCCA/GgAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwazELMAkGA1UEBhMCU0Ux
|
||||
EjAQBgNVBAgMCVN0b2NraG9sbTESMBAGA1UECgwJTXlPcmdOYW1lMRkwFwYDVQQL
|
||||
DBBNeUludGVybWVkaWF0ZUNBMRkwFwYDVQQDDBBNeUludGVybWVkaWF0ZUNBMB4X
|
||||
DTIzMDExMjEzMDgxNloXDTMzMDQxOTEzMDgxNloweDELMAkGA1UEBhMCU0UxEjAQ
|
||||
BgNVBAgMCVN0b2NraG9sbTESMBAGA1UEBwwJU3RvY2tob2xtMRIwEAYDVQQKDAlN
|
||||
eU9yZ05hbWUxGTAXBgNVBAsMEE15SW50ZXJtZWRpYXRlQ0ExEjAQBgNVBAMMCWxv
|
||||
Y2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKdU9FaA/n0Z
|
||||
TXkd10XA9l+UV9xKR65ZTy2ApCFlw2gGWLiUh96a6hX+GQZFUV7ECIDDf+7nC85o
|
||||
xo1Xyf0rHGABQ0uHlhqSemc12F9APIzRLlQkhtV4vMBBbGQFekje4F9bhY9JQtGd
|
||||
XJGmwsR+XWo6SUY7K5l9FuSSSRXC0kSYYQfSTPR/LrF6efdHf+ZN4huP7lM2qIFd
|
||||
afX+qBOI1/Y2LtITo2TaU/hXyKh9wEiuynoq0RZ2KkYQll5cKD9fSD+pW3Xm0XWX
|
||||
TQy4RZEe3WoYEQsklNw3NC92ocA/PQB9BGNO1fKhzDn6kW2HxDxruDKOuO/meGek
|
||||
ApCayu3e/I0CAwEAAaOCAagwggGkMAkGA1UdEwQCMAAwEQYJYIZIAYb4QgEBBAQD
|
||||
AgZAMDMGCWCGSAGG+EIBDQQmFiRPcGVuU1NMIEdlbmVyYXRlZCBTZXJ2ZXIgQ2Vy
|
||||
dGlmaWNhdGUwHQYDVR0OBBYEFGy5LQPzIelruJl7mL0mtUXM57XhMIGaBgNVHSME
|
||||
gZIwgY+AFExwhjsVUom6tQ+Sqq6xMUETvnPzoXOkcTBvMQswCQYDVQQGEwJTRTES
|
||||
MBAGA1UECAwJU3RvY2tob2xtMRIwEAYDVQQHDAlTdG9ja2hvbG0xEjAQBgNVBAoM
|
||||
CU15T3JnTmFtZTERMA8GA1UECwwITXlSb290Q0ExETAPBgNVBAMMCE15Um9vdENB
|
||||
ggIQADAOBgNVHQ8BAf8EBAMCBaAwEwYDVR0lBAwwCgYIKwYBBQUHAwEwOwYDVR0f
|
||||
BDQwMjAwoC6gLIYqaHR0cDovL2xvY2FsaG9zdDo5ODc4L2ludGVybWVkaWF0ZS5j
|
||||
cmwucGVtMDEGCCsGAQUFBwEBBCUwIzAhBggrBgEFBQcwAYYVaHR0cDovL2xvY2Fs
|
||||
aG9zdDo5ODc3MA0GCSqGSIb3DQEBCwUAA4ICAQCX3EQgiCVqLhnCNd0pmptxXPxo
|
||||
l1KyZkpdrFa/NgSqRhkuZSAkszwBDDS/gzkHFKEUhmqs6/UZwN4+Rr3LzrHonBiN
|
||||
aQ6GeNNXZ/3xAQfUCwjjGmz9Sgw6kaX19Gnk2CjI6xP7T+O5UmsMI9hHUepC9nWa
|
||||
XX2a0hsO/KOVu5ZZckI16Ek/jxs2/HEN0epYdvjKFAaVmzZZ5PATNjrPQXvPmq2r
|
||||
x++La+3bXZsrH8P2FhPpM5t/IxKKW/Tlpgz92c2jVSIHF5khSA/MFDC+dk80OFmm
|
||||
v4ZTPIMuZ//Q+wo0f9P48rsL9D27qS7CA+8pn9wu+cfnBDSt7JD5Yipa1gHz71fy
|
||||
YTa9qRxIAPpzW2v7TFZE8eSKFUY9ipCeM2BbdmCQGmq4+v36b5TZoyjH4k0UVWGo
|
||||
Gclos2cic5Vxi8E6hb7b7yZpjEfn/5lbCiGMfAnI6aoOyrWg6keaRA33kaLUEZiK
|
||||
OgFNbPkjiTV0ZQyLXf7uK9YFhpVzJ0dv0CFNse8rZb7A7PLn8VrV/ZFnJ9rPoawn
|
||||
t7ZGxC0d5BRSEyEeEgsQdxuY4m8OkE18zwhCkt2Qs3uosOWlIrYmqSEa0i/sPSQP
|
||||
jiwB4nEdBrf8ZygzuYjT5T9YRSwhVox4spS/Av8Ells5JnkuKAhCVv9gHxYwbj0c
|
||||
CzyLJgE1z9Tq63m+gQ==
|
||||
-----END CERTIFICATE-----
|
|
@ -473,3 +473,43 @@ password_converter_test() ->
|
|||
?assertEqual(<<"123">>, emqx_schema:password_converter(<<"123">>, #{})),
|
||||
?assertThrow("must_quote", emqx_schema:password_converter(foobar, #{})),
|
||||
ok.
|
||||
|
||||
url_type_test_() ->
|
||||
[
|
||||
?_assertEqual(
|
||||
{ok, <<"http://some.server/">>},
|
||||
typerefl:from_string(emqx_schema:url(), <<"http://some.server/">>)
|
||||
),
|
||||
?_assertEqual(
|
||||
{ok, <<"http://192.168.0.1/">>},
|
||||
typerefl:from_string(emqx_schema:url(), <<"http://192.168.0.1">>)
|
||||
),
|
||||
?_assertEqual(
|
||||
{ok, <<"http://some.server/">>},
|
||||
typerefl:from_string(emqx_schema:url(), "http://some.server/")
|
||||
),
|
||||
?_assertEqual(
|
||||
{ok, <<"http://some.server/">>},
|
||||
typerefl:from_string(emqx_schema:url(), <<"http://some.server">>)
|
||||
),
|
||||
?_assertEqual(
|
||||
{ok, <<"http://some.server:9090/">>},
|
||||
typerefl:from_string(emqx_schema:url(), <<"http://some.server:9090">>)
|
||||
),
|
||||
?_assertEqual(
|
||||
{ok, <<"https://some.server:9090/">>},
|
||||
typerefl:from_string(emqx_schema:url(), <<"https://some.server:9090">>)
|
||||
),
|
||||
?_assertEqual(
|
||||
{ok, <<"https://some.server:9090/path?q=uery">>},
|
||||
typerefl:from_string(emqx_schema:url(), <<"https://some.server:9090/path?q=uery">>)
|
||||
),
|
||||
?_assertEqual(
|
||||
{error, {unsupported_scheme, <<"postgres">>}},
|
||||
typerefl:from_string(emqx_schema:url(), <<"postgres://some.server:9090">>)
|
||||
),
|
||||
?_assertEqual(
|
||||
{error, empty_host_not_allowed},
|
||||
typerefl:from_string(emqx_schema:url(), <<"">>)
|
||||
)
|
||||
].
|
||||
|
|
|
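The cases above show that emqx_schema:url() normalises its input: a missing trailing slash is added, both binaries and strings are accepted, explicit ports and query strings are preserved, and non-HTTP schemes or empty hosts are rejected. A compact sketch of the round trip:

-module(url_type_example).
-export([demo/0]).

%% Sketch: both spellings normalise to the same stored binary, as the
%% assertions above establish.
demo() ->
    {ok, U1} = typerefl:from_string(emqx_schema:url(), <<"http://some.server">>),
    {ok, U2} = typerefl:from_string(emqx_schema:url(), "http://some.server/"),
    true = (U1 =:= U2),
    U1.  %% <<"http://some.server/">>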
@ -30,6 +30,8 @@
|
|||
%% API
|
||||
-export([
|
||||
start_link/0,
|
||||
stop/1,
|
||||
stop/2,
|
||||
push_on_exit_callback/2
|
||||
]).
|
||||
|
||||
|
@ -40,6 +42,12 @@
|
|||
start_link() ->
|
||||
gen_server:start_link(?MODULE, self(), []).
|
||||
|
||||
stop(Server) ->
|
||||
stop(Server, 15_000).
|
||||
|
||||
stop(Server, Timeout) ->
|
||||
gen_server:call(Server, terminate, Timeout).
|
||||
|
||||
push_on_exit_callback(Server, Callback) when is_function(Callback, 0) ->
|
||||
gen_server:call(Server, {push, Callback}).
|
||||
|
||||
|
@ -52,10 +60,13 @@ init(Parent) ->
|
|||
{ok, #{callbacks => [], owner => Parent}}.
|
||||
|
||||
terminate(_Reason, #{callbacks := Callbacks}) ->
|
||||
lists:foreach(fun(Fun) -> Fun() end, Callbacks).
|
||||
lists:foreach(fun(Fun) -> catch Fun() end, Callbacks).
|
||||
|
||||
handle_call({push, Callback}, _From, State = #{callbacks := Callbacks}) ->
|
||||
{reply, ok, State#{callbacks := [Callback | Callbacks]}};
|
||||
handle_call(terminate, _From, State = #{callbacks := Callbacks}) ->
|
||||
lists:foreach(fun(Fun) -> Fun() end, Callbacks),
|
||||
{stop, normal, ok, State};
|
||||
handle_call(_Req, _From, State) ->
|
||||
{reply, error, State}.
|
||||
|
||||
|
|
|
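The helper process above gains an explicit stop/1,2 API and now wraps each registered callback in a catch on terminate, so one failing cleanup no longer prevents the rest from running. A usage sketch follows; the owning module's name is not visible in this hunk, so it is passed in as a parameter.

-module(janitor_usage_example).
-export([demo/1]).

%% Sketch: register a cleanup callback and stop the helper explicitly.
%% JanitorMod stands for the module changed above (its name is not shown
%% in this hunk).
demo(JanitorMod) ->
    {ok, Server} = JanitorMod:start_link(),
    ok = JanitorMod:push_on_exit_callback(Server, fun() ->
        io:format("cleanup ran~n")
    end),
    %% stop/1 defaults to a 15 s timeout; stop/2 makes it explicit.
    ok = JanitorMod:stop(Server, 15_000).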
@ -117,7 +117,7 @@ ssl_files_failure_test_() ->
|
|||
%% empty string
|
||||
?assertMatch(
|
||||
{error, #{
|
||||
reason := invalid_file_path_or_pem_string, which_options := [<<"keyfile">>]
|
||||
reason := invalid_file_path_or_pem_string, which_options := [[<<"keyfile">>]]
|
||||
}},
|
||||
emqx_tls_lib:ensure_ssl_files("/tmp", #{
|
||||
<<"keyfile">> => <<>>,
|
||||
|
@ -128,7 +128,7 @@ ssl_files_failure_test_() ->
|
|||
%% not valid unicode
|
||||
?assertMatch(
|
||||
{error, #{
|
||||
reason := invalid_file_path_or_pem_string, which_options := [<<"keyfile">>]
|
||||
reason := invalid_file_path_or_pem_string, which_options := [[<<"keyfile">>]]
|
||||
}},
|
||||
emqx_tls_lib:ensure_ssl_files("/tmp", #{
|
||||
<<"keyfile">> => <<255, 255>>,
|
||||
|
@ -136,6 +136,18 @@ ssl_files_failure_test_() ->
|
|||
<<"cacertfile">> => bin(test_key())
|
||||
})
|
||||
),
|
||||
?assertMatch(
|
||||
{error, #{
|
||||
reason := invalid_file_path_or_pem_string,
|
||||
which_options := [[<<"ocsp">>, <<"issuer_pem">>]]
|
||||
}},
|
||||
emqx_tls_lib:ensure_ssl_files("/tmp", #{
|
||||
<<"keyfile">> => bin(test_key()),
|
||||
<<"certfile">> => bin(test_key()),
|
||||
<<"cacertfile">> => bin(test_key()),
|
||||
<<"ocsp">> => #{<<"issuer_pem">> => <<255, 255>>}
|
||||
})
|
||||
),
|
||||
%% not printable
|
||||
?assertMatch(
|
||||
{error, #{reason := invalid_file_path_or_pem_string}},
|
||||
|
@ -155,7 +167,8 @@ ssl_files_failure_test_() ->
|
|||
#{
|
||||
<<"cacertfile">> => bin(TmpFile),
|
||||
<<"keyfile">> => bin(TmpFile),
|
||||
<<"certfile">> => bin(TmpFile)
|
||||
<<"certfile">> => bin(TmpFile),
|
||||
<<"ocsp">> => #{<<"issuer_pem">> => bin(TmpFile)}
|
||||
}
|
||||
)
|
||||
)
|
||||
|
@ -170,22 +183,29 @@ ssl_files_save_delete_test() ->
|
|||
SSL0 = #{
|
||||
<<"keyfile">> => Key,
|
||||
<<"certfile">> => Key,
|
||||
<<"cacertfile">> => Key
|
||||
<<"cacertfile">> => Key,
|
||||
<<"ocsp">> => #{<<"issuer_pem">> => Key}
|
||||
},
|
||||
Dir = filename:join(["/tmp", "ssl-test-dir"]),
|
||||
{ok, SSL} = emqx_tls_lib:ensure_ssl_files(Dir, SSL0),
|
||||
File = maps:get(<<"keyfile">>, SSL),
|
||||
?assertMatch(<<"/tmp/ssl-test-dir/key-", _:16/binary>>, File),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(File)),
|
||||
FileKey = maps:get(<<"keyfile">>, SSL),
|
||||
?assertMatch(<<"/tmp/ssl-test-dir/key-", _:16/binary>>, FileKey),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(FileKey)),
|
||||
FileIssuerPem = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL),
|
||||
?assertMatch(<<"/tmp/ssl-test-dir/ocsp_issuer_pem-", _:16/binary>>, FileIssuerPem),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(FileIssuerPem)),
|
||||
%% no old file to delete
|
||||
ok = emqx_tls_lib:delete_ssl_files(Dir, SSL, undefined),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(File)),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(FileKey)),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(FileIssuerPem)),
|
||||
%% old and new identical, no delete
|
||||
ok = emqx_tls_lib:delete_ssl_files(Dir, SSL, SSL),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(File)),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(FileKey)),
|
||||
?assertEqual({ok, bin(test_key())}, file:read_file(FileIssuerPem)),
|
||||
%% new is gone, delete old
|
||||
ok = emqx_tls_lib:delete_ssl_files(Dir, undefined, SSL),
|
||||
?assertEqual({error, enoent}, file:read_file(File)),
|
||||
?assertEqual({error, enoent}, file:read_file(FileKey)),
|
||||
?assertEqual({error, enoent}, file:read_file(FileIssuerPem)),
|
||||
%% test idempotence
|
||||
ok = emqx_tls_lib:delete_ssl_files(Dir, undefined, SSL),
|
||||
ok.
|
||||
|
@ -198,7 +218,8 @@ ssl_files_handle_non_generated_file_test() ->
|
|||
SSL0 = #{
|
||||
<<"keyfile">> => TmpKeyFile,
|
||||
<<"certfile">> => TmpKeyFile,
|
||||
<<"cacertfile">> => TmpKeyFile
|
||||
<<"cacertfile">> => TmpKeyFile,
|
||||
<<"ocsp">> => #{<<"issuer_pem">> => TmpKeyFile}
|
||||
},
|
||||
Dir = filename:join(["/tmp", "ssl-test-dir-00"]),
|
||||
{ok, SSL2} = emqx_tls_lib:ensure_ssl_files(Dir, SSL0),
|
||||
|
@ -216,24 +237,32 @@ ssl_file_replace_test() ->
|
|||
SSL0 = #{
|
||||
<<"keyfile">> => Key1,
|
||||
<<"certfile">> => Key1,
|
||||
<<"cacertfile">> => Key1
|
||||
<<"cacertfile">> => Key1,
|
||||
<<"ocsp">> => #{<<"issuer_pem">> => Key1}
|
||||
},
|
||||
SSL1 = #{
|
||||
<<"keyfile">> => Key2,
|
||||
<<"certfile">> => Key2,
|
||||
<<"cacertfile">> => Key2
|
||||
<<"cacertfile">> => Key2,
|
||||
<<"ocsp">> => #{<<"issuer_pem">> => Key2}
|
||||
},
|
||||
Dir = filename:join(["/tmp", "ssl-test-dir2"]),
|
||||
{ok, SSL2} = emqx_tls_lib:ensure_ssl_files(Dir, SSL0),
|
||||
{ok, SSL3} = emqx_tls_lib:ensure_ssl_files(Dir, SSL1),
|
||||
File1 = maps:get(<<"keyfile">>, SSL2),
|
||||
File2 = maps:get(<<"keyfile">>, SSL3),
|
||||
IssuerPem1 = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL2),
|
||||
IssuerPem2 = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL3),
|
||||
?assert(filelib:is_regular(File1)),
|
||||
?assert(filelib:is_regular(File2)),
|
||||
?assert(filelib:is_regular(IssuerPem1)),
|
||||
?assert(filelib:is_regular(IssuerPem2)),
|
||||
%% delete old file (File1, in SSL2)
|
||||
ok = emqx_tls_lib:delete_ssl_files(Dir, SSL3, SSL2),
|
||||
?assertNot(filelib:is_regular(File1)),
|
||||
?assert(filelib:is_regular(File2)),
|
||||
?assertNot(filelib:is_regular(IssuerPem1)),
|
||||
?assert(filelib:is_regular(IssuerPem2)),
|
||||
ok.
|
||||
|
||||
bin(X) -> iolist_to_binary(X).
|
||||
|
|
|
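With these changes, emqx_tls_lib:ensure_ssl_files/2 also materialises the nested ocsp.issuer_pem PEM as a file, and delete_ssl_files/3 removes it together with the other generated files. A sketch of the call pattern is below; PemBin must be a valid PEM binary (the suite uses test_key() for this), and the target directory is a placeholder.

-module(tls_ocsp_files_example).
-export([demo/1]).

%% Sketch, mirroring the tests above: the issuer PEM travels inside the
%% nested <<"ocsp">> map and comes back as a generated file path.
demo(PemBin) when is_binary(PemBin) ->
    SSL0 = #{
        <<"keyfile">> => PemBin,
        <<"certfile">> => PemBin,
        <<"cacertfile">> => PemBin,
        <<"ocsp">> => #{<<"issuer_pem">> => PemBin}
    },
    {ok, SSL} = emqx_tls_lib:ensure_ssl_files("/tmp/ssl-example", SSL0),
    %% e.g. <<"/tmp/ssl-example/ocsp_issuer_pem-...">>
    IssuerPemPath = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL),
    %% Deleting with an 'undefined' new config removes the generated files.
    ok = emqx_tls_lib:delete_ssl_files("/tmp/ssl-example", undefined, SSL),
    IssuerPemPath.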
@ -55,7 +55,7 @@ init_per_suite(Config) ->
|
|||
|
||||
end_per_suite(_Config) ->
|
||||
ok = emqx_authz_test_lib:restore_authorizers(),
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_authz]).
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_authz]).
|
||||
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
ok = emqx_authz_test_lib:reset_authorizers(),
|
||||
|
|
|
@ -52,7 +52,7 @@ init_per_suite(Config) ->
|
|||
end_per_suite(_Config) ->
|
||||
ok = emqx_authz_test_lib:restore_authorizers(),
|
||||
ok = stop_apps([emqx_resource, cowboy]),
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_authz]).
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_authz]).
|
||||
|
||||
set_special_configs(emqx_authz) ->
|
||||
ok = emqx_authz_test_lib:reset_authorizers();
|
||||
|
|
|
@ -36,7 +36,7 @@ init_per_suite(Config) ->
|
|||
|
||||
end_per_suite(_Config) ->
|
||||
ok = emqx_authz_test_lib:restore_authorizers(),
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_authz]).
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_authz]).
|
||||
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
ok = emqx_authz_test_lib:reset_authorizers(),
|
||||
|
|
|
@ -50,7 +50,7 @@ init_per_suite(Config) ->
|
|||
end_per_suite(_Config) ->
|
||||
ok = emqx_authz_test_lib:restore_authorizers(),
|
||||
ok = stop_apps([emqx_resource]),
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_authz]).
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_authz]).
|
||||
|
||||
set_special_configs(emqx_authz) ->
|
||||
ok = emqx_authz_test_lib:reset_authorizers();
|
||||
|
|
|
@ -57,7 +57,7 @@ end_per_suite(_Config) ->
|
|||
ok = emqx_authz_test_lib:restore_authorizers(),
|
||||
ok = emqx_resource:remove_local(?MYSQL_RESOURCE),
|
||||
ok = stop_apps([emqx_resource]),
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_authz]).
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_authz]).
|
||||
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
ok = emqx_authz_test_lib:reset_authorizers(),
|
||||
|
|
|
@ -57,7 +57,7 @@ end_per_suite(_Config) ->
|
|||
ok = emqx_authz_test_lib:restore_authorizers(),
|
||||
ok = emqx_resource:remove_local(?PGSQL_RESOURCE),
|
||||
ok = stop_apps([emqx_resource]),
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_authz]).
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_authz]).
|
||||
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
ok = emqx_authz_test_lib:reset_authorizers(),
|
||||
|
|
|
@ -58,7 +58,7 @@ end_per_suite(_Config) ->
|
|||
ok = emqx_authz_test_lib:restore_authorizers(),
|
||||
ok = emqx_resource:remove_local(?REDIS_RESOURCE),
|
||||
ok = stop_apps([emqx_resource]),
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_authz]).
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_authz]).
|
||||
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
ok = emqx_authz_test_lib:reset_authorizers(),
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
{erl_opts, [debug_info]}.
|
||||
{deps, [{emqx, {path, "../emqx"}}]}.
|
||||
{deps, [ {emqx, {path, "../emqx"}}
|
||||
, {emqx_resource, {path, "../../apps/emqx_resource"}}
|
||||
]}.
|
||||
|
||||
{shell, [
|
||||
% {config, "config/sys.config"},
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
{application, emqx_bridge, [
|
||||
{description, "EMQX bridges"},
|
||||
{vsn, "0.1.13"},
|
||||
{registered, []},
|
||||
{registered, [emqx_bridge_sup]},
|
||||
{mod, {emqx_bridge_app, []}},
|
||||
{applications, [
|
||||
kernel,
|
||||
|
|
|
@ -55,6 +55,10 @@
|
|||
T == gcp_pubsub;
|
||||
T == influxdb_api_v1;
|
||||
T == influxdb_api_v2;
|
||||
%% TODO: rename this to `kafka_producer' after alias support is
|
||||
%% added to hocon; keeping this as just `kafka' for backwards
|
||||
%% compatibility.
|
||||
T == kafka;
|
||||
T == redis_single;
|
||||
T == redis_sentinel;
|
||||
T == redis_cluster;
|
||||
|
@ -138,12 +142,12 @@ load_hook(Bridges) ->
|
|||
maps:to_list(Bridges)
|
||||
).
|
||||
|
||||
do_load_hook(Type, #{local_topic := _}) when ?EGRESS_DIR_BRIDGES(Type) ->
|
||||
do_load_hook(Type, #{local_topic := LocalTopic}) when
|
||||
?EGRESS_DIR_BRIDGES(Type) andalso is_binary(LocalTopic)
|
||||
->
|
||||
emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}, ?HP_BRIDGE);
|
||||
do_load_hook(mqtt, #{egress := #{local := #{topic := _}}}) ->
|
||||
emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}, ?HP_BRIDGE);
|
||||
do_load_hook(kafka, #{producer := #{mqtt := #{topic := _}}}) ->
|
||||
emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}, ?HP_BRIDGE);
|
||||
do_load_hook(_Type, _Conf) ->
|
||||
ok.
|
||||
|
||||
|
@ -224,24 +228,25 @@ post_config_update(_, _Req, NewConf, OldConf, _AppEnv) ->
|
|||
]),
|
||||
ok = unload_hook(),
|
||||
ok = load_hook(NewConf),
|
||||
?tp(bridge_post_config_update_done, #{}),
|
||||
Result.
|
||||
|
||||
list() ->
|
||||
lists:foldl(
|
||||
fun({Type, NameAndConf}, Bridges) ->
|
||||
lists:foldl(
|
||||
fun({Name, RawConf}, Acc) ->
|
||||
maps:fold(
|
||||
fun(Type, NameAndConf, Bridges) ->
|
||||
maps:fold(
|
||||
fun(Name, RawConf, Acc) ->
|
||||
case lookup(Type, Name, RawConf) of
|
||||
{error, not_found} -> Acc;
|
||||
{ok, Res} -> [Res | Acc]
|
||||
end
|
||||
end,
|
||||
Bridges,
|
||||
maps:to_list(NameAndConf)
|
||||
NameAndConf
|
||||
)
|
||||
end,
|
||||
[],
|
||||
maps:to_list(emqx:get_raw_config([bridges], #{}))
|
||||
emqx:get_raw_config([bridges], #{})
|
||||
).
|
||||
|
||||
lookup(Id) ->
|
||||
|
@ -408,8 +413,8 @@ get_matched_bridge_id(BType, Conf, Topic, BName, Acc) when ?EGRESS_DIR_BRIDGES(B
|
|||
end;
|
||||
get_matched_bridge_id(mqtt, #{egress := #{local := #{topic := Filter}}}, Topic, BName, Acc) ->
|
||||
do_get_matched_bridge_id(Topic, Filter, mqtt, BName, Acc);
|
||||
get_matched_bridge_id(kafka, #{producer := #{mqtt := #{topic := Filter}}}, Topic, BName, Acc) ->
|
||||
do_get_matched_bridge_id(Topic, Filter, kafka, BName, Acc).
|
||||
get_matched_bridge_id(_BType, _Conf, _Topic, _BName, Acc) ->
|
||||
Acc.
|
||||
|
||||
do_get_matched_bridge_id(Topic, Filter, BType, BName, Acc) ->
|
||||
case emqx_topic:match(Topic, Filter) of
|
||||
|
|
|
@ -487,11 +487,18 @@ schema("/bridges_probe") ->
|
|||
lookup_from_all_nodes(BridgeType, BridgeName, 201)
|
||||
end;
|
||||
'/bridges'(get, _Params) ->
|
||||
{200,
|
||||
zip_bridges([
|
||||
[format_resp(Data, Node) || Data <- emqx_bridge_proto_v1:list_bridges(Node)]
|
||||
|| Node <- mria:running_nodes()
|
||||
])}.
|
||||
Nodes = mria:running_nodes(),
|
||||
NodeReplies = emqx_bridge_proto_v3:list_bridges_on_nodes(Nodes),
|
||||
case is_ok(NodeReplies) of
|
||||
{ok, NodeBridges} ->
|
||||
AllBridges = [
|
||||
format_resource(Data, Node)
|
||||
|| {Node, Bridges} <- lists:zip(Nodes, NodeBridges), Data <- Bridges
|
||||
],
|
||||
{200, zip_bridges([AllBridges])};
|
||||
{error, Reason} ->
|
||||
{500, error_msg('INTERNAL_ERROR', Reason)}
|
||||
end.
|
||||
|
||||
'/bridges/:id'(get, #{bindings := #{id := Id}}) ->
|
||||
?TRY_PARSE_ID(Id, lookup_from_all_nodes(BridgeType, BridgeName, 200));
|
||||
|
@ -589,7 +596,7 @@ lookup_from_all_nodes_metrics(BridgeType, BridgeName, SuccCode) ->
|
|||
|
||||
do_lookup_from_all_nodes(BridgeType, BridgeName, SuccCode, FormatFun) ->
|
||||
Nodes = mria:running_nodes(),
|
||||
case is_ok(emqx_bridge_proto_v1:lookup_from_all_nodes(Nodes, BridgeType, BridgeName)) of
|
||||
case is_ok(emqx_bridge_proto_v3:lookup_from_all_nodes(Nodes, BridgeType, BridgeName)) of
|
||||
{ok, [{ok, _} | _] = Results} ->
|
||||
{SuccCode, FormatFun([R || {ok, R} <- Results])};
|
||||
{ok, [{error, not_found} | _]} ->
|
||||
|
@ -600,7 +607,7 @@ do_lookup_from_all_nodes(BridgeType, BridgeName, SuccCode, FormatFun) ->
|
|||
|
||||
lookup_from_local_node(BridgeType, BridgeName) ->
|
||||
case emqx_bridge:lookup(BridgeType, BridgeName) of
|
||||
{ok, Res} -> {ok, format_resp(Res)};
|
||||
{ok, Res} -> {ok, format_resource(Res, node())};
|
||||
Error -> Error
|
||||
end.
|
||||
|
||||
|
@ -809,10 +816,7 @@ aggregate_metrics(
|
|||
aggregate_metrics(#{}, Metrics) ->
|
||||
Metrics.
|
||||
|
||||
format_resp(Data) ->
|
||||
format_resp(Data, node()).
|
||||
|
||||
format_resp(
|
||||
format_resource(
|
||||
#{
|
||||
type := Type,
|
||||
name := BridgeName,
|
||||
|
@ -988,7 +992,7 @@ do_bpapi_call(Node, Call, Args) ->
|
|||
do_bpapi_call_vsn(SupportedVersion, Call, Args) ->
|
||||
case lists:member(SupportedVersion, supported_versions(Call)) of
|
||||
true ->
|
||||
apply(emqx_bridge_proto_v2, Call, Args);
|
||||
apply(emqx_bridge_proto_v3, Call, Args);
|
||||
false ->
|
||||
{error, not_implemented}
|
||||
end.
|
||||
|
@ -998,9 +1002,9 @@ maybe_unwrap({error, not_implemented}) ->
|
|||
maybe_unwrap(RpcMulticallResult) ->
|
||||
emqx_rpc:unwrap_erpc(RpcMulticallResult).
|
||||
|
||||
supported_versions(start_bridge_to_node) -> [2];
|
||||
supported_versions(start_bridges_to_all_nodes) -> [2];
|
||||
supported_versions(_Call) -> [1, 2].
|
||||
supported_versions(start_bridge_to_node) -> [2, 3];
|
||||
supported_versions(start_bridges_to_all_nodes) -> [2, 3];
|
||||
supported_versions(_Call) -> [1, 2, 3].
|
||||
|
||||
to_hr_reason(nxdomain) ->
|
||||
<<"Host not found">>;
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
-module(emqx_bridge_resource).
|
||||
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
-include_lib("emqx_resource/include/emqx_resource.hrl").
|
||||
|
||||
-export([
|
||||
bridge_to_resource_type/1,
|
||||
|
@ -45,7 +46,12 @@
|
|||
]).
|
||||
|
||||
%% bi-directional bridge with producer/consumer or ingress/egress configs
|
||||
-define(IS_BI_DIR_BRIDGE(TYPE), TYPE =:= <<"mqtt">>; TYPE =:= <<"kafka">>).
|
||||
-define(IS_BI_DIR_BRIDGE(TYPE),
|
||||
(TYPE) =:= <<"mqtt">>
|
||||
).
|
||||
-define(IS_INGRESS_BRIDGE(TYPE),
|
||||
(TYPE) =:= <<"kafka_consumer">> orelse ?IS_BI_DIR_BRIDGE(TYPE)
|
||||
).
|
||||
|
||||
-if(?EMQX_RELEASE_EDITION == ee).
|
||||
bridge_to_resource_type(<<"mqtt">>) -> emqx_connector_mqtt;
|
||||
|
@ -219,7 +225,7 @@ recreate(Type, Name, Conf, Opts) ->
|
|||
).
|
||||
|
||||
create_dry_run(Type, Conf0) ->
|
||||
TmpPath0 = iolist_to_binary(["bridges-create-dry-run:", emqx_misc:gen_id(8)]),
|
||||
TmpPath0 = iolist_to_binary([?TEST_ID_PREFIX, emqx_misc:gen_id(8)]),
|
||||
TmpPath = emqx_misc:safe_filename(TmpPath0),
|
||||
Conf = emqx_map_lib:safe_atom_key_map(Conf0),
|
||||
case emqx_connector_ssl:convert_certs(TmpPath, Conf) of
|
||||
|
@ -297,12 +303,16 @@ parse_confs(
|
|||
max_retries => Retry
|
||||
}
|
||||
};
|
||||
parse_confs(Type, Name, Conf) when ?IS_BI_DIR_BRIDGE(Type) ->
|
||||
parse_confs(Type, Name, Conf) when ?IS_INGRESS_BRIDGE(Type) ->
|
||||
%% For some drivers that can be used as data-sources, we need to provide a
|
||||
%% hookpoint. The underlying driver will run `emqx_hooks:run/3` when it
|
||||
%% receives a message from the external database.
|
||||
BId = bridge_id(Type, Name),
|
||||
Conf#{hookpoint => <<"$bridges/", BId/binary>>, bridge_name => Name};
|
||||
%% TODO: rename this to `kafka_producer' after alias support is added
|
||||
%% to hocon; keeping this as just `kafka' for backwards compatibility.
|
||||
parse_confs(<<"kafka">> = _Type, Name, Conf) ->
|
||||
Conf#{bridge_name => Name};
|
||||
parse_confs(_Type, _Name, Conf) ->
|
||||
Conf.
|
||||
|
||||
|
|
|
@ -34,5 +34,3 @@ init([]) ->
|
|||
},
|
||||
ChildSpecs = [],
|
||||
{ok, {SupFlags, ChildSpecs}}.
|
||||
|
||||
%% internal functions
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
|
||||
-export([
|
||||
introduced_in/0,
|
||||
deprecated_since/0,
|
||||
|
||||
list_bridges/1,
|
||||
restart_bridge_to_node/3,
|
||||
|
@ -38,6 +39,9 @@
|
|||
introduced_in() ->
|
||||
"5.0.17".
|
||||
|
||||
deprecated_since() ->
|
||||
"5.0.21".
|
||||
|
||||
-spec list_bridges(node()) -> list() | emqx_rpc:badrpc().
|
||||
list_bridges(Node) ->
|
||||
rpc:call(Node, emqx_bridge, list, [], ?TIMEOUT).
|
||||
|
|
|
@ -0,0 +1,128 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_proto_v3).
|
||||
|
||||
-behaviour(emqx_bpapi).
|
||||
|
||||
-export([
|
||||
introduced_in/0,
|
||||
|
||||
list_bridges/1,
|
||||
list_bridges_on_nodes/1,
|
||||
restart_bridge_to_node/3,
|
||||
start_bridge_to_node/3,
|
||||
stop_bridge_to_node/3,
|
||||
lookup_from_all_nodes/3,
|
||||
restart_bridges_to_all_nodes/3,
|
||||
start_bridges_to_all_nodes/3,
|
||||
stop_bridges_to_all_nodes/3
|
||||
]).
|
||||
|
||||
-include_lib("emqx/include/bpapi.hrl").
|
||||
|
||||
-define(TIMEOUT, 15000).
|
||||
|
||||
introduced_in() ->
|
||||
"5.0.21".
|
||||
|
||||
-spec list_bridges(node()) -> list() | emqx_rpc:badrpc().
|
||||
list_bridges(Node) ->
|
||||
rpc:call(Node, emqx_bridge, list, [], ?TIMEOUT).
|
||||
|
||||
-spec list_bridges_on_nodes([node()]) ->
|
||||
emqx_rpc:erpc_multicall([emqx_resource:resource_data()]).
|
||||
list_bridges_on_nodes(Nodes) ->
|
||||
erpc:multicall(Nodes, emqx_bridge, list, [], ?TIMEOUT).
|
||||
|
||||
-type key() :: atom() | binary() | [byte()].
|
||||
|
||||
-spec restart_bridge_to_node(node(), key(), key()) ->
|
||||
term().
|
||||
restart_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||
rpc:call(
|
||||
Node,
|
||||
emqx_bridge_resource,
|
||||
restart,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec start_bridge_to_node(node(), key(), key()) ->
|
||||
term().
|
||||
start_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||
rpc:call(
|
||||
Node,
|
||||
emqx_bridge_resource,
|
||||
start,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec stop_bridge_to_node(node(), key(), key()) ->
|
||||
term().
|
||||
stop_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||
rpc:call(
|
||||
Node,
|
||||
emqx_bridge_resource,
|
||||
stop,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec restart_bridges_to_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall().
|
||||
restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_resource,
|
||||
restart,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec start_bridges_to_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall().
|
||||
start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_resource,
|
||||
start,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec stop_bridges_to_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall().
|
||||
stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_resource,
|
||||
stop,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
||||
|
||||
-spec lookup_from_all_nodes([node()], key(), key()) ->
|
||||
emqx_rpc:erpc_multicall().
|
||||
lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||
erpc:multicall(
|
||||
Nodes,
|
||||
emqx_bridge_api,
|
||||
lookup_from_local_node,
|
||||
[BridgeType, BridgeName],
|
||||
?TIMEOUT
|
||||
).
|
|
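The new v3 BPAPI replaces the per-node list_bridges/1 RPC with a single erpc multicall. A sketch of calling it and unwrapping the result, in the same style as the bridge HTTP API handler shown earlier in this diff, follows.

-module(bridge_proto_v3_usage_example).
-export([list_all_bridges/0]).

%% Sketch: list bridges on every running node via the v3 protocol and
%% pair each node with its (unwrapped) reply.
list_all_bridges() ->
    Nodes = mria:running_nodes(),
    Replies = emqx_bridge_proto_v3:list_bridges_on_nodes(Nodes),
    lists:zip(Nodes, emqx_rpc:unwrap_erpc(Replies)).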
@ -19,7 +19,6 @@
|
|||
-compile(export_all).
|
||||
|
||||
-import(emqx_dashboard_api_test_helpers, [request/4, uri/1]).
|
||||
-import(emqx_common_test_helpers, [on_exit/1]).
|
||||
|
||||
-include("emqx/include/emqx.hrl").
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
|
@ -164,9 +163,9 @@ init_per_testcase(_, Config) ->
|
|||
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
|
||||
ok = snabbkaffe:start_trace(),
|
||||
Config.
|
||||
|
||||
end_per_testcase(_, _Config) ->
|
||||
clear_resources(),
|
||||
emqx_common_test_helpers:call_janitor(),
|
||||
snabbkaffe:stop(),
|
||||
ok.
|
||||
|
||||
|
@ -710,13 +709,6 @@ t_mqtt_conn_bridge_egress_reconnect(_) ->
|
|||
}
|
||||
),
|
||||
|
||||
on_exit(fun() ->
|
||||
%% delete the bridge
|
||||
{ok, 204, <<>>} = request(delete, uri(["bridges", BridgeIDEgress]), []),
|
||||
{ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []),
|
||||
ok
|
||||
end),
|
||||
|
||||
%% we now test if the bridge works as expected
|
||||
LocalTopic = <<?EGRESS_LOCAL_TOPIC, "/1">>,
|
||||
RemoteTopic = <<?EGRESS_REMOTE_TOPIC, "/", LocalTopic/binary>>,
|
||||
|
@ -827,13 +819,6 @@ t_mqtt_conn_bridge_egress_async_reconnect(_) ->
|
|||
}
|
||||
),
|
||||
|
||||
on_exit(fun() ->
|
||||
%% delete the bridge
|
||||
{ok, 204, <<>>} = request(delete, uri(["bridges", BridgeIDEgress]), []),
|
||||
{ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []),
|
||||
ok
|
||||
end),
|
||||
|
||||
Self = self(),
|
||||
LocalTopic = <<?EGRESS_LOCAL_TOPIC, "/1">>,
|
||||
RemoteTopic = <<?EGRESS_REMOTE_TOPIC, "/", LocalTopic/binary>>,
|
||||
|
|
|
@ -38,10 +38,11 @@ init_per_suite(_Config) ->
|
|||
ok = emqx_common_test_helpers:start_apps([emqx_conf, emqx_bridge]),
|
||||
ok = emqx_connector_test_helpers:start_apps([emqx_resource]),
|
||||
{ok, _} = application:ensure_all_started(emqx_connector),
|
||||
snabbkaffe:fix_ct_logging(),
|
||||
[].
|
||||
|
||||
end_per_suite(_Config) ->
|
||||
ok = emqx_config:put([bridges], #{}),
|
||||
ok = emqx_config:put_raw([bridges], #{}),
|
||||
ok = emqx_common_test_helpers:stop_apps([emqx_conf, emqx_bridge]),
|
||||
ok = emqx_connector_test_helpers:stop_apps([emqx_resource]),
|
||||
_ = application:stop(emqx_connector),
|
||||
|
|
|
@ -204,7 +204,7 @@ do_multicall(M, F, A, RequiredSyncs, Timeout) ->
|
|||
query(TnxId) ->
|
||||
transaction(fun ?MODULE:trans_query/1, [TnxId]).
|
||||
|
||||
-spec reset() -> reset.
|
||||
-spec reset() -> ok.
|
||||
reset() -> gen_server:call(?MODULE, reset).
|
||||
|
||||
-spec status() -> {'atomic', [map()]} | {'aborted', Reason :: term()}.
|
||||
|
|
|
@ -146,17 +146,17 @@ dump_schema(Dir, SchemaModule, I18nFile) ->
|
|||
fun(Lang) ->
|
||||
gen_config_md(Dir, I18nFile, SchemaModule, Lang),
|
||||
gen_api_schema_json(Dir, I18nFile, Lang),
|
||||
gen_example_conf(Dir, I18nFile, SchemaModule, Lang)
|
||||
gen_example_conf(Dir, I18nFile, SchemaModule, Lang),
|
||||
gen_schema_json(Dir, I18nFile, SchemaModule, Lang)
|
||||
end,
|
||||
[en, zh]
|
||||
),
|
||||
gen_schema_json(Dir, I18nFile, SchemaModule).
|
||||
["en", "zh"]
|
||||
).
|
||||
|
||||
%% for scripts/spellcheck.
|
||||
gen_schema_json(Dir, I18nFile, SchemaModule) ->
|
||||
SchemaJsonFile = filename:join([Dir, "schema.json"]),
|
||||
gen_schema_json(Dir, I18nFile, SchemaModule, Lang) ->
|
||||
SchemaJsonFile = filename:join([Dir, "schema-" ++ Lang ++ ".json"]),
|
||||
io:format(user, "===< Generating: ~s~n", [SchemaJsonFile]),
|
||||
Opts = #{desc_file => I18nFile, lang => "en"},
|
||||
Opts = #{desc_file => I18nFile, lang => Lang},
|
||||
JsonMap = hocon_schema_json:gen(SchemaModule, Opts),
|
||||
IoData = jsx:encode(JsonMap, [space, {indent, 4}]),
|
||||
ok = file:write_file(SchemaJsonFile, IoData).
|
||||
|
@ -178,17 +178,15 @@ gen_api_schema_json_bridge(Dir, Lang) ->
|
|||
ok = do_gen_api_schema_json(File, emqx_bridge_api, SchemaInfo).
|
||||
|
||||
schema_filename(Dir, Prefix, Lang) ->
|
||||
Filename = Prefix ++ atom_to_list(Lang) ++ ".json",
|
||||
Filename = Prefix ++ Lang ++ ".json",
|
||||
filename:join([Dir, Filename]).
|
||||
|
||||
gen_config_md(Dir, I18nFile, SchemaModule, Lang0) ->
|
||||
Lang = atom_to_list(Lang0),
|
||||
gen_config_md(Dir, I18nFile, SchemaModule, Lang) ->
|
||||
SchemaMdFile = filename:join([Dir, "config-" ++ Lang ++ ".md"]),
|
||||
io:format(user, "===< Generating: ~s~n", [SchemaMdFile]),
|
||||
ok = gen_doc(SchemaMdFile, SchemaModule, I18nFile, Lang).
|
||||
|
||||
gen_example_conf(Dir, I18nFile, SchemaModule, Lang0) ->
|
||||
Lang = atom_to_list(Lang0),
|
||||
gen_example_conf(Dir, I18nFile, SchemaModule, Lang) ->
|
||||
SchemaMdFile = filename:join([Dir, "emqx.conf." ++ Lang ++ ".example"]),
|
||||
io:format(user, "===< Generating: ~s~n", [SchemaMdFile]),
|
||||
ok = gen_example(SchemaMdFile, SchemaModule, I18nFile, Lang).
|
||||
|
|
|
@ -25,7 +25,6 @@ all() ->
|
|||
emqx_common_test_helpers:all(?MODULE).
|
||||
|
||||
t_copy_conf_override_on_restarts(_Config) ->
|
||||
net_kernel:start(['master@127.0.0.1', longnames]),
|
||||
ct:timetrap({seconds, 120}),
|
||||
snabbkaffe:fix_ct_logging(),
|
||||
Cluster = cluster([core, core, core]),
|
||||
|
@ -165,11 +164,10 @@ cluster(Specs) ->
|
|||
{env, Env},
|
||||
{apps, [emqx_conf]},
|
||||
{load_schema, false},
|
||||
{join_to, false},
|
||||
{join_to, true},
|
||||
{env_handler, fun
|
||||
(emqx) ->
|
||||
application:set_env(emqx, boot_modules, []),
|
||||
io:format("~p~p~n", [node(), application:get_all_env(emqx)]),
|
||||
ok;
|
||||
(_) ->
|
||||
ok
|
||||
|
|
|
@ -106,7 +106,14 @@ fields(topology) ->
|
|||
{socket_timeout_ms, duration("socket_timeout")},
|
||||
{server_selection_timeout_ms, duration("server_selection_timeout")},
|
||||
{wait_queue_timeout_ms, duration("wait_queue_timeout")},
|
||||
{heartbeat_frequency_ms, fun heartbeat_frequency_ms/1},
|
||||
{heartbeat_frequency_ms,
|
||||
hoconsc:mk(
|
||||
emqx_schema:duration_ms(),
|
||||
#{
|
||||
default => <<"200s">>,
|
||||
desc => ?DESC("heartbeat_period")
|
||||
}
|
||||
)},
|
||||
{min_heartbeat_frequency_ms, duration("min_heartbeat_period")}
|
||||
].
|
||||
|
||||
|
@ -407,12 +414,6 @@ duration(Desc) ->
|
|||
desc => ?DESC(Desc)
|
||||
}.
|
||||
|
||||
heartbeat_frequency_ms(type) -> emqx_schema:duration_ms();
|
||||
heartbeat_frequency_ms(desc) -> ?DESC("heartbeat_period");
|
||||
heartbeat_frequency_ms(default) -> 200000;
|
||||
heartbeat_frequency_ms(validator) -> [?MIN(1)];
|
||||
heartbeat_frequency_ms(_) -> undefined.
|
||||
|
||||
max_overflow(type) -> non_neg_integer();
|
||||
max_overflow(desc) -> ?DESC("max_overflow");
|
||||
max_overflow(default) -> 0;
|
||||
|
|
|
@ -132,9 +132,11 @@ stop_listeners(Listeners) ->
|
|||
get_i18n() ->
|
||||
application:get_env(emqx_dashboard, i18n).
|
||||
|
||||
init_i18n(File, Lang) ->
|
||||
init_i18n(File, Lang) when is_atom(Lang) ->
|
||||
init_i18n(File, atom_to_list(Lang));
|
||||
init_i18n(File, Lang) when is_list(Lang) ->
|
||||
Cache = hocon_schema:new_desc_cache(File),
|
||||
application:set_env(emqx_dashboard, i18n, #{lang => atom_to_binary(Lang), cache => Cache}).
|
||||
application:set_env(emqx_dashboard, i18n, #{lang => Lang, cache => Cache}).
|
||||
|
||||
clear_i18n() ->
|
||||
case application:get_env(emqx_dashboard, i18n) of
|
||||
|
|
|
@ -163,7 +163,7 @@ diff_listeners(Type, Stop, Start) -> {#{Type => Stop}, #{Type => Start}}.
|
|||
|
||||
ensure_ssl_cert(#{<<"listeners">> := #{<<"https">> := #{<<"enable">> := true}}} = Conf) ->
|
||||
Https = emqx_map_lib:deep_get([<<"listeners">>, <<"https">>], Conf, undefined),
|
||||
Opts = #{required_keys => [<<"keyfile">>, <<"certfile">>, <<"cacertfile">>]},
|
||||
Opts = #{required_keys => [[<<"keyfile">>], [<<"certfile">>], [<<"cacertfile">>]]},
|
||||
case emqx_tls_lib:ensure_ssl_files(?DIR, Https, Opts) of
|
||||
{ok, undefined} ->
|
||||
{error, <<"ssl_cert_not_found">>};
|
||||
|
|
|
@ -24,13 +24,13 @@
|
|||
-include_lib("eunit/include/eunit.hrl").
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("emqx/include/emqx_hooks.hrl").
|
||||
-include_lib("emqx_conf/include/emqx_conf.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
|
||||
-define(DEFAULT_CLUSTER_NAME_ATOM, emqxcl).
|
||||
|
||||
-define(OTHER_CLUSTER_NAME_ATOM, test_emqx_cluster).
|
||||
-define(OTHER_CLUSTER_NAME_STRING, "test_emqx_cluster").
|
||||
-define(CLUSTER_RPC_SHARD, emqx_cluster_rpc_shard).
|
||||
|
||||
-define(CONF_DEFAULT, <<
|
||||
"\n"
|
||||
|
@ -54,6 +54,8 @@
|
|||
"}\n"
|
||||
>>).
|
||||
|
||||
-import(emqx_common_test_helpers, [on_exit/1]).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Setups
|
||||
%%--------------------------------------------------------------------
|
||||
|
@ -89,7 +91,7 @@ init_per_testcase(_, Config) ->
|
|||
timer:sleep(200),
|
||||
Config.
|
||||
|
||||
end_per_testcase(_, Config) ->
|
||||
end_per_testcase(_, _Config) ->
|
||||
case erlang:whereis(node()) of
|
||||
undefined ->
|
||||
ok;
|
||||
|
@ -97,7 +99,8 @@ end_per_testcase(_, Config) ->
|
|||
erlang:unlink(P),
|
||||
erlang:exit(P, kill)
|
||||
end,
|
||||
Config.
|
||||
emqx_common_test_helpers:call_janitor(),
|
||||
ok.
|
||||
|
||||
load_cfg(Cfg) ->
|
||||
ok = emqx_common_test_helpers:load_config(emqx_exhook_schema, Cfg).
|
||||
|
@ -300,6 +303,12 @@ t_cluster_name(_) ->
|
|||
|
||||
emqx_common_test_helpers:stop_apps([emqx, emqx_exhook]),
|
||||
emqx_common_test_helpers:start_apps([emqx, emqx_exhook], SetEnvFun),
|
||||
on_exit(fun() ->
|
||||
emqx_common_test_helpers:stop_apps([emqx, emqx_exhook]),
|
||||
load_cfg(?CONF_DEFAULT),
|
||||
emqx_common_test_helpers:start_apps([emqx_exhook]),
|
||||
mria:wait_for_tables([?CLUSTER_MFA, ?CLUSTER_COMMIT])
|
||||
end),
|
||||
|
||||
?assertEqual(?OTHER_CLUSTER_NAME_STRING, emqx_sys:cluster_name()),
|
||||
|
||||
|
|
|
@ -77,7 +77,7 @@ init_per_suite(Config) ->
|
|||
end_per_suite(Config) ->
|
||||
emqx_gateway_auth_ct:stop(),
|
||||
emqx_config:erase(gateway),
|
||||
emqx_mgmt_api_test_util:end_suite([cowboy, emqx_authn, emqx_gateway]),
|
||||
emqx_mgmt_api_test_util:end_suite([cowboy, emqx_conf, emqx_authn, emqx_gateway]),
|
||||
Config.
|
||||
|
||||
init_per_testcase(_Case, Config) ->
|
||||
|
|
|
@ -49,7 +49,6 @@
|
|||
<<"authorization">>,
|
||||
<<"authentication">>,
|
||||
<<"rpc">>,
|
||||
<<"db">>,
|
||||
<<"connectors">>,
|
||||
<<"slow_subs">>,
|
||||
<<"psk_authentication">>,
|
||||
|
|
|
@ -24,12 +24,15 @@ init_suite() ->
|
|||
init_suite([]).
|
||||
|
||||
init_suite(Apps) ->
|
||||
init_suite(Apps, fun set_special_configs/1).
|
||||
init_suite(Apps, fun set_special_configs/1, #{}).
|
||||
|
||||
init_suite(Apps, SetConfigs) ->
|
||||
init_suite(Apps, SetConfigs) when is_function(SetConfigs) ->
|
||||
init_suite(Apps, SetConfigs, #{}).
|
||||
|
||||
init_suite(Apps, SetConfigs, Opts) ->
|
||||
mria:start(),
|
||||
application:load(emqx_management),
|
||||
emqx_common_test_helpers:start_apps(Apps ++ [emqx_dashboard], SetConfigs),
|
||||
emqx_common_test_helpers:start_apps(Apps ++ [emqx_dashboard], SetConfigs, Opts),
|
||||
emqx_common_test_http:create_default_app().
|
||||
|
||||
end_suite() ->
|
||||
|
|
|
@@ -26,18 +26,6 @@ start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).

init([]) ->
    %% TODO: Add monitor plugins change.
    Monitor = emqx_plugins_monitor,
    _Children = [
        #{
            id => Monitor,
            start => {Monitor, start_link, []},
            restart => permanent,
            shutdown => brutal_kill,
            type => worker,
            modules => [Monitor]
        }
    ],
    SupFlags =
        #{
            strategy => one_for_one,

@@ -559,8 +559,8 @@ group_t_copy_plugin_to_a_new_node({'end', Config}) ->
    ok = rpc:call(CopyToNode, emqx_config, delete_override_conf_files, []),
    rpc:call(CopyToNode, ekka, leave, []),
    rpc:call(CopyFromNode, ekka, leave, []),
    {ok, _} = emqx_common_test_helpers:stop_slave(CopyToNode),
    {ok, _} = emqx_common_test_helpers:stop_slave(CopyFromNode),
    ok = emqx_common_test_helpers:stop_slave(CopyToNode),
    ok = emqx_common_test_helpers:stop_slave(CopyFromNode),
    ok = file:del_dir_r(proplists:get_value(to_install_dir, Config)),
    ok = file:del_dir_r(proplists:get_value(from_install_dir, Config));
group_t_copy_plugin_to_a_new_node(Config) ->

@@ -119,5 +119,5 @@
-define(AUTO_RESTART_INTERVAL, 60000).
-define(AUTO_RESTART_INTERVAL_RAW, <<"60s">>).

-define(TEST_ID_PREFIX, "_test_:").
-define(TEST_ID_PREFIX, "_probe_:").
-define(RES_METRICS, resource_metrics).

@@ -112,6 +112,8 @@

-export([apply_reply_fun/2]).

-export_type([resource_data/0]).

-optional_callbacks([
    on_query/3,
    on_batch_query/3,
@@ -258,7 +260,7 @@ query(ResId, Request) ->
-spec query(resource_id(), Request :: term(), query_opts()) ->
    Result :: term().
query(ResId, Request, Opts) ->
    case emqx_resource_manager:ets_lookup(ResId) of
    case emqx_resource_manager:lookup_cached(ResId) of
        {ok, _Group, #{query_mode := QM, mod := Module}} ->
            IsBufferSupported = is_buffer_supported(Module),
            case {IsBufferSupported, QM} of
@@ -309,7 +311,7 @@ set_resource_status_connecting(ResId) ->
-spec get_instance(resource_id()) ->
    {ok, resource_group(), resource_data()} | {error, Reason :: term()}.
get_instance(ResId) ->
    emqx_resource_manager:lookup(ResId).
    emqx_resource_manager:lookup_cached(ResId, [metrics]).

-spec fetch_creation_opts(map()) -> creation_opts().
fetch_creation_opts(Opts) ->
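A hedged example of calling the query/3 entry point above; the resource id and request are placeholders, and the timeout option mirrors the one used in the test suite:
%% Illustrative caller-side use; <<"my_res">> and get_state are stand-ins.
case emqx_resource:query(<<"my_res">>, get_state, #{timeout => 1_000}) of
    {ok, State} -> {ok, State};
    {error, Reason} -> {error, Reason}
end.
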
@@ -885,7 +885,7 @@ handle_async_worker_down(Data0, Pid) ->

call_query(QM0, Id, Index, Ref, Query, QueryOpts) ->
    ?tp(call_query_enter, #{id => Id, query => Query, query_mode => QM0}),
    case emqx_resource_manager:ets_lookup(Id) of
    case emqx_resource_manager:lookup_cached(Id) of
        {ok, _Group, #{status := stopped}} ->
            ?RESOURCE_ERROR(stopped, "resource stopped or disabled");
        {ok, _Group, Resource} ->

@@ -18,6 +18,7 @@

-include("emqx_resource.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("snabbkaffe/include/trace.hrl").

% API
-export([
@@ -35,7 +36,8 @@
    lookup/1,
    list_all/0,
    list_group/1,
    ets_lookup/1,
    lookup_cached/1,
    lookup_cached/2,
    get_metrics/1,
    reset_metrics/1
]).
@@ -229,14 +231,25 @@ set_resource_status_connecting(ResId) ->
-spec lookup(resource_id()) -> {ok, resource_group(), resource_data()} | {error, not_found}.
lookup(ResId) ->
    case safe_call(ResId, lookup, ?T_LOOKUP) of
        {error, timeout} -> ets_lookup(ResId);
        {error, timeout} -> lookup_cached(ResId, [metrics]);
        Result -> Result
    end.

%% @doc Lookup the group and data of a resource
-spec ets_lookup(resource_id()) -> {ok, resource_group(), resource_data()} | {error, not_found}.
ets_lookup(ResId) ->
%% @doc Lookup the group and data of a resource from the cache
-spec lookup_cached(resource_id()) -> {ok, resource_group(), resource_data()} | {error, not_found}.
lookup_cached(ResId) ->
    lookup_cached(ResId, []).

%% @doc Lookup the group and data of a resource from the cache
-spec lookup_cached(resource_id(), [Option]) ->
    {ok, resource_group(), resource_data()} | {error, not_found}
when
    Option :: metrics.
lookup_cached(ResId, Options) ->
    NeedMetrics = lists:member(metrics, Options),
    case read_cache(ResId) of
        {Group, Data} when NeedMetrics ->
            {ok, Group, data_record_to_external_map_with_metrics(Data)};
        {Group, Data} ->
            {ok, Group, data_record_to_external_map(Data)};
        not_found ->
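A brief sketch of how the cached lookup can be used by callers that only need the status; the helper name is illustrative and not part of the change:
%% Cheap status probe: no gen_statem call and no metrics read.
is_connected(ResId) ->
    case emqx_resource_manager:lookup_cached(ResId) of
        {ok, _Group, #{status := connected}} -> true;
        {ok, _Group, _Data} -> false;
        {error, not_found} -> false
    end.
%% When metrics are required, request them explicitly:
%%   emqx_resource_manager:lookup_cached(ResId, [metrics]).
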
@@ -253,7 +266,7 @@ reset_metrics(ResId) ->
    emqx_metrics_worker:reset_metrics(?RES_METRICS, ResId).

%% @doc Returns the data for all resources
-spec list_all() -> [resource_data()] | [].
-spec list_all() -> [resource_data()].
list_all() ->
    try
        [
@@ -291,26 +304,30 @@ start_link(MgrId, ResId, Group, ResourceType, Config, Opts) ->
        query_mode = maps:get(query_mode, Opts, sync),
        config = Config,
        opts = Opts,
        status = connecting,
        state = undefined,
        error = undefined
    },
    gen_statem:start_link(?MODULE, {Data, Opts}, []).

init({Data, Opts}) ->
init({DataIn, Opts}) ->
    process_flag(trap_exit, true),
    %% init the cache so that lookup/1 will always return something
    DataWithPid = Data#data{pid = self()},
    insert_cache(DataWithPid#data.id, DataWithPid#data.group, DataWithPid),
    Data = DataIn#data{pid = self()},
    case maps:get(start_after_created, Opts, ?START_AFTER_CREATED) of
        true -> {ok, connecting, DataWithPid, {next_event, internal, start_resource}};
        false -> {ok, stopped, DataWithPid}
        true ->
            %% init the cache so that lookup/1 will always return something
            UpdatedData = update_state(Data#data{status = connecting}),
            {ok, connecting, UpdatedData, {next_event, internal, start_resource}};
        false ->
            %% init the cache so that lookup/1 will always return something
            UpdatedData = update_state(Data#data{status = stopped}),
            {ok, stopped, UpdatedData}
    end.

terminate({shutdown, removed}, _State, _Data) ->
    ok;
terminate(_Reason, _State, Data) ->
    _ = stop_resource(Data),
    _ = maybe_clear_alarm(Data#data.id),
    delete_cache(Data#data.id, Data#data.manager_id),
    _ = maybe_stop_resource(Data),
    ok = delete_cache(Data#data.id, Data#data.manager_id),
    ok.

%% Behavior callback
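From the caller's side, the start_after_created branch above corresponds to the pattern exercised in the test suite; the id, group, and connector module below are placeholders:
%% Create the resource but keep it in the stopped state, then start it on demand.
{ok, _} = emqx_resource:create_local(
    <<"my_res">>,
    <<"my_group">>,
    my_connector_mod,
    #{name => example},
    #{start_after_created => false}
),
ok = emqx_resource:start(<<"my_res">>).
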
@ -321,11 +338,12 @@ callback_mode() -> [handle_event_function, state_enter].
|
|||
|
||||
% Called during testing to force a specific state
|
||||
handle_event({call, From}, set_resource_status_connecting, _State, Data) ->
|
||||
{next_state, connecting, Data#data{status = connecting}, [{reply, From, ok}]};
|
||||
UpdatedData = update_state(Data#data{status = connecting}, Data),
|
||||
{next_state, connecting, UpdatedData, [{reply, From, ok}]};
|
||||
% Called when the resource is to be restarted
|
||||
handle_event({call, From}, restart, _State, Data) ->
|
||||
_ = stop_resource(Data),
|
||||
start_resource(Data, From);
|
||||
DataNext = stop_resource(Data),
|
||||
start_resource(DataNext, From);
|
||||
% Called when the resource is to be started (also used for manual reconnect)
|
||||
handle_event({call, From}, start, State, Data) when
|
||||
State =:= stopped orelse
|
||||
|
@ -335,16 +353,14 @@ handle_event({call, From}, start, State, Data) when
|
|||
handle_event({call, From}, start, _State, _Data) ->
|
||||
{keep_state_and_data, [{reply, From, ok}]};
|
||||
% Called when the resource received a `quit` message
|
||||
handle_event(info, quit, stopped, _Data) ->
|
||||
{stop, {shutdown, quit}};
|
||||
handle_event(info, quit, _State, _Data) ->
|
||||
{stop, {shutdown, quit}};
|
||||
% Called when the resource is to be stopped
|
||||
handle_event({call, From}, stop, stopped, _Data) ->
|
||||
{keep_state_and_data, [{reply, From, ok}]};
|
||||
handle_event({call, From}, stop, _State, Data) ->
|
||||
Result = stop_resource(Data),
|
||||
{next_state, stopped, Data, [{reply, From, Result}]};
|
||||
UpdatedData = stop_resource(Data),
|
||||
{next_state, stopped, update_state(UpdatedData, Data), [{reply, From, ok}]};
|
||||
% Called when a resource is to be stopped and removed.
|
||||
handle_event({call, From}, {remove, ClearMetrics}, _State, Data) ->
|
||||
handle_remove_event(From, ClearMetrics, Data);
|
||||
|
@ -359,11 +375,9 @@ handle_event({call, From}, health_check, stopped, _Data) ->
|
|||
handle_event({call, From}, health_check, _State, Data) ->
|
||||
handle_manually_health_check(From, Data);
|
||||
% State: CONNECTING
|
||||
handle_event(enter, _OldState, connecting, Data) ->
|
||||
UpdatedData = Data#data{status = connecting},
|
||||
insert_cache(Data#data.id, Data#data.group, Data),
|
||||
Actions = [{state_timeout, 0, health_check}],
|
||||
{keep_state, UpdatedData, Actions};
|
||||
handle_event(enter, _OldState, connecting = State, Data) ->
|
||||
ok = log_state_consistency(State, Data),
|
||||
{keep_state_and_data, [{state_timeout, 0, health_check}]};
|
||||
handle_event(internal, start_resource, connecting, Data) ->
|
||||
start_resource(Data, undefined);
|
||||
handle_event(state_timeout, health_check, connecting, Data) ->
|
||||
|
@ -371,27 +385,23 @@ handle_event(state_timeout, health_check, connecting, Data) ->
|
|||
%% State: CONNECTED
|
||||
%% The connected state is entered after a successful on_start/2 of the callback mod
|
||||
%% and successful health_checks
|
||||
handle_event(enter, _OldState, connected, Data) ->
|
||||
UpdatedData = Data#data{status = connected},
|
||||
insert_cache(Data#data.id, Data#data.group, UpdatedData),
|
||||
handle_event(enter, _OldState, connected = State, Data) ->
|
||||
ok = log_state_consistency(State, Data),
|
||||
_ = emqx_alarm:deactivate(Data#data.id),
|
||||
Actions = [{state_timeout, health_check_interval(Data#data.opts), health_check}],
|
||||
{next_state, connected, UpdatedData, Actions};
|
||||
{keep_state_and_data, health_check_actions(Data)};
|
||||
handle_event(state_timeout, health_check, connected, Data) ->
|
||||
handle_connected_health_check(Data);
|
||||
%% State: DISCONNECTED
|
||||
handle_event(enter, _OldState, disconnected, Data) ->
|
||||
UpdatedData = Data#data{status = disconnected},
|
||||
insert_cache(Data#data.id, Data#data.group, UpdatedData),
|
||||
handle_disconnected_state_enter(UpdatedData);
|
||||
handle_event(enter, _OldState, disconnected = State, Data) ->
|
||||
ok = log_state_consistency(State, Data),
|
||||
{keep_state_and_data, retry_actions(Data)};
|
||||
handle_event(state_timeout, auto_retry, disconnected, Data) ->
|
||||
start_resource(Data, undefined);
|
||||
%% State: STOPPED
|
||||
%% The stopped state is entered after the resource has been explicitly stopped
|
||||
handle_event(enter, _OldState, stopped, Data) ->
|
||||
UpdatedData = Data#data{status = stopped},
|
||||
insert_cache(Data#data.id, Data#data.group, UpdatedData),
|
||||
{next_state, stopped, UpdatedData};
|
||||
handle_event(enter, _OldState, stopped = State, Data) ->
|
||||
ok = log_state_consistency(State, Data),
|
||||
{keep_state_and_data, []};
|
||||
% Ignore all other events
|
||||
handle_event(EventType, EventData, State, Data) ->
|
||||
?SLOG(
|
||||
|
@ -406,6 +416,22 @@ handle_event(EventType, EventData, State, Data) ->
|
|||
),
|
||||
keep_state_and_data.
|
||||
|
||||
log_state_consistency(State, #data{status = State} = Data) ->
|
||||
log_cache_consistency(read_cache(Data#data.id), Data);
|
||||
log_state_consistency(State, Data) ->
|
||||
?tp(warning, "inconsistent_state", #{
|
||||
state => State,
|
||||
data => Data
|
||||
}).
|
||||
|
||||
log_cache_consistency({_, Data}, Data) ->
|
||||
ok;
|
||||
log_cache_consistency({_, DataCached}, Data) ->
|
||||
?tp(warning, "inconsistent_cache", #{
|
||||
cache => DataCached,
|
||||
data => Data
|
||||
}).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% internal functions
|
||||
%%------------------------------------------------------------------------------
|
||||
|
@ -439,10 +465,12 @@ delete_cache(ResId, MgrId) ->
|
|||
end.
|
||||
|
||||
do_delete_cache(<<?TEST_ID_PREFIX, _/binary>> = ResId) ->
|
||||
ets:delete(?ETS_TABLE, {owner, ResId}),
|
||||
ets:delete(?ETS_TABLE, ResId);
|
||||
true = ets:delete(?ETS_TABLE, {owner, ResId}),
|
||||
true = ets:delete(?ETS_TABLE, ResId),
|
||||
ok;
|
||||
do_delete_cache(ResId) ->
|
||||
ets:delete(?ETS_TABLE, ResId).
|
||||
true = ets:delete(?ETS_TABLE, ResId),
|
||||
ok.
|
||||
|
||||
set_new_owner(ResId) ->
|
||||
MgrId = make_manager_id(ResId),
|
||||
|
@ -459,9 +487,6 @@ get_owner(ResId) ->
|
|||
[] -> not_found
|
||||
end.
|
||||
|
||||
handle_disconnected_state_enter(Data) ->
|
||||
{next_state, disconnected, Data, retry_actions(Data)}.
|
||||
|
||||
retry_actions(Data) ->
|
||||
case maps:get(auto_restart_interval, Data#data.opts, ?AUTO_RESTART_INTERVAL) of
|
||||
undefined ->
|
||||
|
@ -470,24 +495,27 @@ retry_actions(Data) ->
|
|||
[{state_timeout, RetryInterval, auto_retry}]
|
||||
end.
|
||||
|
||||
health_check_actions(Data) ->
|
||||
[{state_timeout, health_check_interval(Data#data.opts), health_check}].
|
||||
|
||||
handle_remove_event(From, ClearMetrics, Data) ->
|
||||
stop_resource(Data),
|
||||
_ = stop_resource(Data),
|
||||
ok = delete_cache(Data#data.id, Data#data.manager_id),
|
||||
ok = emqx_resource_buffer_worker_sup:stop_workers(Data#data.id, Data#data.opts),
|
||||
case ClearMetrics of
|
||||
true -> ok = emqx_metrics_worker:clear_metrics(?RES_METRICS, Data#data.id);
|
||||
false -> ok
|
||||
end,
|
||||
{stop_and_reply, normal, [{reply, From, ok}]}.
|
||||
{stop_and_reply, {shutdown, removed}, [{reply, From, ok}]}.
|
||||
|
||||
start_resource(Data, From) ->
|
||||
%% in case the emqx_resource:call_start/2 hangs, the lookup/1 can read status from the cache
|
||||
insert_cache(Data#data.id, Data#data.group, Data),
|
||||
case emqx_resource:call_start(Data#data.manager_id, Data#data.mod, Data#data.config) of
|
||||
{ok, ResourceState} ->
|
||||
UpdatedData = Data#data{state = ResourceState, status = connecting},
|
||||
UpdatedData = Data#data{status = connecting, state = ResourceState},
|
||||
%% Perform an initial health_check immediately before transitioning into a connected state
|
||||
Actions = maybe_reply([{state_timeout, 0, health_check}], From, ok),
|
||||
{next_state, connecting, UpdatedData, Actions};
|
||||
{next_state, connecting, update_state(UpdatedData, Data), Actions};
|
||||
{error, Reason} = Err ->
|
||||
?SLOG(warning, #{
|
||||
msg => start_resource_failed,
|
||||
|
@ -497,34 +525,42 @@ start_resource(Data, From) ->
|
|||
_ = maybe_alarm(disconnected, Data#data.id),
|
||||
%% Keep track of the error reason why the connection did not work
|
||||
%% so that the Reason can be returned when the verification call is made.
|
||||
UpdatedData = Data#data{error = Reason},
|
||||
UpdatedData = Data#data{status = disconnected, error = Reason},
|
||||
Actions = maybe_reply(retry_actions(UpdatedData), From, Err),
|
||||
{next_state, disconnected, UpdatedData, Actions}
|
||||
{next_state, disconnected, update_state(UpdatedData, Data), Actions}
|
||||
end.
|
||||
|
||||
stop_resource(#data{state = undefined, id = ResId} = _Data) ->
|
||||
_ = maybe_clear_alarm(ResId),
|
||||
ok = emqx_metrics_worker:reset_metrics(?RES_METRICS, ResId),
|
||||
ok;
|
||||
stop_resource(Data) ->
|
||||
maybe_stop_resource(#data{status = Status} = Data) when Status /= stopped ->
|
||||
stop_resource(Data);
|
||||
maybe_stop_resource(#data{status = stopped} = Data) ->
|
||||
Data.
|
||||
|
||||
stop_resource(#data{state = ResState, id = ResId} = Data) ->
|
||||
%% We don't care the return value of the Mod:on_stop/2.
|
||||
%% The callback mod should make sure the resource is stopped after on_stop/2
|
||||
%% is returned.
|
||||
ResId = Data#data.id,
|
||||
_ = emqx_resource:call_stop(Data#data.manager_id, Data#data.mod, Data#data.state),
|
||||
case ResState /= undefined of
|
||||
true ->
|
||||
emqx_resource:call_stop(Data#data.manager_id, Data#data.mod, ResState);
|
||||
false ->
|
||||
ok
|
||||
end,
|
||||
_ = maybe_clear_alarm(ResId),
|
||||
ok = emqx_metrics_worker:reset_metrics(?RES_METRICS, ResId),
|
||||
ok.
|
||||
Data#data{status = stopped}.
|
||||
|
||||
make_test_id() ->
|
||||
RandId = iolist_to_binary(emqx_misc:gen_id(16)),
|
||||
<<?TEST_ID_PREFIX, RandId/binary>>.
|
||||
|
||||
handle_manually_health_check(From, Data) ->
|
||||
with_health_check(Data, fun(Status, UpdatedData) ->
|
||||
Actions = [{reply, From, {ok, Status}}],
|
||||
{next_state, Status, UpdatedData, Actions}
|
||||
end).
|
||||
with_health_check(
|
||||
Data,
|
||||
fun(Status, UpdatedData) ->
|
||||
Actions = [{reply, From, {ok, Status}}],
|
||||
{next_state, Status, UpdatedData, Actions}
|
||||
end
|
||||
).
|
||||
|
||||
handle_connecting_health_check(Data) ->
|
||||
with_health_check(
|
||||
|
@ -533,8 +569,7 @@ handle_connecting_health_check(Data) ->
|
|||
(connected, UpdatedData) ->
|
||||
{next_state, connected, UpdatedData};
|
||||
(connecting, UpdatedData) ->
|
||||
Actions = [{state_timeout, health_check_interval(Data#data.opts), health_check}],
|
||||
{keep_state, UpdatedData, Actions};
|
||||
{keep_state, UpdatedData, health_check_actions(UpdatedData)};
|
||||
(disconnected, UpdatedData) ->
|
||||
{next_state, disconnected, UpdatedData}
|
||||
end
|
||||
|
@ -545,8 +580,7 @@ handle_connected_health_check(Data) ->
|
|||
Data,
|
||||
fun
|
||||
(connected, UpdatedData) ->
|
||||
Actions = [{state_timeout, health_check_interval(Data#data.opts), health_check}],
|
||||
{keep_state, UpdatedData, Actions};
|
||||
{keep_state, UpdatedData, health_check_actions(UpdatedData)};
|
||||
(Status, UpdatedData) ->
|
||||
?SLOG(warning, #{
|
||||
msg => health_check_failed,
|
||||
|
@ -568,8 +602,16 @@ with_health_check(Data, Func) ->
|
|||
UpdatedData = Data#data{
|
||||
state = NewState, status = Status, error = Err
|
||||
},
|
||||
insert_cache(ResId, UpdatedData#data.group, UpdatedData),
|
||||
Func(Status, UpdatedData).
|
||||
Func(Status, update_state(UpdatedData, Data)).
|
||||
|
||||
update_state(Data) ->
|
||||
update_state(Data, undefined).
|
||||
|
||||
update_state(DataWas, DataWas) ->
|
||||
DataWas;
|
||||
update_state(Data, _DataWas) ->
|
||||
_ = insert_cache(Data#data.id, Data#data.group, Data),
|
||||
Data.
|
||||
|
||||
health_check_interval(Opts) ->
|
||||
maps:get(health_check_interval, Opts, ?HEALTHCHECK_INTERVAL).
|
||||
|
|
|
@@ -75,8 +75,7 @@ on_start(InstId, #{name := Name} = Opts) ->
on_stop(_InstId, #{stop_error := true}) ->
    {error, stop_error};
on_stop(_InstId, #{pid := Pid}) ->
    erlang:exit(Pid, shutdown),
    ok.
    stop_counter_process(Pid).

on_query(_InstId, get_state, State) ->
    {ok, State};
@@ -247,6 +246,15 @@ spawn_counter_process(Name, Register) ->
    true = maybe_register(Name, Pid, Register),
    Pid.

stop_counter_process(Pid) ->
    true = erlang:is_process_alive(Pid),
    true = erlang:exit(Pid, shutdown),
    receive
        {'EXIT', Pid, shutdown} -> ok
    after 5000 ->
        {error, timeout}
    end.

counter_loop() ->
    counter_loop(#{
        counter => 0,
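The stop helper above waits for an 'EXIT' message, which assumes the caller is linked to the counter process and traps exits, as the resource manager does. A monitor-based variant (an illustrative alternative, not what this module does) avoids that assumption:
stop_and_wait(Pid) ->
    MRef = erlang:monitor(process, Pid),
    erlang:exit(Pid, shutdown),
    receive
        {'DOWN', MRef, process, Pid, _Reason} -> ok
    after 5000 ->
        erlang:demonitor(MRef, [flush]),
        {error, timeout}
    end.
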
@ -72,115 +72,156 @@ t_check_config(_) ->
|
|||
{error, _} = emqx_resource:check_config(?TEST_RESOURCE, #{invalid => config}).
|
||||
|
||||
t_create_remove(_) ->
|
||||
{error, _} = emqx_resource:check_and_create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{unknown => test_resource}
|
||||
),
|
||||
?check_trace(
|
||||
begin
|
||||
?assertMatch(
|
||||
{error, _},
|
||||
emqx_resource:check_and_create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{unknown => test_resource}
|
||||
)
|
||||
),
|
||||
|
||||
{ok, _} = emqx_resource:create(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource}
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:create(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource}
|
||||
)
|
||||
),
|
||||
|
||||
{ok, _} = emqx_resource:recreate(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource},
|
||||
#{}
|
||||
),
|
||||
{ok, #{pid := Pid}} = emqx_resource:query(?ID, get_state),
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:recreate(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource},
|
||||
#{}
|
||||
)
|
||||
),
|
||||
|
||||
?assert(is_process_alive(Pid)),
|
||||
{ok, #{pid := Pid}} = emqx_resource:query(?ID, get_state),
|
||||
|
||||
ok = emqx_resource:remove(?ID),
|
||||
{error, _} = emqx_resource:remove(?ID),
|
||||
?assert(is_process_alive(Pid)),
|
||||
|
||||
?assertNot(is_process_alive(Pid)).
|
||||
?assertEqual(ok, emqx_resource:remove(?ID)),
|
||||
?assertMatch({error, _}, emqx_resource:remove(?ID)),
|
||||
|
||||
?assertNot(is_process_alive(Pid))
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("inconsistent_state", Trace)),
|
||||
?assertEqual([], ?of_kind("inconsistent_cache", Trace))
|
||||
end
|
||||
).
|
||||
|
||||
t_create_remove_local(_) ->
|
||||
{error, _} = emqx_resource:check_and_create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{unknown => test_resource}
|
||||
),
|
||||
?check_trace(
|
||||
begin
|
||||
?assertMatch(
|
||||
{error, _},
|
||||
emqx_resource:check_and_create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{unknown => test_resource}
|
||||
)
|
||||
),
|
||||
|
||||
{ok, _} = emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource}
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource}
|
||||
)
|
||||
),
|
||||
|
||||
emqx_resource:recreate_local(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource},
|
||||
#{}
|
||||
),
|
||||
{ok, #{pid := Pid}} = emqx_resource:query(?ID, get_state),
|
||||
emqx_resource:recreate_local(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource},
|
||||
#{}
|
||||
),
|
||||
|
||||
?assert(is_process_alive(Pid)),
|
||||
{ok, #{pid := Pid}} = emqx_resource:query(?ID, get_state),
|
||||
|
||||
emqx_resource:set_resource_status_connecting(?ID),
|
||||
?assert(is_process_alive(Pid)),
|
||||
|
||||
emqx_resource:recreate_local(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource},
|
||||
#{}
|
||||
),
|
||||
emqx_resource:set_resource_status_connecting(?ID),
|
||||
|
||||
ok = emqx_resource:remove_local(?ID),
|
||||
{error, _} = emqx_resource:remove_local(?ID),
|
||||
emqx_resource:recreate_local(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource},
|
||||
#{}
|
||||
),
|
||||
|
||||
?assertMatch(
|
||||
?RESOURCE_ERROR(not_found),
|
||||
emqx_resource:query(?ID, get_state)
|
||||
),
|
||||
?assertNot(is_process_alive(Pid)).
|
||||
?assertEqual(ok, emqx_resource:remove_local(?ID)),
|
||||
?assertMatch({error, _}, emqx_resource:remove_local(?ID)),
|
||||
|
||||
?assertMatch(
|
||||
?RESOURCE_ERROR(not_found),
|
||||
emqx_resource:query(?ID, get_state)
|
||||
),
|
||||
|
||||
?assertNot(is_process_alive(Pid))
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("inconsistent_state", Trace)),
|
||||
?assertEqual([], ?of_kind("inconsistent_cache", Trace))
|
||||
end
|
||||
).
|
||||
|
||||
t_do_not_start_after_created(_) ->
|
||||
ct:pal("creating resource"),
|
||||
{ok, _} = emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource},
|
||||
#{start_after_created => false}
|
||||
),
|
||||
%% the resource should remain `disconnected` after created
|
||||
timer:sleep(200),
|
||||
?assertMatch(
|
||||
?RESOURCE_ERROR(stopped),
|
||||
emqx_resource:query(?ID, get_state)
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _, #{status := stopped}},
|
||||
emqx_resource:get_instance(?ID)
|
||||
),
|
||||
?check_trace(
|
||||
begin
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource},
|
||||
#{start_after_created => false}
|
||||
)
|
||||
),
|
||||
%% the resource should remain `disconnected` after created
|
||||
timer:sleep(200),
|
||||
?assertMatch(
|
||||
?RESOURCE_ERROR(stopped),
|
||||
emqx_resource:query(?ID, get_state)
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _, #{status := stopped}},
|
||||
emqx_resource:get_instance(?ID)
|
||||
),
|
||||
|
||||
%% start the resource manually..
|
||||
ct:pal("starting resource manually"),
|
||||
ok = emqx_resource:start(?ID),
|
||||
{ok, #{pid := Pid}} = emqx_resource:query(?ID, get_state),
|
||||
?assert(is_process_alive(Pid)),
|
||||
%% start the resource manually..
|
||||
?assertEqual(ok, emqx_resource:start(?ID)),
|
||||
{ok, #{pid := Pid}} = emqx_resource:query(?ID, get_state),
|
||||
?assert(is_process_alive(Pid)),
|
||||
|
||||
%% restart the resource
|
||||
ct:pal("restarting resource"),
|
||||
ok = emqx_resource:restart(?ID),
|
||||
?assertNot(is_process_alive(Pid)),
|
||||
{ok, #{pid := Pid2}} = emqx_resource:query(?ID, get_state),
|
||||
?assert(is_process_alive(Pid2)),
|
||||
%% restart the resource
|
||||
?assertEqual(ok, emqx_resource:restart(?ID)),
|
||||
?assertNot(is_process_alive(Pid)),
|
||||
{ok, #{pid := Pid2}} = emqx_resource:query(?ID, get_state),
|
||||
?assert(is_process_alive(Pid2)),
|
||||
|
||||
ct:pal("removing resource"),
|
||||
ok = emqx_resource:remove_local(?ID),
|
||||
?assertEqual(ok, emqx_resource:remove_local(?ID)),
|
||||
|
||||
?assertNot(is_process_alive(Pid2)).
|
||||
?assertNot(is_process_alive(Pid2))
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("inconsistent_state", Trace)),
|
||||
?assertEqual([], ?of_kind("inconsistent_cache", Trace))
|
||||
end
|
||||
).
|
||||
|
||||
t_query(_) ->
|
||||
{ok, _} = emqx_resource:create_local(
|
||||
|
@ -222,7 +263,11 @@ t_batch_query_counter(_) ->
|
|||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource, register => true},
|
||||
#{batch_size => BatchSize, query_mode => sync}
|
||||
#{
|
||||
batch_size => BatchSize,
|
||||
batch_time => 100,
|
||||
query_mode => sync
|
||||
}
|
||||
),
|
||||
|
||||
?check_trace(
|
||||
|
@ -250,8 +295,15 @@ t_batch_query_counter(_) ->
|
|||
ok
|
||||
end,
|
||||
fun(Trace) ->
|
||||
QueryTrace = ?of_kind(call_batch_query, Trace),
|
||||
?assertMatch([#{batch := BatchReq} | _] when length(BatchReq) > 1, QueryTrace)
|
||||
QueryTrace = [
|
||||
Event
|
||||
|| Event = #{
|
||||
?snk_kind := call_batch_query,
|
||||
batch := BatchReq
|
||||
} <- Trace,
|
||||
length(BatchReq) > 1
|
||||
],
|
||||
?assertMatch([_ | _], QueryTrace)
|
||||
end
|
||||
),
|
||||
{ok, NMsgs} = emqx_resource:query(?ID, get_counter),
|
||||
|
@ -581,6 +633,7 @@ t_query_counter_async_inflight_batch(_) ->
|
|||
#{
|
||||
query_mode => async,
|
||||
batch_size => BatchSize,
|
||||
batch_time => 100,
|
||||
async_inflight_window => WindowSize,
|
||||
worker_pool_size => 1,
|
||||
resume_interval => 300
|
||||
|
@ -602,19 +655,18 @@ t_query_counter_async_inflight_batch(_) ->
|
|||
5_000
|
||||
),
|
||||
fun(Trace) ->
|
||||
QueryTrace = ?of_kind(call_batch_query_async, Trace),
|
||||
?assertMatch(
|
||||
[
|
||||
#{
|
||||
batch := [
|
||||
{query, _, {inc_counter, 1}, _, _},
|
||||
{query, _, {inc_counter, 1}, _, _}
|
||||
]
|
||||
}
|
||||
| _
|
||||
],
|
||||
QueryTrace
|
||||
)
|
||||
QueryTrace = [
|
||||
Event
|
||||
|| Event = #{
|
||||
?snk_kind := call_batch_query_async,
|
||||
batch := [
|
||||
{query, _, {inc_counter, 1}, _, _},
|
||||
{query, _, {inc_counter, 1}, _, _}
|
||||
]
|
||||
} <-
|
||||
Trace
|
||||
],
|
||||
?assertMatch([_ | _], QueryTrace)
|
||||
end
|
||||
),
|
||||
tap_metrics(?LINE),
|
||||
|
@ -771,153 +823,210 @@ t_query_counter_async_inflight_batch(_) ->
|
|||
ok = emqx_resource:remove_local(?ID).
|
||||
|
||||
t_healthy_timeout(_) ->
|
||||
{ok, _} = emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => <<"bad_not_atom_name">>, register => true},
|
||||
%% the ?TEST_RESOURCE always returns the `Mod:on_get_status/2` 300ms later.
|
||||
#{health_check_interval => 200}
|
||||
),
|
||||
?assertMatch(
|
||||
{error, {resource_error, #{reason := timeout}}},
|
||||
emqx_resource:query(?ID, get_state, #{timeout => 1_000})
|
||||
),
|
||||
?assertMatch({ok, _Group, #{status := disconnected}}, emqx_resource_manager:ets_lookup(?ID)),
|
||||
ok = emqx_resource:remove_local(?ID).
|
||||
?check_trace(
|
||||
begin
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => <<"bad_not_atom_name">>, register => true},
|
||||
%% the ?TEST_RESOURCE always returns the `Mod:on_get_status/2` 300ms later.
|
||||
#{health_check_interval => 200}
|
||||
)
|
||||
),
|
||||
?assertMatch(
|
||||
{error, {resource_error, #{reason := timeout}}},
|
||||
emqx_resource:query(?ID, get_state, #{timeout => 1_000})
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _Group, #{status := disconnected}}, emqx_resource_manager:lookup(?ID)
|
||||
),
|
||||
?assertEqual(ok, emqx_resource:remove_local(?ID))
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("inconsistent_state", Trace)),
|
||||
?assertEqual([], ?of_kind("inconsistent_cache", Trace))
|
||||
end
|
||||
).
|
||||
|
||||
t_healthy(_) ->
|
||||
{ok, _} = emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource}
|
||||
),
|
||||
{ok, #{pid := Pid}} = emqx_resource:query(?ID, get_state),
|
||||
timer:sleep(300),
|
||||
emqx_resource:set_resource_status_connecting(?ID),
|
||||
?check_trace(
|
||||
begin
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource}
|
||||
)
|
||||
),
|
||||
{ok, #{pid := Pid}} = emqx_resource:query(?ID, get_state),
|
||||
timer:sleep(300),
|
||||
emqx_resource:set_resource_status_connecting(?ID),
|
||||
|
||||
{ok, connected} = emqx_resource:health_check(?ID),
|
||||
?assertMatch(
|
||||
[#{status := connected}],
|
||||
emqx_resource:list_instances_verbose()
|
||||
),
|
||||
?assertEqual({ok, connected}, emqx_resource:health_check(?ID)),
|
||||
?assertMatch(
|
||||
[#{status := connected}],
|
||||
emqx_resource:list_instances_verbose()
|
||||
),
|
||||
|
||||
erlang:exit(Pid, shutdown),
|
||||
erlang:exit(Pid, shutdown),
|
||||
|
||||
?assertEqual({ok, disconnected}, emqx_resource:health_check(?ID)),
|
||||
?assertEqual({ok, disconnected}, emqx_resource:health_check(?ID)),
|
||||
|
||||
?assertMatch(
|
||||
[#{status := disconnected}],
|
||||
emqx_resource:list_instances_verbose()
|
||||
),
|
||||
?assertMatch(
|
||||
[#{status := disconnected}],
|
||||
emqx_resource:list_instances_verbose()
|
||||
),
|
||||
|
||||
ok = emqx_resource:remove_local(?ID).
|
||||
?assertEqual(ok, emqx_resource:remove_local(?ID))
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("inconsistent_state", Trace)),
|
||||
?assertEqual([], ?of_kind("inconsistent_cache", Trace))
|
||||
end
|
||||
).
|
||||
|
||||
t_stop_start(_) ->
|
||||
{error, _} = emqx_resource:check_and_create(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{unknown => test_resource}
|
||||
),
|
||||
?check_trace(
|
||||
begin
|
||||
?assertMatch(
|
||||
{error, _},
|
||||
emqx_resource:check_and_create(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{unknown => test_resource}
|
||||
)
|
||||
),
|
||||
|
||||
{ok, _} = emqx_resource:check_and_create(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{<<"name">> => <<"test_resource">>}
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:check_and_create(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{<<"name">> => <<"test_resource">>}
|
||||
)
|
||||
),
|
||||
|
||||
%% add some metrics to test their persistence
|
||||
WorkerID0 = <<"worker:0">>,
|
||||
WorkerID1 = <<"worker:1">>,
|
||||
emqx_resource_metrics:inflight_set(?ID, WorkerID0, 2),
|
||||
emqx_resource_metrics:inflight_set(?ID, WorkerID1, 3),
|
||||
?assertEqual(5, emqx_resource_metrics:inflight_get(?ID)),
|
||||
%% add some metrics to test their persistence
|
||||
WorkerID0 = <<"worker:0">>,
|
||||
WorkerID1 = <<"worker:1">>,
|
||||
emqx_resource_metrics:inflight_set(?ID, WorkerID0, 2),
|
||||
emqx_resource_metrics:inflight_set(?ID, WorkerID1, 3),
|
||||
?assertEqual(5, emqx_resource_metrics:inflight_get(?ID)),
|
||||
|
||||
{ok, _} = emqx_resource:check_and_recreate(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{<<"name">> => <<"test_resource">>},
|
||||
#{}
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:check_and_recreate(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{<<"name">> => <<"test_resource">>},
|
||||
#{}
|
||||
)
|
||||
),
|
||||
|
||||
{ok, #{pid := Pid0}} = emqx_resource:query(?ID, get_state),
|
||||
{ok, #{pid := Pid0}} = emqx_resource:query(?ID, get_state),
|
||||
|
||||
?assert(is_process_alive(Pid0)),
|
||||
?assert(is_process_alive(Pid0)),
|
||||
|
||||
%% metrics are reset when recreating
|
||||
%% depending on timing, might show the request we just did.
|
||||
ct:sleep(500),
|
||||
?assertEqual(0, emqx_resource_metrics:inflight_get(?ID)),
|
||||
%% metrics are reset when recreating
|
||||
%% depending on timing, might show the request we just did.
|
||||
ct:sleep(500),
|
||||
?assertEqual(0, emqx_resource_metrics:inflight_get(?ID)),
|
||||
|
||||
ok = emqx_resource:stop(?ID),
|
||||
ok = emqx_resource:stop(?ID),
|
||||
|
||||
?assertNot(is_process_alive(Pid0)),
|
||||
?assertNot(is_process_alive(Pid0)),
|
||||
|
||||
?assertMatch(
|
||||
?RESOURCE_ERROR(stopped),
|
||||
emqx_resource:query(?ID, get_state)
|
||||
),
|
||||
?assertMatch(
|
||||
?RESOURCE_ERROR(stopped),
|
||||
emqx_resource:query(?ID, get_state)
|
||||
),
|
||||
|
||||
ok = emqx_resource:restart(?ID),
|
||||
timer:sleep(300),
|
||||
?assertEqual(ok, emqx_resource:restart(?ID)),
|
||||
timer:sleep(300),
|
||||
|
||||
{ok, #{pid := Pid1}} = emqx_resource:query(?ID, get_state),
|
||||
{ok, #{pid := Pid1}} = emqx_resource:query(?ID, get_state),
|
||||
|
||||
?assert(is_process_alive(Pid1)),
|
||||
?assert(is_process_alive(Pid1)),
|
||||
|
||||
%% now stop while resetting the metrics
|
||||
ct:sleep(500),
|
||||
emqx_resource_metrics:inflight_set(?ID, WorkerID0, 1),
|
||||
emqx_resource_metrics:inflight_set(?ID, WorkerID1, 4),
|
||||
?assertEqual(5, emqx_resource_metrics:inflight_get(?ID)),
|
||||
ok = emqx_resource:stop(?ID),
|
||||
?assertEqual(0, emqx_resource_metrics:inflight_get(?ID)),
|
||||
%% now stop while resetting the metrics
|
||||
ct:sleep(500),
|
||||
emqx_resource_metrics:inflight_set(?ID, WorkerID0, 1),
|
||||
emqx_resource_metrics:inflight_set(?ID, WorkerID1, 4),
|
||||
?assertEqual(5, emqx_resource_metrics:inflight_get(?ID)),
|
||||
?assertEqual(ok, emqx_resource:stop(?ID)),
|
||||
?assertEqual(0, emqx_resource_metrics:inflight_get(?ID))
|
||||
end,
|
||||
|
||||
ok.
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("inconsistent_state", Trace)),
|
||||
?assertEqual([], ?of_kind("inconsistent_cache", Trace))
|
||||
end
|
||||
).
|
||||
|
||||
t_stop_start_local(_) ->
|
||||
{error, _} = emqx_resource:check_and_create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{unknown => test_resource}
|
||||
),
|
||||
?check_trace(
|
||||
begin
|
||||
?assertMatch(
|
||||
{error, _},
|
||||
emqx_resource:check_and_create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{unknown => test_resource}
|
||||
)
|
||||
),
|
||||
|
||||
{ok, _} = emqx_resource:check_and_create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{<<"name">> => <<"test_resource">>}
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:check_and_create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{<<"name">> => <<"test_resource">>}
|
||||
)
|
||||
),
|
||||
|
||||
{ok, _} = emqx_resource:check_and_recreate_local(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{<<"name">> => <<"test_resource">>},
|
||||
#{}
|
||||
),
|
||||
?assertMatch(
|
||||
{ok, _},
|
||||
emqx_resource:check_and_recreate_local(
|
||||
?ID,
|
||||
?TEST_RESOURCE,
|
||||
#{<<"name">> => <<"test_resource">>},
|
||||
#{}
|
||||
)
|
||||
),
|
||||
|
||||
{ok, #{pid := Pid0}} = emqx_resource:query(?ID, get_state),
|
||||
{ok, #{pid := Pid0}} = emqx_resource:query(?ID, get_state),
|
||||
|
||||
?assert(is_process_alive(Pid0)),
|
||||
?assert(is_process_alive(Pid0)),
|
||||
|
||||
ok = emqx_resource:stop(?ID),
|
||||
?assertEqual(ok, emqx_resource:stop(?ID)),
|
||||
|
||||
?assertNot(is_process_alive(Pid0)),
|
||||
?assertNot(is_process_alive(Pid0)),
|
||||
|
||||
?assertMatch(
|
||||
?RESOURCE_ERROR(stopped),
|
||||
emqx_resource:query(?ID, get_state)
|
||||
),
|
||||
?assertMatch(
|
||||
?RESOURCE_ERROR(stopped),
|
||||
emqx_resource:query(?ID, get_state)
|
||||
),
|
||||
|
||||
ok = emqx_resource:restart(?ID),
|
||||
?assertEqual(ok, emqx_resource:restart(?ID)),
|
||||
|
||||
{ok, #{pid := Pid1}} = emqx_resource:query(?ID, get_state),
|
||||
{ok, #{pid := Pid1}} = emqx_resource:query(?ID, get_state),
|
||||
|
||||
?assert(is_process_alive(Pid1)).
|
||||
?assert(is_process_alive(Pid1))
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("inconsistent_state", Trace)),
|
||||
?assertEqual([], ?of_kind("inconsistent_cache", Trace))
|
||||
end
|
||||
).
|
||||
|
||||
t_list_filter(_) ->
|
||||
{ok, _} = emqx_resource:create_local(
|
||||
|
@ -1031,16 +1140,24 @@ t_auto_retry(_) ->
|
|||
?assertEqual(ok, Res).
|
||||
|
||||
t_health_check_disconnected(_) ->
|
||||
_ = emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource, create_error => true},
|
||||
#{auto_retry_interval => 100}
|
||||
),
|
||||
?assertEqual(
|
||||
{ok, disconnected},
|
||||
emqx_resource:health_check(?ID)
|
||||
?check_trace(
|
||||
begin
|
||||
_ = emqx_resource:create_local(
|
||||
?ID,
|
||||
?DEFAULT_RESOURCE_GROUP,
|
||||
?TEST_RESOURCE,
|
||||
#{name => test_resource, create_error => true},
|
||||
#{auto_retry_interval => 100}
|
||||
),
|
||||
?assertEqual(
|
||||
{ok, disconnected},
|
||||
emqx_resource:health_check(?ID)
|
||||
)
|
||||
end,
|
||||
fun(Trace) ->
|
||||
?assertEqual([], ?of_kind("inconsistent_state", Trace)),
|
||||
?assertEqual([], ?of_kind("inconsistent_cache", Trace))
|
||||
end
|
||||
).
|
||||
|
||||
t_unblock_only_required_buffer_workers(_) ->
|
||||
|
@ -1051,7 +1168,8 @@ t_unblock_only_required_buffer_workers(_) ->
|
|||
#{name => test_resource},
|
||||
#{
|
||||
query_mode => async,
|
||||
batch_size => 5
|
||||
batch_size => 5,
|
||||
batch_time => 100
|
||||
}
|
||||
),
|
||||
lists:foreach(
|
||||
|
@ -1065,7 +1183,8 @@ t_unblock_only_required_buffer_workers(_) ->
|
|||
#{name => test_resource},
|
||||
#{
|
||||
query_mode => async,
|
||||
batch_size => 5
|
||||
batch_size => 5,
|
||||
batch_time => 100
|
||||
}
|
||||
),
|
||||
%% creation of `?ID1` should not have unblocked `?ID`'s buffer workers
|
||||
|
@ -1096,6 +1215,7 @@ t_retry_batch(_Config) ->
|
|||
#{
|
||||
query_mode => async,
|
||||
batch_size => 5,
|
||||
batch_time => 100,
|
||||
worker_pool_size => 1,
|
||||
resume_interval => 1_000
|
||||
}
|
||||
|
@ -1161,10 +1281,11 @@ t_retry_batch(_Config) ->
|
|||
%% each time should be the original batch (no duplicate
|
||||
%% elements or reordering).
|
||||
ExpectedSeenPayloads = lists:flatten(lists:duplicate(4, Payloads)),
|
||||
?assertEqual(
|
||||
ExpectedSeenPayloads,
|
||||
?projection(n, ?of_kind(connector_demo_batch_inc_individual, Trace))
|
||||
Trace1 = lists:sublist(
|
||||
?projection(n, ?of_kind(connector_demo_batch_inc_individual, Trace)),
|
||||
length(ExpectedSeenPayloads)
|
||||
),
|
||||
?assertEqual(ExpectedSeenPayloads, Trace1),
|
||||
?assertMatch(
|
||||
[#{n := ExpectedCount}],
|
||||
?of_kind(connector_demo_inc_counter, Trace)
|
||||
|
@ -1465,7 +1586,6 @@ t_retry_async_inflight_full(_Config) ->
|
|||
query_mode => async,
|
||||
async_inflight_window => AsyncInflightWindow,
|
||||
batch_size => 1,
|
||||
batch_time => 20,
|
||||
worker_pool_size => 1,
|
||||
resume_interval => ResumeInterval
|
||||
}
|
||||
|
@ -1980,7 +2100,6 @@ t_expiration_async_after_reply(_Config) ->
|
|||
#{
|
||||
query_mode => async,
|
||||
batch_size => 1,
|
||||
batch_time => 100,
|
||||
worker_pool_size => 1,
|
||||
resume_interval => 1_000
|
||||
}
|
||||
|
@ -2203,7 +2322,6 @@ t_expiration_retry(_Config) ->
|
|||
#{
|
||||
query_mode => sync,
|
||||
batch_size => 1,
|
||||
batch_time => 100,
|
||||
worker_pool_size => 1,
|
||||
resume_interval => 300
|
||||
}
|
||||
|
@ -2393,7 +2511,6 @@ t_recursive_flush(_Config) ->
|
|||
#{
|
||||
query_mode => async,
|
||||
batch_size => 1,
|
||||
batch_time => 10_000,
|
||||
worker_pool_size => 1
|
||||
}
|
||||
),
|
||||
|
|
|
@@ -27,7 +27,7 @@
{profiles, [
    {test, [
        {deps, [
            {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.5.0"}}}
            {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.8.5"}}}
        ]}
    ]}
]}.

bin/emqx
@@ -441,9 +441,8 @@ call_nodetool() {
# Control a node
relx_nodetool() {
    command="$1"; shift
    ERL_FLAGS="${ERL_FLAGS:-} $EPMD_ARGS" \
        call_nodetool "$NAME_TYPE" "$NAME" \
        -setcookie "$COOKIE" "$command" "$@"
    ERL_FLAGS="${ERL_FLAGS:-} $EPMD_ARGS -setcookie $COOKIE" \
        call_nodetool "$NAME_TYPE" "$NAME" "$command" "$@"
}

call_hocon() {

@@ -0,0 +1 @@
Add deb package support for `raspbian9` and `raspbian10`.

@@ -0,0 +1 @@
Add deb package support for `raspbian9` and `raspbian10`.

@@ -0,0 +1 @@
Add support for OCSP stapling and CRL check for SSL MQTT listeners.

@@ -0,0 +1 @@
Add support for OCSP Stapling for SSL MQTT listeners.

@@ -0,0 +1,2 @@
When connecting with the flag `clean_start=false`, EMQX will filter out messages that were published by banned clients.
Previously, messages sent by banned clients might still be delivered to subscribers in this scenario.