diff --git a/.ci/apps_tests/docker-compose.yaml b/.ci/apps_tests/docker-compose.yaml
deleted file mode 100644
index b8f84821d..000000000
--- a/.ci/apps_tests/docker-compose.yaml
+++ /dev/null
@@ -1,115 +0,0 @@
-version: '3'
-
-services:
-  erlang:
-    container_name: erlang
-    image: emqx/build-env:erl23.2.2-ubuntu20.04
-    depends_on:
-      - mysql_server
-      - redis_server
-      - mongo_server
-      - pgsql_server
-      - ldap_server
-    networks:
-      - emqx_bridge
-    env_file:
-      - conf.env
-    environment:
-      GITHUB_ACTIONS: ${GITHUB_ACTIONS}
-      GITHUB_TOKEN: ${GITHUB_TOKEN}
-      GITHUB_RUN_ID: ${GITHUB_RUN_ID}
-      GITHUB_SHA: ${GITHUB_SHA}
-      GITHUB_RUN_NUMBER: ${GITHUB_RUN_NUMBER}
-      GITHUB_EVENT_NAME: ${GITHUB_EVENT_NAME}
-      GITHUB_REF: ${GITHUB_REF}
-    volumes:
-      - ../../.:/emqx
-    working_dir: /emqx
-    tty: true
-
-  mysql_server:
-    container_name: mysql
-    image: mysql:${MYSQL_TAG}
-    restart: always
-    ports:
-      - 3306:3306
-    environment:
-      MYSQL_ROOT_PASSWORD: public
-      MYSQL_DATABASE: mqtt
-    command:
-      --bind-address 0.0.0.0
-      --default-authentication-plugin=mysql_native_password
-      --character-set-server=utf8mb4
-      --collation-server=utf8mb4_general_ci
-      --explicit_defaults_for_timestamp=true
-      --lower_case_table_names=1
-      --max_allowed_packet=128M
-      --skip-symbolic-links
-    networks:
-      - emqx_bridge
-
-  redis_server:
-    container_name: redis
-    image: redis:${REDIS_TAG}
-    ports:
-      - 6379:6379
-    command:
-      - redis-server
-      - "--bind 0.0.0.0 ::"
-    restart: always
-    networks:
-      - emqx_bridge
-
-  mongo_server:
-    container_name: mongo
-    image: mongo:${MONGO_TAG}
-    ports:
-      - 27017:27017
-    restart: always
-    environment:
-      MONGO_INITDB_DATABASE: mqtt
-    command:
-      --ipv6
-      --bind_ip_all
-    networks:
-      - emqx_bridge
-
-  pgsql_server:
-    container_name: pgsql
-    image: postgres:${PGSQL_TAG}
-    ports:
-      - 5432:5432
-    restart: always
-    environment:
-      POSTGRES_PASSWORD: public
-      POSTGRES_USER: root
-      POSTGRES_DB: mqtt
-    networks:
-      - emqx_bridge
-
-  ldap_server:
-    container_name: openldap
-    build:
-      context: ../..
-      dockerfile: .ci/apps_tests/openldap/Dockerfile
-      args:
-        LDAP_TAG: ${LDAP_TAG}
-    image: emqx-ldap:1.0
-    ports:
-      - 389:389
-    restart: always
-    networks:
-      - emqx_bridge
-
-networks:
-  emqx_bridge:
-    driver: bridge
-    name: emqx_bridge
-    enable_ipv6: true
-    ipam:
-      driver: default
-      config:
-        - subnet: 172.100.239.0/24
-          gateway: 172.100.239.1
-        - subnet: 2001:3200:3200::/64
-          gateway: 2001:3200:3200::1
diff --git a/.ci/apps_tests/openldap/Dockerfile b/.ci/apps_tests/openldap/Dockerfile
deleted file mode 100644
index f15a48e69..000000000
--- a/.ci/apps_tests/openldap/Dockerfile
+++ /dev/null
@@ -1,26 +0,0 @@
-FROM buildpack-deps:stretch
-
-ARG LDAP_TAG=2.4.50
-
-RUN apt-get update && apt-get install -y groff groff-base
-RUN wget ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-${LDAP_TAG}.tgz \
-    && gunzip -c openldap-${LDAP_TAG}.tgz | tar xvfB - \
-    && cd openldap-${LDAP_TAG} \
-    && ./configure && make depend && make && make install \
-    && cd .. && rm -rf openldap-${LDAP_TAG}
-
-COPY .ci/apps_tests/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf
-COPY apps/emqx_auth_ldap/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
-COPY apps/emqx_auth_ldap/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
-COPY apps/emqx_auth_ldap/test/certs/*.pem /usr/local/etc/openldap/
-
-RUN mkdir -p /usr/local/etc/openldap/data \
-    && slapadd -l /usr/local/etc/openldap/schema/emqx.io.ldif -f /usr/local/etc/openldap/slapd.conf
-
-WORKDIR /usr/local/etc/openldap
-
-EXPOSE 389 636
-
-ENTRYPOINT ["/usr/local/libexec/slapd", "-h", "ldap:/// ldaps:///", "-d", "3", "-f", "/usr/local/etc/openldap/slapd.conf"]
-
-CMD []
diff --git a/.ci/build_packages/Dockerfile b/.ci/build_packages/Dockerfile
index 197b7e731..92450440d 100644
--- a/.ci/build_packages/Dockerfile
+++ b/.ci/build_packages/Dockerfile
@@ -1,4 +1,4 @@
-ARG BUILD_FROM=emqx/build-env:erl23.2.2-ubuntu20.04
+ARG BUILD_FROM=emqx/build-env:erl23.2.7-ubuntu20.04
 FROM ${BUILD_FROM}
 
 ARG EMQX_NAME=emqx
diff --git a/.ci/build_packages/tests.sh b/.ci/build_packages/tests.sh
index 8ae3cc838..1f9ca2651 100755
--- a/.ci/build_packages/tests.sh
+++ b/.ci/build_packages/tests.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
 set -x -e -u
 export CODE_PATH=${CODE_PATH:-"/emqx"}
 export EMQX_NAME=${EMQX_NAME:-"emqx"}
@@ -29,7 +29,7 @@ emqx_test(){
         sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins
 
         echo "running ${packagename} start"
-        "${PACKAGE_PATH}"/emqx/bin/emqx start || tail "${PACKAGE_PATH}"/emqx/log/erlang.log.1
+        "${PACKAGE_PATH}"/emqx/bin/emqx start || ( tail "${PACKAGE_PATH}"/emqx/log/emqx.log.1 && exit 1 )
         IDLE_TIME=0
         while [ -z "$("${PACKAGE_PATH}"/emqx/bin/emqx_ctl status |grep 'is running'|awk '{print $1}')" ]
         do
@@ -101,7 +101,7 @@ running_test(){
         EMQX_MQTT__MAX_TOPIC_ALIAS=10
 
     sed -i '/emqx_telemetry/d' /var/lib/emqx/loaded_plugins
-    emqx start || tail /var/log/emqx/erlang.log.1
+    emqx start || ( tail /var/log/emqx/emqx.log.1 && exit 1 )
     IDLE_TIME=0
     while [ -z "$(emqx_ctl status |grep 'is running'|awk '{print $1}')" ]
     do
@@ -118,9 +118,8 @@ running_test(){
     emqx stop || kill "$(ps -ef | grep -E '\-progname\s.+emqx\s' |awk '{print $2}')"
 
     if [ "$(sed -n '/^ID=/p' /etc/os-release | sed -r 's/ID=(.*)/\1/g' | sed 's/"//g')" = ubuntu ] \
-        || [ "$(sed -n '/^ID=/p' /etc/os-release | sed -r 's/ID=(.*)/\1/g' | sed 's/"//g')" = debian ] \
-        || [ "$(sed -n '/^ID=/p' /etc/os-release | sed -r 's/ID=(.*)/\1/g' | sed 's/"//g')" = raspbian ];then
-        service emqx start || tail /var/log/emqx/erlang.log.1
+        || [ "$(sed -n '/^ID=/p' /etc/os-release | sed -r 's/ID=(.*)/\1/g' | sed 's/"//g')" = debian ] ;then
+        service emqx start || ( tail /var/log/emqx/emqx.log.1 && exit 1 )
         IDLE_TIME=0
         while [ -z "$(emqx_ctl status |grep 'is running'|awk '{print $1}')" ]
         do
@@ -144,7 +143,7 @@ relup_test(){
     for var in "${EMQX_NAME}"-*-"$(uname -m)".zip;do
         packagename=$(basename "${var}")
         unzip "$packagename"
-        ./emqx/bin/emqx start
+        ./emqx/bin/emqx start || ( tail emqx/log/emqx.log.1 && exit 1 )
         ./emqx/bin/emqx_ctl status
         ./emqx/bin/emqx versions
         cp "${PACKAGE_PATH}/${EMQX_NAME}"-*-"${TARGET_VERSION}-$(uname -m)".zip ./emqx/releases
diff --git a/.ci/compatibility_tests/.env b/.ci/compatibility_tests/.env
deleted file mode 100644
index 2ac286e98..000000000
--- a/.ci/compatibility_tests/.env
+++ /dev/null
@@ -1,5 +0,0 @@
-MYSQL_TAG=5.7
-REDIS_TAG=6
-MONGO_TAG=4.1
-PGSQL_TAG=11
-LDAP_TAG=2.4.50
diff --git a/.ci/compatibility_tests/docker-compose-mongo-tls.yaml b/.ci/compatibility_tests/docker-compose-mongo-tls.yaml
deleted file mode 100644 index 1611534f6..000000000 --- a/.ci/compatibility_tests/docker-compose-mongo-tls.yaml +++ /dev/null @@ -1,43 +0,0 @@ -version: '3' - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../../:/emqx - working_dir: /emqx - networks: - - emqx_bridge - depends_on: - - mongo_server - tty: true - - mongo_server: - container_name: mongo - image: mongo:${MONGO_TAG} - restart: always - environment: - MONGO_INITDB_DATABASE: mqtt - volumes: - - ../../apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE_data/mongodb.pem/:/etc/certs/mongodb.pem - networks: - - emqx_bridge - command: - --ipv6 - --bind_ip_all - --sslMode requireSSL - --sslPEMKeyFile /etc/certs/mongodb.pem - -networks: - emqx_bridge: - driver: bridge - name: emqx_bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.100.100.0/24 - gateway: 172.100.100.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/compatibility_tests/docker-compose-mongo.yaml b/.ci/compatibility_tests/docker-compose-mongo.yaml deleted file mode 100644 index 2f769ac63..000000000 --- a/.ci/compatibility_tests/docker-compose-mongo.yaml +++ /dev/null @@ -1,39 +0,0 @@ -version: '3' - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../..:/emqx - working_dir: /emqx - networks: - - emqx_bridge - depends_on: - - mongo_server - tty: true - - mongo_server: - container_name: mongo - image: mongo:${MONGO_TAG} - restart: always - environment: - MONGO_INITDB_DATABASE: mqtt - networks: - - emqx_bridge - command: - --ipv6 - --bind_ip_all - -networks: - emqx_bridge: - driver: bridge - name: emqx_bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.100.100.0/24 - gateway: 172.100.100.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/compatibility_tests/docker-compose-mysql.yaml b/.ci/compatibility_tests/docker-compose-mysql.yaml deleted file mode 100644 index 1f285cc5e..000000000 --- a/.ci/compatibility_tests/docker-compose-mysql.yaml +++ /dev/null @@ -1,46 +0,0 @@ -version: '3' - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../../:/emqx - working_dir: /emqx - networks: - - emqx_bridge - depends_on: - - mysql_server - tty: true - - mysql_server: - container_name: mysql - image: mysql:${MYSQL_TAG} - restart: always - environment: - MYSQL_ROOT_PASSWORD: public - MYSQL_DATABASE: mqtt - networks: - - emqx_bridge - command: - --bind-address "::" - --default-authentication-plugin=mysql_native_password - --character-set-server=utf8mb4 - --collation-server=utf8mb4_general_ci - --explicit_defaults_for_timestamp=true - --lower_case_table_names=1 - --max_allowed_packet=128M - --skip-symbolic-links - -networks: - emqx_bridge: - driver: bridge - name: emqx_bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.100.100.0/24 - gateway: 172.100.100.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/compatibility_tests/docker-compose-pgsql.yaml b/.ci/compatibility_tests/docker-compose-pgsql.yaml deleted file mode 100644 index c5492d971..000000000 --- a/.ci/compatibility_tests/docker-compose-pgsql.yaml +++ /dev/null @@ -1,38 +0,0 @@ -version: '3' - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../../:/emqx - working_dir: /emqx - networks: - - emqx_bridge - depends_on: - - pgsql_server - tty: 
true - - pgsql_server: - container_name: pgsql - image: postgres:${PGSQL_TAG} - restart: always - environment: - POSTGRES_PASSWORD: public - POSTGRES_USER: root - POSTGRES_DB: mqtt - networks: - - emqx_bridge - -networks: - emqx_bridge: - driver: bridge - name: emqx_bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.100.100.0/24 - gateway: 172.100.100.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/compatibility_tests/docker-compose-redis-cluster-tls.yaml b/.ci/compatibility_tests/docker-compose-redis-cluster-tls.yaml deleted file mode 100644 index 06518854f..000000000 --- a/.ci/compatibility_tests/docker-compose-redis-cluster-tls.yaml +++ /dev/null @@ -1,41 +0,0 @@ -version: '2.4' -# network configuration is limited in version 3 -# https://github.com/docker/compose/issues/4958 - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../..:/emqx - networks: - - app_net - depends_on: - - redis_cluster - working_dir: /emqx - tty: true - - redis_cluster: - container_name: redis - image: redis:${REDIS_TAG} - volumes: - - ../../apps/emqx_auth_redis/test/emqx_auth_redis_SUITE_data/certs:/tls - - ./redis/:/data/conf - command: bash -c "/bin/bash /data/conf/redis.sh --node cluster --tls-enabled && while true; do echo 1; sleep 1; done" - networks: - app_net: - # Assign a public address. Erlang container cannot find cluster nodes by network-scoped alias (redis_cluster). - ipv4_address: 172.16.239.10 - ipv6_address: 2001:3200:3200::20 - -networks: - app_net: - driver: bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.16.239.0/24 - gateway: 172.16.239.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/compatibility_tests/docker-compose-redis-cluster.yaml b/.ci/compatibility_tests/docker-compose-redis-cluster.yaml deleted file mode 100644 index 213a06866..000000000 --- a/.ci/compatibility_tests/docker-compose-redis-cluster.yaml +++ /dev/null @@ -1,40 +0,0 @@ -version: '2.4' -# network configuration is limited in version 3 -# https://github.com/docker/compose/issues/4958 - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../..:/emqx - networks: - - app_net - depends_on: - - redis_cluster - working_dir: /emqx - tty: true - - redis_cluster: - image: redis:${REDIS_TAG} - container_name: redis - volumes: - - ./redis/:/data/conf - command: bash -c "/bin/bash /data/conf/redis.sh --node cluster && while true; do echo 1; sleep 1; done" - networks: - app_net: - # Assign a public address. Erlang container cannot find cluster nodes by network-scoped alias (redis_cluster). 
- ipv4_address: 172.16.239.10 - ipv6_address: 2001:3200:3200::20 - -networks: - app_net: - driver: bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.16.239.0/24 - gateway: 172.16.239.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/compatibility_tests/docker-compose-redis-sentinel.yaml b/.ci/compatibility_tests/docker-compose-redis-sentinel.yaml deleted file mode 100644 index b2b58fefe..000000000 --- a/.ci/compatibility_tests/docker-compose-redis-sentinel.yaml +++ /dev/null @@ -1,40 +0,0 @@ -version: '2.4' -# network configuration is limited in version 3 -# https://github.com/docker/compose/issues/4958 - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../..:/emqx - networks: - - app_net - depends_on: - - redis_cluster - working_dir: /emqx - tty: true - - redis_cluster: - container_name: redis - image: redis:${REDIS_TAG} - volumes: - - ./redis/:/data/conf - command: bash -c "/bin/bash /data/conf/redis.sh --node sentinel && while true; do echo 1; sleep 1; done" - networks: - app_net: - # Assign a public address. Erlang container cannot find cluster nodes by network-scoped alias (redis_cluster). - ipv4_address: 172.16.239.10 - ipv6_address: 2001:3200:3200::20 - -networks: - app_net: - driver: bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.16.239.0/24 - gateway: 172.16.239.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/compatibility_tests/docker-compose-redis-single-tls.yaml b/.ci/compatibility_tests/docker-compose-redis-single-tls.yaml deleted file mode 100644 index 03d643754..000000000 --- a/.ci/compatibility_tests/docker-compose-redis-single-tls.yaml +++ /dev/null @@ -1,43 +0,0 @@ -version: '3' - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../..:/emqx - networks: - - emqx_bridge - depends_on: - - redis_server - working_dir: /emqx - tty: true - - redis_server: - container_name: redis - image: redis:${REDIS_TAG} - volumes: - - ../../apps/emqx_auth_redis/test/emqx_auth_redis_SUITE_data/certs:/tls - command: - - redis-server - - "--bind 0.0.0.0 ::" - - --tls-port 6380 - - --tls-cert-file /tls/redis.crt - - --tls-key-file /tls/redis.key - - --tls-ca-cert-file /tls/ca.crt - restart: always - networks: - - emqx_bridge - -networks: - emqx_bridge: - driver: bridge - name: emqx_bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.100.100.0/24 - gateway: 172.100.100.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/compatibility_tests/docker-compose-redis-single.yaml b/.ci/compatibility_tests/docker-compose-redis-single.yaml deleted file mode 100644 index 5d7acb865..000000000 --- a/.ci/compatibility_tests/docker-compose-redis-single.yaml +++ /dev/null @@ -1,37 +0,0 @@ -version: '3' - -services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../..:/emqx - networks: - - emqx_bridge - depends_on: - - redis_server - working_dir: /emqx - tty: true - - redis_server: - container_name: redis - image: redis:${REDIS_TAG} - command: - - redis-server - - "--bind 0.0.0.0 ::" - restart: always - networks: - - emqx_bridge - -networks: - emqx_bridge: - driver: bridge - name: emqx_bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.100.100.0/24 - gateway: 172.100.100.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git 
a/.ci/compatibility_tests/openldap/slapd.conf b/.ci/compatibility_tests/openldap/slapd.conf deleted file mode 100644 index d6ba20caa..000000000 --- a/.ci/compatibility_tests/openldap/slapd.conf +++ /dev/null @@ -1,16 +0,0 @@ -include /usr/local/etc/openldap/schema/core.schema -include /usr/local/etc/openldap/schema/cosine.schema -include /usr/local/etc/openldap/schema/inetorgperson.schema -include /usr/local/etc/openldap/schema/ppolicy.schema -include /usr/local/etc/openldap/schema/emqx.schema - -TLSCACertificateFile /usr/local/etc/openldap/cacert.pem -TLSCertificateFile /usr/local/etc/openldap/cert.pem -TLSCertificateKeyFile /usr/local/etc/openldap/key.pem - -database bdb -suffix "dc=emqx,dc=io" -rootdn "cn=root,dc=emqx,dc=io" -rootpw {SSHA}eoF7NhNrejVYYyGHqnt+MdKNBh4r1w3W - -directory /usr/local/etc/openldap/data diff --git a/.ci/compatibility_tests/redis/redis.conf b/.ci/compatibility_tests/redis/redis.conf deleted file mode 100644 index 27eabdef5..000000000 --- a/.ci/compatibility_tests/redis/redis.conf +++ /dev/null @@ -1,2 +0,0 @@ -daemonize yes -bind 0.0.0.0 :: \ No newline at end of file diff --git a/.ci/apps_tests/.env b/.ci/docker-compose-file/.env similarity index 100% rename from .ci/apps_tests/.env rename to .ci/docker-compose-file/.env diff --git a/.ci/apps_tests/conf.env b/.ci/docker-compose-file/conf.env similarity index 92% rename from .ci/apps_tests/conf.env rename to .ci/docker-compose-file/conf.env index eb5a98277..93dfecd2b 100644 --- a/.ci/apps_tests/conf.env +++ b/.ci/docker-compose-file/conf.env @@ -1,6 +1,5 @@ EMQX_AUTH__LDAP__SERVERS=ldap_server EMQX_AUTH__MONGO__SERVER=mongo_server:27017 -EMQX_AUTH__REDIS__SERVER=redis_server:6379 EMQX_AUTH__MYSQL__SERVER=mysql_server:3306 EMQX_AUTH__MYSQL__USERNAME=root EMQX_AUTH__MYSQL__PASSWORD=public @@ -9,4 +8,6 @@ EMQX_AUTH__PGSQL__SERVER=pgsql_server:5432 EMQX_AUTH__PGSQL__USERNAME=root EMQX_AUTH__PGSQL__PASSWORD=public EMQX_AUTH__PGSQL__DATABASE=mqtt +EMQX_AUTH__REDIS__SERVER=redis_server:6379 +EMQX_AUTH__REDIS__PASSWORD=public CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_ diff --git a/.ci/docker-compose-file/docker-compose-ldap-tcp.yaml b/.ci/docker-compose-file/docker-compose-ldap-tcp.yaml new file mode 100644 index 000000000..61eab91ec --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-ldap-tcp.yaml @@ -0,0 +1,16 @@ +version: '3.9' + +services: + ldap_server: + container_name: ldap + build: + context: ../.. 
+ dockerfile: .ci/docker-compose-file/openldap/Dockerfile + args: + LDAP_TAG: ${LDAP_TAG} + image: openldap + ports: + - 389:389 + restart: always + networks: + - emqx_bridge diff --git a/.ci/docker-compose-file/docker-compose-mongo-tcp.yaml b/.ci/docker-compose-file/docker-compose-mongo-tcp.yaml new file mode 100644 index 000000000..dee2daff6 --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-mongo-tcp.yaml @@ -0,0 +1,14 @@ +version: '3.9' + +services: + mongo_server: + container_name: mongo + image: mongo:${MONGO_TAG} + restart: always + environment: + MONGO_INITDB_DATABASE: mqtt + networks: + - emqx_bridge + command: + --ipv6 + --bind_ip_all diff --git a/.ci/docker-compose-file/docker-compose-mongo-tls.yaml b/.ci/docker-compose-file/docker-compose-mongo-tls.yaml new file mode 100644 index 000000000..a09bc803d --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-mongo-tls.yaml @@ -0,0 +1,18 @@ +version: '3.9' + +services: + mongo_server: + container_name: mongo + image: mongo:${MONGO_TAG} + restart: always + environment: + MONGO_INITDB_DATABASE: mqtt + volumes: + - ../../apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE_data/mongodb.pem/:/etc/certs/mongodb.pem + networks: + - emqx_bridge + command: + --ipv6 + --bind_ip_all + --sslMode requireSSL + --sslPEMKeyFile /etc/certs/mongodb.pem diff --git a/.ci/docker-compose-file/docker-compose-mysql-tcp.yaml b/.ci/docker-compose-file/docker-compose-mysql-tcp.yaml new file mode 100644 index 000000000..70cc3d242 --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-mysql-tcp.yaml @@ -0,0 +1,20 @@ +version: '3.9' + +services: + mysql_server: + container_name: mysql + image: mysql:${MYSQL_TAG} + restart: always + environment: + MYSQL_ROOT_PASSWORD: public + MYSQL_DATABASE: mqtt + networks: + - emqx_bridge + command: + --bind-address "::" + --character-set-server=utf8mb4 + --collation-server=utf8mb4_general_ci + --explicit_defaults_for_timestamp=true + --lower_case_table_names=1 + --max_allowed_packet=128M + --skip-symbolic-links diff --git a/.ci/compatibility_tests/docker-compose-mysql-tls.yaml b/.ci/docker-compose-file/docker-compose-mysql-tls.yaml similarity index 66% rename from .ci/compatibility_tests/docker-compose-mysql-tls.yaml rename to .ci/docker-compose-file/docker-compose-mysql-tls.yaml index ab9cbeed3..c4d5bd500 100644 --- a/.ci/compatibility_tests/docker-compose-mysql-tls.yaml +++ b/.ci/docker-compose-file/docker-compose-mysql-tls.yaml @@ -1,18 +1,6 @@ -version: '3' +version: '3.9' services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../../:/emqx - working_dir: /emqx - networks: - - emqx_bridge - depends_on: - - mysql_server - tty: true - mysql_server: container_name: mysql image: mysql:${MYSQL_TAG} @@ -20,6 +8,8 @@ services: environment: MYSQL_ROOT_PASSWORD: public MYSQL_DATABASE: mqtt + MYSQL_USER: ssluser + MYSQL_PASSWORD: public volumes: - ../../apps/emqx_auth_mysql/test/emqx_auth_mysql_SUITE_data/ca.pem:/etc/certs/ca-cert.pem - ../../apps/emqx_auth_mysql/test/emqx_auth_mysql_SUITE_data/server-cert.pem:/etc/certs/server-cert.pem @@ -38,15 +28,18 @@ services: --ssl-cert=/etc/certs/server-cert.pem --ssl-key=/etc/certs/server-key.pem -networks: - emqx_bridge: - driver: bridge - name: emqx_bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.100.100.0/24 - gateway: 172.100.100.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 + mysql_client: + container_name: mysql_client + image: mysql:${MYSQL_TAG} + networks: + - emqx_bridge + 
depends_on: + - mysql_server + command: + - /bin/bash + - -c + - | + service mysql start + echo "show tables;" | mysql -h mysql_server -u root -ppublic mqtt mqtt + while [[ $$? -ne 0 ]];do echo "show tables;" | mysql -h mysql_server -u root -ppublic mqtt; done + echo "ALTER USER 'ssluser'@'%' REQUIRE X509;" | mysql -h mysql_server -u root -ppublic mqtt diff --git a/.ci/docker-compose-file/docker-compose-pgsql-tcp.yaml b/.ci/docker-compose-file/docker-compose-pgsql-tcp.yaml new file mode 100644 index 000000000..111209644 --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-pgsql-tcp.yaml @@ -0,0 +1,15 @@ +version: '3.9' + +services: + pgsql_server: + container_name: pgsql + image: postgres:${PGSQL_TAG} + restart: always + environment: + POSTGRES_PASSWORD: public + POSTGRES_USER: root + POSTGRES_DB: mqtt + ports: + - "5432:5432" + networks: + - emqx_bridge diff --git a/.ci/compatibility_tests/docker-compose-pgsql-tls.yaml b/.ci/docker-compose-file/docker-compose-pgsql-tls.yaml similarity index 53% rename from .ci/compatibility_tests/docker-compose-pgsql-tls.yaml rename to .ci/docker-compose-file/docker-compose-pgsql-tls.yaml index 6bb3d321e..72aceed69 100644 --- a/.ci/compatibility_tests/docker-compose-pgsql-tls.yaml +++ b/.ci/docker-compose-file/docker-compose-pgsql-tls.yaml @@ -1,23 +1,11 @@ -version: '3' +version: '3.9' services: - erlang: - container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - volumes: - - ../../:/emqx - working_dir: /emqx - networks: - - emqx_bridge - depends_on: - - pgsql_server - tty: true - pgsql_server: container_name: pgsql build: context: ../.. - dockerfile: .ci/compatibility_tests/pgsql/Dockerfile + dockerfile: .ci/docker-compose-file/pgsql/Dockerfile args: POSTGRES_USER: postgres BUILD_FROM: postgres:${PGSQL_TAG} @@ -42,16 +30,3 @@ services: - hba_file=/var/lib/postgresql/pg_hba.conf networks: - emqx_bridge - -networks: - emqx_bridge: - driver: bridge - name: emqx_bridge - enable_ipv6: true - ipam: - driver: default - config: - - subnet: 172.100.100.0/24 - gateway: 172.100.100.1 - - subnet: 2001:3200:3200::/64 - gateway: 2001:3200:3200::1 diff --git a/.ci/docker-compose-file/docker-compose-redis-cluster-tcp.yaml b/.ci/docker-compose-file/docker-compose-redis-cluster-tcp.yaml new file mode 100644 index 000000000..7ecc686bc --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-redis-cluster-tcp.yaml @@ -0,0 +1,11 @@ +version: '3.9' + +services: + redis_cluster: + image: redis:${REDIS_TAG} + container_name: redis + volumes: + - ./redis/:/data/conf + command: bash -c "/bin/bash /data/conf/redis.sh --node cluster && tail -f /var/log/redis-server.log" + networks: + - emqx_bridge diff --git a/.ci/docker-compose-file/docker-compose-redis-cluster-tls.yaml b/.ci/docker-compose-file/docker-compose-redis-cluster-tls.yaml new file mode 100644 index 000000000..9f41e7fed --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-redis-cluster-tls.yaml @@ -0,0 +1,12 @@ +version: '3.9' + +services: + redis_cluster: + container_name: redis + image: redis:${REDIS_TAG} + volumes: + - ../../apps/emqx_auth_redis/test/emqx_auth_redis_SUITE_data/certs:/tls + - ./redis/:/data/conf + command: bash -c "/bin/bash /data/conf/redis.sh --node cluster --tls-enabled && tail -f /var/log/redis-server.log" + networks: + - emqx_bridge diff --git a/.ci/docker-compose-file/docker-compose-redis-sentinel-tcp.yaml b/.ci/docker-compose-file/docker-compose-redis-sentinel-tcp.yaml new file mode 100644 index 000000000..2c391be04 --- /dev/null +++ 
b/.ci/docker-compose-file/docker-compose-redis-sentinel-tcp.yaml @@ -0,0 +1,11 @@ +version: '3.9' + +services: + redis_cluster: + container_name: redis + image: redis:${REDIS_TAG} + volumes: + - ./redis/:/data/conf + command: bash -c "/bin/bash /data/conf/redis.sh --node sentinel && tail -f /var/log/redis-server.log" + networks: + - emqx_bridge diff --git a/.ci/docker-compose-file/docker-compose-redis-single-tcp.yaml b/.ci/docker-compose-file/docker-compose-redis-single-tcp.yaml new file mode 100644 index 000000000..92a3fcf7d --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-redis-single-tcp.yaml @@ -0,0 +1,13 @@ +version: '3.9' + +services: + redis_server: + container_name: redis + image: redis:${REDIS_TAG} + command: + - redis-server + - "--bind 0.0.0.0 ::" + - --requirepass public + restart: always + networks: + - emqx_bridge diff --git a/.ci/docker-compose-file/docker-compose-redis-single-tls.yaml b/.ci/docker-compose-file/docker-compose-redis-single-tls.yaml new file mode 100644 index 000000000..814a0f1cb --- /dev/null +++ b/.ci/docker-compose-file/docker-compose-redis-single-tls.yaml @@ -0,0 +1,19 @@ +version: '3.9' + +services: + redis_server: + container_name: redis + image: redis:${REDIS_TAG} + volumes: + - ../../apps/emqx_auth_redis/test/emqx_auth_redis_SUITE_data/certs:/tls + command: + - redis-server + - "--bind 0.0.0.0 ::" + - --requirepass public + - --tls-port 6380 + - --tls-cert-file /tls/redis.crt + - --tls-key-file /tls/redis.key + - --tls-ca-cert-file /tls/ca.crt + restart: always + networks: + - emqx_bridge diff --git a/.ci/compatibility_tests/docker-compose-ldap.yaml b/.ci/docker-compose-file/docker-compose.yaml similarity index 50% rename from .ci/compatibility_tests/docker-compose-ldap.yaml rename to .ci/docker-compose-file/docker-compose.yaml index 33b37e00c..1f9345bb7 100644 --- a/.ci/compatibility_tests/docker-compose-ldap.yaml +++ b/.ci/docker-compose-file/docker-compose.yaml @@ -1,32 +1,26 @@ -version: '3' +version: '3.9' services: erlang: container_name: erlang - image: emqx/build-env:erl23.2.2-ubuntu20.04 - depends_on: - - ldap_server + image: emqx/build-env:erl23.2.7-ubuntu20.04 + env_file: + - conf.env + environment: + GITHUB_ACTIONS: ${GITHUB_ACTIONS} + GITHUB_TOKEN: ${GITHUB_TOKEN} + GITHUB_RUN_ID: ${GITHUB_RUN_ID} + GITHUB_SHA: ${GITHUB_SHA} + GITHUB_RUN_NUMBER: ${GITHUB_RUN_NUMBER} + GITHUB_EVENT_NAME: ${GITHUB_EVENT_NAME} + GITHUB_REF: ${GITHUB_REF} networks: - emqx_bridge volumes: - - ../../.:/emqx + - ../..:/emqx working_dir: /emqx tty: true - ldap_server: - container_name: ldap - build: - context: ../.. - dockerfile: .ci/compatibility_tests/openldap/Dockerfile - args: - LDAP_TAG: ${LDAP_TAG} - image: openldap - ports: - - 389:389 - restart: always - networks: - - emqx_bridge - networks: emqx_bridge: driver: bridge diff --git a/.ci/compatibility_tests/openldap/Dockerfile b/.ci/docker-compose-file/openldap/Dockerfile similarity index 93% rename from .ci/compatibility_tests/openldap/Dockerfile rename to .ci/docker-compose-file/openldap/Dockerfile index fa15ab5eb..adbb80800 100644 --- a/.ci/compatibility_tests/openldap/Dockerfile +++ b/.ci/docker-compose-file/openldap/Dockerfile @@ -9,7 +9,7 @@ RUN wget ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-${LDAP_TA && ./configure && make depend && make && make install \ && cd .. 
&& rm -rf openldap-${LDAP_TAG} -COPY .ci/compatibility_tests/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf +COPY .ci/docker-compose-file/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf COPY apps/emqx_auth_ldap/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif COPY apps/emqx_auth_ldap/emqx.schema /usr/local/etc/openldap/schema/emqx.schema COPY apps/emqx_auth_ldap/test/certs/*.pem /usr/local/etc/openldap/ diff --git a/.ci/apps_tests/openldap/slapd.conf b/.ci/docker-compose-file/openldap/slapd.conf similarity index 100% rename from .ci/apps_tests/openldap/slapd.conf rename to .ci/docker-compose-file/openldap/slapd.conf diff --git a/.ci/compatibility_tests/pgsql/Dockerfile b/.ci/docker-compose-file/pgsql/Dockerfile similarity index 66% rename from .ci/compatibility_tests/pgsql/Dockerfile rename to .ci/docker-compose-file/pgsql/Dockerfile index ca44acffa..e4c973258 100644 --- a/.ci/compatibility_tests/pgsql/Dockerfile +++ b/.ci/docker-compose-file/pgsql/Dockerfile @@ -1,10 +1,10 @@ ARG BUILD_FROM=postgres:11 FROM ${BUILD_FROM} ARG POSTGRES_USER=postgres -COPY --chown=$POSTGRES_USER .ci/compatibility_tests/pgsql/pg_hba.conf /var/lib/postgresql/pg_hba.conf -COPY --chown=$POSTGRES_USER apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server.key /var/lib/postgresql/server.key -COPY --chown=$POSTGRES_USER apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server.crt /var/lib/postgresql/server.crt -COPY --chown=$POSTGRES_USER apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/root.crt /var/lib/postgresql/root.crt +COPY --chown=$POSTGRES_USER .ci/docker-compose-file/pgsql/pg_hba.conf /var/lib/postgresql/pg_hba.conf +COPY --chown=$POSTGRES_USER apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server-key.pem /var/lib/postgresql/server.key +COPY --chown=$POSTGRES_USER apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server-cert.pem /var/lib/postgresql/server.crt +COPY --chown=$POSTGRES_USER apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/ca.pem /var/lib/postgresql/root.crt RUN chmod 600 /var/lib/postgresql/pg_hba.conf RUN chmod 600 /var/lib/postgresql/server.key RUN chmod 600 /var/lib/postgresql/server.crt diff --git a/.ci/compatibility_tests/pgsql/pg_hba.conf b/.ci/docker-compose-file/pgsql/pg_hba.conf similarity index 100% rename from .ci/compatibility_tests/pgsql/pg_hba.conf rename to .ci/docker-compose-file/pgsql/pg_hba.conf diff --git a/.ci/compatibility_tests/redis/redis-tls.conf b/.ci/docker-compose-file/redis/redis-tls.conf similarity index 51% rename from .ci/compatibility_tests/redis/redis-tls.conf rename to .ci/docker-compose-file/redis/redis-tls.conf index 3ef09f315..8e146f9e7 100644 --- a/.ci/compatibility_tests/redis/redis-tls.conf +++ b/.ci/docker-compose-file/redis/redis-tls.conf @@ -1,5 +1,7 @@ daemonize yes bind 0.0.0.0 :: +logfile /var/log/redis-server.log tls-cert-file /tls/redis.crt tls-key-file /tls/redis.key -tls-ca-cert-file /tls/ca.crt \ No newline at end of file +tls-ca-cert-file /tls/ca.crt +requirepass public diff --git a/.ci/docker-compose-file/redis/redis.conf b/.ci/docker-compose-file/redis/redis.conf new file mode 100644 index 000000000..d6f158971 --- /dev/null +++ b/.ci/docker-compose-file/redis/redis.conf @@ -0,0 +1,4 @@ +daemonize yes +bind 0.0.0.0 :: +logfile /var/log/redis-server.log +requirepass public diff --git a/.ci/compatibility_tests/redis/redis.sh b/.ci/docker-compose-file/redis/redis.sh similarity index 89% rename from .ci/compatibility_tests/redis/redis.sh rename to .ci/docker-compose-file/redis/redis.sh index 
a233659ee..b03eca7c6 100755 --- a/.ci/compatibility_tests/redis/redis.sh +++ b/.ci/docker-compose-file/redis/redis.sh @@ -1,5 +1,9 @@ #!/bin/bash +set -x + +LOCAL_IP=$(hostname -i | grep -oE '((25[0-5]|(2[0-4]|1[0-9]|[1-9]|)[0-9])\.){3}(25[0-5]|(2[0-4]|1[0-9]|[1-9]|)[0-9])' | head -n 1) + node=single tls=false while [[ $# -gt 0 ]] @@ -48,9 +52,9 @@ elif [ "${node}" = "sentinel" ] ; then redis-server /data/conf/redis.conf --port 7000 --cluster-config-file /data/conf/nodes.7000.conf \ --cluster-enabled no; redis-server /data/conf/redis.conf --port 7001 --cluster-config-file /data/conf/nodes.7001.conf \ - --cluster-enabled no --slaveof 172.16.239.10 7000; + --cluster-enabled no --slaveof "$LOCAL_IP" 7000; redis-server /data/conf/redis.conf --port 7002 --cluster-config-file /data/conf/nodes.7002.conf \ - --cluster-enabled no --slaveof 172.16.239.10 7000; + --cluster-enabled no --slaveof "$LOCAL_IP" 7000; fi REDIS_LOAD_FLG=true; @@ -76,7 +80,7 @@ do continue; fi if [ "${node}" = "cluster" ] ; then - yes "yes" | redis-cli --cluster create 172.16.239.10:7000 172.16.239.10:7001 172.16.239.10:7002; + yes "yes" | redis-cli --cluster create "$LOCAL_IP:7000" "$LOCAL_IP:7001" "$LOCAL_IP:7002" --pass public --no-auth-warning; elif [ "${node}" = "sentinel" ] ; then cp /data/conf/sentinel.conf /_sentinel.conf redis-server /_sentinel.conf --sentinel; diff --git a/.ci/compatibility_tests/redis/sentinel.conf b/.ci/docker-compose-file/redis/sentinel.conf similarity index 66% rename from .ci/compatibility_tests/redis/sentinel.conf rename to .ci/docker-compose-file/redis/sentinel.conf index c3f96c1ff..146ad80fe 100644 --- a/.ci/compatibility_tests/redis/sentinel.conf +++ b/.ci/docker-compose-file/redis/sentinel.conf @@ -1,3 +1,4 @@ port 26379 dir /tmp sentinel monitor mymaster 172.16.239.10 7000 1 +logfile /var/log/redis-server.log diff --git a/.ci/fvt_tests/.env b/.ci/fvt_tests/.env new file mode 100644 index 000000000..26b92be81 --- /dev/null +++ b/.ci/fvt_tests/.env @@ -0,0 +1 @@ +TARGET=emqx/emqx diff --git a/.ci/fvt_tests/docker-compose.yaml b/.ci/fvt_tests/docker-compose.yaml index 22d48bef7..6f2ad1be2 100644 --- a/.ci/fvt_tests/docker-compose.yaml +++ b/.ci/fvt_tests/docker-compose.yaml @@ -3,7 +3,7 @@ version: '3' services: emqx1: container_name: node1.emqx.io - image: emqx/emqx:build-alpine-amd64 + image: ${TARGET}:build-alpine-amd64 environment: - "EMQX_NAME=emqx" - "EMQX_HOST=node1.emqx.io" @@ -11,6 +11,7 @@ services: - "EMQX_CLUSTER__STATIC__SEEDS=emqx@node1.emqx.io, emqx@node2.emqx.io" - "EMQX_ZONE__EXTERNAL__RETRY_INTERVAL=2s" - "EMQX_MQTT__MAX_TOPIC_ALIAS=10" + - "EMQX_LOG__LEVEL=debug" command: - /bin/sh - -c @@ -30,7 +31,7 @@ services: emqx2: container_name: node2.emqx.io - image: emqx/emqx:build-alpine-amd64 + image: ${TARGET}:build-alpine-amd64 environment: - "EMQX_NAME=emqx" - "EMQX_HOST=node2.emqx.io" @@ -38,6 +39,7 @@ services: - "EMQX_CLUSTER__STATIC__SEEDS=emqx@node1.emqx.io, emqx@node2.emqx.io" - "EMQX_ZONE__EXTERNAL__RETRY_INTERVAL=2s" - "EMQX_MQTT__MAX_TOPIC_ALIAS=10" + - "EMQX_LOG__LEVEL=debug" command: - /bin/sh - -c @@ -46,7 +48,7 @@ services: sed -i '/emqx_telemetry/d' /opt/emqx/data/loaded_plugins /opt/emqx/bin/emqx foreground healthcheck: - test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"] + test: ["CMD", "/opt/emqx/bin/emqx", "ping"] interval: 5s timeout: 25s retries: 5 @@ -64,6 +66,8 @@ services: tty: true networks: emqx-bridge: + volumes: + - ./scripts:/scripts networks: emqx-bridge: diff --git a/.ci/fvt_tests/scripts/pytest.sh b/.ci/fvt_tests/scripts/pytest.sh new file mode 
100755 index 000000000..c93c4a769 --- /dev/null +++ b/.ci/fvt_tests/scripts/pytest.sh @@ -0,0 +1,22 @@ +#!/bin/sh + +## This script is to run emqx cluster smoke tests (fvt) in github action +## This script is executed in pacho_client + +set -x +set +e + +NODE1="node1.emqx.io" +NODE2="node2.emqx.io" + +apk update && apk add git curl +git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho.mqtt.testing +pip install pytest +pytest -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$NODE1" +RESULT=$? +pytest -v /paho.mqtt.testing/interoperability/test_cluster --host1 "$NODE1" --host2 "$NODE2" +RESULT=$(( RESULT + $? )) +pytest -v /paho.mqtt.testing/interoperability/test_client --host "$NODE1" +RESULT=$(( RESULT + $? )) + +exit $RESULT diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..4ed73da9a --- /dev/null +++ b/.gitattributes @@ -0,0 +1,5 @@ +* text=auto +*.* text eol=lf +*.jpg -text +*.png -text +*.pdf -text diff --git a/.github/workflows/build_packages.yaml b/.github/workflows/build_packages.yaml index 2562a725f..a6e16a8e8 100644 --- a/.github/workflows/build_packages.yaml +++ b/.github/workflows/build_packages.yaml @@ -6,71 +6,133 @@ on: push: tags: - v* + - e* release: types: - published + workflow_dispatch: jobs: + prepare: + runs-on: ubuntu-20.04 + container: emqx/build-env:erl23.2.7-ubuntu20.04 + + outputs: + profiles: ${{ steps.set_profile.outputs.profiles}} + + steps: + - uses: actions/checkout@v2 + with: + path: source + - name: set profile + id: set_profile + shell: bash + run: | + if make -C source emqx-ee --dry-run > /dev/null 2>&1; then + echo "::set-output name=profiles::[\"emqx-ee\"]" + else + echo "::set-output name=profiles::[\"emqx\", \"emqx-edge\"]" + fi + - name: get_all_deps + if: endsWith(github.repository, 'emqx') + run: | + make -C source deps-all + zip -ryq source.zip source + - name: get_all_deps + if: endsWith(github.repository, 'enterprise') + run: | + echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials + git config --global credential.helper store + echo "${{ secrets.CI_GIT_TOKEN }}" >> source/scripts/git-token + make -C source deps-all + zip -ryq source.zip source + - uses: actions/upload-artifact@v2 + with: + name: source + path: source.zip + windows: runs-on: windows-2019 + needs: prepare + if: endsWith(github.repository, 'emqx') + + strategy: + matrix: + profile: ${{fromJSON(needs.prepare.outputs.profiles)}} + exclude: + - profile: emqx-edge + steps: - - uses: actions/checkout@v1 + - uses: actions/download-artifact@v2 + with: + name: source + path: . 
+ - name: unzip source code + run: Expand-Archive -Path source.zip -DestinationPath ./ - uses: ilammy/msvc-dev-cmd@v1 - uses: gleam-lang/setup-erlang@v1.1.0 id: install_erlang with: otp-version: 23.2 - name: build + env: + PYTHON: python run: | - # set-executionpolicy remotesigned -s cu - # iex (new-object net.webclient).downloadstring('https://get.scoop.sh') - # # $env:path + ";" + $env:USERPROFILE + "\scoop\shims" + ';C:\Program Files\erl10.4\bin' - # [environment]::SetEnvironmentvariable("Path", ";" + $env:USERPROFILE + "\scoop\shims") - # [environment]::SetEnvironmentvariable("Path", ';C:\Program Files\erl10.4\bin') - # scoop bucket add extras https://github.com/lukesampson/scoop-extras.git - # scoop update - # scoop install sudo curl vcredist2013 - $env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH" $version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" ) if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") { $regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]" - $pkg_name = "emqx-windows-$([regex]::matches($version, $regex).value).zip" + $pkg_name = "${{ matrix.profile }}-windows-$([regex]::matches($version, $regex).value).zip" } else { - $pkg_name = "emqx-windows-$($version -replace '/').zip" + $pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip" } - - make deps-emqx || cat rebar3.crashdump - $rebar3 = $env:USERPROFILE + "\rebar3" - (New-Object System.Net.WebClient).DownloadFile('https://s3.amazonaws.com/rebar3/rebar3', $rebar3) - cd _build/emqx/lib/jiffy/ - escript $rebar3 compile - cd ../../../../ - - make emqx - mkdir -p _packages/emqx - Compress-Archive -Path _build/emqx/rel/emqx -DestinationPath _build/emqx/rel/$pkg_name - mv _build/emqx/rel/$pkg_name _packages/emqx - Get-FileHash -Path "_packages/emqx/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/emqx/$pkg_name.sha256 + cd source + ## We do not build/release bcrypt for windows package + Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/ + if (Test-Path rebar.lock) { + Remove-Item -Force -Path rebar.lock + } + make ${{ matrix.profile }} + mkdir -p _packages/${{ matrix.profile }} + Compress-Archive -Path _build/${{ matrix.profile }}/rel/emqx -DestinationPath _build/${{ matrix.profile }}/rel/$pkg_name + mv _build/${{ matrix.profile }}/rel/$pkg_name _packages/${{ matrix.profile }} + Get-FileHash -Path "_packages/${{ matrix.profile }}/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/${{ matrix.profile }}/$pkg_name.sha256 - name: run emqx + timeout-minutes: 1 run: | - ./_build/emqx/rel/emqx/bin/emqx start - ./_build/emqx/rel/emqx/bin/emqx stop - ./_build/emqx/rel/emqx/bin/emqx install - ./_build/emqx/rel/emqx/bin/emqx uninstall + cd source + ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start + Start-Sleep -s 5 + ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop + ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx install + ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx uninstall - uses: actions/upload-artifact@v1 + if: startsWith(github.ref, 'refs/tags/') with: - name: emqx - path: ./_packages/emqx/. + name: ${{ matrix.profile }} + path: source/_packages/${{ matrix.profile }}/. mac: runs-on: macos-10.15 + needs: prepare + + strategy: + matrix: + profile: ${{fromJSON(needs.prepare.outputs.profiles)}} + exclude: + - profile: emqx-edge + steps: - - uses: actions/checkout@v1 + - uses: actions/download-artifact@v2 + with: + name: source + path: . 
+ - name: unzip source code + run: unzip -q source.zip - name: prepare run: | brew install curl zip unzip gnu-sed kerl unixodbc freetds @@ -79,16 +141,17 @@ jobs: - name: build erlang timeout-minutes: 60 run: | - kerl build 23.2.2 - kerl install 23.2.2 $HOME/.kerl/23.2.2 + kerl build 23.2.7 + kerl install 23.2.7 $HOME/.kerl/23.2.7 - name: build run: | - . $HOME/.kerl/23.2.2/activate - make emqx-pkg + . $HOME/.kerl/23.2.7/activate + make -C source ${{ matrix.profile }}-pkg - name: test run: | - pkg_name=$(basename _packages/emqx/emqx-macos-*.zip) - unzip _packages/emqx/$pkg_name + cd source + pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip) + unzip _packages/${{ matrix.profile }}/$pkg_name gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins ./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ready='no' @@ -107,46 +170,51 @@ jobs: ./emqx/bin/emqx_ctl status ./emqx/bin/emqx stop rm -rf emqx - openssl dgst -sha256 ./_packages/emqx/$pkg_name | awk '{print $2}' > ./_packages/emqx/$pkg_name.sha256 + openssl dgst -sha256 ./_packages/${{ matrix.profile }}/$pkg_name | awk '{print $2}' > ./_packages/${{ matrix.profile }}/$pkg_name.sha256 - uses: actions/upload-artifact@v1 + if: startsWith(github.ref, 'refs/tags/') with: - name: emqx - path: ./_packages/emqx/. + name: ${{ matrix.profile }} + path: source/_packages/${{ matrix.profile }}/. linux: runs-on: ubuntu-20.04 + needs: prepare + strategy: matrix: + profile: ${{fromJSON(needs.prepare.outputs.profiles)}} arch: - - amd64 - - arm64 - emqx: - - emqx - - emqx-edge + - amd64 + - arm64 os: - - ubuntu20.04 - - ubuntu18.04 - - ubuntu16.04 - - debian10 - - debian9 - - opensuse - - centos8 - - centos7 - - centos6 - - raspbian10 - - raspbian9 + - ubuntu20.04 + - ubuntu18.04 + - ubuntu16.04 + - debian10 + - debian9 + # - opensuse + - centos8 + - centos7 + - centos6 + - raspbian10 + - raspbian9 exclude: - - os: raspbian9 - arch: amd64 - - os: raspbian9 - emqx: emqx - - os: raspbian10 - arch: amd64 - - os: raspbian10 - emqx: emqx - os: centos6 arch: arm64 + - os: raspbian9 + arch: amd64 + - os: raspbian10 + arch: amd64 + - os: raspbian9 + profile: emqx + - os: raspbian10 + profile: emqx + - os: raspbian9 + profile: emqx-ee + - os: raspbian10 + profile: emqx-ee defaults: run: @@ -162,71 +230,71 @@ jobs: docker info docker buildx create --use --name mybuild docker run --rm --privileged tonistiigi/binfmt --install all - - uses: actions/checkout@v1 - - name: get deps - env: - ERL_OTP: erl23.2.2 - run: | - docker run -i --rm \ - -e GITHUB_RUN_ID=$GITHUB_RUN_ID \ - -e GITHUB_REF=$GITHUB_REF \ - -v $(pwd):/emqx \ - -w /emqx \ - emqx/build-env:${ERL_OTP}-debian10 \ - bash -c "make deps-all" + - uses: actions/download-artifact@v2 + with: + name: source + path: . 
+ - name: unzip source code + run: unzip -q source.zip - name: downloads emqx zip packages env: - EMQX: ${{ matrix.emqx }} + PROFILE: ${{ matrix.profile }} ARCH: ${{ matrix.arch }} SYSTEM: ${{ matrix.os }} run: | set -e -u -x - if [ $EMQX = "emqx-edge" ];then broker="emqx-edge"; else broker="emqx-ce"; fi - if [ $ARCH = "arm64" ];then arch="aarch64"; else arch="x86_64"; fi + cd source + if [ $PROFILE = "emqx" ];then broker="emqx-ce"; else broker="$PROFILE"; fi + if [ $PROFILE = "emqx-ee" ];then edition='enterprise'; else edition='opensource'; fi - vsn="$(grep -oE '\{vsn, (.*)\}' src/emqx.app.src | sed -r 's/\{vsn, (.*)\}/\1/g' | sed 's/\"//g')" + vsn="$(grep -E "define.+EMQX_RELEASE.+${edition}" include/emqx_release.hrl | cut -d '"' -f2)" pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')" - old_vsns=($(git tag -l "$pre_vsn.[0-9]" | sed "s/$vsn//")) + if [ $PROFILE = "emqx-ee" ]; then + old_vsns=($(git tag -l "e$pre_vsn.[0-9]" | sed "s/e$vsn//")) + else + old_vsns=($(git tag -l "v$pre_vsn.[0-9]" | sed "s/v$vsn//")) + fi - mkdir -p tmp/relup_packages/$EMQX - cd tmp/relup_packages/$EMQX + mkdir -p tmp/relup_packages/$PROFILE + cd tmp/relup_packages/$PROFILE for tag in ${old_vsns[@]};do - if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/v${tag#[e|v]}/$EMQX-$SYSTEM-${tag#[e|v]}-$arch.zip) | grep -oE "^[23]+")" ];then - wget https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/v${tag#[e|v]}/$EMQX-$SYSTEM-${tag#[e|v]}-$arch.zip - wget https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/v${tag#[e|v]}/$EMQX-$SYSTEM-${tag#[e|v]}-$arch.zip.sha256 - echo "$(cat $EMQX-$SYSTEM-${tag#[e|v]}-$arch.zip.sha256) $EMQX-$SYSTEM-${tag#[e|v]}-$arch.zip" | sha256sum -c || exit 1 + if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-${{ secrets.AWS_DEFAULT_REGION }}.amazonaws.com/${{ secrets.AWS_S3_BUCKET }}/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip) | grep -oE "^[23]+")" ];then + wget https://s3-${{ secrets.AWS_DEFAULT_REGION }}.amazonaws.com/${{ secrets.AWS_S3_BUCKET }}/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip + wget https://s3-${{ secrets.AWS_DEFAULT_REGION }}.amazonaws.com/${{ secrets.AWS_S3_BUCKET }}/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256 + echo "$(cat $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256) $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip" | sha256sum -c || exit 1 fi done cd - - name: build emqx packages env: - ERL_OTP: erl23.2.2 - EMQX: ${{ matrix.emqx }} + ERL_OTP: erl23.2.7 + PROFILE: ${{ matrix.profile }} ARCH: ${{ matrix.arch }} SYSTEM: ${{ matrix.os }} run: | - set -e -u -x + set -e -u + cd source docker buildx build --no-cache \ --platform=linux/$ARCH \ -t cross_build_emqx_for_$SYSTEM \ -f .ci/build_packages/Dockerfile \ --build-arg BUILD_FROM=emqx/build-env:$ERL_OTP-$SYSTEM \ - --build-arg EMQX_NAME=$EMQX \ - --output type=tar,dest=/tmp/cross-build-$EMQX-for-$SYSTEM.tar . + --build-arg EMQX_NAME=$PROFILE \ + --output type=tar,dest=/tmp/cross-build-$PROFILE-for-$SYSTEM.tar . 
- mkdir -p /tmp/packages/$EMQX - tar -xvf /tmp/cross-build-$EMQX-for-$SYSTEM.tar --wildcards emqx/_packages/$EMQX/* - mv emqx/_packages/$EMQX/* /tmp/packages/$EMQX/ - rm -rf /tmp/cross-build-$EMQX-for-$SYSTEM.tar + mkdir -p /tmp/packages/$PROFILE + tar -xvf /tmp/cross-build-$PROFILE-for-$SYSTEM.tar --wildcards emqx/_packages/$PROFILE/* + mv emqx/_packages/$PROFILE/* /tmp/packages/$PROFILE/ + rm -rf /tmp/cross-build-$PROFILE-for-$SYSTEM.tar docker rm -f $(docker ps -a -q) docker volume prune -f - name: create sha256 env: - EMQX: ${{ matrix.emqx }} + PROFILE: ${{ matrix.profile}} run: | - if [ -d /tmp/packages/$EMQX ]; then - cd /tmp/packages/$EMQX + if [ -d /tmp/packages/$PROFILE ]; then + cd /tmp/packages/$PROFILE for var in $(ls emqx-* ); do bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256" done @@ -235,52 +303,74 @@ jobs: - uses: actions/upload-artifact@v1 if: startsWith(github.ref, 'refs/tags/') with: - name: ${{ matrix.emqx }} - path: /tmp/packages/${{ matrix.emqx }}/. + name: ${{ matrix.profile }} + path: /tmp/packages/${{ matrix.profile }}/. docker: runs-on: ubuntu-20.04 + needs: prepare + strategy: matrix: + profile: ${{fromJSON(needs.prepare.outputs.profiles)}} arch: - - [amd64, x86_64] - - [arm64v8, aarch64] - - [arm32v7, arm] - - [i386, i386] - - [s390x, s390x] + - [amd64, x86_64] + - [arm64v8, aarch64] + - [arm32v7, arm] + - [i386, i386] + - [s390x, s390x] + exclude: + - profile: emqx-ee + arch: [i386, i386] + - profile: emqx-ee + arch: [s390x, s390x] steps: - - uses: actions/checkout@v1 + - uses: actions/download-artifact@v2 + with: + name: source + path: . + - name: unzip source code + run: unzip -q source.zip - name: build emqx docker image env: + PROFILE: ${{ matrix.profile }} ARCH: ${{ matrix.arch[0] }} QEMU_ARCH: ${{ matrix.arch[1] }} run: | sudo docker run --rm --privileged multiarch/qemu-user-static --reset -p yes - sudo TARGET=emqx/emqx ARCH=$ARCH QEMU_ARCH=$QEMU_ARCH make docker - cd _packages/emqx && for var in $(ls emqx-docker-* ); do sudo bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256"; done && cd - + cd source + sudo TARGET=emqx/$PROFILE ARCH=$ARCH QEMU_ARCH=$QEMU_ARCH make docker + cd _packages/$PROFILE && for var in $(ls ${PROFILE}-docker-* ); do sudo bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256"; done && cd - + - uses: actions/upload-artifact@v1 + if: startsWith(github.ref, 'refs/tags/') + with: + name: ${{ matrix.profile }} + path: source/_packages/${{ matrix.profile }}/. - sudo TARGET=emqx/emqx-edge ARCH=$ARCH QEMU_ARCH=$QEMU_ARCH make docker - cd _packages/emqx-edge && for var in $(ls emqx-edge-docker-* ); do sudo bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256"; done && cd - - - uses: actions/upload-artifact@v1 + delete-artifact: + runs-on: ubuntu-20.04 + needs: [prepare, mac, linux, docker] + steps: + - uses: geekyeggo/delete-artifact@v1 with: - name: emqx - path: ./_packages/emqx/. - - uses: actions/upload-artifact@v1 - with: - name: emqx-edge - path: ./_packages/emqx-edge/. 
+ name: source upload: runs-on: ubuntu-20.04 - needs: [windows, mac, linux, docker] - if: startsWith(github.ref, 'refs/tags/') + needs: [prepare, mac, linux, docker] + + strategy: + matrix: + profile: ${{fromJSON(needs.prepare.outputs.profiles)}} + steps: + - uses: actions/checkout@v2 - name: get_version run: | echo 'version<> $GITHUB_ENV @@ -288,47 +378,38 @@ jobs: echo 'EOF' >> $GITHUB_ENV - uses: actions/download-artifact@v2 with: - name: emqx - path: ./_packages/emqx - - uses: actions/download-artifact@v2 - with: - name: emqx-edge - path: ./_packages/emqx-edge + name: ${{ matrix.profile }} + path: ./_packages/${{ matrix.profile }} - name: install dos2unix run: sudo apt-get update && sudo apt install -y dos2unix - name: get packages run: | - set -e -x -u - for EMQX in emqx emqx-edge; do - cd _packages/$EMQX - for var in $( ls |grep emqx |grep -v sha256); do - dos2unix $var.sha256 - echo "$(cat $var.sha256) $var" | sha256sum -c || exit 1 - done - cd - + set -e -u + cd _packages/${{ matrix.profile }} + for var in $( ls |grep emqx |grep -v sha256); do + dos2unix $var.sha256 + echo "$(cat $var.sha256) $var" | sha256sum -c || exit 1 done + cd - - name: upload aws s3 run: | - set -e -x -u + set -e -u + if [ "${{ matrix.profile }}" == "emqx" ];then + broker="emqx-ce" + else + broker=${{ matrix.profile }} + fi aws configure set aws_access_key_id ${{ secrets.AWS_ACCESS_KEY_ID }} aws configure set aws_secret_access_key ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws configure set default.region us-west-2 + aws configure set default.region ${{ secrets.AWS_DEFAULT_REGION }} - aws s3 cp --recursive _packages/emqx s3://packages.emqx/emqx-ce/${{ env.version }} - aws s3 cp --recursive _packages/emqx-edge s3://packages.emqx/emqx-edge/${{ env.version }} - aws cloudfront create-invalidation --distribution-id E170YEULGLT8XB --paths "/emqx-ce/${{ env.version }}/*,/emqx-edge/${{ env.version }}/*" - - mkdir packages - mv _packages/emqx/* packages - mv _packages/emqx-edge/* packages - - uses: actions/checkout@v2 - with: - path: emqx + aws s3 cp --recursive _packages/${{ matrix.profile }} s3://${{ secrets.AWS_S3_BUCKET }}/$broker/${{ env.version }} + aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CLOUDFRONT_ID }} --paths "/$broker/${{ env.version }}/*" - uses: Rory-Z/upload-release-asset@v1 if: github.event_name == 'release' with: repo: emqx - path: "packages/emqx-*" + path: "_packages/${{ matrix.profile }}/emqx-*" token: ${{ github.token }} - name: update to emqx.io if: github.event_name == 'release' @@ -345,15 +426,22 @@ jobs: if: github.event_name == 'release' run: | set -e -x -u - sudo make -C emqx docker-prepare - cd packages && for var in $(ls |grep docker |grep -v sha256); do unzip $var; sudo docker load < ${var%.*}; rm -f ${var%.*}; done && cd - + sudo make docker-prepare + cd _packages/${{ matrix.profile }} && for var in $(ls |grep docker |grep -v sha256); do unzip $var; sudo docker load < ${var%.*}; rm -f ${var%.*}; done && cd - echo ${{ secrets.DOCKER_HUB_TOKEN }} |sudo docker login -u ${{ secrets.DOCKER_HUB_USER }} --password-stdin - sudo TARGET=emqx/emqx make -C emqx docker-push - sudo TARGET=emqx/emqx make -C emqx docker-manifest-list - sudo TARGET=emqx/emqx-edge make -C emqx docker-push - sudo TARGET=emqx/emqx-edge make -C emqx docker-manifest-list + sudo TARGET=emqx/${{ matrix.profile }} make docker-push + sudo TARGET=emqx/${{ matrix.profile }} make docker-manifest-list - name: update repo.emqx.io - if: github.event_name == 'release' + if: github.event_name == 'release' && 
endsWith(github.repository, 'enterprise') && matrix.profile == 'emqx-ee' + run: | + curl --silent --show-error \ + -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ + -H "Accept: application/vnd.github.v3+json" \ + -X POST \ + -d "{\"ref\":\"v1.0.1\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \ + "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches" + - name: update repo.emqx.io + if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx' run: | curl --silent --show-error \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ @@ -362,7 +450,7 @@ jobs: -d "{\"ref\":\"v1.0.1\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \ "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches" - name: update homebrew packages - if: github.event_name == 'release' + if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx' run: | if [ -z $(echo $version | grep -oE "(alpha|beta|rc)\.[0-9]") ]; then curl --silent --show-error \ @@ -374,7 +462,4 @@ jobs: fi - uses: geekyeggo/delete-artifact@v1 with: - name: emqx - - uses: geekyeggo/delete-artifact@v1 - with: - name: emqx-edge + name: ${{ matrix.profile }} diff --git a/.github/workflows/build_slim_packages.yaml b/.github/workflows/build_slim_packages.yaml index 217e06917..175a08d05 100644 --- a/.github/workflows/build_slim_packages.yaml +++ b/.github/workflows/build_slim_packages.yaml @@ -1,26 +1,42 @@ name: Build slim packages -on: [pull_request] +on: + - pull_request + - workflow_dispatch jobs: build: runs-on: ubuntu-20.04 - + strategy: matrix: erl_otp: - - erl23.2.2 + - erl23.2.7 os: - ubuntu20.04 - - centos8 + - centos7 container: emqx/build-env:${{ matrix.erl_otp }}-${{ matrix.os }} - + steps: - uses: actions/checkout@v1 + - name: prepare + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials + git config --global credential.helper store + echo "${{ secrets.CI_GIT_TOKEN }}" >> ./scripts/git-token + echo "EMQX_NAME=emqx-ee" >> $GITHUB_ENV + else + echo "EMQX_NAME=emqx" >> $GITHUB_ENV + fi - name: build packages - run: make emqx-pkg + run: make ${EMQX_NAME}-pkg - name: pakcages test run: | export CODE_PATH=$GITHUB_WORKSPACE .ci/build_packages/tests.sh + - uses: actions/upload-artifact@v2 + with: + name: ${{ matrix.os }} + path: _packages/**/*.zip diff --git a/.github/workflows/check_deps_integrity.yaml b/.github/workflows/check_deps_integrity.yaml index 91e25e203..4eeefe46c 100644 --- a/.github/workflows/check_deps_integrity.yaml +++ b/.github/workflows/check_deps_integrity.yaml @@ -5,7 +5,7 @@ on: [pull_request] jobs: check_deps_integrity: runs-on: ubuntu-20.04 - container: emqx/build-env:erl23.2.2-ubuntu20.04 + container: emqx/build-env:erl23.2.7-ubuntu20.04 steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/elvis_lint.yaml b/.github/workflows/elvis_lint.yaml index af824f034..1fdbeba87 100644 --- a/.github/workflows/elvis_lint.yaml +++ b/.github/workflows/elvis_lint.yaml @@ -6,6 +6,11 @@ jobs: build: runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 + - name: Set git token + if: endsWith(github.repository, 'enterprise') + run: | + echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials + git config --global credential.helper 
store - run: | ./scripts/elvis-check.sh $GITHUB_BASE_REF diff --git a/.github/workflows/git_sync.yaml b/.github/workflows/git_sync.yaml index 6fe19eae6..93411ac9f 100644 --- a/.github/workflows/git_sync.yaml +++ b/.github/workflows/git_sync.yaml @@ -19,11 +19,24 @@ jobs: destination_branch: ${{ github.ref }} destination_ssh_private_key: "${{ secrets.CI_SSH_PRIVATE_KEY }}" - name: create pull request + id: create_pull_request + run: | + set -euo pipefail + R=$(curl --silent --show-error \ + -H "Accept: application/vnd.github.v3+json" \ + -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ + -X POST \ + -d '{"title": "Sync code into enterprise from opensource", "head": "master", "base":"enterprise"}' \ + https://api.github.com/repos/${{ github.repository_owner }}/emqx-enterprise/pulls) + echo $R | jq + echo "::set-output name=url::$(echo $R | jq '.url')" + - name: request reviewers for a pull request + if: steps.create_pull_request.outputs.url != 'null' run: | set -euo pipefail curl --silent --show-error \ -H "Accept: application/vnd.github.v3+json" \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -X POST \ - -d '{"title": "Sync code into enterprise from opensource", "head": "master", "base":"enterprise"}' \ - https://api.github.com/repos/${{ github.repository_owner }}/emqx-enterprise/pulls + -d '{"team_reviewers":["emqx-devs"]}' \ + ${{ steps.create_pull_request.outputs.url }}/requested_reviewers diff --git a/.github/workflows/run_cts_tests.yaml b/.github/workflows/run_cts_tests.yaml index 0d2d9aa11..8e87e9415 100644 --- a/.github/workflows/run_cts_tests.yaml +++ b/.github/workflows/run_cts_tests.yaml @@ -24,12 +24,14 @@ jobs: steps: - uses: actions/checkout@v1 - - name: setup + - name: docker compose up env: LDAP_TAG: ${{ matrix.ldap_tag }} run: | - docker-compose -f .ci/apps_tests/docker-compose.yaml build --no-cache - docker-compose -f .ci/compatibility_tests/docker-compose-ldap.yaml up -d + docker-compose \ + -f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose.yaml \ + up -d --build - name: setup if: matrix.network_type == 'ipv4' run: | @@ -38,6 +40,11 @@ jobs: if: matrix.network_type == 'ipv6' run: | echo EMQX_AUTH__LDAP__SERVERS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.GlobalIPv6Address}}{{end}}' ldap) >> "$GITHUB_ENV" + - name: set git token + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + docker exec -i erlang bash -c "echo \"https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com\" > /root/.git-credentials && git config --global credential.helper store" + fi - name: run test cases run: | export CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_ @@ -69,24 +76,30 @@ jobs: steps: - uses: actions/checkout@v1 + - name: docker-compose up + run: | + docker-compose \ + -f .ci/docker-compose-file/docker-compose-mongo-${{ matrix.connect_type }}.yaml \ + -f .ci/docker-compose-file/docker-compose.yaml \ + up -d --build - name: setup env: MONGO_TAG: ${{ matrix.mongo_tag }} if: matrix.connect_type == 'tls' run: | - docker-compose -f .ci/compatibility_tests/docker-compose-mongo-tls.yaml up -d cat <<-EOF >> "$GITHUB_ENV" EMQX_AUTH__MONGO__SSL__ENABLE=on - EMQX_AUTH__MONGO__CACERTFILE=/emqx/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE_data/ca.pem - EMQX_AUTH__MONGO__CERTFILE=/emqx/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE_data/client-cert.pem - EMQX_AUTH__MONGO__KEYFILE=/emqx/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE_data/client-key.pem + 
EMQX_AUTH__MONGO__SSL__CACERTFILE=/emqx/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE_data/ca.pem + EMQX_AUTH__MONGO__SSL__CERTFILE=/emqx/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE_data/client-cert.pem + EMQX_AUTH__MONGO__SSL__KEYFILE=/emqx/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE_data/client-key.pem + EMQX_AUTH__MONGO__SSL__VERIFY=true + EMQX_AUTH__MONGO__SSL__SERVER_NAME_INDICATION=disable EOF - name: setup env: MONGO_TAG: ${{ matrix.mongo_tag }} if: matrix.connect_type == 'tcp' run: | - docker-compose -f .ci/compatibility_tests/docker-compose-mongo.yaml up -d echo EMQX_AUTH__MONGO__SSL__ENABLE=off >> "$GITHUB_ENV" - name: setup if: matrix.network_type == 'ipv4' @@ -96,6 +109,11 @@ jobs: if: matrix.network_type == 'ipv6' run: | echo "EMQX_AUTH__MONGO__SERVER=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.GlobalIPv6Address}}{{end}}' mongo):27017" >> "$GITHUB_ENV" + - name: set git token + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + docker exec -i erlang bash -c "echo \"https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com\" > /root/.git-credentials && git config --global credential.helper store" + fi - name: run test cases run: | export CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_ @@ -127,25 +145,44 @@ jobs: steps: - uses: actions/checkout@v1 + - name: docker-compose up + timeout-minutes: 5 + run: | + docker-compose \ + -f .ci/docker-compose-file/docker-compose-mysql-${{ matrix.connect_type }}.yaml \ + -f .ci/docker-compose-file/docker-compose.yaml \ + up -d --build + while [ $(docker ps -a --filter name=client --filter exited=0 | wc -l) \ + != $(docker ps -a --filter name=client | wc -l) ]; do + sleep 5 + done - name: setup env: MYSQL_TAG: ${{ matrix.mysql_tag }} if: matrix.connect_type == 'tls' run: | - docker-compose -f .ci/compatibility_tests/docker-compose-mysql-tls.yaml up -d cat <<-EOF >> "$GITHUB_ENV" EMQX_AUTH__MYSQL__SSL__ENABLE=on + EMQX_AUTH__MYSQL__USERNAME=ssluser + EMQX_AUTH__MYSQL__PASSWORD=public + EMQX_AUTH__MYSQL__DATABASE=mqtt EMQX_AUTH__MYSQL__SSL__CACERTFILE=/emqx/apps/emqx_auth_mysql/test/emqx_auth_mysql_SUITE_data/ca.pem EMQX_AUTH__MYSQL__SSL__CERTFILE=/emqx/apps/emqx_auth_mysql/test/emqx_auth_mysql_SUITE_data/client-cert.pem EMQX_AUTH__MYSQL__SSL__KEYFILE=/emqx/apps/emqx_auth_mysql/test/emqx_auth_mysql_SUITE_data/client-key.pem + EMQX_AUTH__MYSQL__SSL__VERIFY=true + EMQX_AUTH__MYSQL__SSL__SERVER_NAME_INDICATION=disable EOF - name: setup env: MYSQL_TAG: ${{ matrix.mysql_tag }} if: matrix.connect_type == 'tcp' run: | - docker-compose -f .ci/compatibility_tests/docker-compose-mysql.yaml up -d - echo EMQX_AUTH__MYSQL__SSL__ENABLE=off >> "$GITHUB_ENV" + cat <<-EOF >> "$GITHUB_ENV" + EMQX_AUTH__MYSQL__USERNAME=root + EMQX_AUTH__MYSQL__PASSWORD=public + EMQX_AUTH__MYSQL__DATABASE=mqtt + EMQX_AUTH__MYSQL__SSL__ENABLE=off + EOF - name: setup if: matrix.network_type == 'ipv4' run: | @@ -154,12 +191,14 @@ jobs: if: matrix.network_type == 'ipv6' run: | echo "EMQX_AUTH__MYSQL__SERVER=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.GlobalIPv6Address}}{{end}}' mysql):3306" >> "$GITHUB_ENV" + - name: set git token + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + docker exec -i erlang bash -c "echo \"https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com\" > /root/.git-credentials && git config --global credential.helper store" + fi - name: run test cases run: | - export EMQX_AUTH__MYSQL__USERNAME=root \ - EMQX_AUTH__MYSQL__PASSWORD=public \ - EMQX_AUTH__MYSQL__DATABASE=mqtt \ - 
CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_ + export CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_ printenv > .env docker exec -i erlang sh -c "make ensure-rebar3" docker exec -i erlang sh -c "./rebar3 eunit --dir apps/emqx_auth_mysql" @@ -190,23 +229,30 @@ jobs: - tcp steps: - uses: actions/checkout@v1 + - name: docker-compose up + run: | + docker-compose \ + -f .ci/docker-compose-file/docker-compose-pgsql-${{ matrix.connect_type }}.yaml \ + -f .ci/docker-compose-file/docker-compose.yaml \ + up -d --build - name: setup env: PGSQL_TAG: ${{ matrix.pgsql_tag }} if: matrix.connect_type == 'tls' run: | - docker-compose -f .ci/compatibility_tests/docker-compose-pgsql-tls.yaml build --no-cache - docker-compose -f .ci/compatibility_tests/docker-compose-pgsql-tls.yaml up -d cat <<-EOF >> "$GITHUB_ENV" EMQX_AUTH__PGSQL__SSL__ENABLE=on - EMQX_AUTH__PGSQL__SSL__CACERTFILE=/emqx/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/root.crt + EMQX_AUTH__PGSQL__SSL__CACERTFILE=/emqx/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/ca.pem + EMQX_AUTH__PGSQL__SSL__CERTFILE=/emqx/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/client-cert.pem + EMQX_AUTH__PGSQL__SSL__KEYFILE=/emqx/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/client-key.pem + EMQX_AUTH__PGSQL__SSL__VERIFY=true + EMQX_AUTH__PGSQL__SSL__SERVER_NAME_INDICATION=disable EOF - name: setup env: PGSQL_TAG: ${{ matrix.pgsql_tag }} if: matrix.connect_type == 'tcp' run: | - docker-compose -f .ci/compatibility_tests/docker-compose-pgsql.yaml up -d echo EMQX_AUTH__PGSQL__SSL__ENABLE=off >> "$GITHUB_ENV" - name: setup if: matrix.network_type == 'ipv4' @@ -216,6 +262,11 @@ jobs: if: matrix.network_type == 'ipv6' run: | echo "EMQX_AUTH__PGSQL__SERVER=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.GlobalIPv6Address}}{{end}}' pgsql):5432" >> "$GITHUB_ENV" + - name: set git token + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + docker exec -i erlang bash -c "echo \"https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com\" > /root/.git-credentials && git config --global credential.helper store" + fi - name: run test cases run: | export EMQX_AUTH__PGSQL__USERNAME=root \ @@ -250,33 +301,39 @@ jobs: node_type: - single - cluster + exclude: + - redis_tag: 5 + connect_type: tls steps: - uses: actions/checkout@v1 + - name: docker-compose up + run: | + docker-compose \ + -f .ci/docker-compose-file/docker-compose-redis-${{ matrix.node_type }}-${{ matrix.connect_type }}.yaml \ + -f .ci/docker-compose-file/docker-compose.yaml \ + up -d --build - name: setup env: REDIS_TAG: ${{ matrix.redis_tag }} - if: matrix.connect_type == 'tls' && matrix.redis_tag != '5' + if: matrix.connect_type == 'tls' run: | - set -exu - docker-compose -f .ci/compatibility_tests/docker-compose-redis-${{ matrix.node_type }}-tls.yaml up -d cat <<-EOF >> "$GITHUB_ENV" EMQX_AUTH__REDIS__SSL__ENABLE=on EMQX_AUTH__REDIS__SSL__CACERTFILE=/emqx/apps/emqx_auth_redis/test/emqx_auth_redis_SUITE_data/certs/ca.crt EMQX_AUTH__REDIS__SSL__CERTFILE=/emqx/apps/emqx_auth_redis/test/emqx_auth_redis_SUITE_data/certs/redis.crt EMQX_AUTH__REDIS__SSL__KEYFILE=/emqx/apps/emqx_auth_redis/test/emqx_auth_redis_SUITE_data/certs/redis.key + EMQX_AUTH__REDIS__SSL__VERIFY=true + EMQX_AUTH__REDIS__SSL__SERVER_NAME_INDICATION=disable EOF - name: setup env: REDIS_TAG: ${{ matrix.redis_tag }} if: matrix.connect_type == 'tcp' run: | - docker-compose -f .ci/compatibility_tests/docker-compose-redis-${{ matrix.node_type }}.yaml up -d echo EMQX_AUTH__REDIS__SSL__ENABLE=off >> "$GITHUB_ENV" - name: get 
server address - if: matrix.connect_type == 'tcp' || (matrix.connect_type == 'tls' && matrix.redis_tag != '5') run: | - set -exu ipv4_address=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' redis) ipv6_address=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.GlobalIPv6Address}}{{end}}' redis) cat <<-EOF >> "$GITHUB_ENV" @@ -286,15 +343,13 @@ jobs: - name: setup if: matrix.node_type == 'single' && matrix.connect_type == 'tcp' run: | - set -exu cat <<-EOF >> "$GITHUB_ENV" EMQX_AUTH__REDIS__TYPE=single EMQX_AUTH__REDIS__SERVER=${redis_${{ matrix.network_type }}_address}:6379 EOF - name: setup - if: matrix.node_type == 'single' && matrix.connect_type == 'tls' && matrix.redis_tag != '5' + if: matrix.node_type == 'single' && matrix.connect_type == 'tls' run: | - set -exu cat <<-EOF >> "$GITHUB_ENV" EMQX_AUTH__REDIS__TYPE=single EMQX_AUTH__REDIS__SERVER=${redis_${{ matrix.network_type }}_address}:6380 @@ -302,23 +357,26 @@ jobs: - name: setup if: matrix.node_type == 'cluster' && matrix.connect_type == 'tcp' run: | - set -exu cat <<-EOF >> "$GITHUB_ENV" EMQX_AUTH__REDIS__TYPE=cluster EMQX_AUTH__REDIS__SERVER=${redis_${{ matrix.network_type }}_address}:7000 EOF - name: setup - if: matrix.node_type == 'cluster' && matrix.connect_type == 'tls' && matrix.redis_tag != '5' + if: matrix.node_type == 'cluster' && matrix.connect_type == 'tls' run: | - set -exu cat <<-EOF >> "$GITHUB_ENV" EMQX_AUTH__REDIS__TYPE=cluster EMQX_AUTH__REDIS__SERVER=${redis_${{ matrix.network_type }}_address}:8000 EOF + - name: set git token + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + docker exec -i erlang bash -c "echo \"https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com\" > /root/.git-credentials && git config --global credential.helper store" + fi - name: run test cases - if: matrix.connect_type == 'tcp' || (matrix.connect_type == 'tls' && matrix.redis_tag != '5') run: | export CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_ + export EMQX_AUTH__REIDS__PASSWORD=public printenv > .env docker exec -i erlang sh -c "make ensure-rebar3" docker exec -i erlang sh -c "./rebar3 eunit --dir apps/emqx_auth_redis" diff --git a/.github/workflows/run_fvt_tests.yaml b/.github/workflows/run_fvt_tests.yaml index 04638ca08..a155b4777 100644 --- a/.github/workflows/run_fvt_tests.yaml +++ b/.github/workflows/run_fvt_tests.yaml @@ -15,8 +15,19 @@ jobs: steps: - uses: actions/checkout@v1 + - name: prepare + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials + git config --global credential.helper store + echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token + make deps-emqx-ee + echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV + else + echo "TARGET=emqx/emqx" >> $GITHUB_ENV + fi - name: make emqx image - run: TARGET=emqx/emqx make docker + run: make docker - name: run emqx timeout-minutes: 5 run: | @@ -33,20 +44,30 @@ jobs: done - name: make paho tests run: | - docker exec -i paho_client sh -c "apk update && apk add git curl \ - && git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho.mqtt.testing \ - && pip install pytest \ - && pytest -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host node1.emqx.io \ - && pytest -v /paho.mqtt.testing/interoperability/test_cluster --host1 node1.emqx.io --host2 node2.emqx.io \ - && pytest -v /paho.mqtt.testing/interoperability/test_client --host node1.emqx.io" + if ! 
docker exec -i paho_client /scripts/pytest.sh; then + docker logs node1.emqx.io + docker logs node2.emqx.io + exit 1 + fi helm_test: runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v1 + - name: prepare + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials + git config --global credential.helper store + echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token + make deps-emqx-ee + echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV + else + echo "TARGET=emqx/emqx" >> $GITHUB_ENV + fi - name: make emqx image - run: TARGET=emqx/emqx make docker + run: make docker - name: install k3s env: KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" @@ -69,15 +90,21 @@ jobs: timeout-minutes: 5 run: | version=$(./pkg-vsn.sh) - sudo docker save emqx/emqx:$version -o emqx.tar.gz + sudo docker save ${TARGET}:$version -o emqx.tar.gz sudo k3s ctr image import emqx.tar.gz sed -i -r "s/^appVersion: .*$/appVersion: \"${version}\"/g" deploy/charts/emqx/Chart.yaml - sed -i -r 's/ pullPolicy: .*$/ pullPolicy: Never/g' deploy/charts/emqx/values.yaml sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml - helm install emqx --set emqxAclConfig="" --set emqxConfig.EMQX_ZONE__EXTERNAL__RETRY_INTERVAL=2s --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 deploy/charts/emqx --debug --dry-run - helm install emqx --set emqxAclConfig="" --set emqxConfig.EMQX_ZONE__EXTERNAL__RETRY_INTERVAL=2s --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 deploy/charts/emqx + helm install emqx \ + --set image.repository=${TARGET} \ + --set image.pullPolicy=Never \ + --set emqxAclConfig="" \ + --set image.pullPolicy=Never \ + --set emqxConfig.EMQX_ZONE__EXTERNAL__RETRY_INTERVAL=2s \ + --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \ + deploy/charts/emqx \ + --debug while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \ != "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do @@ -110,11 +137,18 @@ jobs: emqx2=$(kubectl get pods emqx-2 -o jsonpath='{.status.podIP}') pytest -v paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host $emqx_svc + RESULT=$? pytest -v paho.mqtt.testing/interoperability/test_cluster --host1 $emqx1 --host2 $emqx2 + RESULT=$((RESULT + $?)) + if [ 0 -ne $RESULT ]; then + kubectl logs emqx-1 + kubectl logs emqx-2 + fi + exit $RESULT relup_test: runs-on: ubuntu-20.04 - container: emqx/build-env:erl23.2.2-ubuntu20.04 + container: emqx/build-env:erl23.2.7-ubuntu20.04 defaults: run: shell: bash @@ -148,14 +182,37 @@ jobs: repository: ${{ github.repository }} path: emqx fetch-depth: 0 + - name: prepare + run: | + if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then + echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials + git config --global credential.helper store + echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token + echo "PROFILE=emqx-ee" >> $GITHUB_ENV + else + echo "PROFILE=emqx" >> $GITHUB_ENV + fi - name: get version run: | set -e -x -u cd emqx - vsn="$(erl -eval '{ok, [{application,emqx, L} | _]} = file:consult("src/emqx.app.src"), {vsn, VSN} = lists:keyfind(vsn,1,L), io:fwrite(VSN), halt().' 
-noshell)" + if [ $PROFILE = "emqx" ];then + broker="emqx-ce" + edition='opensource' + else + broker="emqx-ee" + edition='enterprise' + fi + + vsn="$(grep -E "define.+EMQX_RELEASE.+${edition}" include/emqx_release.hrl | cut -d '"' -f2)" echo "VSN=$vsn" >> $GITHUB_ENV - pre_tag="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')" - old_vsns="$(git tag -l "$pre_tag.[0-9]" | tr "\n" " " | sed "s/$vsn//")" + + pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')" + if [ $PROFILE = "emqx" ]; then + old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | tr "\n" " " | sed "s/v$vsn//")" + else + old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | tr "\n" " " | sed "s/v$vsn//")" + fi echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV - name: download emqx run: | @@ -163,10 +220,10 @@ jobs: cd emqx old_vsns=($(echo $OLD_VSNS | tr ' ' ' ')) for old_vsn in ${old_vsns[@]}; do - wget https://s3-us-west-2.amazonaws.com/packages.emqx/emqx-ce/v$old_vsn/emqx-ubuntu20.04-${old_vsn}-x86_64.zip + wget https://s3-${{ secrets.AWS_DEFAULT_REGION }}.amazonaws.com/${{ secrets.AWS_S3_BUCKET }}/$broker/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-x86_64.zip done - name: build emqx - run: make -C emqx emqx-zip + run: make -C emqx ${PROFILE}-zip - name: build emqtt-bench run: make -C emqtt-bench - name: build lux diff --git a/.github/workflows/run_gitlint.yaml b/.github/workflows/run_gitlint.yaml index 9d5d72ab6..01b35461f 100644 --- a/.github/workflows/run_gitlint.yaml +++ b/.github/workflows/run_gitlint.yaml @@ -12,15 +12,25 @@ jobs: run: | sudo apt-get update sudo apt install gitlint + - name: Set auth header + if: endsWith(github.repository, 'enterprise') + run: | + echo 'AUTH_HEADER<> $GITHUB_ENV + echo "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" >> $GITHUB_ENV + echo 'EOF' >> $GITHUB_ENV - name: Run gitlint + shell: bash run: | pr_number=$(echo $GITHUB_REF | awk 'BEGIN { FS = "/" } ; { print $3 }') - messages=$(curl "https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${pr_number}/commits") + messages="$(curl --silent --show-error \ + --header "${{ env.AUTH_HEADER }}" \ + --header "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${pr_number}/commits")" len=$(echo $messages | jq length) result=true for i in $( seq 0 $(($len - 1)) ); do message=$(echo $messages | jq -r .[$i].commit.message) - echo commit message: $message + echo "commit message: $message" status=0 echo $message | gitlint -C ./.github/workflows/.gitlint || status=$? 
if [ $status -ne 0 ]; then diff --git a/.github/workflows/run_test_cases.yaml b/.github/workflows/run_test_cases.yaml index 0a2f13eca..ff3a935e3 100644 --- a/.github/workflows/run_test_cases.yaml +++ b/.github/workflows/run_test_cases.yaml @@ -12,10 +12,16 @@ on: jobs: run_static_analysis: runs-on: ubuntu-20.04 - container: emqx/build-env:erl23.2.2-ubuntu20.04 + container: emqx/build-env:erl23.2.7-ubuntu20.04 steps: - uses: actions/checkout@v2 + - name: set git credentials + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials + git config --global credential.helper store + fi - name: xref run: make xref - name: dialyzer @@ -26,7 +32,16 @@ jobs: steps: - uses: actions/checkout@v2 - - name: set up + - name: set edition + id: set_edition + run: | + if make emqx-ee --dry-run > /dev/null 2>&1; then + echo "EDITION=enterprise" >> $GITHUB_ENV + else + echo "EDITION=opensource" >> $GITHUB_ENV + fi + - name: docker compose up + if: env.EDITION == 'opensource' env: MYSQL_TAG: 8 REDIS_TAG: 6 @@ -35,16 +50,63 @@ jobs: LDAP_TAG: 2.4.50 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - docker-compose -f .ci/apps_tests/docker-compose.yaml build --no-cache - docker-compose -f .ci/apps_tests/docker-compose.yaml up -d + docker-compose \ + -f .ci/docker-compose-file/docker-compose.yaml \ + -f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-mongo-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \ + up -d --build + - name: docker compose up + if: env.EDITION == 'enterprise' + env: + MYSQL_TAG: 8 + REDIS_TAG: 6 + MONGO_TAG: 4 + PGSQL_TAG: 13 + LDAP_TAG: 2.4.50 + OPENTSDB_TAG: latest + INFLUXDB_TAG: 1.7.6 + DYNAMODB_TAG: 1.11.477 + TIMESCALE_TAG: latest-pg11 + CASSANDRA_TAG: 3.11.6 + RABBITMQ_TAG: 3.7 + KAFKA_TAG: 2.5.0 + PULSAR_TAG: 2.3.2 + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 20 + run: | + docker-compose \ + -f .ci/docker-compose-file/docker-compose.yaml \ + -f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-mongo-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-cassandra-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-dynamodb-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-influxdb-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-kafka-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-opentsdb-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-pulsar-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-rabbit-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-timescale-tcp.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-mysql-client.yaml \ + -f .ci/docker-compose-file/docker-compose-enterprise-pgsql-and-timescale-client.yaml \ + up -d --build + docker exec -i erlang bash -c "echo \"https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com\" > /root/.git-credentials && git config --global credential.helper store" + while [ $(docker ps -a --filter 
name=client --filter exited=0 | wc -l) \ + != $(docker ps -a --filter name=client | wc -l) ]; do + sleep 5 + done - name: run eunit run: | docker exec -i erlang bash -c "make eunit" - docker exec --env EMQX_EXTRA_PLUGINS=all -i erlang bash -c "./rebar3 eunit --dir $(find lib-extra/ -mindepth 1 -maxdepth 2 -type l | tr '\n' ',')" - name: run common test run: | docker exec -i erlang bash -c "make ct" - docker exec --env EMQX_EXTRA_PLUGINS=all -i erlang bash -c "./rebar3 ct --dir $(find lib-extra/ -mindepth 1 -maxdepth 2 -type l | tr '\n' ',')" - name: run cover run: | docker exec -i erlang bash -c "make cover" diff --git a/Makefile b/Makefile index 69682f29c..a84a05322 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -$(shell scripts/git-hooks-init.sh) +$(shell $(CURDIR)/scripts/git-hooks-init.sh) REBAR_VERSION = 3.14.3-emqx-5 REBAR = $(CURDIR)/rebar3 BUILD = $(CURDIR)/build @@ -6,6 +6,9 @@ SCRIPTS = $(CURDIR)/scripts export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh) export EMQX_DESC ?= EMQ X export EMQX_CE_DASHBOARD_VERSION ?= v4.3.0-beta.1 +ifeq ($(OS),Windows_NT) + export REBAR_COLOR=none +endif PROFILE ?= emqx REL_PROFILES := emqx emqx-edge @@ -33,27 +36,37 @@ get-dashboard: .PHONY: eunit eunit: $(REBAR) - @$(REBAR) eunit -v -c + @ENABLE_COVER_COMPILE=1 $(REBAR) eunit -v -c .PHONY: proper proper: $(REBAR) - @$(REBAR) as test proper -d test/props -c + @ENABLE_COVER_COMPILE=1 $(REBAR) as test proper -d test/props -c .PHONY: ct ct: $(REBAR) - @$(REBAR) ct --name 'test@127.0.0.1' -c -v + @ENABLE_COVER_COMPILE=1 $(REBAR) ct --name 'test@127.0.0.1' -c -v + +APPS=$(shell $(CURDIR)/scripts/find-apps.sh) + +## app/name-ct targets are intended for local tests hence cover is not enabled +.PHONY: $(APPS:%=%-ct) +define gen-app-ct-target +$1-ct: + $(REBAR) ct --name 'test@127.0.0.1' -v --suite $(shell $(CURDIR)/scripts/find-suites.sh $1) +endef +$(foreach app,$(APPS),$(eval $(call gen-app-ct-target,$(app)))) .PHONY: cover cover: $(REBAR) - @$(REBAR) cover + @ENABLE_COVER_COMPILE=1 $(REBAR) cover .PHONY: coveralls coveralls: $(REBAR) - @$(REBAR) as test coveralls send + @ENABLE_COVER_COMPILE=1 $(REBAR) as test coveralls send .PHONY: $(REL_PROFILES) $(REL_PROFILES:%=%): $(REBAR) get-dashboard - @$(REBAR) as $(@) release + @$(REBAR) as $(@) do compile,release ## Not calling rebar3 clean because ## 1. rebar3 clean relies on rebar3, meaning it reads config, fetches dependencies etc. 
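The per-app CT targets generated above (`$(APPS:%=%-ct)`) are easiest to understand from the command line. Below is a minimal usage sketch, reusing the `emqx_bridge_mqtt` app name that the README in this change also uses; the exact target names come from whatever `scripts/find-apps.sh` prints on a given checkout.

```bash
# List the apps that receive a generated "<app>-ct" target
./scripts/find-apps.sh

# Run a single app's suites locally; these targets skip cover compilation on purpose
make apps/emqx_bridge_mqtt-ct

# Full runs stay cover-compiled through ENABLE_COVER_COMPILE=1
make ct
make cover
```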
@@ -97,7 +110,7 @@ ifneq ($(OS),Windows_NT) endif .PHONY: $(REL_PROFILES:%=%-tar) $(PKG_PROFILES:%=%-tar) -$(REL_PROFILES:%=%-tar) $(PKG_PROFILES:%=%-tar): $(REBAR) get-dashboard +$(REL_PROFILES:%=%-tar) $(PKG_PROFILES:%=%-tar): $(REBAR) get-dashboard $(CONF_SEGS) @$(BUILD) $(subst -tar,,$(@)) tar ## zip targets depend on the corresponding relup and tar artifacts @@ -118,4 +131,11 @@ $1: $(subst -pkg,,$1)-zip $1-tar endef $(foreach pt,$(PKG_PROFILES),$(eval $(call gen-pkg-target,$(pt)))) +.PHONY: run +run: $(PROFILE) quickrun + +.PHONY: quickrun +quickrun: + ./_build/$(PROFILE)/rel/emqx/bin/emqx console + include docker.mk diff --git a/README-CN.md b/README-CN.md index 045a089ee..0172e14bb 100644 --- a/README-CN.md +++ b/README-CN.md @@ -6,8 +6,9 @@ [![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx)](https://hub.docker.com/r/emqx/emqx) [![Slack Invite]()](https://slack-invite.emqx.io) [![Twitter](https://img.shields.io/badge/Twitter-EMQ%20X-1DA1F2?logo=twitter)](https://twitter.com/emqtt) +[![Community](https://img.shields.io/badge/Community-EMQ%20X-yellow)](https://askemq.com) -[![最棒的物联网 MQTT 开源团队期待您的加入](https://www.emqx.io/static/img/github_readme_cn_bg.png)](https://www.emqx.io/cn/careers) +[![最棒的物联网 MQTT 开源团队期待您的加入](https://www.emqx.io/static/img/github_readme_cn_bg.png)](https://careers.emqx.cn/) [English](./README.md) | 简体中文 | [日本語](./README-JP.md) @@ -16,7 +17,7 @@ 从 3.0 版本开始,*EMQ X* 完整支持 MQTT V5.0 协议规范,向下兼容 MQTT V3.1 和 V3.1.1,并支持 MQTT-SN、CoAP、LwM2M、WebSocket 和 STOMP 等通信协议。EMQ X 3.0 单集群可支持千万级别的 MQTT 并发连接。 - 新功能的完整列表,请参阅 [EMQ X Release Notes](https://github.com/emqx/emqx/releases)。 -- 获取更多信息,请访问 [EMQ X 官网](https://www.emqx.io/cn/)。 +- 获取更多信息,请访问 [EMQ X 官网](https://www.emqx.cn/)。 ## 安装 @@ -25,15 +26,15 @@ #### EMQ X Docker 镜像安装 ``` -docker run -d --name emqx -p 1883:1883 -p 8083:8083 -p 8883:8883 -p 8084:8084 -p 18083:18083 emqx/emqx +docker run -d --name emqx -p 1883:1883 -p 8081:8081 -p 8083:8083 -p 8883:8883 -p 8084:8084 -p 18083:18083 emqx/emqx ``` #### 二进制软件包安装 -需从 [EMQ X 下载](https://www.emqx.io/cn/downloads) 页面获取相应操作系统的二进制软件包。 +需从 [EMQ X 下载](https://www.emqx.cn/downloads) 页面获取相应操作系统的二进制软件包。 -- [单节点安装文档](https://docs.emqx.io/broker/latest/cn/getting-started/install.html) -- [集群配置文档](https://docs.emqx.io/broker/latest/cn/advanced/cluster.html) +- [单节点安装文档](https://docs.emqx.cn/broker/latest/getting-started/install.html) +- [集群配置文档](https://docs.emqx.cn/broker/latest/advanced/cluster.html) ## 从源码构建 @@ -75,7 +76,7 @@ DIALYZER_ANALYSE_APP=emqx_lwm2m,emqx_auth_jwt,emqx_auth_ldap make dialyzer ## FAQ -访问 [EMQ X FAQ](https://docs.emqx.io/broker/latest/cn/faq/faq.html) 以获取常见问题的帮助。 +访问 [EMQ X FAQ](https://docs.emqx.cn/broker/latest/faq/faq.html) 以获取常见问题的帮助。 ## 产品路线 @@ -89,9 +90,9 @@ DIALYZER_ANALYSE_APP=emqx_lwm2m,emqx_auth_jwt,emqx_auth_ldap make dialyzer - [Twitter](https://twitter.com/emqtt) - [Facebook](https://www.facebook.com/emqxmqtt) - [Reddit](https://www.reddit.com/r/emqx/) -- [Forum](https://groups.google.com/d/forum/emqtt) +- [Forum](https://askemq.com) - [Weibo](https://weibo.com/emqtt) -- [Blog](https://www.emqx.io/cn/blog) +- [Blog](https://www.emqx.cn/blog) 欢迎你将任何 bug、问题和功能请求提交到 [emqx/emqx](https://github.com/emqx/emqx/issues)。 diff --git a/README.md b/README.md index 14e0abde4..aba873046 100644 --- a/README.md +++ b/README.md @@ -23,18 +23,20 @@ Starting from 3.0 release, *EMQ X* broker fully supports MQTT V5.0 protocol spec The *EMQ X* broker is cross-platform, which supports Linux, Unix, macOS and Windows. 
It means *EMQ X* can be deployed on x86_64 architecture servers and ARM devices like Raspberry Pi. +See more details for building and running *EMQ X* on Windows in [Windows.md](./Windows.md) + #### Installing via EMQ X Docker Image ``` -docker run -d --name emqx -p 1883:1883 -p 8083:8083 -p 8883:8883 -p 8084:8084 -p 18083:18083 emqx/emqx +docker run -d --name emqx -p 1883:1883 -p 8081:8081 -p 8083:8083 -p 8883:8883 -p 8084:8084 -p 18083:18083 emqx/emqx ``` #### Installing via Binary Package Get the binary package of the corresponding OS from [EMQ X Download](https://www.emqx.io/downloads) page. -- [Single Node Install](https://docs.emqx.io/broker/latest/en/getting-started/install.html) -- [Multi Node Install](https://docs.emqx.io/broker/latest/en/advanced/cluster.html) +- [Single Node Install](https://docs.emqx.io/en/broker/latest/getting-started/install.html) +- [Multi Node Install](https://docs.emqx.io/en/broker/latest/advanced/cluster.html) ## Build From Source @@ -87,17 +89,12 @@ make eunit ct ### To run subset of the common tests -examples +Examples ```bash -./rebar3 ct --name 'test@127.0.0.1' -c -v --dir test,apps/emqx_sn,apps/emqx_coap -./rebar3 ct --name 'test@127.0.0.1' -c -v --dir apps/emqx_auth_mnesi --suite emqx_acl_mnesia_SUITE -./rebar3 ct --name 'test@127.0.0.1' -c -v --dir apps/emqx_auth_mnesi --suite emqx_acl_mnesia_SUITE --case t_rest_api +make apps/emqx_bridge_mqtt-ct ``` -NOTE: Do *NOT* use full (relative) path to SUITE files like this `--suite apps/emqx_auth_mnesia/test/emqx_acl_mnesia_SUITE.erl`, -because it will lead to a full copy of `apps` dir into `_buid/test/lib/emqx`. - ### Dialyzer ##### To Analyze all the apps ``` @@ -109,19 +106,25 @@ make dialyzer DIALYZER_ANALYSE_APP=emqx_lwm2m,emqx_auth_jwt,emqx_auth_ldap make dialyzer ``` -## FAQ - -Visiting [EMQ X FAQ](https://docs.emqx.io/broker/latest/en/faq/faq.html) to get help of common problems. - -## Roadmap - -The [EMQ X Roadmap uses Github milestones](https://github.com/emqx/emqx/milestones) to track the progress of the project. - ## Community -The EMQ X community can be found on [GitHub Discussions](https://github.com/emqx/emqx/discussions), where you can ask questions, voice ideas, and share your projects. +### FAQ + +Visiting [EMQ X FAQ](https://docs.emqx.io/en/broker/latest/faq/faq.html) to get help of common problems. + + +### Questions + +[GitHub Discussions](https://github.com/emqx/emqx/discussions) is where you can ask questions, and share ideas. + +### Proposals + +For more organised improvement proposals, you can send pull requests to [EIP](https://github.com/emqx/eip). + +### Plugin development + +To develop your own plugins, see [lib-extra/README.md](./lib-extra/README.md) -To chat with other community members you can join the [EMQ X Slack](https://slack-invite.emqx.io). ## MQTT Specifications diff --git a/Windows.md b/Windows.md new file mode 100644 index 000000000..5e947a22e --- /dev/null +++ b/Windows.md @@ -0,0 +1,127 @@ +# Build and run EMQ X on Windows + +NOTE: The instructions and examples are based on Windows 10. + +## Build Environment + +### Visual studio for C/C++ compile and link + +EMQ X includes Erlang NIF (Native Implmented Function) components, implemented +in C/C++. To compile and link C/C++ libraries, the easiest way is perhaps to +install Visual Studio. + +Visual Studio 2019 is used in our tests. 
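A quick, optional sanity check that the MSVC toolchain is reachable can be run from a Cygwin bash shell once the two `Path` entries mentioned below are in place; this is only a sketch and the banner text in the comments is indicative.

```bash
# Both tools should resolve once the Visual Studio paths are on Path
which cl.exe link.exe || echo "cl.exe/link.exe not found - check the Path entries below"

# cl.exe prints its banner (to stderr) naming the Microsoft C/C++ compiler
cl.exe 2>&1 | head -n 1
```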
+If you are like me (@zmstone) and do not know where to start, +please follow this OTP guide: +https://github.com/erlang/otp/blob/master/HOWTO/INSTALL-WIN32.md + +NOTE: To avoid surprises, you may need to add the two paths below to the `Path` environment variable +and order them before other paths. + +``` +C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.28.29910\bin\Hostx64\x64 +C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build +``` + +Depending on your Visual Studio version and OS, the paths may differ. +The first path is for the rebar3 port compiler to find `cl.exe` and `link.exe`; +the second path is for PowerShell or CMD to set up environment variables. + +### Erlang/OTP + +Install Erlang/OTP 23.2 from https://www.erlang.org/downloads +You may need to edit the `Path` environment variable to allow running +Erlang commands such as `erl` from PowerShell. + +To validate the Erlang installation in CMD or PowerShell: + +* Start (or restart) CMD or PowerShell + +* Execute the `erl` command to enter the Erlang shell + +* Evaluate the Erlang expression `halt().` to exit the Erlang shell. + +e.g. + +``` +PS C:\Users\zmsto> erl +Eshell V11.1.4 (abort with ^G) +1> halt(). +``` + +### bash + +All EMQ X build/run scripts are written in either `bash` or `escript`. +`escript` is installed as a part of Erlang. To install a `bash` +environment on Windows, there are quite a few options. + +Cygwin is what we tested with. + +* Add the `cygwin\bin` dir to the `Path` environment variable. + To do so, search for Edit environment variable in the Control Panel and + add `C:\tools\cygwin\bin` (depending on the location where it was installed) + to the `Path` list. + +* Validate the installation. + Start (or restart) a CMD or PowerShell console and execute `which bash`; it should + print out `/usr/bin/bash`. + +### Other tools + +Some Unix-world tools are required to build EMQ X, including: + +* git +* curl +* make +* jq +* zip / unzip + +We recommend using [scoop](https://scoop.sh/) or [Chocolatey](https://chocolatey.org/install) to install the tools. + +When using scoop: + +``` +scoop install git curl make jq zip unzip +``` + +## Build EMQ X source code + +* Clone the repo: `git clone https://github.com/emqx/emqx.git` + +* Start CMD or PowerShell + +* Execute `vcvarsall.bat x86_amd64` to load environment variables + +* Change to the emqx directory and execute `make` + +### Possible errors + +* `'cl.exe' is not recognized as an internal or external command` + This error usually means the Visual Studio executables are not on the `Path` environment variable. + To fix it, either add a path like `C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.28.29910\bin\Hostx64\x64` + to `Path`, or make sure `vcvarsall.bat x86_amd64` is executed prior to the `make` command. + +* `fatal error C1083: Cannot open include file: 'assert.h': No such file or directory` + If Visual Studio is installed correctly, this usually means the `LIB` and `LIB_PATH` environment + variables are not set. Make sure `vcvarsall.bat x86_amd64` is executed prior to the `make` command. + +* `link: extra operand 'some.obj'` + This is likely due to the use of GNU `link.exe` rather than the one from Visual Studio. + Execute `link.exe --version` to inspect which one is in use. The one installed with + Visual Studio should print out `Microsoft (R) Incremental Linker`. + To fix it, Visual Studio's bin paths should be ordered before Cygwin's (or similar installation's) + bin paths in the `Path` environment variable. 
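To see which `link.exe` wins the `Path` lookup, something like the following can be run from the Cygwin bash shell; the expected output described in the comments is indicative only.

```bash
# All link.exe candidates in lookup order; the Visual Studio one should come first
which -a link.exe

# GNU coreutils 'link' reports its own version, while the MSVC linker
# prints "Microsoft (R) Incremental Linker ..."
link.exe --version 2>&1 | head -n 2
```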
+ +## Run EMQ X + +To start EMQ X broker. + +Execute `_build\emqx\rel\emqx>.\bin\emqx console` or `_build\emqx\rel\emqx>.\bin\emqx start` to start EMQ X. + +Then execute `_build\emqx\rel\emqx>.\bin\emqx_ctl status` to check status. +If everything works fine, it should print out + +``` +Node 'emqx@127.0.0.1' 4.3-beta.1 is started +Application emqx 4.3.0 is running +``` diff --git a/apps/emqx_auth_http/etc/emqx_auth_http.conf b/apps/emqx_auth_http/etc/emqx_auth_http.conf index b2f6c4280..c9077b7e0 100644 --- a/apps/emqx_auth_http/etc/emqx_auth_http.conf +++ b/apps/emqx_auth_http/etc/emqx_auth_http.conf @@ -16,14 +16,14 @@ auth.http.auth_req.method = post ## HTTP Request Headers for Auth Request, Content-Type header is configured by default. ## The possible values of the Content-Type header: application/x-www-form-urlencoded, application/json -## +## ## Examples: auth.http.auth_req.headers.accept = */* auth.http.auth_req.headers.content-type = "application/x-www-form-urlencoded" ## Parameters used to construct the request body or query string parameters ## When the request method is GET, these parameters will be converted into query string parameters ## When the request method is POST, the final format is determined by content-type -## +## ## Available Variables: ## - %u: username ## - %c: clientid @@ -58,7 +58,7 @@ auth.http.super_req.headers.content-type = "application/x-www-form-urlencoded" ## Parameters used to construct the request body or query string parameters ## When the request method is GET, these parameters will be converted into query string parameters ## When the request method is POST, the final format is determined by content-type -## +## ## Available Variables: ## - %u: username ## - %c: clientid @@ -93,7 +93,7 @@ auth.http.acl_req.headers.content-type = "application/x-www-form-urlencoded" ## Parameters used to construct the request body or query string parameters ## When the request method is GET, these parameters will be converted into query string parameters ## When the request method is POST, the final format is determined by content-type -## +## ## Available Variables: ## - %u: username ## - %c: clientid @@ -117,7 +117,7 @@ auth.http.acl_req.params = "access=%A,username=%u,clientid=%c,ipaddr=%a,topic=%t ## Default: 5s auth.http.timeout = 5s -## Connection time-out time, used during the initial request, +## Connection time-out time, used during the initial request, ## when the client is connecting to the server. ## ## Value: Duration @@ -151,3 +151,18 @@ auth.http.pool_size = 32 ## ## Value: File ## auth.http.ssl.keyfile = "{{ platform_etc_dir }}/certs/client-key.pem" + +## In mode verify_none the default behavior is to allow all x509-path +## validation errors. +## +## Value: true | false +## auth.http.ssl.verify = false + +## If not specified, the server's names returned in server's certificate is validated against +## what's provided `auth.http.auth_req.url` config's host part. +## Setting to 'disable' will make EMQ X ignore unmatched server names. +## If set with a host name, the server's names returned in server's certificate is validated +## against this value. +## +## Value: String | disable +## auth.http.ssl.server_name_indication = disable diff --git a/apps/emqx_auth_http/priv/emqx_auth_http.schema b/apps/emqx_auth_http/priv/emqx_auth_http.schema index afd71cfd9..e1f02ef49 100644 --- a/apps/emqx_auth_http/priv/emqx_auth_http.schema +++ b/apps/emqx_auth_http/priv/emqx_auth_http.schema @@ -116,3 +116,12 @@ end}. 
{mapping, "auth.http.ssl.keyfile", "emqx_auth_http.keyfile", [ {datatype, string} ]}. + +{mapping, "auth.http.ssl.verify", "emqx_auth_http.verify", [ + {default, false}, + {datatype, {enum, [true, false]}} +]}. + +{mapping, "auth.http.ssl.server_name_indication", "emqx_auth_http.server_name_indication", [ + {datatype, string} +]}. \ No newline at end of file diff --git a/apps/emqx_auth_http/src/emqx_auth_http_app.erl b/apps/emqx_auth_http/src/emqx_auth_http_app.erl index acbb67bf4..79b34effb 100644 --- a/apps/emqx_auth_http/src/emqx_auth_http_app.erl +++ b/apps/emqx_auth_http/src/emqx_auth_http_app.erl @@ -66,11 +66,22 @@ translate_env(EnvName) -> CACertFile = application:get_env(?APP, cacertfile, undefined), CertFile = application:get_env(?APP, certfile, undefined), KeyFile = application:get_env(?APP, keyfile, undefined), - TLSOpts = lists:filter(fun({_K, V}) when V =:= <<>> -> - false; - (_) -> - true - end, [{keyfile, KeyFile}, {certfile, CertFile}, {cacertfile, CACertFile}]), + Verify = case application:get_env(?APP, verify, fasle) of + true -> verify_peer; + false -> verify_none + end, + SNI = case application:get_env(?APP, server_name_indication, undefined) of + "disable" -> disable; + SNI0 -> SNI0 + end, + TLSOpts = lists:filter( + fun({_, V}) -> + V =/= <<>> andalso V =/= undefined + end, [{keyfile, KeyFile}, + {certfile, CertFile}, + {cacertfile, CACertFile}, + {verify, Verify}, + {server_name_indication, SNI}]), NTLSOpts = [ {versions, emqx_tls_lib:default_versions()} , {ciphers, emqx_tls_lib:default_ciphers()} | TLSOpts diff --git a/apps/emqx_auth_http/test/emqx_auth_http_SUITE.erl b/apps/emqx_auth_http/test/emqx_auth_http_SUITE.erl index c2ad0ac43..5ac5c18e8 100644 --- a/apps/emqx_auth_http/test/emqx_auth_http_SUITE.erl +++ b/apps/emqx_auth_http/test/emqx_auth_http_SUITE.erl @@ -90,7 +90,9 @@ set_https_client_opts() -> SSLOpt = emqx_ct_helpers:client_ssl_twoway(), application:set_env(emqx_auth_http, cacertfile, proplists:get_value(cacertfile, SSLOpt, undefined)), application:set_env(emqx_auth_http, certfile, proplists:get_value(certfile, SSLOpt, undefined)), - application:set_env(emqx_auth_http, keyfile, proplists:get_value(keyfile, SSLOpt, undefined)). + application:set_env(emqx_auth_http, keyfile, proplists:get_value(keyfile, SSLOpt, undefined)), + application:set_env(emqx_auth_http, verify, true), + application:set_env(emqx_auth_http, server_name_indication, "disable"). %% @private http_server(http, inet) -> "http://127.0.0.1:8991"; diff --git a/apps/emqx_auth_ldap/etc/emqx_auth_ldap.conf b/apps/emqx_auth_ldap/etc/emqx_auth_ldap.conf index c849a7eec..b457229e3 100644 --- a/apps/emqx_auth_ldap/etc/emqx_auth_ldap.conf +++ b/apps/emqx_auth_ldap/etc/emqx_auth_ldap.conf @@ -73,6 +73,4 @@ auth.ldap.ssl.enable = false #auth.ldap.ssl.verify = "verify_peer" -#auth.ldap.ssl.fail_if_no_peer_cert = true - #auth.ldap.ssl.server_name_indication = your_server_name diff --git a/apps/emqx_auth_ldap/priv/emqx_auth_ldap.schema b/apps/emqx_auth_ldap/priv/emqx_auth_ldap.schema index a9b908fab..f9c3bf16b 100644 --- a/apps/emqx_auth_ldap/priv/emqx_auth_ldap.schema +++ b/apps/emqx_auth_ldap/priv/emqx_auth_ldap.schema @@ -53,10 +53,6 @@ {datatype, {enum, [verify_none, verify_peer]}} ]}. -{mapping, "auth.ldap.ssl.fail_if_no_peer_cert", "emqx_auth_ldap.ldap", [ - {datatype, {enum, [true, false]}} -]}. - {mapping, "auth.ldap.ssl.server_name_indication", "emqx_auth_ldap.ldap", [ {datatype, string} ]}. 
@@ -75,8 +71,10 @@ {keyfile, cuttlefish:conf_get("auth.ldap.ssl.keyfile", Conf)}, {cacertfile, cuttlefish:conf_get("auth.ldap.ssl.cacertfile", Conf, undefined)}, {verify, cuttlefish:conf_get("auth.ldap.ssl.verify", Conf, undefined)}, - {server_name_indication, cuttlefish:conf_get("auth.ldap.ssl.server_name_indication", Conf, disable)}, - {fail_if_no_peer_cert, cuttlefish:conf_get("auth.ldap.ssl.fail_if_no_peer_cert", Conf, undefined)}] + {server_name_indication, case cuttlefish:conf_get("auth.ldap.ssl.server_name_indication", Conf, undefined) of + "disable" -> disable; + SNI -> SNI + end}] end, Opts = [{servers, Servers}, {port, Port}, diff --git a/apps/emqx_auth_ldap/test/emqx_auth_ldap_SUITE.erl b/apps/emqx_auth_ldap/test/emqx_auth_ldap_SUITE.erl index e13099f00..c9c38f610 100644 --- a/apps/emqx_auth_ldap/test/emqx_auth_ldap_SUITE.erl +++ b/apps/emqx_auth_ldap/test/emqx_auth_ldap_SUITE.erl @@ -44,13 +44,11 @@ groups() -> init_per_group(GrpName, Cfg) -> Fun = fun(App) -> set_special_configs(GrpName, App) end, - emqx_ct_helpers:start_apps([emqx_modules]), emqx_ct_helpers:start_apps([emqx_auth_ldap], Fun), - emqx_mod_acl_internal:unload([]), Cfg. end_per_group(_GrpName, _Cfg) -> - emqx_ct_helpers:stop_apps([emqx_auth_ldap, emqx_modules]). + emqx_ct_helpers:stop_apps([emqx_auth_ldap]). %%-------------------------------------------------------------------- %% Cases diff --git a/apps/emqx_auth_ldap/test/emqx_auth_ldap_bind_as_user_SUITE.erl b/apps/emqx_auth_ldap/test/emqx_auth_ldap_bind_as_user_SUITE.erl index 2960c4621..6a5e7bf47 100644 --- a/apps/emqx_auth_ldap/test/emqx_auth_ldap_bind_as_user_SUITE.erl +++ b/apps/emqx_auth_ldap/test/emqx_auth_ldap_bind_as_user_SUITE.erl @@ -36,12 +36,11 @@ all() -> check_acl]. init_per_suite(Config) -> - emqx_ct_helpers:start_apps([emqx_modules, emqx_auth_ldap], fun set_special_configs/1), - emqx_mod_acl_internal:unload([]), + emqx_ct_helpers:start_apps([emqx_auth_ldap], fun set_special_configs/1), Config. end_per_suite(_Config) -> - emqx_ct_helpers:stop_apps([emqx_auth_ldap, emqx_modules]). + emqx_ct_helpers:stop_apps([emqx_auth_ldap]). check_auth(_) -> MqttUser1 = #{clientid => <<"mqttuser1">>, @@ -62,7 +61,6 @@ check_auth(_) -> ?assertEqual({error, not_authorized}, emqx_access_control:authenticate(NonExistUser1)). check_acl(_) -> - % emqx_modules:load_module(emqx_mod_acl_internal, false), MqttUser = #{clientid => <<"mqttuser1">>, username => <<"user1">>, zone => external}, NoMqttUser = #{clientid => <<"mqttuser2">>, username => <<"user7">>, zone => external}, allow = emqx_access_control:check_acl(MqttUser, publish, <<"mqttuser0001/pub/1">>), diff --git a/apps/emqx_auth_mnesia/src/emqx_acl_mnesia.erl b/apps/emqx_auth_mnesia/src/emqx_acl_mnesia.erl index c657e54a0..ec8670a83 100644 --- a/apps/emqx_auth_mnesia/src/emqx_acl_mnesia.erl +++ b/apps/emqx_auth_mnesia/src/emqx_acl_mnesia.erl @@ -31,6 +31,7 @@ init() -> ok = ekka_mnesia:create_table(emqx_acl, [ + {type, bag}, {disc_copies, [node()]}, {attributes, record_info(fields, emqx_acl)}, {storage_properties, [{ets, [{read_concurrency, true}]}]}]), diff --git a/apps/emqx_auth_mnesia/src/emqx_acl_mnesia_cli.erl b/apps/emqx_auth_mnesia/src/emqx_acl_mnesia_cli.erl index ae4fcee1f..ca1be1676 100644 --- a/apps/emqx_auth_mnesia/src/emqx_acl_mnesia_cli.erl +++ b/apps/emqx_auth_mnesia/src/emqx_acl_mnesia_cli.erl @@ -39,13 +39,24 @@ -spec(add_acl(login() | all, emqx_topic:topic(), pub | sub | pubsub, allow | deny) -> ok | {error, any()}). 
add_acl(Login, Topic, Action, Access) -> - Acls = #?TABLE{ - filter = {Login, Topic}, - action = Action, - access = Access, - created_at = erlang:system_time(millisecond) - }, - ret(mnesia:transaction(fun mnesia:write/1, [Acls])). + Filter = {Login, Topic}, + Acl = #?TABLE{ + filter = Filter, + action = Action, + access = Access, + created_at = erlang:system_time(millisecond) + }, + ret(mnesia:transaction( + fun() -> + OldRecords = mnesia:wread({?TABLE, Filter}), + case Action of + pubsub -> + update_permission(pub, Acl, OldRecords), + update_permission(sub, Acl, OldRecords); + _ -> + update_permission(Action, Acl, OldRecords) + end + end)). %% @doc Lookup acl by login -spec(lookup_acl(login() | all) -> list()). @@ -160,18 +171,27 @@ cli(["show", "username", Username]) -> [print_acl(Acl) || Acl <- lookup_acl({username, iolist_to_binary(Username)})]; cli(["del", "clientid", Clientid, Topic])-> + cli(["delete", "clientid", Clientid, Topic]); + +cli(["delete", "clientid", Clientid, Topic])-> case remove_acl({clientid, iolist_to_binary(Clientid)}, iolist_to_binary(Topic)) of ok -> emqx_ctl:print("ok~n"); {error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason]) end; cli(["del", "username", Username, Topic])-> + cli(["delete", "username", Username, Topic]); + +cli(["delete", "username", Username, Topic])-> case remove_acl({username, iolist_to_binary(Username)}, iolist_to_binary(Topic)) of ok -> emqx_ctl:print("ok~n"); {error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason]) end; cli(["del", "_all", Topic])-> + cli(["delete", "_all", Topic]); + +cli(["delete", "_all", Topic])-> case remove_acl(all, iolist_to_binary(Topic)) of ok -> emqx_ctl:print("ok~n"); {error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason]) @@ -186,9 +206,9 @@ cli(_) -> , {"acl aad clientid ", "Add clientid acl"} , {"acl add Username ", "Add username acl"} , {"acl add _all ", "Add $all acl"} - , {"acl del clientid ", "Delete clientid acl"} - , {"acl del username ", "Delete username acl"} - , {"acl del _all ", "Delete $all acl"} + , {"acl delete clientid ", "Delete clientid acl"} + , {"acl delete username ", "Delete username acl"} + , {"acl delete _all ", "Delete $all acl"} ]). %%-------------------------------------------------------------------- @@ -224,3 +244,27 @@ print_acl({all, Topic, Action, Access, _}) -> "Acl($all topic = ~p action = ~p access = ~p)~n", [Topic, Action, Access] ). + +update_permission(Action, Acl0, OldRecords) -> + Acl = Acl0 #?TABLE{action = Action}, + maybe_delete_shadowed_records(Action, OldRecords), + mnesia:write(Acl). + +maybe_delete_shadowed_records(_, []) -> + ok; +maybe_delete_shadowed_records(Action1, [Rec = #emqx_acl{action = Action2} | Rest]) -> + if Action1 =:= Action2 -> + ok = mnesia:delete_object(Rec); + Action2 =:= pubsub -> + %% Perform migration from the old data format on the + %% fly. This is needed only for the enterprise version, + %% delete this branch on 5.0 + mnesia:delete_object(Rec), + mnesia:write(Rec#?TABLE{action = other_action(Action1)}); + true -> + ok + end, + maybe_delete_shadowed_records(Action1, Rest). + +other_action(pub) -> sub; +other_action(sub) -> pub. 
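Because `pubsub` rules are now stored as separate `pub` and `sub` records and the CLI advertises `delete` (keeping `del` as an alias), a short interaction sketch against a locally running broker looks like this; the client and topic names are made up.

```bash
# Added as one pub record and one sub record under the hood
emqx_ctl acl add clientid test_clientid 'topic/A' pubsub deny

# A later rule overrides only the direction it names (pub becomes allow, sub stays deny)
emqx_ctl acl add clientid test_clientid 'topic/A' pub allow

emqx_ctl acl show clientid test_clientid

# 'delete' is the documented spelling now; 'del' still works
emqx_ctl acl delete clientid test_clientid 'topic/A'
```

Splitting `pubsub` into two records is what allows a later `pub`-only or `sub`-only rule to shadow just one direction instead of the whole entry.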
diff --git a/apps/emqx_auth_mnesia/src/emqx_auth_mnesia_cli.erl b/apps/emqx_auth_mnesia/src/emqx_auth_mnesia_cli.erl index adefa704b..ef78b1c3c 100644 --- a/apps/emqx_auth_mnesia/src/emqx_auth_mnesia_cli.erl +++ b/apps/emqx_auth_mnesia/src/emqx_auth_mnesia_cli.erl @@ -144,6 +144,9 @@ auth_clientid_cli(["update", ClientId, NewPassword]) -> end; auth_clientid_cli(["del", ClientId]) -> + auth_clientid_cli(["delete", ClientId]); + +auth_clientid_cli(["delete", ClientId]) -> case remove_user({clientid, iolist_to_binary(ClientId)}) of ok -> emqx_ctl:print("ok~n"); {error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason]) @@ -153,7 +156,7 @@ auth_clientid_cli(_) -> emqx_ctl:usage([{"clientid list", "List clientid auth rules"}, {"clientid add ", "Add clientid auth rule"}, {"clientid update ", "Update clientid auth rule"}, - {"clientid del ", "Delete clientid auth rule"}]). + {"clientid delete ", "Delete clientid auth rule"}]). %%-------------------------------------------------------------------- %% Auth Username Cli @@ -176,6 +179,9 @@ auth_username_cli(["update", Username, NewPassword]) -> {error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason]) end; auth_username_cli(["del", Username]) -> + auth_username_cli(["delete", Username]); + +auth_username_cli(["delete", Username]) -> case remove_user({username, iolist_to_binary(Username)}) of ok -> emqx_ctl:print("ok~n"); {error, Reason} -> emqx_ctl:print("Error: ~p~n", [Reason]) @@ -185,4 +191,4 @@ auth_username_cli(_) -> emqx_ctl:usage([{"user list", "List username auth rules"}, {"user add ", "Add username auth rule"}, {"user update ", "Update username auth rule"}, - {"user del ", "Delete username auth rule"}]). + {"user delete ", "Delete username auth rule"}]). diff --git a/apps/emqx_auth_mnesia/test/emqx_acl_mnesia_SUITE.erl b/apps/emqx_auth_mnesia/test/emqx_acl_mnesia_SUITE.erl index 13b041491..e5e48cc93 100644 --- a/apps/emqx_auth_mnesia/test/emqx_acl_mnesia_SUITE.erl +++ b/apps/emqx_auth_mnesia/test/emqx_acl_mnesia_SUITE.erl @@ -42,13 +42,13 @@ groups() -> []. init_per_suite(Config) -> - emqx_ct_helpers:start_apps([emqx_management, emqx_auth_mnesia], fun set_special_configs/1), + emqx_ct_helpers:start_apps([emqx_modules, emqx_management, emqx_auth_mnesia], fun set_special_configs/1), create_default_app(), Config. end_per_suite(_Config) -> delete_default_app(), - emqx_ct_helpers:stop_apps([emqx_management, emqx_auth_mnesia]). + emqx_ct_helpers:stop_apps([emqx_modules, emqx_management, emqx_auth_mnesia]). 
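The same `del` → `delete` aliasing applies to the clientid/username auth CLI changed above; a hypothetical session (broker running locally, credentials made up) might look like this.

```bash
emqx_ctl clientid add dev-client-1 secret
emqx_ctl clientid delete dev-client-1   # previously documented as 'clientid del'

emqx_ctl user add dev-user secret
emqx_ctl user delete dev-user           # previously documented as 'user del'
```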
init_per_testcase(t_check_acl_as_clientid, Config) -> emqx:hook('client.check_acl', fun emqx_acl_mnesia:check_acl/5, [#{key_as => clientid}]), @@ -86,11 +86,15 @@ t_management(_Config) -> ok = emqx_acl_mnesia_cli:add_acl({username, <<"test_username">>}, <<"topic/%u">>, sub, deny), ok = emqx_acl_mnesia_cli:add_acl({username, <<"test_username">>}, <<"topic/+">>, pub, allow), ok = emqx_acl_mnesia_cli:add_acl(all, <<"#">>, pubsub, deny), + %% Sleeps below are needed to hide the race condition between + %% mnesia and ets dirty select in check_acl, that make this test + %% flaky + timer:sleep(100), ?assertEqual(2, length(emqx_acl_mnesia_cli:lookup_acl({clientid, <<"test_clientid">>}))), ?assertEqual(2, length(emqx_acl_mnesia_cli:lookup_acl({username, <<"test_username">>}))), - ?assertEqual(1, length(emqx_acl_mnesia_cli:lookup_acl(all))), - ?assertEqual(5, length(emqx_acl_mnesia_cli:all_acls())), + ?assertEqual(2, length(emqx_acl_mnesia_cli:lookup_acl(all))), + ?assertEqual(6, length(emqx_acl_mnesia_cli:all_acls())), User1 = #{zone => external, clientid => <<"test_clientid">>}, User2 = #{zone => external, clientid => <<"no_exist">>, username => <<"test_username">>}, @@ -105,11 +109,55 @@ t_management(_Config) -> deny = emqx_access_control:check_acl(User3, subscribe, <<"topic/A/B">>), deny = emqx_access_control:check_acl(User3, publish, <<"topic/A/B">>), + %% Test merging of pubsub capability: + ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pubsub, deny), + timer:sleep(100), + deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>), + deny = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>), + ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, allow), + timer:sleep(100), + deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>), + allow = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>), + ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pubsub, allow), + timer:sleep(100), + allow = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>), + allow = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>), + ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, sub, deny), + timer:sleep(100), + deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>), + allow = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>), + ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, deny), + timer:sleep(100), + deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>), + deny = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>), + + %% Test implicit migration of pubsub to pub and sub: + ok = emqx_acl_mnesia_cli:remove_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>), + ok = mnesia:dirty_write(#emqx_acl{ + filter = {{clientid, <<"test_clientid">>}, <<"topic/mix">>}, + action = pubsub, + access = allow, + created_at = erlang:system_time(millisecond) + }), + timer:sleep(100), + allow = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>), + allow = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>), + ok = emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, pub, deny), + timer:sleep(100), + allow = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>), + deny = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>), + ok = 
emqx_acl_mnesia_cli:add_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>, sub, deny), + timer:sleep(100), + deny = emqx_access_control:check_acl(User1, subscribe, <<"topic/mix">>), + deny = emqx_access_control:check_acl(User1, publish, <<"topic/mix">>), + ok = emqx_acl_mnesia_cli:remove_acl({clientid, <<"test_clientid">>}, <<"topic/%c">>), ok = emqx_acl_mnesia_cli:remove_acl({clientid, <<"test_clientid">>}, <<"topic/+">>), + ok = emqx_acl_mnesia_cli:remove_acl({clientid, <<"test_clientid">>}, <<"topic/mix">>), ok = emqx_acl_mnesia_cli:remove_acl({username, <<"test_username">>}, <<"topic/%u">>), ok = emqx_acl_mnesia_cli:remove_acl({username, <<"test_username">>}, <<"topic/+">>), ok = emqx_acl_mnesia_cli:remove_acl(all, <<"#">>), + timer:sleep(100), ?assertEqual([], emqx_acl_mnesia_cli:all_acls()). @@ -124,6 +172,7 @@ t_acl_cli(_Config) -> ?assertEqual(0, length(emqx_acl_mnesia_cli:cli(["list"]))), + emqx_acl_mnesia_cli:cli(["add", "clientid", "test_clientid", "topic/A", "pub", "deny"]), emqx_acl_mnesia_cli:cli(["add", "clientid", "test_clientid", "topic/A", "pub", "allow"]), R1 = emqx_ctl:format("Acl(clientid = ~p topic = ~p action = ~p access = ~p)~n", [<<"test_clientid">>, <<"topic/A">>, pub, allow]), @@ -136,11 +185,14 @@ t_acl_cli(_Config) -> ?assertEqual([R2], emqx_acl_mnesia_cli:cli(["show", "username", "test_username"])), ?assertEqual([R2], emqx_acl_mnesia_cli:cli(["list", "username"])), + emqx_acl_mnesia_cli:cli(["add", "_all", "#", "pub", "allow"]), emqx_acl_mnesia_cli:cli(["add", "_all", "#", "pubsub", "deny"]), - ?assertMatch(["Acl($all topic = <<\"#\">> action = pubsub access = deny)\n"], - emqx_acl_mnesia_cli:cli(["list", "_all"]) + ?assertMatch(["", + "Acl($all topic = <<\"#\">> action = pub access = deny)", + "Acl($all topic = <<\"#\">> action = sub access = deny)"], + lists:sort(string:split(emqx_acl_mnesia_cli:cli(["list", "_all"]), "\n", all)) ), - ?assertEqual(3, length(emqx_acl_mnesia_cli:cli(["list"]))), + ?assertEqual(4, length(emqx_acl_mnesia_cli:cli(["list"]))), emqx_acl_mnesia_cli:cli(["del", "clientid", "test_clientid", "topic/A"]), emqx_acl_mnesia_cli:cli(["del", "username", "test_username", "topic/B"]), @@ -169,7 +221,7 @@ t_rest_api(_Config) -> }], {ok, _} = request_http_rest_add([], Params1), {ok, Re1} = request_http_rest_list(["clientid", "test_clientid"]), - ?assertMatch(3, length(get_http_data(Re1))), + ?assertMatch(4, length(get_http_data(Re1))), {ok, _} = request_http_rest_delete(["clientid", "test_clientid", "topic", "topic/A"]), {ok, _} = request_http_rest_delete(["clientid", "test_clientid", "topic", "topic/B"]), {ok, _} = request_http_rest_delete(["clientid", "test_clientid", "topic", "topic/C"]), @@ -193,7 +245,7 @@ t_rest_api(_Config) -> }], {ok, _} = request_http_rest_add([], Params2), {ok, Re2} = request_http_rest_list(["username", "test_username"]), - ?assertMatch(3, length(get_http_data(Re2))), + ?assertMatch(4, length(get_http_data(Re2))), {ok, _} = request_http_rest_delete(["username", "test_username", "topic", "topic/A"]), {ok, _} = request_http_rest_delete(["username", "test_username", "topic", "topic/B"]), {ok, _} = request_http_rest_delete(["username", "test_username", "topic", "topic/C"]), @@ -214,7 +266,7 @@ t_rest_api(_Config) -> }], {ok, _} = request_http_rest_add([], Params3), {ok, Re3} = request_http_rest_list(["$all"]), - ?assertMatch(3, length(get_http_data(Re3))), + ?assertMatch(4, length(get_http_data(Re3))), {ok, _} = request_http_rest_delete(["$all", "topic", "topic/A"]), {ok, _} = request_http_rest_delete(["$all", "topic", 
"topic/B"]), {ok, _} = request_http_rest_delete(["$all", "topic", "topic/C"]), diff --git a/apps/emqx_auth_mnesia/test/emqx_auth_mnesia_SUITE.erl b/apps/emqx_auth_mnesia/test/emqx_auth_mnesia_SUITE.erl index 1c3dc50b4..8ac942257 100644 --- a/apps/emqx_auth_mnesia/test/emqx_auth_mnesia_SUITE.erl +++ b/apps/emqx_auth_mnesia/test/emqx_auth_mnesia_SUITE.erl @@ -47,13 +47,13 @@ groups() -> []. init_per_suite(Config) -> - ok = emqx_ct_helpers:start_apps([emqx_management, emqx_auth_mnesia], fun set_special_configs/1), + ok = emqx_ct_helpers:start_apps([emqx_modules, emqx_management, emqx_auth_mnesia], fun set_special_configs/1), create_default_app(), Config. end_per_suite(_Config) -> delete_default_app(), - emqx_ct_helpers:stop_apps([emqx_management, emqx_auth_mnesia]). + emqx_ct_helpers:stop_apps([emqx_modules, emqx_management, emqx_auth_mnesia]). init_per_testcase(t_check_as_clientid, Config) -> Params = #{ diff --git a/apps/emqx_auth_mongo/etc/emqx_auth_mongo.conf b/apps/emqx_auth_mongo/etc/emqx_auth_mongo.conf index 8ae68f3e7..c59c80643 100644 --- a/apps/emqx_auth_mongo/etc/emqx_auth_mongo.conf +++ b/apps/emqx_auth_mongo/etc/emqx_auth_mongo.conf @@ -70,6 +70,21 @@ auth.mongo.database = mqtt ## Value: File ## auth.mongo.ssl.cacertfile = +## In mode verify_none the default behavior is to allow all x509-path +## validation errors. +## +## Value: true | false +## auth.mongo.ssl.verify = false + +## If not specified, the server's names returned in server's certificate is validated against +## what's provided `auth.mongo.server` config's host part. +## Setting to 'disable' will make EMQ X ignore unmatched server names. +## If set with a host name, the server's names returned in server's certificate is validated +## against this value. +## +## Value: String | disable +## auth.mongo.ssl.server_name_indication = disable + ## MongoDB write mode. ## ## Value: unsafe | safe diff --git a/apps/emqx_auth_mongo/priv/emqx_auth_mongo.schema b/apps/emqx_auth_mongo/priv/emqx_auth_mongo.schema index 04dd4fe52..150990818 100644 --- a/apps/emqx_auth_mongo/priv/emqx_auth_mongo.schema +++ b/apps/emqx_auth_mongo/priv/emqx_auth_mongo.schema @@ -62,6 +62,15 @@ {datatype, string} ]}. +{mapping, "auth.mongo.ssl.verify", "emqx_auth_mongo.server", [ + {default, false}, + {datatype, {enum, [true, false]}} +]}. + +{mapping, "auth.mongo.ssl.server_name_indication", "emqx_auth_mongo.server", [ + {datatype, string} +]}. 
+ %% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 {mapping, "auth.mongo.ssl_opts.keyfile", "emqx_auth_mongo.server", [ {datatype, string} @@ -123,8 +132,17 @@ end, Filter = fun(Opts) -> [{K, V} || {K, V} <- Opts, V =/= undefined] end, SslOpts = fun(Prefix) -> - Filter([{keyfile, cuttlefish:conf_get(Prefix ++ ".keyfile", Conf, undefined)}, - {certfile, cuttlefish:conf_get(Prefix ++ ".certfile", Conf, undefined)}, + Verify = case cuttlefish:conf_get(Prefix ++ ".verify", Conf, false) of + true -> verify_peer; + flase -> verify_none + end, + Filter([{verify, Verify}, + {server_name_indication, case cuttlefish:conf_get(Prefix ++ ".server_name_indication", Conf, undefined) of + "disable" -> disable; + SNI -> SNI + end}, + {keyfile, cuttlefish:conf_get(Prefix ++ ".keyfile", Conf, undefined)}, + {certfile, cuttlefish:conf_get(Prefix ++ ".certfile", Conf, undefined)}, {cacertfile, cuttlefish:conf_get(Prefix ++ ".cacertfile", Conf, undefined)}]) end, diff --git a/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE.erl b/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE.erl index 53f22783a..03e1fa33e 100644 --- a/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE.erl +++ b/apps/emqx_auth_mongo/test/emqx_auth_mongo_SUITE.erl @@ -50,23 +50,18 @@ all() -> emqx_ct:all(?MODULE). init_per_suite(Cfg) -> - emqx_ct_helpers:start_apps([emqx_modules, emqx_auth_mongo], fun set_special_confs/1), - emqx_modules:load_module(emqx_mod_acl_internal, false), + emqx_ct_helpers:start_apps([emqx_auth_mongo], fun set_special_confs/1), init_mongo_data(), Cfg. end_per_suite(_Cfg) -> deinit_mongo_data(), - emqx_ct_helpers:stop_apps([emqx_auth_mongo, emqx_modules]). + emqx_ct_helpers:stop_apps([emqx_auth_mongo]). set_special_confs(emqx) -> application:set_env(emqx, acl_nomatch, deny), - application:set_env(emqx, acl_file, - emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/acl.conf")), application:set_env(emqx, allow_anonymous, false), - application:set_env(emqx, enable_acl_cache, false), - application:set_env(emqx, plugins_loaded_file, - emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins")); + application:set_env(emqx, enable_acl_cache, false); set_special_confs(_App) -> ok. @@ -133,7 +128,7 @@ t_check_acl(_) -> allow = emqx_access_control:check_acl(User2, subscribe, <<"$SYS/testuser/1">>), allow = emqx_access_control:check_acl(User3, publish, <<"a/b/c">>), deny = emqx_access_control:check_acl(User3, publish, <<"c">>), - allow = emqx_access_control:check_acl(User4, publish, <<"a/b/c">>). + deny = emqx_access_control:check_acl(User4, publish, <<"a/b/c">>). t_acl_super(_) -> reload({auth_query, [{password_hash, plain}, {password_field, [<<"password">>]}]}), diff --git a/apps/emqx_auth_mysql/etc/emqx_auth_mysql.conf b/apps/emqx_auth_mysql/etc/emqx_auth_mysql.conf index 28ff95c75..1c3d40059 100644 --- a/apps/emqx_auth_mysql/etc/emqx_auth_mysql.conf +++ b/apps/emqx_auth_mysql/etc/emqx_auth_mysql.conf @@ -114,3 +114,18 @@ auth.mysql.acl_query = "select allow, ipaddr, username, clientid, access, topic ## ## Value: File #auth.mysql.ssl.keyfile = /path/to/your/clientkey.pem + +## In mode verify_none the default behavior is to allow all x509-path +## validation errors. +## +## Value: true | false +#auth.mysql.ssl.verify = false + +## If not specified, the server's names returned in server's certificate is validated against +## what's provided `auth.mysql.server` config's host part. +## Setting to 'disable' will make EMQ X ignore unmatched server names. 
+## If set with a host name, the server's names returned in server's certificate is validated +## against this value. +## +## Value: String | disable +## auth.mysql.ssl.server_name_indication = disable diff --git a/apps/emqx_auth_mysql/priv/emqx_auth_mysql.schema b/apps/emqx_auth_mysql/priv/emqx_auth_mysql.schema index 28a9d5956..c89666a4c 100644 --- a/apps/emqx_auth_mysql/priv/emqx_auth_mysql.schema +++ b/apps/emqx_auth_mysql/priv/emqx_auth_mysql.schema @@ -52,6 +52,15 @@ {datatype, string} ]}. +{mapping, "auth.mysql.ssl.verify", "emqx_auth_mysql.server", [ + {default, false}, + {datatype, {enum, [true, false]}} +]}. + +{mapping, "auth.mysql.ssl.server_name_indication", "emqx_auth_mysql.server", [ + {datatype, string} +]}. + {translation, "emqx_auth_mysql.server", fun(Conf) -> {MyHost, MyPort} = case cuttlefish:conf_get("auth.mysql.server", Conf) of @@ -94,10 +103,20 @@ ), Cert = cuttlefish:conf_get("auth.mysql.ssl.certfile", Conf, undefined), Key = cuttlefish:conf_get("auth.mysql.ssl.keyfile", Conf, undefined), - Options ++ [{ssl, Filter([{server_name_indication, disable}, + Verify = case cuttlefish:conf_get("auth.mysql.ssl.verify", Conf, false) of + true -> verify_peer; + flase -> verify_none + end, + SNI = case cuttlefish:conf_get("auth.mysql.ssl.server_name_indication", Conf, undefined) of + "disable" -> disable; + SNI0 -> SNI0 + end, + Options ++ [{ssl, Filter([{server_name_indication, SNI}, {cacertfile, CA}, {certfile, Cert}, - {keyfile, Key}]) + {keyfile, Key}, + {verify, Verify} + ]) }]; _ -> Options diff --git a/apps/emqx_auth_pgsql/etc/emqx_auth_pgsql.conf b/apps/emqx_auth_pgsql/etc/emqx_auth_pgsql.conf index 3dbd01804..6f7018210 100644 --- a/apps/emqx_auth_pgsql/etc/emqx_auth_pgsql.conf +++ b/apps/emqx_auth_pgsql/etc/emqx_auth_pgsql.conf @@ -62,6 +62,21 @@ auth.pgsql.ssl.enable = off ## Value: File #auth.pgsql.ssl.cacertfile = +## In mode verify_none the default behavior is to allow all x509-path +## validation errors. +## +## Value: true | false +#auth.pgsql.ssl.verify = false + +## If not specified, the server's names returned in server's certificate is validated against +## what's provided `auth.pgsql.server` config's host part. +## Setting to 'disable' will make EMQ X ignore unmatched server names. +## If set with a host name, the server's names returned in server's certificate is validated +## against this value. +## +## Value: String | disable +## auth.pgsql.ssl.server_name_indication = disable + ## Authentication query. ## ## Value: SQL diff --git a/apps/emqx_auth_pgsql/priv/emqx_auth_pgsql.schema b/apps/emqx_auth_pgsql/priv/emqx_auth_pgsql.schema index 9091544a3..2f7fd3886 100644 --- a/apps/emqx_auth_pgsql/priv/emqx_auth_pgsql.schema +++ b/apps/emqx_auth_pgsql/priv/emqx_auth_pgsql.schema @@ -52,6 +52,15 @@ {datatype, string} ]}. +{mapping, "auth.pgsql.ssl.verify", "emqx_auth_pgsql.server", [ + {default, false}, + {datatype, {enum, [true, false]}} +]}. + +{mapping, "auth.pgsql.ssl.server_name_indication", "emqx_auth_pgsql.server", [ + {datatype, string} +]}. 
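All four backends special-case the literal string `disable` for `server_name_indication`, turning it into the atom `disable` that Erlang `ssl` understands, and pass any other string through as the reference host name; when the key is unset, the option is simply omitted. A small standalone sketch of that mapping (the host name is illustrative):

```
%% Sketch of the SNI handling repeated in the mongo, mysql, pgsql and redis
%% translations: "disable" becomes the atom 'disable' (no SNI, no hostname
%% check), any other string is the reference host name, unset adds nothing.
SniOption = fun(undefined) -> [];
               ("disable") -> [{server_name_indication, disable}];
               (HostName)  -> [{server_name_indication, HostName}]
            end,
[{server_name_indication, disable}] = SniOption("disable"),
[{server_name_indication, "mysql.example.com"}] = SniOption("mysql.example.com").
```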
+ %% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 {mapping, "auth.pgsql.ssl_opts.keyfile", "emqx_auth_pgsql.server", [ {datatype, string} @@ -90,9 +99,18 @@ Filter = fun(Opts) -> [{K, V} || {K, V} <- Opts, V =/= undefined] end, SslOpts = fun(Prefix) -> + Verify = case cuttlefish:conf_get(Prefix ++ ".verify", Conf, false) of + true -> verify_peer; + flase -> verify_none + end, Filter([{keyfile, cuttlefish:conf_get(Prefix ++ ".keyfile", Conf, undefined)}, {certfile, cuttlefish:conf_get(Prefix ++ ".certfile", Conf, undefined)}, {cacertfile, cuttlefish:conf_get(Prefix ++ ".cacertfile", Conf, undefined)}, + {verify, Verify}, + {server_name_indication, case cuttlefish:conf_get(Prefix ++ ".server_name_indication", Conf, undefined) of + "disable" -> disable; + SNI -> SNI + end}, {versions, [list_to_existing_atom(Value) || Value <- string:tokens(cuttlefish:conf_get(Prefix ++ ".tls_versions", Conf), " ,")]}]) end, diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE.erl b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE.erl index 7929af929..2d59171b3 100644 --- a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE.erl +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE.erl @@ -70,7 +70,7 @@ all() -> emqx_ct:all(?MODULE). init_per_suite(Config) -> - emqx_ct_helpers:start_apps([emqx_modules, emqx_auth_pgsql]), + emqx_ct_helpers:start_apps([emqx_auth_pgsql]), drop_acl(), drop_auth(), init_auth(), @@ -79,7 +79,7 @@ init_per_suite(Config) -> Config. end_per_suite(Config) -> - emqx_ct_helpers:stop_apps([emqx_auth_pgsql, emqx_modules]), + emqx_ct_helpers:stop_apps([emqx_auth_pgsql]), Config. set_special_configs() -> @@ -161,7 +161,6 @@ t_check_auth(_) -> {error, not_authorized} = emqx_access_control:authenticate(Bcrypt#{password => <<"password">>}). t_check_acl(_) -> - emqx_modules:load_module(emqx_mod_acl_internal, false), User1 = #{zone => external, peerhost => {127,0,0,1}, clientid => <<"c1">>, username => <<"u1">>}, User2 = #{zone => external, peerhost => {127,0,0,1}, clientid => <<"c2">>, username => <<"u2">>}, allow = emqx_access_control:check_acl(User1, subscribe, <<"t1">>), @@ -170,8 +169,8 @@ t_check_acl(_) -> User4 = #{zone => external, peerhost => {10,10,10,110}, clientid => <<"c1">>, username => <<"u1">>}, allow = emqx_access_control:check_acl(User3, subscribe, <<"t1">>), allow = emqx_access_control:check_acl(User3, subscribe, <<"t1">>), - allow = emqx_access_control:check_acl(User3, subscribe, <<"t2">>),%% nomatch -> ignore -> emqttd acl - allow = emqx_access_control:check_acl(User4, subscribe, <<"t1">>),%% nomatch -> ignore -> emqttd acl + deny = emqx_access_control:check_acl(User3, subscribe, <<"t2">>),%% nomatch -> ignore -> emqx acl + deny = emqx_access_control:check_acl(User4, subscribe, <<"t1">>),%% nomatch -> ignore -> emqx acl User5 = #{zone => external, peerhost => {127,0,0,1}, clientid => <<"c3">>, username => <<"u3">>}, allow = emqx_access_control:check_acl(User5, subscribe, <<"t1">>), allow = emqx_access_control:check_acl(User5, publish, <<"t1">>). 
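The flipped expectations in this suite (former `allow` results now `deny`) follow from the suites no longer loading `emqx_mod_acl_internal`: when the auth backend has no matching rule, the check falls through to the `acl_nomatch` setting, which the suites set to `deny`. A minimal sketch of that behaviour, assuming a running broker with the auth plugin loaded; the client details are illustrative:

```
%% Sketch of what the updated suites assert: with acl_nomatch = deny and no
%% fallback ACL module, a topic matching no backend rule is denied.
application:set_env(emqx, acl_nomatch, deny),
Client = #{zone     => external,
           peerhost => {127,0,0,1},
           clientid => <<"c_unmatched">>,
           username => <<"u_unmatched">>},
deny = emqx_access_control:check_acl(Client, subscribe, <<"no/acl/rule/here">>).
```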
diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/ca-key.pem b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/ca-key.pem new file mode 100644 index 000000000..e9717011e --- /dev/null +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/ca-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEA0kGUBi9NDp65jgdxKfizIfuSr2wpwb44yM9SuP4oUQSULOA2 +4iFpLR/c5FAYHU81y9Vx91dQjdZfffaBZuv2zVvteXUkol8Nez7boKbo2E41MTew +8edtNKZAQVvnaHAC2NCZxjchCzUCDEoUUcl+cIERZ8R48FBqK5iTVcMRIx1akwus ++dhBqP0ykA5TGOWZkJrLM9aUXSPQha9+wXlOpkvu0Ur2nkX8PPJnifWao9UShSar +ll1IqPZNCSlZMwcFYcQNBCpdvITUUYlHvMRQV64bUpOxUGDuJkQL3dLKBlNuBRlJ +BcjBAKw7rFnwwHZcMmQ9tan/dZzpzwjo/T0XjwIDAQABAoIBAQCSHvUqnzDkWjcG +l/Fzg92qXlYBCCC0/ugj1sHcwvVt6Mq5rVE3MpUPwTcYjPlVVTlD4aEEjm/zQuq2 +ddxUlOS+r4aIhHrjRT/vSS4FpjnoKeIZxGR6maVxk6DQS3i1QjMYT1CvSpzyVvKH +a+xXMrtmoKxh+085ZAmFJtIuJhUA2yEa4zggCxWnvz8ecLClUPfVDPhdLBHc3KmL +CRpHEC6L/wanvDPRdkkzfKyaJuIJlTDaCg63AY5sDkTW2I57iI/nJ3haSeidfQKz +39EfbnM1A/YprIakafjAu3frBIsjBVcxwGihZmL/YriTHjOggJF841kT5zFkkv2L +/530Wk6xAoGBAOqZLZ4DIi/zLndEOz1mRbUfjc7GQUdYplBnBwJ22VdS0P4TOXnd +UbJth2MA92NM7ocTYVFl4TVIZY/Y+Prxk7KQdHWzR7JPpKfx9OEVgtSqV0vF9eGI +rKp79Y1T4Mvc3UcQCXX6TP7nHLihEzpS8odm2LW4txrOiLsn4Fq/IWrLAoGBAOVv +6U4tm3lImotUupKLZPKEBYwruo9qRysoug9FiorP4TjaBVOfltiiHbAQD6aGfVtN +SZpZZtrs17wL7Xl4db5asgMcZd+8Hkfo5siR7AuGW9FZloOjDcXb5wCh9EvjJ74J +Cjw7RqyVymq9t7IP6wnVwj5Ck48YhlOZCz/mzlnNAoGAWq7NYFgLvgc9feLFF23S +IjpJQZWHJEITP98jaYNxbfzYRm49+GphqxwFinKULjFNvq7yHlnIXSVYBOu1CqOZ +GRwXuGuNmlKI7lZr9xmukfAqgGLMMdr4C4qRF4lFyufcLRz42z7exmWlx4ST/yaT +E13hBRWayeTuG5JFei6Jh1MCgYEAqmX4LyC+JFBgvvQZcLboLRkSCa18bADxhENG +FAuAvmFvksqRRC71WETmqZj0Fqgxt7pp3KFjO1rFSprNLvbg85PmO1s+6fCLyLpX +lESTu2d5D71qhK93jigooxalGitFm+SY3mzjq0/AOpBWOn+J/w7rqVPGxXLgaHv0 +l+vx+00CgYBOvo9/ImjwYii2jFl+sHEoCzlvpITi2temRlT2j6ulSjCLJgjwEFw9 +8e+vvfQumQOsutakUVyURrkMGNDiNlIv8kv5YLCCkrwN22E6Ghyi69MJUvHQXkc/ +QZhjn/luyfpB5f/BeHFS2bkkxAXo+cfG45ApY3Qfz6/7o+H+vDa6/A== +-----END RSA PRIVATE KEY----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/ca.pem b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/ca.pem new file mode 100644 index 000000000..00b31d8a4 --- /dev/null +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/ca.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDAzCCAeugAwIBAgIBATANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR +TF9TZXJ2ZXJfOC4wLjE5X0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X +DTIwMDYxMTAzMzg0NloXDTMwMDYwOTAzMzg0NlowPDE6MDgGA1UEAwwxTXlTUUxf +U2VydmVyXzguMC4xOV9BdXRvX0dlbmVyYXRlZF9DQV9DZXJ0aWZpY2F0ZTCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANJBlAYvTQ6euY4HcSn4syH7kq9s +KcG+OMjPUrj+KFEElCzgNuIhaS0f3ORQGB1PNcvVcfdXUI3WX332gWbr9s1b7Xl1 +JKJfDXs+26Cm6NhONTE3sPHnbTSmQEFb52hwAtjQmcY3IQs1AgxKFFHJfnCBEWfE +ePBQaiuYk1XDESMdWpMLrPnYQaj9MpAOUxjlmZCayzPWlF0j0IWvfsF5TqZL7tFK +9p5F/DzyZ4n1mqPVEoUmq5ZdSKj2TQkpWTMHBWHEDQQqXbyE1FGJR7zEUFeuG1KT +sVBg7iZEC93SygZTbgUZSQXIwQCsO6xZ8MB2XDJkPbWp/3Wc6c8I6P09F48CAwEA +AaMQMA4wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEADKz6bIpP5anp +GgLB0jkclRWuMlS4qqIt4itSsMXPJ/ezpHwECixmgW2TIQl6S1woRkUeMxhT2/Ay +Sn/7aKxuzRagyE5NEGOvrOuAP5RO2ZdNJ/X3/Rh533fK1sOTEEbSsWUvW6iSkZef +rsfZBVP32xBhRWkKRdLeLB4W99ADMa0IrTmZPCXHSSE2V4e1o6zWLXcOZeH1Qh8N +SkelBweR+8r1Fbvy1r3s7eH7DCbYoGEDVLQGOLvzHKBisQHmoDnnF5E9g1eeNRdg +o+vhOKfYCOzeNREJIqS42PHcGhdNRk90ycigPmfUJclz1mDHoMjKR2S5oosTpr65 +tNPx3CL7GA== +-----END CERTIFICATE----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/client-cert.pem 
b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/client-cert.pem new file mode 100644 index 000000000..aad1404ca --- /dev/null +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/client-cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDBDCCAeygAwIBAgIBAzANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR +TF9TZXJ2ZXJfOC4wLjE5X0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X +DTIwMDYxMTAzMzg0N1oXDTMwMDYwOTAzMzg0N1owQDE+MDwGA1UEAww1TXlTUUxf +U2VydmVyXzguMC4xOV9BdXRvX0dlbmVyYXRlZF9DbGllbnRfQ2VydGlmaWNhdGUw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVYSWpOvCTupz82fc85Opv +EQ7rkB8X2oOMyBCpkyHKBIr1ZQgRDWBp9UVOASq3GnSElm6+T3Kb1QbOffa8GIlw +sjAueKdq5L2eSkmPIEQ7eoO5kEW+4V866hE1LeL/PmHg2lGP0iqZiJYtElhHNQO8 +3y9I7cm3xWMAA3SSWikVtpJRn3qIp2QSrH+tK+/HHbE5QwtPxdir4ULSCSOaM5Yh +Wi5Oto88TZqe1v7SXC864JVvO4LuS7TuSreCdWZyPXTJFBFeCEWSAxonKZrqHbBe +CwKML6/0NuzjaQ51c2tzmVI6xpHj3nnu4cSRx6Jf9WBm+35vm0wk4pohX3ptdzeV +AgMBAAGjDTALMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQADggEBAByQ5zSNeFUH +Aw7JlpZHtHaSEeiiyBHke20ziQ07BK1yi/ms2HAWwQkpZv149sjNuIRH8pkTmkZn +g8PDzSefjLbC9AsWpWV0XNV22T/cdobqLqMBDDZ2+5bsV+jTrOigWd9/AHVZ93PP +IJN8HJn6rtvo2l1bh/CdsX14uVSdofXnuWGabNTydqtMvmCerZsdf6qKqLL+PYwm +RDpgWiRUY7KPBSSlKm/9lJzA+bOe4dHeJzxWFVCJcbpoiTFs1je1V8kKQaHtuW39 +ifX6LTKUMlwEECCbDKM8Yq2tm8NjkjCcnFDtKg8zKGPUu+jrFMN5otiC3wnKcP7r +O9EkaPcgYH8= +-----END CERTIFICATE----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/client-key.pem b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/client-key.pem new file mode 100644 index 000000000..6789d0291 --- /dev/null +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/client-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA1WElqTrwk7qc/Nn3POTqbxEO65AfF9qDjMgQqZMhygSK9WUI +EQ1gafVFTgEqtxp0hJZuvk9ym9UGzn32vBiJcLIwLninauS9nkpJjyBEO3qDuZBF +vuFfOuoRNS3i/z5h4NpRj9IqmYiWLRJYRzUDvN8vSO3Jt8VjAAN0klopFbaSUZ96 +iKdkEqx/rSvvxx2xOUMLT8XYq+FC0gkjmjOWIVouTraPPE2antb+0lwvOuCVbzuC +7ku07kq3gnVmcj10yRQRXghFkgMaJyma6h2wXgsCjC+v9Dbs42kOdXNrc5lSOsaR +49557uHEkceiX/VgZvt+b5tMJOKaIV96bXc3lQIDAQABAoIBAF7yjXmSOn7h6P0y +WCuGiTLG2mbDiLJqj2LTm2Z5i+2Cu/qZ7E76Ls63TxF4v3MemH5vGfQhEhR5ZD/6 +GRJ1sKKvB3WGRqjwA9gtojHH39S/nWGy6vYW/vMOOH37XyjIr3EIdIaUtFQBTSHd +Kd71niYrAbVn6fyWHolhADwnVmTMOl5OOAhCdEF4GN3b5aIhIu8BJ7EUzTtHBJIj +CAEfjZFjDs1y1cIgGFJkuIQxMfCpq5recU2qwip7YO6fk//WEjOPu7kSf5IEswL8 +jg1dea9rGBV6KaD2xsgsC6Ll6Sb4BbsrHMfflG3K2Lk3RdVqqTFp1Fn1PTLQE/1S +S/SZPYECgYEA9qYcHKHd0+Q5Ty5wgpxKGa4UCWkpwvfvyv4bh8qlmxueB+l2AIdo +ZvkM8gTPagPQ3WypAyC2b9iQu70uOJo1NizTtKnpjDdN1YpDjISJuS/P0x73gZwy +gmoM5AzMtN4D6IbxXtXnPaYICvwLKU80ouEN5ZPM4/ODLUu6gsp0v2UCgYEA3Xgi +zMC4JF0vEKEaK0H6QstaoXUmw/lToZGH3TEojBIkb/2LrHUclygtONh9kJSFb89/ +jbmRRLAOrx3HZKCNGUmF4H9k5OQyAIv6OGBinvLGqcbqnyNlI+Le8zxySYwKMlEj +EMrBCLmSyi0CGFrbZ3mlj/oCET/ql9rNvcK+DHECgYAEx5dH3sMjtgp+RFId1dWB +xePRgt4yTwewkVgLO5wV82UOljGZNQaK6Eyd7AXw8f38LHzh+KJQbIvxd2sL4cEi +OaAoohpKg0/Y0YMZl//rPMf0OWdmdZZs/I0fZjgZUSwWN3c59T8z7KG/RL8an9RP +S7kvN7wCttdV61/D5RR6GQKBgDxCe/WKWpBKaovzydMLWLTj7/0Oi0W3iXHkzzr4 +LTgvl4qBSofaNbVLUUKuZTv5rXUG2IYPf99YqCYtzBstNDc1MiAriaBeFtzfOW4t +i6gEFtoLLbuvPc3N5Sv5vn8Ug5G9UfU3td5R4AbyyCcoUZqOFuZd+EIJSiOXfXOs +kVmBAoGBAIU9aPAqhU5LX902oq8KsrpdySONqv5mtoStvl3wo95WIqXNEsFY60wO +q02jKQmJJ2MqhkJm2EoF2Mq8+40EZ5sz8LdgeQ/M0yQ9lAhPi4rftwhpe55Ma9dk +SE9X1c/DMCBEaIjJqVXdy0/EeArwpb8sHkguVVAZUWxzD+phm1gs +-----END RSA PRIVATE KEY----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.crt b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.crt deleted file mode 100644 index 
9867681b9..000000000 --- a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDYzCCAksCCQC7J1oPkDz7vTANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMC -Q0ExGTAXBgNVBAgMEEJyaXRpc2ggQ29sdW1iaWExDjAMBgNVBAcMBUNvbW94MRQw -EgYDVQQKDAtUaGVCcmFpbi5jYTEUMBIGA1UEAwwLdGhlYnJhaW4uY2ExHzAdBgkq -hkiG9w0BCQEWEGluZm9AdGhlYnJhaW4uY2EwHhcNMjEwMTEzMDkwNzM2WhcNMjEw -MjEyMDkwNzM2WjBhMQswCQYDVQQGEwJDQTEZMBcGA1UECAwQQnJpdGlzaCBDb2x1 -bWJpYTEOMAwGA1UEBwwFQ29tb3gxFDASBgNVBAoMC1RoZUJyYWluLmNhMREwDwYD -VQQDDAh3d3ctZGF0YTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJv9 -yO5JGKBl+7w0HGkRDIPZ5Ku3lIAzB4ThszRHBqll7VjlTz+q16OQOONqeHBuxPjj -11WMXD2KnfYZW2ZWd0U8FKzuIGOCStGbSUi2hC0owp+KkJcDujfIafXQnAa0fUiS -FBB5iG98vm3QI4gv9135LgnO5oHopH6oZ/t0Id1LzFhp2sdhebdtczmImpo+nt7v -fduapptuIJ20ThdAvo3MlYoAhivsvJKntlWPAwPMQdyezww/q7T5Y8DCyJJTydr5 -PrMz9S/WQTkj/G0y4dZgQonG5r0d1Nf+rwkn78DdXGktVDMBBP41+VWnEDBCTlgS -FjQEY6Izaof8s8q8K2UCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAdlAQkumOAKbQ -SW5gtkHgKyIQyfwk9maKqKccK04WlNk1t1jsvk7kaOEHr3t7YG28yKqicGHAcfFf -i/RU51v2GJVzWCbzkAAH/zNgDcYnYk6sn54YcuBzrPliVH1xxmZy/52+huTxy8Vd -3nmCjdYR/I764rd8gkRK+aHaUTLyitzX1kW90LtXonKY72CNZVXHEBom3XM/a6ff -ilybDloNVTfHstnfsnHHyNYn0SfapqXxPCO+FL9hQjlztUBZryRdS0nq66hB2GSB -CEst/vtNGo/2aa1Vw4bKl2oGepjKNzxp0ZTTVuIcwGzV6oKIsx1ZnWE3gQLEH/TX -dzMzesBayA== ------END CERTIFICATE----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.csr b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.csr deleted file mode 100644 index 325fbe397..000000000 --- a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.csr +++ /dev/null @@ -1,17 +0,0 @@ ------BEGIN CERTIFICATE REQUEST----- -MIICpjCCAY4CAQAwYTELMAkGA1UEBhMCQ0ExGTAXBgNVBAgMEEJyaXRpc2ggQ29s -dW1iaWExDjAMBgNVBAcMBUNvbW94MRQwEgYDVQQKDAtUaGVCcmFpbi5jYTERMA8G -A1UEAwwId3d3LWRhdGEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCb -/cjuSRigZfu8NBxpEQyD2eSrt5SAMweE4bM0RwapZe1Y5U8/qtejkDjjanhwbsT4 -49dVjFw9ip32GVtmVndFPBSs7iBjgkrRm0lItoQtKMKfipCXA7o3yGn10JwGtH1I -khQQeYhvfL5t0COIL/dd+S4JzuaB6KR+qGf7dCHdS8xYadrHYXm3bXM5iJqaPp7e -733bmqabbiCdtE4XQL6NzJWKAIYr7LySp7ZVjwMDzEHcns8MP6u0+WPAwsiSU8na -+T6zM/Uv1kE5I/xtMuHWYEKJxua9HdTX/q8JJ+/A3VxpLVQzAQT+NflVpxAwQk5Y -EhY0BGOiM2qH/LPKvCtlAgMBAAGgADANBgkqhkiG9w0BAQsFAAOCAQEAN6Q8MEDx -g5xlpYB/fFmagpe15+G2QbqVf2mH1a4aBcBns4jMMqNidi4gyjGfzvNxX77R6KcI -AfcxENRVDYJbhAgEQ96jv4jv5pEMuyvQ8VLhn9AOXCaK/VHxbYlOiM7tfFtEDrrB -wTn8FvoEwjehfsSX2dWiwcUK4SPPeuklE/EGjRgoVCwg8EqWzf1fn+tzME8OpnRQ -I8coyALF6ANehvP7ADV3m5iOOaNhfnqmqGBEwjB3TTvE1gZ4UvAyl75bi+Zh3Osn -qemyxocp/ML4o6d/F+nKIZOe6309V2nyrY6RSd2fBCrhYj2rKTbrGTZrpKXeAhtI -jMivnjCK+WNHpQ== ------END CERTIFICATE REQUEST----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.key b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.key deleted file mode 100644 index 787246f6f..000000000 --- a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/postgresql.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAm/3I7kkYoGX7vDQcaREMg9nkq7eUgDMHhOGzNEcGqWXtWOVP -P6rXo5A442p4cG7E+OPXVYxcPYqd9hlbZlZ3RTwUrO4gY4JK0ZtJSLaELSjCn4qQ -lwO6N8hp9dCcBrR9SJIUEHmIb3y+bdAjiC/3XfkuCc7mgeikfqhn+3Qh3UvMWGna -x2F5t21zOYiamj6e3u9925qmm24gnbROF0C+jcyVigCGK+y8kqe2VY8DA8xB3J7P -DD+rtPljwMLIklPJ2vk+szP1L9ZBOSP8bTLh1mBCicbmvR3U1/6vCSfvwN1caS1U -MwEE/jX5VacQMEJOWBIWNARjojNqh/yzyrwrZQIDAQABAoIBAAOicycSLu+10Jq/ -ABZ2njsIPaq+mUgvaDJxa9KBASe7Rz92AFW0blfSSXELDwlXm2FNNbw5jACnFS0h 
-xB5rT1Yeo0CwP7Lx2zptCtUV45iFxZsgCGRsYs9f7RAcLzZ8yBqDxNHpcwNd/bXj -TqCitXnMD4WM+5P1TrfgxqN2Pj/Atg8w/4dP7KcFcTzcZzIz5rr3NTyjsrLdiFis -sR+7m7Qu4PyEfrDpR9Np111nQqVJ1bpt9qt/hv318FaBnpNY6MMBaSni99mvMXSd -SwHn3gnfHREWcNSLGA9gjEQmyIPHpV9T6SJ/zyr++6y8QCq4DiSP36A9zeA1XThP -YEIsWxUCgYEAyLppQerpOT2CnbTbKO/9rGwlbf8FT2GWFcPBtUm0lp21/C32BX+H -jNCmQsE1pZ6+sqv2mb1onr6Xl9cSEt6KsI1EJtFFR9Lnvqqu+JKo31U94z2yTqgv -sc+qMl7shy1kja8T5NaRc++UkCVzVNsnFB9torIaqQwY9IRdRwmYjisCgYEAxvHR -MwvWpOg25zz75OfupIOQhj9W6yphpY5/yoYBms/4OeabJhMrOV142s9souCHmuGU -EtzOQC5jbEc+3MUjx1ZlboHY7UuoEu87kykFEs9mnaD+T34PEAJcQjSzqzS5KMJE -Ro275xf+V/e3hS/Z3hQXmDQNQDNRYMcAZfTW9K8CgYBkHITOuYikYcc5PLBplHhi -fHWWjLBrTPJ73GxKLH6C+BmBsrKXP2mtk4q4lIBbH/dgSV/ugYciVVBqDHwZKSDm -uS4aZhk1nzyx3ZLyqsLK0ErTgTvi+wL+neH2yV0SdlNGTuGPKmzU89KWqfcBhWPS -J3KYyFd/pGb13OZgvap2jQKBgBXCXR84LEHdJCQmh2aB95gGy8fjJZ6TBBsXeuKr -xYEpPf0XO+DuN8wObSmBhmBKLorCIW/utqBOcpFlOXrsFP24dV+g1BkgLUHk6J8v -3V4xUQfsk+Qd5YfaujyDhyMyoQ3UMaOF3QdpmGgGsAvhL/MaP3pmNwzOkBgFrAV6 -wggBAoGBAMflqy2pfqGhaj9S6qZ3K95h7NdCUikdQzqmgbNtOHaZ2kHByyYtOPLB -1VnuDRQiacmum+fTZa6wNmvp2FWg+uxI/aspfF6SdPfGpyPrG5D+ITtqKF2xieK+ -XpzehKTrTuYQRAVhmWbhpuyahYnQyd/MrsCMGzUfAJtM7l5vKa2O ------END RSA PRIVATE KEY----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/private_key.pem b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/private_key.pem new file mode 100644 index 000000000..8fbf6bdec --- /dev/null +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/private_key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEA1zVmMhPqpSPMmYkKh5wwlRD5XuS8YWJKEM6tjFx61VK8qxHE +YngkC2KnL5EuKAjQZIF3tJskwt0hAat047CCCZxrkNEpbVvSnvnk+A/8bg/Ww1n3 +qxzfifhsWfpUKlDnwrtH+ftt+5rZeEkf37XAPy7ZjzecAF9SDV6WSiPeAxUX2+hN +dId42Pf45woo4LFGUlQeagCFkD/R0dpNIMGwcnkKCUikiBqr2ijSIgvRtBfZ9fBG +jFGER2uE/Eay4AgcQsHue8skRwDCng8OnqtPnBtTytmqTy9V/BRgsVKUoksm6wsx +kUYwgHeaq7UCvlCm25SZ7yRyd4k8t0BKDf2h+wIDAQABAoIBAEQcrHmRACTADdNS +IjkFYALt2l8EOfMAbryfDSJtapr1kqz59JPNvmq0EIHnixo0n/APYdmReLML1ZR3 +tYkSpjVwgkLVUC1CcIjMQoGYXaZf8PLnGJHZk45RR8m6hsTV0mQ5bfBaeVa2jbma +OzJMjcnxg/3l9cPQZ2G/3AUfEPccMxOXp1KRz3mUQcGnKJGtDbN/kfmntcwYoxaE +Zg4RoeKAoMpK1SSHAiJKe7TnztINJ7uygR9XSzNd6auY8A3vomSIjpYO7XL+lh7L +izm4Ir3Gb/eCYBvWgQyQa2KCJgK/sQyEs3a09ngofSEUhQJQYhgZDwUj+fDDOGqj +hCZOA8ECgYEA+ZWuHdcUQ3ygYhLds2QcogUlIsx7C8n/Gk/FUrqqXJrTkuO0Eqqa +B47lCITvmn2zm0ODfSFIARgKEUEDLS/biZYv7SUTrFqBLcet+aGI7Dpv91CgB75R +tNzcIf8VxoiP0jPqdbh9mLbbxGi5Uc4p9TVXRljC4hkswaouebWee0sCgYEA3L2E +YB3kiHrhPI9LHS5Px9C1w+NOu5wP5snxrDGEgaFCvL6zgY6PflacppgnmTXl8D1x +im0IDKSw5dP3FFonSVXReq3CXDql7UnhfTCiLDahV7bLxTH42FofcBpDN3ERdOal +58RwQh6VrLkzQRVoObo+hbGlFiwwSAfQC509FhECgYBsRSBpVXo25IN2yBRg09cP ++gdoFyhxrsj5kw1YnB13WrrZh+oABv4WtUhp77E5ZbpaamlKCPwBbXpAjeFg4tfr +0bksuN7V79UGFQ9FsWuCfr8/nDwv38H2IbFlFhFONMOfPmJBey0Q6JJhm8R41mSh +OOiJXcv85UrjIH5U0hLUDQKBgQDVLOU5WcUJlPoOXSgiT0ZW5xWSzuOLRUUKEf6l +19BqzAzCcLy0orOrRAPW01xylt2v6/bJw1Ahva7k1ZZo/kOwjANYoZPxM+ZoSZBN +MXl8j2mzZuJVV1RFxItV3NcLJNPB/Lk+IbRz9kt/2f9InF7iWR3mSU/wIM6j0X+2 +p6yFsQKBgQCM/ldWb511lA+SNkqXB2P6WXAgAM/7+jwsNHX2ia2Ikufm4SUEKMSv +mti/nZkHDHsrHU4wb/2cOAywMELzv9EHzdcoenjBQP65OAc/1qWJs+LnBcCXfqKk +aHjEZW6+brkHdRGLLY3YAHlt/AUL+RsKPJfN72i/FSpmu+52G36eeQ== +-----END RSA PRIVATE KEY----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/public_key.pem b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/public_key.pem new file mode 100644 index 000000000..f9772b533 --- /dev/null +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/public_key.pem @@ -0,0 +1,9 @@ +-----BEGIN PUBLIC KEY----- 
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1zVmMhPqpSPMmYkKh5ww +lRD5XuS8YWJKEM6tjFx61VK8qxHEYngkC2KnL5EuKAjQZIF3tJskwt0hAat047CC +CZxrkNEpbVvSnvnk+A/8bg/Ww1n3qxzfifhsWfpUKlDnwrtH+ftt+5rZeEkf37XA +Py7ZjzecAF9SDV6WSiPeAxUX2+hNdId42Pf45woo4LFGUlQeagCFkD/R0dpNIMGw +cnkKCUikiBqr2ijSIgvRtBfZ9fBGjFGER2uE/Eay4AgcQsHue8skRwDCng8OnqtP +nBtTytmqTy9V/BRgsVKUoksm6wsxkUYwgHeaq7UCvlCm25SZ7yRyd4k8t0BKDf2h ++wIDAQAB +-----END PUBLIC KEY----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/root.crt b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/root.crt deleted file mode 100644 index 46b1e2a7a..000000000 --- a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/root.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDiDCCAnACCQCCsPcIlZO4TDANBgkqhkiG9w0BAQsFADCBhTELMAkGA1UEBhMC -Q0ExGTAXBgNVBAgMEEJyaXRpc2ggQ29sdW1iaWExDjAMBgNVBAcMBUNvbW94MRQw -EgYDVQQKDAtUaGVCcmFpbi5jYTEUMBIGA1UEAwwLdGhlYnJhaW4uY2ExHzAdBgkq -hkiG9w0BCQEWEGluZm9AdGhlYnJhaW4uY2EwHhcNMjEwMTEzMDkwNDIyWhcNMzEw -MTExMDkwNDIyWjCBhTELMAkGA1UEBhMCQ0ExGTAXBgNVBAgMEEJyaXRpc2ggQ29s -dW1iaWExDjAMBgNVBAcMBUNvbW94MRQwEgYDVQQKDAtUaGVCcmFpbi5jYTEUMBIG -A1UEAwwLdGhlYnJhaW4uY2ExHzAdBgkqhkiG9w0BCQEWEGluZm9AdGhlYnJhaW4u -Y2EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC2YWuwplM2Hc5tzBMu -covW9nwZ8iNEFo5pbDc8710pmnkF+wsDztLy4afJe6OeVHyCgQxmE+rTZcoWbvoh -pxW3Zy/8es4My07RKHqI3NYadThUvDsmI10cF3tJbhOZaIrMaExLGookZYKwbNAy -7yJ1+MLyNCuFFsaOiNNxHOjH/InKSzEuGSLV68tdC7Pe+uanBcC7RKhOrjUC6Occ -naHPC+a/YMyRYx29T8CfkCBB7N6WanWylFN/1RBmAgq++kDflSaF9k+Zdl6I4jiF -mCPGS0k+AMre4PuAKOZOZOwhF0sWlXIxH6zPm9w0bSYdTLBupL846RTO72NtNP+X -KX5DAgMBAAEwDQYJKoZIhvcNAQELBQADggEBACXXFws+h+Zo9HsxW3BWpl2JU5u6 -KyfbLQt4kSN/gqltd4s84Q8c4z2jNdI0t8Oh5dXTjbLCpFjzuF2tdMtOWeYBCdsQ -4NJ69RrwkFdsSPxDPhSE0WGXPaOBaA92wJjTkVf+UYIek1ozeyWwFm1LPiZVei00 -mwDVgbAbIEb8cf6OqJrl2r5PMBCLWBwwg5aca3fe6TopJhyPA//DZDRPA5xzKb9e -PHUgF3apbcWxuxm8Mts4bAq8BcKoEvLHYWJ4fEWQvXPP7q1jYC3TkpSt5n3FQZTe -nLyQ+RNzsEHzmyOtTSa0Q+5KVluO1TE3ifpv8737pTLdY8t2waBamoboCu8= ------END CERTIFICATE----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/root.srl b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/root.srl deleted file mode 100644 index cf7e9e551..000000000 --- a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/root.srl +++ /dev/null @@ -1 +0,0 @@ -BB275A0F903CFBBD diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server-cert.pem b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server-cert.pem new file mode 100644 index 000000000..a2f9688df --- /dev/null +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server-cert.pem @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDBDCCAeygAwIBAgIBAjANBgkqhkiG9w0BAQsFADA8MTowOAYDVQQDDDFNeVNR +TF9TZXJ2ZXJfOC4wLjE5X0F1dG9fR2VuZXJhdGVkX0NBX0NlcnRpZmljYXRlMB4X +DTIwMDYxMTAzMzg0NloXDTMwMDYwOTAzMzg0NlowQDE+MDwGA1UEAww1TXlTUUxf +U2VydmVyXzguMC4xOV9BdXRvX0dlbmVyYXRlZF9TZXJ2ZXJfQ2VydGlmaWNhdGUw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCcEnEm5hqP1EbEJycOz8Ua +NWp29QdpFUzTWhkKGhVXk+0msmNTw4NBAFB42moY44OU8wvDideOlJNhPRWveD8z +G2lxzJA91p0UK4et8ia9MmeuCGhdC9jxJ8X69WNlUiPyy0hI/ZsqRq9Z0C2eW0iL +JPXsy4X8Xpw3SFwoXf5pR9RFY5Pb2tuyxqmSestu2VXT/NQjJg4CVDR3mFcHPXZB +4elRzH0WshExEGkgy0bg20MJeRc2Qdb5Xx+EakbmwroDWaCn3NSGqQ7jv6Vw0doy +TGvS6h6RHBxnyqRfRgKGlCoOMG9/5+rFJC00QpCUG2vHXHWGoWlMlJ3foN7rj5v9 +AgMBAAGjDTALMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQADggEBAJ5zt2rj4Ag6 +zpN59AWC1Fur8g8l41ksHkSpKPp+PtyO/ngvbMqBpfmK1e7JCKZv/68QXfMyWWAI +hwalqZkXXWHKjuz3wE7dE25PXFXtGJtcZAaj10xt98fzdqt8lQSwh2kbfNwZIz1F 
+sgAStgE7+ZTcqTgvNB76Os1UK0to+/P0VBWktaVFdyub4Nc2SdPVnZNvrRBXBwOD +3V8ViwywDOFoE7DvCvwx/SVsvoC0Z4j3AMMovO6oHicP7uU83qsQgm1Qru3YeoLR ++DoVi7IPHbWvN7MqFYn3YjNlByO2geblY7MR0BlqbFlmFrqLsUfjsh2ys7/U/knC +dN/klu446fI= +-----END CERTIFICATE----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server-key.pem b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server-key.pem new file mode 100644 index 000000000..a1dfd5f78 --- /dev/null +++ b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAnBJxJuYaj9RGxCcnDs/FGjVqdvUHaRVM01oZChoVV5PtJrJj +U8ODQQBQeNpqGOODlPMLw4nXjpSTYT0Vr3g/MxtpccyQPdadFCuHrfImvTJnrgho +XQvY8SfF+vVjZVIj8stISP2bKkavWdAtnltIiyT17MuF/F6cN0hcKF3+aUfURWOT +29rbssapknrLbtlV0/zUIyYOAlQ0d5hXBz12QeHpUcx9FrIRMRBpIMtG4NtDCXkX +NkHW+V8fhGpG5sK6A1mgp9zUhqkO47+lcNHaMkxr0uoekRwcZ8qkX0YChpQqDjBv +f+fqxSQtNEKQlBtrx1x1hqFpTJSd36De64+b/QIDAQABAoIBAFiah66Dt9SruLkn +WR8piUaFyLlcBib8Nq9OWSTJBhDAJERxxb4KIvvGB+l0ZgNXNp5bFPSfzsZdRwZP +PX5uj8Kd71Dxx3mz211WESMJdEC42u+MSmN4lGLkJ5t/sDwXU91E1vbJM0ve8THV +4/Ag9qA4DX2vVZOeyqT/6YHpSsPNZplqzrbAiwrfHwkctHfgqwOf3QLfhmVQgfCS +VwidBldEUv2whSIiIxh4Rv5St4kA68IBCbJxdpOpyuQBkk6CkxZ7VN9FqOuSd4Pk +Wm7iWyBMZsCmELZh5XAXld4BEt87C5R4CvbPBDZxAv3THk1DNNvpy3PFQfwARRFb +SAToYMECgYEAyL7U8yxpzHDYWd3oCx6vTi9p9N/z0FfAkWrRF6dm4UcSklNiT1Aq +EOnTA+SaW8tV3E64gCWcY23gNP8so/ZseWj6L+peHwtchaP9+KB7yGw2A+05+lOx +VetLTjAOmfpiUXFe5w1q4C1RGhLjZjjzW+GvwdAuchQgUEFaomrV+PUCgYEAxwfH +cmVGFbAktcjU4HSRjKSfawCrut+3YUOLybyku3Q/hP9amG8qkVTFe95CTLjLe2D0 +ccaTTpofFEJ32COeck0g0Ujn/qQ+KXRoauOYs4FB1DtqMpqB78wufWEUpDpbd9/h +J+gJdC/IADd4tJW9zA92g8IA7ZtFmqDtiSpQ0ekCgYAQGkaorvJZpN+l7cf0RGTZ +h7IfI2vCVZer0n6tQA9fmLzjoe6r4AlPzAHSOR8sp9XeUy43kUzHKQQoHCPvjw/K +eWJAP7OHF/k2+x2fOPhU7mEy1W+mJdp+wt4Kio5RSaVjVQ3AyPG+w8PSrJszEvRq +dWMMz+851WV2KpfjmWBKlQKBgQC++4j4DZQV5aMkSKV1CIZOBf3vaIJhXKEUFQPD +PmB4fBEjpwCg+zNGp6iktt65zi17o8qMjrb1mtCt2SY04eD932LZUHNFlwcLMmes +Ad+aiDLJ24WJL1f16eDGcOyktlblDZB5gZ/ovJzXEGOkLXglosTfo77OQculmDy2 +/UL2WQKBgGeKasmGNfiYAcWio+KXgFkHXWtAXB9B91B1OFnCa40wx+qnl71MIWQH +PQ/CZFNWOfGiNEJIZjrHsfNJoeXkhq48oKcT0AVCDYyLV0VxDO4ejT95mGW6njNd +JpvmhwwAjOvuWVr0tn4iXlSK8irjlJHmwcRjLTJq97vE9fsA2MjI +-----END RSA PRIVATE KEY----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server.crt b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server.crt deleted file mode 100644 index 46b1e2a7a..000000000 --- a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDiDCCAnACCQCCsPcIlZO4TDANBgkqhkiG9w0BAQsFADCBhTELMAkGA1UEBhMC -Q0ExGTAXBgNVBAgMEEJyaXRpc2ggQ29sdW1iaWExDjAMBgNVBAcMBUNvbW94MRQw -EgYDVQQKDAtUaGVCcmFpbi5jYTEUMBIGA1UEAwwLdGhlYnJhaW4uY2ExHzAdBgkq -hkiG9w0BCQEWEGluZm9AdGhlYnJhaW4uY2EwHhcNMjEwMTEzMDkwNDIyWhcNMzEw -MTExMDkwNDIyWjCBhTELMAkGA1UEBhMCQ0ExGTAXBgNVBAgMEEJyaXRpc2ggQ29s -dW1iaWExDjAMBgNVBAcMBUNvbW94MRQwEgYDVQQKDAtUaGVCcmFpbi5jYTEUMBIG -A1UEAwwLdGhlYnJhaW4uY2ExHzAdBgkqhkiG9w0BCQEWEGluZm9AdGhlYnJhaW4u -Y2EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC2YWuwplM2Hc5tzBMu -covW9nwZ8iNEFo5pbDc8710pmnkF+wsDztLy4afJe6OeVHyCgQxmE+rTZcoWbvoh -pxW3Zy/8es4My07RKHqI3NYadThUvDsmI10cF3tJbhOZaIrMaExLGookZYKwbNAy -7yJ1+MLyNCuFFsaOiNNxHOjH/InKSzEuGSLV68tdC7Pe+uanBcC7RKhOrjUC6Occ -naHPC+a/YMyRYx29T8CfkCBB7N6WanWylFN/1RBmAgq++kDflSaF9k+Zdl6I4jiF -mCPGS0k+AMre4PuAKOZOZOwhF0sWlXIxH6zPm9w0bSYdTLBupL846RTO72NtNP+X -KX5DAgMBAAEwDQYJKoZIhvcNAQELBQADggEBACXXFws+h+Zo9HsxW3BWpl2JU5u6 
-KyfbLQt4kSN/gqltd4s84Q8c4z2jNdI0t8Oh5dXTjbLCpFjzuF2tdMtOWeYBCdsQ -4NJ69RrwkFdsSPxDPhSE0WGXPaOBaA92wJjTkVf+UYIek1ozeyWwFm1LPiZVei00 -mwDVgbAbIEb8cf6OqJrl2r5PMBCLWBwwg5aca3fe6TopJhyPA//DZDRPA5xzKb9e -PHUgF3apbcWxuxm8Mts4bAq8BcKoEvLHYWJ4fEWQvXPP7q1jYC3TkpSt5n3FQZTe -nLyQ+RNzsEHzmyOtTSa0Q+5KVluO1TE3ifpv8737pTLdY8t2waBamoboCu8= ------END CERTIFICATE----- diff --git a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server.key b/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server.key deleted file mode 100644 index 8bd131632..000000000 --- a/apps/emqx_auth_pgsql/test/emqx_auth_pgsql_SUITE_data/server.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAtmFrsKZTNh3ObcwTLnKL1vZ8GfIjRBaOaWw3PO9dKZp5BfsL -A87S8uGnyXujnlR8goEMZhPq02XKFm76IacVt2cv/HrODMtO0Sh6iNzWGnU4VLw7 -JiNdHBd7SW4TmWiKzGhMSxqKJGWCsGzQMu8idfjC8jQrhRbGjojTcRzox/yJyksx -Lhki1evLXQuz3vrmpwXAu0SoTq41AujnHJ2hzwvmv2DMkWMdvU/An5AgQezelmp1 -spRTf9UQZgIKvvpA35UmhfZPmXZeiOI4hZgjxktJPgDK3uD7gCjmTmTsIRdLFpVy -MR+sz5vcNG0mHUywbqS/OOkUzu9jbTT/lyl+QwIDAQABAoIBAA6UVR6G/UnrMhBW -6wWghItHov4T/Du6LeJBk1zcqa7kuV4ABo5kXzqpTVdu+dJzYIyyMkKKvw/tKC2I -65f7GmJR7mUZkBU3v3I68Si1tqvgyQMFFRlkZFIVknZ5RTnTQJ08jTTHx1lHgB4I -ZNBdi3ywySzBfOUjv/Wu/HAjZnxuEh2guBpRMZdwQwZLXr2koDa5inL3IwJrA4Ir -QzpZ0y6ql3A0tw7jAw36G1AKyyz74aFwJ0I8U8w+2Uk4iX5hcKGA8mFq4lyO4/3+ -7W2Z4V8cQzwMq2SMixI0Omxlc2BJUi9j17Ey//5dAXyPaG8QI1kzeL/3Gbs8YBMq -ekN8AZECgYEA5YxcFIVv3yO+ARNWUHovrsMuf9ElhyRuZd0I2+vjrq1b9zQsSy2d -PsyYWD17lO/GDmpTzZOdVsYtZHi+EiXmQnkzLJ4m2nlc7W4annWlbzlQMEn6vAji -l9bSHJXXiiIB7X/oHpDUdsnJp/uyAJppmnVLbSBboNCrG4Mf5cJqOnsCgYEAy2We -scp19h4UEKAU0Yh+5jh8W4VVtlISkH64vMgz/JZWXMPt1bM5C/5j+3UVUL5VmFqF -J1g0gXYkTGTL0+entb3SUiL42zrp3rZ3GgMU6V+aktq3dmri5bOifzihuLHLgjO5 -u/MJPBzvFxIiJxnNBybNLijIZfPm+9roUfpcBNkCgYBGE3Zc0WuYnEm5/FRCVzrN -SEqevJOPUSDeuf6lXLryLXxA2E2ZWcCCVmU/su1SR2yYI/+XZ7QFtJRQ8sdbtPQ5 -YNStj05fLeOfnBhGPbYWYVHInB0OYEwEfJFCJsBZLA6YmY6cHiyuYuXMAXuS0ZDh -lWNEWjd+vZUu3fXT52kUlwKBgDgq/eH3GRA4Si41JsqeOPz2iFD1xy+sBnhkpjtr -xf9wvLStXpZvAcfwHkgokxRTG2wRQ0gUMZu2tltqUmdYR5YGr3gDNFnGMSNRnB5Q -z4uK3TLEt3k6FyJ7stoTF4Xbg2mXQylF+jzheJ0UYt4NX/MjofGnTX/qFNVkJFfP -HW4xAoGBAMBb9cXTpzOMiMcSdQRlaLttV1p05pqxTgQNEQD8HB+lkx4AGnnHvtxW -XQJvPumtqdCEpfe4kaqLip8T+67sGfcDVQMogJc/tpvZ0AN4FuViFsf/YDuTPXEp -whMldPHtusbRP2fk/JFq4Ak0Xz2wAI1iMD3qfBeW6eJpvRllUo69 ------END RSA PRIVATE KEY----- diff --git a/apps/emqx_auth_redis/etc/emqx_auth_redis.conf b/apps/emqx_auth_redis/etc/emqx_auth_redis.conf index f0145b9be..62a6e4fe1 100644 --- a/apps/emqx_auth_redis/etc/emqx_auth_redis.conf +++ b/apps/emqx_auth_redis/etc/emqx_auth_redis.conf @@ -115,3 +115,17 @@ auth.redis.acl_cmd = "HGETALL mqtt_acl:%u" ## Value: File # auth.redis.ssl.keyfile = path/to/your/keyfile +## In mode verify_none the default behavior is to allow all x509-path +## validation errors. +## +## Value: true | false +#auth.redis.ssl.verify = false + +## If not specified, the server's names returned in server's certificate is validated against +## what's provided `auth.redis.server` config's host part. +## Setting to 'disable' will make EMQ X ignore unmatched server names. +## If set with a host name, the server's names returned in server's certificate is validated +## against this value. 
+## +## Value: String | disable +## auth.redis.ssl.server_name_indication = disable \ No newline at end of file diff --git a/apps/emqx_auth_redis/priv/emqx_auth_redis.schema b/apps/emqx_auth_redis/priv/emqx_auth_redis.schema index 0ad9e441c..cd6e37d10 100644 --- a/apps/emqx_auth_redis/priv/emqx_auth_redis.schema +++ b/apps/emqx_auth_redis/priv/emqx_auth_redis.schema @@ -50,6 +50,30 @@ {datatype, string} ]}. +{mapping, "auth.redis.ssl.verify", "emqx_auth_redis.options", [ + {default, false}, + {datatype, {enum, [true, false]}} +]}. + +{mapping, "auth.redis.ssl.server_name_indication", "emqx_auth_redis.options", [ + {datatype, string} +]}. + +%% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 +{mapping, "auth.redis.cafile", "emqx_auth_redis.options", [ + {datatype, string} +]}. + +%% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 +{mapping, "auth.redis.certfile", "emqx_auth_redis.options", [ + {datatype, string} +]}. + +%% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 +{mapping, "auth.redis.keyfile", "emqx_auth_redis.options", [ + {datatype, string} +]}. + {translation, "emqx_auth_redis.options", fun(Conf) -> Ssl = cuttlefish:conf_get("auth.redis.ssl.enable", Conf, false), Filter = fun(Opts) -> [{K, V} || {K, V} <- Opts, V =/= undefined] end, @@ -58,7 +82,7 @@ %% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 CA = cuttlefish:conf_get( "auth.redis.ssl.cacertfile", Conf, - cuttlefish:conf_get("auth.redis.cacertfile", Conf, undefined) + cuttlefish:conf_get("auth.redis.cafile", Conf, undefined) ), Cert = cuttlefish:conf_get( "auth.redis.ssl.certfile", Conf, @@ -68,10 +92,21 @@ "auth.redis.ssl.keyfile", Conf, cuttlefish:conf_get("auth.redis.keyfile", Conf, undefined) ), + Verify = case cuttlefish:conf_get("auth.redis.ssl.verify", Conf, false) of + true -> verify_peer; + flase -> verify_none + end, + SNI = case cuttlefish:conf_get("auth.redis.ssl.server_name_indication", Conf, undefined) of + "disable" -> disable; + SNI0 -> SNI0 + end, [{options, [{ssl_options, Filter([{cacertfile, CA}, {certfile, Cert}, - {keyfile, Key}]) + {keyfile, Key}, + {verify, Verify}, + {server_name_indication, SNI} + ]) }]}]; _ -> [{options, []}] end diff --git a/apps/emqx_auth_redis/test/emqx_auth_redis_SUITE.erl b/apps/emqx_auth_redis/test/emqx_auth_redis_SUITE.erl index a20f0a2e9..c8551c2ad 100644 --- a/apps/emqx_auth_redis/test/emqx_auth_redis_SUITE.erl +++ b/apps/emqx_auth_redis/test/emqx_auth_redis_SUITE.erl @@ -49,22 +49,18 @@ all() -> emqx_ct:all(?MODULE). init_per_suite(Cfg) -> - emqx_ct_helpers:start_apps([emqx_modules, emqx_auth_redis], fun set_special_configs/1), + emqx_ct_helpers:start_apps([emqx_auth_redis], fun set_special_configs/1), init_redis_rows(), Cfg. end_per_suite(_Cfg) -> deinit_redis_rows(), - emqx_ct_helpers:stop_apps([emqx_auth_redis, emqx_modules]). + emqx_ct_helpers:stop_apps([emqx_auth_redis]). set_special_configs(emqx) -> application:set_env(emqx, allow_anonymous, false), application:set_env(emqx, acl_nomatch, deny), - application:set_env(emqx, acl_file, - emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/acl.conf")), - application:set_env(emqx, enable_acl_cache, false), - application:set_env(emqx, plugins_loaded_file, - emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins")); + application:set_env(emqx, enable_acl_cache, false); set_special_configs(_App) -> ok. 
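The redis schema keeps the old 4.0-4.2 keys (`auth.redis.cafile`, `certfile`, `keyfile`) as fallbacks so legacy configs still parse, and, like the other translations, it filters out any option that ends up `undefined` before handing the list to the driver. The filtering idiom, shown standalone with placeholder paths:

```
%% The shared "drop unset options" idiom: any option whose value is still
%% undefined is removed before the list reaches the driver.
Filter = fun(Opts) -> [{K, V} || {K, V} <- Opts, V =/= undefined] end,
[{cacertfile, "/path/ca.pem"}, {verify, verify_peer}] =
    Filter([{cacertfile, "/path/ca.pem"},
            {certfile, undefined},
            {keyfile, undefined},
            {verify, verify_peer}]).
```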
@@ -72,7 +68,6 @@ init_redis_rows() -> %% Users [q(["HMSET", Key|FiledValue]) || {Key, FiledValue} <- ?INIT_AUTH], %% ACLs - emqx_modules:load_module(emqx_mod_acl_internal, false), Result = [q(["HSET", Key, Filed, Value]) || {Key, Filed, Value} <- ?INIT_ACL], ct:pal("redis init result: ~p~n", [Result]). @@ -136,7 +131,7 @@ t_check_acl(_) -> allow = emqx_access_control:check_acl(User2, subscribe, <<"topic2">>), allow = emqx_access_control:check_acl(User3, publish, <<"topic3">>), allow = emqx_access_control:check_acl(User3, subscribe, <<"topic3">>), - allow = emqx_access_control:check_acl(User4, publish, <<"a/b/c">>). + deny = emqx_access_control:check_acl(User4, publish, <<"a/b/c">>). t_acl_super(_) -> reload([{password_hash, plain}]), diff --git a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_actions.erl b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_actions.erl index cbd7f28ed..8f5ad16ef 100644 --- a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_actions.erl +++ b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_actions.erl @@ -567,8 +567,7 @@ options(Options, PoolName, ResId) -> maybe_ssl(_Options, false, _ResId) -> []; maybe_ssl(Options, true, ResId) -> - Dir = filename:join([emqx:get_env(data_dir), "rule", ResId]), - [{ssl, true}, {ssl_opts, emqx_plugin_libs_ssl:save_files_return_opts(Options, Dir)}]. + [{ssl, true}, {ssl_opts, emqx_plugin_libs_ssl:save_files_return_opts(Options, "rules", ResId)}]. mqtt_ver(ProtoVer) -> case ProtoVer of diff --git a/apps/emqx_coap/README.md b/apps/emqx_coap/README.md index 2da7b9fca..927666358 100644 --- a/apps/emqx_coap/README.md +++ b/apps/emqx_coap/README.md @@ -151,8 +151,9 @@ To subscribe any topic, issue following command: - if clientid is absent, a "bad_request" will be returned. - {topicname} in URI should be percent-encoded to prevent special characters, such as + and #. - {username} and {password} are optional. -- if {username} and {password} are not correct, an uauthorized error will be returned. +- if {username} or {password} is incorrect, the error code `uauthorized` will be returned. - topic is subscribed with qos1. +- if the subscription failed due to ACL deny, the error code `forbidden` will be returned. CoAP Client Unobserve Operation (unsubscribe topic) --------------------------------------------------- @@ -168,7 +169,7 @@ To cancel observation, issue following command: - if clientid is absent, a "bad_request" will be returned. - {topicname} in URI should be percent-encoded to prevent special characters, such as + and #. - {username} and {password} are optional. -- if {username} and {password} are not correct, an uauthorized error will be returned. +- if {username} or {password} is incorrect, the error code `uauthorized` will be returned. CoAP Client Notification Operation (subscribed Message) ------------------------------------------------------- @@ -179,7 +180,7 @@ Server will issue an observe-notification as a subscribed message. CoAP Client Publish Operation ----------------------------- -Issue a coap put command to do publishment. For example: +Issue a coap put command to publish messages. For example: ``` PUT coap://localhost/mqtt/{topicname}?c={clientid}&u={username}&p={password} @@ -191,10 +192,11 @@ Issue a coap put command to do publishment. For example: - if clientid is absent, a "bad_request" will be returned. - {topicname} in URI should be percent-encoded to prevent special characters, such as + and #. - {username} and {password} are optional. -- if {username} and {password} are not correct, an uauthorized error will be returned. 
+- if {username} or {password} is incorrect, the error code `uauthorized` will be returned. - payload could be any binary data. - payload data type is "application/octet-stream". - publish message will be sent with qos0. +- if the publishing failed due to ACL deny, the error code `forbidden` will be returned. CoAP Client Keep Alive ---------------------- @@ -209,7 +211,7 @@ Device should issue a get command periodically, serve as a ping to keep mqtt ses - {any_topicname} is optional, and should be percent-encoded to prevent special characters. - {clientid} is mandatory. If clientid is absent, a "bad_request" will be returned. - {username} and {password} are optional. -- if {username} and {password} are not correct, an uauthorized error will be returned. +- if {username} or {password} is incorrect, the error code `uauthorized` will be returned. - coap client should do keepalive work periodically to keep mqtt session online, especially those devices in a NAT network. diff --git a/apps/emqx_coap/TODO b/apps/emqx_coap/TODO index 2af129d6c..a0a1c2aaf 100644 --- a/apps/emqx_coap/TODO +++ b/apps/emqx_coap/TODO @@ -2,7 +2,7 @@ - Enhance all test case 2. Remove the mqtt adaptor -3. Remove the emqx_coap_ps_topics.erl +3. Remove the emqx_coap_pubsub_topics.erl ### Problems diff --git a/apps/emqx_coap/src/emqx_coap_app.erl b/apps/emqx_coap/src/emqx_coap_app.erl index 4e7655a74..029d78071 100644 --- a/apps/emqx_coap/src/emqx_coap_app.erl +++ b/apps/emqx_coap/src/emqx_coap_app.erl @@ -29,12 +29,12 @@ start(_Type, _Args) -> {ok, Sup} = emqx_coap_sup:start_link(), coap_server_registry:add_handler([<<"mqtt">>], emqx_coap_resource, undefined), - coap_server_registry:add_handler([<<"ps">>], emqx_coap_ps_resource, undefined), - _ = emqx_coap_ps_topics:start_link(), + coap_server_registry:add_handler([<<"ps">>], emqx_coap_pubsub_resource, undefined), + _ = emqx_coap_pubsub_topics:start_link(), emqx_coap_server:start(application:get_all_env(?APP)), {ok,Sup}. stop(_State) -> coap_server_registry:remove_handler([<<"mqtt">>], emqx_coap_resource, undefined), - coap_server_registry:remove_handler([<<"ps">>], emqx_coap_ps_resource, undefined), + coap_server_registry:remove_handler([<<"ps">>], emqx_coap_pubsub_resource, undefined), emqx_coap_server:stop(application:get_all_env(?APP)). 
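As the README changes above describe, a publish rejected by the ACL check now surfaces to the CoAP client as a `forbidden` error instead of being silently dropped. A minimal client-side sketch in the style of the project's test suites; the topic, client id and credentials are placeholders, and `#coap_content{}` is the record from the bundled CoAP library header used by those suites:

```
%% Sketch only: publish over CoAP and handle the new ACL-deny outcome.
URI = "coap://127.0.0.1/mqtt/sensors%2Ftemp?c=client1&u=tom&p=secret",
Content = #coap_content{format = <<"application/octet-stream">>,
                        payload = <<"23.5">>},
case er_coap_client:request(put, URI, Content) of
    {ok, _Code, _Reply} -> published;       %% accepted, delivered with qos0
    {error, forbidden}  -> acl_denied;      %% rejected by the ACL check
    {error, Other}      -> {failed, Other}  %% e.g. bad_request or auth errors
end.
```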
diff --git a/apps/emqx_coap/src/emqx_coap_mqtt_adapter.erl b/apps/emqx_coap/src/emqx_coap_mqtt_adapter.erl index 537f5137b..ac981b87c 100644 --- a/apps/emqx_coap/src/emqx_coap_mqtt_adapter.erl +++ b/apps/emqx_coap/src/emqx_coap_mqtt_adapter.erl @@ -133,8 +133,8 @@ init({ClientId, Username, Password, Channel}) -> handle_call({subscribe, Topic, CoapPid}, _From, State=#state{sub_topics = TopicList}) -> NewTopics = proplists:delete(Topic, TopicList), IsWild = emqx_topic:wildcard(Topic), - chann_subscribe(Topic, State), - {reply, ok, State#state{sub_topics = [{Topic, {IsWild, CoapPid}}|NewTopics]}, hibernate}; + {reply, chann_subscribe(Topic, State), State#state{sub_topics = + [{Topic, {IsWild, CoapPid}}|NewTopics]}, hibernate}; handle_call({unsubscribe, Topic, _CoapPid}, _From, State=#state{sub_topics = TopicList}) -> NewTopics = proplists:delete(Topic, TopicList), @@ -142,8 +142,7 @@ handle_call({unsubscribe, Topic, _CoapPid}, _From, State=#state{sub_topics = Top {reply, ok, State#state{sub_topics = NewTopics}, hibernate}; handle_call({publish, Topic, Payload}, _From, State) -> - _ = chann_publish(Topic, Payload, State), - {reply, ok, State}; + {reply, chann_publish(Topic, Payload, State), State}; handle_call(info, _From, State) -> {reply, info(State), State}; @@ -221,10 +220,12 @@ chann_subscribe(Topic, State = #state{clientid = ClientId}) -> case emqx_access_control:check_acl(clientinfo(State), subscribe, Topic) of allow -> emqx_broker:subscribe(Topic, ClientId, ?SUBOPTS), - emqx_hooks:run('session.subscribed', [clientinfo(State), Topic, ?SUBOPTS]); + emqx_hooks:run('session.subscribed', [clientinfo(State), Topic, ?SUBOPTS]), + ok; deny -> ?LOG(warning, "subscribe to ~p by clientid ~p failed due to acl check.", - [Topic, ClientId]) + [Topic, ClientId]), + {error, forbidden} end. chann_unsubscribe(Topic, State) -> @@ -237,12 +238,14 @@ chann_publish(Topic, Payload, State = #state{clientid = ClientId}) -> ?LOG(debug, "publish Topic=~p, Payload=~p", [Topic, Payload]), case emqx_access_control:check_acl(clientinfo(State), publish, Topic) of allow -> - emqx_broker:publish( - emqx_message:set_flag(retain, false, - emqx_message:make(ClientId, ?QOS_0, Topic, Payload))); + _ = emqx_broker:publish( + emqx_message:set_flag(retain, false, + emqx_message:make(ClientId, ?QOS_0, Topic, Payload))), + ok; deny -> ?LOG(warning, "publish to ~p by clientid ~p failed due to acl check.", - [Topic, ClientId]) + [Topic, ClientId]), + {error, forbidden} end. diff --git a/apps/emqx_coap/src/emqx_coap_ps_resource.erl b/apps/emqx_coap/src/emqx_coap_pubsub_resource.erl similarity index 86% rename from apps/emqx_coap/src/emqx_coap_ps_resource.erl rename to apps/emqx_coap/src/emqx_coap_pubsub_resource.erl index b2169521a..da066bb36 100644 --- a/apps/emqx_coap/src/emqx_coap_ps_resource.erl +++ b/apps/emqx_coap/src/emqx_coap_pubsub_resource.erl @@ -14,7 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_coap_ps_resource). +-module(emqx_coap_pubsub_resource). -behaviour(coap_resource). 
@@ -112,14 +112,16 @@ coap_observe(ChId, ?PS_PREFIX, TopicPath, Ack, Content) when TopicPath =/= [] -> Topic = topic(TopicPath), ?LOG(debug, "observe Topic=~p, Ack=~p,Content=~p", [Topic, Ack, Content]), Pid = get(mqtt_client_pid), - emqx_coap_mqtt_adapter:subscribe(Pid, Topic), - Code = case emqx_coap_ps_topics:is_topic_timeout(Topic) of - true -> - nocontent; - false-> - content - end, - {ok, {state, ChId, ?PS_PREFIX, [Topic]}, Code, Content}; + case emqx_coap_mqtt_adapter:subscribe(Pid, Topic) of + ok -> + Code = case emqx_coap_pubsub_topics:is_topic_timeout(Topic) of + true -> nocontent; + false-> content + end, + {ok, {state, ChId, ?PS_PREFIX, [Topic]}, Code, Content}; + {error, Code} -> + {error, Code} + end; coap_observe(ChId, Prefix, TopicPath, Ack, _Content) -> ?LOG(error, "unknown observe request ChId=~p, Prefix=~p, TopicPath=~p, Ack=~p", [ChId, Prefix, TopicPath, Ack]), @@ -137,7 +139,7 @@ coap_unobserve({state, ChId, Prefix, TopicPath}) -> handle_info({dispatch, Topic, Payload}, State) -> ?LOG(debug, "dispatch Topic=~p, Payload=~p", [Topic, Payload]), - {ok, Ret} = emqx_coap_ps_topics:reset_topic_info(Topic, Payload), + {ok, Ret} = emqx_coap_pubsub_topics:reset_topic_info(Topic, Payload), ?LOG(debug, "Updated publish info of topic=~p, the Ret is ~p", [Topic, Ret]), {notify, [], #coap_content{format = <<"application/octet-stream">>, payload = Payload}, State}; handle_info(Message, State) -> @@ -166,7 +168,7 @@ get_auth([Param|T], Auth=#coap_mqtt_auth{}) -> get_auth(T, Auth). add_topic_info(publish, Topic, MaxAge, Format, Payload) when is_binary(Topic), Topic =/= <<>> -> - case emqx_coap_ps_topics:lookup_topic_info(Topic) of + case emqx_coap_pubsub_topics:lookup_topic_info(Topic) of [{_, StoredMaxAge, StoredCT, _, _}] -> ?LOG(debug, "publish topic=~p already exists, need reset the topic info", [Topic]), %% check whether the ct value stored matches the ct option in this POST message @@ -175,9 +177,9 @@ add_topic_info(publish, Topic, MaxAge, Format, Payload) when is_binary(Topic), T {ok, Ret} = case StoredMaxAge =:= MaxAge of true -> - emqx_coap_ps_topics:reset_topic_info(Topic, Payload); + emqx_coap_pubsub_topics:reset_topic_info(Topic, Payload); false -> - emqx_coap_ps_topics:reset_topic_info(Topic, MaxAge, Payload) + emqx_coap_pubsub_topics:reset_topic_info(Topic, MaxAge, Payload) end, {changed, Ret}; false -> @@ -186,19 +188,19 @@ add_topic_info(publish, Topic, MaxAge, Format, Payload) when is_binary(Topic), T end; [] -> ?LOG(debug, "publish topic=~p will be created", [Topic]), - {ok, Ret} = emqx_coap_ps_topics:add_topic_info(Topic, MaxAge, Format, Payload), + {ok, Ret} = emqx_coap_pubsub_topics:add_topic_info(Topic, MaxAge, Format, Payload), {created, Ret} end; add_topic_info(create, Topic, MaxAge, Format, _Payload) when is_binary(Topic), Topic =/= <<>> -> - case emqx_coap_ps_topics:is_topic_existed(Topic) of + case emqx_coap_pubsub_topics:is_topic_existed(Topic) of true -> %% Whether we should support CREATE to an existed topic is TBD!! 
?LOG(debug, "create topic=~p already exists, need reset the topic info", [Topic]), - {ok, Ret} = emqx_coap_ps_topics:reset_topic_info(Topic, MaxAge, Format, <<>>); + {ok, Ret} = emqx_coap_pubsub_topics:reset_topic_info(Topic, MaxAge, Format, <<>>); false -> ?LOG(debug, "create topic=~p will be created", [Topic]), - {ok, Ret} = emqx_coap_ps_topics:add_topic_info(Topic, MaxAge, Format, <<>>) + {ok, Ret} = emqx_coap_pubsub_topics:add_topic_info(Topic, MaxAge, Format, <<>>) end, {created, Ret}; @@ -222,17 +224,19 @@ format_string_to_int(<<"application/json">>) -> handle_received_publish(Topic, MaxAge, Format, Payload) -> case add_topic_info(publish, Topic, MaxAge, format_string_to_int(Format), Payload) of - {Ret ,true} -> + {Ret, true} -> Pid = get(mqtt_client_pid), - emqx_coap_mqtt_adapter:publish(Pid, topic(Topic), Payload), - Content = case Ret of - changed -> - #coap_content{}; - created -> - LocPath = concatenate_location_path([<<"ps">>, Topic, <<>>]), - #coap_content{location_path = [LocPath]} - end, - {ok, Ret, Content}; + case emqx_coap_mqtt_adapter:publish(Pid, topic(Topic), Payload) of + ok -> + {ok, Ret, case Ret of + changed -> #coap_content{}; + created -> + #coap_content{location_path = [ + concatenate_location_path([<<"ps">>, Topic, <<>>])]} + end}; + {error, Code} -> + {error, Code} + end; {_, false} -> ?LOG(debug, "add_topic_info failed, will return bad_request", []), {error, bad_request} @@ -275,7 +279,7 @@ return_resource(Topic, Payload, MaxAge, TimeStamp, Content) -> read_last_publish_message(false, Topic, Content=#coap_content{format = QueryFormat}) when is_binary(QueryFormat)-> ?LOG(debug, "the QueryFormat=~p", [QueryFormat]), - case emqx_coap_ps_topics:lookup_topic_info(Topic) of + case emqx_coap_pubsub_topics:lookup_topic_info(Topic) of [] -> {error, not_found}; [{_, MaxAge, CT, Payload, TimeStamp}] -> @@ -289,7 +293,7 @@ read_last_publish_message(false, Topic, Content=#coap_content{format = QueryForm end; read_last_publish_message(false, Topic, Content) -> - case emqx_coap_ps_topics:lookup_topic_info(Topic) of + case emqx_coap_pubsub_topics:lookup_topic_info(Topic) of [] -> {error, not_found}; [{_, MaxAge, _, Payload, TimeStamp}] -> @@ -301,11 +305,11 @@ read_last_publish_message(true, Topic, _Content) -> {error, bad_request}. delete_topic_info(Topic) -> - case emqx_coap_ps_topics:lookup_topic_info(Topic) of + case emqx_coap_pubsub_topics:lookup_topic_info(Topic) of [] -> {error, not_found}; [{_, _, _, _, _}] -> - emqx_coap_ps_topics:delete_sub_topics(Topic) + emqx_coap_pubsub_topics:delete_sub_topics(Topic) end. topic(Topic) when is_binary(Topic) -> Topic; diff --git a/apps/emqx_coap/src/emqx_coap_ps_topics.erl b/apps/emqx_coap/src/emqx_coap_pubsub_topics.erl similarity index 99% rename from apps/emqx_coap/src/emqx_coap_ps_topics.erl rename to apps/emqx_coap/src/emqx_coap_pubsub_topics.erl index b4affab28..79b707e6d 100644 --- a/apps/emqx_coap/src/emqx_coap_ps_topics.erl +++ b/apps/emqx_coap/src/emqx_coap_pubsub_topics.erl @@ -14,7 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_coap_ps_topics). +-module(emqx_coap_pubsub_topics). -behaviour(gen_server). 
diff --git a/apps/emqx_coap/src/emqx_coap_resource.erl b/apps/emqx_coap/src/emqx_coap_resource.erl index e11788a04..e46317347 100644 --- a/apps/emqx_coap/src/emqx_coap_resource.erl +++ b/apps/emqx_coap/src/emqx_coap_resource.erl @@ -56,7 +56,7 @@ coap_get(ChId, ?MQTT_PREFIX, Path, Query, _Content) -> #coap_content{}; {error, auth_failure} -> put(mqtt_client_pid, undefined), - {error, unauthorized}; + {error, forbidden}; {error, bad_request} -> put(mqtt_client_pid, undefined), {error, bad_request}; @@ -74,8 +74,7 @@ coap_post(_ChId, _Prefix, _Topic, _Content) -> coap_put(_ChId, ?MQTT_PREFIX, Topic, #coap_content{payload = Payload}) when Topic =/= [] -> ?LOG(debug, "put message, Topic=~p, Payload=~p~n", [Topic, Payload]), Pid = get(mqtt_client_pid), - emqx_coap_mqtt_adapter:publish(Pid, topic(Topic), Payload), - ok; + emqx_coap_mqtt_adapter:publish(Pid, topic(Topic), Payload); coap_put(_ChId, Prefix, Topic, Content) -> ?LOG(error, "put has error, Prefix=~p, Topic=~p, Content=~p", [Prefix, Topic, Content]), {error, bad_request}. @@ -87,8 +86,10 @@ coap_observe(ChId, ?MQTT_PREFIX, Topic, Ack, Content) when Topic =/= [] -> TrueTopic = topic(Topic), ?LOG(debug, "observe Topic=~p, Ack=~p", [TrueTopic, Ack]), Pid = get(mqtt_client_pid), - emqx_coap_mqtt_adapter:subscribe(Pid, TrueTopic), - {ok, {state, ChId, ?MQTT_PREFIX, [TrueTopic]}, content, Content}; + case emqx_coap_mqtt_adapter:subscribe(Pid, TrueTopic) of + ok -> {ok, {state, ChId, ?MQTT_PREFIX, [TrueTopic]}, content, Content}; + {error, Code} -> {error, Code} + end; coap_observe(ChId, Prefix, Topic, Ack, _Content) -> ?LOG(error, "unknown observe request ChId=~p, Prefix=~p, Topic=~p, Ack=~p", [ChId, Prefix, Topic, Ack]), {error, bad_request}. diff --git a/apps/emqx_coap/src/emqx_coap_sup.erl b/apps/emqx_coap/src/emqx_coap_sup.erl index a3a0fdc53..f61604ef5 100644 --- a/apps/emqx_coap/src/emqx_coap_sup.erl +++ b/apps/emqx_coap/src/emqx_coap_sup.erl @@ -32,11 +32,11 @@ init(_Args) -> shutdown => 5000, type => worker, modules => [emqx_coap_registry]}, - PsTopics = #{id => emqx_coap_ps_topics, - start => {emqx_coap_ps_topics, start_link, []}, + PsTopics = #{id => emqx_coap_pubsub_topics, + start => {emqx_coap_pubsub_topics, start_link, []}, restart => permanent, shutdown => 5000, type => worker, - modules => [emqx_coap_ps_topics]}, + modules => [emqx_coap_pubsub_topics]}, {ok, {{one_for_all, 10, 3600}, [Registry, PsTopics]}}. diff --git a/apps/emqx_coap/test/emqx_coap_SUITE.erl b/apps/emqx_coap/test/emqx_coap_SUITE.erl index 672113e57..0faa4965c 100644 --- a/apps/emqx_coap/test/emqx_coap_SUITE.erl +++ b/apps/emqx_coap/test/emqx_coap_SUITE.erl @@ -68,6 +68,25 @@ t_publish(_Config) -> ?assert(false) end. +t_publish_acl_deny(_Config) -> + Topic = <<"abc">>, Payload = <<"123">>, + TopicStr = binary_to_list(Topic), + URI = "coap://127.0.0.1/mqtt/"++TopicStr++"?c=client1&u=tom&p=secret", + + %% Sub topic first + emqx:subscribe(Topic), + + ok = meck:new(emqx_access_control, [non_strict, passthrough, no_history]), + ok = meck:expect(emqx_access_control, check_acl, 3, deny), + Reply = er_coap_client:request(put, URI, #coap_content{format = <<"application/octet-stream">>, payload = Payload}), + ?assertEqual({error,forbidden}, Reply), + ok = meck:unload(emqx_access_control), + receive + {deliver, Topic, Msg} -> ct:fail({unexpected, {Topic, Msg}}) + after + 500 -> ok + end. + t_observe(_Config) -> Topic = <<"abc">>, TopicStr = binary_to_list(Topic), Payload = <<"123">>, @@ -91,6 +110,15 @@ t_observe(_Config) -> [] = emqx:subscribers(Topic). 
+t_observe_acl_deny(_Config) -> + Topic = <<"abc">>, TopicStr = binary_to_list(Topic), + Uri = "coap://127.0.0.1/mqtt/"++TopicStr++"?c=client1&u=tom&p=secret", + ok = meck:new(emqx_access_control, [non_strict, passthrough, no_history]), + ok = meck:expect(emqx_access_control, check_acl, 3, deny), + ?assertEqual({error,forbidden}, er_coap_observer:observe(Uri)), + [] = emqx:subscribers(Topic), + ok = meck:unload(emqx_access_control). + t_observe_wildcard(_Config) -> Topic = <<"+/b">>, TopicStr = http_uri:encode(binary_to_list(Topic)), Payload = <<"123">>, diff --git a/apps/emqx_coap/test/emqx_coap_ps_SUITE.erl b/apps/emqx_coap/test/emqx_coap_pubsub_SUITE.erl similarity index 90% rename from apps/emqx_coap/test/emqx_coap_ps_SUITE.erl rename to apps/emqx_coap/test/emqx_coap_pubsub_SUITE.erl index 2bde5dfbd..886d5c782 100644 --- a/apps/emqx_coap/test/emqx_coap_ps_SUITE.erl +++ b/apps/emqx_coap/test/emqx_coap_pubsub_SUITE.erl @@ -14,7 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_coap_ps_SUITE). +-module(emqx_coap_pubsub_SUITE). -compile(export_all). -compile(nowarn_export_all). @@ -54,7 +54,7 @@ t_update_max_age(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - TopicInfo = [{TopicInPayload, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(TopicInPayload), + TopicInfo = [{TopicInPayload, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(TopicInPayload), ?LOGT("lookup topic info=~p", [TopicInfo]), ?assertEqual(60, MaxAge1), ?assertEqual(<<"42">>, CT1), @@ -65,7 +65,7 @@ t_update_max_age(_Config) -> Reply1 = er_coap_client:request(post, URI, #coap_content{max_age = 70, format = <<"application/link-format">>, payload = Payload1}), {ok,created, #coap_content{location_path = LocPath}} = Reply1, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - [{TopicInPayload, MaxAge2, CT2, _ResPayload, _TimeStamp1}] = emqx_coap_ps_topics:lookup_topic_info(TopicInPayload), + [{TopicInPayload, MaxAge2, CT2, _ResPayload, _TimeStamp1}] = emqx_coap_pubsub_topics:lookup_topic_info(TopicInPayload), ?assertEqual(70, MaxAge2), ?assertEqual(<<"50">>, CT2), @@ -82,7 +82,7 @@ t_create_subtopic(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - TopicInfo = [{TopicInPayload, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(TopicInPayload), + TopicInfo = [{TopicInPayload, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(TopicInPayload), ?LOGT("lookup topic info=~p", [TopicInfo]), ?assertEqual(60, MaxAge1), ?assertEqual(<<"42">>, CT1), @@ -99,7 +99,7 @@ t_create_subtopic(_Config) -> ?LOGT("Reply =~p", [Reply1]), {ok,created, #coap_content{location_path = LocPath1}} = Reply1, ?assertEqual([<<"/ps/topic1/subtopic">>] ,LocPath1), - [{FullTopic, MaxAge2, CT2, _ResPayload, _}] = emqx_coap_ps_topics:lookup_topic_info(FullTopic), + [{FullTopic, MaxAge2, CT2, _ResPayload, _}] = emqx_coap_pubsub_topics:lookup_topic_info(FullTopic), ?assertEqual(60, MaxAge2), ?assertEqual(<<"42">>, CT2), @@ -114,13 +114,13 @@ t_over_max_age(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - TopicInfo = [{TopicInPayload, MaxAge1, CT1, _ResPayload, _TimeStamp}] = 
emqx_coap_ps_topics:lookup_topic_info(TopicInPayload), + TopicInfo = [{TopicInPayload, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(TopicInPayload), ?LOGT("lookup topic info=~p", [TopicInfo]), ?assertEqual(2, MaxAge1), ?assertEqual(<<"42">>, CT1), timer:sleep(3000), - ?assertEqual(true, emqx_coap_ps_topics:is_topic_timeout(TopicInPayload)). + ?assertEqual(true, emqx_coap_pubsub_topics:is_topic_timeout(TopicInPayload)). t_refreash_max_age(_Config) -> TopicInPayload = <<"topic1">>, @@ -132,7 +132,7 @@ t_refreash_max_age(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - TopicInfo = [{TopicInPayload, MaxAge1, CT1, _ResPayload, TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(TopicInPayload), + TopicInfo = [{TopicInPayload, MaxAge1, CT1, _ResPayload, TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(TopicInPayload), ?LOGT("lookup topic info=~p", [TopicInfo]), ?LOGT("TimeStamp=~p", [TimeStamp]), ?assertEqual(5, MaxAge1), @@ -144,13 +144,13 @@ t_refreash_max_age(_Config) -> Reply1 = er_coap_client:request(post, URI, #coap_content{max_age = 5, format = <<"application/link-format">>, payload = Payload1}), {ok,created, #coap_content{location_path = LocPath}} = Reply1, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - [{TopicInPayload, MaxAge2, CT2, _ResPayload, TimeStamp1}] = emqx_coap_ps_topics:lookup_topic_info(TopicInPayload), + [{TopicInPayload, MaxAge2, CT2, _ResPayload, TimeStamp1}] = emqx_coap_pubsub_topics:lookup_topic_info(TopicInPayload), ?LOGT("TimeStamp1=~p", [TimeStamp1]), ?assertEqual(5, MaxAge2), ?assertEqual(<<"50">>, CT2), timer:sleep(3000), - ?assertEqual(false, emqx_coap_ps_topics:is_topic_timeout(TopicInPayload)), + ?assertEqual(false, emqx_coap_pubsub_topics:is_topic_timeout(TopicInPayload)), {ok, deleted, #coap_content{}} = er_coap_client:request(delete, RealURI). 
@@ -168,7 +168,7 @@ t_case01_publish_post(_Config) -> ?LOGT("Reply =~p", [Reply1]), {ok,created, #coap_content{location_path = LocPath1}} = Reply1, ?assertEqual([<<"/ps/maintopic/topic1">>] ,LocPath1), - [{FullTopic, MaxAge, CT2, <<>>, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(FullTopic), + [{FullTopic, MaxAge, CT2, <<>>, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(FullTopic), ?assertEqual(60, MaxAge), ?assertEqual(<<"42">>, CT2), @@ -183,7 +183,7 @@ t_case01_publish_post(_Config) -> Reply2 = er_coap_client:request(post, URI2, #coap_content{format = <<"application/octet-stream">>, payload = PubPayload}), ?LOGT("Reply =~p", [Reply2]), {ok,changed, _} = Reply2, - TopicInfo = [{FullTopic, MaxAge, CT2, PubPayload, _TimeStamp1}] = emqx_coap_ps_topics:lookup_topic_info(FullTopic), + TopicInfo = [{FullTopic, MaxAge, CT2, PubPayload, _TimeStamp1}] = emqx_coap_pubsub_topics:lookup_topic_info(FullTopic), ?LOGT("the topic info =~p", [TopicInfo]), assert_recv(FullTopic, PubPayload), @@ -203,7 +203,7 @@ t_case02_publish_post(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(60, MaxAge), ?assertEqual(<<"42">>, CT), @@ -214,7 +214,7 @@ t_case02_publish_post(_Config) -> Reply1 = er_coap_client:request(post, URI, #coap_content{format = <<"application/octet-stream">>, payload = NewPayload}), ?LOGT("Reply =~p", [Reply1]), {ok,changed, _} = Reply1, - [{Topic, MaxAge, CT, NewPayload, _TimeStamp1}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, NewPayload, _TimeStamp1}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), assert_recv(Topic, NewPayload), {ok, deleted, #coap_content{}} = er_coap_client:request(delete, URI). @@ -233,7 +233,7 @@ t_case03_publish_post(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(60, MaxAge), ?assertEqual(<<"42">>, CT), @@ -258,13 +258,13 @@ t_case04_publish_post(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(5, MaxAge), ?assertEqual(<<"42">>, CT), %% after max age timeout, the topic still exists but the status is timeout timer:sleep(6000), - ?assertEqual(true, emqx_coap_ps_topics:is_topic_timeout(Topic)), + ?assertEqual(true, emqx_coap_pubsub_topics:is_topic_timeout(Topic)), {ok, deleted, #coap_content{}} = er_coap_client:request(delete, URI). 
@@ -281,7 +281,7 @@ t_case01_publish_put(_Config) -> ?LOGT("Reply =~p", [Reply1]), {ok,created, #coap_content{location_path = LocPath1}} = Reply1, ?assertEqual([<<"/ps/maintopic/topic1">>] ,LocPath1), - [{FullTopic, MaxAge, CT2, <<>>, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(FullTopic), + [{FullTopic, MaxAge, CT2, <<>>, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(FullTopic), ?assertEqual(60, MaxAge), ?assertEqual(<<"42">>, CT2), @@ -296,7 +296,7 @@ t_case01_publish_put(_Config) -> Reply2 = er_coap_client:request(put, URI2, #coap_content{format = <<"application/octet-stream">>, payload = PubPayload}), ?LOGT("Reply =~p", [Reply2]), {ok,changed, _} = Reply2, - [{FullTopic, MaxAge, CT2, PubPayload, _TimeStamp1}] = emqx_coap_ps_topics:lookup_topic_info(FullTopic), + [{FullTopic, MaxAge, CT2, PubPayload, _TimeStamp1}] = emqx_coap_pubsub_topics:lookup_topic_info(FullTopic), assert_recv(FullTopic, PubPayload), @@ -316,7 +316,7 @@ t_case02_publish_put(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(60, MaxAge), ?assertEqual(<<"42">>, CT), @@ -327,7 +327,7 @@ t_case02_publish_put(_Config) -> Reply1 = er_coap_client:request(put, URI, #coap_content{format = <<"application/octet-stream">>, payload = NewPayload}), ?LOGT("Reply =~p", [Reply1]), {ok,changed, _} = Reply1, - [{Topic, MaxAge, CT, NewPayload, _TimeStamp1}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, NewPayload, _TimeStamp1}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), assert_recv(Topic, NewPayload), @@ -347,7 +347,7 @@ t_case03_publish_put(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(60, MaxAge), ?assertEqual(<<"42">>, CT), @@ -372,7 +372,7 @@ t_case04_publish_put(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/topic1">>] ,LocPath), - [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(5, MaxAge), ?assertEqual(<<"42">>, CT), @@ -381,7 +381,7 @@ t_case04_publish_put(_Config) -> % but there is one thing to do is we don't count in the publish message received from emqx(from other node).TBD!!!!!!!!!!!!! %%%%%%%%%%%%%%%%%%%%%%%%%% timer:sleep(6000), - ?assertEqual(true, emqx_coap_ps_topics:is_topic_timeout(Topic)), + ?assertEqual(true, emqx_coap_pubsub_topics:is_topic_timeout(Topic)), {ok, deleted, #coap_content{}} = er_coap_client:request(delete, URI). 
@@ -396,7 +396,7 @@ t_case01_subscribe(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = [LocPath]}} = Reply, ?assertEqual(<<"/ps/topic1">> ,LocPath), - TopicInfo = [{Topic, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + TopicInfo = [{Topic, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?LOGT("lookup topic info=~p", [TopicInfo]), ?assertEqual(60, MaxAge1), ?assertEqual(<<"42">>, CT1), @@ -439,13 +439,13 @@ t_case02_subscribe(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/a/b">>] ,LocPath), - [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(5, MaxAge), ?assertEqual(<<"42">>, CT), %% Wait for the max age of the timer expires timer:sleep(6000), - ?assertEqual(true, emqx_coap_ps_topics:is_topic_timeout(Topic)), + ?assertEqual(true, emqx_coap_pubsub_topics:is_topic_timeout(Topic)), %% Subscribe to the timeout topic "a/b", still successfully,got {ok, nocontent} Method Uri = "coap://127.0.0.1/ps/"++PercentEncodedTopic++"?c=client1&u=tom&p=secret", @@ -458,7 +458,7 @@ t_case02_subscribe(_Config) -> %% put to publish to topic "a/b" Reply2 = er_coap_client:request(put, URI, #coap_content{format = <<"application/octet-stream">>, payload = Payload}), {ok,changed, #coap_content{}} = Reply2, - [{Topic, MaxAge1, CT, Payload, TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge1, CT, Payload, TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(60, MaxAge1), ?assertEqual(<<"42">>, CT), ?assertEqual(false, TimeStamp =:= timeout), @@ -505,7 +505,7 @@ t_case01_read(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = [LocPath]}} = Reply, ?assertEqual(<<"/ps/topic1">> ,LocPath), - TopicInfo = [{Topic, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + TopicInfo = [{Topic, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?LOGT("lookup topic info=~p", [TopicInfo]), ?assertEqual(60, MaxAge1), ?assertEqual(<<"42">>, CT1), @@ -530,7 +530,7 @@ t_case02_read(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = [LocPath]}} = Reply, ?assertEqual(<<"/ps/topic1">> ,LocPath), - TopicInfo = [{Topic, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + TopicInfo = [{Topic, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?LOGT("lookup topic info=~p", [TopicInfo]), ?assertEqual(60, MaxAge1), ?assertEqual(<<"42">>, CT1), @@ -565,7 +565,7 @@ t_case04_read(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = [LocPath]}} = Reply, ?assertEqual(<<"/ps/topic1">> ,LocPath), - TopicInfo = [{Topic, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + TopicInfo = [{Topic, MaxAge1, CT1, _ResPayload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?LOGT("lookup topic info=~p", [TopicInfo]), ?assertEqual(60, MaxAge1), ?assertEqual(<<"42">>, CT1), @@ -591,13 +591,13 @@ t_case05_read(_Config) -> ?LOGT("Reply =~p", [Reply]), {ok,created, #coap_content{location_path = LocPath}} = Reply, ?assertEqual([<<"/ps/a/b">>] ,LocPath), - [{Topic, MaxAge, 
CT, Payload, _TimeStamp}] = emqx_coap_ps_topics:lookup_topic_info(Topic), + [{Topic, MaxAge, CT, Payload, _TimeStamp}] = emqx_coap_pubsub_topics:lookup_topic_info(Topic), ?assertEqual(5, MaxAge), ?assertEqual(<<"42">>, CT), %% Wait for the max age of the timer expires timer:sleep(6000), - ?assertEqual(true, emqx_coap_ps_topics:is_topic_timeout(Topic)), + ?assertEqual(true, emqx_coap_pubsub_topics:is_topic_timeout(Topic)), %% GET to read the expired publish message, supposed to get {ok, nocontent}, but now got {ok, content} Reply1 = er_coap_client:request(get, URI), @@ -633,11 +633,12 @@ t_case01_delete(_Config) -> %% DELETE the topic "a/b" UriD = "coap://127.0.0.1/ps/"++PercentEncodedTopic++"?c=client1&u=tom&p=secret", ReplyD = er_coap_client:request(delete, UriD), - ?LOGT("Reply=~p", [Reply1]), + ?LOGT("Reply=~p", [ReplyD]), {ok, deleted, #coap_content{}}= ReplyD, - ?assertEqual(false, emqx_coap_ps_topics:is_topic_existed(TopicInPayload)), - ?assertEqual(false, emqx_coap_ps_topics:is_topic_existed(TopicInPayload1)). + timer:sleep(300), %% Waiting gen_server:cast/2 for deleting operation + ?assertEqual(false, emqx_coap_pubsub_topics:is_topic_existed(TopicInPayload)), + ?assertEqual(false, emqx_coap_pubsub_topics:is_topic_existed(TopicInPayload1)). t_case02_delete(_Config) -> TopicInPayload = <<"a/b">>, diff --git a/apps/emqx_exhook/include/emqx_exhook.hrl b/apps/emqx_exhook/include/emqx_exhook.hrl index 8a404ca39..301488619 100644 --- a/apps/emqx_exhook/include/emqx_exhook.hrl +++ b/apps/emqx_exhook/include/emqx_exhook.hrl @@ -19,4 +19,26 @@ -define(APP, emqx_exhook). +-define(ENABLED_HOOKS, + [ {'client.connect', {?MODULE, on_client_connect, []}} + , {'client.connack', {?MODULE, on_client_connack, []}} + , {'client.connected', {?MODULE, on_client_connected, []}} + , {'client.disconnected', {?MODULE, on_client_disconnected, []}} + , {'client.authenticate', {?MODULE, on_client_authenticate, []}} + , {'client.check_acl', {?MODULE, on_client_check_acl, []}} + , {'client.subscribe', {?MODULE, on_client_subscribe, []}} + , {'client.unsubscribe', {?MODULE, on_client_unsubscribe, []}} + , {'session.created', {?MODULE, on_session_created, []}} + , {'session.subscribed', {?MODULE, on_session_subscribed, []}} + , {'session.unsubscribed',{?MODULE, on_session_unsubscribed, []}} + , {'session.resumed', {?MODULE, on_session_resumed, []}} + , {'session.discarded', {?MODULE, on_session_discarded, []}} + , {'session.takeovered', {?MODULE, on_session_takeovered, []}} + , {'session.terminated', {?MODULE, on_session_terminated, []}} + , {'message.publish', {?MODULE, on_message_publish, []}} + , {'message.delivered', {?MODULE, on_message_delivered, []}} + , {'message.acked', {?MODULE, on_message_acked, []}} + , {'message.dropped', {?MODULE, on_message_dropped, []}} + ]). + -endif. diff --git a/apps/emqx_exhook/src/emqx_exhook_app.erl b/apps/emqx_exhook/src/emqx_exhook_app.erl index 3b829e7cd..62c1903bd 100644 --- a/apps/emqx_exhook/src/emqx_exhook_app.erl +++ b/apps/emqx_exhook/src/emqx_exhook_app.erl @@ -22,7 +22,7 @@ -emqx_plugin(extension). --define(REGISTRAY, emqx_exhook_registray). +-define(CNTER, emqx_exhook_counter). -export([ start/2 , stop/1 @@ -33,7 +33,8 @@ -export([ load_server/2 , unload_server/1 , unload_exhooks/0 - , init_hook_registray/0 + , init_hooks_cnter/0 + , deinit_hooks_cnter/0 ]). 
%%-------------------------------------------------------------------- @@ -43,8 +44,8 @@ start(_StartType, _StartArgs) -> {ok, Sup} = emqx_exhook_sup:start_link(), - %% Collect all available hooks - _ = init_hook_registray(), + %% Init counter + init_hooks_cnter(), %% Load all dirvers load_all_servers(), @@ -56,8 +57,8 @@ start(_StartType, _StartArgs) -> prep_stop(State) -> emqx_ctl:unregister_command(exhook), _ = unload_exhooks(), + _ = deinit_hooks_cnter(), ok = unload_all_servers(), - _ = deinit_hook_registray(), State. stop(_State) -> @@ -81,46 +82,17 @@ load_server(Name, Options) -> unload_server(Name) -> emqx_exhook:disable(Name). -%%-------------------------------------------------------------------- -%% Exhooks - -init_hook_registray() -> - _ = ets:new(?REGISTRAY, [public, named_table]), - [ets:insert(?REGISTRAY, {Name, {M, F, A}, 0}) - || {Name, {M, F, A}} <- search_exhooks()]. - unload_exhooks() -> [emqx:unhook(Name, {M, F}) || - {Name, {M, F, _A}, _} <- ets:tab2list(?REGISTRAY)]. + {Name, {M, F, _A}} <- ?ENABLED_HOOKS]. -deinit_hook_registray() -> - ets:delete(?REGISTRAY). - -search_exhooks() -> - search_exhooks(ignore_lib_apps(application:loaded_applications())). -search_exhooks(Apps) -> - lists:flatten([ExHooks || App <- Apps, {_App, _Mod, ExHooks} <- find_attrs(App, exhooks)]). - -ignore_lib_apps(Apps) -> - LibApps = [kernel, stdlib, sasl, appmon, eldap, erts, - syntax_tools, ssl, crypto, mnesia, os_mon, - inets, goldrush, gproc, runtime_tools, - snmp, otp_mibs, public_key, asn1, ssh, hipe, - common_test, observer, webtool, xmerl, tools, - test_server, compiler, debugger, eunit, et, - wx], - [AppName || {AppName, _, _} <- Apps, not lists:member(AppName, LibApps)]. - -find_attrs(App, Def) -> - [{App, Mod, Attr} || {ok, Modules} <- [application:get_key(App, modules)], - Mod <- Modules, - {Name, Attrs} <- module_attributes(Mod), Name =:= Def, - Attr <- Attrs]. - -module_attributes(Module) -> - try Module:module_info(attributes) +init_hooks_cnter() -> + try + _ = ets:new(?CNTER, [named_table, public]), ok catch - error:undef -> []; - error:Reason -> error(Reason) + exit:badarg:_ -> + ok end. +deinit_hooks_cnter() -> + ets:delete(?CNTER). diff --git a/apps/emqx_exhook/src/emqx_exhook_cli.erl b/apps/emqx_exhook/src/emqx_exhook_cli.erl index 8bab9ced5..9fea1f50d 100644 --- a/apps/emqx_exhook/src/emqx_exhook_cli.erl +++ b/apps/emqx_exhook/src/emqx_exhook_cli.erl @@ -23,7 +23,8 @@ cli(["server", "list"]) -> if_enabled(fun() -> Services = emqx_exhook:list(), - [emqx_ctl:print("HookServer(~s)~n", [emqx_exhook_server:format(Service)]) || Service <- Services] + [emqx_ctl:print("HookServer(~s)~n", + [emqx_exhook_server:format(Service)]) || Service <- Services] end); cli(["server", "enable", Name0]) -> @@ -74,7 +75,7 @@ hint() -> stats() -> lists:usort(lists:foldr(fun({K, N}, Acc) -> case atom_to_list(K) of - "exhook." ++ Key -> [{Key, N}|Acc]; + "exhook." ++ Key -> [{Key, N} | Acc]; _ -> Acc end end, [], emqx_metrics:all())). diff --git a/apps/emqx_exhook/src/emqx_exhook_handler.erl b/apps/emqx_exhook/src/emqx_exhook_handler.erl index e60eadaa7..13c41ce6a 100644 --- a/apps/emqx_exhook/src/emqx_exhook_handler.erl +++ b/apps/emqx_exhook/src/emqx_exhook_handler.erl @@ -62,27 +62,6 @@ , call_fold/3 ]). 
--exhooks([ {'client.connect', {?MODULE, on_client_connect, []}} - , {'client.connack', {?MODULE, on_client_connack, []}} - , {'client.connected', {?MODULE, on_client_connected, []}} - , {'client.disconnected', {?MODULE, on_client_disconnected, []}} - , {'client.authenticate', {?MODULE, on_client_authenticate, []}} - , {'client.check_acl', {?MODULE, on_client_check_acl, []}} - , {'client.subscribe', {?MODULE, on_client_subscribe, []}} - , {'client.unsubscribe', {?MODULE, on_client_unsubscribe, []}} - , {'session.created', {?MODULE, on_session_created, []}} - , {'session.subscribed', {?MODULE, on_session_subscribed, []}} - , {'session.unsubscribed',{?MODULE, on_session_unsubscribed, []}} - , {'session.resumed', {?MODULE, on_session_resumed, []}} - , {'session.discarded', {?MODULE, on_session_discarded, []}} - , {'session.takeovered', {?MODULE, on_session_takeovered, []}} - , {'session.terminated', {?MODULE, on_session_terminated, []}} - , {'message.publish', {?MODULE, on_message_publish, []}} - , {'message.delivered', {?MODULE, on_message_delivered, []}} - , {'message.acked', {?MODULE, on_message_acked, []}} - , {'message.dropped', {?MODULE, on_message_dropped, []}} - ]). - %%-------------------------------------------------------------------- %% Clients %%-------------------------------------------------------------------- @@ -273,7 +252,7 @@ clientinfo(ClientInfo = message(#message{id = Id, qos = Qos, from = From, topic = Topic, payload = Payload, timestamp = Ts}) -> #{node => stringfy(node()), - id => hexstr(Id), + id => emqx_guid:to_hexstr(Id), qos => Qos, from => stringfy(From), topic => Topic, @@ -304,12 +283,6 @@ stringfy(Term) when is_atom(Term) -> stringfy(Term) -> unicode:characters_to_binary((io_lib:format("~0p", [Term]))). -hexstr(B) -> - << <<(hexchar(H)), (hexchar(L))>> || <> <= B>>. - -hexchar(I) when I >= 0 andalso I < 10 -> I + $0; -hexchar(I) -> I - 10 + $A. - %%-------------------------------------------------------------------- %% Acc funcs diff --git a/apps/emqx_exhook/src/emqx_exhook_server.erl b/apps/emqx_exhook/src/emqx_exhook_server.erl index 76a2e491d..5a353b61b 100644 --- a/apps/emqx_exhook/src/emqx_exhook_server.erl +++ b/apps/emqx_exhook/src/emqx_exhook_server.erl @@ -16,11 +16,12 @@ -module(emqx_exhook_server). +-include("emqx_exhook.hrl"). -include_lib("emqx/include/logger.hrl"). -logger_header("[ExHook Svr]"). --define(REGISTRAY, emqx_exhook_registray). +-define(CNTER, emqx_exhook_counter). -define(PB_CLIENT_MOD, emqx_exhook_v_1_hook_provider_client). %% Load/Unload @@ -187,25 +188,25 @@ ensure_metrics(Prefix, HookSpecs) -> ensure_hooks(HookSpecs) -> lists:foreach(fun(Hookpoint) -> - case ets:lookup(?REGISTRAY, Hookpoint) of - [] -> - ?LOG(warning, "Hoook ~s not found in registray", [Hookpoint]); - [{Hookpoint, {M, F, A}, _}] -> + case lists:keyfind(Hookpoint, 1, ?ENABLED_HOOKS) of + false -> + ?LOG(error, "Unknown name ~s to hook, skip it!", [Hookpoint]); + {Hookpoint, {M, F, A}} -> emqx_hooks:put(Hookpoint, {M, F, A}), - ets:update_counter(?REGISTRAY, Hookpoint, {3, 1}) + ets:update_counter(?CNTER, Hookpoint, {2, 1}, {Hookpoint, 0}) end end, maps:keys(HookSpecs)). 
may_unload_hooks(HookSpecs) -> lists:foreach(fun(Hookpoint) -> - case ets:update_counter(?REGISTRAY, Hookpoint, {3, -1}) of + case ets:update_counter(?CNTER, Hookpoint, {2, -1}, {Hookpoint, 0}) of Cnt when Cnt =< 0 -> - case ets:lookup(?REGISTRAY, Hookpoint) of - [{Hookpoint, {M, F, _A}, _}] -> + case lists:keyfind(Hookpoint, 1, ?ENABLED_HOOKS) of + {Hookpoint, {M, F, _A}} -> emqx_hooks:del(Hookpoint, {M, F}); _ -> ok end, - ets:delete(?REGISTRAY, Hookpoint); + ets:delete(?CNTER, Hookpoint); _ -> ok end end, maps:keys(HookSpecs)). diff --git a/apps/emqx_exproto/src/emqx_exproto_channel.erl b/apps/emqx_exproto/src/emqx_exproto_channel.erl index 0eec36410..2966cbba9 100644 --- a/apps/emqx_exproto/src/emqx_exproto_channel.erl +++ b/apps/emqx_exproto/src/emqx_exproto_channel.erl @@ -205,7 +205,7 @@ handle_deliver(Delivers, Channel = #channel{clientinfo = ClientInfo}) -> [ClientInfo], Msg), NMsg = emqx_mountpoint:unmount(Mountpoint, Msg1), #{node => NodeStr, - id => hexstr(emqx_message:id(NMsg)), + id => emqx_guid:to_hexstr(emqx_message:id(NMsg)), qos => emqx_message:qos(NMsg), from => fmt_from(emqx_message:from(NMsg)), topic => emqx_message:topic(NMsg), @@ -591,9 +591,6 @@ default_clientinfo(#{peername := {PeerHost, _}, stringfy(Reason) -> unicode:characters_to_binary((io_lib:format("~0p", [Reason]))). -hexstr(Bin) -> - [io_lib:format("~2.16.0B",[X]) || <> <= Bin]. - fmt_from(undefined) -> <<>>; fmt_from(Bin) when is_binary(Bin) -> Bin; fmt_from(T) -> stringfy(T). diff --git a/apps/emqx_lua_hook/test/emqx_lua_hook_SUITE.erl b/apps/emqx_lua_hook/test/emqx_lua_hook_SUITE.erl index eef73b43b..1f58dcb22 100644 --- a/apps/emqx_lua_hook/test/emqx_lua_hook_SUITE.erl +++ b/apps/emqx_lua_hook/test/emqx_lua_hook_SUITE.erl @@ -41,11 +41,11 @@ all() -> ]. init_per_suite(Config) -> - emqx_ct_helpers:start_apps([emqx_modules, emqx_lua_hook], fun set_special_configs/1), + emqx_ct_helpers:start_apps([emqx_lua_hook], fun set_special_configs/1), Config. end_per_suite(Config) -> - emqx_ct_helpers:stop_apps([emqx_lua_hook, emqx_modules]), + emqx_ct_helpers:stop_apps([emqx_lua_hook]), Config. set_special_configs(emqx) -> @@ -667,7 +667,7 @@ case301(_Config) -> ClientInfo = #{clientid => undefined, username => <<"test">>, - peername => undefined, + peerhost => {127, 0, 0, 1}, password => <<"mqtt">> }, Result = #{auth_result => success, anonymous => true}, @@ -675,8 +675,6 @@ case301(_Config) -> emqx_hooks:run_fold('client.authenticate', [ClientInfo], Result)). case302(_Config) -> - application:set_env(emqx, modules, [{emqx_mod_acl_internal, [{acl_file, emqx:get_env(acl_file)}]}]), - emqx_modules:load_module(emqx_mod_acl_internal, false), ScriptName = filename:join([emqx_lua_hook:lua_dir(), "abc.lua"]), Code = "function on_client_check_acl(clientid, username, peerhost, password, topic, pubsub)" "\n return \"allow\"" @@ -688,7 +686,7 @@ case302(_Config) -> ok = file:write_file(ScriptName, Code), ok = emqx_lua_hook:load_scripts(), ClientInfo = #{clientid => undefined, username => <<"test">>, - peername => undefined, + peerhost => {127, 0, 0, 1}, password => <<"mqtt">> }, ?assertEqual(allow, emqx_hooks:run_fold('client.check_acl', diff --git a/apps/emqx_lwm2m/src/emqx_lwm2m_cmd_handler.erl b/apps/emqx_lwm2m/src/emqx_lwm2m_cmd_handler.erl index d542acf91..cd22797fa 100644 --- a/apps/emqx_lwm2m/src/emqx_lwm2m_cmd_handler.erl +++ b/apps/emqx_lwm2m/src/emqx_lwm2m_cmd_handler.erl @@ -303,18 +303,7 @@ bin(Float) when is_float(Float) -> float_to_binary(Float). 
decoding(Datas, <<"hex">>) -> lists:map(fun(Data = #{<<"value">> := Value}) -> - Data#{<<"value">> => hexstr_to_bin(binary_to_list(Value))} + Data#{<<"value">> => emqx_misc:hexstr2bin(Value)} end, Datas); decoding(Datas, _) -> Datas. - -hexstr_to_bin(S) -> - hexstr_to_bin(S, []). -hexstr_to_bin([], Acc) -> - list_to_binary(lists:reverse(Acc)); -hexstr_to_bin([X,Y|T], Acc) -> - {ok, [V], []} = io_lib:fread("~16u", [X,Y]), - hexstr_to_bin(T, [V | Acc]); -hexstr_to_bin([X|T], Acc) -> - {ok, [V], []} = io_lib:fread("~16u", lists:flatten([X,"0"])), - hexstr_to_bin(T, [V | Acc]). \ No newline at end of file diff --git a/apps/emqx_lwm2m/src/emqx_lwm2m_coap_server.erl b/apps/emqx_lwm2m/src/emqx_lwm2m_coap_server.erl index 31986da54..12755dcd4 100644 --- a/apps/emqx_lwm2m/src/emqx_lwm2m_coap_server.erl +++ b/apps/emqx_lwm2m/src/emqx_lwm2m_coap_server.erl @@ -22,6 +22,12 @@ , stop/1 ]). +-export([ start_listener/1 + , start_listener/3 + , stop_listener/1 + , stop_listener/2 + ]). + -define(LOG(Level, Format, Args), logger:Level("LwM2M: " ++ Format, Args)). diff --git a/apps/emqx_management/include/emqx_mgmt.hrl b/apps/emqx_management/include/emqx_mgmt.hrl index 469820b66..e3e1f9fce 100644 --- a/apps/emqx_management/include/emqx_mgmt.hrl +++ b/apps/emqx_management/include/emqx_mgmt.hrl @@ -32,4 +32,4 @@ -define(ERROR14, 114). %% OldPassword error -define(ERROR15, 115). %% bad topic --define(VERSIONS, ["1", "3.2", "3.4", "4.0", "4.1", "4.2"]). \ No newline at end of file +-define(VERSIONS, ["1", "3.2", "3.4", "4.0", "4.1", "4.2", "4.3"]). \ No newline at end of file diff --git a/apps/emqx_management/src/emqx_mgmt.erl b/apps/emqx_management/src/emqx_mgmt.erl index 3ab1b80ee..19582907e 100644 --- a/apps/emqx_management/src/emqx_mgmt.erl +++ b/apps/emqx_management/src/emqx_mgmt.erl @@ -22,8 +22,6 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). --import(proplists, [get_value/2]). - %% Nodes and Brokers API -export([ list_nodes/0 , lookup_node/1 @@ -47,6 +45,8 @@ , list_acl_cache/1 , clean_acl_cache/1 , clean_acl_cache/2 + , clean_acl_cache_all/0 + , clean_acl_cache_all/1 , set_ratelimit_policy/2 , set_quota_policy/2 ]). @@ -101,6 +101,7 @@ , delete_banned/1 ]). +-ifndef(EMQX_ENTERPRISE). -export([ enable_telemetry/0 , disable_telemetry/0 @@ -108,6 +109,8 @@ , get_telemetry_data/0 ]). +-endif. + %% Common Table API -export([ item/2 , max_row_limit/0 @@ -135,11 +138,11 @@ node_info(Node) when Node =:= node() -> BrokerInfo = emqx_sys:info(), Info#{node => node(), otp_release => iolist_to_binary(otp_rel()), - memory_total => get_value(allocated, Memory), - memory_used => get_value(used, Memory), + memory_total => proplists:get_value(allocated, Memory), + memory_used => proplists:get_value(used, Memory), process_available => erlang:system_info(process_limit), process_used => erlang:system_info(process_count), - max_fds => get_value(max_fds, lists:usort(lists:flatten(erlang:system_info(check_io)))), + max_fds => proplists:get_value(max_fds, lists:usort(lists:flatten(erlang:system_info(check_io)))), connections => ets:info(emqx_channel, size), node_status => 'Running', uptime => iolist_to_binary(proplists:get_value(uptime, BrokerInfo)), @@ -251,6 +254,19 @@ clean_acl_cache(Node, ClientId) when Node =:= node() -> clean_acl_cache(Node, ClientId) -> rpc_call(Node, clean_acl_cache, [Node, ClientId]). 
+clean_acl_cache_all() -> + Results = [{Node, clean_acl_cache_all(Node)} || Node <- ekka_mnesia:running_nodes()], + case lists:filter(fun({_Node, Item}) -> Item =/= ok end, Results) of + [] -> ok; + BadNodes -> {error, BadNodes} + end. + +clean_acl_cache_all(Node) when Node =:= node() -> + emqx_acl_cache:drain_cache(); + +clean_acl_cache_all(Node) -> + rpc_call(Node, clean_acl_cache_all, [Node]). + set_ratelimit_policy(ClientId, Policy) -> call_client(ClientId, {ratelimit, Policy}). @@ -488,6 +504,8 @@ delete_banned(Who) -> %% Telemtry API %%-------------------------------------------------------------------- +-ifndef(EMQX_ENTERPRISE). + enable_telemetry() -> lists:foreach(fun enable_telemetry/1,ekka_mnesia:running_nodes()). @@ -510,6 +528,8 @@ get_telemetry_status() -> get_telemetry_data() -> emqx_telemetry:get_telemetry(). +-endif. + %%-------------------------------------------------------------------- %% Common Table API %%-------------------------------------------------------------------- diff --git a/apps/emqx_management/src/emqx_mgmt_api_acl.erl b/apps/emqx_management/src/emqx_mgmt_api_acl.erl new file mode 100644 index 000000000..d6af2df58 --- /dev/null +++ b/apps/emqx_management/src/emqx_mgmt_api_acl.erl @@ -0,0 +1,47 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_mgmt_api_acl). + +-include("emqx_mgmt.hrl"). + +-rest_api(#{name => clean_acl_cache_all, + method => 'DELETE', + path => "/acl-cache", + func => clean_all, + descr => "Clean acl cache on all nodes"}). + +-rest_api(#{name => clean_acl_cache_node, + method => 'DELETE', + path => "/:atom:node/acl-cache", + func => clean_node, + descr => "Clean acl cache on specific node"}). + +-export([ clean_all/2 + , clean_node/2 + ]). + +clean_all(_Bindings, _Params) -> + case emqx_mgmt:clean_acl_cache_all() of + ok -> minirest:return(); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}) + end. + +clean_node(#{node := Node}, _Params) -> + case emqx_mgmt:clean_acl_cache_all(Node) of + ok -> minirest:return(); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}) + end. diff --git a/apps/emqx_management/src/emqx_mgmt_api_apps.erl b/apps/emqx_management/src/emqx_mgmt_api_apps.erl index 55eaddbb1..e9a4e0997 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_apps.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_apps.erl @@ -18,12 +18,6 @@ -include("emqx_mgmt.hrl"). --import(proplists, [get_value/2]). - --import(minirest, [ return/0 - , return/1 - ]). - -rest_api(#{name => add_app, method => 'POST', path => "/apps/", @@ -62,47 +56,47 @@ ]). 
add_app(_Bindings, Params) -> - AppId = get_value(<<"app_id">>, Params), - Name = get_value(<<"name">>, Params), - Secret = get_value(<<"secret">>, Params), - Desc = get_value(<<"desc">>, Params), - Status = get_value(<<"status">>, Params), - Expired = get_value(<<"expired">>, Params), + AppId = proplists:get_value(<<"app_id">>, Params), + Name = proplists:get_value(<<"name">>, Params), + Secret = proplists:get_value(<<"secret">>, Params), + Desc = proplists:get_value(<<"desc">>, Params), + Status = proplists:get_value(<<"status">>, Params), + Expired = proplists:get_value(<<"expired">>, Params), case emqx_mgmt_auth:add_app(AppId, Name, Secret, Desc, Status, Expired) of - {ok, AppSecret} -> return({ok, #{secret => AppSecret}}); - {error, Reason} -> return({error, Reason}) + {ok, AppSecret} -> minirest:return({ok, #{secret => AppSecret}}); + {error, Reason} -> minirest:return({error, Reason}) end. del_app(#{appid := AppId}, _Params) -> case emqx_mgmt_auth:del_app(AppId) of - ok -> return(); - {error, Reason} -> return({error, Reason}) + ok -> minirest:return(); + {error, Reason} -> minirest:return({error, Reason}) end. list_apps(_Bindings, _Params) -> - return({ok, [format(Apps)|| Apps <- emqx_mgmt_auth:list_apps()]}). + minirest:return({ok, [format(Apps)|| Apps <- emqx_mgmt_auth:list_apps()]}). lookup_app(#{appid := AppId}, _Params) -> case emqx_mgmt_auth:lookup_app(AppId) of {AppId, AppSecret, Name, Desc, Status, Expired} -> - return({ok, #{app_id => AppId, + minirest:return({ok, #{app_id => AppId, secret => AppSecret, name => Name, desc => Desc, status => Status, expired => Expired}}); undefined -> - return({ok, #{}}) + minirest:return({ok, #{}}) end. update_app(#{appid := AppId}, Params) -> - Name = get_value(<<"name">>, Params), - Desc = get_value(<<"desc">>, Params), - Status = get_value(<<"status">>, Params), - Expired = get_value(<<"expired">>, Params), + Name = proplists:get_value(<<"name">>, Params), + Desc = proplists:get_value(<<"desc">>, Params), + Status = proplists:get_value(<<"status">>, Params), + Expired = proplists:get_value(<<"expired">>, Params), case emqx_mgmt_auth:update_app(AppId, Name, Desc, Status, Expired) of - ok -> return(); - {error, Reason} -> return({error, Reason}) + ok -> minirest:return(); + {error, Reason} -> minirest:return({error, Reason}) end. format({AppId, _AppSecret, Name, Desc, Status, Expired}) -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_banned.erl b/apps/emqx_management/src/emqx_mgmt_api_banned.erl index 483ab4be3..4d5856fd0 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_banned.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_banned.erl @@ -20,12 +20,6 @@ -include("emqx_mgmt.hrl"). --import(proplists, [get_value/2]). - --import(minirest, [ return/0 - , return/1 - ]). - -rest_api(#{name => list_banned, method => 'GET', path => "/banned/", @@ -50,7 +44,7 @@ ]). list(_Bindings, Params) -> - return({ok, emqx_mgmt_api:paginate(emqx_banned, Params, fun format/1)}). + minirest:return({ok, emqx_mgmt_api:paginate(emqx_banned, Params, fun format/1)}). create(_Bindings, Params) -> case pipeline([fun ensure_required/1, @@ -58,9 +52,9 @@ create(_Bindings, Params) -> {ok, NParams} -> {ok, Banned} = pack_banned(NParams), ok = emqx_mgmt:create_banned(Banned), - return({ok, maps:from_list(Params)}); + minirest:return({ok, maps:from_list(Params)}); {error, Code, Message} -> - return({error, Code, Message}) + minirest:return({error, Code, Message}) end. 
delete(#{as := As, who := Who}, _) -> @@ -69,10 +63,10 @@ delete(#{as := As, who := Who}, _) -> case pipeline([fun ensure_required/1, fun validate_params/1], Params) of {ok, NParams} -> - do_delete(get_value(<<"as">>, NParams), get_value(<<"who">>, NParams)), - return(); + do_delete(proplists:get_value(<<"as">>, NParams), proplists:get_value(<<"who">>, NParams)), + minirest:return(); {error, Code, Message} -> - return({error, Code, Message}) + minirest:return({error, Code, Message}) end. pipeline([], Params) -> @@ -99,7 +93,7 @@ ensure_required(Params) when is_list(Params) -> validate_params(Params) -> #{enum_values := AsEnums, message := Msg} = enum_values(as), - case lists:member(get_value(<<"as">>, Params), AsEnums) of + case lists:member(proplists:get_value(<<"as">>, Params), AsEnums) of true -> {ok, Params}; false -> {error, ?ERROR8, Msg} diff --git a/apps/emqx_management/src/emqx_mgmt_api_brokers.erl b/apps/emqx_management/src/emqx_mgmt_api_brokers.erl index 2d6d82850..89707f49c 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_brokers.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_brokers.erl @@ -18,8 +18,6 @@ -include("emqx_mgmt.hrl"). --import(minirest, [return/1]). - -rest_api(#{name => list_brokers, method => 'GET', path => "/brokers/", @@ -37,13 +35,13 @@ ]). list(_Bindings, _Params) -> - return({ok, [Info || {_Node, Info} <- emqx_mgmt:list_brokers()]}). + minirest:return({ok, [Info || {_Node, Info} <- emqx_mgmt:list_brokers()]}). get(#{node := Node}, _Params) -> case emqx_mgmt:lookup_broker(Node) of {error, Reason} -> - return({error, ?ERROR2, Reason}); + minirest:return({error, ?ERROR2, Reason}); Info -> - return({ok, Info}) + minirest:return({ok, Info}) end. diff --git a/apps/emqx_management/src/emqx_mgmt_api_clients.erl b/apps/emqx_management/src/emqx_mgmt_api_clients.erl index 7a4e4b0b7..23a70f6eb 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_clients.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_clients.erl @@ -21,12 +21,6 @@ -include_lib("emqx/include/emqx_mqtt.hrl"). -include_lib("emqx/include/emqx.hrl"). --import(minirest, [ return/0 - , return/1 - ]). - --import(proplists, [get_value/2]). - -define(CLIENT_QS_SCHEMA, {emqx_channel_info, [{<<"clientid">>, binary}, {<<"username">>, binary}, @@ -146,87 +140,87 @@ -define(format_fun, {?MODULE, format_channel_info}). list(Bindings, Params) when map_size(Bindings) == 0 -> - return({ok, emqx_mgmt_api:cluster_query(Params, ?CLIENT_QS_SCHEMA, ?query_fun)}); + minirest:return({ok, emqx_mgmt_api:cluster_query(Params, ?CLIENT_QS_SCHEMA, ?query_fun)}); list(#{node := Node}, Params) when Node =:= node() -> - return({ok, emqx_mgmt_api:node_query(Node, Params, ?CLIENT_QS_SCHEMA, ?query_fun)}); + minirest:return({ok, emqx_mgmt_api:node_query(Node, Params, ?CLIENT_QS_SCHEMA, ?query_fun)}); list(Bindings = #{node := Node}, Params) -> case rpc:call(Node, ?MODULE, list, [Bindings, Params]) of - {badrpc, Reason} -> return({error, ?ERROR1, Reason}); + {badrpc, Reason} -> minirest:return({error, ?ERROR1, Reason}); Res -> Res end. 
lookup(#{node := Node, clientid := ClientId}, _Params) -> - return({ok, emqx_mgmt:lookup_client(Node, {clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); + minirest:return({ok, emqx_mgmt:lookup_client(Node, {clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); lookup(#{clientid := ClientId}, _Params) -> - return({ok, emqx_mgmt:lookup_client({clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); + minirest:return({ok, emqx_mgmt:lookup_client({clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); lookup(#{node := Node, username := Username}, _Params) -> - return({ok, emqx_mgmt:lookup_client(Node, {username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}); + minirest:return({ok, emqx_mgmt:lookup_client(Node, {username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}); lookup(#{username := Username}, _Params) -> - return({ok, emqx_mgmt:lookup_client({username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}). + minirest:return({ok, emqx_mgmt:lookup_client({username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}). kickout(#{clientid := ClientId}, _Params) -> case emqx_mgmt:kickout_client(emqx_mgmt_util:urldecode(ClientId)) of - ok -> return(); - {error, not_found} -> return({error, ?ERROR12, not_found}); - {error, Reason} -> return({error, ?ERROR1, Reason}) + ok -> minirest:return(); + {error, not_found} -> minirest:return({error, ?ERROR12, not_found}); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}) end. clean_acl_cache(#{clientid := ClientId}, _Params) -> case emqx_mgmt:clean_acl_cache(emqx_mgmt_util:urldecode(ClientId)) of - ok -> return(); - {error, not_found} -> return({error, ?ERROR12, not_found}); - {error, Reason} -> return({error, ?ERROR1, Reason}) + ok -> minirest:return(); + {error, not_found} -> minirest:return({error, ?ERROR12, not_found}); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}) end. list_acl_cache(#{clientid := ClientId}, _Params) -> case emqx_mgmt:list_acl_cache(emqx_mgmt_util:urldecode(ClientId)) of - {error, not_found} -> return({error, ?ERROR12, not_found}); - {error, Reason} -> return({error, ?ERROR1, Reason}); - Caches -> return({ok, [format_acl_cache(Cache) || Cache <- Caches]}) + {error, not_found} -> minirest:return({error, ?ERROR12, not_found}); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}); + Caches -> minirest:return({ok, [format_acl_cache(Cache) || Cache <- Caches]}) end. set_ratelimit_policy(#{clientid := ClientId}, Params) -> - P = [{conn_bytes_in, get_value(<<"conn_bytes_in">>, Params)}, - {conn_messages_in, get_value(<<"conn_messages_in">>, Params)}], + P = [{conn_bytes_in, proplists:get_value(<<"conn_bytes_in">>, Params)}, + {conn_messages_in, proplists:get_value(<<"conn_messages_in">>, Params)}], case [{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined] of - [] -> return(); + [] -> minirest:return(); Policy -> case emqx_mgmt:set_ratelimit_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of - ok -> return(); - {error, not_found} -> return({error, ?ERROR12, not_found}); - {error, Reason} -> return({error, ?ERROR1, Reason}) + ok -> minirest:return(); + {error, not_found} -> minirest:return({error, ?ERROR12, not_found}); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}) end end. 
clean_ratelimit(#{clientid := ClientId}, _Params) -> case emqx_mgmt:set_ratelimit_policy(emqx_mgmt_util:urldecode(ClientId), []) of - ok -> return(); - {error, not_found} -> return({error, ?ERROR12, not_found}); - {error, Reason} -> return({error, ?ERROR1, Reason}) + ok -> minirest:return(); + {error, not_found} -> minirest:return({error, ?ERROR12, not_found}); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}) end. set_quota_policy(#{clientid := ClientId}, Params) -> - P = [{conn_messages_routing, get_value(<<"conn_messages_routing">>, Params)}], + P = [{conn_messages_routing, proplists:get_value(<<"conn_messages_routing">>, Params)}], case [{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined] of - [] -> return(); + [] -> minirest:return(); Policy -> case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of - ok -> return(); - {error, not_found} -> return({error, ?ERROR12, not_found}); - {error, Reason} -> return({error, ?ERROR1, Reason}) + ok -> minirest:return(); + {error, not_found} -> minirest:return({error, ?ERROR12, not_found}); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}) end end. clean_quota(#{clientid := ClientId}, _Params) -> case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), []) of - ok -> return(); - {error, not_found} -> return({error, ?ERROR12, not_found}); - {error, Reason} -> return({error, ?ERROR1, Reason}) + ok -> minirest:return(); + {error, not_found} -> minirest:return({error, ?ERROR12, not_found}); + {error, Reason} -> minirest:return({error, ?ERROR1, Reason}) end. %% @private diff --git a/apps/emqx_management/src/emqx_mgmt_api_data.erl b/apps/emqx_management/src/emqx_mgmt_api_data.erl index 5c19e95af..855e09525 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_data.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_data.erl @@ -22,10 +22,6 @@ -include("emqx_mgmt.hrl"). --import(minirest, [ return/0 - , return/1 - ]). - -rest_api(#{name => export, method => 'POST', path => "/data/export", @@ -77,14 +73,14 @@ export(_Bindings, _Params) -> case emqx_mgmt_data_backup:export() of {ok, File = #{filename := Filename}} -> - return({ok, File#{filename => filename:basename(Filename)}}); - Return -> return(Return) + minirest:return({ok, File#{filename => filename:basename(Filename)}}); + Return -> minirest:return(Return) end. list_exported(_Bindings, _Params) -> List = [ rpc:call(Node, ?MODULE, get_list_exported, []) || Node <- ekka_mnesia:running_nodes() ], NList = lists:map(fun({_, FileInfo}) -> FileInfo end, lists:keysort(1, lists:append(List))), - return({ok, NList}). + minirest:return({ok, NList}). get_list_exported() -> Dir = emqx:get_env(data_dir), @@ -114,7 +110,7 @@ get_list_exported() -> import(_Bindings, Params) -> case proplists:get_value(<<"filename">>, Params) of undefined -> - return({error, missing_required_params}); + minirest:return({error, missing_required_params}); Filename -> Result = case proplists:get_value(<<"node">>, Params) of undefined -> do_import(Filename); @@ -122,16 +118,16 @@ import(_Bindings, Params) -> case lists:member(Node, [ erlang:atom_to_binary(N, utf8) || N <- ekka_mnesia:running_nodes() ] ) of - true -> return(rpc:call(erlang:binary_to_atom(Node, utf8), ?MODULE, do_import, [Filename])); - false -> return({error, no_existent_node}) + true -> minirest:return(rpc:call(erlang:binary_to_atom(Node, utf8), ?MODULE, do_import, [Filename])); + false -> minirest:return({error, no_existent_node}) end end, - return(Result) + minirest:return(Result) end. 
do_import(Filename) -> FullFilename = filename:join([emqx:get_env(data_dir), Filename]), - emqx_mgmt_data_backup:import(FullFilename). + emqx_mgmt_data_backup:import(FullFilename, "{}"). download(#{filename := Filename}, _Params) -> FullFilename = filename:join([emqx:get_env(data_dir), Filename]), @@ -140,7 +136,7 @@ download(#{filename := Filename}, _Params) -> {ok, #{filename => list_to_binary(Filename), file => Bin}}; {error, Reason} -> - return({error, Reason}) + minirest:return({error, Reason}) end. upload(Bindings, Params) -> @@ -151,9 +147,9 @@ do_upload(_Bindings, #{<<"filename">> := Filename, FullFilename = filename:join([emqx:get_env(data_dir), Filename]), case file:write_file(FullFilename, Bin) of ok -> - return({ok, [{node, node()}]}); + minirest:return({ok, [{node, node()}]}); {error, Reason} -> - return({error, Reason}) + minirest:return({error, Reason}) end; do_upload(Bindings, Params = #{<<"file">> := _}) -> Seconds = erlang:system_time(second), @@ -161,13 +157,13 @@ do_upload(Bindings, Params = #{<<"file">> := _}) -> Filename = io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]), do_upload(Bindings, Params#{<<"filename">> => Filename}); do_upload(_Bindings, _Params) -> - return({error, missing_required_params}). + minirest:return({error, missing_required_params}). delete(#{filename := Filename}, _Params) -> FullFilename = filename:join([emqx:get_env(data_dir), Filename]), case file:delete(FullFilename) of ok -> - return(); + minirest:return(); {error, Reason} -> - return({error, Reason}) + minirest:return({error, Reason}) end. diff --git a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl index 382c1051b..5425d82b4 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl @@ -16,8 +16,6 @@ -module(emqx_mgmt_api_listeners). --import(minirest, [return/1]). - -rest_api(#{name => list_listeners, method => 'GET', path => "/listeners/", @@ -46,18 +44,18 @@ %% List listeners on a node. list(#{node := Node}, _Params) -> - return({ok, format(emqx_mgmt:list_listeners(Node))}); + minirest:return({ok, format(emqx_mgmt:list_listeners(Node))}); %% List listeners in the cluster. list(_Binding, _Params) -> - return({ok, [#{node => Node, listeners => format(Listeners)} + minirest:return({ok, [#{node => Node, listeners => format(Listeners)} || {Node, Listeners} <- emqx_mgmt:list_listeners()]}). %% Restart listeners on a node. restart(#{node := Node, identifier := Identifier}, _Params) -> case emqx_mgmt:restart_listener(Node, Identifier) of - ok -> return({ok, "Listener restarted."}); - {error, Error} -> return({error, Error}) + ok -> minirest:return({ok, "Listener restarted."}); + {error, Error} -> minirest:return({error, Error}) end; %% Restart listeners in the cluster. @@ -66,8 +64,8 @@ restart(#{identifier := <<"http", _/binary>>}, _Params) -> restart(#{identifier := Identifier}, _Params) -> Results = [{Node, emqx_mgmt:restart_listener(Node, Identifier)} || {Node, _Info} <- emqx_mgmt:list_nodes()], case lists:filter(fun({_, Result}) -> Result =/= ok end, Results) of - [] -> return(ok); - Errors -> return({error, {restart, Errors}}) + [] -> minirest:return(ok); + Errors -> minirest:return({error, {restart, Errors}}) end. 
format(Listeners) when is_list(Listeners) -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_metrics.erl b/apps/emqx_management/src/emqx_mgmt_api_metrics.erl index 3e9c88db6..c265fd20e 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_metrics.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_metrics.erl @@ -16,8 +16,6 @@ -module(emqx_mgmt_api_metrics). --import(minirest, [return/1]). - -rest_api(#{name => list_all_metrics, method => 'GET', path => "/metrics", @@ -33,12 +31,12 @@ -export([list/2]). list(Bindings, _Params) when map_size(Bindings) == 0 -> - return({ok, [#{node => Node, metrics => maps:from_list(Metrics)} + minirest:return({ok, [#{node => Node, metrics => maps:from_list(Metrics)} || {Node, Metrics} <- emqx_mgmt:get_metrics()]}); list(#{node := Node}, _Params) -> case emqx_mgmt:get_metrics(Node) of - {error, Reason} -> return({error, Reason}); - Metrics -> return({ok, maps:from_list(Metrics)}) + {error, Reason} -> minirest:return({error, Reason}); + Metrics -> minirest:return({ok, maps:from_list(Metrics)}) end. diff --git a/apps/emqx_management/src/emqx_mgmt_api_nodes.erl b/apps/emqx_management/src/emqx_mgmt_api_nodes.erl index c5791f3d5..89325193f 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_nodes.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_nodes.erl @@ -16,8 +16,6 @@ -module(emqx_mgmt_api_nodes). --import(minirest, [return/1]). - -rest_api(#{name => list_nodes, method => 'GET', path => "/nodes/", @@ -35,10 +33,10 @@ ]). list(_Bindings, _Params) -> - return({ok, [format(Node, Info) || {Node, Info} <- emqx_mgmt:list_nodes()]}). + minirest:return({ok, [format(Node, Info) || {Node, Info} <- emqx_mgmt:list_nodes()]}). get(#{node := Node}, _Params) -> - return({ok, emqx_mgmt:lookup_node(Node)}). + minirest:return({ok, emqx_mgmt:lookup_node(Node)}). format(Node, {error, Reason}) -> #{node => Node, error => Reason}; diff --git a/apps/emqx_management/src/emqx_mgmt_api_plugins.erl b/apps/emqx_management/src/emqx_mgmt_api_plugins.erl index 991dffad9..c50a806ea 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_plugins.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_plugins.erl @@ -20,8 +20,6 @@ -include_lib("emqx/include/emqx.hrl"). --import(minirest, [return/1]). - -rest_api(#{name => list_all_plugins, method => 'GET', path => "/plugins/", @@ -71,36 +69,36 @@ ]). list(#{node := Node}, _Params) -> - return({ok, [format(Plugin) || Plugin <- emqx_mgmt:list_plugins(Node)]}); + minirest:return({ok, [format(Plugin) || Plugin <- emqx_mgmt:list_plugins(Node)]}); list(_Bindings, _Params) -> - return({ok, [format({Node, Plugins}) || {Node, Plugins} <- emqx_mgmt:list_plugins()]}). + minirest:return({ok, [format({Node, Plugins}) || {Node, Plugins} <- emqx_mgmt:list_plugins()]}). load(#{node := Node, plugin := Plugin}, _Params) -> - return(emqx_mgmt:load_plugin(Node, Plugin)). + minirest:return(emqx_mgmt:load_plugin(Node, Plugin)). unload(#{node := Node, plugin := Plugin}, _Params) -> - return(emqx_mgmt:unload_plugin(Node, Plugin)); + minirest:return(emqx_mgmt:unload_plugin(Node, Plugin)); unload(#{plugin := Plugin}, _Params) -> Results = [emqx_mgmt:unload_plugin(Node, Plugin) || {Node, _Info} <- emqx_mgmt:list_nodes()], case lists:filter(fun(Item) -> Item =/= ok end, Results) of [] -> - return(ok); + minirest:return(ok); Errors -> - return(lists:last(Errors)) + minirest:return(lists:last(Errors)) end. 
reload(#{node := Node, plugin := Plugin}, _Params) -> - return(emqx_mgmt:reload_plugin(Node, Plugin)); + minirest:return(emqx_mgmt:reload_plugin(Node, Plugin)); reload(#{plugin := Plugin}, _Params) -> Results = [emqx_mgmt:reload_plugin(Node, Plugin) || {Node, _Info} <- emqx_mgmt:list_nodes()], case lists:filter(fun(Item) -> Item =/= ok end, Results) of [] -> - return(ok); + minirest:return(ok); Errors -> - return(lists:last(Errors)) + minirest:return(lists:last(Errors)) end. format({Node, Plugins}) -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_pubsub.erl b/apps/emqx_management/src/emqx_mgmt_api_pubsub.erl index 3b7f7392f..693f71a3e 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_pubsub.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_pubsub.erl @@ -20,12 +20,6 @@ -include_lib("emqx/include/emqx_mqtt.hrl"). -include("emqx_mgmt.hrl"). --import(proplists, [ get_value/2 - , get_value/3 - ]). - --import(minirest, [return/1]). - -rest_api(#{name => mqtt_subscribe, method => 'POST', path => "/mqtt/subscribe", @@ -73,41 +67,41 @@ subscribe(_Bindings, Params) -> logger:debug("API subscribe Params:~p", [Params]), {ClientId, Topic, QoS} = parse_subscribe_params(Params), - return(do_subscribe(ClientId, Topic, QoS)). + minirest:return(do_subscribe(ClientId, Topic, QoS)). publish(_Bindings, Params) -> logger:debug("API publish Params:~p", [Params]), {ClientId, Topic, Qos, Retain, Payload} = parse_publish_params(Params), case do_publish(ClientId, Topic, Qos, Retain, Payload) of {ok, MsgIds} -> - case get_value(<<"return">>, Params, undefined) of - undefined -> return(ok); + case proplists:get_value(<<"return">>, Params, undefined) of + undefined -> minirest:return(ok); _Val -> - case get_value(<<"topics">>, Params, undefined) of - undefined -> return({ok, #{msgid => lists:last(MsgIds)}}); - _ -> return({ok, #{msgids => MsgIds}}) + case proplists:get_value(<<"topics">>, Params, undefined) of + undefined -> minirest:return({ok, #{msgid => lists:last(MsgIds)}}); + _ -> minirest:return({ok, #{msgids => MsgIds}}) end end; Result -> - return(Result) + minirest:return(Result) end. unsubscribe(_Bindings, Params) -> logger:debug("API unsubscribe Params:~p", [Params]), {ClientId, Topic} = parse_unsubscribe_params(Params), - return(do_unsubscribe(ClientId, Topic)). + minirest:return(do_unsubscribe(ClientId, Topic)). subscribe_batch(_Bindings, Params) -> logger:debug("API subscribe batch Params:~p", [Params]), - return({ok, loop_subscribe(Params)}). + minirest:return({ok, loop_subscribe(Params)}). publish_batch(_Bindings, Params) -> logger:debug("API publish batch Params:~p", [Params]), - return({ok, loop_publish(Params)}). + minirest:return({ok, loop_publish(Params)}). unsubscribe_batch(_Bindings, Params) -> logger:debug("API unsubscribe batch Params:~p", [Params]), - return({ok, loop_unsubscribe(Params)}). + minirest:return({ok, loop_unsubscribe(Params)}). loop_subscribe(Params) -> loop_subscribe(Params, []). @@ -120,7 +114,7 @@ loop_subscribe([Params | ParamsN], Acc) -> {_, Code0, _Reason} -> Code0 end, Result = #{clientid => ClientId, - topic => resp_topic(get_value(<<"topic">>, Params), get_value(<<"topics">>, Params, <<"">>)), + topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), code => Code}, loop_subscribe(ParamsN, [Result | Acc]). 
@@ -134,7 +128,7 @@ loop_publish([Params | ParamsN], Acc) -> {ok, _} -> 0; {_, Code0, _} -> Code0 end, - Result = #{topic => resp_topic(get_value(<<"topic">>, Params), get_value(<<"topics">>, Params, <<"">>)), + Result = #{topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), code => Code}, loop_publish(ParamsN, [Result | Acc]). @@ -149,7 +143,7 @@ loop_unsubscribe([Params | ParamsN], Acc) -> {_, Code0, _} -> Code0 end, Result = #{clientid => ClientId, - topic => resp_topic(get_value(<<"topic">>, Params), get_value(<<"topics">>, Params, <<"">>)), + topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), code => Code}, loop_unsubscribe(ParamsN, [Result | Acc]). @@ -184,24 +178,24 @@ do_unsubscribe(ClientId, Topic) -> end. parse_subscribe_params(Params) -> - ClientId = get_value(<<"clientid">>, Params), - Topics = topics(filter, get_value(<<"topic">>, Params), get_value(<<"topics">>, Params, <<"">>)), - QoS = get_value(<<"qos">>, Params, 0), + ClientId = proplists:get_value(<<"clientid">>, Params), + Topics = topics(filter, proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), + QoS = proplists:get_value(<<"qos">>, Params, 0), {ClientId, Topics, QoS}. parse_publish_params(Params) -> - Topics = topics(name, get_value(<<"topic">>, Params), get_value(<<"topics">>, Params, <<"">>)), - ClientId = get_value(<<"clientid">>, Params), - Payload = decode_payload(get_value(<<"payload">>, Params, <<>>), - get_value(<<"encoding">>, Params, <<"plain">>)), - Qos = get_value(<<"qos">>, Params, 0), - Retain = get_value(<<"retain">>, Params, false), + Topics = topics(name, proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), + ClientId = proplists:get_value(<<"clientid">>, Params), + Payload = decode_payload(proplists:get_value(<<"payload">>, Params, <<>>), + proplists:get_value(<<"encoding">>, Params, <<"plain">>)), + Qos = proplists:get_value(<<"qos">>, Params, 0), + Retain = proplists:get_value(<<"retain">>, Params, false), Payload1 = maybe_maps_to_binary(Payload), {ClientId, Topics, Qos, Retain, Payload1}. parse_unsubscribe_params(Params) -> - ClientId = get_value(<<"clientid">>, Params), - Topic = get_value(<<"topic">>, Params), + ClientId = proplists:get_value(<<"clientid">>, Params), + Topic = proplists:get_value(<<"topic">>, Params), {ClientId, Topic}. topics(Type, undefined, Topics0) -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_routes.erl b/apps/emqx_management/src/emqx_mgmt_api_routes.erl index 3a58a26a2..ed173436f 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_routes.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_routes.erl @@ -18,8 +18,6 @@ -include_lib("emqx/include/emqx.hrl"). --import(minirest, [return/1]). - -rest_api(#{name => list_routes, method => 'GET', path => "/routes/", @@ -37,11 +35,11 @@ ]). list(Bindings, Params) when map_size(Bindings) == 0 -> - return({ok, emqx_mgmt_api:paginate(emqx_route, Params, fun format/1)}). + minirest:return({ok, emqx_mgmt_api:paginate(emqx_route, Params, fun format/1)}). lookup(#{topic := Topic}, _Params) -> Topic1 = emqx_mgmt_util:urldecode(Topic), - return({ok, [format(R) || R <- emqx_mgmt:lookup_routes(Topic1)]}). + minirest:return({ok, [format(R) || R <- emqx_mgmt:lookup_routes(Topic1)]}). 
format(#route{topic = Topic, dest = {_, Node}}) -> #{topic => Topic, node => Node}; format(#route{topic = Topic, dest = Node}) -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_stats.erl b/apps/emqx_management/src/emqx_mgmt_api_stats.erl index 57e0b4fcf..97d80410b 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_stats.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_stats.erl @@ -16,8 +16,6 @@ -module(emqx_mgmt_api_stats). --import(minirest, [return/1]). - -rest_api(#{name => list_stats, method => 'GET', path => "/stats/", @@ -36,12 +34,12 @@ %% List stats of all nodes list(Bindings, _Params) when map_size(Bindings) == 0 -> - return({ok, [#{node => Node, stats => maps:from_list(Stats)} + minirest:return({ok, [#{node => Node, stats => maps:from_list(Stats)} || {Node, Stats} <- emqx_mgmt:get_stats()]}). %% List stats of a node lookup(#{node := Node}, _Params) -> case emqx_mgmt:get_stats(Node) of - {error, Reason} -> return({error, Reason}); - Stats -> return({ok, maps:from_list(Stats)}) + {error, Reason} -> minirest:return({error, Reason}); + Stats -> minirest:return({ok, maps:from_list(Stats)}) end. diff --git a/apps/emqx_management/src/emqx_mgmt_api_subscriptions.erl b/apps/emqx_management/src/emqx_mgmt_api_subscriptions.erl index 51f7a6dec..f61ebfd97 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_subscriptions.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_subscriptions.erl @@ -18,8 +18,6 @@ -include_lib("emqx/include/emqx.hrl"). --import(minirest, [return/1]). - -define(SUBS_QS_SCHEMA, {emqx_suboption, [{<<"clientid">>, binary}, {<<"topic">>, binary}, @@ -65,9 +63,9 @@ list(Bindings, Params) when map_size(Bindings) == 0 -> case proplists:get_value(<<"topic">>, Params) of undefined -> - return({ok, emqx_mgmt_api:cluster_query(Params, ?SUBS_QS_SCHEMA, ?query_fun)}); + minirest:return({ok, emqx_mgmt_api:cluster_query(Params, ?SUBS_QS_SCHEMA, ?query_fun)}); Topic -> - return({ok, emqx_mgmt:list_subscriptions_via_topic(emqx_mgmt_util:urldecode(Topic), ?format_fun)}) + minirest:return({ok, emqx_mgmt:list_subscriptions_via_topic(emqx_mgmt_util:urldecode(Topic), ?format_fun)}) end; list(#{node := Node} = Bindings, Params) -> @@ -75,22 +73,22 @@ list(#{node := Node} = Bindings, Params) -> undefined -> case Node =:= node() of true -> - return({ok, emqx_mgmt_api:node_query(Node, Params, ?SUBS_QS_SCHEMA, ?query_fun)}); + minirest:return({ok, emqx_mgmt_api:node_query(Node, Params, ?SUBS_QS_SCHEMA, ?query_fun)}); false -> case rpc:call(Node, ?MODULE, list, [Bindings, Params]) of - {badrpc, Reason} -> return({error, Reason}); + {badrpc, Reason} -> minirest:return({error, Reason}); Res -> Res end end; Topic -> - return({ok, emqx_mgmt:list_subscriptions_via_topic(Node, emqx_mgmt_util:urldecode(Topic), ?format_fun)}) + minirest:return({ok, emqx_mgmt:list_subscriptions_via_topic(Node, emqx_mgmt_util:urldecode(Topic), ?format_fun)}) end. lookup(#{node := Node, clientid := ClientId}, _Params) -> - return({ok, format(emqx_mgmt:lookup_subscriptions(Node, emqx_mgmt_util:urldecode(ClientId)))}); + minirest:return({ok, format(emqx_mgmt:lookup_subscriptions(Node, emqx_mgmt_util:urldecode(ClientId)))}); lookup(#{clientid := ClientId}, _Params) -> - return({ok, format(emqx_mgmt:lookup_subscriptions(emqx_mgmt_util:urldecode(ClientId)))}). + minirest:return({ok, format(emqx_mgmt:lookup_subscriptions(emqx_mgmt_util:urldecode(ClientId)))}). 
format(Items) when is_list(Items) -> [format(Item) || Item <- Items]; diff --git a/apps/emqx_management/src/emqx_mgmt_cli.erl b/apps/emqx_management/src/emqx_mgmt_cli.erl index 20b4f1b01..a7c78d4b8 100644 --- a/apps/emqx_management/src/emqx_mgmt_cli.erl +++ b/apps/emqx_management/src/emqx_mgmt_cli.erl @@ -39,6 +39,7 @@ , log/1 , mgmt/1 , data/1 + , acl/1 ]). -define(PROC_INFOKEYS, [status, @@ -115,12 +116,12 @@ mgmt(_) -> status([]) -> {InternalStatus, _ProvidedStatus} = init:get_status(), - emqx_ctl:print("Node ~p is ~p~n", [node(), InternalStatus]), + emqx_ctl:print("Node ~p ~s is ~p~n", [node(), emqx_app:get_release(), InternalStatus]), case lists:keysearch(?APP, 1, application:which_applications()) of false -> - emqx_ctl:print("~s is not running~n", [?APP]); + emqx_ctl:print("Application ~s is not running~n", [?APP]); {value, {?APP, _Desc, Vsn}} -> - emqx_ctl:print("~s ~s is running~n", [?APP, Vsn]) + emqx_ctl:print("Application ~s ~s is running~n", [?APP, Vsn]) end; status(_) -> emqx_ctl:usage("status", "Show broker status"). @@ -561,7 +562,9 @@ data(["export"]) -> end; data(["import", Filename]) -> - case emqx_mgmt_data_backup:import(Filename) of + data(["import", Filename, "--env", "{}"]); +data(["import", Filename, "--env", Env]) -> + case emqx_mgmt_data_backup:import(Filename, Env) of ok -> emqx_ctl:print("The emqx data has been imported successfully.~n"); {error, import_failed} -> @@ -573,8 +576,37 @@ data(["import", Filename]) -> end; data(_) -> - emqx_ctl:usage([{"data import <File>", "Import data from the specified file"}, - {"data export", "Export data"}]). + emqx_ctl:usage([{"data import <File> [--env '<json>']", + "Import data from the specified file, possibly with overrides"}, + {"data export", "Export data"}]). + +%%-------------------------------------------------------------------- +%% @doc acl Command + +acl(["cache-clean", "node", Node]) -> + case emqx_mgmt:clean_acl_cache_all(erlang:list_to_existing_atom(Node)) of + ok -> + emqx_ctl:print("ACL cache drain started on node ~s.~n", [Node]); + {error, Reason} -> + emqx_ctl:print("ACL drain failed on node ~s: ~0p.~n", [Node, Reason]) + end; + +acl(["cache-clean", "all"]) -> + case emqx_mgmt:clean_acl_cache_all() of + ok -> + emqx_ctl:print("Started ACL cache drain in all nodes~n"); + {error, Reason} -> + emqx_ctl:print("ACL cache-clean failed: ~p.~n", [Reason]) + end; + +acl(["cache-clean", ClientId]) -> + emqx_mgmt:clean_acl_cache(ClientId); + +acl(_) -> + emqx_ctl:usage([{"acl cache-clean all", "Clears acl cache on all nodes"}, + {"acl cache-clean node <Node>", "Clears acl cache on given node"}, + {"acl cache-clean <ClientId>", "Clears acl cache for given client"} + ]). %%-------------------------------------------------------------------- %% Dump ETS @@ -669,7 +701,7 @@ indent_print({Key, Val}) -> listener_identifier(Protocol, ListenOn) -> case emqx_listeners:find_id_by_listen_on(ListenOn) of false -> - "http" ++ _ = atom_to_list(Protocol); %% assert + atom_to_list(Protocol); ID -> ID end. diff --git a/apps/emqx_management/src/emqx_mgmt_data_backup.erl b/apps/emqx_management/src/emqx_mgmt_data_backup.erl index cd6ef6fb9..2b1791c1d 100644 --- a/apps/emqx_management/src/emqx_mgmt_data_backup.erl +++ b/apps/emqx_management/src/emqx_mgmt_data_backup.erl @@ -17,6 +17,7 @@ -module(emqx_mgmt_data_backup). -include("emqx_mgmt.hrl"). +-include_lib("emqx_rule_engine/include/rule_engine.hrl"). -include_lib("emqx/include/emqx.hrl"). -include_lib("kernel/include/file.hrl"). @@ -51,7 +52,7 @@ ]). -export([ export/0 - , import/1 + , import/2 ]).
%%-------------------------------------------------------------------- @@ -59,7 +60,11 @@ %%-------------------------------------------------------------------- export_rules() -> - lists:map(fun({_, RuleId, _, RawSQL, _, _, _, _, _, _, Actions, Enabled, Desc}) -> + lists:map(fun(#rule{id = RuleId, + rawsql = RawSQL, + actions = Actions, + enabled = Enabled, + description = Desc}) -> [{id, RuleId}, {rawsql, RawSQL}, {actions, actions_to_prop_list(Actions)}, @@ -68,7 +73,11 @@ export_rules() -> end, emqx_rule_registry:get_rules()). export_resources() -> - lists:map(fun({_, Id, Type, Config, CreatedAt, Desc}) -> + lists:map(fun(#resource{id = Id, + type = Type, + config = Config, + created_at = CreatedAt, + description = Desc}) -> NCreatedAt = case CreatedAt of undefined -> null; _ -> CreatedAt @@ -174,7 +183,7 @@ export_confs() -> confs_to_binary(Confs) -> [{list_to_binary(Key), list_to_binary(Val)} || {Key, Val} <-Confs]. --else. +-endif. import_rule(#{<<"id">> := RuleId, <<"rawsql">> := RawSQL, @@ -200,11 +209,10 @@ map_to_action(Map = #{<<"id">> := ActionInstId, <<"name">> := Name, <<"args">> : args => Args, fallbacks => map_to_actions(maps:get(<<"fallbacks">>, Map, []))}. --endif. import_rules(Rules) -> - lists:foreach(fun(Resource) -> - import_resource(Resource) + lists:foreach(fun(Rule) -> + import_rule(Rule) end, Rules). import_resources(Reources) -> @@ -233,29 +241,10 @@ import_resources_and_rules(Resources, Rules, _FromVersion) -> import_rules(Rules). -else. import_resources_and_rules(Resources, Rules, FromVersion) - when FromVersion =:= "4.0" orelse FromVersion =:= "4.1" orelse FromVersion =:= "4.2" -> - Configs = lists:foldl(fun(#{<<"id">> := ID, - <<"type">> := <<"web_hook">>, - <<"config">> := #{<<"content_type">> := ContentType, - <<"headers">> := Headers, - <<"method">> := Method, - <<"url">> := URL}} = Resource, Acc) -> - NConfig = #{<<"connect_timeout">> => 5, - <<"request_timeout">> => 5, - <<"cacertfile">> => <<>>, - <<"certfile">> => <<>>, - <<"keyfile">> => <<>>, - <<"pool_size">> => 8, - <<"url">> => URL, - <<"verify">> => true}, - NResource = Resource#{<<"config">> := NConfig}, - {ok, _Resource} = import_resource(NResource), - NHeaders = maps:put(<<"content-type">>, ContentType, Headers), - [{ID, #{headers => NHeaders, method => Method}} | Acc]; - (Resource, Acc) -> - {ok, _Resource} = import_resource(Resource), - Acc - end, [], Resources), + when FromVersion =:= "4.0" orelse + FromVersion =:= "4.1" orelse + FromVersion =:= "4.2" -> + Configs = lists:foldl(fun compatible_version/2 , [], Resources), lists:foreach(fun(#{<<"actions">> := Actions} = Rule) -> NActions = apply_new_config(Actions, Configs), import_rule(Rule#{<<"actions">> := NActions}) @@ -265,6 +254,79 @@ import_resources_and_rules(Resources, Rules, _FromVersion) -> import_resources(Resources), import_rules(Rules). 
+%% 4.2.5 + +compatible_version(#{<<"id">> := ID, + <<"type">> := <<"web_hook">>, + <<"config">> := #{<<"connect_timeout">> := ConnectTimeout, + <<"content_type">> := ContentType, + <<"headers">> := Headers, + <<"method">> := Method, + <<"pool_size">> := PoolSize, + <<"request_timeout">> := RequestTimeout, + <<"url">> := URL}} = Resource, Acc) -> + CovertFun = fun(Int) -> + list_to_binary(integer_to_list(Int) ++ "s") + end, + Cfg = make_new_config(#{<<"pool_size">> => PoolSize, + <<"connect_timeout">> => CovertFun(ConnectTimeout), + <<"request_timeout">> => CovertFun(RequestTimeout), + <<"url">> => URL}), + {ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}), + NHeaders = maps:put(<<"content-type">>, ContentType, covert_empty_headers(Headers)), + [{ID, #{headers => NHeaders, method => Method}} | Acc]; +% 4.2.0 +compatible_version(#{<<"id">> := ID, + <<"type">> := <<"web_hook">>, + <<"config">> := #{<<"headers">> := Headers, + <<"method">> := Method,%% 4.2.0 Different here + <<"url">> := URL}} = Resource, Acc) -> + Cfg = make_new_config(#{<<"url">> => URL}), + {ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}), + NHeaders = maps:put(<<"content-type">>, <<"application/json">> , covert_empty_headers(Headers)), + [{ID, #{headers => NHeaders, method => Method}} | Acc]; + +%% bridge mqtt +%% 4.2.0 - 4.2.5 bridge_mqtt, ssl enabled from on/off to true/false +compatible_version(#{<<"type">> := <<"bridge_mqtt">>, + <<"id">> := ID, %% begin 4.2.0. + <<"config">> := #{<<"ssl">> := Ssl} = Config} = Resource, Acc) -> + F = fun(B) -> + case B of + <<"on">> -> true; + <<"off">> -> false; + Other -> Other + end + end, + NewConfig = Config#{<<"ssl">> := F(Ssl)}, + {ok, _Resource} = import_resource(Resource#{<<"config">> := NewConfig}), + [{ID, NewConfig} | Acc]; + +% 4.2.3, add :content_type +compatible_version(#{<<"id">> := ID, + <<"type">> := <<"web_hook">>, + <<"config">> := #{<<"headers">> := Headers, + <<"content_type">> := ContentType,%% 4.2.3 Different here + <<"method">> := Method, + <<"url">> := URL}} = Resource, Acc) -> + Cfg = make_new_config(#{<<"url">> => URL}), + {ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}), + NHeaders = maps:put(<<"content-type">>, ContentType, covert_empty_headers(Headers)), + [{ID, #{headers => NHeaders, method => Method}} | Acc]; +% normal version +compatible_version(Resource, Acc) -> + {ok, _Resource} = import_resource(Resource), + Acc. + +make_new_config(Cfg) -> + Config = #{<<"pool_size">> => 8, + <<"connect_timeout">> => <<"5s">>, + <<"request_timeout">> => <<"5s">>, + <<"cacertfile">> => <<>>, + <<"certfile">> => <<>>, + <<"keyfile">> => <<>>, + <<"verify">> => false}, + maps:merge(Cfg, Config). + apply_new_config(Actions, Configs) -> apply_new_config(Actions, Configs, []). @@ -284,7 +346,18 @@ apply_new_config([Action = #{<<"name">> := <<"data_to_webserver">>, <<"method">> => Method, <<"path">> => Path}, apply_new_config(More, Configs, [Action#{<<"args">> := Args} | Acc]) - end. + end; + +apply_new_config([Action = #{<<"args">> := #{<<"$resource">> := ResourceId, + <<"forward_topic">> := ForwardTopic, + <<"payload_tmpl">> := PayloadTmpl}, + <<"fallbacks">> := _Fallbacks, + <<"id">> := _Id, + <<"name">> := <<"data_to_mqtt_broker">>} | More], Configs, Acc) -> + Args = #{<<"$resource">> => ResourceId, + <<"payload_tmpl">> => PayloadTmpl, + <<"forward_topic">> => ForwardTopic}, + apply_new_config(More, Configs, [Action#{<<"args">> := Args} | Acc]). -endif. 
@@ -368,9 +441,11 @@ import_acl_mnesia(Acls, _) -> do_import_acl_mnesia(Acls). -else. import_auth_mnesia(Auths, FromVersion) when FromVersion =:= "4.0" orelse - FromVersion =:= "4.1" orelse - FromVersion =:= "4.2" -> + FromVersion =:= "4.1" -> do_import_auth_mnesia_by_old_data(Auths); +import_auth_mnesia(Auths, "4.2") -> + %% 4.2 contains a bug where password is not base64-encoded + do_import_auth_mnesia_4_2(Auths); import_auth_mnesia(Auths, _) -> do_import_auth_mnesia(Auths). @@ -381,6 +456,17 @@ import_acl_mnesia(Acls, FromVersion) when FromVersion =:= "4.0" orelse import_acl_mnesia(Acls, _) -> do_import_acl_mnesia(Acls). + +do_import_auth_mnesia_4_2(Auths) -> + case ets:info(emqx_user) of + undefined -> ok; + _ -> + CreatedAt = erlang:system_time(millisecond), + lists:foreach(fun(#{<<"login">> := Login, + <<"password">> := Password}) -> + mnesia:dirty_write({emqx_user, {get_old_type(), Login}, Password, CreatedAt}) + end, Auths) + end. -endif. do_import_auth_mnesia_by_old_data(Auths) -> @@ -390,9 +476,11 @@ do_import_auth_mnesia_by_old_data(Auths) -> CreatedAt = erlang:system_time(millisecond), lists:foreach(fun(#{<<"login">> := Login, <<"password">> := Password}) -> - mnesia:dirty_write({emqx_user, {username, Login}, base64:decode(Password), CreatedAt}) + mnesia:dirty_write({emqx_user, {get_old_type(), Login}, base64:decode(Password), CreatedAt}) end, Auths) end. + + do_import_auth_mnesia(Auths) -> case ets:info(emqx_user) of undefined -> ok; @@ -418,7 +506,7 @@ do_import_acl_mnesia_by_old_data(Acls) -> true -> allow; false -> deny end, - mnesia:dirty_write({emqx_acl, {{username, Login}, Topic}, any_to_atom(Action), Allow1, CreatedAt}) + mnesia:dirty_write({emqx_acl, {{get_old_type(), Login}, Topic}, any_to_atom(Action), Allow1, CreatedAt}) end, Acls) end. do_import_acl_mnesia(Acls) -> @@ -490,8 +578,8 @@ export() -> case file:write_file(NFilename, emqx_json:encode(Data)) of ok -> case file:read_file_info(NFilename) of - {ok, #file_info{size = Size, ctime = {{Y, M, D}, {H, MM, S}}}} -> - CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y, M, D, H, MM, S]), + {ok, #file_info{size = Size, ctime = {{Y1, M1, D1}, {H1, MM1, S1}}}} -> + CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y1, M1, D1, H1, MM1, S1]), {ok, #{filename => list_to_binary(NFilename), size => Size, created_at => list_to_binary(CreatedAt), @@ -526,11 +614,14 @@ do_export_extra_data() -> do_export_extra_data() -> []. -endif. -import(Filename) -> +import(Filename, OverridesJson) -> case file:read_file(Filename) of {ok, Json} -> - Data = emqx_json:decode(Json, [return_maps]), + Imported = emqx_json:decode(Json, [return_maps]), + Overrides = emqx_json:decode(OverridesJson, [return_maps]), + Data = maps:merge(Imported, Overrides), Version = to_version(maps:get(<<"version">>, Data)), + read_global_auth_type(Data, Version), case lists:member(Version, ?VERSIONS) of true -> try @@ -538,7 +629,7 @@ import(Filename) -> logger:debug("The emqx data has been imported successfully"), ok catch Class:Reason:Stack -> - logger:error("The emqx data import failed: ~0p", [{Class,Reason,Stack}]), + logger:error("The emqx data import failed: ~0p", [{Class, Reason, Stack}]), {error, import_failed} end; false -> @@ -568,3 +659,35 @@ do_import_extra_data(Data, _Version) -> -else. do_import_extra_data(_Data, _Version) -> ok. -endif. + +-ifndef(EMQX_ENTERPRISE). +covert_empty_headers(Headers) -> + case Headers of + [] -> #{}; + Other -> Other + end. +-endif. 
+ +read_global_auth_type(Data, Version) when Version =:= "4.0" orelse + Version =:= "4.1" orelse + Version =:= "4.2" -> + case Data of + #{<<"auth.mnesia.as">> := <<"username">>} -> application:set_env(emqx_auth_mnesia, as, username); + #{<<"auth.mnesia.as">> := <<"clientid">>} -> application:set_env(emqx_auth_mnesia, as, clientid); + _ -> + logger:error("While importing data from EMQX versions prior to 4.3 " + "it is necessary to specify the value of \"auth.mnesia.as\" parameter " + "as it was configured in etc/plugins/emqx_auth_mnesia.conf.\n" + "Use the following command to import data:\n" + " $ emqx_ctl data import --env '{\"auth.mnesia.as\":\"username\"}'\n" + "or\n" + " $ emqx_ctl data import --env '{\"auth.mnesia.as\":\"clientid\"}'", + []), + error(import_failed) + end; +read_global_auth_type(_Data, _Version) -> + ok. + +get_old_type() -> + {ok, Type} = application:get_env(emqx_auth_mnesia, as), + Type. diff --git a/apps/emqx_management/src/emqx_mgmt_http.erl b/apps/emqx_management/src/emqx_mgmt_http.erl index aee057d39..82b45b368 100644 --- a/apps/emqx_management/src/emqx_mgmt_http.erl +++ b/apps/emqx_management/src/emqx_mgmt_http.erl @@ -16,8 +16,6 @@ -module(emqx_mgmt_http). --import(proplists, [get_value/3]). - -export([ start_listeners/0 , handle_request/2 , stop_listeners/0 @@ -58,8 +56,8 @@ start_listener({Proto, Port, Options}) when Proto == https -> minirest:start_https(listener_name(Proto), ranch_opts(Port, Options), Dispatch). ranch_opts(Port, Options0) -> - NumAcceptors = get_value(num_acceptors, Options0, 4), - MaxConnections = get_value(max_connections, Options0, 512), + NumAcceptors = proplists:get_value(num_acceptors, Options0, 4), + MaxConnections = proplists:get_value(max_connections, Options0, 512), Options = lists:foldl(fun({K, _V}, Acc) when K =:= max_connections orelse K =:= num_acceptors -> Acc; ({inet6, true}, Acc) -> [inet6 | Acc]; diff --git a/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE.erl b/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE.erl new file mode 100644 index 000000000..8db0c68d7 --- /dev/null +++ b/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE.erl @@ -0,0 +1,106 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_auth_mnesia_migration_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). + +-include_lib("emqx/include/emqx.hrl"). +-include_lib("emqx/include/emqx_mqtt.hrl"). +-include_lib("emqx_auth_mnesia/include/emqx_auth_mnesia.hrl"). + +-ifdef(EMQX_ENTERPRISE). +-define(VERSIONS, ["e4.1", "e4.2"]). +-else. +-define(VERSIONS, ["v4.1", "v4.2"]). +-endif. + +all() -> + [{group, Id} || {Id, _, _} <- groups()]. + +groups() -> + [{username, [], cases()}, {clientid, [], cases()}]. + +cases() -> + [t_import]. 
+ +init_per_suite(Config) -> + emqx_ct_helpers:start_apps([emqx_management, emqx_dashboard, emqx_auth_mnesia]), + ekka_mnesia:start(), + emqx_mgmt_auth:mnesia(boot), + Config. + +end_per_suite(_Config) -> + emqx_ct_helpers:stop_apps([emqx_modules, emqx_management, emqx_dashboard, emqx_management, emqx_auth_mnesia]), + ekka_mnesia:ensure_stopped(). + +init_per_group(username, Config) -> + [{cred_type, username} | Config]; +init_per_group(clientid, Config) -> + [{cred_type, clientid} | Config]. + +end_per_group(_, Config) -> + Config. + +init_per_testcase(_, Config) -> + Config. + +end_per_testcase(_, _Config) -> + mnesia:clear_table(emqx_acl), + mnesia:clear_table(emqx_user), + ok. + +t_import(Config) -> + test_import(Config, ?VERSIONS). + +test_import(Config, [V | Versions]) -> + do_import(Config, V), + test_import(Config, Versions); +test_import(_Config, []) -> ok. + +do_import(Config, V) -> + File = V ++ ".json", + Type = proplists:get_value(cred_type, Config), + mnesia:clear_table(emqx_acl), + mnesia:clear_table(emqx_user), + Filename = filename:join(proplists:get_value(data_dir, Config), File), + Overrides = emqx_json:encode(#{<<"auth.mnesia.as">> => atom_to_binary(Type)}), + ?assertMatch(ok, emqx_mgmt_data_backup:import(Filename, Overrides)), + Records = lists:sort(ets:tab2list(emqx_acl)), + %% Check importing of records related to emqx_auth_mnesia + ?assertMatch([#emqx_acl{ + filter = {{Type,<<"emqx_c">>}, <<"Topic/A">>}, + action = pub, + access = allow + }, + #emqx_acl{ + filter = {{Type,<<"emqx_c">>}, <<"Topic/A">>}, + action = sub, + access = allow + }], + lists:sort(Records)), + ?assertMatch([#emqx_user{ + login = {Type, <<"emqx_c">>} + }], ets:tab2list(emqx_user)), + Req = #{clientid => <<"blah">>} + #{Type => <<"emqx_c">>, + password => "emqx_p" + }, + ?assertMatch({stop, #{auth_result := success}}, + emqx_auth_mnesia:check(Req, #{}, #{hash_type => sha256})). 
diff --git a/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE_data/v4.1.json b/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE_data/v4.1.json new file mode 100644 index 000000000..04a7a273f --- /dev/null +++ b/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE_data/v4.1.json @@ -0,0 +1,48 @@ +{ + "acl_mnesia": [ + { + "action": "sub", + "allow": true, + "login": "emqx_c", + "topic": "Topic/A" + }, + { + "action": "pub", + "allow": true, + "login": "emqx_c", + "topic": "Topic/A" + } + ], + "apps": [ + { + "desc": "Application user", + "expired": "undefined", + "id": "admin", + "name": "Default", + "secret": "public", + "status": true + } + ], + "auth_clientid": [], + "auth_mnesia": [ + { + "is_superuser": false, + "login": "emqx_c", + "password": "Y2ViNWU5MTdmNzkzMGFlOGYwZGMzY2ViNDk2YTQyOGY3ZTY0NDczNmVlYmNhMzZhMmI4ZjZiYmFjNzU2MTcxYQ==" + } + ], + "auth_username": [], + "blacklist": [], + "date": "2021-03-30 09:11:29", + "resources": [], + "rules": [], + "schemas": [], + "users": [ + { + "password": "t89PhgOb15rSCdpxm7Obp7QGcyY=", + "tags": "administrator", + "username": "admin" + } + ], + "version": "4.1" +} diff --git a/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE_data/v4.2.json b/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE_data/v4.2.json new file mode 100644 index 000000000..57958aa58 --- /dev/null +++ b/apps/emqx_management/test/emqx_auth_mnesia_migration_SUITE_data/v4.2.json @@ -0,0 +1,53 @@ +{ + "schemas": [], + "acl_mnesia": [ + { + "allow": true, + "action": "sub", + "topic": "Topic/A", + "login": "emqx_c" + }, + { + "allow": true, + "action": "pub", + "topic": "Topic/A", + "login": "emqx_c" + } + ], + "auth_mnesia": [ + { + "is_superuser": false, + "password": "ceb5e917f7930ae8f0dc3ceb496a428f7e644736eebca36a2b8f6bbac756171a", + "login": "emqx_c" + } + ], + "auth_username": [], + "auth_clientid": [], + "users": [ + { + "tags": "viewer", + "password": "oVqjR1wOi2u4DtsuXNctYt6+SKE=", + "username": "test" + }, + { + "tags": "administrator", + "password": "9SO4rEEZ6rNwA4vAwp3cnXgQsAM=", + "username": "admin" + } + ], + "apps": [ + { + "expired": "undefined", + "status": true, + "desc": "Application user", + "name": "Default", + "secret": "public", + "id": "admin" + } + ], + "blacklist": [], + "resources": [], + "rules": [], + "date": "2021-03-26 09:51:38", + "version": "4.2" +} diff --git a/apps/emqx_management/test/emqx_mgmt_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_SUITE.erl index a3e9a6fad..c9aea26d6 100644 --- a/apps/emqx_management/test/emqx_mgmt_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_SUITE.erl @@ -53,16 +53,16 @@ groups() -> ]}]. apps() -> - [emqx, emqx_management, emqx_auth_mnesia]. + [emqx_management, emqx_auth_mnesia, emqx_modules]. init_per_suite(Config) -> ekka_mnesia:start(), emqx_mgmt_auth:mnesia(boot), - emqx_ct_helpers:start_apps([emqx_management, emqx_auth_mnesia]), + emqx_ct_helpers:start_apps(apps()), Config. end_per_suite(_Config) -> - emqx_ct_helpers:stop_apps([emqx_management, emqx_auth_mnesia]). + emqx_ct_helpers:stop_apps(apps()). t_app(_Config) -> {ok, AppSecret} = emqx_mgmt_auth:add_app(<<"app_id">>, <<"app_name">>), diff --git a/apps/emqx_management/test/emqx_mgmt_api_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_SUITE.erl index 8e98567c4..a63398b05 100644 --- a/apps/emqx_management/test/emqx_mgmt_api_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_api_SUITE.erl @@ -57,13 +57,13 @@ groups() -> }]. 
init_per_suite(Config) -> - emqx_ct_helpers:start_apps([emqx_management, emqx_auth_mnesia]), + emqx_ct_helpers:start_apps([emqx_management, emqx_auth_mnesia, emqx_modules]), ekka_mnesia:start(), emqx_mgmt_auth:mnesia(boot), Config. end_per_suite(_Config) -> - emqx_ct_helpers:stop_apps([emqx_auth_mnesia, emqx_management]), + emqx_ct_helpers:stop_apps([emqx_auth_mnesia, emqx_management, emqx_modules]), ekka_mnesia:ensure_stopped(). init_per_testcase(data, Config) -> @@ -71,14 +71,6 @@ init_per_testcase(data, Config) -> application:ensure_all_started(emqx_dahboard), ok = emqx_rule_registry:mnesia(boot), application:ensure_all_started(emqx_rule_engine), - - meck:new(emqx_sys, [passthrough, no_history]), - meck:expect(emqx_sys, version, 0, - fun() -> - Tag =os:cmd("git describe --abbrev=0 --tags") -- "\n", - re:replace(Tag, "[v|e]", "", [{return ,list}]) - end), - Config; init_per_testcase(_, Config) -> @@ -87,10 +79,6 @@ init_per_testcase(_, Config) -> end_per_testcase(data, _Config) -> application:stop(emqx_dahboard), application:stop(emqx_rule_engine), - application:stop(emqx_modules), - application:stop(emqx_schema_registry), - application:stop(emqx_conf), - meck:unload(emqx_sys), ok; end_per_testcase(_, _Config) -> @@ -221,6 +209,7 @@ clients(_) -> {ok, _} = emqtt:connect(C1), {ok, C2} = emqtt:start_link(#{username => Username2, clientid => ClientId2}), {ok, _} = emqtt:connect(C2), + timer:sleep(300), {ok, Clients1} = request_api(get, api_path(["clients", binary_to_list(ClientId1)]) @@ -241,7 +230,7 @@ clients(_) -> "clients", "username", binary_to_list(Username2)]) , auth_header_()), - ?assertEqual(<<"client2">>, maps:get(<<"clientid">>, lists:nth(1, get(<<"data">>, Clients4)))), + ?assertEqual(<<"client2">>, maps:get(<<"clientid">>, lists:nth(1, get(<<"data">>, Clients4)))), {ok, Clients5} = request_api(get, api_path(["clients"]), "_limit=100&_page=1", auth_header_()), ?assertEqual(2, maps:get(<<"count">>, get(<<"meta">>, Clients5))), @@ -265,6 +254,8 @@ clients(_) -> {ok, Ok} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), ?assertEqual(?SUCCESS, get(<<"code">>, Ok)), + timer:sleep(300), + {ok, NotFound0} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), ?assertEqual(?ERROR12, get(<<"code">>, NotFound0)), diff --git a/apps/emqx_plugin_libs/src/emqx_plugin_libs_ssl.erl b/apps/emqx_plugin_libs/src/emqx_plugin_libs_ssl.erl index 4b0746335..9fc9e66ef 100644 --- a/apps/emqx_plugin_libs/src/emqx_plugin_libs_ssl.erl +++ b/apps/emqx_plugin_libs/src/emqx_plugin_libs_ssl.erl @@ -16,7 +16,10 @@ -module(emqx_plugin_libs_ssl). --export([save_files_return_opts/2]). +-export([save_files_return_opts/2, + save_files_return_opts/3, + save_file/2 + ]). -type file_input_key() :: binary(). %% <<"file">> | <<"filename">> -type file_input() :: #{file_input_key() => binary()}. @@ -36,6 +39,15 @@ -type opt_value() :: term(). -type opts() :: [{opt_key(), opt_value()}]. +%% @doc Parse ssl options input. +%% If the input contains file content, save the files in the given dir. +%% Returns ssl options for Erlang's ssl application. +-spec save_files_return_opts(opts_input(), atom() | string() | binary(), + string() | binary()) -> opts(). +save_files_return_opts(Options, SubDir, ResId) -> + Dir = filename:join([emqx:get_env(data_dir), SubDir, ResId]), + save_files_return_opts(Options, Dir). + %% @doc Parse ssl options input. %% If the input contains file content, save the files in the given dir. 
%% Returns ssl options for Erlang's ssl application. @@ -46,34 +58,43 @@ save_files_return_opts(Options, Dir) -> KeyFile = Get(<<"keyfile">>), CertFile = Get(<<"certfile">>), CAFile = GetD(<<"cacertfile">>, Get(<<"cafile">>)), - Key = save_file(KeyFile, Dir), - Cert = save_file(CertFile, Dir), - CA = save_file(CAFile, Dir), + Key = do_save_file(KeyFile, Dir), + Cert = do_save_file(CertFile, Dir), + CA = do_save_file(CAFile, Dir), Verify = case GetD(<<"verify">>, false) of false -> verify_none; _ -> verify_peer end, + SNI = Get(<<"server_name_indication">>), Versions = emqx_tls_lib:integral_versions(Get(<<"tls_versions">>)), Ciphers = emqx_tls_lib:integral_ciphers(Versions, Get(<<"ciphers">>)), filter([{keyfile, Key}, {certfile, Cert}, {cacertfile, CA}, - {verify, Verify}, {versions, Versions}, {ciphers, Ciphers}]). + {verify, Verify}, {server_name_indication, SNI}, {versions, Versions}, {ciphers, Ciphers}]). + +%% @doc Save a key or certificate file in data dir, +%% and return path of the saved file. +%% empty string is returned if the input is empty. +-spec save_file(file_input(), atom() | string() | binary()) -> string(). +save_file(Param, SubDir) -> + Dir = filename:join([emqx:get_env(data_dir), SubDir]), + do_save_file( Param, Dir). filter([]) -> []; filter([{_, ""} | T]) -> filter(T); filter([H | T]) -> [H | filter(T)]. -save_file(#{<<"filename">> := FileName, <<"file">> := Content}, Dir) +do_save_file(#{<<"filename">> := FileName, <<"file">> := Content}, Dir) when FileName =/= undefined andalso Content =/= undefined -> - save_file(ensure_str(FileName), iolist_to_binary(Content), Dir); -save_file(FilePath, _) when is_binary(FilePath) -> + do_save_file(ensure_str(FileName), iolist_to_binary(Content), Dir); +do_save_file(FilePath, _) when is_binary(FilePath) -> ensure_str(FilePath); -save_file(FilePath, _) when is_list(FilePath) -> +do_save_file(FilePath, _) when is_list(FilePath) -> FilePath; -save_file(_, _) -> "". +do_save_file(_, _) -> "". -save_file("", _, _Dir) -> ""; %% ignore -save_file(_, <<>>, _Dir) -> ""; %% ignore -save_file(FileName, Content, Dir) -> +do_save_file("", _, _Dir) -> ""; %% ignore +do_save_file(_, <<>>, _Dir) -> ""; %% ignore +do_save_file(FileName, Content, Dir) -> FullFilename = filename:join([Dir, FileName]), ok = filelib:ensure_dir(FullFilename), case file:write_file(FullFilename, Content) of diff --git a/apps/emqx_rule_engine/include/rule_engine.hrl b/apps/emqx_rule_engine/include/rule_engine.hrl index 97263099d..a7fe9c60a 100644 --- a/apps/emqx_rule_engine/include/rule_engine.hrl +++ b/apps/emqx_rule_engine/include/rule_engine.hrl @@ -79,6 +79,7 @@ , on_action_failed :: continue | stop , actions :: list(#action_instance{}) , enabled :: boolean() + , created_at :: integer() %% epoch in millisecond precision , description :: binary() }). @@ -86,7 +87,7 @@ { id :: resource_id() , type :: resource_type_name() , config :: #{} %% the configs got from API for initializing resource - , created_at :: integer() %% epoch in millisecond precision + , created_at :: integer() | undefined %% epoch in millisecond precision , description :: binary() }). 
diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.erl b/apps/emqx_rule_engine/src/emqx_rule_engine.erl index 860b9e702..12f00c191 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.erl @@ -179,6 +179,7 @@ create_rule(Params = #{rawsql := Sql, actions := ActArgs}) -> on_action_failed = maps:get(on_action_failed, Params, continue), actions = Actions, enabled = Enabled, + created_at = erlang:system_time(millisecond), description = maps:get(description, Params, "") }, ok = emqx_rule_registry:add_rule(Rule), diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl index beb1204df..e7287d98d 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl @@ -230,7 +230,7 @@ update_rule(#{id := Id}, Params) -> end. list_rules(_Bindings, _Params) -> - return_all(emqx_rule_registry:get_rules()). + return_all(emqx_rule_registry:get_rules_ordered_by_ts()). show_rule(#{id := Id}, _Params) -> reply_with(fun emqx_rule_registry:get_rule/1, Id). @@ -495,8 +495,8 @@ parse_rule_params([{<<"actions">>, Actions} | Params], Rule) -> parse_rule_params(Params, Rule#{actions => parse_actions(Actions)}); parse_rule_params([{<<"description">>, Descr} | Params], Rule) -> parse_rule_params(Params, Rule#{description => Descr}); -parse_rule_params([_ | Params], Res) -> - parse_rule_params(Params, Res). +parse_rule_params([_ | Params], Rule) -> + parse_rule_params(Params, Rule). on_failed(<<"continue">>) -> continue; on_failed(<<"stop">>) -> stop; diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_cli.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_cli.erl index 9edc198f9..dbcf3e0e5 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_cli.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_cli.erl @@ -98,7 +98,7 @@ unload() -> %%----------------------------------------------------------------------------- -dialyzer([{nowarn_function, [rules/1]}]). rules(["list"]) -> - print_all(emqx_rule_registry:get_rules()); + print_all(emqx_rule_registry:get_rules_ordered_by_ts()); rules(["show", RuleId]) -> print_with(fun emqx_rule_registry:get_rule/1, list_to_binary(RuleId)); diff --git a/apps/emqx_rule_engine/src/emqx_rule_funcs.erl b/apps/emqx_rule_engine/src/emqx_rule_funcs.erl index 55917f751..7c939e93d 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_funcs.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_funcs.erl @@ -512,12 +512,10 @@ map(Data) -> emqx_rule_utils:map(Data). bin2hexstr(Bin) when is_binary(Bin) -> - IntL = binary_to_list(Bin), - list_to_binary([io_lib:format("~2.16.0B", [Int]) || Int <- IntL]). + emqx_misc:bin2hexstr_A_F(Bin). hexstr2bin(Str) when is_binary(Str) -> - list_to_binary([binary_to_integer(W, 16) || <> <= Str]). - + emqx_misc:hexstr2bin(Str). %%------------------------------------------------------------------------------ %% NULL Funcs @@ -776,14 +774,7 @@ sha256(S) when is_binary(S) -> hash(sha256, S). hash(Type, Data) -> - hexstring(crypto:hash(Type, Data)). - -hexstring(<>) -> - iolist_to_binary(io_lib:format("~32.16.0b", [X])); -hexstring(<>) -> - iolist_to_binary(io_lib:format("~40.16.0b", [X])); -hexstring(<>) -> - iolist_to_binary(io_lib:format("~64.16.0b", [X])). + emqx_misc:bin2hexstr_a_f(crypto:hash(Type, Data)). 
%%------------------------------------------------------------------------------ %% Data encode and decode Funcs diff --git a/apps/emqx_rule_engine/src/emqx_rule_registry.erl b/apps/emqx_rule_engine/src/emqx_rule_registry.erl index dc7a33805..80667f995 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_registry.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_registry.erl @@ -20,6 +20,7 @@ -include("rule_engine.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("stdlib/include/qlc.hrl"). -export([start_link/0]). @@ -27,6 +28,7 @@ -export([ get_rules/0 , get_rules_for/1 , get_rules_with_same_event/1 + , get_rules_ordered_by_ts/0 , get_rule/1 , add_rule/1 , add_rules/1 @@ -168,6 +170,14 @@ start_link() -> get_rules() -> get_all_records(?RULE_TAB). +get_rules_ordered_by_ts() -> + F = fun() -> + Query = qlc:q([E || E <- mnesia:table(?RULE_TAB)]), + qlc:e(qlc:keysort(#rule.created_at, Query, [{order, ascending}])) + end, + {atomic, List} = mnesia:transaction(F), + List. + -spec(get_rules_for(Topic :: binary()) -> list(emqx_rule_engine:rule())). get_rules_for(Topic) -> [Rule || Rule = #rule{for = For} <- get_rules(), diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl index 389c308ad..dae6f5edb 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl @@ -694,44 +694,44 @@ t_update_rule(_Config) -> ok. t_disable_rule(_Config) -> - ets:new(simpile_action_2, [named_table, set, public]), - ets:insert(simpile_action_2, {created, 0}), - ets:insert(simpile_action_2, {destroyed, 0}), + ets:new(simple_action_2, [named_table, set, public]), + ets:insert(simple_action_2, {created, 0}), + ets:insert(simple_action_2, {destroyed, 0}), Now = erlang:timestamp(), emqx_rule_registry:add_action( - #action{name = 'simpile_action_2', app = ?APP, + #action{name = 'simple_action_2', app = ?APP, module = ?MODULE, - on_create = simpile_action_2_create, - on_destroy = simpile_action_2_destroy, + on_create = simple_action_2_create, + on_destroy = simple_action_2_destroy, types=[], params_spec = #{}, title = #{en => <<"Simple Action">>}, description = #{en => <<"Simple Action">>}}), {ok, #rule{actions = [#action_instance{id = ActInsId0}]}} = emqx_rule_engine:create_rule( #{id => <<"simple_rule_2">>, rawsql => <<"select * from \"t/#\"">>, - actions => [#{name => 'simpile_action_2', args => #{}}] + actions => [#{name => 'simple_action_2', args => #{}}] }), - [{_, CAt}] = ets:lookup(simpile_action_2, created), + [{_, CAt}] = ets:lookup(simple_action_2, created), ?assert(CAt > Now), - [{_, DAt}] = ets:lookup(simpile_action_2, destroyed), + [{_, DAt}] = ets:lookup(simple_action_2, destroyed), ?assert(DAt < Now), %% disable the rule and verify the old action instances has been cleared Now2 = erlang:timestamp(), emqx_rule_engine:update_rule(#{ id => <<"simple_rule_2">>, enabled => false}), - [{_, CAt2}] = ets:lookup(simpile_action_2, created), + [{_, CAt2}] = ets:lookup(simple_action_2, created), ?assert(CAt2 < Now2), - [{_, DAt2}] = ets:lookup(simpile_action_2, destroyed), + [{_, DAt2}] = ets:lookup(simple_action_2, destroyed), ?assert(DAt2 > Now2), %% enable the rule again and verify the action instances has been created Now3 = erlang:timestamp(), emqx_rule_engine:update_rule(#{ id => <<"simple_rule_2">>, enabled => true}), - [{_, CAt3}] = ets:lookup(simpile_action_2, created), + [{_, CAt3}] = ets:lookup(simple_action_2, created), ?assert(CAt3 > Now3), - [{_, DAt3}] = 
ets:lookup(simpile_action_2, destroyed), + [{_, DAt3}] = ets:lookup(simple_action_2, destroyed), ?assert(DAt3 < Now3), ok = emqx_rule_engine:delete_rule(<<"simple_rule_2">>). @@ -744,6 +744,19 @@ t_get_rules_for(_Config) -> ok = emqx_rule_registry:remove_rules([<<"rule-debug-1">>, <<"rule-debug-2">>]), ok. +t_get_rules_ordered_by_ts(_Config) -> + Now = fun() -> erlang:system_time(nanosecond) end, + ok = emqx_rule_registry:add_rules( + [make_simple_rule_with_ts(<<"rule-debug-0">>, Now()), + make_simple_rule_with_ts(<<"rule-debug-1">>, Now()), + make_simple_rule_with_ts(<<"rule-debug-2">>, Now()) + ]), + ?assertMatch([ + #rule{id = <<"rule-debug-0">>}, + #rule{id = <<"rule-debug-1">>}, + #rule{id = <<"rule-debug-2">>} + ], emqx_rule_registry:get_rules_ordered_by_ts()). + t_get_rules_for_2(_Config) -> Len0 = length(emqx_rule_registry:get_rules_for(<<"simple/1">>)), ok = emqx_rule_registry:add_rules( @@ -2166,6 +2179,17 @@ make_simple_rule(RuleId) when is_binary(RuleId) -> actions = [{'inspect', #{}}], description = <<"simple rule">>}. +make_simple_rule_with_ts(RuleId, Ts) when is_binary(RuleId) -> + #rule{id = RuleId, + rawsql = <<"select * from \"simple/topic\"">>, + for = [<<"simple/topic">>], + fields = [<<"*">>], + is_foreach = false, + conditions = {}, + actions = [{'inspect', #{}}], + created_at = Ts, + description = <<"simple rule">>}. + make_simple_rule(RuleId, SQL, ForTopics) when is_binary(RuleId) -> #rule{id = RuleId, rawsql = SQL, @@ -2250,12 +2274,12 @@ crash_action(_Id, _Params) -> error(crash) end. -simpile_action_2_create(_Id, _Params) -> - ets:insert(simpile_action_2, {created, erlang:timestamp()}), +simple_action_2_create(_Id, _Params) -> + ets:insert(simple_action_2, {created, erlang:timestamp()}), fun(_Data, _Envs) -> ok end. -simpile_action_2_destroy(_Id, _Params) -> - ets:insert(simpile_action_2, {destroyed, erlang:timestamp()}), +simple_action_2_destroy(_Id, _Params) -> + ets:insert(simple_action_2, {destroyed, erlang:timestamp()}), fun(_Data, _Envs) -> ok end. init_plus_by_one_action() -> diff --git a/apps/emqx_sn/src/emqx_sn_gateway.erl b/apps/emqx_sn/src/emqx_sn_gateway.erl index 335bd5531..55f70f943 100644 --- a/apps/emqx_sn/src/emqx_sn_gateway.erl +++ b/apps/emqx_sn/src/emqx_sn_gateway.erl @@ -83,11 +83,11 @@ keepalive_interval :: maybe(integer()), connpkt :: term(), asleep_timer :: tuple(), - asleep_msg_queue :: list(), enable_stats :: boolean(), stats_timer :: maybe(reference()), idle_timeout :: integer(), - enable_qos3 = false :: boolean() + enable_qos3 = false :: boolean(), + has_pending_pingresp = false :: boolean() }). -define(INFO_KEYS, [socktype, peername, sockname, sockstate]). %, active_n]). @@ -104,6 +104,15 @@ -define(NO_PEERCERT, undefined). +-define(CONN_INFO(Sockname, Peername), + #{socktype => udp, + sockname => Sockname, + peername => Peername, + protocol => 'mqtt-sn', + peercert => ?NO_PEERCERT, + conn_mod => ?MODULE + }). 
+ %%-------------------------------------------------------------------- %% Exported APIs %%-------------------------------------------------------------------- @@ -134,13 +143,7 @@ init([{_, SockPid, Sock}, Peername, Options]) -> EnableStats = proplists:get_value(enable_stats, Options, false), case inet:sockname(Sock) of {ok, Sockname} -> - Channel = emqx_channel:init(#{socktype => udp, - sockname => Sockname, - peername => Peername, - protocol => 'mqtt-sn', - peercert => ?NO_PEERCERT, - conn_mod => ?MODULE - }, ?DEFAULT_CHAN_OPTIONS), + Channel = emqx_channel:init(?CONN_INFO(Sockname, Peername), ?DEFAULT_CHAN_OPTIONS), State = #state{gwid = GwId, username = Username, password = Password, @@ -152,7 +155,6 @@ init([{_, SockPid, Sock}, Peername, Options]) -> channel = Channel, registry = Registry, asleep_timer = emqx_sn_asleep_timer:init(), - asleep_msg_queue = [], enable_stats = EnableStats, enable_qos3 = EnableQos3, idle_timeout = IdleTimeout @@ -175,9 +177,6 @@ idle(cast, {incoming, ?SN_CONNECT_MSG(Flags, _ProtoId, Duration, ClientId)}, Sta #mqtt_sn_flags{will = Will, clean_start = CleanStart} = Flags, do_connect(ClientId, CleanStart, Will, Duration, State); -idle(cast, {incoming, Packet = ?CONNECT_PACKET(_ConnPkt)}, State) -> - handle_incoming(Packet, State); - idle(cast, {incoming, ?SN_ADVERTISE_MSG(_GwId, _Radius)}, State) -> % ignore {keep_state, State, State#state.idle_timeout}; @@ -188,7 +187,7 @@ idle(cast, {incoming, ?SN_DISCONNECT_MSG(_Duration)}, State) -> idle(cast, {incoming, ?SN_PUBLISH_MSG(_Flag, _TopicId, _MsgId, _Data)}, State = #state{enable_qos3 = false}) -> ?LOG(debug, "The enable_qos3 is false, ignore the received publish with QoS=-1 in idle mode!", [], State), - {keep_state_and_data, State#state.idle_timeout}; + {keep_state, State#state.idle_timeout}; idle(cast, {incoming, ?SN_PUBLISH_MSG(#mqtt_sn_flags{qos = ?QOS_NEG1, topic_id_type = TopicIdType @@ -206,7 +205,7 @@ idle(cast, {incoming, ?SN_PUBLISH_MSG(#mqtt_sn_flags{qos = ?QOS_NEG1, ok end, ?LOG(debug, "Client id=~p receives a publish with QoS=-1 in idle mode!", [ClientId], State), - {keep_state_and_data, State#state.idle_timeout}; + {keep_state, State#state.idle_timeout}; idle(cast, {incoming, PingReq = ?SN_PINGREQ_MSG(_ClientId)}, State) -> handle_ping(PingReq, State); @@ -400,15 +399,23 @@ asleep(cast, {incoming, ?SN_PINGREQ_MSG(undefined)}, State) -> % ClientId in PINGREQ is mandatory {keep_state, State}; -asleep(cast, {incoming, PingReq = ?SN_PINGREQ_MSG(ClientIdPing)}, - State = #state{clientid = ClientId}) -> +asleep(cast, {incoming, ?SN_PINGREQ_MSG(ClientIdPing)}, + State = #state{clientid = ClientId, channel = Channel}) -> case ClientIdPing of ClientId -> - _ = handle_ping(PingReq, State), - self() ! 
do_awake_jobs, - % it is better to go awake state, since the jobs in awake may take long time - % and asleep timer get timeout, it will cause disaster - {next_state, awake, State}; + inc_ping_counter(), + case emqx_session:dequeue(emqx_channel:get_session(Channel)) of + {ok, Session0} -> + send_message(?SN_PINGRESP_MSG(), State), + {keep_state, State#state{ + channel = emqx_channel:set_session(Session0, Channel)}}; + {ok, Delivers, Session0} -> + Events = [emqx_message:to_packet(PckId, Msg) || {PckId, Msg} <- Delivers] + ++ [try_goto_asleep], + {next_state, awake, State#state{ + channel = emqx_channel:set_session(Session0, Channel), + has_pending_pingresp = true}, outgoing_events(Events)} + end; _Other -> {next_state, asleep, State} end; @@ -453,6 +460,20 @@ awake(cast, {outgoing, Packet}, State) -> ok = handle_outgoing(Packet, State), {keep_state, State}; +awake(cast, {incoming, ?SN_PUBACK_MSG(TopicId, MsgId, ReturnCode)}, State) -> + do_puback(TopicId, MsgId, ReturnCode, awake, State); + +awake(cast, try_goto_asleep, State=#state{channel = Channel, + has_pending_pingresp = PingPending}) -> + case emqx_mqueue:is_empty(emqx_session:info(mqueue, emqx_channel:get_session(Channel))) of + true when PingPending =:= true -> + send_message(?SN_PINGRESP_MSG(), State), + goto_asleep_state(State#state{has_pending_pingresp = false}); + true when PingPending =:= false -> + goto_asleep_state(State); + false -> keep_state_and_data + end; + awake(EventType, EventContent, State) -> handle_event(EventType, EventContent, awake, State). @@ -489,11 +510,12 @@ handle_event(info, {datagram, SockPid, Data}, StateName, shutdown(frame_error, State) end; -handle_event(info, Deliver = {deliver, _Topic, Msg}, asleep, - State = #state{asleep_msg_queue = AsleepMsgQ}) -> +handle_event(info, {deliver, _Topic, Msg}, asleep, + State = #state{channel = Channel}) -> % section 6.14, Support of sleeping clients ?LOG(debug, "enqueue downlink message in asleep state Msg=~p", [Msg], State), - {keep_state, State#state{asleep_msg_queue = [Deliver|AsleepMsgQ]}}; + Session = emqx_session:enqueue(Msg, emqx_channel:get_session(Channel)), + {keep_state, State#state{channel = emqx_channel:set_session(Session, Channel)}}; handle_event(info, Deliver = {deliver, _Topic, _Msg}, _StateName, State = #state{channel = Channel}) -> @@ -518,18 +540,6 @@ handle_event(info, {timeout, TRef, keepalive}, _StateName, State) -> handle_event(info, {timeout, TRef, TMsg}, _StateName, State) -> handle_timeout(TRef, TMsg, State); -handle_event(info, do_awake_jobs, StateName, State=#state{clientid = ClientId}) -> - ?LOG(debug, "Do awake jobs, statename : ~p", [StateName], State), - case process_awake_jobs(ClientId, State) of - {keep_state, NewState} -> - case StateName of - awake -> goto_asleep_state(NewState); - _Other -> {keep_state, NewState} - %% device send a CONNECT immediately before this do_awake_jobs is handled - end; - Stop -> Stop - end; - handle_event(info, asleep_timeout, asleep, State) -> ?LOG(debug, "asleep timer timeout, shutdown now", [], State), stop(asleep_timeout, State); @@ -593,33 +603,31 @@ handle_call(_From, Req, State = #state{channel = Channel}) -> handle_info(Info, State = #state{channel = Channel}) -> handle_return(emqx_channel:handle_info(Info, Channel), State). -handle_ping(_PingReq, State) -> - inc_counter(recv_oct, 2), - inc_counter(recv_msg, 1), - ok = send_message(?SN_PINGRESP_MSG(), State), - {keep_state, State}. 
- handle_timeout(TRef, TMsg, State = #state{channel = Channel}) -> handle_return(emqx_channel:handle_timeout(TRef, TMsg, Channel), State). -handle_return({ok, NChannel}, State) -> - {keep_state, State#state{channel = NChannel}}; -handle_return({ok, Replies, NChannel}, State) -> - {keep_state, State#state{channel = NChannel}, next_events(Replies)}; +handle_return(Return, State) -> + handle_return(Return, State, []). -handle_return({shutdown, Reason, NChannel}, State) -> +handle_return({ok, NChannel}, State, AddEvents) -> + handle_return({ok, AddEvents, NChannel}, State, []); +handle_return({ok, Replies, NChannel}, State, AddEvents) -> + {keep_state, State#state{channel = NChannel}, outgoing_events(append(Replies, AddEvents))}; +handle_return({shutdown, Reason, NChannel}, State, _AddEvents) -> stop({shutdown, Reason}, State#state{channel = NChannel}); -handle_return({shutdown, Reason, OutPacket, NChannel}, State) -> +handle_return({shutdown, Reason, OutPacket, NChannel}, State, _AddEvents) -> NState = State#state{channel = NChannel}, ok = handle_outgoing(OutPacket, NState), stop({shutdown, Reason}, NState). -next_events(Packet) when is_record(Packet, mqtt_packet) -> +outgoing_events(Actions) -> + lists:map(fun outgoing_event/1, Actions). + +outgoing_event(Packet) when is_record(Packet, mqtt_packet); + is_record(Packet, mqtt_sn_message)-> next_event({outgoing, Packet}); -next_events(Action) when is_tuple(Action) -> - next_event(Action); -next_events(Actions) when is_list(Actions) -> - lists:map(fun next_event/1, Actions). +outgoing_event(Action) -> + next_event(Action). close_socket(State = #state{sockstate = closed}) -> State; close_socket(State = #state{socket = _Socket}) -> @@ -673,6 +681,13 @@ call(Pid, Req) -> %%-------------------------------------------------------------------- %% Internal Functions %%-------------------------------------------------------------------- +handle_ping(_PingReq, State) -> + ok = send_message(?SN_PINGRESP_MSG(), State), + inc_ping_counter(), + {keep_state, State}. + +inc_ping_counter() -> + inc_counter(recv_msg, 1). mqtt2sn(?CONNACK_PACKET(0, _SessPresent), _State) -> ?SN_CONNACK_MSG(0); @@ -786,11 +801,17 @@ mqttsn_to_mqtt(?SN_PUBCOMP, MsgId) -> ?PUBCOMP_PACKET(MsgId). do_connect(ClientId, CleanStart, WillFlag, Duration, State) -> + %% 6.6 Client’s Publish Procedure + %% At any point in time a client may have only one QoS level 1 or 2 PUBLISH message + %% outstanding, i.e. it has to wait for the termination of this PUBLISH message exchange + %% before it could start a new level 1 or 2 transaction. + OnlyOneInflight = #{'Receive-Maximum' => 1}, ConnPkt = #mqtt_packet_connect{clientid = ClientId, clean_start = CleanStart, username = State#state.username, password = State#state.password, - keepalive = Duration + keepalive = Duration, + properties = OnlyOneInflight }, put(clientid, ClientId), case WillFlag of @@ -939,11 +960,11 @@ do_publish_will(#state{will_msg = WillMsg, clientid = ClientId}) -> _ = emqx_broker:publish(emqx_packet:to_message(Publish, ClientId)), ok. 
-do_puback(TopicId, MsgId, ReturnCode, _StateName, +do_puback(TopicId, MsgId, ReturnCode, StateName, State=#state{clientid = ClientId, registry = Registry}) -> case ReturnCode of ?SN_RC_ACCEPTED -> - handle_incoming(?PUBACK_PACKET(MsgId), State); + handle_incoming(?PUBACK_PACKET(MsgId), StateName, State); ?SN_RC_INVALID_TOPIC_ID -> case emqx_sn_registry:lookup_topic(Registry, ClientId, TopicId) of undefined -> ok; @@ -990,15 +1011,6 @@ update_will_msg(undefined, Msg) -> update_will_msg(Will = #will_msg{}, Msg) -> Will#will_msg{payload = Msg}. -process_awake_jobs(_ClientId, State = #state{asleep_msg_queue = []}) -> - {keep_state, State}; -process_awake_jobs(_ClientId, State = #state{channel = Channel, - asleep_msg_queue = AsleepMsgQ}) -> - Delivers = lists:reverse(AsleepMsgQ), - NState = State#state{asleep_msg_queue = []}, - Result = emqx_channel:handle_deliver(Delivers, Channel), - handle_return(Result, NState). - enqueue_msgid(suback, MsgId, TopicId) -> put({suback, MsgId}, TopicId); enqueue_msgid(puback, MsgId, TopicId) -> @@ -1022,12 +1034,21 @@ get_topic_id(Type, MsgId) -> TopicId -> TopicId end. -handle_incoming(Packet = ?PACKET(Type), State = #state{channel = Channel}) -> +handle_incoming(Packet, State) -> + handle_incoming(Packet, unknown, State). + +handle_incoming(?PUBACK_PACKET(_) = Packet, awake, State) -> + Result = channel_handle_in(Packet, State), + handle_return(Result, State, [try_goto_asleep]); +handle_incoming(Packet, _StName, State) -> + Result = channel_handle_in(Packet, State), + handle_return(Result, State). + +channel_handle_in(Packet = ?PACKET(Type), State = #state{channel = Channel}) -> _ = inc_incoming_stats(Type), ok = emqx_metrics:inc_recv(Packet), ?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)], State), - Result = emqx_channel:handle_in(Packet, Channel), - handle_return(Result, State). + emqx_channel:handle_in(Packet, Channel). handle_outgoing(Packets, State) when is_list(Packets) -> lists:foreach(fun(Packet) -> handle_outgoing(Packet, State) end, Packets); @@ -1081,3 +1102,8 @@ next_event(Content) -> inc_counter(Key, Inc) -> _ = emqx_pd:inc_counter(Key, Inc), ok. + +append(Replies, AddEvents) when is_list(Replies) -> + Replies ++ AddEvents; +append(Replies, AddEvents) -> + [Replies] ++ AddEvents. diff --git a/apps/emqx_sn/test/emqx_sn_protocol_SUITE.erl b/apps/emqx_sn/test/emqx_sn_protocol_SUITE.erl index 35ad84193..be783dc66 100644 --- a/apps/emqx_sn/test/emqx_sn_protocol_SUITE.erl +++ b/apps/emqx_sn/test/emqx_sn_protocol_SUITE.erl @@ -402,8 +402,6 @@ t_publish_negqos_case09(_) -> What = receive_response(Socket), ?assertEqual(Eexp, What) end, - %% dbg:start(), dbg:tracer(), dbg:p(all, c), - %% dbg:tpl(emqx_sn_gateway, send_message, x), send_disconnect_msg(Socket, undefined), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), @@ -1049,14 +1047,15 @@ t_asleep_test03_to_awake_qos1_dl_msg(_) -> % goto awake state, receive downlink messages, and go back to asleep send_pingreq_msg(Socket, ClientId), - ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), - - %% {unexpected_udp_data, _} = receive_response(Socket), + %% the broker should sent dl msgs to the awake client before sending the pingresp UdpData = receive_response(Socket), MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicId1, Payload1}, UdpData), send_puback_msg(Socket, TopicId1, MsgId_udp), + %% check the pingresp is received at last + ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), + gen_udp:close(Socket). 
t_asleep_test04_to_awake_qos1_dl_msg(_) -> @@ -1106,8 +1105,6 @@ t_asleep_test04_to_awake_qos1_dl_msg(_) -> % goto awake state, receive downlink messages, and go back to asleep send_pingreq_msg(Socket, <<"test">>), - ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% get REGISTER first, since this topic has never been registered %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -1119,6 +1116,8 @@ t_asleep_test04_to_awake_qos1_dl_msg(_) -> MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload1}, UdpData), send_puback_msg(Socket, TopicIdNew, MsgId_udp), + ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), + gen_udp:close(Socket). t_asleep_test05_to_awake_qos1_dl_msg(_) -> @@ -1173,7 +1172,6 @@ t_asleep_test05_to_awake_qos1_dl_msg(_) -> % goto awake state, receive downlink messages, and go back to asleep send_pingreq_msg(Socket, <<"test">>), - ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), UdpData_reg = receive_response(Socket), {TopicIdNew, MsgId_reg} = check_register_msg_on_udp(TopicName_test5, UdpData_reg), @@ -1197,7 +1195,7 @@ t_asleep_test05_to_awake_qos1_dl_msg(_) -> TopicIdNew, Payload4}, UdpData4), send_puback_msg(Socket, TopicIdNew, MsgId4) end, - timer:sleep(50), + ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), gen_udp:close(Socket). t_asleep_test06_to_awake_qos2_dl_msg(_) -> @@ -1249,14 +1247,12 @@ t_asleep_test06_to_awake_qos2_dl_msg(_) -> % goto awake state, receive downlink messages, and go back to asleep send_pingreq_msg(Socket, <<"test">>), - ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), UdpData = wrap_receive_response(Socket), MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicId_tom, Payload1}, UdpData), send_pubrec_msg(Socket, MsgId_udp), - timer:sleep(300), - + ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), gen_udp:close(Socket). t_asleep_test07_to_connected(_) -> @@ -1391,8 +1387,6 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) -> % goto awake state, receive downlink messages, and go back to asleep send_pingreq_msg(Socket, <<"test">>), - ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), - UdpData_reg = receive_response(Socket), {TopicIdNew, MsgId_reg} = check_register_msg_on_udp(TopicName_test9, UdpData_reg), send_regack_msg(Socket, TopicIdNew, MsgId_reg), @@ -1424,7 +1418,7 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) -> TopicIdNew, Payload4}, UdpData4), send_puback_msg(Socket, TopicIdNew, MsgId4) end, - timer:sleep(100), + ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), %% send PINGREQ again to enter awake state send_pingreq_msg(Socket, <<"test">>), @@ -1787,7 +1781,7 @@ wrap_receive_response(Socket) -> Other end. receive_response(Socket) -> - receive_response(Socket, 5000). + receive_response(Socket, 2000). receive_response(Socket, Timeout) -> receive {udp, Socket, _, _, Bin} -> @@ -1832,8 +1826,8 @@ get_udp_broadcast_address() -> "255.255.255.255". 
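These assertion moves encode the new awake-phase ordering: buffered downlink messages are flushed (and acknowledged) first, and the PINGRESP terminates the awake phase only after the pending QoS 1/2 exchanges finish. A hedged sketch of a helper this suite could use to express that expectation, assuming the suite's existing receive_response/1 and send_puback_msg/3 helpers and the ?SN_PINGRESP / ?SN_PUBLISH constants from emqx_sn.hrl:

%% Hedged sketch, not part of the patch: drain all buffered QoS 1 PUBLISH
%% messages delivered during the awake phase, acking each one, and return ok
%% once the terminating PINGRESP arrives.
drain_awake_phase(Socket) ->
    case receive_response(Socket) of
        <<2, ?SN_PINGRESP>> ->
            ok;
        <<_Len:8, ?SN_PUBLISH, _Flags:8, TopicId:16, MsgId:16, _Payload/binary>> ->
            send_puback_msg(Socket, TopicId, MsgId),
            drain_awake_phase(Socket)
    end.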
check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, TopicType, TopicId, Payload}, UdpData) -> - ct:pal("UdpData: ~p, Payload: ~p", [UdpData, Payload]), <> = UdpData, + ct:pal("UdpData: ~p, Payload: ~p, PayloadIn: ~p", [UdpData, Payload, PayloadIn]), Size9 = byte_size(Payload) + 7, Eexp = <>, ?assertEqual(Eexp, HeaderUdp), % mqtt-sn header should be same diff --git a/apps/emqx_web_hook/etc/emqx_web_hook.conf b/apps/emqx_web_hook/etc/emqx_web_hook.conf index c585a4e13..6707e4673 100644 --- a/apps/emqx_web_hook/etc/emqx_web_hook.conf +++ b/apps/emqx_web_hook/etc/emqx_web_hook.conf @@ -43,6 +43,15 @@ web.hook.body.encoding_of_payload_field = plain ## Value: true | false ## web.hook.ssl.verify = false +## If not specified, the server names returned in the server's certificate are validated +## against the host part of the `web.hook.url` config. +## Setting this to 'disable' makes EMQ X ignore unmatched server names. +## If set to a host name, the server names returned in the server's certificate are validated +## against that value. +## +## Value: String | disable +## web.hook.ssl.server_name_indication = disable + ## Connection process pool size ## ## Value: Number diff --git a/apps/emqx_web_hook/priv/emqx_web_hook.schema b/apps/emqx_web_hook/priv/emqx_web_hook.schema index 3a56b8b1d..8ba1cc0fd 100644 --- a/apps/emqx_web_hook/priv/emqx_web_hook.schema +++ b/apps/emqx_web_hook/priv/emqx_web_hook.schema @@ -34,6 +34,10 @@ {datatype, {enum, [true, false]}} ]}. +{mapping, "web.hook.ssl.server_name_indication", "emqx_web_hook.server_name_indication", [ + {datatype, string} +]}. + {mapping, "web.hook.pool_size", "emqx_web_hook.pool_size", [ {default, 32}, {datatype, integer} diff --git a/apps/emqx_web_hook/src/emqx_web_hook.erl b/apps/emqx_web_hook/src/emqx_web_hook.erl index ec525c759..6c63d6d49 100644 --- a/apps/emqx_web_hook/src/emqx_web_hook.erl +++ b/apps/emqx_web_hook/src/emqx_web_hook.erl @@ -325,7 +325,7 @@ send_http_request(ClientID, Params) -> {ok, Path} = application:get_env(?APP, path), Headers = application:get_env(?APP, headers, []), Body = emqx_json:encode(Params), - ?LOG(debug, "Send to: ~0p, params: ~0s", [Path, Body]), + ?LOG(debug, "Send to: ~0p, params: ~s", [Path, Body]), case ehttpc:request(ehttpc_pool:pick_worker(?APP, ClientID), post, {Path, Headers, Body}) of {ok, StatusCode, _} when StatusCode >= 200 andalso StatusCode < 300 -> ok; diff --git a/apps/emqx_web_hook/src/emqx_web_hook_actions.erl b/apps/emqx_web_hook/src/emqx_web_hook_actions.erl index 4e63b54df..f026434c6 100644 --- a/apps/emqx_web_hook/src/emqx_web_hook_actions.erl +++ b/apps/emqx_web_hook/src/emqx_web_hook_actions.erl @@ -84,17 +84,23 @@ certfile => #{order => 7, type => file, default => <<"">>, - title =>#{en => <<"SSL Cert">>, - zh => <<"SSL Cert"/utf8>>}, + title => #{en => <<"SSL Cert">>, + zh => <<"SSL Cert"/utf8>>}, description => #{en => <<"Your ssl certfile">>, zh => <<"SSL 证书"/utf8>>}}, verify => #{order => 8, type => boolean, default => false, - title =>#{en => <<"Verify Server Certfile">>, - zh => <<"校验服务器证书"/utf8>>}, + title => #{en => <<"Verify Server Certfile">>, + zh => <<"校验服务器证书"/utf8>>}, description => #{en => <<"Whether to verify the server certificate. By default, the client will not verify the server's certificate.
If verification is required, please set it to true.">>, - zh => <<"是否校验服务器证书。 默认客户端不会去校验服务器的证书,如果需要校验,请设置成true。"/utf8>>}} + zh => <<"是否校验服务器证书。 默认客户端不会去校验服务器的证书,如果需要校验,请设置成true。"/utf8>>}}, + server_name_indication => #{order => 9, + type => string, + title => #{en => <<"Server Name Indication">>, + zh => <<"服务器名称指示"/utf8>>}, + description => #{en => <<"Specify the hostname used for peer certificate verification, or set to disable to turn off this verification.">>, + zh => <<"指定用于对端证书验证时使用的主机名,或者设置为 disable 以关闭此项验证。"/utf8>>}} }). -define(ACTION_PARAM_RESOURCE, #{ @@ -346,8 +352,7 @@ pool_name(ResId) -> list_to_atom("webhook:" ++ str(ResId)). get_ssl_opts(Opts, ResId) -> - Dir = filename:join([emqx:get_env(data_dir), "rule", ResId]), - [{ssl, true}, {ssl_opts, emqx_plugin_libs_ssl:save_files_return_opts(Opts, Dir)}]. + [{ssl, true}, {ssl_opts, emqx_plugin_libs_ssl:save_files_return_opts(Opts, "rules", ResId)}]. parse_host(Host) -> case inet:parse_address(Host) of diff --git a/apps/emqx_web_hook/src/emqx_web_hook_app.erl b/apps/emqx_web_hook/src/emqx_web_hook_app.erl index 67775e00f..c75c7cb01 100644 --- a/apps/emqx_web_hook/src/emqx_web_hook_app.erl +++ b/apps/emqx_web_hook/src/emqx_web_hook_app.erl @@ -60,11 +60,18 @@ translate_env() -> true -> verify_peer; false -> verify_none end, + SNI = case application:get_env(?APP, server_name_indication, undefined) of + "disable" -> disable; + SNI0 -> SNI0 + end, TLSOpts = lists:filter(fun({_K, V}) -> V /= <<>> andalso V /= undefined andalso V /= "" andalso true - end, [{keyfile, KeyFile}, {certfile, CertFile}, {cacertfile, CACertFile}]), - NTLSOpts = [ {verify, VerifyType} - , {versions, emqx_tls_lib:default_versions()} + end, [{keyfile, KeyFile}, + {certfile, CertFile}, + {cacertfile, CACertFile}, + {verify, VerifyType}, + {server_name_indication, SNI}]), + NTLSOpts = [ {versions, emqx_tls_lib:default_versions()} , {ciphers, emqx_tls_lib:default_ciphers()} | TLSOpts ], diff --git a/apps/emqx_web_hook/test/http_server.erl b/apps/emqx_web_hook/test/http_server.erl index e0f367eba..6a23e1035 100644 --- a/apps/emqx_web_hook/test/http_server.erl +++ b/apps/emqx_web_hook/test/http_server.erl @@ -12,7 +12,7 @@ -export([start_link/0]). -export([get_received_data/0]). -export([stop/1]). --export([code_change/3, handle_call/3, handle_cast/2, handle_info/2, init/1, terminate/2]). +-export([code_change/3, handle_call/3, handle_cast/2, handle_info/2, init/1, init/2, terminate/2]). -define(HTTP_PORT, 9999). -define(HTTPS_PORT, 8888). -record(state, {}). @@ -102,4 +102,4 @@ init(Req, State) -> reply(Req, ok) -> cowboy_req:reply(200, #{<<"content-type">> => <<"text/plain">>}, <<"ok">>, Req); reply(Req, error) -> - cowboy_req:reply(404, #{<<"content-type">> => <<"text/plain">>}, <<"deny">>, Req). \ No newline at end of file + cowboy_req:reply(404, #{<<"content-type">> => <<"text/plain">>}, <<"deny">>, Req). 
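With this change the server_name_indication string travels from the flat config through the schema into the ssl options handed to the HTTP client, and the literal "disable" is mapped to the atom disable that OTP's ssl module understands. A hedged sketch of that mapping (sni_opts/1 and the example host are illustrative; the real translation lives in emqx_web_hook_app:translate_env/0 above):

%% Hedged sketch, illustrative names only: how the configured string is
%% expected to map onto OTP ssl's server_name_indication option.
-module(sni_sketch).
-export([sni_opts/1]).

sni_opts(undefined) -> [];                                   %% fall back to the host in web.hook.url
sni_opts("disable") -> [{server_name_indication, disable}];  %% skip server-name matching
sni_opts(Host) when is_list(Host) ->
    [{server_name_indication, Host}].

For example, a hypothetical web.hook.ssl.server_name_indication = webhook.internal.example.com would yield [{server_name_indication, "webhook.internal.example.com"}], while "disable" turns server-name matching off entirely.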
diff --git a/bin/emqx b/bin/emqx index 006284b4c..e7c809ad2 100755 --- a/bin/emqx +++ b/bin/emqx @@ -221,6 +221,7 @@ generate_config() { TMP_ARG_FILE="$RUNNER_DATA_DIR/configs/vm.args.tmp" cp "$RUNNER_ETC_DIR/vm.args" "$TMP_ARG_FILE" echo "" >> "$TMP_ARG_FILE" + echo "-pa ${REL_DIR}/consolidated" >> "$TMP_ARG_FILE" sed '/^#/d' "$CUTTLE_GEN_ARG_FILE" | sed '/^$/d' | while IFS='' read -r ARG_LINE || [ -n "$ARG_LINE" ]; do ARG_KEY=$(echo "$ARG_LINE" | awk '{$NF="";print}') ARG_VALUE=$(echo "$ARG_LINE" | awk '{print $NF}') @@ -259,20 +260,49 @@ if [ -z "$RELX_CONFIG_PATH" ]; then fi fi +IS_BOOT_COMMAND='no' +case "$1" in + start|start_boot) + IS_BOOT_COMMAND='yes' + ;; + console|console_clean|console_boot) + IS_BOOT_COMMAND='yes' + ;; + foreground) + IS_BOOT_COMMAND='yes' + ;; +esac + + if [ -z "$NAME_ARG" ]; then NODENAME="${EMQX_NODE_NAME:-}" - # check if there is a node running, inspect its name - # shellcheck disable=SC2009 # pgrep does not support Extended Regular Expressions - [ -z "$NODENAME" ] && NODENAME=$(ps -ef | grep -E '\-progname\s.*emqx\s' | grep -o -E '\-name (\S*)' | awk '{print $2}') - [ -z "$NODENAME" ] && NODENAME=$(grep -E '^[ \t]*node.name[ \t]*=[ \t]*' "$RUNNER_ETC_DIR/emqx.conf" 2> /dev/null | tail -1 | cut -d = -f 2- | tr -d '"') + # compatible with docker entrypoint + [ -z "$NODENAME" ] && [ -n "$EMQX_NAME" ] && [ -n "$EMQX_HOST" ] && NODENAME="${EMQX_NAME}@${EMQX_HOST}" if [ -z "$NODENAME" ]; then - echoerr "vm.args needs to have a -name parameter." - echoerr " -sname is not supported." - echoerr "perhaps you do not have read permissions on $RUNNER_ETC_DIR/emqx.conf" - exit 1 - else - NAME_ARG="-name ${NODENAME# *}" + if [ "$IS_BOOT_COMMAND" = 'no' ]; then + # for non-boot commands, inspect vm.