Merge branch 'master' into file-transfer

* master: (279 commits)
  chore: shorten ct/run.sh script
  chore: rename cassandra_impl to cassandra_connector
  chore: fix mix.exs checking
  refactor(cassandra): move cassandra bridge into its own app
  chore: apply review suggestions
  chore: update changes/ce/fix-10449.en.md
  test: add a test for authn {}
  chore: add changlog for authn_http validation
  fix: always check authn_http's header and ssl_option
  chore: apply suggestions from code review
  fix(emqx_bridge): validate Webhook bad URL and return 'BAD_REQUEST' if it's invalid
  fix(emqx_alarm): add safe call API to activate/deactivate alarms and use it in resource_manager
  perf(emqx_alarm): use dirty Mnesia operations to activate an alarm
  ci: simplify find-apps.sh for ee apps
  perf(emqx_resource): don't reactivate alarms on reoccurring errors
  ci: check if Elixir files are formatted in pre-commit hook
  fix(dynamo): fix field name errors
  chore: remove *_collector for prometheus api's example
  chore: make plugins config to low level
  chore: re-split dynamo i18n file
  ...
Commit 0211bcf030 by Ilya Averyanov, 2023-04-21 17:37:17 +03:00
858 changed files with 29237 additions and 20079 deletions

View File

@@ -9,4 +9,7 @@ DYNAMO_TAG=1.21.0
 CASSANDRA_TAG=3.11.6
 MINIO_TAG=RELEASE.2023-03-20T20-16-18Z
+MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server
+SQLSERVER_TAG=2019-CU19-ubuntu-20.04
 TARGET=emqx/emqx

View File

@@ -0,0 +1,19 @@
+version: '3.9'
+services:
+  sql_server:
+    container_name: sqlserver
+    # See also:
+    # https://mcr.microsoft.com/en-us/product/mssql/server/about
+    # https://hub.docker.com/_/microsoft-mssql-server
+    image: ${MS_IMAGE_ADDR}:${SQLSERVER_TAG}
+    environment:
+      # See also:
+      # https://learn.microsoft.com/en-us/sql/linux/sql-server-linux-configure-environment-variables
+      ACCEPT_EULA: "Y"
+      MSSQL_SA_PASSWORD: "mqtt_public1"
+    restart: always
+    # ports:
+    #   - "1433:1433"
+    networks:
+      - emqx_bridge

View File

@@ -19,6 +19,8 @@ services:
       - 8086:8086
       # InfluxDB TLS
       - 8087:8087
+      # SQL Server
+      - 11433:1433
       # MySQL
       - 13306:3306
       # MySQL TLS

View File

@@ -24,6 +24,7 @@ services:
       - /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
       - ./kerberos/krb5.conf:/etc/kdc/krb5.conf
       - ./kerberos/krb5.conf:/etc/krb5.conf
+      # - ./odbc/odbcinst.ini:/etc/odbcinst.ini
     working_dir: /emqx
     tty: true
     user: "${DOCKER_USER:-root}"

View File

@@ -0,0 +1,9 @@
+[ms-sql]
+Description=Microsoft ODBC Driver 17 for SQL Server
+Driver=/opt/microsoft/msodbcsql17/lib64/libmsodbcsql-17.10.so.2.1
+UsageCount=1
+
+[ODBC Driver 17 for SQL Server]
+Description=Microsoft ODBC Driver 17 for SQL Server
+Driver=/opt/microsoft/msodbcsql17/lib64/libmsodbcsql-17.10.so.2.1
+UsageCount=1
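For reference, an ODBC client can address either driver entry above by name. A minimal sketch using OTP's odbc application against the sqlserver container from the compose file above (the host/port, SA password and connection options are assumptions for illustration, not part of this commit):

    %% hypothetical smoke test, not part of the commit
    {ok, _Apps} = application:ensure_all_started(odbc),
    ConnStr =
        "Driver={ODBC Driver 17 for SQL Server};"
        "Server=sqlserver,1433;UID=sa;PWD=mqtt_public1",
    {ok, Conn} = odbc:connect(ConnStr, [{auto_commit, on}]),
    {selected, _Cols, _Rows} = odbc:sql_query(Conn, "SELECT 1"),
    ok = odbc:disconnect(Conn).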

View File

@@ -96,6 +96,12 @@
     "upstream": "cassandra:9142",
     "enabled": true
   },
+  {
+    "name": "sqlserver",
+    "listen": "0.0.0.0:1433",
+    "upstream": "sqlserver:1433",
+    "enabled": true
+  },
   {
     "name": "minio_tcp",
     "listen": "0.0.0.0:19000",

View File

@@ -25,7 +25,7 @@ jobs:
   prepare:
     runs-on: ubuntu-22.04
     # prepare source with any OTP version, no need for a matrix
-    container: "ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-24.3.4.2-3-ubuntu22.04"
+    container: "ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-24.3.4.2-3-ubuntu22.04"
     outputs:
       PROFILE: ${{ steps.get_profile.outputs.PROFILE }}
@@ -121,7 +121,7 @@ jobs:
         # NOTE: 'otp' and 'elixir' are to configure emqx-builder image
         # only support latest otp and elixir, not a matrix
         builder:
-          - 5.0-33 # update to latest
+          - 5.0-34 # update to latest
         otp:
           - 24.3.4.2-3 # switch to 25 once ready to release 5.1
         elixir:

View File

@@ -24,7 +24,7 @@ jobs:
   prepare:
     runs-on: ubuntu-22.04
     if: (github.repository_owner == 'emqx' && github.event_name == 'schedule') || github.event_name != 'schedule'
-    container: ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-24.3.4.2-3-ubuntu22.04
+    container: ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-24.3.4.2-3-ubuntu22.04
     outputs:
       BUILD_PROFILE: ${{ steps.get_profile.outputs.BUILD_PROFILE }}
       IS_EXACT_TAG: ${{ steps.get_profile.outputs.IS_EXACT_TAG }}
@@ -221,7 +221,7 @@ jobs:
          - aws-arm64
          - ubuntu-22.04
        builder:
-          - 5.0-33
+          - 5.0-34
        elixir:
          - 1.13.4
        exclude:
@@ -235,7 +235,7 @@ jobs:
          arch: amd64
          os: ubuntu22.04
          build_machine: ubuntu-22.04
-          builder: 5.0-33
+          builder: 5.0-34
          elixir: 1.13.4
          release_with: elixir
        - profile: emqx
@@ -243,7 +243,7 @@ jobs:
          arch: amd64
          os: amzn2
          build_machine: ubuntu-22.04
-          builder: 5.0-33
+          builder: 5.0-34
          elixir: 1.13.4
          release_with: elixir

View File

@@ -35,7 +35,7 @@ jobs:
         - ["emqx-enterprise", "24.3.4.2-3", "amzn2", "erlang"]
         - ["emqx-enterprise", "25.1.2-3", "ubuntu20.04", "erlang"]
       builder:
-        - 5.0-33
+        - 5.0-34
       elixir:
         - '1.13.4'

View File

@@ -6,7 +6,7 @@ on:
 jobs:
   check_deps_integrity:
     runs-on: ubuntu-22.04
-    container: ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-25.1.2-3-ubuntu22.04
+    container: ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-25.1.2-3-ubuntu22.04
     steps:
       - uses: actions/checkout@v3

View File

@@ -5,7 +5,7 @@ on: [pull_request]
 jobs:
   code_style_check:
     runs-on: ubuntu-22.04
-    container: "ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-25.1.2-3-ubuntu22.04"
+    container: "ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-25.1.2-3-ubuntu22.04"
     steps:
       - uses: actions/checkout@v3
         with:

View File

@@ -9,7 +9,7 @@ jobs:
   elixir_apps_check:
     runs-on: ubuntu-22.04
     # just use the latest builder
-    container: "ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-25.1.2-3-ubuntu22.04"
+    container: "ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-25.1.2-3-ubuntu22.04"
     strategy:
       fail-fast: false

View File

@@ -8,7 +8,7 @@ on:
 jobs:
   elixir_deps_check:
     runs-on: ubuntu-22.04
-    container: ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-25.1.2-3-ubuntu22.04
+    container: ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-25.1.2-3-ubuntu22.04
     steps:
       - name: Checkout
@@ -23,7 +23,18 @@ jobs:
          mix local.hex --force
          mix local.rebar --force
          mix deps.get
+        # we check only enterprise because `rebar3 tree`, even if an
+        # enterprise app is excluded from `project_app_dirs` in
+        # `rebar.config.erl`, will still list dependencies from it.
+        # Since the enterprise profile is a superset of the
+        # community one and thus more complete, we use the former.
+        env:
+          MIX_ENV: emqx-enterprise
+          PROFILE: emqx-enterprise
       - name: check elixir deps
         run: ./scripts/check-elixir-deps-discrepancies.exs
+        env:
+          MIX_ENV: emqx-enterprise
+          PROFILE: emqx-enterprise
 ...

View File

@@ -17,7 +17,7 @@ jobs:
         profile:
           - emqx
           - emqx-enterprise
-    container: ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-25.1.2-3-ubuntu22.04
+    container: ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-25.1.2-3-ubuntu22.04
     steps:
       - name: Checkout
         uses: actions/checkout@v3

View File

@@ -12,7 +12,7 @@ jobs:
     strategy:
       matrix:
         builder:
-          - 5.0-33
+          - 5.0-34
         otp:
           - 24.3.4.2-3
           - 25.1.2-3

View File

@@ -17,7 +17,7 @@ jobs:
   prepare:
     runs-on: ubuntu-22.04
     # prepare source with any OTP version, no need for a matrix
-    container: ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-24.3.4.2-3-debian11
+    container: ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-24.3.4.2-3-debian11
     steps:
       - uses: actions/checkout@v3
@@ -50,7 +50,7 @@ jobs:
        os:
          - ["debian11", "debian:11-slim"]
        builder:
-          - 5.0-33
+          - 5.0-34
        otp:
          - 24.3.4.2-3
        elixir:
@@ -123,7 +123,7 @@ jobs:
        os:
          - ["debian11", "debian:11-slim"]
        builder:
-          - 5.0-33
+          - 5.0-34
        otp:
          - 24.3.4.2-3
        elixir:

View File

@@ -15,7 +15,7 @@ concurrency:
 jobs:
   relup_test_plan:
     runs-on: ubuntu-22.04
-    container: "ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-24.3.4.2-3-ubuntu22.04"
+    container: "ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-24.3.4.2-3-ubuntu22.04"
     outputs:
       CUR_EE_VSN: ${{ steps.find-versions.outputs.CUR_EE_VSN }}
       OLD_VERSIONS: ${{ steps.find-versions.outputs.OLD_VERSIONS }}

View File

@@ -31,12 +31,12 @@ jobs:
          MATRIX="$(echo "${APPS}" | jq -c '
            [
              (.[] | select(.profile == "emqx") | . + {
-                builder: "5.0-33",
+                builder: "5.0-34",
                otp: "25.1.2-3",
                elixir: "1.13.4"
              }),
              (.[] | select(.profile == "emqx-enterprise") | . + {
-                builder: "5.0-33",
+                builder: "5.0-34",
                otp: ["24.3.4.2-3", "25.1.2-3"][],
                elixir: "1.13.4"
              })
@@ -231,7 +231,7 @@ jobs:
       - ct
       - ct_docker
     runs-on: ubuntu-22.04
-    container: "ghcr.io/emqx/emqx-builder/5.0-33:1.13.4-24.3.4.2-3-ubuntu22.04"
+    container: "ghcr.io/emqx/emqx-builder/5.0-34:1.13.4-24.3.4.2-3-ubuntu22.04"
     steps:
       - uses: AutoModality/action-clean@v1
       - uses: actions/download-artifact@v3

View File

@@ -6,8 +6,8 @@ export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.0-28:1.13.4-24.3.4.2-2
 export EMQX_DEFAULT_RUNNER = debian:11-slim
 export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
 export ELIXIR_VSN ?= $(shell $(CURDIR)/scripts/get-elixir-vsn.sh)
-export EMQX_DASHBOARD_VERSION ?= v1.2.0
-export EMQX_EE_DASHBOARD_VERSION ?= e1.0.5
+export EMQX_DASHBOARD_VERSION ?= v1.2.3
+export EMQX_EE_DASHBOARD_VERSION ?= e1.0.6-beta.1
 export EMQX_REL_FORM ?= tgz
 export QUICER_DOWNLOAD_FROM_RELEASE = 1
 ifeq ($(OS),Windows_NT)
@@ -89,12 +89,17 @@ APPS=$(shell $(SCRIPTS)/find-apps.sh)
 .PHONY: $(APPS:%=%-ct)
 define gen-app-ct-target
 $1-ct: $(REBAR)
-	@$(SCRIPTS)/pre-compile.sh $(PROFILE)
-	@ENABLE_COVER_COMPILE=1 $(REBAR) ct -c -v \
-		--readable=$(CT_READABLE) \
-		--name $(CT_NODE_NAME) \
-		--cover_export_name $(CT_COVER_EXPORT_PREFIX)-$(subst /,-,$1) \
-		--suite $(shell $(SCRIPTS)/find-suites.sh $1)
+	$(eval SUITES := $(shell $(SCRIPTS)/find-suites.sh $1))
+ifneq ($(SUITES),)
+	@$(SCRIPTS)/pre-compile.sh $(PROFILE)
+	@ENABLE_COVER_COMPILE=1 $(REBAR) ct -c -v \
+		--readable=$(CT_READABLE) \
+		--name $(CT_NODE_NAME) \
+		--cover_export_name $(CT_COVER_EXPORT_PREFIX)-$(subst /,-,$1) \
+		--suite $(SUITES)
+else
+	@echo 'No suites found for $1'
+endif
 endef
 $(foreach app,$(APPS),$(eval $(call gen-app-ct-target,$(app))))
@@ -239,7 +244,6 @@ $(foreach zt,$(ALL_DOCKERS),$(eval $(call gen-docker-target,$(zt))))
 .PHONY:
 merge-config:
 	@$(SCRIPTS)/merge-config.escript
-	@$(SCRIPTS)/merge-i18n.escript
 ## elixir target is to create release packages using Elixir's Mix
 .PHONY: $(REL_PROFILES:%=%-elixir) $(PKG_PROFILES:%=%-elixir)

View File

@@ -1,4 +1,4 @@
-%% This additional config file is used when the config 'cluster.proto_dis' in emqx.conf is set to 'inet_tls'.
+%% This additional config file is used when the config 'cluster.proto_dist' in emqx.conf is set to 'inet_tls'.
 %% Which means the EMQX nodes will connect to each other over TLS.
 %% For more information about inter-broker security, see: https://docs.emqx.com/en/enterprise/v5.0/deploy/cluster/security.html

View File

@@ -32,10 +32,10 @@
 %% `apps/emqx/src/bpapi/README.md'
 %% Community edition
--define(EMQX_RELEASE_CE, "5.0.21").
+-define(EMQX_RELEASE_CE, "5.0.23").
 %% Enterprise edition
--define(EMQX_RELEASE_EE, "5.0.2-rc.4").
+-define(EMQX_RELEASE_EE, "5.0.3-alpha.1").
 %% the HTTP API version
 -define(EMQX_API_VERSION, "5.0").

View File

@@ -24,6 +24,8 @@
     filter ::
         emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address() | undefined | '_',
     enable = true :: boolean() | '_',
+    payload_encode = text :: hex | text | hidden | '_',
+    extra = #{} :: map() | '_',
     start_at :: integer() | undefined | '_',
     end_at :: integer() | undefined | '_'
 }).

View File

@@ -57,16 +57,16 @@
 -define(ERROR_CODES, [
     {?BAD_USERNAME_OR_PWD, <<"Bad username or password">>},
     {?BAD_API_KEY_OR_SECRET, <<"Bad API key or secret">>},
-    {'BAD_REQUEST', <<"Request parameters are not legal">>},
+    {'BAD_REQUEST', <<"Request parameters are invalid">>},
     {'NOT_MATCH', <<"Conditions are not matched">>},
     {'ALREADY_EXISTS', <<"Resource already existed">>},
-    {'BAD_CONFIG_SCHEMA', <<"Configuration data is not legal">>},
+    {'BAD_CONFIG_SCHEMA', <<"Configuration data is invalid">>},
     {'BAD_LISTENER_ID', <<"Bad listener ID">>},
     {'BAD_NODE_NAME', <<"Bad Node Name">>},
     {'BAD_RPC', <<"RPC Failed. Check the cluster status and the requested node status">>},
     {'BAD_TOPIC', <<"Topic syntax error, Topic needs to comply with the MQTT protocol standard">>},
     {'EXCEED_LIMIT', <<"Create resources that exceed the maximum limit or minimum limit">>},
-    {'INVALID_PARAMETER', <<"Request parameters is not legal and exceeds the boundary value">>},
+    {'INVALID_PARAMETER', <<"Request parameters is invalid and exceeds the boundary value">>},
     {'CONFLICT', <<"Conflicting request resources">>},
     {'NO_DEFAULT_VALUE', <<"Request parameters do not use default values">>},
     {'DEPENDENCY_EXISTS', <<"Resource is dependent by another resource">>},

View File

@@ -1,10 +1,12 @@
 %% This file is automatically generated by `make static_checks`, do not edit.
 {emqx,1}.
+{emqx,2}.
 {emqx_authn,1}.
 {emqx_authz,1}.
 {emqx_bridge,1}.
 {emqx_bridge,2}.
 {emqx_bridge,3}.
+{emqx_bridge,4}.
 {emqx_broker,1}.
 {emqx_cm,1}.
 {emqx_cm,2}.

View File

@@ -18,25 +18,25 @@
 ]}.
 %% Deps here may duplicate with emqx.git root level rebar.config
-%% but there not be any descrpancy.
+%% but there may not be any discrepancy.
 %% This rebar.config is necessary because the app may be used as a
 %% `git_subdir` dependency in other projects.
 {deps, [
+    {emqx_utils, {path, "../emqx_utils"}},
     {lc, {git, "https://github.com/emqx/lc.git", {tag, "0.3.2"}}},
     {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}},
-    {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}},
     {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}},
     {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}},
     {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.6"}}},
     {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
-    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.38.0"}}},
+    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.2"}}},
     {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.2"}}},
     {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
     {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
     {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.7"}}}
 ]}.
-{plugins, [{rebar3_proper, "0.12.1"}]}.
+{plugins, [{rebar3_proper, "0.12.1"}, rebar3_path_deps]}.
 {extra_src_dirs, [{"etc", [recursive]}]}.
 {profiles, [
     {test, [

View File

@@ -32,25 +32,15 @@ remove_handler() ->
     ok = emqx_config_handler:remove_handler(?LOG),
     ok.
-%% refresh logger config when booting, the override config may have changed after node start.
+%% refresh logger config when booting, the cluster config may have changed after node start.
 %% Kernel's app env is confirmed before the node starts,
-%% but we only copy cluster-override.conf from other node after this node starts,
+%% but we only copy cluster.conf from other node after this node starts,
 %% so we need to refresh the logger config after this node starts.
-%% It will not affect the logger config when cluster-override.conf is unchanged.
+%% It will not affect the logger config when cluster.conf is unchanged.
 refresh_config() ->
-    Overrides = emqx_config:read_override_confs(),
-    refresh_config(Overrides).
-refresh_config(#{<<"log">> := _}) ->
     %% read the checked config
     LogConfig = emqx:get_config(?LOG, undefined),
-    Conf = #{log => LogConfig},
-    ok = do_refresh_config(Conf);
+    do_refresh_config(#{log => LogConfig}).
-refresh_config(_) ->
-    %% No config override found for 'log', do nothing
-    %% because the 'kernel' app should already be configured
-    %% from the base configs. i.e. emqx.conf + env vars
-    ok.
 %% this call is shared between initial config refresh at boot
 %% and dynamic config update from HTTP API
@@ -61,10 +51,9 @@ do_refresh_config(Conf) ->
     ok = maybe_update_log_level(Level),
     ok.
-%% always refresh config when the override config is changed
 post_config_update(?LOG, _Req, NewConf, _OldConf, _AppEnvs) ->
-    ok = do_refresh_config(#{log => NewConf});
+    do_refresh_config(#{log => NewConf}).
-post_config_update(_ConfPath, _Req, _NewConf, _OldConf, _AppEnvs) ->
-    ok.
 maybe_update_log_level(NewLevel) ->
     OldLevel = emqx_logger:get_primary_log_level(),

View File

@@ -3,7 +3,7 @@
     {id, "emqx"},
     {description, "EMQX Core"},
     % strict semver, bump manually!
-    {vsn, "5.0.21"},
+    {vsn, "5.0.24"},
     {modules, []},
     {registered, []},
     {applications, [
@@ -16,7 +16,6 @@
         cowboy,
         sasl,
         os_mon,
-        jiffy,
         lc,
         hocon
     ]},

View File

@@ -30,6 +30,12 @@
     stop/0
 ]).
+%% Cluster API
+-export([
+    cluster_nodes/1,
+    running_nodes/0
+]).
 %% PubSub API
 -export([
     subscribe/1,
@@ -102,6 +108,18 @@ is_running() ->
         _ -> true
     end.
+%%--------------------------------------------------------------------
+%% Cluster API
+%%--------------------------------------------------------------------
+-spec running_nodes() -> [node()].
+running_nodes() ->
+    mria:running_nodes().
+-spec cluster_nodes(all | running | cores | stopped) -> [node()].
+cluster_nodes(Type) ->
+    mria:cluster_nodes(Type).
 %%--------------------------------------------------------------------
 %% PubSub API
 %%--------------------------------------------------------------------
@@ -164,29 +182,29 @@ run_hook(HookPoint, Args) ->
 run_fold_hook(HookPoint, Args, Acc) ->
     emqx_hooks:run_fold(HookPoint, Args, Acc).
--spec get_config(emqx_map_lib:config_key_path()) -> term().
+-spec get_config(emqx_utils_maps:config_key_path()) -> term().
 get_config(KeyPath) ->
     emqx_config:get(KeyPath).
--spec get_config(emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_config(emqx_utils_maps:config_key_path(), term()) -> term().
 get_config(KeyPath, Default) ->
     emqx_config:get(KeyPath, Default).
--spec get_raw_config(emqx_map_lib:config_key_path()) -> term().
+-spec get_raw_config(emqx_utils_maps:config_key_path()) -> term().
 get_raw_config(KeyPath) ->
     emqx_config:get_raw(KeyPath).
--spec get_raw_config(emqx_map_lib:config_key_path(), term()) -> term().
+-spec get_raw_config(emqx_utils_maps:config_key_path(), term()) -> term().
 get_raw_config(KeyPath, Default) ->
     emqx_config:get_raw(KeyPath, Default).
--spec update_config(emqx_map_lib:config_key_path(), emqx_config:update_request()) ->
+-spec update_config(emqx_utils_maps:config_key_path(), emqx_config:update_request()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 update_config(KeyPath, UpdateReq) ->
     update_config(KeyPath, UpdateReq, #{}).
 -spec update_config(
-    emqx_map_lib:config_key_path(),
+    emqx_utils_maps:config_key_path(),
     emqx_config:update_request(),
     emqx_config:update_opts()
 ) ->
@@ -198,12 +216,12 @@ update_config([RootName | _] = KeyPath, UpdateReq, Opts) ->
         {{update, UpdateReq}, Opts}
     ).
--spec remove_config(emqx_map_lib:config_key_path()) ->
+-spec remove_config(emqx_utils_maps:config_key_path()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 remove_config(KeyPath) ->
     remove_config(KeyPath, #{}).
--spec remove_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
+-spec remove_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 remove_config([RootName | _] = KeyPath, Opts) ->
     emqx_config_handler:update_config(
@@ -212,7 +230,7 @@ remove_config([RootName | _] = KeyPath, Opts) ->
         {remove, Opts}
     ).
--spec reset_config(emqx_map_lib:config_key_path(), emqx_config:update_opts()) ->
+-spec reset_config(emqx_utils_maps:config_key_path(), emqx_config:update_opts()) ->
     {ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
 reset_config([RootName | _] = KeyPath, Opts) ->
     case emqx_config:get_default_value(KeyPath) of

View File

@@ -42,7 +42,9 @@
     get_alarms/0,
     get_alarms/1,
     format/1,
-    format/2
+    format/2,
+    safe_activate/3,
+    safe_deactivate/1
 ]).
 %% gen_server callbacks
@@ -57,7 +59,6 @@
 %% Internal exports (RPC)
 -export([
-    create_activate_alarm/3,
     do_get_alarms/0
 ]).
@@ -123,6 +124,9 @@ activate(Name, Details) ->
 activate(Name, Details, Message) ->
     gen_server:call(?MODULE, {activate_alarm, Name, Details, Message}).
+safe_activate(Name, Details, Message) ->
+    safe_call({activate_alarm, Name, Details, Message}).
 -spec ensure_deactivated(binary() | atom()) -> ok.
 ensure_deactivated(Name) ->
     ensure_deactivated(Name, no_details).
@@ -155,6 +159,9 @@ deactivate(Name, Details) ->
 deactivate(Name, Details, Message) ->
     gen_server:call(?MODULE, {deactivate_alarm, Name, Details, Message}).
+safe_deactivate(Name) ->
+    safe_call({deactivate_alarm, Name, no_details, <<"">>}).
 -spec delete_all_deactivated_alarms() -> ok.
 delete_all_deactivated_alarms() ->
     gen_server:call(?MODULE, delete_all_deactivated_alarms).
@@ -218,17 +225,12 @@ init([]) ->
     {ok, #{}, get_validity_period()}.
 handle_call({activate_alarm, Name, Details, Message}, _From, State) ->
-    Res = mria:transaction(
-        mria:local_content_shard(),
-        fun ?MODULE:create_activate_alarm/3,
-        [Name, Details, Message]
-    ),
-    case Res of
-        {atomic, Alarm} ->
+    case create_activate_alarm(Name, Details, Message) of
+        {ok, Alarm} ->
             do_actions(activate, Alarm, emqx:get_config([alarm, actions])),
             {reply, ok, State, get_validity_period()};
-        {aborted, Reason} ->
-            {reply, Reason, State, get_validity_period()}
+        Err ->
+            {reply, Err, State, get_validity_period()}
     end;
 handle_call({deactivate_alarm, Name, Details, Message}, _From, State) ->
     case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of
@@ -283,9 +285,9 @@ get_validity_period() ->
     emqx:get_config([alarm, validity_period]).
 create_activate_alarm(Name, Details, Message) ->
-    case mnesia:read(?ACTIVATED_ALARM, Name) of
+    case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of
         [#activated_alarm{name = Name}] ->
-            mnesia:abort({error, already_existed});
+            {error, already_existed};
         [] ->
             Alarm = #activated_alarm{
                 name = Name,
@@ -293,8 +295,8 @@ create_activate_alarm(Name, Details, Message) ->
                 message = normalize_message(Name, iolist_to_binary(Message)),
                 activate_at = erlang:system_time(microsecond)
             },
-            ok = mnesia:write(?ACTIVATED_ALARM, Alarm, write),
-            Alarm
+            ok = mria:dirty_write(?ACTIVATED_ALARM, Alarm),
+            {ok, Alarm}
     end.
 do_get_alarms() ->
@@ -423,7 +425,7 @@ do_actions(deactivate, Alarm = #deactivated_alarm{name = Name}, [log | More]) ->
     do_actions(deactivate, Alarm, More);
 do_actions(Operation, Alarm, [publish | More]) ->
     Topic = topic(Operation),
-    {ok, Payload} = emqx_json:safe_encode(normalize(Alarm)),
+    {ok, Payload} = emqx_utils_json:safe_encode(normalize(Alarm)),
     Message = emqx_message:make(
         ?MODULE,
         0,
@@ -474,3 +476,19 @@ normalize_message(Name, <<"">>) ->
     list_to_binary(io_lib:format("~p", [Name]));
 normalize_message(_Name, Message) ->
     Message.
+safe_call(Req) ->
+    try
+        gen_server:call(?MODULE, Req)
+    catch
+        _:{timeout, _} = Reason ->
+            ?SLOG(warning, #{msg => "emqx_alarm_safe_call_timeout", reason => Reason}),
+            {error, timeout};
+        _:Reason:St ->
+            ?SLOG(error, #{
+                msg => "emqx_alarm_safe_call_exception",
+                reason => Reason,
+                stacktrace => St
+            }),
+            {error, Reason}
+    end.
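The new safe_activate/3 and safe_deactivate/1 wrappers exist so that callers such as emqx_resource_manager neither crash nor block indefinitely when the alarm server is busy. A minimal caller-side sketch (the alarm name and details below are made up for illustration):

    %% hypothetical usage: raise an alarm but tolerate a busy alarm server
    case emqx_alarm:safe_activate(resource_down, #{resource_id => <<"bridge:webhook:x">>}, <<"resource down">>) of
        ok -> ok;
        {error, _Reason} -> ok %% timeout, already_existed or crash; do not propagate
    end,
    %% ... later, clear it the same way:
    _ = emqx_alarm:safe_deactivate(resource_down).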

View File

@@ -277,9 +277,9 @@ atom(Bin) -> binary_to_existing_atom(Bin, utf8).
 certs_dir(ChainName, ConfigOrID) ->
     DirName = dir(ChainName, ConfigOrID),
     SubDir = iolist_to_binary(filename:join(["authn", DirName])),
-    emqx_misc:safe_filename(SubDir).
+    emqx_utils:safe_filename(SubDir).
 dir(ChainName, ID) when is_binary(ID) ->
-    emqx_misc:safe_filename(iolist_to_binary([to_bin(ChainName), "-", ID]));
+    emqx_utils:safe_filename(iolist_to_binary([to_bin(ChainName), "-", ID]));
 dir(ChainName, Config) when is_map(Config) ->
     dir(ChainName, authenticator_id(Config)).

View File

@@ -243,7 +243,7 @@ handle_info(Info, State) ->
     {noreply, State}.
 terminate(_Reason, #{expiry_timer := TRef}) ->
-    emqx_misc:cancel_timer(TRef).
+    emqx_utils:cancel_timer(TRef).
 code_change(_OldVsn, State, _Extra) ->
     {ok, State}.
@@ -254,10 +254,10 @@ code_change(_OldVsn, State, _Extra) ->
 -ifdef(TEST).
 ensure_expiry_timer(State) ->
-    State#{expiry_timer := emqx_misc:start_timer(10, expire)}.
+    State#{expiry_timer := emqx_utils:start_timer(10, expire)}.
 -else.
 ensure_expiry_timer(State) ->
-    State#{expiry_timer := emqx_misc:start_timer(timer:minutes(1), expire)}.
+    State#{expiry_timer := emqx_utils:start_timer(timer:minutes(1), expire)}.
 -endif.
 expire_banned_items(Now) ->

View File

@@ -85,7 +85,7 @@ commit(Batch = #batch{batch_q = Q, commit_fun = Commit}) ->
     reset(Batch).
 reset(Batch = #batch{linger_timer = TRef}) ->
-    _ = emqx_misc:cancel_timer(TRef),
+    _ = emqx_utils:cancel_timer(TRef),
     Batch#batch{batch_q = [], linger_timer = undefined}.
 -spec size(batch()) -> non_neg_integer().

View File

@@ -71,7 +71,7 @@
     code_change/3
 ]).
--import(emqx_tables, [lookup_value/2, lookup_value/3]).
+-import(emqx_utils_ets, [lookup_value/2, lookup_value/3]).
 -ifdef(TEST).
 -compile(export_all).
@@ -92,7 +92,7 @@
 start_link(Pool, Id) ->
     ok = create_tabs(),
     gen_server:start_link(
-        {local, emqx_misc:proc_name(?BROKER, Id)},
+        {local, emqx_utils:proc_name(?BROKER, Id)},
         ?MODULE,
         [Pool, Id],
         []
@@ -107,15 +107,15 @@ create_tabs() ->
     TabOpts = [public, {read_concurrency, true}, {write_concurrency, true}],
     %% SubOption: {Topic, SubPid} -> SubOption
-    ok = emqx_tables:new(?SUBOPTION, [ordered_set | TabOpts]),
+    ok = emqx_utils_ets:new(?SUBOPTION, [ordered_set | TabOpts]),
     %% Subscription: SubPid -> Topic1, Topic2, Topic3, ...
     %% duplicate_bag: o(1) insert
-    ok = emqx_tables:new(?SUBSCRIPTION, [duplicate_bag | TabOpts]),
+    ok = emqx_utils_ets:new(?SUBSCRIPTION, [duplicate_bag | TabOpts]),
     %% Subscriber: Topic -> SubPid1, SubPid2, SubPid3, ...
     %% bag: o(n) insert:(
-    ok = emqx_tables:new(?SUBSCRIBER, [bag | TabOpts]).
+    ok = emqx_utils_ets:new(?SUBSCRIBER, [bag | TabOpts]).
 %%------------------------------------------------------------------------------
 %% Subscribe API

View File

@@ -73,11 +73,11 @@ register_sub(SubPid, SubId) when is_pid(SubPid) ->
 -spec lookup_subid(pid()) -> maybe(emqx_types:subid()).
 lookup_subid(SubPid) when is_pid(SubPid) ->
-    emqx_tables:lookup_value(?SUBMON, SubPid).
+    emqx_utils_ets:lookup_value(?SUBMON, SubPid).
 -spec lookup_subpid(emqx_types:subid()) -> maybe(pid()).
 lookup_subpid(SubId) ->
-    emqx_tables:lookup_value(?SUBID, SubId).
+    emqx_utils_ets:lookup_value(?SUBID, SubId).
 -spec get_sub_shard(pid(), emqx_types:topic()) -> non_neg_integer().
 get_sub_shard(SubPid, Topic) ->
@@ -105,15 +105,15 @@ reclaim_seq(Topic) ->
 init([]) ->
     %% Helper table
-    ok = emqx_tables:new(?HELPER, [{read_concurrency, true}]),
+    ok = emqx_utils_ets:new(?HELPER, [{read_concurrency, true}]),
     %% Shards: CPU * 32
     true = ets:insert(?HELPER, {shards, emqx_vm:schedulers() * 32}),
     %% SubSeq: Topic -> SeqId
     ok = emqx_sequence:create(?SUBSEQ),
     %% SubId: SubId -> SubPid
-    ok = emqx_tables:new(?SUBID, [public, {read_concurrency, true}, {write_concurrency, true}]),
+    ok = emqx_utils_ets:new(?SUBID, [public, {read_concurrency, true}, {write_concurrency, true}]),
     %% SubMon: SubPid -> SubId
-    ok = emqx_tables:new(?SUBMON, [public, {read_concurrency, true}, {write_concurrency, true}]),
+    ok = emqx_utils_ets:new(?SUBMON, [public, {read_concurrency, true}, {write_concurrency, true}]),
     %% Stats timer
     ok = emqx_stats:update_interval(broker_stats, fun emqx_broker:stats_fun/0),
     {ok, #{pmon => emqx_pmon:new()}}.
@@ -131,7 +131,7 @@ handle_cast(Msg, State) ->
     {noreply, State}.
 handle_info({'DOWN', _MRef, process, SubPid, _Reason}, State = #{pmon := PMon}) ->
-    SubPids = [SubPid | emqx_misc:drain_down(?BATCH_SIZE)],
+    SubPids = [SubPid | emqx_utils:drain_down(?BATCH_SIZE)],
     ok = emqx_pool:async_submit(
         fun lists:foreach/2, [fun clean_down/1, SubPids]
     ),

View File

@@ -61,7 +61,7 @@
 -export([set_field/3]).
 -import(
-    emqx_misc,
+    emqx_utils,
     [
         run_fold/3,
         pipeline/3,
@@ -622,7 +622,7 @@ process_connect(
             NChannel = Channel#channel{session = Session},
             handle_out(connack, {?RC_SUCCESS, sp(false), AckProps}, ensure_connected(NChannel));
         {ok, #{session := Session, present := true, pendings := Pendings}} ->
-            Pendings1 = lists:usort(lists:append(Pendings, emqx_misc:drain_deliver())),
+            Pendings1 = lists:usort(lists:append(Pendings, emqx_utils:drain_deliver())),
             NChannel = Channel#channel{
                 session = Session,
                 resuming = true,
@@ -1216,7 +1216,7 @@ handle_call(
 ) ->
     ok = emqx_session:takeover(Session),
     %% TODO: Should not drain deliver here (side effect)
-    Delivers = emqx_misc:drain_deliver(),
+    Delivers = emqx_utils:drain_deliver(),
     AllPendings = lists:append(Delivers, Pendings),
     disconnect_and_shutdown(takenover, AllPendings, Channel);
 handle_call(list_authz_cache, Channel) ->
@@ -1417,7 +1417,7 @@ ensure_timer(Name, Channel = #channel{timers = Timers}) ->
 ensure_timer(Name, Time, Channel = #channel{timers = Timers}) ->
     Msg = maps:get(Name, ?TIMER_TABLE),
-    TRef = emqx_misc:start_timer(Time, Msg),
+    TRef = emqx_utils:start_timer(Time, Msg),
     Channel#channel{timers = Timers#{Name => TRef}}.
 reset_timer(Name, Channel) ->
@@ -1645,7 +1645,7 @@ check_banned(_ConnPkt, #channel{clientinfo = ClientInfo}) ->
 %% Flapping
 count_flapping_event(_ConnPkt, Channel = #channel{clientinfo = ClientInfo = #{zone := Zone}}) ->
-    emqx_config:get_zone_conf(Zone, [flapping_detect, enable]) andalso
+    is_integer(emqx_config:get_zone_conf(Zone, [flapping_detect, window_time])) andalso
         emqx_flapping:detect(ClientInfo),
     {ok, Channel}.
@@ -2060,7 +2060,7 @@ clear_keepalive(Channel = #channel{timers = Timers}) ->
         undefined ->
             Channel;
         TRef ->
-            emqx_misc:cancel_timer(TRef),
+            emqx_utils:cancel_timer(TRef),
             Channel#channel{timers = maps:without([alive_timer], Timers)}
     end.
 %%--------------------------------------------------------------------
@@ -2256,7 +2256,7 @@ get_mqtt_conf(Zone, Key, Default) ->
 %%--------------------------------------------------------------------
 set_field(Name, Value, Channel) ->
-    Pos = emqx_misc:index_of(Name, record_info(fields, channel)),
+    Pos = emqx_utils:index_of(Name, record_info(fields, channel)),
     setelement(Pos + 1, Channel, Value).
 get_mqueue(#channel{session = Session}) ->

View File

@@ -655,10 +655,10 @@ cast(Msg) -> gen_server:cast(?CM, Msg).
 init([]) ->
     TabOpts = [public, {write_concurrency, true}],
-    ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]),
-    ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]),
-    ok = emqx_tables:new(?CHAN_INFO_TAB, [ordered_set, compressed | TabOpts]),
-    ok = emqx_tables:new(?CHAN_LIVE_TAB, [ordered_set, {write_concurrency, true} | TabOpts]),
+    ok = emqx_utils_ets:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]),
+    ok = emqx_utils_ets:new(?CHAN_CONN_TAB, [bag | TabOpts]),
+    ok = emqx_utils_ets:new(?CHAN_INFO_TAB, [ordered_set, compressed | TabOpts]),
+    ok = emqx_utils_ets:new(?CHAN_LIVE_TAB, [ordered_set, {write_concurrency, true} | TabOpts]),
     ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0),
     State = #{chan_pmon => emqx_pmon:new()},
     {ok, State}.
@@ -676,7 +676,7 @@ handle_cast(Msg, State) ->
 handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
     ?tp(emqx_cm_process_down, #{stale_pid => Pid, reason => _Reason}),
-    ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)],
+    ChanPids = [Pid | emqx_utils:drain_down(?BATCH_SIZE)],
     {Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon),
     lists:foreach(fun mark_channel_disconnected/1, ChanPids),
     ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]),

View File

@ -24,7 +24,7 @@
init_load/2, init_load/2,
init_load/3, init_load/3,
read_override_conf/1, read_override_conf/1,
read_override_confs/0, has_deprecated_file/0,
delete_override_conf_files/0, delete_override_conf_files/0,
check_config/2, check_config/2,
fill_defaults/1, fill_defaults/1,
@ -33,8 +33,10 @@
save_configs/5, save_configs/5,
save_to_app_env/1, save_to_app_env/1,
save_to_config_map/2, save_to_config_map/2,
save_to_override_conf/2 save_to_override_conf/3
]). ]).
-export([raw_conf_with_default/4]).
-export([merge_envs/2]).
-export([ -export([
get_root/1, get_root/1,
@ -142,7 +144,7 @@
-type app_envs() :: [proplists:property()]. -type app_envs() :: [proplists:property()].
%% @doc For the given path, get root value enclosed in a single-key map. %% @doc For the given path, get root value enclosed in a single-key map.
-spec get_root(emqx_map_lib:config_key_path()) -> map(). -spec get_root(emqx_utils_maps:config_key_path()) -> map().
get_root([RootName | _]) -> get_root([RootName | _]) ->
#{RootName => do_get(?CONF, [RootName], #{})}. #{RootName => do_get(?CONF, [RootName], #{})}.
@ -153,14 +155,14 @@ get_root_raw([RootName | _]) ->
%% @doc Get a config value for the given path. %% @doc Get a config value for the given path.
%% The path should at least include root config name. %% The path should at least include root config name.
-spec get(emqx_map_lib:config_key_path()) -> term(). -spec get(emqx_utils_maps:config_key_path()) -> term().
get(KeyPath) -> do_get(?CONF, KeyPath). get(KeyPath) -> do_get(?CONF, KeyPath).
-spec get(emqx_map_lib:config_key_path(), term()) -> term(). -spec get(emqx_utils_maps:config_key_path(), term()) -> term().
get(KeyPath, Default) -> do_get(?CONF, KeyPath, Default). get(KeyPath, Default) -> do_get(?CONF, KeyPath, Default).
-spec find(emqx_map_lib:config_key_path()) -> -spec find(emqx_utils_maps:config_key_path()) ->
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}. {ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
find([]) -> find([]) ->
Ref = make_ref(), Ref = make_ref(),
case do_get(?CONF, [], Ref) of case do_get(?CONF, [], Ref) of
@ -170,12 +172,12 @@ find([]) ->
find(KeyPath) -> find(KeyPath) ->
atom_conf_path( atom_conf_path(
KeyPath, KeyPath,
fun(AtomKeyPath) -> emqx_map_lib:deep_find(AtomKeyPath, get_root(KeyPath)) end, fun(AtomKeyPath) -> emqx_utils_maps:deep_find(AtomKeyPath, get_root(KeyPath)) end,
{return, {not_found, KeyPath}} {return, {not_found, KeyPath}}
). ).
-spec find_raw(emqx_map_lib:config_key_path()) -> -spec find_raw(emqx_utils_maps:config_key_path()) ->
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}. {ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
find_raw([]) -> find_raw([]) ->
Ref = make_ref(), Ref = make_ref(),
case do_get_raw([], Ref) of case do_get_raw([], Ref) of
@ -183,9 +185,9 @@ find_raw([]) ->
Res -> {ok, Res} Res -> {ok, Res}
end; end;
find_raw(KeyPath) -> find_raw(KeyPath) ->
emqx_map_lib:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)). emqx_utils_maps:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)).
-spec get_zone_conf(atom(), emqx_map_lib:config_key_path()) -> term(). -spec get_zone_conf(atom(), emqx_utils_maps:config_key_path()) -> term().
get_zone_conf(Zone, KeyPath) -> get_zone_conf(Zone, KeyPath) ->
case find(?ZONE_CONF_PATH(Zone, KeyPath)) of case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
%% not found in zones, try to find the global config %% not found in zones, try to find the global config
@ -195,7 +197,7 @@ get_zone_conf(Zone, KeyPath) ->
Value Value
end. end.
-spec get_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> term(). -spec get_zone_conf(atom(), emqx_utils_maps:config_key_path(), term()) -> term().
get_zone_conf(Zone, KeyPath, Default) -> get_zone_conf(Zone, KeyPath, Default) ->
case find(?ZONE_CONF_PATH(Zone, KeyPath)) of case find(?ZONE_CONF_PATH(Zone, KeyPath)) of
%% not found in zones, try to find the global config %% not found in zones, try to find the global config
@ -205,24 +207,24 @@ get_zone_conf(Zone, KeyPath, Default) ->
Value Value
end. end.
-spec put_zone_conf(atom(), emqx_map_lib:config_key_path(), term()) -> ok. -spec put_zone_conf(atom(), emqx_utils_maps:config_key_path(), term()) -> ok.
put_zone_conf(Zone, KeyPath, Conf) -> put_zone_conf(Zone, KeyPath, Conf) ->
?MODULE:put(?ZONE_CONF_PATH(Zone, KeyPath), Conf). ?MODULE:put(?ZONE_CONF_PATH(Zone, KeyPath), Conf).
-spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) -> term(). -spec get_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path()) -> term().
get_listener_conf(Type, Listener, KeyPath) -> get_listener_conf(Type, Listener, KeyPath) ->
?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath)). ?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
-spec get_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> term(). -spec get_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path(), term()) -> term().
get_listener_conf(Type, Listener, KeyPath, Default) -> get_listener_conf(Type, Listener, KeyPath, Default) ->
?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Default). ?MODULE:get(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Default).
-spec put_listener_conf(atom(), atom(), emqx_map_lib:config_key_path(), term()) -> ok. -spec put_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path(), term()) -> ok.
put_listener_conf(Type, Listener, KeyPath, Conf) -> put_listener_conf(Type, Listener, KeyPath, Conf) ->
?MODULE:put(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Conf). ?MODULE:put(?LISTENER_CONF_PATH(Type, Listener, KeyPath), Conf).
-spec find_listener_conf(atom(), atom(), emqx_map_lib:config_key_path()) -> -spec find_listener_conf(atom(), atom(), emqx_utils_maps:config_key_path()) ->
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}. {ok, term()} | {not_found, emqx_utils_maps:config_key_path(), term()}.
find_listener_conf(Type, Listener, KeyPath) -> find_listener_conf(Type, Listener, KeyPath) ->
find(?LISTENER_CONF_PATH(Type, Listener, KeyPath)). find(?LISTENER_CONF_PATH(Type, Listener, KeyPath)).
@ -241,20 +243,20 @@ erase(RootName) ->
persistent_term:erase(?PERSIS_KEY(?RAW_CONF, bin(RootName))), persistent_term:erase(?PERSIS_KEY(?RAW_CONF, bin(RootName))),
ok. ok.
-spec put(emqx_map_lib:config_key_path(), term()) -> ok. -spec put(emqx_utils_maps:config_key_path(), term()) -> ok.
put(KeyPath, Config) -> put(KeyPath, Config) ->
Putter = fun(Path, Map, Value) -> Putter = fun(Path, Map, Value) ->
emqx_map_lib:deep_put(Path, Map, Value) emqx_utils_maps:deep_put(Path, Map, Value)
end, end,
do_put(?CONF, Putter, KeyPath, Config). do_put(?CONF, Putter, KeyPath, Config).
%% Puts value into configuration even if path doesn't exist %% Puts value into configuration even if path doesn't exist
%% For paths of non-existing atoms use force_put(KeyPath, Config, unsafe) %% For paths of non-existing atoms use force_put(KeyPath, Config, unsafe)
-spec force_put(emqx_map_lib:config_key_path(), term()) -> ok. -spec force_put(emqx_utils_maps:config_key_path(), term()) -> ok.
force_put(KeyPath, Config) -> force_put(KeyPath, Config) ->
force_put(KeyPath, Config, safe). force_put(KeyPath, Config, safe).
-spec force_put(emqx_map_lib:config_key_path(), term(), safe | unsafe) -> ok. -spec force_put(emqx_utils_maps:config_key_path(), term(), safe | unsafe) -> ok.
force_put(KeyPath0, Config, Safety) -> force_put(KeyPath0, Config, Safety) ->
KeyPath = KeyPath =
case Safety of case Safety of
@ -262,19 +264,19 @@ force_put(KeyPath0, Config, Safety) ->
unsafe -> [unsafe_atom(Key) || Key <- KeyPath0] unsafe -> [unsafe_atom(Key) || Key <- KeyPath0]
end, end,
Putter = fun(Path, Map, Value) -> Putter = fun(Path, Map, Value) ->
emqx_map_lib:deep_force_put(Path, Map, Value) emqx_utils_maps:deep_force_put(Path, Map, Value)
end, end,
do_put(?CONF, Putter, KeyPath, Config). do_put(?CONF, Putter, KeyPath, Config).
-spec get_default_value(emqx_map_lib:config_key_path()) -> {ok, term()} | {error, term()}. -spec get_default_value(emqx_utils_maps:config_key_path()) -> {ok, term()} | {error, term()}.
get_default_value([RootName | _] = KeyPath) -> get_default_value([RootName | _] = KeyPath) ->
BinKeyPath = [bin(Key) || Key <- KeyPath], BinKeyPath = [bin(Key) || Key <- KeyPath],
case find_raw([RootName]) of case find_raw([RootName]) of
{ok, RawConf} -> {ok, RawConf} ->
RawConf1 = emqx_map_lib:deep_remove(BinKeyPath, #{bin(RootName) => RawConf}), RawConf1 = emqx_utils_maps:deep_remove(BinKeyPath, #{bin(RootName) => RawConf}),
try fill_defaults(get_schema_mod(RootName), RawConf1, #{}) of try fill_defaults(get_schema_mod(RootName), RawConf1, #{}) of
FullConf -> FullConf ->
case emqx_map_lib:deep_find(BinKeyPath, FullConf) of case emqx_utils_maps:deep_find(BinKeyPath, FullConf) of
{not_found, _, _} -> {error, no_default_value}; {not_found, _, _} -> {error, no_default_value};
{ok, Val} -> {ok, Val} {ok, Val} -> {ok, Val}
end end
@ -285,10 +287,10 @@ get_default_value([RootName | _] = KeyPath) ->
{error, {rootname_not_found, RootName}} {error, {rootname_not_found, RootName}}
end. end.
-spec get_raw(emqx_map_lib:config_key_path()) -> term(). -spec get_raw(emqx_utils_maps:config_key_path()) -> term().
get_raw(KeyPath) -> do_get_raw(KeyPath). get_raw(KeyPath) -> do_get_raw(KeyPath).
-spec get_raw(emqx_map_lib:config_key_path(), term()) -> term(). -spec get_raw(emqx_utils_maps:config_key_path(), term()) -> term().
get_raw(KeyPath, Default) -> do_get_raw(KeyPath, Default). get_raw(KeyPath, Default) -> do_get_raw(KeyPath, Default).
-spec put_raw(map()) -> ok. -spec put_raw(map()) -> ok.
@ -301,10 +303,10 @@ put_raw(Config) ->
hocon_maps:ensure_plain(Config) hocon_maps:ensure_plain(Config)
). ).
-spec put_raw(emqx_map_lib:config_key_path(), term()) -> ok. -spec put_raw(emqx_utils_maps:config_key_path(), term()) -> ok.
put_raw(KeyPath, Config) -> put_raw(KeyPath, Config) ->
Putter = fun(Path, Map, Value) -> Putter = fun(Path, Map, Value) ->
emqx_map_lib:deep_force_put(Path, Map, Value) emqx_utils_maps:deep_force_put(Path, Map, Value)
end, end,
do_put(?RAW_CONF, Putter, KeyPath, Config). do_put(?RAW_CONF, Putter, KeyPath, Config).
@ -326,9 +328,12 @@ init_load(SchemaMod, ConfFiles) ->
%% in the rear of the list overrides prior values. %% in the rear of the list overrides prior values.
-spec init_load(module(), [string()] | binary() | hocon:config()) -> ok. -spec init_load(module(), [string()] | binary() | hocon:config()) -> ok.
init_load(SchemaMod, Conf, Opts) when is_list(Conf) orelse is_binary(Conf) -> init_load(SchemaMod, Conf, Opts) when is_list(Conf) orelse is_binary(Conf) ->
init_load(SchemaMod, parse_hocon(Conf), Opts); HasDeprecatedFile = has_deprecated_file(),
init_load(SchemaMod, RawConf, Opts) when is_map(RawConf) -> RawConf = parse_hocon(HasDeprecatedFile, Conf),
ok = save_schema_mod_and_names(SchemaMod), init_load(HasDeprecatedFile, SchemaMod, RawConf, Opts).
init_load(true, SchemaMod, RawConf, Opts) when is_map(RawConf) ->
%% deprecated conf will be removed in 5.1
%% Merge environment variable overrides on top %% Merge environment variable overrides on top
RawConfWithEnvs = merge_envs(SchemaMod, RawConf), RawConfWithEnvs = merge_envs(SchemaMod, RawConf),
Overrides = read_override_confs(), Overrides = read_override_confs(),
@ -338,6 +343,16 @@ init_load(SchemaMod, RawConf, Opts) when is_map(RawConf) ->
%% check configs against the schema %% check configs against the schema
{AppEnvs, CheckedConf} = check_config(SchemaMod, RawConfAll, #{}), {AppEnvs, CheckedConf} = check_config(SchemaMod, RawConfAll, #{}),
save_to_app_env(AppEnvs), save_to_app_env(AppEnvs),
ok = save_to_config_map(CheckedConf, RawConfAll);
init_load(false, SchemaMod, RawConf, Opts) when is_map(RawConf) ->
ok = save_schema_mod_and_names(SchemaMod),
RootNames = get_root_names(),
%% Merge environment variable overrides on top
RawConfWithEnvs = merge_envs(SchemaMod, RawConf),
RawConfAll = raw_conf_with_default(SchemaMod, RootNames, RawConfWithEnvs, Opts),
%% check configs against the schema
{AppEnvs, CheckedConf} = check_config(SchemaMod, RawConfAll, #{}),
save_to_app_env(AppEnvs),
ok = save_to_config_map(CheckedConf, RawConfAll). ok = save_to_config_map(CheckedConf, RawConfAll).
%% @doc Read merged cluster + local overrides. %% @doc Read merged cluster + local overrides.
@ -374,27 +389,37 @@ schema_default(Schema) ->
#{} #{}
end. end.
parse_hocon(Conf) -> parse_hocon(HasDeprecatedFile, Conf) ->
IncDirs = include_dirs(), IncDirs = include_dirs(),
case do_parse_hocon(Conf, IncDirs) of case do_parse_hocon(HasDeprecatedFile, Conf, IncDirs) of
{ok, HoconMap} -> {ok, HoconMap} ->
HoconMap; HoconMap;
{error, Reason} -> {error, Reason} ->
?SLOG(error, #{ ?SLOG(error, #{
msg => "failed_to_load_hocon_conf", msg => "failed_to_load_hocon_file",
reason => Reason, reason => Reason,
pwd => file:get_cwd(), pwd => file:get_cwd(),
include_dirs => IncDirs, include_dirs => IncDirs,
config_file => Conf config_file => Conf
}), }),
error(failed_to_load_hocon_conf) error(failed_to_load_hocon_file)
end. end.
do_parse_hocon(Conf, IncDirs) -> do_parse_hocon(true, Conf, IncDirs) ->
Opts = #{format => map, include_dirs => IncDirs}, Opts = #{format => map, include_dirs => IncDirs},
case is_binary(Conf) of case is_binary(Conf) of
true -> hocon:binary(Conf, Opts); true -> hocon:binary(Conf, Opts);
false -> hocon:files(Conf, Opts) false -> hocon:files(Conf, Opts)
end;
do_parse_hocon(false, Conf, IncDirs) ->
Opts = #{format => map, include_dirs => IncDirs},
case is_binary(Conf) of
%% only use in test
true ->
hocon:binary(Conf, Opts);
false ->
ClusterFile = cluster_hocon_file(),
hocon:files([ClusterFile | Conf], Opts)
end. end.
include_dirs() -> include_dirs() ->
@ -430,7 +455,7 @@ do_check_config(SchemaMod, RawConf, Opts0) ->
Opts = maps:merge(Opts0, Opts1), Opts = maps:merge(Opts0, Opts1),
{AppEnvs, CheckedConf} = {AppEnvs, CheckedConf} =
hocon_tconf:map_translate(SchemaMod, RawConf, Opts), hocon_tconf:map_translate(SchemaMod, RawConf, Opts),
{AppEnvs, emqx_map_lib:unsafe_atom_key_map(CheckedConf)}. {AppEnvs, emqx_utils_maps:unsafe_atom_key_map(CheckedConf)}.
fill_defaults(RawConf) -> fill_defaults(RawConf) ->
fill_defaults(RawConf, #{}). fill_defaults(RawConf, #{}).
@ -466,10 +491,12 @@ fill_defaults(SchemaMod, RawConf, Opts0) ->
%% Delete override config files. %% Delete override config files.
-spec delete_override_conf_files() -> ok. -spec delete_override_conf_files() -> ok.
delete_override_conf_files() -> delete_override_conf_files() ->
F1 = override_conf_file(#{override_to => local}), F1 = deprecated_conf_file(#{override_to => local}),
F2 = override_conf_file(#{override_to => cluster}), F2 = deprecated_conf_file(#{override_to => cluster}),
F3 = cluster_hocon_file(),
ok = ensure_file_deleted(F1), ok = ensure_file_deleted(F1),
ok = ensure_file_deleted(F2). ok = ensure_file_deleted(F2),
ok = ensure_file_deleted(F3).
ensure_file_deleted(F) -> ensure_file_deleted(F) ->
case file:delete(F) of case file:delete(F) of
@ -480,19 +507,33 @@ ensure_file_deleted(F) ->
-spec read_override_conf(map()) -> raw_config(). -spec read_override_conf(map()) -> raw_config().
read_override_conf(#{} = Opts) -> read_override_conf(#{} = Opts) ->
File = override_conf_file(Opts), File =
case has_deprecated_file() of
true -> deprecated_conf_file(Opts);
false -> cluster_hocon_file()
end,
load_hocon_file(File, map). load_hocon_file(File, map).
override_conf_file(Opts) when is_map(Opts) -> %% @doc Return `true' if this node is upgraded from an older version which used cluster-override.conf for
%% cluster-wide config persistence.
has_deprecated_file() ->
DeprecatedFile = deprecated_conf_file(#{override_to => cluster}),
filelib:is_regular(DeprecatedFile).
deprecated_conf_file(Opts) when is_map(Opts) ->
Key = Key =
case maps:get(override_to, Opts, cluster) of case maps:get(override_to, Opts, cluster) of
local -> local_override_conf_file; local -> local_override_conf_file;
cluster -> cluster_override_conf_file cluster -> cluster_override_conf_file
end, end,
application:get_env(emqx, Key, undefined); application:get_env(emqx, Key, undefined);
override_conf_file(Which) when is_atom(Which) -> deprecated_conf_file(Which) when is_atom(Which) ->
application:get_env(emqx, Which, undefined). application:get_env(emqx, Which, undefined).
%% The newer version cluster-wide config persistence file.
cluster_hocon_file() ->
application:get_env(emqx, cluster_hocon_file, undefined).
-spec save_schema_mod_and_names(module()) -> ok. -spec save_schema_mod_and_names(module()) -> ok.
save_schema_mod_and_names(SchemaMod) -> save_schema_mod_and_names(SchemaMod) ->
RootNames = hocon_schema:root_names(SchemaMod), RootNames = hocon_schema:root_names(SchemaMod),
@ -522,11 +563,15 @@ get_schema_mod(RootName) ->
get_root_names() -> get_root_names() ->
maps:get(names, persistent_term:get(?PERSIS_SCHEMA_MODS, #{names => []})). maps:get(names, persistent_term:get(?PERSIS_SCHEMA_MODS, #{names => []})).
-spec save_configs(app_envs(), config(), raw_config(), raw_config(), update_opts()) -> ok. -spec save_configs(
app_envs(), config(), raw_config(), raw_config(), update_opts()
) -> ok.
save_configs(AppEnvs, Conf, RawConf, OverrideConf, Opts) -> save_configs(AppEnvs, Conf, RawConf, OverrideConf, Opts) ->
%% We first try to save to override.conf, because saving to files is more error prone %% We first try to save to files, because saving to files is more error prone
%% than saving into memory. %% than saving into memory.
ok = save_to_override_conf(OverrideConf, Opts), HasDeprecatedFile = has_deprecated_file(),
ok = save_to_override_conf(HasDeprecatedFile, OverrideConf, Opts),
save_to_app_env(AppEnvs), save_to_app_env(AppEnvs),
save_to_config_map(Conf, RawConf). save_to_config_map(Conf, RawConf).
@ -544,11 +589,12 @@ save_to_config_map(Conf, RawConf) ->
?MODULE:put(Conf), ?MODULE:put(Conf),
?MODULE:put_raw(RawConf). ?MODULE:put_raw(RawConf).
-spec save_to_override_conf(raw_config(), update_opts()) -> ok | {error, term()}. -spec save_to_override_conf(boolean(), raw_config(), update_opts()) -> ok | {error, term()}.
save_to_override_conf(undefined, _) -> save_to_override_conf(_, undefined, _) ->
ok; ok;
save_to_override_conf(RawConf, Opts) -> %% TODO: Remove deprecated override conf file when 5.1
case override_conf_file(Opts) of save_to_override_conf(true, RawConf, Opts) ->
case deprecated_conf_file(Opts) of
undefined -> undefined ->
ok; ok;
FileName -> FileName ->
@ -564,6 +610,24 @@ save_to_override_conf(RawConf, Opts) ->
}), }),
{error, Reason} {error, Reason}
end end
end;
save_to_override_conf(false, RawConf, _Opts) ->
case cluster_hocon_file() of
undefined ->
ok;
FileName ->
ok = filelib:ensure_dir(FileName),
case file:write_file(FileName, hocon_pp:do(RawConf, #{})) of
ok ->
ok;
{error, Reason} ->
?SLOG(error, #{
msg => "failed_to_save_conf_file",
filename => FileName,
reason => Reason
}),
{error, Reason}
end
end. end.
add_handlers() -> add_handlers() ->
@ -645,11 +709,11 @@ do_put(Type, Putter, [RootName | KeyPath], DeepValue) ->
do_deep_get(?CONF, KeyPath, Map, Default) -> do_deep_get(?CONF, KeyPath, Map, Default) ->
atom_conf_path( atom_conf_path(
KeyPath, KeyPath,
fun(AtomKeyPath) -> emqx_map_lib:deep_get(AtomKeyPath, Map, Default) end, fun(AtomKeyPath) -> emqx_utils_maps:deep_get(AtomKeyPath, Map, Default) end,
{return, Default} {return, Default}
); );
do_deep_get(?RAW_CONF, KeyPath, Map, Default) -> do_deep_get(?RAW_CONF, KeyPath, Map, Default) ->
emqx_map_lib:deep_get([bin(Key) || Key <- KeyPath], Map, Default). emqx_utils_maps:deep_get([bin(Key) || Key <- KeyPath], Map, Default).
do_deep_put(?CONF, Putter, KeyPath, Map, Value) -> do_deep_put(?CONF, Putter, KeyPath, Map, Value) ->
atom_conf_path( atom_conf_path(
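
The hunks above move cluster-wide config persistence from the deprecated cluster-override.conf to cluster.hocon, keeping the old file only on nodes upgraded from earlier releases. A minimal standalone sketch of the selection rule (illustrative only; DeprecatedFile and ClusterHocon are hypothetical paths, while the real code resolves them via application:get_env/3 as shown above):

-module(conf_file_pick_sketch).
-export([pick/2]).

%% Keep using the legacy cluster-override.conf if it already exists on disk,
%% i.e. the node was upgraded from an older release; otherwise use cluster.hocon.
pick(DeprecatedFile, ClusterHocon) ->
    case filelib:is_regular(DeprecatedFile) of
        true -> DeprecatedFile;
        false -> ClusterHocon
    end.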

View File

@ -43,7 +43,6 @@
terminate/2, terminate/2,
code_change/3 code_change/3
]). ]).
-export([is_mutable/3]).
-define(MOD, {mod}). -define(MOD, {mod}).
-define(WKEY, '?'). -define(WKEY, '?').
@ -230,26 +229,15 @@ process_update_request([_], _Handlers, {remove, _Opts}) ->
process_update_request(ConfKeyPath, _Handlers, {remove, Opts}) -> process_update_request(ConfKeyPath, _Handlers, {remove, Opts}) ->
OldRawConf = emqx_config:get_root_raw(ConfKeyPath), OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
BinKeyPath = bin_path(ConfKeyPath), BinKeyPath = bin_path(ConfKeyPath),
case check_permissions(remove, BinKeyPath, OldRawConf, Opts) of NewRawConf = emqx_utils_maps:deep_remove(BinKeyPath, OldRawConf),
allow -> OverrideConf = remove_from_override_config(BinKeyPath, Opts),
NewRawConf = emqx_map_lib:deep_remove(BinKeyPath, OldRawConf), {ok, NewRawConf, OverrideConf, Opts};
OverrideConf = remove_from_override_config(BinKeyPath, Opts),
{ok, NewRawConf, OverrideConf, Opts};
{deny, Reason} ->
{error, {permission_denied, Reason}}
end;
process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) -> process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
OldRawConf = emqx_config:get_root_raw(ConfKeyPath), OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
case do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) of case do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) of
{ok, NewRawConf} -> {ok, NewRawConf} ->
BinKeyPath = bin_path(ConfKeyPath), OverrideConf = merge_to_override_config(NewRawConf, Opts),
case check_permissions(update, BinKeyPath, NewRawConf, Opts) of {ok, NewRawConf, OverrideConf, Opts};
allow ->
OverrideConf = merge_to_override_config(NewRawConf, Opts),
{ok, NewRawConf, OverrideConf, Opts};
{deny, Reason} ->
{error, {permission_denied, Reason}}
end;
Error -> Error ->
Error Error
end. end.
@ -271,8 +259,10 @@ do_update_config(
SubOldRawConf = get_sub_config(ConfKeyBin, OldRawConf), SubOldRawConf = get_sub_config(ConfKeyBin, OldRawConf),
SubHandlers = get_sub_handlers(ConfKey, Handlers), SubHandlers = get_sub_handlers(ConfKey, Handlers),
case do_update_config(SubConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq, ConfKeyPath) of case do_update_config(SubConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq, ConfKeyPath) of
{ok, NewUpdateReq} -> merge_to_old_config(#{ConfKeyBin => NewUpdateReq}, OldRawConf); {ok, NewUpdateReq} ->
Error -> Error merge_to_old_config(#{ConfKeyBin => NewUpdateReq}, OldRawConf);
Error ->
Error
end. end.
check_and_save_configs( check_and_save_configs(
@ -445,7 +435,7 @@ remove_from_override_config(_BinKeyPath, #{persistent := false}) ->
undefined; undefined;
remove_from_override_config(BinKeyPath, Opts) -> remove_from_override_config(BinKeyPath, Opts) ->
OldConf = emqx_config:read_override_conf(Opts), OldConf = emqx_config:read_override_conf(Opts),
emqx_map_lib:deep_remove(BinKeyPath, OldConf). emqx_utils_maps:deep_remove(BinKeyPath, OldConf).
%% apply new config on top of override config %% apply new config on top of override config
merge_to_override_config(_RawConf, #{persistent := false}) -> merge_to_override_config(_RawConf, #{persistent := false}) ->
@ -467,7 +457,7 @@ return_change_result(_ConfKeyPath, {remove, _Opts}) ->
return_rawconf(ConfKeyPath, #{rawconf_with_defaults := true}) -> return_rawconf(ConfKeyPath, #{rawconf_with_defaults := true}) ->
FullRawConf = emqx_config:fill_defaults(emqx_config:get_raw([])), FullRawConf = emqx_config:fill_defaults(emqx_config:get_raw([])),
emqx_map_lib:deep_get(bin_path(ConfKeyPath), FullRawConf); emqx_utils_maps:deep_get(bin_path(ConfKeyPath), FullRawConf);
return_rawconf(ConfKeyPath, _) -> return_rawconf(ConfKeyPath, _) ->
emqx_config:get_raw(ConfKeyPath). emqx_config:get_raw(ConfKeyPath).
@ -485,16 +475,16 @@ atom(Atom) when is_atom(Atom) ->
-dialyzer({nowarn_function, do_remove_handler/2}). -dialyzer({nowarn_function, do_remove_handler/2}).
do_remove_handler(ConfKeyPath, Handlers) -> do_remove_handler(ConfKeyPath, Handlers) ->
NewHandlers = emqx_map_lib:deep_remove(ConfKeyPath ++ [?MOD], Handlers), NewHandlers = emqx_utils_maps:deep_remove(ConfKeyPath ++ [?MOD], Handlers),
remove_empty_leaf(ConfKeyPath, NewHandlers). remove_empty_leaf(ConfKeyPath, NewHandlers).
remove_empty_leaf([], Handlers) -> remove_empty_leaf([], Handlers) ->
Handlers; Handlers;
remove_empty_leaf(KeyPath, Handlers) -> remove_empty_leaf(KeyPath, Handlers) ->
case emqx_map_lib:deep_find(KeyPath, Handlers) =:= {ok, #{}} of case emqx_utils_maps:deep_find(KeyPath, Handlers) =:= {ok, #{}} of
%% empty leaf %% empty leaf
true -> true ->
Handlers1 = emqx_map_lib:deep_remove(KeyPath, Handlers), Handlers1 = emqx_utils_maps:deep_remove(KeyPath, Handlers),
SubKeyPath = lists:sublist(KeyPath, length(KeyPath) - 1), SubKeyPath = lists:sublist(KeyPath, length(KeyPath) - 1),
remove_empty_leaf(SubKeyPath, Handlers1); remove_empty_leaf(SubKeyPath, Handlers1);
false -> false ->
@ -511,7 +501,7 @@ assert_callback_function(Mod) ->
end, end,
ok. ok.
-spec schema(module(), emqx_map_lib:config_key_path()) -> hocon_schema:schema(). -spec schema(module(), emqx_utils_maps:config_key_path()) -> hocon_schema:schema().
schema(SchemaModule, [RootKey | _]) -> schema(SchemaModule, [RootKey | _]) ->
Roots = hocon_schema:roots(SchemaModule), Roots = hocon_schema:roots(SchemaModule),
{Field, Translations} = {Field, Translations} =
@ -546,98 +536,3 @@ load_prev_handlers() ->
save_handlers(Handlers) -> save_handlers(Handlers) ->
application:set_env(emqx, ?MODULE, Handlers). application:set_env(emqx, ?MODULE, Handlers).
check_permissions(_Action, _ConfKeyPath, _NewRawConf, #{override_to := local}) ->
allow;
check_permissions(Action, ConfKeyPath, NewRawConf, _Opts) ->
case emqx_map_lib:deep_find(ConfKeyPath, NewRawConf) of
{ok, NewRaw} ->
LocalOverride = emqx_config:read_override_conf(#{override_to => local}),
case emqx_map_lib:deep_find(ConfKeyPath, LocalOverride) of
{ok, LocalRaw} ->
case is_mutable(Action, NewRaw, LocalRaw) of
ok ->
allow;
{error, Error} ->
?SLOG(error, #{
msg => "prevent_remove_local_override_conf",
config_key_path => ConfKeyPath,
error => Error
}),
{deny, "Disable changed from local-override.conf"}
end;
{not_found, _, _} ->
allow
end;
{not_found, _, _} ->
allow
end.
is_mutable(Action, NewRaw, LocalRaw) ->
try
KeyPath = [],
is_mutable(KeyPath, Action, NewRaw, LocalRaw)
catch
throw:Error -> Error
end.
-define(REMOVE_FAILED, "remove_failed").
-define(UPDATE_FAILED, "update_failed").
is_mutable(KeyPath, Action, New = #{}, Local = #{}) ->
maps:foreach(
fun(Key, SubLocal) ->
case maps:find(Key, New) of
error -> ok;
{ok, SubNew} -> is_mutable(KeyPath ++ [Key], Action, SubNew, SubLocal)
end
end,
Local
);
is_mutable(KeyPath, remove, Update, Origin) ->
throw({error, {?REMOVE_FAILED, KeyPath, Update, Origin}});
is_mutable(_KeyPath, update, Val, Val) ->
ok;
is_mutable(KeyPath, update, Update, Origin) ->
throw({error, {?UPDATE_FAILED, KeyPath, Update, Origin}}).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
is_mutable_update_test() ->
Action = update,
?assertEqual(ok, is_mutable(Action, #{}, #{})),
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => #{}}}}, #{a => #{b => #{c => #{}}}})),
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c => 1}}})),
?assertEqual(
{error, {?UPDATE_FAILED, [a, b, c], 1, 2}},
is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c => 2}}})
),
?assertEqual(
{error, {?UPDATE_FAILED, [a, b, d], 2, 3}},
is_mutable(Action, #{a => #{b => #{c => 1, d => 2}}}, #{a => #{b => #{c => 1, d => 3}}})
),
ok.
is_mutable_remove_test() ->
Action = remove,
?assertEqual(ok, is_mutable(Action, #{}, #{})),
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => #{}}}}, #{a1 => #{b => #{c => #{}}}})),
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b1 => #{c => 1}}})),
?assertEqual(ok, is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c1 => 1}}})),
?assertEqual(
{error, {?REMOVE_FAILED, [a, b, c], 1, 1}},
is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c => 1}}})
),
?assertEqual(
{error, {?REMOVE_FAILED, [a, b, c], 1, 2}},
is_mutable(Action, #{a => #{b => #{c => 1}}}, #{a => #{b => #{c => 2}}})
),
?assertEqual(
{error, {?REMOVE_FAILED, [a, b, c], 1, 1}},
is_mutable(Action, #{a => #{b => #{c => 1, d => 2}}}, #{a => #{b => #{c => 1, d => 3}}})
),
ok.
-endif.

View File

@ -77,7 +77,7 @@
-export([set_field/3]). -export([set_field/3]).
-import( -import(
emqx_misc, emqx_utils,
[start_timer/2] [start_timer/2]
). ).
@ -182,10 +182,8 @@
-define(ALARM_SOCK_STATS_KEYS, [send_pend, recv_cnt, recv_oct, send_cnt, send_oct]). -define(ALARM_SOCK_STATS_KEYS, [send_pend, recv_cnt, recv_oct, send_cnt, send_oct]).
-define(ALARM_SOCK_OPTS_KEYS, [high_watermark, high_msgq_watermark, sndbuf, recbuf, buffer]). -define(ALARM_SOCK_OPTS_KEYS, [high_watermark, high_msgq_watermark, sndbuf, recbuf, buffer]).
%% use macro to do compile time limiter's type check -define(LIMITER_BYTES_IN, bytes).
-define(LIMITER_BYTES_IN, bytes_in). -define(LIMITER_MESSAGE_IN, messages).
-define(LIMITER_MESSAGE_IN, message_in).
-define(EMPTY_QUEUE, {[], []}).
-dialyzer({no_match, [info/2]}). -dialyzer({no_match, [info/2]}).
-dialyzer( -dialyzer(
@ -260,7 +258,7 @@ stats(#state{
{error, _} -> [] {error, _} -> []
end, end,
ChanStats = emqx_channel:stats(Channel), ChanStats = emqx_channel:stats(Channel),
ProcStats = emqx_misc:proc_stats(), ProcStats = emqx_utils:proc_stats(),
lists:append([SockStats, ChanStats, ProcStats]). lists:append([SockStats, ChanStats, ProcStats]).
%% @doc Set TCP keepalive socket options to override system defaults. %% @doc Set TCP keepalive socket options to override system defaults.
@ -392,7 +390,7 @@ run_loop(
emqx_channel:info(zone, Channel), emqx_channel:info(zone, Channel),
[force_shutdown] [force_shutdown]
), ),
emqx_misc:tune_heap_size(ShutdownPolicy), emqx_utils:tune_heap_size(ShutdownPolicy),
case activate_socket(State) of case activate_socket(State) of
{ok, NState} -> {ok, NState} ->
hibernate(Parent, NState); hibernate(Parent, NState);
@ -472,7 +470,7 @@ ensure_stats_timer(_Timeout, State) ->
-compile({inline, [cancel_stats_timer/1]}). -compile({inline, [cancel_stats_timer/1]}).
cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) -> cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) ->
?tp(debug, cancel_stats_timer, #{}), ?tp(debug, cancel_stats_timer, #{}),
ok = emqx_misc:cancel_timer(TRef), ok = emqx_utils:cancel_timer(TRef),
State#state{stats_timer = undefined}; State#state{stats_timer = undefined};
cancel_stats_timer(State) -> cancel_stats_timer(State) ->
State. State.
@ -558,7 +556,7 @@ handle_msg(
{incoming, Packet = ?CONNECT_PACKET(ConnPkt)}, {incoming, Packet = ?CONNECT_PACKET(ConnPkt)},
State = #state{idle_timer = IdleTimer} State = #state{idle_timer = IdleTimer}
) -> ) ->
ok = emqx_misc:cancel_timer(IdleTimer), ok = emqx_utils:cancel_timer(IdleTimer),
Serialize = emqx_frame:serialize_opts(ConnPkt), Serialize = emqx_frame:serialize_opts(ConnPkt),
NState = State#state{ NState = State#state{
serialize = Serialize, serialize = Serialize,
@ -593,7 +591,7 @@ handle_msg(
#state{listener = {Type, Listener}} = State #state{listener = {Type, Listener}} = State
) -> ) ->
ActiveN = get_active_n(Type, Listener), ActiveN = get_active_n(Type, Listener),
Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)], Delivers = [Deliver | emqx_utils:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State); with_channel(handle_deliver, [Delivers], State);
%% Something sent %% Something sent
handle_msg({inet_reply, _Sock, ok}, State = #state{listener = {Type, Listener}}) -> handle_msg({inet_reply, _Sock, ok}, State = #state{listener = {Type, Listener}}) ->
@ -1073,7 +1071,7 @@ check_oom(State = #state{channel = Channel}) ->
emqx_channel:info(zone, Channel), [force_shutdown] emqx_channel:info(zone, Channel), [force_shutdown]
), ),
?tp(debug, check_oom, #{policy => ShutdownPolicy}), ?tp(debug, check_oom, #{policy => ShutdownPolicy}),
case emqx_misc:check_oom(ShutdownPolicy) of case emqx_utils:check_oom(ShutdownPolicy) of
{shutdown, Reason} -> {shutdown, Reason} ->
%% triggers terminate/2 callback immediately %% triggers terminate/2 callback immediately
erlang:exit({shutdown, Reason}); erlang:exit({shutdown, Reason});
@ -1200,7 +1198,7 @@ inc_counter(Key, Inc) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
set_field(Name, Value, State) -> set_field(Name, Value, State) ->
Pos = emqx_misc:index_of(Name, record_info(fields, state)), Pos = emqx_utils:index_of(Name, record_info(fields, state)),
setelement(Pos + 1, State, Value). setelement(Pos + 1, State, Value).
get_state(Pid) -> get_state(Pid) ->

View File

@ -117,7 +117,7 @@ handle_call(Call, _From, State) ->
handle_cast({evict, URL}, State0 = #state{refresh_timers = RefreshTimers0}) -> handle_cast({evict, URL}, State0 = #state{refresh_timers = RefreshTimers0}) ->
emqx_ssl_crl_cache:delete(URL), emqx_ssl_crl_cache:delete(URL),
MTimer = maps:get(URL, RefreshTimers0, undefined), MTimer = maps:get(URL, RefreshTimers0, undefined),
emqx_misc:cancel_timer(MTimer), emqx_utils:cancel_timer(MTimer),
RefreshTimers = maps:without([URL], RefreshTimers0), RefreshTimers = maps:without([URL], RefreshTimers0),
State = State0#state{refresh_timers = RefreshTimers}, State = State0#state{refresh_timers = RefreshTimers},
?tp( ?tp(
@ -223,9 +223,9 @@ ensure_timer(URL, State = #state{refresh_interval = Timeout}) ->
ensure_timer(URL, State = #state{refresh_timers = RefreshTimers0}, Timeout) -> ensure_timer(URL, State = #state{refresh_timers = RefreshTimers0}, Timeout) ->
?tp(crl_cache_ensure_timer, #{url => URL, timeout => Timeout}), ?tp(crl_cache_ensure_timer, #{url => URL, timeout => Timeout}),
MTimer = maps:get(URL, RefreshTimers0, undefined), MTimer = maps:get(URL, RefreshTimers0, undefined),
emqx_misc:cancel_timer(MTimer), emqx_utils:cancel_timer(MTimer),
RefreshTimers = RefreshTimers0#{ RefreshTimers = RefreshTimers0#{
URL => emqx_misc:start_timer( URL => emqx_utils:start_timer(
Timeout, Timeout,
{refresh, URL} {refresh, URL}
) )
@ -297,7 +297,7 @@ handle_cache_overflow(State0) ->
{_Time, OldestURL, InsertionTimes} = gb_trees:take_smallest(InsertionTimes0), {_Time, OldestURL, InsertionTimes} = gb_trees:take_smallest(InsertionTimes0),
emqx_ssl_crl_cache:delete(OldestURL), emqx_ssl_crl_cache:delete(OldestURL),
MTimer = maps:get(OldestURL, RefreshTimers0, undefined), MTimer = maps:get(OldestURL, RefreshTimers0, undefined),
emqx_misc:cancel_timer(MTimer), emqx_utils:cancel_timer(MTimer),
RefreshTimers = maps:remove(OldestURL, RefreshTimers0), RefreshTimers = maps:remove(OldestURL, RefreshTimers0),
CachedURLs = sets:del_element(OldestURL, CachedURLs0), CachedURLs = sets:del_element(OldestURL, CachedURLs0),
?tp(debug, crl_cache_overflow, #{oldest_url => OldestURL}), ?tp(debug, crl_cache_overflow, #{oldest_url => OldestURL}),

View File

@ -27,6 +27,10 @@
%% API %% API
-export([detect/1]). -export([detect/1]).
-ifdef(TEST).
-export([get_policy/2]).
-endif.
%% gen_server callbacks %% gen_server callbacks
-export([ -export([
init/1, init/1,
@ -39,15 +43,6 @@
%% Tab %% Tab
-define(FLAPPING_TAB, ?MODULE). -define(FLAPPING_TAB, ?MODULE).
%% Default Policy
-define(FLAPPING_THRESHOLD, 30).
-define(FLAPPING_DURATION, 60000).
-define(FLAPPING_BANNED_INTERVAL, 300000).
-define(DEFAULT_DETECT_POLICY, #{
max_count => ?FLAPPING_THRESHOLD,
window_time => ?FLAPPING_DURATION,
ban_time => ?FLAPPING_BANNED_INTERVAL
}).
-record(flapping, { -record(flapping, {
clientid :: emqx_types:clientid(), clientid :: emqx_types:clientid(),
@ -69,7 +64,7 @@ stop() -> gen_server:stop(?MODULE).
%% @doc Detect flapping when a MQTT client disconnected. %% @doc Detect flapping when a MQTT client disconnected.
-spec detect(emqx_types:clientinfo()) -> boolean(). -spec detect(emqx_types:clientinfo()) -> boolean().
detect(#{clientid := ClientId, peerhost := PeerHost, zone := Zone}) -> detect(#{clientid := ClientId, peerhost := PeerHost, zone := Zone}) ->
Policy = #{max_count := Threshold} = get_policy(Zone), Policy = #{max_count := Threshold} = get_policy([max_count, window_time, ban_time], Zone),
%% The initial flapping record sets the detect_cnt to 0. %% The initial flapping record sets the detect_cnt to 0.
InitVal = #flapping{ InitVal = #flapping{
clientid = ClientId, clientid = ClientId,
@ -89,8 +84,22 @@ detect(#{clientid := ClientId, peerhost := PeerHost, zone := Zone}) ->
end end
end. end.
get_policy(Zone) -> get_policy(Keys, Zone) when is_list(Keys) ->
emqx_config:get_zone_conf(Zone, [flapping_detect]). RootKey = flapping_detect,
Conf = emqx_config:get_zone_conf(Zone, [RootKey]),
lists:foldl(
fun(Key, Acc) ->
case maps:find(Key, Conf) of
{ok, V} -> Acc#{Key => V};
error -> Acc#{Key => emqx_config:get([RootKey, Key])}
end
end,
#{},
Keys
);
get_policy(Key, Zone) ->
#{Key := Conf} = get_policy([Key], Zone),
Conf.
now_diff(TS) -> erlang:system_time(millisecond) - TS. now_diff(TS) -> erlang:system_time(millisecond) - TS.
@ -99,7 +108,7 @@ now_diff(TS) -> erlang:system_time(millisecond) - TS.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init([]) -> init([]) ->
ok = emqx_tables:new(?FLAPPING_TAB, [ ok = emqx_utils_ets:new(?FLAPPING_TAB, [
public, public,
set, set,
{keypos, #flapping.clientid}, {keypos, #flapping.clientid},
@ -166,8 +175,7 @@ handle_cast(Msg, State) ->
handle_info({timeout, _TRef, {garbage_collect, Zone}}, State) -> handle_info({timeout, _TRef, {garbage_collect, Zone}}, State) ->
Timestamp = Timestamp =
erlang:system_time(millisecond) - erlang:system_time(millisecond) - get_policy(window_time, Zone),
maps:get(window_time, get_policy(Zone)),
MatchSpec = [{{'_', '_', '_', '$1', '_'}, [{'<', '$1', Timestamp}], [true]}], MatchSpec = [{{'_', '_', '_', '$1', '_'}, [{'<', '$1', Timestamp}], [true]}],
ets:select_delete(?FLAPPING_TAB, MatchSpec), ets:select_delete(?FLAPPING_TAB, MatchSpec),
_ = start_timer(Zone), _ = start_timer(Zone),
@ -183,15 +191,19 @@ code_change(_OldVsn, State, _Extra) ->
{ok, State}. {ok, State}.
start_timer(Zone) -> start_timer(Zone) ->
WindTime = maps:get(window_time, get_policy(Zone)), case get_policy(window_time, Zone) of
emqx_misc:start_timer(WindTime, {garbage_collect, Zone}). WindowTime when is_integer(WindowTime) ->
emqx_utils:start_timer(WindowTime, {garbage_collect, Zone});
disabled ->
ok
end.
start_timers() -> start_timers() ->
lists:foreach( maps:foreach(
fun({Zone, _ZoneConf}) -> fun(Zone, _ZoneConf) ->
start_timer(Zone) start_timer(Zone)
end, end,
maps:to_list(emqx:get_config([zones], #{})) emqx:get_config([zones], #{})
). ).
fmt_host(PeerHost) -> fmt_host(PeerHost) ->
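
The new get_policy/2 above resolves each flapping_detect key from the zone first and falls back to the global default. The same fallback written as a standalone sketch over plain maps (module and argument names are illustrative, not part of the change):

-module(flapping_policy_sketch).
-export([policy/3]).

%% For every requested key, prefer the zone's value and fall back to the
%% global default when the zone does not override it.
policy(Keys, ZoneConf, GlobalConf) ->
    lists:foldl(
        fun(Key, Acc) ->
            case maps:find(Key, ZoneConf) of
                {ok, V} -> Acc#{Key => V};
                error -> Acc#{Key => maps:get(Key, GlobalConf)}
            end
        end,
        #{},
        Keys
    ).

For example, policy([max_count, window_time], #{max_count => 30}, #{max_count => 15, window_time => 60000}) yields #{max_count => 30, window_time => 60000}.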

View File

@ -145,10 +145,10 @@ npid() ->
NPid. NPid.
to_hexstr(I) when byte_size(I) =:= 16 -> to_hexstr(I) when byte_size(I) =:= 16 ->
emqx_misc:bin_to_hexstr(I, upper). emqx_utils:bin_to_hexstr(I, upper).
from_hexstr(S) when byte_size(S) =:= 32 -> from_hexstr(S) when byte_size(S) =:= 32 ->
emqx_misc:hexstr_to_bin(S). emqx_utils:hexstr_to_bin(S).
to_base62(<<I:128>>) -> to_base62(<<I:128>>) ->
emqx_base62:encode(I). emqx_base62:encode(I).

View File

@ -229,7 +229,7 @@ lookup(HookPoint) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init([]) -> init([]) ->
ok = emqx_tables:new(?TAB, [{keypos, #hook.name}, {read_concurrency, true}]), ok = emqx_utils_ets:new(?TAB, [{keypos, #hook.name}, {read_concurrency, true}]),
{ok, #{}}. {ok, #{}}.
handle_call({add, HookPoint, Callback = #callback{action = {M, F, _}}}, _From, State) -> handle_call({add, HookPoint, Callback = #callback{action = {M, F, _}}}, _From, State) ->

View File

@ -139,7 +139,8 @@ make_token_bucket_limiter(Cfg, Bucket) ->
Cfg#{ Cfg#{
tokens => emqx_limiter_server:get_initial_val(Cfg), tokens => emqx_limiter_server:get_initial_val(Cfg),
lasttime => ?NOW, lasttime => ?NOW,
bucket => Bucket bucket => Bucket,
capacity => emqx_limiter_schema:calc_capacity(Cfg)
}. }.
%%@doc create a limiter server's reference %%@doc create a limiter server's reference
@ -375,7 +376,7 @@ return_pause(infinity, PauseType, Fun, Diff, Limiter) ->
{PauseType, ?MINIMUM_PAUSE, make_retry_context(Fun, Diff), Limiter}; {PauseType, ?MINIMUM_PAUSE, make_retry_context(Fun, Diff), Limiter};
return_pause(Rate, PauseType, Fun, Diff, Limiter) -> return_pause(Rate, PauseType, Fun, Diff, Limiter) ->
Val = erlang:round(Diff * emqx_limiter_schema:default_period() / Rate), Val = erlang:round(Diff * emqx_limiter_schema:default_period() / Rate),
Pause = emqx_misc:clamp(Val, ?MINIMUM_PAUSE, ?MAXIMUM_PAUSE), Pause = emqx_utils:clamp(Val, ?MINIMUM_PAUSE, ?MAXIMUM_PAUSE),
{PauseType, Pause, make_retry_context(Fun, Diff), Limiter}. {PauseType, Pause, make_retry_context(Fun, Diff), Limiter}.
-spec make_retry_context(undefined | retry_fun(Limiter), non_neg_integer()) -> -spec make_retry_context(undefined | retry_fun(Limiter), non_neg_integer()) ->

View File

@ -23,6 +23,7 @@
%% API %% API
-export([ -export([
new/3, new/3,
infinity_bucket/0,
check/3, check/3,
try_restore/2, try_restore/2,
available/1 available/1
@ -58,6 +59,10 @@ new(Counter, Index, Rate) ->
rate => Rate rate => Rate
}. }.
-spec infinity_bucket() -> bucket_ref().
infinity_bucket() ->
infinity.
%% @doc check tokens %% @doc check tokens
-spec check(pos_integer(), bucket_ref(), Disivisble :: boolean()) -> -spec check(pos_integer(), bucket_ref(), Disivisble :: boolean()) ->
HasToken :: HasToken ::

View File

@ -24,6 +24,7 @@
fields/1, fields/1,
to_rate/1, to_rate/1,
to_capacity/1, to_capacity/1,
to_burst/1,
default_period/0, default_period/0,
to_burst_rate/1, to_burst_rate/1,
to_initial/1, to_initial/1,
@ -31,20 +32,20 @@
get_bucket_cfg_path/2, get_bucket_cfg_path/2,
desc/1, desc/1,
types/0, types/0,
infinity_value/0 calc_capacity/1
]). ]).
-define(KILOBYTE, 1024). -define(KILOBYTE, 1024).
-define(BUCKET_KEYS, [ -define(BUCKET_KEYS, [
{bytes_in, bucket_infinity}, {bytes, bucket_infinity},
{message_in, bucket_infinity}, {messages, bucket_infinity},
{connection, bucket_limit}, {connection, bucket_limit},
{message_routing, bucket_infinity} {message_routing, bucket_infinity}
]). ]).
-type limiter_type() :: -type limiter_type() ::
bytes_in bytes
| message_in | messages
| connection | connection
| message_routing | message_routing
%% internal limiter for unclassified resources %% internal limiter for unclassified resources
@ -54,8 +55,10 @@
-type bucket_name() :: atom(). -type bucket_name() :: atom().
-type rate() :: infinity | float(). -type rate() :: infinity | float().
-type burst_rate() :: 0 | float(). -type burst_rate() :: 0 | float().
%% this is a compatibility type for the deprecated field and type `capacity`.
-type burst() :: burst_rate().
%% the capacity of the token bucket %% the capacity of the token bucket
-type capacity() :: non_neg_integer(). %%-type capacity() :: non_neg_integer().
%% initial capacity of the token bucket %% initial capacity of the token bucket
-type initial() :: non_neg_integer(). -type initial() :: non_neg_integer().
-type bucket_path() :: list(atom()). -type bucket_path() :: list(atom()).
@ -72,13 +75,13 @@
-typerefl_from_string({rate/0, ?MODULE, to_rate}). -typerefl_from_string({rate/0, ?MODULE, to_rate}).
-typerefl_from_string({burst_rate/0, ?MODULE, to_burst_rate}). -typerefl_from_string({burst_rate/0, ?MODULE, to_burst_rate}).
-typerefl_from_string({capacity/0, ?MODULE, to_capacity}). -typerefl_from_string({burst/0, ?MODULE, to_burst}).
-typerefl_from_string({initial/0, ?MODULE, to_initial}). -typerefl_from_string({initial/0, ?MODULE, to_initial}).
-reflect_type([ -reflect_type([
rate/0, rate/0,
burst_rate/0, burst_rate/0,
capacity/0, burst/0,
initial/0, initial/0,
failure_strategy/0, failure_strategy/0,
bucket_name/0 bucket_name/0
@ -90,14 +93,17 @@
namespace() -> limiter. namespace() -> limiter.
roots() -> [limiter]. roots() ->
[{limiter, hoconsc:mk(hoconsc:ref(?MODULE, limiter), #{importance => ?IMPORTANCE_HIDDEN})}].
fields(limiter) -> fields(limiter) ->
[ [
{Type, {Type,
?HOCON(?R_REF(node_opts), #{ ?HOCON(?R_REF(node_opts), #{
desc => ?DESC(Type), desc => ?DESC(Type),
default => #{} default => #{},
importance => ?IMPORTANCE_HIDDEN,
aliases => alias_of_type(Type)
})} })}
|| Type <- types() || Type <- types()
] ++ ] ++
@ -107,6 +113,7 @@ fields(limiter) ->
?R_REF(client_fields), ?R_REF(client_fields),
#{ #{
desc => ?DESC(client), desc => ?DESC(client),
importance => ?IMPORTANCE_HIDDEN,
default => maps:from_list([ default => maps:from_list([
{erlang:atom_to_binary(Type), #{}} {erlang:atom_to_binary(Type), #{}}
|| Type <- types() || Type <- types()
@ -124,30 +131,20 @@ fields(node_opts) ->
})} })}
]; ];
fields(client_fields) -> fields(client_fields) ->
[ client_fields(types(), #{default => #{}});
{Type,
?HOCON(?R_REF(client_opts), #{
desc => ?DESC(Type),
default => #{}
})}
|| Type <- types()
];
fields(bucket_infinity) -> fields(bucket_infinity) ->
[ fields_of_bucket(<<"infinity">>);
{rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => <<"infinity">>})},
{capacity, ?HOCON(capacity(), #{desc => ?DESC(capacity), default => <<"infinity">>})},
{initial, ?HOCON(initial(), #{default => <<"0">>, desc => ?DESC(initial)})}
];
fields(bucket_limit) -> fields(bucket_limit) ->
[ fields_of_bucket(<<"1000/s">>);
{rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => <<"1000/s">>})},
{capacity, ?HOCON(capacity(), #{desc => ?DESC(capacity), default => <<"1000">>})},
{initial, ?HOCON(initial(), #{default => <<"0">>, desc => ?DESC(initial)})}
];
fields(client_opts) -> fields(client_opts) ->
[ [
{rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => ?DESC(rate)})}, {rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => ?DESC(rate)})},
{initial, ?HOCON(initial(), #{default => <<"0">>, desc => ?DESC(initial)})}, {initial,
?HOCON(initial(), #{
default => <<"0">>,
desc => ?DESC(initial),
importance => ?IMPORTANCE_HIDDEN
})},
%% low_watermark add for emqx_channel and emqx_session %% low_watermark add for emqx_channel and emqx_session
%% both modules consume first and then check %% both modules consume first and then check
%% so we need to use this value to prevent excessive consumption %% so we need to use this value to prevent excessive consumption
@ -157,20 +154,24 @@ fields(client_opts) ->
initial(), initial(),
#{ #{
desc => ?DESC(low_watermark), desc => ?DESC(low_watermark),
default => <<"0">> default => <<"0">>,
importance => ?IMPORTANCE_HIDDEN
} }
)}, )},
{capacity, {burst,
?HOCON(capacity(), #{ ?HOCON(burst(), #{
desc => ?DESC(client_bucket_capacity), desc => ?DESC(burst),
default => <<"infinity">> default => <<"0">>,
importance => ?IMPORTANCE_HIDDEN,
aliases => [capacity]
})}, })},
{divisible, {divisible,
?HOCON( ?HOCON(
boolean(), boolean(),
#{ #{
desc => ?DESC(divisible), desc => ?DESC(divisible),
default => false default => false,
importance => ?IMPORTANCE_HIDDEN
} }
)}, )},
{max_retry_time, {max_retry_time,
@ -178,7 +179,8 @@ fields(client_opts) ->
emqx_schema:duration(), emqx_schema:duration(),
#{ #{
desc => ?DESC(max_retry_time), desc => ?DESC(max_retry_time),
default => <<"10s">> default => <<"10s">>,
importance => ?IMPORTANCE_HIDDEN
} }
)}, )},
{failure_strategy, {failure_strategy,
@ -186,16 +188,18 @@ fields(client_opts) ->
failure_strategy(), failure_strategy(),
#{ #{
desc => ?DESC(failure_strategy), desc => ?DESC(failure_strategy),
default => force default => force,
importance => ?IMPORTANCE_HIDDEN
} }
)} )}
]; ];
fields(listener_fields) -> fields(listener_fields) ->
bucket_fields(?BUCKET_KEYS, listener_client_fields); composite_bucket_fields(?BUCKET_KEYS, listener_client_fields);
fields(listener_client_fields) -> fields(listener_client_fields) ->
client_fields(?BUCKET_KEYS); {Types, _} = lists:unzip(?BUCKET_KEYS),
client_fields(Types, #{required => false});
fields(Type) -> fields(Type) ->
bucket_field(Type). simple_bucket_field(Type).
desc(limiter) -> desc(limiter) ->
"Settings for the rate limiter."; "Settings for the rate limiter.";
@ -230,19 +234,12 @@ get_bucket_cfg_path(Type, BucketName) ->
[limiter, Type, bucket, BucketName]. [limiter, Type, bucket, BucketName].
types() -> types() ->
[bytes_in, message_in, connection, message_routing, internal]. [bytes, messages, connection, message_routing, internal].
%%-------------------------------------------------------------------- calc_capacity(#{rate := infinity}) ->
%% Internal functions infinity;
%%-------------------------------------------------------------------- calc_capacity(#{rate := Rate, burst := Burst}) ->
erlang:floor(1000 * Rate / default_period()) + Burst.
%% `infinity` to `infinity_value` rules:
%% 1. all infinity capacity will change to infinity_value
%% 2. if the rate of global and bucket both are `infinity`,
%% use `infinity_value` as bucket rate. see `emqx_limiter_server:get_counter_rate/2`
infinity_value() ->
%% 1 TB
1099511627776.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Internal functions %% Internal functions
@ -251,6 +248,17 @@ infinity_value() ->
to_burst_rate(Str) -> to_burst_rate(Str) ->
to_rate(Str, false, true). to_rate(Str, false, true).
%% The default value of `capacity` is `infinity`,
%% but we have changed `capacity` to `burst` which should not be `infinity`
%% and its default value is 0, so we should convert `infinity` to 0
to_burst(Str) ->
case to_rate(Str, true, true) of
{ok, infinity} ->
{ok, 0};
Any ->
Any
end.
%% rate can be: 10 10MB 10MB/s 10MB/2s infinity %% rate can be: 10 10MB 10MB/s 10MB/2s infinity
%% e.g. the bytes_in regex tree is: %% e.g. the bytes_in regex tree is:
%% %%
@ -335,7 +343,7 @@ to_quota(Str, Regex) ->
{match, [Quota, ""]} -> {match, [Quota, ""]} ->
{ok, erlang:list_to_integer(Quota)}; {ok, erlang:list_to_integer(Quota)};
{match, ""} -> {match, ""} ->
{ok, infinity_value()}; {ok, infinity};
_ -> _ ->
{error, Str} {error, Str}
end end
@ -350,7 +358,8 @@ apply_unit("mb", Val) -> Val * ?KILOBYTE * ?KILOBYTE;
apply_unit("gb", Val) -> Val * ?KILOBYTE * ?KILOBYTE * ?KILOBYTE; apply_unit("gb", Val) -> Val * ?KILOBYTE * ?KILOBYTE * ?KILOBYTE;
apply_unit(Unit, _) -> throw("invalid unit:" ++ Unit). apply_unit(Unit, _) -> throw("invalid unit:" ++ Unit).
bucket_field(Type) when is_atom(Type) -> %% A bucket with only one type
simple_bucket_field(Type) when is_atom(Type) ->
fields(bucket_infinity) ++ fields(bucket_infinity) ++
[ [
{client, {client,
@ -358,16 +367,22 @@ bucket_field(Type) when is_atom(Type) ->
?R_REF(?MODULE, client_opts), ?R_REF(?MODULE, client_opts),
#{ #{
desc => ?DESC(client), desc => ?DESC(client),
required => false required => false,
importance => importance_of_type(Type),
aliases => alias_of_type(Type)
} }
)} )}
]. ].
bucket_fields(Types, ClientRef) ->
%% A bucket with multi types
composite_bucket_fields(Types, ClientRef) ->
[ [
{Type, {Type,
?HOCON(?R_REF(?MODULE, Opts), #{ ?HOCON(?R_REF(?MODULE, Opts), #{
desc => ?DESC(?MODULE, Type), desc => ?DESC(?MODULE, Type),
required => false required => false,
importance => importance_of_type(Type),
aliases => alias_of_type(Type)
})} })}
|| {Type, Opts} <- Types || {Type, Opts} <- Types
] ++ ] ++
@ -382,12 +397,47 @@ bucket_fields(Types, ClientRef) ->
)} )}
]. ].
client_fields(Types) -> fields_of_bucket(Default) ->
[
{rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => Default})},
{burst,
?HOCON(burst(), #{
desc => ?DESC(burst),
default => <<"0">>,
importance => ?IMPORTANCE_HIDDEN,
aliases => [capacity]
})},
{initial,
?HOCON(initial(), #{
default => <<"0">>,
desc => ?DESC(initial),
importance => ?IMPORTANCE_HIDDEN
})}
].
client_fields(Types, Meta) ->
[ [
{Type, {Type,
?HOCON(?R_REF(client_opts), #{ ?HOCON(?R_REF(client_opts), Meta#{
desc => ?DESC(Type), desc => ?DESC(Type),
required => false importance => importance_of_type(Type),
aliases => alias_of_type(Type)
})} })}
|| {Type, _} <- Types || Type <- Types
]. ].
importance_of_type(interval) ->
?IMPORTANCE_HIDDEN;
importance_of_type(message_routing) ->
?IMPORTANCE_HIDDEN;
importance_of_type(connection) ->
?IMPORTANCE_HIDDEN;
importance_of_type(_) ->
?DEFAULT_IMPORTANCE.
alias_of_type(messages) ->
[message_in];
alias_of_type(bytes) ->
[bytes_in];
alias_of_type(_) ->
[].
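
With the deprecated capacity field replaced by burst, the bucket size is now derived from the rate by the new calc_capacity/1. The formula restated as a standalone sketch, with the period passed in explicitly (PeriodMs stands in for emqx_limiter_schema:default_period(); the module name is made up):

-module(limiter_capacity_sketch).
-export([capacity/3]).

%% Tokens that accumulate over one second at the per-period rate, plus the
%% configured burst allowance; an infinite rate means an unbounded bucket.
capacity(infinity, _Burst, _PeriodMs) ->
    infinity;
capacity(Rate, Burst, PeriodMs) ->
    erlang:floor(1000 * Rate / PeriodMs) + Burst.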

View File

@ -118,17 +118,24 @@ connect(_Id, _Type, undefined) ->
{ok, emqx_htb_limiter:make_infinity_limiter()}; {ok, emqx_htb_limiter:make_infinity_limiter()};
connect(Id, Type, Cfg) -> connect(Id, Type, Cfg) ->
case find_limiter_cfg(Type, Cfg) of case find_limiter_cfg(Type, Cfg) of
{undefined, _} -> {_ClientCfg, undefined, _NodeCfg} ->
{ok, emqx_htb_limiter:make_infinity_limiter()}; {ok, emqx_htb_limiter:make_infinity_limiter()};
{#{rate := infinity}, #{rate := infinity}, #{rate := infinity}} ->
{ok, emqx_htb_limiter:make_infinity_limiter()};
{ClientCfg, #{rate := infinity}, #{rate := infinity}} ->
{ok,
emqx_htb_limiter:make_token_bucket_limiter(
ClientCfg, emqx_limiter_bucket_ref:infinity_bucket()
)};
{ {
#{ #{rate := CliRate} = ClientCfg,
rate := BucketRate, #{rate := BucketRate} = BucketCfg,
capacity := BucketSize _
},
#{rate := CliRate, capacity := CliSize} = ClientCfg
} -> } ->
case emqx_limiter_manager:find_bucket(Id, Type) of case emqx_limiter_manager:find_bucket(Id, Type) of
{ok, Bucket} -> {ok, Bucket} ->
BucketSize = emqx_limiter_schema:calc_capacity(BucketCfg),
CliSize = emqx_limiter_schema:calc_capacity(ClientCfg),
{ok, {ok,
if if
CliRate < BucketRate orelse CliSize < BucketSize -> CliRate < BucketRate orelse CliSize < BucketSize ->
@ -493,12 +500,14 @@ make_root(#{rate := Rate, burst := Burst}) ->
produced => 0.0 produced => 0.0
}. }.
do_add_bucket(Id, #{rate := Rate, capacity := Capacity} = Cfg, #{buckets := Buckets} = State) -> do_add_bucket(_Id, #{rate := infinity}, #{root := #{rate := infinity}} = State) ->
State;
do_add_bucket(Id, #{rate := Rate} = Cfg, #{buckets := Buckets} = State) ->
case maps:get(Id, Buckets, undefined) of case maps:get(Id, Buckets, undefined) of
undefined -> undefined ->
make_bucket(Id, Cfg, State); make_bucket(Id, Cfg, State);
Bucket -> Bucket ->
Bucket2 = Bucket#{rate := Rate, capacity := Capacity}, Bucket2 = Bucket#{rate := Rate, capacity := emqx_limiter_schema:calc_capacity(Cfg)},
State#{buckets := Buckets#{Id := Bucket2}} State#{buckets := Buckets#{Id := Bucket2}}
end. end.
@ -509,7 +518,7 @@ make_bucket(Id, Cfg, #{index := ?COUNTER_SIZE} = State) ->
}); });
make_bucket( make_bucket(
Id, Id,
#{rate := Rate, capacity := Capacity} = Cfg, #{rate := Rate} = Cfg,
#{type := Type, counter := Counter, index := Index, buckets := Buckets} = State #{type := Type, counter := Counter, index := Index, buckets := Buckets} = State
) -> ) ->
NewIndex = Index + 1, NewIndex = Index + 1,
@ -519,7 +528,7 @@ make_bucket(
rate => Rate, rate => Rate,
obtained => Initial, obtained => Initial,
correction => 0, correction => 0,
capacity => Capacity, capacity => emqx_limiter_schema:calc_capacity(Cfg),
counter => Counter, counter => Counter,
index => NewIndex index => NewIndex
}, },
@ -541,19 +550,14 @@ do_del_bucket(Id, #{type := Type, buckets := Buckets} = State) ->
get_initial_val( get_initial_val(
#{ #{
initial := Initial, initial := Initial,
rate := Rate, rate := Rate
capacity := Capacity
} }
) -> ) ->
%% initial will nevner be infinity(see the emqx_limiter_schema)
InfVal = emqx_limiter_schema:infinity_value(),
if if
Initial > 0 -> Initial > 0 ->
Initial; Initial;
Rate =/= infinity -> Rate =/= infinity ->
erlang:min(Rate, Capacity); Rate;
Capacity =/= infinity andalso Capacity =/= InfVal ->
Capacity;
true -> true ->
0 0
end. end.
@ -568,11 +572,12 @@ call(Type, Msg) ->
end. end.
find_limiter_cfg(Type, #{rate := _} = Cfg) -> find_limiter_cfg(Type, #{rate := _} = Cfg) ->
{Cfg, find_client_cfg(Type, maps:get(client, Cfg, undefined))}; {find_client_cfg(Type, maps:get(client, Cfg, undefined)), Cfg, find_node_cfg(Type)};
find_limiter_cfg(Type, Cfg) -> find_limiter_cfg(Type, Cfg) ->
{ {
find_client_cfg(Type, emqx_utils_maps:deep_get([client, Type], Cfg, undefined)),
maps:get(Type, Cfg, undefined), maps:get(Type, Cfg, undefined),
find_client_cfg(Type, emqx_map_lib:deep_get([client, Type], Cfg, undefined)) find_node_cfg(Type)
}. }.
find_client_cfg(Type, BucketCfg) -> find_client_cfg(Type, BucketCfg) ->
@ -585,3 +590,6 @@ merge_client_cfg(NodeCfg, undefined) ->
NodeCfg; NodeCfg;
merge_client_cfg(NodeCfg, BucketCfg) -> merge_client_cfg(NodeCfg, BucketCfg) ->
maps:merge(NodeCfg, BucketCfg). maps:merge(NodeCfg, BucketCfg).
find_node_cfg(Type) ->
emqx:get_config([limiter, Type], #{rate => infinity, burst => 0}).

View File

@ -427,12 +427,12 @@ pre_config_update([listeners, _Type, _Name], {create, _NewConf}, _RawConf) ->
pre_config_update([listeners, _Type, _Name], {update, _Request}, undefined) -> pre_config_update([listeners, _Type, _Name], {update, _Request}, undefined) ->
{error, not_found}; {error, not_found};
pre_config_update([listeners, Type, Name], {update, Request}, RawConf) -> pre_config_update([listeners, Type, Name], {update, Request}, RawConf) ->
NewConfT = emqx_map_lib:deep_merge(RawConf, Request), NewConfT = emqx_utils_maps:deep_merge(RawConf, Request),
NewConf = ensure_override_limiter_conf(NewConfT, Request), NewConf = ensure_override_limiter_conf(NewConfT, Request),
CertsDir = certs_dir(Type, Name), CertsDir = certs_dir(Type, Name),
{ok, convert_certs(CertsDir, NewConf)}; {ok, convert_certs(CertsDir, NewConf)};
pre_config_update([listeners, _Type, _Name], {action, _Action, Updated}, RawConf) -> pre_config_update([listeners, _Type, _Name], {action, _Action, Updated}, RawConf) ->
NewConf = emqx_map_lib:deep_merge(RawConf, Updated), NewConf = emqx_utils_maps:deep_merge(RawConf, Updated),
{ok, NewConf}; {ok, NewConf};
pre_config_update(_Path, _Request, RawConf) -> pre_config_update(_Path, _Request, RawConf) ->
{ok, RawConf}. {ok, RawConf}.
@ -500,7 +500,7 @@ esockd_opts(ListenerId, Type, Opts0) ->
ws_opts(Type, ListenerName, Opts) -> ws_opts(Type, ListenerName, Opts) ->
WsPaths = [ WsPaths = [
{emqx_map_lib:deep_get([websocket, mqtt_path], Opts, "/mqtt"), emqx_ws_connection, #{ {emqx_utils_maps:deep_get([websocket, mqtt_path], Opts, "/mqtt"), emqx_ws_connection, #{
zone => zone(Opts), zone => zone(Opts),
listener => {Type, ListenerName}, listener => {Type, ListenerName},
limiter => limiter(Opts), limiter => limiter(Opts),
@ -538,7 +538,7 @@ esockd_access_rules(StrRules) ->
[A, CIDR] = string:tokens(S, " "), [A, CIDR] = string:tokens(S, " "),
%% esockd rules only use words 'allow' and 'deny', both are existing %% esockd rules only use words 'allow' and 'deny', both are existing
%% comparison of strings may be better, but there is a loss of backward compatibility %% comparison of strings may be better, but there is a loss of backward compatibility
case emqx_misc:safe_to_existing_atom(A) of case emqx_utils:safe_to_existing_atom(A) of
{ok, Action} -> {ok, Action} ->
[ [
{ {
@ -560,7 +560,7 @@ esockd_access_rules(StrRules) ->
merge_default(Options) -> merge_default(Options) ->
case lists:keytake(tcp_options, 1, Options) of case lists:keytake(tcp_options, 1, Options) of
{value, {tcp_options, TcpOpts}, Options1} -> {value, {tcp_options, TcpOpts}, Options1} ->
[{tcp_options, emqx_misc:merge_opts(?MQTT_SOCKOPTS, TcpOpts)} | Options1]; [{tcp_options, emqx_utils:merge_opts(?MQTT_SOCKOPTS, TcpOpts)} | Options1];
false -> false ->
[{tcp_options, ?MQTT_SOCKOPTS} | Options] [{tcp_options, ?MQTT_SOCKOPTS} | Options]
end. end.
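
pre_config_update/3 above leans on emqx_utils_maps:deep_merge/2 (the renamed emqx_map_lib helper) to lay an update request over the existing raw listener config. A rough re-implementation of the merge semantics it relies on, for reference only (the real helper may differ in edge cases):

-module(deep_merge_sketch).
-export([deep_merge/2]).

%% Recursively merge Override into Base: nested maps are merged key by key,
%% any other value in Override simply replaces the value in Base.
deep_merge(Base, Override) when is_map(Base), is_map(Override) ->
    maps:fold(
        fun(Key, NewVal, Acc) ->
            case maps:find(Key, Acc) of
                {ok, OldVal} when is_map(OldVal), is_map(NewVal) ->
                    Acc#{Key => deep_merge(OldVal, NewVal)};
                _ ->
                    Acc#{Key => NewVal}
            end
        end,
        Base,
        Override
    ).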

View File

@ -62,11 +62,11 @@
%% The JSON object is pretty-printed. %% The JSON object is pretty-printed.
%% NOTE: do not use this function for logging. %% NOTE: do not use this function for logging.
best_effort_json(Input) -> best_effort_json(Input) ->
best_effort_json(Input, [space, {indent, 4}]). best_effort_json(Input, [pretty, force_utf8]).
best_effort_json(Input, Opts) -> best_effort_json(Input, Opts) ->
Config = #{depth => unlimited, single_line => true}, Config = #{depth => unlimited, single_line => true},
JsonReady = best_effort_json_obj(Input, Config), JsonReady = best_effort_json_obj(Input, Config),
jsx:encode(JsonReady, Opts). emqx_utils_json:encode(JsonReady, Opts).
-spec format(logger:log_event(), config()) -> iodata(). -spec format(logger:log_event(), config()) -> iodata().
format(#{level := Level, msg := Msg, meta := Meta} = Event, Config0) when is_map(Config0) -> format(#{level := Level, msg := Msg, meta := Meta} = Event, Config0) when is_map(Config0) ->
@ -92,7 +92,7 @@ format(Msg, Meta, Config) ->
} }
end, end,
Data = maps:without([report_cb], Data0), Data = maps:without([report_cb], Data0),
jiffy:encode(json_obj(Data, Config)). emqx_utils_json:encode(json_obj(Data, Config)).
maybe_format_msg({report, Report} = Msg, #{report_cb := Cb} = Meta, Config) -> maybe_format_msg({report, Report} = Msg, #{report_cb := Cb} = Meta, Config) ->
case is_map(Report) andalso Cb =:= ?DEFAULT_FORMATTER of case is_map(Report) andalso Cb =:= ?DEFAULT_FORMATTER of
@ -378,15 +378,15 @@ p_config() ->
best_effort_json_test() -> best_effort_json_test() ->
?assertEqual( ?assertEqual(
<<"{}">>, <<"{\n \n}">>,
emqx_logger_jsonfmt:best_effort_json([]) emqx_logger_jsonfmt:best_effort_json([])
), ),
?assertEqual( ?assertEqual(
<<"{\n \"key\": []\n}">>, <<"{\n \"key\" : [\n \n ]\n}">>,
emqx_logger_jsonfmt:best_effort_json(#{key => []}) emqx_logger_jsonfmt:best_effort_json(#{key => []})
), ),
?assertEqual( ?assertEqual(
<<"[\n {\n \"key\": []\n }\n]">>, <<"[\n {\n \"key\" : [\n \n ]\n }\n]">>,
emqx_logger_jsonfmt:best_effort_json([#{key => []}]) emqx_logger_jsonfmt:best_effort_json([#{key => []}])
), ),
ok. ok.

View File

@ -541,7 +541,7 @@ init([]) ->
CRef = counters:new(?MAX_SIZE, [write_concurrency]), CRef = counters:new(?MAX_SIZE, [write_concurrency]),
ok = persistent_term:put(?MODULE, CRef), ok = persistent_term:put(?MODULE, CRef),
% Create index mapping table % Create index mapping table
ok = emqx_tables:new(?TAB, [{keypos, 2}, {read_concurrency, true}]), ok = emqx_utils_ets:new(?TAB, [{keypos, 2}, {read_concurrency, true}]),
Metrics = lists:append([ Metrics = lists:append([
?BYTES_METRICS, ?BYTES_METRICS,
?PACKET_METRICS, ?PACKET_METRICS,

View File

@ -110,7 +110,7 @@ register_listener(ListenerID, Opts) ->
-spec inject_sni_fun(emqx_listeners:listener_id(), map()) -> map(). -spec inject_sni_fun(emqx_listeners:listener_id(), map()) -> map().
inject_sni_fun(ListenerID, Conf0) -> inject_sni_fun(ListenerID, Conf0) ->
SNIFun = emqx_const_v1:make_sni_fun(ListenerID), SNIFun = emqx_const_v1:make_sni_fun(ListenerID),
Conf = emqx_map_lib:deep_merge(Conf0, #{ssl_options => #{sni_fun => SNIFun}}), Conf = emqx_utils_maps:deep_merge(Conf0, #{ssl_options => #{sni_fun => SNIFun}}),
ok = ?MODULE:register_listener(ListenerID, Conf), ok = ?MODULE:register_listener(ListenerID, Conf),
Conf. Conf.
@ -120,7 +120,7 @@ inject_sni_fun(ListenerID, Conf0) ->
init(_Args) -> init(_Args) ->
logger:set_process_metadata(#{domain => [emqx, ocsp, cache]}), logger:set_process_metadata(#{domain => [emqx, ocsp, cache]}),
emqx_tables:new(?CACHE_TAB, [ emqx_utils_ets:new(?CACHE_TAB, [
named_table, named_table,
public, public,
{heir, whereis(emqx_kernel_sup), none}, {heir, whereis(emqx_kernel_sup), none},
@ -149,7 +149,7 @@ handle_call({register_listener, ListenerID, Conf}, _From, State0) ->
msg => "registering_ocsp_cache", msg => "registering_ocsp_cache",
listener_id => ListenerID listener_id => ListenerID
}), }),
RefreshInterval0 = emqx_map_lib:deep_get([ssl_options, ocsp, refresh_interval], Conf), RefreshInterval0 = emqx_utils_maps:deep_get([ssl_options, ocsp, refresh_interval], Conf),
RefreshInterval = max(RefreshInterval0, ?MIN_REFRESH_INTERVAL), RefreshInterval = max(RefreshInterval0, ?MIN_REFRESH_INTERVAL),
State = State0#{{refresh_interval, ListenerID} => RefreshInterval}, State = State0#{{refresh_interval, ListenerID} => RefreshInterval},
%% we need to pass the config along because this might be called %% we need to pass the config along because this might be called
@ -476,9 +476,9 @@ ensure_timer(ListenerID, State, Timeout) ->
ensure_timer(ListenerID, {refresh, ListenerID}, State, Timeout). ensure_timer(ListenerID, {refresh, ListenerID}, State, Timeout).
ensure_timer(ListenerID, Message, State, Timeout) -> ensure_timer(ListenerID, Message, State, Timeout) ->
emqx_misc:cancel_timer(maps:get(?REFRESH_TIMER(ListenerID), State, undefined)), emqx_utils:cancel_timer(maps:get(?REFRESH_TIMER(ListenerID), State, undefined)),
State#{ State#{
?REFRESH_TIMER(ListenerID) => emqx_misc:start_timer( ?REFRESH_TIMER(ListenerID) => emqx_utils:start_timer(
Timeout, Timeout,
Message Message
) )

View File

@ -180,8 +180,8 @@ code_change(_OldVsn, State, _Extra) ->
%% Internal functions %% Internal functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
cancel_outdated_timer(#{mem_time_ref := MemRef, cpu_time_ref := CpuRef}) -> cancel_outdated_timer(#{mem_time_ref := MemRef, cpu_time_ref := CpuRef}) ->
emqx_misc:cancel_timer(MemRef), emqx_utils:cancel_timer(MemRef),
emqx_misc:cancel_timer(CpuRef), emqx_utils:cancel_timer(CpuRef),
ok. ok.
start_cpu_check_timer() -> start_cpu_check_timer() ->
@ -204,7 +204,7 @@ start_mem_check_timer() ->
end. end.
start_timer(Interval, Msg) -> start_timer(Interval, Msg) ->
emqx_misc:start_timer(Interval, Msg). emqx_utils:start_timer(Interval, Msg).
update_mem_alarm_status(HWM) when HWM > 1.0 orelse HWM < 0.0 -> update_mem_alarm_status(HWM) when HWM > 1.0 orelse HWM < 0.0 ->
?SLOG(warning, #{msg => "discarded_out_of_range_mem_alarm_threshold", value => HWM}), ?SLOG(warning, #{msg => "discarded_out_of_range_mem_alarm_threshold", value => HWM}),

View File

@ -57,7 +57,7 @@
-spec start_link(atom(), pos_integer()) -> startlink_ret(). -spec start_link(atom(), pos_integer()) -> startlink_ret().
start_link(Pool, Id) -> start_link(Pool, Id) ->
gen_server:start_link( gen_server:start_link(
{local, emqx_misc:proc_name(?MODULE, Id)}, {local, emqx_utils:proc_name(?MODULE, Id)},
?MODULE, ?MODULE,
[Pool, Id], [Pool, Id],
[{hibernate_after, 1000}] [{hibernate_after, 1000}]

View File

@ -98,7 +98,7 @@ mnesia(boot) ->
-spec start_link(atom(), pos_integer()) -> startlink_ret(). -spec start_link(atom(), pos_integer()) -> startlink_ret().
start_link(Pool, Id) -> start_link(Pool, Id) ->
gen_server:start_link( gen_server:start_link(
{local, emqx_misc:proc_name(?MODULE, Id)}, {local, emqx_utils:proc_name(?MODULE, Id)},
?MODULE, ?MODULE,
[Pool, Id], [Pool, Id],
[{hibernate_after, 1000}] [{hibernate_after, 1000}]

View File

@ -27,6 +27,8 @@
cast/5, cast/5,
multicall/4, multicall/4,
multicall/5, multicall/5,
multicall_on_running/5,
on_running/3,
unwrap_erpc/1 unwrap_erpc/1
]). ]).
@ -91,6 +93,17 @@ multicall(Nodes, Mod, Fun, Args) ->
multicall(Key, Nodes, Mod, Fun, Args) -> multicall(Key, Nodes, Mod, Fun, Args) ->
gen_rpc:multicall(rpc_nodes([{Key, Node} || Node <- Nodes]), Mod, Fun, Args). gen_rpc:multicall(rpc_nodes([{Key, Node} || Node <- Nodes]), Mod, Fun, Args).
-spec multicall_on_running([node()], module(), atom(), list(), timeout()) -> [term() | {error, _}].
multicall_on_running(Nodes, Mod, Fun, Args, Timeout) ->
unwrap_erpc(erpc:multicall(Nodes, emqx_rpc, on_running, [Mod, Fun, Args], Timeout)).
-spec on_running(module(), atom(), list()) -> term().
on_running(Mod, Fun, Args) ->
case emqx:is_running() of
true -> apply(Mod, Fun, Args);
false -> error(emqx_down)
end.
-spec cast(node(), module(), atom(), list()) -> cast_result(). -spec cast(node(), module(), atom(), list()) -> cast_result().
cast(Node, Mod, Fun, Args) -> cast(Node, Mod, Fun, Args) ->
%% Note: using a non-ordered cast here, since the generated key is %% Note: using a non-ordered cast here, since the generated key is
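
The new multicall_on_running/5 runs an MFA on each node only where EMQX is actually up; nodes where it is down surface as {error, _} entries in the result list, per the spec above. A hypothetical shell sketch (erlang:node/0 is just a stand-in target, any Mod:Fun(Args) works):

Nodes = [node() | nodes()].
Results = emqx_rpc:multicall_on_running(Nodes, erlang, node, [], 5000).
lists:foreach(
    fun({error, Reason}) -> io:format("node skipped: ~p~n", [Reason]);
       (Name) -> io:format("running on: ~p~n", [Name])
    end,
    Results
).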

View File

@ -44,6 +44,7 @@
-type port_number() :: 1..65536. -type port_number() :: 1..65536.
-type server_parse_option() :: #{default_port => port_number(), no_port => boolean()}. -type server_parse_option() :: #{default_port => port_number(), no_port => boolean()}.
-type url() :: binary(). -type url() :: binary().
-type json_binary() :: binary().
-typerefl_from_string({duration/0, emqx_schema, to_duration}). -typerefl_from_string({duration/0, emqx_schema, to_duration}).
-typerefl_from_string({duration_s/0, emqx_schema, to_duration_s}). -typerefl_from_string({duration_s/0, emqx_schema, to_duration_s}).
@ -58,6 +59,7 @@
-typerefl_from_string({cipher/0, emqx_schema, to_erl_cipher_suite}). -typerefl_from_string({cipher/0, emqx_schema, to_erl_cipher_suite}).
-typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}). -typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}).
-typerefl_from_string({url/0, emqx_schema, to_url}). -typerefl_from_string({url/0, emqx_schema, to_url}).
-typerefl_from_string({json_binary/0, emqx_schema, to_json_binary}).
-export([ -export([
validate_heap_size/1, validate_heap_size/1,
@ -84,7 +86,8 @@
to_ip_port/1, to_ip_port/1,
to_erl_cipher_suite/1, to_erl_cipher_suite/1,
to_comma_separated_atoms/1, to_comma_separated_atoms/1,
to_url/1 to_url/1,
to_json_binary/1
]). ]).
-export([ -export([
@ -112,7 +115,8 @@
ip_port/0, ip_port/0,
cipher/0, cipher/0,
comma_separated_atoms/0, comma_separated_atoms/0,
url/0 url/0,
json_binary/0
]). ]).
-export([namespace/0, roots/0, roots/1, fields/1, desc/1, tags/0]). -export([namespace/0, roots/0, roots/1, fields/1, desc/1, tags/0]).
@ -141,25 +145,31 @@ roots(high) ->
{"listeners", {"listeners",
sc( sc(
ref("listeners"), ref("listeners"),
#{} #{importance => ?IMPORTANCE_HIGH}
)},
{"zones",
sc(
map("name", ref("zone")),
#{desc => ?DESC(zones)}
)}, )},
{"mqtt", {"mqtt",
sc( sc(
ref("mqtt"), ref("mqtt"),
#{desc => ?DESC(mqtt)} #{
desc => ?DESC(mqtt),
importance => ?IMPORTANCE_MEDIUM
}
)},
{"zones",
sc(
map("name", ref("zone")),
#{
desc => ?DESC(zones),
importance => ?IMPORTANCE_LOW
}
)}, )},
{?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, authentication(global)}, {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, authentication(global)},
%% NOTE: authorization schema here is only to keep emqx app prue %% NOTE: authorization schema here is only to keep emqx app pure
%% the full schema for EMQX node is injected in emqx_conf_schema. %% the full schema for EMQX node is injected in emqx_conf_schema.
{?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME, {?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME,
sc( sc(
ref(?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME), ref(?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME),
#{} #{importance => ?IMPORTANCE_HIDDEN}
)} )}
]; ];
roots(medium) -> roots(medium) ->
@ -182,7 +192,7 @@ roots(medium) ->
{"overload_protection", {"overload_protection",
sc( sc(
ref("overload_protection"), ref("overload_protection"),
#{} #{importance => ?IMPORTANCE_HIDDEN}
)} )}
]; ];
roots(low) -> roots(low) ->
@ -195,12 +205,16 @@ roots(low) ->
{"conn_congestion", {"conn_congestion",
sc( sc(
ref("conn_congestion"), ref("conn_congestion"),
#{} #{
importance => ?IMPORTANCE_HIDDEN
}
)}, )},
{"stats", {"stats",
sc( sc(
ref("stats"), ref("stats"),
#{} #{
importance => ?IMPORTANCE_HIDDEN
}
)}, )},
{"sysmon", {"sysmon",
sc( sc(
@ -215,17 +229,17 @@ roots(low) ->
{"flapping_detect", {"flapping_detect",
sc( sc(
ref("flapping_detect"), ref("flapping_detect"),
#{} #{importance => ?IMPORTANCE_HIDDEN}
)}, )},
{"persistent_session_store", {"persistent_session_store",
sc( sc(
ref("persistent_session_store"), ref("persistent_session_store"),
#{} #{importance => ?IMPORTANCE_HIDDEN}
)}, )},
{"trace", {"trace",
sc( sc(
ref("trace"), ref("trace"),
#{} #{importance => ?IMPORTANCE_HIDDEN}
)}, )},
{"crl_cache", {"crl_cache",
sc( sc(
@ -335,6 +349,7 @@ fields("stats") ->
boolean(), boolean(),
#{ #{
default => true, default => true,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(stats_enable) desc => ?DESC(stats_enable)
} }
)} )}
@ -605,8 +620,7 @@ fields("mqtt") ->
)} )}
]; ];
fields("zone") -> fields("zone") ->
Fields = emqx_zone_schema:roots(), emqx_zone_schema:zone();
[{F, ref(emqx_zone_schema, F)} || F <- Fields];
fields("flapping_detect") -> fields("flapping_detect") ->
[ [
{"enable", {"enable",
@ -614,25 +628,27 @@ fields("flapping_detect") ->
boolean(), boolean(),
#{ #{
default => false, default => false,
deprecated => {since, "5.0.23"},
desc => ?DESC(flapping_detect_enable) desc => ?DESC(flapping_detect_enable)
} }
)}, )},
{"max_count",
sc(
integer(),
#{
default => 15,
desc => ?DESC(flapping_detect_max_count)
}
)},
{"window_time", {"window_time",
sc( sc(
duration(), hoconsc:union([disabled, duration()]),
#{ #{
default => <<"1m">>, default => disabled,
importance => ?IMPORTANCE_HIGH,
desc => ?DESC(flapping_detect_window_time) desc => ?DESC(flapping_detect_window_time)
} }
)}, )},
{"max_count",
sc(
non_neg_integer(),
#{
default => 15,
desc => ?DESC(flapping_detect_max_count)
}
)},
{"ban_time", {"ban_time",
sc( sc(
duration(), duration(),
@ -1494,12 +1510,14 @@ fields("broker") ->
ref("broker_perf"), ref("broker_perf"),
#{importance => ?IMPORTANCE_HIDDEN} #{importance => ?IMPORTANCE_HIDDEN}
)}, )},
%% FIXME: Need new design for shared subscription group
{"shared_subscription_group", {"shared_subscription_group",
sc( sc(
map(name, ref("shared_subscription_group")), map(name, ref("shared_subscription_group")),
#{ #{
example => #{<<"example_group">> => #{<<"strategy">> => <<"random">>}}, example => #{<<"example_group">> => #{<<"strategy">> => <<"random">>}},
desc => ?DESC(shared_subscription_group_strategy) desc => ?DESC(shared_subscription_group_strategy),
importance => ?IMPORTANCE_HIDDEN
} }
)} )}
]; ];
@ -1615,7 +1633,9 @@ fields("sysmon") ->
{"top", {"top",
sc( sc(
ref("sysmon_top"), ref("sysmon_top"),
#{} %% Useful monitoring solution when benchmarking,
%% but hardly common enough for regular users.
#{importance => ?IMPORTANCE_HIDDEN}
)} )}
]; ];
fields("sysmon_vm") -> fields("sysmon_vm") ->
@ -1849,6 +1869,8 @@ fields("trace") ->
{"payload_encode", {"payload_encode",
sc(hoconsc:enum([hex, text, hidden]), #{ sc(hoconsc:enum([hex, text, hidden]), #{
default => text, default => text,
deprecated => {since, "5.0.22"},
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(fields_trace_payload_encode) desc => ?DESC(fields_trace_payload_encode)
})} })}
]. ].
@ -2205,6 +2227,7 @@ common_ssl_opts_schema(Defaults) ->
#{ #{
default => AvailableVersions, default => AvailableVersions,
desc => ?DESC(common_ssl_opts_schema_versions), desc => ?DESC(common_ssl_opts_schema_versions),
importance => ?IMPORTANCE_HIGH,
validator => fun(Inputs) -> validate_tls_versions(AvailableVersions, Inputs) end validator => fun(Inputs) -> validate_tls_versions(AvailableVersions, Inputs) end
} }
)}, )},
@ -2215,6 +2238,7 @@ common_ssl_opts_schema(Defaults) ->
#{ #{
default => <<"emqx_tls_psk:lookup">>, default => <<"emqx_tls_psk:lookup">>,
converter => fun ?MODULE:user_lookup_fun_tr/2, converter => fun ?MODULE:user_lookup_fun_tr/2,
importance => ?IMPORTANCE_HIDDEN,
desc => ?DESC(common_ssl_opts_schema_user_lookup_fun) desc => ?DESC(common_ssl_opts_schema_user_lookup_fun)
} }
)}, )},
@ -2300,8 +2324,6 @@ server_ssl_opts_schema(Defaults, IsRanchListener) ->
ref("ocsp"), ref("ocsp"),
#{ #{
required => false, required => false,
%% TODO: remove after e5.0.2
importance => ?IMPORTANCE_HIDDEN,
validator => fun ocsp_inner_validator/1 validator => fun ocsp_inner_validator/1
} }
)}, )},
@ -2310,6 +2332,7 @@ server_ssl_opts_schema(Defaults, IsRanchListener) ->
boolean(), boolean(),
#{ #{
default => false, default => false,
importance => ?IMPORTANCE_MEDIUM,
desc => ?DESC("server_ssl_opts_schema_enable_crl_check") desc => ?DESC("server_ssl_opts_schema_enable_crl_check")
} }
)} )}
@ -2321,7 +2344,7 @@ mqtt_ssl_listener_ssl_options_validator(Conf) ->
fun ocsp_outer_validator/1, fun ocsp_outer_validator/1,
fun crl_outer_validator/1 fun crl_outer_validator/1
], ],
case emqx_misc:pipeline(Checks, Conf, not_used) of case emqx_utils:pipeline(Checks, Conf, not_used) of
{ok, _, _} -> {ok, _, _} ->
ok; ok;
{error, Reason, _NotUsed} -> {error, Reason, _NotUsed} ->
@ -2342,7 +2365,7 @@ ocsp_outer_validator(_Conf) ->
ok. ok.
ocsp_inner_validator(#{enable_ocsp_stapling := _} = Conf) -> ocsp_inner_validator(#{enable_ocsp_stapling := _} = Conf) ->
ocsp_inner_validator(emqx_map_lib:binary_key_map(Conf)); ocsp_inner_validator(emqx_utils_maps:binary_key_map(Conf));
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := false} = _Conf) -> ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := false} = _Conf) ->
ok; ok;
ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := true} = Conf) -> ocsp_inner_validator(#{<<"enable_ocsp_stapling">> := true} = Conf) ->
@ -2576,6 +2599,14 @@ to_url(Str) ->
Error Error
end. end.
to_json_binary(Str) ->
case emqx_utils_json:safe_decode(Str) of
{ok, _} ->
{ok, iolist_to_binary(Str)};
Error ->
Error
end.
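%% Illustrative sketch, not part of this changeset: a valid JSON string is kept
%% verbatim as a binary, anything else propagates the error tuple produced by
%% emqx_utils_json:safe_decode/1.
%%   to_json_binary("{\"enable\": true}") -> {ok, <<"{\"enable\": true}">>}
%%   to_json_binary("not json")           -> {error, _Reason}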
to_bar_separated_list(Str) -> to_bar_separated_list(Str) ->
{ok, string:tokens(Str, "| ")}. {ok, string:tokens(Str, "| ")}.
@ -2642,20 +2673,22 @@ to_atom(Str) when is_list(Str) ->
to_atom(Bin) when is_binary(Bin) -> to_atom(Bin) when is_binary(Bin) ->
binary_to_atom(Bin, utf8). binary_to_atom(Bin, utf8).
validate_heap_size(Siz) -> validate_heap_size(Siz) when is_integer(Siz) ->
MaxSiz = MaxSiz =
case erlang:system_info(wordsize) of case erlang:system_info(wordsize) of
% arch_64 % arch_64
8 -> 8 -> (1 bsl 59) - 1;
(1 bsl 59) - 1;
% arch_32 % arch_32
4 -> 4 -> (1 bsl 27) - 1
(1 bsl 27) - 1
end, end,
case Siz > MaxSiz of case Siz > MaxSiz of
true -> error(io_lib:format("force_shutdown_policy: heap-size ~ts is too large", [Siz])); true ->
false -> ok {error, #{reason => max_heap_size_too_large, maximum => MaxSiz}};
end. false ->
ok
end;
validate_heap_size(_SizStr) ->
{error, invalid_heap_size}.
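%% Illustrative sketch, not part of this changeset: the validator now returns
%% error tuples instead of raising, e.g. on a 64-bit VM:
%%   validate_heap_size(1024)       -> ok
%%   validate_heap_size(1 bsl 62)   -> {error, #{reason => max_heap_size_too_large, maximum => (1 bsl 59) - 1}}
%%   validate_heap_size(<<"32MB">>) -> {error, invalid_heap_size}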
validate_alarm_actions(Actions) -> validate_alarm_actions(Actions) ->
UnSupported = lists:filter( UnSupported = lists:filter(
@ -2732,10 +2765,16 @@ str(S) when is_list(S) ->
S. S.
authentication(Which) -> authentication(Which) ->
Desc = {Importance, Desc} =
case Which of case Which of
global -> ?DESC(global_authentication); global ->
listener -> ?DESC(listener_authentication) %% For root level authentication, it is recommended to configure
%% from the dashboard or API.
%% Hence it's considered low-importance when it comes to
%% configuration importance.
{?IMPORTANCE_LOW, ?DESC(global_authentication)};
listener ->
{?IMPORTANCE_HIDDEN, ?DESC(listener_authentication)}
end, end,
%% poor man's dependency injection %% poor man's dependency injection
%% this is due to the fact that authn is implemented outside of 'emqx' app. %% this is due to the fact that authn is implemented outside of 'emqx' app.
@ -2748,7 +2787,11 @@ authentication(Which) ->
Module -> Module ->
Module:root_type() Module:root_type()
end, end,
hoconsc:mk(Type, #{desc => Desc, converter => fun ensure_array/2}). hoconsc:mk(Type, #{
desc => Desc,
converter => fun ensure_array/2,
importance => Importance
}).
%% the older version schema allows individual element (instead of a chain) in config %% the older version schema allows individual element (instead of a chain) in config
ensure_array(undefined, _) -> undefined; ensure_array(undefined, _) -> undefined;

View File

@ -39,7 +39,7 @@
%% @doc Create a sequence. %% @doc Create a sequence.
-spec create(name()) -> ok. -spec create(name()) -> ok.
create(Name) -> create(Name) ->
emqx_tables:new(Name, [public, set, {write_concurrency, true}]). emqx_utils_ets:new(Name, [public, set, {write_concurrency, true}]).
%% @doc Next value of the sequence. %% @doc Next value of the sequence.
-spec nextval(name(), key()) -> seqid(). -spec nextval(name(), key()) -> seqid().

View File

@ -941,7 +941,7 @@ age(Now, Ts) -> Now - Ts.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
set_field(Name, Value, Session) -> set_field(Name, Value, Session) ->
Pos = emqx_misc:index_of(Name, record_info(fields, session)), Pos = emqx_utils:index_of(Name, record_info(fields, session)),
setelement(Pos + 1, Session, Value). setelement(Pos + 1, Session, Value).
get_mqueue(#session{mqueue = Q}) -> get_mqueue(#session{mqueue = Q}) ->

View File

@ -95,7 +95,7 @@ create_table(Tab, Storage) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
create_init_tab() -> create_init_tab() ->
emqx_tables:new(?SESSION_INIT_TAB, [ emqx_utils_ets:new(?SESSION_INIT_TAB, [
public, public,
{read_concurrency, true}, {read_concurrency, true},
{write_concurrency, true} {write_concurrency, true}
@ -104,7 +104,7 @@ create_init_tab() ->
-spec start_link(atom(), pos_integer()) -> startlink_ret(). -spec start_link(atom(), pos_integer()) -> startlink_ret().
start_link(Pool, Id) -> start_link(Pool, Id) ->
gen_server:start_link( gen_server:start_link(
{local, emqx_misc:proc_name(?MODULE, Id)}, {local, emqx_utils:proc_name(?MODULE, Id)},
?MODULE, ?MODULE,
[Pool, Id], [Pool, Id],
[{hibernate_after, 1000}] [{hibernate_after, 1000}]
@ -182,7 +182,7 @@ pending(SessionID, MarkerIDs) ->
call(pick(SessionID), {pending, SessionID, MarkerIDs}). call(pick(SessionID), {pending, SessionID, MarkerIDs}).
buffer(SessionID, STopic, Msg) -> buffer(SessionID, STopic, Msg) ->
case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
undefined -> ok; undefined -> ok;
Worker -> emqx_session_router_worker:buffer(Worker, STopic, Msg) Worker -> emqx_session_router_worker:buffer(Worker, STopic, Msg)
end. end.
@ -194,7 +194,7 @@ resume_begin(From, SessionID) when is_pid(From), is_binary(SessionID) ->
-spec resume_end(pid(), binary()) -> -spec resume_end(pid(), binary()) ->
{'ok', [emqx_types:message()]} | {'error', term()}. {'ok', [emqx_types:message()]} | {'error', term()}.
resume_end(From, SessionID) when is_pid(From), is_binary(SessionID) -> resume_end(From, SessionID) when is_pid(From), is_binary(SessionID) ->
case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
undefined -> undefined ->
?tp(ps_session_not_found, #{sid => SessionID}), ?tp(ps_session_not_found, #{sid => SessionID}),
{error, not_found}; {error, not_found};
@ -249,7 +249,7 @@ handle_cast({delete_routes, SessionID, Subscriptions}, State) ->
ok = lists:foreach(Fun, maps:to_list(Subscriptions)), ok = lists:foreach(Fun, maps:to_list(Subscriptions)),
{noreply, State}; {noreply, State};
handle_cast({resume_end, SessionID, Pid}, State) -> handle_cast({resume_end, SessionID, Pid}, State) ->
case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
undefined -> skip; undefined -> skip;
P when P =:= Pid -> ets:delete(?SESSION_INIT_TAB, SessionID); P when P =:= Pid -> ets:delete(?SESSION_INIT_TAB, SessionID);
P when is_pid(P) -> skip P when is_pid(P) -> skip
@ -283,7 +283,7 @@ init_resume_worker(RemotePid, SessionID, #{pmon := Pmon} = State) ->
error; error;
{ok, Pid} -> {ok, Pid} ->
Pmon1 = emqx_pmon:monitor(Pid, Pmon), Pmon1 = emqx_pmon:monitor(Pid, Pmon),
case emqx_tables:lookup_value(?SESSION_INIT_TAB, SessionID) of case emqx_utils_ets:lookup_value(?SESSION_INIT_TAB, SessionID) of
undefined -> undefined ->
{ok, Pid, State#{pmon => Pmon1}}; {ok, Pid, State#{pmon => Pmon1}};
{_, OldPid} -> {_, OldPid} ->

View File

@ -399,9 +399,11 @@ init([]) ->
ok = mria:wait_for_tables([?TAB]), ok = mria:wait_for_tables([?TAB]),
{ok, _} = mnesia:subscribe({table, ?TAB, simple}), {ok, _} = mnesia:subscribe({table, ?TAB, simple}),
{atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun ?MODULE:init_monitors/0), {atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun ?MODULE:init_monitors/0),
ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]), ok = emqx_utils_ets:new(?SHARED_SUBS, [protected, bag]),
ok = emqx_tables:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]), ok = emqx_utils_ets:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]),
ok = emqx_tables:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [public, set, {write_concurrency, true}]), ok = emqx_utils_ets:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [
public, set, {write_concurrency, true}
]),
{ok, update_stats(#state{pmon = PMon})}. {ok, update_stats(#state{pmon = PMon})}.
init_monitors() -> init_monitors() ->

View File

@ -201,7 +201,7 @@ cast(Msg) -> gen_server:cast(?SERVER, Msg).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init(#{tick_ms := TickMs}) -> init(#{tick_ms := TickMs}) ->
ok = emqx_tables:new(?TAB, [public, set, {write_concurrency, true}]), ok = emqx_utils_ets:new(?TAB, [public, set, {write_concurrency, true}]),
Stats = lists:append([ Stats = lists:append([
?CONNECTION_STATS, ?CONNECTION_STATS,
?CHANNEL_STATS, ?CHANNEL_STATS,
@ -213,7 +213,7 @@ init(#{tick_ms := TickMs}) ->
{ok, start_timer(#state{updates = [], tick_ms = TickMs}), hibernate}. {ok, start_timer(#state{updates = [], tick_ms = TickMs}), hibernate}.
start_timer(#state{tick_ms = Ms} = State) -> start_timer(#state{tick_ms = Ms} = State) ->
State#state{timer = emqx_misc:start_timer(Ms, tick)}. State#state{timer = emqx_utils:start_timer(Ms, tick)}.
handle_call(stop, _From, State) -> handle_call(stop, _From, State) ->
{stop, normal, ok, State}; {stop, normal, ok, State};
@ -301,7 +301,7 @@ handle_info(Info, State) ->
{noreply, State}. {noreply, State}.
terminate(_Reason, #state{timer = TRef}) -> terminate(_Reason, #state{timer = TRef}) ->
emqx_misc:cancel_timer(TRef). emqx_utils:cancel_timer(TRef).
code_change(_OldVsn, State, _Extra) -> code_change(_OldVsn, State, _Extra) ->
{ok, State}. {ok, State}.

View File

@ -62,7 +62,7 @@
-endif. -endif.
-import(emqx_topic, [systop/1]). -import(emqx_topic, [systop/1]).
-import(emqx_misc, [start_timer/2]). -import(emqx_utils, [start_timer/2]).
-record(state, { -record(state, {
heartbeat :: maybe(reference()), heartbeat :: maybe(reference()),
@ -222,7 +222,7 @@ handle_info(Info, State) ->
terminate(_Reason, #state{heartbeat = TRef1, ticker = TRef2}) -> terminate(_Reason, #state{heartbeat = TRef1, ticker = TRef2}) ->
_ = emqx_config_handler:remove_handler(?CONF_KEY_PATH), _ = emqx_config_handler:remove_handler(?CONF_KEY_PATH),
unload_event_hooks(sys_event_messages()), unload_event_hooks(sys_event_messages()),
lists:foreach(fun emqx_misc:cancel_timer/1, [TRef1, TRef2]). lists:foreach(fun emqx_utils:cancel_timer/1, [TRef1, TRef2]).
unload_event_hooks([]) -> unload_event_hooks([]) ->
ok; ok;
@ -348,7 +348,7 @@ publish(Event, Payload) when
Event == unsubscribed Event == unsubscribed
-> ->
Topic = event_topic(Event, Payload), Topic = event_topic(Event, Payload),
safe_publish(Topic, emqx_json:encode(Payload)). safe_publish(Topic, emqx_utils_json:encode(Payload)).
metric_topic(Name) -> metric_topic(Name) ->
translate_topic("metrics/", Name). translate_topic("metrics/", Name).

View File

@ -77,7 +77,7 @@ init([]) ->
{ok, start_timer(#{timer => undefined, events => []})}. {ok, start_timer(#{timer => undefined, events => []})}.
start_timer(State) -> start_timer(State) ->
State#{timer := emqx_misc:start_timer(timer:seconds(2), reset)}. State#{timer := emqx_utils:start_timer(timer:seconds(2), reset)}.
sysm_opts(VM) -> sysm_opts(VM) ->
sysm_opts(maps:to_list(VM), []). sysm_opts(maps:to_list(VM), []).
@ -204,7 +204,7 @@ handle_info(Info, State) ->
{noreply, State}. {noreply, State}.
terminate(_Reason, #{timer := TRef}) -> terminate(_Reason, #{timer := TRef}) ->
emqx_misc:cancel_timer(TRef), emqx_utils:cancel_timer(TRef),
ok. ok.
code_change(_OldVsn, State, _Extra) -> code_change(_OldVsn, State, _Extra) ->

View File

@ -317,7 +317,9 @@ ensure_ssl_files(Dir, SSL, Opts) ->
ensure_ssl_files(_Dir, SSL, [], _Opts) -> ensure_ssl_files(_Dir, SSL, [], _Opts) ->
{ok, SSL}; {ok, SSL};
ensure_ssl_files(Dir, SSL, [KeyPath | KeyPaths], Opts) -> ensure_ssl_files(Dir, SSL, [KeyPath | KeyPaths], Opts) ->
case ensure_ssl_file(Dir, KeyPath, SSL, emqx_map_lib:deep_get(KeyPath, SSL, undefined), Opts) of case
ensure_ssl_file(Dir, KeyPath, SSL, emqx_utils_maps:deep_get(KeyPath, SSL, undefined), Opts)
of
{ok, NewSSL} -> {ok, NewSSL} ->
ensure_ssl_files(Dir, NewSSL, KeyPaths, Opts); ensure_ssl_files(Dir, NewSSL, KeyPaths, Opts);
{error, Reason} -> {error, Reason} ->
@ -332,7 +334,7 @@ delete_ssl_files(Dir, NewOpts0, OldOpts0) ->
{ok, OldOpts} = ensure_ssl_files(Dir, OldOpts0, #{dry_run => DryRun}), {ok, OldOpts} = ensure_ssl_files(Dir, OldOpts0, #{dry_run => DryRun}),
Get = fun Get = fun
(_KP, undefined) -> undefined; (_KP, undefined) -> undefined;
(KP, Opts) -> emqx_map_lib:deep_get(KP, Opts, undefined) (KP, Opts) -> emqx_utils_maps:deep_get(KP, Opts, undefined)
end, end,
lists:foreach( lists:foreach(
fun(KeyPath) -> delete_old_file(Get(KeyPath, NewOpts), Get(KeyPath, OldOpts)) end, fun(KeyPath) -> delete_old_file(Get(KeyPath, NewOpts), Get(KeyPath, OldOpts)) end,
@ -372,7 +374,7 @@ do_ensure_ssl_file(Dir, KeyPath, SSL, MaybePem, DryRun) ->
true -> true ->
case save_pem_file(Dir, KeyPath, MaybePem, DryRun) of case save_pem_file(Dir, KeyPath, MaybePem, DryRun) of
{ok, Path} -> {ok, Path} ->
NewSSL = emqx_map_lib:deep_put(KeyPath, SSL, Path), NewSSL = emqx_utils_maps:deep_put(KeyPath, SSL, Path),
{ok, NewSSL}; {ok, NewSSL};
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
@ -482,9 +484,9 @@ is_valid_pem_file(Path) ->
%% so they are forced to upload a cert file, or use an existing file path. %% so they are forced to upload a cert file, or use an existing file path.
-spec drop_invalid_certs(map()) -> map(). -spec drop_invalid_certs(map()) -> map().
drop_invalid_certs(#{enable := False} = SSL) when ?IS_FALSE(False) -> drop_invalid_certs(#{enable := False} = SSL) when ?IS_FALSE(False) ->
lists:foldl(fun emqx_map_lib:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS_A); lists:foldl(fun emqx_utils_maps:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS_A);
drop_invalid_certs(#{<<"enable">> := False} = SSL) when ?IS_FALSE(False) -> drop_invalid_certs(#{<<"enable">> := False} = SSL) when ?IS_FALSE(False) ->
lists:foldl(fun emqx_map_lib:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS); lists:foldl(fun emqx_utils_maps:deep_remove/2, SSL, ?SSL_FILE_OPT_PATHS);
drop_invalid_certs(#{enable := True} = SSL) when ?IS_TRUE(True) -> drop_invalid_certs(#{enable := True} = SSL) when ?IS_TRUE(True) ->
do_drop_invalid_certs(?SSL_FILE_OPT_PATHS_A, SSL); do_drop_invalid_certs(?SSL_FILE_OPT_PATHS_A, SSL);
drop_invalid_certs(#{<<"enable">> := True} = SSL) when ?IS_TRUE(True) -> drop_invalid_certs(#{<<"enable">> := True} = SSL) when ?IS_TRUE(True) ->
@ -493,7 +495,7 @@ drop_invalid_certs(#{<<"enable">> := True} = SSL) when ?IS_TRUE(True) ->
do_drop_invalid_certs([], SSL) -> do_drop_invalid_certs([], SSL) ->
SSL; SSL;
do_drop_invalid_certs([KeyPath | KeyPaths], SSL) -> do_drop_invalid_certs([KeyPath | KeyPaths], SSL) ->
case emqx_map_lib:deep_get(KeyPath, SSL, undefined) of case emqx_utils_maps:deep_get(KeyPath, SSL, undefined) of
undefined -> undefined ->
do_drop_invalid_certs(KeyPaths, SSL); do_drop_invalid_certs(KeyPaths, SSL);
PemOrPath -> PemOrPath ->
@ -501,7 +503,7 @@ do_drop_invalid_certs([KeyPath | KeyPaths], SSL) ->
true -> true ->
do_drop_invalid_certs(KeyPaths, SSL); do_drop_invalid_certs(KeyPaths, SSL);
{error, _} -> {error, _} ->
do_drop_invalid_certs(KeyPaths, emqx_map_lib:deep_remove(KeyPath, SSL)) do_drop_invalid_certs(KeyPaths, emqx_utils_maps:deep_remove(KeyPath, SSL))
end end
end. end.
@ -586,7 +588,9 @@ ensure_ssl_file_key(_SSL, []) ->
ok; ok;
ensure_ssl_file_key(SSL, RequiredKeyPaths) -> ensure_ssl_file_key(SSL, RequiredKeyPaths) ->
NotFoundRef = make_ref(), NotFoundRef = make_ref(),
Filter = fun(KeyPath) -> NotFoundRef =:= emqx_map_lib:deep_get(KeyPath, SSL, NotFoundRef) end, Filter = fun(KeyPath) ->
NotFoundRef =:= emqx_utils_maps:deep_get(KeyPath, SSL, NotFoundRef)
end,
case lists:filter(Filter, RequiredKeyPaths) of case lists:filter(Filter, RequiredKeyPaths) of
[] -> ok; [] -> ok;
Miss -> {error, #{reason => ssl_file_option_not_found, which_options => Miss}} Miss -> {error, #{reason => ssl_file_option_not_found, which_options => Miss}}

View File

@ -21,6 +21,7 @@
-include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/logger.hrl").
-include_lib("kernel/include/file.hrl"). -include_lib("kernel/include/file.hrl").
-include_lib("snabbkaffe/include/trace.hrl"). -include_lib("snabbkaffe/include/trace.hrl").
-include_lib("emqx/include/emqx_trace.hrl").
-export([ -export([
publish/1, publish/1,
@ -54,8 +55,6 @@
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]). -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-include("emqx_trace.hrl").
-ifdef(TEST). -ifdef(TEST).
-export([ -export([
log_file/2, log_file/2,
@ -147,7 +146,11 @@ list(Enable) ->
-spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) -> -spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) ->
{ok, #?TRACE{}} {ok, #?TRACE{}}
| {error, {duplicate_condition, iodata()} | {already_existed, iodata()} | iodata()}. | {error,
{duplicate_condition, iodata()}
| {already_existed, iodata()}
| {bad_type, any()}
| iodata()}.
create(Trace) -> create(Trace) ->
case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
true -> true ->
@ -222,14 +225,16 @@ format(Traces) ->
init([]) -> init([]) ->
erlang:process_flag(trap_exit, true), erlang:process_flag(trap_exit, true),
Fields = record_info(fields, ?TRACE),
ok = mria:create_table(?TRACE, [ ok = mria:create_table(?TRACE, [
{type, set}, {type, set},
{rlog_shard, ?SHARD}, {rlog_shard, ?SHARD},
{storage, disc_copies}, {storage, disc_copies},
{record_name, ?TRACE}, {record_name, ?TRACE},
{attributes, record_info(fields, ?TRACE)} {attributes, Fields}
]), ]),
ok = mria:wait_for_tables([?TRACE]), ok = mria:wait_for_tables([?TRACE]),
maybe_migrate_trace(Fields),
{ok, _} = mnesia:subscribe({table, ?TRACE, simple}), {ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
ok = filelib:ensure_dir(filename:join([trace_dir(), dummy])), ok = filelib:ensure_dir(filename:join([trace_dir(), dummy])),
ok = filelib:ensure_dir(filename:join([zip_dir(), dummy])), ok = filelib:ensure_dir(filename:join([zip_dir(), dummy])),
@ -267,7 +272,7 @@ handle_info({timeout, TRef, update_trace}, #{timer := TRef} = State) ->
?tp(update_trace_done, #{}), ?tp(update_trace_done, #{}),
{noreply, State#{timer => NextTRef}}; {noreply, State#{timer => NextTRef}};
handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) -> handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) ->
emqx_misc:cancel_timer(TRef), emqx_utils:cancel_timer(TRef),
handle_info({timeout, TRef, update_trace}, State); handle_info({timeout, TRef, update_trace}, State);
handle_info(Info, State) -> handle_info(Info, State) ->
?SLOG(error, #{unexpected_info => Info}), ?SLOG(error, #{unexpected_info => Info}),
@ -275,7 +280,7 @@ handle_info(Info, State) ->
terminate(_Reason, #{timer := TRef}) -> terminate(_Reason, #{timer := TRef}) ->
_ = mnesia:unsubscribe({table, ?TRACE, simple}), _ = mnesia:unsubscribe({table, ?TRACE, simple}),
emqx_misc:cancel_timer(TRef), emqx_utils:cancel_timer(TRef),
stop_all_trace_handler(), stop_all_trace_handler(),
update_trace_handler(), update_trace_handler(),
_ = file:del_dir_r(zip_dir()), _ = file:del_dir_r(zip_dir()),
@ -297,7 +302,7 @@ update_trace(Traces) ->
ok = stop_trace(NeedStop, Started), ok = stop_trace(NeedStop, Started),
clean_stale_trace_files(), clean_stale_trace_files(),
NextTime = find_closest_time(Traces, Now), NextTime = find_closest_time(Traces, Now),
emqx_misc:start_timer(NextTime, update_trace). emqx_utils:start_timer(NextTime, update_trace).
stop_all_trace_handler() -> stop_all_trace_handler() ->
lists:foreach( lists:foreach(
@ -358,9 +363,10 @@ start_trace(Trace) ->
name = Name, name = Name,
type = Type, type = Type,
filter = Filter, filter = Filter,
start_at = Start start_at = Start,
payload_encode = PayloadEncode
} = Trace, } = Trace,
Who = #{name => Name, type => Type, filter => Filter}, Who = #{name => Name, type => Type, filter => Filter, payload_encode => PayloadEncode},
emqx_trace_handler:install(Who, debug, log_file(Name, Start)). emqx_trace_handler:install(Who, debug, log_file(Name, Start)).
stop_trace(Finished, Started) -> stop_trace(Finished, Started) ->
@ -490,6 +496,8 @@ to_trace(#{type := ip_address, ip_address := Filter} = Trace, Rec) ->
end; end;
to_trace(#{type := Type}, _Rec) -> to_trace(#{type := Type}, _Rec) ->
{error, io_lib:format("required ~s field", [Type])}; {error, io_lib:format("required ~s field", [Type])};
to_trace(#{payload_encode := PayloadEncode} = Trace, Rec) ->
to_trace(maps:remove(payload_encode, Trace), Rec#?TRACE{payload_encode = PayloadEncode});
to_trace(#{start_at := StartAt} = Trace, Rec) -> to_trace(#{start_at := StartAt} = Trace, Rec) ->
{ok, Sec} = to_system_second(StartAt), {ok, Sec} = to_system_second(StartAt),
to_trace(maps:remove(start_at, Trace), Rec#?TRACE{start_at = Sec}); to_trace(maps:remove(start_at, Trace), Rec#?TRACE{start_at = Sec});
@ -573,3 +581,29 @@ filter_cli_handler(Names) ->
now_second() -> now_second() ->
os:system_time(second). os:system_time(second).
maybe_migrate_trace(Fields) ->
case mnesia:table_info(emqx_trace, attributes) =:= Fields of
true ->
ok;
false ->
TransFun = fun(Trace) ->
case Trace of
{?TRACE, Name, Type, Filter, Enable, StartAt, EndAt} ->
#?TRACE{
name = Name,
type = Type,
filter = Filter,
enable = Enable,
start_at = StartAt,
end_at = EndAt,
payload_encode = text,
extra = #{}
};
#?TRACE{} ->
Trace
end
end,
{atomic, ok} = mnesia:transform_table(?TRACE, TransFun, Fields, ?TRACE),
ok
end.
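%% Illustrative sketch, not part of this changeset (field values are made up):
%% an old 7-field record is rewritten by the transform above, while a record
%% that already has the new shape passes through unchanged.
%%   {emqx_trace, <<"t1">>, clientid, <<"c1">>, true, StartAt, EndAt}
%%     -> #emqx_trace{name = <<"t1">>, type = clientid, filter = <<"c1">>,
%%                    enable = true, start_at = StartAt, end_at = EndAt,
%%                    payload_encode = text, extra = #{}}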

View File

@ -44,7 +44,8 @@
-type tracer() :: #{ -type tracer() :: #{
name := binary(), name := binary(),
type := clientid | topic | ip_address, type := clientid | topic | ip_address,
filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address() filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address(),
payload_encode := text | hidden | hex
}. }.
-define(CONFIG(_LogFile_), #{ -define(CONFIG(_LogFile_), #{
@ -70,7 +71,12 @@
LogFilePath :: string() LogFilePath :: string()
) -> ok | {error, term()}. ) -> ok | {error, term()}.
install(Name, Type, Filter, Level, LogFile) -> install(Name, Type, Filter, Level, LogFile) ->
Who = #{type => Type, filter => ensure_bin(Filter), name => ensure_bin(Name)}, Who = #{
type => Type,
filter => ensure_bin(Filter),
name => ensure_bin(Name),
payload_encode => payload_encode()
},
install(Who, Level, LogFile). install(Who, Level, LogFile).
-spec install( -spec install(
@ -160,14 +166,14 @@ filters(#{type := topic, filter := Filter, name := Name}) ->
filters(#{type := ip_address, filter := Filter, name := Name}) -> filters(#{type := ip_address, filter := Filter, name := Name}) ->
[{ip_address, {fun ?MODULE:filter_ip_address/2, {ensure_list(Filter), Name}}}]. [{ip_address, {fun ?MODULE:filter_ip_address/2, {ensure_list(Filter), Name}}}].
formatter(#{type := _Type}) -> formatter(#{type := _Type, payload_encode := PayloadEncode}) ->
{emqx_trace_formatter, #{ {emqx_trace_formatter, #{
%% template is for ?SLOG message not ?TRACE. %% template is for ?SLOG message not ?TRACE.
template => [time, " [", level, "] ", msg, "\n"], template => [time, " [", level, "] ", msg, "\n"],
single_line => true, single_line => true,
max_size => unlimited, max_size => unlimited,
depth => unlimited, depth => unlimited,
payload_encode => payload_encode() payload_encode => PayloadEncode
}}. }}.
filter_traces(#{id := Id, level := Level, dst := Dst, filters := Filters}, Acc) -> filter_traces(#{id := Id, level := Level, dst := Dst, filters := Filters}, Acc) ->
@ -190,7 +196,7 @@ handler_id(Name, Type) ->
do_handler_id(Name, Type) do_handler_id(Name, Type)
catch catch
_:_ -> _:_ ->
Hash = emqx_misc:bin_to_hexstr(crypto:hash(md5, Name), lower), Hash = emqx_utils:bin_to_hexstr(crypto:hash(md5, Name), lower),
do_handler_id(Hash, Type) do_handler_id(Hash, Type)
end. end.

View File

@ -107,7 +107,7 @@ code_change(_OldVsn, State, _Extra) ->
start_check_timer() -> start_check_timer() ->
Interval = emqx:get_config([sysmon, vm, process_check_interval]), Interval = emqx:get_config([sysmon, vm, process_check_interval]),
emqx_misc:start_timer(Interval, check). emqx_utils:start_timer(Interval, check).
usage(Percent) -> usage(Percent) ->
integer_to_list(floor(Percent * 100)) ++ "%". integer_to_list(floor(Percent * 100)) ++ "%".

View File

@ -52,7 +52,7 @@
-export([set_field/3]). -export([set_field/3]).
-import( -import(
emqx_misc, emqx_utils,
[ [
maybe_apply/2, maybe_apply/2,
start_timer/2 start_timer/2
@ -121,8 +121,8 @@
-define(SOCK_STATS, [recv_oct, recv_cnt, send_oct, send_cnt]). -define(SOCK_STATS, [recv_oct, recv_cnt, send_oct, send_cnt]).
-define(ENABLED(X), (X =/= undefined)). -define(ENABLED(X), (X =/= undefined)).
-define(LIMITER_BYTES_IN, bytes_in). -define(LIMITER_BYTES_IN, bytes).
-define(LIMITER_MESSAGE_IN, message_in). -define(LIMITER_MESSAGE_IN, messages).
-dialyzer({no_match, [info/2]}). -dialyzer({no_match, [info/2]}).
-dialyzer({nowarn_function, [websocket_init/1]}). -dialyzer({nowarn_function, [websocket_init/1]}).
@ -172,7 +172,7 @@ stats(WsPid) when is_pid(WsPid) ->
stats(#state{channel = Channel}) -> stats(#state{channel = Channel}) ->
SockStats = emqx_pd:get_counters(?SOCK_STATS), SockStats = emqx_pd:get_counters(?SOCK_STATS),
ChanStats = emqx_channel:stats(Channel), ChanStats = emqx_channel:stats(Channel),
ProcStats = emqx_misc:proc_stats(), ProcStats = emqx_utils:proc_stats(),
lists:append([SockStats, ChanStats, ProcStats]). lists:append([SockStats, ChanStats, ProcStats]).
%% kick|discard|takeover %% kick|discard|takeover
@ -340,7 +340,7 @@ tune_heap_size(Channel) ->
) )
of of
#{enable := false} -> ok; #{enable := false} -> ok;
ShutdownPolicy -> emqx_misc:tune_heap_size(ShutdownPolicy) ShutdownPolicy -> emqx_utils:tune_heap_size(ShutdownPolicy)
end. end.
get_stats_enable(Zone) -> get_stats_enable(Zone) ->
@ -454,7 +454,7 @@ websocket_info(
State = #state{listener = {Type, Listener}} State = #state{listener = {Type, Listener}}
) -> ) ->
ActiveN = get_active_n(Type, Listener), ActiveN = get_active_n(Type, Listener),
Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)], Delivers = [Deliver | emqx_utils:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State); with_channel(handle_deliver, [Delivers], State);
websocket_info( websocket_info(
{timeout, _, limit_timeout}, {timeout, _, limit_timeout},
@ -678,7 +678,7 @@ check_oom(State = #state{channel = Channel}) ->
#{enable := false} -> #{enable := false} ->
State; State;
#{enable := true} -> #{enable := true} ->
case emqx_misc:check_oom(ShutdownPolicy) of case emqx_utils:check_oom(ShutdownPolicy) of
Shutdown = {shutdown, _Reason} -> Shutdown = {shutdown, _Reason} ->
postpone(Shutdown, State); postpone(Shutdown, State);
_Other -> _Other ->
@ -913,7 +913,7 @@ inc_qos_stats_key(_, _) -> undefined.
%% Cancel idle timer %% Cancel idle timer
cancel_idle_timer(State = #state{idle_timer = IdleTimer}) -> cancel_idle_timer(State = #state{idle_timer = IdleTimer}) ->
ok = emqx_misc:cancel_timer(IdleTimer), ok = emqx_utils:cancel_timer(IdleTimer),
State#state{idle_timer = undefined}. State#state{idle_timer = undefined}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -1046,7 +1046,7 @@ check_max_connection(Type, Listener) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
set_field(Name, Value, State) -> set_field(Name, Value, State) ->
Pos = emqx_misc:index_of(Name, record_info(fields, state)), Pos = emqx_utils:index_of(Name, record_info(fields, state)),
setelement(Pos + 1, State, Value). setelement(Pos + 1, State, Value).
%% ensure lowercase letters in headers %% ensure lowercase letters in headers

View File

@ -15,8 +15,10 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-module(emqx_zone_schema). -module(emqx_zone_schema).
-include_lib("typerefl/include/types.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-export([namespace/0, roots/0, fields/1, desc/1]). -export([namespace/0, roots/0, fields/1, desc/1, zone/0, zone_without_hidden/0]).
namespace() -> zone. namespace() -> zone.
@ -33,6 +35,32 @@ roots() ->
"overload_protection" "overload_protection"
]. ].
zone() ->
Fields = roots(),
Hidden = hidden(),
lists:map(
fun(F) ->
case lists:member(F, Hidden) of
true ->
{F, ?HOCON(?R_REF(F), #{importance => ?IMPORTANCE_HIDDEN})};
false ->
{F, ?HOCON(?R_REF(F), #{})}
end
end,
Fields
).
zone_without_hidden() ->
lists:map(fun(F) -> {F, ?HOCON(?R_REF(F), #{})} end, roots() -- hidden()).
hidden() ->
[
"stats",
"overload_protection",
"conn_congestion",
"flapping_detect"
].
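%% Illustrative sketch, not part of this changeset: roots listed in hidden/0
%% still appear in zone/0 (marked with hidden importance), while
%% zone_without_hidden/0 drops them entirely.
%%   lists:keymember("stats", 1, zone())                -> true
%%   lists:keymember("stats", 1, zone_without_hidden()) -> false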
%% zone schemas are clones from the same name from root level %% zone schemas are clones from the same name from root level
%% only not allowed to have default values. %% only not allowed to have default values.
fields(Name) -> fields(Name) ->

View File

@ -0,0 +1,86 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_proto_v2).
-behaviour(emqx_bpapi).
-include("bpapi.hrl").
-export([
introduced_in/0,
are_running/1,
is_running/1,
get_alarms/2,
get_stats/1,
get_metrics/1,
deactivate_alarm/2,
delete_all_deactivated_alarms/1,
clean_authz_cache/1,
clean_authz_cache/2,
clean_pem_cache/1
]).
introduced_in() ->
"5.0.22".
-spec is_running(node()) -> boolean() | {badrpc, term()}.
is_running(Node) ->
rpc:call(Node, emqx, is_running, []).
-spec are_running([node()]) -> emqx_rpc:erpc_multicall(boolean()).
are_running(Nodes) when is_list(Nodes) ->
erpc:multicall(Nodes, emqx, is_running, []).
-spec get_alarms(node(), all | activated | deactivated) -> [map()].
get_alarms(Node, Type) ->
rpc:call(Node, emqx_alarm, get_alarms, [Type]).
-spec get_stats(node()) -> emqx_stats:stats() | {badrpc, _}.
get_stats(Node) ->
rpc:call(Node, emqx_stats, getstats, []).
-spec get_metrics(node()) -> [{emqx_metrics:metric_name(), non_neg_integer()}] | {badrpc, _}.
get_metrics(Node) ->
rpc:call(Node, emqx_metrics, all, []).
-spec clean_authz_cache(node(), emqx_types:clientid()) ->
ok
| {error, not_found}
| {badrpc, _}.
clean_authz_cache(Node, ClientId) ->
rpc:call(Node, emqx_authz_cache, drain_cache, [ClientId]).
-spec clean_authz_cache(node()) -> ok | {badrpc, _}.
clean_authz_cache(Node) ->
rpc:call(Node, emqx_authz_cache, drain_cache, []).
-spec clean_pem_cache(node()) -> ok | {badrpc, _}.
clean_pem_cache(Node) ->
rpc:call(Node, ssl_pem_cache, clear, []).
-spec deactivate_alarm(node(), binary() | atom()) ->
ok | {error, not_found} | {badrpc, _}.
deactivate_alarm(Node, Name) ->
rpc:call(Node, emqx_alarm, deactivate, [Name]).
-spec delete_all_deactivated_alarms(node()) -> ok | {badrpc, _}.
delete_all_deactivated_alarms(Node) ->
rpc:call(Node, emqx_alarm, delete_all_deactivated_alarms, []).
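%% Illustrative sketch, not part of this changeset (the node list is assumed):
%% are_running/1 goes through erpc:multicall/4, so each entry is an erpc result
%% tuple in the same order as the input nodes.
running_nodes_example(Nodes) ->
    Results = emqx_proto_v2:are_running(Nodes),
    [Node || {Node, {ok, true}} <- lists:zip(Nodes, Results)].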

View File

@ -148,6 +148,14 @@ t_run_hook(_) ->
?assertEqual(3, emqx:run_fold_hook(foldl_filter2_hook, [arg], 1)), ?assertEqual(3, emqx:run_fold_hook(foldl_filter2_hook, [arg], 1)),
?assertEqual(2, emqx:run_fold_hook(foldl_filter2_hook, [arg1], 1)). ?assertEqual(2, emqx:run_fold_hook(foldl_filter2_hook, [arg1], 1)).
t_cluster_nodes(_) ->
Expected = [node()],
?assertEqual(Expected, emqx:running_nodes()),
?assertEqual(Expected, emqx:cluster_nodes(running)),
?assertEqual(Expected, emqx:cluster_nodes(all)),
?assertEqual(Expected, emqx:cluster_nodes(cores)),
?assertEqual([], emqx:cluster_nodes(stopped)).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Hook fun %% Hook fun
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -95,13 +95,17 @@ all() ->
emqx_common_test_helpers:all(?MODULE). emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) -> init_per_suite(Config) ->
LogLevel = emqx_logger:get_primary_log_level(),
ok = emqx_logger:set_log_level(debug),
application:set_env(ekka, strict_mode, true), application:set_env(ekka, strict_mode, true),
emqx_common_test_helpers:boot_modules(all), emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]), emqx_common_test_helpers:start_apps([]),
Config. [{log_level, LogLevel} | Config].
end_per_suite(_) -> end_per_suite(Config) ->
emqx_common_test_helpers:stop_apps([]), emqx_common_test_helpers:stop_apps([]),
LogLevel = ?config(log_level),
emqx_logger:set_log_level(LogLevel),
ok. ok.
init_per_testcase(Case, Config) -> init_per_testcase(Case, Config) ->

View File

@ -186,9 +186,8 @@ t_session_taken(_) ->
false false
end end
end, end,
3000 6000
), ),
Publish(), Publish(),
C2 = Connect(), C2 = Connect(),

View File

@ -162,8 +162,7 @@ limiter_conf() ->
Make = fun() -> Make = fun() ->
#{ #{
burst => 0, burst => 0,
rate => infinity, rate => infinity
capacity => infinity
} }
end, end,
@ -172,7 +171,7 @@ limiter_conf() ->
Acc#{Name => Make()} Acc#{Name => Make()}
end, end,
#{}, #{},
[bytes_in, message_in, message_routing, connection, internal] [bytes, messages, message_routing, connection, internal]
). ).
stats_conf() -> stats_conf() ->
@ -1137,7 +1136,7 @@ t_ws_cookie_init(_) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
t_flapping_detect(_) -> t_flapping_detect(_) ->
emqx_config:put_zone_conf(default, [flapping_detect, enable], true), emqx_config:put_zone_conf(default, [flapping_detect, window_time], 60000),
Parent = self(), Parent = self(),
ok = meck:expect( ok = meck:expect(
emqx_cm, emqx_cm,
@ -1258,7 +1257,7 @@ limiter_cfg() ->
Client = #{ Client = #{
rate => 5, rate => 5,
initial => 0, initial => 0,
capacity => 5, burst => 0,
low_watermark => 1, low_watermark => 1,
divisible => false, divisible => false,
max_retry_time => timer:seconds(5), max_retry_time => timer:seconds(5),
@ -1270,7 +1269,7 @@ limiter_cfg() ->
}. }.
bucket_cfg() -> bucket_cfg() ->
#{rate => 10, initial => 0, capacity => 10}. #{rate => 10, initial => 0, burst => 0}.
add_bucket() -> add_bucket() ->
emqx_limiter_server:add_bucket(?MODULE, message_routing, bucket_cfg()). emqx_limiter_server:add_bucket(?MODULE, message_routing, bucket_cfg()).

View File

@ -313,6 +313,7 @@ stop_apps(Apps) ->
ok = emqx_config:delete_override_conf_files(), ok = emqx_config:delete_override_conf_files(),
application:unset_env(emqx, local_override_conf_file), application:unset_env(emqx, local_override_conf_file),
application:unset_env(emqx, cluster_override_conf_file), application:unset_env(emqx, cluster_override_conf_file),
application:unset_env(emqx, cluster_hocon_file),
application:unset_env(gen_rpc, port_discovery), application:unset_env(gen_rpc, port_discovery),
ok. ok.
@ -461,6 +462,10 @@ force_set_config_file_paths(emqx_conf, [Path] = Paths) ->
ok = file:write_file(Path, Bin, [append]), ok = file:write_file(Path, Bin, [append]),
application:set_env(emqx, config_files, Paths); application:set_env(emqx, config_files, Paths);
force_set_config_file_paths(emqx, Paths) -> force_set_config_file_paths(emqx, Paths) ->
%% we need to init the cluster conf, so we can save the cluster conf to the file
application:set_env(emqx, local_override_conf_file, "local_override.conf"),
application:set_env(emqx, cluster_override_conf_file, "cluster_override.conf"),
application:set_env(emqx, cluster_conf_file, "cluster.hocon"),
application:set_env(emqx, config_files, Paths); application:set_env(emqx, config_files, Paths);
force_set_config_file_paths(_, _) -> force_set_config_file_paths(_, _) ->
ok. ok.
@ -476,7 +481,7 @@ copy_certs(_, _) ->
load_config(SchemaModule, Config, Opts) -> load_config(SchemaModule, Config, Opts) ->
ConfigBin = ConfigBin =
case is_map(Config) of case is_map(Config) of
true -> jsx:encode(Config); true -> emqx_utils_json:encode(Config);
false -> Config false -> Config
end, end,
ok = emqx_config:delete_override_conf_files(), ok = emqx_config:delete_override_conf_files(),
@ -675,7 +680,8 @@ start_slave(Name, Opts) when is_map(Opts) ->
] ]
); );
slave -> slave ->
slave:start_link(host(), Name, ebin_path()) Env = " -env HOCON_ENV_OVERRIDE_PREFIX EMQX_",
slave:start_link(host(), Name, ebin_path() ++ Env)
end end
end, end,
case DoStart() of case DoStart() of
@ -748,6 +754,21 @@ setup_node(Node, Opts) when is_map(Opts) ->
%% `emqx_conf' app and correctly catch up the config. %% `emqx_conf' app and correctly catch up the config.
StartAutocluster = maps:get(start_autocluster, Opts, false), StartAutocluster = maps:get(start_autocluster, Opts, false),
ct:pal(
"setting up node ~p:\n ~p",
[
Node,
#{
start_autocluster => StartAutocluster,
load_apps => LoadApps,
apps => Apps,
env => Env,
join_to => JoinTo,
start_apps => StartApps
}
]
),
%% Load env before doing anything to avoid overriding %% Load env before doing anything to avoid overriding
[ok = erpc:call(Node, ?MODULE, load, [App]) || App <- [gen_rpc, ekka, mria, emqx | LoadApps]], [ok = erpc:call(Node, ?MODULE, load, [App]) || App <- [gen_rpc, ekka, mria, emqx | LoadApps]],
@ -772,10 +793,7 @@ setup_node(Node, Opts) when is_map(Opts) ->
end, end,
%% Setting env before starting any applications %% Setting env before starting any applications
[ set_envs(Node, Env),
ok = rpc:call(Node, application, set_env, [Application, Key, Value])
|| {Application, Key, Value} <- Env
],
%% Here we start the apps %% Here we start the apps
EnvHandlerForRpc = EnvHandlerForRpc =
@ -793,8 +811,9 @@ setup_node(Node, Opts) when is_map(Opts) ->
node(), node(),
integer_to_list(erlang:unique_integer()) integer_to_list(erlang:unique_integer())
]), ]),
Cookie = atom_to_list(erlang:get_cookie()),
os:putenv("EMQX_NODE__DATA_DIR", NodeDataDir), os:putenv("EMQX_NODE__DATA_DIR", NodeDataDir),
os:putenv("EMQX_NODE__COOKIE", atom_to_list(erlang:get_cookie())), os:putenv("EMQX_NODE__COOKIE", Cookie),
emqx_config:init_load(SchemaMod), emqx_config:init_load(SchemaMod),
os:unsetenv("EMQX_NODE__DATA_DIR"), os:unsetenv("EMQX_NODE__DATA_DIR"),
os:unsetenv("EMQX_NODE__COOKIE"), os:unsetenv("EMQX_NODE__COOKIE"),
@ -825,7 +844,15 @@ setup_node(Node, Opts) when is_map(Opts) ->
ok; ok;
_ -> _ ->
StartAutocluster andalso StartAutocluster andalso
(ok = rpc:call(Node, emqx_machine_boot, start_autocluster, [])), begin
%% Note: we need to re-set the env because
%% starting the apps apparently makes some of them
%% get lost... This is particularly useful for
%% setting extra apps to be restarted after
%% joining.
set_envs(Node, Env),
ok = erpc:call(Node, emqx_machine_boot, start_autocluster, [])
end,
case rpc:call(Node, ekka, join, [JoinTo]) of case rpc:call(Node, ekka, join, [JoinTo]) of
ok -> ok ->
ok; ok;
@ -882,6 +909,14 @@ merge_opts(Opts1, Opts2) ->
Opts2 Opts2
). ).
set_envs(Node, Env) ->
lists:foreach(
fun({Application, Key, Value}) ->
ok = rpc:call(Node, application, set_env, [Application, Key, Value])
end,
Env
).
erl_flags() -> erl_flags() ->
%% One core and redirecting logs to master %% One core and redirecting logs to master
"+S 1:1 -master " ++ atom_to_list(node()) ++ " " ++ ebin_path(). "+S 1:1 -master " ++ atom_to_list(node()) ++ " " ++ ebin_path().
@ -1006,7 +1041,7 @@ switch_proxy(Switch, Name, ProxyHost, ProxyPort) ->
off -> #{<<"enabled">> => false}; off -> #{<<"enabled">> => false};
on -> #{<<"enabled">> => true} on -> #{<<"enabled">> => true}
end, end,
BodyBin = emqx_json:encode(Body), BodyBin = emqx_utils_json:encode(Body),
{ok, {{_, 200, _}, _, _}} = httpc:request( {ok, {{_, 200, _}, _, _}} = httpc:request(
post, post,
{Url, [], "application/json", BodyBin}, {Url, [], "application/json", BodyBin},
@ -1026,7 +1061,7 @@ timeout_proxy(on, Name, ProxyHost, ProxyPort) ->
<<"toxicity">> => 1.0, <<"toxicity">> => 1.0,
<<"attributes">> => #{<<"timeout">> => 0} <<"attributes">> => #{<<"timeout">> => 0}
}, },
BodyBin = emqx_json:encode(Body), BodyBin = emqx_utils_json:encode(Body),
{ok, {{_, 200, _}, _, _}} = httpc:request( {ok, {{_, 200, _}, _, _}} = httpc:request(
post, post,
{Url, [], "application/json", BodyBin}, {Url, [], "application/json", BodyBin},
@ -1061,7 +1096,7 @@ latency_up_proxy(on, Name, ProxyHost, ProxyPort) ->
<<"jitter">> => 3_000 <<"jitter">> => 3_000
} }
}, },
BodyBin = emqx_json:encode(Body), BodyBin = emqx_utils_json:encode(Body),
{ok, {{_, 200, _}, _, _}} = httpc:request( {ok, {{_, 200, _}, _, _}} = httpc:request(
post, post,
{Url, [], "application/json", BodyBin}, {Url, [], "application/json", BodyBin},

View File

@ -54,7 +54,7 @@ request_api(Method, Url, QueryParams, Auth, Body, HttpOpts) ->
[] -> [] ->
{NewUrl, [Auth]}; {NewUrl, [Auth]};
_ -> _ ->
{NewUrl, [Auth], "application/json", emqx_json:encode(Body)} {NewUrl, [Auth], "application/json", emqx_utils_json:encode(Body)}
end, end,
do_request_api(Method, Request, HttpOpts). do_request_api(Method, Request, HttpOpts).
@ -70,7 +70,7 @@ do_request_api(Method, Request, HttpOpts) ->
end. end.
get_http_data(ResponseBody) -> get_http_data(ResponseBody) ->
emqx_json:decode(ResponseBody, [return_maps]). emqx_utils_json:decode(ResponseBody, [return_maps]).
auth_header(User, Pass) -> auth_header(User, Pass) ->
Encoded = base64:encode_to_string(lists:append([User, ":", Pass])), Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),

View File

@ -57,5 +57,5 @@ t_fill_default_values(_) ->
WithDefaults WithDefaults
), ),
%% ensure JSON compatible %% ensure JSON compatible
_ = emqx_json:encode(WithDefaults), _ = emqx_utils_json:encode(WithDefaults),
ok. ok.

View File

@ -21,8 +21,7 @@
-define(MOD, {mod}). -define(MOD, {mod}).
-define(WKEY, '?'). -define(WKEY, '?').
-define(LOCAL_CONF, "/tmp/local-override.conf"). -define(CLUSTER_CONF, "/tmp/cluster.conf").
-define(CLUSTER_CONF, "/tmp/cluster-override.conf").
-include_lib("eunit/include/eunit.hrl"). -include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl"). -include_lib("common_test/include/ct.hrl").
@ -38,7 +37,6 @@ end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([]). emqx_common_test_helpers:stop_apps([]).
init_per_testcase(_Case, Config) -> init_per_testcase(_Case, Config) ->
_ = file:delete(?LOCAL_CONF),
_ = file:delete(?CLUSTER_CONF), _ = file:delete(?CLUSTER_CONF),
Config. Config.
@ -200,62 +198,6 @@ t_sub_key_update_remove(_Config) ->
ok = emqx_config_handler:remove_handler(KeyPath2), ok = emqx_config_handler:remove_handler(KeyPath2),
ok. ok.
t_local_override_update_remove(_Config) ->
application:set_env(emqx, local_override_conf_file, ?LOCAL_CONF),
application:set_env(emqx, cluster_override_conf_file, ?CLUSTER_CONF),
KeyPath = [sysmon, os, cpu_high_watermark],
ok = emqx_config_handler:add_handler(KeyPath, ?MODULE),
LocalOpts = #{override_to => local},
{ok, Res} = emqx:update_config(KeyPath, <<"70%">>, LocalOpts),
?assertMatch(
#{
config := 0.7,
post_config_update := #{},
raw_config := <<"70%">>
},
Res
),
ClusterOpts = #{override_to => cluster},
?assertMatch(
{error, {permission_denied, _}}, emqx:update_config(KeyPath, <<"71%">>, ClusterOpts)
),
?assertMatch(0.7, emqx:get_config(KeyPath)),
KeyPath2 = [sysmon, os, cpu_low_watermark],
ok = emqx_config_handler:add_handler(KeyPath2, ?MODULE),
?assertMatch(
{error, {permission_denied, _}}, emqx:update_config(KeyPath2, <<"40%">>, ClusterOpts)
),
%% remove
?assertMatch({error, {permission_denied, _}}, emqx:remove_config(KeyPath)),
?assertEqual(
{ok, #{post_config_update => #{}}},
emqx:remove_config(KeyPath, #{override_to => local})
),
?assertEqual(
{ok, #{post_config_update => #{}}},
emqx:remove_config(KeyPath)
),
?assertError({config_not_found, KeyPath}, emqx:get_raw_config(KeyPath)),
OSKey = maps:keys(emqx:get_raw_config([sysmon, os])),
?assertEqual(false, lists:member(<<"cpu_high_watermark">>, OSKey)),
?assert(length(OSKey) > 0),
?assertEqual(
{ok, #{config => 0.8, post_config_update => #{}, raw_config => <<"80%">>}},
emqx:reset_config(KeyPath, ClusterOpts)
),
OSKey1 = maps:keys(emqx:get_raw_config([sysmon, os])),
?assertEqual(true, lists:member(<<"cpu_high_watermark">>, OSKey1)),
?assert(length(OSKey1) > 1),
ok = emqx_config_handler:remove_handler(KeyPath),
ok = emqx_config_handler:remove_handler(KeyPath2),
application:unset_env(emqx, local_override_conf_file),
application:unset_env(emqx, cluster_override_conf_file),
ok.
t_check_failed(_Config) -> t_check_failed(_Config) ->
KeyPath = [sysmon, os, cpu_check_interval], KeyPath = [sysmon, os, cpu_check_interval],
Opts = #{rawconf_with_defaults => true}, Opts = #{rawconf_with_defaults => true},
@ -426,9 +368,9 @@ wait_for_new_pid() ->
callback_error(FailedPath, Update, Error) -> callback_error(FailedPath, Update, Error) ->
Opts = #{rawconf_with_defaults => true}, Opts = #{rawconf_with_defaults => true},
ok = emqx_config_handler:add_handler(FailedPath, ?MODULE), ok = emqx_config_handler:add_handler(FailedPath, ?MODULE),
Old = emqx:get_raw_config(FailedPath), Old = emqx:get_raw_config(FailedPath, undefined),
?assertEqual(Error, emqx:update_config(FailedPath, Update, Opts)), ?assertEqual(Error, emqx:update_config(FailedPath, Update, Opts)),
New = emqx:get_raw_config(FailedPath), New = emqx:get_raw_config(FailedPath, undefined),
?assertEqual(Old, New), ?assertEqual(Old, New),
ok = emqx_config_handler:remove_handler(FailedPath), ok = emqx_config_handler:remove_handler(FailedPath),
ok. ok.

View File

@ -427,7 +427,7 @@ t_ensure_rate_limit(_) ->
fun(_, Client) -> {pause, 3000, undefined, Client} end fun(_, Client) -> {pause, 3000, undefined, Client} end
), ),
{ok, State2} = emqx_connection:check_limiter( {ok, State2} = emqx_connection:check_limiter(
[{1000, bytes_in}], [{1000, bytes}],
[], [],
WhenOk, WhenOk,
[], [],
@ -496,16 +496,16 @@ t_get_conn_info(_) ->
t_oom_shutdown(init, Config) -> t_oom_shutdown(init, Config) ->
ok = snabbkaffe:start_trace(), ok = snabbkaffe:start_trace(),
ok = meck:new(emqx_misc, [non_strict, passthrough, no_history, no_link]), ok = meck:new(emqx_utils, [non_strict, passthrough, no_history, no_link]),
meck:expect( meck:expect(
emqx_misc, emqx_utils,
check_oom, check_oom,
fun(_) -> {shutdown, "fake_oom"} end fun(_) -> {shutdown, "fake_oom"} end
), ),
Config; Config;
t_oom_shutdown('end', _Config) -> t_oom_shutdown('end', _Config) ->
snabbkaffe:stop(), snabbkaffe:stop(),
meck:unload(emqx_misc), meck:unload(emqx_utils),
ok. ok.
t_oom_shutdown(_) -> t_oom_shutdown(_) ->
@ -703,31 +703,29 @@ handle_call(Pid, Call, St) -> emqx_connection:handle_call(Pid, Call, St).
-define(LIMITER_ID, 'tcp:default'). -define(LIMITER_ID, 'tcp:default').
init_limiter() -> init_limiter() ->
emqx_limiter_container:get_limiter_by_types(?LIMITER_ID, [bytes_in, message_in], limiter_cfg()). emqx_limiter_container:get_limiter_by_types(?LIMITER_ID, [bytes, messages], limiter_cfg()).
limiter_cfg() -> limiter_cfg() ->
Infinity = emqx_limiter_schema:infinity_value(),
Cfg = bucket_cfg(), Cfg = bucket_cfg(),
Client = #{ Client = #{
rate => Infinity, rate => infinity,
initial => 0, initial => 0,
capacity => Infinity, burst => 0,
low_watermark => 1, low_watermark => 1,
divisible => false, divisible => false,
max_retry_time => timer:seconds(5), max_retry_time => timer:seconds(5),
failure_strategy => force failure_strategy => force
}, },
#{bytes_in => Cfg, message_in => Cfg, client => #{bytes_in => Client, message_in => Client}}. #{bytes => Cfg, messages => Cfg, client => #{bytes => Client, messages => Client}}.
bucket_cfg() -> bucket_cfg() ->
Infinity = emqx_limiter_schema:infinity_value(), #{rate => infinity, initial => 0, burst => 0}.
#{rate => Infinity, initial => 0, capacity => Infinity}.
add_bucket() -> add_bucket() ->
Cfg = bucket_cfg(), Cfg = bucket_cfg(),
emqx_limiter_server:add_bucket(?LIMITER_ID, bytes_in, Cfg), emqx_limiter_server:add_bucket(?LIMITER_ID, bytes, Cfg),
emqx_limiter_server:add_bucket(?LIMITER_ID, message_in, Cfg). emqx_limiter_server:add_bucket(?LIMITER_ID, messages, Cfg).
del_bucket() -> del_bucket() ->
emqx_limiter_server:del_bucket(?LIMITER_ID, bytes_in), emqx_limiter_server:del_bucket(?LIMITER_ID, bytes),
emqx_limiter_server:del_bucket(?LIMITER_ID, message_in). emqx_limiter_server:del_bucket(?LIMITER_ID, messages).

View File

@ -402,7 +402,7 @@ request(Method, Url, QueryParams, Body) ->
Opts = #{return_all => true}, Opts = #{return_all => true},
case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of
{ok, {Reason, Headers, BodyR}} -> {ok, {Reason, Headers, BodyR}} ->
{ok, {Reason, Headers, emqx_json:decode(BodyR, [return_maps])}}; {ok, {Reason, Headers, emqx_utils_json:decode(BodyR, [return_maps])}};
Error -> Error ->
Error Error
end. end.
@ -997,7 +997,7 @@ do_t_update_listener(Config) ->
<<"enable_crl_check">> => true <<"enable_crl_check">> => true
} }
}, },
ListenerData1 = emqx_map_lib:deep_merge(ListenerData0, CRLConfig), ListenerData1 = emqx_utils_maps:deep_merge(ListenerData0, CRLConfig),
{ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1), {ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1),
?assertMatch( ?assertMatch(
#{ #{
@ -1040,7 +1040,7 @@ do_t_validations(_Config) ->
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId), {ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
ListenerData1 = ListenerData1 =
emqx_map_lib:deep_merge( emqx_utils_maps:deep_merge(
ListenerData0, ListenerData0,
#{ #{
<<"ssl_options">> => <<"ssl_options">> =>
@ -1052,7 +1052,7 @@ do_t_validations(_Config) ->
), ),
{error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1), {error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1),
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} = #{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} =
emqx_json:decode(ResRaw1, [return_maps]), emqx_utils_json:decode(ResRaw1, [return_maps]),
?assertMatch( ?assertMatch(
#{ #{
<<"mismatches">> := <<"mismatches">> :=
@ -1064,7 +1064,7 @@ do_t_validations(_Config) ->
} }
} }
}, },
emqx_json:decode(MsgRaw1, [return_maps]) emqx_utils_json:decode(MsgRaw1, [return_maps])
), ),
ok. ok.

View File

@ -101,3 +101,21 @@ t_expired_detecting(_) ->
ets:tab2list(emqx_flapping) ets:tab2list(emqx_flapping)
) )
). ).
t_conf_without_window_time(_) ->
%% enable is deprecated, so we need to make sure it won't be used.
Global = emqx_config:get([flapping_detect]),
?assertNot(maps:is_key(enable, Global)),
%% zones have no default values, so we need to make sure they fall back to the global conf.
%% this new_zone will fall back to the global conf.
emqx_config:put_zone_conf(new_zone, [flapping_detect], #{}),
?assertEqual(Global, get_policy(new_zone)),
emqx_config:put_zone_conf(new_zone_1, [flapping_detect], #{window_time => 100}),
?assertEqual(100, emqx_flapping:get_policy(window_time, new_zone_1)),
?assertEqual(maps:get(ban_time, Global), emqx_flapping:get_policy(ban_time, new_zone_1)),
?assertEqual(maps:get(max_count, Global), emqx_flapping:get_policy(max_count, new_zone_1)),
ok.
get_policy(Zone) ->
emqx_flapping:get_policy([window_time, ban_time, max_count], Zone).
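The new test above relies on per-zone flapping_detect settings being sparse overrides: any key a zone leaves unset falls back to the global value. A usage sketch with a placeholder zone name and illustrative values:

    %% my_zone is hypothetical; window_time is given in milliseconds here.
    emqx_config:put_zone_conf(my_zone, [flapping_detect], #{window_time => 60000}),
    60000 = emqx_flapping:get_policy(window_time, my_zone),
    %% ban_time was not set for the zone, so the global value applies:
    GlobalBan = maps:get(ban_time, emqx_config:get([flapping_detect])),
    GlobalBan = emqx_flapping:get_policy(ban_time, my_zone).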


@ -143,7 +143,7 @@ init_per_testcase(t_ocsp_responder_error_responses, Config) ->
} }
}, },
Conf = #{listeners => #{Type => #{Name => ListenerOpts}}}, Conf = #{listeners => #{Type => #{Name => ListenerOpts}}},
ConfBin = emqx_map_lib:binary_key_map(Conf), ConfBin = emqx_utils_maps:binary_key_map(Conf),
hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}), hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}),
emqx_config:put_listener_conf(Type, Name, [], ListenerOpts), emqx_config:put_listener_conf(Type, Name, [], ListenerOpts),
snabbkaffe:start_trace(), snabbkaffe:start_trace(),
@ -184,7 +184,7 @@ init_per_testcase(_TestCase, Config) ->
} }
}, },
Conf = #{listeners => #{Type => #{Name => ListenerOpts}}}, Conf = #{listeners => #{Type => #{Name => ListenerOpts}}},
ConfBin = emqx_map_lib:binary_key_map(Conf), ConfBin = emqx_utils_maps:binary_key_map(Conf),
hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}), hocon_tconf:check_plain(emqx_schema, ConfBin, #{required => false, atom_keys => false}),
emqx_config:put_listener_conf(Type, Name, [], ListenerOpts), emqx_config:put_listener_conf(Type, Name, [], ListenerOpts),
snabbkaffe:start_trace(), snabbkaffe:start_trace(),
@ -430,7 +430,7 @@ request(Method, Url, QueryParams, Body) ->
Opts = #{return_all => true}, Opts = #{return_all => true},
case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of case emqx_mgmt_api_test_util:request_api(Method, Url, QueryParams, AuthHeader, Body, Opts) of
{ok, {Reason, Headers, BodyR}} -> {ok, {Reason, Headers, BodyR}} ->
{ok, {Reason, Headers, emqx_json:decode(BodyR, [return_maps])}}; {ok, {Reason, Headers, emqx_utils_json:decode(BodyR, [return_maps])}};
Error -> Error ->
Error Error
end. end.
@ -677,9 +677,13 @@ do_t_update_listener(Config) ->
%% no ocsp at first %% no ocsp at first
ListenerId = "ssl:default", ListenerId = "ssl:default",
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId), {ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
?assertEqual( ?assertMatch(
undefined, #{
emqx_map_lib:deep_get([<<"ssl_options">>, <<"ocsp">>], ListenerData0, undefined) <<"enable_ocsp_stapling">> := false,
<<"refresh_http_timeout">> := _,
<<"refresh_interval">> := _
},
emqx_utils_maps:deep_get([<<"ssl_options">>, <<"ocsp">>], ListenerData0, undefined)
), ),
assert_no_http_get(), assert_no_http_get(),
@ -702,7 +706,7 @@ do_t_update_listener(Config) ->
} }
} }
}, },
ListenerData1 = emqx_map_lib:deep_merge(ListenerData0, OCSPConfig), ListenerData1 = emqx_utils_maps:deep_merge(ListenerData0, OCSPConfig),
{ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1), {ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1),
?assertMatch( ?assertMatch(
#{ #{
@ -722,14 +726,14 @@ do_t_update_listener(Config) ->
%% location %% location
?assertNotEqual( ?assertNotEqual(
IssuerPemPath, IssuerPemPath,
emqx_map_lib:deep_get( emqx_utils_maps:deep_get(
[<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>], [<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>],
ListenerData2 ListenerData2
) )
), ),
?assertNotEqual( ?assertNotEqual(
IssuerPem, IssuerPem,
emqx_map_lib:deep_get( emqx_utils_maps:deep_get(
[<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>], [<<"ssl_options">>, <<"ocsp">>, <<"issuer_pem">>],
ListenerData2 ListenerData2
) )
@ -818,7 +822,7 @@ do_t_validations(_Config) ->
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId), {ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
ListenerData1 = ListenerData1 =
emqx_map_lib:deep_merge( emqx_utils_maps:deep_merge(
ListenerData0, ListenerData0,
#{ #{
<<"ssl_options">> => <<"ssl_options">> =>
@ -827,7 +831,7 @@ do_t_validations(_Config) ->
), ),
{error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1), {error, {_, _, ResRaw1}} = update_listener_via_api(ListenerId, ListenerData1),
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} = #{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw1} =
emqx_json:decode(ResRaw1, [return_maps]), emqx_utils_json:decode(ResRaw1, [return_maps]),
?assertMatch( ?assertMatch(
#{ #{
<<"mismatches">> := <<"mismatches">> :=
@ -839,11 +843,11 @@ do_t_validations(_Config) ->
} }
} }
}, },
emqx_json:decode(MsgRaw1, [return_maps]) emqx_utils_json:decode(MsgRaw1, [return_maps])
), ),
ListenerData2 = ListenerData2 =
emqx_map_lib:deep_merge( emqx_utils_maps:deep_merge(
ListenerData0, ListenerData0,
#{ #{
<<"ssl_options">> => <<"ssl_options">> =>
@ -857,7 +861,7 @@ do_t_validations(_Config) ->
), ),
{error, {_, _, ResRaw2}} = update_listener_via_api(ListenerId, ListenerData2), {error, {_, _, ResRaw2}} = update_listener_via_api(ListenerId, ListenerData2),
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw2} = #{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw2} =
emqx_json:decode(ResRaw2, [return_maps]), emqx_utils_json:decode(ResRaw2, [return_maps]),
?assertMatch( ?assertMatch(
#{ #{
<<"mismatches">> := <<"mismatches">> :=
@ -869,11 +873,11 @@ do_t_validations(_Config) ->
} }
} }
}, },
emqx_json:decode(MsgRaw2, [return_maps]) emqx_utils_json:decode(MsgRaw2, [return_maps])
), ),
ListenerData3a = ListenerData3a =
emqx_map_lib:deep_merge( emqx_utils_maps:deep_merge(
ListenerData0, ListenerData0,
#{ #{
<<"ssl_options">> => <<"ssl_options">> =>
@ -886,10 +890,12 @@ do_t_validations(_Config) ->
} }
} }
), ),
ListenerData3 = emqx_map_lib:deep_remove([<<"ssl_options">>, <<"certfile">>], ListenerData3a), ListenerData3 = emqx_utils_maps:deep_remove(
[<<"ssl_options">>, <<"certfile">>], ListenerData3a
),
{error, {_, _, ResRaw3}} = update_listener_via_api(ListenerId, ListenerData3), {error, {_, _, ResRaw3}} = update_listener_via_api(ListenerId, ListenerData3),
#{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw3} = #{<<"code">> := <<"BAD_REQUEST">>, <<"message">> := MsgRaw3} =
emqx_json:decode(ResRaw3, [return_maps]), emqx_utils_json:decode(ResRaw3, [return_maps]),
?assertMatch( ?assertMatch(
#{ #{
<<"mismatches">> := <<"mismatches">> :=
@ -901,7 +907,7 @@ do_t_validations(_Config) ->
} }
} }
}, },
emqx_json:decode(MsgRaw3, [return_maps]) emqx_utils_json:decode(MsgRaw3, [return_maps])
), ),
ok. ok.


@ -72,7 +72,7 @@ t_consume(_) ->
Cfg = fun(Cfg) -> Cfg = fun(Cfg) ->
Cfg#{ Cfg#{
rate := 100, rate := 100,
capacity := 100, burst := 0,
initial := 100, initial := 100,
max_retry_time := 1000, max_retry_time := 1000,
failure_strategy := force failure_strategy := force
@ -89,7 +89,7 @@ t_retry(_) ->
Cfg = fun(Cfg) -> Cfg = fun(Cfg) ->
Cfg#{ Cfg#{
rate := 50, rate := 50,
capacity := 200, burst := 150,
initial := 0, initial := 0,
max_retry_time := 1000, max_retry_time := 1000,
failure_strategy := force failure_strategy := force
@ -109,7 +109,7 @@ t_restore(_) ->
Cfg = fun(Cfg) -> Cfg = fun(Cfg) ->
Cfg#{ Cfg#{
rate := 1, rate := 1,
capacity := 200, burst := 199,
initial := 50, initial := 50,
max_retry_time := 100, max_retry_time := 100,
failure_strategy := force failure_strategy := force
@ -129,7 +129,7 @@ t_max_retry_time(_) ->
Cfg = fun(Cfg) -> Cfg = fun(Cfg) ->
Cfg#{ Cfg#{
rate := 1, rate := 1,
capacity := 1, burst := 0,
max_retry_time := 500, max_retry_time := 500,
failure_strategy := drop failure_strategy := drop
} }
@ -139,8 +139,12 @@ t_max_retry_time(_) ->
Begin = ?NOW, Begin = ?NOW,
Result = emqx_htb_limiter:consume(101, Client), Result = emqx_htb_limiter:consume(101, Client),
?assertMatch({drop, _}, Result), ?assertMatch({drop, _}, Result),
Time = ?NOW - Begin, End = ?NOW,
?assert(Time >= 500 andalso Time < 550) Time = End - Begin,
?assert(
Time >= 500 andalso Time < 550,
lists:flatten(io_lib:format("Begin:~p, End:~p, Time:~p~n", [Begin, End, Time]))
)
end, end,
with_per_client(Cfg, Case). with_per_client(Cfg, Case).
@ -150,7 +154,7 @@ t_divisible(_) ->
divisible := true, divisible := true,
rate := ?RATE("1000/1s"), rate := ?RATE("1000/1s"),
initial := 600, initial := 600,
capacity := 600 burst := 0
} }
end, end,
Case = fun(BucketCfg) -> Case = fun(BucketCfg) ->
@ -176,7 +180,7 @@ t_low_watermark(_) ->
low_watermark := 400, low_watermark := 400,
rate := ?RATE("1000/1s"), rate := ?RATE("1000/1s"),
initial := 1000, initial := 1000,
capacity := 1000 burst := 0
} }
end, end,
Case = fun(BucketCfg) -> Case = fun(BucketCfg) ->
@ -201,8 +205,7 @@ t_infinity_client(_) ->
Fun = fun(Cfg) -> Cfg end, Fun = fun(Cfg) -> Cfg end,
Case = fun(Cfg) -> Case = fun(Cfg) ->
Client = connect(Cfg), Client = connect(Cfg),
InfVal = emqx_limiter_schema:infinity_value(), ?assertMatch(infinity, Client),
?assertMatch(#{bucket := #{rate := InfVal}}, Client),
Result = emqx_htb_limiter:check(100000, Client), Result = emqx_htb_limiter:check(100000, Client),
?assertEqual({ok, Client}, Result) ?assertEqual({ok, Client}, Result)
end, end,
@ -212,12 +215,12 @@ t_try_restore_agg(_) ->
Fun = fun(#{client := Cli} = Bucket) -> Fun = fun(#{client := Cli} = Bucket) ->
Bucket2 = Bucket#{ Bucket2 = Bucket#{
rate := 1, rate := 1,
capacity := 200, burst := 199,
initial := 50 initial := 50
}, },
Cli2 = Cli#{ Cli2 = Cli#{
rate := infinity, rate := infinity,
capacity := infinity, burst := 0,
divisible := true, divisible := true,
max_retry_time := 100, max_retry_time := 100,
failure_strategy := force failure_strategy := force
@ -239,11 +242,11 @@ t_short_board(_) ->
Bucket2 = Bucket#{ Bucket2 = Bucket#{
rate := ?RATE("100/1s"), rate := ?RATE("100/1s"),
initial := 0, initial := 0,
capacity := 100 burst := 0
}, },
Cli2 = Cli#{ Cli2 = Cli#{
rate := ?RATE("600/1s"), rate := ?RATE("600/1s"),
capacity := 600, burst := 0,
initial := 600 initial := 600
}, },
Bucket2#{client := Cli2} Bucket2#{client := Cli2}
@ -261,46 +264,45 @@ t_rate(_) ->
Bucket2 = Bucket#{ Bucket2 = Bucket#{
rate := ?RATE("100/100ms"), rate := ?RATE("100/100ms"),
initial := 0, initial := 0,
capacity := infinity burst := 0
}, },
Cli2 = Cli#{ Cli2 = Cli#{
rate := infinity, rate := infinity,
capacity := infinity, burst := 0,
initial := 0 initial := 0
}, },
Bucket2#{client := Cli2} Bucket2#{client := Cli2}
end, end,
Case = fun(Cfg) -> Case = fun(Cfg) ->
Time = 1000,
Client = connect(Cfg), Client = connect(Cfg),
Ts1 = erlang:system_time(millisecond),
C1 = emqx_htb_limiter:available(Client), C1 = emqx_htb_limiter:available(Client),
timer:sleep(1000), timer:sleep(1100),
Ts2 = erlang:system_time(millisecond),
C2 = emqx_htb_limiter:available(Client), C2 = emqx_htb_limiter:available(Client),
ShouldInc = floor((Ts2 - Ts1) / 100) * 100, ShouldInc = floor(Time / 100) * 100,
Inc = C2 - C1, Inc = C2 - C1,
?assert(in_range(Inc, ShouldInc - 100, ShouldInc + 100), "test bucket rate") ?assert(in_range(Inc, ShouldInc - 100, ShouldInc + 100), "test bucket rate")
end, end,
with_bucket(Fun, Case). with_bucket(Fun, Case).
t_capacity(_) -> t_capacity(_) ->
Capacity = 600, Capacity = 1200,
Fun = fun(#{client := Cli} = Bucket) -> Fun = fun(#{client := Cli} = Bucket) ->
Bucket2 = Bucket#{ Bucket2 = Bucket#{
rate := ?RATE("100/100ms"), rate := ?RATE("100/100ms"),
initial := 0, initial := 0,
capacity := 600 burst := 200
}, },
Cli2 = Cli#{ Cli2 = Cli#{
rate := infinity, rate := infinity,
capacity := infinity, burst := 0,
initial := 0 initial := 0
}, },
Bucket2#{client := Cli2} Bucket2#{client := Cli2}
end, end,
Case = fun(Cfg) -> Case = fun(Cfg) ->
Client = connect(Cfg), Client = connect(Cfg),
timer:sleep(1000), timer:sleep(1500),
C1 = emqx_htb_limiter:available(Client), C1 = emqx_htb_limiter:available(Client),
?assertEqual(Capacity, C1, "test bucket capacity") ?assertEqual(Capacity, C1, "test bucket capacity")
end, end,
@ -318,11 +320,11 @@ t_collaborative_alloc(_) ->
Bucket2 = Bucket#{ Bucket2 = Bucket#{
rate := ?RATE("400/1s"), rate := ?RATE("400/1s"),
initial := 0, initial := 0,
capacity := 600 burst := 200
}, },
Cli2 = Cli#{ Cli2 = Cli#{
rate := ?RATE("50"), rate := ?RATE("50"),
capacity := 100, burst := 50,
initial := 100 initial := 100
}, },
Bucket2#{client := Cli2} Bucket2#{client := Cli2}
@ -363,11 +365,11 @@ t_burst(_) ->
Bucket2 = Bucket#{ Bucket2 = Bucket#{
rate := ?RATE("200/1s"), rate := ?RATE("200/1s"),
initial := 0, initial := 0,
capacity := 200 burst := 0
}, },
Cli2 = Cli#{ Cli2 = Cli#{
rate := ?RATE("50/1s"), rate := ?RATE("50/1s"),
capacity := 200, burst := 150,
divisible := true divisible := true
}, },
Bucket2#{client := Cli2} Bucket2#{client := Cli2}
@ -401,11 +403,11 @@ t_limit_global_with_unlimit_other(_) ->
Bucket2 = Bucket#{ Bucket2 = Bucket#{
rate := infinity, rate := infinity,
initial := 0, initial := 0,
capacity := infinity burst := 0
}, },
Cli2 = Cli#{ Cli2 = Cli#{
rate := infinity, rate := infinity,
capacity := infinity, burst := 0,
initial := 0 initial := 0
}, },
Bucket2#{client := Cli2} Bucket2#{client := Cli2}
@ -414,7 +416,7 @@ t_limit_global_with_unlimit_other(_) ->
Case = fun() -> Case = fun() ->
C1 = counters:new(1, []), C1 = counters:new(1, []),
start_client({b1, Bucket}, ?NOW + 2000, C1, 20), start_client({b1, Bucket}, ?NOW + 2000, C1, 20),
timer:sleep(2100), timer:sleep(2200),
check_average_rate(C1, 2, 600) check_average_rate(C1, 2, 600)
end, end,
@ -432,7 +434,7 @@ t_check_container(_) ->
Cfg#{ Cfg#{
rate := ?RATE("1000/1s"), rate := ?RATE("1000/1s"),
initial := 1000, initial := 1000,
capacity := 1000 burst := 0
} }
end, end,
Case = fun(#{client := Client} = BucketCfg) -> Case = fun(#{client := Client} = BucketCfg) ->
@ -565,13 +567,73 @@ t_schema_unit(_) ->
?assertMatch({error, _}, M:to_rate("100MB/1")), ?assertMatch({error, _}, M:to_rate("100MB/1")),
?assertMatch({error, _}, M:to_rate("100/10x")), ?assertMatch({error, _}, M:to_rate("100/10x")),
?assertEqual({ok, emqx_limiter_schema:infinity_value()}, M:to_capacity("infinity")), ?assertEqual({ok, infinity}, M:to_capacity("infinity")),
?assertEqual({ok, 100}, M:to_capacity("100")), ?assertEqual({ok, 100}, M:to_capacity("100")),
?assertEqual({ok, 100 * 1024}, M:to_capacity("100KB")), ?assertEqual({ok, 100 * 1024}, M:to_capacity("100KB")),
?assertEqual({ok, 100 * 1024 * 1024}, M:to_capacity("100MB")), ?assertEqual({ok, 100 * 1024 * 1024}, M:to_capacity("100MB")),
?assertEqual({ok, 100 * 1024 * 1024 * 1024}, M:to_capacity("100GB")), ?assertEqual({ok, 100 * 1024 * 1024 * 1024}, M:to_capacity("100GB")),
ok. ok.
compatibility_for_capacity(_) ->
CfgStr = <<
""
"\n"
"listeners.tcp.default {\n"
" bind = \"0.0.0.0:1883\"\n"
" max_connections = 1024000\n"
" limiter.messages.capacity = infinity\n"
" limiter.client.messages.capacity = infinity\n"
"}\n"
""
>>,
?assertMatch(
#{
messages := #{burst := 0},
client := #{messages := #{burst := 0}}
},
parse_and_check(CfgStr)
).
compatibility_for_message_in(_) ->
CfgStr = <<
""
"\n"
"listeners.tcp.default {\n"
" bind = \"0.0.0.0:1883\"\n"
" max_connections = 1024000\n"
" limiter.message_in.rate = infinity\n"
" limiter.client.message_in.rate = infinity\n"
"}\n"
""
>>,
?assertMatch(
#{
messages := #{rate := infinity},
client := #{messages := #{rate := infinity}}
},
parse_and_check(CfgStr)
).
compatibility_for_bytes_in(_) ->
CfgStr = <<
""
"\n"
"listeners.tcp.default {\n"
" bind = \"0.0.0.0:1883\"\n"
" max_connections = 1024000\n"
" limiter.bytes_in.rate = infinity\n"
" limiter.client.bytes_in.rate = infinity\n"
"}\n"
""
>>,
?assertMatch(
#{
bytes := #{rate := infinity},
client := #{bytes := #{rate := infinity}}
},
parse_and_check(CfgStr)
).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%%% Internal functions %%% Internal functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -748,17 +810,16 @@ connect(Name, Cfg) ->
Limiter. Limiter.
make_limiter_cfg() -> make_limiter_cfg() ->
Infinity = emqx_limiter_schema:infinity_value(),
Client = #{ Client = #{
rate => Infinity, rate => infinity,
initial => 0, initial => 0,
capacity => Infinity, burst => 0,
low_watermark => 0, low_watermark => 0,
divisible => false, divisible => false,
max_retry_time => timer:seconds(5), max_retry_time => timer:seconds(5),
failure_strategy => force failure_strategy => force
}, },
#{client => Client, rate => Infinity, initial => 0, capacity => Infinity}. #{client => Client, rate => infinity, initial => 0, burst => 0}.
add_bucket(Cfg) -> add_bucket(Cfg) ->
add_bucket(?MODULE, Cfg). add_bucket(?MODULE, Cfg).
@ -812,3 +873,7 @@ apply_modifier(Pairs, #{default := Template}) ->
Acc#{N => M(Template)} Acc#{N => M(Template)}
end, end,
lists:foldl(Fun, #{}, Pairs). lists:foldl(Fun, #{}, Pairs).
parse_and_check(ConfigString) ->
ok = emqx_common_test_helpers:load_config(emqx_schema, ConfigString),
emqx:get_config([listeners, tcp, default, limiter]).


@ -119,7 +119,7 @@ t_has_routes(_) ->
?R:delete_route(<<"devices/+/messages">>). ?R:delete_route(<<"devices/+/messages">>).
t_unexpected(_) -> t_unexpected(_) ->
Router = emqx_misc:proc_name(?R, 1), Router = emqx_utils:proc_name(?R, 1),
?assertEqual(ignored, gen_server:call(Router, bad_request)), ?assertEqual(ignored, gen_server:call(Router, bad_request)),
?assertEqual(ok, gen_server:cast(Router, bad_message)), ?assertEqual(ok, gen_server:cast(Router, bad_message)),
Router ! bad_info. Router ! bad_info.


@ -191,7 +191,7 @@ ssl_files_save_delete_test() ->
FileKey = maps:get(<<"keyfile">>, SSL), FileKey = maps:get(<<"keyfile">>, SSL),
?assertMatch(<<"/tmp/ssl-test-dir/key-", _:16/binary>>, FileKey), ?assertMatch(<<"/tmp/ssl-test-dir/key-", _:16/binary>>, FileKey),
?assertEqual({ok, bin(test_key())}, file:read_file(FileKey)), ?assertEqual({ok, bin(test_key())}, file:read_file(FileKey)),
FileIssuerPem = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL), FileIssuerPem = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL),
?assertMatch(<<"/tmp/ssl-test-dir/ocsp_issuer_pem-", _:16/binary>>, FileIssuerPem), ?assertMatch(<<"/tmp/ssl-test-dir/ocsp_issuer_pem-", _:16/binary>>, FileIssuerPem),
?assertEqual({ok, bin(test_key())}, file:read_file(FileIssuerPem)), ?assertEqual({ok, bin(test_key())}, file:read_file(FileIssuerPem)),
%% no old file to delete %% no old file to delete
@ -251,8 +251,8 @@ ssl_file_replace_test() ->
{ok, SSL3} = emqx_tls_lib:ensure_ssl_files(Dir, SSL1), {ok, SSL3} = emqx_tls_lib:ensure_ssl_files(Dir, SSL1),
File1 = maps:get(<<"keyfile">>, SSL2), File1 = maps:get(<<"keyfile">>, SSL2),
File2 = maps:get(<<"keyfile">>, SSL3), File2 = maps:get(<<"keyfile">>, SSL3),
IssuerPem1 = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL2), IssuerPem1 = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL2),
IssuerPem2 = emqx_map_lib:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL3), IssuerPem2 = emqx_utils_maps:deep_get([<<"ocsp">>, <<"issuer_pem">>], SSL3),
?assert(filelib:is_regular(File1)), ?assert(filelib:is_regular(File1)),
?assert(filelib:is_regular(File2)), ?assert(filelib:is_regular(File2)),
?assert(filelib:is_regular(IssuerPem1)), ?assert(filelib:is_regular(IssuerPem1)),


@ -22,10 +22,9 @@
-include_lib("common_test/include/ct.hrl"). -include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl"). -include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/emqx_trace.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl").
-record(emqx_trace, {name, type, filter, enable = true, start_at, end_at}).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Setups %% Setups
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -97,7 +96,9 @@ t_base_create_delete(_Config) ->
type => clientid, type => clientid,
name => <<"name1">>, name => <<"name1">>,
start_at => Now, start_at => Now,
end_at => Now + 30 * 60 end_at => Now + 30 * 60,
payload_encode => text,
extra => #{}
} }
], ],
?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])), ?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])),
@ -385,6 +386,48 @@ t_find_closed_time(_Config) ->
?assertEqual(1000, emqx_trace:find_closest_time(Traces, Now)), ?assertEqual(1000, emqx_trace:find_closest_time(Traces, Now)),
ok. ok.
t_migrate_trace(_Config) ->
build_new_trace_data(),
build_old_trace_data(),
reload(),
Traces = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces)),
lists:foreach(
fun(#{name := Name, enable := Enable}) ->
?assertEqual(true, Enable, Name)
end,
Traces
),
LoggerIds = logger:get_handler_ids(),
lists:foreach(
fun(Id) ->
?assertEqual(true, lists:member(Id, LoggerIds), LoggerIds)
end,
[
trace_topic_test_topic_migrate_new,
trace_topic_test_topic_migrate_old
]
),
ok.
build_new_trace_data() ->
Now = erlang:system_time(second),
{ok, _} = emqx_trace:create([
{<<"name">>, <<"test_topic_migrate_new">>},
{<<"type">>, topic},
{<<"topic">>, <<"/test/migrate/new">>},
{<<"start_at">>, Now - 10}
]).
build_old_trace_data() ->
Now = erlang:system_time(second),
OldAttrs = [name, type, filter, enable, start_at, end_at],
{atomic, ok} = mnesia:transform_table(emqx_trace, ignore, OldAttrs, emqx_trace),
OldTrace =
{emqx_trace, <<"test_topic_migrate_old">>, topic, <<"topic">>, true, Now - 10, Now + 100},
ok = mnesia:dirty_write(OldTrace),
ok.
reload() -> reload() ->
catch ok = gen_server:stop(emqx_trace), catch ok = gen_server:stop(emqx_trace),
{ok, _Pid} = emqx_trace:start_link(). {ok, _Pid} = emqx_trace:start_link().
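build_old_trace_data/0 writes records in the pre-upgrade six-field layout, while the current emqx_trace record also carries payload_encode and extra (see the expected format at the top of this file). A sketch of the kind of table transform the restart is expected to perform; the attribute list and defaults here are inferred from the assertions above, not copied from emqx_trace:

    %% Hypothetical upgrade fun: pad old records with the new default fields.
    mnesia:transform_table(
        emqx_trace,
        fun({emqx_trace, Name, Type, Filter, Enable, StartAt, EndAt}) ->
            {emqx_trace, Name, Type, Filter, Enable, StartAt, EndAt, text, #{}}
        end,
        [name, type, filter, enable, start_at, end_at, payload_encode, extra],
        emqx_trace
    ).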


@ -509,16 +509,16 @@ t_handle_timeout_emit_stats(_) ->
t_ensure_rate_limit(_) -> t_ensure_rate_limit(_) ->
{ok, Rate} = emqx_limiter_schema:to_rate("50MB"), {ok, Rate} = emqx_limiter_schema:to_rate("50MB"),
Limiter = init_limiter(#{ Limiter = init_limiter(#{
bytes_in => bucket_cfg(), bytes => bucket_cfg(),
message_in => bucket_cfg(), messages => bucket_cfg(),
client => #{bytes_in => client_cfg(Rate)} client => #{bytes => client_cfg(Rate)}
}), }),
St = st(#{limiter => Limiter}), St = st(#{limiter => Limiter}),
%% must be bigger than the value in emqx_ratelimit_SUITE %% must be bigger than the value in emqx_ratelimit_SUITE
{ok, Need} = emqx_limiter_schema:to_capacity("1GB"), {ok, Need} = emqx_limiter_schema:to_capacity("1GB"),
St1 = ?ws_conn:check_limiter( St1 = ?ws_conn:check_limiter(
[{Need, bytes_in}], [{Need, bytes}],
[], [],
fun(_, _, S) -> S end, fun(_, _, S) -> S end,
[], [],
@ -699,23 +699,21 @@ init_limiter() ->
init_limiter(limiter_cfg()). init_limiter(limiter_cfg()).
init_limiter(LimiterCfg) -> init_limiter(LimiterCfg) ->
emqx_limiter_container:get_limiter_by_types(?LIMITER_ID, [bytes_in, message_in], LimiterCfg). emqx_limiter_container:get_limiter_by_types(?LIMITER_ID, [bytes, messages], LimiterCfg).
limiter_cfg() -> limiter_cfg() ->
Cfg = bucket_cfg(), Cfg = bucket_cfg(),
Client = client_cfg(), Client = client_cfg(),
#{bytes_in => Cfg, message_in => Cfg, client => #{bytes_in => Client, message_in => Client}}. #{bytes => Cfg, messages => Cfg, client => #{bytes => Client, messages => Client}}.
client_cfg() -> client_cfg() ->
Infinity = emqx_limiter_schema:infinity_value(), client_cfg(infinity).
client_cfg(Infinity).
client_cfg(Rate) -> client_cfg(Rate) ->
Infinity = emqx_limiter_schema:infinity_value(),
#{ #{
rate => Rate, rate => Rate,
initial => 0, initial => 0,
capacity => Infinity, burst => 0,
low_watermark => 1, low_watermark => 1,
divisible => false, divisible => false,
max_retry_time => timer:seconds(5), max_retry_time => timer:seconds(5),
@ -723,14 +721,13 @@ client_cfg(Rate) ->
}. }.
bucket_cfg() -> bucket_cfg() ->
Infinity = emqx_limiter_schema:infinity_value(), #{rate => infinity, initial => 0, burst => 0}.
#{rate => Infinity, initial => 0, capacity => Infinity}.
add_bucket() -> add_bucket() ->
Cfg = bucket_cfg(), Cfg = bucket_cfg(),
emqx_limiter_server:add_bucket(?LIMITER_ID, bytes_in, Cfg), emqx_limiter_server:add_bucket(?LIMITER_ID, bytes, Cfg),
emqx_limiter_server:add_bucket(?LIMITER_ID, message_in, Cfg). emqx_limiter_server:add_bucket(?LIMITER_ID, messages, Cfg).
del_bucket() -> del_bucket() ->
emqx_limiter_server:del_bucket(?LIMITER_ID, bytes_in), emqx_limiter_server:del_bucket(?LIMITER_ID, bytes),
emqx_limiter_server:del_bucket(?LIMITER_ID, message_in). emqx_limiter_server:del_bucket(?LIMITER_ID, messages).


@ -2,6 +2,7 @@
{deps, [ {deps, [
{emqx, {path, "../emqx"}}, {emqx, {path, "../emqx"}},
{emqx_utils, {path, "../emqx_utils"}},
{emqx_connector, {path, "../emqx_connector"}} {emqx_connector, {path, "../emqx_connector"}}
]}. ]}.


@ -1,7 +1,7 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{application, emqx_authn, [ {application, emqx_authn, [
{description, "EMQX Authentication"}, {description, "EMQX Authentication"},
{vsn, "0.1.16"}, {vsn, "0.1.18"},
{modules, []}, {modules, []},
{registered, [emqx_authn_sup, emqx_authn_registry]}, {registered, [emqx_authn_sup, emqx_authn_registry]},
{applications, [kernel, stdlib, emqx_resource, emqx_connector, ehttpc, epgsql, mysql, jose]}, {applications, [kernel, stdlib, emqx_resource, emqx_connector, ehttpc, epgsql, mysql, jose]},


@ -872,8 +872,8 @@ lookup_from_local_node(ChainName, AuthenticatorID) ->
case emqx_resource:get_instance(ResourceId) of case emqx_resource:get_instance(ResourceId) of
{error, not_found} -> {error, not_found} ->
{error, {NodeId, not_found_resource}}; {error, {NodeId, not_found_resource}};
{ok, _, #{status := Status, metrics := ResourceMetrics}} -> {ok, _, #{status := Status}} ->
{ok, {NodeId, Status, Metrics, ResourceMetrics}} {ok, {NodeId, Status, Metrics, emqx_resource:get_metrics(ResourceId)}}
end end
end; end;
{error, Reason} -> {error, Reason} ->
@ -929,7 +929,7 @@ aggregate_metrics([]) ->
aggregate_metrics([HeadMetrics | AllMetrics]) -> aggregate_metrics([HeadMetrics | AllMetrics]) ->
ErrorLogger = fun(Reason) -> ?SLOG(info, #{msg => "bad_metrics_value", error => Reason}) end, ErrorLogger = fun(Reason) -> ?SLOG(info, #{msg => "bad_metrics_value", error => Reason}) end,
Fun = fun(ElemMap, AccMap) -> Fun = fun(ElemMap, AccMap) ->
emqx_map_lib:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger) emqx_utils_maps:best_effort_recursive_sum(AccMap, ElemMap, ErrorLogger)
end, end,
lists:foldl(Fun, HeadMetrics, AllMetrics). lists:foldl(Fun, HeadMetrics, AllMetrics).
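aggregate_metrics/1 now delegates to emqx_utils_maps:best_effort_recursive_sum/3, which, as used here, adds matching numeric fields across the per-node metric maps and routes anything it cannot sum to the error logger. A sketch under that assumption, with made-up metric names:

    %% Illustrative only; these are not real authenticator metric keys.
    Log = fun(Reason) -> io:format("bad metrics value: ~p~n", [Reason]) end,
    Acc = emqx_utils_maps:best_effort_recursive_sum(
        #{matched => 1, nested => #{success => 1}},
        #{matched => 2, nested => #{success => 2}},
        Log
    ),
    %% Acc is expected to be #{matched => 3, nested => #{success => 3}}.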
@ -1069,7 +1069,7 @@ update_user(ChainName, AuthenticatorID, UserID, UserInfo0) ->
true -> true ->
serialize_error({missing_parameter, password}); serialize_error({missing_parameter, password});
false -> false ->
UserInfo = emqx_map_lib:safe_atom_key_map(UserInfo0), UserInfo = emqx_utils_maps:safe_atom_key_map(UserInfo0),
case emqx_authentication:update_user(ChainName, AuthenticatorID, UserID, UserInfo) of case emqx_authentication:update_user(ChainName, AuthenticatorID, UserID, UserInfo) of
{ok, User} -> {ok, User} ->
{200, User}; {200, User};


@ -18,10 +18,12 @@
-elvis([{elvis_style, invalid_dynamic_call, disable}]). -elvis([{elvis_style, invalid_dynamic_call, disable}]).
-include_lib("hocon/include/hoconsc.hrl"). -include_lib("hocon/include/hoconsc.hrl").
-include("emqx_authn.hrl").
-export([ -export([
common_fields/0, common_fields/0,
roots/0, roots/0,
validations/0,
tags/0, tags/0,
fields/1, fields/1,
authenticator_type/0, authenticator_type/0,
@ -207,3 +209,27 @@ array(Name) ->
array(Name, DescId) -> array(Name, DescId) ->
{Name, ?HOCON(?R_REF(Name), #{desc => ?DESC(DescId)})}. {Name, ?HOCON(?R_REF(Name), #{desc => ?DESC(DescId)})}.
validations() ->
[
{check_http_ssl_opts, fun(Conf) ->
CheckFun = fun emqx_authn_http:check_ssl_opts/1,
validation(Conf, CheckFun)
end},
{check_http_headers, fun(Conf) ->
CheckFun = fun emqx_authn_http:check_headers/1,
validation(Conf, CheckFun)
end}
].
validation(Conf, CheckFun) when is_map(Conf) ->
validation(hocon_maps:get(?CONF_NS, Conf), CheckFun);
validation(undefined, _) ->
ok;
validation([], _) ->
ok;
validation([AuthN | Tail], CheckFun) ->
case CheckFun(#{?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY => AuthN}) of
ok -> validation(Tail, CheckFun);
Error -> Error
end.


@ -28,6 +28,7 @@
parse_sql/2, parse_sql/2,
render_deep/2, render_deep/2,
render_str/2, render_str/2,
render_urlencoded_str/2,
render_sql_params/2, render_sql_params/2,
is_superuser/1, is_superuser/1,
bin/1, bin/1,
@ -129,6 +130,13 @@ render_str(Template, Credential) ->
#{return => full_binary, var_trans => fun handle_var/2} #{return => full_binary, var_trans => fun handle_var/2}
). ).
render_urlencoded_str(Template, Credential) ->
emqx_placeholder:proc_tmpl(
Template,
mapping_credential(Credential),
#{return => full_binary, var_trans => fun urlencode_var/2}
).
render_sql_params(ParamList, Credential) -> render_sql_params(ParamList, Credential) ->
emqx_placeholder:proc_tmpl( emqx_placeholder:proc_tmpl(
ParamList, ParamList,
@ -217,6 +225,11 @@ without_password(Credential, [Name | Rest]) ->
without_password(Credential, Rest) without_password(Credential, Rest)
end. end.
urlencode_var({var, _} = Var, Value) ->
emqx_http_lib:uri_encode(handle_var(Var, Value));
urlencode_var(Var, Value) ->
handle_var(Var, Value).
handle_var({var, _Name}, undefined) -> handle_var({var, _Name}, undefined) ->
<<>>; <<>>;
handle_var({var, <<"peerhost">>}, PeerHost) -> handle_var({var, <<"peerhost">>}, PeerHost) ->
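render_urlencoded_str/2 renders the same placeholders as render_str/2 but passes each substituted value through emqx_http_lib:uri_encode/1, so credential values interpolated into a URL path arrive escaped. A sketch of the intended effect, assuming the usual ${...} placeholder syntax; the exact escaping of other characters follows emqx_http_lib:uri_encode/1:

    %% Illustrative values only.
    Template = emqx_authn_utils:parse_str(<<"/auth/${username}">>),
    Path = emqx_authn_utils:render_urlencoded_str(
        Template, #{username => <<"user name">>}
    ),
    %% Path is expected to be <<"/auth/user%20name">>.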


@ -105,14 +105,16 @@ mnesia(boot) ->
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
namespace() -> "authn-scram-builtin_db". namespace() -> "authn".
tags() -> tags() ->
[<<"Authentication">>]. [<<"Authentication">>].
roots() -> [?CONF_NS]. %% used for config check when the schema module is resolved
roots() ->
[{?CONF_NS, hoconsc:mk(hoconsc:ref(?MODULE, scram))}].
fields(?CONF_NS) -> fields(scram) ->
[ [
{mechanism, emqx_authn_schema:mechanism(scram)}, {mechanism, emqx_authn_schema:mechanism(scram)},
{backend, emqx_authn_schema:backend(built_in_database)}, {backend, emqx_authn_schema:backend(built_in_database)},
@ -120,7 +122,7 @@ fields(?CONF_NS) ->
{iteration_count, fun iteration_count/1} {iteration_count, fun iteration_count/1}
] ++ emqx_authn_schema:common_fields(). ] ++ emqx_authn_schema:common_fields().
desc(?CONF_NS) -> desc(scram) ->
"Settings for Salted Challenge Response Authentication Mechanism\n" "Settings for Salted Challenge Response Authentication Mechanism\n"
"(SCRAM) authentication."; "(SCRAM) authentication.";
desc(_) -> desc(_) ->
@ -141,7 +143,7 @@ iteration_count(_) -> undefined.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() -> refs() ->
[hoconsc:ref(?MODULE, ?CONF_NS)]. [hoconsc:ref(?MODULE, scram)].
create( create(
AuthenticatorID, AuthenticatorID,


@ -38,6 +38,8 @@
headers/1 headers/1
]). ]).
-export([check_headers/1, check_ssl_opts/1]).
-export([ -export([
refs/0, refs/0,
union_member_selector/1, union_member_selector/1,
@ -51,34 +53,35 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
namespace() -> "authn-http". namespace() -> "authn".
tags() -> tags() ->
[<<"Authentication">>]. [<<"Authentication">>].
%% used for config check when the schema module is resolved
roots() -> roots() ->
[ [
{?CONF_NS, {?CONF_NS,
hoconsc:mk( hoconsc:mk(
hoconsc:union(fun union_member_selector/1), hoconsc:union(fun ?MODULE:union_member_selector/1),
#{} #{}
)} )}
]. ].
fields(get) -> fields(http_get) ->
[ [
{method, #{type => get, required => true, desc => ?DESC(method)}}, {method, #{type => get, required => true, desc => ?DESC(method)}},
{headers, fun headers_no_content_type/1} {headers, fun headers_no_content_type/1}
] ++ common_fields(); ] ++ common_fields();
fields(post) -> fields(http_post) ->
[ [
{method, #{type => post, required => true, desc => ?DESC(method)}}, {method, #{type => post, required => true, desc => ?DESC(method)}},
{headers, fun headers/1} {headers, fun headers/1}
] ++ common_fields(). ] ++ common_fields().
desc(get) -> desc(http_get) ->
?DESC(get); ?DESC(get);
desc(post) -> desc(http_post) ->
?DESC(post); ?DESC(post);
desc(_) -> desc(_) ->
undefined. undefined.
@ -106,8 +109,8 @@ common_fields() ->
validations() -> validations() ->
[ [
{check_ssl_opts, fun check_ssl_opts/1}, {check_ssl_opts, fun ?MODULE:check_ssl_opts/1},
{check_headers, fun check_headers/1} {check_headers, fun ?MODULE:check_headers/1}
]. ].
url(type) -> binary(); url(type) -> binary();
@ -156,8 +159,8 @@ request_timeout(_) -> undefined.
refs() -> refs() ->
[ [
hoconsc:ref(?MODULE, get), hoconsc:ref(?MODULE, http_get),
hoconsc:ref(?MODULE, post) hoconsc:ref(?MODULE, http_post)
]. ].
union_member_selector(all_union_members) -> union_member_selector(all_union_members) ->
@ -166,9 +169,9 @@ union_member_selector({value, Value}) ->
refs(Value). refs(Value).
refs(#{<<"method">> := <<"get">>}) -> refs(#{<<"method">> := <<"get">>}) ->
[hoconsc:ref(?MODULE, get)]; [hoconsc:ref(?MODULE, http_get)];
refs(#{<<"method">> := <<"post">>}) -> refs(#{<<"method">> := <<"post">>}) ->
[hoconsc:ref(?MODULE, post)]; [hoconsc:ref(?MODULE, http_post)];
refs(_) -> refs(_) ->
throw(#{ throw(#{
field_name => method, field_name => method,
@ -261,21 +264,47 @@ transform_header_name(Headers) ->
). ).
check_ssl_opts(Conf) -> check_ssl_opts(Conf) ->
{BaseUrl, _Path, _Query} = parse_url(get_conf_val("url", Conf)), case is_backend_http(Conf) of
case BaseUrl of true ->
<<"https://", _/binary>> -> Url = get_conf_val("url", Conf),
case get_conf_val("ssl.enable", Conf) of {BaseUrl, _Path, _Query} = parse_url(Url),
true -> ok; case BaseUrl of
false -> false <<"https://", _/binary>> ->
case get_conf_val("ssl.enable", Conf) of
true ->
ok;
false ->
<<"it's required to enable the TLS option to establish a https connection">>
end;
<<"http://", _/binary>> ->
ok
end; end;
<<"http://", _/binary>> -> false ->
ok ok
end. end.
check_headers(Conf) -> check_headers(Conf) ->
Method = to_bin(get_conf_val("method", Conf)), case is_backend_http(Conf) of
Headers = get_conf_val("headers", Conf), true ->
Method =:= <<"post">> orelse (not maps:is_key(<<"content-type">>, Headers)). Headers = get_conf_val("headers", Conf),
case to_bin(get_conf_val("method", Conf)) of
<<"post">> ->
ok;
<<"get">> ->
case maps:is_key(<<"content-type">>, Headers) of
false -> ok;
true -> <<"HTTP GET requests cannot include content-type header.">>
end
end;
false ->
ok
end.
is_backend_http(Conf) ->
case get_conf_val("backend", Conf) of
http -> true;
_ -> false
end.
parse_url(Url) -> parse_url(Url) ->
case string:split(Url, "//", leading) of case string:split(Url, "//", leading) of
@ -285,9 +314,9 @@ parse_url(Url) ->
BaseUrl = iolist_to_binary([Scheme, "//", HostPort]), BaseUrl = iolist_to_binary([Scheme, "//", HostPort]),
case string:split(Remaining, "?", leading) of case string:split(Remaining, "?", leading) of
[Path, QueryString] -> [Path, QueryString] ->
{BaseUrl, Path, QueryString}; {BaseUrl, <<"/", Path/binary>>, QueryString};
[Path] -> [Path] ->
{BaseUrl, Path, <<>>} {BaseUrl, <<"/", Path/binary>>, <<>>}
end; end;
[HostPort] -> [HostPort] ->
{iolist_to_binary([Scheme, "//", HostPort]), <<>>, <<>>} {iolist_to_binary([Scheme, "//", HostPort]), <<>>, <<>>}
@ -310,7 +339,7 @@ parse_config(
method => Method, method => Method,
path => Path, path => Path,
headers => ensure_header_name_type(Headers), headers => ensure_header_name_type(Headers),
base_path_templete => emqx_authn_utils:parse_str(Path), base_path_template => emqx_authn_utils:parse_str(Path),
base_query_template => emqx_authn_utils:parse_deep( base_query_template => emqx_authn_utils:parse_deep(
cow_qs:parse_qs(to_bin(Query)) cow_qs:parse_qs(to_bin(Query))
), ),
@ -323,12 +352,12 @@ parse_config(
generate_request(Credential, #{ generate_request(Credential, #{
method := Method, method := Method,
headers := Headers0, headers := Headers0,
base_path_templete := BasePathTemplate, base_path_template := BasePathTemplate,
base_query_template := BaseQueryTemplate, base_query_template := BaseQueryTemplate,
body_template := BodyTemplate body_template := BodyTemplate
}) -> }) ->
Headers = maps:to_list(Headers0), Headers = maps:to_list(Headers0),
Path = emqx_authn_utils:render_str(BasePathTemplate, Credential), Path = emqx_authn_utils:render_urlencoded_str(BasePathTemplate, Credential),
Query = emqx_authn_utils:render_deep(BaseQueryTemplate, Credential), Query = emqx_authn_utils:render_deep(BaseQueryTemplate, Credential),
Body = emqx_authn_utils:render_deep(BodyTemplate, Credential), Body = emqx_authn_utils:render_deep(BodyTemplate, Credential),
case Method of case Method of
@ -343,9 +372,9 @@ generate_request(Credential, #{
end. end.
append_query(Path, []) -> append_query(Path, []) ->
encode_path(Path); Path;
append_query(Path, Query) -> append_query(Path, Query) ->
encode_path(Path) ++ "?" ++ binary_to_list(qs(Query)). Path ++ "?" ++ binary_to_list(qs(Query)).
qs(KVs) -> qs(KVs) ->
qs(KVs, []). qs(KVs, []).
@ -357,7 +386,7 @@ qs([{K, V} | More], Acc) ->
qs(More, [["&", uri_encode(K), "=", uri_encode(V)] | Acc]). qs(More, [["&", uri_encode(K), "=", uri_encode(V)] | Acc]).
serialize_body(<<"application/json">>, Body) -> serialize_body(<<"application/json">>, Body) ->
emqx_json:encode(Body); emqx_utils_json:encode(Body);
serialize_body(<<"application/x-www-form-urlencoded">>, Body) -> serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
qs(maps:to_list(Body)). qs(maps:to_list(Body)).
@ -395,7 +424,7 @@ safely_parse_body(ContentType, Body) ->
end. end.
parse_body(<<"application/json", _/binary>>, Body) -> parse_body(<<"application/json", _/binary>>, Body) ->
{ok, emqx_json:decode(Body, [return_maps])}; {ok, emqx_utils_json:decode(Body, [return_maps])};
parse_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) -> parse_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) ->
Flags = [<<"result">>, <<"is_superuser">>], Flags = [<<"result">>, <<"is_superuser">>],
RawMap = maps:from_list(cow_qs:parse_qs(Body)), RawMap = maps:from_list(cow_qs:parse_qs(Body)),
@ -407,10 +436,6 @@ parse_body(ContentType, _) ->
uri_encode(T) -> uri_encode(T) ->
emqx_http_lib:uri_encode(to_list(T)). emqx_http_lib:uri_encode(to_list(T)).
encode_path(Path) ->
Parts = string:split(Path, "/", all),
lists:flatten(["/" ++ Part || Part <- lists:map(fun uri_encode/1, Parts)]).
request_for_log(Credential, #{url := Url} = State) -> request_for_log(Credential, #{url := Url} = State) ->
SafeCredential = emqx_authn_utils:without_password(Credential), SafeCredential = emqx_authn_utils:without_password(Credential),
case generate_request(SafeCredential, State) of case generate_request(SafeCredential, State) of
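Two behavioural notes on this file: check_ssl_opts/1 and check_headers/1 now run only when backend = http and return a descriptive binary (rather than false) when an https:// URL is configured with TLS disabled or a GET request carries a content-type header; and parse_url/1 keeps the leading slash on the path, so generate_request/2 no longer re-encodes the whole path itself. A sketch of a configuration the stricter TLS check is aimed at; the field values are illustrative:

    %% Hypothetical raw config: an https URL with TLS disabled now fails validation.
    BadAuthnConf = <<
        "authentication = [\n"
        "  {mechanism = password_based, backend = http, method = post,\n"
        "   url = \"https://127.0.0.1:8443/auth\",\n"
        "   body {username = \"${username}\", password = \"${password}\"},\n"
        "   ssl {enable = false}}\n"
        "]\n"
    >>.
    %% With ssl.enable = true, or an http:// URL, the same check returns ok.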


@ -99,7 +99,7 @@ handle_info(
State1; State1;
{StatusLine, Headers, Body} -> {StatusLine, Headers, Body} ->
try try
JWKS = jose_jwk:from(emqx_json:decode(Body, [return_maps])), JWKS = jose_jwk:from(emqx_utils_json:decode(Body, [return_maps])),
{_, JWKs} = JWKS#jose_jwk.keys, {_, JWKs} = JWKS#jose_jwk.keys,
State1#{jwks := JWKs} State1#{jwks := JWKs}
catch catch


@ -35,18 +35,17 @@
callback_mode() -> always_sync. callback_mode() -> always_sync.
on_start(InstId, Opts) -> on_start(InstId, Opts) ->
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
PoolOpts = [ PoolOpts = [
{pool_size, maps:get(pool_size, Opts, ?DEFAULT_POOL_SIZE)}, {pool_size, maps:get(pool_size, Opts, ?DEFAULT_POOL_SIZE)},
{connector_opts, Opts} {connector_opts, Opts}
], ],
case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, PoolOpts) of case emqx_resource_pool:start(InstId, ?MODULE, PoolOpts) of
ok -> {ok, #{pool_name => PoolName}}; ok -> {ok, #{pool_name => InstId}};
{error, Reason} -> {error, Reason} {error, Reason} -> {error, Reason}
end. end.
on_stop(_InstId, #{pool_name := PoolName}) -> on_stop(_InstId, #{pool_name := PoolName}) ->
emqx_plugin_libs_pool:stop_pool(PoolName). emqx_resource_pool:stop(PoolName).
on_query(InstId, get_jwks, #{pool_name := PoolName}) -> on_query(InstId, get_jwks, #{pool_name := PoolName}) ->
Result = ecpool:pick_and_do(PoolName, {emqx_authn_jwks_client, get_jwks, []}, no_handover), Result = ecpool:pick_and_do(PoolName, {emqx_authn_jwks_client, get_jwks, []}, no_handover),
@ -72,18 +71,17 @@ on_query(_InstId, {update, Opts}, #{pool_name := PoolName}) ->
ok. ok.
on_get_status(_InstId, #{pool_name := PoolName}) -> on_get_status(_InstId, #{pool_name := PoolName}) ->
Func = case emqx_resource_pool:health_check_workers(PoolName, fun health_check/1) of
fun(Conn) ->
case emqx_authn_jwks_client:get_jwks(Conn) of
{ok, _} -> true;
_ -> false
end
end,
case emqx_plugin_libs_pool:health_check_ecpool_workers(PoolName, Func) of
true -> connected; true -> connected;
false -> disconnected false -> disconnected
end. end.
health_check(Conn) ->
case emqx_authn_jwks_client:get_jwks(Conn) of
{ok, _} -> true;
_ -> false
end.
connect(Opts) -> connect(Opts) ->
ConnectorOpts = proplists:get_value(connector_opts, Opts), ConnectorOpts = proplists:get_value(connector_opts, Opts),
emqx_authn_jwks_client:start_link(ConnectorOpts). emqx_authn_jwks_client:start_link(ConnectorOpts).


@ -43,36 +43,57 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
namespace() -> "authn-jwt". namespace() -> "authn".
tags() -> tags() ->
[<<"Authentication">>]. [<<"Authentication">>].
%% used for config check when the schema module is resolved
roots() -> roots() ->
[ [
{?CONF_NS, {?CONF_NS,
hoconsc:mk( hoconsc:mk(
hoconsc:union(fun union_member_selector/1), hoconsc:union(fun ?MODULE:union_member_selector/1),
#{} #{}
)} )}
]. ].
fields('hmac-based') -> fields(jwt_hmac) ->
[ [
{use_jwks, sc(hoconsc:enum([false]), #{required => true, desc => ?DESC(use_jwks)})}, %% for hmac, it's the 'algorithm' field which selects this type
%% use_jwks field can be ignored (kept for backward compatibility)
{use_jwks,
sc(
hoconsc:enum([false]),
#{
required => false,
desc => ?DESC(use_jwks),
importance => ?IMPORTANCE_HIDDEN
}
)},
{algorithm, {algorithm,
sc(hoconsc:enum(['hmac-based']), #{required => true, desc => ?DESC(algorithm)})}, sc(hoconsc:enum(['hmac-based']), #{required => true, desc => ?DESC(algorithm)})},
{secret, fun secret/1}, {secret, fun secret/1},
{secret_base64_encoded, fun secret_base64_encoded/1} {secret_base64_encoded, fun secret_base64_encoded/1}
] ++ common_fields(); ] ++ common_fields();
fields('public-key') -> fields(jwt_public_key) ->
[ [
{use_jwks, sc(hoconsc:enum([false]), #{required => true, desc => ?DESC(use_jwks)})}, %% for public-key, it's the 'algorithm' field which selects this type
%% use_jwks field can be ignored (kept for backward compatibility)
{use_jwks,
sc(
hoconsc:enum([false]),
#{
required => false,
desc => ?DESC(use_jwks),
importance => ?IMPORTANCE_HIDDEN
}
)},
{algorithm, {algorithm,
sc(hoconsc:enum(['public-key']), #{required => true, desc => ?DESC(algorithm)})}, sc(hoconsc:enum(['public-key']), #{required => true, desc => ?DESC(algorithm)})},
{public_key, fun public_key/1} {public_key, fun public_key/1}
] ++ common_fields(); ] ++ common_fields();
fields('jwks') -> fields(jwt_jwks) ->
[ [
{use_jwks, sc(hoconsc:enum([true]), #{required => true, desc => ?DESC(use_jwks)})}, {use_jwks, sc(hoconsc:enum([true]), #{required => true, desc => ?DESC(use_jwks)})},
{endpoint, fun endpoint/1}, {endpoint, fun endpoint/1},
@ -85,12 +106,12 @@ fields('jwks') ->
}} }}
] ++ common_fields(). ] ++ common_fields().
desc('hmac-based') -> desc(jwt_hmac) ->
?DESC('hmac-based'); ?DESC(jwt_hmac);
desc('public-key') -> desc(jwt_public_key) ->
?DESC('public-key'); ?DESC(jwt_public_key);
desc('jwks') -> desc(jwt_jwks) ->
?DESC('jwks'); ?DESC(jwt_jwks);
desc(undefined) -> desc(undefined) ->
undefined. undefined.
@ -160,9 +181,9 @@ from(_) -> undefined.
refs() -> refs() ->
[ [
hoconsc:ref(?MODULE, 'hmac-based'), hoconsc:ref(?MODULE, jwt_hmac),
hoconsc:ref(?MODULE, 'public-key'), hoconsc:ref(?MODULE, jwt_public_key),
hoconsc:ref(?MODULE, 'jwks') hoconsc:ref(?MODULE, jwt_jwks)
]. ].
union_member_selector(all_union_members) -> union_member_selector(all_union_members) ->
@ -179,11 +200,11 @@ boolean(<<"false">>) -> false;
boolean(Other) -> Other. boolean(Other) -> Other.
select_ref(true, _) -> select_ref(true, _) ->
[hoconsc:ref(?MODULE, 'jwks')]; [hoconsc:ref(?MODULE, 'jwt_jwks')];
select_ref(false, #{<<"public_key">> := _}) -> select_ref(false, #{<<"public_key">> := _}) ->
[hoconsc:ref(?MODULE, 'public-key')]; [hoconsc:ref(?MODULE, jwt_public_key)];
select_ref(false, _) -> select_ref(false, _) ->
[hoconsc:ref(?MODULE, 'hmac-based')]; [hoconsc:ref(?MODULE, jwt_hmac)];
select_ref(_, _) -> select_ref(_, _) ->
throw(#{ throw(#{
field_name => use_jwks, field_name => use_jwks,
@ -407,7 +428,7 @@ do_verify(_JWT, [], _VerifyClaims) ->
do_verify(JWT, [JWK | More], VerifyClaims) -> do_verify(JWT, [JWK | More], VerifyClaims) ->
try jose_jws:verify(JWK, JWT) of try jose_jws:verify(JWK, JWT) of
{true, Payload, _JWT} -> {true, Payload, _JWT} ->
Claims0 = emqx_json:decode(Payload, [return_maps]), Claims0 = emqx_utils_json:decode(Payload, [return_maps]),
Claims = try_convert_to_num(Claims0, [<<"exp">>, <<"iat">>, <<"nbf">>]), Claims = try_convert_to_num(Claims0, [<<"exp">>, <<"iat">>, <<"nbf">>]),
case verify_claims(Claims, VerifyClaims) of case verify_claims(Claims, VerifyClaims) of
ok -> ok ->


@ -107,14 +107,16 @@ mnesia(boot) ->
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
namespace() -> "authn-builtin_db". namespace() -> "authn".
tags() -> tags() ->
[<<"Authentication">>]. [<<"Authentication">>].
roots() -> [?CONF_NS]. %% used for config check when the schema module is resolved
roots() ->
[{?CONF_NS, hoconsc:mk(hoconsc:ref(?MODULE, builtin_db))}].
fields(?CONF_NS) -> fields(builtin_db) ->
[ [
{mechanism, emqx_authn_schema:mechanism(password_based)}, {mechanism, emqx_authn_schema:mechanism(password_based)},
{backend, emqx_authn_schema:backend(built_in_database)}, {backend, emqx_authn_schema:backend(built_in_database)},
@ -122,8 +124,8 @@ fields(?CONF_NS) ->
{password_hash_algorithm, fun emqx_authn_password_hashing:type_rw/1} {password_hash_algorithm, fun emqx_authn_password_hashing:type_rw/1}
] ++ emqx_authn_schema:common_fields(). ] ++ emqx_authn_schema:common_fields().
desc(?CONF_NS) -> desc(builtin_db) ->
?DESC(?CONF_NS); ?DESC(builtin_db);
desc(_) -> desc(_) ->
undefined. undefined.
@ -138,7 +140,7 @@ user_id_type(_) -> undefined.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() -> refs() ->
[hoconsc:ref(?MODULE, ?CONF_NS)]. [hoconsc:ref(?MODULE, builtin_db)].
create(_AuthenticatorID, Config) -> create(_AuthenticatorID, Config) ->
create(Config). create(Config).
@ -332,7 +334,7 @@ run_fuzzy_filter(
%% Example: data/user-credentials.json %% Example: data/user-credentials.json
import_users_from_json(Bin, #{user_group := UserGroup}) -> import_users_from_json(Bin, #{user_group := UserGroup}) ->
case emqx_json:safe_decode(Bin, [return_maps]) of case emqx_utils_json:safe_decode(Bin, [return_maps]) of
{ok, List} -> {ok, List} ->
trans(fun ?MODULE:import/2, [UserGroup, List]); trans(fun ?MODULE:import/2, [UserGroup, List]);
{error, Reason} -> {error, Reason} ->

View File

@ -44,32 +44,33 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
namespace() -> "authn-mongodb". namespace() -> "authn".
tags() -> tags() ->
[<<"Authentication">>]. [<<"Authentication">>].
%% used for config check when the schema module is resolved
roots() -> roots() ->
[ [
{?CONF_NS, {?CONF_NS,
hoconsc:mk( hoconsc:mk(
hoconsc:union(fun union_member_selector/1), hoconsc:union(fun ?MODULE:union_member_selector/1),
#{} #{}
)} )}
]. ].
fields(standalone) -> fields(mongo_single) ->
common_fields() ++ emqx_connector_mongo:fields(single); common_fields() ++ emqx_connector_mongo:fields(single);
fields('replica-set') -> fields(mongo_rs) ->
common_fields() ++ emqx_connector_mongo:fields(rs); common_fields() ++ emqx_connector_mongo:fields(rs);
fields('sharded-cluster') -> fields(mongo_sharded) ->
common_fields() ++ emqx_connector_mongo:fields(sharded). common_fields() ++ emqx_connector_mongo:fields(sharded).
desc(standalone) -> desc(mongo_single) ->
?DESC(standalone); ?DESC(single);
desc('replica-set') -> desc(mongo_rs) ->
?DESC('replica-set'); ?DESC('replica-set');
desc('sharded-cluster') -> desc(mongo_sharded) ->
?DESC('sharded-cluster'); ?DESC('sharded-cluster');
desc(_) -> desc(_) ->
undefined. undefined.
@ -126,9 +127,9 @@ is_superuser_field(_) -> undefined.
refs() -> refs() ->
[ [
hoconsc:ref(?MODULE, standalone), hoconsc:ref(?MODULE, mongo_single),
hoconsc:ref(?MODULE, 'replica-set'), hoconsc:ref(?MODULE, mongo_rs),
hoconsc:ref(?MODULE, 'sharded-cluster') hoconsc:ref(?MODULE, mongo_sharded)
]. ].
create(_AuthenticatorID, Config) -> create(_AuthenticatorID, Config) ->
@ -254,11 +255,11 @@ union_member_selector({value, Value}) ->
refs(Value). refs(Value).
refs(#{<<"mongo_type">> := <<"single">>}) -> refs(#{<<"mongo_type">> := <<"single">>}) ->
[hoconsc:ref(?MODULE, standalone)]; [hoconsc:ref(?MODULE, mongo_single)];
refs(#{<<"mongo_type">> := <<"rs">>}) -> refs(#{<<"mongo_type">> := <<"rs">>}) ->
[hoconsc:ref(?MODULE, 'replica-set')]; [hoconsc:ref(?MODULE, mongo_rs)];
refs(#{<<"mongo_type">> := <<"sharded">>}) -> refs(#{<<"mongo_type">> := <<"sharded">>}) ->
[hoconsc:ref(?MODULE, 'sharded-cluster')]; [hoconsc:ref(?MODULE, mongo_sharded)];
refs(_) -> refs(_) ->
throw(#{ throw(#{
field_name => mongo_type, field_name => mongo_type,


@ -45,14 +45,16 @@
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
namespace() -> "authn-mysql". namespace() -> "authn".
tags() -> tags() ->
[<<"Authentication">>]. [<<"Authentication">>].
roots() -> [?CONF_NS]. %% used for config check when the schema module is resolved
roots() ->
[{?CONF_NS, hoconsc:mk(hoconsc:ref(?MODULE, mysql))}].
fields(?CONF_NS) -> fields(mysql) ->
[ [
{mechanism, emqx_authn_schema:mechanism(password_based)}, {mechanism, emqx_authn_schema:mechanism(password_based)},
{backend, emqx_authn_schema:backend(mysql)}, {backend, emqx_authn_schema:backend(mysql)},
@ -62,8 +64,8 @@ fields(?CONF_NS) ->
] ++ emqx_authn_schema:common_fields() ++ ] ++ emqx_authn_schema:common_fields() ++
proplists:delete(prepare_statement, emqx_connector_mysql:fields(config)). proplists:delete(prepare_statement, emqx_connector_mysql:fields(config)).
desc(?CONF_NS) -> desc(mysql) ->
?DESC(?CONF_NS); ?DESC(mysql);
desc(_) -> desc(_) ->
undefined. undefined.
@ -82,7 +84,7 @@ query_timeout(_) -> undefined.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
refs() -> refs() ->
[hoconsc:ref(?MODULE, ?CONF_NS)]. [hoconsc:ref(?MODULE, mysql)].
create(_AuthenticatorID, Config) -> create(_AuthenticatorID, Config) ->
create(Config). create(Config).
