Merge branch 'master' into emqx_connector

Shawn 2021-06-07 21:12:00 +08:00 committed by GitHub
commit a37b3957e3
277 changed files with 2417 additions and 3445 deletions

@ -7,6 +7,8 @@ COPY . /emqx
WORKDIR /emqx
RUN rm -rf _build/${EMQX_NAME}/lib _build/${EMQX_NAME}-pkg/lib
RUN make ${EMQX_NAME}-zip || cat rebar3.crashdump
RUN make ${EMQX_NAME}-pkg || cat rebar3.crashdump

@ -3,10 +3,23 @@ set -x -e -u
export CODE_PATH=${CODE_PATH:-"/emqx"}
export EMQX_NAME=${EMQX_NAME:-"emqx"}
export PACKAGE_PATH="${CODE_PATH}/_packages/${EMQX_NAME}"
export RELUP_PACKAGE_PATH="${CODE_PATH}/relup_packages/${EMQX_NAME}"
export RELUP_PACKAGE_PATH="${CODE_PATH}/_upgrade_base"
# export EMQX_NODE_NAME="emqx-on-$(uname -m)@127.0.0.1"
# export EMQX_NODE_COOKIE=$(date +%s%N)
case "$(uname -m)" in
x86_64)
ARCH='amd64'
;;
aarch64)
ARCH='arm64'
;;
arm*)
ARCH=arm
;;
esac
export ARCH
emqx_prepare(){
mkdir -p "${PACKAGE_PATH}"
@ -136,19 +149,19 @@ running_test(){
}
relup_test(){
TARGET_VERSION="$1"
TARGET_VERSION="$("$CODE_PATH"/pkg-vsn.sh)"
if [ -d "${RELUP_PACKAGE_PATH}" ];then
cd "${RELUP_PACKAGE_PATH }"
cd "${RELUP_PACKAGE_PATH}"
for var in "${EMQX_NAME}"-*-"$(uname -m)".zip;do
for var in "${EMQX_NAME}"-*-"${ARCH}".zip;do
packagename=$(basename "${var}")
unzip "$packagename"
./emqx/bin/emqx start || ( tail emqx/log/emqx.log.1 && exit 1 )
./emqx/bin/emqx_ctl status
./emqx/bin/emqx versions
cp "${PACKAGE_PATH}/${EMQX_NAME}"-*-"${TARGET_VERSION}-$(uname -m)".zip ./emqx/releases
cp "${PACKAGE_PATH}/${EMQX_NAME}"-*-"${TARGET_VERSION}-${ARCH}".zip ./emqx/releases
./emqx/bin/emqx install "${TARGET_VERSION}"
[ "$(./emqx/bin/emqx versions |grep permanent | grep -oE "[0-9].[0-9].[0-9]")" = "${TARGET_VERSION}" ] || exit 1
[ "$(./emqx/bin/emqx versions |grep permanent | awk '{print $2}')" = "${TARGET_VERSION}" ] || exit 1
./emqx/bin/emqx_ctl status
./emqx/bin/emqx stop
rm -rf emqx
@ -158,4 +171,4 @@ relup_test(){
emqx_prepare
emqx_test
# relup_test <TODO: parameterise relup target version>
relup_test
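
For reference, a minimal sketch of the new version check above, assuming emqx versions prints one release per line with its status in the first column and the version string in the second (the exact output format is an assumption here, not taken from this diff):

    # Hypothetical sample of `emqx versions` output; the real format may differ.
    versions_output='old       4.3.1
    permanent 4.3.2'
    TARGET_VERSION="4.3.2"
    # Take the second whitespace-separated field of the "permanent" line. Unlike the old
    # grep -oE "[0-9].[0-9].[0-9]" pattern, this keeps working for versions such as 4.3.10.
    current="$(echo "$versions_output" | grep permanent | awk '{print $2}')"
    [ "$current" = "$TARGET_VERSION" ] && echo "relup OK" || echo "relup mismatch"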

@ -11,3 +11,4 @@ EMQX_AUTH__PGSQL__DATABASE=mqtt
EMQX_AUTH__REDIS__SERVER=redis_server:6379
EMQX_AUTH__REDIS__PASSWORD=public
CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_
HOCON_ENV_OVERRIDE_PREFIX=EMQX_
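
As a rough illustration of what these override prefixes do (the key mapping shown is an assumption based on the double-underscore convention visible in the variable names above, not something stated in this diff):

    # Assumed mapping: the EMQX_ prefix is stripped and "__" becomes ".",
    # so EMQX_AUTH__REDIS__SERVER would override the auth.redis.server key.
    var="EMQX_AUTH__REDIS__SERVER"
    key="$(echo "${var#EMQX_}" | sed 's/__/./g' | tr 'A-Z' 'a-z')"
    echo "$key"   # auth.redis.server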

@ -9,7 +9,7 @@ services:
- emqx2
volumes:
- ./haproxy/haproxy.cfg:/usr/local/etc/haproxy/haproxy.cfg
- ../../etc/certs:/usr/local/etc/haproxy/certs
- ../../apps/emqx/etc/certs:/usr/local/etc/haproxy/certs
ports:
- "18083:18083"
# - "1883:1883"

@ -72,7 +72,7 @@
[shell bench]
!cd $BENCH_PATH
!./emqtt_bench pub -c 10 -I 1000 -t t/%i -s 64 -L 600
!./emqtt_bench pub -c 10 -I 1000 -t t/%i -s 64 -L 300
???sent
[shell emqx]
@ -109,7 +109,7 @@
???publish complete
??SH-PROMPT:
!curl http://127.0.0.1:8080/counter
???{"data":600,"code":0}
???{"data":300,"code":0}
?SH-PROMPT
[shell emqx2]

@ -3,10 +3,6 @@ name: Cross build packages
on:
schedule:
- cron: '0 */6 * * *'
push:
tags:
- v*
- e*
release:
types:
- published
@ -19,25 +15,34 @@ jobs:
outputs:
profiles: ${{ steps.set_profile.outputs.profiles}}
old_vsns: ${{ steps.set_profile.outputs.old_vsns}}
steps:
- uses: actions/checkout@v2
with:
path: source
fetch-depth: 0
- name: set profile
id: set_profile
shell: bash
run: |
if make -C source emqx-ee --dry-run > /dev/null 2>&1; then
cd source
vsn="$(./pkg-vsn.sh)"
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
if make emqx-ee --dry-run > /dev/null 2>&1; then
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
echo "::set-output name=old_vsns::$old_vsns"
echo "::set-output name=profiles::[\"emqx-ee\"]"
else
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
echo "::set-output name=old_vsns::$old_vsns"
echo "::set-output name=profiles::[\"emqx\", \"emqx-edge\"]"
fi
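
A reduced sketch of the tag filtering in the step above, with the output of pkg-vsn.sh and git tag -l replaced by hypothetical literals:

    vsn="4.3.2"                                        # stand-in for $(./pkg-vsn.sh)
    pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"  # -> 4.3
    tags="v4.3.0
    v4.3.1
    v4.3.2"                                            # stand-in for: git tag -l "v$pre_vsn.[0-9]"
    # Join the matching tags on one line and drop the current version, as in the workflow.
    old_vsns="$(echo "$tags" | xargs echo -n | sed "s/v$vsn//")"
    echo "$old_vsns"                                   # v4.3.0 v4.3.1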
- name: get_all_deps
if: endsWith(github.repository, 'emqx')
run: |
make -C source deps-all
zip -ryq source.zip source
zip -ryq source.zip source/* source/.[^.]*
- name: get_all_deps
if: endsWith(github.repository, 'enterprise')
run: |
@ -45,7 +50,7 @@ jobs:
git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> source/scripts/git-token
make -C source deps-all
zip -ryq source.zip source
zip -ryq source.zip source/* source/.[^.]*
- uses: actions/upload-artifact@v2
with:
name: source
@ -95,6 +100,10 @@ jobs:
if (Test-Path rebar.lock) {
Remove-Item -Force -Path rebar.lock
}
make ensure-rebar3
copy rebar3 "${{ steps.install_erlang.outputs.erlpath }}\bin"
ls "${{ steps.install_erlang.outputs.erlpath }}\bin"
rebar3 --help
make ${{ matrix.profile }}
mkdir -p _packages/${{ matrix.profile }}
Compress-Archive -Path _build/${{ matrix.profile }}/rel/emqx -DestinationPath _build/${{ matrix.profile }}/rel/$pkg_name
@ -124,7 +133,7 @@ jobs:
matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
erl_otp:
- 23.2.7.2
- erl23.2.7.2-emqx-2
exclude:
- profile: emqx-edge
@ -155,6 +164,8 @@ jobs:
- name: build
run: |
. $HOME/.kerl/${{ matrix.erl_otp }}/activate
make -C source ensure-rebar3
sudo cp source/rebar3 /usr/local/bin/rebar3
make -C source ${{ matrix.profile }}-zip
- name: test
run: |
@ -208,7 +219,7 @@ jobs:
- centos7
- centos6
- raspbian10
- raspbian9
# - raspbian9
exclude:
- os: centos6
arch: arm64
@ -245,35 +256,32 @@ jobs:
path: .
- name: unzip source code
run: unzip -q source.zip
- name: downloads emqx zip packages
- name: downloads old emqx zip packages
env:
PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }}
SYSTEM: ${{ matrix.os }}
OLD_VSNS: ${{ needs.prepare.outputs.old_vsns }}
run: |
set -e -u -x
cd source
if [ $PROFILE = "emqx" ];then broker="emqx-ce"; else broker="$PROFILE"; fi
if [ $PROFILE = "emqx-ee" ];then edition='enterprise'; else edition='opensource'; fi
vsn="$(./pkg-vsn.sh)"
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
if [ $PROFILE = "emqx-ee" ]; then
old_vsns=($(git tag -l "e$pre_vsn.[0-9]" | sed "s/e$vsn//"))
else
old_vsns=($(git tag -l "v$pre_vsn.[0-9]" | sed "s/v$vsn//"))
set -e -x -u
broker=$PROFILE
if [ $PROFILE = "emqx" ];then
broker="emqx-ce"
fi
if [ ! -z "$(echo $SYSTEM | grep -oE 'raspbian')" ]; then
export ARCH="arm"
fi
mkdir -p _upgrade_base
cd _upgrade_base
for tag in ${old_vsns[@]};do
if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-${{ secrets.AWS_DEFAULT_REGION }}.amazonaws.com/${{ secrets.AWS_S3_BUCKET }}/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip) | grep -oE "^[23]+")" ];then
mkdir -p source/_upgrade_base
cd source/_upgrade_base
old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
for tag in ${old_vsns[@]}; do
if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip) | grep -oE "^[23]+")" ];then
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256
echo "$(cat $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256) $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip" | sha256sum -c || exit 1
fi
done
cd -
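
The download loop above gates each fetch on a HEAD request; a minimal sketch of that pattern against a hypothetical package URL:

    # Hypothetical URL; the workflow builds it from $broker, $tag, $PROFILE, $SYSTEM and $ARCH.
    url="https://s3-us-west-2.amazonaws.com/packages.emqx/emqx-ce/v4.3.1/emqx-ubuntu20.04-4.3.1-amd64.zip"
    # HEAD the object and download only when the status code starts with 2 or 3,
    # so an old package that was never published is skipped instead of failing the job.
    code="$(curl -I -m 10 -o /dev/null -s -w '%{http_code}' "$url")"
    if echo "$code" | grep -qE '^[23]'; then
        wget --no-verbose "$url" "$url.sha256"
    fi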
- name: build emqx packages
env:
ERL_OTP: erl23.2.7.2-emqx-2

@ -53,7 +53,7 @@ jobs:
strategy:
matrix:
erl_otp:
- 23.2.7.2
- 23.2.7.2-emqx-2
steps:
- uses: actions/checkout@v1
@ -82,11 +82,14 @@ jobs:
if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60
run: |
export OTP_GITHUB_URL="https://github.com/emqx/otp"
kerl build ${{ matrix.erl_otp }}
kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }}
- name: build
run: |
. $HOME/.kerl/${{ matrix.erl_otp }}/activate
make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3
make ${EMQX_NAME}-zip
- name: test
run: |

@ -4,6 +4,7 @@ on:
push:
branches:
- master
- main-v*
jobs:
sync_to_enterprise:
@ -22,11 +23,16 @@ jobs:
id: create_pull_request
run: |
set -euo pipefail
if [ "$GITHUB_REF" = "refs/heads/master" ]; then
EE_REF="refs/heads/enterprise"
else
EE_REF="${GITHUB_REF}-enterprise"
fi
R=$(curl --silent --show-error \
-H "Accept: application/vnd.github.v3+json" \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-X POST \
-d '{"title": "Sync code into enterprise from opensource", "head": "master", "base":"enterprise"}' \
-d "{\"title\": \"Sync code from opensource $GITHUB_REF to entperprise $EE_REF\", \"head\": \"$GITHUB_REF\", \"base\":\"$EE_REF\"}" \
https://api.github.com/repos/${{ github.repository_owner }}/emqx-enterprise/pulls)
echo $R | jq
echo "::set-output name=url::$(echo $R | jq '.url')"

@ -5,9 +5,6 @@ on:
tags:
- v*
- e*
release:
types:
- published
pull_request:
jobs:
@ -51,7 +48,7 @@ jobs:
export CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_
printenv > .env
docker exec -i erlang sh -c "make ensure-rebar3"
docker exec -i erlang sh -c "./rebar3 eunit --dir apps/emqx_auth_ldap"
docker exec -i erlang sh -c "./rebar3 eunit --application=emqx_auth_ldap"
docker exec --env-file .env -i erlang sh -c "./rebar3 ct --dir apps/emqx_auth_ldap"
- uses: actions/upload-artifact@v1
if: failure()
@ -120,7 +117,7 @@ jobs:
export CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_
printenv > .env
docker exec -i erlang sh -c "make ensure-rebar3"
docker exec -i erlang sh -c "./rebar3 eunit --dir apps/emqx_auth_mongo"
docker exec -i erlang sh -c "./rebar3 eunit --application=emqx_auth_mongo"
docker exec --env-file .env -i erlang sh -c "./rebar3 ct --dir apps/emqx_auth_mongo"
- uses: actions/upload-artifact@v1
if: failure()
@ -202,7 +199,7 @@ jobs:
export CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_
printenv > .env
docker exec -i erlang sh -c "make ensure-rebar3"
docker exec -i erlang sh -c "./rebar3 eunit --dir apps/emqx_auth_mysql"
docker exec -i erlang sh -c "./rebar3 eunit --application=emqx_auth_mysql"
docker exec --env-file .env -i erlang sh -c "./rebar3 ct --dir apps/emqx_auth_mysql"
- uses: actions/upload-artifact@v1
if: failure()
@ -276,7 +273,7 @@ jobs:
CUTTLEFISH_ENV_OVERRIDE_PREFIX=EMQX_
printenv > .env
docker exec -i erlang sh -c "make ensure-rebar3"
docker exec -i erlang sh -c "./rebar3 eunit --dir apps/emqx_auth_pgsql"
docker exec -i erlang sh -c "./rebar3 eunit --application=emqx_auth_pgsql"
docker exec --env-file .env -i erlang sh -c "./rebar3 ct --dir apps/emqx_auth_pgsql"
- uses: actions/upload-artifact@v1
if: failure()
@ -397,7 +394,7 @@ jobs:
export EMQX_AUTH__REIDS__PASSWORD=public
printenv > .env
docker exec -i erlang sh -c "make ensure-rebar3"
docker exec -i erlang sh -c "./rebar3 eunit --dir apps/emqx_auth_redis"
docker exec -i erlang sh -c "./rebar3 eunit --application=emqx_auth_redis"
docker exec --env-file .env -i erlang sh -c "./rebar3 ct --dir apps/emqx_auth_redis"
- uses: actions/upload-artifact@v1
if: failure()
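
The recurring eunit change in this workflow swaps directory-based test selection for application-based selection; side by side, with the flag names as used above and one app as an example:

    # Old: select eunit tests by source directory.
    ./rebar3 eunit --dir apps/emqx_auth_redis
    # New: select eunit tests by OTP application name.
    ./rebar3 eunit --application=emqx_auth_redis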

@ -0,0 +1,26 @@
name: Check emqx app standalone
on:
push:
tags:
- v*
- e*
pull_request:
jobs:
check_all:
runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04
steps:
- uses: actions/checkout@v2
- name: run
run: |
make ensure-rebar3
cp rebar3 apps/emqx/
cd apps/emqx
./rebar3 xref
./rebar3 dialyzer
./rebar3 eunit -v
./rebar3 ct -v
./rebar3 proper -d test/props

@ -5,9 +5,6 @@ on:
tags:
- v*
- e*
release:
types:
- published
pull_request:
jobs:

@ -5,9 +5,6 @@ on:
tags:
- v*
- e*
release:
types:
- published
pull_request:
jobs:
@ -130,7 +127,7 @@ jobs:
docker exec --env-file .env -i erlang bash -c "make coveralls"
- name: cat rebar.crashdump
if: failure()
run: if [ -f 'rebar3.crashdump' ];then cat 'rebar3.crashdump' fi
run: if [ -f 'rebar3.crashdump' ];then cat 'rebar3.crashdump'; fi
- uses: actions/upload-artifact@v1
if: failure()
with:

@ -1,5 +1,5 @@
$(shell $(CURDIR)/scripts/git-hooks-init.sh)
REBAR_VERSION = 3.14.3-emqx-7
REBAR_VERSION = 3.14.3-emqx-8
REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts

@ -7,6 +7,7 @@
[![Slack Invite](<https://slack-invite.emqx.io/badge.svg>)](https://slack-invite.emqx.io)
[![Twitter](https://img.shields.io/badge/Twitter-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
[![Community](https://img.shields.io/badge/Community-EMQ%20X-yellow)](https://askemq.com)
[![YouTube](https://img.shields.io/badge/Subscribe-EMQ%20中文-FF0000?logo=youtube)](https://www.youtube.com/channel/UCir_r04HIsLjf2qqyZ4A8Cg)
[![最棒的物联网 MQTT 开源团队期待您的加入](https://www.emqx.io/static/img/github_readme_cn_bg.png)](https://careers.emqx.cn/)

@ -6,6 +6,7 @@
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx)](https://hub.docker.com/r/emqx/emqx)
[![Slack Invite](<https://slack-invite.emqx.io/badge.svg>)](https://slack-invite.emqx.io)
[![Twitter](https://img.shields.io/badge/Twitter-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
[![YouTube](https://img.shields.io/badge/Subscribe-EMQ-FF0000?logo=youtube)](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
[![The best IoT MQTT open source team looks forward to your joining](https://www.emqx.io/static/img/github_readme_en_bg.png)](https://www.emqx.io/careers)

@ -7,6 +7,7 @@
[![Slack Invite](<https://slack-invite.emqx.io/badge.svg>)](https://slack-invite.emqx.io)
[![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
[![Community](https://img.shields.io/badge/Community-EMQ%20X-yellow?logo=github)](https://github.com/emqx/emqx/discussions)
[![YouTube](https://img.shields.io/badge/Subscribe-EMQ-FF0000?logo=youtube)](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
[![The best IoT MQTT open source team looks forward to your joining](https://www.emqx.io/static/img/github_readme_en_bg.png)](https://www.emqx.io/careers)

@ -6,7 +6,7 @@
[![Docker Pulls](https://img.shields.io/docker/pulls/emqx/emqx)](https://hub.docker.com/r/emqx/emqx)
[![Slack Invite](<https://slack-invite.emqx.io/badge.svg>)](https://slack-invite.emqx.io)
[![Twitter](https://img.shields.io/badge/Follow-EMQ-1DA1F2?logo=twitter)](https://twitter.com/EMQTech)
[![Community](https://img.shields.io/badge/Community-EMQ%20X-yellow?logo=github)](https://github.com/emqx/emqx/discussions)
[![YouTube](https://img.shields.io/badge/Subscribe-EMQ-FF0000?logo=youtube)](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)
[![The best IoT MQTT open source team looks forward to your joining](https://www.emqx.io/static/img/github_readme_en_bg.png)](https://www.emqx.io/careers)

@ -453,6 +453,13 @@ log.file = emqx.log
## Default: No Limit
#log.chars_limit = 8192
## Maximum depth for Erlang term log formatting
## and Erlang process message queue inspection.
##
## Value: Integer or 'unlimited' (without quotes)
## Default: 20
#log.max_depth = 20
## Log formatter
## Value: text | json
#log.formatter = text
@ -598,42 +605,42 @@ log.rotation.count = 5
## Notice: Disable the option in production deployment!
##
## Value: true | false
allow_anonymous = true
acl.allow_anonymous = true
## Allow or deny if no ACL rules matched.
##
## Value: allow | deny
acl_nomatch = allow
acl.acl_nomatch = allow
## Default ACL File.
##
## Value: File Name
acl_file = "{{ platform_etc_dir }}/acl.conf"
acl.acl_file = "{{ platform_etc_dir }}/acl.conf"
## Whether to enable ACL cache.
##
## If enabled, ACLs roles for each client will be cached in the memory
##
## Value: on | off
enable_acl_cache = on
acl.enable_acl_cache = on
## The maximum count of ACL entries can be cached for a client.
##
## Value: Integer greater than 0
## Default: 32
acl_cache_max_size = 32
acl.acl_cache_max_size = 32
## The time after which an ACL cache entry will be deleted
##
## Value: Duration
## Default: 1 minute
acl_cache_ttl = 1m
acl.acl_cache_ttl = 1m
## The action when acl check reject current operation
##
## Value: ignore | disconnect
## Default: ignore
acl_deny_action = ignore
acl.acl_deny_action = ignore
## Specify the global flapping detect policy.
## The value is a string composed of flapping threshold, duration and banned interval.
@ -642,7 +649,7 @@ acl_deny_action = ignore
## 3. banned interval: the banned interval if a flapping is detected.
##
## Value: Integer,Duration,Duration
flapping_detect_policy = "30, 1m, 5m"
acl.flapping_detect_policy = "30, 1m, 5m"
##--------------------------------------------------------------------
## MQTT Protocol
@ -2155,7 +2162,7 @@ listener.wss.external.check_origins = "https://localhost:8084, https://127.0.0.1
## The file to store loaded module names.
##
## Value: File
modules.loaded_file = "{{ platform_data_dir }}/loaded_modules"
module.loaded_file = "{{ platform_data_dir }}/loaded_modules"
##--------------------------------------------------------------------
## Presence Module
@ -2204,8 +2211,8 @@ module.presence.qos = 1
## Rewrite Module
## {rewrite, Topic, Re, Dest}
## module.rewrite.pub.rule.1 = "x/# ^x/y/(.+)$ z/y/$1"
## module.rewrite.sub.rule.1 = "y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2"
## module.rewrite.pub_rule.1 = "x/# ^x/y/(.+)$ z/y/$1"
## module.rewrite.sub_rule.1 = "y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2"
## CONFIG_SECTION_END=modules ==================================================

apps/emqx/rebar.config (new file, 47 lines)

@ -0,0 +1,47 @@
{erl_opts, [warn_unused_vars,warn_shadow_vars,warn_unused_import,
warn_obsolete_guard,compressed]}.
{xref_checks,[undefined_function_calls,undefined_functions,locals_not_used,
deprecated_function_calls,warnings_as_errors,deprecated_functions]}.
%% Deps here may duplicate those in the emqx.git root-level rebar.config,
%% but there must not be any discrepancy.
%% This rebar.config is necessary because the app may be used as a
%% `git_subdir` dependency in other projects.
{deps,
[ {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}}
, {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.2"}}}
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.0"}}}
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.9.0"}}}
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
, {cuttlefish, {git, "https://github.com/emqx/cuttlefish", {tag, "v4.0.1"}}} %% todo delete when plugins use hocon
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.5.0"}}}
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {branch, "2.0.4"}}}
, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.13.0"}}}
]}.
{plugins, [rebar3_proper]}.
{extra_src_dirs, [{"etc", [recursive]}]}.
{profiles, [
{test,
[{deps,
[ meck
, {bbmustache,"1.10.0"}
, {emqx_ct_helpers, {git,"https://github.com/zmstone/emqx-ct-helpers", {branch,"hocon"}}}
, {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.2.3.1"}}}
]},
{extra_src_dirs, [{"test",[recursive]}]}
]}
]}.
{dialyzer, [
{warnings, [unmatched_returns, error_handling, race_conditions]},
{plt_location, "."},
{plt_prefix, "emqx_dialyzer"},
{plt_apps, all_apps},
{plt_extra_apps, [hocon]},
{statistics, true}
]
}.

@ -0,0 +1,11 @@
Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {branch, "0.6.0"}}},
AddBcrypt = fun(C) ->
{deps, Deps0} = lists:keyfind(deps, 1, C),
Deps = [Bcrypt | Deps0],
lists:keystore(deps, 1, C, {deps, Deps})
end,
case os:type() of
{win32, _} -> CONFIG;
_ -> AddBcrypt(CONFIG)
end.

@ -1,7 +1,7 @@
{application, emqx,
[{id, "emqx"},
{description, "EMQ X"},
{vsn, "4.3.2"}, % strict semver, bump manually!
{vsn, "5.0.0"}, % strict semver, bump manually!
{modules, []},
{registered, []},
{applications, [kernel,stdlib,gproc,gen_rpc,esockd,cowboy,sasl,os_mon]},

@ -0,0 +1,69 @@
%% -*- mode: erlang -*-
{VSN,
[{"4.3.2",
[{load_module,emqx_http_lib,brutal_purge,soft_purge,[]},
{load_module,emqx_channel,brutal_purge,soft_purge,[]},
{load_module,emqx_app,brutal_purge,soft_purge,[]},
{load_module,emqx_connection,brutal_purge,soft_purge,[]}]},
{"4.3.1",
[{load_module,emqx_ws_connection,brutal_purge,soft_purge,[]},
{load_module,emqx_connection,brutal_purge,soft_purge,[]},
{load_module,emqx_frame,brutal_purge,soft_purge,[]},
{load_module,emqx_cm,brutal_purge,soft_purge,[]},
{load_module,emqx_congestion,brutal_purge,soft_purge,[]},
{load_module,emqx_node_dump,brutal_purge,soft_purge,[]},
{load_module,emqx_channel,brutal_purge,soft_purge,[]},
{load_module,emqx_app,brutal_purge,soft_purge,[]},
{load_module,emqx_plugins,brutal_purge,soft_purge,[]},
{load_module,emqx_logger_textfmt,brutal_purge,soft_purge,[]},
{load_module,emqx_http_lib,brutal_purge,soft_purge,[]}]},
{"4.3.0",
[{load_module,emqx_logger_jsonfmt,brutal_purge,soft_purge,[]},
{load_module,emqx_ws_connection,brutal_purge,soft_purge,[]},
{load_module,emqx_congestion,brutal_purge,soft_purge,[]},
{load_module,emqx_connection,brutal_purge,soft_purge,[]},
{load_module,emqx_frame,brutal_purge,soft_purge,[]},
{load_module,emqx_trie,brutal_purge,soft_purge,[]},
{load_module,emqx_cm,brutal_purge,soft_purge,[]},
{load_module,emqx_node_dump,brutal_purge,soft_purge,[]},
{load_module,emqx_channel,brutal_purge,soft_purge,[]},
{load_module,emqx_app,brutal_purge,soft_purge,[]},
{load_module,emqx_plugins,brutal_purge,soft_purge,[]},
{load_module,emqx_logger_textfmt,brutal_purge,soft_purge,[]},
{load_module,emqx_metrics,brutal_purge,soft_purge,[]},
{apply,{emqx_metrics,upgrade_retained_delayed_counter_type,[]}},
{load_module,emqx_http_lib,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}],
[{"4.3.2",
[{load_module,emqx_http_lib,brutal_purge,soft_purge,[]},
{load_module,emqx_channel,brutal_purge,soft_purge,[]},
{load_module,emqx_app,brutal_purge,soft_purge,[]},
{load_module,emqx_connection,brutal_purge,soft_purge,[]}]},
{"4.3.1",
[{load_module,emqx_ws_connection,brutal_purge,soft_purge,[]},
{load_module,emqx_connection,brutal_purge,soft_purge,[]},
{load_module,emqx_frame,brutal_purge,soft_purge,[]},
{load_module,emqx_cm,brutal_purge,soft_purge,[]},
{load_module,emqx_congestion,brutal_purge,soft_purge,[]},
{load_module,emqx_node_dump,brutal_purge,soft_purge,[]},
{load_module,emqx_channel,brutal_purge,soft_purge,[]},
{load_module,emqx_app,brutal_purge,soft_purge,[]},
{load_module,emqx_plugins,brutal_purge,soft_purge,[]},
{load_module,emqx_logger_textfmt,brutal_purge,soft_purge,[]},
{load_module,emqx_http_lib,brutal_purge,soft_purge,[]}]},
{"4.3.0",
[{load_module,emqx_logger_jsonfmt,brutal_purge,soft_purge,[]},
{load_module,emqx_ws_connection,brutal_purge,soft_purge,[]},
{load_module,emqx_connection,brutal_purge,soft_purge,[]},
{load_module,emqx_congestion,brutal_purge,soft_purge,[]},
{load_module,emqx_frame,brutal_purge,soft_purge,[]},
{load_module,emqx_trie,brutal_purge,soft_purge,[]},
{load_module,emqx_cm,brutal_purge,soft_purge,[]},
{load_module,emqx_node_dump,brutal_purge,soft_purge,[]},
{load_module,emqx_channel,brutal_purge,soft_purge,[]},
{load_module,emqx_app,brutal_purge,soft_purge,[]},
{load_module,emqx_plugins,brutal_purge,soft_purge,[]},
{load_module,emqx_logger_textfmt,brutal_purge,soft_purge,[]},
{load_module,emqx_metrics,brutal_purge,soft_purge,[]},
{load_module,emqx_http_lib,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}]}.

@ -407,7 +407,8 @@ handle_in(Packet = ?SUBSCRIBE_PACKET(PacketId, Properties, TopicFilters),
case emqx_packet:check(Packet) of
ok ->
TopicFilters0 = parse_topic_filters(TopicFilters),
TupleTopicFilters0 = check_sub_acls(TopicFilters0, Channel),
TopicFilters1 = put_subid_in_subopts(Properties, TopicFilters0),
TupleTopicFilters0 = check_sub_acls(TopicFilters1, Channel),
case emqx_zone:get_env(Zone, acl_deny_action, ignore) =:= disconnect andalso
lists:any(fun({_TopicFilter, ReasonCode}) ->
ReasonCode =:= ?RC_NOT_AUTHORIZED
@ -419,8 +420,7 @@ handle_in(Packet = ?SUBSCRIBE_PACKET(PacketId, Properties, TopicFilters),
_Fun(lists:keyreplace(Key, 1, TupleList, Tuple), More);
_Fun(TupleList, []) -> TupleList
end,
TopicFilters1 = [ TopicFilter || {TopicFilter, 0} <- TupleTopicFilters0],
TopicFilters2 = put_subid_in_subopts(Properties, TopicFilters1),
TopicFilters2 = [ TopicFilter || {TopicFilter, 0} <- TupleTopicFilters0],
TopicFilters3 = run_hooks('client.subscribe',
[ClientInfo, Properties],
TopicFilters2),

@ -41,7 +41,8 @@
, stats/1
]).
-export([ async_set_keepalive/4
-export([ async_set_keepalive/3
, async_set_keepalive/4
, async_set_socket_options/2
]).
@ -200,6 +201,9 @@ stats(#state{transport = Transport,
%%
%% NOTE: This API sets TCP socket options, which has nothing to do with
%% the MQTT layer's keepalive (PINGREQ and PINGRESP).
async_set_keepalive(Idle, Interval, Probes) ->
async_set_keepalive(self(), Idle, Interval, Probes).
async_set_keepalive(Pid, Idle, Interval, Probes) ->
Options = [ {keepalive, true}
, {raw, 6, 4, <<Idle:32/native>>}
@ -345,6 +349,7 @@ ensure_stats_timer(_Timeout, State) -> State.
-compile({inline, [cancel_stats_timer/1]}).
cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) ->
?tp(debug, cancel_stats_timer, #{}),
ok = emqx_misc:cancel_timer(TRef),
State#state{stats_timer = undefined};
cancel_stats_timer(State) -> State.
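
Since async_set_keepalive/3 now defaults the target to the calling connection process, the resulting TCP-level keepalive can be spot-checked from a shell on the broker host; a sketch, assuming Linux and the default MQTT listener port:

    # Established connections with TCP keepalive enabled show a timer:(keepalive,...) entry.
    ss -t -o state established '( sport = :1883 )'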

@ -121,17 +121,8 @@ parse(Bin, {{body, #{hdr := Header,
len := Length,
rest := Body}
}, Options}) when is_binary(Bin) ->
BodyBytes = body_bytes(Body),
{NewBodyPart, Tail} = split(BodyBytes + size(Bin) - Length, Bin),
NewBody = append_body(Body, NewBodyPart),
parse_frame(NewBody, Tail, Header, Length, Options).
%% split given binary with the first N bytes
split(N, Bin) when N =< 0 ->
{Bin, <<>>};
split(N, Bin) when N =< size(Bin) ->
<<H:N/binary, T/binary>> = Bin,
{H, T}.
NewBody = append_body(Body, Bin),
parse_frame(NewBody, Header, Length, Options).
parse_remaining_len(<<>>, Header, Options) ->
{more, {{len, #{hdr => Header, len => {1, 0}}}, Options}};
@ -178,19 +169,15 @@ append_body(H, T) when is_binary(H) ->
append_body(?Q(Bytes, Q), T) ->
?Q(Bytes + iolist_size(T), queue:in(T, Q)).
flatten_body(Body, Tail) when is_binary(Body) -> <<Body/binary, Tail/binary>>;
flatten_body(?Q(_, Q), Tail) -> iolist_to_binary([queue:to_list(Q), Tail]).
flatten_body(Body) when is_binary(Body) -> Body;
flatten_body(?Q(_, Q)) -> iolist_to_binary(queue:to_list(Q)).
parse_frame(Body, Header, 0, Options) ->
{ok, packet(Header), flatten_body(Body), ?none(Options)};
parse_frame(Body, Header, Length, Options) ->
%% already appended
parse_frame(Body, _SplitTail = <<>>, Header, Length, Options).
parse_frame(Body, Tail, Header, 0, Options) ->
{ok, packet(Header), flatten_body(Body, Tail), ?none(Options)};
parse_frame(Body, Tail, Header, Length, Options) ->
case body_bytes(Body) >= Length of
true ->
<<FrameBin:Length/binary, Rest/binary>> = flatten_body(Body, Tail),
<<FrameBin:Length/binary, Rest/binary>> = flatten_body(Body),
case parse_packet(Header, FrameBin, Options) of
{Variable, Payload} ->
{ok, packet(Header, Variable, Payload), Rest, ?none(Options)};
@ -202,7 +189,7 @@ parse_frame(Body, Tail, Header, Length, Options) ->
false ->
{more, {{body, #{hdr => Header,
len => Length,
rest => append_body(Body, Tail)
rest => Body
}}, Options}}
end.

@ -32,6 +32,9 @@
-export([format/2]).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-export([report_cb_1/1, report_cb_2/2, report_cb_crash/2]).
-endif.
@ -220,8 +223,6 @@ json_key(Term) ->
end.
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
no_crash_test_() ->
Opts = [{numtests, 1000}, {to_file, user}],

@ -35,15 +35,9 @@ format(#{msg := Msg0, meta := Meta} = Event, Config) ->
logger_formatter:format(Event#{msg := Msg}, Config).
maybe_merge({report, Report}, Meta) when is_map(Report) ->
{report, maps:merge(rename(Report), filter(Meta))};
{report, maps:merge(Report, filter(Meta))};
maybe_merge(Report, _Meta) ->
Report.
filter(Meta) ->
maps:without(?WITHOUT_MERGE, Meta).
rename(#{'$kind' := Kind} = Meta0) -> % snabbkaffe
Meta = maps:remove('$kind', Meta0),
Meta#{msg => Kind};
rename(Meta) ->
Meta.

@ -52,7 +52,7 @@ censor([Key | _], Val) ->
end.
is_sensitive(Key) when is_atom(Key) ->
is_sensitive(atom_to_binary(Key));
is_sensitive(atom_to_binary(Key, utf8));
is_sensitive(Key) when is_list(Key) ->
try iolist_to_binary(Key) of
Bin ->

@ -34,6 +34,8 @@
, apply_configs/1
]).
-export([funlog/2]).
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
@ -172,7 +174,14 @@ load_ext_plugin(PluginDir) ->
error({plugin_app_file_not_found, AppFile})
end,
ok = load_plugin_app(AppName, Ebin),
ok = load_plugin_conf(AppName, PluginDir).
try
ok = load_plugin_conf(AppName, PluginDir)
catch
throw : {conf_file_not_found, ConfFile} ->
%% this may be a dependency of an external plugin
?LOG(debug, "config_load_error_ignored for app=~p, path=~s", [AppName, ConfFile]),
ok
end.
load_plugin_app(AppName, Ebin) ->
_ = code:add_patha(Ebin),
@ -180,8 +189,8 @@ load_plugin_app(AppName, Ebin) ->
lists:foreach(
fun(BeamFile) ->
Module = list_to_atom(filename:basename(BeamFile, ".beam")),
case code:ensure_loaded(Module) of
{module, Module} -> ok;
case code:load_file(Module) of
{module, _} -> ok;
{error, Reason} -> error({failed_to_load_plugin_beam, BeamFile, Reason})
end
end, Modules),
@ -193,12 +202,12 @@ load_plugin_app(AppName, Ebin) ->
load_plugin_conf(AppName, PluginDir) ->
Priv = filename:join([PluginDir, "priv"]),
Etc = filename:join([PluginDir, "etc"]),
Schema = filelib:wildcard(filename:join([Priv, "*.schema"])),
ConfFile = filename:join([Etc, atom_to_list(AppName) ++ ".conf"]),
Conf = case filelib:is_file(ConfFile) of
true -> cuttlefish_conf:file(ConfFile);
false -> error({conf_file_not_found, ConfFile})
false -> throw({conf_file_not_found, ConfFile})
end,
Schema = filelib:wildcard(filename:join([Priv, "*.schema"])),
?LOG(debug, "loading_extra_plugin_config conf=~s, schema=~s", [ConfFile, Schema]),
AppsEnv = cuttlefish_generator:map(cuttlefish_schema:files(Schema), Conf),
lists:foreach(fun({AppName1, Envs}) ->
@ -260,8 +269,7 @@ do_generate_configs(App) ->
true ->
Schema = cuttlefish_schema:files([SchemaFile]),
Conf = cuttlefish_conf:file(ConfFile),
LogFun = fun(Key, Value) -> ?LOG(info, "~s = ~p", [string:join(Key, "."), Value]) end,
cuttlefish_generator:map(Schema, Conf, undefined, LogFun);
cuttlefish_generator:map(Schema, Conf, undefined, fun ?MODULE:funlog/2);
false ->
error({schema_not_found, SchemaFile})
end.
@ -404,3 +412,7 @@ plugin_type(protocol) -> protocol;
plugin_type(backend) -> backend;
plugin_type(bridge) -> bridge;
plugin_type(_) -> feature.
funlog(Key, Value) ->
?LOG(info, "~s = ~p", [string:join(Key, "."), Value]).

Some files were not shown because too many files have changed in this diff.