Merge remote-tracking branch 'upstream/release-57' into 20240807-sync-release-branches

Ivan Dyachkov 2024-08-07 09:44:38 +02:00
commit dd686c24a0
38 changed files with 702 additions and 197 deletions

View File

@ -10,7 +10,7 @@ services:
nofile: 1024
image: openldap
#ports:
# - 389:389
# - "389:389"
volumes:
- ./certs/ca.crt:/etc/certs/ca.crt
restart: always

View File

@ -122,9 +122,10 @@ jobs:
run: |
ls -lR _packages/$PROFILE
mv _packages/$PROFILE/*.tar.gz ./
- name: Enable containerd image store on Docker Engine
run: |
echo "$(jq '. += {"features": {"containerd-snapshotter": true}}' /etc/docker/daemon.json)" > daemon.json
echo "$(sudo cat /etc/docker/daemon.json | jq '. += {"features": {"containerd-snapshotter": true}}')" > daemon.json
sudo mv daemon.json /etc/docker/daemon.json
sudo systemctl restart docker

View File

@ -10,8 +10,8 @@ include env.sh
# Dashboard version
# from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.9.1
export EMQX_EE_DASHBOARD_VERSION ?= e1.7.1
export EMQX_DASHBOARD_VERSION ?= v1.9.2
export EMQX_EE_DASHBOARD_VERSION ?= e1.7.2
export EMQX_RELUP ?= true
export EMQX_REL_FORM ?= tgz

View File

@ -683,6 +683,7 @@ end).
-define(FRAME_PARSE_ERROR, frame_parse_error).
-define(FRAME_SERIALIZE_ERROR, frame_serialize_error).
-define(THROW_FRAME_ERROR(Reason), erlang:throw({?FRAME_PARSE_ERROR, Reason})).
-define(THROW_SERIALIZE_ERROR(Reason), erlang:throw({?FRAME_SERIALIZE_ERROR, Reason})).

View File

@ -32,7 +32,7 @@
%% `apps/emqx/src/bpapi/README.md'
%% Opensource edition
-define(EMQX_RELEASE_CE, "5.7.1").
-define(EMQX_RELEASE_CE, "5.7.2").
%% Enterprise edition
-define(EMQX_RELEASE_EE, "5.7.1").
-define(EMQX_RELEASE_EE, "5.7.2").

View File

@ -91,7 +91,7 @@
?_DO_TRACE(Tag, Msg, Meta),
?SLOG(
Level,
(emqx_trace_formatter:format_meta_map(Meta))#{msg => Msg, tag => Tag},
(Meta)#{msg => Msg, tag => Tag},
#{is_trace => false}
)
end).

View File

@ -146,7 +146,9 @@
-type replies() :: emqx_types:packet() | reply() | [reply()].
-define(IS_MQTT_V5, #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5}}).
-define(IS_CONNECTED_OR_REAUTHENTICATING(ConnState),
((ConnState == connected) orelse (ConnState == reauthenticating))
).
-define(IS_COMMON_SESSION_TIMER(N),
((N == retry_delivery) orelse (N == expire_awaiting_rel))
).
@ -337,7 +339,7 @@ take_conn_info_fields(Fields, ClientInfo, ConnInfo) ->
| {shutdown, Reason :: term(), channel()}
| {shutdown, Reason :: term(), replies(), channel()}.
handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = ConnState}) when
ConnState =:= connected orelse ConnState =:= reauthenticating
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel);
handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connecting}) ->
@ -567,29 +569,8 @@ handle_in(
process_disconnect(ReasonCode, Properties, NChannel);
handle_in(?AUTH_PACKET(), Channel) ->
handle_out(disconnect, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, Channel);
handle_in({frame_error, Reason}, Channel = #channel{conn_state = idle}) ->
shutdown(shutdown_count(frame_error, Reason), Channel);
handle_in(
{frame_error, #{cause := frame_too_large} = R}, Channel = #channel{conn_state = connecting}
) ->
shutdown(
shutdown_count(frame_error, R), ?CONNACK_PACKET(?RC_PACKET_TOO_LARGE), Channel
);
handle_in({frame_error, Reason}, Channel = #channel{conn_state = connecting}) ->
shutdown(shutdown_count(frame_error, Reason), ?CONNACK_PACKET(?RC_MALFORMED_PACKET), Channel);
handle_in(
{frame_error, #{cause := frame_too_large}}, Channel = #channel{conn_state = ConnState}
) when
ConnState =:= connected orelse ConnState =:= reauthenticating
->
handle_out(disconnect, {?RC_PACKET_TOO_LARGE, frame_too_large}, Channel);
handle_in({frame_error, Reason}, Channel = #channel{conn_state = ConnState}) when
ConnState =:= connected orelse ConnState =:= reauthenticating
->
handle_out(disconnect, {?RC_MALFORMED_PACKET, Reason}, Channel);
handle_in({frame_error, Reason}, Channel = #channel{conn_state = disconnected}) ->
?SLOG(error, #{msg => "malformed_mqtt_message", reason => Reason}),
{ok, Channel};
handle_in({frame_error, Reason}, Channel) ->
handle_frame_error(Reason, Channel);
handle_in(Packet, Channel) ->
?SLOG(error, #{msg => "disconnecting_due_to_unexpected_message", packet => Packet}),
handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel).
@ -1021,6 +1002,68 @@ not_nacked({deliver, _Topic, Msg}) ->
true
end.
%%--------------------------------------------------------------------
%% Handle Frame Error
%%--------------------------------------------------------------------
handle_frame_error(
Reason = #{cause := frame_too_large},
Channel = #channel{conn_state = ConnState, conninfo = ConnInfo}
) when
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
ShutdownCount = shutdown_count(frame_error, Reason),
case proto_ver(Reason, ConnInfo) of
?MQTT_PROTO_V5 ->
handle_out(disconnect, {?RC_PACKET_TOO_LARGE, frame_too_large}, Channel);
_ ->
shutdown(ShutdownCount, Channel)
end;
%% Only send CONNACK with reason code `frame_too_large` for MQTT-v5.0 when connecting,
%% otherwise DO NOT send any CONNACK or DISCONNECT packet.
handle_frame_error(
Reason,
Channel = #channel{conn_state = ConnState, conninfo = ConnInfo}
) when
is_map(Reason) andalso
(ConnState == idle orelse ConnState == connecting)
->
ShutdownCount = shutdown_count(frame_error, Reason),
ProtoVer = proto_ver(Reason, ConnInfo),
NChannel = Channel#channel{conninfo = ConnInfo#{proto_ver => ProtoVer}},
case ProtoVer of
?MQTT_PROTO_V5 ->
shutdown(ShutdownCount, ?CONNACK_PACKET(?RC_PACKET_TOO_LARGE), NChannel);
_ ->
shutdown(ShutdownCount, NChannel)
end;
handle_frame_error(
Reason,
Channel = #channel{conn_state = connecting}
) ->
shutdown(
shutdown_count(frame_error, Reason),
?CONNACK_PACKET(?RC_MALFORMED_PACKET),
Channel
);
handle_frame_error(
Reason,
Channel = #channel{conn_state = ConnState}
) when
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
handle_out(
disconnect,
{?RC_MALFORMED_PACKET, Reason},
Channel
);
handle_frame_error(
Reason,
Channel = #channel{conn_state = disconnected}
) ->
?SLOG(error, #{msg => "malformed_mqtt_message", reason => Reason}),
{ok, Channel}.
%%--------------------------------------------------------------------
%% Handle outgoing packet
%%--------------------------------------------------------------------
@ -1289,7 +1332,7 @@ handle_info(
session = Session
}
) when
ConnState =:= connected orelse ConnState =:= reauthenticating
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
{Intent, Session1} = session_disconnect(ClientInfo, ConnInfo, Session),
Channel1 = ensure_disconnected(Reason, maybe_publish_will_msg(sock_closed, Channel)),
@ -2636,8 +2679,7 @@ save_alias(outbound, AliasId, Topic, TopicAliases = #{outbound := Aliases}) ->
NAliases = maps:put(Topic, AliasId, Aliases),
TopicAliases#{outbound => NAliases}.
-compile({inline, [reply/2, shutdown/2, shutdown/3, sp/1, flag/1]}).
-compile({inline, [reply/2, shutdown/2, shutdown/3]}).
reply(Reply, Channel) ->
{reply, Reply, Channel}.
@ -2673,13 +2715,13 @@ disconnect_and_shutdown(
?IS_MQTT_V5 =
#channel{conn_state = ConnState}
) when
ConnState =:= connected orelse ConnState =:= reauthenticating
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
NChannel = ensure_disconnected(Reason, Channel),
shutdown(Reason, Reply, ?DISCONNECT_PACKET(reason_code(Reason)), NChannel);
%% mqtt v3/v4 connected sessions
disconnect_and_shutdown(Reason, Reply, Channel = #channel{conn_state = ConnState}) when
ConnState =:= connected orelse ConnState =:= reauthenticating
?IS_CONNECTED_OR_REAUTHENTICATING(ConnState)
->
NChannel = ensure_disconnected(Reason, Channel),
shutdown(Reason, Reply, NChannel);
@ -2722,6 +2764,13 @@ is_durable_session(#channel{session = Session}) ->
false
end.
proto_ver(#{proto_ver := ProtoVer}, _ConnInfo) ->
ProtoVer;
proto_ver(_Reason, #{proto_ver := ProtoVer}) ->
ProtoVer;
proto_ver(_, _) ->
?MQTT_PROTO_V4.
%%--------------------------------------------------------------------
%% For CT tests
%%--------------------------------------------------------------------

View File

@ -783,7 +783,8 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
input_bytes => Data,
parsed_packets => Packets
}),
{[{frame_error, Reason} | Packets], State};
NState = enrich_state(Reason, State),
{[{frame_error, Reason} | Packets], NState};
error:Reason:Stacktrace ->
?LOG(error, #{
at_state => emqx_frame:describe_state(ParseState),
@ -1227,6 +1228,12 @@ inc_counter(Key, Inc) ->
_ = emqx_pd:inc_counter(Key, Inc),
ok.
enrich_state(#{parse_state := NParseState}, State) ->
Serialize = emqx_frame:serialize_opts(NParseState),
State#state{parse_state = NParseState, serialize = Serialize};
enrich_state(_, State) ->
State.
set_tcp_keepalive({quic, _Listener}) ->
ok;
set_tcp_keepalive({Type, Id}) ->

View File

@ -267,28 +267,50 @@ packet(Header, Variable) ->
packet(Header, Variable, Payload) ->
#mqtt_packet{header = Header, variable = Variable, payload = Payload}.
parse_connect(FrameBin, StrictMode) ->
{ProtoName, Rest} = parse_utf8_string_with_cause(FrameBin, StrictMode, invalid_proto_name),
case ProtoName of
<<"MQTT">> ->
ok;
<<"MQIsdp">> ->
ok;
_ ->
%% from spec: the server MAY send disconnect with reason code 0x84
%% we chose to close socket because the client is likely not talking MQTT anyway
?PARSE_ERR(#{
cause => invalid_proto_name,
expected => <<"'MQTT' or 'MQIsdp'">>,
received => ProtoName
})
end,
parse_connect2(ProtoName, Rest, StrictMode).
parse_connect(FrameBin, Options = #{strict_mode := StrictMode}) ->
{ProtoName, Rest0} = parse_utf8_string_with_cause(FrameBin, StrictMode, invalid_proto_name),
%% No need to parse and check proto_ver if proto_name is invalid, check it first
%% And the matching check of `proto_name` and `proto_ver` fields will be done in `emqx_packet:check_proto_ver/2`
_ = validate_proto_name(ProtoName),
{IsBridge, ProtoVer, Rest2} = parse_connect_proto_ver(Rest0),
NOptions = Options#{version => ProtoVer},
try
do_parse_connect(ProtoName, IsBridge, ProtoVer, Rest2, StrictMode)
catch
throw:{?FRAME_PARSE_ERROR, ReasonM} when is_map(ReasonM) ->
?PARSE_ERR(
ReasonM#{
proto_ver => ProtoVer,
proto_name => ProtoName,
parse_state => ?NONE(NOptions)
}
);
throw:{?FRAME_PARSE_ERROR, Reason} ->
?PARSE_ERR(
#{
cause => Reason,
proto_ver => ProtoVer,
proto_name => ProtoName,
parse_state => ?NONE(NOptions)
}
)
end.
parse_connect2(
do_parse_connect(
ProtoName,
<<BridgeTag:4, ProtoVer:4, UsernameFlagB:1, PasswordFlagB:1, WillRetainB:1, WillQoS:2,
WillFlagB:1, CleanStart:1, Reserved:1, KeepAlive:16/big, Rest2/binary>>,
IsBridge,
ProtoVer,
<<
UsernameFlagB:1,
PasswordFlagB:1,
WillRetainB:1,
WillQoS:2,
WillFlagB:1,
CleanStart:1,
Reserved:1,
KeepAlive:16/big,
Rest/binary
>>,
StrictMode
) ->
_ = validate_connect_reserved(Reserved),
@ -303,14 +325,14 @@ parse_connect2(
UsernameFlag = bool(UsernameFlagB),
PasswordFlag = bool(PasswordFlagB)
),
{Properties, Rest3} = parse_properties(Rest2, ProtoVer, StrictMode),
{Properties, Rest3} = parse_properties(Rest, ProtoVer, StrictMode),
{ClientId, Rest4} = parse_utf8_string_with_cause(Rest3, StrictMode, invalid_clientid),
ConnPacket = #mqtt_packet_connect{
proto_name = ProtoName,
proto_ver = ProtoVer,
%% For bridge mode, non-standard implementation
%% Invented by mosquitto, named 'try_private': https://mosquitto.org/man/mosquitto-conf-5.html
is_bridge = (BridgeTag =:= 8),
is_bridge = IsBridge,
clean_start = bool(CleanStart),
will_flag = WillFlag,
will_qos = WillQoS,
@ -343,16 +365,16 @@ parse_connect2(
unexpected_trailing_bytes => size(Rest7)
})
end;
parse_connect2(_ProtoName, Bin, _StrictMode) ->
%% sent less than 32 bytes
do_parse_connect(_ProtoName, _IsBridge, _ProtoVer, Bin, _StrictMode) ->
%% sent less than 24 bytes
?PARSE_ERR(#{cause => malformed_connect, header_bytes => Bin}).
parse_packet(
#mqtt_packet_header{type = ?CONNECT},
FrameBin,
#{strict_mode := StrictMode}
Options
) ->
parse_connect(FrameBin, StrictMode);
parse_connect(FrameBin, Options);
parse_packet(
#mqtt_packet_header{type = ?CONNACK},
<<AckFlags:8, ReasonCode:8, Rest/binary>>,
@ -516,6 +538,12 @@ parse_packet_id(<<PacketId:16/big, Rest/binary>>) ->
parse_packet_id(_) ->
?PARSE_ERR(invalid_packet_id).
parse_connect_proto_ver(<<BridgeTag:4, ProtoVer:4, Rest/binary>>) ->
{_IsBridge = (BridgeTag =:= 8), ProtoVer, Rest};
parse_connect_proto_ver(Bin) ->
%% sent less than 1 byte or empty
?PARSE_ERR(#{cause => malformed_connect, header_bytes => Bin}).
parse_properties(Bin, Ver, _StrictMode) when Ver =/= ?MQTT_PROTO_V5 ->
{#{}, Bin};
%% TODO: version mess?
@ -739,6 +767,8 @@ serialize_fun(#{version := Ver, max_size := MaxSize, strict_mode := StrictMode})
initial_serialize_opts(Opts) ->
maps:merge(?DEFAULT_OPTIONS, Opts).
serialize_opts(?NONE(Options)) ->
maps:merge(?DEFAULT_OPTIONS, Options);
serialize_opts(#mqtt_packet_connect{proto_ver = ProtoVer, properties = ConnProps}) ->
MaxSize = get_property('Maximum-Packet-Size', ConnProps, ?MAX_PACKET_SIZE),
#{version => ProtoVer, max_size => MaxSize, strict_mode => false}.
@ -1157,18 +1187,34 @@ validate_subqos([3 | _]) -> ?PARSE_ERR(bad_subqos);
validate_subqos([_ | T]) -> validate_subqos(T);
validate_subqos([]) -> ok.
%% from spec: the server MAY send disconnect with reason code 0x84
%% we chose to close socket because the client is likely not talking MQTT anyway
validate_proto_name(<<"MQTT">>) ->
ok;
validate_proto_name(<<"MQIsdp">>) ->
ok;
validate_proto_name(ProtoName) ->
?PARSE_ERR(#{
cause => invalid_proto_name,
expected => <<"'MQTT' or 'MQIsdp'">>,
received => ProtoName
}).
%% MQTT-v3.1.1-[MQTT-3.1.2-3], MQTT-v5.0-[MQTT-3.1.2-3]
-compile({inline, [validate_connect_reserved/1]}).
validate_connect_reserved(0) -> ok;
validate_connect_reserved(1) -> ?PARSE_ERR(reserved_connect_flag).
-compile({inline, [validate_connect_will/3]}).
%% MQTT-v3.1.1-[MQTT-3.1.2-13], MQTT-v5.0-[MQTT-3.1.2-11]
validate_connect_will(false, _, WillQos) when WillQos > 0 -> ?PARSE_ERR(invalid_will_qos);
validate_connect_will(false, _, WillQoS) when WillQoS > 0 -> ?PARSE_ERR(invalid_will_qos);
%% MQTT-v3.1.1-[MQTT-3.1.2-14], MQTT-v5.0-[MQTT-3.1.2-12]
validate_connect_will(true, _, WillQoS) when WillQoS > 2 -> ?PARSE_ERR(invalid_will_qos);
%% MQTT-v3.1.1-[MQTT-3.1.2-15], MQTT-v5.0-[MQTT-3.1.2-13]
validate_connect_will(false, WillRetain, _) when WillRetain -> ?PARSE_ERR(invalid_will_retain);
validate_connect_will(_, _, _) -> ok.
-compile({inline, [validate_connect_password_flag/4]}).
%% MQTT-v3.1
%% Username flag and password flag are not strongly related
%% https://public.dhe.ibm.com/software/dw/webservices/ws-mqtt/mqtt-v3r1.html#connect
@ -1183,6 +1229,7 @@ validate_connect_password_flag(true, ?MQTT_PROTO_V5, _, _) ->
validate_connect_password_flag(_, _, _, _) ->
ok.
-compile({inline, [bool/1]}).
bool(0) -> false;
bool(1) -> true.

View File

@ -432,7 +432,7 @@ do_start_listener(Type, Name, Id, #{bind := ListenOn} = Opts) when ?ESOCKD_LISTE
esockd:open(
Id,
ListenOn,
merge_default(esockd_opts(Id, Type, Name, Opts))
merge_default(esockd_opts(Id, Type, Name, Opts, _OldOpts = undefined))
);
%% Start MQTT/WS listener
do_start_listener(Type, Name, Id, Opts) when ?COWBOY_LISTENER(Type) ->
@ -476,7 +476,7 @@ do_update_listener(Type, Name, OldConf, NewConf = #{bind := ListenOn}) when
Id = listener_id(Type, Name),
case maps:get(bind, OldConf) of
ListenOn ->
esockd:set_options({Id, ListenOn}, esockd_opts(Id, Type, Name, NewConf));
esockd:set_options({Id, ListenOn}, esockd_opts(Id, Type, Name, NewConf, OldConf));
_Different ->
%% TODO
%% Again, we're not strictly required to drop live connections in this case.
@ -588,7 +588,7 @@ perform_listener_change(update, {{Type, Name, ConfOld}, {_, _, ConfNew}}) ->
perform_listener_change(stop, {Type, Name, Conf}) ->
stop_listener(Type, Name, Conf).
esockd_opts(ListenerId, Type, Name, Opts0) ->
esockd_opts(ListenerId, Type, Name, Opts0, OldOpts) ->
Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0),
Limiter = limiter(Opts0),
Opts2 =
@ -620,7 +620,7 @@ esockd_opts(ListenerId, Type, Name, Opts0) ->
tcp ->
Opts3#{tcp_options => tcp_opts(Opts0)};
ssl ->
OptsWithCRL = inject_crl_config(Opts0),
OptsWithCRL = inject_crl_config(Opts0, OldOpts),
OptsWithSNI = inject_sni_fun(ListenerId, OptsWithCRL),
OptsWithRootFun = inject_root_fun(OptsWithSNI),
OptsWithVerifyFun = inject_verify_fun(OptsWithRootFun),
@ -996,7 +996,7 @@ inject_sni_fun(_ListenerId, Conf) ->
Conf.
inject_crl_config(
Conf = #{ssl_options := #{enable_crl_check := true} = SSLOpts}
Conf = #{ssl_options := #{enable_crl_check := true} = SSLOpts}, _OldOpts
) ->
HTTPTimeout = emqx_config:get([crl_cache, http_timeout], timer:seconds(15)),
Conf#{
@ -1006,7 +1006,16 @@ inject_crl_config(
crl_cache => {emqx_ssl_crl_cache, {internal, [{http, HTTPTimeout}]}}
}
};
inject_crl_config(Conf) ->
inject_crl_config(#{ssl_options := SSLOpts0} = Conf0, #{} = OldOpts) ->
%% Note: we must set crl options to `undefined' to unset them. Otherwise,
%% `esockd' will retain such options when `esockd:merge_opts/2' is called and the SSL
%% options were previously enabled.
WasEnabled = emqx_utils_maps:deep_get([ssl_options, enable_crl_check], OldOpts, false),
Undefine = fun(Acc, K) -> emqx_utils_maps:put_if(Acc, K, undefined, WasEnabled) end,
SSLOpts1 = Undefine(SSLOpts0, crl_check),
SSLOpts = Undefine(SSLOpts1, crl_cache),
Conf0#{ssl_options := SSLOpts};
inject_crl_config(Conf, undefined = _OldOpts) ->
Conf.
maybe_unregister_ocsp_stapling_refresh(

View File

@ -105,7 +105,7 @@ format(Msg, Meta, Config) ->
maybe_format_msg(undefined, _Meta, _Config) ->
#{};
maybe_format_msg({report, Report0} = Msg, #{report_cb := Cb} = Meta, Config) ->
Report = emqx_logger_textfmt:try_encode_payload(Report0, Config),
Report = emqx_logger_textfmt:try_encode_meta(Report0, Config),
case is_map(Report) andalso Cb =:= ?DEFAULT_FORMATTER of
true ->
%% reporting a map without a customised format function

View File

@ -20,7 +20,7 @@
-export([format/2]).
-export([check_config/1]).
-export([try_format_unicode/1, try_encode_payload/2]).
-export([try_format_unicode/1, try_encode_meta/2]).
%% Used in the other log formatters
-export([evaluate_lazy_values_if_dbg_level/1, evaluate_lazy_values/1]).
@ -111,7 +111,7 @@ is_list_report_acceptable(_) ->
enrich_report(ReportRaw0, Meta, Config) ->
%% clientid and peername always in emqx_conn's process metadata.
%% topic and username can be put in meta using ?SLOG/3, or put in msg's report by ?SLOG/2
ReportRaw = try_encode_payload(ReportRaw0, Config),
ReportRaw = try_encode_meta(ReportRaw0, Config),
Topic =
case maps:get(topic, Meta, undefined) of
undefined -> maps:get(topic, ReportRaw, undefined);
@ -180,9 +180,22 @@ enrich_topic({Fmt, Args}, #{topic := Topic}) when is_list(Fmt) ->
enrich_topic(Msg, _) ->
Msg.
try_encode_payload(#{payload := Payload} = Report, #{payload_encode := Encode}) ->
try_encode_meta(Report, Config) ->
lists:foldl(
fun(Meta, Acc) ->
try_encode_meta(Meta, Acc, Config)
end,
Report,
[payload, packet]
).
try_encode_meta(payload, #{payload := Payload} = Report, #{payload_encode := Encode}) ->
Report#{payload := encode_payload(Payload, Encode)};
try_encode_payload(Report, _Config) ->
try_encode_meta(packet, #{packet := Packet} = Report, #{payload_encode := Encode}) when
is_tuple(Packet)
->
Report#{packet := emqx_packet:format(Packet, Encode)};
try_encode_meta(_, Report, _Config) ->
Report.
encode_payload(Payload, text) ->
@ -190,4 +203,5 @@ encode_payload(Payload, text) ->
encode_payload(_Payload, hidden) ->
"******";
encode_payload(Payload, hex) ->
binary:encode_hex(Payload).
Bin = emqx_utils_conv:bin(Payload),
binary:encode_hex(Bin).

View File

@ -51,7 +51,6 @@
]).
-export([
format/1,
format/2
]).
@ -481,10 +480,6 @@ will_msg(#mqtt_packet_connect{
headers = #{username => Username, properties => Props}
}.
%% @doc Format packet
-spec format(emqx_types:packet()) -> iolist().
format(Packet) -> format(Packet, emqx_trace_handler:payload_encode()).
%% @doc Format packet
-spec format(emqx_types:packet(), hex | text | hidden) -> iolist().
format(#mqtt_packet{header = Header, variable = Variable, payload = Payload}, PayloadEncode) ->

View File

@ -62,7 +62,7 @@
streams := [{pid(), quicer:stream_handle()}],
%% New stream opts
stream_opts := map(),
%% If conneciton is resumed from session ticket
%% If connection is resumed from session ticket
is_resumed => boolean(),
%% mqtt message serializer config
serialize => undefined,
@ -70,8 +70,8 @@
}.
-type cb_ret() :: quicer_lib:cb_ret().
%% @doc Data streams initializions are started in parallel with control streams, data streams are blocked
%% for the activation from control stream after it is accepted as a legit conneciton.
%% @doc Data streams initializations are started in parallel with control streams, data streams are blocked
%% for the activation from control stream after it is accepted as a legit connection.
%% For security, the initial number of allowed data streams from client should be limited by
%% 'peer_bidi_stream_count` & 'peer_unidi_stream_count`
-spec activate_data_streams(pid(), {
@ -80,7 +80,7 @@
activate_data_streams(ConnOwner, {PS, Serialize, Channel}) ->
gen_server:call(ConnOwner, {activate_data_streams, {PS, Serialize, Channel}}, infinity).
%% @doc conneciton owner init callback
%% @doc connection owner init callback
-spec init(map()) -> {ok, cb_state()}.
init(#{stream_opts := SOpts} = S) when is_list(SOpts) ->
init(S#{stream_opts := maps:from_list(SOpts)});

View File

@ -589,6 +589,14 @@ ensure_valid_options(Options, Versions) ->
ensure_valid_options([], _, Acc) ->
lists:reverse(Acc);
ensure_valid_options([{K, undefined} | T], Versions, Acc) when
K =:= crl_check;
K =:= crl_cache
->
%% Note: we must set crl options to `undefined' to unset them. Otherwise,
%% `esockd' will retain such options when `esockd:merge_opts/2' is called and the SSL
%% options were previously enabled.
ensure_valid_options(T, Versions, [{K, undefined} | Acc]);
ensure_valid_options([{_, undefined} | T], Versions, Acc) ->
ensure_valid_options(T, Versions, Acc);
ensure_valid_options([{_, ""} | T], Versions, Acc) ->

View File

@ -17,7 +17,6 @@
-include("emqx_mqtt.hrl").
-export([format/2]).
-export([format_meta_map/1]).
%% logger_formatter:config/0 is not exported.
-type config() :: map().
@ -43,10 +42,6 @@ format(
format(Event, Config) ->
emqx_logger_textfmt:format(Event, Config).
format_meta_map(Meta) ->
Encode = emqx_trace_handler:payload_encode(),
format_meta_map(Meta, Encode).
format_meta_map(Meta, Encode) ->
format_meta_map(Meta, Encode, [
{packet, fun format_packet/2},

View File

@ -436,6 +436,7 @@ websocket_handle({Frame, _}, State) ->
%% TODO: should not close the ws connection
?LOG(error, #{msg => "unexpected_frame", frame => Frame}),
shutdown(unexpected_ws_frame, State).
websocket_info({call, From, Req}, State) ->
handle_call(From, Req, State);
websocket_info({cast, rate_limit}, State) ->
@ -737,7 +738,8 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
input_bytes => Data
}),
FrameError = {frame_error, Reason},
{[{incoming, FrameError} | Packets], State};
NState = enrich_state(Reason, State),
{[{incoming, FrameError} | Packets], NState};
error:Reason:Stacktrace ->
?LOG(error, #{
at_state => emqx_frame:describe_state(ParseState),
@ -830,7 +832,7 @@ serialize_and_inc_stats_fun(#state{serialize = Serialize}) ->
?LOG(warning, #{
msg => "packet_discarded",
reason => "frame_too_large",
packet => emqx_packet:format(Packet)
packet => Packet
}),
ok = emqx_metrics:inc('delivery.dropped.too_large'),
ok = emqx_metrics:inc('delivery.dropped'),
@ -1069,6 +1071,13 @@ check_max_connection(Type, Listener) ->
{denny, Reason}
end
end.
enrich_state(#{parse_state := NParseState}, State) ->
Serialize = emqx_frame:serialize_opts(NParseState),
State#state{parse_state = NParseState, serialize = Serialize};
enrich_state(_, State) ->
State.
%%--------------------------------------------------------------------
%% For CT tests
%%--------------------------------------------------------------------

View File

@ -414,24 +414,32 @@ t_handle_in_auth(_) ->
emqx_channel:handle_in(?AUTH_PACKET(), Channel).
t_handle_in_frame_error(_) ->
IdleChannel = channel(#{conn_state => idle}),
{shutdown, #{shutdown_count := frame_too_large, cause := frame_too_large}, _Chan} =
emqx_channel:handle_in({frame_error, #{cause => frame_too_large}}, IdleChannel),
IdleChannelV5 = channel(#{conn_state => idle}),
%% no CONNACK packet for v4
?assertMatch(
{shutdown, #{shutdown_count := frame_too_large, cause := frame_too_large}, _Chan},
emqx_channel:handle_in(
{frame_error, #{cause => frame_too_large}}, v4(IdleChannelV5)
)
),
ConnectingChan = channel(#{conn_state => connecting}),
ConnackPacket = ?CONNACK_PACKET(?RC_PACKET_TOO_LARGE),
{shutdown,
#{
shutdown_count := frame_too_large,
cause := frame_too_large,
limit := 100,
received := 101
},
ConnackPacket,
_} =
?assertMatch(
{shutdown,
#{
shutdown_count := frame_too_large,
cause := frame_too_large,
limit := 100,
received := 101
},
ConnackPacket, _},
emqx_channel:handle_in(
{frame_error, #{cause => frame_too_large, received => 101, limit => 100}},
ConnectingChan
),
)
),
DisconnectPacket = ?DISCONNECT_PACKET(?RC_PACKET_TOO_LARGE),
ConnectedChan = channel(#{conn_state => connected}),
?assertMatch(

View File

@ -138,13 +138,14 @@ init_per_testcase(t_refresh_config = TestCase, Config) ->
];
init_per_testcase(TestCase, Config) when
TestCase =:= t_update_listener;
TestCase =:= t_update_listener_enable_disable;
TestCase =:= t_validations
->
ct:timetrap({seconds, 30}),
ok = snabbkaffe:start_trace(),
%% when running emqx standalone tests, we can't use those
%% features.
case does_module_exist(emqx_management) of
case does_module_exist(emqx_mgmt) of
true ->
DataDir = ?config(data_dir, Config),
CRLFile = filename:join([DataDir, "intermediate-revoked.crl.pem"]),
@ -165,7 +166,7 @@ init_per_testcase(TestCase, Config) when
{emqx_conf, #{config => #{listeners => #{ssl => #{default => ListenerConf}}}}},
emqx,
emqx_management,
{emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}
emqx_mgmt_api_test_util:emqx_dashboard()
],
#{work_dir => emqx_cth_suite:work_dir(TestCase, Config)}
),
@ -206,6 +207,7 @@ read_crl(Filename) ->
end_per_testcase(TestCase, Config) when
TestCase =:= t_update_listener;
TestCase =:= t_update_listener_enable_disable;
TestCase =:= t_validations
->
Skip = proplists:get_bool(skip_does_not_apply, Config),
@ -1057,3 +1059,104 @@ do_t_validations(_Config) ->
),
ok.
%% Checks that if CRL is ever enabled and then disabled, clients can connect, even if they
%% would otherwise not have their corresponding CRLs cached and fail with `{bad_crls,
%% no_relevant_crls}`.
t_update_listener_enable_disable(Config) ->
case proplists:get_bool(skip_does_not_apply, Config) of
true ->
ct:pal("skipping as this test does not apply in this profile"),
ok;
false ->
do_t_update_listener_enable_disable(Config)
end.
do_t_update_listener_enable_disable(Config) ->
DataDir = ?config(data_dir, Config),
Keyfile = filename:join([DataDir, "server.key.pem"]),
Certfile = filename:join([DataDir, "server.cert.pem"]),
Cacertfile = filename:join([DataDir, "ca-chain.cert.pem"]),
ClientCert = filename:join(DataDir, "client.cert.pem"),
ClientKey = filename:join(DataDir, "client.key.pem"),
ListenerId = "ssl:default",
%% Enable CRL
{ok, {{_, 200, _}, _, ListenerData0}} = get_listener_via_api(ListenerId),
CRLConfig0 =
#{
<<"ssl_options">> =>
#{
<<"keyfile">> => Keyfile,
<<"certfile">> => Certfile,
<<"cacertfile">> => Cacertfile,
<<"enable_crl_check">> => true,
<<"fail_if_no_peer_cert">> => true
}
},
ListenerData1 = emqx_utils_maps:deep_merge(ListenerData0, CRLConfig0),
{ok, {_, _, ListenerData2}} = update_listener_via_api(ListenerId, ListenerData1),
?assertMatch(
#{
<<"ssl_options">> :=
#{
<<"enable_crl_check">> := true,
<<"verify">> := <<"verify_peer">>,
<<"fail_if_no_peer_cert">> := true
}
},
ListenerData2
),
%% Disable CRL
CRLConfig1 =
#{
<<"ssl_options">> =>
#{
<<"keyfile">> => Keyfile,
<<"certfile">> => Certfile,
<<"cacertfile">> => Cacertfile,
<<"enable_crl_check">> => false,
<<"fail_if_no_peer_cert">> => true
}
},
ListenerData3 = emqx_utils_maps:deep_merge(ListenerData2, CRLConfig1),
redbug:start(
[
"esockd_server:get_listener_prop -> return",
"esockd_server:set_listener_prop -> return",
"esockd:merge_opts -> return",
"esockd_listener_sup:set_options -> return",
"emqx_listeners:inject_crl_config -> return"
],
[{msgs, 100}]
),
{ok, {_, _, ListenerData4}} = update_listener_via_api(ListenerId, ListenerData3),
?assertMatch(
#{
<<"ssl_options">> :=
#{
<<"enable_crl_check">> := false,
<<"verify">> := <<"verify_peer">>,
<<"fail_if_no_peer_cert">> := true
}
},
ListenerData4
),
%% Now the client that would be blocked tries to connect and should now be allowed.
{ok, C} = emqtt:start_link([
{ssl, true},
{ssl_opts, [
{certfile, ClientCert},
{keyfile, ClientKey},
{verify, verify_none}
]},
{port, 8883}
]),
?assertMatch({ok, _}, emqtt:connect(C)),
emqtt:stop(C),
?assertNotReceive({http_get, _}),
ok.

View File

@ -63,6 +63,7 @@ groups() ->
t_parse_malformed_properties,
t_malformed_connect_header,
t_malformed_connect_data,
t_malformed_connect_data_proto_ver,
t_reserved_connect_flag,
t_invalid_clientid,
t_undefined_password,
@ -167,6 +168,8 @@ t_parse_malformed_utf8_string(_) ->
ParseState = emqx_frame:initial_parse_state(#{strict_mode => true}),
?ASSERT_FRAME_THROW(utf8_string_invalid, emqx_frame:parse(MalformedPacket, ParseState)).
%% TODO: parse v3 with 0 length clientid
t_serialize_parse_v3_connect(_) ->
Bin =
<<16, 37, 0, 6, 77, 81, 73, 115, 100, 112, 3, 2, 0, 60, 0, 23, 109, 111, 115, 113, 112, 117,
@ -324,7 +327,7 @@ t_serialize_parse_bridge_connect(_) ->
header = #mqtt_packet_header{type = ?CONNECT},
variable = #mqtt_packet_connect{
clientid = <<"C_00:0C:29:2B:77:52">>,
proto_ver = 16#03,
proto_ver = ?MQTT_PROTO_V3,
proto_name = <<"MQIsdp">>,
is_bridge = true,
will_retain = true,
@ -686,15 +689,36 @@ t_malformed_connect_header(_) ->
).
t_malformed_connect_data(_) ->
ProtoNameWithLen = <<0, 6, "MQIsdp">>,
ConnectFlags = <<2#00000000>>,
ClientIdwithLen = <<0, 1, "a">>,
UnexpectedRestBin = <<0, 1, 2>>,
?ASSERT_FRAME_THROW(
#{cause := malformed_connect, unexpected_trailing_bytes := _},
emqx_frame:parse(<<16, 15, 0, 6, 77, 81, 73, 115, 100, 112, 3, 0, 0, 0, 0, 0, 0>>)
#{cause := malformed_connect, unexpected_trailing_bytes := 3},
emqx_frame:parse(
<<16, 18, ProtoNameWithLen/binary, ?MQTT_PROTO_V3, ConnectFlags/binary, 0, 0,
ClientIdwithLen/binary, UnexpectedRestBin/binary>>
)
).
t_malformed_connect_data_proto_ver(_) ->
Proto3NameWithLen = <<0, 6, "MQIsdp">>,
?ASSERT_FRAME_THROW(
#{cause := malformed_connect, header_bytes := <<>>},
emqx_frame:parse(<<16, 8, Proto3NameWithLen/binary>>)
),
ProtoNameWithLen = <<0, 4, "MQTT">>,
?ASSERT_FRAME_THROW(
#{cause := malformed_connect, header_bytes := <<>>},
emqx_frame:parse(<<16, 6, ProtoNameWithLen/binary>>)
).
t_reserved_connect_flag(_) ->
?assertException(
throw,
{frame_parse_error, reserved_connect_flag},
{frame_parse_error, #{
cause := reserved_connect_flag, proto_ver := ?MQTT_PROTO_V3, proto_name := <<"MQIsdp">>
}},
emqx_frame:parse(<<16, 15, 0, 6, 77, 81, 73, 115, 100, 112, 3, 1, 0, 0, 1, 0, 0>>)
).
@ -726,7 +750,7 @@ t_undefined_password(_) ->
},
variable = #mqtt_packet_connect{
proto_name = <<"MQTT">>,
proto_ver = 4,
proto_ver = ?MQTT_PROTO_V4,
is_bridge = false,
clean_start = true,
will_flag = false,
@ -774,7 +798,9 @@ t_invalid_will_retain(_) ->
54, 75, 78, 112, 57, 0, 6, 68, 103, 55, 87, 87, 87>>,
?assertException(
throw,
{frame_parse_error, invalid_will_retain},
{frame_parse_error, #{
cause := invalid_will_retain, proto_ver := ?MQTT_PROTO_V5, proto_name := <<"MQTT">>
}},
emqx_frame:parse(ConnectBin)
),
ok.
@ -796,22 +822,30 @@ t_invalid_will_qos(_) ->
),
?assertException(
throw,
{frame_parse_error, invalid_will_qos},
{frame_parse_error, #{
cause := invalid_will_qos, proto_ver := ?MQTT_PROTO_V5, proto_name := <<"MQTT">>
}},
emqx_frame:parse(ConnectBinFun(Will_F_WillQoS1))
),
?assertException(
throw,
{frame_parse_error, invalid_will_qos},
{frame_parse_error, #{
cause := invalid_will_qos, proto_ver := ?MQTT_PROTO_V5, proto_name := <<"MQTT">>
}},
emqx_frame:parse(ConnectBinFun(Will_F_WillQoS2))
),
?assertException(
throw,
{frame_parse_error, invalid_will_qos},
{frame_parse_error, #{
cause := invalid_will_qos, proto_ver := ?MQTT_PROTO_V5, proto_name := <<"MQTT">>
}},
emqx_frame:parse(ConnectBinFun(Will_F_WillQoS3))
),
?assertException(
throw,
{frame_parse_error, invalid_will_qos},
{frame_parse_error, #{
cause := invalid_will_qos, proto_ver := ?MQTT_PROTO_V5, proto_name := <<"MQTT">>
}},
emqx_frame:parse(ConnectBinFun(Will_T_WillQoS3))
),
ok.

View File

@ -377,42 +377,60 @@ t_will_msg(_) ->
t_format(_) ->
io:format("~ts", [
emqx_packet:format(#mqtt_packet{
header = #mqtt_packet_header{type = ?CONNACK, retain = true, dup = 0},
variable = undefined
})
]),
io:format("~ts", [
emqx_packet:format(#mqtt_packet{
header = #mqtt_packet_header{type = ?CONNACK}, variable = 1, payload = <<"payload">>
})
emqx_packet:format(
#mqtt_packet{
header = #mqtt_packet_header{type = ?CONNACK, retain = true, dup = 0},
variable = undefined
},
text
)
]),
io:format(
"~ts",
[
emqx_packet:format(
#mqtt_packet{
header = #mqtt_packet_header{type = ?CONNACK},
variable = 1,
payload = <<"payload">>
},
text
)
]
),
io:format("~ts", [
emqx_packet:format(
?CONNECT_PACKET(#mqtt_packet_connect{
will_flag = true,
will_retain = true,
will_qos = ?QOS_2,
will_topic = <<"topic">>,
will_payload = <<"payload">>
})
?CONNECT_PACKET(
#mqtt_packet_connect{
will_flag = true,
will_retain = true,
will_qos = ?QOS_2,
will_topic = <<"topic">>,
will_payload = <<"payload">>
}
),
text
)
]),
io:format("~ts", [
emqx_packet:format(?CONNECT_PACKET(#mqtt_packet_connect{password = password}))
emqx_packet:format(?CONNECT_PACKET(#mqtt_packet_connect{password = password}), text)
]),
io:format("~ts", [emqx_packet:format(?CONNACK_PACKET(?CONNACK_SERVER))]),
io:format("~ts", [emqx_packet:format(?PUBLISH_PACKET(?QOS_1, 1))]),
io:format("~ts", [emqx_packet:format(?PUBLISH_PACKET(?QOS_2, <<"topic">>, 10, <<"payload">>))]),
io:format("~ts", [emqx_packet:format(?PUBACK_PACKET(?PUBACK, 98))]),
io:format("~ts", [emqx_packet:format(?PUBREL_PACKET(99))]),
io:format("~ts", [emqx_packet:format(?CONNACK_PACKET(?CONNACK_SERVER), text)]),
io:format("~ts", [emqx_packet:format(?PUBLISH_PACKET(?QOS_1, 1), text)]),
io:format("~ts", [
emqx_packet:format(?SUBSCRIBE_PACKET(15, [{<<"topic">>, ?QOS_0}, {<<"topic1">>, ?QOS_1}]))
emqx_packet:format(?PUBLISH_PACKET(?QOS_2, <<"topic">>, 10, <<"payload">>), text)
]),
io:format("~ts", [emqx_packet:format(?SUBACK_PACKET(40, [?QOS_0, ?QOS_1]))]),
io:format("~ts", [emqx_packet:format(?UNSUBSCRIBE_PACKET(89, [<<"t">>, <<"t2">>]))]),
io:format("~ts", [emqx_packet:format(?UNSUBACK_PACKET(90))]),
io:format("~ts", [emqx_packet:format(?DISCONNECT_PACKET(128))]).
io:format("~ts", [emqx_packet:format(?PUBACK_PACKET(?PUBACK, 98), text)]),
io:format("~ts", [emqx_packet:format(?PUBREL_PACKET(99), text)]),
io:format("~ts", [
emqx_packet:format(
?SUBSCRIBE_PACKET(15, [{<<"topic">>, ?QOS_0}, {<<"topic1">>, ?QOS_1}]), text
)
]),
io:format("~ts", [emqx_packet:format(?SUBACK_PACKET(40, [?QOS_0, ?QOS_1]), text)]),
io:format("~ts", [emqx_packet:format(?UNSUBSCRIBE_PACKET(89, [<<"t">>, <<"t2">>]), text)]),
io:format("~ts", [emqx_packet:format(?UNSUBACK_PACKET(90), text)]),
io:format("~ts", [emqx_packet:format(?DISCONNECT_PACKET(128), text)]).
t_parse_empty_publish(_) ->
%% 52: 0011(type=PUBLISH) 0100 (QoS=2)

View File

@ -125,6 +125,7 @@ create(Type, Name, Conf0, Opts) ->
TypeBin = bin(Type),
ResourceId = resource_id(Type, Name),
Conf = Conf0#{connector_type => TypeBin, connector_name => Name},
_ = emqx_alarm:ensure_deactivated(ResourceId),
{ok, _Data} = emqx_resource:create_local(
ResourceId,
?CONNECTOR_RESOURCE_GROUP,
@ -132,7 +133,6 @@ create(Type, Name, Conf0, Opts) ->
parse_confs(TypeBin, Name, Conf),
parse_opts(Conf, Opts)
),
_ = emqx_alarm:ensure_deactivated(ResourceId),
ok.
update(ConnectorId, {OldConf, Conf}) ->

View File

@ -29,13 +29,9 @@
start(_Type, _Args) ->
ok = mria:wait_for_tables(emqx_mgmt_auth:create_tables()),
case emqx_mgmt_auth:init_bootstrap_file() of
ok ->
emqx_conf:add_handler([api_key], emqx_mgmt_auth),
emqx_mgmt_sup:start_link();
{error, Reason} ->
{error, Reason}
end.
emqx_mgmt_auth:try_init_bootstrap_file(),
emqx_conf:add_handler([api_key], emqx_mgmt_auth),
emqx_mgmt_sup:start_link().
stop(_State) ->
emqx_conf:remove_handler([api_key]),

View File

@ -32,7 +32,7 @@
update/5,
delete/1,
list/0,
init_bootstrap_file/0,
try_init_bootstrap_file/0,
format/1
]).
@ -52,6 +52,7 @@
-ifdef(TEST).
-export([create/7]).
-export([trans/2, force_create_app/1]).
-export([init_bootstrap_file/1]).
-endif.
-define(APP, emqx_app).
@ -114,11 +115,12 @@ post_config_update([api_key], _Req, NewConf, _OldConf, _AppEnvs) ->
end,
ok.
-spec init_bootstrap_file() -> ok | {error, _}.
init_bootstrap_file() ->
-spec try_init_bootstrap_file() -> ok | {error, _}.
try_init_bootstrap_file() ->
File = bootstrap_file(),
?SLOG(debug, #{msg => "init_bootstrap_api_keys_from_file", file => File}),
init_bootstrap_file(File).
_ = init_bootstrap_file(File),
ok.
create(Name, Enable, ExpiredAt, Desc, Role) ->
ApiKey = generate_unique_api_key(Name),
@ -357,10 +359,6 @@ init_bootstrap_file(File) ->
init_bootstrap_file(File, Dev, MP);
{error, Reason0} ->
Reason = emqx_utils:explain_posix(Reason0),
FmtReason = emqx_utils:format(
"load API bootstrap file failed, file:~ts, reason:~ts",
[File, Reason]
),
?SLOG(
error,
@ -371,7 +369,7 @@ init_bootstrap_file(File) ->
}
),
{error, FmtReason}
{error, Reason}
end.
init_bootstrap_file(File, Dev, MP) ->

View File

@ -100,7 +100,7 @@ t_bootstrap_file(_) ->
BadBin = <<"test-1:secret-11\ntest-2 secret-12">>,
ok = file:write_file(File, BadBin),
update_file(File),
?assertMatch({error, #{reason := "invalid_format"}}, emqx_mgmt_auth:init_bootstrap_file()),
?assertMatch({error, #{reason := "invalid_format"}}, emqx_mgmt_auth:init_bootstrap_file(File)),
?assertEqual(ok, auth_authorize(TestPath, <<"test-1">>, <<"secret-11">>)),
?assertMatch({error, _}, auth_authorize(TestPath, <<"test-2">>, <<"secret-12">>)),
update_file(<<>>),
@ -123,7 +123,7 @@ t_bootstrap_file_override(_) ->
ok = file:write_file(File, Bin),
update_file(File),
?assertEqual(ok, emqx_mgmt_auth:init_bootstrap_file()),
?assertEqual(ok, emqx_mgmt_auth:init_bootstrap_file(File)),
MatchFun = fun(ApiKey) -> mnesia:match_object(#?APP{api_key = ApiKey, _ = '_'}) end,
?assertMatch(
@ -156,7 +156,7 @@ t_bootstrap_file_dup_override(_) ->
File = "./bootstrap_api_keys.txt",
ok = file:write_file(File, Bin),
update_file(File),
?assertEqual(ok, emqx_mgmt_auth:init_bootstrap_file()),
?assertEqual(ok, emqx_mgmt_auth:init_bootstrap_file(File)),
SameAppWithDiffName = #?APP{
name = <<"name-1">>,
@ -190,7 +190,7 @@ t_bootstrap_file_dup_override(_) ->
%% Similar to loading bootstrap file at node startup
%% the duplicated apikey in mnesia will be cleaned up
?assertEqual(ok, emqx_mgmt_auth:init_bootstrap_file()),
?assertEqual(ok, emqx_mgmt_auth:init_bootstrap_file(File)),
?assertMatch(
{ok, [
#?APP{

View File

@ -1049,19 +1049,22 @@ do_load_plugin_app(AppName, Ebin) ->
end.
start_app(App) ->
case application:ensure_all_started(App) of
{ok, Started} ->
case run_with_timeout(application, ensure_all_started, [App], 10_000) of
{ok, {ok, Started}} ->
case Started =/= [] of
true -> ?SLOG(debug, #{msg => "started_plugin_apps", apps => Started});
false -> ok
end,
?SLOG(debug, #{msg => "started_plugin_app", app => App}),
ok;
{error, {ErrApp, Reason}} ->
end;
{ok, {error, Reason}} ->
throw(#{
msg => "failed_to_start_app",
app => App,
reason => Reason
});
{error, Reason} ->
throw(#{
msg => "failed_to_start_plugin_app",
app => App,
err_app => ErrApp,
reason => Reason
})
end.
@ -1586,3 +1589,20 @@ bin(B) when is_binary(B) -> B.
wrap_to_list(Path) ->
binary_to_list(iolist_to_binary(Path)).
run_with_timeout(Module, Function, Args, Timeout) ->
Self = self(),
Fun = fun() ->
Result = apply(Module, Function, Args),
Self ! {self(), Result}
end,
Pid = spawn(Fun),
TimerRef = erlang:send_after(Timeout, self(), {timeout, Pid}),
receive
{Pid, Result} ->
_ = erlang:cancel_timer(TimerRef),
{ok, Result};
{timeout, Pid} ->
exit(Pid, kill),
{error, timeout}
end.

View File

@ -34,7 +34,7 @@
type :: serde_type(),
eval_context :: term(),
%% for future use
extra = []
extra = #{}
}).
-type serde() :: #serde{}.

View File

@ -148,14 +148,19 @@ post_config_update(
post_config_update(
[?CONF_KEY_ROOT, schemas, NewName],
_Cmd,
NewSchemas,
%% undefined or OldSchemas
_,
NewSchema,
OldSchema,
_AppEnvs
) ->
case build_serdes([{NewName, NewSchemas}]) of
case OldSchema of
undefined ->
ok;
_ ->
ensure_serde_absent(NewName)
end,
case build_serdes([{NewName, NewSchema}]) of
ok ->
{ok, #{NewName => NewSchemas}};
{ok, #{NewName => NewSchema}};
{error, Reason, SerdesToRollback} ->
lists:foreach(fun ensure_serde_absent/1, SerdesToRollback),
{error, Reason}
@ -176,6 +181,7 @@ post_config_update(?CONF_KEY_PATH, _Cmd, NewConf = #{schemas := NewSchemas}, Old
async_delete_serdes(RemovedNames)
end,
SchemasToBuild = maps:to_list(maps:merge(Changed, Added)),
ok = lists:foreach(fun ensure_serde_absent/1, [N || {N, _} <- SchemasToBuild]),
case build_serdes(SchemasToBuild) of
ok ->
{ok, NewConf};

View File

@ -48,6 +48,10 @@
-type eval_context() :: term().
-type fingerprint() :: binary().
-type protobuf_cache_key() :: {schema_name(), fingerprint()}.
-export_type([serde_type/0]).
%%------------------------------------------------------------------------------
@ -175,11 +179,12 @@ make_serde(avro, Name, Source) ->
eval_context = Store
};
make_serde(protobuf, Name, Source) ->
SerdeMod = make_protobuf_serde_mod(Name, Source),
{CacheKey, SerdeMod} = make_protobuf_serde_mod(Name, Source),
#serde{
name = Name,
type = protobuf,
eval_context = SerdeMod
eval_context = SerdeMod,
extra = #{cache_key => CacheKey}
};
make_serde(json, Name, Source) ->
case json_decode(Source) of
@ -254,8 +259,9 @@ eval_encode(#serde{type = json, name = Name}, [Map]) ->
destroy(#serde{type = avro, name = _Name}) ->
?tp(serde_destroyed, #{type => avro, name => _Name}),
ok;
destroy(#serde{type = protobuf, name = _Name, eval_context = SerdeMod}) ->
destroy(#serde{type = protobuf, name = _Name, eval_context = SerdeMod} = Serde) ->
unload_code(SerdeMod),
destroy_protobuf_code(Serde),
?tp(serde_destroyed, #{type => protobuf, name => _Name}),
ok;
destroy(#serde{type = json, name = Name}) ->
@ -282,13 +288,14 @@ jesse_validate(Name, Map) ->
jesse_name(Str) ->
unicode:characters_to_list(Str).
-spec make_protobuf_serde_mod(schema_name(), schema_source()) -> module().
-spec make_protobuf_serde_mod(schema_name(), schema_source()) -> {protobuf_cache_key(), module()}.
make_protobuf_serde_mod(Name, Source) ->
{SerdeMod0, SerdeModFileName} = protobuf_serde_mod_name(Name),
case lazy_generate_protobuf_code(Name, SerdeMod0, Source) of
{ok, SerdeMod, ModBinary} ->
load_code(SerdeMod, SerdeModFileName, ModBinary),
SerdeMod;
CacheKey = protobuf_cache_key(Name, Source),
{CacheKey, SerdeMod};
{error, #{error := Error, warnings := Warnings}} ->
?SLOG(
warning,
@ -310,6 +317,13 @@ protobuf_serde_mod_name(Name) ->
SerdeModFileName = SerdeModName ++ ".memory",
{SerdeMod, SerdeModFileName}.
%% Fixme: we cannot uncomment the following typespec because Dialyzer complains that
%% `Source' should be `string()' due to `gpb_compile:string/3', but it does work fine with
%% binaries...
%% -spec protobuf_cache_key(schema_name(), schema_source()) -> {schema_name(), fingerprint()}.
protobuf_cache_key(Name, Source) ->
{Name, erlang:md5(Source)}.
-spec lazy_generate_protobuf_code(schema_name(), module(), schema_source()) ->
{ok, module(), binary()} | {error, #{error := term(), warnings := [term()]}}.
lazy_generate_protobuf_code(Name, SerdeMod0, Source) ->
@ -326,9 +340,9 @@ lazy_generate_protobuf_code(Name, SerdeMod0, Source) ->
-spec lazy_generate_protobuf_code_trans(schema_name(), module(), schema_source()) ->
{ok, module(), binary()} | {error, #{error := term(), warnings := [term()]}}.
lazy_generate_protobuf_code_trans(Name, SerdeMod0, Source) ->
Fingerprint = erlang:md5(Source),
_ = mnesia:lock({record, ?PROTOBUF_CACHE_TAB, Fingerprint}, write),
case mnesia:read(?PROTOBUF_CACHE_TAB, Fingerprint) of
CacheKey = protobuf_cache_key(Name, Source),
_ = mnesia:lock({record, ?PROTOBUF_CACHE_TAB, CacheKey}, write),
case mnesia:read(?PROTOBUF_CACHE_TAB, CacheKey) of
[#protobuf_cache{module = SerdeMod, module_binary = ModBinary}] ->
?tp(schema_registry_protobuf_cache_hit, #{name => Name}),
{ok, SerdeMod, ModBinary};
@ -337,7 +351,7 @@ lazy_generate_protobuf_code_trans(Name, SerdeMod0, Source) ->
case generate_protobuf_code(SerdeMod0, Source) of
{ok, SerdeMod, ModBinary} ->
CacheEntry = #protobuf_cache{
fingerprint = Fingerprint,
fingerprint = CacheKey,
module = SerdeMod,
module_binary = ModBinary
},
@ -345,7 +359,7 @@ lazy_generate_protobuf_code_trans(Name, SerdeMod0, Source) ->
{ok, SerdeMod, ModBinary};
{ok, SerdeMod, ModBinary, _Warnings} ->
CacheEntry = #protobuf_cache{
fingerprint = Fingerprint,
fingerprint = CacheKey,
module = SerdeMod,
module_binary = ModBinary
},
@ -390,6 +404,21 @@ unload_code(SerdeMod) ->
_ = code:delete(SerdeMod),
ok.
-spec destroy_protobuf_code(serde()) -> ok.
destroy_protobuf_code(Serde) ->
#serde{extra = #{cache_key := CacheKey}} = Serde,
{atomic, Res} = mria:transaction(
?SCHEMA_REGISTRY_SHARD,
fun destroy_protobuf_code_trans/1,
[CacheKey]
),
?tp("schema_registry_protobuf_cache_destroyed", #{name => Serde#serde.name}),
Res.
-spec destroy_protobuf_code_trans({schema_name(), fingerprint()}) -> ok.
destroy_protobuf_code_trans(CacheKey) ->
mnesia:delete(?PROTOBUF_CACHE_TAB, CacheKey, write).
-spec has_inner_type(serde_type(), eval_context(), [binary()]) ->
boolean().
has_inner_type(protobuf, _SerdeMod, [_, _ | _]) ->

View File

@ -207,6 +207,66 @@ t_protobuf_invalid_schema(_Config) ->
),
ok.
%% Checks that we unload code and clear code generation cache after destroying a protobuf
%% serde.
t_destroy_protobuf(_Config) ->
SerdeName = ?FUNCTION_NAME,
SerdeNameBin = atom_to_binary(SerdeName),
?check_trace(
#{timetrap => 5_000},
begin
Params = schema_params(protobuf),
ok = emqx_schema_registry:add_schema(SerdeName, Params),
{ok, {ok, _}} =
?wait_async_action(
emqx_schema_registry:delete_schema(SerdeName),
#{?snk_kind := serde_destroyed, name := SerdeNameBin}
),
%% Create again to check we don't hit the cache.
ok = emqx_schema_registry:add_schema(SerdeName, Params),
{ok, {ok, _}} =
?wait_async_action(
emqx_schema_registry:delete_schema(SerdeName),
#{?snk_kind := serde_destroyed, name := SerdeNameBin}
),
ok
end,
fun(Trace) ->
?assertMatch([], ?of_kind(schema_registry_protobuf_cache_hit, Trace)),
?assertMatch([_ | _], ?of_kind("schema_registry_protobuf_cache_destroyed", Trace)),
ok
end
),
ok.
%% Checks that we don't leave entries lingering in the protobuf code cache table when
%% updating the source of a serde.
t_update_protobuf_cache(_Config) ->
SerdeName = ?FUNCTION_NAME,
?check_trace(
#{timetrap => 5_000},
begin
#{source := Source0} = Params0 = schema_params(protobuf),
ok = emqx_schema_registry:add_schema(SerdeName, Params0),
%% Now we touch the source so protobuf needs to be recompiled.
Source1 = <<Source0/binary, "\n\n">>,
Params1 = Params0#{source := Source1},
{ok, {ok, _}} =
?wait_async_action(
emqx_schema_registry:add_schema(SerdeName, Params1),
#{?snk_kind := "schema_registry_protobuf_cache_destroyed"}
),
ok
end,
fun(Trace) ->
?assertMatch([], ?of_kind(schema_registry_protobuf_cache_hit, Trace)),
?assertMatch([_, _ | _], ?of_kind(schema_registry_protobuf_cache_miss, Trace)),
?assertMatch([_ | _], ?of_kind("schema_registry_protobuf_cache_destroyed", Trace)),
ok
end
),
ok.
t_json_invalid_schema(_Config) ->
SerdeName = invalid_json,
Params = schema_params(json),

View File

@ -0,0 +1,4 @@
Stop returning `CONNACK` or `DISCONNECT` to clients that sent malformed CONNECT packets.
- Only send `CONNACK` with reason code `frame_too_large` for MQTT-v5.0 when connecting if the protocol version field in CONNECT can be detected.
- Otherwise, **DO NOT** send any CONNACK or DISCONNECT packet.

View File

@ -0,0 +1 @@
Previously, if CRL checks were ever enabled for a listener, later disabling them via the configuration would not actually disable them until the listener restarted. This has been fixed.

View File

@ -0,0 +1,8 @@
Add a startup timeout limit for plugin applications. The timeout is currently 10 seconds.
Starting a bad plugin while EMQX is running now results in a thrown runtime error.
Previously, when EMQX was stopped and restarted, the main startup process could hang because a plugin application failed to start, for example after restarting with:
- A modified config file that enables the bad plugin.
- A newly added plugin with a bad plugin config.

View File

@ -0,0 +1 @@
Fixed an issue where the internal cache for Protobuf schemas in Schema Registry was not properly cleaned up after deleting or updating a schema.

changes/v5.7.2.en.md (new file, 87 lines)
View File

@ -0,0 +1,87 @@
## 5.7.2
*Release Date: 2024-08-06*
### Enhancements
- [#13317](https://github.com/emqx/emqx/pull/13317) Added a new per-authorization source metric type: `ignore`. This metric increments when an authorization source attempts to authorize a request but encounters scenarios where the authorization is not applicable or encounters an error, resulting in an undecidable outcome.
- [#13336](https://github.com/emqx/emqx/pull/13336) Added functionality to initialize authentication data in the built-in database of an empty EMQX node or cluster using a bootstrap file in CSV or JSON format. This feature introduces new configuration entries, `bootstrap_file` and `bootstrap_type`.
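  As a rough illustration, the built-in database authenticator could point at such a bootstrap file. This is a hypothetical sketch: only the option names `bootstrap_file` and `bootstrap_type` come from the entry above; the surrounding structure, the file path, and the value `plain` are assumptions.

  ```hocon
  # Hypothetical sketch; only bootstrap_file and bootstrap_type are named in the entry above.
  authentication = [
    {
      mechanism = password_based
      backend = built_in_database
      bootstrap_file = "/etc/emqx/auth_bootstrap.csv"
      bootstrap_type = plain
    }
  ]
  ```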
- [#13348](https://github.com/emqx/emqx/pull/13348) Added a new field `payload_encode` in the log configuration to determine the format of the payload in the log data.
- [#13436](https://github.com/emqx/emqx/pull/13436) Added the option to add custom request headers to JWKS requests.
- [#13507](https://github.com/emqx/emqx/pull/13507) Introduced a new built-in function `getenv` in the rule engine and variform expression to facilitate access to environment variables. This function adheres to the following constraints:
- The prefix `EMQXVAR_` is added before reading from OS environment variables. For example, `getenv('FOO_BAR')` reads `EMQXVAR_FOO_BAR`.
- These values are immutable once loaded from the OS environment.
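  For instance, a rule SQL statement could enrich messages with an environment value. This is a hypothetical rule: the topic and alias are illustrative, while `getenv` and the `EMQXVAR_` prefix behavior are as described above (the value below would be read from `EMQXVAR_DEPLOYMENT_REGION`).

  ```sql
  SELECT
    getenv('DEPLOYMENT_REGION') as region,
    payload
  FROM
    "t/#"
  ```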
- [#13521](https://github.com/emqx/emqx/pull/13521) Resolved an issue where LDAP query timeouts could cause the underlying connection to become unusable, potentially causing subsequent queries to return outdated results. The fix ensures the system reconnects automatically in case of a timeout.
- [#13528](https://github.com/emqx/emqx/pull/13528) Applied log throttling for the event of unrecoverable errors in data integrations.
- [#13548](https://github.com/emqx/emqx/pull/13548) EMQX can now optionally invoke the `on_config_changed/2` callback function when the plugin configuration is updated via the REST API. This callback function is expected to be exported by the `<PluginName>_app` module.
For example, if the plugin name and version are `my_plugin-1.0.0`, then the callback function is assumed to be `my_plugin_app:on_config_changed/2`.
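  A minimal sketch of such a callback, assuming the two arguments are the old and new plugin configuration (the argument names, their order, and the logging are assumptions; the module naming convention and the function name/arity come from the entry above):

  ```erlang
  -module(my_plugin_app).

  %% Regular application callbacks (start/2, stop/1) are omitted in this sketch.
  -export([on_config_changed/2]).

  %% Assumed to be invoked by EMQX when the plugin configuration is updated via the REST API.
  on_config_changed(OldConf, NewConf) ->
      logger:info("my_plugin config changed: ~p -> ~p", [OldConf, NewConf]),
      ok.
  ```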
- [#13386](https://github.com/emqx/emqx/pull/13386) Added support for initializing a list of banned clients on an empty EMQX node or cluster with a bootstrap file in CSV format. The corresponding config entry to specify the file path is `banned.bootstrap_file`. This file is a CSV file with `,` as its delimiter. The first line of this file must be a header line. All valid headers are listed here:
- as :: required
- who :: required
- by :: optional
- reason :: optional
- at :: optional
- until :: optional
See the [Configuration Manual](https://docs.emqx.com/en/enterprise/v@EE_VERSION@/hocon/) for details on each field.
Each row in the rest of this file must contain the same number of columns as the header line; an optional column can be left empty, in which case its value is `undefined` (see the illustrative example below).
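  A hypothetical `banned.bootstrap_file` could look like the following; the header names come from the list above, while the concrete values (the `as` types, identifiers, and timestamp format) are illustrative assumptions. Optional columns may be left empty, as in the second row.

  ```csv
  as,who,by,reason,at,until
  clientid,malicious-client-1,admin,flooding,2024-08-01T00:00:00+08:00,2024-09-01T00:00:00+08:00
  username,spammer,,,,
  ```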
### Bug Fixes
- [#13222](https://github.com/emqx/emqx/pull/13222) Resolved issues with flags checking and error handling associated with the Will message in the `CONNECT` packet.
For detailed specifications, refer to:
- MQTT-v3.1.1-[MQTT-3.1.2-13], MQTT-v5.0-[MQTT-3.1.2-11]
- MQTT-v3.1.1-[MQTT-3.1.2-14], MQTT-v5.0-[MQTT-3.1.2-12]
- MQTT-v3.1.1-[MQTT-3.1.2-15], MQTT-v5.0-[MQTT-3.1.2-13]
- [#13307](https://github.com/emqx/emqx/pull/13307) Updated `ekka` library to version 0.19.5. This version of `ekka` utilizes `mria` 0.8.8, enhancing auto-heal functionality. Previously, auto-heal worked only when all core nodes were reachable. This update allows auto-heal to be applied once the majority of core nodes are alive. For details, refer to the [Mria PR](https://github.com/emqx/mria/pull/180).
- [#13334](https://github.com/emqx/emqx/pull/13334) Implemented strict mode checking for the `PasswordFlag` in the MQTT v3.1.1 CONNECT packet to align with protocol specifications.
Note: To ensure bug-to-bug compatibility, this check is performed only in strict mode.
- [#13344](https://github.com/emqx/emqx/pull/13344) Resolved an issue where the `POST /clients/:clientid/subscribe/bulk` API would not function correctly if the node receiving the API request did not maintain the connection to the specified `clientid`.
- [#13358](https://github.com/emqx/emqx/pull/13358) Fixed an issue when the `reason` in the `authn_complete_event` event was incorrectly displayed.
- [#13375](https://github.com/emqx/emqx/pull/13375) The value `infinity` has been added as the default value for the listener configuration fields `max_conn_rate`, `messages_rate`, and `bytes_rate`.
- [#13382](https://github.com/emqx/emqx/pull/13382) Updated the `emqtt` library to version 0.4.14, which resolves an issue preventing `emqtt_pool`s from reusing pools that are in an inconsistent state.
- [#13389](https://github.com/emqx/emqx/pull/13389) Fixed an issue where the `Derived Key Length` for `pbkdf2` could be set to a negative integer.
- [#13389](https://github.com/emqx/emqx/pull/13389) Fixed an issue where topics in the authorization rules might be parsed incorrectly.
- [#13393](https://github.com/emqx/emqx/pull/13393) Fixed an issue where plugin applications failed to restart after a node joined a cluster, resulting in hooks not being properly installed and causing inconsistent states.
- [#13398](https://github.com/emqx/emqx/pull/13398) Fixed an issue where ACL rules were incorrectly cleared when reloading the built-in database for authorization using the command line.
- [#13403](https://github.com/emqx/emqx/pull/13403) Addressed a security issue where environment variable configuration overrides were inadvertently logging passwords. This fix ensures that passwords present in environment variables are not logged.
- [#13408](https://github.com/emqx/emqx/pull/13408) Resolved a `function_clause` crash triggered by authentication attempts with invalid salt or password types. This fix enhances error handling to better manage authentication failures involving incorrect salt or password types.
- [#13419](https://github.com/emqx/emqx/pull/13419) Resolved an issue where crash log messages from the `/configs` API were displaying garbled hints. This fix ensures that log messages related to API calls are clear and understandable.
- [#13422](https://github.com/emqx/emqx/pull/13422) Fixed an issue where the option `force_shutdown.max_heap_size` could not be set to 0 to disable this tuning.
- [#13442](https://github.com/emqx/emqx/pull/13442) Fixed an issue where the health check interval configuration for actions/sources was not being respected. Previously, EMQX ignored the specified health check interval for actions and used the connector's interval instead. The fix ensures that EMQX now correctly uses the health check interval configured for actions/sources, allowing for independent and accurate health monitoring frequencies.
- [#13503](https://github.com/emqx/emqx/pull/13503) Fixed an issue where connectors did not adhere to the configured health check interval upon initial startup, requiring an update or restart to apply the correct interval.
- [#13515](https://github.com/emqx/emqx/pull/13515) Fixed an issue where the same client could not subscribe to the same exclusive topic when the node was down for some reason.
- [#13527](https://github.com/emqx/emqx/pull/13527) Fixed an issue in the Rule Engine where executing a SQL test for the Message Publish event would consistently return no results when a `$bridges/...` source was included in the `FROM` clause.
- [#13541](https://github.com/emqx/emqx/pull/13541) Fixed an issue where disabling CRL checks for a listener required a listener restart to take effect.
- [#13552](https://github.com/emqx/emqx/pull/13552) Added a startup timeout limit for EMQX plugins with a default timeout of 10 seconds. Before this update, problematic plugins could cause runtime errors during startup, leading to potential issues where the main startup process might hang when EMQX is stopped and restarted.

View File

@ -14,8 +14,8 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
version: 5.7.1
version: 5.7.2
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application.
appVersion: 5.7.1
appVersion: 5.7.2

View File

@ -14,8 +14,8 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
version: 5.7.1
version: 5.7.2
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application.
appVersion: 5.7.1
appVersion: 5.7.2

View File

@ -131,10 +131,7 @@ def test_docs_link(driver, dashboard_url):
# it's v5.x in the url
emqx_version = 'v' + emqx_version
if prefix == 'e':
docs_base_url = "https://docs.emqx.com/en/enterprise"
else:
docs_base_url = "https://docs.emqx.com/en/emqx"
docs_base_url = "https://docs.emqx.com/en/emqx"
docs_url = f"{docs_base_url}/{emqx_version}"
xpath = f"//div[@id='app']//div[@class='nav-header']//a[@href[starts-with(.,'{docs_url}')]]"