Merge pull request #9749 from keynslug/fix/count-respect-matchspec

fix(paging): respect matchspec even if qs is empty when counting
commit bb3dceb456
Zaiming (Stone) Shi, 2023-01-13 14:29:03 +01:00 (committed by GitHub)
4 changed files with 24 additions and 11 deletions
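
For context: the removed clause below short-circuited counting to `ets:info(Tab, size)` whenever the query string was empty, ignoring any match spec that still restricted the result set; that is how the `count` in the response metadata could end up much larger than the data actually returned. Below is a minimal, standalone sketch (not EMQX code; the `{Kind, Name}` table layout and the match spec are illustrative assumptions) contrasting the two counting strategies:

```erlang
%% Standalone sketch: why ets:info(Tab, size) over-counts when a match spec
%% restricts the rows. The {Kind, Name} layout and the match spec are
%% illustrative assumptions, not the actual EMQX table schema.
-module(count_sketch).
-export([demo/0]).

demo() ->
    Tab = ets:new(rules, [bag]),
    true = ets:insert(Tab, [
        {username, <<"u1">>},
        {username, <<"u2">>},
        {clientid, <<"c1">>}
    ]),
    %% Match spec selecting only the `username' records.
    Ms = [{{username, '_'}, [], [true]}],
    3 = ets:info(Tab, size),       %% whole-table size: ignores the match spec
    2 = ets:select_count(Tab, Ms), %% respects the match spec
    ok.
```

The second diff below reorders `counting_total_fun/1` so that the match-spec clause is tried first and the whole-table shortcut becomes the last resort.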

View File

@@ -109,14 +109,17 @@ t_api(_) ->
             ]),
             []
         ),
-    #{
-        <<"data">> := [],
-        <<"meta">> := #{
-            <<"limit">> := 20,
-            <<"page">> := 1,
-            <<"hasnext">> := false
-        }
-    } = jsx:decode(Request1_1),
+    ?assertEqual(
+        #{
+            <<"data">> => [],
+            <<"meta">> => #{
+                <<"limit">> => 20,
+                <<"page">> => 1,
+                <<"hasnext">> => false
+            }
+        },
+        jsx:decode(Request1_1)
+    ),
     {ok, 200, Request2} =
         request(
@@ -160,6 +163,14 @@ t_api(_) ->
             []
         ),

+    % ensure that the db contains a mix of records
+    {ok, 204, _} =
+        request(
+            post,
+            uri(["authorization", "sources", "built_in_database", "username"]),
+            [?USERNAME_RULES_EXAMPLE]
+        ),
     {ok, 204, _} =
         request(
             post,

View File

@@ -384,8 +384,6 @@ apply_total_query(QueryState = #{table := Tab}) ->
             Fun(Tab)
     end.

-counting_total_fun(_QueryState = #{qs := {[], []}}) ->
-    fun(Tab) -> ets:info(Tab, size) end;
 counting_total_fun(_QueryState = #{match_spec := Ms, fuzzy_fun := undefined}) ->
     %% XXX: Calculating the total number of data that match a certain
     %% condition under a large table is very expensive because the
@@ -400,7 +398,9 @@ counting_total_fun(_QueryState = #{match_spec := Ms, fuzzy_fun := undefined}) ->
 counting_total_fun(_QueryState = #{fuzzy_fun := FuzzyFun}) when FuzzyFun =/= undefined ->
     %% XXX: Calculating the total number for a fuzzy searching is very very expensive
     %% so it is not supported now
-    false.
+    false;
+counting_total_fun(_QueryState = #{qs := {[], []}}) ->
+    fun(Tab) -> ets:info(Tab, size) end.

 %% ResultAcc :: #{count := integer(),
 %%               cursor := integer(),
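
The fix is purely a clause reordering: Erlang tries function clauses top to bottom, and a map pattern matches as soon as the listed keys are present in the argument, regardless of any other keys. With the old order, a `QueryState` that had an empty `qs` but also a restrictive `match_spec` hit the `ets:info(Tab, size)` shortcut and reported the whole table size. A self-contained toy (hypothetical names, not the EMQX module) showing that matching behaviour:

```erlang
%% Toy illustration of top-to-bottom clause matching with map patterns.
%% pick_old/1 models the pre-fix clause order, pick_new/1 the post-fix order.
-module(clause_order_sketch).
-export([demo/0]).

pick_old(#{qs := {[], []}}) -> whole_table_size;
pick_old(#{match_spec := _Ms, fuzzy_fun := undefined}) -> select_count;
pick_old(#{fuzzy_fun := F}) when F =/= undefined -> not_supported.

pick_new(#{match_spec := _Ms, fuzzy_fun := undefined}) -> select_count;
pick_new(#{fuzzy_fun := F}) when F =/= undefined -> not_supported;
pick_new(#{qs := {[], []}}) -> whole_table_size.

demo() ->
    %% Empty query string, but a match spec is still in play.
    QueryState = #{qs => {[], []}, match_spec => [{'_', [], [true]}], fuzzy_fun => undefined},
    whole_table_size = pick_old(QueryState), %% old order: shortcut wins, count is inflated
    select_count = pick_new(QueryState),     %% new order: the match spec is respected
    ok.
```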

View File

@@ -0,0 +1 @@
+In some cases, search APIs could respond with an incorrect `count` value in the metadata, usually much bigger than expected. This is now fixed.

View File

@@ -0,0 +1 @@
+In some cases, search APIs could respond with an incorrect `count` value in the metadata, usually much bigger than expected. This is now fixed.