Modify the `giza' library so that sphinx search stats can be retrieved; with those stats available, a more detailed page navigation can be added.

src
Kevin Lynx 11 years ago
parent 237d90f81a
commit aed757f2a8

@ -71,11 +71,12 @@ search_by_mongo(Keyword) ->
Tip ++ Body.
%% Handle an API search request via sphinx.
%% Returns a JSON fragment carrying the keyword, the total number of hits
%% reported by sphinx (TotalFound), the sphinx query cost and the current
%% page, followed by the formatted results.
%% NOTE(review): the diff pre-image lines (old `_Stats' binding and the old
%% hard-coded `0' cost arguments) were interleaved here; only the
%% post-commit version is kept.
search_by_sphinx(Keyword, Page) ->
    {Rets, Stats} = db_frontend:search(Keyword, Page, ?COUNT_PER_PAGE + 1),
    %% Stats = {TotalFound, CostTime, DBTime}; DBTime is not reported by the API
    {TotalFound, CostTime, _} = Stats,
    US = http_common:list_to_utf_binary(Keyword),
    ?LOG_STR(?INFO, ?FMT("API: search /~s/ found ~p, ", [US, length(Rets)])),
    Tip = ?TEXT("{\"keyword\":\"~s\",\"found\":~p,\"cost\":~p,\"page\":~p,",
        [Keyword, TotalFound, CostTime, Page]),
    BodyList = format_search_result(Rets),
    Body = ?TEXT("\"results\":[~s]}", [BodyList]),
    Tip ++ Body.

@ -33,7 +33,7 @@ search(Keyword, Page, Count) ->
mongodb ->
db_store_mongo:search(Conn, Keyword);
sphinx ->
Offset = Page * Count,
Offset = Page * Count - if Page > 0 -> 1; true -> 0 end,
sphinx_search:search(Conn, Keyword, Offset, Count)
end.

@ -11,12 +11,13 @@
real_stats/3,
recent/3,
today_top/3,
do_search/2,
append_page_nav/3,
top/3]).
-define(TEXT(Fmt, Args), lists:flatten(io_lib:format(Fmt, Args))).
-import(torrent_file, [size_string/1]).
-define(CONTENT_TYPE, "Content-Type: text/html\r\n\r\n").
-define(COUNT_PER_PAGE, 10).
-define(PAGE_NAV_MAX, 10).
-include("vlog.hrl").
search(SessionID, Env, Input) ->
@ -109,36 +110,40 @@ search_by_mongo(Keyword) ->
Tip ++ Body.
%% Render sphinx search results as an HTML fragment for `Keyword' at
%% page index `Page' (0-based). One extra row beyond ?COUNT_PER_PAGE is
%% requested so pagination can be computed; only the first
%% ?COUNT_PER_PAGE rows are rendered.
%% NOTE(review): this hunk contained both the pre- and post-commit bodies
%% of search_by_sphinx/2 and append_page_nav/3 interleaved; only the
%% post-commit versions are kept below.
search_by_sphinx(Keyword, Page) ->
    {Rets, Stats} = db_frontend:search(Keyword, Page, ?COUNT_PER_PAGE + 1),
    %% Stats from sphinx_search:search/4 — {TotalFound, CostTime, DBTime};
    %% CostTime is sphinx's reported time (ms, presumably — see the /1000
    %% below), DBTime comes from timer:now_diff/2 and is in microseconds.
    {TotalFound, CostTime, DBTime} = Stats,
    US = http_common:list_to_utf_binary(Keyword),
    ?LOG_STR(?INFO, ?FMT("search /~s/ found ~p, cost ~b sp ms, ~b db ms",
        [US, length(Rets), CostTime, DBTime])),
    ThisPage = lists:sublist(Rets, ?COUNT_PER_PAGE),
    Tip = ?TEXT("<h4>search ~s, ~b results, ~f seconds, db ~f seconds</h4>",
        [Keyword, TotalFound, CostTime / 1000, DBTime / 1000 / 1000]),
    BodyList = format_search_result(ThisPage),
    Body = ?TEXT("<ol>~s</ol>", [lists:flatten(BodyList)]),
    Tip ++ Body ++ append_page_nav(Keyword, Page, TotalFound).

%% Build the page-navigation bar for `Key'.
%% `ThisPage' is the current 0-based page, `Total' the total hit count.
%% A window of up to ?PAGE_NAV_MAX page links is shown, starting roughly
%% one third of the window before the current page, clamped to
%% [0, TotalPage]. When the window does not begin at page 0, a leading
%% "1 ..." shortcut is prepended.
append_page_nav(Key, ThisPage, Total) ->
    TotalPage = Total div ?COUNT_PER_PAGE,
    StartPage = case ThisPage - ?PAGE_NAV_MAX div 3 of
        N when N < 0 -> 0;
        N -> N
    end,
    EndPage = case StartPage + ?PAGE_NAV_MAX of
        E when E > TotalPage -> TotalPage;
        E -> E
    end,
    %% The window is at most ?PAGE_NAV_MAX + 1 entries, so the repeated
    %% `++' in the fold is acceptably cheap.
    Links = lists:foldl(fun(I, Str) ->
        D = I + 1, % pages are displayed 1-based
        Str ++ if I == ThisPage ->
                integer_to_list(D); % current page: plain text, no link
            true ->
                format_page_nav(Key, I, integer_to_list(D))
        end
    end, [], lists:seq(StartPage, EndPage)),
    FirstTip = if StartPage > 0 -> format_page_nav(Key, 0, "1") ++ "...";
        true -> []
    end,
    "<p class=\"page-nav\">" ++ FirstTip ++ Links ++ "</p>".
%% Render one navigation link; `Tip' is the visible anchor text.
%% NOTE(review): the pre-image body line (without the surrounding &nbsp;)
%% was left above the post-image line; only the post-commit body is kept.
format_page_nav(Key, Page, Tip) ->
    ?TEXT("&nbsp;<a href=\"http_handler:search?q=~s&p=~p\">~s</a>&nbsp;", [Key, Page, Tip]).
%% Render every search hit into its HTML fragment (non-detailed form).
format_search_result(RetList) ->
    lists:map(fun(Result) -> format_one_result(Result, false) end, RetList).

@ -5,26 +5,33 @@
%%
-module(sphinx_search).
-include("vlog.hrl").
%% NOTE(review): the pre-image `-export([search/4]).' line was left above
%% the post-image export; the single combined export is kept.
-export([search/4, search_hash/3]).
%% sphinx searchd port and index name used when building giza queries.
-define(PORT, 9312).
-define(INDEX, "xml").
%% Query sphinx for `Key' and resolve the matched hashes into torrents
%% via `Conn'. Returns {Tors, {TotalFound, CostTime, DBUsed}} where
%% TotalFound/CostTime come from sphinx's reply and DBUsed is the
%% microseconds spent loading torrents from the database.
%% Fix: search_hash/3 returns the atom `failed' on a request error, which
%% would previously crash proplists:get_value/2 here — degrade to an
%% empty result with zeroed stats instead (the error is already logged).
search(Conn, Key, Offset, Count) ->
    case search_hash(Key, Offset, Count) of
        failed ->
            {[], {0, 0, 0}};
        Rets ->
            TotalFound = proplists:get_value(total_found, Rets),
            CostTime = proplists:get_value(time, Rets),
            Hashes = proplists:get_value(match, Rets),
            T1 = now(),
            Tors = decode_search_ret(Conn, Hashes),
            DBUsed = timer:now_diff(now(), T1),
            {Tors, {TotalFound, CostTime, DBUsed}}
    end.
%% Send the raw sphinx query for `Key' with paging `Offset'/`Count'.
%% Returns the giza reply proplist on success, or the atom `failed' after
%% logging when the request raises.
%% NOTE(review): pre-image lines of the old search/4 body were interleaved
%% in this hunk; only the post-commit body is kept. Old-style `case catch'
%% is replaced by `try' so the error class/reason is caught explicitly.
search_hash(Key, Offset, Count) ->
    Q1 = giza_query:new(?INDEX, Key),
    Q2 = giza_query:port(Q1, ?PORT),
    Q3 = giza_query:offset(Q2, Offset),
    Q4 = giza_query:limit(Q3, Count),
    try giza_request:send(Q4) of
        {ok, Ret} -> Ret
    catch
        _:R ->
            ?W(?FMT("sphinx search error ~p", [R])),
            failed
    end.
decode_search_ret(Conn, Ret) ->
Hashes = [translate_hash(Item) || Item <- Ret],

@ -1,3 +1,7 @@
## 08.03.2013
* sphinx searching is stable now; you can configure the system to use it
## 07.30.2013
* add sphinx (coreseek, which is based on sphinx) to help searching; in experimental stage

@ -41,6 +41,9 @@ a.download-tip {
span.file-size {
color:#888;
}
p.page-nav {
text-align:center;
}
</style>
</head>
<body>

Loading…
Cancel
Save