mirror of
https://github.com/documize/community.git
synced 2025-07-24 15:49:44 +02:00
Rebuild search index
This commit is contained in:
parent
30315a36c7
commit
df0a13b6ed
18 changed files with 960 additions and 669 deletions
|
@ -23,6 +23,7 @@ import (
|
|||
"github.com/documize/community/domain"
|
||||
"github.com/documize/community/domain/auth"
|
||||
"github.com/documize/community/domain/organization"
|
||||
indexer "github.com/documize/community/domain/search"
|
||||
"github.com/documize/community/model/doc"
|
||||
"github.com/documize/community/model/org"
|
||||
"github.com/documize/community/model/space"
|
||||
|
@ -32,6 +33,7 @@ import (
|
|||
// Handler bundles the dependencies required by the meta endpoints:
// runtime services (logging/config), data store access, and the
// full-text search indexer used by Reindex/rebuildSearchIndex.
type Handler struct {
	Runtime *env.Runtime    // runtime environment (logger, config)
	Store   *domain.Store   // persistence layer
	Indexer indexer.Indexer // search index writer — see rebuildSearchIndex
}
|
||||
|
||||
// Meta provides org meta data based upon request domain (e.g. acme.documize.com).
|
||||
|
@ -176,7 +178,83 @@ func (h *Handler) Sitemap(w http.ResponseWriter, r *http.Request) {
|
|||
response.WriteBytes(w, buffer.Bytes())
|
||||
}
|
||||
|
||||
// Reindex indexes all documents and attachments.
|
||||
func (h *Handler) Reindex(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := domain.GetRequestContext(r)
|
||||
|
||||
if !ctx.Global {
|
||||
response.WriteForbiddenError(w)
|
||||
h.Runtime.Log.Info(fmt.Sprintf("%s attempted search reindex"))
|
||||
return
|
||||
}
|
||||
|
||||
go h.rebuildSearchIndex(ctx)
|
||||
|
||||
response.WriteEmpty(w)
|
||||
}
|
||||
|
||||
// rebuildSearchIndex indexes all documents and attachments.
|
||||
func (h *Handler) rebuildSearchIndex(ctx domain.RequestContext) {
|
||||
method := "meta.rebuildSearchIndex"
|
||||
|
||||
docs, err := h.Store.Meta.GetDocumentsID(ctx)
|
||||
if err != nil {
|
||||
h.Runtime.Log.Error(method, err)
|
||||
return
|
||||
}
|
||||
|
||||
h.Runtime.Log.Info(fmt.Sprintf("Search re-index started for %d documents", len(docs)))
|
||||
|
||||
for i := range docs {
|
||||
d := docs[i]
|
||||
|
||||
pages, err := h.Store.Meta.GetDocumentPages(ctx, d)
|
||||
if err != nil {
|
||||
h.Runtime.Log.Error(method, err)
|
||||
return
|
||||
}
|
||||
|
||||
for j := range pages {
|
||||
h.Indexer.IndexContent(ctx, pages[j])
|
||||
}
|
||||
|
||||
// Log process every N documents.
|
||||
if i%100 == 0 {
|
||||
h.Runtime.Log.Info(fmt.Sprintf("Search re-indexed %d documents...", i))
|
||||
}
|
||||
}
|
||||
|
||||
h.Runtime.Log.Info(fmt.Sprintf("Search re-index finished for %d documents", len(docs)))
|
||||
}
|
||||
|
||||
// SearchStatus returns state of search index
|
||||
func (h *Handler) SearchStatus(w http.ResponseWriter, r *http.Request) {
|
||||
method := "meta.SearchStatus"
|
||||
ctx := domain.GetRequestContext(r)
|
||||
|
||||
if !ctx.Global {
|
||||
response.WriteForbiddenError(w)
|
||||
h.Runtime.Log.Info(fmt.Sprintf("%s attempted get of search status"))
|
||||
return
|
||||
}
|
||||
|
||||
count, err := h.Store.Meta.SearchIndexCount(ctx)
|
||||
if err != nil {
|
||||
response.WriteServerError(w, method, err)
|
||||
h.Runtime.Log.Error(method, err)
|
||||
return
|
||||
}
|
||||
|
||||
var ss = searchStatus{Entries: count}
|
||||
|
||||
response.WriteJSON(w, ss)
|
||||
}
|
||||
|
||||
// sitemapItem is one entry written into the XML sitemap
// (presumably one <url> element per document — rendered by Sitemap,
// whose body is outside this view; confirm against the template there).
type sitemapItem struct {
	URL  string // location of the page
	Date string // associated date string; format set by the producer — TODO confirm
}
|
||||
|
||||
// searchStatus is the JSON payload returned by SearchStatus.
type searchStatus struct {
	Entries int `json:"entries"` // row count reported by Store.Meta.SearchIndexCount
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue