mirror of https://github.com/documize/community.git
synced 2025-07-23 07:09:43 +02:00

add entries to search index

This commit is contained in:
parent 2c5f73a486
commit 1b96c9093f

15 changed files with 244 additions and 417 deletions
@@ -26,6 +26,7 @@ import (
 	"github.com/documize/community/core/uniqueid"
 	"github.com/documize/community/domain"
 	"github.com/documize/community/domain/document"
+	indexer "github.com/documize/community/domain/search"
 	"github.com/documize/community/model/attachment"
 	"github.com/documize/community/model/audit"
 	uuid "github.com/nu7hatch/gouuid"
@@ -35,6 +36,7 @@ import (
 type Handler struct {
 	Runtime *env.Runtime
 	Store   *domain.Store
+	Indexer indexer.Indexer
 }

 // Download is the end-point that responds to a request for a particular attachment
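This pattern repeats through the commit: each HTTP handler gains an Indexer field next to its Runtime and Store, and the routing hunk at the bottom injects one shared instance. A minimal wiring sketch, assuming the import paths from the hunk above; the local Handler copy and the newAttachmentHandler helper are illustrative only, not part of the commit:

package main

import (
	"github.com/documize/community/core/env"
	"github.com/documize/community/domain"
	indexer "github.com/documize/community/domain/search"
)

// Handler mirrors the struct in the hunk above; in the real code it lives
// in the attachment handler package.
type Handler struct {
	Runtime *env.Runtime
	Store   *domain.Store
	Indexer indexer.Indexer
}

// newAttachmentHandler is a hypothetical helper showing the wiring the
// commit performs in RegisterEndpoints: one Indexer shared by all handlers.
func newAttachmentHandler(rt *env.Runtime, s *domain.Store) Handler {
	return Handler{Runtime: rt, Store: s, Indexer: indexer.NewIndexer(rt, s)}
}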
@@ -155,6 +157,10 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {

 	ctx.Transaction.Commit()

+	a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
+	d, _ := h.Store.Document.Get(ctx, documentID)
+	go h.Indexer.IndexDocument(ctx, d, a)
+
 	response.WriteEmpty(w)
 }
@@ -226,5 +232,9 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {

 	ctx.Transaction.Commit()

+	all, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
+	d, _ := h.Store.Document.Get(ctx, documentID)
+	go h.Indexer.IndexDocument(ctx, d, all)
+
 	response.WriteEmpty(w)
 }
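Both attachment endpoints now share one post-commit step: re-read the document and its attachments, then rebuild that document's index entries off the request path. A sketch of that step factored into a helper; the helper name is invented, but the store and indexer calls are exactly those in the two hunks above:

// reindex is a hypothetical helper capturing the pattern used by Add and
// Delete above. It runs after ctx.Transaction.Commit(), which matters:
// IndexDocument opens its own transaction (see the indexer changes below),
// so it must not run inside the request's transaction.
func (h *Handler) reindex(ctx domain.RequestContext, documentID string) {
	a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
	d, _ := h.Store.Document.Get(ctx, documentID)
	go h.Indexer.IndexDocument(ctx, d, a) // fire-and-forget; the indexer logs its own errors
}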
@@ -146,6 +146,9 @@ func (h *Handler) convert(w http.ResponseWriter, r *http.Request, job, folderID
 		return
 	}

+	a, _ := h.Store.Attachment.GetAttachments(ctx, nd.RefID)
+	go h.Indexer.IndexDocument(ctx, nd, a)
+
 	response.WriteJSON(w, nd)
 }
@@ -17,12 +17,14 @@ import (
 	api "github.com/documize/community/core/convapi"
 	"github.com/documize/community/core/env"
 	"github.com/documize/community/domain"
+	indexer "github.com/documize/community/domain/search"
 )

 // Handler contains the runtime information such as logging and database.
 type Handler struct {
 	Runtime *env.Runtime
 	Store   *domain.Store
+	Indexer indexer.Indexer
 }

 // UploadConvert is an endpoint to both upload and convert a document
@@ -252,7 +252,8 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {

 	ctx.Transaction.Commit()

-	h.Indexer.UpdateDocument(ctx, d)
+	a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
+	go h.Indexer.IndexDocument(ctx, d, a)

 	response.WriteEmpty(w)
 }
@@ -316,7 +317,7 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {

 	ctx.Transaction.Commit()

-	h.Indexer.DeleteDocument(ctx, documentID)
+	go h.Indexer.DeleteDocument(ctx, documentID)

 	response.WriteEmpty(w)
 }
@@ -143,8 +143,7 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
 	ctx.Transaction.Commit()

 	np, _ := h.Store.Page.Get(ctx, pageID)
-
-	h.Indexer.Add(ctx, np, pageID)
+	go h.Indexer.IndexContent(ctx, np)

 	response.WriteJSON(w, np)
 }
@@ -338,7 +337,7 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {

 	h.Store.Audit.Record(ctx, audit.EventTypeSectionDelete)

-	h.Indexer.Delete(ctx, documentID, pageID)
+	go h.Indexer.DeleteContent(ctx, pageID)

 	h.Store.Link.DeleteSourcePageLinks(ctx, pageID)
@@ -421,7 +420,7 @@ func (h *Handler) DeletePages(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	h.Indexer.Delete(ctx, documentID, page.PageID)
+	go h.Indexer.DeleteContent(ctx, page.PageID)

 	h.Store.Link.DeleteSourcePageLinks(ctx, page.PageID)
@@ -590,7 +589,7 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {

 	ctx.Transaction.Commit()

-	h.Indexer.Update(ctx, model.Page)
+	go h.Indexer.IndexContent(ctx, model.Page)

 	updatedPage, err := h.Store.Page.Get(ctx, pageID)
@@ -649,8 +648,6 @@ func (h *Handler) ChangePageSequence(w http.ResponseWriter, r *http.Request) {
 			h.Runtime.Log.Error(method, err)
 			return
 		}
-
-		h.Indexer.UpdateSequence(ctx, documentID, p.PageID, p.Sequence)
 	}

 	h.Store.Audit.Record(ctx, audit.EventTypeSectionResequence)
@@ -712,8 +709,6 @@ func (h *Handler) ChangePageLevel(w http.ResponseWriter, r *http.Request) {
 			h.Runtime.Log.Error(method, err)
 			return
 		}
-
-		h.Indexer.UpdateLevel(ctx, documentID, p.PageID, p.Level)
 	}

 	h.Store.Audit.Record(ctx, audit.EventTypeSectionResequence)
@@ -15,14 +15,13 @@ import (
 	"database/sql"
 	"fmt"
 	"strings"
-	"time"

 	"github.com/documize/community/core/env"
 	"github.com/documize/community/core/streamutil"
 	"github.com/documize/community/core/stringutil"
 	"github.com/documize/community/domain"
-	"github.com/documize/community/domain/store/mysql"
-	"github.com/documize/community/model"
+	"github.com/documize/community/model/attachment"
+	"github.com/documize/community/model/doc"
 	"github.com/documize/community/model/page"
 	"github.com/documize/community/model/search"
 	"github.com/jmoiron/sqlx"
@@ -34,210 +33,160 @@ type Scope struct {
 	Runtime *env.Runtime
 }

-// Add search entry (legacy name: searchAdd).
-func (s Scope) Add(ctx domain.RequestContext, page page.Page) (err error) {
-	id := page.RefID
-
-	// translate the html into text for the search
-	nonHTML, err := stringutil.HTML(page.Body).Text(false)
-	if err != nil {
-		errors.Wrap(err, "search decode body")
-		return
-	}
-
-	// insert into the search table, getting the document title along the way
-	var stmt *sqlx.Stmt
-	stmt, err = ctx.Transaction.Preparex(
-		"INSERT INTO search (id, orgid, documentid, level, sequence, documenttitle, slug, pagetitle, body, created, revised) " +
-			" SELECT page.refid,page.orgid,document.refid,page.level,page.sequence,document.title,document.slug,page.title,?,page.created,page.revised " +
-			" FROM document,page WHERE page.refid=? AND document.refid=page.documentid")
-	defer streamutil.Close(stmt)
-
-	if err != nil {
-		err = errors.Wrap(err, "prepare search insert")
-		return
-	}
-
-	_, err = stmt.Exec(nonHTML, id)
-	if err != nil {
-		err = errors.Wrap(err, "execute search insert")
-		return
-	}
-
-	return nil
-}
-
-// Update search entry (legacy name: searchUpdate).
-func (s Scope) Update(ctx domain.RequestContext, page page.Page) (err error) {
-	// translate the html into text for the search
-	nonHTML, err := stringutil.HTML(page.Body).Text(false)
-	if err != nil {
-		err = errors.Wrap(err, "search decode body")
-		return
-	}
-
-	su, err := ctx.Transaction.Preparex("UPDATE search SET pagetitle=?,body=?,sequence=?,level=?,revised=? WHERE id=?")
-	defer streamutil.Close(su)
-
-	if err != nil {
-		err = errors.Wrap(err, "prepare search update")
-		return err
-	}
-
-	_, err = su.Exec(page.Title, nonHTML, page.Sequence, page.Level, page.Revised, page.RefID)
-	if err != nil {
-		err = errors.Wrap(err, "execute search update")
-		return
-	}
-
-	return nil
-}
-
-// UpdateDocument search entries for document (legacy name: searchUpdateDocument).
-func (s Scope) UpdateDocument(ctx domain.RequestContext, page page.Page) (err error) {
-	stmt, err := ctx.Transaction.Preparex("UPDATE search SET documenttitle=?, slug=?, revised=? WHERE documentid=?")
-	defer streamutil.Close(stmt)
-
-	if err != nil {
-		err = errors.Wrap(err, "prepare search document update")
-		return err
-	}
-
-	_, err = stmt.Exec(page.Title, page.Body, time.Now().UTC(), page.DocumentID)
-	if err != nil {
-		err = errors.Wrap(err, "execute search document update")
-		return err
-	}
-
-	return nil
-}
-
-// DeleteDocument removes document search entries (legacy name: searchDeleteDocument)
-func (s Scope) DeleteDocument(ctx domain.RequestContext, page page.Page) (err error) {
-	var bm = mysql.BaseQuery{}
-
-	_, err = bm.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE from search WHERE documentid='%s'", page.DocumentID))
-	if err != nil {
-		err = errors.Wrap(err, "delete document search entries")
-	}
-
-	return nil
-}
-
-// Rebuild ... (legacy name: searchRebuild)
-func (s Scope) Rebuild(ctx domain.RequestContext, p page.Page) (err error) {
-	var bm = mysql.BaseQuery{}
-
-	_, err = bm.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE from search WHERE documentid='%s'", p.DocumentID))
-	if err != nil {
-		err = errors.Wrap(err, err.Error())
-		return err
-	}
-
-	var pages []struct{ ID string }
-
-	stmt2, err := ctx.Transaction.Preparex("SELECT refid as id FROM page WHERE documentid=? ")
-	defer streamutil.Close(stmt2)
-
-	if err != nil {
-		err = errors.Wrap(err, err.Error())
-		return err
-	}
-
-	err = stmt2.Select(&pages, p.DocumentID)
-	if err != nil {
-		err = errors.Wrap(err, err.Error())
-		return err
-	}
-
-	if len(pages) > 0 {
-		for _, pg := range pages {
-			err = s.Add(ctx, page.Page{BaseEntity: model.BaseEntity{RefID: pg.ID}})
-			if err != nil {
-				err = errors.Wrap(err, err.Error())
-				return err
-			}
-		}
-
-		// rebuild doc-level tags & excerpts
-		// get the 0'th page data and rewrite it
-		target := page.Page{}
-
-		stmt1, err := ctx.Transaction.Preparex("SELECT * FROM page WHERE refid=?")
-		defer streamutil.Close(stmt1)
-
-		if err != nil {
-			err = errors.Wrap(err, err.Error())
-			return err
-		}
-
-		err = stmt1.Get(&target, pages[0].ID)
-		if err != nil {
-			err = errors.Wrap(err, err.Error())
-			return err
-		}
-
-		err = s.Update(ctx, target) // to rebuild the document-level tags + excerpt
-		if err != nil {
-			err = errors.Wrap(err, err.Error())
-			return err
-		}
-	}
-
-	return
-}
-
-// UpdateSequence ... (legacy name: searchUpdateSequence)
-func (s Scope) UpdateSequence(ctx domain.RequestContext, page page.Page) (err error) {
-	supdate, err := ctx.Transaction.Preparex("UPDATE search SET sequence=?,revised=? WHERE id=?")
-	defer streamutil.Close(supdate)
-
-	if err != nil {
-		err = errors.Wrap(err, "prepare search update sequence")
-		return err
-	}
-
-	_, err = supdate.Exec(page.Sequence, time.Now().UTC(), page.RefID)
-	if err != nil {
-		err = errors.Wrap(err, "execute search update sequence")
-		return
-	}
-
-	return
-}
-
-// UpdateLevel ... (legacy name: searchUpdateLevel)
-func (s Scope) UpdateLevel(ctx domain.RequestContext, page page.Page) (err error) {
-	pageID := page.RefID
-	level := page.Level
-
-	supdate, err := ctx.Transaction.Preparex("UPDATE search SET level=?,revised=? WHERE id=?")
-	defer streamutil.Close(supdate)
-
-	if err != nil {
-		err = errors.Wrap(err, "prepare search update level")
-		return err
-	}
-
-	_, err = supdate.Exec(level, time.Now().UTC(), pageID)
-	if err != nil {
-		err = errors.Wrap(err, "execute search update level")
-		return
-	}
-
-	return
-}
-
-// Delete ... (legacy name: searchDelete).
-func (s Scope) Delete(ctx domain.RequestContext, page page.Page) (err error) {
-	var bm = mysql.BaseQuery{}
-	_, err = bm.DeleteConstrainedWithID(ctx.Transaction, "search", ctx.OrgID, page.RefID)
-
-	return
-}
+// IndexDocument adds search index entries for document inserting title, tags and attachments as
+// searchable items. Any existing document entries are removed.
+func (s Scope) IndexDocument(ctx domain.RequestContext, doc doc.Document, a []attachment.Attachment) (err error) {
+	// remove previous search entries
+	var stmt1 *sqlx.Stmt
+	stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=? AND (itemtype='doc' OR itemtype='file' OR itemtype='tag')")
+	defer streamutil.Close(stmt1)
+	if err != nil {
+		err = errors.Wrap(err, "prepare delete document index entries")
+		return
+	}
+
+	_, err = stmt1.Exec(ctx.OrgID, doc.RefID)
+	if err != nil {
+		err = errors.Wrap(err, "execute delete document index entries")
+		return
+	}
+
+	// insert doc title
+	var stmt2 *sqlx.Stmt
+	stmt2, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
+	defer streamutil.Close(stmt2)
+	if err != nil {
+		err = errors.Wrap(err, "prepare insert document title entry")
+		return
+	}
+
+	_, err = stmt2.Exec(ctx.OrgID, doc.RefID, "", "doc", doc.Title)
+	if err != nil {
+		err = errors.Wrap(err, "execute insert document title entry")
+		return
+	}
+
+	// insert doc tags
+	tags := strings.Split(doc.Tags, "#")
+	for _, t := range tags {
+		if len(t) == 0 {
+			continue
+		}
+
+		var stmt3 *sqlx.Stmt
+		stmt3, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
+		defer streamutil.Close(stmt3)
+		if err != nil {
+			err = errors.Wrap(err, "prepare insert document tag entry")
+			return
+		}
+
+		_, err = stmt3.Exec(ctx.OrgID, doc.RefID, "", "tag", t)
+		if err != nil {
+			err = errors.Wrap(err, "execute insert document tag entry")
+			return
+		}
+	}
+
+	for _, file := range a {
+		var stmt4 *sqlx.Stmt
+		stmt4, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
+		defer streamutil.Close(stmt4)
+		if err != nil {
+			err = errors.Wrap(err, "prepare insert document file entry")
+			return
+		}
+
+		_, err = stmt4.Exec(ctx.OrgID, doc.RefID, file.RefID, "file", file.Filename)
+		if err != nil {
+			err = errors.Wrap(err, "execute insert document file entry")
+			return
+		}
+	}
+
+	return nil
+}
+
+// DeleteDocument removes all search entries for document.
+func (s Scope) DeleteDocument(ctx domain.RequestContext, ID string) (err error) {
+	// remove all search entries
+	var stmt1 *sqlx.Stmt
+	stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=?")
+	defer streamutil.Close(stmt1)
+	if err != nil {
+		err = errors.Wrap(err, "prepare delete document entries")
+		return
+	}
+
+	_, err = stmt1.Exec(ctx.OrgID, ID)
+	if err != nil {
+		err = errors.Wrap(err, "execute delete document entries")
+		return
+	}
+
+	return
+}
+
+// IndexContent adds search index entry for document content.
+// Any existing content entries for the page are removed.
+func (s Scope) IndexContent(ctx domain.RequestContext, p page.Page) (err error) {
+	// remove previous search entries
+	var stmt1 *sqlx.Stmt
+	stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=? AND itemid=? AND itemtype='page'")
+	defer streamutil.Close(stmt1)
+	if err != nil {
+		err = errors.Wrap(err, "prepare delete document content entry")
+		return
+	}
+
+	_, err = stmt1.Exec(ctx.OrgID, p.DocumentID, p.RefID)
+	if err != nil {
+		err = errors.Wrap(err, "execute delete document content entry")
+		return
+	}
+
+	// insert page content entry
+	var stmt2 *sqlx.Stmt
+	stmt2, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
+	defer streamutil.Close(stmt2)
+	if err != nil {
+		err = errors.Wrap(err, "prepare insert document content entry")
+		return
+	}
+
+	// prepare content
+	content, err := stringutil.HTML(p.Body).Text(false)
+	if err != nil {
+		err = errors.Wrap(err, "search strip HTML failed")
+		return
+	}
+	content = strings.TrimSpace(content)
+
+	_, err = stmt2.Exec(ctx.OrgID, p.DocumentID, p.RefID, "page", content)
+	if err != nil {
+		err = errors.Wrap(err, "execute insert document content entry")
+		return
+	}
+
+	return nil
+}
+
+// DeleteContent removes all search entries for specific document content.
+func (s Scope) DeleteContent(ctx domain.RequestContext, pageID string) (err error) {
+	// remove all search entries
+	var stmt1 *sqlx.Stmt
+	stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND itemid=? AND itemtype='page'")
+	defer streamutil.Close(stmt1)
+	if err != nil {
+		err = errors.Wrap(err, "prepare delete document content entry")
+		return
+	}
+
+	_, err = stmt1.Exec(ctx.OrgID, pageID)
+	if err != nil {
+		err = errors.Wrap(err, "execute delete document content entry")
+		return
+	}
+
+	return
+}
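Every statement in the rewritten store touches the same five columns, so the new search table reduces to one row per searchable item. A sketch of the shape those statements imply; the column types and sizes are assumptions, since the migration script is not part of this hunk:

// Hypothetical DDL inferred from the INSERT/DELETE statements above.
// Actual types, keys and indexes are not shown in this commit.
const searchTableSketch = `
CREATE TABLE search (
	orgid      VARCHAR(32) NOT NULL, -- tenant scoping, present in every WHERE clause
	documentid VARCHAR(32) NOT NULL, -- owning document
	itemid     VARCHAR(32) NOT NULL, -- page or file refid; empty for 'doc' and 'tag' rows
	itemtype   VARCHAR(8)  NOT NULL, -- 'doc', 'page', 'file' or 'tag'
	content    TEXT                  -- title, stripped page text, filename, or tag
);`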
@@ -12,131 +12,21 @@
 package search

 import (
-	"errors"
-	"fmt"
-	"sync"
-
 	"github.com/documize/community/core/env"
 	"github.com/documize/community/domain"
-	"github.com/documize/community/model/page"
 )

-// Indexer type provides the datastructure for the queues of activity to be serialized through a single background goroutine.
-// NOTE if the queue becomes full, the system will trigger the rebuilding entire files in order to clear the backlog.
+// Indexer documents!
 type Indexer struct {
-	queue        chan queueEntry
-	rebuild      map[string]bool
-	rebuildLock  sync.RWMutex
-	givenWarning bool
-	runtime      *env.Runtime
-	store        *domain.Store
+	runtime *env.Runtime
+	store   *domain.Store
 }

-type queueEntry struct {
-	action    func(domain.RequestContext, page.Page) error
-	isRebuild bool
-	page.Page
-	ctx domain.RequestContext
-}
-
-const searchQueueLength = 2048 // NOTE the largest 15Mb docx in the test set generates 2142 queue entries, but the queue is constantly emptied
-
 // NewIndexer provides background search indexer
 func NewIndexer(rt *env.Runtime, s *domain.Store) (i Indexer) {
 	i = Indexer{}
-	i.queue = make(chan queueEntry, searchQueueLength) // provide some decoupling
-	i.rebuild = make(map[string]bool)
 	i.runtime = rt
 	i.store = s
-
-	go i.processQueue()
-
 	return
 }
-
-// processQueue is run as a goroutine, it processes the queue of search index update requests.
-func (m *Indexer) processQueue() {
-	for {
-		//fmt.Println("DEBUG queue length=", len(Searches.queue))
-		if len(m.queue) <= searchQueueLength/20 { // on a busy server, the queue may never get to zero - so use 5%
-			m.rebuildLock.Lock()
-			for docid := range m.rebuild {
-				m.queue <- queueEntry{
-					action:    m.store.Search.Rebuild,
-					isRebuild: true,
-					Page:      page.Page{DocumentID: docid},
-				}
-				delete(m.rebuild, docid)
-			}
-			m.rebuildLock.Unlock()
-		}
-
-		qe := <-m.queue
-		doit := true
-
-		if len(qe.DocumentID) > 0 {
-			m.rebuildLock.RLock()
-			if m.rebuild[qe.DocumentID] {
-				doit = false // don't execute an action on a document queued to be rebuilt
-			}
-			m.rebuildLock.RUnlock()
-		}
-
-		if doit {
-			tx, err := m.runtime.Db.Beginx()
-			if err != nil {
-			} else {
-				ctx := qe.ctx
-				ctx.Transaction = tx
-				err = qe.action(ctx, qe.Page)
-				if err != nil {
-					tx.Rollback()
-					// This action has failed, so re-build indexes for the entire document,
-					// provided it was not a re-build command that failed and we know the documentId.
-					if !qe.isRebuild && len(qe.DocumentID) > 0 {
-						m.rebuildLock.Lock()
-						m.rebuild[qe.DocumentID] = true
-						m.rebuildLock.Unlock()
-					}
-				} else {
-					tx.Commit()
-				}
-			}
-		}
-	}
-}
-
-func (m *Indexer) addQueue(qe queueEntry) error {
-	lsq := len(m.queue)
-
-	if lsq >= (searchQueueLength - 1) {
-		if qe.DocumentID != "" {
-			m.rebuildLock.Lock()
-			if !m.rebuild[qe.DocumentID] {
-				m.runtime.Log.Info(fmt.Sprintf("WARNING: Search Queue Has No Space! Marked rebuild index for document id %s", qe.DocumentID))
-			}
-			m.rebuild[qe.DocumentID] = true
-			m.rebuildLock.Unlock()
-		} else {
-			m.runtime.Log.Error("addQueue", errors.New("WARNING: Search Queue Has No Space! But unable to index unknown document id"))
-		}
-
-		return nil
-	}
-
-	if lsq > ((8 * searchQueueLength) / 10) {
-		if !m.givenWarning {
-			m.runtime.Log.Info(fmt.Sprintf("WARNING: Searches.queue length %d exceeds 80%% of capacity", lsq))
-			m.givenWarning = true
-		}
-	} else {
-		if m.givenWarning {
-			m.runtime.Log.Info(fmt.Sprintf("INFO: Searches.queue length %d now below 80%% of capacity", lsq))
-			m.givenWarning = false
-		}
-	}
-
-	m.queue <- qe
-
-	return nil
-}
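The deleted queue bought asynchrony plus overflow and rebuild handling; the replacement gets asynchrony from plain goroutines at the call sites (the `go h.Indexer.IndexContent(...)` calls in the endpoint hunks above) and drops the overflow logic entirely. A minimal usage sketch under that reading; the wrapper function is illustrative only, and the page import is assumed from the surrounding repo:

// indexPageAsync is a hypothetical illustration of the new calling
// convention: with the queue gone, asynchrony is the caller's choice.
func indexPageAsync(rt *env.Runtime, s *domain.Store, ctx domain.RequestContext, p page.Page) {
	idx := NewIndexer(rt, s)    // trivial construction, no background goroutine started
	go idx.IndexContent(ctx, p) // each call manages its own transaction (next hunk)
}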
@@ -13,102 +13,93 @@ package search

 import (
 	"github.com/documize/community/domain"
-	"github.com/documize/community/model"
+	"github.com/documize/community/model/attachment"
 	"github.com/documize/community/model/doc"
 	"github.com/documize/community/model/page"
 )

-// Add should be called when a new page is added to a document.
-func (m *Indexer) Add(ctx domain.RequestContext, page page.Page, id string) (err error) {
-	page.RefID = id
-
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.Add,
-		Page:   page,
-		ctx:    ctx,
-	})
-
-	return
-}
-
-// Update should be called after a page record has been updated.
-func (m *Indexer) Update(ctx domain.RequestContext, page page.Page) (err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.Update,
-		Page:   page,
-		ctx:    ctx,
-	})
-
-	return
-}
-
-// UpdateDocument should be called after a document record has been updated.
-func (m *Indexer) UpdateDocument(ctx domain.RequestContext, document doc.Document) (err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.UpdateDocument,
-		Page: page.Page{
-			DocumentID: document.RefID,
-			Title:      document.Title,
-			Body:       document.Slug, // NOTE body==slug in this context
-		},
-		ctx: ctx,
-	})
-
-	return
-}
-
-// DeleteDocument should be called after a document record has been deleted.
-func (m *Indexer) DeleteDocument(ctx domain.RequestContext, documentID string) (err error) {
-	if len(documentID) > 0 {
-		m.queue <- queueEntry{
-			action: m.store.Search.DeleteDocument,
-			Page:   page.Page{DocumentID: documentID},
-			ctx:    ctx,
-		}
-	}
-	return
-}
-
-// UpdateSequence should be called after a page record has been resequenced.
-func (m *Indexer) UpdateSequence(ctx domain.RequestContext, documentID, pageID string, sequence float64) (err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.UpdateSequence,
-		Page: page.Page{
-			BaseEntity: model.BaseEntity{RefID: pageID},
-			Sequence:   sequence,
-			DocumentID: documentID,
-		},
-		ctx: ctx,
-	})
-
-	return
-}
-
-// UpdateLevel should be called after the level of a page has been changed.
-func (m *Indexer) UpdateLevel(ctx domain.RequestContext, documentID, pageID string, level int) (err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.UpdateLevel,
-		Page: page.Page{
-			BaseEntity: model.BaseEntity{RefID: pageID},
-			Level:      uint64(level),
-			DocumentID: documentID,
-		},
-		ctx: ctx,
-	})
-
-	return
-}
-
-// Delete should be called after a page has been deleted.
-func (m *Indexer) Delete(ctx domain.RequestContext, documentID, pageID string) (rows int64, err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.Delete,
-		Page: page.Page{
-			BaseEntity: model.BaseEntity{RefID: pageID},
-			DocumentID: documentID,
-		},
-		ctx: ctx,
-	})
-
-	return
-}
+// IndexDocument adds search index entries for document inserting title, tags and attachments as
+// searchable items. Any existing document entries are removed.
+func (m *Indexer) IndexDocument(ctx domain.RequestContext, d doc.Document, a []attachment.Attachment) {
+	method := "search.IndexDocument"
+	var err error
+
+	ctx.Transaction, err = m.runtime.Db.Beginx()
+	if err != nil {
+		m.runtime.Log.Error(method, err)
+		return
+	}
+
+	err = m.store.Search.IndexDocument(ctx, d, a)
+	if err != nil {
+		ctx.Transaction.Rollback()
+		m.runtime.Log.Error(method, err)
+		return
+	}
+
+	ctx.Transaction.Commit()
+}
+
+// DeleteDocument removes all search entries for document.
+func (m *Indexer) DeleteDocument(ctx domain.RequestContext, ID string) {
+	method := "search.DeleteDocument"
+	var err error
+
+	ctx.Transaction, err = m.runtime.Db.Beginx()
+	if err != nil {
+		m.runtime.Log.Error(method, err)
+		return
+	}
+
+	err = m.store.Search.DeleteDocument(ctx, ID)
+	if err != nil {
+		ctx.Transaction.Rollback()
+		m.runtime.Log.Error(method, err)
+		return
+	}
+
+	ctx.Transaction.Commit()
+}
+
+// IndexContent adds search index entry for document content.
+// Any existing content entries for the page are removed.
+func (m *Indexer) IndexContent(ctx domain.RequestContext, p page.Page) {
+	method := "search.IndexContent"
+	var err error
+
+	ctx.Transaction, err = m.runtime.Db.Beginx()
+	if err != nil {
+		m.runtime.Log.Error(method, err)
+		return
+	}
+
+	err = m.store.Search.IndexContent(ctx, p)
+	if err != nil {
+		ctx.Transaction.Rollback()
+		m.runtime.Log.Error(method, err)
+		return
+	}
+
+	ctx.Transaction.Commit()
+}
+
+// DeleteContent removes all search entries for specific document content.
+func (m *Indexer) DeleteContent(ctx domain.RequestContext, pageID string) {
+	method := "search.DeleteContent"
+	var err error
+
+	ctx.Transaction, err = m.runtime.Db.Beginx()
+	if err != nil {
+		m.runtime.Log.Error(method, err)
+		return
+	}
+
+	err = m.store.Search.DeleteContent(ctx, pageID)
+	if err != nil {
+		ctx.Transaction.Rollback()
+		m.runtime.Log.Error(method, err)
+		return
+	}
+
+	ctx.Transaction.Commit()
+}
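All four methods repeat the same begin / rollback-or-commit frame around a single store call. A possible refactor, not part of this commit, shown only to make the shared shape explicit:

// withTx is hypothetical: it factors out the transaction frame that
// IndexDocument, DeleteDocument, IndexContent and DeleteContent each
// write out by hand above.
func (m *Indexer) withTx(ctx domain.RequestContext, method string, fn func(domain.RequestContext) error) {
	var err error
	ctx.Transaction, err = m.runtime.Db.Beginx()
	if err != nil {
		m.runtime.Log.Error(method, err)
		return
	}
	if err = fn(ctx); err != nil {
		ctx.Transaction.Rollback()
		m.runtime.Log.Error(method, err)
		return
	}
	ctx.Transaction.Commit()
}

// Example: DeleteContent rewritten against the helper, behaviour unchanged.
func (m *Indexer) deleteContentViaHelper(ctx domain.RequestContext, pageID string) {
	m.withTx(ctx, "search.DeleteContent", func(c domain.RequestContext) error {
		return m.store.Search.DeleteContent(c, pageID)
	})
}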
@@ -180,26 +180,19 @@ type ActivityStorer interface {

 // SearchStorer defines required methods for persisting search queries
 type SearchStorer interface {
-	Add(ctx RequestContext, page page.Page) (err error)
-	Update(ctx RequestContext, page page.Page) (err error)
-	UpdateDocument(ctx RequestContext, page page.Page) (err error)
-	DeleteDocument(ctx RequestContext, page page.Page) (err error)
-	Rebuild(ctx RequestContext, p page.Page) (err error)
-	UpdateSequence(ctx RequestContext, page page.Page) (err error)
-	UpdateLevel(ctx RequestContext, page page.Page) (err error)
-	Delete(ctx RequestContext, page page.Page) (err error)
-	Documents(ctx RequestContext, options search.QueryOptions) (results []search.QueryResult, err error)
+	IndexDocument(ctx RequestContext, doc doc.Document, a []attachment.Attachment) (err error)
+	DeleteDocument(ctx RequestContext, ID string) (err error)
+	IndexContent(ctx RequestContext, p page.Page) (err error)
+	DeleteContent(ctx RequestContext, pageID string) (err error)
+	Documents(ctx RequestContext, q search.QueryOptions) (results []search.QueryResult, err error)
 }

 // Indexer defines required methods for managing search indexing process
 type Indexer interface {
-	Add(ctx RequestContext, page page.Page, id string) (err error)
-	Update(ctx RequestContext, page page.Page) (err error)
-	UpdateDocument(ctx RequestContext, page page.Page) (err error)
-	DeleteDocument(ctx RequestContext, documentID string) (err error)
-	UpdateSequence(ctx RequestContext, documentID, pageID string, sequence float64) (err error)
-	UpdateLevel(ctx RequestContext, documentID, pageID string, level int) (err error)
-	Delete(ctx RequestContext, documentID, pageID string) (err error)
+	IndexDocument(ctx RequestContext, d doc.Document, a []attachment.Attachment)
+	DeleteDocument(ctx RequestContext, ID string)
+	IndexContent(ctx RequestContext, p page.Page)
+	DeleteContent(ctx RequestContext, pageID string)
 }

 // BlockStorer defines required methods for persisting reusable content blocks
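The slimmed-down Indexer interface is easy to double in tests. A hypothetical fake, not in the commit, assuming the doc, page and attachment imports already present in this file:

// fakeIndexer is a hypothetical test double for the Indexer interface
// above; it records ids instead of writing to the search table.
type fakeIndexer struct {
	indexed, deleted []string
}

func (f *fakeIndexer) IndexDocument(ctx RequestContext, d doc.Document, a []attachment.Attachment) {
	f.indexed = append(f.indexed, d.RefID)
}

func (f *fakeIndexer) DeleteDocument(ctx RequestContext, ID string) {
	f.deleted = append(f.deleted, ID)
}

func (f *fakeIndexer) IndexContent(ctx RequestContext, p page.Page) {
	f.indexed = append(f.indexed, p.RefID)
}

func (f *fakeIndexer) DeleteContent(ctx RequestContext, pageID string) {
	f.deleted = append(f.deleted, pageID)
}

var _ Indexer = (*fakeIndexer)(nil) // compile-time interface check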
@@ -28,6 +28,7 @@ import (
 	"github.com/documize/community/core/uniqueid"
 	"github.com/documize/community/domain"
 	"github.com/documize/community/domain/document"
+	indexer "github.com/documize/community/domain/search"
 	"github.com/documize/community/model/attachment"
 	"github.com/documize/community/model/audit"
 	"github.com/documize/community/model/doc"
@@ -40,6 +41,7 @@ import (
 type Handler struct {
 	Runtime *env.Runtime
 	Store   *domain.Store
+	Indexer indexer.Indexer
 }

 // SavedList returns all templates saved by the user
@@ -363,5 +365,8 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {

 	event.Handler().Publish(string(event.TypeAddDocument), nd.Title)

+	a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
+	go h.Indexer.IndexDocument(ctx, nd, a)
+
 	response.WriteJSON(w, nd)
 }
@@ -17,15 +17,22 @@ export default Ember.Component.extend({

 	didReceiveAttrs() {
 		let docs = this.get('results');
+		let duped = [];
 		let phrase = 'Nothing found';

 		if (docs.length > 0) {
+			duped = _.uniq(docs, function (item) {
+				return item.documentId;
+			});
+
 			let references = docs.length === 1 ? "reference" : "references";
+			let docLabel = duped.length === 1 ? "document" : "documents";
 			let i = docs.length;
-			phrase = `${i} ${references}`;
+			let j = duped.length;
+			phrase = `${i} ${references} across ${j} ${docLabel}`;
 		}

 		this.set('resultPhrase', phrase);
-		this.set('documents', docs);
+		this.set('documents', duped);
 	}
 });
@@ -17,8 +17,8 @@ export default Ember.Controller.extend({
 	results: [],
 	matchDoc: true,
 	matchContent: true,
-	matchFile: true,
-	matchTag: true,
+	matchFile: false,
+	matchTag: false,

 	onKeywordChange: function () {
 		Ember.run.debounce(this, this.fetch, 750);
@@ -12,9 +12,9 @@
 <div class="input-control">
 	{{focus-input type="text" value=filter placeholder='type search phrase'}}
 	{{#ui/ui-checkbox selected=matchDoc}}document name{{/ui/ui-checkbox}}
-	{{#ui/ui-checkbox selected=matchContent}}document content{{/ui/ui-checkbox}}
+	{{#ui/ui-checkbox selected=matchContent}}content{{/ui/ui-checkbox}}
+	{{#ui/ui-checkbox selected=matchTag}}tag{{/ui/ui-checkbox}}
 	{{#ui/ui-checkbox selected=matchFile}}attachment name{{/ui/ui-checkbox}}
-	{{#ui/ui-checkbox selected=matchTag}}tag name{{/ui/ui-checkbox}}
 </div>
 <div class="examples">
 	<p>a OR b</p>
@@ -11,25 +11,6 @@

 package search

-import (
-	"time"
-)
-
-// Search holds raw search results.
-type Search struct {
-	ID            string    `json:"id"`
-	Created       time.Time `json:"created"`
-	Revised       time.Time `json:"revised"`
-	OrgID         string
-	DocumentID    string
-	Level         uint64
-	Sequence      float64
-	DocumentTitle string
-	Slug          string
-	PageTitle     string
-	Body          string
-}
-
 // QueryOptions defines how we search.
 type QueryOptions struct {
 	Keywords string `json:"keywords"`
@@ -54,10 +54,10 @@ func RegisterEndpoints(rt *env.Runtime, s *domain.Store) {
 	section := section.Handler{Runtime: rt, Store: s}
 	setting := setting.Handler{Runtime: rt, Store: s}
 	keycloak := keycloak.Handler{Runtime: rt, Store: s}
-	template := template.Handler{Runtime: rt, Store: s}
+	template := template.Handler{Runtime: rt, Store: s, Indexer: indexer}
 	document := document.Handler{Runtime: rt, Store: s, Indexer: indexer}
-	attachment := attachment.Handler{Runtime: rt, Store: s}
-	conversion := conversion.Handler{Runtime: rt, Store: s}
+	attachment := attachment.Handler{Runtime: rt, Store: s, Indexer: indexer}
+	conversion := conversion.Handler{Runtime: rt, Store: s, Indexer: indexer}
 	organization := organization.Handler{Runtime: rt, Store: s}

 //**************************************************
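The indexer value injected into these handlers is presumably built earlier in RegisterEndpoints from the same runtime and store; that line sits outside this hunk, so the construction site below is an assumption based on the NewIndexer signature introduced in this commit:

// Assumed, not shown in this hunk: one shared indexer created before the
// handlers are constructed.
indexer := search.NewIndexer(rt, s)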