
Merge pull request #115 from documize/search-revamp

Search revamp
Harvey Kandola, 2017-08-16 12:35:28 +01:00, committed by GitHub
Commit a306ca2fcd
27 changed files with 1231 additions and 1190 deletions

@@ -8,7 +8,7 @@ The mission is to bring software dev inspired features (refactoring, testing, li
 ## Latest version
-v1.51.0
+v1.52.0
 ## OS Support

@@ -4,7 +4,6 @@
 1. Remove audit table
 2. Remove document.layout field ?
 ## MYSQL ENCODING
 https://stackoverflow.com/questions/37307146/difference-between-utf8mb4-unicode-ci-and-utf8mb4-unicode-520-ci-collations-in-m
@@ -13,7 +12,7 @@ https://mathiasbynens.be/notes/mysql-utf8mb4
 https://medium.com/@adamhooper/in-mysql-never-use-utf8-use-utf8mb4-11761243e434
-## MIGRATE ENCODING
 ALTER DATABASE documize CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
 ALTER TABLE account CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
@@ -38,7 +37,3 @@ ALTER TABLE useraction CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
 ALTER TABLE useractivity CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
 ALTER TABLE userconfig CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
 ALTER TABLE userevent CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
-documenttitle, pagetitle, body
-CHARACTER SET utf8mb4 COLLATE utf8mb4
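Reviewer note: the ALTER statements above cover the server side; the Go side also has to request utf8mb4 per connection, or 4-byte characters are mangled before they reach the converted tables. A minimal sketch, assuming the standard go-sql-driver/mysql DSN parameters (host, credentials and database name are placeholders):

package main

import (
    "database/sql"

    _ "github.com/go-sql-driver/mysql" // MySQL driver
)

func open() (*sql.DB, error) {
    // charset/collation DSN parameters make the session use utf8mb4,
    // matching the ALTER DATABASE/TABLE statements above.
    dsn := "user:password@tcp(localhost:3306)/documize?charset=utf8mb4&collation=utf8mb4_bin&parseTime=true"
    return sql.Open("mysql", dsn)
}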

@@ -0,0 +1,65 @@
/* community edition */
ALTER DATABASE documize CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE account CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE attachment CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE block CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE config CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE document CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE feedback CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE label CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE labelrole CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE link CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE organization CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE page CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE pagemeta CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE participant CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE pin CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE revision CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE search CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE share CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE user CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE useraction CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE useractivity CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE userconfig CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE userevent CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
DROP TABLE IF EXISTS `search_old`;
RENAME TABLE search TO search_old;
DROP TABLE IF EXISTS `search`;
CREATE TABLE IF NOT EXISTS `search` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`documentid` CHAR(16) NOT NULL COLLATE utf8_bin,
`itemid` CHAR(16) NOT NULL DEFAULT '' COLLATE utf8_bin,
`itemtype` VARCHAR(10) NOT NULL,
`content` LONGTEXT,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE INDEX `idx_search_id` (`id` ASC),
INDEX `idx_search_orgid` (`orgid` ASC),
INDEX `idx_search_documentid` (`documentid` ASC),
FULLTEXT INDEX `idx_search_content` (`content`))
DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci
ENGINE = MyISAM;
-- migrate page content
INSERT INTO search (orgid, documentid, itemid, itemtype, content) SELECT orgid, documentid, id AS itemid, 'page' AS itemtype, TRIM(body) AS content FROM search_old;
-- index document title
INSERT INTO search (orgid, documentid, itemid, itemtype, content) SELECT orgid, refid AS documentid, '' AS itemid, 'doc' AS itemtype, TRIM(title) AS content FROM document;
-- index attachment name
INSERT INTO search (orgid, documentid, itemid, itemtype, content) SELECT orgid, documentid, refid AS itemid, 'file' AS itemtype, TRIM(filename) AS content FROM attachment;
-- insert tag 1
insert into search (orgid, documentid, itemid, itemtype, content) SELECT orgid, refid as documentid, '' as itemid, 'tag' as itemtype, TRIM(REPLACE(SUBSTRING_INDEX(tags, '#', 2), '#', '')) AS content FROM document WHERE tags != '';
-- insert tag 2
insert into search (orgid, documentid, itemid, itemtype, content) SELECT orgid, refid as documentid, '' as itemid, 'tag' as itemtype, IF((LENGTH(tags) - LENGTH(REPLACE(tags, '#', '')) - 1) > 1, SUBSTRING_INDEX(SUBSTRING_INDEX(tags, '#', 3), '#', -1), '') AS content FROM document WHERE LENGTH(tags) - LENGTH(REPLACE(tags, "#", "")) > 2;
-- insert tag 3
insert into search (orgid, documentid, itemid, itemtype, content) SELECT orgid, refid as documentid, '' as itemid, 'tag' as itemtype, IF((LENGTH(tags) - LENGTH(REPLACE(tags, '#', '')) - 1) > 2, SUBSTRING_INDEX(SUBSTRING_INDEX(tags, '#', 4), '#', -1), '') AS content FROM document WHERE LENGTH(tags) - LENGTH(REPLACE(tags, "#", "")) > 3;
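Reviewer note: the three tag INSERTs above unpack the '#'-delimited tags column (for example '#alpha#beta#') one position at a time with SUBSTRING_INDEX, so only the first three tags are migrated. The Go indexer introduced in this commit handles any number of tags with a plain split; a sketch of the equivalent logic:

package main

import (
    "fmt"
    "strings"
)

// tagsToSearchRows mirrors the migration INSERTs: one 'tag' search row
// per non-empty segment of the '#'-delimited tags value.
func tagsToSearchRows(tags string) (rows []string) {
    for _, t := range strings.Split(tags, "#") {
        if len(t) == 0 {
            continue // leading/trailing '#' produce empty segments
        }
        rows = append(rows, t)
    }
    return
}

func main() {
    fmt.Println(tagsToSearchRows("#alpha#beta#")) // [alpha beta]
}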

@@ -26,6 +26,7 @@ import (
 	"github.com/documize/community/core/uniqueid"
 	"github.com/documize/community/domain"
 	"github.com/documize/community/domain/document"
+	indexer "github.com/documize/community/domain/search"
 	"github.com/documize/community/model/attachment"
 	"github.com/documize/community/model/audit"
 	uuid "github.com/nu7hatch/gouuid"
@@ -35,6 +36,7 @@ import (
 type Handler struct {
 	Runtime *env.Runtime
 	Store   *domain.Store
+	Indexer indexer.Indexer
 }
 // Download is the end-point that responds to a request for a particular attachment
@@ -155,6 +157,10 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
 	ctx.Transaction.Commit()
+	a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
+	d, _ := h.Store.Document.Get(ctx, documentID)
+	go h.Indexer.IndexDocument(ctx, d, a)
 	response.WriteEmpty(w)
 }
@@ -226,5 +232,9 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
 	ctx.Transaction.Commit()
+	all, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
+	d, _ := h.Store.Document.Get(ctx, documentID)
+	go h.Indexer.IndexDocument(ctx, d, all)
 	response.WriteEmpty(w)
 }
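Reviewer note: the same shape repeats across the changed handlers, commit the user's transaction first, then hand the freshly loaded document and attachments to the indexer on a separate goroutine so the HTTP response never waits on search maintenance. A sketch of that pattern as a helper (not in the commit; h is the endpoint Handler from this file):

func (h *Handler) reindex(ctx domain.RequestContext, documentID string) {
    ctx.Transaction.Commit() // user-visible work is durable first

    a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
    d, _ := h.Store.Document.Get(ctx, documentID)

    // fire-and-forget: the indexer opens its own transaction (see
    // domain/search below), so a failed index write cannot undo the
    // commit above.
    go h.Indexer.IndexDocument(ctx, d, a)
}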

@@ -146,6 +146,9 @@ func (h *Handler) convert(w http.ResponseWriter, r *http.Request, job, folderID
 		return
 	}
+	a, _ := h.Store.Attachment.GetAttachments(ctx, nd.RefID)
+	go h.Indexer.IndexDocument(ctx, nd, a)
 	response.WriteJSON(w, nd)
 }

@@ -17,12 +17,14 @@ import (
 	api "github.com/documize/community/core/convapi"
 	"github.com/documize/community/core/env"
 	"github.com/documize/community/domain"
+	indexer "github.com/documize/community/domain/search"
 )
 // Handler contains the runtime information such as logging and database.
 type Handler struct {
 	Runtime *env.Runtime
 	Store   *domain.Store
+	Indexer indexer.Indexer
 }
 // UploadConvert is an endpoint to both upload and convert a document

@@ -16,7 +16,6 @@ import (
 	"encoding/json"
 	"io/ioutil"
 	"net/http"
-	"net/url"
 	"github.com/documize/community/core/env"
 	"github.com/documize/community/core/request"
@@ -253,7 +252,8 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {
 	ctx.Transaction.Commit()
-	h.Indexer.UpdateDocument(ctx, d)
+	a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
+	go h.Indexer.IndexDocument(ctx, d, a)
 	response.WriteEmpty(w)
 }
@@ -317,7 +317,7 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
 	ctx.Transaction.Commit()
-	h.Indexer.DeleteDocument(ctx, documentID)
+	go h.Indexer.DeleteDocument(ctx, documentID)
 	response.WriteEmpty(w)
 }
@@ -327,27 +327,36 @@ func (h *Handler) SearchDocuments(w http.ResponseWriter, r *http.Request) {
 	method := "document.search"
 	ctx := domain.GetRequestContext(r)
-	keywords := request.Query(r, "keywords")
-	decoded, err := url.QueryUnescape(keywords)
-	if err != nil {
-		response.WriteBadRequestError(w, method, err.Error())
-		return
-	}
-	results, err := h.Store.Search.Documents(ctx, decoded)
+	defer streamutil.Close(r.Body)
+	body, err := ioutil.ReadAll(r.Body)
+	if err != nil {
+		response.WriteBadRequestError(w, method, err.Error())
+		h.Runtime.Log.Error(method, err)
+		return
+	}
+	options := search.QueryOptions{}
+	err = json.Unmarshal(body, &options)
+	if err != nil {
+		response.WriteBadRequestError(w, method, err.Error())
+		h.Runtime.Log.Error(method, err)
+		return
+	}
+	results, err := h.Store.Search.Documents(ctx, options)
 	if err != nil {
 		h.Runtime.Log.Error(method, err)
 	}
 	// Put in slugs for easy UI display of search URL
 	for key, result := range results {
-		result.DocumentSlug = stringutil.MakeSlug(result.DocumentTitle)
-		result.FolderSlug = stringutil.MakeSlug(result.LabelName)
+		result.DocumentSlug = stringutil.MakeSlug(result.Document)
+		result.SpaceSlug = stringutil.MakeSlug(result.Space)
 		results[key] = result
 	}
 	if len(results) == 0 {
-		results = []search.DocumentSearch{}
+		results = []search.QueryResult{}
 	}
 	h.Store.Audit.Record(ctx, audit.EventTypeSearch)
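Reviewer note: with this change the search endpoint stops reading a keywords query-string parameter and expects a JSON body matching search.QueryOptions instead. A hedged client-side sketch of the new call (base URL and auth header are placeholders; the field names come from the QueryOptions JSON tags later in this commit):

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

// queryOptions mirrors model/search.QueryOptions JSON tags.
type queryOptions struct {
    Keywords   string `json:"keywords"`
    Doc        bool   `json:"doc"`
    Tag        bool   `json:"tag"`
    Attachment bool   `json:"attachment"`
    Content    bool   `json:"content"`
}

func main() {
    body, _ := json.Marshal(queryOptions{Keywords: "roadmap", Doc: true, Content: true})

    // POST replaces the old GET /search?keywords=... form.
    req, _ := http.NewRequest("POST", "https://demo.example.com/api/search", bytes.NewReader(body))
    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("Authorization", "Bearer <token>") // placeholder auth

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    fmt.Println(resp.Status)
}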

@@ -143,8 +143,7 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
 	ctx.Transaction.Commit()
 	np, _ := h.Store.Page.Get(ctx, pageID)
-	h.Indexer.Add(ctx, np, pageID)
+	go h.Indexer.IndexContent(ctx, np)
 	response.WriteJSON(w, np)
 }
@@ -338,7 +337,7 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
 	h.Store.Audit.Record(ctx, audit.EventTypeSectionDelete)
-	h.Indexer.Delete(ctx, documentID, pageID)
+	go h.Indexer.DeleteContent(ctx, pageID)
 	h.Store.Link.DeleteSourcePageLinks(ctx, pageID)
@@ -421,7 +420,7 @@ func (h *Handler) DeletePages(w http.ResponseWriter, r *http.Request) {
 		return
 	}
-	h.Indexer.Delete(ctx, documentID, page.PageID)
+	go h.Indexer.DeleteContent(ctx, page.PageID)
 	h.Store.Link.DeleteSourcePageLinks(ctx, page.PageID)
@@ -590,7 +589,7 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {
 	ctx.Transaction.Commit()
-	h.Indexer.Update(ctx, model.Page)
+	go h.Indexer.IndexContent(ctx, model.Page)
 	updatedPage, err := h.Store.Page.Get(ctx, pageID)
@@ -649,8 +648,6 @@ func (h *Handler) ChangePageSequence(w http.ResponseWriter, r *http.Request) {
 			h.Runtime.Log.Error(method, err)
 			return
 		}
-		h.Indexer.UpdateSequence(ctx, documentID, p.PageID, p.Sequence)
 	}
 	h.Store.Audit.Record(ctx, audit.EventTypeSectionResequence)
@@ -712,8 +709,6 @@ func (h *Handler) ChangePageLevel(w http.ResponseWriter, r *http.Request) {
 			h.Runtime.Log.Error(method, err)
 			return
 		}
-		h.Indexer.UpdateLevel(ctx, documentID, p.PageID, p.Level)
 	}
 	h.Store.Audit.Record(ctx, audit.EventTypeSectionResequence)

domain/search/handler.go (new file, 32 lines)

@@ -0,0 +1,32 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package search
import (
"github.com/documize/community/core/env"
"github.com/documize/community/domain"
)
// Indexer indexes documents.
type Indexer struct {
runtime *env.Runtime
store *domain.Store
}
// NewIndexer provides background search indexer
func NewIndexer(rt *env.Runtime, s *domain.Store) (i Indexer) {
i = Indexer{}
i.runtime = rt
i.store = s
return
}
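Reviewer note: the struct above is deliberately tiny, the indexer is now just a facade over the store, built once at startup and copied by value into each endpoint handler. A sketch of the wiring under an assumed package name, following the RegisterEndpoints change later in this commit:

package boot

import (
    "github.com/documize/community/core/env"
    "github.com/documize/community/domain"
    "github.com/documize/community/domain/document"
    indexer "github.com/documize/community/domain/search"
)

// wire mirrors the routing change below: one Indexer value,
// shared by the handlers that maintain the search index.
func wire(rt *env.Runtime, s *domain.Store) document.Handler {
    idx := indexer.NewIndexer(rt, s)
    return document.Handler{Runtime: rt, Store: s, Indexer: idx}
}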

@@ -12,17 +12,16 @@
 package mysql
 import (
+	"database/sql"
 	"fmt"
-	"regexp"
 	"strings"
-	"time"
 	"github.com/documize/community/core/env"
 	"github.com/documize/community/core/streamutil"
 	"github.com/documize/community/core/stringutil"
 	"github.com/documize/community/domain"
-	"github.com/documize/community/domain/store/mysql"
-	"github.com/documize/community/model"
+	"github.com/documize/community/model/attachment"
+	"github.com/documize/community/model/doc"
 	"github.com/documize/community/model/page"
 	"github.com/documize/community/model/search"
 	"github.com/jmoiron/sqlx"
@@ -34,278 +33,316 @@ type Scope struct {
 	Runtime *env.Runtime
 }
-// Add search entry (legacy name: searchAdd).
-func (s Scope) Add(ctx domain.RequestContext, page page.Page) (err error) {
-	id := page.RefID
-	// translate the html into text for the search
-	nonHTML, err := stringutil.HTML(page.Body).Text(false)
-	if err != nil {
-		errors.Wrap(err, "search decode body")
-		return
-	}
-	// insert into the search table, getting the document title along the way
-	var stmt *sqlx.Stmt
-	stmt, err = ctx.Transaction.Preparex(
-		"INSERT INTO search (id, orgid, documentid, level, sequence, documenttitle, slug, pagetitle, body, created, revised) " +
-			" SELECT page.refid,page.orgid,document.refid,page.level,page.sequence,document.title,document.slug,page.title,?,page.created,page.revised " +
-			" FROM document,page WHERE page.refid=? AND document.refid=page.documentid")
-	defer streamutil.Close(stmt)
-	if err != nil {
-		err = errors.Wrap(err, "prepare search insert")
-		return
-	}
-	_, err = stmt.Exec(nonHTML, id)
-	if err != nil {
-		err = errors.Wrap(err, "execute search insert")
-		return
-	}
-	return nil
-}
-// Update search entry (legacy name: searchUpdate).
-func (s Scope) Update(ctx domain.RequestContext, page page.Page) (err error) {
-	// translate the html into text for the search
-	nonHTML, err := stringutil.HTML(page.Body).Text(false)
-	if err != nil {
-		err = errors.Wrap(err, "search decode body")
-		return
-	}
-	su, err := ctx.Transaction.Preparex("UPDATE search SET pagetitle=?,body=?,sequence=?,level=?,revised=? WHERE id=?")
-	defer streamutil.Close(su)
-	if err != nil {
-		err = errors.Wrap(err, "prepare search update")
-		return err
-	}
-	_, err = su.Exec(page.Title, nonHTML, page.Sequence, page.Level, page.Revised, page.RefID)
-	if err != nil {
-		err = errors.Wrap(err, "execute search update")
-		return
-	}
-	return nil
-}
-// UpdateDocument search entries for document (legacy name: searchUpdateDocument).
-func (s Scope) UpdateDocument(ctx domain.RequestContext, page page.Page) (err error) {
-	stmt, err := ctx.Transaction.Preparex("UPDATE search SET documenttitle=?, slug=?, revised=? WHERE documentid=?")
-	defer streamutil.Close(stmt)
-	if err != nil {
-		err = errors.Wrap(err, "prepare search document update")
-		return err
-	}
-	_, err = stmt.Exec(page.Title, page.Body, time.Now().UTC(), page.DocumentID)
-	if err != nil {
-		err = errors.Wrap(err, "execute search document update")
-		return err
-	}
-	return nil
-}
-// DeleteDocument removes document search entries (legacy name: searchDeleteDocument)
-func (s Scope) DeleteDocument(ctx domain.RequestContext, page page.Page) (err error) {
-	var bm = mysql.BaseQuery{}
-	_, err = bm.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE from search WHERE documentid='%s'", page.DocumentID))
-	if err != nil {
-		err = errors.Wrap(err, "delete document search entries")
-	}
-	return nil
-}
-// Rebuild ... (legacy name: searchRebuild)
-func (s Scope) Rebuild(ctx domain.RequestContext, p page.Page) (err error) {
-	var bm = mysql.BaseQuery{}
-	_, err = bm.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE from search WHERE documentid='%s'", p.DocumentID))
-	if err != nil {
-		err = errors.Wrap(err, err.Error())
-		return err
-	}
-	var pages []struct{ ID string }
-	stmt2, err := ctx.Transaction.Preparex("SELECT refid as id FROM page WHERE documentid=? ")
-	defer streamutil.Close(stmt2)
-	if err != nil {
-		err = errors.Wrap(err, err.Error())
-		return err
-	}
-	err = stmt2.Select(&pages, p.DocumentID)
-	if err != nil {
-		err = errors.Wrap(err, err.Error())
-		return err
-	}
-	if len(pages) > 0 {
-		for _, pg := range pages {
-			err = s.Add(ctx, page.Page{BaseEntity: model.BaseEntity{RefID: pg.ID}})
-			if err != nil {
-				err = errors.Wrap(err, err.Error())
-				return err
-			}
-		}
-		// rebuild doc-level tags & excerpts
-		// get the 0'th page data and rewrite it
-		target := page.Page{}
-		stmt1, err := ctx.Transaction.Preparex("SELECT * FROM page WHERE refid=?")
-		defer streamutil.Close(stmt1)
-		if err != nil {
-			err = errors.Wrap(err, err.Error())
-			return err
-		}
-		err = stmt1.Get(&target, pages[0].ID)
-		if err != nil {
-			err = errors.Wrap(err, err.Error())
-			return err
-		}
-		err = s.Update(ctx, target) // to rebuild the document-level tags + excerpt
-		if err != nil {
-			err = errors.Wrap(err, err.Error())
-			return err
-		}
-	}
-	return
-}
-// UpdateSequence ... (legacy name: searchUpdateSequence)
-func (s Scope) UpdateSequence(ctx domain.RequestContext, page page.Page) (err error) {
-	supdate, err := ctx.Transaction.Preparex("UPDATE search SET sequence=?,revised=? WHERE id=?")
-	defer streamutil.Close(supdate)
-	if err != nil {
-		err = errors.Wrap(err, "prepare search update sequence")
-		return err
-	}
-	_, err = supdate.Exec(page.Sequence, time.Now().UTC(), page.RefID)
-	if err != nil {
-		err = errors.Wrap(err, "execute search update sequence")
-		return
-	}
-	return
-}
-// UpdateLevel ... (legacy name: searchUpdateLevel)
-func (s Scope) UpdateLevel(ctx domain.RequestContext, page page.Page) (err error) {
-	pageID := page.RefID
-	level := page.Level
-	supdate, err := ctx.Transaction.Preparex("UPDATE search SET level=?,revised=? WHERE id=?")
-	defer streamutil.Close(supdate)
-	if err != nil {
-		err = errors.Wrap(err, "prepare search update level")
-		return err
-	}
-	_, err = supdate.Exec(level, time.Now().UTC(), pageID)
-	if err != nil {
-		err = errors.Wrap(err, "execute search update level")
-		return
-	}
-	return
-}
-// Delete ... (legacy name: searchDelete).
-func (s Scope) Delete(ctx domain.RequestContext, page page.Page) (err error) {
-	var bm = mysql.BaseQuery{}
-	_, err = bm.DeleteConstrainedWithID(ctx.Transaction, "search", ctx.OrgID, page.RefID)
-	return
-}
+// IndexDocument adds search index entries for document inserting title, tags and attachments as
+// searchable items. Any existing document entries are removed.
+func (s Scope) IndexDocument(ctx domain.RequestContext, doc doc.Document, a []attachment.Attachment) (err error) {
+	// remove previous search entries
+	var stmt1 *sqlx.Stmt
+	stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=? AND (itemtype='doc' OR itemtype='file' OR itemtype='tag')")
+	defer streamutil.Close(stmt1)
+	if err != nil {
+		err = errors.Wrap(err, "prepare delete document index entries")
+		return
+	}
+	_, err = stmt1.Exec(ctx.OrgID, doc.RefID)
+	if err != nil {
+		err = errors.Wrap(err, "execute delete document index entries")
+		return
+	}
+	// insert doc title
+	var stmt2 *sqlx.Stmt
+	stmt2, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
+	defer streamutil.Close(stmt2)
+	if err != nil {
+		err = errors.Wrap(err, "prepare insert document title entry")
+		return
+	}
+	_, err = stmt2.Exec(ctx.OrgID, doc.RefID, "", "doc", doc.Title)
+	if err != nil {
+		err = errors.Wrap(err, "execute insert document title entry")
+		return
+	}
+	// insert doc tags
+	tags := strings.Split(doc.Tags, "#")
+	for _, t := range tags {
+		if len(t) == 0 {
+			continue
+		}
+		var stmt3 *sqlx.Stmt
+		stmt3, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
+		defer streamutil.Close(stmt3)
+		if err != nil {
+			err = errors.Wrap(err, "prepare insert document tag entry")
+			return
+		}
+		_, err = stmt3.Exec(ctx.OrgID, doc.RefID, "", "tag", t)
+		if err != nil {
+			err = errors.Wrap(err, "execute insert document tag entry")
+			return
+		}
+	}
+	for _, file := range a {
+		var stmt4 *sqlx.Stmt
+		stmt4, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
+		defer streamutil.Close(stmt4)
+		if err != nil {
+			err = errors.Wrap(err, "prepare insert document file entry")
+			return
+		}
+		_, err = stmt4.Exec(ctx.OrgID, doc.RefID, file.RefID, "file", file.Filename)
+		if err != nil {
+			err = errors.Wrap(err, "execute insert document file entry")
+			return
+		}
+	}
+	return nil
+}
+// DeleteDocument removes all search entries for document.
+func (s Scope) DeleteDocument(ctx domain.RequestContext, ID string) (err error) {
+	// remove all search entries
+	var stmt1 *sqlx.Stmt
+	stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=?")
+	defer streamutil.Close(stmt1)
+	if err != nil {
+		err = errors.Wrap(err, "prepare delete document entries")
+		return
+	}
+	_, err = stmt1.Exec(ctx.OrgID, ID)
+	if err != nil {
+		err = errors.Wrap(err, "execute delete document entries")
+		return
+	}
+	return
+}
+// IndexContent adds search index entry for document content.
+// Any existing document entries are removed.
+func (s Scope) IndexContent(ctx domain.RequestContext, p page.Page) (err error) {
+	// remove previous search entries
+	var stmt1 *sqlx.Stmt
+	stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=? AND itemid=? AND itemtype='page'")
+	defer streamutil.Close(stmt1)
+	if err != nil {
+		err = errors.Wrap(err, "prepare delete document content entry")
+		return
+	}
+	_, err = stmt1.Exec(ctx.OrgID, p.DocumentID, p.RefID)
+	if err != nil {
+		err = errors.Wrap(err, "execute delete document content entry")
+		return
+	}
+	// insert page content
+	var stmt2 *sqlx.Stmt
+	stmt2, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
+	defer streamutil.Close(stmt2)
+	if err != nil {
+		err = errors.Wrap(err, "prepare insert document content entry")
+		return
+	}
+	// prepare content
+	content, err := stringutil.HTML(p.Body).Text(false)
+	if err != nil {
+		err = errors.Wrap(err, "search strip HTML failed")
+		return
+	}
+	content = strings.TrimSpace(content)
+	_, err = stmt2.Exec(ctx.OrgID, p.DocumentID, p.RefID, "page", content)
+	if err != nil {
+		err = errors.Wrap(err, "execute insert document content entry")
+		return
+	}
+	return nil
+}
+// DeleteContent removes all search entries for specific document content.
+func (s Scope) DeleteContent(ctx domain.RequestContext, pageID string) (err error) {
+	// remove all search entries
+	var stmt1 *sqlx.Stmt
+	stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND itemid=? AND itemtype=?")
+	defer streamutil.Close(stmt1)
+	if err != nil {
+		err = errors.Wrap(err, "prepare delete document content entry")
+		return
+	}
+	_, err = stmt1.Exec(ctx.OrgID, pageID, "page")
+	if err != nil {
+		err = errors.Wrap(err, "execute delete document content entry")
+		return
+	}
+	return
+}
 // Documents searches the documents that the client is allowed to see, using the keywords search string, then audits that search.
 // Visible documents include both those in the client's own organisation and those that are public, or whose visibility includes the client.
-func (s Scope) Documents(ctx domain.RequestContext, keywords string) (results []search.DocumentSearch, err error) {
-	if len(keywords) == 0 {
-		return
-	}
-	var tagQuery, keywordQuery string
-	r, _ := regexp.Compile(`(#[a-z0-9][a-z0-9\-_]*)`)
-	res := r.FindAllString(keywords, -1)
-	if len(res) == 0 {
-		tagQuery = " "
-	} else {
-		if len(res) == 1 {
-			tagQuery = " AND document.tags LIKE '%" + res[0] + "#%' "
-		} else {
-			fmt.Println("lots of tags!")
-			tagQuery = " AND ("
-			for i := 0; i < len(res); i++ {
-				tagQuery += "document.tags LIKE '%" + res[i] + "#%'"
-				if i < len(res)-1 {
-					tagQuery += " OR "
-				}
-			}
-			tagQuery += ") "
-		}
-		keywords = r.ReplaceAllString(keywords, "")
-		keywords = strings.Replace(keywords, " ", "", -1)
-	}
-	keywords = strings.TrimSpace(keywords)
-	if len(keywords) > 0 {
-		keywordQuery = "AND MATCH(documenttitle,pagetitle,body) AGAINST('" + keywords + "' in boolean mode)"
-	}
-	sql := `SELECT search.id, documentid, pagetitle, document.labelid, document.title as documenttitle, document.tags,
-		COALESCE(label.label,'Unknown') AS labelname, document.excerpt as documentexcerpt
-		FROM search, document LEFT JOIN label ON label.orgid=document.orgid AND label.refid = document.labelid
-		WHERE search.documentid = document.refid AND search.orgid=? AND document.template=0 ` + tagQuery +
-		`AND document.labelid IN
-		(SELECT refid from label WHERE orgid=? AND type=2 AND userid=?
-		UNION ALL SELECT refid FROM label a where orgid=? AND type=1 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid='' AND (canedit=1 OR canview=1))
-		UNION ALL SELECT refid FROM label a where orgid=? AND type=3 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid=? AND (canedit=1 OR canview=1))) ` + keywordQuery
-	err = s.Runtime.Db.Select(&results,
-		sql,
-		ctx.OrgID,
-		ctx.OrgID,
-		ctx.UserID,
-		ctx.OrgID,
-		ctx.OrgID,
-		ctx.OrgID,
-		ctx.OrgID,
-		ctx.UserID)
-	if err != nil {
-		err = errors.Wrap(err, "search documents")
-	}
+func (s Scope) Documents(ctx domain.RequestContext, q search.QueryOptions) (results []search.QueryResult, err error) {
+	q.Keywords = strings.TrimSpace(q.Keywords)
+	if len(q.Keywords) == 0 {
+		return
+	}
+	results = []search.QueryResult{}
+	// Match doc names
+	if q.Doc {
+		r1, err1 := s.matchFullText(ctx, q.Keywords, "doc")
+		if err1 != nil {
+			err = errors.Wrap(err1, "search document names")
+			return
+		}
+		results = append(results, r1...)
+	}
+	// Match doc content
+	if q.Content {
+		r2, err2 := s.matchFullText(ctx, q.Keywords, "page")
+		if err2 != nil {
+			err = errors.Wrap(err2, "search document content")
+			return
+		}
+		results = append(results, r2...)
+	}
+	// Match doc tags
+	if q.Tag {
+		r3, err3 := s.matchFullText(ctx, q.Keywords, "tag")
+		if err3 != nil {
+			err = errors.Wrap(err3, "search document tag")
+			return
+		}
+		results = append(results, r3...)
+	}
+	// Match doc attachments
+	if q.Attachment {
+		r4, err4 := s.matchLike(ctx, q.Keywords, "file")
+		if err4 != nil {
+			err = errors.Wrap(err4, "search document attachments")
+			return
+		}
+		results = append(results, r4...)
+	}
+	return
+}
+func (s Scope) matchFullText(ctx domain.RequestContext, keywords, itemType string) (r []search.QueryResult, err error) {
+	sql1 := `
+	SELECT
+		s.id, s.orgid, s.documentid, s.itemid, s.itemtype,
+		d.labelid as spaceid, COALESCE(d.title,'Unknown') AS document, d.tags, d.excerpt,
+		COALESCE(l.label,'Unknown') AS space
+	FROM
+		search s,
+		document d
+	LEFT JOIN
+		label l ON l.orgid=d.orgid AND l.refid = d.labelid
+	WHERE
+		s.orgid = ?
+		AND s.itemtype = ?
+		AND s.documentid = d.refid
+		-- AND d.template = 0
+		AND d.labelid IN (SELECT refid from label WHERE orgid=? AND type=2 AND userid=?
+			UNION ALL SELECT refid FROM label a where orgid=? AND type=1 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid='' AND (canedit=1 OR canview=1))
+			UNION ALL SELECT refid FROM label a where orgid=? AND type=3 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid=? AND (canedit=1 OR canview=1)))
+		AND MATCH(s.content) AGAINST(? IN BOOLEAN MODE)`
+	err = s.Runtime.Db.Select(&r,
+		sql1,
+		ctx.OrgID,
+		itemType,
+		ctx.OrgID,
+		ctx.UserID,
+		ctx.OrgID,
+		ctx.OrgID,
+		ctx.OrgID,
+		ctx.OrgID,
+		ctx.UserID,
+		keywords)
+	if err == sql.ErrNoRows {
+		err = nil
+		r = []search.QueryResult{}
+	}
+	if err != nil {
+		err = errors.Wrap(err, "search document "+itemType)
+		return
+	}
+	return
+}
+func (s Scope) matchLike(ctx domain.RequestContext, keywords, itemType string) (r []search.QueryResult, err error) {
+	// LIKE clause does not like quotes!
+	keywords = strings.Replace(keywords, "'", "", -1)
+	keywords = strings.Replace(keywords, "\"", "", -1)
+	keywords = strings.Replace(keywords, "%", "", -1)
+	keywords = fmt.Sprintf("%%%s%%", keywords)
+	sql1 := `
+	SELECT
+		s.id, s.orgid, s.documentid, s.itemid, s.itemtype,
+		d.labelid as spaceid, COALESCE(d.title,'Unknown') AS document, d.tags, d.excerpt,
+		COALESCE(l.label,'Unknown') AS space
+	FROM
+		search s,
+		document d
+	LEFT JOIN
+		label l ON l.orgid=d.orgid AND l.refid = d.labelid
+	WHERE
+		s.orgid = ?
+		AND s.itemtype = ?
+		AND s.documentid = d.refid
+		-- AND d.template = 0
+		AND d.labelid IN (SELECT refid from label WHERE orgid=? AND type=2 AND userid=?
+			UNION ALL SELECT refid FROM label a where orgid=? AND type=1 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid='' AND (canedit=1 OR canview=1))
+			UNION ALL SELECT refid FROM label a where orgid=? AND type=3 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid=? AND (canedit=1 OR canview=1)))
+		AND s.content LIKE ?`
+	err = s.Runtime.Db.Select(&r,
+		sql1,
+		ctx.OrgID,
+		itemType,
+		ctx.OrgID,
+		ctx.UserID,
+		ctx.OrgID,
+		ctx.OrgID,
+		ctx.OrgID,
+		ctx.OrgID,
+		ctx.UserID,
+		keywords)
+	if err == sql.ErrNoRows {
+		err = nil
+		r = []search.QueryResult{}
+	}
+	if err != nil {
+		err = errors.Wrap(err, "search document "+itemType)
+		return
+	}
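Reviewer note: matchFullText leans on MySQL boolean-mode FULLTEXT search against the single content column, which is what makes the operator examples the revamped UI advertises (a OR b, x AND y, "phrase match", trailing * wildcard) work; matchLike covers attachment filenames, where substring matching is more forgiving than full-text tokenization. A minimal, standalone sketch of the boolean-mode query shape against the new table, with a placeholder DSN and without the space-permission subquery the real code applies:

package main

import (
    "database/sql"
    "fmt"

    _ "github.com/go-sql-driver/mysql"
)

func main() {
    db, err := sql.Open("mysql", "user:password@tcp(localhost:3306)/documize") // placeholder DSN
    if err != nil {
        panic(err)
    }
    defer db.Close()

    // '+docs +api*' requires both terms, with a prefix match on the second;
    // these are the same operators listed in the search sidebar examples.
    rows, err := db.Query(
        `SELECT documentid, content
           FROM search
          WHERE itemtype = 'page'
            AND MATCH(content) AGAINST(? IN BOOLEAN MODE)`, "+docs +api*")
    if err != nil {
        panic(err)
    }
    defer rows.Close()

    for rows.Next() {
        var id, content string
        if err := rows.Scan(&id, &content); err != nil {
            panic(err)
        }
        fmt.Println(id, content)
    }
}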

@@ -1,142 +0,0 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package search
import (
"errors"
"fmt"
"sync"
"github.com/documize/community/core/env"
"github.com/documize/community/domain"
"github.com/documize/community/model/page"
)
// Indexer type provides the datastructure for the queues of activity to be serialized through a single background goroutine.
// NOTE if the queue becomes full, the system will trigger the rebuilding entire files in order to clear the backlog.
type Indexer struct {
queue chan queueEntry
rebuild map[string]bool
rebuildLock sync.RWMutex
givenWarning bool
runtime *env.Runtime
store *domain.Store
}
type queueEntry struct {
action func(domain.RequestContext, page.Page) error
isRebuild bool
page.Page
ctx domain.RequestContext
}
const searchQueueLength = 2048 // NOTE the largest 15Mb docx in the test set generates 2142 queue entries, but the queue is constantly emptied
// NewIndexer provides background search indexer
func NewIndexer(rt *env.Runtime, s *domain.Store) (i Indexer) {
i = Indexer{}
i.queue = make(chan queueEntry, searchQueueLength) // provide some decoupling
i.rebuild = make(map[string]bool)
i.runtime = rt
i.store = s
go i.processQueue()
return
}
// processQueue is run as a goroutine, it processes the queue of search index update requests.
func (m *Indexer) processQueue() {
for {
//fmt.Println("DEBUG queue length=", len(Searches.queue))
if len(m.queue) <= searchQueueLength/20 { // on a busy server, the queue may never get to zero - so use 5%
m.rebuildLock.Lock()
for docid := range m.rebuild {
m.queue <- queueEntry{
action: m.store.Search.Rebuild,
isRebuild: true,
Page: page.Page{DocumentID: docid},
}
delete(m.rebuild, docid)
}
m.rebuildLock.Unlock()
}
qe := <-m.queue
doit := true
if len(qe.DocumentID) > 0 {
m.rebuildLock.RLock()
if m.rebuild[qe.DocumentID] {
doit = false // don't execute an action on a document queued to be rebuilt
}
m.rebuildLock.RUnlock()
}
if doit {
tx, err := m.runtime.Db.Beginx()
if err != nil {
} else {
ctx := qe.ctx
ctx.Transaction = tx
err = qe.action(ctx, qe.Page)
if err != nil {
tx.Rollback()
// This action has failed, so re-build indexes for the entire document,
// provided it was not a re-build command that failed and we know the documentId.
if !qe.isRebuild && len(qe.DocumentID) > 0 {
m.rebuildLock.Lock()
m.rebuild[qe.DocumentID] = true
m.rebuildLock.Unlock()
}
} else {
tx.Commit()
}
}
}
}
}
func (m *Indexer) addQueue(qe queueEntry) error {
lsq := len(m.queue)
if lsq >= (searchQueueLength - 1) {
if qe.DocumentID != "" {
m.rebuildLock.Lock()
if !m.rebuild[qe.DocumentID] {
m.runtime.Log.Info(fmt.Sprintf("WARNING: Search Queue Has No Space! Marked rebuild index for document id %s", qe.DocumentID))
}
m.rebuild[qe.DocumentID] = true
m.rebuildLock.Unlock()
} else {
m.runtime.Log.Error("addQueue", errors.New("WARNING: Search Queue Has No Space! But unable to index unknown document id"))
}
return nil
}
if lsq > ((8 * searchQueueLength) / 10) {
if !m.givenWarning {
m.runtime.Log.Info(fmt.Sprintf("WARNING: Searches.queue length %d exceeds 80%% of capacity", lsq))
m.givenWarning = true
}
} else {
if m.givenWarning {
m.runtime.Log.Info(fmt.Sprintf("INFO: Searches.queue length %d now below 80%% of capacity", lsq))
m.givenWarning = false
}
}
m.queue <- qe
return nil
}

@@ -13,102 +13,93 @@ package search
 import (
 	"github.com/documize/community/domain"
-	"github.com/documize/community/model"
+	"github.com/documize/community/model/attachment"
 	"github.com/documize/community/model/doc"
 	"github.com/documize/community/model/page"
 )
-// Add should be called when a new page is added to a document.
-func (m *Indexer) Add(ctx domain.RequestContext, page page.Page, id string) (err error) {
-	page.RefID = id
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.Add,
-		Page:   page,
-		ctx:    ctx,
-	})
-	return
-}
-// Update should be called after a page record has been updated.
-func (m *Indexer) Update(ctx domain.RequestContext, page page.Page) (err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.Update,
-		Page:   page,
-		ctx:    ctx,
-	})
-	return
-}
-// UpdateDocument should be called after a document record has been updated.
-func (m *Indexer) UpdateDocument(ctx domain.RequestContext, document doc.Document) (err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.UpdateDocument,
-		Page: page.Page{
-			DocumentID: document.RefID,
-			Title:      document.Title,
-			Body:       document.Slug, // NOTE body==slug in this context
-		},
-		ctx: ctx,
-	})
-	return
-}
-// DeleteDocument should be called after a document record has been deleted.
-func (m *Indexer) DeleteDocument(ctx domain.RequestContext, documentID string) (err error) {
-	if len(documentID) > 0 {
-		m.queue <- queueEntry{
-			action: m.store.Search.DeleteDocument,
-			Page:   page.Page{DocumentID: documentID},
-			ctx:    ctx,
-		}
-	}
-	return
-}
-// UpdateSequence should be called after a page record has been resequenced.
-func (m *Indexer) UpdateSequence(ctx domain.RequestContext, documentID, pageID string, sequence float64) (err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.UpdateSequence,
-		Page: page.Page{
-			BaseEntity: model.BaseEntity{RefID: pageID},
-			Sequence:   sequence,
-			DocumentID: documentID,
-		},
-		ctx: ctx,
-	})
-	return
-}
-// UpdateLevel should be called after the level of a page has been changed.
-func (m *Indexer) UpdateLevel(ctx domain.RequestContext, documentID, pageID string, level int) (err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.UpdateLevel,
-		Page: page.Page{
-			BaseEntity: model.BaseEntity{RefID: pageID},
-			Level:      uint64(level),
-			DocumentID: documentID,
-		},
-		ctx: ctx,
-	})
-	return
-}
-// Delete should be called after a page has been deleted.
-func (m *Indexer) Delete(ctx domain.RequestContext, documentID, pageID string) (rows int64, err error) {
-	err = m.addQueue(queueEntry{
-		action: m.store.Search.Delete,
-		Page: page.Page{
-			BaseEntity: model.BaseEntity{RefID: pageID},
-			DocumentID: documentID,
-		},
-		ctx: ctx,
-	})
-	return
-}
+// IndexDocument adds search index entries for document inserting title, tags and attachments as
+// searchable items. Any existing document entries are removed.
+func (m *Indexer) IndexDocument(ctx domain.RequestContext, d doc.Document, a []attachment.Attachment) {
+	method := "search.IndexDocument"
+	var err error
+	ctx.Transaction, err = m.runtime.Db.Beginx()
+	if err != nil {
+		m.runtime.Log.Error(method, err)
+		return
+	}
+	err = m.store.Search.IndexDocument(ctx, d, a)
+	if err != nil {
+		ctx.Transaction.Rollback()
+		m.runtime.Log.Error(method, err)
+		return
+	}
+	ctx.Transaction.Commit()
+}
+// DeleteDocument removes all search entries for document.
+func (m *Indexer) DeleteDocument(ctx domain.RequestContext, ID string) {
+	method := "search.DeleteDocument"
+	var err error
+	ctx.Transaction, err = m.runtime.Db.Beginx()
+	if err != nil {
+		m.runtime.Log.Error(method, err)
+		return
+	}
+	err = m.store.Search.DeleteDocument(ctx, ID)
+	if err != nil {
+		ctx.Transaction.Rollback()
+		m.runtime.Log.Error(method, err)
+		return
+	}
+	ctx.Transaction.Commit()
+}
+// IndexContent adds search index entry for document content.
+// Any existing document entries are removed.
+func (m *Indexer) IndexContent(ctx domain.RequestContext, p page.Page) {
+	method := "search.IndexContent"
+	var err error
+	ctx.Transaction, err = m.runtime.Db.Beginx()
+	if err != nil {
+		m.runtime.Log.Error(method, err)
+		return
+	}
+	err = m.store.Search.IndexContent(ctx, p)
+	if err != nil {
+		ctx.Transaction.Rollback()
+		m.runtime.Log.Error(method, err)
+		return
+	}
+	ctx.Transaction.Commit()
+}
+// DeleteContent removes all search entries for specific document content.
+func (m *Indexer) DeleteContent(ctx domain.RequestContext, pageID string) {
+	method := "search.DeleteContent"
+	var err error
+	ctx.Transaction, err = m.runtime.Db.Beginx()
+	if err != nil {
+		m.runtime.Log.Error(method, err)
+		return
+	}
+	err = m.store.Search.DeleteContent(ctx, pageID)
+	if err != nil {
+		ctx.Transaction.Rollback()
+		m.runtime.Log.Error(method, err)
+		return
+	}
+	ctx.Transaction.Commit()
+}

@@ -180,26 +180,19 @@ type ActivityStorer interface {
 // SearchStorer defines required methods for persisting search queries
 type SearchStorer interface {
-	Add(ctx RequestContext, page page.Page) (err error)
-	Update(ctx RequestContext, page page.Page) (err error)
-	UpdateDocument(ctx RequestContext, page page.Page) (err error)
-	DeleteDocument(ctx RequestContext, page page.Page) (err error)
-	Rebuild(ctx RequestContext, p page.Page) (err error)
-	UpdateSequence(ctx RequestContext, page page.Page) (err error)
-	UpdateLevel(ctx RequestContext, page page.Page) (err error)
-	Delete(ctx RequestContext, page page.Page) (err error)
-	Documents(ctx RequestContext, keywords string) (results []search.DocumentSearch, err error)
+	IndexDocument(ctx RequestContext, doc doc.Document, a []attachment.Attachment) (err error)
+	DeleteDocument(ctx RequestContext, ID string) (err error)
+	IndexContent(ctx RequestContext, p page.Page) (err error)
+	DeleteContent(ctx RequestContext, pageID string) (err error)
+	Documents(ctx RequestContext, q search.QueryOptions) (results []search.QueryResult, err error)
 }
 // Indexer defines required methods for managing search indexing process
 type Indexer interface {
-	Add(ctx RequestContext, page page.Page, id string) (err error)
-	Update(ctx RequestContext, page page.Page) (err error)
-	UpdateDocument(ctx RequestContext, page page.Page) (err error)
-	DeleteDocument(ctx RequestContext, documentID string) (err error)
-	UpdateSequence(ctx RequestContext, documentID, pageID string, sequence float64) (err error)
-	UpdateLevel(ctx RequestContext, documentID, pageID string, level int) (err error)
-	Delete(ctx RequestContext, documentID, pageID string) (err error)
+	IndexDocument(ctx RequestContext, d doc.Document, a []attachment.Attachment)
+	DeleteDocument(ctx RequestContext, ID string)
+	IndexContent(ctx RequestContext, p page.Page)
+	DeleteContent(ctx RequestContext, pageID string)
 }
// BlockStorer defines required methods for persisting reusable content blocks // BlockStorer defines required methods for persisting reusable content blocks
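Reviewer note: the concrete indexer in domain/search must keep satisfying this trimmed-down interface, so a compile-time assertion is a cheap guard. A sketch (not part of this commit, just the usual Go idiom; the methods use pointer receivers, hence the *search.Indexer form):

package search_test

import (
    "github.com/documize/community/domain"
    search "github.com/documize/community/domain/search"
)

// Breaks the build if the concrete indexer drifts from the contract.
var _ domain.Indexer = (*search.Indexer)(nil)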

@@ -28,6 +28,7 @@ import (
 	"github.com/documize/community/core/uniqueid"
 	"github.com/documize/community/domain"
 	"github.com/documize/community/domain/document"
+	indexer "github.com/documize/community/domain/search"
 	"github.com/documize/community/model/attachment"
 	"github.com/documize/community/model/audit"
 	"github.com/documize/community/model/doc"
@@ -40,6 +41,7 @@ import (
 type Handler struct {
 	Runtime *env.Runtime
 	Store   *domain.Store
+	Indexer indexer.Indexer
 }
 // SavedList returns all templates saved by the user
@@ -363,5 +365,8 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {
 	event.Handler().Publish(string(event.TypeAddDocument), nd.Title)
+	a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
+	go h.Indexer.IndexDocument(ctx, nd, a)
 	response.WriteJSON(w, nd)
 }

@@ -37,7 +37,7 @@ func main() {
 	// product details
 	rt.Product = env.ProdInfo{}
 	rt.Product.Major = "1"
-	rt.Product.Minor = "51"
+	rt.Product.Minor = "52"
 	rt.Product.Patch = "0"
 	rt.Product.Version = fmt.Sprintf("%s.%s.%s", rt.Product.Major, rt.Product.Minor, rt.Product.Patch)
 	rt.Product.Edition = "Community"

File diff suppressed because one or more lines are too long

@@ -16,39 +16,23 @@ export default Ember.Component.extend({
 	resultPhrase: "",
 	didReceiveAttrs() {
-		let results = this.get('results');
-		let temp = _.groupBy(results, 'documentId');
-		let documents = [];
-		_.each(temp, function (document) {
-			let refs = [];
-			if (document.length > 1) {
-				refs = document.slice(1);
-			}
-			_.each(refs, function (ref, index) {
-				ref.comma = index === refs.length - 1 ? "" : ", ";
-			});
-			let hasRefs = refs.length > 0;
-			documents.pushObject({
-				doc: document[0],
-				ref: refs,
-				hasReferences: hasRefs
-			});
-		});
+		let docs = this.get('results');
+		let duped = [];
 		let phrase = 'Nothing found';
-		if (results.length > 0) {
-			let references = results.length === 1 ? "reference" : "references";
-			let i = results.length;
-			phrase = `${i} ${references}`;
+		if (docs.length > 0) {
+			duped = _.uniq(docs, function (item) {
+				return item.documentId;
+			});
+			let references = docs.length === 1 ? "reference" : "references";
+			let docLabel = duped.length === 1 ? "document" : "documents";
+			let i = docs.length;
+			let j = duped.length;
+			phrase = `${i} ${references} across ${j} ${docLabel}`;
 		}
 		this.set('resultPhrase', phrase);
-		this.set('documents', documents);
+		this.set('documents', duped);
 	}
 });

@@ -13,19 +13,51 @@ import Ember from 'ember';
 export default Ember.Controller.extend({
 	searchService: Ember.inject.service('search'),
+	queryParams: ['filter'],
 	filter: "",
 	results: [],
+	matchDoc: true,
+	matchContent: true,
+	matchFile: false,
+	matchTag: false,
 	onKeywordChange: function () {
 		Ember.run.debounce(this, this.fetch, 750);
 	}.observes('filter'),
+	onMatchDoc: function () {
+		Ember.run.debounce(this, this.fetch, 750);
+	}.observes('matchDoc'),
+	onMatchContent: function () {
+		Ember.run.debounce(this, this.fetch, 750);
+	}.observes('matchContent'),
+	onMatchTag: function () {
+		Ember.run.debounce(this, this.fetch, 750);
+	}.observes('matchTag'),
+	onMatchFile: function () {
+		Ember.run.debounce(this, this.fetch, 750);
+	}.observes('matchFile'),
 	fetch() {
 		let self = this;
+		let payload = {
+			keywords: this.get('filter'),
+			doc: this.get('matchDoc'),
+			attachment: this.get('matchFile'),
+			tag: this.get('matchTag'),
+			content: this.get('matchContent')
+		};
+		payload.keywords = payload.keywords.trim();
+		if (payload.keywords.length == 0) {
+			return;
+		}
+		if (!payload.doc && !payload.tag && !payload.content && !payload.attachment) {
+			return;
+		}
-		this.get('searchService').find(this.get('filter')).then(function (response) {
+		this.get('searchService').find(payload).then(function(response) {
 			self.set('results', response);
 		});
-	}
+	},
 });

@@ -1,15 +1,26 @@
 {{layout/zone-navigation}}
 {{#layout/zone-container}}
 	{{#layout/zone-sidebar}}
 		<div class="sidebar-toolbar">
 		</div>
 		<div class="sidebar-common">
-			{{layout/sidebar-intro title="Search" message='#tag, keyword, "some phrase", this AND that, this OR that'}}
+			{{layout/sidebar-intro title="Search" message='Search across document name, contents, tags and attachment filenames'}}
 		</div>
 		<div class="sidebar-wrapper">
 			<div class="page-search">
 				<div class="input-control">
-					{{focus-input type="text" value=filter placeholder='type search phrase (case sensitive)'}}
+					{{focus-input type="text" value=filter placeholder='type search phrase'}}
+					{{#ui/ui-checkbox selected=matchDoc}}document name{{/ui/ui-checkbox}}
+					{{#ui/ui-checkbox selected=matchContent}}content{{/ui/ui-checkbox}}
+					{{#ui/ui-checkbox selected=matchTag}}tag{{/ui/ui-checkbox}}
+					{{#ui/ui-checkbox selected=matchFile}}attachment name{{/ui/ui-checkbox}}
+				</div>
+				<div class="examples">
+					<p>a OR b</p>
+					<p>x AND y</p>
+					<p>"phrase match"</p>
+					<p>* for wildcard match</p>
 				</div>
 			</div>
 		</div>

@@ -19,12 +19,12 @@ export default Ember.Service.extend({
 	sessionService: service('session'),
 	ajax: service(),
-	// getUsers returns all users for organization.
-	find(keywords) {
-		let url = "search?keywords=" + encodeURIComponent(keywords);
-		return this.get('ajax').request(url, {
-			method: "GET"
+	// find all matching documents.
+	find(payload) {
+		return this.get('ajax').request("search", {
+			method: "POST",
+			data: JSON.stringify(payload),
+			contentType: 'json'
 		});
 	},
 });

@@ -5,6 +5,10 @@
 	}
 }
+.examples {
+	color: $color-gray;
+}
 .search-results {
 	> .heading {
 		font-size: 2rem;

@@ -1,13 +1,13 @@
 <div class="search-results">
 	<div class="heading">{{resultPhrase}}</div>
 	<ul class="list">
-		{{#each documents key="doc.id" as |result index|}}
+		{{#each documents key="id" as |result index|}}
 			<li class="item">
-				<a class="link" href="s/{{result.doc.folderId}}/{{result.doc.folderSlug}}/d/{{ result.doc.documentId }}/{{result.doc.documentSlug}}?page={{ result.doc.id }}">
-					<div class="title">{{ result.doc.documentTitle }}</div>
-					<div class="folder">{{ result.doc.folderName }}</div>
-					<div class="excerpt">{{ result.doc.documentExcerpt }}</div>
-					<div class="chips">{{search/tag-list documentTags=result.doc.documentTags}}</div>
+				<a class="link" href="s/{{result.spaceId}}/{{result.spaceSlug}}/d/{{ result.documentId }}/{{result.documentSlug}}?page={{ result.itemId }}">
+					<div class="title">{{ result.document }}</div>
+					<div class="folder">{{ result.space }}</div>
+					<div class="excerpt">{{ result.excerpt }}</div>
+					<div class="chips">{{search/tag-list documentTags=result.tags}}</div>
 				</a>
 			</li>
 		{{/each}}

@@ -1,6 +1,6 @@
 {
 	"name": "documize",
-	"version": "1.51.0",
+	"version": "1.52.0",
 	"description": "The Document IDE",
 	"private": true,
 	"repository": "",

@@ -3,5 +3,5 @@
 		"target": "es6",
 		"experimentalDecorators": true
 	},
-	"exclude": ["node_modules", "bower_components", "tmp", "vendor", ".git", "dist", "dist-prod"]
+	"exclude": ["node_modules", "bower_components", "tmp", "vendor", ".git", "dist", "dist-prod", "gui/node_modules", "gui/dist", "gui/dist-prod", "gui/tmp"]
 }

@@ -1,16 +1,16 @@
 {
 	"community":
 	{
-		"version": "1.51.0",
+		"version": "1.52.0",
 		"major": 1,
-		"minor": 51,
+		"minor": 52,
 		"patch": 0
 	},
 	"enterprise":
 	{
-		"version": "1.53.0",
+		"version": "1.54.0",
 		"major": 1,
-		"minor": 53,
+		"minor": 54,
 		"patch": 0
 	}
 }

@@ -11,35 +11,27 @@
 package search
-import (
-	"time"
-)
-// Search holds raw search results.
-type Search struct {
-	ID            string    `json:"id"`
-	Created       time.Time `json:"created"`
-	Revised       time.Time `json:"revised"`
-	OrgID         string
-	DocumentID    string
-	Level         uint64
-	Sequence      float64
-	DocumentTitle string
-	Slug          string
-	PageTitle     string
-	Body          string
-}
+// QueryOptions defines how we search.
+type QueryOptions struct {
+	Keywords   string `json:"keywords"`
+	Doc        bool   `json:"doc"`
+	Tag        bool   `json:"tag"`
+	Attachment bool   `json:"attachment"`
+	Content    bool   `json:"content"`
+}
-// DocumentSearch represents 'presentable' search results.
-type DocumentSearch struct {
-	ID              string `json:"id"`
-	DocumentID      string `json:"documentId"`
-	DocumentTitle   string `json:"documentTitle"`
-	DocumentSlug    string `json:"documentSlug"`
-	DocumentExcerpt string `json:"documentExcerpt"`
-	Tags            string `json:"documentTags"`
-	PageTitle       string `json:"pageTitle"`
-	LabelID         string `json:"folderId"`
-	LabelName       string `json:"folderName"`
-	FolderSlug      string `json:"folderSlug"`
-}
+// QueryResult represents 'presentable' search results.
+type QueryResult struct {
+	ID           string `json:"id"`
+	OrgID        string `json:"orgId"`
+	ItemID       string `json:"itemId"`
+	ItemType     string `json:"itemType"`
+	DocumentID   string `json:"documentId"`
+	DocumentSlug string `json:"documentSlug"`
+	Document     string `json:"document"`
+	Excerpt      string `json:"excerpt"`
+	Tags         string `json:"tags"`
+	SpaceID      string `json:"spaceId"`
+	Space        string `json:"space"`
+	SpaceSlug    string `json:"spaceSlug"`
+}
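Reviewer note: the JSON tags above define the wire format the Ember service now POSTs and the results payload it renders. A quick round-trip sketch of a response fragment (values are illustrative, struct trimmed to a few fields):

package main

import (
    "encoding/json"
    "fmt"
)

// QueryResult copied from the model above, trimmed for the example.
type QueryResult struct {
    ItemType     string `json:"itemType"`
    Document     string `json:"document"`
    DocumentSlug string `json:"documentSlug"`
    Space        string `json:"space"`
}

func main() {
    // illustrative response fragment from POST /search
    raw := `[{"itemType":"page","document":"Product Roadmap","documentSlug":"product-roadmap","space":"Engineering"}]`

    var results []QueryResult
    if err := json.Unmarshal([]byte(raw), &results); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", results[0])
}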

@@ -54,10 +54,10 @@ func RegisterEndpoints(rt *env.Runtime, s *domain.Store) {
 	section := section.Handler{Runtime: rt, Store: s}
 	setting := setting.Handler{Runtime: rt, Store: s}
 	keycloak := keycloak.Handler{Runtime: rt, Store: s}
-	template := template.Handler{Runtime: rt, Store: s}
+	template := template.Handler{Runtime: rt, Store: s, Indexer: indexer}
 	document := document.Handler{Runtime: rt, Store: s, Indexer: indexer}
-	attachment := attachment.Handler{Runtime: rt, Store: s}
-	conversion := conversion.Handler{Runtime: rt, Store: s}
+	attachment := attachment.Handler{Runtime: rt, Store: s, Indexer: indexer}
+	conversion := conversion.Handler{Runtime: rt, Store: s, Indexer: indexer}
 	organization := organization.Handler{Runtime: rt, Store: s}
 	//**************************************************
@@ -132,7 +132,7 @@ func RegisterEndpoints(rt *env.Runtime, s *domain.Store) {
 	Add(rt, RoutePrefixPrivate, "users/{userID}", []string{"DELETE", "OPTIONS"}, nil, user.Delete)
 	Add(rt, RoutePrefixPrivate, "users/sync", []string{"GET", "OPTIONS"}, nil, keycloak.Sync)
-	Add(rt, RoutePrefixPrivate, "search", []string{"GET", "OPTIONS"}, nil, document.SearchDocuments)
+	Add(rt, RoutePrefixPrivate, "search", []string{"POST", "OPTIONS"}, nil, document.SearchDocuments)
 	Add(rt, RoutePrefixPrivate, "templates", []string{"POST", "OPTIONS"}, nil, template.SaveAs)
 	Add(rt, RoutePrefixPrivate, "templates", []string{"GET", "OPTIONS"}, nil, template.SavedList)