
Merge pull request #115 from documize/search-revamp

Search revamp
Harvey Kandola 2017-08-16 12:35:28 +01:00 committed by GitHub
commit a306ca2fcd
27 changed files with 1231 additions and 1190 deletions

@@ -8,7 +8,7 @@ The mission is to bring software dev inspired features (refactoring, testing, li
## Latest version
v1.51.0
v1.52.0
## OS Support

@@ -4,7 +4,6 @@
1. Remove audit table
2. Remove document.layout field ?
## MYSQL ENCODING
https://stackoverflow.com/questions/37307146/difference-between-utf8mb4-unicode-ci-and-utf8mb4-unicode-520-ci-collations-in-m
@@ -13,7 +12,7 @@ https://mathiasbynens.be/notes/mysql-utf8mb4
https://medium.com/@adamhooper/in-mysql-never-use-utf8-use-utf8mb4-11761243e434
## MIGRATE ENCODING
ALTER DATABASE documize CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE account CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
@@ -38,7 +37,3 @@ ALTER TABLE useraction CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE useractivity CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE userconfig CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE userevent CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
documenttitle, pagetitle, body
CHARACTER SET utf8mb4 COLLATE utf8mb4

@@ -0,0 +1,65 @@
/* community edition */
ALTER DATABASE documize CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE account CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE attachment CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE block CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE config CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE document CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE feedback CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE label CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE labelrole CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE link CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE organization CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE page CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE pagemeta CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE participant CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE pin CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE revision CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE search CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE share CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE user CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE useraction CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE useractivity CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE userconfig CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE userevent CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
DROP TABLE IF EXISTS `search_old`;
RENAME TABLE search TO search_old;
DROP TABLE IF EXISTS `search`;
CREATE TABLE IF NOT EXISTS `search` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`documentid` CHAR(16) NOT NULL COLLATE utf8_bin,
`itemid` CHAR(16) NOT NULL DEFAULT '' COLLATE utf8_bin,
`itemtype` VARCHAR(10) NOT NULL,
`content` LONGTEXT,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE INDEX `idx_search_id` (`id` ASC),
INDEX `idx_search_orgid` (`orgid` ASC),
INDEX `idx_search_documentid` (`documentid` ASC),
FULLTEXT INDEX `idx_search_content` (`content`))
DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci
ENGINE = MyISAM;
-- migrate page content
INSERT INTO search (orgid, documentid, itemid, itemtype, content) SELECT orgid, documentid, id AS itemid, 'page' AS itemtype, TRIM(body) AS content FROM search_old;
-- index document title
INSERT INTO search (orgid, documentid, itemid, itemtype, content) SELECT orgid, refid AS documentid, '' AS itemid, 'doc' AS itemtype, TRIM(title) AS content FROM document;
-- index attachment name
INSERT INTO search (orgid, documentid, itemid, itemtype, content) SELECT orgid, documentid, refid AS itemid, 'file' AS itemtype, TRIM(filename) AS content FROM attachment;
-- insert tag 1
insert into search (orgid, documentid, itemid, itemtype, content) SELECT orgid, refid as documentid, '' as itemid, 'tag' as itemtype, TRIM(REPLACE(SUBSTRING_INDEX(tags, '#', 2), '#', '')) AS content FROM document WHERE tags != '';
-- insert tag 2
insert into search (orgid, documentid, itemid, itemtype, content) SELECT orgid, refid as documentid, '' as itemid, 'tag' as itemtype, IF((LENGTH(tags) - LENGTH(REPLACE(tags, '#', '')) - 1) > 1, SUBSTRING_INDEX(SUBSTRING_INDEX(tags, '#', 3), '#', -1), '') AS content FROM document WHERE LENGTH(tags) - LENGTH(REPLACE(tags, "#", "")) > 2;
-- insert tag 3
insert into search (orgid, documentid, itemid, itemtype, content) SELECT orgid, refid as documentid, '' as itemid, 'tag' as itemtype, IF((LENGTH(tags) - LENGTH(REPLACE(tags, '#', '')) - 1) > 2, SUBSTRING_INDEX(SUBSTRING_INDEX(tags, '#', 4), '#', -1), '') AS content FROM document WHERE LENGTH(tags) - LENGTH(REPLACE(tags, "#", "")) > 3;
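
The three tag INSERT statements above unpack Documize's delimited tag string (stored as "#tag1#tag2#") using nested SUBSTRING_INDEX calls, one statement per tag position. A minimal Go sketch of the same split — the helper name is illustrative, but the filtering matches the strings.Split(doc.Tags, "#") logic the revamped store code uses further down:

package main

import (
	"fmt"
	"strings"
)

// splitTags unpacks a Documize tag string such as "#mysql#search#".
// Splitting on '#' yields empty strings for the leading and trailing
// delimiters, so those are skipped — the same filtering IndexDocument
// performs before inserting 'tag' rows into the search table.
func splitTags(tags string) (out []string) {
	for _, t := range strings.Split(tags, "#") {
		if len(t) == 0 {
			continue
		}
		out = append(out, t)
	}
	return
}

func main() {
	fmt.Println(splitTags("#mysql#search#")) // [mysql search]
}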

@@ -26,6 +26,7 @@ import (
"github.com/documize/community/core/uniqueid"
"github.com/documize/community/domain"
"github.com/documize/community/domain/document"
indexer "github.com/documize/community/domain/search"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/audit"
uuid "github.com/nu7hatch/gouuid"
@@ -35,6 +36,7 @@ import (
type Handler struct {
Runtime *env.Runtime
Store *domain.Store
Indexer indexer.Indexer
}
// Download is the end-point that responds to a request for a particular attachment
@@ -155,6 +157,10 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
ctx.Transaction.Commit()
a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
d, _ := h.Store.Document.Get(ctx, documentID)
go h.Indexer.IndexDocument(ctx, d, a)
response.WriteEmpty(w)
}
@@ -226,5 +232,9 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
ctx.Transaction.Commit()
all, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
d, _ := h.Store.Document.Get(ctx, documentID)
go h.Indexer.IndexDocument(ctx, d, all)
response.WriteEmpty(w)
}

@@ -146,6 +146,9 @@ func (h *Handler) convert(w http.ResponseWriter, r *http.Request, job, folderID
return
}
a, _ := h.Store.Attachment.GetAttachments(ctx, nd.RefID)
go h.Indexer.IndexDocument(ctx, nd, a)
response.WriteJSON(w, nd)
}

@@ -17,12 +17,14 @@ import (
api "github.com/documize/community/core/convapi"
"github.com/documize/community/core/env"
"github.com/documize/community/domain"
indexer "github.com/documize/community/domain/search"
)
// Handler contains the runtime information such as logging and database.
type Handler struct {
Runtime *env.Runtime
Store *domain.Store
Indexer indexer.Indexer
}
// UploadConvert is an endpoint to both upload and convert a document

@@ -16,7 +16,6 @@ import (
"encoding/json"
"io/ioutil"
"net/http"
"net/url"
"github.com/documize/community/core/env"
"github.com/documize/community/core/request"
@@ -253,7 +252,8 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {
ctx.Transaction.Commit()
h.Indexer.UpdateDocument(ctx, d)
a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
go h.Indexer.IndexDocument(ctx, d, a)
response.WriteEmpty(w)
}
@@ -317,7 +317,7 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
ctx.Transaction.Commit()
h.Indexer.DeleteDocument(ctx, documentID)
go h.Indexer.DeleteDocument(ctx, documentID)
response.WriteEmpty(w)
}
@@ -327,27 +327,36 @@ func (h *Handler) SearchDocuments(w http.ResponseWriter, r *http.Request) {
method := "document.search"
ctx := domain.GetRequestContext(r)
keywords := request.Query(r, "keywords")
decoded, err := url.QueryUnescape(keywords)
defer streamutil.Close(r.Body)
body, err := ioutil.ReadAll(r.Body)
if err != nil {
response.WriteBadRequestError(w, method, err.Error())
h.Runtime.Log.Error(method, err)
return
}
results, err := h.Store.Search.Documents(ctx, decoded)
options := search.QueryOptions{}
err = json.Unmarshal(body, &options)
if err != nil {
response.WriteBadRequestError(w, method, err.Error())
h.Runtime.Log.Error(method, err)
return
}
results, err := h.Store.Search.Documents(ctx, options)
if err != nil {
h.Runtime.Log.Error(method, err)
}
// Put in slugs for easy UI display of search URL
for key, result := range results {
result.DocumentSlug = stringutil.MakeSlug(result.DocumentTitle)
result.FolderSlug = stringutil.MakeSlug(result.LabelName)
result.DocumentSlug = stringutil.MakeSlug(result.Document)
result.SpaceSlug = stringutil.MakeSlug(result.Space)
results[key] = result
}
if len(results) == 0 {
results = []search.DocumentSearch{}
results = []search.QueryResult{}
}
h.Store.Audit.Record(ctx, audit.EventTypeSearch)

@@ -143,8 +143,7 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
ctx.Transaction.Commit()
np, _ := h.Store.Page.Get(ctx, pageID)
h.Indexer.Add(ctx, np, pageID)
go h.Indexer.IndexContent(ctx, np)
response.WriteJSON(w, np)
}
@@ -338,7 +337,7 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
h.Store.Audit.Record(ctx, audit.EventTypeSectionDelete)
h.Indexer.Delete(ctx, documentID, pageID)
go h.Indexer.DeleteContent(ctx, pageID)
h.Store.Link.DeleteSourcePageLinks(ctx, pageID)
@@ -421,7 +420,7 @@ func (h *Handler) DeletePages(w http.ResponseWriter, r *http.Request) {
return
}
h.Indexer.Delete(ctx, documentID, page.PageID)
go h.Indexer.DeleteContent(ctx, page.PageID)
h.Store.Link.DeleteSourcePageLinks(ctx, page.PageID)
@@ -590,7 +589,7 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {
ctx.Transaction.Commit()
h.Indexer.Update(ctx, model.Page)
go h.Indexer.IndexContent(ctx, model.Page)
updatedPage, err := h.Store.Page.Get(ctx, pageID)
@@ -649,8 +648,6 @@ func (h *Handler) ChangePageSequence(w http.ResponseWriter, r *http.Request) {
h.Runtime.Log.Error(method, err)
return
}
h.Indexer.UpdateSequence(ctx, documentID, p.PageID, p.Sequence)
}
h.Store.Audit.Record(ctx, audit.EventTypeSectionResequence)
@@ -712,8 +709,6 @@ func (h *Handler) ChangePageLevel(w http.ResponseWriter, r *http.Request) {
h.Runtime.Log.Error(method, err)
return
}
h.Indexer.UpdateLevel(ctx, documentID, p.PageID, p.Level)
}
h.Store.Audit.Record(ctx, audit.EventTypeSectionResequence)

domain/search/handler.go (new file, 32 lines)

@@ -0,0 +1,32 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package search
import (
"github.com/documize/community/core/env"
"github.com/documize/community/domain"
)
// Indexer indexes documents and document content.
type Indexer struct {
runtime *env.Runtime
store *domain.Store
}
// NewIndexer provides a background search indexer.
func NewIndexer(rt *env.Runtime, s *domain.Store) (i Indexer) {
i = Indexer{}
i.runtime = rt
i.store = s
return
}
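
The queue-based indexer is gone; this new Indexer is constructed once at startup and called directly by endpoint handlers, typically via a goroutine after the request transaction commits. A sketch of the wiring under those assumptions — the helper function is illustrative, while the field assignments mirror the RegisterEndpoints hunk near the end of this diff:

package main

import (
	"github.com/documize/community/core/env"
	"github.com/documize/community/domain"
	"github.com/documize/community/domain/document"
	indexer "github.com/documize/community/domain/search"
)

// newDocumentHandler shows the pattern used in RegisterEndpoints:
// build one Indexer over the shared runtime and store, then hand it
// to every endpoint handler that mutates searchable content.
func newDocumentHandler(rt *env.Runtime, s *domain.Store) document.Handler {
	idx := indexer.NewIndexer(rt, s)
	return document.Handler{Runtime: rt, Store: s, Indexer: idx}
}

func main() {}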

@@ -12,17 +12,16 @@
package mysql
import (
"database/sql"
"fmt"
"regexp"
"strings"
"time"
"github.com/documize/community/core/env"
"github.com/documize/community/core/streamutil"
"github.com/documize/community/core/stringutil"
"github.com/documize/community/domain"
"github.com/documize/community/domain/store/mysql"
"github.com/documize/community/model"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/doc"
"github.com/documize/community/model/page"
"github.com/documize/community/model/search"
"github.com/jmoiron/sqlx"
@@ -34,278 +33,316 @@ type Scope struct {
Runtime *env.Runtime
}
// Add search entry (legacy name: searchAdd).
func (s Scope) Add(ctx domain.RequestContext, page page.Page) (err error) {
id := page.RefID
// translate the html into text for the search
nonHTML, err := stringutil.HTML(page.Body).Text(false)
// IndexDocument adds search index entries for a document, inserting title, tags and attachments as
// searchable items. Any existing document entries are removed.
func (s Scope) IndexDocument(ctx domain.RequestContext, doc doc.Document, a []attachment.Attachment) (err error) {
// remove previous search entries
var stmt1 *sqlx.Stmt
stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=? AND (itemtype='doc' OR itemtype='file' OR itemtype='tag')")
defer streamutil.Close(stmt1)
if err != nil {
errors.Wrap(err, "search decode body")
err = errors.Wrap(err, "prepare delete document index entries")
return
}
// insert into the search table, getting the document title along the way
var stmt *sqlx.Stmt
stmt, err = ctx.Transaction.Preparex(
"INSERT INTO search (id, orgid, documentid, level, sequence, documenttitle, slug, pagetitle, body, created, revised) " +
" SELECT page.refid,page.orgid,document.refid,page.level,page.sequence,document.title,document.slug,page.title,?,page.created,page.revised " +
" FROM document,page WHERE page.refid=? AND document.refid=page.documentid")
defer streamutil.Close(stmt)
_, err = stmt1.Exec(ctx.OrgID, doc.RefID)
if err != nil {
err = errors.Wrap(err, "prepare search insert")
err = errors.Wrap(err, "execute delete document index entries")
return
}
_, err = stmt.Exec(nonHTML, id)
if err != nil {
err = errors.Wrap(err, "execute search insert")
return
}
return nil
}
// Update search entry (legacy name: searchUpdate).
func (s Scope) Update(ctx domain.RequestContext, page page.Page) (err error) {
// translate the html into text for the search
nonHTML, err := stringutil.HTML(page.Body).Text(false)
if err != nil {
err = errors.Wrap(err, "search decode body")
return
}
su, err := ctx.Transaction.Preparex("UPDATE search SET pagetitle=?,body=?,sequence=?,level=?,revised=? WHERE id=?")
defer streamutil.Close(su)
if err != nil {
err = errors.Wrap(err, "prepare search update")
return err
}
_, err = su.Exec(page.Title, nonHTML, page.Sequence, page.Level, page.Revised, page.RefID)
if err != nil {
err = errors.Wrap(err, "execute search update")
return
}
return nil
}
// UpdateDocument search entries for document (legacy name: searchUpdateDocument).
func (s Scope) UpdateDocument(ctx domain.RequestContext, page page.Page) (err error) {
stmt, err := ctx.Transaction.Preparex("UPDATE search SET documenttitle=?, slug=?, revised=? WHERE documentid=?")
defer streamutil.Close(stmt)
if err != nil {
err = errors.Wrap(err, "prepare search document update")
return err
}
_, err = stmt.Exec(page.Title, page.Body, time.Now().UTC(), page.DocumentID)
if err != nil {
err = errors.Wrap(err, "execute search document update")
return err
}
return nil
}
// DeleteDocument removes document search entries (legacy name: searchDeleteDocument)
func (s Scope) DeleteDocument(ctx domain.RequestContext, page page.Page) (err error) {
var bm = mysql.BaseQuery{}
_, err = bm.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE from search WHERE documentid='%s'", page.DocumentID))
if err != nil {
err = errors.Wrap(err, "delete document search entries")
}
return nil
}
// Rebuild ... (legacy name: searchRebuild)
func (s Scope) Rebuild(ctx domain.RequestContext, p page.Page) (err error) {
var bm = mysql.BaseQuery{}
_, err = bm.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE from search WHERE documentid='%s'", p.DocumentID))
if err != nil {
err = errors.Wrap(err, err.Error())
return err
}
var pages []struct{ ID string }
stmt2, err := ctx.Transaction.Preparex("SELECT refid as id FROM page WHERE documentid=? ")
// insert doc title
var stmt2 *sqlx.Stmt
stmt2, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
defer streamutil.Close(stmt2)
if err != nil {
err = errors.Wrap(err, err.Error())
return err
err = errors.Wrap(err, "prepare insert document title entry")
return
}
err = stmt2.Select(&pages, p.DocumentID)
_, err = stmt2.Exec(ctx.OrgID, doc.RefID, "", "doc", doc.Title)
if err != nil {
err = errors.Wrap(err, err.Error())
return err
err = errors.Wrap(err, "execute insert document title entry")
return
}
if len(pages) > 0 {
for _, pg := range pages {
err = s.Add(ctx, page.Page{BaseEntity: model.BaseEntity{RefID: pg.ID}})
if err != nil {
err = errors.Wrap(err, err.Error())
return err
}
// insert doc tags
tags := strings.Split(doc.Tags, "#")
for _, t := range tags {
if len(t) == 0 {
continue
}
// rebuild doc-level tags & excerpts
// get the 0'th page data and rewrite it
target := page.Page{}
stmt1, err := ctx.Transaction.Preparex("SELECT * FROM page WHERE refid=?")
defer streamutil.Close(stmt1)
var stmt3 *sqlx.Stmt
stmt3, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
defer streamutil.Close(stmt3)
if err != nil {
err = errors.Wrap(err, err.Error())
return err
err = errors.Wrap(err, "prepare insert document tag entry")
return
}
err = stmt1.Get(&target, pages[0].ID)
_, err = stmt3.Exec(ctx.OrgID, doc.RefID, "", "tag", t)
if err != nil {
err = errors.Wrap(err, err.Error())
return err
}
err = s.Update(ctx, target) // to rebuild the document-level tags + excerpt
if err != nil {
err = errors.Wrap(err, err.Error())
return err
err = errors.Wrap(err, "execute insert document tag entry")
return
}
}
return
for _, file := range a {
var stmt4 *sqlx.Stmt
stmt4, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
defer streamutil.Close(stmt4)
if err != nil {
err = errors.Wrap(err, "prepare insert document file entry")
return
}
_, err = stmt4.Exec(ctx.OrgID, doc.RefID, file.RefID, "file", file.Filename)
if err != nil {
err = errors.Wrap(err, "execute insert document file entry")
return
}
}
return nil
}
// UpdateSequence ... (legacy name: searchUpdateSequence)
func (s Scope) UpdateSequence(ctx domain.RequestContext, page page.Page) (err error) {
supdate, err := ctx.Transaction.Preparex("UPDATE search SET sequence=?,revised=? WHERE id=?")
defer streamutil.Close(supdate)
// DeleteDocument removes all search entries for document.
func (s Scope) DeleteDocument(ctx domain.RequestContext, ID string) (err error) {
// remove all search entries
var stmt1 *sqlx.Stmt
stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=?")
defer streamutil.Close(stmt1)
if err != nil {
err = errors.Wrap(err, "prepare search update sequence")
return err
err = errors.Wrap(err, "prepare delete document entries")
return
}
_, err = supdate.Exec(page.Sequence, time.Now().UTC(), page.RefID)
_, err = stmt1.Exec(ctx.OrgID, ID)
if err != nil {
err = errors.Wrap(err, "execute search update sequence")
err = errors.Wrap(err, "execute delete document entries")
return
}
return
}
// UpdateLevel ... (legacy name: searchUpdateLevel)
func (s Scope) UpdateLevel(ctx domain.RequestContext, page page.Page) (err error) {
pageID := page.RefID
level := page.Level
supdate, err := ctx.Transaction.Preparex("UPDATE search SET level=?,revised=? WHERE id=?")
defer streamutil.Close(supdate)
// IndexContent adds a search index entry for document content.
// Any existing document entries are removed.
func (s Scope) IndexContent(ctx domain.RequestContext, p page.Page) (err error) {
// remove previous search entries
var stmt1 *sqlx.Stmt
stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND documentid=? AND itemid=? AND itemtype='page'")
defer streamutil.Close(stmt1)
if err != nil {
err = errors.Wrap(err, "prepare search update level")
return err
}
_, err = supdate.Exec(level, time.Now().UTC(), pageID)
if err != nil {
err = errors.Wrap(err, "execute search update level")
err = errors.Wrap(err, "prepare delete document content entry")
return
}
return
_, err = stmt1.Exec(ctx.OrgID, p.DocumentID, p.RefID)
if err != nil {
err = errors.Wrap(err, "execute delete document content entry")
return
}
// insert page content
var stmt2 *sqlx.Stmt
stmt2, err = ctx.Transaction.Preparex("INSERT INTO search (orgid, documentid, itemid, itemtype, content) VALUES (?, ?, ?, ?, ?)")
defer streamutil.Close(stmt2)
if err != nil {
err = errors.Wrap(err, "prepare insert document content entry")
return
}
// prepare content
content, err := stringutil.HTML(p.Body).Text(false)
if err != nil {
err = errors.Wrap(err, "search strip HTML failed")
return
}
content = strings.TrimSpace(content)
_, err = stmt2.Exec(ctx.OrgID, p.DocumentID, p.RefID, "page", content)
if err != nil {
err = errors.Wrap(err, "execute insert document content entry")
return
}
return nil
}
// Delete ... (legacy name: searchDelete).
func (s Scope) Delete(ctx domain.RequestContext, page page.Page) (err error) {
var bm = mysql.BaseQuery{}
_, err = bm.DeleteConstrainedWithID(ctx.Transaction, "search", ctx.OrgID, page.RefID)
// DeleteContent removes all search entries for specific document content.
func (s Scope) DeleteContent(ctx domain.RequestContext, pageID string) (err error) {
// remove all search entries
var stmt1 *sqlx.Stmt
stmt1, err = ctx.Transaction.Preparex("DELETE FROM search WHERE orgid=? AND itemid=? AND itemtype=?")
defer streamutil.Close(stmt1)
if err != nil {
err = errors.Wrap(err, "prepare delete document content entry")
return
}
_, err = stmt1.Exec(ctx.OrgID, pageID, "page")
if err != nil {
err = errors.Wrap(err, "execute delete document content entry")
return
}
return
}
// Documents searches the documents that the client is allowed to see, using the keywords search string, then audits that search.
// Visible documents include both those in the client's own organisation and those that are public, or whose visibility includes the client.
func (s Scope) Documents(ctx domain.RequestContext, keywords string) (results []search.DocumentSearch, err error) {
if len(keywords) == 0 {
func (s Scope) Documents(ctx domain.RequestContext, q search.QueryOptions) (results []search.QueryResult, err error) {
q.Keywords = strings.TrimSpace(q.Keywords)
if len(q.Keywords) == 0 {
return
}
var tagQuery, keywordQuery string
results = []search.QueryResult{}
r, _ := regexp.Compile(`(#[a-z0-9][a-z0-9\-_]*)`)
res := r.FindAllString(keywords, -1)
if len(res) == 0 {
tagQuery = " "
} else {
if len(res) == 1 {
tagQuery = " AND document.tags LIKE '%" + res[0] + "#%' "
} else {
fmt.Println("lots of tags!")
tagQuery = " AND ("
for i := 0; i < len(res); i++ {
tagQuery += "document.tags LIKE '%" + res[i] + "#%'"
if i < len(res)-1 {
tagQuery += " OR "
}
}
tagQuery += ") "
// Match doc names
if q.Doc {
r1, err1 := s.matchFullText(ctx, q.Keywords, "doc")
if err1 != nil {
err = errors.Wrap(err1, "search document names")
return
}
keywords = r.ReplaceAllString(keywords, "")
keywords = strings.Replace(keywords, " ", "", -1)
results = append(results, r1...)
}
keywords = strings.TrimSpace(keywords)
// Match doc content
if q.Content {
r2, err2 := s.matchFullText(ctx, q.Keywords, "page")
if err2 != nil {
err = errors.Wrap(err2, "search document content")
return
}
if len(keywords) > 0 {
keywordQuery = "AND MATCH(documenttitle,pagetitle,body) AGAINST('" + keywords + "' in boolean mode)"
results = append(results, r2...)
}
sql := `SELECT search.id, documentid, pagetitle, document.labelid, document.title as documenttitle, document.tags,
COALESCE(label.label,'Unknown') AS labelname, document.excerpt as documentexcerpt
FROM search, document LEFT JOIN label ON label.orgid=document.orgid AND label.refid = document.labelid
WHERE search.documentid = document.refid AND search.orgid=? AND document.template=0 ` + tagQuery +
`AND document.labelid IN
(SELECT refid from label WHERE orgid=? AND type=2 AND userid=?
UNION ALL SELECT refid FROM label a where orgid=? AND type=1 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid='' AND (canedit=1 OR canview=1))
UNION ALL SELECT refid FROM label a where orgid=? AND type=3 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid=? AND (canedit=1 OR canview=1))) ` + keywordQuery
// Match doc tags
if q.Tag {
r3, err3 := s.matchFullText(ctx, q.Keywords, "tag")
if err3 != nil {
err = errors.Wrap(err3, "search document tag")
return
}
err = s.Runtime.Db.Select(&results,
sql,
results = append(results, r3...)
}
// Match doc attachments
if q.Attachment {
r4, err4 := s.matchLike(ctx, q.Keywords, "file")
if err4 != nil {
err = errors.Wrap(err4, "search document attachments")
return
}
results = append(results, r4...)
}
return
}
func (s Scope) matchFullText(ctx domain.RequestContext, keywords, itemType string) (r []search.QueryResult, err error) {
sql1 := `
SELECT
s.id, s.orgid, s.documentid, s.itemid, s.itemtype,
d.labelid as spaceid, COALESCE(d.title,'Unknown') AS document, d.tags, d.excerpt,
COALESCE(l.label,'Unknown') AS space
FROM
search s,
document d
LEFT JOIN
label l ON l.orgid=d.orgid AND l.refid = d.labelid
WHERE
s.orgid = ?
AND s.itemtype = ?
AND s.documentid = d.refid
-- AND d.template = 0
AND d.labelid IN (SELECT refid from label WHERE orgid=? AND type=2 AND userid=?
UNION ALL SELECT refid FROM label a where orgid=? AND type=1 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid='' AND (canedit=1 OR canview=1))
UNION ALL SELECT refid FROM label a where orgid=? AND type=3 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid=? AND (canedit=1 OR canview=1)))
AND MATCH(s.content) AGAINST(? IN BOOLEAN MODE)`
err = s.Runtime.Db.Select(&r,
sql1,
ctx.OrgID,
itemType,
ctx.OrgID,
ctx.UserID,
ctx.OrgID,
ctx.OrgID,
ctx.OrgID,
ctx.OrgID,
ctx.UserID)
ctx.UserID,
keywords)
if err == sql.ErrNoRows {
err = nil
r = []search.QueryResult{}
}
if err != nil {
err = errors.Wrap(err, "search documents")
err = errors.Wrap(err, "search document "+itemType)
return
}
return
}
func (s Scope) matchLike(ctx domain.RequestContext, keywords, itemType string) (r []search.QueryResult, err error) {
// LIKE clause does not like quotes!
keywords = strings.Replace(keywords, "'", "", -1)
keywords = strings.Replace(keywords, "\"", "", -1)
keywords = strings.Replace(keywords, "%", "", -1)
keywords = fmt.Sprintf("%%%s%%", keywords)
sql1 := `
SELECT
s.id, s.orgid, s.documentid, s.itemid, s.itemtype,
d.labelid as spaceid, COALESCE(d.title,'Unknown') AS document, d.tags, d.excerpt,
COALESCE(l.label,'Unknown') AS space
FROM
search s,
document d
LEFT JOIN
label l ON l.orgid=d.orgid AND l.refid = d.labelid
WHERE
s.orgid = ?
AND s.itemtype = ?
AND s.documentid = d.refid
-- AND d.template = 0
AND d.labelid IN (SELECT refid from label WHERE orgid=? AND type=2 AND userid=?
UNION ALL SELECT refid FROM label a where orgid=? AND type=1 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid='' AND (canedit=1 OR canview=1))
UNION ALL SELECT refid FROM label a where orgid=? AND type=3 AND refid IN (SELECT labelid from labelrole WHERE orgid=? AND userid=? AND (canedit=1 OR canview=1)))
AND s.content LIKE ?`
err = s.Runtime.Db.Select(&r,
sql1,
ctx.OrgID,
itemType,
ctx.OrgID,
ctx.UserID,
ctx.OrgID,
ctx.OrgID,
ctx.OrgID,
ctx.OrgID,
ctx.UserID,
keywords)
if err == sql.ErrNoRows {
err = nil
r = []search.QueryResult{}
}
if err != nil {
err = errors.Wrap(err, "search document "+itemType)
return
}

@@ -1,142 +0,0 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package search
import (
"errors"
"fmt"
"sync"
"github.com/documize/community/core/env"
"github.com/documize/community/domain"
"github.com/documize/community/model/page"
)
// Indexer type provides the datastructure for the queues of activity to be serialized through a single background goroutine.
// NOTE if the queue becomes full, the system will trigger the rebuilding entire files in order to clear the backlog.
type Indexer struct {
queue chan queueEntry
rebuild map[string]bool
rebuildLock sync.RWMutex
givenWarning bool
runtime *env.Runtime
store *domain.Store
}
type queueEntry struct {
action func(domain.RequestContext, page.Page) error
isRebuild bool
page.Page
ctx domain.RequestContext
}
const searchQueueLength = 2048 // NOTE the largest 15Mb docx in the test set generates 2142 queue entries, but the queue is constantly emptied
// NewIndexer provides background search indexer
func NewIndexer(rt *env.Runtime, s *domain.Store) (i Indexer) {
i = Indexer{}
i.queue = make(chan queueEntry, searchQueueLength) // provide some decoupling
i.rebuild = make(map[string]bool)
i.runtime = rt
i.store = s
go i.processQueue()
return
}
// processQueue is run as a goroutine, it processes the queue of search index update requests.
func (m *Indexer) processQueue() {
for {
//fmt.Println("DEBUG queue length=", len(Searches.queue))
if len(m.queue) <= searchQueueLength/20 { // on a busy server, the queue may never get to zero - so use 5%
m.rebuildLock.Lock()
for docid := range m.rebuild {
m.queue <- queueEntry{
action: m.store.Search.Rebuild,
isRebuild: true,
Page: page.Page{DocumentID: docid},
}
delete(m.rebuild, docid)
}
m.rebuildLock.Unlock()
}
qe := <-m.queue
doit := true
if len(qe.DocumentID) > 0 {
m.rebuildLock.RLock()
if m.rebuild[qe.DocumentID] {
doit = false // don't execute an action on a document queued to be rebuilt
}
m.rebuildLock.RUnlock()
}
if doit {
tx, err := m.runtime.Db.Beginx()
if err != nil {
} else {
ctx := qe.ctx
ctx.Transaction = tx
err = qe.action(ctx, qe.Page)
if err != nil {
tx.Rollback()
// This action has failed, so re-build indexes for the entire document,
// provided it was not a re-build command that failed and we know the documentId.
if !qe.isRebuild && len(qe.DocumentID) > 0 {
m.rebuildLock.Lock()
m.rebuild[qe.DocumentID] = true
m.rebuildLock.Unlock()
}
} else {
tx.Commit()
}
}
}
}
}
func (m *Indexer) addQueue(qe queueEntry) error {
lsq := len(m.queue)
if lsq >= (searchQueueLength - 1) {
if qe.DocumentID != "" {
m.rebuildLock.Lock()
if !m.rebuild[qe.DocumentID] {
m.runtime.Log.Info(fmt.Sprintf("WARNING: Search Queue Has No Space! Marked rebuild index for document id %s", qe.DocumentID))
}
m.rebuild[qe.DocumentID] = true
m.rebuildLock.Unlock()
} else {
m.runtime.Log.Error("addQueue", errors.New("WARNING: Search Queue Has No Space! But unable to index unknown document id"))
}
return nil
}
if lsq > ((8 * searchQueueLength) / 10) {
if !m.givenWarning {
m.runtime.Log.Info(fmt.Sprintf("WARNING: Searches.queue length %d exceeds 80%% of capacity", lsq))
m.givenWarning = true
}
} else {
if m.givenWarning {
m.runtime.Log.Info(fmt.Sprintf("INFO: Searches.queue length %d now below 80%% of capacity", lsq))
m.givenWarning = false
}
}
m.queue <- qe
return nil
}

@@ -13,102 +13,93 @@ package search
import (
"github.com/documize/community/domain"
"github.com/documize/community/model"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/doc"
"github.com/documize/community/model/page"
)
// Add should be called when a new page is added to a document.
func (m *Indexer) Add(ctx domain.RequestContext, page page.Page, id string) (err error) {
page.RefID = id
// IndexDocument adds search index entries for a document, inserting title, tags and attachments as
// searchable items. Any existing document entries are removed.
func (m *Indexer) IndexDocument(ctx domain.RequestContext, d doc.Document, a []attachment.Attachment) {
method := "search.IndexDocument"
var err error
err = m.addQueue(queueEntry{
action: m.store.Search.Add,
Page: page,
ctx: ctx,
})
return
}
// Update should be called after a page record has been updated.
func (m *Indexer) Update(ctx domain.RequestContext, page page.Page) (err error) {
err = m.addQueue(queueEntry{
action: m.store.Search.Update,
Page: page,
ctx: ctx,
})
return
}
// UpdateDocument should be called after a document record has been updated.
func (m *Indexer) UpdateDocument(ctx domain.RequestContext, document doc.Document) (err error) {
err = m.addQueue(queueEntry{
action: m.store.Search.UpdateDocument,
Page: page.Page{
DocumentID: document.RefID,
Title: document.Title,
Body: document.Slug, // NOTE body==slug in this context
},
ctx: ctx,
})
return
}
// DeleteDocument should be called after a document record has been deleted.
func (m *Indexer) DeleteDocument(ctx domain.RequestContext, documentID string) (err error) {
if len(documentID) > 0 {
m.queue <- queueEntry{
action: m.store.Search.DeleteDocument,
Page: page.Page{DocumentID: documentID},
ctx: ctx,
}
ctx.Transaction, err = m.runtime.Db.Beginx()
if err != nil {
m.runtime.Log.Error(method, err)
return
}
return
err = m.store.Search.IndexDocument(ctx, d, a)
if err != nil {
ctx.Transaction.Rollback()
m.runtime.Log.Error(method, err)
return
}
ctx.Transaction.Commit()
}
// UpdateSequence should be called after a page record has been resequenced.
func (m *Indexer) UpdateSequence(ctx domain.RequestContext, documentID, pageID string, sequence float64) (err error) {
err = m.addQueue(queueEntry{
action: m.store.Search.UpdateSequence,
Page: page.Page{
BaseEntity: model.BaseEntity{RefID: pageID},
Sequence: sequence,
DocumentID: documentID,
},
ctx: ctx,
})
// DeleteDocument removes all search entries for document.
func (m *Indexer) DeleteDocument(ctx domain.RequestContext, ID string) {
method := "search.DeleteDocument"
var err error
return
ctx.Transaction, err = m.runtime.Db.Beginx()
if err != nil {
m.runtime.Log.Error(method, err)
return
}
err = m.store.Search.DeleteDocument(ctx, ID)
if err != nil {
ctx.Transaction.Rollback()
m.runtime.Log.Error(method, err)
return
}
ctx.Transaction.Commit()
}
// UpdateLevel should be called after the level of a page has been changed.
func (m *Indexer) UpdateLevel(ctx domain.RequestContext, documentID, pageID string, level int) (err error) {
err = m.addQueue(queueEntry{
action: m.store.Search.UpdateLevel,
Page: page.Page{
BaseEntity: model.BaseEntity{RefID: pageID},
Level: uint64(level),
DocumentID: documentID,
},
ctx: ctx,
})
// IndexContent adds a search index entry for document content.
// Any existing document entries are removed.
func (m *Indexer) IndexContent(ctx domain.RequestContext, p page.Page) {
method := "search.IndexContent"
var err error
return
ctx.Transaction, err = m.runtime.Db.Beginx()
if err != nil {
m.runtime.Log.Error(method, err)
return
}
err = m.store.Search.IndexContent(ctx, p)
if err != nil {
ctx.Transaction.Rollback()
m.runtime.Log.Error(method, err)
return
}
ctx.Transaction.Commit()
}
// Delete should be called after a page has been deleted.
func (m *Indexer) Delete(ctx domain.RequestContext, documentID, pageID string) (rows int64, err error) {
err = m.addQueue(queueEntry{
action: m.store.Search.Delete,
Page: page.Page{
BaseEntity: model.BaseEntity{RefID: pageID},
DocumentID: documentID,
},
ctx: ctx,
})
// DeleteContent removes all search entries for specific document content.
func (m *Indexer) DeleteContent(ctx domain.RequestContext, pageID string) {
method := "search.DeleteContent"
var err error
return
}
ctx.Transaction, err = m.runtime.Db.Beginx()
if err != nil {
m.runtime.Log.Error(method, err)
return
}
err = m.store.Search.DeleteContent(ctx, pageID)
if err != nil {
ctx.Transaction.Rollback()
m.runtime.Log.Error(method, err)
return
}
ctx.Transaction.Commit()
}

@@ -180,26 +180,19 @@ type ActivityStorer interface {
// SearchStorer defines required methods for persisting search queries
type SearchStorer interface {
Add(ctx RequestContext, page page.Page) (err error)
Update(ctx RequestContext, page page.Page) (err error)
UpdateDocument(ctx RequestContext, page page.Page) (err error)
DeleteDocument(ctx RequestContext, page page.Page) (err error)
Rebuild(ctx RequestContext, p page.Page) (err error)
UpdateSequence(ctx RequestContext, page page.Page) (err error)
UpdateLevel(ctx RequestContext, page page.Page) (err error)
Delete(ctx RequestContext, page page.Page) (err error)
Documents(ctx RequestContext, keywords string) (results []search.DocumentSearch, err error)
IndexDocument(ctx RequestContext, doc doc.Document, a []attachment.Attachment) (err error)
DeleteDocument(ctx RequestContext, ID string) (err error)
IndexContent(ctx RequestContext, p page.Page) (err error)
DeleteContent(ctx RequestContext, pageID string) (err error)
Documents(ctx RequestContext, q search.QueryOptions) (results []search.QueryResult, err error)
}
// Indexer defines required methods for managing search indexing process
type Indexer interface {
Add(ctx RequestContext, page page.Page, id string) (err error)
Update(ctx RequestContext, page page.Page) (err error)
UpdateDocument(ctx RequestContext, page page.Page) (err error)
DeleteDocument(ctx RequestContext, documentID string) (err error)
UpdateSequence(ctx RequestContext, documentID, pageID string, sequence float64) (err error)
UpdateLevel(ctx RequestContext, documentID, pageID string, level int) (err error)
Delete(ctx RequestContext, documentID, pageID string) (err error)
IndexDocument(ctx RequestContext, d doc.Document, a []attachment.Attachment)
DeleteDocument(ctx RequestContext, ID string)
IndexContent(ctx RequestContext, p page.Page)
DeleteContent(ctx RequestContext, pageID string)
}
// BlockStorer defines required methods for persisting reusable content blocks

@@ -28,6 +28,7 @@ import (
"github.com/documize/community/core/uniqueid"
"github.com/documize/community/domain"
"github.com/documize/community/domain/document"
indexer "github.com/documize/community/domain/search"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/audit"
"github.com/documize/community/model/doc"
@@ -40,6 +41,7 @@ import (
type Handler struct {
Runtime *env.Runtime
Store *domain.Store
Indexer indexer.Indexer
}
// SavedList returns all templates saved by the user
@@ -363,5 +365,8 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {
event.Handler().Publish(string(event.TypeAddDocument), nd.Title)
a, _ := h.Store.Attachment.GetAttachments(ctx, documentID)
go h.Indexer.IndexDocument(ctx, nd, a)
response.WriteJSON(w, nd)
}

@@ -37,7 +37,7 @@ func main() {
// product details
rt.Product = env.ProdInfo{}
rt.Product.Major = "1"
rt.Product.Minor = "51"
rt.Product.Minor = "52"
rt.Product.Patch = "0"
rt.Product.Version = fmt.Sprintf("%s.%s.%s", rt.Product.Major, rt.Product.Minor, rt.Product.Patch)
rt.Product.Edition = "Community"

File diff suppressed because one or more lines are too long

@@ -16,39 +16,23 @@ export default Ember.Component.extend({
resultPhrase: "",
didReceiveAttrs() {
let results = this.get('results');
let temp = _.groupBy(results, 'documentId');
let documents = [];
_.each(temp, function (document) {
let refs = [];
if (document.length > 1) {
refs = document.slice(1);
}
_.each(refs, function (ref, index) {
ref.comma = index === refs.length - 1 ? "" : ", ";
});
let hasRefs = refs.length > 0;
documents.pushObject({
doc: document[0],
ref: refs,
hasReferences: hasRefs
});
});
let docs = this.get('results');
let duped = [];
let phrase = 'Nothing found';
if (results.length > 0) {
let references = results.length === 1 ? "reference" : "references";
let i = results.length;
phrase = `${i} ${references}`;
if (docs.length > 0) {
duped = _.uniq(docs, function (item) {
return item.documentId;
});
let references = docs.length === 1 ? "reference" : "references";
let docLabel = duped.length === 1 ? "document" : "documents";
let i = docs.length;
let j = duped.length;
phrase = `${i} ${references} across ${j} ${docLabel}`;
}
this.set('resultPhrase', phrase);
this.set('documents', documents);
this.set('documents', duped);
}
});

@@ -1,11 +1,11 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
// by contacting <sales@documize.com>.
//
// https://documize.com
@@ -13,19 +13,51 @@ import Ember from 'ember';
export default Ember.Controller.extend({
searchService: Ember.inject.service('search'),
queryParams: ['filter'],
filter: "",
results: [],
matchDoc: true,
matchContent: true,
matchFile: false,
matchTag: false,
onKeywordChange: function () {
Ember.run.debounce(this, this.fetch, 750);
}.observes('filter'),
onMatchDoc: function () {
Ember.run.debounce(this, this.fetch, 750);
}.observes('matchDoc'),
onMatchContent: function () {
Ember.run.debounce(this, this.fetch, 750);
}.observes('matchContent'),
onMatchTag: function () {
Ember.run.debounce(this, this.fetch, 750);
}.observes('matchTag'),
onMatchFile: function () {
Ember.run.debounce(this, this.fetch, 750);
}.observes('matchFile'),
fetch() {
let self = this;
let payload = {
keywords: this.get('filter'),
doc: this.get('matchDoc'),
attachment: this.get('matchFile'),
tag: this.get('matchTag'),
content: this.get('matchContent')
};
this.get('searchService').find(this.get('filter')).then(function (response) {
payload.keywords = payload.keywords.trim();
if (payload.keywords.length == 0) {
return;
}
if (!payload.doc && !payload.tag && !payload.content && !payload.attachment) {
return;
}
this.get('searchService').find(payload).then(function(response) {
self.set('results', response);
});
}
});
},
});

@@ -1,15 +1,26 @@
{{layout/zone-navigation}}
{{#layout/zone-container}}
{{#layout/zone-sidebar}}
<div class="sidebar-toolbar">
</div>
<div class="sidebar-common">
{{layout/sidebar-intro title="Search" message='#tag, keyword, "some phrase", this AND that, this OR that'}}
{{layout/sidebar-intro title="Search" message='Search across document name, contents, tags and attachment filenames'}}
</div>
<div class="sidebar-wrapper">
<div class="page-search">
<div class="input-control">
{{focus-input type="text" value=filter placeholder='type search phrase (case sensitive)'}}
{{focus-input type="text" value=filter placeholder='type search phrase'}}
{{#ui/ui-checkbox selected=matchDoc}}document name{{/ui/ui-checkbox}}
{{#ui/ui-checkbox selected=matchContent}}content{{/ui/ui-checkbox}}
{{#ui/ui-checkbox selected=matchTag}}tag{{/ui/ui-checkbox}}
{{#ui/ui-checkbox selected=matchFile}}attachment name{{/ui/ui-checkbox}}
</div>
<div class="examples">
<p>a OR b</p>
<p>x AND y</p>
<p>"phrase match"</p>
<p>* for wildcard match</p>
</div>
</div>
</div>

@@ -19,12 +19,12 @@ export default Ember.Service.extend({
sessionService: service('session'),
ajax: service(),
// getUsers returns all users for organization.
find(keywords) {
let url = "search?keywords=" + encodeURIComponent(keywords);
return this.get('ajax').request(url, {
method: "GET"
// find all matching documents.
find(payload) {
return this.get('ajax').request("search", {
method: "POST",
data: JSON.stringify(payload),
contentType: 'json'
});
},
});

@@ -5,6 +5,10 @@
}
}
.examples {
color: $color-gray;
}
.search-results {
> .heading {
font-size: 2rem;

@@ -1,13 +1,13 @@
<div class="search-results">
<div class="heading">{{resultPhrase}}</div>
<ul class="list">
{{#each documents key="doc.id" as |result index|}}
{{#each documents key="id" as |result index|}}
<li class="item">
<a class="link" href="s/{{result.doc.folderId}}/{{result.doc.folderSlug}}/d/{{ result.doc.documentId }}/{{result.doc.documentSlug}}?page={{ result.doc.id }}">
<div class="title">{{ result.doc.documentTitle }}</div>
<div class="folder">{{ result.doc.folderName }}</div>
<div class="excerpt">{{ result.doc.documentExcerpt }}</div>
<div class="chips">{{search/tag-list documentTags=result.doc.documentTags}}</div>
<a class="link" href="s/{{result.spaceId}}/{{result.spaceSlug}}/d/{{ result.documentId }}/{{result.documentSlug}}?page={{ result.itemId }}">
<div class="title">{{ result.document }}</div>
<div class="folder">{{ result.space }}</div>
<div class="excerpt">{{ result.excerpt }}</div>
<div class="chips">{{search/tag-list documentTags=result.tags}}</div>
</a>
</li>
{{/each}}

@@ -1,6 +1,6 @@
{
"name": "documize",
"version": "1.51.0",
"version": "1.52.0",
"description": "The Document IDE",
"private": true,
"repository": "",

@@ -3,5 +3,5 @@
"target": "es6",
"experimentalDecorators": true
},
"exclude": ["node_modules", "bower_components", "tmp", "vendor", ".git", "dist", "dist-prod"]
"exclude": ["node_modules", "bower_components", "tmp", "vendor", ".git", "dist", "dist-prod", "gui/node_modules", "gui/dist", "gui/dist-prod", "gui/tmp"]
}

@@ -1,16 +1,16 @@
{
"community":
{
"version": "1.51.0",
"version": "1.52.0",
"major": 1,
"minor": 51,
"minor": 52,
"patch": 0
},
"enterprise":
{
"version": "1.53.0",
"version": "1.54.0",
"major": 1,
"minor": 53,
"minor": 54,
"patch": 0
}
}

@@ -11,35 +11,27 @@
package search
import (
"time"
)
// Search holds raw search results.
type Search struct {
ID string `json:"id"`
Created time.Time `json:"created"`
Revised time.Time `json:"revised"`
OrgID string
DocumentID string
Level uint64
Sequence float64
DocumentTitle string
Slug string
PageTitle string
Body string
// QueryOptions defines how we search.
type QueryOptions struct {
Keywords string `json:"keywords"`
Doc bool `json:"doc"`
Tag bool `json:"tag"`
Attachment bool `json:"attachment"`
Content bool `json:"content"`
}
// DocumentSearch represents 'presentable' search results.
type DocumentSearch struct {
ID string `json:"id"`
DocumentID string `json:"documentId"`
DocumentTitle string `json:"documentTitle"`
DocumentSlug string `json:"documentSlug"`
DocumentExcerpt string `json:"documentExcerpt"`
Tags string `json:"documentTags"`
PageTitle string `json:"pageTitle"`
LabelID string `json:"folderId"`
LabelName string `json:"folderName"`
FolderSlug string `json:"folderSlug"`
// QueryResult represents 'presentable' search results.
type QueryResult struct {
ID string `json:"id"`
OrgID string `json:"orgId"`
ItemID string `json:"itemId"`
ItemType string `json:"itemType"`
DocumentID string `json:"documentId"`
DocumentSlug string `json:"documentSlug"`
Document string `json:"document"`
Excerpt string `json:"excerpt"`
Tags string `json:"tags"`
SpaceID string `json:"spaceId"`
Space string `json:"space"`
SpaceSlug string `json:"spaceSlug"`
}
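
QueryOptions is exactly the JSON body the revamped Ember search service POSTs, and QueryResult is the row shape both matchers scan into. A small sketch of marshalling the request payload, with the struct copied from the model above:

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// QueryOptions mirrors model/search.QueryOptions above.
type QueryOptions struct {
	Keywords   string `json:"keywords"`
	Doc        bool   `json:"doc"`
	Tag        bool   `json:"tag"`
	Attachment bool   `json:"attachment"`
	Content    bool   `json:"content"`
}

func main() {
	// The Ember controller builds the same shape from its four checkboxes.
	q := QueryOptions{Keywords: `"search revamp"`, Doc: true, Content: true}

	b, err := json.Marshal(q)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b))
	// {"keywords":"\"search revamp\"","doc":true,"tag":false,"attachment":false,"content":true}
}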

@@ -54,10 +54,10 @@ func RegisterEndpoints(rt *env.Runtime, s *domain.Store) {
section := section.Handler{Runtime: rt, Store: s}
setting := setting.Handler{Runtime: rt, Store: s}
keycloak := keycloak.Handler{Runtime: rt, Store: s}
template := template.Handler{Runtime: rt, Store: s}
template := template.Handler{Runtime: rt, Store: s, Indexer: indexer}
document := document.Handler{Runtime: rt, Store: s, Indexer: indexer}
attachment := attachment.Handler{Runtime: rt, Store: s}
conversion := conversion.Handler{Runtime: rt, Store: s}
attachment := attachment.Handler{Runtime: rt, Store: s, Indexer: indexer}
conversion := conversion.Handler{Runtime: rt, Store: s, Indexer: indexer}
organization := organization.Handler{Runtime: rt, Store: s}
//**************************************************
@@ -132,7 +132,7 @@ func RegisterEndpoints(rt *env.Runtime, s *domain.Store) {
Add(rt, RoutePrefixPrivate, "users/{userID}", []string{"DELETE", "OPTIONS"}, nil, user.Delete)
Add(rt, RoutePrefixPrivate, "users/sync", []string{"GET", "OPTIONS"}, nil, keycloak.Sync)
Add(rt, RoutePrefixPrivate, "search", []string{"GET", "OPTIONS"}, nil, document.SearchDocuments)
Add(rt, RoutePrefixPrivate, "search", []string{"POST", "OPTIONS"}, nil, document.SearchDocuments)
Add(rt, RoutePrefixPrivate, "templates", []string{"POST", "OPTIONS"}, nil, template.SaveAs)
Add(rt, RoutePrefixPrivate, "templates", []string{"GET", "OPTIONS"}, nil, template.SavedList)