Mirror of https://github.com/documize/community.git
Improve move/copy/merge sections between documents
Process all child sections during copy/move/merge operations. Ensure links work as expected within newly copied/moved content. Index copied/moved content for searching. Fixes #138, fixes #248.
parent ec8d5c78e2
commit a90c5834fa
7 changed files with 133 additions and 47 deletions
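
Most of the diff below revolves around one idea: when sections are duplicated or copied, every section gets a freshly generated ref ID, and links inside the copied content are re-pointed at those new IDs so they keep resolving. A minimal, self-contained sketch of that remapping pattern follows for orientation; the Section and Link structs and the newID helper are simplified stand-ins, not the project's own types (the real handlers work against h.Store inside a transaction).

package main

import "fmt"

// Simplified stand-ins for the real page and link records.
type Section struct {
	RefID      string
	DocumentID string
}

type Link struct {
	RefID            string
	SourceDocumentID string
	SourceSectionID  string
}

var counter int

// newID stands in for uniqueid.Generate().
func newID() string {
	counter++
	return fmt.Sprintf("id-%03d", counter)
}

// duplicate copies sections into a new document and re-points any link that
// referenced an old section at its freshly minted counterpart.
func duplicate(newDocID string, sections []Section, links []Link) ([]Section, []Link) {
	// Pass 1: give every section a new ref ID and remember old -> new.
	refMap := make(map[string]string)
	outSections := make([]Section, len(sections))
	for i, s := range sections {
		refMap[s.RefID] = newID()
		outSections[i] = Section{RefID: refMap[s.RefID], DocumentID: newDocID}
	}

	// Pass 2: copy links; links pointing at copied sections are remapped,
	// links pointing elsewhere are left untouched.
	outLinks := make([]Link, len(links))
	for i, l := range links {
		nl := Link{RefID: newID(), SourceDocumentID: newDocID, SourceSectionID: l.SourceSectionID}
		if mapped, ok := refMap[l.SourceSectionID]; ok {
			nl.SourceSectionID = mapped
		}
		outLinks[i] = nl
	}
	return outSections, outLinks
}

func main() {
	secs := []Section{{RefID: "s1", DocumentID: "doc-old"}}
	lnks := []Link{{RefID: "l1", SourceDocumentID: "doc-old", SourceSectionID: "s1"}}
	newSecs, newLinks := duplicate("doc-new", secs, lnks)
	fmt.Println(newSecs, newLinks)
}

Links whose SourceSectionID is not in the map point outside the copied content and are deliberately left untouched, which matches the guarded remap in the Duplicate handler below.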
@@ -775,6 +775,9 @@ func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
     method := "document.Duplicate"
     ctx := domain.GetRequestContext(r)
 
+    // Holds old to new ref ID values.
+    pageRefMap := make(map[string]string)
+
     // Parse payload
     defer streamutil.Close(r.Body)
     body, err := ioutil.ReadAll(r.Body)
@@ -893,21 +896,12 @@ func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
             return
         }
     }
 
-    // Links
-    for l := range dl {
-        dl[l].SourceDocumentID = d.RefID
-        dl[l].RefID = uniqueid.Generate()
-
-        err = h.Store.Link.Add(ctx, dl[l])
-        if err != nil {
-            ctx.Transaction.Rollback()
-            response.WriteServerError(w, method, err)
-            h.Runtime.Log.Error(method, err)
-            return
-        }
-    }
-
     // Sections
     for j := range pages {
+        // Create mapping between old and new section IDs.
+        pageRefMap[pages[j].RefID] = uniqueid.Generate()
+
         // Get meta for section
         sm := page.Meta{}
         for k := range meta {
@@ -926,7 +920,7 @@ func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
             return
         }
 
-        pages[j].RefID = uniqueid.Generate()
+        pages[j].RefID = pageRefMap[pages[j].RefID]
         pages[j].DocumentID = d.RefID
         sm.DocumentID = d.RefID
         sm.SectionID = pages[j].RefID
@@ -954,6 +948,25 @@ func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
             }
         }
     }
+    // Links
+    for l := range dl {
+        // Update common meta for all links.
+        dl[l].RefID = uniqueid.Generate()
+        dl[l].SourceDocumentID = d.RefID
+
+        // Remap section ID.
+        if len(dl[l].SourceSectionID) > 0 && len(pageRefMap[dl[l].SourceSectionID]) > 0 {
+            dl[l].SourceSectionID = pageRefMap[dl[l].SourceSectionID]
+        }
+
+        err = h.Store.Link.Add(ctx, dl[l])
+        if err != nil {
+            ctx.Transaction.Rollback()
+            response.WriteServerError(w, method, err)
+            h.Runtime.Log.Error(method, err)
+            return
+        }
+    }
 
     // Record activity and finish.
     h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
@@ -98,13 +98,13 @@ func (s Store) GetPageLinks(ctx domain.RequestContext, documentID, pageID string
         WHERE c_orgid=? AND c_sourcedocid=? AND c_sourcesectionid=?`),
         ctx.OrgID, documentID, pageID)
 
-    if err != nil && err != sql.ErrNoRows {
-        err = errors.Wrap(err, "get page links")
-        return
-    }
-    if len(links) == 0 {
+    if err == sql.ErrNoRows || len(links) == 0 {
+        err = nil
         links = []link.Link{}
     }
+    if err != nil {
+        err = errors.Wrap(err, "get page links")
+    }
 
     return
 }
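
The store change above adopts the usual Go idiom of treating sql.ErrNoRows as a valid empty result rather than a failure, so callers such as the Copy handler below can iterate an empty slice without special-casing. A rough sketch of the same idiom with plain database/sql follows; the table and column names are illustrative only, a SQL driver would need to be registered, and the real store uses its own Bind/Select helpers.

import (
	"database/sql"
	"fmt"
)

// linkCount treats "no rows" as a valid empty result instead of an error,
// while any other failure is wrapped with context.
func linkCount(db *sql.DB, sectionID string) (n int, err error) {
	err = db.QueryRow(
		"SELECT COUNT(*) FROM link WHERE c_sourcesectionid = ?", sectionID).Scan(&n)
	if err == sql.ErrNoRows {
		// Absence of rows is an empty result, not a failure.
		return 0, nil
	}
	if err != nil {
		return 0, fmt.Errorf("count links for %s: %w", sectionID, err)
	}
	return n, nil
}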
@@ -911,16 +911,28 @@ func (h *Handler) Copy(w http.ResponseWriter, r *http.Request) {
         return
     }
 
-    // fetch data
+    // Get both source and target documents.
     doc, err := h.Store.Document.Get(ctx, documentID)
     if err != nil {
         response.WriteServerError(w, method, err)
         h.Runtime.Log.Error(method, err)
         return
     }
+    targetDoc, err := h.Store.Document.Get(ctx, targetID)
+    if err != nil {
+        response.WriteServerError(w, method, err)
+        h.Runtime.Log.Error(method, err)
+        return
+    }
 
-    // workflow check
-    if doc.Protection == workflow.ProtectionLock || doc.Protection == workflow.ProtectionReview {
+    // Workflow check for target (receiving) doc.
+    if targetDoc.Protection == workflow.ProtectionLock || targetDoc.Protection == workflow.ProtectionReview {
         response.WriteForbiddenError(w)
         return
     }
+
+    // Check permissions for target document and copy permission.
+    if !permission.CanChangeDocument(ctx, *h.Store, targetDoc.RefID) {
+        response.WriteForbiddenError(w)
+        return
+    }
@@ -951,8 +963,9 @@ func (h *Handler) Copy(w http.ResponseWriter, r *http.Request) {
 
     newPageID := uniqueid.Generate()
     p.RefID = newPageID
-    p.Level = 1
-    p.Sequence = 0
+    p.Level = p.Level
+    // p.Sequence = p.Sequence
+    p.Sequence, _ = h.Store.Page.GetNextPageSequence(ctx, targetDoc.RefID)
     p.DocumentID = targetID
     p.UserID = ctx.UserID
     pageMeta.DocumentID = targetID
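
Instead of forcing the copied section to the top of the target document (level 1, sequence 0), the handler now asks the page store for the next sequence so the copy is appended after existing content. GetNextPageSequence's implementation is not part of this diff; a plausible stand-in (an assumption, not the project's code) simply steps past the current maximum sequence.

// nextPageSequence returns a sequence value that sorts after every existing
// page in the document. The fixed gap leaves room to insert pages between
// existing ones later; the real store may choose its values differently.
func nextPageSequence(existing []float64) float64 {
	max := 0.0
	for _, s := range existing {
		if s > max {
			max = s
		}
	}
	return max + 1024
}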
@@ -1003,6 +1016,33 @@ func (h *Handler) Copy(w http.ResponseWriter, r *http.Request) {
         }
     }
 
+    // Copy section links.
+    links, err := h.Store.Link.GetPageLinks(ctx, documentID, pageID)
+    if err != nil {
+        ctx.Transaction.Rollback()
+        response.WriteServerError(w, method, err)
+        h.Runtime.Log.Error(method, err)
+        return
+    }
+    for lindex := range links {
+        links[lindex].RefID = uniqueid.Generate()
+        links[lindex].SourceSectionID = newPageID
+        links[lindex].SourceDocumentID = targetID
+
+        err = h.Store.Link.Add(ctx, links[lindex])
+        if err != nil {
+            ctx.Transaction.Rollback()
+            response.WriteServerError(w, method, err)
+            h.Runtime.Log.Error(method, err)
+            return
+        }
+    }
+
+    // Update doc revised.
+    h.Store.Document.UpdateRevised(ctx, targetID)
+
+    // If document is published, we record activity and
+    // index content for search.
     if doc.Lifecycle == workflow.LifecycleLive {
         h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
             SpaceID: doc.SpaceID,
@@ -1010,17 +1050,18 @@ func (h *Handler) Copy(w http.ResponseWriter, r *http.Request) {
             SectionID:    newPageID,
             SourceType:   activity.SourceTypePage,
             ActivityType: activity.TypeCreated})
-    }
 
-    // Update doc revised.
-    h.Store.Document.UpdateRevised(ctx, targetID)
+        go h.Indexer.IndexContent(ctx, p)
+    }
 
     ctx.Transaction.Commit()
 
     h.Store.Audit.Record(ctx, audit.EventTypeSectionCopy)
 
-    np, _ := h.Store.Page.Get(ctx, pageID)
+    // Re-level all pages in document.
+    h.LevelizeDocument(ctx, targetID)
+
+    np, _ := h.Store.Page.Get(ctx, pageID)
     response.WriteJSON(w, np)
 }
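
Finally, the target document is re-levelled once the copied section has been appended. LevelizeDocument itself is not shown in this commit; as a rough illustration of what such a pass usually does (an assumption, not the project's implementation), it walks sections in sequence order and clamps each level so it never jumps more than one step deeper than its predecessor.

// levelize clamps section levels so that, scanning in sequence order, a
// section is never nested more than one level deeper than the one before it.
// Purely illustrative; the real LevelizeDocument works against the page store.
func levelize(levels []int) []int {
	out := make([]int, len(levels))
	prev := 0
	for i, l := range levels {
		if l < 1 {
			l = 1
		}
		if l > prev+1 {
			l = prev + 1
		}
		out[i] = l
		prev = l
	}
	return out
}

For example, levelize([]int{1, 3, 2}) yields [1, 2, 2].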