
Improve move/copy/merge sections between documents

Process all child sections during copy/move/merge operations.

Ensure links work as expected within newly copied/moved content.

Index copied/moved content for searching.

fixes #138
fixes #248
sauls8t 2019-06-06 16:17:36 +01:00
parent ec8d5c78e2
commit a90c5834fa
7 changed files with 133 additions and 47 deletions

View file

@ -775,6 +775,9 @@ func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
method := "document.Duplicate"
ctx := domain.GetRequestContext(r)
// Holds old to new ref ID values.
pageRefMap := make(map[string]string)
// Parse payload
defer streamutil.Close(r.Body)
body, err := ioutil.ReadAll(r.Body)
@ -893,21 +896,12 @@ func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
return
}
}
// Links
for l := range dl {
dl[l].SourceDocumentID = d.RefID
dl[l].RefID = uniqueid.Generate()
err = h.Store.Link.Add(ctx, dl[l])
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
// Sections
for j := range pages {
// Create mapping between old and new section IDs.
pageRefMap[pages[j].RefID] = uniqueid.Generate()
// Get meta for section
sm := page.Meta{}
for k := range meta {
@ -926,7 +920,7 @@ func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
return
}
pages[j].RefID = uniqueid.Generate()
pages[j].RefID = pageRefMap[pages[j].RefID]
pages[j].DocumentID = d.RefID
sm.DocumentID = d.RefID
sm.SectionID = pages[j].RefID
@ -954,6 +948,25 @@ func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
}
}
}
// Links
for l := range dl {
// Update common meta for all links.
dl[l].RefID = uniqueid.Generate()
dl[l].SourceDocumentID = d.RefID
// Remap section ID.
if len(dl[l].SourceSectionID) > 0 && len(pageRefMap[dl[l].SourceSectionID]) > 0 {
dl[l].SourceSectionID = pageRefMap[dl[l].SourceSectionID]
}
err = h.Store.Link.Add(ctx, dl[l])
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
// Record activity and finish.
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
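
The core of the Duplicate change above is an old-to-new section ID map (pageRefMap) filled while the sections are cloned; links are cloned only afterwards, with each SourceSectionID pushed through that map so internal links follow the copy. A minimal, self-contained sketch of the remapping step, using simplified Page/Link structs and github.com/google/uuid as a stand-in for Documize's uniqueid.Generate (these are not the real types):

package main

import (
	"fmt"

	"github.com/google/uuid" // stand-in for the uniqueid package used in the handler
)

// Simplified stand-ins for page.Page and link.Link.
type Page struct {
	RefID      string
	DocumentID string
}

type Link struct {
	RefID            string
	SourceDocumentID string
	SourceSectionID  string
}

// duplicateWithLinks clones pages into newDocID and remaps links so they
// point at the freshly generated section IDs instead of the originals.
func duplicateWithLinks(pages []Page, links []Link, newDocID string) ([]Page, []Link) {
	// Holds old to new ref ID values, mirroring pageRefMap in the handler.
	refMap := make(map[string]string)

	newPages := make([]Page, len(pages))
	for i, p := range pages {
		refMap[p.RefID] = uuid.New().String()
		newPages[i] = Page{RefID: refMap[p.RefID], DocumentID: newDocID}
	}

	// Links are cloned after sections so every remapped ID already exists.
	newLinks := make([]Link, len(links))
	for i, l := range links {
		nl := Link{
			RefID:            uuid.New().String(),
			SourceDocumentID: newDocID,
			SourceSectionID:  l.SourceSectionID,
		}
		if mapped, ok := refMap[l.SourceSectionID]; ok && l.SourceSectionID != "" {
			nl.SourceSectionID = mapped
		}
		newLinks[i] = nl
	}
	return newPages, newLinks
}

func main() {
	pages := []Page{{RefID: "sec-1"}, {RefID: "sec-2"}}
	links := []Link{{RefID: "lnk-1", SourceSectionID: "sec-1"}}
	np, nl := duplicateWithLinks(pages, links, "doc-new")
	fmt.Println(np[0].RefID == nl[0].SourceSectionID) // true: the link follows the new section ID
}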

View file

@ -98,13 +98,13 @@ func (s Store) GetPageLinks(ctx domain.RequestContext, documentID, pageID string
WHERE c_orgid=? AND c_sourcedocid=? AND c_sourcesectionid=?`),
ctx.OrgID, documentID, pageID)
if err != nil && err != sql.ErrNoRows {
err = errors.Wrap(err, "get page links")
return
}
if len(links) == 0 {
if err == sql.ErrNoRows || len(links) == 0 {
err = nil
links = []link.Link{}
}
if err != nil {
err = errors.Wrap(err, "get page links")
}
return
}
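
The store fix above treats "no links" as a normal outcome: sql.ErrNoRows and an empty result both produce an empty slice with a nil error, and only real failures get wrapped. A small sketch of the same pattern with sqlx; the column names come from the WHERE clause in the diff, but the table name here is a guess, not the actual Documize schema:

package store

import (
	"database/sql"

	"github.com/jmoiron/sqlx"
	"github.com/pkg/errors"
)

// Link is a trimmed-down stand-in for Documize's link.Link.
type Link struct {
	RefID           string `db:"c_refid"`
	SourceSectionID string `db:"c_sourcesectionid"`
}

// GetPageLinks returns outbound links for one section. A missing-rows result
// is normalized to an empty, non-nil slice so callers never branch on the error.
func GetPageLinks(db *sqlx.DB, orgID, documentID, pageID string) (links []Link, err error) {
	err = db.Select(&links,
		`SELECT c_refid, c_sourcesectionid FROM doc_link
		 WHERE c_orgid=? AND c_sourcedocid=? AND c_sourcesectionid=?`,
		orgID, documentID, pageID)

	if err != nil && err != sql.ErrNoRows {
		return nil, errors.Wrap(err, "get page links")
	}
	if err == sql.ErrNoRows || len(links) == 0 {
		// "No links" is a normal outcome, not an error.
		return []Link{}, nil
	}
	return links, nil
}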

View file

@ -911,16 +911,28 @@ func (h *Handler) Copy(w http.ResponseWriter, r *http.Request) {
return
}
// fetch data
// Get both source and target documents.
doc, err := h.Store.Document.Get(ctx, documentID)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
targetDoc, err := h.Store.Document.Get(ctx, targetID)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// workflow check
if doc.Protection == workflow.ProtectionLock || doc.Protection == workflow.ProtectionReview {
// Workflow check for target (receiving) doc.
if targetDoc.Protection == workflow.ProtectionLock || targetDoc.Protection == workflow.ProtectionReview {
response.WriteForbiddenError(w)
return
}
// Check permissions for target document and copy permission.
if !permission.CanChangeDocument(ctx, *h.Store, targetDoc.RefID) {
response.WriteForbiddenError(w)
return
}
@ -951,8 +963,9 @@ func (h *Handler) Copy(w http.ResponseWriter, r *http.Request) {
newPageID := uniqueid.Generate()
p.RefID = newPageID
p.Level = 1
p.Sequence = 0
p.Level = p.Level
// p.Sequence = p.Sequence
p.Sequence, _ = h.Store.Page.GetNextPageSequence(ctx, targetDoc.RefID)
p.DocumentID = targetID
p.UserID = ctx.UserID
pageMeta.DocumentID = targetID
@ -1003,6 +1016,33 @@ func (h *Handler) Copy(w http.ResponseWriter, r *http.Request) {
}
}
// Copy section links.
links, err := h.Store.Link.GetPageLinks(ctx, documentID, pageID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
for lindex := range links {
links[lindex].RefID = uniqueid.Generate()
links[lindex].SourceSectionID = newPageID
links[lindex].SourceDocumentID = targetID
err = h.Store.Link.Add(ctx, links[lindex])
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
// Update doc revised.
h.Store.Document.UpdateRevised(ctx, targetID)
// If document is published, we record activity and
// index content for search.
if doc.Lifecycle == workflow.LifecycleLive {
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: doc.SpaceID,
@ -1010,17 +1050,18 @@ func (h *Handler) Copy(w http.ResponseWriter, r *http.Request) {
SectionID: newPageID,
SourceType: activity.SourceTypePage,
ActivityType: activity.TypeCreated})
}
// Update doc revised.
h.Store.Document.UpdateRevised(ctx, targetID)
go h.Indexer.IndexContent(ctx, p)
}
ctx.Transaction.Commit()
h.Store.Audit.Record(ctx, audit.EventTypeSectionCopy)
np, _ := h.Store.Page.Get(ctx, pageID)
// Re-level all pages in document.
h.LevelizeDocument(ctx, targetID)
np, _ := h.Store.Page.Get(ctx, pageID)
response.WriteJSON(w, np)
}
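
Two details in the Copy handler above keep the target document consistent when a section arrives from another document: the copied page is appended using the next free sequence number (h.Store.Page.GetNextPageSequence) instead of being forced to sequence 0, and the whole target document is re-levelled afterwards (h.LevelizeDocument). A rough, self-contained sketch of those two steps; nextSequence and levelize are illustrative helpers with simplified types, not the actual store methods:

package main

import "fmt"

// Page is a simplified stand-in for Documize's page.Page.
type Page struct {
	RefID    string
	Sequence float64
	Level    int
}

// nextSequence returns a sequence value that sorts after every existing page,
// so a copied section always lands at the end of the target document.
func nextSequence(pages []Page) float64 {
	max := 0.0
	for _, p := range pages {
		if p.Sequence > max {
			max = p.Sequence
		}
	}
	return max + 1024 // leave room to insert between pages later
}

// levelize clamps heading levels so the document never jumps more than one
// level at a time, roughly the effect of re-levelling after a copy or move.
func levelize(pages []Page) {
	prev := 1
	for i := range pages {
		if pages[i].Level > prev+1 {
			pages[i].Level = prev + 1
		}
		if pages[i].Level < 1 {
			pages[i].Level = 1
		}
		prev = pages[i].Level
	}
}

func main() {
	target := []Page{
		{RefID: "a", Sequence: 1024, Level: 1},
		{RefID: "b", Sequence: 2048, Level: 2},
	}
	copied := Page{RefID: "c", Sequence: nextSequence(target), Level: 4}
	target = append(target, copied)
	levelize(target)
	fmt.Printf("%+v\n", target) // copied page appended with sequence 3072, level clamped to 3
}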

View file

@ -9,7 +9,7 @@
//
// https://documize.com
import { Promise as EmberPromise } from 'rsvp';
import { Promise as EmberPromise, all } from 'rsvp';
import { inject as service } from '@ember/service';
import Notifier from '../../../mixins/notifier';
import Controller from '@ember/controller';
@ -46,8 +46,19 @@ export default Controller.extend(Notifier, {
onCopyPage(pageId, targetDocumentId) {
let documentId = this.get('document.id');
this.get('documentService').copyPage(documentId, pageId, targetDocumentId).then(() => {
let pages = this.get('pages');
// Make list of page ID values including all child pages.
let pagesToProcess = [{ pageId: pageId }].concat(this.get('documentService').getChildren(pages, pageId));
// Copy each page.
let promises = [];
pagesToProcess.forEach((page, index) => {
promises[index] = this.get('documentService').copyPage(documentId, page.pageId, targetDocumentId);
});
// Do post-processing after all copying has completed.
all(promises).then(() => {
// refresh data if copied to same document
if (documentId === targetDocumentId) {
this.set('pageId', '');
@ -62,9 +73,21 @@ export default Controller.extend(Notifier, {
onMovePage(pageId, targetDocumentId) {
let documentId = this.get('document.id');
let pages = this.get('pages');
this.get('documentService').copyPage(documentId, pageId, targetDocumentId).then(() => {
this.send('onPageDeleted', { id: pageId, children: false });
// Make list of page ID values including all child pages.
let pagesToProcess = [{ pageId: pageId }].concat(this.get('documentService').getChildren(pages, pageId));
// Copy each page.
let promises = [];
pagesToProcess.forEach((page, index) => {
promises[index] = this.get('documentService').copyPage(documentId, page.pageId, targetDocumentId);
});
// Do post-processing after all copying has completed.
all(promises).then(() => {
// For move operation we delete all copied pages.
this.send('onPageDeleted', { id: pageId, children: true });
});
},
@ -109,19 +132,9 @@ export default Controller.extend(Notifier, {
let documentId = this.get('document.id');
let deleteId = deletePage.id;
let deleteChildren = deletePage.children;
let pendingChanges = [];
let pages = this.get('pages');
let pageIndex = _.findIndex(pages, function(i) { return i.get('page.id') === deleteId; });
let item = pages[pageIndex];
// select affected pages
for (var i = pageIndex + 1; i < pages.get('length'); i++) {
if (i === pageIndex + 1 && pages[i].get('page.level') === item.get('page.level')) break;
if (pages[i].get('page.level') <= item.get('page.level')) break;
pendingChanges.push({ pageId: pages[i].get('page.id'), level: pages[i].get('page.level') - 1 });
}
let pendingChanges = this.get('documentService').getChildren(pages, deleteId);
this.set('currentPageId', null);

View file

@ -183,6 +183,22 @@ export default Service.extend({
});
},
// Given a page ID, return all children of the starting page.
getChildren(pages, pageId) {
let children = [];
let pageIndex = _.findIndex(pages, function(i) { return i.get('page.id') === pageId; });
let item = pages[pageIndex];
for (var i = pageIndex + 1; i < pages.get('length'); i++) {
if (i === pageIndex + 1 && pages[i].get('page.level') === item.get('page.level')) break;
if (pages[i].get('page.level') <= item.get('page.level')) break;
children.push({ pageId: pages[i].get('page.id'), level: pages[i].get('page.level') - 1 });
}
return children;
},
//**************************************************
// Page Revisions
//**************************************************
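
The onCopyPage and onMovePage changes above both rely on this new getChildren service method: starting at the selected page, walk forward and collect every following page whose level is deeper than the starting page's, stopping at the first sibling or shallower page. That contiguous run is the subtree that gets copied or moved along with its parent. The same traversal, sketched below in Go with a simplified Page type (the real implementation is the Ember service method shown above):

package main

import "fmt"

// Page is a simplified stand-in for the page objects the Ember service iterates.
type Page struct {
	ID    string
	Level int
}

// children returns the subtree rooted at pageID: the contiguous run of
// following pages whose level is deeper than the starting page's level.
func children(pages []Page, pageID string) []Page {
	start := -1
	for i, p := range pages {
		if p.ID == pageID {
			start = i
			break
		}
	}
	if start < 0 {
		return nil
	}

	var out []Page
	for i := start + 1; i < len(pages); i++ {
		// A page at the same or a shallower level ends the subtree.
		if pages[i].Level <= pages[start].Level {
			break
		}
		out = append(out, pages[i])
	}
	return out
}

func main() {
	pages := []Page{
		{"intro", 1}, {"setup", 1}, {"setup-db", 2}, {"db-users", 3}, {"faq", 1},
	}
	fmt.Println(children(pages, "setup")) // [{setup-db 2} {db-users 3}]
}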

View file

@ -185,9 +185,8 @@
> .dates {
margin-bottom: 3rem;
font-size: 1rem;
font-size: 0.9rem;
font-weight: 300;
font-style: italic;
color: map-get($gray-shades, 700);
}
}

View file

@ -31,10 +31,14 @@
{{/if}}
<li class="item" {{action "onShowPrintModal"}}>Print</li>
<li class="item" {{action "onExport"}}>Download</li>
{{#if permissions.documentAdd}}
{{#if (or permissions.documentAdd permissions.documentCopy)}}
<li class="divider"/>
<li class="item" {{action "onShowTemplateModal"}}>Template</li>
<li class="item" {{action "onShowDuplicateModal"}}>Duplicate</li>
{{#if permissions.documentAdd}}
<li class="item" {{action "onShowTemplateModal"}}>Template</li>
{{/if}}
{{#if permissions.documentCopy}}
<li class="item" {{action "onShowDuplicateModal"}}>Copy</li>
{{/if}}
{{/if}}
{{#if permissions.documentDelete}}
<li class="divider"/>