1
0
Fork 0
mirror of https://github.com/documize/community.git synced 2025-08-02 12:05:23 +02:00

Merge pull request #179 from documize/backup-resttore

In-app Backup & Restore
This commit is contained in:
Saul S 2018-10-20 12:46:23 +01:00 committed by GitHub
commit 88b84dc5ed
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
87 changed files with 4360 additions and 1028 deletions

4
Gopkg.lock generated
View file

@ -115,14 +115,14 @@
[[projects]]
branch = "master"
digest = "1:7654989089e5bd5b6734ec3be8b695e87d3f1f8d95620b343fd7d3995a5b60d7"
digest = "1:6c41d4f998a03b6604227ccad36edaed6126c397e5d78709ef4814a1145a6757"
name = "github.com/jmoiron/sqlx"
packages = [
".",
"reflectx",
]
pruneopts = "UT"
revision = "0dae4fefe7c0e190f7b5a78dac28a1c82cc8d849"
revision = "d161d7a76b5661016ad0b085869f77fd410f3e6a"
[[projects]]
digest = "1:8ef506fc2bb9ced9b151dafa592d4046063d744c646c1bbe801982ce87e4bc24"

View file

@ -58,9 +58,9 @@ Space view.
## Latest version
[Community edition: v1.71.0](https://github.com/documize/community/releases)
[Community edition: v1.72.0](https://github.com/documize/community/releases)
[Enterprise edition: v1.73.0](https://documize.com/downloads)
[Enterprise edition: v1.74.0](https://documize.com/downloads)
## OS support

1
core/env/logger.go vendored
View file

@ -15,6 +15,7 @@ package env
// Logger provides the interface for Documize compatible loggers.
type Logger interface {
Info(message string)
Infof(message string, a ...interface{})
Trace(message string)
Error(message string, err error)
// SetDB(l Logger, db *sqlx.DB) Logger

View file

@ -43,7 +43,6 @@ func (s Store) Record(ctx domain.RequestContext, t audit.EventType) {
_, err = tx.Exec(s.Bind("INSERT INTO dmz_audit_log (c_orgid, c_userid, c_eventtype, c_ip, c_created) VALUES (?, ?, ?, ?, ?)"),
e.OrgID, e.UserID, e.Type, e.IP, e.Created)
if err != nil {
tx.Rollback()
s.Runtime.Log.Error("prepare audit insert", err)

816
domain/backup/backup.go Normal file
View file

@ -0,0 +1,816 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
// Package backup handles data backup/restore to/from ZIP format.
package backup
// The backup process can be told to export all data or just for the
// current organization (tenant).
//
// Selected data is marshalled to JSON format and then zipped up
// into a single file on the server. The resultant file is then sent
// to the caller (e.g. web browser) as a file download. Unless specified,
// the file is deleted at the end of the process.
//
// The backup file contains a manifest file that describes the backup.
import (
"archive/zip"
"encoding/json"
"fmt"
"os"
"time"
"github.com/documize/community/core/env"
"github.com/documize/community/core/uniqueid"
"github.com/documize/community/domain"
"github.com/documize/community/domain/store"
"github.com/documize/community/model/account"
"github.com/documize/community/model/action"
"github.com/documize/community/model/activity"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/audit"
m "github.com/documize/community/model/backup"
"github.com/documize/community/model/block"
"github.com/documize/community/model/category"
"github.com/documize/community/model/doc"
"github.com/documize/community/model/group"
"github.com/documize/community/model/link"
"github.com/documize/community/model/org"
"github.com/documize/community/model/page"
"github.com/documize/community/model/permission"
"github.com/documize/community/model/pin"
"github.com/documize/community/model/space"
uuid "github.com/nu7hatch/gouuid"
"github.com/pkg/errors"
)
// backerHandler contains the runtime information such as logging and database.
// It drives the export of selected data to JSON files zipped into a single
// backup archive.
type backerHandler struct {
	Runtime *env.Runtime          // product info, store provider and DB access
	Store   *store.Store          // data persistence layer
	Spec    m.ExportSpec          // what to export: system-wide or a single org
	Context domain.RequestContext // request context of the caller
}
// backupItem represents a single file to be written into the backup ZIP:
// Filename is the name inside the archive, Content is the file body
// (JSON for exported tables, see produce()).
type backupItem struct {
	Filename, Content string
}
// toJSON marshals v to indented, human-readable JSON.
func toJSON(v interface{}) (string, error) {
	pretty, marshalErr := json.MarshalIndent(v, "", " ")
	if marshalErr != nil {
		return "", marshalErr
	}
	return string(pretty), nil
}
// GenerateBackup produces a ZIP file of the specified content.
// The file is located at the same location as the running program.
// NOTE: it is up to the caller to remove the file from disk.
func (b backerHandler) GenerateBackup() (filename string, err error) {
	// As a precaution we generate a short unique string first,
	// then upgrade it to a UUID when one can be generated.
	var id = uniqueid.Generate()
	newUUID, err := uuid.NewV4()
	if err == nil {
		id = newUUID.String()
	}
	filename = fmt.Sprintf("dmz-backup-%s.zip", id)

	bf, err := os.Create(filename)
	if err != nil {
		return
	}
	defer bf.Close()

	// Create a zip writer on the file writer.
	zw := zip.NewWriter(bf)

	// Get the files to write to the ZIP file.
	files, err := b.produce(id)
	if err != nil {
		return filename, err
	}

	// Write backup data to zip file on disk.
	for _, file := range files {
		fileWriter, e2 := zw.Create(file.Filename)
		if e2 != nil {
			return filename, e2
		}
		_, e2 = fileWriter.Write([]byte(file.Content))
		// FIX: previously this checked the outer `err` (always nil here),
		// so write failures were silently ignored.
		if e2 != nil {
			return filename, e2
		}
	}

	// Close out process; Close flushes remaining zip data, so its
	// error must be checked.
	err = zw.Close()
	if err != nil {
		return
	}

	return filename, nil
}
// produce assembles the collection of files that make up the backup:
// the manifest first, followed by one exporter per table grouping.
func (b backerHandler) produce(id string) (files []backupItem, err error) {
	// Backup manifest always comes first.
	mf, err := b.manifest(id)
	if err != nil {
		return
	}
	files = append(files, backupItem{Filename: "manifest.json", Content: mf})

	// Each exporter appends its file(s) to the collection.
	// Order: org, config, users/accounts, groups, activity/audit, pins,
	// spaces/permissions, categories, sections, documents, actions.
	exporters := []func(*[]backupItem) error{
		b.dmzOrg,
		b.dmzConfig,
		b.dmzUserAccount,
		b.dmzGroup,
		b.dmzActivity,
		b.dmzPin,
		b.dmzSpace,
		b.dmzCategory,
		b.dmzSection,
		b.dmzDocument,
		b.dmzAction,
	}
	for _, export := range exporters {
		if err = export(&files); err != nil {
			return
		}
	}

	return
}
// manifest describes the environment of the backup source.
func (b backerHandler) manifest(id string) (string, error) {
	// Local variable renamed so it does not shadow the package alias `m`.
	man := m.Manifest{
		ID:        id,
		Edition:   b.Runtime.Product.Edition,
		Version:   b.Runtime.Product.Version,
		Major:     b.Runtime.Product.Major,
		Minor:     b.Runtime.Product.Minor,
		Patch:     b.Runtime.Product.Patch,
		Revision:  b.Runtime.Product.Revision,
		StoreType: b.Runtime.StoreProvider.Type(),
		Created:   time.Now().UTC(),
		OrgID:     b.Spec.OrgID,
	}

	return toJSON(man)
}
// dmzOrg exports organization records. A tenant backup is restricted to
// the single organization named in the spec; a system backup takes all.
func (b backerHandler) dmzOrg(files *[]backupItem) (err error) {
	where := ""
	if !b.Spec.SystemBackup() {
		where = fmt.Sprintf(" WHERE c_refid='%s' ", b.Spec.OrgID)
	}

	orgs := []org.Organization{}
	err = b.Runtime.Db.Select(&orgs, `SELECT id, c_refid AS refid,
        c_title AS title, c_message AS message, c_domain AS domain,
        c_service AS conversionendpoint, c_email AS email, c_serial AS serial, c_active AS active,
        c_anonaccess AS allowanonymousaccess, c_authprovider AS authprovider,
        coalesce(c_authconfig,`+b.Runtime.StoreProvider.JSONEmpty()+`) AS authconfig, c_maxtags AS maxtags,
        c_created AS created, c_revised AS revised
        FROM dmz_org`+where)
	if err != nil {
		return
	}

	payload, err := toJSON(orgs)
	if err != nil {
		return
	}
	*files = append(*files, backupItem{Filename: "dmz_org.json", Content: payload})

	return
}
// Config, User Config.
// dmzConfig exports global configuration and per-user configuration rows.
// Global config (dmz_config) is instance-wide and is always exported in
// full; user config is filtered by org for tenant backups.
func (b backerHandler) dmzConfig(files *[]backupItem) (err error) {
	// Shape of dmz_config rows in the JSON export.
	type config struct {
		ConfigKey   string `json:"key"`
		ConfigValue string `json:"config"`
	}
	c := []config{}
	err = b.Runtime.Db.Select(&c, `SELECT c_key AS configkey, c_config AS configvalue FROM dmz_config`)
	if err != nil {
		return
	}
	content, err := toJSON(c)
	if err != nil {
		return
	}
	*files = append(*files, backupItem{Filename: "dmz_config.json", Content: content})

	// Tenant backup limits user config to the current organization.
	w := ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" where c_orgid='%s' ", b.Spec.OrgID)
	}

	// Shape of dmz_user_config rows in the JSON export.
	type userConfig struct {
		OrgID       string `json:"orgId"`
		UserID      string `json:"userId"`
		ConfigKey   string `json:"key"`
		ConfigValue string `json:"config"`
	}
	uc := []userConfig{}
	err = b.Runtime.Db.Select(&uc, `select c_orgid AS orgid, c_userid AS userid,
        c_key AS configkey, c_config AS configvalue FROM dmz_user_config`+w)
	if err != nil {
		return
	}
	content, err = toJSON(uc)
	if err != nil {
		return
	}
	*files = append(*files, backupItem{Filename: "dmz_user_config.json", Content: content})

	return
}
// User, Account.
// dmzUserAccount exports user records and their per-org account rows.
func (b backerHandler) dmzUserAccount(files *[]backupItem) (err error) {
	// For a tenant backup, join against dmz_user_account so only users
	// belonging to the target organization are exported.
	w := ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" , dmz_user_account a WHERE u.c_refid=a.c_userid AND a.c_orgid='%s' ", b.Spec.OrgID)
	}

	u := []m.User{}
	err = b.Runtime.Db.Select(&u, `SELECT u.id, u.c_refid AS refid,
        u.c_firstname AS firstname, u.c_lastname AS lastname, u.c_email AS email,
        u.c_initials AS initials, u.c_globaladmin AS globaladmin,
        u.c_password AS password, u.c_salt AS salt, u.c_reset AS reset, u.c_lastversion AS lastversion,
        u.c_created AS created, u.c_revised AS revised
        FROM dmz_user u`+w)
	if err != nil {
		return
	}
	content, err := toJSON(u)
	if err != nil {
		return
	}
	*files = append(*files, backupItem{Filename: "dmz_user.json", Content: content})

	// Account rows carry an org column, so they are filtered directly.
	w = ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	acc := []account.Account{}
	err = b.Runtime.Db.Select(&acc, `SELECT id, c_refid AS refid, c_orgid AS orgid, c_userid AS userid,
        c_editor AS editor, c_admin AS admin, c_users AS users, c_analytics AS analytics,
        c_active AS active, c_created AS created, c_revised AS revised
        FROM dmz_user_account`+w)
	if err != nil {
		return
	}
	content, err = toJSON(acc)
	if err != nil {
		return
	}
	*files = append(*files, backupItem{Filename: "dmz_user_account.json", Content: content})

	return
}
// Group, Group Member.
// dmzGroup exports user groups and group membership rows, filtered by
// org for tenant backups.
func (b backerHandler) dmzGroup(files *[]backupItem) (err error) {
	w := ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	g := []group.Group{}
	err = b.Runtime.Db.Select(&g, `
        SELECT id, c_refid AS refid,
        c_orgid AS orgid, c_name AS name, c_desc AS purpose,
        c_created AS created, c_revised AS revised
        FROM dmz_group`+w)
	if err != nil {
		return
	}
	content, err := toJSON(g)
	if err != nil {
		return
	}
	*files = append(*files, backupItem{Filename: "dmz_group.json", Content: content})

	// Same org filter applies to membership rows.
	w = ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	gm := []group.Member{}
	err = b.Runtime.Db.Select(&gm, `
        SELECT id, c_orgid AS orgid, c_groupid AS groupid, c_userid AS userid
        FROM dmz_group_member`+w)
	if err != nil {
		return
	}
	content, err = toJSON(gm)
	if err != nil {
		return
	}
	*files = append(*files, backupItem{Filename: "dmz_group_member.json", Content: content})

	return
}
// Activity, Audit
// dmzActivity exports user activity and audit log rows, filtered by
// org for tenant backups. Errors are wrapped to identify which query
// or marshalling step failed.
func (b backerHandler) dmzActivity(files *[]backupItem) (err error) {
	w := ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	ac := []activity.UserActivity{}
	err = b.Runtime.Db.Select(&ac, `
        SELECT id, c_orgid AS orgid, c_userid AS userid, c_spaceid AS spaceid,
        c_docid AS documentid, c_sectionid AS sectionid, c_sourcetype AS sourcetype,
        c_activitytype AS activitytype, c_metadata AS metadata, c_created AS created
        FROM dmz_user_activity`+w)
	if err != nil {
		return errors.Wrap(err, "select.activity")
	}
	content, err := toJSON(ac)
	if err != nil {
		return errors.Wrap(err, "json.activity")
	}
	*files = append(*files, backupItem{Filename: "dmz_user_activity.json", Content: content})

	// Audit log uses the same org filter.
	w = ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	al := []audit.AppEvent{}
	err = b.Runtime.Db.Select(&al, `
        SELECT c_orgid AS orgid, c_userid AS userid, c_eventtype AS type,
        c_ip AS ip, c_created AS created
        FROM dmz_audit_log`+w)
	if err != nil {
		return errors.Wrap(err, "select.audit")
	}
	content, err = toJSON(al)
	if err != nil {
		return errors.Wrap(err, "json.audit")
	}
	*files = append(*files, backupItem{Filename: "dmz_audit_log.json", Content: content})

	return
}
// dmzPin exports saved pin records, filtered by org for tenant backups.
func (b backerHandler) dmzPin(files *[]backupItem) (err error) {
	where := ""
	if !b.Spec.SystemBackup() {
		where = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	pins := []pin.Pin{}
	err = b.Runtime.Db.Select(&pins, `
        SELECT id, c_refid AS refid,
        c_orgid AS orgid, c_userid AS userid, c_spaceid AS spaceid, c_docid AS documentid,
        c_name AS name, c_sequence AS sequence, c_created AS created, c_revised AS revised
        FROM dmz_pin`+where)
	if err != nil {
		return errors.Wrap(err, "select.pin")
	}

	payload, err := toJSON(pins)
	if err != nil {
		return errors.Wrap(err, "json.pin")
	}
	*files = append(*files, backupItem{Filename: "dmz_pin.json", Content: payload})

	return
}
// Space, Permission.
// dmzSpace exports spaces and permission rows, filtered by org for
// tenant backups.
func (b backerHandler) dmzSpace(files *[]backupItem) (err error) {
	w := ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	sp := []space.Space{}
	err = b.Runtime.Db.Select(&sp, `SELECT id, c_refid AS refid,
        c_name AS name, c_orgid AS orgid, c_userid AS userid,
        c_type AS type, c_lifecycle AS lifecycle, c_likes AS likes,
        c_created AS created, c_revised AS revised
        FROM dmz_space`+w)
	if err != nil {
		return
	}
	content, err := toJSON(sp)
	if err != nil {
		return
	}
	*files = append(*files, backupItem{Filename: "dmz_space.json", Content: content})

	// Permissions use the same org filter.
	w = ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	p := []permission.Permission{}
	err = b.Runtime.Db.Select(&p, `
        SELECT id, c_orgid AS orgid, c_who AS who, c_whoid AS whoid,
        c_action AS action, c_scope AS scope, c_location AS location,
        c_refid AS refid, c_created AS created
        FROM dmz_permission`+w)
	if err != nil {
		return errors.Wrap(err, "select.permission")
	}
	content, err = toJSON(p)
	if err != nil {
		return errors.Wrap(err, "json.permission")
	}
	*files = append(*files, backupItem{Filename: "dmz_permission.json", Content: content})

	return
}
// Category, Category Member.
// dmzCategory exports categories and category-to-document assignments,
// filtered by org for tenant backups.
func (b backerHandler) dmzCategory(files *[]backupItem) (err error) {
	w := ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	cat := []category.Category{}
	err = b.Runtime.Db.Select(&cat, `
        SELECT id, c_refid AS refid,
        c_orgid AS orgid, c_spaceid AS spaceid,
        c_name AS name, c_created AS created, c_revised AS revised
        FROM dmz_category`+w)
	if err != nil {
		return errors.Wrap(err, "select.category")
	}
	content, err := toJSON(cat)
	if err != nil {
		return errors.Wrap(err, "json.category")
	}
	*files = append(*files, backupItem{Filename: "dmz_category.json", Content: content})

	// Category membership rows use the same org filter.
	w = ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	cm := []category.Member{}
	err = b.Runtime.Db.Select(&cm, `
        SELECT id, c_refid AS refid, c_orgid AS orgid,
        c_spaceid AS spaceid, c_categoryid AS categoryid,
        c_docid AS documentid, c_created AS created, c_revised AS revised
        FROM dmz_category_member`+w)
	if err != nil {
		return errors.Wrap(err, "select.categorymember")
	}
	content, err = toJSON(cm)
	if err != nil {
		return errors.Wrap(err, "json.categorymember")
	}
	*files = append(*files, backupItem{Filename: "dmz_category_member.json", Content: content})

	return
}
// Section, Section Meta, Section Revision, Section Template.
// dmzSection exports the four section-related tables, each filtered by
// org for tenant backups.
func (b backerHandler) dmzSection(files *[]backupItem) (err error) {
	w := ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	// Section
	sec := []page.Page{}
	err = b.Runtime.Db.Select(&sec, `
        SELECT id, c_refid AS refid, c_orgid AS orgid, c_docid AS documentid, c_userid AS userid, c_contenttype AS contenttype, c_type AS type,
        c_level AS level, c_sequence AS sequence, c_name AS name, c_body AS body, c_revisions AS revisions, c_templateid AS templateid,
        c_status AS status, c_relativeid AS relativeid, c_created AS created, c_revised AS revised
        FROM dmz_section`+w)
	if err != nil {
		return errors.Wrap(err, "select.section")
	}
	content, err := toJSON(sec)
	if err != nil {
		return errors.Wrap(err, "json.section")
	}
	*files = append(*files, backupItem{Filename: "dmz_section.json", Content: content})

	// Section Meta
	// coalesce() substitutes the store provider's empty-JSON literal for NULL config.
	sm := []page.Meta{}
	err = b.Runtime.Db.Select(&sm, `
        SELECT id, c_sectionid AS sectionid,
        c_orgid AS orgid, c_userid AS userid, c_docid AS documentid,
        c_rawbody AS rawbody, coalesce(c_config,`+b.Runtime.StoreProvider.JSONEmpty()+`) as config,
        c_external AS externalsource, c_created AS created, c_revised AS revised
        FROM dmz_section_meta`+w)
	if err != nil {
		return errors.Wrap(err, "select.sectionmeta")
	}
	content, err = toJSON(sm)
	if err != nil {
		return errors.Wrap(err, "json.sectionmeta")
	}
	*files = append(*files, backupItem{Filename: "dmz_section_meta.json", Content: content})

	// Section Revision
	sr := []page.Revision{}
	err = b.Runtime.Db.Select(&sr, `
        SELECT id, c_refid AS refid,
        c_orgid AS orgid, c_docid AS documentid, c_ownerid AS ownerid,
        c_sectionid AS sectionid,
        c_userid AS userid, c_contenttype AS contenttype, c_type AS type,
        c_name AS name, c_body AS body, coalesce(c_rawbody, '') as rawbody,
        coalesce(c_config,`+b.Runtime.StoreProvider.JSONEmpty()+`) as config,
        c_created AS created, c_revised AS revised
        FROM dmz_section_revision`+w)
	if err != nil {
		return errors.Wrap(err, "select.sectionrevision")
	}
	content, err = toJSON(sr)
	if err != nil {
		return errors.Wrap(err, "json.sectionrevision")
	}
	*files = append(*files, backupItem{Filename: "dmz_section_revision.json", Content: content})

	// Section Template
	st := []block.Block{}
	err = b.Runtime.Db.Select(&st, `
        SELECT id, c_refid as refid,
        c_orgid as orgid,
        c_spaceid AS spaceid, c_userid AS userid, c_contenttype AS contenttype, c_type AS type,
        c_name AS name, c_body AS body, c_desc AS excerpt, c_rawbody AS rawbody,
        c_config AS config, c_external AS externalsource, c_used AS used,
        c_created AS created, c_revised AS revised
        FROM dmz_section_template`+w)
	if err != nil {
		return errors.Wrap(err, "select.sectiontemplate")
	}
	content, err = toJSON(st)
	if err != nil {
		return errors.Wrap(err, "json.sectiontemplate")
	}
	*files = append(*files, backupItem{Filename: "dmz_section_template.json", Content: content})

	return
}
// Document, Link, Vote, Comment, Share, Attachment.
// dmzDocument exports documents and their dependent tables (votes,
// links, comments, shares, attachments), each filtered by org for
// tenant backups. Tables without a model type in the codebase are
// described with local structs so their JSON export shape is explicit.
func (b backerHandler) dmzDocument(files *[]backupItem) (err error) {
	w := ""
	if !b.Spec.SystemBackup() {
		w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	// Document
	d := []doc.Document{}
	err = b.Runtime.Db.Select(&d, `
        SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_userid AS userid,
        c_job AS job, c_location AS location, c_name AS name, c_desc AS excerpt, c_slug AS slug,
        c_tags AS tags, c_template AS template, c_protection AS protection, c_approval AS approval,
        c_lifecycle AS lifecycle, c_versioned AS versioned, c_versionid AS versionid,
        c_versionorder AS versionorder, c_groupid AS groupid, c_created AS created, c_revised AS revised
        FROM dmz_doc`+w)
	if err != nil {
		return errors.Wrap(err, "select.document")
	}
	content, err := toJSON(d)
	if err != nil {
		return errors.Wrap(err, "json.document")
	}
	*files = append(*files, backupItem{Filename: "dmz_doc.json", Content: content})

	// Vote
	// Local struct mirrors dmz_doc_vote columns for export.
	type vote struct {
		RefID      string    `json:"refId"`
		OrgID      string    `json:"orgId"`
		DocumentID string    `json:"documentId"`
		VoterID    string    `json:"voterId"`
		Vote       int       `json:"vote"`
		Created    time.Time `json:"created"`
		Revised    time.Time `json:"revised"`
	}
	vt := []vote{}
	err = b.Runtime.Db.Select(&vt, `
        SELECT c_refid AS refid, c_orgid AS orgid,
        c_voter AS voterid, c_vote AS vote,
        c_docid AS documentid, c_created AS created, c_revised AS revised
        FROM dmz_doc_vote`+w)
	if err != nil {
		return errors.Wrap(err, "select.docvote")
	}
	content, err = toJSON(vt)
	if err != nil {
		return errors.Wrap(err, "json.docvote")
	}
	*files = append(*files, backupItem{Filename: "dmz_doc_vote.json", Content: content})

	// Link
	ln := []link.Link{}
	err = b.Runtime.Db.Select(&ln, `
        select c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_userid AS userid,
        c_sourcedocid AS sourcedocumentid, c_sourcesectionid AS sourcesectionid,
        c_targetdocid AS targetdocumentid, c_targetid AS targetid, c_externalid AS externalid,
        c_type as linktype, c_orphan As orphan, c_created AS created, c_revised AS revised
        FROM dmz_doc_link`+w)
	if err != nil {
		return errors.Wrap(err, "select.doclink")
	}
	content, err = toJSON(ln)
	if err != nil {
		return errors.Wrap(err, "json.doclink")
	}
	*files = append(*files, backupItem{Filename: "dmz_doc_link.json", Content: content})

	// Comment
	// Local struct mirrors dmz_doc_comment columns for export.
	// NOTE: Created is declared as a string here, not time.Time.
	type comment struct {
		RefID      string `json:"feedbackId"`
		OrgID      string `json:"orgId"`
		DocumentID string `json:"documentId"`
		UserID     string `json:"userId"`
		Email      string `json:"email"`
		Feedback   string `json:"feedback"`
		Created    string `json:"created"`
	}
	cm := []comment{}
	err = b.Runtime.Db.Select(&cm, `
        SELECT c_refid AS refid, c_orgid AS orgid, c_docid AS documentid,
        c_userid AS userid, c_email AS email,
        c_feedback AS feedback, c_created AS created
        FROM dmz_doc_comment`+w)
	if err != nil {
		return errors.Wrap(err, "select.doccomment")
	}
	content, err = toJSON(cm)
	if err != nil {
		return errors.Wrap(err, "json.doccomment")
	}
	*files = append(*files, backupItem{Filename: "dmz_doc_comment.json", Content: content})

	// Share
	// Local struct mirrors dmz_doc_share columns for export.
	type share struct {
		ID         uint64    `json:"id"`
		OrgID      string    `json:"orgId"`
		UserID     string    `json:"userId"`
		DocumentID string    `json:"documentId"`
		Email      string    `json:"email"`
		Message    string    `json:"message"`
		Viewed     string    `json:"viewed"`  // recording each view as |date-viewed|date-viewed|
		Secret     string    `json:"secret"`  // secure token used to access document
		Expires    string    `json:"expires"` // number of days from creation, value of 0 means never
		Active     bool      `json:"active"`
		Created    time.Time `json:"created"`
	}
	sh := []share{}
	err = b.Runtime.Db.Select(&sh, `
        SELECT id AS id, c_orgid AS orgid, c_docid AS documentid,
        c_userid AS userid, c_email AS email, c_message AS message, c_viewed AS viewed,
        c_expires AS expires, c_active AS active, c_secret AS secret, c_created AS created
        FROM dmz_doc_share`+w)
	if err != nil {
		return errors.Wrap(err, "select.docshare")
	}
	content, err = toJSON(sh)
	if err != nil {
		return errors.Wrap(err, "json.docshare")
	}
	*files = append(*files, backupItem{Filename: "dmz_doc_share.json", Content: content})

	// Attachment
	at := []attachment.Attachment{}
	err = b.Runtime.Db.Select(&at, `
        SELECT id, c_refid AS refid,
        c_orgid AS orgid, c_docid AS documentid, c_job AS job, c_fileid AS fileid,
        c_filename AS filename, c_data AS data, c_extension AS extension,
        c_created AS created, c_revised AS revised
        FROM dmz_doc_attachment`+w)
	if err != nil {
		return errors.Wrap(err, "select.docattachment")
	}
	content, err = toJSON(at)
	if err != nil {
		return errors.Wrap(err, "json.docattachment")
	}
	*files = append(*files, backupItem{Filename: "dmz_doc_attachment.json", Content: content})

	return
}
// dmzAction exports user action records, filtered by org for tenant backups.
func (b backerHandler) dmzAction(files *[]backupItem) (err error) {
	where := ""
	if !b.Spec.SystemBackup() {
		where = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
	}

	actions := []action.UserAction{}
	err = b.Runtime.Db.Select(&actions, `
        SELECT c_refid AS refid, c_orgid AS orgid, c_docid AS documentid, c_userid AS userid,
        c_actiontype AS actiontype, c_note AS note, c_requestorid AS requestorid, c_requested AS requested, c_due AS due,
        c_completed AS completed, c_iscomplete AS iscomplete, c_reftype AS reftype, c_reftypeid AS reftypeid,
        c_created AS created, c_revised AS revised
        FROM dmz_action`+where)
	if err != nil {
		return errors.Wrap(err, "select.action")
	}

	payload, err := toJSON(actions)
	if err != nil {
		return errors.Wrap(err, "json.action")
	}
	*files = append(*files, backupItem{Filename: "dmz_action.json", Content: payload})

	return
}

View file

@ -9,24 +9,46 @@
//
// https://documize.com
// Package backup handles data backup/restore to/from ZIP format.
package backup
// Documize data is all held in the SQL database in relational format.
// The objective is to export the data into a compressed file that
// can be restored again as required.
//
// This allows for the following scenarios to be supported:
//
// 1. Copying data from one Documize instance to another.
// 2. Changing database provider (e.g. from MySQL to PostgreSQL).
// 3. Moving between Documize Cloud and self-hosted instances.
// 4. GDPR compliance (send copy of data and nuke whatever remains).
// 5. Setting up sample Documize instance with pre-defined content.
//
// The initial implementation is restricted to tenant or global
// backup/restore operations and can only be performed by a verified
// Global Administrator.
//
// In future the process should be able to support per space backup/restore
// operations. This is subject to further review.
import (
"archive/zip"
"bytes"
"encoding/json"
"fmt"
"github.com/documize/community/core/request"
"io"
"io/ioutil"
"net/http"
"os"
"strconv"
"github.com/documize/community/core/env"
"github.com/documize/community/core/response"
"github.com/documize/community/core/streamutil"
"github.com/documize/community/core/uniqueid"
"github.com/documize/community/domain"
indexer "github.com/documize/community/domain/search"
"github.com/documize/community/domain/store"
m "github.com/documize/community/model/backup"
)
// Handler contains the runtime information such as logging and database.
@ -57,7 +79,7 @@ func (h *Handler) Backup(w http.ResponseWriter, r *http.Request) {
return
}
spec := backupSpec{}
spec := m.ExportSpec{}
err = json.Unmarshal(body, &spec)
if err != nil {
response.WriteBadRequestError(w, method, err.Error())
@ -65,31 +87,41 @@ func (h *Handler) Backup(w http.ResponseWriter, r *http.Request) {
return
}
// data, err := backup(ctx, *h.Store, spec)
// if err != nil {
// response.WriteServerError(w, method, err)
// h.Runtime.Log.Error(method, err)
// return
// }
h.Runtime.Log.Info("Backup started")
// Filename is current timestamp
fn := fmt.Sprintf("dmz-backup-%s.zip", uniqueid.Generate())
bh := backerHandler{Runtime: h.Runtime, Store: h.Store, Context: ctx, Spec: spec}
ziptest(fn)
bb, err := ioutil.ReadFile(fn)
// Produce zip file on disk.
filename, err := bh.GenerateBackup()
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
w.Header().Set("Content-Type", "application/zip")
w.Header().Set("Content-Disposition", `attachment; filename="`+fn+`" ; `+`filename*="`+fn+`"`)
w.Header().Set("Content-Length", fmt.Sprintf("%d", len(bb)))
w.Header().Set("x-documize-filename", fn)
// Read backup file into memory.
// DEBT: write file directly to HTTP response stream?
bk, err := ioutil.ReadFile(filename)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
x, err := w.Write(bb)
h.Runtime.Log.Info(fmt.Sprintf("Backup size pending download %d", len(bk)))
// Standard HTTP headers.
w.Header().Set("Content-Type", "application/zip")
w.Header().Set("Content-Disposition", `attachment; filename="`+filename+`" ; `+`filename*="`+filename+`"`)
w.Header().Set("Content-Length", fmt.Sprintf("%d", len(bk)))
// Custom HTTP header helps API consumer to extract backup filename cleanly
// instead of parsing 'Content-Disposition' header.
// This HTTP header is CORS white-listed.
w.Header().Set("x-documize-filename", filename)
// Write backup to response stream.
x, err := w.Write(bk)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
@ -97,90 +129,75 @@ func (h *Handler) Backup(w http.ResponseWriter, r *http.Request) {
}
w.WriteHeader(http.StatusOK)
h.Runtime.Log.Info(fmt.Sprintf("Backup completed for %s by %s, size %d", ctx.OrgID, ctx.UserID, x))
// Delete backup file if not requested to keep it.
if !spec.Retain {
os.Remove(filename)
}
}
type backupSpec struct {
}
// Restore receives ZIP file for restore operation.
// Options are specified as HTTP query parameters.
func (h *Handler) Restore(w http.ResponseWriter, r *http.Request) {
method := "system.restore"
ctx := domain.GetRequestContext(r)
func backup(ctx domain.RequestContext, s store.Store, spec backupSpec) (file []byte, err error) {
buf := new(bytes.Buffer)
zw := zip.NewWriter(buf)
// Add some files to the archive.
var files = []struct {
Name, Body string
}{
{"readme.txt", "This archive contains some text files."},
{"gopher.txt", "Gopher names:\nGeorge\nGeoffrey\nGonzo"},
{"todo.txt", "Get animal handling licence.\nWrite more examples."},
if !ctx.Administrator {
response.WriteForbiddenError(w)
h.Runtime.Log.Info(fmt.Sprintf("Non-admin attempted system restore operation (user ID: %s)", ctx.UserID))
return
}
for _, file := range files {
f, err := zw.Create(file.Name)
if err != nil {
return nil, err
}
h.Runtime.Log.Info(fmt.Sprintf("Restored attempted by user: %s", ctx.UserID))
_, err = f.Write([]byte(file.Body))
if err != nil {
return nil, err
}
}
// Make sure to check the error on Close.
err = zw.Close()
overwriteOrg, err := strconv.ParseBool(request.Query(r, "org"))
if err != nil {
return nil, err
h.Runtime.Log.Info("Restore invoked without 'org' parameter")
response.WriteMissingDataError(w, method, "org=false/true missing")
return
}
return buf.Bytes(), nil
}
func ziptest(filename string) {
// Create a file to write the archive buffer to
// Could also use an in memory buffer.
outFile, err := os.Create(filename)
filedata, fileheader, err := r.FormFile("restore-file")
if err != nil {
fmt.Println(err)
}
defer outFile.Close()
// Create a zip writer on top of the file writer
zipWriter := zip.NewWriter(outFile)
// Add files to archive
// We use some hard coded data to demonstrate,
// but you could iterate through all the files
// in a directory and pass the name and contents
// of each file, or you can take data from your
// program and write it write in to the archive
// without
var filesToArchive = []struct {
Name, Body string
}{
{"test.txt", "String contents of file"},
{"test2.txt", "\x61\x62\x63\n"},
response.WriteMissingDataError(w, method, "restore-file")
h.Runtime.Log.Error(method, err)
return
}
// Create and write files to the archive, which in turn
// are getting written to the underlying writer to the
// .zip file we created at the beginning
for _, file := range filesToArchive {
fileWriter, err := zipWriter.Create(file.Name)
if err != nil {
fmt.Println(err)
}
_, err = fileWriter.Write([]byte(file.Body))
if err != nil {
fmt.Println(err)
}
}
// Clean up
err = zipWriter.Close()
b := new(bytes.Buffer)
_, err = io.Copy(b, filedata)
if err != nil {
fmt.Println(err)
h.Runtime.Log.Error(method, err)
response.WriteServerError(w, method, err)
return
}
h.Runtime.Log.Info(fmt.Sprintf("Restore file: %s %d", fileheader.Filename, len(b.Bytes())))
//
org, err := h.Store.Organization.GetOrganization(ctx, ctx.OrgID)
if err != nil {
h.Runtime.Log.Error(method, err)
response.WriteServerError(w, method, err)
return
}
// Prepare context and start restore process.
spec := m.ImportSpec{OverwriteOrg: overwriteOrg, Org: org}
rh := restoreHandler{Runtime: h.Runtime, Store: h.Store, Context: ctx, Spec: spec}
// Run the restore process.
err = rh.PerformRestore(b.Bytes(), r.ContentLength)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
h.Runtime.Log.Infof("Restore remapped %d OrgID values", len(rh.MapOrgID))
h.Runtime.Log.Infof("Restore remapped %d UserID values", len(rh.MapUserID))
h.Runtime.Log.Info("Restore completed")
response.WriteEmpty(w)
}

1865
domain/backup/restore.go Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,59 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package backup
import (
"testing"
)
// go test github.com/documize/community/domain/backup -run TestRemapOrg
func TestRemapOrg(t *testing.T) {
	// Seed the handler with two known org ID mappings.
	r := restoreHandler{MapOrgID: map[string]string{
		"abc": "def",
		"xyz": "123",
	}}

	// Known IDs are translated to their mapped values.
	if got := r.remapOrg("abc"); got != "def" {
		t.Errorf("expected def got %s", got)
	}
	if got := r.remapOrg("xyz"); got != "123" {
		t.Errorf("expected 123 got %s", got)
	}
	// Unknown IDs pass through unchanged.
	if got := r.remapOrg("jkl"); got != "jkl" {
		t.Errorf("expected jkl got %s", got)
	}
}
// TestRemapUser verifies user ID remapping: mapped IDs are translated,
// unmapped IDs are returned as-is.
func TestRemapUser(t *testing.T) {
	// Seed the handler with two known user ID mappings.
	r := restoreHandler{MapUserID: map[string]string{
		"abc": "def",
		"xyz": "123",
	}}

	if got := r.remapUser("abc"); got != "def" {
		t.Errorf("expected def got %s", got)
	}
	if got := r.remapUser("xyz"); got != "123" {
		t.Errorf("expected 123 got %s", got)
	}
	// Unknown IDs pass through unchanged.
	if got := r.remapUser("jkl"); got != "jkl" {
		t.Errorf("expected jkl got %s", got)
	}
}

View file

@ -121,16 +121,16 @@ func (h *Handler) GetBySpace(w http.ResponseWriter, r *http.Request) {
method := "block.space"
ctx := domain.GetRequestContext(r)
folderID := request.Param(r, "folderID")
if len(folderID) == 0 {
response.WriteMissingDataError(w, method, "folderID")
spaceID := request.Param(r, "spaceID")
if len(spaceID) == 0 {
response.WriteMissingDataError(w, method, "spaceID")
return
}
var b []block.Block
var err error
b, err = h.Store.Block.GetBySpace(ctx, folderID)
b, err = h.Store.Block.GetBySpace(ctx, spaceID)
if len(b) == 0 {
b = []block.Block{}

View file

@ -203,7 +203,10 @@ func (s Store) RemoveDocumentCategories(ctx domain.RequestContext, documentID st
// DeleteBySpace removes all category and category associations for given space.
func (s Store) DeleteBySpace(ctx domain.RequestContext, spaceID string) (rows int64, err error) {
s1 := fmt.Sprintf("DELETE FROM dmz_category_member WHERE c_orgid='%s' AND c_spaceid='%s'", ctx.OrgID, spaceID)
s.DeleteWhere(ctx.Transaction, s1)
_, err = s.DeleteWhere(ctx.Transaction, s1)
if err != nil {
return
}
s2 := fmt.Sprintf("DELETE FROM dmz_category WHERE c_orgid='%s' AND c_spaceid='%s'", ctx.OrgID, spaceID)
return s.DeleteWhere(ctx.Transaction, s2)

View file

@ -51,9 +51,9 @@ func (h *Handler) upload(w http.ResponseWriter, r *http.Request) (string, string
method := "conversion.upload"
ctx := domain.GetRequestContext(r)
folderID := request.Param(r, "folderID")
spaceID := request.Param(r, "spaceID")
if !permission.CanUploadDocument(ctx, *h.Store, folderID) {
if !permission.CanUploadDocument(ctx, *h.Store, spaceID) {
response.WriteForbiddenError(w)
return "", "", ""
}
@ -92,10 +92,10 @@ func (h *Handler) upload(w http.ResponseWriter, r *http.Request) (string, string
h.Runtime.Log.Info(fmt.Sprintf("Org %s (%s) [Uploaded] %s", ctx.OrgName, ctx.OrgID, filename.Filename))
return job, folderID, ctx.OrgID
return job, spaceID, ctx.OrgID
}
func (h *Handler) convert(w http.ResponseWriter, r *http.Request, job, folderID string, conversion api.ConversionJobRequest) {
func (h *Handler) convert(w http.ResponseWriter, r *http.Request, job, spaceID string, conversion api.ConversionJobRequest) {
method := "conversion.upload"
ctx := domain.GetRequestContext(r)
@ -145,7 +145,7 @@ func (h *Handler) convert(w http.ResponseWriter, r *http.Request, job, folderID
}
// Fetch space where document resides.
sp, err := h.Store.Space.Get(ctx, folderID)
sp, err := h.Store.Space.Get(ctx, spaceID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
@ -161,9 +161,6 @@ func (h *Handler) convert(w http.ResponseWriter, r *http.Request, job, folderID
return
}
a, _ := h.Store.Attachment.GetAttachments(ctx, nd.RefID)
go h.Indexer.IndexDocument(ctx, nd, a)
response.WriteJSON(w, nd)
}
@ -252,7 +249,6 @@ func processDocument(ctx domain.RequestContext, r *env.Runtime, store *store.Sto
ActivityType: activity.TypeCreated})
err = ctx.Transaction.Commit()
if err != nil {
err = errors.Wrap(err, "cannot commit new document import")
return
@ -260,12 +256,11 @@ func processDocument(ctx domain.RequestContext, r *env.Runtime, store *store.Sto
newDocument, err = store.Document.Get(ctx, documentID)
if err != nil {
ctx.Transaction.Rollback()
err = errors.Wrap(err, "cannot fetch new document")
return
}
indexer.IndexDocument(ctx, newDocument, da)
go indexer.IndexDocument(ctx, newDocument, da)
store.Audit.Record(ctx, audit.EventTypeDocumentUpload)

View file

@ -29,12 +29,12 @@ type Handler struct {
// UploadConvert is an endpoint to both upload and convert a document
func (h *Handler) UploadConvert(w http.ResponseWriter, r *http.Request) {
job, folderID, orgID := h.upload(w, r)
job, spaceID, orgID := h.upload(w, r)
if job == "" {
return // error already handled
}
h.convert(w, r, job, folderID, api.ConversionJobRequest{
h.convert(w, r, job, spaceID, api.ConversionJobRequest{
Job: job,
IndexDepth: 4,
OrgID: orgID,

View file

@ -278,7 +278,6 @@ func (s Store) Delete(ctx domain.RequestContext, documentID string) (rows int64,
// Remove document pages, revisions, attachments, updates the search subsystem.
func (s Store) DeleteBySpace(ctx domain.RequestContext, spaceID string) (rows int64, err error) {
rows, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_section WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid='%s' AND c_orgid='%s')", spaceID, ctx.OrgID))
if err != nil {
return
}

View file

@ -39,9 +39,9 @@ func (h *Handler) GetLinkCandidates(w http.ResponseWriter, r *http.Request) {
method := "link.Candidates"
ctx := domain.GetRequestContext(r)
folderID := request.Param(r, "folderID")
if len(folderID) == 0 {
response.WriteMissingDataError(w, method, "folderID")
spaceID := request.Param(r, "spaceID")
if len(spaceID) == 0 {
response.WriteMissingDataError(w, method, "spaceID")
return
}
@ -81,7 +81,7 @@ func (h *Handler) GetLinkCandidates(w http.ResponseWriter, r *http.Request) {
if p.RefID != pageID {
c := link.Candidate{
RefID: uniqueid.Generate(),
SpaceID: folderID,
SpaceID: spaceID,
DocumentID: documentID,
TargetID: p.RefID,
LinkType: p.Type,
@ -109,7 +109,7 @@ func (h *Handler) GetLinkCandidates(w http.ResponseWriter, r *http.Request) {
for _, f := range files {
c := link.Candidate{
RefID: uniqueid.Generate(),
SpaceID: folderID,
SpaceID: spaceID,
DocumentID: documentID,
TargetID: f.RefID,
LinkType: "file",

View file

@ -18,7 +18,6 @@ import (
"time"
"github.com/documize/community/core/env"
"github.com/documize/community/core/streamutil"
"github.com/documize/community/domain"
"github.com/documize/community/domain/store"
"github.com/documize/community/model/org"
@ -33,11 +32,7 @@ type Store struct {
// AddOrganization inserts the passed organization record into the organization table.
func (s Store) AddOrganization(ctx domain.RequestContext, org org.Organization) (err error) {
org.Created = time.Now().UTC()
org.Revised = time.Now().UTC()
_, err = ctx.Transaction.Exec(
s.Bind("INSERT INTO dmz_org (c_refid, c_company, c_title, c_message, c_domain, c_email, c_anonaccess, c_serial, c_maxtags, c_created, c_revised) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"),
_, err = ctx.Transaction.Exec(s.Bind("INSERT INTO dmz_org (c_refid, c_company, c_title, c_message, c_domain, c_email, c_anonaccess, c_serial, c_maxtags, c_created, c_revised) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"),
org.RefID, org.Company, org.Title, org.Message, strings.ToLower(org.Domain),
strings.ToLower(org.Email), org.AllowAnonymousAccess, org.Serial, org.MaxTags, org.Created, org.Revised)
@ -50,22 +45,16 @@ func (s Store) AddOrganization(ctx domain.RequestContext, org org.Organization)
// GetOrganization returns the Organization record from the organization database table with the given id.
func (s Store) GetOrganization(ctx domain.RequestContext, id string) (org org.Organization, err error) {
stmt, err := s.Runtime.Db.Preparex(s.Bind(`SELECT id, c_refid AS refid,
err = s.Runtime.Db.Get(&org, s.Bind(`SELECT id, c_refid AS refid,
c_title AS title, c_message AS message, c_domain AS domain,
c_service AS conversionendpoint, c_email AS email, c_serial AS serial, c_active AS active,
c_anonaccess AS allowanonymousaccess, c_authprovider AS authprovider,
coalesce(c_authconfig,` + s.EmptyJSON() + `) AS authconfig, c_maxtags AS maxtags,
coalesce(c_authconfig,`+s.EmptyJSON()+`) AS authconfig, c_maxtags AS maxtags,
c_created AS created, c_revised AS revised
FROM dmz_org
WHERE c_refid=?`))
defer streamutil.Close(stmt)
WHERE c_refid=?`),
id)
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to prepare select for org %s", id))
return
}
err = stmt.Get(&org, id)
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to get org %s", id))
return

View file

@ -29,7 +29,7 @@ type Store struct {
}
//**************************************************
// Page Revisions
// Page
//**************************************************
// Add inserts the given page into the page table, adds that page to the queue of pages to index and audits that the page has been added.
@ -157,7 +157,7 @@ func (s Store) Update(ctx domain.RequestContext, page page.Page, refID, userID s
c_name, c_body, c_rawbody, c_config, c_created, c_revised)
SELECT ? AS refid, a.c_orgid, a.c_docid, a.c_userid AS ownerid, a.c_refid AS sectionid,
? AS userid, a.c_contenttype, a.c_type, a.c_name, a.c_body,
b.c_rawbody, b.c_config, ? AS c_created, ? As c_revised
b.c_rawbody, b.c_config, ? AS c_created, ? AS c_revised
FROM dmz_section a, dmz_section_meta b
WHERE a.c_refid=? AND a.c_refid=b.c_sectionid`),
refID, userID, time.Now().UTC(), time.Now().UTC(), page.RefID)

View file

@ -17,7 +17,6 @@ import (
"strings"
"github.com/documize/community/core/env"
"github.com/documize/community/core/streamutil"
"github.com/documize/community/core/stringutil"
"github.com/documize/community/domain"
"github.com/documize/community/domain/store"
@ -26,7 +25,6 @@ import (
"github.com/documize/community/model/page"
"github.com/documize/community/model/search"
"github.com/documize/community/model/workflow"
"github.com/jmoiron/sqlx"
"github.com/pkg/errors"
)
@ -189,24 +187,16 @@ func (s Store) DeleteContent(ctx domain.RequestContext, pageID string) (err erro
method := "search.DeleteContent"
// remove all search entries
var stmt1 *sqlx.Stmt
stmt1, err = ctx.Transaction.Preparex(s.Bind("DELETE FROM dmz_search WHERE c_orgid=? AND c_itemid=? AND c_itemtype=?"))
defer streamutil.Close(stmt1)
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_search WHERE c_orgid=? AND c_itemid=? AND c_itemtype=?"),
ctx.OrgID, pageID, "page")
if err != nil && err != sql.ErrNoRows {
err = errors.Wrap(err, "prepare delete document content entry")
s.Runtime.Log.Error(method, err)
return
}
_, err = stmt1.Exec(ctx.OrgID, pageID, "page")
if err != nil && err != sql.ErrNoRows {
err = errors.Wrap(err, "execute delete document content entry")
s.Runtime.Log.Error(method, err)
return
}
return
return nil
}
// Documents searches the documents that the client is allowed to see, using the keywords search string, then audits that search.

View file

@ -20,6 +20,7 @@ import (
"io/ioutil"
"net/http"
"strings"
"time"
"github.com/documize/community/core/env"
"github.com/documize/community/core/event"
@ -102,6 +103,9 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
sp.UserID = ctx.UserID
sp.Type = space.ScopePrivate
sp.Lifecycle = wf.LifecycleLive
sp.UserID = ctx.UserID
sp.Created = time.Now().UTC()
sp.Revised = time.Now().UTC()
err = h.Store.Space.Add(ctx, sp)
if err != nil {
@ -745,6 +749,17 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
return
}
// Close out the delete process
ctx.Transaction.Commit()
// Record this action.
ctx.Transaction, err = h.Runtime.Db.Beginx()
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
err = h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: id,
SourceType: activity.SourceTypeSpace,
@ -754,8 +769,6 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
h.Runtime.Log.Error(method, err)
}
ctx.Transaction.Commit()
h.Store.Audit.Record(ctx, audit.EventTypeSpaceDelete)
event.Handler().Publish(string(event.TypeRemoveSpace))

View file

@ -31,6 +31,9 @@ func TestSpace(t *testing.T) {
sp.UserID = ctx.UserID
sp.Type = space.ScopePublic
sp.Name = "PublicTestSpace"
sp.UserID = ctx.UserID
sp.Created = time.Now().UTC()
sp.Revised = time.Now().UTC()
err = s.Space.Add(ctx, sp)
if err != nil {
@ -98,6 +101,9 @@ func TestSpace(t *testing.T) {
sp2.OrgID = ctx.OrgID
sp2.Type = space.ScopePrivate
sp2.Name = "PrivateTestSpace"
sp.UserID = ctx.UserID
sp.Created = time.Now().UTC()
sp.Revised = time.Now().UTC()
err = s.Space.Add(ctx, sp2)
if err != nil {

View file

@ -30,10 +30,6 @@ type Store struct {
// Add adds new folder into the store.
func (s Store) Add(ctx domain.RequestContext, sp space.Space) (err error) {
sp.UserID = ctx.UserID
sp.Created = time.Now().UTC()
sp.Revised = time.Now().UTC()
_, err = ctx.Transaction.Exec(s.Bind("INSERT INTO dmz_space (c_refid, c_name, c_orgid, c_userid, c_type, c_lifecycle, c_likes, c_created, c_revised) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"),
sp.RefID, sp.Name, sp.OrgID, sp.UserID, sp.Type, sp.Lifecycle, sp.Likes, sp.Created, sp.Revised)

View file

@ -43,61 +43,57 @@ func (c *Context) Bind(sql string) string {
// Delete record.
func (c *Context) Delete(tx *sqlx.Tx, table string, id string) (rows int64, err error) {
result, err := tx.Exec(c.Bind("DELETE FROM "+table+" WHERE c_refid=?"), id)
if err != nil && err != sql.ErrNoRows {
_, err = tx.Exec(c.Bind("DELETE FROM "+table+" WHERE c_refid=?"), id)
if err == sql.ErrNoRows {
err = nil
}
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to delete row in table %s", table))
return
}
rows, err = result.RowsAffected()
err = nil
return
}
// DeleteConstrained record constrained to Organization using refid.
func (c *Context) DeleteConstrained(tx *sqlx.Tx, table string, orgID, id string) (rows int64, err error) {
result, err := tx.Exec(c.Bind("DELETE FROM "+table+" WHERE c_orgid=? AND c_refid=?"), orgID, id)
if err != nil && err != sql.ErrNoRows {
_, err = tx.Exec(c.Bind("DELETE FROM "+table+" WHERE c_orgid=? AND c_refid=?"), orgID, id)
if err == sql.ErrNoRows {
err = nil
}
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to delete row in table %s", table))
return
}
rows, err = result.RowsAffected()
err = nil
return
}
// DeleteConstrainedWithID record constrained to Organization using non refid.
func (c *Context) DeleteConstrainedWithID(tx *sqlx.Tx, table string, orgID, id string) (rows int64, err error) {
result, err := tx.Exec(c.Bind("DELETE FROM "+table+" WHERE c_orgid=? AND id=?"), orgID, id)
if err != nil && err != sql.ErrNoRows {
err = errors.Wrap(err, fmt.Sprintf("unable to delete row in table %s", table))
_, err = tx.Exec(c.Bind("DELETE FROM "+table+" WHERE c_orgid=? AND id=?"), orgID, id)
if err == sql.ErrNoRows {
err = nil
}
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to delete rows by id: %s", id))
return
}
rows, err = result.RowsAffected()
err = nil
return
}
// DeleteWhere free form query.
func (c *Context) DeleteWhere(tx *sqlx.Tx, statement string) (rows int64, err error) {
result, err := tx.Exec(statement)
if err != nil && err != sql.ErrNoRows {
_, err = tx.Exec(statement)
if err == sql.ErrNoRows {
err = nil
}
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to delete rows: %s", statement))
return
}
rows, err = result.RowsAffected()
err = nil
return
}

View file

@ -53,13 +53,13 @@ func (h *Handler) SavedList(w http.ResponseWriter, r *http.Request) {
method := "template.saved"
ctx := domain.GetRequestContext(r)
folderID := request.Param(r, "folderID")
if len(folderID) == 0 {
response.WriteMissingDataError(w, method, "folderID")
spaceID := request.Param(r, "spaceID")
if len(spaceID) == 0 {
response.WriteMissingDataError(w, method, "spaceID")
return
}
documents, err := h.Store.Document.TemplatesBySpace(ctx, folderID)
documents, err := h.Store.Document.TemplatesBySpace(ctx, spaceID)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
@ -77,7 +77,7 @@ func (h *Handler) SavedList(w http.ResponseWriter, r *http.Request) {
t.Dated = d.Created
t.Type = template.TypePrivate
if d.SpaceID == folderID {
if d.SpaceID == spaceID {
templates = append(templates, t)
}
}
@ -266,9 +266,9 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {
method := "template.use"
ctx := domain.GetRequestContext(r)
folderID := request.Param(r, "folderID")
if len(folderID) == 0 {
response.WriteMissingDataError(w, method, "folderID")
spaceID := request.Param(r, "spaceID")
if len(spaceID) == 0 {
response.WriteMissingDataError(w, method, "spaceID")
return
}
@ -295,7 +295,7 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {
d.Excerpt = "Add detailed description for document..."
d.Slug = stringutil.MakeSlug(d.Name)
d.Tags = ""
d.SpaceID = folderID
d.SpaceID = spaceID
documentID := uniqueid.Generate()
d.RefID = documentID
@ -321,7 +321,7 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {
}
// Fetch space where document resides.
sp, err := h.Store.Space.Get(ctx, folderID)
sp, err := h.Store.Space.Get(ctx, spaceID)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
@ -339,7 +339,7 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {
documentID = uniqueid.Generate()
d.RefID = documentID
d.Template = false
d.SpaceID = folderID
d.SpaceID = spaceID
d.UserID = ctx.UserID
d.Name = docTitle
@ -409,6 +409,7 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {
// Clone categories.
cats, err := h.Store.Category.GetDocumentCategoryMembership(ctx, templateID)
if err != nil && err != sql.ErrNoRows {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
@ -422,6 +423,7 @@ func (h *Handler) Use(w http.ResponseWriter, r *http.Request) {
cc.SpaceID = d.SpaceID
err = h.Store.Category.AssociateDocument(ctx, cc)
if err != nil && err != sql.ErrNoRows {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return

View file

@ -38,9 +38,9 @@ func main() {
// product details
rt.Product = env.ProdInfo{}
rt.Product.Major = "1"
rt.Product.Minor = "71"
rt.Product.Minor = "72"
rt.Product.Patch = "0"
rt.Product.Revision = 181007125514
rt.Product.Revision = 181020074716
rt.Product.Version = fmt.Sprintf("%s.%s.%s", rt.Product.Major, rt.Product.Minor, rt.Product.Patch)
rt.Product.Edition = "Community"
rt.Product.Title = fmt.Sprintf("%s Edition", rt.Product.Edition)

View file

@ -35,6 +35,11 @@ func (l Logger) Info(message string) {
l.log.Println(message)
}
// Infof logs the message formatted with fmt.Sprintf semantics,
// substituting the supplied arguments into the format string.
func (l Logger) Infof(message string, a ...interface{}) {
	// Spread the variadic slice with a... so each argument fills its own
	// verb; passing the bare slice would print it as a single operand
	// (e.g. "%!s([]interface {}=[...])").
	l.log.Println(fmt.Sprintf(message, a...))
}
// Trace logs message if tracing enabled.
func (l Logger) Trace(message string) {
if l.trace {

File diff suppressed because one or more lines are too long

View file

@ -9,24 +9,180 @@
//
// https://documize.com
import $ from 'jquery';
import { inject as service } from '@ember/service';
import Notifier from '../../mixins/notifier';
import Modal from '../../mixins/modal';
import Component from '@ember/component';
export default Component.extend(Notifier, {
export default Component.extend(Notifier, Modal, {
appMeta: service(),
router: service(),
browserSvc: service('browser'),
buttonLabel: 'Run Backup',
backupLabel: 'Backup',
backupSystemLabel: 'System Backup',
backupSpec: null,
backupFilename: '',
backupError: false,
backupSuccess: false,
backupRunning: false,
restoreSpec: null,
restoreButtonLabel: 'Restore',
restoreUploadReady: false,
confirmRestore: '',
didReceiveAttrs() {
this._super(...arguments);
this.set('backupSpec', {
retain: true,
org: this.get('appMeta.orgId')
});
this.set('restoreSpec', {
overwriteOrg: true,
recreateUsers: true
});
this.set('restoreFile', null);
this.set('confirmRestore', '');
this.set('backupType', { Tenant: true, System: false });
},
didInsertElement() {
this._super(...arguments);
this.$('#restore-file').on('change', function(){
var fileName = document.getElementById("restore-file").files[0].name;
$(this).next('.custom-file-label').html(fileName);
});
},
doBackup() {
this.showWait();
this.set('backupFilename', '');
this.set('backupSuccess', false);
this.set('backupFailed', false);
this.set('backupRunning', true);
let spec = this.get('backupSpec');
this.get('onBackup')(spec).then((filename) => {
this.showDone();
this.set('backupLabel', 'Start Backup');
this.set('backupSuccess', true);
this.set('backupFilename', filename);
this.set('backupRunning', false);
}, ()=> {
this.showDone();
this.set('backupLabel', 'Run Backup');
this.set('backupFailed', true);
this.set('backupRunning', false);
});
},
actions: {
onBackup() {
this.showWait();
this.set('buttonLabel', 'Please wait, backup running...');
// We perform tenant level backup.
this.set('backupSpec.org', this.get('appMeta.orgId'));
this.get('onBackup')({}).then(() => {
this.set('buttonLabel', 'Run Backup');
this.doBackup();
},
onSystemBackup() {
// We perform system-level backup.
this.set('backupSpec.org', '*');
this.doBackup();
},
onShowRestoreModal() {
this.modalOpen("#confirm-restore-modal", {"show": true}, '#confirm-restore');
},
onRestore(e) {
e.preventDefault();
let typed = this.get('confirmRestore');
typed = typed.toLowerCase();
if (typed !== 'restore' || typed === '') {
$("#confirm-restore").addClass("is-invalid").focus();
return;
}
this.set('confirmRestore', '');
$("#confirm-restore").removeClass("is-invalid");
this.modalClose('#confirm-restore-modal');
// do we have upload file?
// let files = document.getElementById("restore-file").files;
// if (is.undefined(files) || is.null(files)) {
// return;
// }
// let file = document.getElementById("restore-file").files[0];
// if (is.undefined(file) || is.null(file)) {
// return;
// }
let filedata = this.get('restoreFile');
if (is.null(filedata)) {
return;
}
// start restore process
this.showWait();
this.set('restoreButtonLabel', 'Please wait, restore running...');
this.set('restoreSuccess', false);
this.set('restoreFailed', false);
// If Documize Global Admin we perform system-level restore.
// Otherwise it is current tenant backup.
let spec = this.get('restoreSpec');
if (this.get('session.isGlobalAdmin')) {
spec.org = "*";
}
this.get('onRestore')(spec, filedata).then(() => {
this.showDone();
this.set('backupLabel', 'Restore');
this.set('restoreSuccess', true);
this.get('router').transitionTo('auth.logout');
}, ()=> {
this.showDone();
this.set('restorbackupLabel', 'Restore');
this.set('restoreFailed', true);
});
},
upload(event) {
this.set('restoreUploadReady', false);
this.set('restoreFile', null);
// const reader = new FileReader();
const file = event.target.files[0];
this.set('restoreFile', file);
this.set('restoreUploadReady', true);
// let imageData;
// reader.onload = () => {
// imageData = reader.result;
// this.set('restoreFile', imageData);
// this.set('restoreUploadReady', true);
// this.set('restoreUploading', false);
// };
// if (file) {
// reader.readAsDataURL(file);
// }
}
}
});
// {{#ui/ui-checkbox selected=restoreSpec.recreateUsers}}
// Recreate user accounts &mdash; users, groups, permissions
// {{/ui/ui-checkbox}}

View file

@ -127,7 +127,7 @@ export default Component.extend(ModalMixin, TooltipMixin, {
selection = {
context: '',
documentId: documentId,
folderId: folderId,
spaceId: folderId,
id: stringUtil.makeId(16),
linkType: 'network',
targetId: '',

View file

@ -147,7 +147,7 @@ export default Component.extend(ModalMixin, Tooltips, {
this.get('documentService').getPageMeta(this.get('document.id'), page.get('id')).then((pm) => {
let block = {
folderId: this.get('folder.id'),
spaceId: this.get('folder.id'),
contentType: page.get('contentType'),
pageType: page.get('pageType'),
title: blockTitle,

View file

@ -137,7 +137,7 @@ export default Component.extend(Notifier, {
// prepare links associated with document
link.forEach((l) => {
let t = {
folderId: folderId,
spaceId: folderId,
documentId: docId,
categoryId: l.get('id')
};
@ -148,7 +148,7 @@ export default Component.extend(Notifier, {
// prepare links no longer associated with document
unlink.forEach((l) => {
let t = {
folderId: folderId,
spaceId: folderId,
documentId: docId,
categoryId: l.get('id')
};

View file

@ -108,7 +108,7 @@ export default Component.extend(ModalMixin, TooltipMixin, Notifer, {
let c = {
category: cat,
folderId: this.get('space.id')
spaceId: this.get('space.id')
};
this.showWait();

View file

@ -41,11 +41,11 @@ export default Component.extend(AuthMixin, Notifier, {
let folder = this.get('space');
let spaceTypeOptions = A([]);
spaceTypeOptions.pushObject({id: constants.FolderType.Private, label: 'Private - viewable only by me'});
spaceTypeOptions.pushObject({id: constants.FolderType.Protected, label: 'Protected - access is restricted to selected users'});
spaceTypeOptions.pushObject({id: constants.FolderType.Public, label: 'Public - can be seen by everyone'});
spaceTypeOptions.pushObject({id: constants.SpaceType.Private, label: 'Private - viewable only by me'});
spaceTypeOptions.pushObject({id: constants.SpaceType.Protected, label: 'Protected - access is restricted to selected users'});
spaceTypeOptions.pushObject({id: constants.SpaceType.Public, label: 'Public - can be seen by everyone'});
this.set('spaceTypeOptions', spaceTypeOptions);
this.set('spaceType', spaceTypeOptions.findBy('id', folder.get('folderType')));
this.set('spaceType', spaceTypeOptions.findBy('id', folder.get('spaceType')));
this.set('allowLikes', folder.get('allowLikes'));
@ -75,7 +75,7 @@ export default Component.extend(AuthMixin, Notifier, {
if (!this.get('isSpaceAdmin')) return;
let space = this.get('space');
space.set('folderType', this.get('spaceType.id'));
space.set('spaceType', this.get('spaceType.id'));
let allowLikes = this.get('allowLikes');
space.set('likes', allowLikes ? this.get('likes') : '');

View file

@ -99,7 +99,7 @@ export default Component.extend(Notifier, Modals, {
let raw = {
id: stringUtil.makeId(16),
orgId: this.get('folder.orgId'),
folderId: this.get('folder.id'),
spaceId: this.get('folder.id'),
whoId: whoId,
who: who,
name: name,

View file

@ -92,7 +92,7 @@ export default Component.extend(ModalMixin, {
actions: {
jumpToPin(pin) {
let folderId = pin.get('folderId');
let folderId = pin.get('spaceId');
let documentId = pin.get('documentId');
if (_.isEmpty(documentId)) {

View file

@ -36,13 +36,13 @@ export default Component.extend(TooltipMixin, NotifierMixin, AuthMixin, {
let privateFolders = [];
_.each(folders, folder => {
if (folder.get('folderType') === constants.FolderType.Public) {
if (folder.get('spaceType') === constants.SpaceType.Public) {
publicFolders.pushObject(folder);
}
if (folder.get('folderType') === constants.FolderType.Private) {
if (folder.get('spaceType') === constants.SpaceType.Private) {
privateFolders.pushObject(folder);
}
if (folder.get('folderType') === constants.FolderType.Protected) {
if (folder.get('spaceType') === constants.SpaceType.Protected) {
protectedFolders.pushObject(folder);
}
});

View file

@ -92,7 +92,7 @@ export default Component.extend(ModalMixin, TooltipMixin, AuthMixin, Notifier, {
let pin = {
pin: this.get('pinState.newName'),
documentId: this.get('document.id'),
folderId: this.get('space.id')
spaceId: this.get('space.id')
};
this.get('pinned').pinItem(pin).then((pin) => {

View file

@ -164,7 +164,7 @@ export default Component.extend(ModalMixin, TooltipMixin, AuthMixin, Notifier, {
let pin = {
pin: this.get('pinState.newName'),
documentId: '',
folderId: this.get('space.id')
spaceId: this.get('space.id')
};
this.get('pinned').pinItem(pin).then((pin) => {

View file

@ -15,7 +15,7 @@ import EmberObject from "@ember/object";
// let constants = this.get('constants');
let constants = EmberObject.extend({
FolderType: { // eslint-disable-line ember/avoid-leaking-state-in-ember-objects
SpaceType: { // eslint-disable-line ember/avoid-leaking-state-in-ember-objects
Public: 1,
Private: 2,
Protected: 3

View file

@ -14,7 +14,7 @@ import attr from 'ember-data/attr';
export default Model.extend({
orgId: attr('string'),
folderId: attr('string'),
spaceId: attr('string'),
userId: attr('string'),
contentType: attr('string'),
pageType: attr('string'),

View file

@ -14,7 +14,7 @@ import attr from 'ember-data/attr';
export default Model.extend({
orgId: attr('string'),
folderId: attr('string'),
spaceId: attr('string'),
category: attr('string'),
created: attr(),
revised: attr(),

View file

@ -15,7 +15,7 @@ import attr from 'ember-data/attr';
export default Model.extend({
orgId: attr('string'),
folderId: attr('string'),
spaceId: attr('string'),
documentId: attr('string'),
pageId: attr('string'),
pageTitle: attr('string'),

View file

@ -20,7 +20,7 @@ export default Model.extend({
job: attr('string'),
location: attr('string'),
orgId: attr('string'),
folderId: attr('string'),
spaceId: attr('string'),
userId: attr('string'),
tags: attr('string'),
template: attr('boolean'),

View file

@ -18,7 +18,7 @@ export default Model.extend({
name: attr('string'),
orgId: attr('string'),
userId: attr('string'),
folderType: attr('number', { defaultValue: 2 }),
spaceType: attr('number', { defaultValue: 2 }),
lifecycle: attr('number', { defaultValue: 1 }),
likes: attr('string'),
@ -32,17 +32,17 @@ export default Model.extend({
markAsRestricted() {
let constants = this.get('constants');
this.set('folderType', constants.FolderType.Protected);
this.set('spaceType', constants.SpaceType.Protected);
},
markAsPrivate() {
let constants = this.get('constants');
this.set('folderType', constants.FolderType.Private);
this.set('spaceType', constants.SpaceType.Private);
},
markAsPublic() {
let constants = this.get('constants');
this.set('folderType', constants.FolderType.Public);
this.set('spaceType', constants.SpaceType.Public);
},
// client-side prop that holds who can see this folder

View file

@ -16,7 +16,7 @@ import { computed } from '@ember/object';
export default Model.extend({
orgId: attr('string'),
userId: attr('string'),
folderId: attr('string'),
spaceId: attr('string'),
documentId: attr('string'),
sequence: attr('number', { defaultValue: 99 }),
pin: attr('string'),

View file

@ -20,8 +20,8 @@ export default Model.extend({
firstname: attr('string'),
lastname: attr('string'),
name: attr('string'),
folderId: attr('string'),
folderType: attr('number', { defaultValue: 0 }),
spaceId: attr('string'),
spaceType: attr('number', { defaultValue: 0 }),
fullname: computed('firstname', 'lastname', function () {
return `${this.get('firstname')} ${this.get('lastname')}`;

View file

@ -14,7 +14,7 @@ import attr from 'ember-data/attr';
export default Model.extend({
orgId: attr('string'),
folderId: attr('string'),
spaceId: attr('string'),
whoId: attr('string'),
who: attr('string'),
spaceView: attr('boolean'),

View file

@ -16,7 +16,7 @@ import attr from 'ember-data/attr';
export default Model.extend({
documentName: attr('string'),
documentId: attr('string'),
folderId: attr('string'),
spaceId: attr('string'),
contributed: attr('string'),
viewed: attr('string'),
created: attr('string'),

View file

@ -17,9 +17,11 @@ export default Controller.extend({
actions: {
onBackup(spec) {
if(this.get('session.isAdmin')) {
return this.get('global').backup(spec);
}
return this.get('global').backup(spec);
},
onRestore(spec, filedata) {
return this.get('global').restore(spec, filedata);
}
}
});

View file

@ -1 +1 @@
{{customize/backup-restore onBackup=(action 'onBackup')}}
{{customize/backup-restore onBackup=(action 'onBackup') onRestore=(action 'onRestore')}}

View file

@ -65,7 +65,7 @@ export default Controller.extend(TooltipMixin, Notifier, {
this.set('deleteSpace.name', '');
this.get('folderService').adminList().then((folders) => {
let nonPrivateFolders = folders.rejectBy('folderType', 2);
let nonPrivateFolders = folders.rejectBy('spaceType', 2);
if (is.empty(nonPrivateFolders) || is.null(folders) || is.undefined(folders)) {
nonPrivateFolders = [];
}

View file

@ -27,7 +27,7 @@ export default Route.extend(AuthenticatedRouteMixin, {
},
setupController(controller, model) {
let nonPrivateFolders = model.rejectBy('folderType', 2);
let nonPrivateFolders = model.rejectBy('spaceType', 2);
if (is.empty(nonPrivateFolders) || is.null(model) || is.undefined(model)) {
nonPrivateFolders = [];
}

View file

@ -30,6 +30,7 @@
{{#if session.isGlobalAdmin}}
{{#link-to 'customize.license' activeClass='selected' class="tab tab-vertical" tagName="li" }}Product{{/link-to}}
{{/if}}
{{#link-to 'customize.backup' activeClass='selected' class="tab tab-vertical" tagName="li" }}Backup & Restore{{/link-to}}
</ul>
</div>
{{/layout/middle-zone-sidebar}}

View file

@ -48,7 +48,7 @@ export default Controller.extend(NotifierMixin, {
all(promises1).then(() => {
promises1.forEach(function(doc, index) {
doc.then((d) => {
d.set('folderId', targetSpaceId);
d.set('spaceId', targetSpaceId);
d.set('selected', false);
promises2[index] = self.get('documentService').save(d);
});

View file

@ -410,7 +410,7 @@ export default Service.extend({
data.permissions = perms;
data.roles = roles;
data.folders = folders;
data.folder = folders.findBy('id', doc.get('folderId'));
data.folder = folders.findBy('id', doc.get('spaceId'));
data.links = response.links;
data.versions = response.versions;

View file

@ -18,6 +18,7 @@ export default Service.extend({
appMeta: service(),
browserSvc: service('browser'),
store: service(),
router: service(),
// Returns SMTP configuration.
getSMTPConfig() {
@ -141,9 +142,13 @@ export default Service.extend({
}
},
// Run tenant level backup.
// Run backup.
backup(spec) {
return new EmberPromise((resolve) => {
return new EmberPromise((resolve, reject) => {
if (!this.get('sessionService.isGlobalAdmin') && !this.get('sessionService.isAdmin')) {
reject();
}
let url = this.get('appMeta.endpoint');
let token = this.get('sessionService.session.content.authenticated.token');
let uploadUrl = `${url}/global/backup?token=${token}`;
@ -162,19 +167,59 @@ export default Service.extend({
a.style = "display: none";
document.body.appendChild(a);
let filename = xhr.getResponseHeader('x-documize-filename').replace('"', '');
let url = window.URL.createObjectURL(blob);
a.href = url;
a.download = xhr.getResponseHeader('x-documize-filename').replace('"', '');
a.download = filename;
a.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(a);
resolve();
resolve(filename);
} else {
reject();
}
}
xhr.onerror= function() {
reject();
}
xhr.send(JSON.stringify(spec));
});
},
restore(spec, file) {
var data = new FormData();
data.set('restore-file', file);
return new EmberPromise((resolve, reject) => {
if (!this.get('sessionService.isGlobalAdmin') && !this.get('sessionService.isAdmin')) {
reject();
}
let url = this.get('appMeta.endpoint');
let token = this.get('sessionService.session.content.authenticated.token');
let uploadUrl = `${url}/global/restore?token=${token}&org=${spec.overwriteOrg}&users=${spec.recreateUsers}`;
var xhr = new XMLHttpRequest();
xhr.open('POST', uploadUrl);
xhr.onload = function() {
if (this.status == 200) {
resolve();
} else {
reject();
}
}
xhr.onerror= function() {
reject();
}
xhr.send(data);
});
}
});

View file

@ -82,7 +82,7 @@ export default Service.extend(Notifier, {
linkId: a.attributes["data-link-id"].value,
linkType: a.attributes["data-link-type"].value,
documentId: a.attributes["data-link-target-document-id"].value,
folderId: a.attributes["data-link-space-id"].value,
spaceId: a.attributes["data-link-space-id"].value,
targetId: a.attributes["data-link-target-id"].value,
externalId: is.undefined(a.attributes["data-link-external-id"]) ? '' : a.attributes["data-link-external-id"].value,
url: a.attributes["href"].value,

View file

@ -133,7 +133,7 @@ export default Service.extend({
return this.getUserPins().then((pins) => {
pins.forEach((pin) => {
if (pin.get('userId') === userId && pin.get('documentId') === '' && pin.get('folderId') === spaceId) {
if (pin.get('userId') === userId && pin.get('documentId') === '' && pin.get('spaceId') === spaceId) {
resolve(pin.get('id'));
}
});

View file

@ -112,6 +112,7 @@ $link-hover-decoration: none;
@import "node_modules/bootstrap/scss/button-group";
@import "node_modules/bootstrap/scss/dropdown";
@import "node_modules/bootstrap/scss/forms";
@import "node_modules/bootstrap/scss/custom-forms";
@import "node_modules/bootstrap/scss/input-group";
@import "node_modules/bootstrap/scss/modal";
@import "node_modules/bootstrap/scss/utilities";

View file

@ -152,4 +152,74 @@
> .max-results {
float: right;
}
> .backup-restore {
margin: 20px 0;
font-size: 1.1rem;
> .backup-zone {
@include border-radius(3px);
border: 1px solid $color-border;
padding: 20px 20px;
background-color: lighten($color-green, 60%);
color: $color-off-black;
> .backup-fail {
margin: 10px 0;
color: $color-red;
}
> .backup-success {
margin: 10px 0;
color: $color-green;
}
}
> .restore-zone {
@include border-radius(3px);
border: 1px solid $color-border;
margin: 50px 0;
padding: 20px 20px;
background-color: lighten($color-red, 60%);
color: $color-off-black;
> .restore-fail {
margin: 10px 0;
color: $color-red;
}
> .restore-success {
margin: 10px 0;
color: $color-green;
}
> .upload-backup-file {
@include ease-in();
margin: 50px 0 10px 0;
> .dz-preview, .dz-processing {
display: none !important;
}
}
.restore-upload-busy {
text-align: center;
> img {
height: 50px;
width: 50px;
}
> .wait {
color: $color-gray;
margin: 10px 0;
}
> .ready {
color: $color-green;
margin: 10px 0;
}
}
}
}
}

View file

@ -7,33 +7,33 @@
margin: 0 0 5px 0;
> .material-icons {
font-size: 1rem;
font-size: 1.5rem;
color: $color-gray;
vertical-align: top;
}
> .selected {
color: $color-link;
color: $color-blue;
}
&:hover {
color: $color-link;
color: $color-blue;
}
> .text {
display: inline-block;
font-size: 0.9rem;
vertical-align: text-top;
font-size: 1.1rem;
vertical-align: sub;
color: $color-off-black;
}
}
.ui-checkbox-selected {
color: $color-link;
color: $color-blue;
}
.widget-checkbox {
color: $color-link;
color: $color-blue;
cursor: pointer;
}

View file

@ -7,21 +7,29 @@
margin: 0 0 5px 0;
> .material-icons {
font-size: 1.4rem;
font-size: 1.5rem;
color: $color-gray;
vertical-align: top;
margin-right: 5px;
}
> .selected {
color: $color-link;
color: $color-blue;
}
&:hover {
color: $color-link;
color: $color-blue;
}
> .text {
display: inline-block;
font-size: 1.1rem;
vertical-align: sub;
color: $color-off-black;
}
}
.ui-radio-selected {
color: $color-link;
color: $color-blue;
}

View file

@ -1,4 +1,3 @@
<div class="row">
<div class="col">
<div class="view-customize">
@ -9,10 +8,95 @@
</div>
<div class="view-customize">
<form class="mt-5 ">
<div class="form-group">
<p>It can take several minutes to complete the backup process &mdash; please be patient while the backup is running.</p>
<div class="btn btn-success mt-3" {{action 'onBackup'}}>{{buttonLabel}}</div>
<div class="backup-restore">
<div class="backup-zone">
{{#if session.isGlobalAdmin}}
<p>
Documize is a multi-tenanted application enabling both "tech.mycompany.com" and "sales.mycompany.com" to run using the same executable/database.
As a Documize <b>Global Administrator</b>, you will be performing a complete system-wide backup across all tenants.
A Documize <b>Tenant Administrator</b> can login to perform a tenant-level backup (e.g. marketing.mycompany.com).
</p>
{{else}}
<p>
Documize is a multi-tenanted application enabling both "tech.mycompany.com" and "sales.mycompany.com" to run using the same executable/database.
A Documize <b>Global Administrator</b> can perform a complete system-wide backup across all tenants.
As a Documize <b>Tenant Administrator</b> you can perform a tenant-level backup (e.g. marketing.mycompany.com).
</p>
{{/if}}
<p>Please use a Tenant Backup when migrating between self-host and Documize Cloud hosting.</p>
<p>It can take <b>several minutes</b> to complete the backup process &mdash; please be patient while the backup operation is in progress.</p>
<div class="margin-top-30 margin-bottom-20">
{{#ui/ui-checkbox selected=backupSpec.retain}}
Retain backup file on server
{{/ui/ui-checkbox}}
</div>
{{#if backupRunning}}
<h3 class="text-success">Backup running, please wait...</h3>
{{else}}
<button class="btn btn-success mb-3" {{action 'onBackup'}}>TENANT BACKUP ({{appMeta.appHost}})</button>
{{#if session.isGlobalAdmin}}
<div class="button-gap" />
<button class="btn btn-success mb-3" {{action 'onSystemBackup'}}>SYSTEM BACKUP</button>
{{/if}}
{{/if}}
{{#if backupFailed}}
<div class="backup-fail">Backup failed &mdash; please check server logs</div>
{{/if}}
{{#if backupSuccess}}
<div class="backup-success">Backup successful ({{backupFilename}})</div>
{{/if}}
</div>
</form>
</div>
<div class="backup-restore">
<div class="restore-zone">
{{#if session.isGlobalAdmin}}
<p class="text-danger">Restore from a <b>system backup</b> should only be performed on an <b>empty Documize database.</b></p>
{{/if}}
<p>Restore operation will <b>re-create</b> users, groups, permissions, spaces, categories and content.</p>
<p>It can take <b>several minutes</b> to complete the restore process &mdash; please be patient while the restore operation is in progress.</p>
<div class="margin-top-30 margin-bottom-20">
<div class="custom-file">
<input type="file" class="custom-file-input" id="restore-file" accept="application/zip" multiple=false onchange={{action "upload"}}>
<label class="custom-file-label" for="restore-file">Choose backup file</label>
</div>
<div class="margin-top-20"></div>
</div>
{{#if restoreFailed}}
<div class="restore-fail">Restore failed &mdash; please check server logs</div>
{{else if restoreSuccess}}
<div class="restore-success">Restore completed &mdash; restart your browser and log in</div>
{{else}}
{{#if restoreUploadReady}}
<button class="btn btn-danger mb-3" {{action 'onShowRestoreModal'}}>{{restoreButtonLabel}}</button>
{{/if}}
{{/if}}
</div>
</div>
</div>
<div id="confirm-restore-modal" class="modal" tabindex="-1" role="dialog">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">Confirm Restore</div>
<div class="modal-body">
<form onsubmit={{action 'onRestore'}}>
<div class="form-group">
<label for="delete-space-name">Please type RESTORE to commence the process</label>
{{input type='text' id="confirm-restore" class="form-control mousetrap" placeholder="Please type RESTORE" value=confirmRestore}}
<small class="form-text text-muted">You should only restore to an empty Documize instance</small>
</div>
</form>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-outline-secondary" data-dismiss="modal">Cancel</button>
<button type="button" class="btn btn-danger" onclick={{action 'onRestore'}}>Start Restore</button>
</div>
</div>
</div>
</div>

View file

@ -4,5 +4,5 @@
{{else}}
<i class="material-icons">radio_button_unchecked</i>
{{/if}}
{{yield}}
<div class="text">{{yield}}</div>
</div>

View file

@ -262,7 +262,7 @@ export default function () {
"name": name,
"orgId": "VzMuyEw_3WqiafcD",
"userId": "VzMuyEw_3WqiafcE",
"folderType": 2
"spaceType": 2
};
return schema.db.folders.insert(folder);
@ -346,7 +346,7 @@ export default function () {
"name": "Test Folder",
"orgId": "VzMuyEw_3WqiafcD",
"userId": "VzMuyEw_3WqiafcE",
"folderType": 2
"spaceType": 2
};
});

View file

@ -1,11 +1,11 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
// by contacting <sales@documize.com>.
//
// https://documize.com
@ -18,5 +18,5 @@ export default Mirage.Factory.extend({
"name": faker.list.cycle('My Project', 'Test'),
"orgId": "VzMuyEw_3WqiafcD",
"userId": "VzMuyEw_3WqiafcE",
"folderType": faker.list.cycle(1, 2)
});
"spaceType": faker.list.cycle(1, 2)
});

View file

@ -1,6 +1,6 @@
{
"name": "documize",
"version": "1.71.0",
"version": "1.72.0",
"description": "The Document IDE",
"private": true,
"repository": "",

62
model/action/action.go Normal file
View file

@ -0,0 +1,62 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package action
import (
"time"
"github.com/documize/community/core/timeutil"
"github.com/documize/community/model"
)
// UserAction represents an action that a user should perform on a document.
type UserAction struct {
	model.BaseEntity
	OrgID       string            `json:"orgId"`         // owning organization
	DocumentID  string            `json:"documentId"`    // document the action relates to
	UserID      string            `json:"userId"`        // user expected to perform the action
	ActionType  Type              `json:"actionType"`    // kind of action requested (see Type constants)
	RefType     string            `json:"refType"`       // page or attachment
	RefTypeID   string            `json:"refTypeId"`     // page or attachment ID
	Note        string            `json:"note"`          // free-text note accompanying the request
	RequestorID string            `json:"requestorId"`   // user who raised the request
	Requested   time.Time         `json:"requestedDate"` // when the action was requested
	Due         time.Time         `json:"dueDate"`       // when the action is due
	Completed   timeutil.NullTime `json:"completedDate"` // completion time; null while outstanding
	IsComplete  bool              `json:"isComplete"`    // convenience flag — presumably kept in sync with Completed; TODO confirm
}
// Type determines the type of action that has been requested of a user.
type Type int

// Supported action types. These numeric values are persisted, so they must
// remain stable; iota+1 reproduces the original 1..7 numbering exactly.
const (
	// ActionTypeRead asks the user to read a document.
	ActionTypeRead Type = iota + 1
	// ActionTypeFeedback asks for feedback on a document.
	ActionTypeFeedback
	// ActionTypeContribute asks the user to contribute to a document.
	ActionTypeContribute
	// ActionTypeApprovalRequest asks for approval of a section change.
	ActionTypeApprovalRequest
	// ActionTypeApproved records an approved section change.
	ActionTypeApproved
	// ActionTypeRejected records a rejected section change.
	ActionTypeRejected
	// ActionTypePublish asks for content to be published as Live.
	ActionTypePublish
)

View file

@ -18,7 +18,7 @@ type UserActivity struct {
ID uint64 `json:"id"`
OrgID string `json:"orgId"`
UserID string `json:"userId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
DocumentID string `json:"documentId"`
SectionID string `json:"pageId"`
ActivityType Type `json:"activityType"`
@ -34,7 +34,7 @@ type UserActivity struct {
type DocumentActivity struct {
ID uint64 `json:"id"`
OrgID string `json:"orgId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
DocumentID string `json:"documentId"`
SectionID string `json:"pageId"`
SectionName string `json:"pageTitle"`

View file

@ -21,6 +21,6 @@ type Attachment struct {
Job string `json:"job"`
FileID string `json:"fileId"`
Filename string `json:"filename"`
Data []byte `json:"-"`
Data []byte `json:"data"`
Extension string `json:"extension"`
}

View file

@ -28,55 +28,81 @@ type AppEvent struct {
type EventType string
const (
EventTypeDocumentAdd EventType = "added-document"
EventTypeDocumentUpload EventType = "uploaded-document"
EventTypeDocumentView EventType = "viewed-document"
EventTypeDocumentUpdate EventType = "updated-document"
EventTypeDocumentDelete EventType = "removed-document"
EventTypeDocumentRevisions EventType = "viewed-document-revisions"
EventTypeDocumentPermission EventType = "changed-document-permissions"
EventTypeSpaceAdd EventType = "added-space"
EventTypeSpaceUpdate EventType = "updated-space"
EventTypeSpaceDelete EventType = "removed-space"
EventTypeSpacePermission EventType = "changed-space-permissions"
EventTypeSpaceJoin EventType = "joined-space"
EventTypeSpaceInvite EventType = "invited-space"
EventTypeCategoryPermission EventType = "changed-category-permissions"
EventTypeSectionAdd EventType = "added-document-section"
EventTypeSectionUpdate EventType = "updated-document-section"
EventTypeSectionDelete EventType = "removed-document-section"
EventTypeSectionRollback EventType = "rolled-back-document-section"
EventTypeSectionResequence EventType = "resequenced-document-section"
EventTypeSectionCopy EventType = "copied-document-section"
EventTypeAttachmentAdd EventType = "added-attachment"
EventTypeAttachmentDownload EventType = "downloaded-attachment"
EventTypeAttachmentDelete EventType = "removed-attachment"
EventTypePinAdd EventType = "added-pin"
EventTypePinDelete EventType = "removed-pin"
EventTypePinResequence EventType = "resequenced-pin"
EventTypeBlockAdd EventType = "added-reusable-block"
EventTypeBlockUpdate EventType = "updated-reusable-block"
EventTypeBlockDelete EventType = "removed-reusable-block"
EventTypeTemplateAdd EventType = "added-document-template"
EventTypeTemplateUse EventType = "used-document-template"
EventTypeUserAdd EventType = "added-user"
EventTypeUserUpdate EventType = "updated-user"
EventTypeUserDelete EventType = "removed-user"
EventTypeUserPasswordReset EventType = "reset-user-password"
EventTypeAccountAdd EventType = "added-account"
EventTypeSystemLicense EventType = "changed-system-license"
EventTypeSystemAuth EventType = "changed-system-auth"
EventTypeSystemSMTP EventType = "changed-system-smtp"
EventTypeSessionStart EventType = "started-session"
EventTypeSearch EventType = "searched"
EventTypeCategoryAdd EventType = "added-category"
EventTypeCategoryDelete EventType = "removed-category"
EventTypeCategoryUpdate EventType = "updated-category"
EventTypeCategoryLink EventType = "linked-category"
EventTypeCategoryUnlink EventType = "unlinked-category"
EventTypeGroupAdd EventType = "added-group"
EventTypeGroupDelete EventType = "removed-group"
EventTypeGroupUpdate EventType = "updated-group"
EventTypeGroupJoin EventType = "joined-group"
EventTypeGroupLeave EventType = "left-group"
EventTypeDocumentAdd EventType = "added-document"
EventTypeDocumentUpload EventType = "uploaded-document"
EventTypeDocumentView EventType = "viewed-document"
EventTypeDocumentUpdate EventType = "updated-document"
EventTypeDocumentDelete EventType = "removed-document"
EventTypeDocumentRevisions EventType = "viewed-document-revisions"
EventTypeDocumentPermission EventType = "changed-document-permissions"
EventTypeSpaceAdd EventType = "added-space"
EventTypeSpaceUpdate EventType = "updated-space"
EventTypeSpaceDelete EventType = "removed-space"
EventTypeSpacePermission EventType = "changed-space-permissions"
EventTypeSpaceJoin EventType = "joined-space"
EventTypeSpaceInvite EventType = "invited-space"
EventTypeCategoryPermission EventType = "changed-category-permissions"
EventTypeSectionAdd EventType = "added-document-section"
EventTypeSectionUpdate EventType = "updated-document-section"
EventTypeSectionDelete EventType = "removed-document-section"
EventTypeSectionRollback EventType = "rolled-back-document-section"
EventTypeSectionResequence EventType = "resequenced-document-section"
EventTypeSectionCopy EventType = "copied-document-section"
EventTypeAttachmentAdd EventType = "added-attachment"
EventTypeAttachmentDownload EventType = "downloaded-attachment"
EventTypeAttachmentDelete EventType = "removed-attachment"
EventTypePinAdd EventType = "added-pin"
EventTypePinDelete EventType = "removed-pin"
EventTypePinResequence EventType = "resequenced-pin"
EventTypeBlockAdd EventType = "added-reusable-block"
EventTypeBlockUpdate EventType = "updated-reusable-block"
EventTypeBlockDelete EventType = "removed-reusable-block"
EventTypeTemplateAdd EventType = "added-document-template"
EventTypeTemplateUse EventType = "used-document-template"
EventTypeUserAdd EventType = "added-user"
EventTypeUserUpdate EventType = "updated-user"
EventTypeUserDelete EventType = "removed-user"
EventTypeUserPasswordReset EventType = "reset-user-password"
EventTypeAccountAdd EventType = "added-account"
EventTypeSystemLicense EventType = "changed-system-license"
EventTypeSystemAuth EventType = "changed-system-auth"
EventTypeSystemSMTP EventType = "changed-system-smtp"
EventTypeSessionStart EventType = "started-session"
EventTypeSearch EventType = "searched"
EventTypeCategoryAdd EventType = "added-category"
EventTypeCategoryDelete EventType = "removed-category"
EventTypeCategoryUpdate EventType = "updated-category"
EventTypeCategoryLink EventType = "linked-category"
EventTypeCategoryUnlink EventType = "unlinked-category"
EventTypeGroupAdd EventType = "added-group"
EventTypeGroupDelete EventType = "removed-group"
EventTypeGroupUpdate EventType = "updated-group"
EventTypeGroupJoin EventType = "joined-group"
EventTypeGroupLeave EventType = "left-group"
EventTypeSecureShare EventType = "shared-secure-document"
EventTypeFeedbackAdd EventType = "added-feedback"
EventTypeFeedbackEdit EventType = "edited-feedback"
EventTypePDF EventType = "generated-pdf"
EventTypeActionAdd EventType = "added-action"
EventTypeActionUpdate EventType = "updated-action"
EventTypeActionView EventType = "viewed-actions"
EventTypeActionDelete EventType = "removed-action"
EventTypeWorkflowApprovalRequested EventType = "request-approval"
EventTypeWorkflowApprovalWithdrawn EventType = "withdrew-approval"
EventTypeWorkflowDiscardChanges EventType = "discarded-changes"
EventTypeWorkflowApprovedChange EventType = "approved-change"
EventTypeWorkflowRejectedChange EventType = "rejected-change"
EventTypeWorkflowPublishRequested EventType = "requested-publication"
// EventTypeVersionAdd records addition of version
EventTypeVersionAdd EventType = "added-version"
// EventTypeVersionRemove records removal of version
EventTypeVersionRemove EventType = "removed-version"
// EventTypeVersionUnversion records disassociation of document from versioning group
EventTypeVersionUnversion EventType = "un-versioned-document"
// EventTypeVersionReorder records reordering of versions
EventTypeVersionReorder EventType = "reordered-version"
)

95
model/backup/backup.go Normal file
View file

@ -0,0 +1,95 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
// Package backup handles data backup/restore to/from ZIP format.
package backup
import (
"time"
"github.com/documize/community/core/env"
"github.com/documize/community/model"
"github.com/documize/community/model/org"
)
// Manifest contains backup meta information: who the backup is for, when it
// was taken, and which product version/database produced it.
type Manifest struct {
	// ID is unique per backup.
	ID string `json:"id"`

	// OrgID scopes the backup. A value of "*" means all
	// tenants/organizations are backed up (requires global admin
	// permission); a genuine ID means only that specific organization
	// is backed up.
	OrgID string `json:"org"`

	// Edition is the product edition at the time of the backup.
	Edition string `json:"edition"`

	// Created records when the backup took place.
	Created time.Time `json:"created"`

	// Product version at the time of the backup.
	Major    string `json:"major"`
	Minor    string `json:"minor"`
	Patch    string `json:"patch"`
	Revision int    `json:"revision"`
	Version  string `json:"version"`

	// StoreType is the database provider used by the source system.
	StoreType env.StoreType `json:"storeType"`
}
// ExportSpec controls what data is exported to the backup file.
type ExportSpec struct {
	// OrgID scopes the export. A value of "*" means all
	// tenants/organizations are backed up (requires global admin
	// permission); a genuine ID means only that specific organization
	// is backed up.
	OrgID string `json:"org"`

	// Retain keeps the backup file on disk after the operation completes.
	// The file is located in the same folder as the running executable.
	Retain bool `json:"retain"`
}

// SystemBackup reports whether this spec asks for a system-wide backup,
// which is signalled by the wildcard org ID "*".
func (e *ExportSpec) SystemBackup() bool {
	const allTenants = "*"
	return e.OrgID == allTenants
}
// ImportSpec controls what content is imported and how.
type ImportSpec struct {
	// OverwriteOrg, when true, overwrites the current organization
	// settings with those found in the backup file.
	OverwriteOrg bool `json:"overwriteOrg"`

	// Manifest as found in the backup file.
	Manifest Manifest

	// Org is a handle to the current organization being used for the
	// restore process.
	Org org.Organization

	// GlobalBackup records whether the backup file was for a global
	// system backup, i.e. TRUE if Manifest.Org = "*".
	GlobalBackup bool
}
// User represents the user object for backup/restore operations.
// We include user-specific secrets in such operations so that accounts
// can be re-created in full on restore.
type User struct {
	model.BaseEntity
	Firstname   string `json:"firstname"`
	Lastname    string `json:"lastname"`
	Email       string `json:"email"`
	Initials    string `json:"initials"`
	Active      bool   `json:"active"`
	GlobalAdmin bool   `json:"global"`      // true for system-wide administrators
	Password    string `json:"password"`    // stored password value — presumably hashed; confirm against the user store
	Salt        string `json:"salt"`        // per-user salt paired with Password
	Reset       string `json:"reset"`       // outstanding password-reset token, if any — TODO confirm
	LastVersion string `json:"lastVersion"` // NOTE(review): looks like the last product version seen by the user — confirm
}

View file

@ -17,7 +17,7 @@ import "github.com/documize/community/model"
type Block struct {
model.BaseEntity
OrgID string `json:"orgId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
UserID string `json:"userId"`
ContentType string `json:"contentType"`
Type string `json:"pageType"`

View file

@ -17,7 +17,7 @@ import "github.com/documize/community/model"
type Category struct {
model.BaseEntity
OrgID string `json:"orgId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
Name string `json:"category"`
}
@ -26,7 +26,7 @@ type Member struct {
model.BaseEntity
OrgID string `json:"orgId"`
CategoryID string `json:"categoryId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
DocumentID string `json:"documentId"`
}

View file

@ -23,7 +23,7 @@ import (
type Document struct {
model.BaseEntity
OrgID string `json:"orgId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
UserID string `json:"userId"`
Job string `json:"job"`
Location string `json:"location"`
@ -73,7 +73,7 @@ type DocumentMetaViewer struct {
// DocumentMetaEditor contains the "edit" metatdata content.
type DocumentMetaEditor struct {
PageID string `json:"pageId"`
SectionID string `json:"pageId"`
UserID string `json:"userId"`
Action string `json:"action"`
Created time.Time `json:"created"`

View file

@ -17,7 +17,7 @@ import "github.com/documize/community/model"
type Link struct {
model.BaseEntity
OrgID string `json:"orgId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
UserID string `json:"userId"`
LinkType string `json:"linkType"`
SourceDocumentID string `json:"sourceDocumentId"`
@ -32,7 +32,7 @@ type Link struct {
type Candidate struct {
RefID string `json:"id"`
LinkType string `json:"linkType"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
DocumentID string `json:"documentId"`
TargetID string `json:"targetId"`
Title string `json:"title"` // what we label the link

View file

@ -21,7 +21,7 @@ import (
type SitemapDocument struct {
DocumentID string
Document string
FolderID string
SpaceID string
Folder string
Revised time.Time
}

View file

@ -16,7 +16,7 @@ import "github.com/documize/community/model"
// Organization defines a company that uses this app.
type Organization struct {
model.BaseEntity
Company string `json:"-"`
Company string `json:"company"`
Title string `json:"title"`
Message string `json:"message"`
Domain string `json:"domain"`
@ -26,6 +26,6 @@ type Organization struct {
AuthConfig string `json:"authConfig"`
ConversionEndpoint string `json:"conversionEndpoint"`
MaxTags int `json:"maxTags"`
Serial string `json:"-"`
Active bool `json:"-"`
Serial string `json:"serial"`
Active bool `json:"active"`
}

View file

@ -17,7 +17,7 @@ package permission
type Record struct {
ID uint64 `json:"id"`
OrgID string `json:"orgId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
WhoID string `json:"whoId"`
Who WhoType `json:"who"`
SpaceView bool `json:"spaceView"`
@ -149,7 +149,7 @@ func EncodeRecord(r Record, a Action) (p Permission) {
// CategoryViewRequestModel represents who should be allowed to see a category.
type CategoryViewRequestModel struct {
OrgID string `json:"orgId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
CategoryID string `json:"categoryID"`
WhoID string `json:"whoId"`
Who WhoType `json:"who"`

View file

@ -18,7 +18,7 @@ type Pin struct {
model.BaseEntity
OrgID string `json:"orgId"`
UserID string `json:"userId"`
SpaceID string `json:"folderId"`
SpaceID string `json:"spaceId"`
DocumentID string `json:"documentId"`
Name string `json:"pin"`
Sequence int `json:"sequence"`

View file

@ -22,7 +22,7 @@ type Space struct {
Name string `json:"name"`
OrgID string `json:"orgId"`
UserID string `json:"userId"`
Type Scope `json:"folderType"`
Type Scope `json:"spaceType"`
// Lifecycle stores the default value all new documents are given upon creation.
Lifecycle workflow.Lifecycle `json:"lifecycle"`
@ -64,8 +64,8 @@ func (l *Space) IsRestricted() bool {
// Viewer details who can see a particular space
type Viewer struct {
Name string `json:"name"`
SpaceID string `json:"folderId"`
Type int `json:"folderType"`
SpaceID string `json:"spaceId"`
Type int `json:"spaceType"`
UserID string `json:"userId"`
Firstname string `json:"firstname"`
Lastname string `json:"lastname"`

View file

@ -97,7 +97,7 @@ func RegisterEndpoints(rt *env.Runtime, s *store.Store) {
// Secured private routes (require authentication)
//**************************************************
AddPrivate(rt, "import/folder/{folderID}", []string{"POST", "OPTIONS"}, nil, conversion.UploadConvert)
AddPrivate(rt, "import/folder/{spaceID}", []string{"POST", "OPTIONS"}, nil, conversion.UploadConvert)
AddPrivate(rt, "documents", []string{"GET", "OPTIONS"}, nil, document.BySpace)
AddPrivate(rt, "documents/{documentID}", []string{"GET", "OPTIONS"}, nil, document.Get)
@ -161,19 +161,19 @@ func RegisterEndpoints(rt *env.Runtime, s *store.Store) {
AddPrivate(rt, "search", []string{"POST", "OPTIONS"}, nil, document.SearchDocuments)
AddPrivate(rt, "templates", []string{"POST", "OPTIONS"}, nil, template.SaveAs)
AddPrivate(rt, "templates/{templateID}/folder/{folderID}", []string{"POST", "OPTIONS"}, []string{"type", "saved"}, template.Use)
AddPrivate(rt, "templates/{folderID}", []string{"GET", "OPTIONS"}, nil, template.SavedList)
AddPrivate(rt, "templates/{templateID}/folder/{spaceID}", []string{"POST", "OPTIONS"}, []string{"type", "saved"}, template.Use)
AddPrivate(rt, "templates/{spaceID}", []string{"GET", "OPTIONS"}, nil, template.SavedList)
AddPrivate(rt, "sections", []string{"GET", "OPTIONS"}, nil, section.GetSections)
AddPrivate(rt, "sections", []string{"POST", "OPTIONS"}, nil, section.RunSectionCommand)
AddPrivate(rt, "sections/refresh", []string{"GET", "OPTIONS"}, nil, section.RefreshSections)
AddPrivate(rt, "sections/blocks/space/{folderID}", []string{"GET", "OPTIONS"}, nil, block.GetBySpace)
AddPrivate(rt, "sections/blocks/space/{spaceID}", []string{"GET", "OPTIONS"}, nil, block.GetBySpace)
AddPrivate(rt, "sections/blocks/{blockID}", []string{"GET", "OPTIONS"}, nil, block.Get)
AddPrivate(rt, "sections/blocks/{blockID}", []string{"PUT", "OPTIONS"}, nil, block.Update)
AddPrivate(rt, "sections/blocks/{blockID}", []string{"DELETE", "OPTIONS"}, nil, block.Delete)
AddPrivate(rt, "sections/blocks", []string{"POST", "OPTIONS"}, nil, block.Add)
AddPrivate(rt, "links/{folderID}/{documentID}/{pageID}", []string{"GET", "OPTIONS"}, nil, link.GetLinkCandidates)
AddPrivate(rt, "links/{spaceID}/{documentID}/{pageID}", []string{"GET", "OPTIONS"}, nil, link.GetLinkCandidates)
AddPrivate(rt, "links", []string{"GET", "OPTIONS"}, nil, link.SearchLinkCandidates)
AddPrivate(rt, "documents/{documentID}/links", []string{"GET", "OPTIONS"}, nil, document.DocumentLinks)
@ -220,6 +220,7 @@ func RegisterEndpoints(rt *env.Runtime, s *store.Store) {
AddPrivate(rt, "global/ldap/preview", []string{"POST", "OPTIONS"}, nil, ldap.Preview)
AddPrivate(rt, "global/ldap/sync", []string{"GET", "OPTIONS"}, nil, ldap.Sync)
AddPrivate(rt, "global/backup", []string{"POST", "OPTIONS"}, nil, backup.Backup)
AddPrivate(rt, "global/restore", []string{"POST", "OPTIONS"}, nil, backup.Restore)
Add(rt, RoutePrefixRoot, "robots.txt", []string{"GET", "OPTIONS"}, nil, meta.RobotsTxt)
Add(rt, RoutePrefixRoot, "sitemap.xml", []string{"GET", "OPTIONS"}, nil, meta.Sitemap)

View file

@ -20,6 +20,8 @@ explains how to use `database/sql` along with sqlx.
## Recent Changes
* The [introduction](https://github.com/jmoiron/sqlx/pull/387) of `sql.ColumnType` sets the required minimum Go version to 1.8.
* sqlx/types.JsonText has been renamed to JSONText to follow Go naming conventions.
This breaks backwards compatibility, but it's in a way that is trivially fixable

View file

@ -2,6 +2,7 @@ package sqlx
import (
"bytes"
"database/sql/driver"
"errors"
"reflect"
"strconv"
@ -16,6 +17,7 @@ const (
QUESTION
DOLLAR
NAMED
AT
)
// BindType returns the bindtype for a given database given a drivername.
@ -29,6 +31,8 @@ func BindType(driverName string) int {
return QUESTION
case "oci8", "ora", "goracle":
return NAMED
case "sqlserver":
return AT
}
return UNKNOWN
}
@ -56,6 +60,8 @@ func Rebind(bindType int, query string) string {
rqb = append(rqb, '$')
case NAMED:
rqb = append(rqb, ':', 'a', 'r', 'g')
case AT:
rqb = append(rqb, '@', 'p')
}
j++
@ -110,6 +116,9 @@ func In(query string, args ...interface{}) (string, []interface{}, error) {
meta := make([]argMeta, len(args))
for i, arg := range args {
if a, ok := arg.(driver.Valuer); ok {
arg, _ = a.Value()
}
v := reflect.ValueOf(arg)
t := reflectx.Deref(v.Type())
@ -137,7 +146,7 @@ func In(query string, args ...interface{}) (string, []interface{}, error) {
}
newArgs := make([]interface{}, 0, flatArgsCount)
buf := bytes.NewBuffer(make([]byte, 0, len(query)+len(", ?")*flatArgsCount))
buf := make([]byte, 0, len(query)+len(", ?")*flatArgsCount)
var arg, offset int
@ -163,10 +172,10 @@ func In(query string, args ...interface{}) (string, []interface{}, error) {
}
// write everything up to and including our ? character
buf.WriteString(query[:offset+i+1])
buf = append(buf, query[:offset+i+1]...)
for si := 1; si < argMeta.length; si++ {
buf.WriteString(", ?")
buf = append(buf, ", ?"...)
}
newArgs = appendReflectSlice(newArgs, argMeta.v, argMeta.length)
@ -177,13 +186,13 @@ func In(query string, args ...interface{}) (string, []interface{}, error) {
offset = 0
}
buf.WriteString(query)
buf = append(buf, query...)
if arg < len(meta) {
return "", nil, errors.New("number of bindVars less than number arguments")
}
return buf.String(), newArgs, nil
return string(buf), newArgs, nil
}
func appendReflectSlice(args []interface{}, v reflect.Value, vlen int) []interface{} {

7
vendor/github.com/jmoiron/sqlx/go.mod generated vendored Normal file
View file

@ -0,0 +1,7 @@
module github.com/jmoiron/sqlx
require (
github.com/go-sql-driver/mysql v1.4.0
github.com/lib/pq v1.0.0
github.com/mattn/go-sqlite3 v1.9.0
)

6
vendor/github.com/jmoiron/sqlx/go.sum generated vendored Normal file
View file

@ -0,0 +1,6 @@
github.com/go-sql-driver/mysql v1.4.0 h1:7LxgVwFb2hIQtMm87NdgAVfXjnt4OePseqT1tKx+opk=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/4=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=

View file

@ -259,6 +259,10 @@ func compileNamedQuery(qs []byte, bindType int) (query string, names []string, e
}
inName = true
name = []byte{}
} else if inName && i > 0 && b == '=' {
rebound = append(rebound, ':', '=')
inName = false
continue
// if we're in a name, and this is an allowed character, continue
} else if inName && (unicode.IsOneOf(allowedBindRunes, rune(b)) || b == '_' || b == '.') && i != last {
// append the byte to the name if we are in a name and not on the last byte
@ -287,6 +291,12 @@ func compileNamedQuery(qs []byte, bindType int) (query string, names []string, e
rebound = append(rebound, byte(b))
}
currentVar++
case AT:
rebound = append(rebound, '@', 'p')
for _, b := range strconv.Itoa(currentVar) {
rebound = append(rebound, byte(b))
}
currentVar++
}
// add this byte to string unless it was not part of the name
if i != last {

View file

@ -471,8 +471,6 @@ func (tx *Tx) Stmtx(stmt interface{}) *Stmt {
s = v.Stmt
case *Stmt:
s = v.Stmt
case sql.Stmt:
s = &v
case *sql.Stmt:
s = v
default:

View file

@ -217,8 +217,6 @@ func (tx *Tx) StmtxContext(ctx context.Context, stmt interface{}) *Stmt {
s = v.Stmt
case *Stmt:
s = v.Stmt
case sql.Stmt:
s = &v
case *sql.Stmt:
s = v
default: