1
0
Fork 0
mirror of https://github.com/documize/community.git synced 2025-07-18 20:59:43 +02:00

Install help docs as part of onboarding process

Our own docs are installed as sample data!

Refactored search reindexing code.
This commit is contained in:
HarveyKandola 2019-06-24 17:01:56 +01:00
parent 411f64c359
commit 3621e2fb79
26 changed files with 1957 additions and 1280 deletions

View file

@ -46,6 +46,10 @@ robocopy /e /NFL /NDL /NJH core\database\scripts\mysql embed\bindata\scripts\mys
robocopy /e /NFL /NDL /NJH core\database\scripts\postgresql embed\bindata\scripts\postgresql robocopy /e /NFL /NDL /NJH core\database\scripts\postgresql embed\bindata\scripts\postgresql
robocopy /e /NFL /NDL /NJH core\database\scripts\sqlserver embed\bindata\scripts\sqlserver robocopy /e /NFL /NDL /NJH core\database\scripts\sqlserver embed\bindata\scripts\sqlserver
rd /s /q embed\bindata\onboard
mkdir embed\bindata\onboard
robocopy /e /NFL /NDL /NJH domain\onboard\*.json embed\bindata\onboard
echo "Generating in-memory static assets..." echo "Generating in-memory static assets..."
go get -u github.com/jteeuwen/go-bindata/... go get -u github.com/jteeuwen/go-bindata/...
go get -u github.com/elazarl/go-bindata-assetfs/... go get -u github.com/elazarl/go-bindata-assetfs/...

View file

@ -38,6 +38,10 @@ cp -r core/database/scripts/mysql/*.sql embed/bindata/scripts/mysql
cp -r core/database/scripts/postgresql/*.sql embed/bindata/scripts/postgresql cp -r core/database/scripts/postgresql/*.sql embed/bindata/scripts/postgresql
cp -r core/database/scripts/sqlserver/*.sql embed/bindata/scripts/sqlserver cp -r core/database/scripts/sqlserver/*.sql embed/bindata/scripts/sqlserver
rm -rf embed/bindata/onboard
mkdir -p embed/bindata/onboard
cp -r domain/onboard/*.json embed/bindata/onboard
echo "Generating in-memory static assets..." echo "Generating in-memory static assets..."
# go get -u github.com/jteeuwen/go-bindata/... # go get -u github.com/jteeuwen/go-bindata/...
# go get -u github.com/elazarl/go-bindata-assetfs/... # go get -u github.com/elazarl/go-bindata-assetfs/...

View file

@ -133,7 +133,6 @@ func setupAccount(rt *env.Runtime, completion onboardRequest, serial string) (er
return return
} }
//accountTitle := "This is where you will find documentation for your all projects. You can customize this message from the settings screen."
salt := secrets.GenerateSalt() salt := secrets.GenerateSalt()
password := secrets.GeneratePassword(completion.Password, salt) password := secrets.GeneratePassword(completion.Password, salt)
@ -147,7 +146,7 @@ func setupAccount(rt *env.Runtime, completion onboardRequest, serial string) (er
orgID, completion.Company, completion.CompanyLong, completion.Message, completion.URL, completion.Email, serial, activationKey) orgID, completion.Company, completion.CompanyLong, completion.Message, completion.URL, completion.Email, serial, activationKey)
if err != nil { if err != nil {
rt.Log.Error("INSERT INTO dmz_org failed", err) rt.Log.Error("INSERT INTO dmz_org failed", err)
tx.Rollback() rt.Rollback(tx)
return return
} }
@ -157,7 +156,7 @@ func setupAccount(rt *env.Runtime, completion onboardRequest, serial string) (er
userID, completion.Firstname, completion.Lastname, completion.Email, stringutil.MakeInitials(completion.Firstname, completion.Lastname), salt, password, true) userID, completion.Firstname, completion.Lastname, completion.Email, stringutil.MakeInitials(completion.Firstname, completion.Lastname), salt, password, true)
if err != nil { if err != nil {
rt.Log.Error("INSERT INTO dmz_user failed", err) rt.Log.Error("INSERT INTO dmz_user failed", err)
tx.Rollback() rt.Rollback(tx)
return return
} }
@ -167,80 +166,7 @@ func setupAccount(rt *env.Runtime, completion onboardRequest, serial string) (er
accountID, userID, orgID, true, true, true, true) accountID, userID, orgID, true, true, true, true)
if err != nil { if err != nil {
rt.Log.Error("INSERT INTO dmz_user_account failed", err) rt.Log.Error("INSERT INTO dmz_user_account failed", err)
tx.Rollback() rt.Rollback(tx)
return
}
// Create space.
spaceID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_space (c_refid, c_orgid, c_userid, c_name, c_type) VALUES (?, ?, ?, ?, ?)", rt.StoreProvider.Type()),
spaceID, orgID, userID, "Welcome", 2)
if err != nil {
rt.Log.Error("INSERT INTO dmz_space failed", err)
tx.Rollback()
return
}
// Assign permissions to space.
perms := []string{"view", "manage", "own", "doc-add", "doc-edit", "doc-delete", "doc-move", "doc-copy", "doc-template", "doc-approve", "doc-version", "doc-lifecycle"}
for _, p := range perms {
_, err = tx.Exec(RebindParams("INSERT INTO dmz_permission (c_orgid, c_who, c_whoid, c_action, c_scope, c_location, c_refid) VALUES (?, ?, ?, ?, ?, ?, ?)", rt.StoreProvider.Type()),
orgID, "user", userID, p, "object", "space", spaceID)
if err != nil {
rt.Log.Error("INSERT INTO dmz_permission failed", err)
tx.Rollback()
return
}
}
// Create some user groups.
groupDevID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group (c_refid, c_orgid, c_name, c_desc) VALUES (?, ?, ?, ?)", rt.StoreProvider.Type()),
groupDevID, orgID, "Technology", "On-site and remote development teams")
if err != nil {
rt.Log.Error("INSERT INTO dmz_group failed", err)
tx.Rollback()
return
}
groupProjectID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group (c_refid, c_orgid, c_name, c_desc) VALUES (?, ?, ?, ?)", rt.StoreProvider.Type()),
groupProjectID, orgID, "Project Management", "HQ PMO and Account Management departments")
if err != nil {
rt.Log.Error("INSERT INTO dmz_group failed", err)
tx.Rollback()
return
}
groupBackofficeID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group (c_refid, c_orgid, c_name, c_desc) VALUES (?, ?, ?, ?)", rt.StoreProvider.Type()),
groupBackofficeID, orgID, "Back Office", "Finance and HR people")
if err != nil {
rt.Log.Error("INSERT INTO dmz_group failed", err)
tx.Rollback()
return
}
// Join the user groups.
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group_member (c_orgid, c_groupid, c_userid) VALUES (?, ?, ?)", rt.StoreProvider.Type()),
orgID, groupDevID, userID)
if err != nil {
rt.Log.Error("INSERT INTO dmz_group_member failed", err)
tx.Rollback()
return
}
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group_member (c_orgid, c_groupid, c_userid) VALUES (?, ?, ?)", rt.StoreProvider.Type()),
orgID, groupProjectID, userID)
if err != nil {
rt.Log.Error("INSERT INTO dmz_group_member failed", err)
tx.Rollback()
return
}
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group_member (c_orgid, c_groupid, c_userid) VALUES (?, ?, ?)", rt.StoreProvider.Type()),
orgID, groupBackofficeID, userID)
if err != nil {
rt.Log.Error("INSERT INTO dmz_group_member failed", err)
tx.Rollback()
return return
} }

View file

@ -202,100 +202,11 @@ func (h *Handler) Sitemap(w http.ResponseWriter, r *http.Request) {
response.WriteBytes(w, buffer.Bytes()) response.WriteBytes(w, buffer.Bytes())
} }
// Reindex indexes all documents and attachments.
func (h *Handler) Reindex(w http.ResponseWriter, r *http.Request) {
ctx := domain.GetRequestContext(r)
if !ctx.GlobalAdmin {
response.WriteForbiddenError(w)
h.Runtime.Log.Info(fmt.Sprintf("%s attempted search reindex", ctx.UserID))
return
}
go h.rebuildSearchIndex(ctx)
response.WriteEmpty(w)
}
// rebuildSearchIndex indexes all documents and attachments.
func (h *Handler) rebuildSearchIndex(ctx domain.RequestContext) {
method := "meta.rebuildSearchIndex"
docs, err := h.Store.Meta.Documents(ctx)
if err != nil {
h.Runtime.Log.Error(method, err)
return
}
h.Runtime.Log.Info(fmt.Sprintf("Search re-index started for %d documents", len(docs)))
for i := range docs {
d := docs[i]
dc, err := h.Store.Meta.Document(ctx, d)
if err != nil {
h.Runtime.Log.Error(method, err)
// continue
}
at, err := h.Store.Meta.Attachments(ctx, d)
if err != nil {
h.Runtime.Log.Error(method, err)
// continue
}
h.Indexer.IndexDocument(ctx, dc, at)
pages, err := h.Store.Meta.Pages(ctx, d)
if err != nil {
h.Runtime.Log.Error(method, err)
// continue
}
for j := range pages {
h.Indexer.IndexContent(ctx, pages[j])
}
// Log process every N documents.
if i%100 == 0 {
h.Runtime.Log.Info(fmt.Sprintf("Search re-indexed %d documents...", i))
}
}
h.Runtime.Log.Info(fmt.Sprintf("Search re-index finished for %d documents", len(docs)))
}
// SearchStatus returns state of search index
func (h *Handler) SearchStatus(w http.ResponseWriter, r *http.Request) {
method := "meta.SearchStatus"
ctx := domain.GetRequestContext(r)
if !ctx.GlobalAdmin {
response.WriteForbiddenError(w)
h.Runtime.Log.Info(fmt.Sprintf("%s attempted get of search status", ctx.UserID))
return
}
count, err := h.Store.Meta.SearchIndexCount(ctx)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
var ss = searchStatus{Entries: count}
response.WriteJSON(w, ss)
}
type sitemapItem struct { type sitemapItem struct {
URL string URL string
Date string Date string
} }
type searchStatus struct {
Entries int `json:"entries"`
}
// Themes returns list of installed UI themes. // Themes returns list of installed UI themes.
func (h *Handler) Themes(w http.ResponseWriter, r *http.Request) { func (h *Handler) Themes(w http.ResponseWriter, r *http.Request) {
type theme struct { type theme struct {

View file

@ -0,0 +1 @@
[{"id":"bh2sku21b54as00dsbk0","created":"2019-01-21T13:34:49Z","revised":"2019-01-21T13:34:49Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WNEpptWJ9AABRnha","category":"Maintenance"},{"id":"bh2skra1b54as00dsbjg","created":"2019-01-21T13:34:37Z","revised":"2019-01-21T13:34:37Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WNEpptWJ9AABRnha","category":"Setup"},{"id":"bh2s7ja1b54as00dsbj0","created":"2019-01-21T13:06:21Z","revised":"2019-01-21T13:06:21Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WNEpptWJ9AABRnha","category":"Users \u0026 Groups"},{"id":"bh2s7hi1b54as00dsbig","created":"2019-01-21T13:06:14Z","revised":"2019-01-21T13:06:14Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WNEpptWJ9AABRnha","category":"Integrations"},{"id":"bh2r4sq1b54as00dsb3g","created":"2019-01-21T11:52:20Z","revised":"2019-01-21T11:52:20Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WNEpptWJ9AABRnha","category":"Authentication"},{"id":"WtXOU7dMOwABe2UO","created":"2018-04-17T10:37:07Z","revised":"2018-04-17T10:37:07Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WtXNJ7dMOwABe2UK","category":"Enterprise"},{"id":"bgtmocgjkjf0hvnjli30","created":"2019-01-13T16:49:54Z","revised":"2019-01-13T16:49:54Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"betnq7venbkq5cjhoc0g","category":"Breaking Changes"},{"id":"bgtmo7ojkjf0hvnjli2g","created":"2019-01-13T16:49:36Z","revised":"2019-01-13T16:49:36Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"betnq7venbkq5cjhoc0g","category":"New Features"},{"id":"bgtmo50jkjf0hvnjli20","created":"2019-01-13T16:49:25Z","revised":"2019-01-13T16:49:25Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"betnq7venbkq5cjhoc0g","category":"Enhancements"},{"id":"bgtmo3gjkjf0hvnjli1g","created":"2019-01-13T16:49:18Z","revised":"2019-01-13T16:49:18Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"betnq7venbkq5cjhoc0g","category":"Bug 
Fixes"},{"id":"bgtmo1ojkjf0hvnjli10","created":"2019-01-13T16:49:11Z","revised":"2019-01-13T16:49:11Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"betnq7venbkq5cjhoc0g","category":"Community Edition"},{"id":"bgtmnvojkjf0hvnjli0g","created":"2019-01-13T16:49:03Z","revised":"2019-01-13T16:49:03Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"betnq7venbkq5cjhoc0g","category":"Enterprise Edition"}]

File diff suppressed because one or more lines are too long

1
domain/onboard/dmz_doc.json Executable file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1 @@
[{"id":"WQnJXPMKrwABT4Nf","created":"2018-03-27T16:56:30Z","revised":"2018-03-27T16:56:30Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"VzO9ZqMOCgABGyfW","userId":"w4Nm8Fgw","linkType":"document","sourceDocumentId":"V16L08ucxwABhZF6","sourcePageId":"WQnJMvMKrwABT4Na","targetDocumentId":"VzSL8cVZ4QAB2B4Y","targetId":"","externalId":"","orphan":false},{"id":"Wt9Cf3cHWQABMuQa","created":"2018-04-24T14:43:20Z","revised":"2018-04-24T14:43:20Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WtXNJ7dMOwABe2UK","userId":"w4Nm8Fgw","linkType":"file","sourceDocumentId":"WtWbRbdMOwABe2SK","sourcePageId":"WtW317dMOwABe2TF","targetDocumentId":"WtWbRbdMOwABe2SK","targetId":"Wt9CaXcHWQABMuQM","externalId":"","orphan":false},{"id":"TtwPpzAUe4YeHYkC","created":"2018-07-11T00:17:14Z","revised":"2018-07-11T00:17:14Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"Wr5izCXOmgABcntP","userId":"w4Nm8Fgw","linkType":"network","sourceDocumentId":"W0VMb2zXqwABN4rn","sourcePageId":"W0VMdWzXqwABN4ro","targetDocumentId":"W0VMb2zXqwABN4rn","targetId":"","externalId":"","orphan":false},{"id":"W316i7J3xwAB8UmW","created":"2018-08-22T15:00:36Z","revised":"2018-08-22T15:00:36Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"VzO9ZqMOCgABGyfW","userId":"w4Nm8Fgw","linkType":"document","sourceDocumentId":"W31Sv7J3xwAB8Ult","sourcePageId":"W314jLJ3xwAB8UmL","targetDocumentId":"W316KbJ3xwAB8UmO","targetId":"","externalId":"","orphan":false},{"id":"bh7hek8cm9o1pnrmkf80","created":"2019-01-28T14:58:27Z","revised":"2019-01-28T14:58:27Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WOzFU_MXigAB6sIH","userId":"w4Nm8Fgw","linkType":"document","sourceDocumentId":"bh70rt0cm9o1pnrmk8o0","sourcePageId":"bh7hce0cm9o1pnrmkf5g","targetDocumentId":"bh5ikpa1b54as00dt4o0","targetId":"","externalId":"","orphan":false},{"id":"bh7lj28cm9o1pnrmkit0","created":"2019-01-28T19:35:44Z","revised":"2019-01-28T19:35:44Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WOzFU_MXigAB6sIH","userId":"w4Nm8Fgw","linkType":"section","sourceDocumentId":"WOvEC_MXigA
B6sE1","sourcePageId":"WOyuEvMXigAB6sF_","targetDocumentId":"WOvEC_MXigAB6sE1","targetId":"WOytzvMXigAB6sFl","externalId":"","orphan":false},{"id":"bh7lq58cm9o1pnrmkj50","created":"2019-01-28T19:50:55Z","revised":"2019-01-28T19:50:55Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WtXNJ7dMOwABe2UK","userId":"w4Nm8Fgw","linkType":"document","sourceDocumentId":"bh7lnugcm9o1pnrmkj1g","sourcePageId":"bh7lp0gcm9o1pnrmkj3g","targetDocumentId":"WtWbRbdMOwABe2SK","targetId":"","externalId":"","orphan":false},{"id":"bh8337gcm9o1pnrmknm0","created":"2019-01-29T11:11:07Z","revised":"2019-01-29T11:11:07Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"WOzFU_MXigAB6sIH","userId":"w4Nm8Fgw","linkType":"section","sourceDocumentId":"bh82vl8cm9o1pnrmkngg","sourcePageId":"bh8307ocm9o1pnrmknk0","targetDocumentId":"WOvEC_MXigAB6sE1","targetId":"bh7ll9gcm9o1pnrmkj00","externalId":"","orphan":false},{"id":"W317bLJ3xwAB8Umf","created":"2019-03-05T09:01:06Z","revised":"2019-03-05T09:01:06Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"VzO9ZqMOCgABGyfW","userId":"w4Nm8Fgw","linkType":"document","sourceDocumentId":"W31Sv7J3xwAB8Ult","sourcePageId":"W317UrJ3xwAB8Uma","targetDocumentId":"V16L08ucxwABhZF6","targetId":"","externalId":"","orphan":false},{"id":"bj3gka67m02fu20qv23g","created":"2019-04-29T14:34:03Z","revised":"2019-04-29T14:34:03Z","orgId":"bk54k53c1osmepq14jq0","spaceId":"VzO9ZqMOCgABGyfW","userId":"w4Nm8Fgw","linkType":"document","sourceDocumentId":"V16L08ucxwABhZF6","sourcePageId":"WQnJtPMKrwABT4Ni","targetDocumentId":"VzSL8cVZ4QAB2B4Y","targetId":"","externalId":"","orphan":false}]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

1
domain/onboard/dmz_space.json Executable file
View file

@ -0,0 +1 @@
[{"id":"leFaXwlU","created":"2016-05-11T23:03:47Z","revised":"2019-03-14T15:39:39Z","name":"Extending, Compiling \u0026 Architecture","desc":"Building integrations, compiling from source and understanding product architecture","orgId":"bk54k53c1osmepq14jq0","userId":"w4Nm8Fgw","labelId":"bgt3n9bgt73a1fb2llp0","spaceType":1,"countCategory":0,"countContent":4,"icon":"dmeta-meta-devops","lifecycle":0,"likes":"Did this help you?"},{"id":"betnq7venbkq5cjhoc0g","created":"2018-10-08T15:51:59Z","revised":"2019-06-11T10:33:44Z","name":"Release Notes","desc":"Documentation for what has changed within Documize including new features, enhancements, fixes and underlying technology stack improvements","orgId":"bk54k53c1osmepq14jq0","userId":"w4Nm8Fgw","labelId":"bgt3l3rgt73a1fb2llog","spaceType":1,"countCategory":6,"countContent":6,"icon":"dmeta-meta-announce","lifecycle":0,"likes":""},{"id":"WtXNJ7dMOwABe2UK","created":"2018-04-17T10:32:07Z","revised":"2019-01-12T19:10:37Z","name":"API","desc":"Programmatically access and manipulate Documize user data and configuration options","orgId":"bk54k53c1osmepq14jq0","userId":"w4Nm8Fgw","labelId":"bgt3n9bgt73a1fb2llp0","spaceType":1,"countCategory":1,"countContent":6,"icon":"dmeta-meta-flow","lifecycle":0,"likes":"Did this help you?"},{"id":"WNEpptWJ9AABRnha","created":"2017-03-21T13:24:55Z","revised":"2019-01-22T14:03:30Z","name":"Administration Guides","desc":"Managing all aspects of your Documize instance","orgId":"bk54k53c1osmepq14jq0","userId":"w4Nm8Fgw","labelId":"bgt3l3rgt73a1fb2llog","spaceType":1,"countCategory":5,"countContent":13,"icon":"dmeta-meta-tune","lifecycle":0,"likes":"Did this help you?"}]

View file

@ -0,0 +1 @@
[{"id":"bgt3n9bgt73a1fb2llp0","created":"2019-01-12T19:10:29Z","revised":"2019-01-12T19:12:53Z","orgId":"bk54k53c1osmepq14jq0","name":"Developers","color":"#880e4f"},{"id":"bgt3l3rgt73a1fb2llog","created":"2019-01-12T19:05:52Z","revised":"2019-01-12T19:12:50Z","orgId":"bk54k53c1osmepq14jq0","name":"Administration","color":"#ef6c00"},{"id":"bgt3kubgt73a1fb2llo0","created":"2019-01-12T19:05:30Z","revised":"2019-01-12T19:05:30Z","orgId":"bk54k53c1osmepq14jq0","name":"Getting Started","color":"#2e7d32"}]

367
domain/onboard/endpoint.go Normal file
View file

@ -0,0 +1,367 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
// Package onboard handles the setup of sample data for a new Documize instance.
package onboard
import (
"database/sql"
"encoding/json"
"fmt"
"net/http"
"github.com/pkg/errors"
"github.com/documize/community/core/env"
"github.com/documize/community/core/response"
"github.com/documize/community/domain"
indexer "github.com/documize/community/domain/search"
"github.com/documize/community/domain/store"
om "github.com/documize/community/model/onboard"
"github.com/documize/community/model/permission"
"github.com/documize/community/server/web"
)
// Handler contains the runtime information such as logging and database.
type Handler struct {
	Runtime *env.Runtime    // logging, database access and product/license info
	Store   *store.Store    // data access layer (onboard content counts, permissions)
	Indexer indexer.Indexer // search index, rebuilt in the background after install
}
// InstallSample inserts sample data into database.
//
// The request is rejected unless the product license is valid, the caller is
// a fully authenticated global administrator, and the database currently has
// no spaces or documents (so live content can never be overwritten). On
// success the search index rebuild is kicked off in the background.
func (h *Handler) InstallSample(w http.ResponseWriter, r *http.Request) {
	ctx := domain.GetRequestContext(r)

	// Reject instances without a valid product license.
	if !h.Runtime.Product.IsValid(ctx) {
		response.WriteBadLicense(w)
		return
	}

	// Sample install is a global-administrator-only operation.
	if !ctx.Administrator || !ctx.Authenticated || !ctx.GlobalAdmin {
		response.WriteForbiddenError(w)
		return
	}

	// Refuse to run against an instance that already holds content.
	// This prevents sample data restore inside an existing live instance.
	spaceCount, docCount := h.Store.Onboard.ContentCounts()
	if spaceCount > 0 || docCount > 0 {
		h.Runtime.Log.Info("Unable to install sample data when database contains spaces/docs")
		response.WriteForbiddenError(w)
		return
	}

	// Pull the sample records out of the embedded assets.
	sample := h.loadSampleData()
	if sample.LoadFailure {
		response.WriteError(w, "Unable to unpack sample data")
		h.Runtime.Log.Info("Unable to unpack sample data")
		return
	}

	sample.Context = ctx
	if err := h.processSampleData(sample); err != nil {
		response.WriteError(w, "Unable to process sample data")
		h.Runtime.Log.Error("Unable to process sample data", err)
		return
	}

	h.Runtime.Log.Info("Onboarding complete")
	h.Runtime.Log.Info("Building search index")
	go h.Indexer.Rebuild(ctx)

	response.WriteEmpty(w)
}
// loadSampleData reads each sample data file stored as an embedded asset and
// decodes it into the matching slice of the returned structure. If any file
// fails to load or parse, data.LoadFailure is set so the caller can abort.
func (h *Handler) loadSampleData() (data om.SampleData) {
	h.loadFile(&data, "dmz_category.json", &data.Category)
	h.loadFile(&data, "dmz_category_member.json", &data.CategoryMember)
	h.loadFile(&data, "dmz_doc.json", &data.Document)
	h.loadFile(&data, "dmz_doc_attachment.json", &data.DocumentAttachment)
	h.loadFile(&data, "dmz_doc_link.json", &data.DocumentLink)
	h.loadFile(&data, "dmz_section.json", &data.Section)
	h.loadFile(&data, "dmz_section_meta.json", &data.SectionMeta)
	h.loadFile(&data, "dmz_space.json", &data.Space)
	h.loadFile(&data, "dmz_space_label.json", &data.SpaceLabel)
	return
}

// loadFile decodes one embedded JSON asset into v, recording any failure on
// data. The data parameter must be a pointer: the previous by-value version
// set LoadFailure on a copy, so a missing or corrupt asset was silently
// ignored and loadSampleData always reported success.
func (h *Handler) loadFile(data *om.SampleData, filename string, v interface{}) {
	err := h.unpackFile(filename, v)
	if err != nil {
		data.LoadFailure = true
	}
}
// unpackFile reads the named embedded asset and unmarshals its content as
// JSON into v. Errors are logged here and also returned, wrapped with the
// offending filename, so the caller can flag the load failure.
func (h *Handler) unpackFile(filename string, v interface{}) (err error) {
	raw, err := web.Embed.Asset("bindata/onboard/" + filename)
	if err != nil {
		err = errors.Wrap(err, fmt.Sprintf("missing %s", filename))
		h.Runtime.Log.Error("failed to load file", err)
		return
	}

	if err = json.Unmarshal(raw, &v); err != nil {
		err = errors.Wrap(err, fmt.Sprintf("failed to read %s as JSON", filename))
		h.Runtime.Log.Error("failed to load file", err)
		return
	}

	return nil
}
// processSampleData inserts the previously loaded sample records into the
// database as a single transaction: space labels, spaces, categories,
// category membership, space/category permissions, documents, attachments,
// links, sections and section meta. Any failure rolls the transaction back
// and returns a wrapped error; nothing is committed unless every record
// installs cleanly. The search index rebuild is triggered by the caller.
func (h *Handler) processSampleData(data om.SampleData) (err error) {
	// Previously the StartTx result was discarded, so a failed transaction
	// start would panic on the first Exec. Fail fast instead.
	var ok bool
	data.Context.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
	if !ok {
		err = errors.New("unable to start transaction for sample data install")
		return
	}

	// Space Label.
	h.Runtime.Log.Info(fmt.Sprintf("Installing space label (%d)", len(data.SpaceLabel)))
	for i := range data.SpaceLabel {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_space_label
			(c_refid, c_orgid, c_name, c_color, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?)`),
			data.SpaceLabel[i].RefID, data.Context.OrgID, data.SpaceLabel[i].Name,
			data.SpaceLabel[i].Color, data.SpaceLabel[i].Created, data.SpaceLabel[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert space label %s", data.SpaceLabel[i].RefID))
			return
		}
	}

	// Space.
	h.Runtime.Log.Info(fmt.Sprintf("Installing space (%d)", len(data.Space)))
	for i := range data.Space {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_space
			(c_refid, c_name, c_orgid, c_userid, c_type, c_lifecycle,
			c_likes, c_icon, c_desc, c_count_category, c_count_content,
			c_labelid, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
			data.Space[i].RefID, data.Space[i].Name, data.Context.OrgID,
			data.Context.UserID, data.Space[i].Type, data.Space[i].Lifecycle,
			data.Space[i].Likes, data.Space[i].Icon, data.Space[i].Description,
			data.Space[i].CountCategory, data.Space[i].CountContent,
			data.Space[i].LabelID, data.Space[i].Created, data.Space[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert space record %s", data.Space[i].RefID))
			return
		}
	}

	// Category.
	h.Runtime.Log.Info(fmt.Sprintf("Installing category (%d)", len(data.Category)))
	for i := range data.Category {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_category (c_refid, c_orgid, c_spaceid, c_name, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?)`),
			data.Category[i].RefID, data.Context.OrgID, data.Category[i].SpaceID, data.Category[i].Name,
			data.Category[i].Created, data.Category[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert category %s", data.Category[i].RefID))
			return
		}
	}

	// Category member (assigns documents to categories).
	h.Runtime.Log.Info(fmt.Sprintf("Installing category member (%d)", len(data.CategoryMember)))
	for i := range data.CategoryMember {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_category_member
			(c_refid, c_orgid, c_categoryid, c_spaceid, c_docid, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?, ?)`),
			data.CategoryMember[i].RefID, data.Context.OrgID, data.CategoryMember[i].CategoryID,
			data.CategoryMember[i].SpaceID, data.CategoryMember[i].DocumentID,
			data.CategoryMember[i].Created, data.CategoryMember[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			// Report the member's own ID; the previous message reported
			// data.Category[i].RefID, which belongs to a different slice.
			err = errors.Wrap(err, fmt.Sprintf("unable to insert category member %s", data.CategoryMember[i].RefID))
			return
		}
	}

	// Assign permissions per space.
	perm := permission.Permission{}
	perm.OrgID = data.Context.OrgID
	perm.Who = permission.UserPermission
	perm.WhoID = data.Context.UserID
	perm.Scope = permission.ScopeRow
	perm.Location = permission.LocationSpace

	for i := range data.Space {
		perm.RefID = data.Space[i].RefID
		perm.Action = "" // we send array for actions below

		err = h.Store.Permission.AddPermissions(data.Context, perm,
			permission.SpaceOwner, permission.SpaceManage, permission.SpaceView,
			permission.DocumentAdd, permission.DocumentCopy, permission.DocumentDelete,
			permission.DocumentEdit, permission.DocumentMove,
			permission.DocumentTemplate, permission.DocumentApprove,
			permission.DocumentVersion, permission.DocumentLifecycle)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert space permission %s", data.Space[i].RefID))
			return
		}
	}

	// Assign permissions per category.
	for i := range data.Category {
		pc := permission.Permission{}
		pc.OrgID = data.Context.OrgID
		pc.Who = permission.UserPermission
		pc.WhoID = data.Context.UserID
		pc.Scope = permission.ScopeRow
		pc.Location = permission.LocationCategory
		pc.RefID = data.Category[i].RefID
		pc.Action = permission.CategoryView

		err = h.Store.Permission.AddPermission(data.Context, pc)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert category permission %s", data.Category[i].RefID))
			return
		}
	}

	// Document.
	h.Runtime.Log.Info(fmt.Sprintf("Installing document (%d)", len(data.Document)))
	for i := range data.Document {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_doc
			(c_refid, c_orgid, c_spaceid, c_userid, c_job, c_location,
			c_name, c_desc, c_slug, c_tags, c_template, c_protection, c_approval,
			c_lifecycle, c_versioned, c_versionid, c_versionorder, c_groupid, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
			data.Document[i].RefID, data.Context.OrgID, data.Document[i].SpaceID,
			data.Context.UserID, data.Document[i].Job,
			data.Document[i].Location, data.Document[i].Name, data.Document[i].Excerpt,
			data.Document[i].Slug, data.Document[i].Tags,
			data.Document[i].Template, data.Document[i].Protection,
			data.Document[i].Approval, data.Document[i].Lifecycle,
			data.Document[i].Versioned, data.Document[i].VersionID,
			data.Document[i].VersionOrder, data.Document[i].GroupID,
			data.Document[i].Created, data.Document[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert document %s", data.Document[i].RefID))
			return
		}
	}

	// Document attachment.
	h.Runtime.Log.Info(fmt.Sprintf("Installing document attachment (%d)", len(data.DocumentAttachment)))
	for i := range data.DocumentAttachment {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_doc_attachment
			(c_refid, c_orgid, c_docid, c_sectionid, c_job, c_fileid,
			c_filename, c_data, c_extension, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
			data.DocumentAttachment[i].RefID, data.Context.OrgID,
			data.DocumentAttachment[i].DocumentID, data.DocumentAttachment[i].SectionID,
			data.DocumentAttachment[i].Job, data.DocumentAttachment[i].FileID,
			data.DocumentAttachment[i].Filename,
			data.DocumentAttachment[i].Data, data.DocumentAttachment[i].Extension,
			data.DocumentAttachment[i].Created, data.DocumentAttachment[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert document attachment %s", data.DocumentAttachment[i].RefID))
			return
		}
	}

	// Document link.
	h.Runtime.Log.Info(fmt.Sprintf("Installing document link (%d)", len(data.DocumentLink)))
	for i := range data.DocumentLink {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_doc_link
			(c_refid, c_orgid, c_spaceid, c_userid, c_sourcedocid, c_sourcesectionid,
			c_targetdocid, c_targetid, c_externalid, c_type, c_orphan, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
			data.DocumentLink[i].RefID, data.Context.OrgID, data.DocumentLink[i].SpaceID,
			data.Context.UserID, data.DocumentLink[i].SourceDocumentID, data.DocumentLink[i].SourceSectionID,
			data.DocumentLink[i].TargetDocumentID, data.DocumentLink[i].TargetID, data.DocumentLink[i].ExternalID,
			data.DocumentLink[i].LinkType, data.DocumentLink[i].Orphan,
			data.DocumentLink[i].Created, data.DocumentLink[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert document link %s", data.DocumentLink[i].RefID))
			return
		}
	}

	// Section.
	h.Runtime.Log.Info(fmt.Sprintf("Installing section (%d)", len(data.Section)))
	for i := range data.Section {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_section
			(c_refid, c_orgid, c_docid, c_userid, c_contenttype, c_type, c_level, c_name, c_body,
			c_revisions, c_sequence, c_templateid, c_status, c_relativeid, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
			data.Section[i].RefID, data.Context.OrgID, data.Section[i].DocumentID,
			data.Context.UserID,
			data.Section[i].ContentType, data.Section[i].Type,
			data.Section[i].Level, data.Section[i].Name,
			data.Section[i].Body, data.Section[i].Revisions,
			data.Section[i].Sequence, data.Section[i].TemplateID,
			data.Section[i].Status, data.Section[i].RelativeID,
			data.Section[i].Created, data.Section[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert section %s", data.Section[i].RefID))
			return
		}
	}

	// Section meta.
	h.Runtime.Log.Info(fmt.Sprintf("Installing section meta (%d)", len(data.SectionMeta)))
	for i := range data.SectionMeta {
		_, err = data.Context.Transaction.Exec(h.Runtime.Db.Rebind(`
			INSERT INTO dmz_section_meta
			(c_sectionid, c_orgid, c_userid, c_docid, c_rawbody,
			c_config, c_external, c_created, c_revised)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`),
			data.SectionMeta[i].SectionID, data.Context.OrgID, data.Context.UserID,
			data.SectionMeta[i].DocumentID,
			data.SectionMeta[i].RawBody, data.SectionMeta[i].Config,
			data.SectionMeta[i].ExternalSource,
			data.SectionMeta[i].Created, data.SectionMeta[i].Revised)
		if err != nil {
			h.Runtime.Rollback(data.Context.Transaction)
			err = errors.Wrap(err, fmt.Sprintf("unable to insert section meta %s", data.SectionMeta[i].SectionID))
			return
		}
	}

	// Commit the lot; previously a failed commit returned a nil error,
	// so the caller reported success despite nothing being installed.
	ok = h.Runtime.Commit(data.Context.Transaction)
	if !ok {
		h.Runtime.Rollback(data.Context.Transaction)
		err = errors.New("unable to commit sample data install")
		return
	}

	return nil
}

46
domain/onboard/store.go Normal file
View file

@ -0,0 +1,46 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
// Package onboard handles the setup of sample data for a new Documize instance.
package onboard
import (
"github.com/documize/community/domain/store"
)
// Store provides data access used during onboarding of sample data.
// (The previous comment said "version information", a copy-paste leftover:
// this store only exposes content counts for the install safety check.)
type Store struct {
	store.Context
}
// ContentCounts returns the number of spaces and documents in the instance.
//
// On any query error the counts keep a pessimistic non-zero default (10) so
// that callers treat an unknown state as "has content" and refuse
// destructive operations such as sample data install.
func (s Store) ContentCounts() (spaces, docs int) {
	// By default we assume there is content in case of error condition.
	spaces, docs = 10, 10

	var count int
	if err := s.Runtime.Db.QueryRow("SELECT COUNT(*) FROM dmz_space").Scan(&count); err == nil {
		spaces = count
	}
	if err := s.Runtime.Db.QueryRow("SELECT COUNT(*) FROM dmz_doc").Scan(&count); err == nil {
		docs = count
	}

	return
}

71
domain/search/endpoint.go Normal file
View file

@ -0,0 +1,71 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package search
import (
"fmt"
"net/http"
"github.com/documize/community/core/env"
"github.com/documize/community/core/response"
"github.com/documize/community/domain"
"github.com/documize/community/domain/store"
)
// Handler contains the runtime information such as logging and database.
type Handler struct {
Runtime *env.Runtime // shared runtime: logger, database, product config
Store *store.Store // data access layer
Indexer Indexer // search index maintenance engine (rebuild/update)
}
// Reindex indexes all documents and attachments.
// Restricted to global administrators; the rebuild itself runs in the
// background so the HTTP request returns immediately.
func (h *Handler) Reindex(w http.ResponseWriter, r *http.Request) {
	ctx := domain.GetRequestContext(r)

	// Guard: only a global admin may trigger a full re-index.
	if !ctx.GlobalAdmin {
		h.Runtime.Log.Info(fmt.Sprintf("%s attempted search reindex", ctx.UserID))
		response.WriteForbiddenError(w)
		return
	}

	// Fire-and-forget: rebuilding can take a while on large instances.
	go h.Indexer.Rebuild(ctx)

	response.WriteEmpty(w)
}
// Status returns the state of the search index — currently the number
// of entries it holds. Restricted to global administrators.
func (h *Handler) Status(w http.ResponseWriter, r *http.Request) {
	// Fixed: log label previously read "meta.SearchStatus", a leftover
	// from before this endpoint moved into the search package.
	method := "search.Status"
	ctx := domain.GetRequestContext(r)

	// Guard: only a global admin may inspect index status.
	if !ctx.GlobalAdmin {
		response.WriteForbiddenError(w)
		h.Runtime.Log.Info(fmt.Sprintf("%s attempted get of search status", ctx.UserID))
		return
	}

	count, err := h.Store.Meta.SearchIndexCount(ctx)
	if err != nil {
		response.WriteServerError(w, method, err)
		h.Runtime.Log.Error(method, err)
		return
	}

	ss := searchStatus{Entries: count}
	response.WriteJSON(w, ss)
}
// searchStatus is the JSON payload returned by Status.
type searchStatus struct {
Entries int `json:"entries"` // number of rows in the search index
}

View file

@ -13,6 +13,7 @@ package search
import ( import (
"database/sql" "database/sql"
"fmt"
"github.com/documize/community/domain" "github.com/documize/community/domain"
"github.com/documize/community/model/attachment" "github.com/documize/community/model/attachment"
@ -118,6 +119,53 @@ func (m *Indexer) DeleteContent(ctx domain.RequestContext, pageID string) {
m.runtime.Commit(ctx.Transaction) m.runtime.Commit(ctx.Transaction)
} }
// Rebuild recreates all search indexes. It walks every document in the
// tenant and re-indexes the document record, its attachments and its
// sections. Intended to run in the background (see Handler.Reindex);
// per-document failures are logged and skipped so one bad document
// cannot abort the whole run.
func (m *Indexer) Rebuild(ctx domain.RequestContext) {
	method := "search.rebuildSearchIndex"

	docs, err := m.store.Meta.Documents(ctx)
	if err != nil {
		m.runtime.Log.Error(method, err)
		return
	}

	m.runtime.Log.Info(fmt.Sprintf("Search re-index started for %d documents", len(docs)))

	for i := range docs {
		d := docs[i]

		dc, err := m.store.Meta.Document(ctx, d)
		if err != nil {
			// Fixed: previously fell through and indexed a zero-value
			// document record; without the record there is nothing to index.
			m.runtime.Log.Error(method, err)
			continue
		}

		at, err := m.store.Meta.Attachments(ctx, d)
		if err != nil {
			// Best effort: index the document without attachments.
			m.runtime.Log.Error(method, err)
		}

		m.IndexDocument(ctx, dc, at)

		pages, err := m.store.Meta.Pages(ctx, d)
		if err != nil {
			// Best effort: document is indexed even if sections fail to load.
			m.runtime.Log.Error(method, err)
		}
		for j := range pages {
			m.IndexContent(ctx, pages[j])
		}

		// Log progress every N documents.
		if i%100 == 0 {
			m.runtime.Log.Info(fmt.Sprintf("Search re-indexed %d documents...", i))
		}
	}

	m.runtime.Log.Info(fmt.Sprintf("Search re-index finished for %d documents", len(docs)))
}
// FilterCategoryProtected removes search results that cannot be seen by user // FilterCategoryProtected removes search results that cannot be seen by user
// due to document cateogory viewing permissions. // due to document cateogory viewing permissions.
func FilterCategoryProtected(results []sm.QueryResult, cats []category.Category, members []category.Member) (filtered []sm.QueryResult) { func FilterCategoryProtected(results []sm.QueryResult, cats []category.Category, members []category.Member) (filtered []sm.QueryResult) {

View file

@ -53,6 +53,7 @@ type Store struct {
Setting SettingStorer Setting SettingStorer
Space SpaceStorer Space SpaceStorer
User UserStorer User UserStorer
Onboard OnboardStorer
} }
// SpaceStorer defines required methods for space management // SpaceStorer defines required methods for space management
@ -317,3 +318,8 @@ type LabelStorer interface {
Delete(ctx domain.RequestContext, id string) (rows int64, err error) Delete(ctx domain.RequestContext, id string) (rows int64, err error)
RemoveReference(ctx domain.RequestContext, labelID string) (err error) RemoveReference(ctx domain.RequestContext, labelID string) (err error)
} }
// OnboardStorer defines required methods for enterprise customer onboarding process.
type OnboardStorer interface {
// ContentCounts returns the number of spaces and documents present —
// presumably used to decide whether sample data should be installed
// into an empty instance; confirm against the onboard handler.
ContentCounts() (spaces, docs int)
}

View file

@ -36,6 +36,7 @@ import (
pin "github.com/documize/community/domain/pin" pin "github.com/documize/community/domain/pin"
search "github.com/documize/community/domain/search" search "github.com/documize/community/domain/search"
setting "github.com/documize/community/domain/setting" setting "github.com/documize/community/domain/setting"
"github.com/documize/community/domain/onboard"
space "github.com/documize/community/domain/space" space "github.com/documize/community/domain/space"
"github.com/documize/community/domain/store" "github.com/documize/community/domain/store"
user "github.com/documize/community/domain/user" user "github.com/documize/community/domain/user"
@ -155,6 +156,11 @@ func SetMySQLProvider(r *env.Runtime, s *store.Store) {
labelStore := label.Store{} labelStore := label.Store{}
labelStore.Runtime = r labelStore.Runtime = r
s.Label = labelStore s.Label = labelStore
// New user onboarding.
onboardStore := onboard.Store{}
onboardStore.Runtime = r
s.Onboard = onboardStore
} }
// MySQLProvider supports MySQL 5.7.x and 8.0.x versions. // MySQLProvider supports MySQL 5.7.x and 8.0.x versions.

View file

@ -28,6 +28,7 @@ import (
label "github.com/documize/community/domain/label" label "github.com/documize/community/domain/label"
link "github.com/documize/community/domain/link" link "github.com/documize/community/domain/link"
meta "github.com/documize/community/domain/meta" meta "github.com/documize/community/domain/meta"
"github.com/documize/community/domain/onboard"
org "github.com/documize/community/domain/organization" org "github.com/documize/community/domain/organization"
page "github.com/documize/community/domain/page" page "github.com/documize/community/domain/page"
permission "github.com/documize/community/domain/permission" permission "github.com/documize/community/domain/permission"
@ -153,6 +154,11 @@ func SetPostgreSQLProvider(r *env.Runtime, s *store.Store) {
labelStore := label.Store{} labelStore := label.Store{}
labelStore.Runtime = r labelStore.Runtime = r
s.Label = labelStore s.Label = labelStore
// New user onboarding.
onboardStore := onboard.Store{}
onboardStore.Runtime = r
s.Onboard = onboardStore
} }
// Type returns name of provider // Type returns name of provider

View file

@ -29,6 +29,7 @@ import (
label "github.com/documize/community/domain/label" label "github.com/documize/community/domain/label"
link "github.com/documize/community/domain/link" link "github.com/documize/community/domain/link"
meta "github.com/documize/community/domain/meta" meta "github.com/documize/community/domain/meta"
"github.com/documize/community/domain/onboard"
org "github.com/documize/community/domain/organization" org "github.com/documize/community/domain/organization"
page "github.com/documize/community/domain/page" page "github.com/documize/community/domain/page"
permission "github.com/documize/community/domain/permission" permission "github.com/documize/community/domain/permission"
@ -165,6 +166,11 @@ func SetSQLServerProvider(r *env.Runtime, s *store.Store) {
labelStore := label.Store{} labelStore := label.Store{}
labelStore.Runtime = r labelStore.Runtime = r
s.Label = labelStore s.Label = labelStore
// New user onboarding.
onboardStore := onboard.Store{}
onboardStore.Runtime = r
s.Onboard = onboardStore
} }
// Type returns name of provider // Type returns name of provider

File diff suppressed because one or more lines are too long

View file

@ -13,6 +13,7 @@ import { inject as service } from '@ember/service';
import Route from '@ember/routing/route'; import Route from '@ember/routing/route';
export default Route.extend({ export default Route.extend({
globalSvc: service('global'),
session: service(), session: service(),
localStorage: service(), localStorage: service(),
@ -27,7 +28,15 @@ export default Route.extend({
model({ token }) { model({ token }) {
this.get("session").authenticate('authenticator:documize', decodeURIComponent(token)) this.get("session").authenticate('authenticator:documize', decodeURIComponent(token))
.then(() => { .then(() => {
this.transitionTo('folders'); if (this.get('localStorage').isFirstRun()) {
this.get('globalSvc').onboard().then(() => {
this.transitionTo('folders');
}).catch(() => {
this.transitionTo('folders');
});
} else {
this.transitionTo('folders');
}
}, () => { }, () => {
this.transitionTo('auth.login'); this.transitionTo('auth.login');
}); });

View file

@ -166,15 +166,15 @@ export default Service.extend({
let token = this.get('sessionService.session.content.authenticated.token'); let token = this.get('sessionService.session.content.authenticated.token');
let uploadUrl = `${url}/global/backup?token=${token}`; let uploadUrl = `${url}/global/backup?token=${token}`;
var xhr = new XMLHttpRequest(); let xhr = new XMLHttpRequest();
xhr.open('POST', uploadUrl); xhr.open('POST', uploadUrl);
xhr.setRequestHeader("Content-Type", "application/json"); xhr.setRequestHeader("Content-Type", "application/json");
xhr.responseType = 'blob'; xhr.responseType = 'blob';
xhr.onload = function() { xhr.onload = function() {
if (this.status == 200) { if (this.status === 200) {
// get binary data as a response // get binary data as a response
var blob = this.response; let blob = this.response;
let a = document.createElement("a"); let a = document.createElement("a");
a.style = "display: none"; a.style = "display: none";
@ -194,18 +194,18 @@ export default Service.extend({
} else { } else {
reject(); reject();
} }
} };
xhr.onerror= function() { xhr.onerror= function() {
reject(); reject();
} };
xhr.send(JSON.stringify(spec)); xhr.send(JSON.stringify(spec));
}); });
}, },
restore(spec, file) { restore(spec, file) {
var data = new FormData(); let data = new FormData();
data.set('restore-file', file); data.set('restore-file', file);
return new EmberPromise((resolve, reject) => { return new EmberPromise((resolve, reject) => {
@ -217,20 +217,20 @@ export default Service.extend({
let token = this.get('sessionService.session.content.authenticated.token'); let token = this.get('sessionService.session.content.authenticated.token');
let uploadUrl = `${url}/global/restore?token=${token}&org=${spec.overwriteOrg}&users=${spec.recreateUsers}`; let uploadUrl = `${url}/global/restore?token=${token}&org=${spec.overwriteOrg}&users=${spec.recreateUsers}`;
var xhr = new XMLHttpRequest(); let xhr = new XMLHttpRequest();
xhr.open('POST', uploadUrl); xhr.open('POST', uploadUrl);
xhr.onload = function() { xhr.onload = function() {
if (this.status == 200) { if (this.status === 200) {
resolve(); resolve();
} else { } else {
reject(); reject();
} }
} };
xhr.onerror= function() { xhr.onerror= function() {
reject(); reject();
} };
xhr.send(data); xhr.send(data);
}); });
@ -242,9 +242,13 @@ export default Service.extend({
method: 'POST', method: 'POST',
contentType: 'text', contentType: 'text',
data: comment, data: comment,
}).then(() => {
return;
}); });
} }
}, },
onboard() {
return this.get('ajax').request(`setup/onboard`, {
method: 'POST',
});
}
}); });

39
model/onboard/model.go Normal file
View file

@ -0,0 +1,39 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
// Package onboard handles the setup of sample data for a new Documize instance.
package onboard
import (
"github.com/documize/community/domain"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/category"
"github.com/documize/community/model/doc"
"github.com/documize/community/model/label"
"github.com/documize/community/model/link"
"github.com/documize/community/model/page"
"github.com/documize/community/model/space"
)
// SampleData holds initial welcome data used during installation process.
// Each slice maps to one target table; the installer inserts them in
// dependency order (spaces before documents before sections, etc.).
type SampleData struct {
LoadFailure bool // signals any data load failure
Context domain.RequestContext // org/user scope the sample data is installed under
// Records to insert, grouped per target entity.
Category []category.Category
CategoryMember []category.Member
Document []doc.Document
DocumentAttachment []attachment.Attachment
DocumentLink []link.Link
Section []page.Page
SectionMeta []page.Meta
Space []space.Space
SpaceLabel []label.Label
}

View file

@ -28,6 +28,7 @@ import (
"github.com/documize/community/domain/label" "github.com/documize/community/domain/label"
"github.com/documize/community/domain/link" "github.com/documize/community/domain/link"
"github.com/documize/community/domain/meta" "github.com/documize/community/domain/meta"
"github.com/documize/community/domain/onboard"
"github.com/documize/community/domain/organization" "github.com/documize/community/domain/organization"
"github.com/documize/community/domain/page" "github.com/documize/community/domain/page"
"github.com/documize/community/domain/permission" "github.com/documize/community/domain/permission"
@ -72,20 +73,23 @@ func RegisterEndpoints(rt *env.Runtime, s *store.Store) {
permission := permission.Handler{Runtime: rt, Store: s} permission := permission.Handler{Runtime: rt, Store: s}
organization := organization.Handler{Runtime: rt, Store: s} organization := organization.Handler{Runtime: rt, Store: s}
//************************************************** searchEndpoint := search.Handler{Runtime: rt, Store: s, Indexer: indexer}
onboardEndpoint := onboard.Handler{Runtime: rt, Store: s, Indexer: indexer}
// **************************************************
// Non-secure public info routes // Non-secure public info routes
//************************************************** // **************************************************
AddPublic(rt, "meta", []string{"GET", "OPTIONS"}, nil, meta.Meta) AddPublic(rt, "meta", []string{"GET", "OPTIONS"}, nil, meta.Meta)
AddPublic(rt, "meta/themes", []string{"GET", "OPTIONS"}, nil, meta.Themes) AddPublic(rt, "meta/themes", []string{"GET", "OPTIONS"}, nil, meta.Themes)
AddPublic(rt, "version", []string{"GET", "OPTIONS"}, nil, AddPublic(rt, "version", []string{"GET", "OPTIONS"}, nil,
func(w http.ResponseWriter, r *http.Request) { func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(rt.Product.Version)) _, _ = w.Write([]byte(rt.Product.Version))
}) })
//************************************************** // **************************************************
// Non-secure public service routes // Non-secure public service routes
//************************************************** // **************************************************
AddPublic(rt, "authenticate/keycloak", []string{"POST", "OPTIONS"}, nil, keycloak.Authenticate) AddPublic(rt, "authenticate/keycloak", []string{"POST", "OPTIONS"}, nil, keycloak.Authenticate)
AddPublic(rt, "authenticate/ldap", []string{"POST", "OPTIONS"}, nil, ldap.Authenticate) AddPublic(rt, "authenticate/ldap", []string{"POST", "OPTIONS"}, nil, ldap.Authenticate)
@ -98,9 +102,9 @@ func RegisterEndpoints(rt *env.Runtime, s *store.Store) {
AddPublic(rt, "logo", []string{"GET", "OPTIONS"}, []string{"default", "true"}, meta.DefaultLogo) AddPublic(rt, "logo", []string{"GET", "OPTIONS"}, []string{"default", "true"}, meta.DefaultLogo)
AddPublic(rt, "logo", []string{"GET", "OPTIONS"}, nil, meta.Logo) AddPublic(rt, "logo", []string{"GET", "OPTIONS"}, nil, meta.Logo)
//************************************************** // **************************************************
// Secured private routes (require authentication) // Secured private routes (require authentication)
//************************************************** // **************************************************
AddPrivate(rt, "import/folder/{spaceID}", []string{"POST", "OPTIONS"}, nil, conversion.UploadConvert) AddPrivate(rt, "import/folder/{spaceID}", []string{"POST", "OPTIONS"}, nil, conversion.UploadConvert)
@ -226,13 +230,15 @@ func RegisterEndpoints(rt *env.Runtime, s *store.Store) {
AddPrivate(rt, "global/smtp", []string{"PUT", "OPTIONS"}, nil, setting.SetSMTP) AddPrivate(rt, "global/smtp", []string{"PUT", "OPTIONS"}, nil, setting.SetSMTP)
AddPrivate(rt, "global/auth", []string{"GET", "OPTIONS"}, nil, setting.AuthConfig) AddPrivate(rt, "global/auth", []string{"GET", "OPTIONS"}, nil, setting.AuthConfig)
AddPrivate(rt, "global/auth", []string{"PUT", "OPTIONS"}, nil, setting.SetAuthConfig) AddPrivate(rt, "global/auth", []string{"PUT", "OPTIONS"}, nil, setting.SetAuthConfig)
AddPrivate(rt, "global/search/status", []string{"GET", "OPTIONS"}, nil, meta.SearchStatus)
AddPrivate(rt, "global/search/reindex", []string{"POST", "OPTIONS"}, nil, meta.Reindex)
AddPrivate(rt, "global/sync/keycloak", []string{"GET", "OPTIONS"}, nil, keycloak.Sync) AddPrivate(rt, "global/sync/keycloak", []string{"GET", "OPTIONS"}, nil, keycloak.Sync)
AddPrivate(rt, "global/ldap/preview", []string{"POST", "OPTIONS"}, nil, ldap.Preview) AddPrivate(rt, "global/ldap/preview", []string{"POST", "OPTIONS"}, nil, ldap.Preview)
AddPrivate(rt, "global/ldap/sync", []string{"GET", "OPTIONS"}, nil, ldap.Sync) AddPrivate(rt, "global/ldap/sync", []string{"GET", "OPTIONS"}, nil, ldap.Sync)
AddPrivate(rt, "global/backup", []string{"POST", "OPTIONS"}, nil, backup.Backup) AddPrivate(rt, "global/backup", []string{"POST", "OPTIONS"}, nil, backup.Backup)
AddPrivate(rt, "global/restore", []string{"POST", "OPTIONS"}, nil, backup.Restore) AddPrivate(rt, "global/restore", []string{"POST", "OPTIONS"}, nil, backup.Restore)
AddPrivate(rt, "global/search/status", []string{"GET", "OPTIONS"}, nil, searchEndpoint.Status)
AddPrivate(rt, "global/search/reindex", []string{"POST", "OPTIONS"}, nil, searchEndpoint.Reindex)
AddPrivate(rt, "setup/onboard", []string{"POST", "OPTIONS"}, nil, onboardEndpoint.InstallSample)
Add(rt, RoutePrefixRoot, "robots.txt", []string{"GET", "OPTIONS"}, nil, meta.RobotsTxt) Add(rt, RoutePrefixRoot, "robots.txt", []string{"GET", "OPTIONS"}, nil, meta.RobotsTxt)
Add(rt, RoutePrefixRoot, "sitemap.xml", []string{"GET", "OPTIONS"}, nil, meta.Sitemap) Add(rt, RoutePrefixRoot, "sitemap.xml", []string{"GET", "OPTIONS"}, nil, meta.Sitemap)