// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com

package meta

import (
	"bytes"
	"fmt"
	"net/http"
	"strings"
	"text/template"

	"github.com/documize/community/core/env"
	"github.com/documize/community/core/response"
	"github.com/documize/community/core/stringutil"
	"github.com/documize/community/domain"
	"github.com/documize/community/domain/auth"
	"github.com/documize/community/domain/organization"
	indexer "github.com/documize/community/domain/search"
	"github.com/documize/community/domain/store"
	"github.com/documize/community/model/doc"
	"github.com/documize/community/model/org"
	"github.com/documize/community/model/space"
)

// Handler contains the runtime information such as logging and database.
type Handler struct {
	Runtime *env.Runtime
	Store   *store.Store
	Indexer indexer.Indexer
}

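// A minimal wiring sketch (illustrative assumption — the real construction and
// route registration happen in the server bootstrap, not in this package):
//
//	h := meta.Handler{Runtime: rt, Store: st, Indexer: idx}
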
// Meta provides org metadata based upon request domain (e.g. acme.documize.com).
func (h *Handler) Meta(w http.ResponseWriter, r *http.Request) {
	data := org.SiteMeta{}
	data.URL = organization.GetSubdomainFromHost(r)

	org, err := h.Store.Organization.GetOrganizationByDomain(data.URL)
	if err != nil {
		h.Runtime.Log.Info("unable to fetch request meta for " + data.URL)
		response.WriteNotFound(w)
		return
	}

	data.OrgID = org.RefID
	data.Title = org.Title
	data.Message = org.Message
	data.AllowAnonymousAccess = org.AllowAnonymousAccess
	data.AuthProvider = strings.TrimSpace(org.AuthProvider)
	data.AuthConfig = org.AuthConfig
	data.MaxTags = org.MaxTags
	data.Version = h.Runtime.Product.Version
	data.Revision = h.Runtime.Product.Revision
	data.Edition = h.Runtime.Product.Edition
	data.ConversionEndpoint = org.ConversionEndpoint
	data.Storage = h.Runtime.StoreProvider.Type()
	data.Location = h.Runtime.Flags.Location // reserved
	data.Theme = org.Theme

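	// AuthConfig can embed provider credentials (e.g. LDAP bind passwords or
	// Keycloak client secrets), so it must never be echoed back verbatim to
	// unauthenticated callers.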
	// Strip secrets
	data.AuthConfig = auth.StripAuthSecrets(h.Runtime, org.AuthProvider, org.AuthConfig)

	response.WriteJSON(w, data)
}

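// Clients typically hit this endpoint before authenticating, to learn which
// auth provider, theme and edition to present. The exact public route is
// registered elsewhere; the path shown here is an assumption:
//
//	GET https://acme.documize.com/api/public/meta
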
// RobotsTxt returns robots.txt for the requesting domain.
// Crawling is only permitted when the organization allows anonymous access.
func (h *Handler) RobotsTxt(w http.ResponseWriter, r *http.Request) {
	method := "meta.RobotsTxt"
	ctx := domain.GetRequestContext(r)

	dom := organization.GetSubdomainFromHost(r)
	o, err := h.Store.Organization.GetOrganizationByDomain(dom)

	// default is to deny
	robots :=
		`User-agent: *
Disallow: /
`

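	// If the organization cannot be resolved, fall back to an empty org with
	// anonymous access disabled so the default deny above stands.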
	if err != nil {
		h.Runtime.Log.Info(fmt.Sprintf("%s failed to get Organization for domain %s", method, dom))
		o = org.Organization{}
		o.AllowAnonymousAccess = false
	}

	// Anonymous access would mean we allow bots to crawl.
	if o.AllowAnonymousAccess {
		sitemap := ctx.GetAppURL("sitemap.xml")

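		// Bots may crawl public spaces and documents, but settings, profile,
		// auth, share and attachment endpoints stay excluded; the sitemap URL
		// is advertised so crawlers can discover the public content.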
		robots = fmt.Sprintf(`User-agent: *
Disallow: /settings/
Disallow: /settings/*
Disallow: /profile/
Disallow: /profile/*
Disallow: /auth/login/
Disallow: /auth/login/
Disallow: /auth/logout/
Disallow: /auth/logout/*
Disallow: /auth/reset/*
Disallow: /auth/reset/*
Disallow: /auth/sso/
Disallow: /auth/sso/*
Disallow: /auth/*
Disallow: /auth/**
Disallow: /share
Disallow: /share/*
Disallow: /attachments
Disallow: /attachments/*
Disallow: /attachment
Disallow: /attachment/*
Sitemap: %s`, sitemap)
	}

	response.WriteBytes(w, []byte(robots))
}

// Sitemap returns URLs that can be indexed.
// We only include public folders and documents (i.e. those visible to everyone).
func (h *Handler) Sitemap(w http.ResponseWriter, r *http.Request) {
	method := "meta.Sitemap"
	ctx := domain.GetRequestContext(r)

	dom := organization.GetSubdomainFromHost(r)
	o, err := h.Store.Organization.GetOrganizationByDomain(dom)

	if err != nil {
		h.Runtime.Log.Info(fmt.Sprintf("%s failed to get Organization for domain %s", method, dom))
		o = org.Organization{}
		o.AllowAnonymousAccess = false
	}

	sitemap :=
		`<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">
{{range .}}<url>
<loc>{{ .URL }}</loc>
<lastmod>{{ .Date }}</lastmod>
</url>{{end}}
</urlset>`

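	// Each sitemapItem supplies a fully-qualified URL and a W3C datetime
	// lastmod value, matching what the template above expects.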
	var items []sitemapItem

	// Anonymous access means we announce folders/documents shared with 'Everyone'.
	if o.AllowAnonymousAccess {
		// Grab shared folders
		folders, err := h.Store.Space.PublicSpaces(ctx, o.RefID)
		if err != nil {
			folders = []space.Space{}
			h.Runtime.Log.Error(fmt.Sprintf("%s failed to get folders for domain %s", method, dom), err)
		}

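		// Public space URLs take the form s/{spaceID}/{slug}; the document
		// URLs below add /d/{documentID}/{slug}.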
		for _, folder := range folders {
			var item sitemapItem
			item.URL = ctx.GetAppURL(fmt.Sprintf("s/%s/%s", folder.RefID, stringutil.MakeSlug(folder.Name)))
			item.Date = folder.Revised.Format("2006-01-02T15:04:05.999999-07:00")
			items = append(items, item)
		}

		// Grab documents from shared folders
		var documents []doc.SitemapDocument
		documents, err = h.Store.Document.PublicDocuments(ctx, o.RefID)
		if err != nil {
			documents = []doc.SitemapDocument{}
			h.Runtime.Log.Error(fmt.Sprintf("%s failed to get documents for domain %s", method, dom), err)
		}

		for _, document := range documents {
			var item sitemapItem
			item.URL = ctx.GetAppURL(fmt.Sprintf("s/%s/%s/d/%s/%s",
				document.SpaceID, stringutil.MakeSlug(document.Space), document.DocumentID, stringutil.MakeSlug(document.Document)))
			item.Date = document.Revised.Format("2006-01-02T15:04:05.999999-07:00")
			items = append(items, item)
		}
	}

	buffer := new(bytes.Buffer)
	t := template.Must(template.New("tmp").Parse(sitemap))
	if err := t.Execute(buffer, &items); err != nil {
		h.Runtime.Log.Error(method, err)
	}

	response.WriteBytes(w, buffer.Bytes())
}

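// For a site with one public space and one document, the generated sitemap
// looks roughly like this (illustrative values):
//
//	<?xml version="1.0" encoding="UTF-8"?>
//	<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" ...>
//	<url>
//	<loc>https://acme.documize.com/s/abc123/product-docs</loc>
//	<lastmod>2018-11-24T16:13:21+00:00</lastmod>
//	</url>
//	</urlset>
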
// Reindex indexes all documents and attachments.
func (h *Handler) Reindex(w http.ResponseWriter, r *http.Request) {
	ctx := domain.GetRequestContext(r)

	if !ctx.GlobalAdmin {
		response.WriteForbiddenError(w)
		h.Runtime.Log.Info(fmt.Sprintf("%s attempted search reindex", ctx.UserID))
		return
	}

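	// The rebuild can take a while on large installations, so it runs in a
	// background goroutine and the HTTP request returns immediately.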
	go h.rebuildSearchIndex(ctx)

	response.WriteEmpty(w)
}

// rebuildSearchIndex indexes all documents and attachments.
func (h *Handler) rebuildSearchIndex(ctx domain.RequestContext) {
	method := "meta.rebuildSearchIndex"

	docs, err := h.Store.Meta.Documents(ctx)
	if err != nil {
		h.Runtime.Log.Error(method, err)
		return
	}

	h.Runtime.Log.Info(fmt.Sprintf("Search re-index started for %d documents", len(docs)))

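	// Failures on individual documents are logged but do not abort the rebuild.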
	for i := range docs {
		d := docs[i]

		dc, err := h.Store.Meta.Document(ctx, d)
		if err != nil {
			h.Runtime.Log.Error(method, err)
			// continue
		}
		at, err := h.Store.Meta.Attachments(ctx, d)
		if err != nil {
			h.Runtime.Log.Error(method, err)
			// continue
		}

		h.Indexer.IndexDocument(ctx, dc, at)

		pages, err := h.Store.Meta.Pages(ctx, d)
		if err != nil {
			h.Runtime.Log.Error(method, err)
			// continue
		}

		for j := range pages {
			h.Indexer.IndexContent(ctx, pages[j])
		}

		// Log progress every N documents.
		if i%100 == 0 {
			h.Runtime.Log.Info(fmt.Sprintf("Search re-indexed %d documents...", i))
		}
	}

	h.Runtime.Log.Info(fmt.Sprintf("Search re-index finished for %d documents", len(docs)))
}

// SearchStatus returns the current state of the search index.
func (h *Handler) SearchStatus(w http.ResponseWriter, r *http.Request) {
	method := "meta.SearchStatus"
	ctx := domain.GetRequestContext(r)

	if !ctx.GlobalAdmin {
		response.WriteForbiddenError(w)
		h.Runtime.Log.Info(fmt.Sprintf("%s attempted get of search status", ctx.UserID))
		return
	}

	count, err := h.Store.Meta.SearchIndexCount(ctx)
	if err != nil {
		response.WriteServerError(w, method, err)
		h.Runtime.Log.Error(method, err)
		return
	}

	var ss = searchStatus{Entries: count}

	response.WriteJSON(w, ss)
}

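// The response is a single JSON object, e.g. {"entries": 4220} — the count
// shown is illustrative.
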
type sitemapItem struct {
	URL  string
	Date string
}

type searchStatus struct {
	Entries int `json:"entries"`
}

// Themes returns the list of available UI themes.
func (h *Handler) Themes(w http.ResponseWriter, r *http.Request) {
	type theme struct {
		Name    string `json:"name"`
		Primary string `json:"primary"`
	}

	th := []theme{}
	th = append(th, theme{Name: "", Primary: "#280A42"})
	th = append(th, theme{Name: "Blue", Primary: "#176091"})
	th = append(th, theme{Name: "Deep Orange", Primary: "#BF360C"})
	th = append(th, theme{Name: "Teal", Primary: "#00695C"})

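	// The payload is a flat JSON array, e.g. (illustrative):
	// [{"name":"","primary":"#280A42"},{"name":"Blue","primary":"#176091"}, ...]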
	response.WriteJSON(w, th)
}