
Make Sitemap & Robots.txt use new schema

Harvey Kandola 2018-09-19 16:38:05 +01:00
parent 4f0cc2f616
commit 3ea6ae3c37
8 changed files with 458 additions and 46 deletions


@@ -178,12 +178,8 @@ func (s Scope) TemplatesBySpace(ctx domain.RequestContext, spaceID string) (docu
 // These documents can then be seen by search crawlers.
 func (s Scope) PublicDocuments(ctx domain.RequestContext, orgID string) (documents []doc.SitemapDocument, err error) {
 	err = s.Runtime.Db.Select(&documents, `
-		SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_userid AS userid,
-		c_job AS job, c_location AS location, c_name AS name, c_desc AS excerpt, c_slug AS slug,
-		c_tags AS tags, c_template AS template, c_protection AS protection, c_approval AS approval,
-		c_lifecycle AS lifecycle, c_versioned AS versioned, c_versionid AS versionid,
-		c_versionorder AS versionorder, c_groupid AS groupid, c_created AS created, c_revised AS revised
-		FROM dmz_doc
+		SELECT d.c_refid AS documentid, d.c_name AS document, d.c_revised as revised, l.c_refid AS spaceid, l.c_name AS space
+		FROM dmz_doc d
+		LEFT JOIN dmz_space l ON l.c_refid=d.c_spaceid
 		WHERE d.c_orgid=? AND l.c_type=1 AND d.c_lifecycle=1 AND d.c_template=0`, orgID)
@@ -192,12 +188,16 @@ func (s Scope) PublicDocuments(ctx domain.RequestContext, orgID string) (documen
 		documents = []doc.SitemapDocument{}
 	}
 	if err != nil {
-		err = errors.Wrap(err, fmt.Sprintf("execute GetPublicDocuments for org %s%s", orgID))
+		err = errors.Wrap(err, fmt.Sprintf("execute GetPublicDocuments for org %s", orgID))
 	}
 	return
 }
+/*
+FROM document d LEFT JOIN label l ON l.refid=d.labelid
+*/
 // Update changes the given document record to the new values, updates search information and audits the action.
 func (s Scope) Update(ctx domain.RequestContext, document doc.Document) (err error) {
 	document.Revised = time.Now().UTC()
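
For reference, the five aliased columns in the rewritten query scan straight into doc.SitemapDocument. Below is a minimal sketch of the shape that model needs; the field names are inferred from their use in the Sitemap handler further down, not copied from the repository's model file. sqlx matches columns to struct fields case-insensitively, so no db tags are strictly required.

// Sketch of the sitemap-specific model the new query selects into.
// Field names come from the Sitemap handler below; the actual model
// file in the repo may differ.
package doc

import "time"

// SitemapDocument is the minimal document view exposed to search crawlers.
type SitemapDocument struct {
	DocumentID string    // d.c_refid   AS documentid
	Document   string    // d.c_name    AS document
	Revised    time.Time // d.c_revised AS revised
	SpaceID    string    // l.c_refid   AS spaceid
	Space      string    // l.c_name    AS space
}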


@@ -91,23 +91,24 @@ func (h *Handler) RobotsTxt(w http.ResponseWriter, r *http.Request) {
 	// Anonymous access would mean we allow bots to crawl.
 	if o.AllowAnonymousAccess {
 		sitemap := ctx.GetAppURL("sitemap.xml")
-		robots = fmt.Sprintf(
-			`User-agent: *
-Disallow: /settings/
-Disallow: /settings/*
-Disallow: /profile/
-Disallow: /profile/*
-Disallow: /auth/login/
-Disallow: /auth/login/
-Disallow: /auth/logout/
-Disallow: /auth/logout/*
-Disallow: /auth/reset/*
-Disallow: /auth/reset/*
-Disallow: /auth/sso/
-Disallow: /auth/sso/*
-Disallow: /share
-Disallow: /share/*
-Sitemap: %s`, sitemap)
+		robots = fmt.Sprintf(`User-agent: *
+Disallow: /settings/
+Disallow: /settings/*
+Disallow: /profile/
+Disallow: /profile/*
+Disallow: /auth/login/
+Disallow: /auth/login/
+Disallow: /auth/logout/
+Disallow: /auth/logout/*
+Disallow: /auth/reset/*
+Disallow: /auth/reset/*
+Disallow: /auth/sso/
+Disallow: /auth/sso/*
+Disallow: /auth/*
+Disallow: /auth/**
+Disallow: /share
+Disallow: /share/*
+Sitemap: %s`, sitemap)
 	}
 	response.WriteBytes(w, []byte(robots))
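
With anonymous access enabled, the Sprintf above would render a response body like the following; the host is hypothetical, and the doubled /auth/login/ and /auth/reset/* entries are present in the template itself:

User-agent: *
Disallow: /settings/
Disallow: /settings/*
Disallow: /profile/
Disallow: /profile/*
Disallow: /auth/login/
Disallow: /auth/login/
Disallow: /auth/logout/
Disallow: /auth/logout/*
Disallow: /auth/reset/*
Disallow: /auth/reset/*
Disallow: /auth/sso/
Disallow: /auth/sso/*
Disallow: /auth/*
Disallow: /auth/**
Disallow: /share
Disallow: /share/*
Sitemap: https://docs.example.com/sitemap.xml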
@@ -166,7 +167,7 @@ func (h *Handler) Sitemap(w http.ResponseWriter, r *http.Request) {
 	for _, document := range documents {
 		var item sitemapItem
 		item.URL = ctx.GetAppURL(fmt.Sprintf("s/%s/%s/d/%s/%s",
-			document.SpaceID, stringutil.MakeSlug(document.Folder), document.DocumentID, stringutil.MakeSlug(document.Document)))
+			document.SpaceID, stringutil.MakeSlug(document.Space), document.DocumentID, stringutil.MakeSlug(document.Document)))
 		item.Date = document.Revised.Format("2006-01-02T15:04:05.999999-07:00")
 		items = append(items, item)
 	}
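
Finally, a sketch of how the collected items could be rendered as sitemap XML. The sitemapItem fields URL and Date are taken from the handler above; the template and package wiring here are illustrative assumptions, not the repository's actual code:

// Illustrative sketch: render the items built in the Sitemap handler
// into sitemap.org-style XML. Only the URL and Date field names are
// confirmed by the diff; everything else is assumed for this example.
package main

import (
	"os"
	"text/template"
)

type sitemapItem struct {
	URL  string
	Date string
}

const sitemapTmpl = `<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
{{range .}}  <url>
    <loc>{{.URL}}</loc>
    <lastmod>{{.Date}}</lastmod>
  </url>
{{end}}</urlset>
`

func main() {
	items := []sitemapItem{{
		// URL shape from the handler: s/<spaceid>/<space-slug>/d/<documentid>/<document-slug>
		// (host and IDs are hypothetical)
		URL:  "https://docs.example.com/s/VzMuyEw_3WqiafcG/my-space/d/VzMvJEw_3WqiafcI/my-document",
		// Matches the handler's time layout; trailing zero fractions are dropped by the 9s verb.
		Date: "2018-09-19T16:38:05+01:00",
	}}
	t := template.Must(template.New("sitemap").Parse(sitemapTmpl))
	_ = t.Execute(os.Stdout, items)
}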