mirror of https://github.com/documize/community.git synced 2025-08-08 06:55:28 +02:00

Compare commits

..

526 commits

Author SHA1 Message Date
Harvey Kandola
efb092ef8f Prep v5.13.0 release 2024-12-31 13:09:30 -05:00
Harvey Kandola
3fc0a15f87 es-AR 2024-12-31 12:05:21 -05:00
Harvey Kandola
c841c85478 Add en-AR i18n 2024-12-30 13:49:39 -05:00
Harvey Kandola
2dae03332b Prep release 2024-06-18 11:53:23 -04:00
Harvey Kandola
44b1f263cd Restore activity and audit logs 2024-06-18 11:53:15 -04:00
Harvey Kandola
982e16737e Bump version 2024-02-19 11:55:21 -05:00
Harvey Kandola
f641e42434 Remove default conversion service URL 2024-02-19 11:55:15 -05:00
Harvey Kandola
8895db56af Skip restore of user logs 2024-02-19 11:54:50 -05:00
Harvey Kandola
acb59e1b43 Bump Go deps 2024-02-19 11:54:27 -05:00
Harvey Kandola
f2ba294be8 Prep 5.11.3 2024-02-16 10:53:02 -05:00
Harvey Kandola
69940cb7f1
Merge pull request #398 from mb3m/fix-win-build
Fix Windows build script
2024-02-02 06:44:23 -05:00
Harvey Kandola
6bfdda7178
Merge pull request #399 from mb3m/fix-french
French translation updates
2024-02-02 06:42:35 -05:00
Thomas Bolon
027fdf108c french translation updates
- uppercase with accent (E => É)
- Label => Étiquette
- misc terms
- space before punctuations ("?" => " ?")
2024-02-02 11:44:50 +01:00
Thomas Bolon
1f12df76aa typo 2024-02-02 11:17:49 +01:00
Thomas Bolon
d811b88896 fix windows build
- add a cd .. after ember build: the script moves to the gui\ directory but
  does not move back, even though the rest of the script expects to run
  from the root directory.

- add multiple echo commands to help debug error

- fix robocopy wrong usage where glob patterns are specified on the
  source path instead of a specific third argument
2024-02-02 11:15:47 +01:00
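To illustrate the robocopy point above, a minimal before/after sketch; the paths are placeholders, not the script's actual ones.

    REM wrong: glob pattern embedded in the source path
    robocopy gui\dist-prod\assets\*.* edition\static\public\assets /E

    REM right: source and destination are directories, the file pattern goes in the third argument
    robocopy gui\dist-prod\assets edition\static\public\assets *.* /E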
Harvey Kandola
20fb853907 Implement Azure SQL database compatibility checks 2024-01-16 12:47:44 -05:00
Harvey Kandola
599c53a971 Loosen Microsoft SQL Server database compat. checks 2024-01-15 16:48:21 -05:00
Harvey Kandola
1f462ed4f7 Support Microsoft SQL Azure v12+ 2024-01-15 14:43:27 -05:00
Harvey Kandola
9f122fa79b Bump version 2024-01-11 11:15:43 -05:00
Harvey Kandola
4210caca48 Improve document transaction scopes 2024-01-10 15:54:56 -05:00
Harvey Kandola
c62fa4612b Upgrade Go deps 2024-01-10 14:47:49 -05:00
Harvey Kandola
510e1bd0bd Bump version to 5.11.0 2024-01-10 14:47:40 -05:00
Harvey Kandola
a32510b8e6 Add additional SQL Server indexes 2024-01-10 14:47:08 -05:00
Harvey Kandola
589f3f581f Prep 5.10.0 release 2023-09-18 12:27:18 -04:00
Harvey Kandola
20bba4cd7e Add performance indexes for MS SQL Server database provider 2023-09-18 12:27:09 -04:00
Harvey Kandola
cbf5f4be7d
Merge pull request #397 from armando-marra/master
Modified i18n.js and Readme.md
2023-09-08 12:15:28 -04:00
Armando Marra
dc63639c99
Update README.md
Added italian to languages list
2023-09-08 09:10:05 +02:00
Armando Marra
26f435bdc9
Merge branch 'documize:master' into master 2023-09-08 09:08:21 +02:00
Armando Marra
a8a82963fa
Merge pull request #1 from armando-marra/patch-1-it-lang
Update i18n.js
2023-09-08 09:07:42 +02:00
Armando Marra
ab8582e807
Update i18n.js
Added Italian language support
2023-09-08 09:06:48 +02:00
Harvey Kandola
4fa0566274
Merge pull request #396 from armando-marra/master
Added Italian translation
2023-08-23 09:09:45 -04:00
armando-marra
f4b45d2aa7
Update localize.go
Added Italian language
2023-08-23 14:22:47 +02:00
armando-marra
1abc5d3e52
Italian language JSON file 2023-08-23 14:14:38 +02:00
Harvey Kandola
6e463ff2f4 Bump version 2023-08-21 12:20:53 -04:00
Harvey Kandola
f80b3f3d10 5.8.0 2023-07-24 11:11:46 -04:00
Harvey Kandola
6c218cf087 Add additional indexes for SQL Server 2023-07-24 11:11:39 -04:00
Harvey Kandola
3d1c8a6c54 Remove archived versions from version selector 2023-07-24 11:11:25 -04:00
Harvey Kandola
576fd5e604
Merge pull request #395 from xadammr/patch-1
Add support for SQL Server 2022
2023-07-17 17:07:09 -04:00
Adam Roe
62407a28b4
Add support for SQL Server 2022 2023-07-15 18:27:20 +10:00
Harvey Kandola
0adf6d5dc8 Update permission.go 2023-06-12 13:24:47 -04:00
Harvey Kandola
15f8a64c86 Bump Go & improve SQL layer 2023-05-15 13:47:22 -04:00
Harvey Kandola
95c67acaa0
Merge pull request #394 from uchida-nunet/master
Minor corrections Japanese translation
2023-03-04 08:45:39 -05:00
Harvey Kandola
d8f66b5ffb Bump version 2023-02-27 13:30:34 -05:00
Uchida, Yasuhiro
c051e81a99 Update ja-JP.json 2023-02-24 01:08:43 +09:00
Uchida, Yasuhiro
1d86b98949
Update ja-JP.json 2023-02-24 00:59:06 +09:00
Harvey Kandola
0a1cc86907
Merge pull request #393 from uchida-nunet/master
add Japanese language support
2023-02-11 10:28:58 -05:00
uchida-nunet
a49869d35d Update ja-JP.json 2023-02-11 18:09:27 +09:00
uchida-nunet
848afd3263 change gui/app/services/i18n.js 2023-02-11 12:51:59 +09:00
uchida-nunet
b9cb99e3bb mod 2023-02-11 12:43:32 +09:00
uchida-nunet
64261ffcf5 Merge branch 'master' of https://github.com/uchida-nunet/documize-community 2023-02-11 12:30:12 +09:00
uchida-nunet
0030418707 add Japanese language support 2023-02-11 12:29:43 +09:00
uchida-nunet
0f91ee518e add Japanese language support 2023-02-11 12:06:29 +09:00
Harvey Kandola
5de1b7a92e
Merge pull request #392 from Xhelliom/dev
Minor corrections french translation
2023-02-07 11:04:17 -05:00
Stephane Wetterwald
a2524f785e Minor corrections 2023-02-07 15:57:25 +01:00
Harvey Kandola
f16b9f3810 Build prep 2023-02-06 14:01:19 -05:00
Harvey Kandola
1c09771c33 Bump version 2023-02-06 11:46:10 -05:00
Harvey Kandola
13fc5b5015 Test for user permissions before setting them 2023-02-06 11:46:03 -05:00
Harvey Kandola
76c777acc1 French localization 2023-02-06 11:45:47 -05:00
Harvey Kandola
ea9ff78411
Merge pull request #391 from Xhelliom/dev
Translate to french
2023-02-06 11:19:35 -05:00
Stephane Wetterwald
4a9dd47894 translate to french 2023-02-01 21:40:34 +01:00
Harvey Kandola
7565779ef1
Update docker-compose.yaml
Revert broken command
2023-01-11 20:04:00 -05:00
Stephane Wetterwald
c07e7b6afc more translations 2022-12-29 16:50:00 +01:00
Harvey Kandola
88bdafcb1b
Merge pull request #390 from rursache/patch-1
Updated docker-compose.yaml
2022-12-17 13:09:34 -05:00
Radu Ursache
5a3cb1b226
Updated docker-compose.yaml
Updated `docker-compose.yaml` to read the correct arch from device so arm64 devices can run Documize as well
2022-12-17 20:07:23 +02:00
Stephane Wetterwald
6ee8e6c7b4 translated to line 200 2022-12-15 12:32:35 +01:00
Stephane Wetterwald
599c464d2d add fr 2022-12-15 12:18:14 +01:00
Harvey Kandola
ae77fa2275
Merge pull request #389 from nemunaire/b/dockerfile
Dockerfile: Force node 16 + update go and alpine tags
2022-11-22 08:54:19 -05:00
Pierre-Olivier Mercier
610367aac5 Force node 16 + update go and alpine tags 2022-11-22 10:11:41 +01:00
Harvey Kandola
be2c2a7a2c v5.4.2 prep 2022-10-31 18:32:59 -04:00
Harvey Kandola
0d28b7ee79 Fix org reset during restore 2022-10-31 18:32:46 -04:00
Harvey Kandola
aa8b473018 Bump version to 5.4.1 2022-10-21 11:13:13 -04:00
Harvey Kandola
6993dc678f Fix regression for robots.txt and sitemap.xml 2022-10-21 11:13:03 -04:00
Harvey Kandola
e0e3f0c141 Fix middleware for favicon/sitemap 2022-10-21 11:12:46 -04:00
Harvey Kandola
4c031fe7e4 Set TLS min to 1.3 2022-10-21 11:12:17 -04:00
Harvey Kandola
e4025bee42 Bump to v5.4.0 2022-10-10 17:40:45 -04:00
Harvey Kandola
876775b395 Support TLS version selection
Allow config file and ENV variables to define minimum TLS version used for SSL connections.

tlsversion=1.3
2022-10-10 17:40:36 -04:00
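A minimal sketch of the setting described above; the config-file line matches the key shown in the commit, while the environment variable name is an assumption based on the usual DOCUMIZE* prefix.

    # documize.conf
    tlsversion=1.3

    # environment variable form (name assumed from the DOCUMIZE* convention)
    DOCUMIZETLSVERSION=1.3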
Harvey Kandola
828c01d189 Update README.md 2022-09-18 15:40:10 -04:00
Harvey Kandola
a69bcc0af6 Update README.md 2022-09-18 15:39:16 -04:00
Harvey Kandola
5ec911dce2 Dotcom pricing landing page changes 2022-09-18 15:25:57 -04:00
Harvey Kandola
ce07d4d147 Bump version 2022-09-18 14:29:38 -04:00
Harvey Kandola
f3ef83162e
Update README.md 2022-09-16 12:25:02 -04:00
Harvey Kandola
f1a01ec195 Bump version to 5.2.2 2022-09-12 12:35:51 -04:00
Harvey Kandola
01e53c3d27 Fix sanitization of document title 2022-09-12 12:35:40 -04:00
Harvey Kandola
2cf21a7bea Fix i18n issue 2022-09-08 16:45:28 -04:00
Harvey Kandola
d4c606760c Bump version 2022-08-01 10:13:21 -04:00
Harvey Kandola
9343d77b26 Bump PDF viewer dep 2022-08-01 10:13:15 -04:00
Harvey Kandola
30aa8aadb6 Fix i18n bug for PDF viewer 2022-08-01 10:13:03 -04:00
Harvey Kandola
29bc2677a8 v5.2.0 2022-07-13 13:28:31 -04:00
Harvey Kandola
d9827df440
Merge pull request #387 from kmkzif/master
Add portuguese language support
2022-07-13 13:21:24 -04:00
KMKZ
cfd7ebd2bf Add portuguese language support 2022-07-12 22:14:26 -03:00
Harvey Kandola
b510615691 Update community.go 2022-07-10 15:49:34 -04:00
Harvey Kandola
e8641405cf Update package.json 2022-07-10 15:48:21 -04:00
Harvey Kandola
209f1b667e Bump version 2022-07-10 15:38:37 -04:00
Harvey Kandola
e70019d73b
Merge pull request #386 from 569258yin/master
Add chinese language support
2022-07-10 15:32:39 -04:00
manwang
dc26f063c8 add chinese language support 2022-07-09 08:50:35 +08:00
Harvey Kandola
68d067ef7b Update en-US.json 2022-05-16 16:30:17 -04:00
Harvey Kandola
0d52f434d5 Localized string updates 2022-05-16 16:23:19 -04:00
Harvey Kandola
ce22c78dac Bump version 2022-05-16 13:22:02 -04:00
Harvey Kandola
f976ea36f6 Fix version control translation strings 2022-05-16 13:12:55 -04:00
Harvey Kandola
1734963693 Remove unnecessary permission check 2022-05-12 12:27:06 -04:00
Harvey Kandola
247a2b2c03 Remove back tick 2022-04-15 15:53:47 -04:00
Harvey Kandola
38a790dd04 v5 release
Supported locales: English and German.
2022-04-13 13:54:27 -04:00
Harvey Kandola
b77b4abdc2 Add de-DE locale 2022-04-08 11:59:08 -04:00
Harvey Kandola
6b498a74c6
Merge pull request #383 from lafriks-fork/fix/docker_missing_i18n
Fix missing i18n files in docker build
2022-04-05 11:50:25 -04:00
Lauris BH
f6dd872782
Fix missing i18n files in docker build 2022-04-05 15:54:03 +03:00
Harvey Kandola
9473ecba9a Update flowchart.go 2022-03-24 13:13:11 -04:00
Harvey Kandola
1a909dd046 Support custom URLs for Diagrams.net 2022-03-24 13:05:45 -04:00
Harvey Kandola
607a2d5797 Create howto-localize.png 2022-03-23 12:35:32 -04:00
Harvey Kandola
037dfc40cd Update en-US.json 2022-03-22 13:18:32 -04:00
Harvey Kandola
65348eee28 More i18n strings 2022-03-22 13:13:57 -04:00
Harvey Kandola
78932fb8c7 Fix i18n strings 2022-03-22 12:14:31 -04:00
Harvey Kandola
6c8b10753d i18n fixes 2022-03-21 19:14:10 -04:00
Harvey Kandola
e56263564c
Merge pull request #382 from documize/i18n
Localization support
2022-03-21 13:18:44 -04:00
Harvey Kandola
22b6a4fb78 Update i18n.js 2022-03-21 13:15:53 -04:00
Harvey Kandola
7e26c003d6 Select user language before server language 2022-03-21 13:14:55 -04:00
Harvey Kandola
e81cbad385 Select language for server and per user 2022-03-21 12:43:45 -04:00
Harvey Kandola
4494ace0a2 Set org and per user locale 2022-03-19 18:07:38 -04:00
Harvey Kandola
23abcf1585 i18n fixes 2022-03-17 16:42:15 -04:00
Harvey Kandola
67070c3bfc Require sub check 2022-03-17 16:31:11 -04:00
Harvey Kandola
77c767a351 Update en-US.json 2022-03-17 16:17:37 -04:00
Harvey Kandola
17162ce336 i18n mail template strings 2022-03-17 13:03:04 -04:00
Harvey Kandola
7255eb4f56 i18n server-side strings 2022-03-16 17:39:01 -04:00
Harvey Kandola
df534f72fa i18n server-side strings 2022-03-16 16:58:42 -04:00
Harvey Kandola
f4a1350a41 i18n server-side loading and setup 2022-03-16 13:32:48 -04:00
Harvey Kandola
cd15c393fe Go 1.18 & tidy deps 2022-03-16 13:32:27 -04:00
McMatts
7f66977ac1 i18n notify strings 2022-03-14 14:49:10 -04:00
McMatts
33a9cbb5b0 Update licensing links 2022-03-14 14:33:54 -04:00
McMatts
716343680a i18n Enterprise feature-set 2022-03-14 14:09:50 -04:00
McMatts
5db5f4d63b i18n continuation 2022-03-14 12:01:46 -04:00
McMatts
3d3d50762e i18n section strings 2022-03-10 16:35:59 -05:00
McMatts
20c9168140 i18n section strings 2022-03-10 15:21:25 -05:00
McMatts
ce9c635fb4 i18n section strings 2022-03-10 13:49:19 -05:00
McMatts
f735ae1278 i18n section types 2022-03-10 12:10:39 -05:00
McMatts
bca7794c00 i18n doc strings 2022-03-09 15:20:16 -05:00
McMatts
371706fb49 i18n doc strings 2022-03-09 14:04:17 -05:00
McMatts
a236cbb01c i18n doc strings 2022-03-09 13:36:48 -05:00
McMatts
93b6f26365 i18n space strings 2022-03-08 17:05:12 -05:00
McMatts
5e687f5ef4 i18n space strings 2022-03-08 13:05:30 -05:00
McMatts
97c4c927ac i18n space strings 2022-03-08 12:50:34 -05:00
McMatts
4885a1b380 i18n space strings 2022-03-08 12:16:36 -05:00
McMatts
e0805d7131 i18n search strings 2022-03-08 11:35:59 -05:00
McMatts
6d735e8579 i18n page strings 2022-03-04 18:31:23 -05:00
McMatts
073ef81e80 i18n strings
Pods completed
2022-03-04 13:46:22 -05:00
McMatts
38c9a94a9c i18n strings 2022-03-04 13:28:36 -05:00
McMatts
59dc6ea991 i18n page strings 2022-03-04 13:07:11 -05:00
McMatts
4ab48cc67d i18n admin strings 2022-03-03 19:42:37 -05:00
McMatts
53297f7627 i18n admin strings 2022-03-03 19:10:06 -05:00
McMatts
4ed2b3902c JS build tweaks 2022-03-03 18:21:16 -05:00
McMatts
6968581e5b i18n admin strings 2022-03-03 14:38:27 -05:00
McMatts
c09a116e56 i18n admin strings 2022-03-03 14:03:48 -05:00
McMatts
7cf672646a i18n admin strings 2022-03-02 20:52:59 -05:00
McMatts
29447a2784 i18n admin strings 2022-03-02 20:30:39 -05:00
McMatts
479d03ba70 i18n admin sections 2022-03-02 19:55:52 -05:00
McMatts
a7dac6911c i18n strings 2022-03-01 22:59:56 -05:00
McMatts
08f21346c1 [WIP] i18n constant strings 2022-03-01 22:40:51 -05:00
McMatts
ce4f62d346 [WIP] i18n strings 2022-03-01 22:03:18 -05:00
McMatts
8a25509019 Implement basic i18n client-side foundation 2022-03-01 20:01:06 -05:00
McMatts
59c929d251 New logo 2022-03-01 20:00:33 -05:00
McMatts
245c538990 Initial i18n experiment 2022-03-01 16:22:53 -05:00
HarveyKandola
32a9528e6d Update README.md 2022-02-27 12:48:26 -05:00
HarveyKandola
a15f0c8eb6 Update README.md 2022-02-27 12:48:04 -05:00
HarveyKandola
eb9fbd25b9 Update README.md 2022-02-27 12:47:09 -05:00
Harvey Kandola
dbef758035
Merge pull request #381 from typkrft/master
Updated URL
2022-02-17 11:07:03 -05:00
Brandon
dea25a2b85
Updated URL
The current URL doesn't point to the actual download.
2022-02-17 10:59:25 -05:00
HarveyKandola
fcf38d8af9 Change downloads asset location 2022-02-14 14:23:10 -05:00
HarveyKandola
ce93a5e623 v4.2.2 2022-02-08 18:48:30 -05:00
HarveyKandola
8df1cc73b0 Update license-key.hbs 2022-02-08 18:31:37 -05:00
HarveyKandola
53ec7c9274 Update README.md 2022-01-12 14:15:56 -05:00
HarveyKandola
cfe85248ce Update screenshot 2022-01-12 14:15:11 -05:00
HarveyKandola
30c31a1ba7 Merge branch 'master' of https://github.com/documize/community 2022-01-12 14:13:18 -05:00
HarveyKandola
a97b6b22d9 Skip LDAP user creation if not sync'ed 2022-01-12 14:12:51 -05:00
Harvey Kandola
e985c5f808
Update README.md 2022-01-11 21:27:18 -05:00
HarveyKandola
4b89f3b1c2 Release 4.2.0 2022-01-11 21:20:01 -05:00
HarveyKandola
707dc1e052 Increase category name size 2022-01-11 13:52:41 -05:00
HarveyKandola
88211739f0 Bump deps 2022-01-11 13:52:30 -05:00
HarveyKandola
6b3cdb5033 Support for ARIA 2022-01-11 12:40:52 -05:00
HarveyKandola
45f216b8a1 Improved tooltip rendering speed 2022-01-10 17:14:08 -05:00
HarveyKandola
c31c130ffd Remove Ember Attacher add-on 2022-01-10 17:01:54 -05:00
HarveyKandola
5d5e212a6b FreeBSD builds 2021-11-19 08:34:19 -05:00
HarveyKandola
8fa5569ae5 Implement more re-branding updates 2021-11-15 14:52:29 -05:00
HarveyKandola
8976bf817b Update README 2021-11-10 19:31:03 -05:00
HarveyKandola
0c3fed2b18 Prep re-branding 2021-11-03 11:46:20 -04:00
HarveyKandola
60dfb54d54 Allow logger.Info multiple args 2021-10-07 14:16:05 -04:00
Harvey Kandola
c6863201b3
Merge pull request #380 from nemunaire/f/dockerfile 2021-09-22 10:31:34 -04:00
Pierre-Olivier Mercier
45567e274a Modernize Dockerfile 2021-09-21 22:14:30 +02:00
HarveyKandola
dff4c6929b Update README.md 2021-09-09 12:29:38 -04:00
HarveyKandola
eea8db9288 Fingerprint during Ember build 2021-09-07 20:18:30 -04:00
HarveyKandola
e19c4ad18a Update build.bat 2021-09-07 11:32:22 -04:00
HarveyKandola
989b7cd62c Sync with Community 2021-08-19 13:02:56 -04:00
HarveyKandola
df8f650319 Support VB.NET for code syntax highlighting 2021-08-18 19:41:19 -04:00
HarveyKandola
565a063231 Tweaked document list rendering of tooltips 2021-08-18 19:40:57 -04:00
HarveyKandola
cb46f34503 Update PDF dep 2021-08-18 19:40:12 -04:00
HarveyKandola
470e2d3ecf Move over to Go embed directive 2021-08-18 19:39:51 -04:00
HarveyKandola
cddba799f8 Update Ember deps 2021-08-18 19:39:14 -04:00
HarveyKandola
05df22ed4a Update go deps 2021-08-18 19:38:43 -04:00
HarveyKandola
a5dfa6ee39 Merge branch 'master' of https://github.com/documize/community 2021-03-28 16:03:51 -04:00
HarveyKandola
780ce2df61 Bump 3.9.0 release 2021-03-28 16:03:47 -04:00
Harvey Kandola
9f28e1bff2
Merge pull request #379 from esquith/patch-1
Update README.md
2021-03-28 21:02:53 +01:00
Esquith Allen
8ae94295a2
Update README.md 2021-03-28 16:00:39 -04:00
HarveyKandola
adb7b4d7bf Enforce TLS 1.2 minimum 2021-03-16 13:58:27 -04:00
HarveyKandola
66fcb77d8b Improve user management 2020-11-29 20:31:20 -05:00
Harvey Kandola
972413110f
Merge pull request #377 from Ma27/preview-button-toggle
gui/editors: toggle Preview/Edit button
2020-11-12 18:57:42 +00:00
Harvey Kandola
a0a166136e
Merge pull request #378 from Ma27/markdown-bulletpoints-margin
document/wysiwig: no margin for sublists
2020-11-12 18:54:17 +00:00
HarveyKandola
30d12ba756 Updated Draw.io integration 2020-11-09 11:01:09 -05:00
Maximilian Bosch
06bf9efcfc
document/wysiwig: no margin for sublists
When having a list with sublists in a `markdown` section like this

  * top-level
  * another one
    * sub-elem 1
    * sub-elem 2

then documize will create a margin of 15px before and after the sublist
which is rather unintuitive as this will create the wrong impression
that these bullet-points have no relation to the parent one even though
this is the most common semantic reason for sublists.

This patch removes the margin at the top and bottom for those kinds of
sublists.
2020-10-31 21:47:23 +01:00
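A sketch of the kind of rule this patch applies; the selector is a placeholder for wherever Documize renders markdown sections, not the actual class name.

    /* remove the extra 15px gap around nested lists */
    .markdown-section ul ul,
    .markdown-section ol ol {
        margin-top: 0;
        margin-bottom: 0;
    }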
Maximilian Bosch
9ed8f79315
gui/editors: toggle Preview/Edit button
When hitting "Preview" on a previewable section (e.g. one with
markdown), the tooltip and icon of the preview-button still show
"Preview". With this change, both the text and the button will be
switched accordingly.
2020-10-31 21:18:20 +01:00
Maximilian Bosch
73e8c7a278
docker-compose: explicitly pin postgres container to v12 2020-10-31 21:18:04 +01:00
Harvey Kandola
806efd7eac
Merge pull request #376 from AntonLydike/master
Fixed and updated Dockerfile
2020-08-23 15:14:57 +01:00
Anton Lydike
724f3c88b3 fixed and updated dockerfile 2020-08-21 15:42:10 +02:00
Harvey Kandola
4a7d915ebb
Merge pull request #375 from dimoonster/master
Proxy support when communicating with plantuml
2020-08-17 16:09:51 +01:00
Di_Moon
c7413da943 add proxy support when communicating with plantuml (remove spaces) 2020-08-14 11:56:27 +03:00
Di_Moon
4e0218f5ea add proxy support when communicating with plantuml 2020-08-14 11:54:30 +03:00
HarveyKandola
4fe022aa0c Database and LDAP upgrades
Bumped underlying dependencies affecting database and LDAP connectivity.

Bumped to Go v1.14.3 and released v3.8.0.
2020-05-21 12:32:46 +01:00
Harvey Kandola
aaa8c3282d
Update README.md 2020-03-09 12:37:13 +00:00
Harvey Kandola
5e022dd0b8
Update README.md 2020-03-09 12:30:37 +00:00
Harvey Kandola
bbca180298
Clarify support options 2020-03-09 12:30:16 +00:00
HarveyKandola
cdc7489659 Update Go modules/vendor directory
Closes #368
2020-02-15 15:42:54 +00:00
HarveyKandola
ab95fcc64d Prep 3.7.0 release
Awesome work @sauls8t !!
2020-02-04 14:43:47 +00:00
sauls8t
9bee58057e Add new pin related audit types 2020-02-04 14:37:02 +00:00
sauls8t
bda9719ecb Handle non-pinned spaces 2020-02-04 14:36:40 +00:00
sauls8t
fbd4b17c15 Record all doc pin related user actions 2020-02-04 14:36:11 +00:00
sauls8t
c689379f92 Restrict pin sequence changing
Only the 'all documents' filter enables the up/down pin sequence change clickers.
2020-02-04 12:37:59 +00:00
HarveyKandola
d1774b42bd Allow for clickable links in space description
Closes #361
2020-02-04 12:28:57 +00:00
sauls8t
8ac35a6b74 Routes for new pinned documents 2020-02-03 21:01:25 +00:00
sauls8t
813f270a9d Update schema for new pinned documents 2020-02-03 21:01:11 +00:00
sauls8t
e014f5b5c1 Pinned documents
Closes #278

Pin documents to the top of each space.
2020-02-03 21:00:35 +00:00
sauls8t
2b66d0096a MariaDB restore process compatibility 2020-02-03 20:58:25 +00:00
sauls8t
50f47f61a5 Rename folder/space for all space export 2020-02-03 20:57:36 +00:00
sauls8t
d26ecdc12f Prep 3.6.0 release 2019-12-14 13:13:47 +00:00
sauls8t
1a89201bd9 Provide generic IFrame section for documents
Closes #352
2019-12-09 12:31:44 +00:00
sauls8t
accf0a2c63 Use latest PlantUML lib and render as SVG
Closes #349 and #350
2019-12-09 10:54:02 +00:00
HarveyKandola
cafa3ceed0 Build prep 3.5.2 2019-12-06 18:02:00 +00:00
HarveyKandola
2b3e9dfbc9 Stop TinyMCE dialogs from scrolling background 2019-12-06 15:11:16 +00:00
HarveyKandola
1c1ebee15a Upgrade TinyMCE to 5.1.3 2019-12-06 15:10:50 +00:00
HarveyKandola
6ba4ca9c16 Fix scroll-to-section regression
Fixes #344
2019-12-06 11:09:02 +00:00
HarveyKandola
5aaa9f874d Hide PlantUML sample diagrams when you have content
Closes #346
2019-12-06 10:13:15 +00:00
sauls8t
51a25adbdb v3.5.1 2019-11-30 10:31:09 +00:00
Harvey Kandola
9d025c3f71
Update README.md 2019-11-19 18:11:49 +00:00
sauls8t
a4384210d4 Sanitize HTML in Markdown sections 2019-11-19 11:47:51 +00:00
HarveyKandola
6882491201 Build prep 2019-11-15 18:21:56 +00:00
HarveyKandola
d4edcb8b2c Show default category indicator to admins
Fixes #339
2019-11-15 16:38:56 +00:00
HarveyKandola
f117e91bcb Specify default categories for new documents
Closes #339

All new documents will be assigned default categories.

Documents created from templates that already have categories take precedence.
2019-11-15 14:51:52 +00:00
HarveyKandola
5c1ad25dc9 Allow rich text for Site Message and Space Desc
Closes #291

Style both site-wide message and per space descriptions using the fully-featured rich text editor.
2019-11-15 12:41:08 +00:00
HarveyKandola
8970a21b58 Fix regression for downloading certain binary attachments 2019-11-14 17:38:53 +00:00
McMatts
0e6f2f1f5e Allow non space creators to manage space settings
Fixes #337
2019-11-13 09:49:19 +00:00
McMatts
7fc74be7cd Merge branch 'master' of https://github.com/documize/community 2019-11-08 14:55:32 +00:00
McMatts
faeadb2bbb Update embedded assets 2019-11-08 14:55:27 +00:00
McMatts
be50bf9f14
Update README.md 2019-11-08 14:54:33 +00:00
McMatts
60ef205948 Improve jump-to-section scrolling 2019-11-08 12:45:22 +00:00
McMatts
7ae801554d Bump version 2019-11-08 12:44:57 +00:00
McMatts
441efd42e9 Prevent edits to locked docs/sections 2019-11-08 12:44:41 +00:00
McMatts
a19ba46f7a Add http scheme for copied links 2019-11-08 11:11:27 +00:00
sauls8t
ad361c22ba Write HTTP headers before body write 2019-11-08 10:40:10 +00:00
sauls8t
7954f4b976 3.4.1 release 2019-11-02 09:22:09 +00:00
sauls8t
2d105f2154 Better handling for document.update TX 2019-10-30 13:57:26 +00:00
sauls8t
811e239baf Support comma-escaped LDAP DN values 2019-10-30 13:57:07 +00:00
HarveyKandola
c7e71173ea SQL Server per doc activity report 2019-10-28 15:04:36 +00:00
HarveyKandola
8fa8a3657c Handle escaped comma in LDAP DN string
Closes #326
2019-10-28 15:03:53 +00:00
HarveyKandola
a64a219ce8 Remove obsolete anchor plugin 2019-10-26 16:55:54 +01:00
HarveyKandola
d7a484a936 3.4 embedded assets 2019-10-26 16:16:16 +01:00
HarveyKandola
017b19141c 3.4.0 build prep 2019-10-26 16:15:56 +01:00
McMatts
39f457e90e Upgrade TinyMCE to v5.1.0
Introduces new sticky toolbar option!
2019-10-21 10:54:01 +01:00
McMatts
30d3e6f82e Improve canEdit permissions for attachments 2019-10-21 10:34:34 +01:00
McMatts
8c2bed283f Sync edit checks across editions 2019-10-21 10:34:10 +01:00
McMatts
a3867c617a Bump version and embed assets 2019-10-21 10:33:51 +01:00
McMatts
28424e7e4b Change audit store to use own TX 2019-10-21 10:33:37 +01:00
HarveyKandola
7c70274f5e Add edit check to attachments list view 2019-10-14 17:29:34 +01:00
HarveyKandola
ef5b5cdb32 Bump version to 3.3.2 2019-10-14 12:27:41 +01:00
HarveyKandola
ccd756aca0 Document PostgreSQL wildcard search syntax 2019-10-10 11:58:41 +01:00
HarveyKandola
444b89e425 Improve SMTP setting persistence
Should close #327 by providing better handling of non-english dialect(?).
2019-10-07 15:44:07 +01:00
HarveyKandola
3d0f17386b Bump released to v3.3.1 2019-09-26 16:51:05 +01:00
HarveyKandola
513fd9f994 Enable tag searching for SQL Server database 2019-09-24 16:44:28 +01:00
HarveyKandola
5cef58eeba Re-sync space counts when moving docs
Between spaces!
2019-09-24 14:07:44 +01:00
HarveyKandola
fad1de2e41 Overhaul the space stats module
Closes #274

All space counters are reset after document and category operations.
2019-09-24 13:39:57 +01:00
sauls8t
6b723568d3 Provide Bash/Shell code syntax in Rich Text Editor
Closes #322
2019-09-18 11:25:21 +01:00
sauls8t
00889f0e0e Build number refresh 2019-09-17 11:13:34 +01:00
sauls8t
6629d76453 Add covering indexes to support super-size deployments
Resolves issues for enterprise customers with tonnes of data in these SQL tables:

- dmz_doc
- dmz_section

Multi-tenant compliant.
2019-09-14 13:04:56 +01:00
sauls8t
74300b009b v3.3.0 release prep 2019-09-12 13:16:10 +01:00
sauls8t
5004e5a85e Enable Full-text Search when using SQL Server 2016+ 2019-09-11 19:08:53 +01:00
HarveyKandola
0524a0c74c Added link to Go modules migration guide 2019-09-06 11:12:42 +01:00
HarveyKandola
b826852137 Moved from Dep to Go Modules
We have finally dropped go dep and moved over to go mod !

During the move, some dependencies have been bumped.
2019-09-06 11:06:28 +01:00
HarveyKandola
2c164a135a Remove obsolete build flag 2019-09-05 10:37:40 +01:00
HarveyKandola
44febcc25c Provide Docker Compose file for quick app+db setup
Closes #311
2019-08-29 14:14:10 +01:00
HarveyKandola
66e11cefbc Bump version to 3.2.0 2019-08-28 13:41:52 +01:00
HarveyKandola
5e9eeb5bf9 Use Ember v3.12.0
Bumped JS dependencies where possible
2019-08-28 12:57:02 +01:00
HarveyKandola
5b7610d726 Update README 2019-08-28 12:27:38 +01:00
HarveyKandola
0419f3b7b3 Update Jira client library to latest release
Related to #308
2019-08-27 17:16:06 +01:00
HarveyKandola
5b72da037c Ensure only change approvers can change doc level attachments
Does not affect section level attachments
2019-08-27 16:17:38 +01:00
HarveyKandola
d14e8a3ff6 Delete package-lock.json 2019-08-27 15:33:18 +01:00
HarveyKandola
9a3d2c3c28 Fixed HTML for new auth settings 2019-08-27 15:25:10 +01:00
HarveyKandola
3b76e10ee0 Ensure doc protected from non-category viewers
Closes #310
2019-08-27 15:24:59 +01:00
Harvey Kandola
29d7307537
Merge pull request #306 from dereknex/auth-with-cas
Authentication with Central Authentication Service (CAS) such as https://www.apereo.org/projects/cas
2019-08-27 11:05:18 +01:00
HarveyKandola
96e5812fc0 Improve CAS documentation 2019-08-27 11:02:56 +01:00
HarveyKandola
c35eb16fc5 Merge branch 'auth-with-cas' of https://github.com/dereknex/community into pr/306 2019-08-27 10:59:42 +01:00
HarveyKandola
9dd78ca9be Fix typos 2019-08-27 10:59:40 +01:00
Derek Chen
891ba07db8 add run cas server section 2019-08-21 23:25:12 +08:00
sauls8t
2ee9a9ff46 Setting first/last name for all scenarios 2019-08-18 15:23:01 +01:00
Derek Chen
399c36611f update user firstname and lastname when logging in with CAS 2019-08-18 21:33:25 +08:00
Derek Chen
fbb73560c0 Get firstname and lastname from CAS attributes 2019-08-17 22:13:37 +08:00
Derek Chen
15e687841f Merge branch 'auth-with-cas' of github.com:dereknex/community into auth-with-cas 2019-08-16 22:24:51 +08:00
Derek Chen
0a10087160 fix blank page on CAS auth failure 2019-08-16 22:17:52 +08:00
Derek Chen
9d0d4a7861 fix blank page on CAS auth failure 2019-08-16 22:00:55 +08:00
sauls8t
fc60a5917e Allow user sync when dual auth mode
LDAP sync can be executed if dual mode authentication is enabled.
2019-08-16 10:37:58 +01:00
sauls8t
285a01508b LDAP fix for dual mode 2019-08-15 17:28:45 +01:00
sauls8t
4f248bf018 Linting 2019-08-15 17:15:39 +01:00
sauls8t
32dbab826d Bumped database driver dependencies
Latest used for MySQL, SQL Server and PostgreSQL
2019-08-15 14:51:40 +01:00
sauls8t
b6e1543b7f Merge branch 'master' of https://github.com/documize/community 2019-08-15 13:57:00 +01:00
Harvey Kandola
f8bb879a70
Merge pull request #307 from dereknex/docker-build
Build Documize with docker
2019-08-15 13:54:53 +01:00
sauls8t
20366e6776 Fix typo 2019-08-15 13:54:26 +01:00
Derek Chen
ffacf17c5f update bindata.go 2019-08-14 17:39:41 +08:00
Chen Chong
bfe4c5d768 Update Dockerfile 2019-08-14 09:07:57 +00:00
Derek Chen
0f3a618140
Update Dockerfile
fix docker build missing 'gcc' error
2019-08-14 16:54:52 +08:00
Derek Chen
826f6d96a6 fix docker build missing gcc issue 2019-08-14 16:51:55 +08:00
苗超
6a9fa0140a Update Dockerfile 2019-08-14 08:38:47 +00:00
Derek Chen
24619c6a58 fix merge error 2019-08-14 11:48:59 +08:00
Derek Chen
ebc8214049 Merge branch 'docker-build' into auth-with-cas 2019-08-14 11:39:16 +08:00
Derek Chen
71c1def5c7 add Dockerfile 2019-08-14 11:36:41 +08:00
Derek Chen
fded0014a3 Merge branch 'master' into auth-with-cas 2019-08-13 22:32:40 +08:00
Derek Chen
041091504f fix lost auth token issue when refresh page 2019-08-13 22:10:08 +08:00
Derek Chen
8c2df6178d auth with cas 2019-08-09 13:44:03 +08:00
sauls8t
02d478c6dd Latest embedded assets 2019-08-07 17:21:39 -04:00
Derek Chen
8c99977fc9 working on frontend 2019-08-08 00:40:03 +08:00
sauls8t
9d6b6fec23 Bump version to 3.1.2 2019-08-05 13:54:25 -04:00
sauls8t
4e0e3b5101 Upgrade Markdown preview dependency
v9.0.1
2019-08-05 13:35:26 -04:00
sauls8t
e219c97a6b Enable browser context menu for TinyMCE 2019-08-05 13:29:08 -04:00
sauls8t
7485f2cef7 Upgrade TinyMCE to v5.0.12 2019-08-05 13:28:48 -04:00
sauls8t
627195aae7 Clarify permissions explanation 2019-08-05 13:06:35 -04:00
Harvey Kandola
f39be2a594
Merge pull request #302 from Ma27/markdown-html-preview
Render HTML in Markdown during previews as well
2019-08-04 13:20:46 -04:00
Maximilian Bosch
444b4fd1f7
Render HTML in Markdown during previews as well
When you have e.g. a markdown-based section in a document that
references another document, an HTML link is generated. However, when
previewing changes, the raw HTML is shown.

With `html: true` set in the configuration for `markdownit`[1],
HTML is detected and properly parsed (markup tags such as `<h1>` are
also rendered properly, just like when using `blackfriday` to request
a rendered section).

Regarding probably harmful side-effects: setting e.g. `<style>* { display:
none !important; }</style>` causes a white page with `markdownit` as
well as with `blackfriday`. `<script>` tags aren't affected since
`handlebars` mostly breaks with `<script>` tags within variables that
are substituted using `{{{var}}}` into the DOM[3].

Please note that I didn't commit the modifications in `embed/bindata.go`
as it seemed to me after looking at the history that those updates are
only done when preparing a release.

[1] 1ad3aec204/lib/presets/default.js (L6-L9)
[2] https://github.com/documize/blackfriday/blob/master/markdown.go#L105-L146
[3] https://github.com/wycats/handlebars.js/issues/531
2019-08-01 21:02:35 +02:00
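For reference, a minimal sketch of the `markdownit` option this change enables; only `html: true` comes from the commit, the surrounding variable names are illustrative.

    // enable HTML pass-through so tags/links in the markdown source are parsed, not escaped
    var md = markdownit({ html: true });
    var previewHtml = md.render(sectionBody);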
sauls8t
b31f330c41 Fixed SQL Server JSON support
Closes #298
2019-07-18 10:14:41 -04:00
sauls8t
69077ce419 Provide sample data for Cloud onboarding 2019-06-29 15:37:49 +01:00
sauls8t
201d2a339c Prep v3.1.0 build 2019-06-25 17:52:03 +01:00
sauls8t
326019d655 Add padding for section headings in export view 2019-06-25 17:25:00 +01:00
sauls8t
264c25cfe0 Prevent space user invitation for non-Documize auth providers
If running LDAP or Keycloak authentication, you cannot bypass the user sync process to invite new users.

Closes #284
2019-06-25 17:05:25 +01:00
sauls8t
595301db64 Replace UPLOAD with + for attachments action 2019-06-25 16:53:43 +01:00
sauls8t
d6432afdad Sort labels by name
DB layer sorts, UI just receives and displays as-is.
2019-06-25 16:52:31 +01:00
HarveyKandola
9c36241b58 Update SQL Server driver library 2019-06-25 15:37:19 +01:00
HarveyKandola
c538fc9eb1 Update PostgreSQL driver library 2019-06-25 15:33:51 +01:00
HarveyKandola
f3df43efe0 Updated Go MySQL driver library 2019-06-25 15:26:53 +01:00
HarveyKandola
d04becc1a3 Improved backup/restore compatibility between DB providers 2019-06-25 15:26:38 +01:00
HarveyKandola
3621e2fb79 Install help docs as part of onboarding process
Our own docs are installed as sample data!

Refactored search reindexing code.
2019-06-24 17:01:56 +01:00
HarveyKandola
411f64c359 Typo 2019-06-20 15:28:34 +01:00
HarveyKandola
ae923e7df1 Make tooltips use theme skin 2019-06-20 15:28:23 +01:00
HarveyKandola
bfe5262cb5 Streamlined add router methods 2019-06-20 15:27:09 +01:00
sauls8t
80f0876b51 Show admins incomplete config indicator
SMTP checks to start with.
2019-06-19 13:39:36 +01:00
sauls8t
b2cd375936 Increase border radius for UI card elements 2019-06-19 12:48:18 +01:00
sauls8t
243a170071 Fixed SQL syntax for SQL Server
Affected User Group admin
2019-06-19 12:47:47 +01:00
sauls8t
fb3f2cc24b Send product edition meta to setup wizard 2019-06-19 12:47:18 +01:00
sauls8t
946c433018 Detect first-time login event 2019-06-19 12:46:46 +01:00
sauls8t
4d2f30711c Accept activation key during setup process
Enterprise edition only.
2019-06-19 12:46:05 +01:00
HarveyKandola
887c999a1e Bumped Go compilation to v1.12.6 2019-06-14 15:16:28 +01:00
HarveyKandola
b256bf2e9d Move product assets to AWS S3
Moving from Google Cloud bucket to AWS S3 bucket.
2019-06-12 18:17:05 +01:00
Harvey Kandola
8f4cd755de
Merge pull request #282 from documize/core-0619
The all new v3 is here sporting a new layout, better theming, quick-jump to spaces and content, tonnes of document view improvements, comment replies, expand/collapse doc views and much more.
2019-06-12 14:19:12 +01:00
HarveyKandola
64612b825a Include new comment fields in backup/restore process
c_sectionid
c_replyto
2019-06-12 13:52:30 +01:00
HarveyKandola
216866a953 Shrink doc meta font size 2019-06-12 08:44:37 +01:00
HarveyKandola
14820df165 Move doc headings above doc meta 2019-06-11 18:03:28 +01:00
sauls8t
f7a738ad84 v3 build prep 2019-06-11 11:41:09 +01:00
sauls8t
df2775f8a4 Remove confusing click handlers from document meta zone 2019-06-11 10:38:57 +01:00
sauls8t
a710839f69 Tidy up draft permission handling
Removes inconsistency of approvals vs. lifecycle.
2019-06-11 10:38:39 +01:00
sauls8t
2a45c82b46 Remove redundant logging 2019-06-11 10:37:49 +01:00
sauls8t
7eec01811a Add “Reply” to constants 2019-06-10 15:47:13 +01:00
sauls8t
ef5e4db298 Link comments doc sections + threaded replies
Allow for comments to be related to doc sections.

Enable one-level replies to comments.

closes #240
2019-06-07 19:44:06 +01:00
sauls8t
b1cb0ed155 Make section publish modal focus on first input 2019-06-06 18:29:38 +01:00
sauls8t
78fd14b3d3 Fix indentation 2019-06-06 18:17:03 +01:00
sauls8t
82ddcc057d Make copy/move process use dropdowns for selection
We currently use keyword searching to find target documents.

Replace this with space and document list dropdown for quicker and easier target document selection.
2019-06-06 17:58:48 +01:00
sauls8t
a90c5834fa Improve move/copy/merge sections between documents
Process all child sections during copy/move/merge operations.

Ensure links work as expected within newly copied/moved content.

Index copied/moved content for searching.

fixes #138
fixes #248
2019-06-06 16:17:36 +01:00
sauls8t
ec8d5c78e2 Provide copy document option
Duplicates entire document tree into a new document (same space).
2019-06-06 11:45:41 +01:00
HarveyKandola
b75969ae90 Use app-meta service to construct section link 2019-06-05 12:56:02 +01:00
HarveyKandola
9b82f42cc1 Copy link to section to clipboard
Closes #174
2019-06-05 12:52:15 +01:00
HarveyKandola
b8fee6b962 Indent doc sections as per hierarchy
Reflect doc structure by indenting sections as you read the document.

Fixes #281.
2019-06-05 11:41:43 +01:00
HarveyKandola
8baad7e2f0 Enable doc section expand/collapse
Closes #170
2019-06-05 11:09:24 +01:00
HarveyKandola
99a5418dba Only show space dropdown when label has 2+ spaces 2019-06-03 17:00:32 +01:00
HarveyKandola
acd3dd63b5 Fix typo 2019-06-03 17:00:06 +01:00
HarveyKandola
96872990f9 Update export process CSS
This will ensure new styles are reflected during export process.
2019-06-03 12:21:18 +01:00
McMatts
c59a467cdb Print selected sections
Close #146
2019-05-31 19:41:34 +01:00
McMatts
715c31a1da Fix Ember lint issues and print view 2019-05-31 11:48:16 +01:00
McMatts
40237344e2 Show doc created/revised dates
Fixes #229
2019-05-30 15:10:28 +01:00
McMatts
91a3c59cd2 Remove redundant comment/feedback feature
Enterprise edition contains the feedback/comment feature and Community edition should not show this.

Closes #272
2019-05-30 10:41:09 +01:00
McMatts
fe7548cd97 Change position of doc attachments 2019-05-29 16:57:22 +01:00
McMatts
ca4a9a74ee Sync doc meta layout with Enterprise edition 2019-05-29 14:58:00 +01:00
McMatts
1e1cbdd843 Reduce space between doc sections
Fixes #253
2019-05-29 14:36:37 +01:00
McMatts
c8b82c85fe Sync doc meta style with Enterprise edition 2019-05-29 14:31:10 +01:00
McMatts
bae7909801 Sync with Enterprise edition changes 2019-05-29 12:49:20 +01:00
McMatts
c870547fa1 Move doc meta fields out of sidebar 2019-05-29 11:09:19 +01:00
McMatts
c0876e7be8 Allow for scrollable doc TOC + scroll to top + wrap section names
Closes #254
2019-05-28 16:12:25 +01:00
McMatts
cd9f681adf Resolve Code section type syntax load bug
For certain syntax types CodeMirror was missing required logic.

Fixes #276
2019-05-28 15:48:16 +01:00
McMatts
0240f98eb0 Add scroll to top option for document view
Closes #175
2019-05-28 14:08:19 +01:00
McMatts
c49707d160 Make popups close on double-click + quote @size attrs
1. Popups should close on subsequent trigger clicks
2. @attrs should be quoted as per linter warnings.
2019-05-28 10:59:48 +01:00
McMatts
c65eb97948 Show jump list to other space documents
Closes #219

Jump to documents within space when viewing a document.
2019-05-24 15:08:54 +01:00
McMatts
bc9dab72f2 Show quick list and jump to other spaces with same label
Closes #275
2019-05-24 13:45:01 +01:00
McMatts
6ae9414361 Complete UI refactoring to new nav and toolbar UX
Compact design!
2019-05-24 12:30:31 +01:00
McMatts
e37782e5b7 Fix edge case for attachments download auth checks
Edit permissions no longer interfere with download checks.
2019-05-24 12:29:46 +01:00
Harvey Kandola
2bbeaf91a0 Implement dropdown menu for document toolbar 2019-05-22 06:35:54 +01:00
Harvey Kandola
de273a38ed New toolbar styling and layout controls
Built to work with forthcoming feature set that requires display of more options.
2019-05-21 17:05:57 +01:00
Harvey Kandola
08794f8d5f Migrate views to new master layout 2019-05-20 17:40:57 +01:00
Harvey Kandola
14f313a836 Make Login action more prominent
Closes #247
2019-05-20 15:21:33 +01:00
Harvey Kandola
62c3cd03ad [WIP] Improve master layout for different devices + product growth
We have two pressing needs:

1. Improve experience on real estate challenged devices.
2. Make room for product feature-set growth.

To hit these targets, we need to develop better UX through smarter on-screen space management.
2019-05-20 10:39:48 +01:00
McMatts
bce1c1b166
Update README.md 2019-05-17 11:36:53 +01:00
Harvey Kandola
758bf07272 Bump version 2019-05-16 13:21:04 +01:00
Harvey Kandola
479508e436 Upgrade UI framework to Ember v3.10.0 2019-05-16 12:49:27 +01:00
Harvey Kandola
49bf4eeaa0 Update .gitignore 2019-05-16 12:48:46 +01:00
Harvey Kandola
1c45aef461 Fix TinyMCE toolbar 2019-05-16 11:14:34 +01:00
Harvey Kandola
a988bc0c3c
Update README.md 2019-05-15 16:50:51 +01:00
Harvey Kandola
91ec2f89d8 Compile for ARM and ARM64 (Raspberry Pi)
Closes #267 so folks can run Documize on the Pi.
2019-05-15 16:45:26 +01:00
McMatts
2477c36f11
Merge pull request #266 from documize/core-0519
Upgrade rich text editor, dual LDAP/forms authentication
2019-05-14 14:54:35 +01:00
McMatts
e2a3962092 Remove double screen blink after logout 2019-05-14 14:52:36 +01:00
McMatts
8ecbb9cdee Bump version 2.5.0 2019-05-14 14:52:11 +01:00
McMatts
f738077f5a Remove rogue log output 2019-05-14 14:51:58 +01:00
Harvey Kandola
f17de58fff Improve hyperlink color
Closes #261

Still more work to do, but an improvement nonetheless.
2019-05-14 11:28:19 +01:00
Harvey Kandola
40a0d77f93 Remove unnecessary TRIM() from SQL queries
Should resolve #265
2019-05-14 09:38:26 +01:00
Harvey Kandola
072ca0dfed Ensure user admin works for new dual login mode 2019-05-13 16:50:20 +01:00
Harvey Kandola
b054addb9c Support dual login via LDAP and forms authentication
Closes #256 as we now support dual login -- LDAP and forms authentication.

Also bumped the vendored library to LDAP.v3 as it contains bug fixes.
2019-05-13 16:14:11 +01:00
Harvey Kandola
e59e1f060a
Update README.md 2019-05-13 15:58:30 +01:00
sauls8t
faf9a555d2 Upgrade to Tiny editor v5 2019-05-13 12:47:38 +01:00
sauls8t
8ab3cbe7e8 Support any database name
Closes #264
2019-05-10 16:38:19 +01:00
Saul S
86d25b2191
Update README.md 2019-04-29 13:37:00 +01:00
Saul S
9a53958c8f
Merge pull request #258 from documize/startup-config
Support for documize.conf files
2019-04-29 13:36:34 +01:00
sauls8t
b971c52469 Rebind dropzone elements for section attachments 2019-04-29 13:34:38 +01:00
sauls8t
34d1639899 Look for implicit documize.conf
@harveykandola

Now looks for implicit documize.conf, then a specified .conf, then falls back to flags and env vars.
2019-04-28 14:54:48 +01:00
sauls8t
1fefdaec9f Add comments 2019-04-27 17:11:58 +01:00
sauls8t
8f1bc8ce1f Specify runtime parameters in TOML format config file
In addition to specifying parameters in the command line and environment variables, you can now omit all parameters and provide a config file.

Example:

./documize myconfig.conf

Note: there is no switch setting, just provide the filename as an argument.

Fixes #243
2019-04-27 16:59:10 +01:00
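A minimal sketch of such a config file, assuming the TOML keys mirror the existing command-line flag names (dbtype, db, port, salt); all values are placeholders.

    # myconfig.conf
    dbtype = "mysql"
    db = "user:password@tcp(localhost:3306)/documize"
    port = 5001
    salt = "random-salt-value"

Started as shown in the commit message: ./documize myconfig.conf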
sauls8t
d151555597 Update .gitignore to exclude .conf files
These are .toml format files
2019-04-26 19:54:31 +01:00
Harvey Kandola
4255291223 Merge branch 'master' of https://github.com/documize/community 2019-04-26 16:18:53 +01:00
Harvey Kandola
86a4e82c12 Reduce meta endpoint logging 2019-04-26 16:18:50 +01:00
Harvey Kandola
daa9e08ab4
Update README.md 2019-04-26 14:30:36 +01:00
Harvey Kandola
c666e68c2b v2.4.1 prep 2019-04-26 12:21:04 +01:00
Harvey Kandola
e6e3ed71ac
Merge pull request #250 from siepkes/postgresql_postgis_fix
Ignore PostGIS when determining if PostgreSQL is empty
2019-04-23 16:30:51 +01:00
Jasper Siepkes
89a28ad22f
Ignore PostGIS when determining if PostgreSQL is empty
If the PostGIS extension is installed in PostgreSQL, the
'spatial_ref_sys' table will automatically have been installed in the
public schema. Documize would see this table and incorrectly assume it
shouldn't enter setup mode.
2019-04-23 17:07:09 +02:00
Harvey Kandola
8e4ad6422b Ensure Backup/Restore processes section attachments
New DB column c_sectionid means something new to backup and restore.
2019-04-21 11:53:48 +01:00
Harvey Kandola
4de83beba4 Update build process to include PDFJS 2019-04-19 15:09:12 +01:00
Harvey Kandola
10c57a0ae1
Merge pull request #249 from documize/section-pdf-files
PDF section type + section level attachments
2019-04-19 13:48:59 +01:00
Harvey Kandola
728789195c PDF section type build prep
Closes #207 -- view PDFs with new PDF section type.
2019-04-19 13:18:31 +01:00
Harvey Kandola
b5cd378302 Complete PDF section type 2019-04-19 11:30:40 +01:00
Harvey Kandola
7fde947a52 Enable PDF section editor 2019-04-18 15:42:18 +01:00
Harvey Kandola
61d0086337 Provide per section attachments
Upload and delete attachments on a per section basis.
2019-04-18 13:31:48 +01:00
Harvey Kandola
166aeba09b [WIP] PDF viewer section & per section attachments 2019-04-17 17:13:18 +01:00
Harvey Kandola
c0ed3c3d04 Bump version 2019-04-16 13:10:44 +01:00
Harvey Kandola
ab5314d5e1 Remove cursor from dropdown menu header 2019-04-16 12:53:35 +01:00
Harvey Kandola
51a0e1127e Add spam control basics 2019-04-16 12:53:22 +01:00
Harvey Kandola
e10d04d22e Exclude draft versions from non-lifecycle users
Only show draft documents to those with lifecycle permissions.

Closes #242
2019-04-15 13:23:41 +01:00
sauls8t
2fffb7869e Bump version 2019-04-13 17:23:37 +01:00
sauls8t
82ed36478b Show if search result is template
Helps to distinguish between docs and templates when listing search results.
2019-04-11 15:49:58 +01:00
sauls8t
1da49974cb Improve reverse proxy support
Should address edge cases and close #224 -- subject to field testing.

Does require that NGINX deployments use the following:

proxy_pass http://documize-url:5001;
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $remote_addr;
2019-04-11 14:45:36 +01:00
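For context, those directives typically sit inside a server block along these lines; server_name and the upstream address are placeholders.

    server {
        listen 80;
        server_name docs.example.org;

        location / {
            proxy_pass http://documize-url:5001;
            proxy_set_header Host $http_host;
            proxy_set_header X-Forwarded-For $remote_addr;
        }
    }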
sauls8t
9e3eac19aa Allow doc/section/files links to open in tabs
Closes #233 and might help #236
2019-04-10 20:03:34 +01:00
Saul S
92696c5181
Include SQL Server 2019-04-09 15:31:29 +01:00
Saul S
eecf316d50
Merge pull request #238 from documize/mssqlserver
Microsoft SQL Server 2016+ database support
2019-04-09 15:30:11 +01:00
Harvey Kandola
fa383a58ff Bump version and update NOTICES 2019-04-09 13:24:42 +01:00
Harvey Kandola
b5a5cfd697 Give missing account warning during password reset
The reset password process will tell the user if they do not have an account.
2019-04-09 13:24:27 +01:00
Harvey Kandola
2ddd7ada9b Make storage providers generate row limit clause 2019-04-04 12:08:57 +01:00
Harvey Kandola
8515a77403 Remove debug info from compiled binaries 2019-04-04 12:08:30 +01:00
Harvey Kandola
f8d97d2a56 Make backup/restore compat with SQL Server 2019-04-02 15:47:31 +01:00
Harvey Kandola
e98f7b9218 Enable searching for SQL Server storage provider 2019-04-02 15:30:42 +01:00
Harvey Kandola
a41f43c380 WIP SQL Server support 2019-04-01 20:08:57 +01:00
Harvey Kandola
64403c402b Make store SQL Server compatible 2019-04-01 12:02:23 +01:00
Harvey Kandola
9ec858286f Create tables
Script to create tables with correct data types in SQL Server 2016 and above.

Search table not included.
2019-03-29 16:42:49 +00:00
Harvey Kandola
80aab3ce99 SQL Server provider basics 2019-03-27 15:09:48 -04:00
Harvey Kandola
deb579d8ad Microsoft SQL Server driver and prep 2019-03-26 08:51:02 -04:00
Harvey Kandola
e6335dd58c Prep 2.2.1 release 2019-03-25 13:41:22 -04:00
Harvey Kandola
e1a8f8b724 Display third party notices in UI 2019-03-24 17:53:22 -04:00
Harvey Kandola
e1001bb11e Merge branch 'master' of https://github.com/documize/community 2019-03-24 17:18:26 -04:00
Harvey Kandola
dbee77df56 Add more 3rd party library references 2019-03-24 17:18:24 -04:00
Harvey Kandola
9c2bff0374
Update README.md 2019-03-21 11:37:16 +00:00
Harvey Kandola
651cbb1dfe Add NOTICES.md
Covers third party dependencies
2019-03-20 12:53:50 +00:00
Harvey Kandola
a98c3a0fe2 Use + CONTENT for consistency
Closes #228
2019-03-18 18:04:34 +00:00
Harvey Kandola
a08b583b22 Use monospace font for Markdown editor
Closes #227
2019-03-18 18:02:47 +00:00
McMatts
6738d2c9e1
Merge pull request #225 from documize/dev0319
v2.2.0 merge
2019-03-15 13:01:28 +00:00
Harvey Kandola
441001fffe Make code section font size REM based
In line with recent changes.
2019-03-15 12:57:58 +00:00
Harvey Kandola
a4e07fbf7f Release v2.2.0 prep 2019-03-13 17:48:53 +00:00
McMatts
9a41e82aa3 Provide view density switcher for spaces & space view
Closes #218
2019-03-13 15:17:15 +00:00
McMatts
b89a297c70 Fixed an issue with duplicate search results
Issue crept in during switch to lodash lib.
2019-03-13 15:15:43 +00:00
McMatts
ca1e281775 Decrease tooltip delay 2019-03-13 15:15:12 +00:00
McMatts
c5fc0f93e0 Improve click handling for navigation elements in sidebar 2019-03-13 11:43:14 +00:00
McMatts
217e8a3a29 Add animation for ember attacher popup components 2019-03-13 11:42:55 +00:00
McMatts
1854998c80 Add diagnostic output for attachment download 2019-03-13 11:42:32 +00:00
McMatts
f4a371357e Clear new section title input box
After inserting a new section we clear the section title
2019-03-13 11:42:12 +00:00
McMatts
1d00f8ac6e Allow sorting of search results and space contents
Closes #187

Sort search results and space contents by Name, Created or Revised.
2019-03-13 11:40:36 +00:00
McMatts
0985dbf5b6 Improve doc table of contents font sizing 2019-03-08 15:51:36 +00:00
McMatts
b2fcad649e Set new sidebar color 2019-03-08 15:51:19 +00:00
McMatts
f062005946 Introduce transaction isolation customization
Search-related indexing transactions require better TX begin/commit.

New helpers provide TX isolation control.
2019-03-08 15:50:55 +00:00
McMatts
36d7136210 Add diagnostic message for LDAP auth 2019-03-08 15:45:46 +00:00
McMatts
0bfde82040 Move space label to main view from sidebar
Solves long-standing debate about removing meta from sidebar, and keeping the sidebar for navigation/filtering.
2019-03-08 15:45:12 +00:00
McMatts
e6e5f75ee7 Set doc revised date when section events occur
Supports new space recency filters.
2019-03-08 15:43:17 +00:00
McMatts
eb9501014d Make WYSIWYG font size & line height to REM
End of pixels @harveykandola
2019-03-06 14:11:37 +00:00
Harvey Kandola
e35639502d
Update README.md 2019-03-05 12:34:39 +00:00
Harvey Kandola
3db4981181
Update README.md 2019-03-05 12:33:16 +00:00
Harvey Kandola
3206eb4176
Update README.md 2019-03-05 12:32:45 +00:00
Harvey Kandola
576e1beade
Update README.md 2019-03-05 12:31:36 +00:00
Harvey Kandola
54eefc5132
Update README.md 2019-03-05 12:30:59 +00:00
McMatts
fbb1e334f8 Bump version to 2.1.1 2019-03-04 20:49:14 +00:00
McMatts
bb73655327 Write to stderr when onboarding shared space fails 2019-03-04 19:00:15 +00:00
McMatts
8c2febd636 Fixed lodash conversion issue with _.rest to _.drop
Solves move down on table of contents
2019-03-04 18:59:44 +00:00
McMatts
395008d06d Allow hyphen in tag names with Firefox
Keycodes are different across browsers. :|
2019-03-04 18:59:06 +00:00
Harvey Kandola
d009e4ed2a Make setup process use new isEmail helper 2019-03-03 18:03:48 +00:00
Harvey Kandola
54bf258c61 Bump version + bundle web assets 2019-03-03 16:00:25 +00:00
Harvey Kandola
8332e8a03d Send back saved label after edits 2019-03-03 15:59:13 +00:00
Harvey Kandola
a6f8be2928 revert back to ember-ajax 3.x series 2019-03-03 15:58:58 +00:00
Harvey Kandola
cbd9fddcfe Space out save button 2019-03-03 15:58:39 +00:00
Harvey Kandola
4013b5ca03 Fix typo for self-host subdomain help info 2019-03-03 15:58:28 +00:00
Harvey Kandola
24b1326c31 Remove un-needed success alerts 2019-03-03 15:58:03 +00:00
Harvey Kandola
566807bc14 Replaced underscore.js & is.js with lodash.js 2019-03-03 13:10:04 +00:00
Harvey Kandola
df8e843bf5 Enabled TLS 1.3 support
IE11 might have issues so we don't support IE11.
2019-03-01 17:15:15 +00:00
Harvey Kandola
4d0de69489 New release prep 2019-03-01 17:13:29 +00:00
Harvey Kandola
25c247e99b Introduce new Tabular editor with CSV import support
Closes #211 and #202

An all-new tabular editor has been added -- this replaces the previous tabular editor.

Better formatting options.

CSV data can also be imported straight into the table.
2019-03-01 14:28:18 +00:00
Harvey Kandola
ed99b0c9f3 JS linting 2019-03-01 14:25:32 +00:00
Harvey Kandola
8b0bb456d9 Format Blockquote
Fixes #217 for both Rich Text and Markdown section types.
2019-03-01 12:59:10 +00:00
Harvey Kandola
e438542cab Upgrade ember-ajax to v4.0.1
Was v3.1.0
2019-02-28 17:47:12 +00:00
Harvey Kandola
553c17181e Upgrade EmberJS to v3.8.0 2019-02-28 17:46:02 +00:00
Harvey Kandola
9b06ddecb5 Refactor content linking code flow
Fixed EmberJS deprecation warnings by removing usage of observers.

Fixed edge case bug for repeated content link insertion modal clicks.
2019-02-28 15:11:46 +00:00
Harvey Kandola
3fd1d793a3 Standardize text color for PRE & CODE blocks 2019-02-28 14:16:51 +00:00
Harvey Kandola
fc17ea5225 Standardize style of PRE & CODE blocks
For Markdown, styling was not consistent between inline blocks and full code blocks.

Fixes #203
2019-02-28 13:58:59 +00:00
Harvey Kandola
f47f09661f Improve search by tag for MySQL
Fixes #214

MySQL requires specific full text search queries.
2019-02-28 13:39:53 +00:00
Harvey Kandola
4b7d4cf872 Ensure non-negative space summary counts
Sometimes users directly manipulate the database, so space-level counts don't match.

We ensure that counts don't go negative.
2019-02-28 12:42:42 +00:00
Harvey Kandola
c108d0eb30 Handle wrap-around for add section popup 2019-02-28 12:32:22 +00:00
Harvey Kandola
1f5221ffa0 Add install docs link to setup wizard 2019-02-28 12:31:57 +00:00
Harvey Kandola
a888b12ad1 Make TinyMCE Prism integration use full CSS path 2019-02-28 12:31:28 +00:00
Harvey Kandola
2510972a83 Allow SMTP config to specify FQDN of sending server
/cc @backba
2019-02-28 12:31:02 +00:00
Harvey Kandola
560f786b8b Merge branch 'master' of https://github.com/documize/community 2019-02-27 14:50:43 +00:00
Harvey Kandola
af641b93f1 Show doc links on setup screen 2019-02-27 14:50:42 +00:00
Harvey Kandola
f4fa63359f
Update README.md 2019-02-27 13:58:18 +00:00
Harvey Kandola
f828583b49 Allow admins to set subdomain for their instance
Fixes #209

Admins can see and set subdomain for their instance.

Self-host customers only!
2019-02-27 13:49:59 +00:00
Harvey Kandola
af9bc25660 PlantUML to force UTF-8 encoding
Should Fix #213
2019-02-27 13:10:19 +00:00
Harvey Kandola
66003dac21 Logo fetching to match on domain mismatch
Fixes #209

Sometimes people use a subdomain like docs.example.org but the backend does not reflect the domain, e.g. dmz_org.c_domain is empty.

So we fall back to loading the logo for an empty c_domain value as well.
2019-02-26 11:55:08 +00:00
sauls8t
d4f6694933 Change links to use www.documize.com 2019-02-24 18:41:18 +00:00
sauls8t
27030a0dc2 Update README.md 2019-02-22 18:28:22 +00:00
sauls8t
43f515a1f9 README update 2019-02-22 16:47:26 +00:00
sauls8t
9aaea9492a Fix issue with backup/restore of space labels
Closed #206
2019-02-21 10:56:15 +00:00
1803 changed files with 388215 additions and 104888 deletions


@@ -1,3 +1,6 @@
.DS_Store
.git
bin
.idea
selfcert
gui/dist-prod


@@ -1,9 +0,0 @@
root = true
[*]
indent_style = space
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
indent_size = 4
insert_final_newline = true

3
.gitignore vendored

@@ -18,6 +18,7 @@ _convert
bin/*
dist/*
embed/bindata/*
edition/static/*
gui/dist/*
gui/dist-prod/*
@@ -52,7 +53,6 @@ _testmain.go
node_modules
# Misc.
build
plugin-msword/plugin-msword
plugin-msword/plugin-msword-osx
npm-debug.log
@@ -70,3 +70,4 @@ bower.json.ember-try
package.json.ember-try
embed/bindata_assetfs.go
dmz-backup*.zip
*.conf


@@ -1,62 +0,0 @@
{
"css": {
"indent_size": 4,
"indent_level": 0,
"indent_with_tabs": true,
"preserve_newlines": true,
"max_preserve_newlines": 2,
"newline_between_rules": true,
"selector_separator_newlines": true
},
"scss": {
"indent_size": 4,
"indent_level": 0,
"indent_with_tabs": true,
"preserve_newlines": true,
"max_preserve_newlines": 2,
"newline_between_rules": true,
"selector_separator_newlines": true
},
"html": {
"indent_size": 4,
"indent_level": 0,
"indent_with_tabs": true,
"preserve_newlines": true,
"max_preserve_newlines": 2,
"wrap_line_length": 0,
"indent_handlebars": true,
"indent_inner_html": false,
"indent_scripts": "keep"
},
"hbs": {
"indent_size": 4,
"indent_level": 0,
"indent_with_tabs": true,
"max_preserve_newlines": 2,
"preserve_newlines": true,
"wrap_line_length": 0
},
"js": {
"indent_size": 4,
"indent_level": 0,
"indent_with_tabs": true,
"preserve_newlines": true,
"wrap_line_length": 0,
"break_chained_methods": false,
"max_preserve_newlines": 2,
"jslint_happy": true,
"brace_style": "collapse-preserve-inline",
"keep_function_indentation": false,
"space_after_anon_function": false,
"space_before_anon_function": false,
"space_before_conditional": true,
"space_in_empty_paren": false,
"space_before_func_paren": false,
"space_in_paren": false
},
"sql": {
"indent_size": 4,
"indent_level": 0,
"indent_with_tabs": true
}
}


@@ -1,3 +0,0 @@
gui/public/tinymce/**
gui/public/tinymce/
gui/public/tinymce


@@ -1,3 +0,0 @@
{
"esversion":6
}

32
Dockerfile Normal file

@@ -0,0 +1,32 @@
FROM node:16-alpine as frontbuilder
WORKDIR /go/src/github.com/documize/community/gui
COPY ./gui /go/src/github.com/documize/community/gui
RUN npm --network-timeout=100000 install
RUN npm run build -- --environment=production --output-path dist-prod --suppress-sizes true
FROM golang:1.21-alpine as builder
WORKDIR /go/src/github.com/documize/community
COPY . /go/src/github.com/documize/community
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/assets /go/src/github.com/documize/community/edition/static/public/assets
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/codemirror /go/src/github.com/documize/community/edition/static/public/codemirror
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/prism /go/src/github.com/documize/community/edition/static/public/prism
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/sections /go/src/github.com/documize/community/edition/static/public/sections
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/tinymce /go/src/github.com/documize/community/edition/static/public/tinymce
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/pdfjs /go/src/github.com/documize/community/edition/static/public/pdfjs
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/i18n /go/src/github.com/documize/community/edition/static/public/i18n
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/*.* /go/src/github.com/documize/community/edition/static/
COPY --from=frontbuilder /go/src/github.com/documize/community/gui/dist-prod/i18n/*.json /go/src/github.com/documize/community/edition/static/i18n/
COPY domain/mail/*.html /go/src/github.com/documize/community/edition/static/mail/
COPY core/database/templates/*.html /go/src/github.com/documize/community/edition/static/
COPY core/database/scripts/mysql/*.sql /go/src/github.com/documize/community/edition/static/scripts/mysql/
COPY core/database/scripts/postgresql/*.sql /go/src/github.com/documize/community/edition/static/scripts/postgresql/
COPY core/database/scripts/sqlserver/*.sql /go/src/github.com/documize/community/edition/static/scripts/sqlserver/
COPY domain/onboard/*.json /go/src/github.com/documize/community/edition/static/onboard/
RUN env GODEBUG=tls13=1 go build -mod=vendor -o bin/documize-community ./edition/community.go
# build release image
FROM alpine:3.16
RUN apk add --no-cache ca-certificates
COPY --from=builder /go/src/github.com/documize/community/bin/documize-community /documize
EXPOSE 5001
ENTRYPOINT [ "/documize" ]

301
Gopkg.lock generated
View file

@ -1,301 +0,0 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
digest = "1:606d068450c82b9ddaa21de992f73563754077f0f411235cdfe71d0903a268c3"
name = "github.com/codegangsta/negroni"
packages = ["."]
pruneopts = "UT"
revision = "5dbbc83f748fc3ad38585842b0aedab546d0ea1e"
version = "v0.3.0"
[[projects]]
digest = "1:217f778e19b8d206112c21d21a7cc72ca3cb493b67631680a2324bc50335d432"
name = "github.com/dgrijalva/jwt-go"
packages = ["."]
pruneopts = "UT"
revision = "dbeaa9332f19a944acb5736b4456cfcc02140e29"
version = "v3.1.0"
[[projects]]
digest = "1:39c2113f3a89585666e6f973650cff186b2d06deb4aa202c88addb87b0a201db"
name = "github.com/documize/blackfriday"
packages = ["."]
pruneopts = "UT"
revision = "cadec560ec52d93835bf2f15bd794700d3a2473b"
version = "v2.0.0"
[[projects]]
branch = "master"
digest = "1:04bfeb11ea882e0a0867828e54374c066a1368f8da53bb1bbc16a9886967303a"
name = "github.com/documize/glick"
packages = ["."]
pruneopts = "UT"
revision = "a8ccbef88237fcafe9cef3c9aee7ad83d0e132f9"
[[projects]]
branch = "master"
digest = "1:2405d7a1e936e015b07c1c88acccc30d7f2e917b1b5acea08d06d116b8657a5c"
name = "github.com/documize/html-diff"
packages = ["."]
pruneopts = "UT"
revision = "f61c192c7796644259832ef705c49259797e7fff"
[[projects]]
digest = "1:0ae2e1b2d4cdff4834aa28ce2e33a7b6de91e10150e3647fe1b9fd63a51b39ce"
name = "github.com/documize/slug"
packages = ["."]
pruneopts = "UT"
revision = "e9f42fa127660e552d0ad2b589868d403a9be7c6"
version = "v1.1.1"
[[projects]]
digest = "1:f4f6279cb37479954644babd8f8ef00584ff9fa63555d2c6718c1c3517170202"
name = "github.com/elazarl/go-bindata-assetfs"
packages = ["."]
pruneopts = "UT"
revision = "30f82fa23fd844bd5bb1e5f216db87fd77b5eb43"
version = "v1.0.0"
[[projects]]
digest = "1:ca82a3b99694824c627573c2a76d0e49719b4a9c02d1d85a2ac91f1c1f52ab9b"
name = "github.com/fatih/structs"
packages = ["."]
pruneopts = "UT"
revision = "a720dfa8df582c51dee1b36feabb906bde1588bd"
version = "v1.0"
[[projects]]
digest = "1:adea5a94903eb4384abef30f3d878dc9ff6b6b5b0722da25b82e5169216dfb61"
name = "github.com/go-sql-driver/mysql"
packages = ["."]
pruneopts = "UT"
revision = "d523deb1b23d913de5bdada721a6071e71283618"
version = "v1.4.0"
[[projects]]
digest = "1:ffc060c551980d37ee9e428ef528ee2813137249ccebb0bfc412ef83071cac91"
name = "github.com/golang/protobuf"
packages = ["proto"]
pruneopts = "UT"
revision = "925541529c1fa6821df4e44ce2723319eb2be768"
version = "v1.0.0"
[[projects]]
digest = "1:51bee9f1987dcdb9f9a1b4c20745d78f6bf6f5f14ad4e64ca883eb64df4c0045"
name = "github.com/google/go-github"
packages = ["github"]
pruneopts = "UT"
revision = "e48060a28fac52d0f1cb758bc8b87c07bac4a87d"
version = "v15.0.0"
[[projects]]
branch = "master"
digest = "1:a63cff6b5d8b95638bfe300385d93b2a6d9d687734b863da8e09dc834510a690"
name = "github.com/google/go-querystring"
packages = ["query"]
pruneopts = "UT"
revision = "53e6ce116135b80d037921a7fdd5138cf32d7a8a"
[[projects]]
digest = "1:160eabf7a69910fd74f29c692718bc2437c1c1c7d4c9dea9712357752a70e5df"
name = "github.com/gorilla/context"
packages = ["."]
pruneopts = "UT"
revision = "1ea25387ff6f684839d82767c1733ff4d4d15d0a"
version = "v1.1"
[[projects]]
digest = "1:88aa9e326e2bd6045a46e00a922954b3e1a9ac5787109f49ac85366df370e1e5"
name = "github.com/gorilla/mux"
packages = ["."]
pruneopts = "UT"
revision = "53c1911da2b537f792e7cafcb446b05ffe33b996"
version = "v1.6.1"
[[projects]]
branch = "master"
digest = "1:6c41d4f998a03b6604227ccad36edaed6126c397e5d78709ef4814a1145a6757"
name = "github.com/jmoiron/sqlx"
packages = [
".",
"reflectx",
]
pruneopts = "UT"
revision = "d161d7a76b5661016ad0b085869f77fd410f3e6a"
[[projects]]
digest = "1:8ef506fc2bb9ced9b151dafa592d4046063d744c646c1bbe801982ce87e4bc24"
name = "github.com/lib/pq"
packages = [
".",
"oid",
]
pruneopts = "UT"
revision = "4ded0e9383f75c197b3a2aaa6d590ac52df6fd79"
version = "v1.0.0"
[[projects]]
branch = "master"
digest = "1:8213aea9ec57afac7c765f9127bb3a5677866e03c0d3815f236045f16d5bc468"
name = "github.com/mb0/diff"
packages = ["."]
pruneopts = "UT"
revision = "d8d9a906c24d7b0ee77287e0463e5ca7f026032e"
[[projects]]
branch = "master"
digest = "1:0e1e5f960c58fdc677212fcc70e55042a0084d367623e51afbdb568963832f5d"
name = "github.com/nu7hatch/gouuid"
packages = ["."]
pruneopts = "UT"
revision = "179d4d0c4d8d407a32af483c2354df1d2c91e6c3"
[[projects]]
digest = "1:40e195917a951a8bf867cd05de2a46aaf1806c50cf92eebf4c16f78cd196f747"
name = "github.com/pkg/errors"
packages = ["."]
pruneopts = "UT"
revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
version = "v0.8.0"
[[projects]]
branch = "master"
digest = "1:e6a29574542c00bb18adb1bfbe629ff88c468c2af2e2e953d3e58eda07165086"
name = "github.com/rainycape/unidecode"
packages = ["."]
pruneopts = "UT"
revision = "cb7f23ec59bec0d61b19c56cd88cee3d0cc1870c"
[[projects]]
branch = "master"
digest = "1:def689e73e9252f6f7fe66834a76751a41b767e03daab299e607e7226c58a855"
name = "github.com/shurcooL/sanitized_anchor_name"
packages = ["."]
pruneopts = "UT"
revision = "86672fcb3f950f35f2e675df2240550f2a50762f"
[[projects]]
digest = "1:821c90494c34add2aa5f7c3b894f55dd08741acbb390901663050449b777c39a"
name = "github.com/trivago/tgo"
packages = [
"tcontainer",
"treflect",
]
pruneopts = "UT"
revision = "e4d1ddd28c17dd89ed26327cf69fded22060671b"
version = "v1.0.1"
[[projects]]
branch = "master"
digest = "1:1ecf2a49df33be51e757d0033d5d51d5f784f35f68e5a38f797b2d3f03357d71"
name = "golang.org/x/crypto"
packages = [
"bcrypt",
"blowfish",
]
pruneopts = "UT"
revision = "650f4a345ab4e5b245a3034b110ebc7299e68186"
[[projects]]
branch = "master"
digest = "1:ac7eaa5f1179480f517d32831225215cc20940152d66be29f3d5204ea15d425f"
name = "golang.org/x/net"
packages = [
"context",
"context/ctxhttp",
"html",
"html/atom",
]
pruneopts = "UT"
revision = "f5dfe339be1d06f81b22525fe34671ee7d2c8904"
[[projects]]
branch = "master"
digest = "1:fe84cb4abe7f53047ac44cf10d917d707a718711e146c9239700e4c8cc94a891"
name = "golang.org/x/oauth2"
packages = [
".",
"internal",
]
pruneopts = "UT"
revision = "543e37812f10c46c622c9575afd7ad22f22a12ba"
[[projects]]
digest = "1:f40806967647e80fc51b941a586afefea6058592692c0bbfb3be7ea6b2b2a82d"
name = "google.golang.org/appengine"
packages = [
"cloudsql",
"internal",
"internal/base",
"internal/datastore",
"internal/log",
"internal/remote_api",
"internal/urlfetch",
"urlfetch",
]
pruneopts = "UT"
revision = "150dc57a1b433e64154302bdc40b6bb8aefa313a"
version = "v1.0.0"
[[projects]]
branch = "v3"
digest = "1:7388652e2215a3f45d341d58766ed58317971030eb1cbd75f005f96ace8e9196"
name = "gopkg.in/alexcesaro/quotedprintable.v3"
packages = ["."]
pruneopts = "UT"
revision = "2caba252f4dc53eaf6b553000885530023f54623"
[[projects]]
digest = "1:ec2b97c119fc66f96b421f8798deb2f87cb4a5ee81cafeaf9b55420d035f8fea"
name = "gopkg.in/andygrunwald/go-jira.v1"
packages = ["."]
pruneopts = "UT"
revision = "0298784c4606cdf01e99644da115863c052a737c"
version = "v1.5.0"
[[projects]]
digest = "1:81e1c5cee195fca5de06e2540cb63eea727a850b7e5c213548e7f81521c97a57"
name = "gopkg.in/asn1-ber.v1"
packages = ["."]
pruneopts = "UT"
revision = "379148ca0225df7a432012b8df0355c2a2063ac0"
version = "v1.2"
[[projects]]
digest = "1:93aaeb913621a3a53aaa78592c00f46d63e3bb0ea76e2d9b07327b50959a5778"
name = "gopkg.in/ldap.v2"
packages = ["."]
pruneopts = "UT"
revision = "bb7a9ca6e4fbc2129e3db588a34bc970ffe811a9"
version = "v2.5.1"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
input-imports = [
"github.com/codegangsta/negroni",
"github.com/dgrijalva/jwt-go",
"github.com/documize/blackfriday",
"github.com/documize/glick",
"github.com/documize/html-diff",
"github.com/documize/slug",
"github.com/elazarl/go-bindata-assetfs",
"github.com/go-sql-driver/mysql",
"github.com/google/go-github/github",
"github.com/gorilla/mux",
"github.com/jmoiron/sqlx",
"github.com/lib/pq",
"github.com/nu7hatch/gouuid",
"github.com/pkg/errors",
"golang.org/x/crypto/bcrypt",
"golang.org/x/net/context",
"golang.org/x/net/html",
"golang.org/x/net/html/atom",
"golang.org/x/oauth2",
"gopkg.in/alexcesaro/quotedprintable.v3",
"gopkg.in/andygrunwald/go-jira.v1",
"gopkg.in/ldap.v2",
]
solver-name = "gps-cdcl"
solver-version = 1

View file

@ -1,98 +0,0 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
#
# [prune]
# non-go = false
# go-tests = true
# unused-packages = true
[[constraint]]
name = "github.com/codegangsta/negroni"
version = "0.3.0"
[[constraint]]
name = "github.com/dgrijalva/jwt-go"
version = "3.1.0"
[[constraint]]
name = "github.com/documize/blackfriday"
version = "2.0.0"
[[constraint]]
branch = "master"
name = "github.com/documize/glick"
[[constraint]]
branch = "master"
name = "github.com/documize/html-diff"
[[constraint]]
name = "github.com/elazarl/go-bindata-assetfs"
version = "1.0.0"
[[constraint]]
name = "github.com/go-sql-driver/mysql"
version = "1.3.0"
[[constraint]]
name = "github.com/google/go-github"
version = "15.0.0"
[[constraint]]
name = "github.com/gorilla/mux"
version = "1.6.1"
[[constraint]]
branch = "master"
name = "github.com/jmoiron/sqlx"
[[constraint]]
branch = "master"
name = "github.com/nu7hatch/gouuid"
[[constraint]]
name = "github.com/pkg/errors"
version = "0.8.0"
[[constraint]]
branch = "master"
name = "golang.org/x/crypto"
[[constraint]]
branch = "master"
name = "golang.org/x/net"
[[constraint]]
branch = "master"
name = "golang.org/x/oauth2"
[prune]
go-tests = true
unused-packages = true
[[constraint]]
name = "github.com/documize/slug"
version = "1.1.1"
[[constraint]]
name = "gopkg.in/andygrunwald/go-jira.v1"
version = "1.5.0"

2572
NOTICES.md Normal file

File diff suppressed because it is too large

143
README.md
View file

@ -1,135 +1,110 @@
> We provide frequent product releases ensuring self-host customers enjoy the same features as our cloud/SaaS customers.
>
> Harvey Kandola, CEO & Founder, Documize Inc.
Documize Community is an open source, modern, self-hosted, enterprise-grade knowledge management solution.
## The Mission
- Built for technical and non-technical users
- Designed to unify both customer-facing and internal documentation
- Organization through labels, spaces and categories
To bring software development inspired features to the world of documenting -- refactoring, importing, testing, linting, metrics, PRs, versioning....
It's built with Golang + EmberJS and compiled down to a single executable binary that is available for Linux, Windows and Mac.
## What is it?
All you need to provide is your database -- PostgreSQL, Microsoft SQL Server or any MySQL variant.
Documize is an intelligent document environment (IDE) for authoring, tracking and delivering documentation -- everything you need in one place.
## Why should I care?
Because maybe like us you're tired of:
* juggling WYSIWYG editors, wiki software and other document related solutions
* playing email tennis with documents, contributions, versions and feedback
* sharing not-so-secure folders with external participants
Sound familiar? Read on.
## Who is it for?
Anyone who wants a single place for any kind of document.
Anyone who wants to loop in external participants with complete security.
Anyone who wishes documentation and knowledge capture worked like agile software development.
Anyone who knows that nested folders fail miserably.
Anyone who wants to move on from wiki software.
## What's different about Documize?
Sane organization through personal, team and public spaces.
Granular document access control via categories.
Section based approach to document construction.
Reusable templates and content blocks.
Documentation related tasking and delegation.
Integrations for embedding SaaS data within documents, zero add-on/marketplace fees.
## What does it look like?
![Documize](screenshot-1.png "Documize")
[See it live](https://docs.documize.com)
![Documize Community](https://github.com/documize/community/blob/master/screenshot.png?raw=true)
## Latest Release
[Community Edition: v2.0.4](https://github.com/documize/community/releases)
[Community edition: v5.13.0](https://github.com/documize/community/releases)
[Enterprise Edition: v2.0.4](https://documize.com/downloads)
[Community+ edition: v5.13.0](https://www.documize.com/community/get-started)
## OS support
The Community+ edition is the "enterprise" offering with advanced capabilities and customer support:
Documize can be installed and run on:
- content approval workflows
- content organization by label, space and category
- content version management
- content lifecycle management
- content feedback capture
- content PDF export
- analytics and reporting
- activity streams
- audit logs
- actions assignments
- product support
The Community+ edition is [free](https://www.documize.com/community/get-started) for the first five users -- thereafter pricing starts at just $900 annually for 100 users.
## OS Support
- Linux
- Windows
- macOS
- Raspberry Pi (ARM build)
Heck, Documize will probably run just fine on a Raspberry Pi 3.
Support for AMD and ARM 64 bit architectures.
## Database Support
Documize supports the following database systems:
For all database types, Full-Text Search (FTS) support is mandatory.
- PostgreSQL (v9.6+)
- Microsoft SQL Server (2016+ with FTS)
- Microsoft SQL Azure (v12+)
- MySQL (v5.7.10+ and v8.0.12+)
- Percona (v5.7.16-10+)
- MariaDB (10.3.0+)
Coming soon: Microsoft SQL Server 2017 (Linux/Windows).
## Browser Support
Documize supports the following (evergreen) browsers:
- Chrome
- Firefox
- Chrome
- Safari
- Microsoft Edge
- Brave
- Vivaldi
- Opera
- Microsoft Edge (v42+)
## Technology Stack
Documize is built with the following:
- Ember JS (v3.7.2)
- Go (v1.11.5)
- Go (v1.23.4)
- Ember JS (v3.12.0)
## Authentication Options
Besides email/password login, you can also leverage the following options.
Besides email/password login, you can also authenticate via:
### LDAP / Active Directory
* LDAP
* Active Directory
* Red Hat Keycloak
* Central Authentication Service (CAS)
Connect and sync Documize with any LDAP v3 compliant provider including Microsoft Active Directory.
When using LDAP/Active Directory, you can enable dual-authentication with email/password.
### Keycloak Integration
## Localization
Documize provides out-of-the-box integration with [Redhat Keycloak](http://www.keycloak.org) for open source identity and access management.
Languages supported out-of-the-box:
Connect and authenticate with LDAP, Active Directory or leverage Social Login.
- English
- German
- French
- Chinese (中文)
- Portuguese (Brazil) (Português - Brasil)
- Japanese (日本語)
- Italian
- Spanish Argentinian
<https://docs.documize.com>
PR's welcome for additional languages.
### Auth0 Compatible
## Product/Technical Support
Documize is compatible with Auth0 identity as a service.
For both Community and Community+ editions, please contact our help desk for product help, suggestions and other enquiries.
[![JWT Auth for open source projects](https://cdn.auth0.com/oss/badges/a0-badge-dark.png)](https://auth0.com/?utm_source=oss&utm_medium=gp&utm_campaign=oss)
<support@documize.com>
Open Source Identity and Access Management
## Developer's Note
We try to follow sound advice when writing commit messages:
https://chris.beams.io/posts/git-commit/
We aim to respond within two working days.
## The Legal Bit
<https://documize.com>
<https://www.documize.com>
This software (Documize Community Edition) is licensed under GNU AGPL v3 <http://www.gnu.org/licenses/agpl-3.0.en.html>. You can operate outside the AGPL restrictions by purchasing Documize Enterprise Edition and obtaining a commercial license by contacting <sales@documize.com>. Documize® is a registered trade mark of Documize Inc.
This software (Documize Community edition) is licensed under GNU AGPL v3 <http://www.gnu.org/licenses/agpl-3.0.en.html>.
Documize Community uses other open source components and we acknowledge them in [NOTICES](NOTICES.md)

View file

@ -7,56 +7,61 @@ echo "Building Ember assets..."
cd gui
call ember b -o dist-prod/ --environment=production
::Call allows the rest of the file to run
echo "Copying Ember assets..."
cd ..
rd /s /q embed\bindata\public
mkdir embed\bindata\public
rd /s /q edition\static\public
mkdir edition\static\public
echo "Copying Ember assets folder"
robocopy /e /NFL /NDL /NJH gui\dist-prod\assets embed\bindata\public\assets
robocopy /e /NFL /NDL /NJH gui\dist-prod\assets edition\static\public\assets
echo "Copying Ember codemirror folder"
robocopy /e /NFL /NDL /NJH gui\dist-prod\codemirror embed\bindata\public\codemirror
robocopy /e /NFL /NDL /NJH gui\dist-prod\codemirror edition\static\public\codemirror
echo "Copying Ember prism folder"
robocopy /e /NFL /NDL /NJH gui\dist-prod\prism embed\bindata\public\prism
robocopy /e /NFL /NDL /NJH gui\dist-prod\prism edition\static\public\prism
echo "Copying Ember tinymce folder"
robocopy /e /NFL /NDL /NJH gui\dist-prod\tinymce embed\bindata\public\tinymce
robocopy /e /NFL /NDL /NJH gui\dist-prod\tinymce edition\static\public\tinymce
echo "Copying Ember pdfjs folder"
robocopy /e /NFL /NDL /NJH gui\dist-prod\pdfjs edition\static\public\pdfjs
echo "Copying Ember sections folder"
robocopy /e /NFL /NDL /NJH gui\dist-prod\sections embed\bindata\public\sections
robocopy /e /NFL /NDL /NJH gui\dist-prod\sections edition\static\public\sections
echo "Copying i18n folder"
robocopy /e /NFL /NDL /NJH gui\dist-prod\i18n edition\static\public\i18n
copy gui\dist-prod\*.* embed\bindata
copy gui\dist-prod\favicon.ico embed\bindata\public
copy gui\dist-prod\manifest.json embed\bindata\public
echo "Copying static files"
copy gui\dist-prod\*.* edition\static
rd /s /q embed\bindata\mail
mkdir embed\bindata\mail
copy domain\mail\*.html embed\bindata\mail
copy core\database\templates\*.html embed\bindata
echo "Copying favicon.ico"
copy gui\dist-prod\favicon.ico edition\static\public
rd /s /q embed\bindata\scripts
mkdir embed\bindata\scripts
mkdir embed\bindata\scripts\mysql
mkdir embed\bindata\scripts\postgresql
echo "Copying manifest.json"
copy gui\dist-prod\manifest.json edition\static\public
echo "Copying mail templates"
rd /s /q edition\static\mail
mkdir edition\static\mail
copy domain\mail\*.html edition\static\mail
echo "Copying database templates"
copy core\database\templates\*.html edition\static
rd /s /q edition\static\i18n
mkdir edition\static\i18n
robocopy /e /NFL /NDL /NJH gui\dist-prod\i18n edition\static\i18n *.json
rd /s /q edition\static\scripts
mkdir edition\static\scripts
mkdir edition\static\scripts\mysql
mkdir edition\static\scripts\postgresql
mkdir edition\static\scripts\sqlserver
echo "Copying database scripts folder"
robocopy /e /NFL /NDL /NJH core\database\scripts\mysql embed\bindata\scripts\mysql
robocopy /e /NFL /NDL /NJH core\database\scripts\postgresql embed\bindata\scripts\postgresql
robocopy /e /NFL /NDL /NJH core\database\scripts\mysql edition\static\scripts\mysql
robocopy /e /NFL /NDL /NJH core\database\scripts\postgresql edition\static\scripts\postgresql
robocopy /e /NFL /NDL /NJH core\database\scripts\sqlserver edition\static\scripts\sqlserver
echo "Generating in-memory static assets..."
go get -u github.com/jteeuwen/go-bindata/...
go get -u github.com/elazarl/go-bindata-assetfs/...
cd embed
go generate
cd ..
rd /s /q edition\static\onboard
mkdir edition\static\onboard
robocopy /e /NFL /NDL /NJH domain\onboard edition\static\onboard *.json
echo "Compiling Windows"
set GOOS=windows
go build -gcflags=-trimpath=%GOPATH% -asmflags=-trimpath=%GOPATH% -o bin/documize-community-windows-amd64.exe edition/community.go
echo "Compiling Linux"
set GOOS=linux
go build -gcflags=-trimpath=%GOPATH% -asmflags=-trimpath=%GOPATH% -o bin/documize-community-linux-amd64 edition/community.go
echo "Compiling Darwin"
set GOOS=darwin
go build -gcflags=-trimpath=%GOPATH% -asmflags=-trimpath=%GOPATH% -o bin/documize-community-darwin-amd64 edition/community.go
go build -mod=vendor -trimpath -gcflags="all=-trimpath=$GOPATH" -o bin/documize-community-windows-amd64.exe edition/community.go

View file

@ -8,56 +8,65 @@ echo "Build process started $NOW"
echo "Building Ember assets..."
cd gui
# export NODE_OPTIONS=--openssl-legacy-provider
ember build ---environment=production --output-path dist-prod --suppress-sizes true
cd ..
echo "Copying Ember assets..."
rm -rf embed/bindata/public
mkdir -p embed/bindata/public
cp -r gui/dist-prod/assets embed/bindata/public
cp -r gui/dist-prod/codemirror embed/bindata/public/codemirror
cp -r gui/dist-prod/prism embed/bindata/public/prism
cp -r gui/dist-prod/sections embed/bindata/public/sections
cp -r gui/dist-prod/tinymce embed/bindata/public/tinymce
cp gui/dist-prod/*.* embed/bindata
cp gui/dist-prod/favicon.ico embed/bindata/public
cp gui/dist-prod/manifest.json embed/bindata/public
rm -rf edition/static/public
mkdir -p edition/static/public
cp -r gui/dist-prod/assets edition/static/public
cp -r gui/dist-prod/codemirror edition/static/public/codemirror
cp -r gui/dist-prod/prism edition/static/public/prism
cp -r gui/dist-prod/sections edition/static/public/sections
cp -r gui/dist-prod/tinymce edition/static/public/tinymce
cp -r gui/dist-prod/pdfjs edition/static/public/pdfjs
cp -r gui/dist-prod/i18n edition/static/public/i18n
cp gui/dist-prod/*.* edition/static
cp gui/dist-prod/favicon.ico edition/static/public
cp gui/dist-prod/manifest.json edition/static/public
rm -rf embed/bindata/mail
mkdir -p embed/bindata/mail
cp domain/mail/*.html embed/bindata/mail
cp core/database/templates/*.html embed/bindata
rm -rf edition/static/mail
mkdir -p edition/static/mail
cp domain/mail/*.html edition/static/mail
cp core/database/templates/*.html edition/static
rm -rf embed/bindata/scripts
mkdir -p embed/bindata/scripts
mkdir -p embed/bindata/scripts/mysql
mkdir -p embed/bindata/scripts/postgresql
cp -r core/database/scripts/mysql/*.sql embed/bindata/scripts/mysql
cp -r core/database/scripts/postgresql/*.sql embed/bindata/scripts/postgresql
rm -rf edition/static/i18n
mkdir -p edition/static/i18n
cp -r gui/dist-prod/i18n/*.json edition/static/i18n
echo "Generating in-memory static assets..."
# go get -u github.com/jteeuwen/go-bindata/...
# go get -u github.com/elazarl/go-bindata-assetfs/...
cd embed
go generate
rm -rf edition/static/scripts
mkdir -p edition/static/scripts
mkdir -p edition/static/scripts/mysql
mkdir -p edition/static/scripts/postgresql
mkdir -p edition/static/scripts/sqlserver
cp -r core/database/scripts/mysql/*.sql edition/static/scripts/mysql
cp -r core/database/scripts/postgresql/*.sql edition/static/scripts/postgresql
cp -r core/database/scripts/sqlserver/*.sql edition/static/scripts/sqlserver
echo "Compiling app..."
cd ..
for arch in amd64 ; do
for os in darwin linux windows ; do
if [ "$os" == "windows" ] ; then
echo "Compiling documize-community-$os-$arch.exe"
env GOOS=$os GOARCH=$arch go build -gcflags="all=-trimpath=$GOPATH" -o bin/documize-community-$os-$arch.exe ./edition/community.go
else
echo "Compiling documize-community-$os-$arch"
env GOOS=$os GOARCH=$arch go build -gcflags="all=-trimpath=$GOPATH" -o bin/documize-community-$os-$arch ./edition/community.go
fi
done
done
rm -rf edition/static/onboard
mkdir -p edition/static/onboard
cp -r domain/onboard/*.json edition/static/onboard
echo "Compiling for macOS Intel..."
env GOOS=darwin GOARCH=amd64 go build -mod=vendor -trimpath -o bin/documize-community-darwin-amd64 ./edition/community.go
echo "Compiling for macOS ARM..."
env GOOS=darwin GOARCH=arm64 go build -mod=vendor -trimpath -o bin/documize-community-darwin-arm64 ./edition/community.go
echo "Compiling for Windows AMD..."
env GOOS=windows GOARCH=amd64 go build -mod=vendor -trimpath -o bin/documize-community-windows-amd64.exe ./edition/community.go
echo "Compiling for Linux AMD..."
env GOOS=linux GOARCH=amd64 go build -mod=vendor -trimpath -o bin/documize-community-linux-amd64 ./edition/community.go
echo "Compiling for Linux ARM..."
env GOOS=linux GOARCH=arm go build -mod=vendor -trimpath -o bin/documize-community-linux-arm ./edition/community.go
echo "Compiling for Linux ARM64..."
env GOOS=linux GOARCH=arm64 go build -mod=vendor -trimpath -o bin/documize-community-linux-arm64 ./edition/community.go
echo "Compiling for FreeBSD ARM64..."
env GOOS=freebsd GOARCH=arm64 go build -mod=vendor -trimpath -o bin/documize-community-freebsd-arm64 ./edition/community.go
echo "Compiling for FreeBSD AMD64..."
env GOOS=freebsd GOARCH=amd64 go build -mod=vendor -trimpath -o bin/documize-community-freebsd-amd64 ./edition/community.go
echo "Finished."
# CGO_ENABLED=0 GOOS=linux go build -a -ldflags="-s -w" -installsuffix cgo
# go build -ldflags '-d -s -w' -a -tags netgo -installsuffix netgo test.go
# ldd test

View file

@ -1,23 +0,0 @@
#! /bin/bash
echo "Generating in-memory static assets..."
# go get -u github.com/jteeuwen/go-bindata/...
# go get -u github.com/elazarl/go-bindata-assetfs/...
cd embed
go generate
echo "Compiling app..."
cd ..
for arch in amd64 ; do
for os in darwin linux windows ; do
if [ "$os" == "windows" ] ; then
echo "Compiling documize-community-$os-$arch.exe"
env GOOS=$os GOARCH=$arch go build -gcflags=-trimpath=$GOPATH -asmflags=-trimpath=$GOPATH -o bin/documize-community-$os-$arch.exe ./edition/community.go
else
echo "Compiling documize-community-$os-$arch"
env GOOS=$os GOARCH=$arch go build -gcflags=-trimpath=$GOPATH -asmflags=-trimpath=$GOPATH -o bin/documize-community-$os-$arch ./edition/community.go
fi
done
done
echo "Finished."

View file

@ -19,8 +19,8 @@ import (
"net/http"
"path/filepath"
"context"
api "github.com/documize/community/core/convapi"
"golang.org/x/net/context"
)
// Msword type provides a peg to hang the Convert method on.

View file

@ -19,7 +19,7 @@ import (
"github.com/documize/community/core/api/plugins"
api "github.com/documize/community/core/convapi"
"golang.org/x/net/context"
"context"
)
// Convert provides the entry-point into the document conversion process.

View file

@ -1,155 +0,0 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package convert_test
import (
"strings"
"testing"
"github.com/documize/community/core/api/convert"
"github.com/documize/community/core/api/plugins"
api "github.com/documize/community/core/convapi"
"github.com/documize/community/core/log"
"golang.org/x/net/context"
)
func TestConvert(t *testing.T) {
plugins.PluginFile = "" // no file as html is built-in
if lerr := plugins.LibSetup(); lerr == nil {
//t.Error("did not error on plugin.Libsetup() with no plugin.json file")
//return
}
defer log.IfErr(plugins.Lib.KillSubProcs())
ctx := context.Background()
xtn := "html"
fileRequest := new(api.DocumentConversionRequest)
fileRequest.Filedata = []byte(yorkweb)
resp, err := convert.Convert(ctx, xtn, fileRequest)
if err != nil {
t.Error(err)
return
}
if len(resp.Pages) != 3 ||
!strings.HasPrefix(resp.Pages[1].Title, "STARTING") ||
!strings.HasPrefix(resp.Pages[2].Title, "EXERCISE") {
for p, pg := range resp.Pages {
t.Error(p, pg.Level, len(pg.Body), pg.Title)
}
}
exp := "There are lots of ways to create web pages using already coded programmes. … HTML isn' t computer code, but is a language that uses US English to enable texts( words, images, sounds) to be inserted and formatting such as colo( u) r and centre/ erin…"
if resp.Excerpt != exp {
t.Errorf("unexpected excerpt wanted: `%s` got: `%s`", exp, resp.Excerpt)
}
// check errors are caught
resp, err = convert.Convert(ctx, "unknown", fileRequest)
if err == nil {
t.Error("does not error on unknown extension")
}
}
// www.york.ac.uk/teaching/cws/wws/webpage1.html
const yorkweb = `
<HMTL>
<HEAD>
<TITLE>webpage1</TITLE>
</HEAD>
<BODY BGCOLOR="FFFFFf" LINK="006666" ALINK="8B4513" VLINK="006666">
<TABLE WIDTH="75%" ALIGN="center">
<TR>
<TD>
<DIV ALIGN="center"><H1>STARTING . . . </H1></DIV>
<DIV ALIGN="justify"><P>There are lots of ways to create web pages using already coded programmes. These lessons will teach you how to use the underlying HyperText Markup Language - HTML.
<BR>
<P>HTML isn't computer code, but is a language that uses US English to enable texts (words, images, sounds) to be inserted and formatting such as colo(u)r and centre/ering to be written in. The process is fairly simple; the main difficulties often lie in small mistakes - if you slip up while word processing your reader may pick up your typos, but the page will still be legible. However, if your HTML is inaccurate the page may not appear - writing web pages is, at the least, very good practice for proof reading!</P>
<P>Learning HTML will enable you to:
<UL>
<LI>create your own simple pages
<LI>read and appreciate pages created by others
<LI>develop an understanding of the creative and literary implications of web-texts
<LI>have the confidence to branch out into more complex web design
</UL></P>
<P>A HTML web page is made up of tags. Tags are placed in brackets like this <B>< tag > </B>. A tag tells the browser how to display information. Most tags need to be opened < tag > and closed < /tag >.
<P> To make a simple web page you need to know only four tags:
<UL>
<LI>< HTML > tells the browser your page is written in HTML format
<LI>< HEAD > this is a kind of preface of vital information that doesn't appear on the screen.
<LI>< TITLE >Write the title of the web page here - this is the information that viewers see on the upper bar of their screen. (I've given this page the title 'webpage1').
<LI>< BODY >This is where you put the content of your page, the words and pictures that people read on the screen.
</UL>
<P>All these tags need to be closed.
<H4>EXERCISE</H4>
<P>Write a simple web page.</P>
<P> Copy out exactly the HTML below, using a WP program such as Notepad.<BR>
Information in <I>italics</I> indicates where you can insert your own text, other information is HTML and needs to be exact. However, make sure there are no spaces between the tag brackets and the text inside.<BR>
(Find Notepad by going to the START menu\ PROGRAMS\ ACCESSORIES\ NOTEPAD).
<P>
< HTML ><BR>
< HEAD ><BR>
< TITLE ><I> title of page</I>< /TITLE ><BR>
< /HEAD ><BR>
< BODY><BR>
<I> write what you like here: 'my first web page', or a piece about what you are reading, or a few thoughts on the course, or copy out a few words from a book or cornflake packet. Just type in your words using no extras such as bold, or italics, as these have special HTML tags, although you may use upper and lower case letters and single spaces. </I><BR>
< /BODY ><BR>
< /HTML ><BR>
<P>Save the file as 'first.html' (ie. call the file anything at all) It's useful if you start a folder - just as you would for word-processing - and call it something like WEBPAGES, and put your first.html file in the folder.
<P>NOW - open your browser.<BR>
On Netscape the process is: <BR>
Top menu; FILE\ OPEN PAGE\ CHOOSE FILE<BR>
Click on your WEBPAGES folder\ FIRST file<BR>
Click 'open' and your page should appear.
<P>On Internet Explorer: <BR>
Top menu; FILE\ OPEN\ BROWSE <BR>
Click on your WEBPAGES folder\ FIRST file<BR>
Click 'open' and your page should appear.<BR>
<P>If the page doesn't open, go back over your notepad typing and make sure that all the HTML tags are correct. Check there are no spaces between tags and internal text; check that all tags are closed; check that you haven't written < HTLM > or < BDDY >. Your page will work eventually.
<P>
Make another page. Call it somethingdifferent.html and place it in the same WEBPAGES folder as detailed above.
<P>start formatting in <A HREF="webpage2.html">lesson two</A>
<BR><A HREF="col3.html">back to wws index</A> </P>
</P>
</DIV>
</TD>
</TR>
</TABLE>
</BODY>
</HTML>
`

View file

@ -16,7 +16,7 @@ import (
api "github.com/documize/community/core/convapi"
"golang.org/x/net/context"
"context"
)
// Convert provides the standard interface for conversion of a ".documizeapi" json document.

View file

@ -16,9 +16,9 @@ import (
"fmt"
"strings"
"context"
api "github.com/documize/community/core/convapi"
"github.com/documize/community/core/stringutil"
"golang.org/x/net/context"
"golang.org/x/net/html"
"golang.org/x/net/html/atom"
)

View file

@ -16,7 +16,7 @@ import (
"github.com/documize/blackfriday"
"golang.org/x/net/context"
"context"
)
// Convert provides the standard interface for conversion of a Markdown document.

View file

@ -1,61 +0,0 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package plugins
import (
"os"
"testing"
)
func TestSetup(t *testing.T) {
err := LibSetup()
if err == nil {
//t.Error("should error on non-existent config file")
//t.Fail()
}
ssc, err := Lib.Actions("Convert")
if err != nil {
t.Error(err)
}
// TODO(Elliott) review for empty database
//if len(ssc) > 3 {
// t.Errorf("extra convert formats:%v", ssc)
//}
/* this code leaves plugins still running */
err = os.Chdir("../../..")
if err != nil {
t.Error(err)
}
err = LibSetup()
if err != nil {
t.Error(err)
}
ssc, err = Lib.Actions("Convert")
if err != nil {
t.Error(err)
}
if len(ssc) == 0 {
t.Error("no extra convert formats (defined)")
}
err = os.Chdir("documize/api/plugins")
if err != nil {
t.Error(err)
}
err = Lib.KillSubProcs()
if err != nil {
t.Error(err)
}
}

70
core/asset/assets.go Normal file
View file

@ -0,0 +1,70 @@
package asset
import (
"embed"
"errors"
"io"
"io/fs"
"mime"
"net/http"
"path"
"path/filepath"
)
// GetPublicFileSystem returns an http.FileSystem rooted at the embedded static/public directory.
func GetPublicFileSystem(e embed.FS) (hfs http.FileSystem, err error) {
fsys, err := fs.Sub(e, "static/public")
if err != nil {
return nil, errors.New("failed GetPublicFileSystem")
}
return http.FS(fsys), nil
}
// FetchStatic loads static asset from embed file system.
func FetchStatic(e embed.FS, filename string) (content, contentType string, err error) {
data, err := e.ReadFile("static/" + filename)
if err != nil {
return
}
contentType = mime.TypeByExtension(filepath.Ext(filename))
content = string(data)
return
}
// FetchStaticDir returns filenames within specified directory
func FetchStaticDir(fs embed.FS, directory string) (files []string, err error) {
entries, err := fs.ReadDir("static/" + directory)
if err != nil {
return
}
for i := range entries {
if !entries[i].Type().IsDir() {
files = append(files, entries[i].Name())
}
}
return files, nil
}
// WriteStatic loads static asset from embed file system and writes to HTTP.
func WriteStatic(fs embed.FS, prefix, requestedPath string, w http.ResponseWriter) error {
f, err := fs.Open(path.Join(prefix, requestedPath))
if err != nil {
return err
}
defer f.Close()
stat, _ := f.Stat()
if stat.IsDir() {
return errors.New("cannot write static file")
}
contentType := mime.TypeByExtension(filepath.Ext(requestedPath))
w.Header().Set("Content-Type", contentType)
_, err = io.Copy(w, f)
return err
}
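A minimal usage sketch for these helpers, assuming the binary embeds a static/ directory laid out as in the Dockerfile and build scripts above (the //go:embed directive and main function here are illustrative, not the actual edition entry point):

package main

import (
	"embed"
	"fmt"
	"net/http"

	"github.com/documize/community/core/asset"
)

//go:embed static
var staticFS embed.FS

func main() {
	// Serve everything under static/public as the web root.
	public, err := asset.GetPublicFileSystem(staticFS)
	if err != nil {
		panic(err)
	}
	http.Handle("/", http.FileServer(public))

	// List the embedded MySQL upgrade scripts.
	if files, err := asset.FetchStaticDir(staticFS, "scripts/mysql"); err == nil {
		fmt.Println("mysql scripts:", len(files))
	}

	fmt.Println(http.ListenAndServe(":5001", nil))
}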

View file

@ -82,7 +82,7 @@ func Check(runtime *env.Runtime) bool {
return false
}
if len(flds) == 0 {
if len(flds) <= 5 {
runtime.Log.Info("Database: starting setup mode for empty database")
runtime.Flags.SiteMode = env.SiteModeSetup
return false

View file

@ -26,7 +26,7 @@ func InstallUpgrade(runtime *env.Runtime, existingDB bool) (err error) {
// amLeader := false
// Get all SQL scripts.
scripts, err := LoadScripts()
scripts, err := LoadScripts(runtime)
if err != nil {
runtime.Log.Error("Database: unable to load scripts", err)
return
@ -77,35 +77,42 @@ func InstallUpgrade(runtime *env.Runtime, existingDB bool) (err error) {
runtime.Log.Info(fmt.Sprintf("Database: legacy schema has %d scripts to process", len(toProcess)))
}
tx, err := runtime.Db.Beginx()
if err != nil {
return err
}
err = runScripts(runtime, tx, toProcess)
err = runScripts(runtime, toProcess)
if err != nil {
runtime.Log.Error("Database: error processing SQL scripts", err)
tx.Rollback()
}
tx.Commit()
return nil
}
// Run SQL scripts to instal or upgrade this database.
func runScripts(runtime *env.Runtime, tx *sqlx.Tx, scripts []Script) (err error) {
// We do not use transactions for Microsoft SQL Server because
// CREATE FULLTEXT CATALOG statement cannot be used inside a user transaction.
func runScripts(runtime *env.Runtime, scripts []Script) (err error) {
tx, err := runtime.Db.Beginx()
if err != nil {
return err
}
// We can have multiple scripts as each Documize database change has its own SQL script.
for _, script := range scripts {
runtime.Log.Info(fmt.Sprintf("Database: processing SQL script %d", script.Version))
err = executeSQL(tx, runtime.StoreProvider.Type(), runtime.StoreProvider.TypeVariant(), script.Script)
err = executeSQL(tx, runtime, script.Script)
if err != nil {
runtime.Log.Error(fmt.Sprintf("error executing SQL script %d", script.Version), err)
if runtime.StoreProvider.Type() != env.StoreTypeSQLServer {
tx.Rollback()
}
return err
}
// Record the fact we have processed this database script version.
_, err = tx.Exec(runtime.StoreProvider.QueryRecordVersionUpgrade(script.Version))
if runtime.StoreProvider.Type() != env.StoreTypeSQLServer {
_, err = tx.Exec(runtime.StoreProvider.QueryRecordVersionUpgrade(script.Version))
} else {
_, err = runtime.Db.Exec(runtime.StoreProvider.QueryRecordVersionUpgrade(script.Version))
}
if err != nil {
// For MySQL we try the legacy DB schema.
if runtime.StoreProvider.Type() == env.StoreTypeMySQL {
@ -114,31 +121,45 @@ func runScripts(runtime *env.Runtime, tx *sqlx.Tx, scripts []Script) (err error)
_, err = tx.Exec(runtime.StoreProvider.QueryRecordVersionUpgradeLegacy(script.Version))
if err != nil {
runtime.Log.Error(fmt.Sprintf("error recording execution of SQL script %d", script.Version), err)
if runtime.StoreProvider.Type() != env.StoreTypeSQLServer {
tx.Rollback()
}
return err
}
} else {
// Unknown issue running script on non-MySQL database.
runtime.Log.Error(fmt.Sprintf("error executing SQL script %d", script.Version), err)
if runtime.StoreProvider.Type() != env.StoreTypeSQLServer {
tx.Rollback()
}
return err
}
}
}
tx.Commit()
return nil
}
// executeSQL runs specified SQL commands.
func executeSQL(tx *sqlx.Tx, st env.StoreType, variant env.StoreType, SQLfile []byte) error {
func executeSQL(tx *sqlx.Tx, runtime *env.Runtime, SQLfile []byte) error {
// Turn SQL file contents into runnable SQL statements.
stmts := getStatements(SQLfile)
for _, stmt := range stmts {
// MariaDB has no specific JSON column type (but has JSON queries)
if st == env.StoreTypeMySQL && variant == env.StoreTypeMariaDB {
if runtime.StoreProvider.Type() == env.StoreTypeMySQL &&
runtime.StoreProvider.TypeVariant() == env.StoreTypeMariaDB {
stmt = strings.Replace(stmt, "` JSON", "` TEXT", -1)
}
_, err := tx.Exec(stmt)
var err error
if runtime.StoreProvider.Type() != env.StoreTypeSQLServer {
_, err = tx.Exec(stmt)
} else {
_, err = runtime.Db.Exec(stmt)
}
if err != nil {
fmt.Println("sql statement error:", stmt)
return err

View file

@ -1,110 +0,0 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package database
// import (
// "crypto/rand"
// "time"
// "github.com/documize/community/core/env"
// "github.com/jmoiron/sqlx"
// )
// // Lock will try to lock the database instance to the running process.
// // Uses a "random" delay as a poor man's database cluster-aware process.
// // We skip delay if there are no scripts to process.
// func Lock(runtime *env.Runtime, scriptsToProcess int) (bool, error) {
// // Wait for random period of time.
// b := make([]byte, 2)
// _, err := rand.Read(b)
// if err != nil {
// return false, err
// }
// wait := ((time.Duration(b[0]) << 8) | time.Duration(b[1])) * time.Millisecond / 10 // up to 6.5 secs wait
// // Why delay if nothing to process?
// if scriptsToProcess > 0 {
// time.Sleep(wait)
// }
// // Start transaction for the lock process.
// tx, err := runtime.Db.Beginx()
// if err != nil {
// runtime.Log.Error("Database: unable to start transaction", err)
// return false, err
// }
// // Lock the database.
// _, err = tx.Exec(runtime.StoreProvider.QueryStartLock())
// if err != nil {
// runtime.Log.Error("Database: unable to lock tables", err)
// return false, err
// }
// // Unlock the database at the end of this function.
// defer func() {
// _, err = tx.Exec(runtime.StoreProvider.QueryFinishLock())
// if err != nil {
// runtime.Log.Error("Database: unable to unlock tables", err)
// }
// tx.Commit()
// }()
// // Try to record this process as leader of database migration process.
// _, err = tx.Exec(runtime.StoreProvider.QueryInsertProcessID())
// if err != nil {
// runtime.Log.Info("Database: marked as slave process awaiting upgrade")
// return false, nil
// }
// // We are the leader!
// runtime.Log.Info("Database: marked as database upgrade process leader")
// return true, err
// }
// // Unlock completes process that was started with Lock().
// func Unlock(runtime *env.Runtime, tx *sqlx.Tx, err error, amLeader bool) error {
// if amLeader {
// defer func() {
// doUnlock(runtime)
// }()
// if tx != nil {
// if err == nil {
// tx.Commit()
// runtime.Log.Info("Database: is ready")
// return nil
// }
// tx.Rollback()
// }
// runtime.Log.Error("Database: install/upgrade failed", err)
// return err
// }
// return nil // not the leader, so ignore errors
// }
// // Helper method for defer function called from Unlock().
// func doUnlock(runtime *env.Runtime) error {
// tx, err := runtime.Db.Beginx()
// if err != nil {
// return err
// }
// _, err = tx.Exec(runtime.StoreProvider.QueryDeleteProcessID())
// if err != nil {
// return err
// }
// return tx.Commit()
// }

View file

@ -28,6 +28,8 @@ func RebindParams(sql string, s env.StoreType) string {
switch s {
case env.StoreTypePostgreSQL:
bindParam = sqlx.DOLLAR
case env.StoreTypeSQLServer:
bindParam = sqlx.AT
}
return sqlx.Rebind(bindParam, sql)
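For illustration, rebinding with sqlx turns generic ? placeholders into the provider-specific form, so the same query text can serve PostgreSQL ($1, $2) and SQL Server (@p1, @p2); this standalone snippet only assumes the sqlx package already vendored by the project:

package main

import (
	"fmt"

	"github.com/jmoiron/sqlx"
)

func main() {
	q := "SELECT c_refid FROM dmz_doc WHERE c_orgid=? AND c_spaceid=?"
	fmt.Println(sqlx.Rebind(sqlx.DOLLAR, q)) // ... WHERE c_orgid=$1 AND c_spaceid=$2
	fmt.Println(sqlx.Rebind(sqlx.AT, q))     // ... WHERE c_orgid=@p1 AND c_spaceid=@p2
}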

View file

@ -1,38 +0,0 @@
## PENDING REMOVALS
NONE
## MYSQL ENCODING
https://stackoverflow.com/questions/37307146/difference-between-utf8mb4-unicode-ci-and-utf8mb4-unicode-520-ci-collations-in-m
https://mathiasbynens.be/notes/mysql-utf8mb4
https://medium.com/@adamhooper/in-mysql-never-use-utf8-use-utf8mb4-11761243e434
## MIGRATE ENCODING
ALTER DATABASE documize CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE account CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE attachment CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE block CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE config CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE document CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE feedback CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE label CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE labelrole CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE link CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE organization CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE page CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE pagemeta CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE participant CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE pin CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE revision CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE search CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE share CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE user CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE useraction CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE useractivity CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE userconfig CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE userevent CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;

View file

@ -12,11 +12,12 @@
package database
import (
"embed"
"fmt"
"sort"
"github.com/documize/community/core/asset"
"github.com/documize/community/core/env"
"github.com/documize/community/server/web"
)
// Scripts holds all .SQL files for all supported database providers.
@ -33,16 +34,19 @@ type Script struct {
}
// LoadScripts returns .SQL scripts for supported database providers.
func LoadScripts() (s Scripts, err error) {
assetDir := "bindata/scripts"
func LoadScripts(runtime *env.Runtime) (s Scripts, err error) {
// MySQL
s.MySQL, err = loadFiles(fmt.Sprintf("%s/mysql", assetDir))
s.MySQL, err = loadFiles(runtime.Assets, "scripts/mysql")
if err != nil {
return
}
// PostgreSQL
s.PostgreSQL, err = loadFiles(fmt.Sprintf("%s/postgresql", assetDir))
s.PostgreSQL, err = loadFiles(runtime.Assets, "scripts/postgresql")
if err != nil {
return
}
// SQL Server
s.SQLServer, err = loadFiles(runtime.Assets, "scripts/sqlserver")
if err != nil {
return
}
@ -57,7 +61,7 @@ func SpecificScripts(runtime *env.Runtime, all Scripts) (s []Script) {
return all.MySQL
case env.StoreTypePostgreSQL:
return all.PostgreSQL
case env.StoreTypeMSSQL:
case env.StoreTypeSQLServer:
return all.SQLServer
}
@ -65,20 +69,22 @@ func SpecificScripts(runtime *env.Runtime, all Scripts) (s []Script) {
}
// loadFiles returns all SQL scripts in specified folder as [][]byte.
func loadFiles(path string) (b []Script, err error) {
buf := []byte{}
scripts, err := web.AssetDir(path)
func loadFiles(fs embed.FS, path string) (b []Script, err error) {
scripts, err := asset.FetchStaticDir(fs, path)
if err != nil {
return
}
sort.Strings(scripts)
for _, file := range scripts {
buf, err = web.Asset(fmt.Sprintf("%s/%s", path, file))
for i := range scripts {
filename := scripts[i]
sqlfile, _, err := asset.FetchStatic(fs, fmt.Sprintf("%s/%s", path, filename))
if err != nil {
return
return b, err
}
b = append(b, Script{Version: extractVersionNumber(file), Script: buf})
b = append(b, Script{Version: extractVersionNumber(filename), Script: []byte(sqlfile)})
}
return b, nil
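Taken together with InstallUpgrade above, the flow is: load every provider's scripts from the embedded assets, then narrow to the provider actually configured. A short illustrative wrapper (package name and function are assumptions, error handling trimmed):

package setup

import (
	"github.com/documize/community/core/database"
	"github.com/documize/community/core/env"
)

// selectScripts combines the loaders shown above: read all embedded SQL
// scripts, then keep only those for the configured store provider.
func selectScripts(runtime *env.Runtime) ([]database.Script, error) {
	scripts, err := database.LoadScripts(runtime)
	if err != nil {
		runtime.Log.Error("Database: unable to load scripts", err)
		return nil, err
	}
	return database.SpecificScripts(runtime, scripts), nil
}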

View file

@ -1,3 +1,3 @@
/* community edition */
ALTER TABLE organization ADD COLUMN `service` VARCHAR(100) NOT NULL DEFAULT 'https://api.documize.com' AFTER `domain`;
ALTER TABLE organization ADD COLUMN `service` VARCHAR(100) NOT NULL DEFAULT '' AFTER `domain`;

View file

@ -1,6 +1,5 @@
/* community edition */
ALTER DATABASE documize CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE account CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE attachment CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
ALTER TABLE block CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;

View file

@ -36,7 +36,7 @@ ALTER TABLE dmz_org
CHANGE `title` `c_title` VARCHAR(500) NOT NULL,
CHANGE `message` `c_message` VARCHAR(500) NOT NULL,
CHANGE `domain` `c_domain` VARCHAR(200) NOT NULL DEFAULT '',
CHANGE `service` `c_service` VARCHAR(200) NOT NULL DEFAULT 'https://api.documize.com',
CHANGE `service` `c_service` VARCHAR(200) NOT NULL DEFAULT '',
CHANGE `email` `c_email` VARCHAR(500) NOT NULL DEFAULT '',
CHANGE `allowanonymousaccess` `c_anonaccess` BOOL NOT NULL DEFAULT 0,
CHANGE `authprovider` `c_authprovider` CHAR(20) NOT NULL DEFAULT 'documize',

View file

@ -0,0 +1,4 @@
/* Community Edition */
-- Support per section attachments
ALTER TABLE dmz_doc_attachment ADD COLUMN `c_sectionid` VARCHAR(20) NOT NULL DEFAULT '' COLLATE utf8_bin AFTER `c_docid`;

View file

@ -0,0 +1,5 @@
/* Enterprise edition */
-- Feedback feature: support threaded comments and section references
ALTER TABLE dmz_doc_comment ADD COLUMN `c_replyto` VARCHAR(20) NOT NULL DEFAULT '' COLLATE utf8_bin AFTER `c_docid`;
ALTER TABLE dmz_doc_comment ADD COLUMN `c_sectionid` VARCHAR(20) NOT NULL DEFAULT '' COLLATE utf8_bin AFTER `c_docid`;

View file

@ -0,0 +1,5 @@
/* Community edition */
-- Indexes to improve performance
CREATE UNIQUE INDEX idx_doc_4 ON dmz_doc(c_orgid,c_refid);
CREATE UNIQUE INDEX idx_section_4 ON dmz_section(c_orgid,c_refid);

View file

@ -0,0 +1,7 @@
/* Community Edition */
-- Increase column sizes to support rich text data entry
ALTER TABLE dmz_org MODIFY `c_message` VARCHAR(800) NOT NULL DEFAULT '';
ALTER TABLE dmz_space MODIFY `c_desc` VARCHAR(800) NOT NULL DEFAULT '';
ALTER TABLE dmz_category MODIFY `c_name` VARCHAR(200) NOT NULL DEFAULT '';
ALTER TABLE dmz_category ADD COLUMN `c_default` BOOL NOT NULL DEFAULT 0 AFTER `c_name`;

View file

@ -0,0 +1,4 @@
/* Community Edition */
-- Allow for pinned documents per space.
ALTER TABLE dmz_doc ADD COLUMN `c_seq` INT NOT NULL DEFAULT 99999 AFTER `c_versionorder`;

View file

@ -0,0 +1,5 @@
/* Community Edition */
-- Locale aware.
ALTER TABLE dmz_org ADD COLUMN `c_locale` VARCHAR(20) NOT NULL DEFAULT 'en-US';
ALTER TABLE dmz_user ADD COLUMN `c_locale` VARCHAR(20) NOT NULL DEFAULT 'en-US';

View file

@ -228,7 +228,7 @@ CREATE TABLE dmz_org (
c_title varchar(500) COLLATE ucs_basic NOT NULL,
c_message varchar(500) COLLATE ucs_basic NOT NULL,
c_domain varchar(200) COLLATE ucs_basic NOT NULL DEFAULT '',
c_service varchar(200) COLLATE ucs_basic NOT NULL DEFAULT 'https://api.documize.com',
c_service varchar(200) COLLATE ucs_basic NOT NULL DEFAULT '',
c_email varchar(500) COLLATE ucs_basic NOT NULL DEFAULT '',
c_anonaccess bool NOT NULL DEFAULT '0',
c_authprovider varchar(20) COLLATE ucs_basic NOT NULL DEFAULT 'documize',

View file

@ -15,7 +15,7 @@ CREATE TABLE dmz_space_label (
CREATE INDEX idx_space_label_1 ON dmz_space_label (id);
CREATE INDEX idx_space_label_2 ON dmz_space_label (c_orgid);
-- Space table upgrade to support labelling, icon and summary stats
-- Space table upgrade to support label, icon and summary stats
ALTER TABLE dmz_space ADD COLUMN c_desc VARCHAR(200) NOT NULL DEFAULT '';
ALTER TABLE dmz_space ADD COLUMN c_labelid VARCHAR(20) NOT NULL DEFAULT '' COLLATE ucs_basic;
ALTER TABLE dmz_space ADD COLUMN c_icon VARCHAR(20) NOT NULL DEFAULT '';

View file

@ -0,0 +1,4 @@
/* Community Edition */
-- Support per section attachments
ALTER TABLE dmz_doc_attachment ADD COLUMN c_sectionid VARCHAR(20) NOT NULL DEFAULT '' COLLATE ucs_basic;

View file

@ -0,0 +1,5 @@
/* Enterprise edition */
-- Feedback feature: support threaded comments and section references
ALTER TABLE dmz_doc_comment ADD COLUMN c_replyto VARCHAR(20) NOT NULL DEFAULT '' COLLATE ucs_basic;
ALTER TABLE dmz_doc_comment ADD COLUMN c_sectionid VARCHAR(20) NOT NULL DEFAULT '' COLLATE ucs_basic;

View file

@ -0,0 +1,5 @@
/* Community edition */
-- Indexes to improve performance
CREATE UNIQUE INDEX idx_doc_4 ON dmz_doc (c_orgid,c_refid);
CREATE UNIQUE INDEX idx_section_4 ON dmz_section (c_orgid,c_refid);

View file

@ -0,0 +1,7 @@
/* Community Edition */
-- Increase column sizes to support rich text data entry
ALTER TABLE dmz_org ALTER COLUMN c_message TYPE VARCHAR(2000);
ALTER TABLE dmz_space ALTER COLUMN c_desc TYPE VARCHAR(2000);
ALTER TABLE dmz_category ALTER COLUMN c_name TYPE VARCHAR(200);
ALTER TABLE dmz_category ADD COLUMN c_default bool NOT NULL DEFAULT '0';

View file

@ -0,0 +1,5 @@
/* Community Edition */
-- Allow for pinned documents per space.
ALTER TABLE dmz_doc ADD COLUMN c_seq INT NOT NULL DEFAULT '99999';

View file

@ -0,0 +1,5 @@
/* Community Edition */
-- Locale aware.
ALTER TABLE dmz_org ADD COLUMN c_locale VARCHAR(20) NOT NULL DEFAULT 'en-US';
ALTER TABLE dmz_user ADD COLUMN c_locale VARCHAR(20) NOT NULL DEFAULT 'en-US';

View file

@ -0,0 +1,469 @@
-- SQL to set up the Documize database
DROP TABLE IF EXISTS dmz_action;
CREATE TABLE dmz_action (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_requestorid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_actiontype INT NOT NULL DEFAULT '0',
c_note NVARCHAR(2000) NOT NULL DEFAULT '',
c_requested DATETIME2 NULL DEFAULT NULL,
c_due DATETIME2 NULL DEFAULT NULL,
c_completed DATETIME2 NULL DEFAULT NULL,
c_iscomplete BIT NOT NULL DEFAULT '0',
c_reftype NVARCHAR(1) NOT NULL DEFAULT 'D',
c_reftypeid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_action_1 ON dmz_action (c_refid);
CREATE INDEX idx_action_2 ON dmz_action (c_userid);
CREATE INDEX idx_action_3 ON dmz_action (c_docid);
CREATE INDEX idx_action_4 ON dmz_action (c_requestorid);
DROP TABLE IF EXISTS dmz_audit_log;
CREATE TABLE dmz_audit_log (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_eventtype NVARCHAR(100) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_ip NVARCHAR(39) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_audit_log_1 ON dmz_audit_log (c_orgid);
CREATE INDEX idx_audit_log_2 ON dmz_audit_log (c_userid);
CREATE INDEX idx_audit_log_3 ON dmz_audit_log (c_eventtype);
DROP TABLE IF EXISTS dmz_category;
CREATE TABLE dmz_category (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_spaceid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_name NVARCHAR(50) COLLATE Latin1_General_CS_AS NOT NULL,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_category_1 ON dmz_category (c_refid);
CREATE INDEX idx_category_2 ON dmz_category (c_orgid);
CREATE INDEX idx_category_3 ON dmz_category (c_orgid,c_spaceid);
DROP TABLE IF EXISTS dmz_category_member;
CREATE TABLE dmz_category_member (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_spaceid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_categoryid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_category_member_1 ON dmz_category_member (c_docid);
CREATE INDEX idx_category_member_2 ON dmz_category_member (c_orgid,c_docid);
CREATE INDEX idx_category_member_3 ON dmz_category_member (c_orgid,c_spaceid);
DROP TABLE IF EXISTS dmz_config;
CREATE TABLE dmz_config (
c_key NVARCHAR(200) COLLATE Latin1_General_CS_AS NOT NULL,
c_config NVARCHAR(MAX) DEFAULT NULL
);
DROP TABLE IF EXISTS dmz_doc;
CREATE TABLE dmz_doc (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_spaceid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_job NVARCHAR(36) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_location NVARCHAR(2000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_name NVARCHAR(2000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_desc NVARCHAR(2000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_slug NVARCHAR(2000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_tags NVARCHAR(1000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_template BIT NOT NULL DEFAULT '0',
c_protection INT NOT NULL DEFAULT '0',
c_approval INT NOT NULL DEFAULT '0',
c_lifecycle INT NOT NULL DEFAULT '1',
c_versioned BIT NOT NULL DEFAULT '0',
c_versionid NVARCHAR(100) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_versionorder INT NOT NULL DEFAULT '0',
c_groupid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_doc_1 ON dmz_doc (id);
CREATE INDEX idx_doc_2 ON dmz_doc (c_orgid);
CREATE INDEX idx_doc_3 ON dmz_doc (c_spaceid);
DROP TABLE IF EXISTS dmz_doc_attachment;
CREATE TABLE dmz_doc_attachment (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_job NVARCHAR(36) COLLATE Latin1_General_CS_AS NOT NULL,
c_fileid NVARCHAR(10) COLLATE Latin1_General_CS_AS NOT NULL,
c_filename NVARCHAR(255) COLLATE Latin1_General_CS_AS NOT NULL,
c_data VARBINARY(MAX),
c_extension NVARCHAR(6) COLLATE Latin1_General_CS_AS NOT NULL,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_doc_attachment_1 ON dmz_doc_attachment (id);
CREATE INDEX idx_doc_attachment_2 ON dmz_doc_attachment (c_orgid);
CREATE INDEX idx_doc_attachment_3 ON dmz_doc_attachment (c_docid);
CREATE INDEX idx_doc_attachment_4 ON dmz_doc_attachment (c_job,c_fileid);
DROP TABLE IF EXISTS dmz_doc_comment;
CREATE TABLE dmz_doc_comment (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_email NVARCHAR(250) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_feedback NVARCHAR(MAX) COLLATE Latin1_General_CS_AS,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_doc_comment_1 ON dmz_doc_comment (c_refid);
DROP TABLE IF EXISTS dmz_doc_link;
CREATE TABLE dmz_doc_link (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_spaceid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_sourcedocid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_sourcesectionid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_type NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_targetdocid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_targetid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_externalid NVARCHAR(1000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_orphan BIT NOT NULL DEFAULT '0',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
DROP TABLE IF EXISTS dmz_doc_share;
CREATE TABLE dmz_doc_share (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_email NVARCHAR(250) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_message NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_viewed NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_secret NVARCHAR(250) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_expires NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_active BIT NOT NULL DEFAULT '1',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
DROP TABLE IF EXISTS dmz_doc_vote;
CREATE TABLE dmz_doc_vote (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_voter NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_vote INT NOT NULL DEFAULT '0',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_doc_vote_1 ON dmz_doc_vote (c_refid);
CREATE INDEX idx_doc_vote_2 ON dmz_doc_vote (c_docid);
CREATE INDEX idx_doc_vote_3 ON dmz_doc_vote (c_orgid);
CREATE INDEX idx_doc_vote_4 ON dmz_doc_vote (c_orgid,c_docid);
DROP TABLE IF EXISTS dmz_group;
CREATE TABLE dmz_group (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_name NVARCHAR(50) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_desc NVARCHAR(100) COLLATE Latin1_General_CS_AS DEFAULT '',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_group_1 ON dmz_group (c_refid);
CREATE INDEX idx_group_2 ON dmz_group (c_orgid);
DROP TABLE IF EXISTS dmz_group_member;
CREATE TABLE dmz_group_member (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_groupid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL
);
CREATE INDEX idx_group_member_1 ON dmz_group_member (c_groupid,c_userid);
CREATE INDEX idx_group_member_2 ON dmz_group_member (c_orgid,c_groupid,c_userid);
DROP TABLE IF EXISTS dmz_org;
CREATE TABLE dmz_org (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_company NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL,
c_title NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL,
c_message NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL,
c_domain NVARCHAR(200) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_service NVARCHAR(200) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_email NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_anonaccess BIT NOT NULL DEFAULT '0',
c_authprovider NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT 'documize',
c_authconfig NVARCHAR(MAX) DEFAULT NULL,
c_maxtags INT NOT NULL DEFAULT '3',
c_sub NVARCHAR(MAX) NULL,
c_theme NVARCHAR(20) NOT NULL DEFAULT '',
c_logo VARBINARY(MAX),
c_verified BIT NOT NULL DEFAULT '0',
c_serial NVARCHAR(50) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_active BIT NOT NULL DEFAULT '1',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_org_1 ON dmz_org (id);
CREATE INDEX idx_org_2 ON dmz_org (c_domain);
DROP TABLE IF EXISTS dmz_permission;
CREATE TABLE dmz_permission (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_who NVARCHAR(30) COLLATE Latin1_General_CS_AS NOT NULL,
c_whoid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_action NVARCHAR(30) COLLATE Latin1_General_CS_AS NOT NULL,
c_scope NVARCHAR(30) COLLATE Latin1_General_CS_AS NOT NULL,
c_location NVARCHAR(100) COLLATE Latin1_General_CS_AS NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_permission_1 ON dmz_permission (c_orgid);
CREATE INDEX idx_permission_2 ON dmz_permission (c_orgid,c_who,c_whoid,c_location);
CREATE INDEX idx_permission_3 ON dmz_permission (c_orgid,c_who,c_whoid,c_location,c_action);
CREATE INDEX idx_permission_4 ON dmz_permission (c_orgid,c_location,c_refid);
CREATE INDEX idx_permission_5 ON dmz_permission (c_orgid,c_who,c_location,c_action);
DROP TABLE IF EXISTS dmz_pin;
CREATE TABLE dmz_pin (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_spaceid NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_sequence BIGINT NOT NULL DEFAULT '99',
c_name NVARCHAR(100) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_pin_1 ON dmz_pin (c_userid);
DROP TABLE IF EXISTS dmz_search;
CREATE TABLE dmz_search (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_itemid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_itemtype NVARCHAR(10) COLLATE Latin1_General_CS_AS NOT NULL,
c_content NVARCHAR(MAX) COLLATE Latin1_General_CS_AS,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_search_1 ON dmz_search (c_orgid);
CREATE INDEX idx_search_2 ON dmz_search (c_docid);
DROP TABLE IF EXISTS dmz_section;
CREATE TABLE dmz_section (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_contenttype NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT 'wysiwyg',
c_type NVARCHAR(10) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT 'section',
c_templateid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_level bigint NOT NULL,
c_sequence double precision NOT NULL,
c_name NVARCHAR(2000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_body NVARCHAR(MAX) COLLATE Latin1_General_CS_AS,
c_revisions bigint NOT NULL,
c_status INT NOT NULL DEFAULT '0',
c_relativeid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_section_1 ON dmz_section (id);
CREATE INDEX idx_section_2 ON dmz_section (c_orgid);
CREATE INDEX idx_section_3 ON dmz_section (c_docid);
DROP TABLE IF EXISTS dmz_section_meta;
CREATE TABLE dmz_section_meta (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_sectionid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_rawbody NVARCHAR(MAX),
c_config NVARCHAR(MAX) DEFAULT NULL,
c_external BIT DEFAULT '0',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_section_meta_1 ON dmz_section_meta (id);
CREATE INDEX idx_section_meta_2 ON dmz_section_meta (c_sectionid);
CREATE INDEX idx_section_meta_3 ON dmz_section_meta (c_orgid);
CREATE INDEX idx_section_meta_4 ON dmz_section_meta (c_docid);
DROP TABLE IF EXISTS dmz_section_revision;
CREATE TABLE dmz_section_revision (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_ownerid NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_sectionid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_contenttype NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT 'wysiwyg',
c_type NVARCHAR(10) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT 'section',
c_name NVARCHAR(2000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_body NVARCHAR(MAX) COLLATE Latin1_General_CS_AS,
c_rawbody NVARCHAR(MAX),
c_config NVARCHAR(MAX) DEFAULT NULL,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_section_revision_1 ON dmz_section_revision (id);
CREATE INDEX idx_section_revision_2 ON dmz_section_revision (c_orgid);
CREATE INDEX idx_section_revision_3 ON dmz_section_revision (c_docid);
CREATE INDEX idx_section_revision_4 ON dmz_section_revision (c_sectionid);
DROP TABLE IF EXISTS dmz_section_template;
CREATE TABLE dmz_section_template (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_spaceid NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS DEFAULT '',
c_contenttype NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT 'wysiwyg',
c_type NVARCHAR(10) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT 'section',
c_name NVARCHAR(2000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_body NVARCHAR(MAX) COLLATE Latin1_General_CS_AS,
c_desc NVARCHAR(2000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_used INT NOT NULL,
c_rawbody NVARCHAR(MAX),
c_config NVARCHAR(MAX) DEFAULT NULL,
c_external BIT DEFAULT '0',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_section_template_1 ON dmz_section_template (c_refid);
CREATE INDEX idx_section_template_2 ON dmz_section_template (c_spaceid);
DROP TABLE IF EXISTS dmz_space;
CREATE TABLE dmz_space (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_name NVARCHAR(300) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_type INT NOT NULL DEFAULT '1',
c_lifecycle INT NOT NULL DEFAULT '1',
c_desc NVARCHAR(200) NOT NULL DEFAULT '',
c_labelid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_likes NVARCHAR(1000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_icon NVARCHAR(50) NOT NULL DEFAULT '',
c_count_category INT NOT NULL DEFAULT 0,
c_count_content INT NOT NULL DEFAULT 0,
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_space_1 ON dmz_space (id);
CREATE INDEX idx_space_2 ON dmz_space (c_userid);
CREATE INDEX idx_space_3 ON dmz_space (c_orgid);
DROP TABLE IF EXISTS dmz_space_label;
CREATE TABLE dmz_space_label (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_name NVARCHAR(50) NOT NULL DEFAULT '',
c_color NVARCHAR(10) NOT NULL DEFAULT '',
c_created DATETIME2 DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_space_label_1 ON dmz_space_label (id);
CREATE INDEX idx_space_label_2 ON dmz_space_label (c_orgid);
DROP TABLE IF EXISTS dmz_user;
CREATE TABLE dmz_user (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_firstname NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_lastname NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_email NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_initials NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_globaladmin BIT NOT NULL DEFAULT '0',
c_password NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_salt NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_reset NVARCHAR(500) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_active BIT NOT NULL DEFAULT '1',
c_lastversion NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_user_1 ON dmz_user (id);
CREATE INDEX idx_user_2 ON dmz_user (c_email);
DROP TABLE IF EXISTS dmz_user_account;
CREATE TABLE dmz_user_account (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_refid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_editor BIT NOT NULL DEFAULT '0',
c_admin BIT NOT NULL DEFAULT '0',
c_users BIT NOT NULL DEFAULT '1',
c_analytics BIT NOT NULL DEFAULT '0',
c_active BIT NOT NULL DEFAULT '1',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
c_revised DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_user_account_1 ON dmz_user_account (id);
CREATE INDEX idx_user_account_2 ON dmz_user_account (c_userid);
CREATE INDEX idx_user_account_3 ON dmz_user_account (c_orgid);
DROP TABLE IF EXISTS dmz_user_activity;
CREATE TABLE dmz_user_activity (
id BIGINT PRIMARY KEY IDENTITY (1, 1) NOT NULL,
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_spaceid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_docid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_sectionid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_sourcetype INT NOT NULL DEFAULT '0',
c_activitytype INT NOT NULL DEFAULT '0',
c_metadata NVARCHAR(1000) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '',
c_created DATETIME2 NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX idx_user_activity_1 ON dmz_user_activity (c_orgid);
CREATE INDEX idx_user_activity_2 ON dmz_user_activity (c_userid);
CREATE INDEX idx_user_activity_3 ON dmz_user_activity (c_activitytype);
CREATE INDEX idx_user_activity_4 ON dmz_user_activity (c_orgid,c_docid,c_sourcetype);
CREATE INDEX idx_user_activity_5 ON dmz_user_activity (c_orgid,c_docid,c_userid,c_sourcetype);
DROP TABLE IF EXISTS dmz_user_config;
CREATE TABLE dmz_user_config (
c_orgid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_userid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL,
c_key NVARCHAR(200) COLLATE Latin1_General_CS_AS NOT NULL,
c_config NVARCHAR(MAX) DEFAULT NULL
);
INSERT INTO dmz_config VALUES ('SMTP','{"userid": "","password": "","host": "","port": "","sender": ""}');
INSERT INTO dmz_config VALUES ('FILEPLUGINS', '[{"Comment": "Disable (or not) built-in html import (NOTE: no Plugin name)","Disabled": false,"API": "Convert","Actions": ["htm","html"]},{"Comment": "Disable (or not) built-in Documize API import used from SDK (NOTE: no Plugin name)","Disabled": false,"API": "Convert","Actions": ["documizeapi"]}]');
INSERT INTO dmz_config VALUES ('SECTION-TRELLO','{"appKey": ""}');
INSERT INTO dmz_config VALUES ('META','{"database": "0"}');
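The schema above targets Microsoft SQL Server. A minimal connectivity sketch in Go, assuming the github.com/denisenkom/go-mssqldb driver (which registers the "sqlserver" driver name) together with sqlx; host, credentials and database name are placeholders, and Documize itself wires this up through its own store provider layer.

package main

import (
    "fmt"
    "log"

    _ "github.com/denisenkom/go-mssqldb" // registers the "sqlserver" driver
    "github.com/jmoiron/sqlx"
)

func main() {
    // Placeholder connection string.
    dsn := "sqlserver://documize:Passw0rd@localhost:1433?database=documize"

    db, err := sqlx.Open("sqlserver", dsn)
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    // Smoke test against one of the tables created above.
    var orgs int
    if err := db.Get(&orgs, "SELECT COUNT(*) FROM dmz_org"); err != nil {
        log.Fatal(err)
    }
    fmt.Println("organizations:", orgs)
}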


@ -0,0 +1,4 @@
/* Community Edition */
-- Support per section attachments
ALTER TABLE dmz_doc_attachment ADD c_sectionid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '';


@ -0,0 +1,5 @@
/* Enterprise edition */
-- Feedback feature: support threaded comments and section references
ALTER TABLE dmz_doc_comment ADD c_replyto NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '';
ALTER TABLE dmz_doc_comment ADD c_sectionid NVARCHAR(20) COLLATE Latin1_General_CS_AS NOT NULL DEFAULT '';


@ -0,0 +1,16 @@
/* Community edition */
-- Fulltext search support
IF EXISTS (SELECT * FROM sysfulltextcatalogs ftc WHERE ftc.name = N'dmz_search_catalog')
DROP FULLTEXT CATALOG dmz_search_catalog;
CREATE FULLTEXT CATALOG dmz_search_catalog;
CREATE UNIQUE INDEX idx_doc_4 ON dmz_doc(c_refid);
CREATE UNIQUE INDEX idx_section_4 ON dmz_section(c_refid);
CREATE FULLTEXT INDEX ON dmz_doc (c_name, c_desc) KEY INDEX idx_doc_4 ON dmz_search_catalog
WITH CHANGE_TRACKING AUTO;
CREATE FULLTEXT INDEX ON dmz_section (c_name, c_body) KEY INDEX idx_section_4 ON dmz_search_catalog
WITH CHANGE_TRACKING AUTO;
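A hedged sketch of querying these full-text indexes from Go. CONTAINS is standard SQL Server full-text syntax; the helper function, its name and its use of sqlx are illustrative only and not the actual Documize search store.

// searchDocs is illustrative; assumes import "github.com/jmoiron/sqlx".
// phrase must be a valid full-text predicate, for example a quoted word.
func searchDocs(db *sqlx.DB, orgID, phrase string) (refs []string, err error) {
    // CONTAINS uses the full-text index created above on dmz_doc (c_name, c_desc).
    // go-mssqldb binds positional arguments as @p1, @p2, ...
    err = db.Select(&refs,
        `SELECT c_refid FROM dmz_doc
         WHERE c_orgid = @p1 AND CONTAINS((c_name, c_desc), @p2)`,
        orgID, phrase)
    return refs, err
}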


@ -0,0 +1,7 @@
/* Community edition */
-- Increase column sizes to support rich text data entry
ALTER TABLE dmz_org ALTER COLUMN c_message NVARCHAR(2000);
ALTER TABLE dmz_space ALTER COLUMN c_desc NVARCHAR(2000);
ALTER TABLE dmz_category ALTER COLUMN c_name NVARCHAR(200);
ALTER TABLE dmz_category ADD c_default BIT NOT NULL DEFAULT '0';


@ -0,0 +1,4 @@
/* Community edition */
-- Allow for pinned documents per space.
ALTER TABLE dmz_doc ADD c_seq INT NOT NULL DEFAULT '99999';


@ -0,0 +1,5 @@
/* Community edition */
-- Locale aware.
ALTER TABLE dmz_org ADD c_locale NVARCHAR(20) NOT NULL DEFAULT 'en-US';
ALTER TABLE dmz_user ADD c_locale NVARCHAR(20) NOT NULL DEFAULT 'en-US';


@ -0,0 +1,4 @@
/* Community edition */
-- Performance indexes
CREATE INDEX idx_action_5 ON dmz_action (c_orgid,c_userid,c_docid,c_actiontype,c_iscomplete,c_reftype,c_reftypeid);


@ -0,0 +1,8 @@
/* Community edition */
-- Performance indexes
CREATE INDEX idx_action_6 ON dmz_action (c_orgid,c_reftypeid,c_reftype);
CREATE INDEX idx_action_7 ON dmz_action (c_orgid,c_refid);
CREATE INDEX idx_section_5 ON dmz_section (c_orgid,c_refid);


@ -0,0 +1,6 @@
/* Community edition */
-- Performance indexes
CREATE INDEX idx_action_8 ON dmz_action (c_orgid,c_docid);
CREATE INDEX idx_user_3 ON dmz_user (c_refid);


@ -12,6 +12,8 @@
package database
import (
"encoding/json"
"encoding/xml"
"errors"
"net/http"
"time"
@ -21,6 +23,7 @@ import (
"github.com/documize/community/core/secrets"
"github.com/documize/community/core/stringutil"
"github.com/documize/community/core/uniqueid"
"github.com/documize/community/domain"
"github.com/documize/community/domain/store"
"github.com/documize/community/server/web"
)
@ -65,15 +68,16 @@ func (h *Handler) Setup(w http.ResponseWriter, r *http.Request) {
}
details := onboardRequest{
URL: "",
Company: r.Form.Get("title"),
CompanyLong: r.Form.Get("title"),
Message: r.Form.Get("message"),
Email: r.Form.Get("email"),
Password: r.Form.Get("password"),
Firstname: r.Form.Get("firstname"),
Lastname: r.Form.Get("lastname"),
Revised: time.Now().UTC(),
URL: "",
Company: r.Form.Get("title"),
CompanyLong: r.Form.Get("title"),
Message: r.Form.Get("message"),
Email: r.Form.Get("email"),
Password: r.Form.Get("password"),
Firstname: r.Form.Get("firstname"),
Lastname: r.Form.Get("lastname"),
ActivationKey: r.Form.Get("activationKey"),
Revised: time.Now().UTC(),
}
if details.Company == "" ||
@ -108,15 +112,16 @@ func (h *Handler) Setup(w http.ResponseWriter, r *http.Request) {
// onboardRequest holds the details captured when completing the onboarding process.
type onboardRequest struct {
URL string
Company string
CompanyLong string
Message string
Email string
Password string
Firstname string
Lastname string
Revised time.Time
URL string
Company string
CompanyLong string
Message string
Email string
Password string
Firstname string
Lastname string
ActivationKey string
Revised time.Time
}
// setupAccount prepares the database for a newly onboarded customer.
@ -128,17 +133,20 @@ func setupAccount(rt *env.Runtime, completion onboardRequest, serial string) (er
return
}
//accountTitle := "This is where you will find documentation for your all projects. You can customize this message from the settings screen."
salt := secrets.GenerateSalt()
password := secrets.GeneratePassword(completion.Password, salt)
// Process activation key if we have one.
activationKey := processActivationKey(rt, completion)
// Allocate organization to the user.
orgID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_org (c_refid, c_company, c_title, c_message, c_domain, c_email, c_serial) VALUES (?, ?, ?, ?, ?, ?, ?)", rt.StoreProvider.Type()),
orgID, completion.Company, completion.CompanyLong, completion.Message, completion.URL, completion.Email, serial)
_, err = tx.Exec(RebindParams("INSERT INTO dmz_org (c_refid, c_company, c_title, c_message, c_domain, c_email, c_serial, c_sub) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
rt.StoreProvider.Type()),
orgID, completion.Company, completion.CompanyLong, completion.Message, completion.URL, completion.Email, serial, activationKey)
if err != nil {
rt.Log.Error("INSERT INTO dmz_org failed", err)
tx.Rollback()
rt.Rollback(tx)
return
}
@ -148,7 +156,7 @@ func setupAccount(rt *env.Runtime, completion onboardRequest, serial string) (er
userID, completion.Firstname, completion.Lastname, completion.Email, stringutil.MakeInitials(completion.Firstname, completion.Lastname), salt, password, true)
if err != nil {
rt.Log.Error("INSERT INTO dmz_user failed", err)
tx.Rollback()
rt.Rollback(tx)
return
}
@ -158,80 +166,7 @@ func setupAccount(rt *env.Runtime, completion onboardRequest, serial string) (er
accountID, userID, orgID, true, true, true, true)
if err != nil {
rt.Log.Error("INSERT INTO dmz_user_account failed", err)
tx.Rollback()
return
}
// Create space.
spaceID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_space (c_refid, c_orgid, c_userid, c_name, c_type) VALUES (?, ?, ?, ?, ?)", rt.StoreProvider.Type()),
spaceID, orgID, userID, "Welcome", 2)
if err != nil {
rt.Log.Error("INSERT INTO dmz_space failed", err)
tx.Rollback()
return
}
// Assign permissions to space.
perms := []string{"view", "manage", "own", "doc-add", "doc-edit", "doc-delete", "doc-move", "doc-copy", "doc-template", "doc-approve", "doc-version", "doc-lifecycle"}
for _, p := range perms {
_, err = tx.Exec(RebindParams("INSERT INTO dmz_permission (c_orgid, c_who, c_whoid, c_action, c_scope, c_location, c_refid) VALUES (?, ?, ?, ?, ?, ?, ?)", rt.StoreProvider.Type()),
orgID, "user", userID, p, "object", "space", spaceID)
if err != nil {
rt.Log.Error("INSERT INTO dmz_permission failed", err)
tx.Rollback()
return
}
}
// Create some user groups.
groupDevID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group (c_refid, c_orgid, c_name, c_desc) VALUES (?, ?, ?, ?)", rt.StoreProvider.Type()),
groupDevID, orgID, "Technology", "On-site and remote development teams")
if err != nil {
rt.Log.Error("INSERT INTO dmz_group failed", err)
tx.Rollback()
return
}
groupProjectID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group (c_refid, c_orgid, c_name, c_desc) VALUES (?, ?, ?, ?)", rt.StoreProvider.Type()),
groupProjectID, orgID, "Project Management", "HQ PMO and Account Management departments")
if err != nil {
rt.Log.Error("INSERT INTO dmz_group failed", err)
tx.Rollback()
return
}
groupBackofficeID := uniqueid.Generate()
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group (c_refid, c_orgid, c_name, c_desc) VALUES (?, ?, ?, ?)", rt.StoreProvider.Type()),
groupBackofficeID, orgID, "Back Office", "Finance and HR people")
if err != nil {
rt.Log.Error("INSERT INTO dmz_group failed", err)
tx.Rollback()
return
}
// Join the user groups.
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group_member (c_orgid, c_groupid, c_userid) VALUES (?, ?, ?)", rt.StoreProvider.Type()),
orgID, groupDevID, userID)
if err != nil {
rt.Log.Error("INSERT INTO dmz_group_member failed", err)
tx.Rollback()
return
}
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group_member (c_orgid, c_groupid, c_userid) VALUES (?, ?, ?)", rt.StoreProvider.Type()),
orgID, groupProjectID, userID)
if err != nil {
rt.Log.Error("INSERT INTO dmz_group_member failed", err)
tx.Rollback()
return
}
_, err = tx.Exec(RebindParams("INSERT INTO dmz_group_member (c_orgid, c_groupid, c_userid) VALUES (?, ?, ?)", rt.StoreProvider.Type()),
orgID, groupBackofficeID, userID)
if err != nil {
rt.Log.Error("INSERT INTO dmz_group_member failed", err)
tx.Rollback()
rt.Rollback(tx)
return
}
@ -243,3 +178,30 @@ func setupAccount(rt *env.Runtime, completion onboardRequest, serial string) (er
return
}
func processActivationKey(rt *env.Runtime, or onboardRequest) (key string) {
key = "{}"
if len(or.ActivationKey) == 0 {
return
}
j := domain.SubscriptionData{}
x := domain.SubscriptionXML{Key: "", Signature: ""}
err1 := xml.Unmarshal([]byte(or.ActivationKey), &x)
if err1 == nil {
j.Key = x.Key
j.Signature = x.Signature
} else {
rt.Log.Error("failed to XML unmarshal subscription XML", err1)
}
d, err2 := json.Marshal(j)
if err2 == nil {
key = string(d)
} else {
rt.Log.Error("failed to JSON marshal subscription XML", err2)
}
return
}
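processActivationKey re-encodes an XML activation key as JSON before it is stored on the organization record (c_sub). A standalone sketch of that pattern, using hypothetical local types and element names rather than the real domain.SubscriptionXML / domain.SubscriptionData definitions:

package main

import (
    "encoding/json"
    "encoding/xml"
    "fmt"
)

// Illustrative types; the real field tags live in the domain package.
type subXML struct {
    Key       string `xml:"key"`
    Signature string `xml:"signature"`
}

type subJSON struct {
    Key       string `json:"key"`
    Signature string `json:"signature"`
}

func main() {
    in := `<subscription><key>abc</key><signature>def</signature></subscription>`

    var x subXML
    if err := xml.Unmarshal([]byte(in), &x); err != nil {
        fmt.Println("bad activation key:", err)
        return
    }

    out, _ := json.Marshal(subJSON{Key: x.Key, Signature: x.Signature})
    fmt.Println(string(out)) // {"key":"abc","signature":"def"}
}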


@ -14,7 +14,7 @@
html {
-webkit-font-smoothing: antialiased;
}
body {
font-family: 'Open Sans', sans-serif;
background-color: #1b75bb;
@ -22,54 +22,54 @@
color: #ffffff;
padding-top: 50px;
}
.container {
max-width: 1200px;
margin: 0 auto;
text-align: center;
}
.logo {
margin: 0 15px;
}
.content {
margin: 0 15px;
}
.content > div {
margin: 50px 0;
}
.content h1 {
font-size: 24px;
font-weight: 400;
text-transform: uppercase;
margin: 0 0 30px;
}
.content p {
font-size: 18px;
line-height: 28px;
margin: 30px 0 0 0;
}
.content .image {
text-align: center;
}
.clearfix {
overflow: auto;
zoom: 1;
}
.btn-main {
border: 1px solid #ffffff;
padding: 12px 20px;
border-radius: 5px;
margin-left: 25px;
}
@media (min-width: 768px) {
body {
margin-top: 100px;
@ -102,7 +102,7 @@
<body>
<div class="container">
<div class="logo">
<img src="/assets/img/setup/logo.png" alt="Documize">
<img src="/assets/img/setup/logo.png" alt="Documize Community">
</div>
<div class="content clearfix">
<div class="image">
@ -110,11 +110,11 @@
</div>
<div class="text">
<h1>Database Error</h1>
<p>There seems to be a problem with the Documize database: <strong>{{.DBname}}</strong></p>
<p>There seems to be a problem with the Documize Community database: <strong>{{.DBname}}</strong></p>
<p><em>{{.Issue}}</em></p>
</div>
</div>
</div>
</body>
</html>
</html>

File diff suppressed because one or more lines are too long

core/env/command_line.go

@ -0,0 +1,108 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
// Package env provides runtime, server level setup and configuration
package env
import (
"flag"
"fmt"
"os"
"sort"
"strings"
"sync"
)
// prefix provides the prefix for all environment variables
const prefix = "DOCUMIZE"
const goInit = "(default)"
var flagList progFlags
var cliMutex sync.Mutex
type flagItem struct {
target *string
name, setter, value string
required bool
}
type progFlags struct {
items []flagItem
}
// Len is part of sort.Interface.
func (v *progFlags) Len() int {
return len(v.items)
}
// Swap is part of sort.Interface.
func (v *progFlags) Swap(i, j int) {
v.items[i], v.items[j] = v.items[j], v.items[i]
}
// Less is part of sort.Interface.
func (v *progFlags) Less(i, j int) bool {
return v.items[i].name < v.items[j].name
}
// register prepares flag for subsequent parsing
func register(target *string, name string, required bool, usage string) {
cliMutex.Lock()
defer cliMutex.Unlock()
name = strings.ToLower(strings.TrimSpace(name))
setter := prefix + strings.ToUpper(name)
value := os.Getenv(setter)
if value == "" {
value = *target // use the Go initialized value
setter = goInit
}
flag.StringVar(target, name, value, usage)
flagList.items = append(flagList.items, flagItem{target: target, name: name, required: required, value: value, setter: setter})
}
// parse loads flags from OS environment and command line switches
func parse(doFirst string) (ok bool) {
cliMutex.Lock()
defer cliMutex.Unlock()
flag.Parse()
sort.Sort(&flagList)
for pass := 1; pass <= 2; pass++ {
for vi, v := range flagList.items {
if (pass == 1 && v.name == doFirst) || (pass == 2 && v.name != doFirst) {
if v.value != *(v.target) || (v.value != "" && *(v.target) == "") {
flagList.items[vi].setter = "-" + v.name // v is a local copy, not the underlying data
}
if v.required {
if *(v.target) == "" {
fmt.Fprintln(os.Stderr)
fmt.Fprintln(os.Stderr, "In order to run", os.Args[0], "the following must be provided:")
for _, vv := range flagList.items {
if vv.required {
fmt.Fprintf(os.Stderr, "* setting from environment variable '%s' or flag '-%s' or an application setting '%s', current value: '%s' set by '%s'\n",
prefix+strings.ToUpper(vv.name), vv.name, vv.name, *(vv.target), vv.setter)
}
}
fmt.Fprintln(os.Stderr)
flag.Usage()
return false
}
}
}
}
}
return true
}

core/env/flags.go

@ -12,26 +12,19 @@
// Package env provides runtime, server level setup and configuration
package env
import (
"flag"
"fmt"
"os"
"sort"
"strings"
"sync"
)
// Flags provides access to environment and command line switches for this program.
type Flags struct {
DBType string // database type
DBConn string // database connection string
Salt string // the salt string used to encode JWT tokens
DBType string // database type
SSLCertFile string // (optional) name of SSL certificate PEM file
SSLKeyFile string // (optional) name of SSL key PEM file
HTTPPort string // (optional) HTTP or HTTPS port
ForceHTTPPort2SSL string // (optional) HTTP that should be redirected to HTTPS
SSLCertFile string // (optional) name of SSL certificate PEM file
SSLKeyFile string // (optional) name of SSL key PEM file
TLSVersion string // (optional) minimum TLS version for SSL connections
SiteMode string // (optional) if 1 then serve offline web page
Location string // reserved
ConfigSource string // tells us if configuration info was obtained from command line or config file
}
// SSLEnabled returns true if both cert and key were provided at runtime.
@ -39,125 +32,27 @@ func (f *Flags) SSLEnabled() bool {
return f.SSLCertFile != "" && f.SSLKeyFile != ""
}
type flagItem struct {
target *string
name, setter, value string
required bool
// ConfigToml represents the configuration file that contains all of the flags listed above.
type ConfigToml struct {
HTTP httpConfig `toml:"http"`
Database databaseConfig `toml:"database"`
Install installConfig `toml:"install"`
}
type progFlags struct {
items []flagItem
type httpConfig struct {
Port int
ForceSSLPort int
Cert string
Key string
TLSVersion string
}
// Len is part of sort.Interface.
func (v *progFlags) Len() int {
return len(v.items)
type databaseConfig struct {
Type string
Connection string
Salt string
}
// Swap is part of sort.Interface.
func (v *progFlags) Swap(i, j int) {
v.items[i], v.items[j] = v.items[j], v.items[i]
}
// Less is part of sort.Interface.
func (v *progFlags) Less(i, j int) bool {
return v.items[i].name < v.items[j].name
}
// prefix provides the prefix for all environment variables
const prefix = "DOCUMIZE"
const goInit = "(default)"
var flagList progFlags
var loadMutex sync.Mutex
// ParseFlags loads command line and OS environment variables required by the program to function.
func ParseFlags() (f Flags, ok bool) {
ok = true
var dbConn, dbType, jwtKey, siteMode, port, certFile, keyFile, forcePort2SSL, location string
register(&jwtKey, "salt", false, "the salt string used to encode JWT tokens, if not set a random value will be generated")
register(&certFile, "cert", false, "the cert.pem file used for https")
register(&keyFile, "key", false, "the key.pem file used for https")
register(&port, "port", false, "http/https port number")
register(&forcePort2SSL, "forcesslport", false, "redirect given http port number to TLS")
register(&siteMode, "offline", false, "set to '1' for OFFLINE mode")
register(&dbType, "dbtype", true, "specify the database provider: mysql|percona|mariadb|postgresql")
register(&dbConn, "db", true, `'database specific connection string for example "user:password@tcp(localhost:3306)/dbname"`)
register(&location, "location", false, `reserved`)
if !parse("db") {
ok = false
}
f.DBConn = dbConn
f.ForceHTTPPort2SSL = forcePort2SSL
f.HTTPPort = port
f.Salt = jwtKey
f.SiteMode = siteMode
f.SSLCertFile = certFile
f.SSLKeyFile = keyFile
f.DBType = strings.ToLower(dbType)
// reserved
if len(location) == 0 {
location = "selfhost"
}
f.Location = strings.ToLower(location)
return f, ok
}
// register prepares flag for subsequent parsing
func register(target *string, name string, required bool, usage string) {
loadMutex.Lock()
defer loadMutex.Unlock()
name = strings.ToLower(strings.TrimSpace(name))
setter := prefix + strings.ToUpper(name)
value := os.Getenv(setter)
if value == "" {
value = *target // use the Go initialized value
setter = goInit
}
flag.StringVar(target, name, value, usage)
flagList.items = append(flagList.items, flagItem{target: target, name: name, required: required, value: value, setter: setter})
}
// parse loads flags from OS environment and command line switches
func parse(doFirst string) (ok bool) {
loadMutex.Lock()
defer loadMutex.Unlock()
flag.Parse()
sort.Sort(&flagList)
for pass := 1; pass <= 2; pass++ {
for vi, v := range flagList.items {
if (pass == 1 && v.name == doFirst) || (pass == 2 && v.name != doFirst) {
if v.value != *(v.target) || (v.value != "" && *(v.target) == "") {
flagList.items[vi].setter = "-" + v.name // v is a local copy, not the underlying data
}
if v.required {
if *(v.target) == "" {
fmt.Fprintln(os.Stderr)
fmt.Fprintln(os.Stderr, "In order to run", os.Args[0], "the following must be provided:")
for _, vv := range flagList.items {
if vv.required {
fmt.Fprintf(os.Stderr, "* setting from environment variable '%s' or flag '-%s' or an application setting '%s', current value: '%s' set by '%s'\n",
prefix+strings.ToUpper(vv.name), vv.name, vv.name, *(vv.target), vv.setter)
}
}
fmt.Fprintln(os.Stderr)
flag.Usage()
return false
}
}
}
}
}
return true
type installConfig struct {
Location string
}

core/env/parser.go

@ -0,0 +1,145 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
// Package env provides runtime, server level setup and configuration
package env
import (
"fmt"
"os"
"strconv"
"strings"
"github.com/BurntSushi/toml"
)
// LoadConfig loads runtime parameters like port numbers and DB connections.
// We first check for a TOML config file (documize.conf or a filename passed as the only argument).
// If not found, we then read parameters from command line and environment vars.
func LoadConfig() (f Flags, ok bool) {
// Check and process config file
f, ok = configFile()
// If not OK then get parameters from command line and environment variables.
if !ok {
f, ok = commandLineEnv()
}
// reserved
if len(f.Location) == 0 {
f.Location = "selfhost"
}
return
}
// configFile checks for the presence of zero or one command argument.
// If no arguments are provided then we look for and load documize.conf file.
// If one argument is provided then we load the specified config file.
// If more than one argument is provided then we return, as command line flags have been passed instead.
// Any config file found is then parsed as a TOML format config file.
func configFile() (f Flags, ok bool) {
ok = false
var configFile string
// First argument is always program being executed.
// No additional arguments means check for documize.conf file.
if len(os.Args) == 1 {
// No arguments, so we default to default config filename.
configFile = "documize.conf"
} else if len(os.Args) == 2 {
// Config filename passed in, so we use it.
configFile = os.Args[1]
} else {
// Too many arguments means flags passed in so we return.
return
}
// Does file exist?
if len(configFile) == 0 || !configFileExists(configFile) {
return
}
// Tell caller where the config came from.
f.ConfigSource = configFile
// We parse the TOML format config file.
var ct ConfigToml
if _, err := toml.DecodeFile(configFile, &ct); err != nil {
fmt.Println(err)
return
}
f.DBType = strings.ToLower(ct.Database.Type)
f.DBConn = ct.Database.Connection
f.Salt = ct.Database.Salt
f.HTTPPort = strconv.Itoa(ct.HTTP.Port)
f.ForceHTTPPort2SSL = strconv.Itoa(ct.HTTP.ForceSSLPort)
f.SSLCertFile = ct.HTTP.Cert
f.SSLKeyFile = ct.HTTP.Key
f.TLSVersion = ct.HTTP.TLSVersion
f.Location = strings.ToLower(ct.Install.Location)
if len(f.TLSVersion) == 0 {
f.TLSVersion = "1.3"
}
ok = true
return
}
// commandLineEnv loads command line and OS environment variables required by the program to function.
func commandLineEnv() (f Flags, ok bool) {
ok = true
var dbConn, dbType, jwtKey, siteMode, port, certFile, keyFile, forcePort2SSL, TLSVersion, location string
// register(&configFile, "salt", false, "the salt string used to encode JWT tokens, if not set a random value will be generated")
register(&jwtKey, "salt", false, "the salt string used to encode JWT tokens, if not set a random value will be generated")
register(&certFile, "cert", false, "the cert.pem file used for https")
register(&keyFile, "key", false, "the key.pem file used for https")
register(&port, "port", false, "http/https port number")
register(&forcePort2SSL, "forcesslport", false, "redirect given http port number to TLS")
register(&TLSVersion, "tlsversion", false, "select minimum TLS: 1.0, 1.1, 1.2, 1.3")
register(&siteMode, "offline", false, "set to '1' for OFFLINE mode")
register(&dbType, "dbtype", true, "specify the database provider: mysql|percona|mariadb|postgresql|sqlserver")
register(&dbConn, "db", true, `'database specific connection string for example "user:password@tcp(localhost:3306)/dbname"`)
register(&location, "location", false, `reserved`)
if !parse("db") {
ok = false
}
f.DBType = strings.ToLower(dbType)
f.DBConn = dbConn
f.ForceHTTPPort2SSL = forcePort2SSL
f.HTTPPort = port
f.Salt = jwtKey
f.SiteMode = siteMode
f.SSLCertFile = certFile
f.SSLKeyFile = keyFile
f.TLSVersion = TLSVersion
f.Location = strings.ToLower(location)
f.ConfigSource = "flags/environment"
if len(f.TLSVersion) == 0 {
f.TLSVersion = "1.3"
}
return f, ok
}
func configFileExists(fn string) bool {
info, err := os.Stat(fn)
if os.IsNotExist(err) {
return false
}
return !info.IsDir()
}
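A hedged example of the configuration file path described above, decoding a sample documize.conf (TOML) into structs that mirror ConfigToml. The types here are local illustrations; the connection string and salt values are lifted from the docker-compose example elsewhere in this changeset and are placeholders only.

package main

import (
    "fmt"

    "github.com/BurntSushi/toml"
)

type httpConfig struct {
    Port int
}
type databaseConfig struct {
    Type, Connection, Salt string
}
type installConfig struct {
    Location string
}
type config struct {
    HTTP     httpConfig     `toml:"http"`
    Database databaseConfig `toml:"database"`
    Install  installConfig  `toml:"install"`
}

// sample is what a minimal documize.conf might look like.
const sample = `
[http]
port = 5001

[database]
type = "postgresql"
connection = "host=db port=5432 dbname=documize user=documize password=Passw0rd sslmode=disable"
salt = "hsk3Acndky8cdTNx3"

[install]
location = "selfhost"
`

func main() {
    var c config
    if _, err := toml.Decode(sample, &c); err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println(c.Database.Type, c.HTTP.Port, c.Install.Location)
}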

core/env/provider.go

@ -28,8 +28,8 @@ const (
// StoreTypePostgreSQL is PostgreSQL
StoreTypePostgreSQL StoreType = "PostgreSQL"
// StoreTypeMSSQL is Microsoft SQL Server
StoreTypeMSSQL StoreType = "MSSQL"
// StoreTypeSQLServer is Microsoft SQL Server
StoreTypeSQLServer StoreType = "SQLServer"
)
// StoreProvider defines a database provider.
@ -103,4 +103,15 @@ type StoreProvider interface {
// Must use ? for parameter placeholder character as DB layer
// will convert to database specific parameter placeholder character.
ConvertTimestamp() (statement string)
// IsTrue returns storage provider boolean TRUE:
// MySQL is 1, PostgreSQL is TRUE, SQL Server is 1
IsTrue() string
// IsFalse returns storage provider boolean FALSE:
// MySQL is 0, PostgreSQL is FALSE, SQL Server is 0
IsFalse() string
// RowLimit returns SQL for limited number of returned rows
RowLimit(max int) string
}
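A minimal sketch of how a SQL Server flavored provider might satisfy the three new methods. The type name is hypothetical; TOP-based row limiting is one reasonable choice for SQL Server, whereas MySQL and PostgreSQL providers would return a LIMIT clause instead.

// SQLServerProvider is illustrative, not the actual provider implementation.
// Assumes import "fmt".
type SQLServerProvider struct{}

// IsTrue returns the SQL Server literal for boolean TRUE.
func (p SQLServerProvider) IsTrue() string { return "1" }

// IsFalse returns the SQL Server literal for boolean FALSE.
func (p SQLServerProvider) IsFalse() string { return "0" }

// RowLimit caps the number of returned rows, e.g. "TOP 10".
func (p SQLServerProvider) RowLimit(max int) string {
    return fmt.Sprintf("TOP %d", max)
}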

core/env/runtime.go

@ -13,25 +13,26 @@
package env
import (
"context"
"database/sql"
"context"
"database/sql"
"embed"
"github.com/documize/community/domain"
"github.com/documize/community/domain"
"github.com/jmoiron/sqlx"
)
const (
// SiteModeNormal serves app
SiteModeNormal = ""
// SiteModeNormal serves app
SiteModeNormal = ""
// SiteModeOffline serves offline.html
SiteModeOffline = "1"
// SiteModeOffline serves offline.html
SiteModeOffline = "1"
// SiteModeSetup tells Ember to serve setup route
SiteModeSetup = "2"
// SiteModeSetup tells Ember to serve setup route
SiteModeSetup = "2"
// SiteModeBadDB redirects to db-error.html page
SiteModeBadDB = "3"
// SiteModeBadDB redirects to db-error.html page
SiteModeBadDB = "3"
)
// Runtime provides access to database, logger and other server-level scoped objects.
@ -42,42 +43,39 @@ type Runtime struct {
StoreProvider StoreProvider
Log Logger
Product domain.Product
Assets embed.FS
}
var ctx = context.Background()
// StartTx beings database transaction with application defined
// database transaction isolation level.
// StartTx begins database transaction with given transaction isolation level.
// Any error encountered during this operation is logged to runtime logger.
func (r *Runtime) StartTx() (tx *sqlx.Tx, ok bool) {
tx, err := r.Db.BeginTxx(ctx, &sql.TxOptions{Isolation: sql.LevelReadUncommitted})
if err != nil {
r.Log.Error("unable to start database transaction", err)
return nil, false
}
func (r *Runtime) StartTx(i sql.IsolationLevel) (tx *sqlx.Tx, ok bool) {
tx, err := r.Db.BeginTxx(context.Background(), &sql.TxOptions{Isolation: i})
if err != nil {
r.Log.Error("unable to start database transaction", err)
return nil, false
}
return tx, true
return tx, true
}
// Rollback aborts active database transaction.
// Any error encountered during this operation is logged to runtime logger.
func (r *Runtime) Rollback(tx *sqlx.Tx) bool {
if err := tx.Commit(); err != nil {
r.Log.Error("unable to commit database transaction", err)
return false
}
if err := tx.Commit(); err != nil {
r.Log.Error("unable to commit database transaction", err)
return false
}
return true
return true
}
// Commit flushes pending changes to database.
// Any error encountered during this operation is logged to runtime logger.
func (r *Runtime) Commit(tx *sqlx.Tx) bool {
if err := tx.Commit(); err != nil {
r.Log.Error("unable to commit database transaction", err)
return false
}
if err := tx.Commit(); err != nil {
r.Log.Error("unable to commit database transaction", err)
return false
}
return true
return true
}
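A hedged usage sketch of the revised StartTx signature: callers now pick the isolation level per operation and finish through Commit or Rollback on the runtime. The function below is illustrative and assumes imports "database/sql" and "time".

// touchDocument is illustrative only; rt is the server-level *env.Runtime.
func touchDocument(rt *env.Runtime, refID string) bool {
    tx, ok := rt.StartTx(sql.LevelReadCommitted)
    if !ok {
        return false
    }

    // Placeholder style is MySQL-like here; the store layer rebinds per provider.
    _, err := tx.Exec("UPDATE dmz_doc SET c_revised=? WHERE c_refid=?", time.Now().UTC(), refID)
    if err != nil {
        rt.Log.Error("unable to update document", err)
        rt.Rollback(tx)
        return false
    }

    return rt.Commit(tx)
}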

core/i18n/localize.go

@ -0,0 +1,91 @@
package i18n
import (
"embed"
"encoding/json"
"fmt"
"strings"
"github.com/documize/community/core/asset"
"github.com/pkg/errors"
)
const (
DefaultLocale = "en-US"
)
var localeMap map[string]map[string]string
// SupportedLocales returns array of locales.
func SupportedLocales() (locales []string) {
locales = append(locales, "en-US")
locales = append(locales, "de-DE")
locales = append(locales, "zh-CN")
locales = append(locales, "pt-BR")
locales = append(locales, "fr-FR")
locales = append(locales, "ja-JP")
locales = append(locales, "it-IT")
locales = append(locales, "es-AR")
return
}
// Initialize loads the embedded language files
func Initialize(e embed.FS) (err error) {
localeMap = make(map[string]map[string]string)
locales := SupportedLocales()
for i := range locales {
content, _, err := asset.FetchStatic(e, "i18n/"+locales[i]+".json")
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("missing locale %s", locales[i]))
return err
}
var payload interface{}
json.Unmarshal([]byte(content), &payload)
m := payload.(map[string]interface{})
translations := make(map[string]string)
for j := range m {
translations[j] = m[j].(string)
}
localeMap[locales[i]] = translations
}
return nil
}
// Localize returns the string value for the given key using the specified locale.
// e.g. locale = "en-US", key = "admin_billing"
//
// Replacements are for replacing string placeholders ({1} {2} {3}) with
// replacement text.
// e.g. "This is {1} example" --> replacements[0] will replace {1}
func Localize(locale string, key string, replacements ...string) (s string) {
l, ok := localeMap[locale]
if !ok {
// fallback
l = localeMap[DefaultLocale]
}
s, ok = l[key]
if !ok {
// a missing translation key is echoed back
s = fmt.Sprintf("!! %s !!", key)
}
// placeholders are one-based: {1} {2} {3}
// replacements array is zero-based hence the +1 below
if len(replacements) > 0 {
for i := range replacements {
s = strings.Replace(s, fmt.Sprintf("{%d}", i+1), replacements[i], 1)
}
}
return
}
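A short usage sketch of Localize. Apart from admin_billing (mentioned in the comment above), the key names are hypothetical; assumes import "fmt" and the core/i18n package, and that Initialize has already run.

func exampleLocalize() {
    // Direct lookup in a supported locale.
    title := i18n.Localize("fr-FR", "admin_billing")

    // Placeholder substitution: "Showing {1} of {2} results" becomes "Showing 10 of 87 results".
    msg := i18n.Localize("en-US", "search_result_count", "10", "87")

    // Unknown locales fall back to en-US; unknown keys are echoed back as "!! key !!".
    fmt.Println(title, msg)
}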


@ -38,7 +38,7 @@ func CommandWithTimeout(command *exec.Cmd, timeout time.Duration) ([]byte, error
select {
case <-time.After(timeout):
if err := command.Process.Kill(); err != nil {
fmt.Errorf("failed to kill: ", err)
fmt.Printf("failed to kill: %s", err.Error())
}
<-done // prevent memory leak
//fmt.Println("DEBUG timeout")

core/request/url.go

@ -0,0 +1,29 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
// Package request provides HTTP request parsing functions.
package request
import (
"net/http"
"strings"
)
// IsSSL returns true if the Referer header starts with "https".
// If the Referer header is empty we fall back to the r.TLS setting.
func IsSSL(r *http.Request) bool {
rf := r.Referer()
if len(rf) > 1 {
return strings.HasPrefix(rf, "https")
}
return r.TLS != nil
}
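A small usage sketch: choosing a URL scheme based on IsSSL. Illustrative only; assumes imports "net/http" and the core/request package.

func buildAppURL(r *http.Request, path string) string {
    scheme := "http"
    if request.IsSSL(r) {
        scheme = "https"
    }
    return scheme + "://" + r.Host + "/" + path
}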


@ -48,8 +48,8 @@ func WriteServerError(w http.ResponseWriter, method string, err error) {
// WriteError notifies HTTP client of general application error.
func WriteError(w http.ResponseWriter, method string) {
writeStatus(w, http.StatusBadRequest)
w.Write([]byte("{Error: 'Internal server error'}"))
writeStatus(w, http.StatusBadRequest)
w.Write([]byte("{Error: 'Internal server error'}"))
}
// WriteDuplicateError notifies HTTP client of duplicate data that has been rejected.
@ -114,3 +114,17 @@ func WriteJSON(w http.ResponseWriter, v interface{}) {
j, _ := json.Marshal(v)
w.Write(j)
}
// WriteText to HTTP response
func WriteText(w http.ResponseWriter, data []byte) {
w.Header().Set("Content-Type", "text/plain; charset=utf-8")
w.WriteHeader(http.StatusOK)
w.Write(data)
}
// WriteXML to HTTP response
func WriteXML(w http.ResponseWriter, data []byte) {
w.Header().Set("Content-Type", "application/xml; charset=utf-8")
w.WriteHeader(http.StatusOK)
w.Write(data)
}
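A hedged handler sketch using the new helpers; the endpoint and payload are made up. Assumes imports "net/http", "strings" and the core/response package.

// statusHandler is illustrative only.
func statusHandler(w http.ResponseWriter, r *http.Request) {
    if strings.Contains(r.Header.Get("Accept"), "application/xml") {
        response.WriteXML(w, []byte("<status><ok>true</ok></status>"))
        return
    }
    response.WriteText(w, []byte("ok"))
}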


@ -0,0 +1,50 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package stringutil
import (
"strings"
)
// CleanDBValue returns the LIKE query value stripped of characters commonly used in SQL injection.
func CleanDBValue(filter string) string {
filter = strings.ReplaceAll(filter, " ", "")
filter = strings.ReplaceAll(filter, " ' ", "")
filter = strings.ReplaceAll(filter, "'", "")
filter = strings.ReplaceAll(filter, " ` ", "")
filter = strings.ReplaceAll(filter, "`", "")
filter = strings.ReplaceAll(filter, " \" ", "")
filter = strings.ReplaceAll(filter, "\"", "")
filter = strings.ReplaceAll(filter, " -- ", "")
filter = strings.ReplaceAll(filter, "--", "")
filter = strings.ReplaceAll(filter, ";", "")
filter = strings.ReplaceAll(filter, ":", "")
filter = strings.ReplaceAll(filter, "~", "")
filter = strings.ReplaceAll(filter, "!", "")
filter = strings.ReplaceAll(filter, "#", "")
filter = strings.ReplaceAll(filter, "%", "")
filter = strings.ReplaceAll(filter, "*", "")
filter = strings.ReplaceAll(filter, "\\", "")
filter = strings.ReplaceAll(filter, "/", "")
filter = strings.ReplaceAll(filter, "union select", "")
filter = strings.ReplaceAll(filter, "UNION SELECT", "")
filter = strings.ReplaceAll(filter, " from ", "")
filter = strings.ReplaceAll(filter, " FROM ", "")
filter = strings.ReplaceAll(filter, " OR 1=1 ", "")
filter = strings.ReplaceAll(filter, " OR 1=1 ", "")
filter = strings.ReplaceAll(filter, " = ", "")
filter = strings.ReplaceAll(filter, "=", "")
filter = strings.TrimSpace(filter)
return filter
}
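A usage sketch: sanitizing a user-supplied filter before embedding it in a LIKE clause. Parameterized queries remain the primary defense; this helper is a secondary guard. The function is illustrative and assumes imports "strings", sqlx and the stringutil package above.

func findUsersByName(db *sqlx.DB, filter string) (emails []string, err error) {
    clean := strings.ToLower(stringutil.CleanDBValue(filter))
    err = db.Select(&emails,
        "SELECT c_email FROM dmz_user WHERE LOWER(c_firstname) LIKE '%"+clean+"%'")
    return emails, err
}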

docker-compose.yaml

@ -0,0 +1,50 @@
# This Docker Compose file will start up Documize with PostgreSQL.
#
# Use 'documize-community-plus-linux-amd64' for Community+ Edition (default).
# Use 'documize-community-linux-amd64' for Community Edition.
#
# You can move between editions anytime without any data loss
# because of the common database schema.
#
# The latest product release executable will be pulled down
# from a public Amazon S3 bucket.
#
# Use 'docker-compose up|down' to start or stop containers.
version: "3"
services:
db:
image: postgres:12
restart: always
ports:
- 5432:5432
environment:
POSTGRES_USER: documize
POSTGRES_PASSWORD: Passw0rd
POSTGRES_DB: documize
volumes:
- db-data:/var/lib/postgresql/data
networks:
- documizenet
app:
image: debian:latest
command: /bin/sh -c "apt-get -qq update && apt-get -qq install -y wget && wget https://community-downloads.s3.us-east-2.amazonaws.com/documize-community-plus-linux-amd64 && chmod 777 ./documize-community-plus-linux-amd64 && ./documize-community-plus-linux-amd64"
depends_on:
- db
ports:
- 5001:5001
environment:
DOCUMIZEPORT: 5001
DOCUMIZEDB: host=db port=5432 dbname=documize user=documize password=Passw0rd sslmode=disable
DOCUMIZEDBTYPE: postgresql
DOCUMIZESALT: hsk3Acndky8cdTNx3
DOCUMIZELOCATION: selfhost
networks:
- documizenet
volumes:
db-data:
networks:
documizenet:


@ -69,7 +69,7 @@ func (s Store) GetUserAccounts(ctx domain.RequestContext, userID string) (t []ac
a.c_active AS active, a.c_created AS created, a.c_revised AS revised,
b.c_company AS company, b.c_title AS title, b.c_message AS message, b.c_domain as domain
FROM dmz_user_account a, dmz_org b
WHERE a.c_userid=? AND a.c_orgid=b.c_refid AND a.c_active=true
WHERE a.c_userid=? AND a.c_orgid=b.c_refid AND a.c_active=`+s.IsTrue()+`
ORDER BY b.c_title`),
userID)
@ -88,7 +88,7 @@ func (s Store) GetAccountsByOrg(ctx domain.RequestContext) (t []account.Account,
a.c_active AS active, a.c_created AS created, a.c_revised AS revised,
b.c_company AS company, b.c_title AS title, b.c_message AS message, b.c_domain as domain
FROM dmz_user_account a, dmz_org b
WHERE a.c_orgid=b.c_refid AND a.c_orgid=? AND a.c_active=true`),
WHERE a.c_orgid=b.c_refid AND a.c_orgid=? AND a.c_active=`+s.IsTrue()),
ctx.OrgID)
if err != sql.ErrNoRows && err != nil {
@ -100,7 +100,7 @@ func (s Store) GetAccountsByOrg(ctx domain.RequestContext) (t []account.Account,
// CountOrgAccounts returns the number of active user accounts for the specified organization.
func (s Store) CountOrgAccounts(ctx domain.RequestContext) (c int) {
row := s.Runtime.Db.QueryRow(s.Bind("SELECT count(*) FROM dmz_user_account WHERE c_orgid=? AND c_active=true"), ctx.OrgID)
row := s.Runtime.Db.QueryRow(s.Bind("SELECT count(*) FROM dmz_user_account WHERE c_orgid=? AND c_active="+s.IsTrue()), ctx.OrgID)
err := row.Scan(&c)
if err == sql.ErrNoRows {
return 0


@ -13,7 +13,6 @@ package activity
import (
"database/sql"
"fmt"
"time"
"github.com/documize/community/domain"
@ -29,16 +28,16 @@ type Store struct {
}
// RecordUserActivity logs user initiated data changes.
func (s Store) RecordUserActivity(ctx domain.RequestContext, activity activity.UserActivity) (err error) {
func (s Store) RecordUserActivity(ctx domain.RequestContext, activity activity.UserActivity) {
activity.OrgID = ctx.OrgID
activity.UserID = ctx.UserID
activity.Created = time.Now().UTC()
_, err = ctx.Transaction.Exec(s.Bind("INSERT INTO dmz_user_activity (c_orgid, c_userid, c_spaceid, c_docid, c_sectionid, c_sourcetype, c_activitytype, c_metadata, c_created) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"),
_, err := ctx.Transaction.Exec(s.Bind("INSERT INTO dmz_user_activity (c_orgid, c_userid, c_spaceid, c_docid, c_sectionid, c_sourcetype, c_activitytype, c_metadata, c_created) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"),
activity.OrgID, activity.UserID, activity.SpaceID, activity.DocumentID, activity.SectionID, activity.SourceType, activity.ActivityType, activity.Metadata, activity.Created)
if err != nil {
err = errors.Wrap(err, "execute record user activity")
s.Runtime.Log.Error("execute record user activity", err)
}
return
@ -46,7 +45,7 @@ func (s Store) RecordUserActivity(ctx domain.RequestContext, activity activity.U
// GetDocumentActivity returns the metadata for a specified document.
func (s Store) GetDocumentActivity(ctx domain.RequestContext, id string) (a []activity.DocumentActivity, err error) {
qry := s.Bind(`SELECT a.id, DATE(a.c_created) AS created, a.c_orgid AS orgid,
qry := s.Bind(`SELECT a.id, a.c_created AS created, a.c_orgid AS orgid,
COALESCE(a.c_userid, '') AS userid, a.c_spaceid AS spaceid,
a.c_docid AS documentid, a.c_sectionid AS sectionid, a.c_activitytype AS activitytype,
a.c_metadata AS metadata,
@ -77,8 +76,10 @@ func (s Store) GetDocumentActivity(ctx domain.RequestContext, id string) (a []ac
// DeleteDocumentChangeActivity removes all entries for document changes (add, remove, update).
func (s Store) DeleteDocumentChangeActivity(ctx domain.RequestContext, documentID string) (rows int64, err error) {
rows, err = s.DeleteWhere(ctx.Transaction,
fmt.Sprintf("DELETE FROM dmz_user_activity WHERE c_orgid='%s' AND c_docid='%s' AND (c_activitytype=1 OR c_activitytype=2 OR c_activitytype=3 OR c_activitytype=4 OR c_activitytype=7)", ctx.OrgID, documentID))
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_user_activity WHERE c_orgid=? AND c_docid=? AND (c_activitytype=1 OR c_activitytype=2 OR c_activitytype=3 OR c_activitytype=4 OR c_activitytype=7)"), ctx.OrgID, documentID)
if err == sql.ErrNoRows {
err = nil
}
return
}


@ -14,9 +14,6 @@ package attachment
import (
"bytes"
"database/sql"
"fmt"
"github.com/documize/community/domain/auth"
"github.com/documize/community/model/space"
"io"
"mime"
"net/http"
@ -28,12 +25,14 @@ import (
"github.com/documize/community/core/secrets"
"github.com/documize/community/core/uniqueid"
"github.com/documize/community/domain"
"github.com/documize/community/domain/auth"
"github.com/documize/community/domain/organization"
"github.com/documize/community/domain/permission"
indexer "github.com/documize/community/domain/search"
"github.com/documize/community/domain/store"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/audit"
"github.com/documize/community/model/space"
"github.com/documize/community/model/workflow"
uuid "github.com/nu7hatch/gouuid"
)
@ -67,7 +66,7 @@ func (h *Handler) Download(w http.ResponseWriter, r *http.Request) {
// Get attachment being requested.
a, err := h.Store.Attachment.GetAttachment(ctx, ctx.OrgID, request.Param(r, "attachmentID"))
if err == sql.ErrNoRows {
response.WriteNotFoundError(w, method, request.Param(r, "fileID"))
response.WriteNotFoundError(w, method, request.Param(r, "attachmentID"))
return
}
if err != nil {
@ -88,7 +87,7 @@ func (h *Handler) Download(w http.ResponseWriter, r *http.Request) {
return
}
// Get the space for this attachment
// Get the space for this attachment.
sp, err := h.Store.Space.Get(ctx, doc.SpaceID)
if err == sql.ErrNoRows {
response.WriteNotFoundError(w, method, a.DocumentID)
@ -100,8 +99,7 @@ func (h *Handler) Download(w http.ResponseWriter, r *http.Request) {
return
}
// Get the organization for this request
// Get the space for this attachment
// Get the organization for this request.
org, err := h.Store.Organization.GetOrganization(ctx, ctx.OrgID)
if err == sql.ErrNoRows {
response.WriteNotFoundError(w, method, a.DocumentID)
@ -161,6 +159,12 @@ func (h *Handler) Download(w http.ResponseWriter, r *http.Request) {
canDownload = true
}
if !canDownload && len(secureToken) == 0 && len(authToken) == 0 {
h.Runtime.Log.Error("get attachment received no access token", err)
response.WriteForbiddenError(w)
return
}
// Send back error if caller unable view attachment
if !canDownload {
h.Runtime.Log.Error("get attachment refused", err)
@ -174,10 +178,14 @@ func (h *Handler) Download(w http.ResponseWriter, r *http.Request) {
typ = "application/octet-stream"
}
dataSize := len(a.Data)
// w.WriteHeader(http.StatusOK)
w.Header().Set("Content-Type", typ)
w.Header().Set("Content-Disposition", `Attachment; filename="`+a.Filename+`" ; `+`filename*="`+a.Filename+`"`)
w.Header().Set("Content-Length", fmt.Sprintf("%d", len(a.Data)))
w.WriteHeader(http.StatusOK)
if dataSize != 0 {
w.Header().Set("Content-Length", string(dataSize))
}
_, err = w.Write(a.Data)
if err != nil {
@ -210,7 +218,6 @@ func (h *Handler) Get(w http.ResponseWriter, r *http.Request) {
response.WriteServerError(w, method, err)
return
}
if len(a) == 0 {
a = []attachment.Attachment{}
}
@ -292,6 +299,9 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
return
}
// File can be associated with a section as well.
sectionID := request.Query(r, "page")
if !permission.CanChangeDocument(ctx, *h.Store, documentID) {
response.WriteForbiddenError(w)
return
@ -329,6 +339,7 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
a.FileID = random[0:9]
a.Filename = filename.Filename
a.Data = b.Bytes()
a.SectionID = sectionID
ctx.Transaction, err = h.Runtime.Db.Beginx()
if err != nil {


@ -33,11 +33,13 @@ func (s Store) Add(ctx domain.RequestContext, a attachment.Attachment) (err erro
a.OrgID = ctx.OrgID
a.Created = time.Now().UTC()
a.Revised = time.Now().UTC()
bits := strings.Split(a.Filename, ".")
a.Extension = bits[len(bits)-1]
if len(a.Extension) == 0 {
bits := strings.Split(a.Filename, ".")
a.Extension = bits[len(bits)-1]
}
_, err = ctx.Transaction.Exec(s.Bind("INSERT INTO dmz_doc_attachment (c_refid, c_orgid, c_docid, c_job, c_fileid, c_filename, c_data, c_extension, c_created, c_revised) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"),
a.RefID, a.OrgID, a.DocumentID, a.Job, a.FileID, a.Filename, a.Data, a.Extension, a.Created, a.Revised)
_, err = ctx.Transaction.Exec(s.Bind("INSERT INTO dmz_doc_attachment (c_refid, c_orgid, c_docid, c_sectionid, c_job, c_fileid, c_filename, c_data, c_extension, c_created, c_revised) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"),
a.RefID, a.OrgID, a.DocumentID, a.SectionID, a.Job, a.FileID, a.Filename, a.Data, a.Extension, a.Created, a.Revised)
if err != nil {
err = errors.Wrap(err, "execute insert attachment")
@ -50,7 +52,7 @@ func (s Store) Add(ctx domain.RequestContext, a attachment.Attachment) (err erro
func (s Store) GetAttachment(ctx domain.RequestContext, orgID, attachmentID string) (a attachment.Attachment, err error) {
err = s.Runtime.Db.Get(&a, s.Bind(`
SELECT id, c_refid AS refid,
c_orgid AS orgid, c_docid AS documentid, c_job AS job, c_fileid AS fileid,
c_orgid AS orgid, c_docid AS documentid, c_sectionid AS sectionid, c_job AS job, c_fileid AS fileid,
c_filename AS filename, c_data AS data, c_extension AS extension,
c_created AS created, c_revised AS revised
FROM dmz_doc_attachment
@ -68,7 +70,7 @@ func (s Store) GetAttachment(ctx domain.RequestContext, orgID, attachmentID stri
func (s Store) GetAttachments(ctx domain.RequestContext, docID string) (a []attachment.Attachment, err error) {
err = s.Runtime.Db.Select(&a, s.Bind(`
SELECT id, c_refid AS refid,
c_orgid AS orgid, c_docid AS documentid, c_job AS job, c_fileid AS fileid,
c_orgid AS orgid, c_docid AS documentid, c_sectionid AS sectionid, c_job AS job, c_fileid AS fileid,
c_filename AS filename, c_extension AS extension,
c_created AS created, c_revised AS revised
FROM dmz_doc_attachment
@ -88,11 +90,36 @@ func (s Store) GetAttachments(ctx domain.RequestContext, docID string) (a []atta
return
}
// GetSectionAttachments returns a slice containing the attachment records
// with file data for specified document section.
func (s Store) GetSectionAttachments(ctx domain.RequestContext, sectionID string) (a []attachment.Attachment, err error) {
err = s.Runtime.Db.Select(&a, s.Bind(`
SELECT id, c_refid AS refid,
c_orgid AS orgid, c_docid AS documentid, c_sectionid AS sectionid, c_job AS job, c_fileid AS fileid,
c_filename AS filename, c_data AS data, c_extension AS extension,
c_created AS created, c_revised AS revised
FROM dmz_doc_attachment
WHERE c_orgid=? AND c_sectionid=?
ORDER BY c_filename`),
ctx.OrgID, sectionID)
if err == sql.ErrNoRows {
err = nil
a = []attachment.Attachment{}
}
if err != nil {
err = errors.Wrap(err, "execute select section attachments")
return
}
return
}
// GetAttachmentsWithData returns a slice containing the attachment records (including their data) for document docID, ordered by filename.
func (s Store) GetAttachmentsWithData(ctx domain.RequestContext, docID string) (a []attachment.Attachment, err error) {
err = s.Runtime.Db.Select(&a, s.Bind(`
SELECT id, c_refid AS refid,
c_orgid AS orgid, c_docid AS documentid, c_job AS job, c_fileid AS fileid,
c_orgid AS orgid, c_docid AS documentid, c_sectionid AS sectionid, c_job AS job, c_fileid AS fileid,
c_filename AS filename, c_data AS data, c_extension AS extension,
c_created AS created, c_revised AS revised
FROM dmz_doc_attachment
@ -116,3 +143,13 @@ func (s Store) GetAttachmentsWithData(ctx domain.RequestContext, docID string) (
func (s Store) Delete(ctx domain.RequestContext, id string) (rows int64, err error) {
return s.DeleteConstrained(ctx.Transaction, "dmz_doc_attachment", ctx.OrgID, id)
}
// DeleteSection removes all attachments against a section.
func (s Store) DeleteSection(ctx domain.RequestContext, sectionID string) (rows int64, err error) {
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_doc_attachment WHERE c_orgid=? AND c_sectionid=?"), ctx.OrgID, sectionID)
if err == sql.ErrNoRows {
err = nil
}
return
}


@ -14,6 +14,7 @@ package audit
import (
"time"
"database/sql"
"github.com/documize/community/domain"
"github.com/documize/community/domain/store"
@ -35,21 +36,21 @@ func (s Store) Record(ctx domain.RequestContext, t audit.EventType) {
e.IP = ctx.ClientIP
e.Type = string(t)
tx, err := s.Runtime.Db.Beginx()
if err != nil {
s.Runtime.Log.Error("transaction", err)
tx, ok := s.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
s.Runtime.Log.Info("unable to start transaction")
return
}
_, err = tx.Exec(s.Bind("INSERT INTO dmz_audit_log (c_orgid, c_userid, c_eventtype, c_ip, c_created) VALUES (?, ?, ?, ?, ?)"),
_, err := tx.Exec(s.Bind("INSERT INTO dmz_audit_log (c_orgid, c_userid, c_eventtype, c_ip, c_created) VALUES (?, ?, ?, ?, ?)"),
e.OrgID, e.UserID, e.Type, e.IP, e.Created)
if err != nil {
tx.Rollback()
s.Runtime.Rollback(tx)
s.Runtime.Log.Error("prepare audit insert", err)
return
}
tx.Commit()
s.Runtime.Commit(tx)
return
}


@ -48,6 +48,7 @@ func AddExternalUser(ctx domain.RequestContext, rt *env.Runtime, store *store.St
if addUser {
userID = uniqueid.Generate()
u.RefID = userID
u.Locale = ctx.OrgLocale
err = store.User.Add(ctx, u)
if err != nil {

domain/auth/cas/README.md Normal file

@ -0,0 +1,25 @@
# Authenticating with Apereo CAS
## Introduction
Documize can delegate user authentication to an Apereo CAS server.
This document assumes that the Documize administrator has installed, and is familiar with, a CAS server.
https://www.apereo.org/projects/cas
Documize is tested against CAS version 5.3.x.
## Run a CAS server
Refer to the following link [https://apereo.github.io/cas/5.0.x/installation/Docker-Installation.html](https://apereo.github.io/cas/5.0.x/installation/Docker-Installation.html) to run a CAS server. The server address is usually `https://localhost:8443/cas`.
## Configuring Documize
CAS authentication is configured and enabled from Settings.
Type in the CAS Server URL and the Redirect URL (see the sketch after this list).
* **CAS Server URL**: The CAS host address, e.g. `https://localhost:8443/cas`
* **Redirect URL**: The CAS authorize callback URL. If your Documize URL is `https://example.documize.com`, then the redirect URL is `https://example.documize.com/auth/cas`.
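For orientation, here is a minimal sketch of how these two settings are used during login: the ticket returned by CAS is validated against the server's `/serviceValidate` endpoint, with the redirect URL passed as the `service` parameter. The struct field names and the ticket value below are illustrative assumptions, not the exact Documize model.

```go
package main

import (
	"fmt"
	"net/url"
)

// casConfig mirrors the two settings described above; the real config
// struct lives in the Documize auth model and may use different names.
type casConfig struct {
	URL         string // CAS Server URL, e.g. https://localhost:8443/cas
	RedirectURL string // Redirect URL, e.g. https://example.documize.com/auth/cas
}

func main() {
	cfg := casConfig{
		URL:         "https://localhost:8443/cas",
		RedirectURL: "https://example.documize.com/auth/cas",
	}

	// Hypothetical service ticket returned by CAS after the user signs in.
	ticket := "ST-12345"

	// The ticket is validated by calling the CAS serviceValidate endpoint,
	// passing the redirect URL as the service parameter.
	validateURL := cfg.URL + "/serviceValidate?ticket=" + ticket +
		"&service=" + url.QueryEscape(cfg.RedirectURL)
	fmt.Println(validateURL)
}
```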

domain/auth/cas/endpoint.go Normal file

@ -0,0 +1,177 @@
package cas
import (
"database/sql"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"strings"
"github.com/documize/community/core/env"
"github.com/documize/community/core/response"
"github.com/documize/community/core/secrets"
"github.com/documize/community/core/streamutil"
"github.com/documize/community/core/stringutil"
"github.com/documize/community/domain"
"github.com/documize/community/domain/auth"
"github.com/documize/community/domain/store"
usr "github.com/documize/community/domain/user"
ath "github.com/documize/community/model/auth"
"github.com/documize/community/model/user"
casv2 "gopkg.in/cas.v2"
)
// Handler contains the runtime information such as logging and database.
type Handler struct {
Runtime *env.Runtime
Store *store.Store
}
// Authenticate checks CAS authentication credentials.
func (h *Handler) Authenticate(w http.ResponseWriter, r *http.Request) {
method := "authenticate"
ctx := domain.GetRequestContext(r)
defer streamutil.Close(r.Body)
body, err := ioutil.ReadAll(r.Body)
if err != nil {
response.WriteBadRequestError(w, method, "Bad payload")
h.Runtime.Log.Error(method, err)
return
}
a := ath.CASAuthRequest{}
err = json.Unmarshal(body, &a)
if err != nil {
response.WriteBadRequestError(w, method, err.Error())
h.Runtime.Log.Error(method, err)
return
}
a.Ticket = strings.TrimSpace(a.Ticket)
org, err := h.Store.Organization.GetOrganizationByDomain("")
if err != nil {
response.WriteUnauthorizedError(w)
h.Runtime.Log.Error(method, err)
return
}
ctx.OrgID = org.RefID
// Fetch CAS auth provider config
ac := ath.CASConfig{}
err = json.Unmarshal([]byte(org.AuthConfig), &ac)
if err != nil {
response.WriteBadRequestError(w, method, "Unable to unmarshal CAS configuration")
h.Runtime.Log.Error(method, err)
return
}
service := url.QueryEscape(ac.RedirectURL)
validateURL := ac.URL + "/serviceValidate?ticket=" + a.Ticket + "&service=" + service
resp, err := http.Get(validateURL)
if err != nil {
response.WriteBadRequestError(w, method, "Unable to get service validate url")
h.Runtime.Log.Error(method, err)
return
}
defer streamutil.Close(resp.Body)
data, err := ioutil.ReadAll(resp.Body)
if err != nil {
response.WriteBadRequestError(w, method, "Unable to verify CAS ticket: "+a.Ticket)
h.Runtime.Log.Error(method, err)
return
}
userInfo, err := casv2.ParseServiceResponse(data)
if err != nil {
response.WriteBadRequestError(w, method, "Unable to get user information")
h.Runtime.Log.Error(method, err)
return
}
h.Runtime.Log.Info("cas logon attempt " + userInfo.User)
u, err := h.Store.User.GetByDomain(ctx, a.Domain, userInfo.User)
if err != nil && err != sql.ErrNoRows {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// Create user account if not found
if err == sql.ErrNoRows {
h.Runtime.Log.Info("cas add user " + userInfo.User + " @ " + a.Domain)
u = user.User{}
u.Active = true
u.ViewUsers = false
u.Analytics = false
u.Admin = false
u.GlobalAdmin = false
u.Email = userInfo.User
fn := userInfo.Attributes.Get("first_name")
ln := userInfo.Attributes.Get("last_name")
if len(fn) > 0 || len(ln) > 0 {
u.Initials = stringutil.MakeInitials(fn, ln)
u.Firstname = fn
u.Lastname = ln
} else {
u.Initials = stringutil.MakeInitials(userInfo.User, "")
u.Firstname = userInfo.User
u.Lastname = ""
}
u.Salt = secrets.GenerateSalt()
u.Password = secrets.GeneratePassword(secrets.GenerateRandomPassword(), u.Salt)
u, err = auth.AddExternalUser(ctx, h.Runtime, h.Store, u, true)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
// Reject the request if the CAS username does not match the stored account email.
if userInfo.User != strings.TrimSpace(strings.ToLower(u.Email)) {
response.WriteUnauthorizedError(w)
return
}
// Attach user accounts and work out permissions.
usr.AttachUserAccounts(ctx, *h.Store, org.RefID, &u)
// No accounts signals data integrity problem
// so we reject login request.
if len(u.Accounts) == 0 {
response.WriteUnauthorizedError(w)
err = fmt.Errorf("no user accounts found for %s", u.Email)
h.Runtime.Log.Error(method, err)
return
}
// Abort login request if account is disabled.
for _, ac := range u.Accounts {
if ac.OrgID == org.RefID {
if ac.Active == false {
response.WriteUnauthorizedError(w)
err = fmt.Errorf("no ACTIVE user account found for %s", u.Email)
h.Runtime.Log.Error(method, err)
return
}
break
}
}
// Generate JWT token
authModel := ath.AuthenticationModel{}
authModel.Token = auth.GenerateJWT(h.Runtime, u.RefID, org.RefID, a.Domain)
authModel.User = u
response.WriteJSON(w, authModel)
return
}


@ -17,6 +17,8 @@ import (
"net/http"
"strings"
"github.com/documize/community/core/request"
"github.com/documize/community/core/env"
"github.com/documize/community/core/response"
"github.com/documize/community/core/secrets"
@ -122,6 +124,7 @@ func (h *Handler) Login(w http.ResponseWriter, r *http.Request) {
}
// ValidateToken finds and validates authentication token.
// TODO: remove
func (h *Handler) ValidateToken(w http.ResponseWriter, r *http.Request) {
// TODO should this go after token validation?
if s := r.URL.Query().Get("section"); s != "" {
@ -195,7 +198,7 @@ func (h *Handler) ValidateToken(w http.ResponseWriter, r *http.Request) {
rc.GlobalAdmin = false
rc.AppURL = r.Host
rc.Subdomain = organization.GetSubdomainFromHost(r)
rc.SSL = r.TLS != nil
rc.SSL = request.IsSSL(r)
// Fetch user permissions for this org
if !rc.Authenticated {


@ -21,6 +21,7 @@ import (
"strings"
"github.com/documize/community/core/env"
"github.com/documize/community/core/i18n"
"github.com/documize/community/core/response"
"github.com/documize/community/core/secrets"
"github.com/documize/community/core/streamutil"
@ -57,7 +58,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
// Org contains raw auth provider config
org, err := h.Store.Organization.GetOrganization(ctx, ctx.OrgID)
if err != nil {
result.Message = "Error: unable to get organization record"
result.Message = i18n.Localize(ctx.Locale, "server_err_org")
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Error(result.Message, err)
@ -66,7 +67,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
// Exit if not using Keycloak
if org.AuthProvider != ath.AuthProviderKeycloak {
result.Message = "Error: skipping user sync with Keycloak as it is not the configured option"
result.Message = i18n.Localize(ctx.Locale, "server_keycloak_error1")
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Info(result.Message)
@ -77,7 +78,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
c := ath.KeycloakConfig{}
err = json.Unmarshal([]byte(org.AuthConfig), &c)
if err != nil {
result.Message = "Error: unable read Keycloak configuration data"
result.Message = i18n.Localize(ctx.Locale, "server_keycloak_error2")
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Error(result.Message, err)
@ -87,7 +88,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
// User list from Keycloak
kcUsers, err := Fetch(c)
if err != nil {
result.Message = "Error: unable to fetch Keycloak users: " + err.Error()
result.Message = i18n.Localize(ctx.Locale, "server_keycloak_error3", err.Error())
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Error(result.Message, err)
@ -97,7 +98,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
// User list from Documize
dmzUsers, err := h.Store.User.GetUsersForOrganization(ctx, "", 99999)
if err != nil {
result.Message = "Error: unable to fetch Documize users"
result.Message = i18n.Localize(ctx.Locale, "server_error_user")
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Error(result.Message, err)
@ -135,8 +136,8 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
}
}
result.Message = fmt.Sprintf("Keycloak sync found %d users, %d new users added, %d users with missing data ignored",
len(kcUsers), len(insert), missing)
result.Message = i18n.Localize(ctx.Locale, "server_keycloak_summary",
fmt.Sprintf("%d", len(kcUsers)), fmt.Sprintf("%d", len(insert)), fmt.Sprintf("%d", missing))
response.WriteJSON(w, result)
h.Runtime.Log.Info(result.Message)


@ -21,6 +21,7 @@ import (
"strings"
"github.com/documize/community/core/env"
"github.com/documize/community/core/i18n"
"github.com/documize/community/core/response"
"github.com/documize/community/core/secrets"
"github.com/documize/community/core/streamutil"
@ -126,8 +127,7 @@ func (h *Handler) Preview(w http.ResponseWriter, r *http.Request) {
response.WriteJSON(w, result)
}
// Sync gets list of Keycloak users and inserts new users into Documize
// and marks Keycloak disabled users as inactive.
// Sync gets list of LDAP users and inserts new users into Documize.
func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
ctx := domain.GetRequestContext(r)
@ -147,7 +147,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
// Org contains raw auth provider config
org, err := h.Store.Organization.GetOrganization(ctx, ctx.OrgID)
if err != nil {
result.Message = "Error: unable to get organization record"
result.Message = i18n.Localize(ctx.Locale, "server_error_org")
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Error(result.Message, err)
@ -156,7 +156,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
// Exit if not using LDAP
if org.AuthProvider != ath.AuthProviderLDAP {
result.Message = "Error: skipping user sync with LDAP as it is not the configured option"
result.Message = i18n.Localize(ctx.Locale, "server_ldap_error1")
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Info(result.Message)
@ -167,7 +167,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
c := lm.LDAPConfig{}
err = json.Unmarshal([]byte(org.AuthConfig), &c)
if err != nil {
result.Message = "Error: unable read LDAP configuration data"
result.Message = i18n.Localize(ctx.Locale, "server_ldap_error2")
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Error(result.Message, err)
@ -177,7 +177,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
// Get user list from LDAP.
ldapUsers, err := fetchUsers(c)
if err != nil {
result.Message = "Error: unable to fetch LDAP users: " + err.Error()
result.Message = i18n.Localize(ctx.Locale, "server_ldap_error3", err.Error())
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Error(result.Message, err)
@ -187,7 +187,7 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
// Get user list from Documize
dmzUsers, err := h.Store.User.GetUsersForOrganization(ctx, "", 99999)
if err != nil {
result.Message = "Error: unable to fetch Documize users"
result.Message = i18n.Localize(ctx.Locale, "server_error_user")
result.IsError = true
response.WriteJSON(w, result)
h.Runtime.Log.Error(result.Message, err)
@ -224,10 +224,8 @@ func (h *Handler) Sync(w http.ResponseWriter, r *http.Request) {
}
result.IsError = false
result.Message = "Sync complete with LDAP server"
result.Message = fmt.Sprintf(
"LDAP sync found %d users, %d new users added, %d users with missing data ignored",
len(ldapUsers), len(insert), missing)
result.Message = i18n.Localize(ctx.Locale, "server_ldap_complete")
result.Message = i18n.Localize(ctx.Locale, "server_ldap_summary", fmt.Sprintf("%d", len(ldapUsers)), fmt.Sprintf("%d", len(insert)), fmt.Sprintf("%d", missing))
h.Runtime.Log.Info(result.Message)
@ -273,13 +271,12 @@ func (h *Handler) Authenticate(w http.ResponseWriter, r *http.Request) {
// Check for required fields.
if len(username) == 0 || len(password) == 0 {
response.WriteUnauthorizedError(w)
h.Runtime.Log.Info("LDAP authentication aborted due to missing username/password")
return
}
dom = h.Store.Organization.CheckDomain(ctx, dom) // TODO optimize by removing this once js allows empty domains
h.Runtime.Log.Info("LDAP login request " + username + " @ " + dom)
// Get the org and its associated LDAP config.
org, err := h.Store.Organization.GetOrganizationByDomain(dom)
if err != nil {
@ -298,6 +295,13 @@ func (h *Handler) Authenticate(w http.ResponseWriter, r *http.Request) {
ctx.OrgID = org.RefID
// We first connect to LDAP and try to authenticate user.
// If user auth fails and dual authentication is enabled,
// we try to authenticate with email/password combo.
var u user.User
// Try LDAP
h.Runtime.Log.Info("LDAP login request " + username + " @ " + dom)
l, err := connect(lc)
if err != nil {
response.WriteBadRequestError(w, method, "unable to dial LDAP server")
@ -305,48 +309,78 @@ func (h *Handler) Authenticate(w http.ResponseWriter, r *http.Request) {
return
}
defer l.Close()
lu, ok, err := authenticate(l, lc, username, password)
if err != nil {
response.WriteBadRequestError(w, method, "error during LDAP authentication")
h.Runtime.Log.Error(method, err)
return
}
if !ok {
response.WriteUnauthorizedError(w)
return
}
h.Runtime.Log.Info("LDAP logon completed " + lu.Email)
// If OK then we complete LDAP specific processing
if ok {
h.Runtime.Log.Info("LDAP logon completed " + lu.Email)
u, err := h.Store.User.GetByDomain(ctx, dom, lu.Email)
if err != nil && err != sql.ErrNoRows {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// Create user account if not found
if err == sql.ErrNoRows {
h.Runtime.Log.Info("Adding new LDAP user " + lu.Email + " @ " + dom)
u = convertUser(lc, lu)
u.Salt = secrets.GenerateSalt()
u.Password = secrets.GeneratePassword(secrets.GenerateRandomPassword(), u.Salt)
u, err = auth.AddExternalUser(ctx, h.Runtime, h.Store, u, lc.DefaultPermissionAddSpace)
if err != nil {
u, err = h.Store.User.GetByDomain(ctx, dom, lu.Email)
if err != nil && err != sql.ErrNoRows {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
if len(lu.Email) == 0 || len(u.Email) == 0 {
response.WriteUnauthorizedError(w)
h.Runtime.Log.Infof("LDAP user without email faild auth (%s)", username)
return
}
// If user authenticated BUT is not within Documize, we fail authentication.
// If dual auth is enabled, we can try regular email/password login (see next).
if err == sql.ErrNoRows {
ok = false
}
}
// If LDAP authentication failed, we check to see if we are allowed
// to perform authentication via regular email/password.
if !ok {
// Return as unauthorized if dual authentication not enabled.
if !lc.AllowFormsAuth {
h.Runtime.Log.Info("LDAP failed login request for " + username + " @ " + dom)
response.WriteUnauthorizedError(w)
return
}
h.Runtime.Log.Info("Trying forms auth as LDAP login login failed for " + username + " @ " + dom)
// Now try regular email/password authentication.
u, err = h.Store.User.GetByDomain(ctx, dom, username)
if err == sql.ErrNoRows {
response.WriteUnauthorizedError(w)
return
}
if err != nil && err != sql.ErrNoRows {
h.Runtime.Log.Error("unable to fetch user", err)
response.WriteServerError(w, method, err)
return
}
if len(u.Reset) > 0 || len(u.Password) == 0 {
response.WriteUnauthorizedError(w)
return
}
// Password correct and active user
if username != strings.TrimSpace(strings.ToLower(u.Email)) || !secrets.MatchPassword(u.Password, password, u.Salt) {
response.WriteUnauthorizedError(w)
return
}
}
// Below is the standard flow for user authentication, regardless
// of whether they used LDAP or the email/password combo.
// Attach user accounts and work out permissions.
usr.AttachUserAccounts(ctx, *h.Store, org.RefID, &u)
// No accounts signals data integrity problem
// so we reject login request.
// No accounts signals data integrity problem so we reject login request.
if len(u.Accounts) == 0 {
response.WriteUnauthorizedError(w)
h.Runtime.Log.Error(method, err)
@ -365,7 +399,7 @@ func (h *Handler) Authenticate(w http.ResponseWriter, r *http.Request) {
}
}
// Generate JWT token
// Send back newly generated JWT token.
authModel := ath.AuthenticationModel{}
authModel.Token = auth.GenerateJWT(h.Runtime, u.RefID, org.RefID, dom)
authModel.User = u


@ -20,8 +20,8 @@ import (
"github.com/documize/community/core/stringutil"
lm "github.com/documize/community/model/auth"
"github.com/documize/community/model/user"
ld "github.com/go-ldap/ldap/v3"
"github.com/pkg/errors"
ld "gopkg.in/ldap.v2"
)
// Connect establishes connection to LDAP server.
@ -172,9 +172,9 @@ func executeGroupFilter(c lm.LDAPConfig) (u []lm.LDAPUser, err error) {
continue
}
// Get CN element from DN.
for _, entry := range rawMembers {
// get CN element from DN
parts := strings.Split(entry, ",")
parts := splitDN(entry)
if len(parts) == 0 {
continue
}
@ -204,6 +204,44 @@ func executeGroupFilter(c lm.LDAPConfig) (u []lm.LDAPUser, err error) {
return
}
// splitDN handles splitting of DN string whilst respecting
// escaped comma characters.
//
// DN values can contain escaped commas in two ways:
//
// \,
// \5c,
//
// Relevant notes:
//
// https://docs.oracle.com/cd/E19424-01/820-4811/gdxpo/index.html#6ng8i269q
// https://devblogs.microsoft.com/scripting/how-can-i-work-with-a-cn-that-has-a-comma-in-it/
//
// Example:
//
// CN=Surname\, Name,OU=Something,OU=AD-Example,OU=Examaple,DC=example,DC=example,DC=com
//
// When we split on comma, here is our logic:
//
// 1. Replace any escaped comma values with a special character sequence.
// 2. Split string on comma as per usual.
// 3. Put back the original escaped comma values.
func splitDN(dn string) []string {
dn = strings.ReplaceAll(dn, `\5c,`, "!!1!!")
dn = strings.ReplaceAll(dn, `\,`, "!!2!!")
sp := strings.Split(dn, ",")
for i := range sp {
val := sp[i]
val = strings.ReplaceAll(val, "!!1!!", `\5c,`)
val = strings.ReplaceAll(val, "!!2!!", `\,`)
sp[i] = val
}
return sp
}
// extractUser builds a user record from LDAP result attributes.
func extractUser(c lm.LDAPConfig, e *ld.Entry) (u lm.LDAPUser) {
u.Firstname = e.GetAttributeValue(c.AttributeUserFirstname)


@ -0,0 +1,39 @@
// Copyright 2016 Documize Inc. <legal@documize.com>. All rights reserved.
//
// This software (Documize Community Edition) is licensed under
// GNU AGPL v3 http://www.gnu.org/licenses/agpl-3.0.en.html
//
// You can operate outside the AGPL restrictions by purchasing
// Documize Enterprise Edition and obtaining a commercial license
// by contacting <sales@documize.com>.
//
// https://documize.com
package ldap
import (
"testing"
)
var testSplitData = []struct {
in string
count int
}{
{`CN=Surname\,Name,OU=Something,OU=AD-Example,OU=Examaple,DC=example,DC=example,DC=com`, 7},
{`CN=Surname\, Name,OU=Something,OU=AD-Example,OU=Examaple,DC=example,DC=example,DC=com`, 7},
{`CN=Surname\5c, Name,OU=Some\,thing,OU=AD-Example,OU=Examaple,DC=example,DC=example,DC=com`, 7},
{`CN=Surname\5c,Name,OU=Something,OU=AD-Example,OU=Examaple,DC=example,DC=example,DC=com`, 7},
{`CN=Given,OU=Something,OU=AD-Example,OU=Examaple,DC=example,DC=example,DC=com`, 7},
{"cn=Hubert\\, J. Farnsworth,ou=people,dc=planetexpress,dc=com", 4},
}
func Test_SplitDN(t *testing.T) {
for _, td := range testSplitData {
sp := splitDN(td.in)
if len(sp) != td.count {
t.Errorf("Did not receive %d split entries", td.count)
return
}
t.Logf("%d entries: %v", len(sp), sp)
}
}


@ -45,6 +45,7 @@ import (
"github.com/documize/community/model/category"
"github.com/documize/community/model/doc"
"github.com/documize/community/model/group"
"github.com/documize/community/model/label"
"github.com/documize/community/model/link"
"github.com/documize/community/model/page"
"github.com/documize/community/model/permission"
@ -170,6 +171,12 @@ func (b backerHandler) produce(id string) (files []backupItem, err error) {
return
}
// Space Label
err = b.dmzSpaceLabel(&files)
if err != nil {
return
}
// Space, Permission.
err = b.dmzSpace(&files)
if err != nil {
@ -237,7 +244,7 @@ func (b backerHandler) dmzOrg(files *[]backupItem) (err error) {
c_anonaccess AS allowanonymousaccess, c_authprovider AS authprovider,
coalesce(c_sub,`+b.Runtime.StoreProvider.JSONEmpty()+`) AS subscription,
coalesce(c_authconfig,`+b.Runtime.StoreProvider.JSONEmpty()+`) AS authconfig, c_maxtags AS maxtags,
c_theme AS theme, c_logo AS logo, c_created AS created, c_revised AS revised
c_theme AS theme, c_logo AS logo, c_locale as locale, c_created AS created, c_revised AS revised
FROM dmz_org`+w)
if err != nil {
return
@ -301,7 +308,7 @@ func (b backerHandler) dmzUserAccount(files *[]backupItem) (err error) {
err = b.Runtime.Db.Select(&u, `SELECT u.id, u.c_refid AS refid,
u.c_firstname AS firstname, u.c_lastname AS lastname, u.c_email AS email,
u.c_initials AS initials, u.c_globaladmin AS globaladmin,
u.c_password AS password, u.c_salt AS salt, u.c_reset AS reset, u.c_lastversion AS lastversion,
u.c_password AS password, u.c_salt AS salt, u.c_reset AS reset, u.c_lastversion AS lastversion, u.c_locale as locale,
u.c_created AS created, u.c_revised AS revised
FROM dmz_user u`+w)
if err != nil {
@ -452,6 +459,32 @@ func (b backerHandler) dmzPin(files *[]backupItem) (err error) {
return
}
// Space Label
func (b backerHandler) dmzSpaceLabel(files *[]backupItem) (err error) {
w := ""
if !b.Spec.SystemBackup() {
w = fmt.Sprintf(" WHERE c_orgid='%s' ", b.Spec.OrgID)
}
l := []label.Label{}
err = b.Runtime.Db.Select(&l, `
SELECT id, c_refid AS refid,
c_orgid AS orgid, c_name AS name, c_color AS color,
c_created AS created, c_revised AS revised
FROM dmz_space_label`+w)
if err != nil {
return errors.Wrap(err, "select.space_label")
}
content, err := toJSON(l)
if err != nil {
return errors.Wrap(err, "json.space_label")
}
*files = append(*files, backupItem{Filename: "dmz_space_label.json", Content: content})
return
}
// Space, Permission.
func (b backerHandler) dmzSpace(files *[]backupItem) (err error) {
w := ""
@ -512,7 +545,8 @@ func (b backerHandler) dmzCategory(files *[]backupItem) (err error) {
err = b.Runtime.Db.Select(&cat, `
SELECT id, c_refid AS refid,
c_orgid AS orgid, c_spaceid AS spaceid,
c_name AS name, c_created AS created, c_revised AS revised
c_name AS name, c_default AS isdefault,
c_created AS created, c_revised AS revised
FROM dmz_category`+w)
if err != nil {
return errors.Wrap(err, "select.category")
@ -648,7 +682,7 @@ func (b backerHandler) dmzDocument(files *[]backupItem) (err error) {
c_job AS job, c_location AS location, c_name AS name, c_desc AS excerpt, c_slug AS slug,
c_tags AS tags, c_template AS template, c_protection AS protection, c_approval AS approval,
c_lifecycle AS lifecycle, c_versioned AS versioned, c_versionid AS versionid,
c_versionorder AS versionorder, c_groupid AS groupid, c_created AS created, c_revised AS revised
c_versionorder AS versionorder, c_seq AS sequence, c_groupid AS groupid, c_created AS created, c_revised AS revised
FROM dmz_doc`+w)
if err != nil {
return errors.Wrap(err, "select.document")
@ -700,7 +734,8 @@ func (b backerHandler) dmzDocument(files *[]backupItem) (err error) {
err = b.Runtime.Db.Select(&cm, `
SELECT c_refid AS refid, c_orgid AS orgid, c_docid AS documentid,
c_userid AS userid, c_email AS email,
c_feedback AS feedback, c_created AS created
c_feedback AS feedback, c_sectionid AS sectionid, c_replyto AS replyto,
c_created AS created
FROM dmz_doc_comment`+w)
if err != nil {
return errors.Wrap(err, "select.doccomment")
@ -733,7 +768,7 @@ func (b backerHandler) dmzDocument(files *[]backupItem) (err error) {
at := []attachment.Attachment{}
err = b.Runtime.Db.Select(&at, `
SELECT id, c_refid AS refid,
c_orgid AS orgid, c_docid AS documentid, c_job AS job, c_fileid AS fileid,
c_orgid AS orgid, c_docid AS documentid, c_sectionid AS sectionid, c_job AS job, c_fileid AS fileid,
c_filename AS filename, c_data AS data, c_extension AS extension,
c_created AS created, c_revised AS revised
FROM dmz_doc_attachment`+w)


@ -89,7 +89,7 @@ func (h *Handler) Backup(w http.ResponseWriter, r *http.Request) {
return
}
h.Runtime.Log.Info("Backup started")
h.Runtime.Log.Infof("Backup started %s", ctx.OrgID)
bh := backerHandler{Runtime: h.Runtime, Store: h.Store, Context: ctx, Spec: spec}
@ -113,7 +113,7 @@ func (h *Handler) Backup(w http.ResponseWriter, r *http.Request) {
return
}
h.Runtime.Log.Info(fmt.Sprintf("Backup size pending download %d", len(bk)))
h.Runtime.Log.Info(fmt.Sprintf("Backup size of org %s pending download %d", ctx.OrgID, len(bk)))
// Standard HTTP headers.
w.Header().Set("Content-Type", "application/zip")
@ -124,6 +124,7 @@ func (h *Handler) Backup(w http.ResponseWriter, r *http.Request) {
// instead of parsing 'Content-Disposition' header.
// This HTTP header is CORS white-listed.
w.Header().Set("x-documize-filename", filename)
w.WriteHeader(http.StatusOK)
// Write backup to response stream.
x, err := w.Write(bk)
@ -140,8 +141,6 @@ func (h *Handler) Backup(w http.ResponseWriter, r *http.Request) {
if !spec.Retain {
os.Remove(filename)
}
w.WriteHeader(http.StatusOK)
}
// Restore receives ZIP file for restore operation.
@ -206,5 +205,8 @@ func (h *Handler) Restore(w http.ResponseWriter, r *http.Request) {
h.Runtime.Log.Infof("Restore remapped %d UserID values", len(rh.MapUserID))
h.Runtime.Log.Info("Restore completed")
h.Runtime.Log.Info("Building search index")
go h.Indexer.Rebuild(ctx)
response.WriteEmpty(w)
}


@ -59,6 +59,8 @@ type comment struct {
UserID string `json:"userId"`
Email string `json:"email"`
Feedback string `json:"feedback"`
SectionID string `json:"sectionId"`
ReplyTo string `json:"replyTo"`
Created time.Time `json:"created"`
}


@ -38,6 +38,7 @@ import (
"github.com/documize/community/model/category"
"github.com/documize/community/model/doc"
"github.com/documize/community/model/group"
"github.com/documize/community/model/label"
"github.com/documize/community/model/link"
"github.com/documize/community/model/page"
"github.com/documize/community/model/permission"
@ -154,6 +155,12 @@ func (r *restoreHandler) PerformRestore(b []byte, l int64) (err error) {
return
}
// Space Label.
err = r.dmzSpaceLabel()
if err != nil {
return
}
// Space.
err = r.dmzSpace()
if err != nil {
@ -363,14 +370,15 @@ func (r *restoreHandler) dmzOrg() (err error) {
INSERT INTO dmz_org (c_refid, c_company, c_title, c_message,
c_domain, c_service, c_email, c_anonaccess, c_authprovider, c_authconfig,
c_maxtags, c_verified, c_serial, c_sub, c_active,
c_theme, c_logo, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
c_theme, c_logo, c_locale, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
org[i].RefID, org[i].Company, org[i].Title, org[i].Message,
strings.ToLower(org[i].Domain), org[i].ConversionEndpoint, strings.ToLower(org[i].Email),
org[i].AllowAnonymousAccess, org[i].AuthProvider, org[i].AuthConfig,
org[i].MaxTags, true, org[i].Serial, org[i].Subscription,
org[i].Theme, org[i].Logo,
org[i].Active, org[i].Created, org[i].Revised)
org[i].MaxTags, r.Runtime.StoreProvider.IsTrue(), org[i].Serial,
org[i].Subscription, org[i].Active,
org[i].Theme, org[i].Logo, org[i].Locale,
org[i].Created, org[i].Revised)
if err != nil {
r.Context.Transaction.Rollback()
err = errors.Wrap(err, fmt.Sprintf("unable to insert %s %s", filename, org[i].RefID))
@ -404,6 +412,7 @@ func (r *restoreHandler) dmzOrg() (err error) {
org[0].Title = r.Spec.Org.Title
org[0].Subscription = r.Spec.Org.Subscription
org[0].Theme = r.Spec.Org.Theme
org[0].Locale = r.Spec.Org.Locale
}
_, err = r.Context.Transaction.NamedExec(`UPDATE dmz_org SET
@ -417,7 +426,8 @@ func (r *restoreHandler) dmzOrg() (err error) {
c_message=:message,
c_title=:title,
c_serial=:serial,
c_sub=:subscription
c_sub=:subscription,
c_locale=:locale
WHERE c_refid=:refid`, &org[0])
if err != nil {
r.Context.Transaction.Rollback()
@ -582,6 +592,64 @@ func (r *restoreHandler) dmzAction() (err error) {
return nil
}
// Space Label.
func (r *restoreHandler) dmzSpaceLabel() (err error) {
filename := "dmz_space_label.json"
label := []label.Label{}
err = r.fileJSON(filename, &label)
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("failed to load %s", filename))
return
}
r.Runtime.Log.Info(fmt.Sprintf("Extracted %s", filename))
r.Context.Transaction, err = r.Runtime.Db.Beginx()
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to start TX for %s", filename))
return
}
// Nuke all existing data.
nuke := "TRUNCATE TABLE dmz_space_label"
if !r.Spec.GlobalBackup {
nuke = fmt.Sprintf("DELETE FROM dmz_space_label WHERE c_orgid='%s'", r.Spec.Org.RefID)
}
_, err = r.Context.Transaction.Exec(nuke)
if err != nil {
r.Context.Transaction.Rollback()
err = errors.Wrap(err, fmt.Sprintf("unable to truncate table %s", filename))
return
}
for i := range label {
_, err = r.Context.Transaction.Exec(r.Runtime.Db.Rebind(`
INSERT INTO dmz_space_label
(c_refid, c_orgid, c_name, c_color, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?)`),
label[i].RefID, r.remapOrg(label[i].OrgID), label[i].Name, label[i].Color,
label[i].Created, label[i].Revised)
if err != nil {
r.Context.Transaction.Rollback()
err = errors.Wrap(err, fmt.Sprintf("unable to insert %s %s", filename, label[i].RefID))
return
}
}
err = r.Context.Transaction.Commit()
if err != nil {
r.Context.Transaction.Rollback()
err = errors.Wrap(err, fmt.Sprintf("unable to commit %s", filename))
return
}
r.Runtime.Log.Info(fmt.Sprintf("Processed %s %d records", filename, len(label)))
return nil
}
// Space.
func (r *restoreHandler) dmzSpace() (err error) {
filename := "dmz_space.json"
@ -677,9 +745,9 @@ func (r *restoreHandler) dmzCategory() (err error) {
for i := range ct {
_, err = r.Context.Transaction.Exec(r.Runtime.Db.Rebind(`
INSERT INTO dmz_category (c_refid, c_orgid, c_spaceid, c_name, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?)`),
ct[i].RefID, r.remapOrg(ct[i].OrgID), ct[i].SpaceID, ct[i].Name, ct[i].Created, ct[i].Revised)
INSERT INTO dmz_category (c_refid, c_orgid, c_spaceid, c_name, c_default, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?)`),
ct[i].RefID, r.remapOrg(ct[i].OrgID), ct[i].SpaceID, ct[i].Name, ct[i].IsDefault, ct[i].Created, ct[i].Revised)
if err != nil {
r.Context.Transaction.Rollback()
@ -1268,12 +1336,13 @@ func (r *restoreHandler) dmzDoc() (err error) {
INSERT INTO dmz_doc
(c_refid, c_orgid, c_spaceid, c_userid, c_job, c_location,
c_name, c_desc, c_slug, c_tags, c_template, c_protection, c_approval,
c_lifecycle, c_versioned, c_versionid, c_versionorder, c_groupid, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
c_lifecycle, c_versioned, c_versionid, c_versionorder, c_seq, c_groupid,
c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
doc[i].RefID, r.remapOrg(doc[i].OrgID), doc[i].SpaceID, r.remapUser(doc[i].UserID), doc[i].Job,
doc[i].Location, doc[i].Name, doc[i].Excerpt, doc[i].Slug, doc[i].Tags,
doc[i].Template, doc[i].Protection, doc[i].Approval, doc[i].Lifecycle,
doc[i].Versioned, doc[i].VersionID, doc[i].VersionOrder, doc[i].GroupID,
doc[i].Versioned, doc[i].VersionID, doc[i].VersionOrder, doc[i].Sequence, doc[i].GroupID,
doc[i].Created, doc[i].Revised)
if err != nil {
@ -1455,16 +1524,17 @@ func (r *restoreHandler) dmzDocAttachment() (err error) {
for i := range at {
_, err = r.Context.Transaction.Exec(r.Runtime.Db.Rebind(`
INSERT INTO dmz_doc_attachment
(c_refid, c_orgid, c_docid, c_job, c_fileid,
(c_refid, c_orgid, c_docid, c_sectionid, c_job, c_fileid,
c_filename, c_data, c_extension, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
at[i].RefID, r.remapOrg(at[i].OrgID), at[i].DocumentID, at[i].Job,
at[i].FileID, at[i].Filename,
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
at[i].RefID, r.remapOrg(at[i].OrgID), at[i].DocumentID, at[i].SectionID,
at[i].Job, at[i].FileID, at[i].Filename,
at[i].Data, at[i].Extension, at[i].Created, at[i].Revised)
if err != nil {
r.Context.Transaction.Rollback()
err = errors.Wrap(err, fmt.Sprintf("unable to insert %s %s", filename, at[i].RefID))
r.Runtime.Log.Error("warning", err)
return
}
}
@ -1485,15 +1555,6 @@ func (r *restoreHandler) dmzDocAttachment() (err error) {
func (r *restoreHandler) dmzDocComment() (err error) {
filename := "dmz_doc_comment.json"
type comment struct {
RefID string `json:"feedbackId"`
OrgID string `json:"orgId"`
DocumentID string `json:"documentId"`
UserID string `json:"userId"`
Email string `json:"email"`
Feedback string `json:"feedback"`
Created time.Time `json:"created"`
}
cm := []comment{}
err = r.fileJSON(filename, &cm)
if err != nil {
@ -1524,10 +1585,10 @@ func (r *restoreHandler) dmzDocComment() (err error) {
for i := range cm {
_, err = r.Context.Transaction.Exec(r.Runtime.Db.Rebind(`
INSERT INTO dmz_doc_comment
(c_refid, c_orgid, c_userid, c_docid, c_email, c_feedback, c_created)
VALUES (?, ?, ?, ?, ?, ?, ?)`),
(c_refid, c_orgid, c_userid, c_docid, c_email, c_feedback, c_replyto, c_sectionid, c_created)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`),
cm[i].RefID, r.remapOrg(cm[i].OrgID), r.remapUser(cm[i].UserID), cm[i].DocumentID,
cm[i].Email, cm[i].Feedback, cm[i].Created)
cm[i].Email, cm[i].Feedback, cm[i].ReplyTo, cm[i].SectionID, cm[i].Created)
if err != nil {
r.Context.Transaction.Rollback()
@ -1661,6 +1722,7 @@ func (r *restoreHandler) dmzUser() (err error) {
insert = true
}
if err != nil {
r.Context.Transaction.Rollback()
err = errors.Wrap(err, fmt.Sprintf("unable to check email %s", u[i].Email))
return
}
@ -1675,11 +1737,11 @@ func (r *restoreHandler) dmzUser() (err error) {
_, err = r.Context.Transaction.Exec(r.Runtime.Db.Rebind(`
INSERT INTO dmz_user
(c_refid, c_firstname, c_lastname, c_email, c_initials, c_globaladmin,
c_password, c_salt, c_reset, c_active, c_lastversion, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
c_password, c_salt, c_reset, c_active, c_lastversion, c_locale, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
r.remapUser(u[i].RefID), u[i].Firstname, u[i].Lastname, strings.ToLower(u[i].Email), u[i].Initials,
u[i].GlobalAdmin, u[i].Password, u[i].Salt, u[i].Reset, u[i].Active,
u[i].LastVersion, u[i].Created, u[i].Revised)
u[i].LastVersion, u[i].Locale, u[i].Created, u[i].Revised)
if err != nil {
r.Context.Transaction.Rollback()


@ -97,7 +97,7 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
// Get returns requested reusable content block.
func (h *Handler) Get(w http.ResponseWriter, r *http.Request) {
method := "block.add"
method := "block.get"
ctx := domain.GetRequestContext(r)
blockID := request.Param(r, "blockID")
@ -135,7 +135,6 @@ func (h *Handler) GetBySpace(w http.ResponseWriter, r *http.Request) {
if len(b) == 0 {
b = []block.Block{}
}
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)


@ -74,10 +74,10 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
return
}
// Category max length 30.
// Category max length 50.
cat.Name = strings.TrimSpace(cat.Name)
if len(cat.Name) > 30 {
cat.Name = cat.Name[:30]
if len(cat.Name) > 50 {
cat.Name = cat.Name[:50]
}
err = h.Store.Category.Add(ctx, cat)
@ -105,14 +105,6 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
return
}
err = h.Store.Space.IncrementCategoryCount(ctx, cat.SpaceID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
ctx.Transaction.Commit()
cat, err = h.Store.Category.Get(ctx, cat.RefID)
@ -122,6 +114,7 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
return
}
h.Store.Space.SetStats(ctx, cat.SpaceID)
h.Store.Audit.Record(ctx, audit.EventTypeCategoryAdd)
response.WriteJSON(w, cat)
@ -303,16 +296,9 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
return
}
err = h.Store.Space.DecrementCategoryCount(ctx, cat.SpaceID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
ctx.Transaction.Commit()
h.Store.Space.SetStats(ctx, cat.SpaceID)
h.Store.Audit.Record(ctx, audit.EventTypeCategoryDelete)
response.WriteEmpty(w)


@ -33,8 +33,8 @@ func (s Store) Add(ctx domain.RequestContext, c category.Category) (err error) {
c.Created = time.Now().UTC()
c.Revised = time.Now().UTC()
_, err = ctx.Transaction.Exec(s.Bind("INSERT INTO dmz_category (c_refid, c_orgid, c_spaceid, c_name, c_created, c_revised) VALUES (?, ?, ?, ?, ?, ?)"),
c.RefID, c.OrgID, c.SpaceID, c.Name, c.Created, c.Revised)
_, err = ctx.Transaction.Exec(s.Bind("INSERT INTO dmz_category (c_refid, c_orgid, c_spaceid, c_name, c_default, c_created, c_revised) VALUES (?, ?, ?, ?, ?, ?, ?)"),
c.RefID, c.OrgID, c.SpaceID, c.Name, c.IsDefault, c.Created, c.Revised)
if err != nil {
err = errors.Wrap(err, "unable to execute insert category")
@ -47,7 +47,7 @@ func (s Store) Add(ctx domain.RequestContext, c category.Category) (err error) {
// Context is used for user ID.
func (s Store) GetBySpace(ctx domain.RequestContext, spaceID string) (c []category.Category, err error) {
err = s.Runtime.Db.Select(&c, s.Bind(`
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_created AS created, c_revised AS revised
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_default AS isdefault, c_created AS created, c_revised AS revised
FROM dmz_category
WHERE c_orgid=? AND c_spaceid=? AND c_refid IN
(
@ -77,7 +77,7 @@ func (s Store) GetAllBySpace(ctx domain.RequestContext, spaceID string) (c []cat
c = []category.Category{}
err = s.Runtime.Db.Select(&c, s.Bind(`
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_created AS created, c_revised AS revised
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_default AS isdefault, c_created AS created, c_revised AS revised
FROM dmz_category
WHERE c_orgid=? AND c_spaceid=? AND c_spaceid IN
(
@ -105,7 +105,7 @@ func (s Store) GetAllBySpace(ctx domain.RequestContext, spaceID string) (c []cat
// GetByOrg returns all categories accessible by user for their org.
func (s Store) GetByOrg(ctx domain.RequestContext, userID string) (c []category.Category, err error) {
err = s.Runtime.Db.Select(&c, s.Bind(`
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_created AS created, c_revised AS revised
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_default AS isdefault, c_created AS created, c_revised AS revised
FROM dmz_category
WHERE c_orgid=? AND c_refid IN
(SELECT c_refid FROM dmz_permission WHERE c_orgid=? AND c_location='category' AND c_refid IN (
@ -131,7 +131,7 @@ func (s Store) GetByOrg(ctx domain.RequestContext, userID string) (c []category.
func (s Store) Update(ctx domain.RequestContext, c category.Category) (err error) {
c.Revised = time.Now().UTC()
_, err = ctx.Transaction.NamedExec(s.Bind("UPDATE dmz_category SET c_name=:name, c_revised=:revised WHERE c_orgid=:orgid AND c_refid=:refid"), c)
_, err = ctx.Transaction.NamedExec(s.Bind("UPDATE dmz_category SET c_name=:name, c_default=:isdefault, c_revised=:revised WHERE c_orgid=:orgid AND c_refid=:refid"), c)
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to execute update for category %s", c.RefID))
}
@ -142,7 +142,7 @@ func (s Store) Update(ctx domain.RequestContext, c category.Category) (err error
// Get returns specified category
func (s Store) Get(ctx domain.RequestContext, id string) (c category.Category, err error) {
err = s.Runtime.Db.Get(&c, s.Bind(`
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_created AS created, c_revised AS revised
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_default AS isdefault, c_created AS created, c_revised AS revised
FROM dmz_category
WHERE c_orgid=? AND c_refid=?`),
ctx.OrgID, id)
@ -176,46 +176,69 @@ func (s Store) AssociateDocument(ctx domain.RequestContext, m category.Member) (
// DisassociateDocument removes document association from category.
func (s Store) DisassociateDocument(ctx domain.RequestContext, categoryID, documentID string) (rows int64, err error) {
sql := fmt.Sprintf("DELETE FROM dmz_category_member WHERE c_orgid='%s' AND c_categoryid='%s' AND c_docid='%s'",
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_category_member WHERE c_orgid=? AND c_categoryid=? AND c_docid=?"),
ctx.OrgID, categoryID, documentID)
return s.DeleteWhere(ctx.Transaction, sql)
if err == sql.ErrNoRows {
err = nil
}
return
}
// RemoveCategoryMembership removes all category associations from the store.
func (s Store) RemoveCategoryMembership(ctx domain.RequestContext, categoryID string) (rows int64, err error) {
sql := fmt.Sprintf("DELETE FROM dmz_category_member WHERE c_orgid='%s' AND c_categoryid='%s'",
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_category_member WHERE c_orgid=? AND c_categoryid=?"),
ctx.OrgID, categoryID)
return s.DeleteWhere(ctx.Transaction, sql)
if err == sql.ErrNoRows {
err = nil
}
return
}
// RemoveSpaceCategoryMemberships removes all category associations from the store for the space.
func (s Store) RemoveSpaceCategoryMemberships(ctx domain.RequestContext, spaceID string) (rows int64, err error) {
sql := fmt.Sprintf("DELETE FROM dmz_category_member WHERE c_orgid='%s' AND c_spaceid='%s'",
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_category_member WHERE c_orgid=? AND c_spaceid=?"),
ctx.OrgID, spaceID)
return s.DeleteWhere(ctx.Transaction, sql)
if err == sql.ErrNoRows {
err = nil
}
return
}
// RemoveDocumentCategories removes all document category associations from the store.
func (s Store) RemoveDocumentCategories(ctx domain.RequestContext, documentID string) (rows int64, err error) {
sql := fmt.Sprintf("DELETE FROM dmz_category_member WHERE c_orgid='%s' AND c_docid='%s'",
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_category_member WHERE c_orgid=? AND c_docid=?"),
ctx.OrgID, documentID)
return s.DeleteWhere(ctx.Transaction, sql)
if err == sql.ErrNoRows {
err = nil
}
return
}
// DeleteBySpace removes all category and category associations for given space.
func (s Store) DeleteBySpace(ctx domain.RequestContext, spaceID string) (rows int64, err error) {
s1 := fmt.Sprintf("DELETE FROM dmz_category_member WHERE c_orgid='%s' AND c_spaceid='%s'", ctx.OrgID, spaceID)
_, err = s.DeleteWhere(ctx.Transaction, s1)
if err != nil {
return
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_category_member WHERE c_orgid=? AND c_spaceid=?"),
ctx.OrgID, spaceID)
if err == sql.ErrNoRows {
err = nil
}
s2 := fmt.Sprintf("DELETE FROM dmz_category WHERE c_orgid='%s' AND c_spaceid='%s'", ctx.OrgID, spaceID)
return s.DeleteWhere(ctx.Transaction, s2)
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_category WHERE c_orgid=? AND c_spaceid=?"),
ctx.OrgID, spaceID)
if err == sql.ErrNoRows {
err = nil
}
return
}
// GetSpaceCategorySummary returns number of documents and users for space categories.
@ -223,29 +246,29 @@ func (s Store) GetSpaceCategorySummary(ctx domain.RequestContext, spaceID string
c = []category.SummaryModel{}
err = s.Runtime.Db.Select(&c, s.Bind(`
SELECT 'documents' AS type, c_categoryid AS categoryid, COUNT(*) AS count
SELECT 'documents' AS grouptype, c_categoryid AS categoryid, COUNT(*) AS count
FROM dmz_category_member
WHERE c_orgid=? AND c_spaceid=?
AND c_docid IN (
SELECT c_refid
FROM dmz_doc
WHERE c_orgid=? AND c_spaceid=? AND c_lifecycle!=2 AND c_template=false AND c_groupid=''
WHERE c_orgid=? AND c_spaceid=? AND c_lifecycle!=2 AND c_template=`+s.IsFalse()+` AND c_groupid=''
UNION ALL
SELECT d.c_refid
FROM (
SELECT c_groupid, MIN(c_versionorder) AS latestversion
FROM dmz_doc
WHERE c_orgid=? AND c_spaceid=? AND c_lifecycle!=2 AND c_groupid!='' AND c_template=false
WHERE c_orgid=? AND c_spaceid=? AND c_lifecycle!=2 AND c_groupid!='' AND c_template=`+s.IsFalse()+`
GROUP BY c_groupid
) AS x INNER JOIN dmz_doc AS d ON d.c_groupid=x.c_groupid AND d.c_versionorder=x.latestversion
)
GROUP BY c_categoryid, type
GROUP BY c_categoryid
UNION ALL
SELECT 'users' AS type, c_refid AS categoryid, count(*) AS count
SELECT 'users' AS grouptype, c_refid AS categoryid, count(*) AS count
FROM dmz_permission
WHERE c_orgid=? AND c_location='category' AND c_refid IN
(SELECT c_refid FROM dmz_category WHERE c_orgid=? AND c_spaceid=?)
GROUP BY c_refid, type`),
GROUP BY c_refid`),
ctx.OrgID, spaceID,
ctx.OrgID, spaceID, ctx.OrgID, spaceID,
ctx.OrgID, ctx.OrgID, spaceID)
@ -262,16 +285,15 @@ func (s Store) GetSpaceCategorySummary(ctx domain.RequestContext, spaceID string
// GetDocumentCategoryMembership returns all space categories associated with given document.
func (s Store) GetDocumentCategoryMembership(ctx domain.RequestContext, documentID string) (c []category.Category, err error) {
c = []category.Category{}
err = s.Runtime.Db.Select(&c, s.Bind(`
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_created AS created, c_revised AS revised
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_name AS name, c_default AS isdefault, c_created AS created, c_revised AS revised
FROM dmz_category
WHERE c_orgid=? AND c_refid IN (SELECT c_categoryid FROM dmz_category_member WHERE c_orgid=? AND c_docid=?)`),
ctx.OrgID, ctx.OrgID, documentID)
if err == sql.ErrNoRows {
err = nil
c = []category.Category{}
}
if err != nil {
err = errors.Wrap(err, fmt.Sprintf("unable to execute select categories for document %s", documentID))
@ -280,7 +302,8 @@ func (s Store) GetDocumentCategoryMembership(ctx domain.RequestContext, document
return
}
// GetSpaceCategoryMembership returns category/document associations within space.
// GetSpaceCategoryMembership returns category/document associations within space,
// for specified user.
func (s Store) GetSpaceCategoryMembership(ctx domain.RequestContext, spaceID string) (c []category.Member, err error) {
err = s.Runtime.Db.Select(&c, s.Bind(`
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_categoryid AS categoryid, c_docid AS documentid, c_created AS created, c_revised AS revised

View file

@ -17,6 +17,8 @@ import (
"net/http"
"time"
"github.com/documize/community/core/request"
"github.com/jmoiron/sqlx"
)
@ -42,6 +44,8 @@ type RequestContext struct {
GlobalAdmin bool
ViewUsers bool
Subscription Subscription
Locale string
OrgLocale string
}
//GetAppURL returns full HTTP url for the app
@ -69,7 +73,7 @@ func GetRequestContext(r *http.Request) (ctx RequestContext) {
ctx = RequestContext{}
ctx.AppURL = r.Host
ctx.SSL = r.TLS != nil
ctx.SSL = request.IsSSL(r)
return
}
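ctx.SSL now comes from request.IsSSL(r) rather than checking r.TLS directly. The helper's exact behaviour is not shown in this diff; a plausible sketch is below, assuming it also honours the X-Forwarded-Proto header set by reverse proxies:

package example

import "net/http"

// isSSL is a hypothetical stand-in for request.IsSSL: treat the request as
// secure when TLS terminated locally or at a proxy that forwarded the scheme.
func isSSL(r *http.Request) bool {
	if r.TLS != nil {
		return true
	}
	return r.Header.Get("X-Forwarded-Proto") == "https"
}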

View file

@ -34,6 +34,7 @@ import (
"github.com/documize/community/model/activity"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/audit"
cm "github.com/documize/community/model/category"
"github.com/documize/community/model/doc"
"github.com/documize/community/model/page"
"github.com/documize/community/model/space"
@ -165,7 +166,8 @@ func (h *Handler) convert(w http.ResponseWriter, r *http.Request, job, spaceID s
response.WriteJSON(w, nd)
}
func processDocument(ctx domain.RequestContext, r *env.Runtime, store *store.Store, indexer indexer.Indexer, filename, job string, sp space.Space, fileResult *api.DocumentConversionResponse) (newDocument doc.Document, err error) {
func processDocument(ctx domain.RequestContext, r *env.Runtime, store *store.Store, indexer indexer.Indexer, filename,
job string, sp space.Space, fileResult *api.DocumentConversionResponse) (newDocument doc.Document, err error) {
// Convert into database objects
document := convertFileResult(filename, fileResult)
document.Job = job
@ -174,6 +176,7 @@ func processDocument(ctx domain.RequestContext, r *env.Runtime, store *store.Sto
document.UserID = ctx.UserID
documentID := uniqueid.Generate()
document.RefID = documentID
document.Sequence = doc.Unsequenced
if r.Product.Edition == domain.CommunityEdition {
document.Lifecycle = workflow.LifecycleLive
@ -243,18 +246,33 @@ func processDocument(ctx domain.RequestContext, r *env.Runtime, store *store.Sto
da = append(da, a)
}
// Add default categories to newly created document (if we have them).
cats, err := store.Category.GetBySpace(ctx, document.SpaceID)
if err != nil {
r.Log.Error("fetch default categories for new document", err)
}
for ic := range cats {
if cats[ic].IsDefault {
c := cm.Member{}
c.OrgID = ctx.OrgID
c.SpaceID = sp.RefID
c.RefID = uniqueid.Generate()
c.DocumentID = document.RefID
c.CategoryID = cats[ic].RefID
err = store.Category.AssociateDocument(ctx, c)
if err != nil {
r.Log.Error("apply default category to new document", err)
}
}
}
store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: newDocument.SpaceID,
DocumentID: newDocument.RefID,
SpaceID: document.SpaceID,
DocumentID: document.RefID,
SourceType: activity.SourceTypeDocument,
ActivityType: activity.TypeCreated})
err = store.Space.IncrementContentCount(ctx, newDocument.SpaceID)
if err != nil {
err = errors.Wrap(err, "cannot increment space content count")
return
}
err = ctx.Transaction.Commit()
if err != nil {
err = errors.Wrap(err, "cannot commit new document import")
@ -269,6 +287,7 @@ func processDocument(ctx domain.RequestContext, r *env.Runtime, store *store.Sto
go indexer.IndexDocument(ctx, newDocument, da)
store.Space.SetStats(ctx, newDocument.SpaceID)
store.Audit.Record(ctx, audit.EventTypeDocumentUpload)
return

View file

@ -66,6 +66,8 @@ func FilterCategoryProtected(docs []doc.Document, cats []category.Category, memb
// CopyDocument clones an existing document
func CopyDocument(ctx domain.RequestContext, s store.Store, documentID string) (newDocumentID string, err error) {
unseq := doc.Unsequenced
doc, err := s.Document.Get(ctx, documentID)
if err != nil {
err = errors.Wrap(err, "unable to fetch existing document")
@ -79,6 +81,7 @@ func CopyDocument(ctx domain.RequestContext, s store.Store, documentID string) (
doc.VersionID = ""
doc.GroupID = ""
doc.Template = false
doc.Sequence = unseq
// Duplicate pages and associated meta
pages, err := s.Page.GetPages(ctx, documentID)
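CopyDocument now resets the clone's Sequence to doc.Unsequenced so a copy never inherits the original's pinned position. The constant's value is not shown in this hunk; the c_seq != 99999 filters in the document store later in this diff suggest a sentinel along these lines (illustrative only):

package example

// Unsequenced marks a document as not pinned; pinned documents carry a small
// positive sequence instead. The value is inferred from the c_seq != 99999
// filters used by the document store queries.
const Unsequenced = 99999

type document struct {
	RefID    string
	GroupID  string
	Sequence int
}

// newCopy mirrors CopyDocument above: fresh identity, no version group,
// and the clone starts unpinned.
func newCopy(src document, newID string) document {
	src.RefID = newID
	src.GroupID = ""
	src.Sequence = Unsequenced
	return src
}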

View file

@ -14,6 +14,7 @@ package document
import (
"database/sql"
"encoding/json"
"errors"
"io/ioutil"
"net/http"
"sort"
@ -24,15 +25,19 @@ import (
"github.com/documize/community/core/response"
"github.com/documize/community/core/streamutil"
"github.com/documize/community/core/stringutil"
"github.com/documize/community/core/uniqueid"
"github.com/documize/community/domain"
"github.com/documize/community/domain/organization"
"github.com/documize/community/domain/permission"
indexer "github.com/documize/community/domain/search"
"github.com/documize/community/domain/store"
"github.com/documize/community/model/activity"
"github.com/documize/community/model/attachment"
"github.com/documize/community/model/audit"
"github.com/documize/community/model/category"
"github.com/documize/community/model/doc"
"github.com/documize/community/model/link"
"github.com/documize/community/model/page"
pm "github.com/documize/community/model/permission"
"github.com/documize/community/model/search"
"github.com/documize/community/model/space"
@ -59,6 +64,13 @@ func (h *Handler) Get(w http.ResponseWriter, r *http.Request) {
return
}
var ok bool
ctx.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
h.Runtime.Log.Info("unable to start transaction " + method)
return
}
document, err := h.Store.Document.Get(ctx, id)
if err == sql.ErrNoRows {
response.WriteNotFoundError(w, method, id)
@ -77,28 +89,22 @@ func (h *Handler) Get(w http.ResponseWriter, r *http.Request) {
// draft mode does not record document views
if document.Lifecycle == workflow.LifecycleLive {
ctx.Transaction, err = h.Runtime.Db.Beginx()
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
err = h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: document.SpaceID,
DocumentID: document.RefID,
SourceType: activity.SourceTypeDocument,
ActivityType: activity.TypeRead})
if err != nil {
ctx.Transaction.Rollback()
h.Runtime.Log.Error(method, err)
return
}
ctx.Transaction.Commit()
}
ctx.Transaction.Commit()
h.Store.Audit.Record(ctx, audit.EventTypeDocumentView)
response.WriteJSON(w, document)
@ -144,19 +150,9 @@ func (h *Handler) BySpace(w http.ResponseWriter, r *http.Request) {
return
}
// Get the space as we need to check settings.
space, err := h.Store.Space.Get(ctx, spaceID)
// Can user view drafts?
viewDrafts := permission.CanViewDrafts(ctx, *h.Store, spaceID)
// If space defaults to draft documents, then this means
// user can view drafts as long as they have edit rights.
canEdit := permission.HasPermission(ctx, *h.Store, spaceID, pm.DocumentEdit)
if space.Lifecycle == workflow.LifecycleDraft && canEdit {
viewDrafts = true
}
// Get complete list of documents regardless of category permission
// and versioning.
documents, err := h.Store.Document.GetBySpace(ctx, spaceID)
@ -196,10 +192,26 @@ func (h *Handler) BySpace(w http.ResponseWriter, r *http.Request) {
}
}
// Sort document list by title.
sort.Sort(doc.ByName(filtered))
sortedDocs := doc.SortedDocs{}
response.WriteJSON(w, filtered)
for j := range filtered {
if filtered[j].Sequence == doc.Unsequenced {
sortedDocs.Unpinned = append(sortedDocs.Unpinned, filtered[j])
} else {
sortedDocs.Pinned = append(sortedDocs.Pinned, filtered[j])
}
}
// Sort document list by title.
sort.Sort(doc.ByName(sortedDocs.Unpinned))
// Sort document list by sequence.
sort.Sort(doc.BySeq(sortedDocs.Pinned))
final := sortedDocs.Pinned
final = append(final, sortedDocs.Unpinned...)
response.WriteJSON(w, final)
}
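BySpace now returns pinned documents first, ordered by sequence, followed by the rest ordered by name. doc.ByName and doc.BySeq are the model package's sort.Interface types; a minimal sketch of the same split-and-sort, with those types written out here as assumptions:

package example

import "sort"

type document struct {
	Name     string
	Sequence int
}

const unsequenced = 99999 // assumed sentinel, see doc.Unsequenced

// byName and bySeq approximate doc.ByName and doc.BySeq.
type byName []document

func (a byName) Len() int           { return len(a) }
func (a byName) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a byName) Less(i, j int) bool { return a[i].Name < a[j].Name }

type bySeq []document

func (a bySeq) Len() int           { return len(a) }
func (a bySeq) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a bySeq) Less(i, j int) bool { return a[i].Sequence < a[j].Sequence }

// orderForSpace returns pinned documents by sequence, then the rest by name.
func orderForSpace(docs []document) []document {
	var pinned, unpinned []document
	for _, d := range docs {
		if d.Sequence == unsequenced {
			unpinned = append(unpinned, d)
		} else {
			pinned = append(pinned, d)
		}
	}
	sort.Sort(bySeq(pinned))
	sort.Sort(byName(unpinned))
	return append(pinned, unpinned...)
}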
// Update updates an existing document using the format described
@ -237,36 +249,40 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {
d.RefID = documentID
ctx.Transaction, err = h.Runtime.Db.Beginx()
if err != nil {
var ok bool
ctx.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
h.Runtime.Log.Info("unable to start transaction " + method)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// If space changed for document, remove document categories.
oldDoc, err := h.Store.Document.Get(ctx, documentID)
if err != nil {
ctx.Transaction.Rollback()
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
if oldDoc.SpaceID != d.SpaceID {
h.Store.Category.RemoveDocumentCategories(ctx, d.RefID)
_, _ = h.Store.Category.RemoveDocumentCategories(ctx, d.RefID)
err = h.Store.Document.MoveActivity(ctx, documentID, oldDoc.SpaceID, d.SpaceID)
if err != nil {
ctx.Transaction.Rollback()
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
// d.Name = bluemonday.StrictPolicy().Sanitize(d.Name)
// d.Excerpt = bluemonday.StrictPolicy().Sanitize(d.Excerpt)
err = h.Store.Document.Update(ctx, d)
if err != nil {
ctx.Transaction.Rollback()
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
@ -278,7 +294,7 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {
if len(d.GroupID) > 0 {
err = h.Store.Document.UpdateGroup(ctx, d)
if err != nil {
ctx.Transaction.Rollback()
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
@ -315,7 +331,12 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {
}
}
ctx.Transaction.Commit()
h.Runtime.Commit(ctx.Transaction)
_ = h.Store.Space.SetStats(ctx, d.SpaceID)
if oldDoc.SpaceID != d.SpaceID {
_ = h.Store.Space.SetStats(ctx, oldDoc.SpaceID)
}
h.Store.Audit.Record(ctx, audit.EventTypeDocumentUpdate)
@ -346,6 +367,13 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
return
}
var ok bool
ctx.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
h.Runtime.Log.Info("unable to start transaction " + method)
return
}
doc, err := h.Store.Document.Get(ctx, documentID)
if err != nil {
response.WriteServerError(w, method, err)
@ -382,13 +410,6 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
return
}
ctx.Transaction, err = h.Runtime.Db.Beginx()
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
_, err = h.Store.Document.Delete(ctx, documentID)
if err != nil {
ctx.Transaction.Rollback()
@ -417,16 +438,9 @@ func (h *Handler) Delete(w http.ResponseWriter, r *http.Request) {
ActivityType: activity.TypeDeleted})
}
err = h.Store.Space.DecrementContentCount(ctx, doc.SpaceID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
ctx.Transaction.Commit()
h.Store.Space.SetStats(ctx, doc.SpaceID)
h.Store.Audit.Record(ctx, audit.EventTypeDocumentDelete)
go h.Indexer.DeleteDocument(ctx, documentID)
@ -487,18 +501,13 @@ func (h *Handler) SearchDocuments(w http.ResponseWriter, r *http.Request) {
return
}
err = h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: "",
DocumentID: "",
Metadata: options.Keywords,
SourceType: activity.SourceTypeSearch,
ActivityType: activity.TypeSearched})
if err != nil {
ctx.Transaction.Rollback()
h.Runtime.Log.Error(method, err)
}
ctx.Transaction.Commit()
}
}
@ -533,18 +542,13 @@ func (h *Handler) recordSearchActivity(ctx domain.RequestContext, q []search.Que
}
if _, isExisting := prev[q[i].DocumentID]; !isExisting {
err = h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: q[i].SpaceID,
DocumentID: q[i].DocumentID,
Metadata: keywords,
SourceType: activity.SourceTypeSearch,
ActivityType: activity.TypeSearched})
if err != nil {
ctx.Transaction.Rollback()
h.Runtime.Log.Error(method, err)
}
prev[q[i].DocumentID] = true
}
}
@ -563,7 +567,13 @@ func (h *Handler) FetchDocumentData(w http.ResponseWriter, r *http.Request) {
return
}
// document
var ok bool
ctx.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
h.Runtime.Log.Info("unable to start transaction " + method)
return
}
document, err := h.Store.Document.Get(ctx, id)
if err == sql.ErrNoRows {
response.WriteNotFoundError(w, method, id)
@ -580,12 +590,49 @@ func (h *Handler) FetchDocumentData(w http.ResponseWriter, r *http.Request) {
return
}
// Don't serve archived document
// Don't serve archived document.
if document.Lifecycle == workflow.LifecycleArchived {
response.WriteForbiddenError(w)
return
}
// Check if draft document can be seen by user.
if document.Lifecycle == workflow.LifecycleDraft && !permission.CanViewDrafts(ctx, *h.Store, document.SpaceID) {
response.WriteForbiddenError(w)
return
}
// If document has been assigned one or more categories,
// we check to see if user can view this document.
cat, err := h.Store.Category.GetDocumentCategoryMembership(ctx, document.RefID)
if err != nil && err != sql.ErrNoRows {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
perm, err := h.Store.Permission.GetUserCategoryPermissions(ctx, ctx.UserID)
if err != nil && err != sql.ErrNoRows {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
see := []category.Category{}
for _, c := range cat {
for _, p := range perm {
if p.RefID == c.RefID {
see = append(see, c)
break
}
}
}
// User cannot view document if document has categories assigned
// but user cannot see any of them.
if len(cat) > 0 && len(see) == 0 {
response.WriteForbiddenError(w)
return
}
// permissions
perms, err := h.Store.Permission.GetUserSpacePermissions(ctx, document.SpaceID)
if err != nil && err != sql.ErrNoRows {
@ -633,14 +680,36 @@ func (h *Handler) FetchDocumentData(w http.ResponseWriter, r *http.Request) {
// Get version information for this document.
v := []doc.Version{}
if len(document.GroupID) > 0 {
v, err = h.Store.Document.GetVersions(ctx, document.GroupID)
if err != nil && err != sql.ErrNoRows {
// Get versions
vt, err := h.Store.Document.GetVersions(ctx, document.GroupID)
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// Determine which document versions user can see.
for i := range vt {
// Everyone can see live documents
if vt[i].Lifecycle == workflow.LifecycleLive {
v = append(v, vt[i])
}
// Only lifecycle admins can see draft documents
if vt[i].Lifecycle == workflow.LifecycleDraft && record.DocumentLifecycle {
v = append(v, vt[i])
}
}
}
// Attachments.
a, err := h.Store.Attachment.GetAttachments(ctx, id)
if err != nil && err != sql.ErrNoRows {
h.Runtime.Log.Error("get attachment", err)
response.WriteServerError(w, method, err)
return
}
if len(a) == 0 {
a = []attachment.Attachment{}
}
// Prepare response.
@ -651,25 +720,14 @@ func (h *Handler) FetchDocumentData(w http.ResponseWriter, r *http.Request) {
data.Links = l
data.Spaces = sp
data.Versions = v
ctx.Transaction, err = h.Runtime.Db.Beginx()
if err != nil {
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
data.Attachments = a
if document.Lifecycle == workflow.LifecycleLive {
err = h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: document.SpaceID,
DocumentID: document.RefID,
SourceType: activity.SourceTypeDocument,
ActivityType: activity.TypeRead})
if err != nil {
ctx.Transaction.Rollback()
h.Runtime.Log.Error(method, err)
}
}
ctx.Transaction.Commit()
@ -682,12 +740,13 @@ func (h *Handler) FetchDocumentData(w http.ResponseWriter, r *http.Request) {
// BulkDocumentData represents all data associated for a single document.
// Used by FetchDocumentData() bulk data load call.
type BulkDocumentData struct {
Document doc.Document `json:"document"`
Permissions pm.Record `json:"permissions"`
Roles pm.DocumentRecord `json:"roles"`
Spaces []space.Space `json:"folders"`
Links []link.Link `json:"links"`
Versions []doc.Version `json:"versions"`
Document doc.Document `json:"document"`
Permissions pm.Record `json:"permissions"`
Roles pm.DocumentRecord `json:"roles"`
Spaces []space.Space `json:"folders"`
Links []link.Link `json:"links"`
Versions []doc.Version `json:"versions"`
Attachments []attachment.Attachment `json:"attachments"`
}
// Export returns content as self-enclosed HTML file.
@ -732,5 +791,453 @@ func (h *Handler) Export(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/html; charset=utf-8")
w.WriteHeader(http.StatusOK)
w.Write([]byte(export))
_, _ = w.Write([]byte(export))
}
// Duplicate makes a copy of a document.
// Name of new document is required.
func (h *Handler) Duplicate(w http.ResponseWriter, r *http.Request) {
method := "document.Duplicate"
ctx := domain.GetRequestContext(r)
// Holds old to new ref ID values.
pageRefMap := make(map[string]string)
// Parse payload
defer streamutil.Close(r.Body)
body, err := ioutil.ReadAll(r.Body)
if err != nil {
response.WriteBadRequestError(w, method, err.Error())
h.Runtime.Log.Error(method, err)
return
}
m := doc.DuplicateModel{}
err = json.Unmarshal(body, &m)
if err != nil {
response.WriteBadRequestError(w, method, err.Error())
h.Runtime.Log.Error(method, err)
return
}
// Check permissions
if !permission.CanViewDocument(ctx, *h.Store, m.DocumentID) {
response.WriteForbiddenError(w)
return
}
if !permission.CanUploadDocument(ctx, *h.Store, m.SpaceID) {
response.WriteForbiddenError(w)
return
}
var ok bool
ctx.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
h.Runtime.Log.Info("unable to start transaction " + method)
return
}
// Get document to be duplicated.
d, err := h.Store.Document.Get(ctx, m.DocumentID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// Assign new ID and remove versioning info.
d.RefID = uniqueid.Generate()
d.GroupID = ""
d.Name = m.Name
// Fetch doc attachments, links.
da, err := h.Store.Attachment.GetAttachmentsWithData(ctx, m.DocumentID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
dl, err := h.Store.Link.GetDocumentOutboundLinks(ctx, m.DocumentID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// Fetch published and unpublished sections.
pages, err := h.Store.Page.GetPages(ctx, m.DocumentID)
if err != nil && err != sql.ErrNoRows {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
if len(pages) == 0 {
pages = []page.Page{}
}
unpublished, err := h.Store.Page.GetUnpublishedPages(ctx, m.DocumentID)
if err != nil && err != sql.ErrNoRows {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
if len(unpublished) == 0 {
unpublished = []page.Page{}
}
pages = append(pages, unpublished...)
meta, err := h.Store.Page.GetDocumentPageMeta(ctx, m.DocumentID, false)
if err != nil && err != sql.ErrNoRows {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
if len(meta) == 0 {
meta = []page.Meta{}
}
// Duplicate the complete document starting with the document.
err = h.Store.Document.Add(ctx, d)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// Attachments
for i := range da {
da[i].RefID = uniqueid.Generate()
da[i].DocumentID = d.RefID
err = h.Store.Attachment.Add(ctx, da[i])
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
// Sections
for j := range pages {
// Create mapping between old and new section IDs.
pageRefMap[pages[j].RefID] = uniqueid.Generate()
// Get meta for section
sm := page.Meta{}
for k := range meta {
if meta[k].SectionID == pages[j].RefID {
sm = meta[k]
break
}
}
// Get attachments for section.
sa, err := h.Store.Attachment.GetSectionAttachments(ctx, pages[j].RefID)
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
pages[j].RefID = pageRefMap[pages[j].RefID]
pages[j].DocumentID = d.RefID
sm.DocumentID = d.RefID
sm.SectionID = pages[j].RefID
err = h.Store.Page.Add(ctx, page.NewPage{Page: pages[j], Meta: sm})
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
// Now add any section attachments.
for n := range sa {
sa[n].RefID = uniqueid.Generate()
sa[n].DocumentID = d.RefID
sa[n].SectionID = pages[j].RefID
err = h.Store.Attachment.Add(ctx, sa[n])
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
}
// Links
for l := range dl {
// Update common meta for all links.
dl[l].RefID = uniqueid.Generate()
dl[l].SourceDocumentID = d.RefID
// Remap section ID.
if len(dl[l].SourceSectionID) > 0 && len(pageRefMap[dl[l].SourceSectionID]) > 0 {
dl[l].SourceSectionID = pageRefMap[dl[l].SourceSectionID]
}
err = h.Store.Link.Add(ctx, dl[l])
if err != nil {
ctx.Transaction.Rollback()
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
// Record activity and finish.
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: d.SpaceID,
DocumentID: d.RefID,
SourceType: activity.SourceTypeDocument,
ActivityType: activity.TypeCreated})
ctx.Transaction.Commit()
h.Store.Audit.Record(ctx, audit.EventTypeDocumentAdd)
// Update search index if published.
if d.Lifecycle == workflow.LifecycleLive {
a, _ := h.Store.Attachment.GetAttachments(ctx, d.RefID)
go h.Indexer.IndexDocument(ctx, d, a)
pages, _ := h.Store.Page.GetPages(ctx, d.RefID)
for i := range pages {
go h.Indexer.IndexContent(ctx, pages[i])
}
} else {
go h.Indexer.DeleteDocument(ctx, d.RefID)
}
response.WriteEmpty(w)
}
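Duplicate builds pageRefMap (old section ID to new section ID) while cloning sections, then uses it to repoint copied links at the copied sections. A compact sketch of that remapping step, with a simplified link type standing in for the real link model:

package example

// link carries just the fields the remapping touches.
type link struct {
	RefID            string
	SourceDocumentID string
	SourceSectionID  string
}

// remapLinks mirrors the loop above: each copied link gets a fresh ID,
// points at the new document, and has its section reference swapped when
// the original section was cloned into the new document.
func remapLinks(links []link, newDocID string, refMap map[string]string, newID func() string) []link {
	for i := range links {
		links[i].RefID = newID()
		links[i].SourceDocumentID = newDocID
		if mapped, ok := refMap[links[i].SourceSectionID]; ok {
			links[i].SourceSectionID = mapped
		}
	}
	return links
}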
// Pin marks an existing document with a sequence number so that it
// appears at the top of the space view.
func (h *Handler) Pin(w http.ResponseWriter, r *http.Request) {
method := "document.Pin"
ctx := domain.GetRequestContext(r)
documentID := request.Param(r, "documentID")
if len(documentID) == 0 {
response.WriteMissingDataError(w, method, "documentID")
return
}
var ok bool
ctx.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
h.Runtime.Log.Info("unable to start transaction " + method)
response.WriteServerError(w, method, errors.New("unable to start transaction"))
return
}
d, err := h.Store.Document.Get(ctx, documentID)
if err != nil {
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
if !permission.CanManageSpace(ctx, *h.Store, d.SpaceID) {
h.Runtime.Rollback(ctx.Transaction)
response.WriteForbiddenError(w)
return
}
// Calculate the next sequence number for this newly pinned document.
seq, err := h.Store.Document.PinSequence(ctx, d.SpaceID)
if err != nil {
h.Runtime.Rollback(ctx.Transaction)
h.Runtime.Log.Error(method, err)
response.WriteServerError(w, method, err)
return
}
err = h.Store.Document.Pin(ctx, documentID, seq+1)
if err != nil {
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: d.SpaceID,
DocumentID: documentID,
SourceType: activity.SourceTypeDocument,
ActivityType: activity.TypePinned})
h.Runtime.Commit(ctx.Transaction)
h.Store.Audit.Record(ctx, audit.EventTypeDocPinAdd)
response.WriteEmpty(w)
}
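Pin, Unpin and PinMove all follow the transaction shape introduced in this change: start a transaction with h.Runtime.StartTx, roll back via h.Runtime.Rollback on every failure path, and commit once after the last store call. A stripped-down sketch of that skeleton; startTx and doWork are hypothetical stand-ins for the runtime and store calls, not the real signatures:

package example

import "net/http"

// tx is a minimal stand-in for the transaction handle used by the handlers.
type tx interface {
	Commit() error
	Rollback() error
}

// guardedHandler sketches the handler shape: begin, work, roll back on any
// failure, commit exactly once, then respond.
func guardedHandler(w http.ResponseWriter, startTx func() (tx, bool), doWork func() error) {
	t, ok := startTx()
	if !ok {
		http.Error(w, "unable to start transaction", http.StatusInternalServerError)
		return
	}
	if err := doWork(); err != nil {
		_ = t.Rollback()
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	_ = t.Commit()
	w.WriteHeader(http.StatusOK)
}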
// Unpin removes an existing document from the space pinned list.
func (h *Handler) Unpin(w http.ResponseWriter, r *http.Request) {
method := "document.Unpin"
ctx := domain.GetRequestContext(r)
documentID := request.Param(r, "documentID")
if len(documentID) == 0 {
response.WriteMissingDataError(w, method, "documentID")
return
}
var ok bool
ctx.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
h.Runtime.Log.Info("unable to start transaction " + method)
response.WriteServerError(w, method, errors.New("unable to start transaction"))
return
}
d, err := h.Store.Document.Get(ctx, documentID)
if err != nil {
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
if !permission.CanManageSpace(ctx, *h.Store, d.SpaceID) {
h.Runtime.Rollback(ctx.Transaction)
response.WriteForbiddenError(w)
return
}
err = h.Store.Document.Unpin(ctx, documentID)
if err != nil {
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: d.SpaceID,
DocumentID: documentID,
SourceType: activity.SourceTypeDocument,
ActivityType: activity.TypeUnpinned})
h.Runtime.Commit(ctx.Transaction)
h.Store.Audit.Record(ctx, audit.EventTypeDocPinRemove)
response.WriteEmpty(w)
}
// PinMove moves pinned document up or down in the sequence.
func (h *Handler) PinMove(w http.ResponseWriter, r *http.Request) {
method := "document.PinMove"
ctx := domain.GetRequestContext(r)
documentID := request.Param(r, "documentID")
if len(documentID) == 0 {
response.WriteMissingDataError(w, method, "documentID")
return
}
direction := request.Query(r, "direction")
if len(direction) == 0 {
response.WriteMissingDataError(w, method, "direction")
return
}
var ok bool
ctx.Transaction, ok = h.Runtime.StartTx(sql.LevelReadUncommitted)
if !ok {
h.Runtime.Log.Info("unable to start transaction " + method)
response.WriteServerError(w, method, errors.New("unable to start transaction"))
return
}
d, err := h.Store.Document.Get(ctx, documentID)
if err != nil {
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
if !permission.CanManageSpace(ctx, *h.Store, d.SpaceID) {
h.Runtime.Rollback(ctx.Transaction)
response.WriteForbiddenError(w)
return
}
// Get all pinned documents in the space.
pinnedDocs, err := h.Store.Document.Pinned(ctx, d.SpaceID)
if err != nil {
h.Runtime.Rollback(ctx.Transaction)
h.Runtime.Log.Error(method, err)
response.WriteServerError(w, method, err)
return
}
// Sort document list by sequence.
sort.Sort(doc.BySeq(pinnedDocs))
// Resequence the documents.
for i := range pinnedDocs {
if pinnedDocs[i].RefID == documentID {
if direction == "u" {
if i-1 >= 0 {
me := pinnedDocs[i].Sequence
target := pinnedDocs[i-1].Sequence
pinnedDocs[i-1].Sequence = me
pinnedDocs[i].Sequence = target
}
}
if direction == "d" {
if i+1 < len(pinnedDocs) {
me := pinnedDocs[i].Sequence
target := pinnedDocs[i+1].Sequence
pinnedDocs[i+1].Sequence = me
pinnedDocs[i].Sequence = target
}
}
break
}
}
// Sort document list by sequence.
sort.Sort(doc.BySeq(pinnedDocs))
// Save the resequenced documents.
for i := range pinnedDocs {
err = h.Store.Document.Pin(ctx, pinnedDocs[i].RefID, i+1)
if err != nil {
h.Runtime.Rollback(ctx.Transaction)
response.WriteServerError(w, method, err)
h.Runtime.Log.Error(method, err)
return
}
}
h.Store.Activity.RecordUserActivity(ctx, activity.UserActivity{
SpaceID: d.SpaceID,
DocumentID: documentID,
SourceType: activity.SourceTypeDocument,
ActivityType: activity.TypePinSequence})
h.Store.Audit.Record(ctx, audit.EventTypeDocPinChange)
h.Runtime.Commit(ctx.Transaction)
response.WriteEmpty(w)
}
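PinMove swaps the target document's sequence with its neighbour ("u" for up, "d" for down), re-sorts, and then rewrites sequences as 1..n so the pinned list stays dense. The same idea as a self-contained sketch (not the handler itself):

package example

import "sort"

type pinned struct {
	RefID    string
	Sequence int
}

// moveAndResequence swaps the target with its neighbour in the given
// direction, then renumbers every pinned document 1..n.
func moveAndResequence(docs []pinned, refID, direction string) []pinned {
	sort.Slice(docs, func(i, j int) bool { return docs[i].Sequence < docs[j].Sequence })
	for i := range docs {
		if docs[i].RefID != refID {
			continue
		}
		if direction == "u" && i-1 >= 0 {
			docs[i].Sequence, docs[i-1].Sequence = docs[i-1].Sequence, docs[i].Sequence
		}
		if direction == "d" && i+1 < len(docs) {
			docs[i].Sequence, docs[i+1].Sequence = docs[i+1].Sequence, docs[i].Sequence
		}
		break
	}
	sort.Slice(docs, func(i, j int) bool { return docs[i].Sequence < docs[j].Sequence })
	for i := range docs {
		docs[i].Sequence = i + 1
	}
	return docs
}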

File diff suppressed because one or more lines are too long

View file

@ -16,10 +16,11 @@ import (
"fmt"
"time"
"github.com/pkg/errors"
"github.com/documize/community/domain"
"github.com/documize/community/domain/store"
"github.com/documize/community/model/doc"
"github.com/pkg/errors"
)
// Store provides data access to space category information.
@ -36,10 +37,12 @@ func (s Store) Add(ctx domain.RequestContext, d doc.Document) (err error) {
_, err = ctx.Transaction.Exec(s.Bind(`
INSERT INTO dmz_doc (c_refid, c_orgid, c_spaceid, c_userid, c_job, c_location, c_name, c_desc, c_slug, c_tags,
c_template, c_protection, c_approval, c_lifecycle, c_versioned, c_versionid, c_versionorder, c_groupid, c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
c_template, c_protection, c_approval, c_lifecycle, c_versioned, c_versionid, c_versionorder, c_seq, c_groupid,
c_created, c_revised)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`),
d.RefID, d.OrgID, d.SpaceID, d.UserID, d.Job, d.Location, d.Name, d.Excerpt, d.Slug, d.Tags,
d.Template, d.Protection, d.Approval, d.Lifecycle, d.Versioned, d.VersionID, d.VersionOrder, d.GroupID, d.Created, d.Revised)
d.Template, d.Protection, d.Approval, d.Lifecycle, d.Versioned, d.VersionID, d.VersionOrder, d.Sequence,
d.GroupID, d.Created, d.Revised)
if err != nil {
err = errors.Wrap(err, "execute insert document")
@ -55,7 +58,7 @@ func (s Store) Get(ctx domain.RequestContext, id string) (document doc.Document,
c_job AS job, c_location AS location, c_name AS name, c_desc AS excerpt, c_slug AS slug,
c_tags AS tags, c_template AS template, c_protection AS protection, c_approval AS approval,
c_lifecycle AS lifecycle, c_versioned AS versioned, c_versionid AS versionid,
c_versionorder AS versionorder, c_groupid AS groupid, c_created AS created, c_revised AS revised
c_versionorder AS versionorder, c_seq AS sequence, c_groupid AS groupid, c_created AS created, c_revised AS revised
FROM dmz_doc
WHERE c_orgid=? AND c_refid=?`),
ctx.OrgID, id)
@ -78,9 +81,9 @@ func (s Store) GetBySpace(ctx domain.RequestContext, spaceID string) (documents
c_job AS job, c_location AS location, c_name AS name, c_desc AS excerpt, c_slug AS slug,
c_tags AS tags, c_template AS template, c_protection AS protection, c_approval AS approval,
c_lifecycle AS lifecycle, c_versioned AS versioned, c_versionid AS versionid,
c_versionorder AS versionorder, c_groupid AS groupid, c_created AS created, c_revised AS revised
c_versionorder AS versionorder, c_seq AS sequence, c_groupid AS groupid, c_created AS created, c_revised AS revised
FROM dmz_doc
WHERE c_orgid=? AND c_template=false AND c_spaceid IN
WHERE c_orgid=? AND c_template=`+s.IsFalse()+` AND c_spaceid IN
(SELECT c_refid FROM dmz_permission WHERE c_orgid=? AND c_location='space' AND c_refid=? AND c_refid IN
(SELECT c_refid from dmz_permission WHERE c_orgid=? AND c_who='user' AND (c_whoid=? OR c_whoid='0') AND c_location='space' AND c_action='view'
UNION ALL
@ -111,9 +114,9 @@ func (s Store) TemplatesBySpace(ctx domain.RequestContext, spaceID string) (docu
c_job AS job, c_location AS location, c_name AS name, c_desc AS excerpt, c_slug AS slug,
c_tags AS tags, c_template AS template, c_protection AS protection, c_approval AS approval,
c_lifecycle AS lifecycle, c_versioned AS versioned, c_versionid AS versionid,
c_versionorder AS versionorder, c_groupid AS groupid, c_created AS created, c_revised AS revised
c_versionorder AS versionorder, c_seq AS sequence, c_groupid AS groupid, c_created AS created, c_revised AS revised
FROM dmz_doc
WHERE c_orgid=? AND c_spaceid=? AND c_template=true AND c_lifecycle=1
WHERE c_orgid=? AND c_spaceid=? AND c_template=`+s.IsTrue()+` AND c_lifecycle=1
AND c_spaceid IN
(SELECT c_refid FROM dmz_space WHERE c_orgid=? AND c_refid IN
(SELECT c_refid FROM dmz_permission WHERE c_orgid=? AND c_location='space' AND c_refid IN
@ -144,7 +147,7 @@ func (s Store) PublicDocuments(ctx domain.RequestContext, orgID string) (documen
SELECT d.c_refid AS documentid, d.c_name AS document, d.c_revised as revised, l.c_refid AS spaceid, l.c_name AS space
FROM dmz_doc d
LEFT JOIN dmz_space l ON l.c_refid=d.c_spaceid
WHERE d.c_orgid=? AND l.c_type=1 AND d.c_lifecycle=1 AND d.c_template=false`),
WHERE d.c_orgid=? AND l.c_type=1 AND d.c_lifecycle=1 AND d.c_template=`+s.IsFalse()),
orgID)
if err == sql.ErrNoRows {
@ -167,7 +170,8 @@ func (s Store) Update(ctx domain.RequestContext, document doc.Document) (err err
c_spaceid=:spaceid, c_userid=:userid, c_job=:job, c_location=:location, c_name=:name,
c_desc=:excerpt, c_slug=:slug, c_tags=:tags, c_template=:template,
c_protection=:protection, c_approval=:approval, c_lifecycle=:lifecycle,
c_versioned=:versioned, c_versionid=:versionid, c_versionorder=:versionorder,
c_versioned=:versioned, c_versionid=:versionid, c_versionorder=:versionorder,
c_seq=:sequence,
c_groupid=:groupid, c_revised=:revised
WHERE c_orgid=:orgid AND c_refid=:refid`),
&document)
@ -179,6 +183,18 @@ func (s Store) Update(ctx domain.RequestContext, document doc.Document) (err err
return
}
// UpdateRevised sets document revision date to UTC now.
func (s Store) UpdateRevised(ctx domain.RequestContext, docID string) (err error) {
_, err = ctx.Transaction.Exec(s.Bind(`UPDATE dmz_doc SET c_revised=? WHERE c_orgid=? AND c_refid=?`),
time.Now().UTC(), ctx.OrgID, docID)
if err != nil {
err = errors.Wrap(err, "document.store.UpdateRevised")
}
return
}
// UpdateGroup applies same values to all documents with the same group ID.
func (s Store) UpdateGroup(ctx domain.RequestContext, d doc.Document) (err error) {
_, err = ctx.Transaction.Exec(s.Bind(`UPDATE dmz_doc SET c_name=?, c_desc=? WHERE c_orgid=? AND c_groupid=?`),
@ -238,31 +254,11 @@ func (s Store) MoveActivity(ctx domain.RequestContext, documentID, oldSpaceID, n
// Delete removes the specified document.
// Removes document pages, revisions, attachments, and updates the search subsystem.
func (s Store) Delete(ctx domain.RequestContext, documentID string) (rows int64, err error) {
rows, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_section WHERE c_docid='%s' AND c_orgid='%s'", documentID, ctx.OrgID))
if err != nil {
return
}
_, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_section_revision WHERE c_docid='%s' AND c_orgid='%s'", documentID, ctx.OrgID))
if err != nil {
return
}
_, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_doc_attachment WHERE c_docid='%s' AND c_orgid='%s'", documentID, ctx.OrgID))
if err != nil {
return
}
_, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_category_member WHERE c_docid='%s' AND c_orgid='%s'", documentID, ctx.OrgID))
if err != nil {
return
}
_, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_doc_vote WHERE c_docid='%s' AND c_orgid='%s'", documentID, ctx.OrgID))
if err != nil {
return
}
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_section WHERE c_orgid=? AND c_docid=?"), ctx.OrgID, documentID)
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_section_revision WHERE c_orgid=? AND c_docid=?"), ctx.OrgID, documentID)
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_doc_attachment WHERE c_orgid=? AND c_docid=?"), ctx.OrgID, documentID)
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_category_member WHERE c_orgid=? AND c_docid=?"), ctx.OrgID, documentID)
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_doc_vote WHERE c_orgid=? AND c_docid=?"), ctx.OrgID, documentID)
return s.DeleteConstrained(ctx.Transaction, "dmz_doc", ctx.OrgID, documentID)
}
@ -270,25 +266,10 @@ func (s Store) Delete(ctx domain.RequestContext, documentID string) (rows int64,
// DeleteBySpace removes all documents for given space.
// Removes document pages, revisions, attachments, and updates the search subsystem.
func (s Store) DeleteBySpace(ctx domain.RequestContext, spaceID string) (rows int64, err error) {
rows, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_section WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid='%s' AND c_orgid='%s')", spaceID, ctx.OrgID))
if err != nil {
return
}
_, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_section_revision WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid='%s' AND c_orgid='%s')", spaceID, ctx.OrgID))
if err != nil {
return
}
_, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_doc_attachment WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid='%s' AND c_orgid='%s')", spaceID, ctx.OrgID))
if err != nil {
return
}
_, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_doc_vote WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid='%s' AND c_orgid='%s')", spaceID, ctx.OrgID))
if err != nil {
return
}
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_section WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid=? AND c_orgid=?)"), spaceID, ctx.OrgID)
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_section_revision WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid=? AND c_orgid=?)"), spaceID, ctx.OrgID)
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_doc_attachment WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid=? AND c_orgid=?)"), spaceID, ctx.OrgID)
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_doc_vote WHERE c_docid IN (SELECT c_refid FROM dmz_doc WHERE c_spaceid=? AND c_orgid=?)"), spaceID, ctx.OrgID)
return s.DeleteConstrained(ctx.Transaction, "dmz_doc", ctx.OrgID, spaceID)
}
@ -304,7 +285,7 @@ func (s Store) GetVersions(ctx domain.RequestContext, groupID string) (v []doc.V
v = []doc.Version{}
err = s.Runtime.Db.Select(&v, s.Bind(`
SELECT c_versionid AS versionid, c_refid As documentid
SELECT c_versionid AS versionid, c_refid As documentid, c_lifecycle AS lifecycle
FROM dmz_doc
WHERE c_orgid=? AND c_groupid=?
ORDER BY c_versionorder`),
@ -319,3 +300,78 @@ func (s Store) GetVersions(ctx domain.RequestContext, groupID string) (v []doc.V
return
}
// Pin allocates a sequence number to the specified document so that it
// appears at the top of the documents list.
func (s Store) Pin(ctx domain.RequestContext, documentID string, seq int) (err error) {
_, err = ctx.Transaction.Exec(s.Bind("UPDATE dmz_doc SET c_seq=? WHERE c_orgid=? AND c_refid=?"),
seq, ctx.OrgID, documentID)
if err != nil {
err = errors.Wrap(err, "document.store.Pin")
}
return
}
// Unpin resets sequence number for given document.
func (s Store) Unpin(ctx domain.RequestContext, documentID string) (err error) {
_, err = ctx.Transaction.Exec(s.Bind("UPDATE dmz_doc SET c_seq=? WHERE c_orgid=? AND c_refid=?"),
doc.Unsequenced, ctx.OrgID, documentID)
if err != nil {
err = errors.Wrap(err, "document.store.Unpin")
}
return
}
// PinSequence fetches pinned documents and returns the current
// maximum sequence value.
func (s Store) PinSequence(ctx domain.RequestContext, spaceID string) (max int, err error) {
max = 0
err = s.Runtime.Db.Get(&max, s.Bind(`
SELECT COALESCE(MAX(c_seq), 0)
FROM dmz_doc
WHERE c_orgid=? AND c_spaceid=?
AND c_seq != 99999`),
ctx.OrgID, spaceID)
if err == sql.ErrNoRows {
err = nil
}
if err != nil {
max = doc.Unsequenced
err = errors.Wrap(err, "document.store.PinSequence")
}
return
}
// Pinned returns the pinned documents for the given space.
func (s Store) Pinned(ctx domain.RequestContext, spaceID string) (d []doc.Document, err error) {
d = []doc.Document{}
err = s.Runtime.Db.Select(&d, s.Bind(`
SELECT id, c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_userid AS userid,
c_job AS job, c_location AS location, c_name AS name, c_desc AS excerpt, c_slug AS slug,
c_tags AS tags, c_template AS template, c_protection AS protection, c_approval AS approval,
c_lifecycle AS lifecycle, c_versioned AS versioned, c_versionid AS versionid,
c_versionorder AS versionorder, c_seq AS sequence, c_groupid AS groupid,
c_created AS created, c_revised AS revised
FROM dmz_doc
WHERE c_orgid=? AND c_spaceid=?
AND c_seq != 99999
ORDER BY c_seq`),
ctx.OrgID, spaceID)
if err == sql.ErrNoRows {
err = nil
}
if err != nil {
err = errors.Wrap(err, "document.store.Pinned")
}
return
}
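Pin, Unpin, PinSequence and Pinned together form a small pinning API: look up the current maximum sequence, pin at max+1, and list pinned documents in order. A usage sketch against simplified method signatures (the real store methods also take a domain.RequestContext):

package example

// pinStore captures the store methods added above, minus the request context.
type pinStore interface {
	PinSequence(spaceID string) (int, error)
	Pin(documentID string, seq int) error
}

// pinAtEnd pins a document after the currently pinned ones, mirroring the
// handler: next sequence = current maximum + 1.
func pinAtEnd(s pinStore, spaceID, documentID string) error {
	max, err := s.PinSequence(spaceID)
	if err != nil {
		return err
	}
	return s.Pin(documentID, max+1)
}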

View file

@ -13,7 +13,6 @@ package group
import (
"database/sql"
"fmt"
"time"
"github.com/documize/community/domain"
@ -104,7 +103,10 @@ func (s Store) Delete(ctx domain.RequestContext, refID string) (rows int64, err
if err != nil {
return
}
return s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_group_member WHERE c_orgid='%s' AND c_groupid='%s'", ctx.OrgID, refID))
ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_group_member WHERE c_orgid=? AND c_groupid=?"), ctx.OrgID, refID)
return
}
// GetGroupMembers returns all user associated with given group.
@ -143,15 +145,8 @@ func (s Store) JoinGroup(ctx domain.RequestContext, groupID, userID string) (err
// LeaveGroup removes user from group.
func (s Store) LeaveGroup(ctx domain.RequestContext, groupID, userID string) (err error) {
_, err = s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_group_member WHERE c_orgid='%s' AND c_groupid='%s' AND c_userid='%s'",
ctx.OrgID, groupID, userID))
if err == sql.ErrNoRows {
err = nil
}
if err != nil {
err = errors.Wrap(err, "clear group member")
}
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_group_member WHERE c_orgid=? AND c_groupid=? AND c_userid=?"),
ctx.OrgID, groupID, userID)
return
}
@ -182,16 +177,8 @@ func (s Store) GetMembers(ctx domain.RequestContext) (r []group.Record, err erro
// RemoveUserGroups removes the user from all groups.
func (s Store) RemoveUserGroups(ctx domain.RequestContext, userID string) (err error) {
_, err = s.DeleteWhere(ctx.Transaction,
fmt.Sprintf("DELETE FROM dmz_group_member WHERE c_orgid='%s' AND c_userid='%s'",
ctx.OrgID, userID))
if err == sql.ErrNoRows {
err = nil
}
if err != nil {
err = errors.Wrap(err, "RemoveUserGroups")
}
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_group_member WHERE c_orgid=? AND c_userid=?"),
ctx.OrgID, userID)
return
}

View file

@ -63,6 +63,7 @@ func (h *Handler) Add(w http.ResponseWriter, r *http.Request) {
return
}
l.RefID = uniqueid.Generate()
l.OrgID = ctx.OrgID
ctx.Transaction, err = h.Runtime.Db.Beginx()
if err != nil {
@ -155,7 +156,7 @@ func (h *Handler) Update(w http.ResponseWriter, r *http.Request) {
h.Store.Audit.Record(ctx, audit.EventTypeLabelUpdate)
response.WriteEmpty(w)
response.WriteJSON(w, l)
}
// Delete removes space label from store and

View file

@ -51,7 +51,7 @@ func (s Store) Get(ctx domain.RequestContext) (l []label.Label, err error) {
c_name AS name, c_color AS color,
c_created AS created, c_revised AS revised
FROM dmz_space_label
WHERE c_orgid=?`),
WHERE c_orgid=? ORDER BY c_name`),
ctx.OrgID)
if err == sql.ErrNoRows {

View file

@ -13,9 +13,12 @@ package link
import (
"database/sql"
"fmt"
"net/http"
"net/url"
"github.com/documize/community/core/stringutil"
"github.com/documize/community/core/env"
"github.com/documize/community/core/request"
"github.com/documize/community/core/response"
@ -160,3 +163,55 @@ func (h *Handler) SearchLinkCandidates(w http.ResponseWriter, r *http.Request) {
response.WriteJSON(w, payload)
}
// GetLink returns link object for given ID.
func (h *Handler) GetLink(w http.ResponseWriter, r *http.Request) {
method := "link.GetLink"
ctx := domain.GetRequestContext(r)
// Param check.
linkID := request.Param(r, "linkID")
if len(linkID) == 0 {
response.WriteMissingDataError(w, method, "linkID")
return
}
// Load link record.
link, err := h.Store.Link.GetLink(ctx, linkID)
if err != nil {
response.WriteServerError(w, method, err)
return
}
// Check document permissions.
if !permission.CanViewDocument(ctx, *h.Store, link.SourceDocumentID) {
response.WriteForbiddenError(w)
return
}
// Build URL for link
url := ""
// Jump-to-document link type.
if link.LinkType == "document" {
doc, err := h.Store.Document.Get(ctx, link.TargetDocumentID)
if err != nil {
response.WriteString(w, url)
}
url = ctx.GetAppURL(fmt.Sprintf("s/%s/%s/d/%s/%s",
doc.SpaceID, doc.SpaceID, doc.RefID, stringutil.MakeSlug(doc.Name)))
}
// Jump-to-section link type.
if link.LinkType == "section" || link.LinkType == "tab" {
doc, err := h.Store.Document.Get(ctx, link.TargetDocumentID)
if err != nil {
response.WriteString(w, url)
}
url = ctx.GetAppURL(fmt.Sprintf("s/%s/%s/d/%s/%s?currentPageId=%s",
doc.SpaceID, doc.SpaceID, doc.RefID,
stringutil.MakeSlug(doc.Name), link.TargetID))
}
response.WriteString(w, url)
}
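GetLink resolves a stored link to an app URL of the form s/{spaceID}/{spaceID}/d/{documentID}/{slug}, appending currentPageId for section and tab links. A small sketch of the same formatting; slugify and appURL are simplified stand-ins for stringutil.MakeSlug and ctx.GetAppURL:

package example

import (
	"fmt"
	"strings"
)

// slugify is a simplified stand-in for stringutil.MakeSlug.
func slugify(s string) string {
	return strings.ToLower(strings.ReplaceAll(strings.TrimSpace(s), " ", "-"))
}

// linkURL mirrors the handler above: document links point at the document;
// section and tab links also carry a currentPageId query parameter.
func linkURL(appURL func(path string) string, linkType, spaceID, docID, docName, targetID string) string {
	path := fmt.Sprintf("s/%s/%s/d/%s/%s", spaceID, spaceID, docID, slugify(docName))
	if linkType == "section" || linkType == "tab" {
		path = fmt.Sprintf("%s?currentPageId=%s", path, targetID)
	}
	return appURL(path)
}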

View file

@ -13,7 +13,6 @@ package link
import (
"database/sql"
"fmt"
"strings"
"time"
@ -46,6 +45,25 @@ func (s Store) Add(ctx domain.RequestContext, l link.Link) (err error) {
return
}
// GetLink returns specified link.
func (s Store) GetLink(ctx domain.RequestContext, linkID string) (l link.Link, err error) {
err = s.Runtime.Db.Get(&l, s.Bind(`
select c_refid AS refid, c_orgid AS orgid, c_spaceid AS spaceid, c_userid AS userid,
c_sourcedocid AS sourcedocumentid, c_sourcesectionid AS sourcesectionid,
c_targetdocid AS targetdocumentid, c_targetid AS targetid, c_externalid AS externalid,
c_type as linktype, c_orphan As orphan, c_created AS created, c_revised AS revised
FROM dmz_doc_link
WHERE c_orgid=? AND c_refid=?`),
ctx.OrgID, linkID)
if err != nil {
err = errors.Wrapf(err, "select link %s", linkID)
return
}
return
}
// GetDocumentOutboundLinks returns outbound links for specified document.
func (s Store) GetDocumentOutboundLinks(ctx domain.RequestContext, documentID string) (links []link.Link, err error) {
err = s.Runtime.Db.Select(&links, s.Bind(`
@ -57,13 +75,13 @@ func (s Store) GetDocumentOutboundLinks(ctx domain.RequestContext, documentID st
WHERE c_orgid=? AND c_sourcedocid=?`),
ctx.OrgID, documentID)
if err != nil && err != sql.ErrNoRows {
err = errors.Wrap(err, "select document oubound links")
return
}
if len(links) == 0 {
if err == sql.ErrNoRows || len(links) == 0 {
err = nil
links = []link.Link{}
}
if err != nil {
err = errors.Wrap(err, "select document oubound links")
}
return
}
@ -79,13 +97,13 @@ func (s Store) GetPageLinks(ctx domain.RequestContext, documentID, pageID string
WHERE c_orgid=? AND c_sourcedocid=? AND c_sourcesectionid=?`),
ctx.OrgID, documentID, pageID)
if err != nil && err != sql.ErrNoRows {
err = errors.Wrap(err, "get page links")
return
}
if len(links) == 0 {
if err == sql.ErrNoRows || len(links) == 0 {
err = nil
links = []link.Link{}
}
if err != nil {
err = errors.Wrap(err, "get page links")
}
return
}
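GetDocumentOutboundLinks and GetPageLinks now normalise the no-rows case before wrapping any remaining error, so callers always receive a usable (possibly empty) slice. The same idea in isolation, written here so that a genuine query error is never swallowed (a hedged sketch, not the store code):

package example

import (
	"database/sql"

	"github.com/pkg/errors"
)

// normalizeSelect folds sql.ErrNoRows and zero results into an empty slice,
// and wraps any other error with context for the caller.
func normalizeSelect(rows []string, err error, msg string) ([]string, error) {
	if err == sql.ErrNoRows {
		return []string{}, nil
	}
	if err != nil {
		return nil, errors.Wrap(err, msg)
	}
	if len(rows) == 0 {
		return []string{}, nil
	}
	return rows, nil
}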
@ -94,7 +112,7 @@ func (s Store) GetPageLinks(ctx domain.RequestContext, documentID, pageID string
func (s Store) MarkOrphanDocumentLink(ctx domain.RequestContext, documentID string) (err error) {
revised := time.Now().UTC()
_, err = ctx.Transaction.Exec(s.Bind(`UPDATE dmz_doc_link SET
c_orphan=true, c_revised=?
c_orphan=`+s.IsTrue()+`, c_revised=?
WHERE c_type='document' AND c_orgid=? AND c_targetdocid=?`),
revised, ctx.OrgID, documentID)
@ -109,7 +127,7 @@ func (s Store) MarkOrphanDocumentLink(ctx domain.RequestContext, documentID stri
func (s Store) MarkOrphanPageLink(ctx domain.RequestContext, pageID string) (err error) {
revised := time.Now().UTC()
_, err = ctx.Transaction.Exec(s.Bind(`UPDATE dmz_doc_link SET
c_orphan=true, c_revised=?
c_orphan=`+s.IsTrue()+`, c_revised=?
WHERE c_type='section' AND c_orgid=? AND c_targetid=?`),
revised, ctx.OrgID, pageID)
@ -124,7 +142,7 @@ func (s Store) MarkOrphanPageLink(ctx domain.RequestContext, pageID string) (err
func (s Store) MarkOrphanAttachmentLink(ctx domain.RequestContext, attachmentID string) (err error) {
revised := time.Now().UTC()
_, err = ctx.Transaction.Exec(s.Bind(`UPDATE dmz_doc_link SET
c_orphan=true, c_revised=?
c_orphan=`+s.IsTrue()+`, c_revised=?
WHERE c_type='file' AND c_orgid=? AND c_targetid=?`),
revised, ctx.OrgID, attachmentID)
@ -137,12 +155,18 @@ func (s Store) MarkOrphanAttachmentLink(ctx domain.RequestContext, attachmentID
// DeleteSourcePageLinks removes saved links for given source.
func (s Store) DeleteSourcePageLinks(ctx domain.RequestContext, pageID string) (rows int64, err error) {
return s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_doc_link WHERE c_orgid='%s' AND c_sourcesectionid='%s'", ctx.OrgID, pageID))
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_doc_link WHERE c_orgid=? AND c_sourcesectionid=?"),
ctx.OrgID, pageID)
return
}
// DeleteSourceDocumentLinks removes saved links for given document.
func (s Store) DeleteSourceDocumentLinks(ctx domain.RequestContext, documentID string) (rows int64, err error) {
return s.DeleteWhere(ctx.Transaction, fmt.Sprintf("DELETE FROM dmz_doc_link WHERE c_orgid='%s' AND c_sourcedocid='%s'", ctx.OrgID, documentID))
_, err = ctx.Transaction.Exec(s.Bind("DELETE FROM dmz_doc_link WHERE c_orgid=? AND c_sourcedocid=?"),
ctx.OrgID, documentID)
return
}
// DeleteLink removes saved link from the store.

Some files were not shown because too many files have changed in this diff.