
change config naming and structure

Elliott Stoneham 2016-05-17 13:13:56 +01:00
parent 0a40c96849
commit 9d6e0faf0c
14 changed files with 452 additions and 109 deletions
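In short, this commit moves runtime settings out of command-line/environment flags and into a JSON `config` table in the database, keyed by area (SMTP, FILEPLUGINS, LICENSE, META) and read lazily through request.ConfigString(area, path). A minimal sketch of the new access pattern, assuming this repository's request package and a seeded config table (illustrative only; the function name is hypothetical and not part of the diff):

	// Each area holds one JSON document; the path selects an attribute within it.
	func exampleConfigReads() (host, endpoint, plugins string) {
		host = request.ConfigString("SMTP", "host")            // $.host of the SMTP row
		endpoint = request.ConfigString("LICENSE", "endpoint")  // $.endpoint of the LICENSE row
		plugins = request.ConfigString("FILEPLUGINS", "")       // empty path returns the whole JSON document
		return
	}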

View file

@@ -25,7 +25,7 @@ cp documize/api/mail/*.html documize/web/bindata/mail
 cp documize/database/templates/*.html documize/web/bindata
 rm -rf documize/web/bindata/scripts
 mkdir -p documize/web/bindata/scripts
-cp -r documize/database/scripts documize/web/bindata
+cp -r documize/database/scripts/autobuild/*.sql documize/web/bindata/scripts
 echo "Generating in-memory static assets..."
 go get github.com/jteeuwen/go-bindata/...

View file

@@ -6,27 +6,32 @@ import (
 	"net/http"
 
 	"github.com/documize/community/documize/api/request"
-	"github.com/documize/community/wordsmith/environment"
 )
 
-var endPoint = "https://api.documize.com"
-
-var token string
-
-func init() {
-	environment.GetString(&endPoint, "endpoint", false, "Documize end-point", request.FlagFromDB)
-	environment.GetString(&token, "token", false, "Documize token", request.FlagFromDB)
+func endPoint() string {
+	r := request.ConfigString("LICENSE", "endpoint")
+	if r != "" {
+		return r
+	}
+	return "https://api.documize.com"
+}
+
+func token() (string, error) {
+	r := request.ConfigString("LICENSE", "token")
+	if r == "" {
+		return "", errors.New("Documize token is empty")
+	}
+	// TODO more validation here
+	return r, nil
 }
 
 var transport = &http.Transport{
-	TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, // TODO should be from -insecure flag
-}
+	TLSClientConfig: &tls.Config{
+		InsecureSkipVerify: true, // TODO should be glick.InsecureSkipVerifyTLS (from -insecure flag) but get error: x509: certificate signed by unknown authority
+	}}
 
-// CheckToken tests if the supplied token is valid.
+// CheckToken returns an error if the Documize LICENSE token is invalid.
 func CheckToken() error {
-	if token == "" {
-		return errors.New("Documize token is empty")
-	}
-	// TODO validate against endPoint site
-	return nil
+	_, err := token()
+	return err
 }

View file

@@ -26,7 +26,12 @@ func (file *Msword) Convert(r api.DocumentConversionRequest, reply *api.Document
 	client := &http.Client{Transport: transport}
-	resp, err := client.Post(endPoint+"/api/word?token="+token, "application/json", bytes.NewReader(byts))
+	tok, err := token()
+	if err != nil {
+		return err
+	}
+	resp, err := client.Post(endPoint()+"/api/word?token="+tok, "application/json", bytes.NewReader(byts))
 	if err != nil {
 		return err
 	}

View file

@@ -8,7 +8,6 @@ import (
 	"github.com/codegangsta/negroni"
 	"github.com/documize/community/documize/api/plugins"
-	"github.com/documize/community/documize/api/request"
 	"github.com/documize/community/documize/database"
 	"github.com/documize/community/documize/web"
 	"github.com/documize/community/wordsmith/environment"
@@ -26,10 +25,10 @@ const (
 var port, certFile, keyFile, forcePort2SSL string
 
 func init() {
-	environment.GetString(&certFile, "cert", false, "the cert.pem file used for https", request.FlagFromDB)
-	environment.GetString(&keyFile, "key", false, "the key.pem file used for https", request.FlagFromDB)
-	environment.GetString(&port, "port", false, "http/https port number", request.FlagFromDB)
-	environment.GetString(&forcePort2SSL, "forcesslport", false, "redirect given http port number to TLS", request.FlagFromDB)
+	environment.GetString(&certFile, "cert", false, "the cert.pem file used for https", nil)
+	environment.GetString(&keyFile, "key", false, "the key.pem file used for https", nil)
+	environment.GetString(&port, "port", false, "http/https port number", nil)
+	environment.GetString(&forcePort2SSL, "forcesslport", false, "redirect given http port number to TLS", nil)
 }
 
 var testHost string // used during automated testing

View file

@@ -10,7 +10,6 @@ import (
 	"github.com/documize/community/documize/api/request"
 	"github.com/documize/community/documize/web"
-	"github.com/documize/community/wordsmith/environment"
 	"github.com/documize/community/wordsmith/log"
 )
@@ -35,7 +34,7 @@ func InviteNewUser(recipient, inviter, url, username, password string) {
 	subject := fmt.Sprintf("%s has invited you to Documize", inviter)
 	e := newEmail()
-	e.From = creds.SMTPsender
+	e.From = creds.SMTPsender()
 	e.To = []string{recipient}
 	e.Subject = subject
@@ -86,7 +85,7 @@ func InviteExistingUser(recipient, inviter, url string) {
 	subject := fmt.Sprintf("%s has invited you to their Documize account", inviter)
 	e := newEmail()
-	e.From = creds.SMTPsender
+	e.From = creds.SMTPsender()
 	e.To = []string{recipient}
 	e.Subject = subject
@@ -128,7 +127,7 @@ func PasswordReset(recipient, url string) {
 	subject := "Documize password reset request"
 	e := newEmail()
-	e.From = "Documize <hello@documize.com>"
+	e.From = creds.SMTPsender() //e.g. "Documize <hello@documize.com>"
 	e.To = []string{recipient}
 	e.Subject = subject
@@ -173,7 +172,7 @@ func ShareFolderExistingUser(recipient, inviter, url, folder, intro string) {
 	subject := fmt.Sprintf("%s has shared %s with you", inviter, folder)
 	e := newEmail()
-	e.From = creds.SMTPsender
+	e.From = creds.SMTPsender()
 	e.To = []string{recipient}
 	e.Subject = subject
@@ -224,7 +223,7 @@ func ShareFolderNewUser(recipient, inviter, url, folder, invitationMessage strin
 	subject := fmt.Sprintf("%s has shared %s with you on Documize", inviter, folder)
 	e := newEmail()
-	e.From = creds.SMTPsender
+	e.From = creds.SMTPsender()
 	e.To = []string{recipient}
 	e.Subject = subject
@@ -254,24 +253,30 @@ func ShareFolderNewUser(recipient, inviter, url, folder, invitationMessage strin
 	}
 }
 
-var creds struct{ SMTPuserid, SMTPpassword, SMTPhost, SMTPport, SMTPsender string }
-
-func init() {
-	creds.SMTPport = "587" // the default value for outgoing SMTP traffic
-	creds.SMTPsender = "Documize <hello@documize.com>" // TODO review as SAAS specific
-	environment.GetString(&creds.SMTPuserid, "smtpuserid", false, "SMTP username for outgoing email", request.FlagFromDB)
-	environment.GetString(&creds.SMTPpassword, "smtppassword", false, "SMTP password for outgoing email", request.FlagFromDB)
-	environment.GetString(&creds.SMTPhost, "smtphost", false, "SMTP host for outgoing email", request.FlagFromDB)
-	environment.GetString(&creds.SMTPport, "smtpport", false, "SMTP port for outgoing email", request.FlagFromDB)
-	environment.GetString(&creds.SMTPsender, "smtpsender", false, "SMTP sender's e-mail for outgoing email", request.FlagFromDB)
+var creds = struct{ SMTPuserid, SMTPpassword, SMTPhost, SMTPport, SMTPsender func() string }{
+	func() string { return request.ConfigString("SMTP", "userid") },
+	func() string { return request.ConfigString("SMTP", "password") },
+	func() string { return request.ConfigString("SMTP", "host") },
+	func() string {
+		r := request.ConfigString("SMTP", "port")
+		if r == "" {
+			return "587" // default port number
+		}
+		return r
+	},
+	func() string { return request.ConfigString("SMTP", "sender") },
 }
 
 // Helper to return SMTP credentials
 func getAuth() smtp.Auth {
-	return smtp.PlainAuth("", creds.SMTPuserid, creds.SMTPpassword, creds.SMTPhost)
+	a := smtp.PlainAuth("", creds.SMTPuserid(), creds.SMTPpassword(), creds.SMTPhost())
+	//fmt.Printf("DEBUG getAuth() = %#v\n", a)
+	return a
 }
 
 // Helper to return SMTP host details
 func getHost() string {
-	return creds.SMTPhost + ":" + creds.SMTPport
+	h := creds.SMTPhost() + ":" + creds.SMTPport()
+	//fmt.Printf("DEBUG getHost() = %#v\n", h)
+	return h
 }
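The creds struct now holds closures, so SMTP settings are read from the database at send time rather than captured at process start. A hedged sketch of how they could feed Go's net/smtp (the package's real send helper is not shown in this hunk; sendRaw is a hypothetical name):

	// Hypothetical illustration: getHost() yields "host:port" and getAuth() a
	// PlainAuth built from the config table, both evaluated at call time.
	func sendRaw(to []string, msg []byte) error {
		return smtp.SendMail(getHost(), getAuth(), creds.SMTPsender(), to, msg)
	}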

View file

@@ -2,6 +2,7 @@
 package plugins
 
 import (
+	"bytes"
 	"fmt"
 	"io/ioutil"
 	"time"
@@ -18,14 +19,14 @@ import (
 )
 
 // PluginFile is the path to the file containing the configuration information for the plugin system in JSON format.
-var PluginFile = "plugin.json"
+var PluginFile = "DB" // this points to the database
 var insecure = "false"
 
 func init() {
 	environment.GetString(&PluginFile, "plugin", false,
-		"the JSON file describing plugins, default 'plugin.json' set to 'PLUGIN' to configure from database settings", request.FlagFromDB)
+		"the JSON file describing plugins, default 'DB' uses the database config table 'FILEPLUGINS' entry", nil)
 	environment.GetString(&insecure, "insecure", false,
-		"if 'true' allow https endpoints with invalid certificates (only for testing)", request.FlagFromDB)
+		"if 'true' allow https endpoints with invalid certificates (only for testing)", nil)
 }
 
 type infoLog struct{}
@@ -100,8 +101,11 @@ func LibSetup() error {
 	}
 	var json = make([]byte, 0)
-	if PluginFile == "PLUGIN" {
-		json = []byte(request.ConfigString(PluginFile, ""))
+	if PluginFile == "DB" {
+		json = []byte(request.ConfigString("FILEPLUGINS", ""))
+		if len(bytes.TrimSpace(json)) == 0 {
+			return nil // don't fail if the DB does not exist yet
+		}
 	} else {
 		json, err = ioutil.ReadFile(PluginFile)
 		if err != nil {
@@ -112,6 +116,7 @@
 	}
 	err = Lib.Configure(json)
 	if err != nil {
+		//fmt.Println("DEBUG plugin: "+string(json))
 		return err
 	}
 	return Lib.StartLocalRPCservers(infoLog{}, errorLog{})

View file

@@ -1,14 +1,14 @@
 package request
 
 import (
-	"strings"
+	"bytes"
 
-	"github.com/documize/community/wordsmith/environment"
 	"github.com/documize/community/wordsmith/utility"
 )
 
+/* NOT CURRENTLY USED
 // FlagFromDB overrides the value in *target if it is set in the database configuration JSON.
-// Function signiture must map that in environment
+// Function signaiture must map that in environment.
 func FlagFromDB(target *string, name string) bool {
 	value := ConfigString(environment.Prefix, name)
 	//fmt.Println("DEBUG FlagFromDB " + value)
@@ -18,17 +18,18 @@ func FlagFromDB(target *string, name string) bool {
 	}
 	return false
 }
+*/
 
 // ConfigString fetches a configuration JSON element from the config table.
 func ConfigString(area, path string) (ret string) {
 	if path != "" {
 		path = "." + path
 	}
-	sql := `SELECT JSON_EXTRACT(details,'$` + path + `') AS item FROM config WHERE area = '` + area + `';`
+	sql := "SELECT JSON_EXTRACT(`config`,'$" + path + "') FROM `config` WHERE `key` = '" + area + "';"
 	stmt, err := Db.Preparex(sql)
 	if err != nil {
-		//log.Error(fmt.Sprintf("Unable to prepare select for ConfigString: %s", sql), err)
+		//fmt.Printf("DEBUG: Unable to prepare select SQL for ConfigString: %s -- error: %v\n", sql, err)
 		return ""
 	}
 	defer utility.Close(stmt)
@@ -38,12 +39,13 @@ func ConfigString(area, path string) (ret string) {
 	err = stmt.Get(&item)
 	if err != nil {
-		//log.Error(fmt.Sprintf("Unable to execute select for ConfigString: %s", sql), err)
+		//fmt.Printf("DEBUG: Unable to prepare execute SQL for ConfigString: %s -- error: %v\n", sql, err)
 		return ""
 	}
 	if len(item) > 1 {
-		ret = string(item)
+		q := []byte(`"`)
+		ret = string(bytes.TrimPrefix(bytes.TrimSuffix(item, q), q))
 	}
 	//fmt.Println("DEBUG ConfigString " + sql + " => " + ret)

View file

@@ -36,10 +36,6 @@ func (dr *databaseRequest) MakeTx() (err error) {
 func init() {
 	var err error
 
-	var upgrade = "false"
-	environment.GetString(&upgrade, "upgrade", false,
-		"to upgrade the database set to 'true' (only this Documize binary must be running)", nil)
-
 	environment.GetString(&connectionString, "db", true,
 		`"username:password@protocol(hostname:port)/databasename" for example "fred:bloggs@tcp(localhost:3306)/documize"`,
@@ -50,8 +46,6 @@ func init() {
 		log.Error("Unable to setup database", err)
 	}
 
-	database.DbPtr = &Db // allow the database package to see this DB connection
-
 	Db.SetMaxIdleConns(30)
 	Db.SetMaxOpenConns(100)
@@ -64,25 +58,21 @@ func init() {
 	}
 
 	// go into setup mode if required
-	if database.Check(Db, connectionString) {
-		log.Info("database.Check(Db) OK")
-		migrations, err := database.Migrations(ConfigString("DATABASE", "last_migration"))
-		if err != nil {
-			log.Error("Unable to find required database migrations: ", err)
+	if database.Check(Db, connectionString,
+		func() (bool, error) {
+			// LockDB locks the database for migrations, returning if locked and an error.
+			// TODO, and if lock fails, wait here until it unlocks
+			return false, errors.New("LockDB TODO")
+		},
+		func() {
+			// UnlockDB unlocks the database for migrations.
+			// Reports errors in the log.
+			// TODO
+		}) {
+		if err := database.Migrate(ConfigString("META", "database")); err != nil {
+			log.Error("Unable to run database migration: ", err)
 			os.Exit(2)
 		}
-		if len(migrations) > 0 {
-			if strings.ToLower(upgrade) != "true" {
-				log.Error("database migrations are required",
-					errors.New("the -upgrade flag is not 'true'"))
-				os.Exit(2)
-			}
-			if err := migrations.Migrate(); err != nil {
-				log.Error("Unable to run database migration: ", err)
-				os.Exit(2)
-			}
-		}
 	} else {
 		log.Info("database.Check(Db) !OK, going into setup mode")
 	}

View file

@@ -13,8 +13,22 @@ import (
 var dbCheckOK bool // default false
 
-// Check that the database is configured correctly and that all the required tables exist
-func Check(Db *sqlx.DB, connectionString string) bool {
+// dbPtr is a pointer to the central connection to the database, used by all database requests.
+var dbPtr **sqlx.DB
+
+// lockDB locks the database
+var lockDB func() (bool, error)
+
+// unlockDB unlocks the database
+var unlockDB func()
+
+// Check that the database is configured correctly and that all the required tables exist.
+// It must be the first function called in the
+func Check(Db *sqlx.DB, connectionString string,lDB func() (bool, error),ulDB func()) bool {
+	dbPtr = &Db
+	lockDB=lDB
+	unlockDB=ulDB
+
 	csBits := strings.Split(connectionString, "/")
 	if len(csBits) > 1 {
 		web.SiteInfo.DBname = strings.Split(csBits[len(csBits)-1], "?")[0]

View file

@@ -8,20 +8,19 @@ import (
 	"strings"
 	"time"
 
-	"github.com/jmoiron/sqlx"
-
 	"github.com/documize/community/documize/api/util"
 	"github.com/documize/community/documize/web"
 	"github.com/documize/community/wordsmith/log"
 	"github.com/documize/community/wordsmith/utility"
 )
 
-// DbPtr is a pointer to the central connection to the database, used by all database requests.
-var DbPtr **sqlx.DB
-
 func runSQL(sql string) (id uint64, err error) {
-	tx, err := (*DbPtr).Beginx()
+	if strings.TrimSpace(sql) == "" {
+		return 0, nil
+	}
+	tx, err := (*dbPtr).Beginx()
 	if err != nil {
 		log.Error("runSql - failed to get transaction", err)
@@ -50,6 +49,7 @@ func runSQL(sql string) (id uint64, err error) {
 // Create the tables in a blank database
 func Create(w http.ResponseWriter, r *http.Request) {
 	txt := "database.Create()"
+	//defer func(){fmt.Println("DEBUG"+txt)}()
 	if dbCheckOK {
 		txt += " Check OK"
@@ -119,13 +119,15 @@ func Create(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	buf, err := web.ReadFile("scripts/create.sql")
+	firstSQL := "db_00000.sql"
+	buf, err := web.ReadFile("scripts/" + firstSQL)
 	if err != nil {
 		log.Error("database.Create()'s web.ReadFile()", err)
 		return
 	}
 
-	tx, err := (*DbPtr).Beginx()
+	tx, err := (*dbPtr).Beginx()
 	if err != nil {
 		log.Error(" failed to get transaction", err)
 		return
@@ -149,6 +151,11 @@ func Create(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	if err := Migrate(firstSQL); err != nil {
+		log.Error("database.Create()", err)
+		return
+	}
+
 	err = setupAccount(details, util.GenerateSalt())
 	if err != nil {
 		log.Error("database.Create()", err)
@@ -235,8 +242,8 @@ func setupAccount(completion onboardRequest, serial string) (err error) {
 // getStatement strips out the comments and returns all the individual SQL commands (apart from "USE") as a []string.
 func getStatements(bytes []byte) []string {
-	/* Strip comments of the form '-- comment', '// comment' or like this one */
-	stripped := regexp.MustCompile("(?s)--.*?\n|(?s)//.*?\n|/\\*.*?\\*/").ReplaceAll(bytes, []byte("\n"))
+	/* Strip comments of the form '-- comment' or like this one */
+	stripped := regexp.MustCompile("(?s)--.*?\n|/\\*.*?\\*/").ReplaceAll(bytes, []byte("\n"))
 	sqls := strings.Split(string(stripped), ";")
 	ret := make([]string, 0, len(sqls))
 	for _, v := range sqls {

View file

@@ -8,15 +8,17 @@ import (
 	"github.com/documize/community/documize/web"
 )
 
-const migrationsDir = "bindata/scripts/migrate"
+const migrationsDir = "bindata/scripts"
 
-// MigrationsT holds a list of migration sql files to run.
-type MigrationsT []string
+// migrationsT holds a list of migration sql files to run.
+type migrationsT []string
 
-// Migrations returns a list of the migrations to update the database as required for this version of the code.
-func Migrations(last_migration string) (MigrationsT, error) {
-	last_migration = strings.TrimPrefix(strings.TrimSuffix(last_migration, `"`), `"`)
+// migrations returns a list of the migrations to update the database as required for this version of the code.
+func migrations(lastMigration string) (migrationsT, error) {
+	lastMigration = strings.TrimPrefix(strings.TrimSuffix(lastMigration, `"`), `"`)
+	//fmt.Println(`DEBUG Migrations("`+lastMigration+`")`)
 
 	files, err := web.AssetDir(migrationsDir)
 	if err != nil {
@@ -24,12 +26,12 @@ func Migrations(last_migration string) (MigrationsT, error) {
 	}
 	sort.Strings(files)
 
-	ret := make(MigrationsT, 0, len(files))
+	ret := make(migrationsT, 0, len(files))
 	hadLast := false
 	for _, v := range files {
-		if v == last_migration {
+		if v == lastMigration {
 			hadLast = true
 		} else {
 			if hadLast {
@@ -38,11 +40,12 @@ func Migrations(last_migration string) (MigrationsT, error) {
 		}
 	}
 
+	//fmt.Println(`DEBUG Migrations("`+lastMigration+`")=`,ret)
 	return ret, nil
 }
 
-// Migrate the database as required.
-func (m MigrationsT) Migrate() error {
+// migrate the database as required, by applying the migrations.
+func (m migrationsT) migrate() error {
 	for _, v := range m {
 		buf, err := web.Asset(migrationsDir + "/" + v)
 		if err != nil {
@@ -52,3 +55,25 @@ func (m MigrationsT) Migrate() error {
 	}
 	return nil
 }
+
+// Migrate the database as required, consolidated action.
+func Migrate(lastMigration string) error {
+	mig, err := migrations(lastMigration)
+	if err != nil {
+		return err
+	}
+	if len(mig) == 0 {
+		return nil // no migrations to perform
+	}
+	locked, err := lockDB()
+	if err != nil {
+		return err
+	}
+	if locked {
+		defer unlockDB()
+		if err := mig.migrate(); err != nil {
+			return err
+		}
+	}
+	return nil
+}
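Migrate is the new consolidated entry point: it lists the bindata/scripts assets newer than the supplied lastMigration value, then applies them between the lockDB/unlockDB callbacks registered by Check. As wired up in the request package's init() earlier in this commit, the call site reduces to this sketch (using the exported names from the diff; request.ConfigString qualifies the same call made package-locally there):

	if err := database.Migrate(request.ConfigString("META", "database")); err != nil {
		log.Error("Unable to run database migration: ", err)
		os.Exit(2)
	}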

View file

@@ -0,0 +1,279 @@
-- SQL to set up the Documize database
DROP TABLE IF EXISTS `user`;
CREATE TABLE IF NOT EXISTS `user` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`firstname` NVARCHAR(500) NOT NULL,
`lastname` NVARCHAR(500) NOT NULL,
`email` NVARCHAR(250) NOT NULL UNIQUE,
`initials` NVARCHAR(10) NOT NULL DEFAULT "",
`password` NVARCHAR(500) NOT NULL DEFAULT "",
`salt` NVARCHAR(100) NOT NULL DEFAULT "",
`reset` NVARCHAR(100) NOT NULL DEFAULT "",
`active` BOOL NOT NULL DEFAULT 1,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_user_id` (`id` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `audit`;
CREATE TABLE IF NOT EXISTS `audit` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`userid` CHAR(16) NOT NULL COLLATE utf8_bin,
`documentid` CHAR(16) NOT NULL DEFAULT "" COLLATE utf8_bin,
`pageid` CHAR(16) NOT NULL DEFAULT "" COLLATE utf8_bin,
`action` NVARCHAR(200) NOT NULL DEFAULT "",
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE INDEX `idx_audit_id` (`id` ASC),
INDEX `idx_orgid_url` (`orgid`))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `organization`;
CREATE TABLE IF NOT EXISTS `organization` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`company` NVARCHAR(500) NOT NULL,
`title` NVARCHAR(500) NOT NULL,
`message` NVARCHAR(500) NOT NULL,
`url` NVARCHAR(200) NOT NULL DEFAULT "",
`domain` NVARCHAR(200) NOT NULL DEFAULT "",
`email` NVARCHAR(500) NOT NULL DEFAULT "",
`allowanonymousaccess` BOOL NOT NULL DEFAULT 0,
`verified` BOOL NOT NULL DEFAULT 0,
`serial` NVARCHAR(50) NOT NULL DEFAULT "",
`active` BOOL NOT NULL DEFAULT 1,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_organization_id` (`id` ASC),
INDEX `idx_organization_url` (`url`),
INDEX `idx_organization_domain` (`domain`))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `account`;
CREATE TABLE IF NOT EXISTS `account` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`userid` CHAR(16) NOT NULL COLLATE utf8_bin,
`editor` BOOL NOT NULL DEFAULT 0,
`admin` BOOL NOT NULL DEFAULT 0,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_account_id` (`id` ASC),
INDEX `idx_account_userid` (`userid` ASC),
INDEX `idx_account_orgid` (`orgid` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `label`;
CREATE TABLE IF NOT EXISTS `label` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`label` NVARCHAR(255) NOT NULL,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`userid` CHAR(16) NOT NULL DEFAULT "" COLLATE utf8_bin,
`type` INT NOT NULL DEFAULT 1,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_label_id` (`id` ASC),
INDEX `idx_label_userid` (`userid` ASC),
INDEX `idx_label_orgid` (`orgid` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `labelrole`;
CREATE TABLE IF NOT EXISTS `labelrole` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`labelid` CHAR(16) NOT NULL COLLATE utf8_bin,
`userid` CHAR(16) NOT NULL COLLATE utf8_bin,
`canview` BOOL NOT NULL DEFAULT 0,
`canedit` BOOL NOT NULL DEFAULT 0,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_labelrole_id` (`id` ASC),
INDEX `idx_labelrole_userid` (`userid` ASC),
INDEX `idx_labelrole_labelid` (`labelid` ASC),
INDEX `idx_labelrole_orgid` (`orgid` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `document`;
CREATE TABLE IF NOT EXISTS `document` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`labelid` CHAR(16) NOT NULL COLLATE utf8_bin,
`userid` CHAR(16) NOT NULL COLLATE utf8_bin,
`job` CHAR(36) NOT NULL,
`location` NVARCHAR(2000) NOT NULL,
`title` NVARCHAR(2000) NOT NULL,
`excerpt` NVARCHAR(2000) NOT NULL,
`slug` NVARCHAR(2000) NOT NULL,
`tags` NVARCHAR(1000) NOT NULL DEFAULT '',
`template` BOOL NOT NULL DEFAULT 0,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_document_id` (`id` ASC),
INDEX `idx_document_orgid` (`orgid` ASC),
INDEX `idx_document_labelid` (`labelid` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `page`;
CREATE TABLE IF NOT EXISTS `page` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`documentid` CHAR(16) NOT NULL COLLATE utf8_bin,
`userid` CHAR(16) DEFAULT '' COLLATE utf8_bin,
`contenttype` CHAR(20) NOT NULL DEFAULT 'wysiwyg',
`level` INT UNSIGNED NOT NULL,
`sequence` DOUBLE NOT NULL,
`title` NVARCHAR(2000) NOT NULL,
`body` LONGTEXT,
`revisions` INT UNSIGNED NOT NULL,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_page_id` (`id` ASC),
INDEX `idx_page_orgid` (`orgid` ASC),
INDEX `idx_page_documentid` (`documentid` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `pagemeta`;
CREATE TABLE IF NOT EXISTS `pagemeta` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`pageid` CHAR(16) NOT NULL COLLATE utf8_bin,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`documentid` CHAR(16) NOT NULL COLLATE utf8_bin,
`rawbody` LONGBLOB,
`config` JSON,
`externalsource` BOOL DEFAULT 0,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_pageid PRIMARY KEY (pageid),
UNIQUE INDEX `idx_pagemeta_id` (`id` ASC),
INDEX `idx_pagemeta_pageid` (`pageid` ASC),
INDEX `idx_pagemeta_orgid` (`orgid` ASC),
INDEX `idx_pagemeta_documentid` (`documentid` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `attachment`;
CREATE TABLE IF NOT EXISTS `attachment` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`documentid` CHAR(16) NOT NULL COLLATE utf8_bin,
`job` CHAR(36) NOT NULL,
`fileid` CHAR(10) NOT NULL,
`filename` NVARCHAR(255) NOT NULL,
`data` LONGBLOB,
`extension` CHAR(6) NOT NULL,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_attachment_id` (`id` ASC),
INDEX `idx_attachment_orgid` (`orgid` ASC),
INDEX `idx_attachment_documentid` (`documentid` ASC),
INDEX `idx_attachment_job_and_fileid` (`job`,`fileid` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
DROP TABLE IF EXISTS `search`;
CREATE TABLE IF NOT EXISTS `search` (
`id` CHAR(16) NOT NULL COLLATE utf8_bin,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`documentid` CHAR(16) NOT NULL COLLATE utf8_bin,
`level` INT UNSIGNED NOT NULL,
`sequence` DOUBLE NOT NULL,
`documenttitle` NVARCHAR(2000) NOT NULL,
`pagetitle` NVARCHAR(2000) NOT NULL,
`slug` NVARCHAR(2000) NOT NULL,
`body` LONGTEXT,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE INDEX `idx_search_id` (`id` ASC),
INDEX `idx_search_orgid` (`orgid` ASC),
INDEX `idx_search_documentid` (`documentid` ASC),
INDEX `idx_search_sequence` (`sequence` ASC),
FULLTEXT(`pagetitle`,`body`))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = MyISAM;
DROP TABLE IF EXISTS `revision`;
CREATE TABLE IF NOT EXISTS `revision` (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`refid` CHAR(16) NOT NULL COLLATE utf8_bin,
`orgid` CHAR(16) NOT NULL COLLATE utf8_bin,
`documentid` CHAR(16) NOT NULL COLLATE utf8_bin,
`ownerid` CHAR(16) DEFAULT '' COLLATE utf8_bin,
`pageid` CHAR(16) NOT NULL COLLATE utf8_bin,
`userid` CHAR(16) NOT NULL COLLATE utf8_bin,
`contenttype` CHAR(20) NOT NULL DEFAULT 'wysiwyg',
`title` NVARCHAR(2000) NOT NULL,
`body` LONGTEXT,
`rawbody` LONGBLOB,
`config` JSON,
`created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`revised` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT pk_refid PRIMARY KEY (refid),
UNIQUE INDEX `idx_revision_id` (`id` ASC),
INDEX `idx_revision_orgid` (`orgid` ASC),
INDEX `idx_revision_documentid` (`documentid` ASC),
INDEX `idx_revision_pageid` (`pageid` ASC))
DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci
ENGINE = InnoDB;
/*
ALTER DATABASE documize CHARACTER SET utf8 COLLATE utf8_general_ci;
ALTER TABLE organization CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
ALTER TABLE account CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
ALTER TABLE user CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
ALTER TABLE revision CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
ALTER TABLE label CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
ALTER TABLE document CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
ALTER TABLE page CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
*/
DROP TABLE IF EXISTS `config`;
CREATE TABLE IF NOT EXISTS `config` (
`key` CHAR(16) NOT NULL,
`config` JSON,
UNIQUE INDEX `idx_config_area` (`key` ASC) ) ;
INSERT INTO `config` VALUES ('SMTP','{\"userid\": \"\",\"password\": \"\",\"host\": \"\",\"port\": \"\",\"sender\": \"\"}');
INSERT INTO `config` VALUES ('FILEPLUGINS',
'[{\"Comment\": \"Disable (or not) built-in html import (NOTE: no Plugin name)\",\"Disabled\": false,\"API\": \"Convert\",\"Actions\": [\"htm\",\"html\"]},{\"Comment\": \"Disable (or not) built-in Documize API import used from SDK (NOTE: no Plugin name)\",\"Disabled\": false,\"API\": \"Convert\",\"Actions\": [\"documizeapi\"]}]');
INSERT INTO `config` VALUES ('LICENSE','{\"token\": \"\",\"endpoint\": \"https://api.documize.com\"}');
INSERT INTO `config` VALUES ('META','{\"database\": \"db_00000.sql\"}');
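The seeded rows give every area a default JSON document, so ConfigString never hits a missing row, and the META row records which script the schema corresponds to (the value Migrate compares against). A hypothetical maintenance snippet, not part of the commit, assuming MySQL 5.7+ (for JSON_SET) and the request package's open connection:

	// Point the SMTP area at a real mail host by rewriting one attribute of its JSON document.
	_, err := request.Db.Exec(
		"UPDATE `config` SET `config` = JSON_SET(`config`, '$.host', ?) WHERE `key` = 'SMTP';",
		"smtp.example.com")
	if err != nil {
		log.Error("unable to update SMTP config", err)
	}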

View file

@@ -266,13 +266,16 @@ ALTER TABLE page CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
 DROP TABLE IF EXISTS `config`;
 CREATE TABLE IF NOT EXISTS `config` (
-`area` CHAR(16) NOT NULL,
-`details` JSON,
-UNIQUE INDEX `idx_config_area` (`area` ASC) ) ;
+`key` CHAR(16) NOT NULL,
+`config` JSON,
+UNIQUE INDEX `idx_config_area` (`key` ASC) ) ;
 
-INSERT INTO `config` VALUES ('DOCUMIZE','{\"plugin\": \"PLUGIN\"}');
-INSERT INTO `config` VALUES ('PLUGIN',
+INSERT INTO `config` VALUES ('SMTP','{\"userid\": \"\",\"password\": \"\",\"host\": \"\",\"port\": \"\",\"sender\": \"\"}');
+
+INSERT INTO `config` VALUES ('FILEPLUGINS',
 '[{\"Comment\": \"Disable (or not) built-in html import (NOTE: no Plugin name)\",\"Disabled\": false,\"API\": \"Convert\",\"Actions\": [\"htm\",\"html\"]},{\"Comment\": \"Disable (or not) built-in Documize API import used from SDK (NOTE: no Plugin name)\",\"Disabled\": false,\"API\": \"Convert\",\"Actions\": [\"documizeapi\"]}]');
-INSERT INTO `config` VALUES ('DATABASE','{\"last_migration\": \"migrate-00002.sql\"}');
+
+INSERT INTO `config` VALUES ('LICENSE','{\"token\": \"\",\"endpoint\": \"https://api.documize.com\"}');
+INSERT INTO `config` VALUES ('META','{\"database\": \"migrate-00002.sql\"}');
 /* NOTE the line above must be changed every time a new migration is incorporated into this file */

View file

@@ -1,12 +1,16 @@
 DROP TABLE IF EXISTS `config`;
 CREATE TABLE IF NOT EXISTS `config` (
-`area` CHAR(16) NOT NULL,
-`details` JSON,
-UNIQUE INDEX `idx_config_area` (`area` ASC) ) ;
+`key` CHAR(16) NOT NULL,
+`config` JSON,
+UNIQUE INDEX `idx_config_area` (`key` ASC) ) ;
 
-INSERT INTO `config` VALUES ('DOCUMIZE','{\"plugin\": \"PLUGIN\"}');
-INSERT INTO `config` VALUES ('PLUGIN',
+INSERT INTO `config` VALUES ('SMTP','{\"userid\": \"\",\"password\": \"\",\"host\": \"\",\"port\": \"\",\"sender\": \"\"}');
+
+INSERT INTO `config` VALUES ('FILEPLUGINS',
 '[{\"Comment\": \"Disable (or not) built-in html import (NOTE: no Plugin name)\",\"Disabled\": false,\"API\": \"Convert\",\"Actions\": [\"htm\",\"html\"]},{\"Comment\": \"Disable (or not) built-in Documize API import used from SDK (NOTE: no Plugin name)\",\"Disabled\": false,\"API\": \"Convert\",\"Actions\": [\"documizeapi\"]}]');
-INSERT INTO `config` VALUES ('DATABASE','{\"last_migration\": \"migrate-00002.sql\"}');
+INSERT INTO `config` VALUES ('META','{\"database\": \"migrate-00002.sql\"}');
+INSERT INTO `config` VALUES ('LICENSE','{\"token\": \"\",\"endpoint\": \"https://api.documize.com\"}');