You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
346 lines
8.4 KiB
346 lines
8.4 KiB
package model
|
|
|
|
import (
|
|
"database/sql"
|
|
"errors"
|
|
"fmt"
|
|
"net/url"
|
|
"time"
|
|
|
|
"git.aiterp.net/AiteRP/aitestory/formparser"
|
|
"git.aiterp.net/AiteRP/aitestory/server"
|
|
"git.aiterp.net/gisle/wrouter/generate"
|
|
"github.com/microcosm-cc/bluemonday"
|
|
"github.com/russross/blackfriday"
|
|
)
|
|
|
|
// PageCategories enforces the limited set of categories a page may be
// assigned to; the view model renders it as a select box. It may be
// moved into configuration later.
var PageCategories = []string{
	"OoC",
	"Story",
	"Background",
	"Document",
	"News",
	"Item",
	"Info",
}
|
|
|
|
// PageTypes lists how a page's source may be rendered. Markdown is the
// only option for now, but who knows what the future holds.
var PageTypes = []string{
	"Markdown",
}
|
|
|
|
// PageMinDate is the earliest date possible. Stories from Matriarch Eriana's childhood
|
|
// are thus not going to happen.
|
|
var PageMinDate, _ = time.Parse(time.RFC3339, "1753-01-01T00:00:00Z")
|
|
|
|
// Page is the model describing the individual articles posted
|
|
// by users.
|
|
type Page struct {
|
|
ID string `json:"id"`
|
|
Name string `json:"name"`
|
|
Author string `json:"author"`
|
|
Category string `json:"category"`
|
|
FictionalDate time.Time `json:"fictionalDate"`
|
|
PublishDate time.Time `json:"publishDate"`
|
|
EditDate time.Time `json:"editDate"`
|
|
Dated bool `json:"dated"`
|
|
Published bool `json:"published"`
|
|
Unlisted bool `json:"unlisted"`
|
|
Specific bool `json:"specific"`
|
|
Indexed bool `json:"indexed"`
|
|
BackgroundURL string `json:"backgroundUrl"`
|
|
Type string `json:"type"`
|
|
Source string `json:"source"`
|
|
Tags []Tag `json:"tags"`
|
|
|
|
prevTags []Tag
|
|
cachedOutput string
|
|
}
|
|
|
|
// Defaults fills in the default details for a page, suited for populating a form
|
|
func (page *Page) Defaults() {
|
|
page.Category = PageCategories[0]
|
|
|
|
page.Dated = true
|
|
page.Published = true
|
|
page.Unlisted = false
|
|
page.Specific = false
|
|
page.Indexed = true
|
|
|
|
page.BackgroundURL = ""
|
|
page.Type = PageTypes[0]
|
|
page.Source = ""
|
|
}
|
|
|
|
// Insert adds the page to the database
|
|
func (page *Page) Insert() error {
|
|
const insertPage = `
|
|
INSERT INTO page (
|
|
id, name, author, category, fictional_date,
|
|
publish_date, edit_date, dated, published,
|
|
unlisted, page.specific, indexed, type, source
|
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);
|
|
`
|
|
const insertTag = `INSERT INTO page_tag (page_id,tag_id,page_tag.primary) VALUES (?, ?, ?)`
|
|
|
|
db := server.Main.DB
|
|
|
|
if page.ID == "" {
|
|
page.generateID()
|
|
}
|
|
|
|
// Do the thing
|
|
_, err := db.Exec(insertPage,
|
|
page.ID, page.Name, page.Author, page.Category, page.FictionalDate, page.PublishDate,
|
|
page.EditDate, page.Dated, page.Published, page.Unlisted, page.Specific, page.Indexed,
|
|
page.Type, page.Source,
|
|
)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
// Insert tags
|
|
for i, tag := range page.Tags {
|
|
_, err := db.Exec(insertTag, page.ID, tag.ID, i == 0)
|
|
if err != nil {
|
|
page.Delete()
|
|
return err
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// Update saves the page to the database
|
|
func (page *Page) Update() error {
|
|
const updatePage = `
|
|
UPDATE page SET
|
|
name=?,category=?,fictional_date=?,publish_date=?,
|
|
edit_date=?,dated=?,published=?,unlisted=?,page.specific=?,
|
|
indexed=?,type=?,source=?
|
|
WHERE id=?
|
|
`
|
|
const clearTags = `DELETE FROM page_tag WHERE page_id=?`
|
|
const insertTag = `INSERT INTO page_tag (page_id,tag_id,page_tag.primary) VALUES (?, ?, ?)`
|
|
|
|
db := server.Main.DB
|
|
|
|
if page.ID == "" {
|
|
return errors.New("no id")
|
|
}
|
|
|
|
// Do the thing
|
|
_, err := db.Exec(updatePage,
|
|
page.Name, page.Category, page.FictionalDate, page.PublishDate,
|
|
page.EditDate, page.Dated, page.Published, page.Unlisted, page.Specific, page.Indexed,
|
|
page.Type, page.Source, page.ID,
|
|
)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
// Stop now if the tages haven't changed
|
|
if len(page.prevTags) == len(page.Tags) {
|
|
change := false
|
|
|
|
for i, tag := range page.prevTags {
|
|
if tag.ID != page.prevTags[i].ID {
|
|
change = true
|
|
break
|
|
}
|
|
}
|
|
|
|
if !change {
|
|
return nil
|
|
}
|
|
}
|
|
|
|
// Re-tag (can be optimized if need arise)
|
|
_, err = db.Exec(clearTags, page.ID)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
for i, tag := range page.Tags {
|
|
_, err := db.Exec(insertTag, page.ID, tag.ID, i == 0)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// Delete removes the page from the database
|
|
func (page *Page) Delete() error {
|
|
db := server.Main.DB
|
|
|
|
// Do the thing
|
|
results, err := db.Exec("DELETE FROM `page` WHERE id=? LIMIT 1", page.ID)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
// Count the stuffs that were done things to
|
|
affected, err := results.RowsAffected()
|
|
if err != nil {
|
|
return err
|
|
}
|
|
if affected == 0 {
|
|
return errors.New("page not found")
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// Content parses the content of the page
|
|
func (page *Page) Content() (string, error) {
|
|
if page.cachedOutput != "" {
|
|
return page.cachedOutput, nil
|
|
}
|
|
|
|
if page.Type == "Markdown" {
|
|
// TODO: Convert [[Ehanis Tioran]] to [Ehanis Tioran](https://wiki.aiterp.net/index.php?title=Ehanis%20Tioran)
|
|
|
|
unsafe := blackfriday.MarkdownCommon([]byte(page.Source))
|
|
page.cachedOutput = string(bluemonday.UGCPolicy().SanitizeBytes(unsafe))
|
|
|
|
return page.cachedOutput, nil
|
|
}
|
|
|
|
return "", fmt.Errorf("Page type '%s' is not supported", page.Type)
|
|
}
|
|
|
|
// ParseForm validates the values in a form and sets the page's values whenever possible regardless
|
|
// so that it can be pushed to the viewmodel to allow the user to correct their mistakes without fear
|
|
// of losing their hard work
|
|
func (page *Page) ParseForm(form url.Values) []error {
|
|
errors := make([]error, 0, 4)
|
|
page.cachedOutput = ""
|
|
|
|
err := formparser.Select(form.Get("category"), &page.Category, PageCategories, page.Category != "")
|
|
if err != nil {
|
|
errors = append(errors, fmt.Errorf("Category: %s", err))
|
|
}
|
|
|
|
err = formparser.Date(form.Get("fictionalDate"), &page.FictionalDate, !page.FictionalDate.IsZero())
|
|
if err != nil {
|
|
errors = append(errors, fmt.Errorf("Fictonal Date: %s", err))
|
|
}
|
|
|
|
page.Dated = form.Get("dated") != ""
|
|
page.Published = form.Get("published") != ""
|
|
page.Unlisted = form.Get("unlisted") != ""
|
|
page.Specific = form.Get("specific") != ""
|
|
page.Indexed = form.Get("indexed") != ""
|
|
|
|
err = formparser.String(form.Get("backgroundUrl"), &page.BackgroundURL, 0, 255)
|
|
if err != nil {
|
|
errors = append(errors, fmt.Errorf("Background URL: %s", err))
|
|
}
|
|
|
|
err = formparser.Select(form.Get("type"), &page.Type, PageTypes, page.Type != "")
|
|
if err != nil {
|
|
errors = append(errors, fmt.Errorf("Category: %s", err))
|
|
}
|
|
|
|
err = formparser.String(form.Get("source"), &page.Source, 0, 102400)
|
|
if err != nil {
|
|
errors = append(errors, fmt.Errorf("Content is too long, max: 100 KB (~17,000 words)"))
|
|
}
|
|
|
|
if len(errors) > 0 {
|
|
errors = nil
|
|
}
|
|
|
|
return errors
|
|
}
|
|
|
|
// Standardize page ID generation
|
|
func (page *Page) generateID() {
|
|
page.ID = generate.FriendlyID(16)
|
|
}
|
|
|
|
// FindPage finds a page by ID. The Header model handles
|
|
// listning pages
|
|
func FindPage(id string) (*Page, error) {
|
|
const selectPage = `
|
|
SELECT id,name,author,category,fictional_date,publish_date,edit_date,dated,published,
|
|
unlisted,page.specific,indexed,type,source,background_url
|
|
FROM page
|
|
WHERE id=?
|
|
`
|
|
const selectPageTags = `
|
|
SELECT tag.id,tag.type,tag.name
|
|
FROM page_tag
|
|
RIGHT JOIN tag ON (tag.id = page_tag.tag_id)
|
|
WHERE page_tag.page_id = ?
|
|
`
|
|
|
|
db := server.Main.DB
|
|
|
|
rows, err := db.Query(selectPage, id)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
if !rows.Next() {
|
|
return nil, errors.New("not found")
|
|
}
|
|
|
|
page := new(Page)
|
|
err = parsePage(page, rows)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
rows, err = db.Query(selectPageTags, page.ID)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
page.Tags = make([]Tag, 0, 64)
|
|
for rows.Next() {
|
|
tag := Tag{}
|
|
rows.Scan(&tag.ID, &tag.Type, &tag.Name)
|
|
page.Tags = append(page.Tags, tag)
|
|
}
|
|
|
|
return page, nil
|
|
}
|
|
|
|
func parsePage(page *Page, rows *sql.Rows) error {
|
|
var fictionalDate, publishDate, editDate string
|
|
var bgURL *string
|
|
|
|
err := rows.Scan(
|
|
&page.ID, &page.Name, &page.Author, &page.Category, &fictionalDate,
|
|
&publishDate, &editDate, &page.Dated, &page.Published, &page.Unlisted,
|
|
&page.Specific, &page.Indexed, &page.Type, &page.Source, &bgURL,
|
|
)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
if bgURL != nil {
|
|
page.BackgroundURL = *bgURL
|
|
}
|
|
|
|
page.FictionalDate, err = time.Parse("2006-01-02 15:04:05", fictionalDate)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
page.PublishDate, err = time.Parse("2006-01-02 15:04:05", publishDate)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
page.EditDate, err = time.Parse("2006-01-02 15:04:05", editDate)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
return nil
|
|
}
|