
Implemented Page model

master
Gisle Aune committed 7 years ago
commit 525fbb4f94
  1. model/header.go (120)
  2. model/page.go (200)
  3. model/page_test.go (271)
  4. tables.sql (5)

model/header.go (120)

@@ -0,0 +1,120 @@
package model
import (
"database/sql"
"errors"
"time"
"git.aiterp.net/AiteRP/aitestory/server"
)
// Header contains a subset of the page database
// table needed for the front page. It's a read-only
// model.
type Header struct {
ID string `json:"id"`
Name string `json:"name"`
Author string `json:"author"`
Category string `json:"category"`
FictionalDate time.Time `json:"fictionalDate"`
PublishDate time.Time `json:"publishDate"`
EditDate time.Time `json:"editDate"`
Dated bool `json:"dated"`
PrimaryTag *Tag `json:"primaryTag"`
}
// ListHeaders grabs all the general pages from
// the database to list them
func ListHeaders() ([]Header, error) {
const query = `
SELECT page.id,page.name,author,category,fictional_date,publish_date,edit_date,dated,tag.id,tag.type,tag.name
FROM page
LEFT JOIN page_tag ON (page.id = page_tag.page_id AND page_tag.primary = true)
LEFT JOIN tag ON (tag.id = page_tag.tag_id)
WHERE page.specific=false AND page.published=true AND page.unlisted=false;
`
db := server.Main.DB
rows, err := db.Query(query)
if err != nil {
return nil, err
}
defer rows.Close()
results := make([]Header, 0, 64)
header := Header{}
for rows.Next() {
err := parseHeader(&header, rows)
if err != nil {
return nil, err
}
results = append(results, header)
}
return results, nil
}
// ListHeadersByTag lists all headers that have the tag
func ListHeadersByTag(tag *Tag) ([]Header, error) {
const query = `
SELECT page.id,page.name,page.author,page.category,page.fictional_date,page.publish_date,page.edit_date,page.dated,tag.id,tag.type,tag.name
FROM page_tag
RIGHT JOIN page ON page.id = page_tag.page_id
LEFT JOIN (page_tag AS pt2) ON (page.id = pt2.page_id AND pt2.primary = true)
LEFT JOIN (tag AS tag) ON (tag.id = pt2.tag_id)
WHERE page_tag.tag_id=?
`
if tag == nil {
return nil, errors.New("no tag")
}
db := server.Main.DB
rows, err := db.Query(query, tag.ID)
if err != nil {
return nil, err
}
defer rows.Close()
results := make([]Header, 0, 64)
header := Header{}
for rows.Next() {
err := parseHeader(&header, rows)
if err != nil {
return nil, err
}
results = append(results, header)
}
return results, nil
}
func parseHeader(header *Header, rows *sql.Rows) error {
var tagID, tagName, tagType *string // nullable: the LEFT JOIN yields NULLs when the page has no primary tag
var fictionalDate, publishDate, editDate string
err := rows.Scan(&header.ID, &header.Name, &header.Author, &header.Category, &fictionalDate, &publishDate, &editDate, &header.Dated, &tagID, &tagType, &tagName)
if err != nil {
return err
}
if tagID != nil {
header.PrimaryTag = &Tag{ID: *tagID, Type: *tagType, Name: *tagName}
}
header.FictionalDate, err = time.Parse("2006-01-02 15:04:05", fictionalDate)
if err != nil {
return err
}
header.PublishDate, err = time.Parse("2006-01-02 15:04:05", publishDate)
if err != nil {
return err
}
header.EditDate, err = time.Parse("2006-01-02 15:04:05", editDate)
if err != nil {
return err
}
return nil
}
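For orientation, here is a rough sketch of how these listing functions might be consumed, for example when rendering the front page. It assumes the module import path git.aiterp.net/AiteRP/aitestory and that the server package has already opened its database connection; the wiring and the example tag ID are hypothetical and not part of this commit.

package main

import (
    "fmt"
    "log"

    "git.aiterp.net/AiteRP/aitestory/model"
)

func main() {
    // List every published, listed, non-specific page for the front page.
    headers, err := model.ListHeaders()
    if err != nil {
        log.Fatal(err)
    }
    for _, h := range headers {
        primary := "(no primary tag)"
        if h.PrimaryTag != nil { // nil when the LEFT JOIN found no primary tag
            primary = h.PrimaryTag.Name
        }
        fmt.Printf("[%s] %s by %s, primary tag: %s\n", h.Category, h.Name, h.Author, primary)
    }

    // Narrow the listing to pages carrying one specific tag.
    tagged, err := model.ListHeadersByTag(&model.Tag{ID: "sometagid"}) // hypothetical tag ID
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(len(tagged), "headers with that tag")
}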

model/page.go (200)

@@ -1,11 +1,14 @@
package model
import (
"database/sql"
"errors"
"fmt"
"net/url"
"time"
"git.aiterp.net/AiteRP/aitestory/formparser"
"git.aiterp.net/AiteRP/aitestory/server"
"git.aiterp.net/gisle/wrouter/generate"
"github.com/microcosm-cc/bluemonday"
"github.com/russross/blackfriday"
@@ -51,7 +54,9 @@ type Page struct {
BackgroundURL string `json:"backgroundUrl"`
Type string `json:"type"`
Source string `json:"source"`
Tags []Tag `json:"tags"`
prevTags []Tag
cachedOutput string
}
@@ -72,7 +77,120 @@ func (page *Page) Defaults() {
// Insert adds the page to the database
func (page *Page) Insert() error {
const insertPage = `
INSERT INTO page (
id, name, author, category, fictional_date,
publish_date, edit_date, dated, published,
unlisted, page.specific, indexed, type, source
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);
`
const insertTag = `INSERT INTO page_tag (page_id,tag_id,page_tag.primary) VALUES (?, ?, ?)`
db := server.Main.DB
if page.ID == "" {
page.generateID()
}
// Insert the page row
_, err := db.Exec(insertPage,
page.ID, page.Name, page.Author, page.Category, page.FictionalDate, page.PublishDate,
page.EditDate, page.Dated, page.Published, page.Unlisted, page.Specific, page.Indexed,
page.Type, page.Source,
)
if err != nil {
return err
}
// Insert tags
for i, tag := range page.Tags {
_, err := db.Exec(insertTag, page.ID, tag.ID, i == 0)
if err != nil {
page.Delete()
return err
}
}
return nil
}
// Update saves the page to the database
func (page *Page) Update() error {
const updatePage = `
UPDATE page SET
name=?,category=?,fictional_date=?,publish_date=?,
edit_date=?,dated=?,published=?,unlisted=?,page.specific=?,
indexed=?,type=?,source=?
WHERE id=?
`
const clearTags = `DELETE FROM page_tag WHERE page_id=?`
const insertTag = `INSERT INTO page_tag (page_id,tag_id,page_tag.primary) VALUES (?, ?, ?)`
db := server.Main.DB
if page.ID == "" {
return errors.New("no id")
}
// Update the page row
_, err := db.Exec(updatePage,
page.Name, page.Category, page.FictionalDate, page.PublishDate,
page.EditDate, page.Dated, page.Published, page.Unlisted, page.Specific, page.Indexed,
page.Type, page.Source, page.ID,
)
if err != nil {
return err
}
// Stop now if the tags haven't changed
if len(page.prevTags) == len(page.Tags) {
change := false
for i, tag := range page.Tags {
if tag.ID != page.prevTags[i].ID {
change = true
break
}
}
if !change {
return nil
}
}
// Re-tag (can be optimized if the need arises)
_, err = db.Exec(clearTags, page.ID)
if err != nil {
return err
}
for i, tag := range page.Tags {
_, err := db.Exec(insertTag, page.ID, tag.ID, i == 0)
if err != nil {
return err
}
}
return nil
}
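The check above compares the current Tags slice against the prevTags snapshot element by element and only rewrites the page_tag rows when something differs. Expressed as a stand-alone helper inside the model package, the equivalent comparison would look roughly like this (a sketch for clarity, not part of the commit):

// tagsEqual reports whether two tag lists contain the same tag IDs
// in the same order.
func tagsEqual(a, b []Tag) bool {
    if len(a) != len(b) {
        return false
    }
    for i := range a {
        if a[i].ID != b[i].ID {
            return false
        }
    }
    return true
}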
// Delete removes the page from the database
func (page *Page) Delete() error {
db := server.Main.DB
// Delete the page row
results, err := db.Exec("DELETE FROM `page` WHERE id=? LIMIT 1", page.ID)
if err != nil {
return err
}
// Make sure a row was actually deleted
affected, err := results.RowsAffected()
if err != nil {
return err
}
if affected == 0 {
return errors.New("page not found")
}
return nil
}
@@ -144,3 +262,85 @@ func (page *Page) ParseForm(form url.Values) []error {
func (page *Page) generateID() {
page.ID = generate.FriendlyID(16)
}
// FindPage finds a page by ID. The Header model handles
// listing pages
func FindPage(id string) (*Page, error) {
const selectPage = `
SELECT id,name,author,category,fictional_date,publish_date,edit_date,dated,published,
unlisted,page.specific,indexed,type,source,background_url
FROM page
WHERE id=?
`
const selectPageTags = `
SELECT tag.id,tag.type,tag.name
FROM page_tag
RIGHT JOIN tag ON (tag.id = page_tag.tag_id)
WHERE page_tag.page_id = ?
`
db := server.Main.DB
rows, err := db.Query(selectPage, id)
if err != nil {
return nil, err
}
defer rows.Close()
if !rows.Next() {
return nil, errors.New("not found")
}
page := new(Page)
err = parsePage(page, rows)
if err != nil {
return nil, err
}
rows, err = db.Query(selectPageTags, page.ID)
if err != nil {
return nil, err
}
page.Tags = make([]Tag, 0, 64)
for rows.Next() {
tag := Tag{}
if err := rows.Scan(&tag.ID, &tag.Type, &tag.Name); err != nil {
return nil, err
}
page.Tags = append(page.Tags, tag)
}
return page, nil
}
func parsePage(page *Page, rows *sql.Rows) error {
var fictionalDate, publishDate, editDate string
var bgURL *string
err := rows.Scan(
&page.ID, &page.Name, &page.Author, &page.Category, &fictionalDate,
&publishDate, &editDate, &page.Dated, &page.Published, &page.Unlisted,
&page.Specific, &page.Indexed, &page.Type, &page.Source, &bgURL,
)
if err != nil {
return err
}
if bgURL != nil {
page.BackgroundURL = *bgURL
}
page.FictionalDate, err = time.Parse("2006-01-02 15:04:05", fictionalDate)
if err != nil {
return err
}
page.PublishDate, err = time.Parse("2006-01-02 15:04:05", publishDate)
if err != nil {
return err
}
page.EditDate, err = time.Parse("2006-01-02 15:04:05", editDate)
if err != nil {
return err
}
return nil
}
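Putting the pieces together, a full round trip through the new Page model looks roughly like the sketch below, mirroring the flow exercised by the tests. The module import path is assumed from the repository, Tag.Insert is assumed to fill in the tag's ID (as the tests rely on), and the database connection must already be configured through the server package; this is illustrative code, not part of the commit.

package main

import (
    "fmt"
    "log"
    "time"

    "git.aiterp.net/AiteRP/aitestory/model"
)

func main() {
    // A tag for the page; Insert is expected to assign its ID.
    event := model.Tag{Type: "Event", Name: "Example Event"}
    if err := event.Insert(); err != nil {
        log.Fatal(err)
    }

    now := time.Now().UTC()
    page := model.Page{
        Name:          "Example Page",
        Author:        "test:Gisle",
        Category:      "Story",
        FictionalDate: now,
        PublishDate:   now,
        EditDate:      now,
        Dated:         true,
        Published:     true,
        Indexed:       true,
        Type:          "Markdown",
        Source:        "# Example",
        Tags:          []model.Tag{event}, // the first tag becomes the primary tag
    }
    if err := page.Insert(); err != nil { // an ID is generated because page.ID is empty
        log.Fatal(err)
    }

    // Read the page back, tweak it, and persist the change.
    found, err := model.FindPage(page.ID)
    if err != nil {
        log.Fatal(err)
    }
    found.Unlisted = true
    if err := found.Update(); err != nil {
        log.Fatal(err)
    }
    fmt.Println("updated page", found.ID)

    // Clean up the example data.
    if err := found.Delete(); err != nil {
        log.Fatal(err)
    }
    if err := event.Delete(); err != nil {
        log.Fatal(err)
    }
}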

model/page_test.go (271)

@@ -1,20 +1,81 @@
package model
import "testing"
import "time"
import (
"testing"
"time"
"git.aiterp.net/AiteRP/aitestory/server"
"git.aiterp.net/gisle/wrouter/generate"
)
var testPageTags = []*Tag{
&Tag{"", "Event", "Skipping Work (Test)"},
&Tag{"", "Location", "Redrock's Office (Test)"},
&Tag{"", "Character", "Renala T'Iavay (Test)"},
&Tag{"", "Character", "Senva T'Vaoma (Test)"},
}
var extraPageTag = &Tag{"", "Character", "Va'ynna Atana (Test)"}
var fictionalDate, _ = time.Parse("2006-01-02", "2185-07-25")
var postingDate, _ = time.Parse("2006-01-02", "2017-09-13")
var editDate, _ = time.Parse("2006-01-02", "2017-09-15")
var testPage = Page{
Name: "Returning Va'ynna's Omni-Tool (Test)",
Author: "test:Gisle",
Category: "Story",
FictionalDate: fictionalDate,
PublishDate: postingDate,
EditDate: editDate,
Dated: true,
Published: true,
Unlisted: false,
Specific: false,
Indexed: true,
BackgroundURL: "",
Type: "Markdown",
Source: "# Returning Va'ynna's Omni-Tool",
Tags: make([]Tag, 0, 4),
}
func TestPage(t *testing.T) {
var assertEquals = func(t *testing.T, label string, a, b interface{}) {
if a != b {
t.Errorf("Assert Failed (%s): \"%+v\" == \"%+v\"", label, a, b)
}
}
if server.Main.Config.DB.Password == "" {
t.Skip("No database password")
return
}
t.Run("SetupTags", func(t *testing.T) {
for _, tag := range testPageTags {
err := tag.Insert()
if err != nil {
t.Error(err)
}
testPage.Tags = append(testPage.Tags, *tag)
}
err := extraPageTag.Insert()
if err != nil {
t.Error(err)
}
})
t.Run("BasicConstants", func(t *testing.T) { t.Run("BasicConstants", func(t *testing.T) {
if PageMinDate.Format(time.RFC3339) != "1753-01-01T00:00:00Z" { if PageMinDate.Format(time.RFC3339) != "1753-01-01T00:00:00Z" {
t.Error("Invalid date:", PageMinDate.Format(time.RFC3339)) t.Error("Invalid date:", PageMinDate.Format(time.RFC3339))
t.Fail()
} }
page := Page{} page := Page{}
page.generateID() page.generateID()
if len(page.ID) != 16 { if len(page.ID) != 16 {
t.Errorf("len(page.ID): %d != 16", len(page.ID)) t.Errorf("len(page.ID): %d != 16", len(page.ID))
t.Fail()
} }
id1 := page.ID id1 := page.ID
@@ -26,4 +87,206 @@ func TestPage(t *testing.T) {
t.Fail()
}
})
t.Run("Insert", func(t *testing.T) {
err := testPage.Insert()
if err != nil {
t.Error(err)
}
t.Logf("testPage.ID = \"%s\"", testPage.ID)
if testPage.ID == "" {
t.Fail()
}
})
t.Run("ListHeaders", func(t *testing.T) {
headers, err := ListHeaders()
if err != nil {
t.Error(err)
}
t.Logf("Got %d headers", len(headers))
found := false
for _, header := range headers {
if header.ID == testPage.ID {
found = true
t.Logf("Found header: %+v", header)
assertEquals(t, "Name", header.Name, testPage.Name)
assertEquals(t, "Author", header.Author, testPage.Author)
assertEquals(t, "Category", header.Category, testPage.Category)
assertEquals(t, "PublishDate", header.PublishDate, testPage.PublishDate)
assertEquals(t, "EditDate", header.EditDate, testPage.EditDate)
assertEquals(t, "FictionalDate", header.FictionalDate, testPage.FictionalDate)
assertEquals(t, "Dated", header.Dated, testPage.Dated)
assertEquals(t, "PrimaryTag.ID", header.PrimaryTag.ID, testPageTags[0].ID)
}
}
if !found {
t.Error("Did not find the inserted page's header")
}
})
t.Run("ListHeadersByTag", func(t *testing.T) {
headers, err := ListHeadersByTag(testPageTags[1])
if err != nil {
t.Error(err)
}
t.Logf("Got %d headers", len(headers))
found := false
for _, header := range headers {
if header.ID == testPage.ID {
found = true
t.Logf("Found header: %+v", header)
assertEquals(t, "Name", header.Name, testPage.Name)
assertEquals(t, "Author", header.Author, testPage.Author)
assertEquals(t, "Category", header.Category, testPage.Category)
assertEquals(t, "PublishDate", header.PublishDate, testPage.PublishDate)
assertEquals(t, "EditDate", header.EditDate, testPage.EditDate)
assertEquals(t, "FictionalDate", header.FictionalDate, testPage.FictionalDate)
assertEquals(t, "Dated", header.Dated, testPage.Dated)
assertEquals(t, "PrimaryTag.ID", header.PrimaryTag.ID, testPageTags[0].ID)
}
}
if !found {
t.Error("Did not find the inserted page's header")
}
// Make a fake tag and make sure it doesn't return any headers
headers, err = ListHeadersByTag(&Tag{ID: generate.ID()})
if err != nil {
t.Error(err)
}
if len(headers) != 0 {
t.Errorf("This shouldn't have been found: %+v", headers)
}
})
t.Run("Find", func(t *testing.T) {
page, err := FindPage(testPage.ID)
if err != nil {
t.Errorf("FindPage: %s", err)
return
}
assertEquals(t, "Name", page.Name, testPage.Name)
assertEquals(t, "Author", page.Author, testPage.Author)
assertEquals(t, "Category", page.Category, testPage.Category)
assertEquals(t, "PublishDate", page.PublishDate, testPage.PublishDate)
assertEquals(t, "EditDate", page.EditDate, testPage.EditDate)
assertEquals(t, "FictionalDate", page.FictionalDate, testPage.FictionalDate)
assertEquals(t, "Dated", page.Dated, testPage.Dated)
assertEquals(t, "Specific", page.Specific, testPage.Specific)
assertEquals(t, "Published", page.Published, testPage.Published)
assertEquals(t, "Unlisted", page.Unlisted, testPage.Unlisted)
assertEquals(t, "Indexed", page.Indexed, testPage.Indexed)
assertEquals(t, "Source", page.Source, testPage.Source)
assertEquals(t, "BackgroundURL", page.BackgroundURL, testPage.BackgroundURL)
})
t.Run("Modify", func(t *testing.T) {
page, err := FindPage(testPage.ID)
if err != nil {
t.Errorf("FindPage: %s", err)
return
}
page.Name = "New Page name"
page.Source += "\nAdditional Content is additional"
page.Tags[3] = *extraPageTag // Replace "Senva T'Vaoma" with "Va'ynna Atana"
page.Unlisted = true
err = page.Update()
if err != nil {
t.Errorf("Update: %s", err)
return
}
page2, err := FindPage(page.ID)
if err != nil {
t.Errorf("FindPage 2: %s", err)
return
}
assertEquals(t, "Name", page2.Name, page.Name)
assertEquals(t, "Author", page2.Author, page.Author)
assertEquals(t, "Category", page2.Category, page.Category)
assertEquals(t, "PublishDate", page2.PublishDate, page.PublishDate)
assertEquals(t, "EditDate", page2.EditDate, page.EditDate)
assertEquals(t, "FictionalDate", page2.FictionalDate, page.FictionalDate)
assertEquals(t, "Dated", page2.Dated, page.Dated)
assertEquals(t, "Specific", page2.Specific, page.Specific)
assertEquals(t, "Published", page2.Published, page.Published)
assertEquals(t, "Unlisted", page2.Unlisted, page.Unlisted)
assertEquals(t, "Indexed", page2.Indexed, page.Indexed)
assertEquals(t, "Source", page2.Source, page.Source)
assertEquals(t, "BackgroundURL", page2.BackgroundURL, page.BackgroundURL)
})
t.Run("VerifyUnlisted", func(t *testing.T) {
headers, err := ListHeaders()
if err != nil {
t.Error(err)
}
t.Logf("Got %d headers", len(headers))
for _, header := range headers {
if header.ID == testPage.ID {
t.Errorf("Found header: %+v", header)
break
}
}
})
t.Run("Content", func(t *testing.T) {
page, err := FindPage(testPage.ID)
if err != nil {
t.Errorf("FindPage: %s", err)
return
}
content, err := page.Content()
if err != nil {
t.Errorf("page.Content: %s", err)
}
assertEquals(t, "page.Content()", content, "<h1>Returning Va’ynna’s Omni-Tool</h1>\n\n<p>Additional Content is additional</p>\n")
})
t.Run("WikiURL", func(t *testing.T) {
t.Skip("To be implemented")
})
t.Run("Delete", func(t *testing.T) {
err := testPage.Delete()
if err != nil {
t.Error(err)
}
})
t.Run("TeardownTags", func(t *testing.T) {
for _, tag := range testPageTags {
err := tag.Delete()
if err != nil {
t.Error(err)
continue
}
t.Logf("Deleted %+v", tag)
}
err := extraPageTag.Delete()
if err != nil {
t.Error(err)
}
t.Logf("Deleted %+v", extraPageTag)
})
}

tables.sql (5)

@@ -7,6 +7,8 @@ CREATE TABLE user (
INDEX(role)
);
INSERT INTO user (id, role) VALUES("test:Test", "restricted");
CREATE TABLE page (
`id` CHAR(16) NOT NULL PRIMARY KEY,
`name` VARCHAR(255) NOT NULL,
@@ -25,7 +27,6 @@ CREATE TABLE page (
`type` VARCHAR(16) NOT NULL,
`source` MEDIUMTEXT NOT NULL,
`cache` MEDIUMTEXT NOT NULL,
`background_url` VARCHAR(255),
@@ -44,7 +45,7 @@ CREATE TABLE tag (
CREATE TABLE page_tag (
`page_id` CHAR(16) NOT NULL,
`tag_id` CHAR(16) NOT NULL,
`tag_id` CHAR(24) NOT NULL,
`primary` BOOLEAN NOT NULL,
PRIMARY KEY (`page_id`, `tag_id`),
