Gisle Aune
7 years ago
4 changed files with 591 additions and 7 deletions
-
120model/header.go
-
200model/page.go
-
271model/page_test.go
-
5tables.sql
@ -0,0 +1,120 @@ |
|||||
|
package model |
||||
|
|
||||
|
import ( |
||||
|
"database/sql" |
||||
|
"errors" |
||||
|
"time" |
||||
|
|
||||
|
"git.aiterp.net/AiteRP/aitestory/server" |
||||
|
) |
||||
|
|
||||
|
// Header contains a subset of the page database
// table needed for the front page. It's a read-only
// model.
type Header struct {
	// ID is the page's primary key.
	ID string `json:"id"`
	// Name is the page title shown in listings.
	Name string `json:"name"`
	// Author names the page's writer.
	Author string `json:"author"`
	// Category is the page's category column.
	Category string `json:"category"`
	// FictionalDate is the in-universe date of the story.
	// NOTE(review): presumably only meaningful when Dated is true — confirm against callers.
	FictionalDate time.Time `json:"fictionalDate"`
	// PublishDate is when the page was first published.
	PublishDate time.Time `json:"publishDate"`
	// EditDate is when the page was last edited.
	EditDate time.Time `json:"editDate"`
	// Dated flags whether the fictional date should be displayed.
	Dated bool `json:"dated"`
	// PrimaryTag is the page's primary tag, joined via page_tag;
	// nil when the page has no primary tag (LEFT JOIN miss).
	PrimaryTag *Tag `json:"primaryTag"`
}
||||
|
|
||||
|
// ListHeaders grabs all the general pages from
|
||||
|
// the database to list them
|
||||
|
func ListHeaders() ([]Header, error) { |
||||
|
const query = ` |
||||
|
SELECT page.id,page.name,author,category,fictional_date,publish_date,edit_date,dated,tag.id,tag.type,tag.name |
||||
|
FROM page |
||||
|
LEFT JOIN page_tag ON (page.id = page_tag.page_id AND page_tag.primary = true) |
||||
|
LEFT JOIN tag ON (tag.id = page_tag.tag_id) |
||||
|
WHERE page.specific=false AND page.published=true AND page.unlisted=false; |
||||
|
` |
||||
|
|
||||
|
db := server.Main.DB |
||||
|
|
||||
|
rows, err := db.Query(query) |
||||
|
if err != nil { |
||||
|
return nil, err |
||||
|
} |
||||
|
defer rows.Close() |
||||
|
|
||||
|
results := make([]Header, 0, 64) |
||||
|
header := Header{} |
||||
|
for rows.Next() { |
||||
|
err := parseHeader(&header, rows) |
||||
|
if err != nil { |
||||
|
return nil, err |
||||
|
} |
||||
|
|
||||
|
results = append(results, header) |
||||
|
} |
||||
|
|
||||
|
return results, nil |
||||
|
} |
||||
|
|
||||
|
// ListHeadersByTag lists all headers that has the tag
|
||||
|
func ListHeadersByTag(tag *Tag) ([]Header, error) { |
||||
|
const query = ` |
||||
|
SELECT page.id,page.name,page.author,page.category,page.fictional_date,page.publish_date,page.edit_date,page.dated,tag.id,tag.type,tag.name |
||||
|
FROM page_tag |
||||
|
RIGHT JOIN page ON page.id = page_tag.page_id |
||||
|
LEFT JOIN (page_tag AS pt2) ON (page.id = pt2.page_id AND pt2.primary = true) |
||||
|
LEFT JOIN (tag AS tag) ON (tag.id = pt2.tag_id) |
||||
|
WHERE page_tag.tag_id=? |
||||
|
` |
||||
|
|
||||
|
if tag == nil { |
||||
|
return nil, errors.New("no tag") |
||||
|
} |
||||
|
|
||||
|
db := server.Main.DB |
||||
|
|
||||
|
rows, err := db.Query(query, tag.ID) |
||||
|
if err != nil { |
||||
|
return nil, err |
||||
|
} |
||||
|
defer rows.Close() |
||||
|
|
||||
|
results := make([]Header, 0, 64) |
||||
|
header := Header{} |
||||
|
for rows.Next() { |
||||
|
err := parseHeader(&header, rows) |
||||
|
if err != nil { |
||||
|
return nil, err |
||||
|
} |
||||
|
|
||||
|
results = append(results, header) |
||||
|
} |
||||
|
|
||||
|
return results, nil |
||||
|
} |
||||
|
|
||||
|
func parseHeader(header *Header, rows *sql.Rows) error { |
||||
|
var tagID, tagName, tagType string |
||||
|
var fictionalDate, publishDate, editDate string |
||||
|
var err error |
||||
|
|
||||
|
rows.Scan(&header.ID, &header.Name, &header.Author, &header.Category, &fictionalDate, &publishDate, &editDate, &header.Dated, &tagID, &tagType, &tagName) |
||||
|
if tagID != "" { |
||||
|
header.PrimaryTag = &Tag{tagID, tagName, tagType} |
||||
|
} |
||||
|
|
||||
|
header.FictionalDate, err = time.Parse("2006-01-02 15:04:05", fictionalDate) |
||||
|
if err != nil { |
||||
|
return err |
||||
|
} |
||||
|
header.PublishDate, err = time.Parse("2006-01-02 15:04:05", publishDate) |
||||
|
if err != nil { |
||||
|
return err |
||||
|
} |
||||
|
header.EditDate, err = time.Parse("2006-01-02 15:04:05", editDate) |
||||
|
if err != nil { |
||||
|
return err |
||||
|
} |
||||
|
|
||||
|
return nil |
||||
|
} |
Write
Preview
Loading…
Cancel
Save
Reference in new issue