
add log and post postgres repos.

master · Gisle Aune, 3 years ago · commit 6748ff8bc2
32 changed files (lines changed per file):

  1. cmd/rpdata-restore/main.go (35)
  2. database/postgres/changes.go (10)
  3. database/postgres/channel.go (2)
  4. database/postgres/chapters.go (2)
  5. database/postgres/character.go (2)
  6. database/postgres/comments.go (2)
  7. database/postgres/db.go (6)
  8. database/postgres/logs.go (246)
  9. database/postgres/migrations/20210327143154_create_table_log.sql (19)
  10. database/postgres/migrations/20210327143158_create_table_post.sql (20)
  11. database/postgres/migrations/20210327160328_create_index_post_search.sql (9)
  12. database/postgres/migrations/20210327160357_create_index_log_open.sql (9)
  13. database/postgres/migrations/20210327160406_create_index_log_characters.sql (9)
  14. database/postgres/migrations/20210327160409_create_index_log_channel_name.sql (9)
  15. database/postgres/migrations/20210327160414_create_index_log_event_name.sql (9)
  16. database/postgres/migrations/20210327160427_create_index_log_date.sql (9)
  17. database/postgres/migrations/20210327160502_create_index_post_kind.sql (9)
  18. database/postgres/posts.go (266)
  19. database/postgres/queries/changes.sql (16)
  20. database/postgres/queries/channels.sql (26)
  21. database/postgres/queries/chapters.sql (5)
  22. database/postgres/queries/characters.sql (2)
  23. database/postgres/queries/comments.sql (4)
  24. database/postgres/queries/counter.sql (6)
  25. database/postgres/queries/logs.sql (32)
  26. database/postgres/queries/posts.sql (60)
  27. database/postgres/queries/stories.sql (40)
  28. database/postgres/queries/tags.sql (43)
  29. database/postgres/stories.go (6)
  30. database/postgres/tags.go (13)
  31. models/log.go (20)
  32. models/post.go (15)

cmd/rpdata-restore/main.go (35)

@@ -25,6 +25,7 @@ var flagMechanism = flag.String("mechanism", "", "")
var flagInputFile = flag.String("infile", "dump.zip", "The file to read from.")
var flagIncludeKeys = flag.Bool("include-keys", false, "Whether to include the keys.")
var flagReplace = flag.Bool("replace", false, "Replace existing content")
var flagOnly = flag.String("only", "", "Only restore these models (e.g. \"log,post\")")
func main() {
flag.Parse()
@@ -40,6 +41,15 @@ func main() {
RestoreIDs: true,
}
var only map[string]bool
if *flagOnly != "" {
only = make(map[string]bool)
for _, part := range strings.Split(*flagOnly, ",") {
only[strings.Trim(part, "  \t\n\r")] = true
}
}
mongodb.DisableFixes = true
db, err := database.Init(cfg)
@@ -76,6 +86,10 @@ func main() {
log.Fatalln("Unrecognized file path:", file.Name)
}
if only != nil && !only[parts[1]] {
continue
}
reader, err := file.Open()
if err != nil {
log.Fatalln("Unrecognized file:", file.Name, err)
@@ -227,6 +241,10 @@ func main() {
log.Fatalln("Could not parse log:", parts[2], err)
}
if *flagReplace {
_ = db.Logs().Delete(ctx, logg)
}
_, err = db.Logs().Insert(ctx, logg)
if err != nil {
log.Fatalln("Could not insert log:", parts[2], err)
@@ -284,12 +302,17 @@ func main() {
_ = reader.Close()
}
for _, posts := range postMap {
_, err = db.Posts().InsertMany(ctx, posts...)
if err != nil {
log.Fatalln("Could not insert post for logId:", posts[0].LogID, err)
}
if only == nil || only["post"] {
for _, posts := range postMap {
log.Printf("Inserting %d posts for log %s...", len(posts), posts[0].LogID)
log.Printf("Inserted %d posts for log %s.", len(posts), posts[0].LogID)
_, err = db.Posts().InsertMany(ctx, posts...)
if err != nil {
log.Fatalln("Could not insert post for logId:", posts[0].LogID, err)
}
log.Println("\tDone!")
}
}
}
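
For context, the new -only flag restricts a restore run to a comma-separated set of model names and combines with the existing -infile and -replace flags. A hypothetical invocation (binary name and values are assumed, not taken from the commit) might look like: rpdata-restore -infile dump.zip -only log,post -replace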

database/postgres/changes.go (10)

@@ -27,7 +27,7 @@ func (r *changeRepository) Find(ctx context.Context, id string) (*models.Change,
}
func (r *changeRepository) List(ctx context.Context, filter models.ChangeFilter) ([]*models.Change, error) {
params := psqlcore.SelectChangesParams{LimitSize: 100}
params := psqlcore.SelectChangesParams{LimitSize: 0}
if filter.Keys != nil {
params.FilterKeys = true
keys := make([]string, len(filter.Keys))
@@ -48,12 +48,8 @@ func (r *changeRepository) List(ctx context.Context, filter models.ChangeFilter)
params.FilterLatestDate = true
params.LatestDate = filter.LatestDate.In(time.UTC)
}
if filter.Limit != nil {
if *filter.Limit <= 0 {
params.LimitSize = 10000
} else {
params.LimitSize = int32(*filter.Limit)
}
if filter.Limit != nil && *filter.Limit > 0 {
params.LimitSize = int32(*filter.Limit)
}
rows, err := psqlcore.New(r.db).SelectChanges(ctx, params)

database/postgres/channel.go (2)

@@ -23,7 +23,7 @@ func (r *channelRepository) Find(ctx context.Context, name string) (*models.Chan
func (r *channelRepository) List(ctx context.Context, filter models.ChannelFilter) ([]*models.Channel, error) {
params := psqlcore.SelectChannelsParams{
LimitSize: 1000,
LimitSize: 0,
}
if filter.Names != nil {

database/postgres/chapters.go (2)

@@ -25,7 +25,7 @@ func (r *chapterRepository) Find(ctx context.Context, id string) (*models.Chapte
func (r *chapterRepository) List(ctx context.Context, filter models.ChapterFilter) ([]*models.Chapter, error) {
params := psqlcore.SelectChaptersParams{
StoryID: "",
LimitSize: 10000,
LimitSize: 0,
}
if filter.StoryID != nil {
params.StoryID = *filter.StoryID

database/postgres/character.go (2)

@@ -67,7 +67,7 @@ func (r *characterRepository) FindName(ctx context.Context, name string) (*model
func (r *characterRepository) List(ctx context.Context, filter models.CharacterFilter) ([]*models.Character, error) {
params := psqlcore.SelectCharactersParams{
LimitSize: 10000,
LimitSize: 0,
}
if filter.IDs != nil {

database/postgres/comments.go (2)

@@ -25,7 +25,7 @@ func (r *commentRepository) Find(ctx context.Context, id string) (*models.Commen
func (r *commentRepository) List(ctx context.Context, filter models.CommentFilter) ([]*models.Comment, error) {
params := psqlcore.SelectCommentsParams{
ChapterID: "",
LimitSize: 10000,
LimitSize: 0,
}
if filter.ChapterID != nil {
params.ChapterID = *filter.ChapterID

database/postgres/db.go (6)

@@ -37,7 +37,7 @@ func Connect(cfg config.Database) (*DB, error) {
if err := q.EnsureCounter(timeout, "data_change_id"); err != nil {
return nil, err
}
if err := q.EnsureCounter(timeout, "data_log_short_id"); err != nil {
if err := q.EnsureCounter(timeout, "log_short_id"); err != nil {
return nil, err
}
@@ -69,11 +69,11 @@ func (d *DB) Tags() repositories.TagRepository {
}
func (d *DB) Logs() repositories.LogRepository {
panic("implement me")
return &logRepository{insertWithIDs: d.insertWithIDs, db: d.db}
}
func (d *DB) Posts() repositories.PostRepository {
panic("implement me")
return &postRepository{insertWithIDs: d.insertWithIDs, db: d.db}
}
func (d *DB) Stories() repositories.StoryRepository {
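
For orientation, here is a minimal usage sketch (not part of the commit) of reaching the new repositories through the wired-up *DB. It assumes the caller imports context, log, the database/postgres package (aliased postgres below), and the models package, and that cfg is a valid config.Database value.

	// Hypothetical sketch: connect, then list a few logs via the new repository.
	ctx := context.Background()
	db, err := postgres.Connect(cfg) // cfg is an assumed config.Database value
	if err != nil {
		log.Fatalln("connect:", err)
	}
	logs, err := db.Logs().List(ctx, models.LogFilter{Limit: 10})
	if err != nil {
		log.Fatalln("list logs:", err)
	}
	for _, l := range logs {
		log.Println(l.ShortID, l.Title)
	}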

database/postgres/logs.go (246)

@@ -0,0 +1,246 @@
package postgres
import (
"context"
"database/sql"
"fmt"
"git.aiterp.net/rpdata/api/database/postgres/psqlcore"
"git.aiterp.net/rpdata/api/internal/generate"
"git.aiterp.net/rpdata/api/models"
"strconv"
"time"
)
type logRepository struct {
insertWithIDs bool
db *sql.DB
}
func (r *logRepository) Find(ctx context.Context, id string) (*models.Log, error) {
log, err := psqlcore.New(r.db).SelectLog(ctx, id)
if err != nil {
return nil, err
}
return r.log(log), nil
}
func (r *logRepository) List(ctx context.Context, filter models.LogFilter) ([]*models.Log, error) {
q := psqlcore.New(r.db)
params := psqlcore.SelectLogsParams{
LimitSize: 0,
}
if filter.Search != nil {
ids, err := q.SelectLogIDsFromPostSearch(ctx, *filter.Search)
if err != nil {
return nil, err
}
params.FilterShortID = true
params.ShortIds = ids
}
if filter.Open != nil {
params.FilterOpen = true
params.Open = *filter.Open
}
if filter.Channels != nil {
params.FilterChannelName = true
params.ChannelNames = filter.Channels
}
if filter.Events != nil {
params.FilterEventName = true
params.EventNames = filter.Events
}
if filter.MinDate != nil {
params.FilterEarlistDate = true
params.EarliestDate = *filter.MinDate
}
if filter.MaxDate != nil {
params.FilterLastestDate = true
params.LatestDate = *filter.MaxDate
}
if filter.Limit > 0 {
params.LimitSize = int32(filter.Limit)
}
logs, err := q.SelectLogs(ctx, params)
if err != nil {
return nil, err
}
return r.logs(logs), nil
}
func (r *logRepository) Insert(ctx context.Context, log models.Log) (*models.Log, error) {
tx, err := r.db.BeginTx(ctx, nil)
if err != nil {
return nil, err
}
defer func() { _ = tx.Rollback() }()
q := psqlcore.New(tx)
if !r.insertWithIDs || log.ID == "" {
log.ID = generate.LogID(log)
}
if !r.insertWithIDs || log.ShortID == "" {
next, err := q.IncrementCounter(ctx, "log_short_id")
if err != nil {
return nil, err
}
log.ShortID = fmt.Sprintf("L%d", next)
} else {
n, err := strconv.Atoi(log.ShortID[1:])
if err != nil {
return nil, err
}
err = q.BumpCounter(ctx, psqlcore.BumpCounterParams{
ID: "log_short_id",
Value: int32(n),
})
if err != nil {
return nil, err
}
}
err = q.InsertLog(ctx, psqlcore.InsertLogParams{
ID: log.ID,
ShortID: log.ShortID,
Date: log.Date.UTC(),
ChannelName: log.ChannelName,
EventName: log.EventName,
Title: log.Title,
Description: log.Description,
Open: log.Open,
CharacterIds: log.CharacterIDs,
})
if err != nil {
return nil, err
}
err = tx.Commit()
if err != nil {
return nil, err
}
_ = r.updateTags(log)
return &log, nil
}
func (r *logRepository) Update(ctx context.Context, log models.Log, update models.LogUpdate) (*models.Log, error) {
log.ApplyUpdate(update)
err := psqlcore.New(r.db).UpdateLog(ctx, psqlcore.UpdateLogParams{
Title: log.Title,
EventName: log.EventName,
Description: log.Description,
Open: log.Open,
CharacterIds: log.CharacterIDs,
ID: log.ID,
})
if err != nil {
return nil, err
}
_ = r.updateTags(log)
return &log, nil
}
func (r *logRepository) Delete(ctx context.Context, log models.Log) error {
tx, err := r.db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() { _ = tx.Rollback() }()
q := psqlcore.New(tx)
err = q.DeleteLog(ctx, log.ID)
if err != nil {
return err
}
err = q.DeletePostsByLogShortID(ctx, log.ShortID)
if err != nil {
return err
}
return tx.Commit()
}
func (r *logRepository) log(log psqlcore.Log) *models.Log {
return &models.Log{
ID: log.ID,
ShortID: log.ShortID,
Date: log.Date,
ChannelName: log.ChannelName,
EventName: log.EventName,
Title: log.Title,
Description: log.Description,
Open: log.Open,
CharacterIDs: log.CharacterIds,
}
}
func (r *logRepository) logs(logs []psqlcore.Log) []*models.Log {
results := make([]*models.Log, 0, len(logs))
for _, log := range logs {
results = append(results, r.log(log))
}
return results
}
func (r *logRepository) updateTags(log models.Log) error {
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
defer cancel()
tx, err := r.db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() { _ = tx.Rollback() }()
q := psqlcore.New(tx)
err = q.ClearTagsByTarget(ctx, psqlcore.ClearTagsByTargetParams{TargetKind: "Log", TargetID: log.ID})
if err != nil {
return err
}
if len(log.CharacterIDs) > 0 {
err := q.SetCharacterTagsFromIDs(ctx, psqlcore.SetCharacterTagsFromIDsParams{
TargetKind: "log",
TargetID: log.ID,
CharacterIds: log.CharacterIDs,
})
if err != nil {
return err
}
}
err = q.SetLocationTagFromChannelName(ctx, psqlcore.SetLocationTagFromChannelNameParams{
TargetKind: "log",
TargetID: log.ID,
ChannelName: log.ChannelName,
})
if err != nil {
return err
}
if log.EventName != "" {
err := q.SetTag(ctx, psqlcore.SetTagParams{
Tag: "Event:" + log.EventName,
TargetKind: "Log",
TargetID: log.ID,
})
if err != nil {
return err
}
}
return tx.Commit()
}
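
A hedged usage sketch of the Insert path above (not from the commit; field values are made up and the time package is assumed to be imported): when the repository is not restoring explicit IDs, or none are supplied, it generates the document ID itself and claims the next "L<n>" short ID from the log_short_id counter inside the same transaction.

	// Hypothetical sketch: insert a log and read back the short ID it was assigned.
	newLog, err := db.Logs().Insert(ctx, models.Log{
		Date:        time.Now().UTC(),
		ChannelName: "#example-channel", // assumed example value
		Title:       "Example log",
		Open:        true,
	})
	if err != nil {
		log.Fatalln("insert log:", err)
	}
	log.Println("assigned short ID:", newLog.ShortID)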

database/postgres/migrations/20210327143154_create_table_log.sql (19)

@@ -0,0 +1,19 @@
-- +goose Up
-- +goose StatementBegin
CREATE TABLE log (
id TEXT NOT NULL PRIMARY KEY,
short_id TEXT NOT NULL UNIQUE,
character_ids TEXT[] NOT NULL,
date TIMESTAMP NOT NULL,
channel_name TEXT NOT NULL,
event_name TEXT NOT NULL,
title TEXT NOT NULL,
description TEXT NOT NULL,
open BOOLEAN NOT NULL
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE log;
-- +goose StatementEnd

database/postgres/migrations/20210327143158_create_table_post.sql (20)

@@ -0,0 +1,20 @@
-- +goose Up
-- +goose StatementBegin
CREATE TABLE log_post (
id TEXT NOT NULL PRIMARY KEY,
log_short_id TEXT NOT NULL,
time TIMESTAMP NOT NULL,
kind TEXT NOT NULL,
nick TEXT NOT NULL,
text TEXT NOT NULL,
position INT NOT NULL,
ts_vector TSVECTOR NOT NULL,
UNIQUE (log_short_id, position)
);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE log_post;
-- +goose StatementEnd

database/postgres/migrations/20210327160328_create_index_post_search.sql (9)

@@ -0,0 +1,9 @@
-- +goose Up
-- +goose StatementBegin
CREATE INDEX log_post_index_search ON log_post USING GIN (ts_vector);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP INDEX IF EXISTS log_post_index_search;
-- +goose StatementEnd

database/postgres/migrations/20210327160357_create_index_log_open.sql (9)

@@ -0,0 +1,9 @@
-- +goose Up
-- +goose StatementBegin
CREATE INDEX log_index_open ON log (open);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP INDEX IF EXISTS log_index_open
-- +goose StatementEnd

database/postgres/migrations/20210327160406_create_index_log_characters.sql (9)

@@ -0,0 +1,9 @@
-- +goose Up
-- +goose StatementBegin
CREATE INDEX log_index_character_ids on log USING GIN (character_ids);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP INDEX IF EXISTS log_index_character_ids;
-- +goose StatementEnd

database/postgres/migrations/20210327160409_create_index_log_channel_name.sql (9)

@@ -0,0 +1,9 @@
-- +goose Up
-- +goose StatementBegin
CREATE INDEX log_index_channel_name ON log (channel_name);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP INDEX IF EXISTS log_index_channel_name
-- +goose StatementEnd

database/postgres/migrations/20210327160414_create_index_log_event_name.sql (9)

@@ -0,0 +1,9 @@
-- +goose Up
-- +goose StatementBegin
CREATE INDEX log_index_event_name ON log (event_name);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP INDEX IF EXISTS log_index_event_name
-- +goose StatementEnd

database/postgres/migrations/20210327160427_create_index_log_date.sql (9)

@@ -0,0 +1,9 @@
-- +goose Up
-- +goose StatementBegin
CREATE INDEX log_index_date ON log (date);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP INDEX IF EXISTS log_index_date
-- +goose StatementEnd

database/postgres/migrations/20210327160502_create_index_post_kind.sql (9)

@@ -0,0 +1,9 @@
-- +goose Up
-- +goose StatementBegin
CREATE INDEX log_post_index_kind ON log_post (kind);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP INDEX IF EXISTS log_post_index_kind
-- +goose StatementEnd

database/postgres/posts.go (266)

@@ -0,0 +1,266 @@
package postgres
import (
"context"
"database/sql"
"errors"
"git.aiterp.net/rpdata/api/database/postgres/psqlcore"
"git.aiterp.net/rpdata/api/internal/generate"
"git.aiterp.net/rpdata/api/models"
"strings"
)
type postRepository struct {
insertWithIDs bool
db *sql.DB
}
func (r *postRepository) Find(ctx context.Context, id string) (*models.Post, error) {
post, err := psqlcore.New(r.db).SelectPost(ctx, id)
if err != nil {
return nil, err
}
return r.post(post), nil
}
func (r *postRepository) List(ctx context.Context, filter models.PostFilter) ([]*models.Post, error) {
q := psqlcore.New(r.db)
params := psqlcore.SelectPostsParams{LimitSize: 0}
if filter.LogID != nil {
params.FilterLogShortID = true
if !strings.HasPrefix(*filter.LogID, "L") {
log, err := q.SelectLog(ctx, *filter.LogID)
if err != nil {
return nil, err
}
params.LogShortID = log.ShortID
} else {
params.LogShortID = *filter.LogID
}
}
if filter.Kinds != nil {
params.FilterKinds = true
params.Kinds = filter.Kinds
}
if filter.Search != nil {
params.FilterSearch = true
params.Search = *filter.Search
}
if filter.Limit > 0 {
params.LimitSize = int32(filter.Limit)
}
posts, err := q.SelectPosts(ctx, params)
if err != nil {
return nil, err
}
return r.posts(posts), nil
}
func (r *postRepository) Insert(ctx context.Context, post models.Post) (*models.Post, error) {
if !r.insertWithIDs || len(post.ID) < 8 {
post.ID = generate.PostID()
}
position, err := psqlcore.New(r.db).InsertPost(ctx, psqlcore.InsertPostParams{
ID: post.ID,
LogShortID: post.LogID,
Time: post.Time.UTC(),
Kind: post.Kind,
Nick: post.Nick,
Text: post.Text,
})
if err != nil {
return nil, err
}
post.Position = int(position)
return &post, nil
}
func (r *postRepository) InsertMany(ctx context.Context, posts ...*models.Post) ([]*models.Post, error) {
allowedLogID := ""
for _, post := range posts {
if allowedLogID == "" {
allowedLogID = post.LogID
} else if allowedLogID != post.LogID {
return nil, errors.New("cannot insert multiple posts with different log IDs")
}
}
params := psqlcore.InsertPostsParams{
LogShortID: posts[0].LogID,
}
for _, post := range posts {
if !r.insertWithIDs || len(post.ID) < 8 {
post.ID = generate.PostID()
}
params.Ids = append(params.Ids, post.ID)
params.Kinds = append(params.Kinds, post.Kind)
params.Nicks = append(params.Nicks, post.Nick)
params.Offsets = append(params.Offsets, int32(len(params.Offsets)+1))
params.Times = append(params.Times, post.Time.UTC())
params.Texts = append(params.Texts, post.Text)
}
offset, err := psqlcore.New(r.db).InsertPosts(ctx, params)
if err != nil {
return nil, err
}
for i, post := range posts {
post.Position = int(offset) + i
}
return posts, nil
}
func (r *postRepository) Update(ctx context.Context, post models.Post, update models.PostUpdate) (*models.Post, error) {
post.ApplyUpdate(update)
err := psqlcore.New(r.db).UpdatePost(ctx, psqlcore.UpdatePostParams{
Time: post.Time,
Kind: post.Kind,
Nick: post.Nick,
Text: post.Text,
ID: post.ID,
})
if err != nil {
return nil, err
}
return &post, nil
}
func (r *postRepository) Move(ctx context.Context, post models.Post, position int) ([]*models.Post, error) {
if position == post.Position {
return r.List(ctx, models.PostFilter{LogID: &post.LogID})
}
tx, err := r.db.BeginTx(ctx, nil)
if err != nil {
return nil, err
}
defer func() { _ = tx.Rollback() }()
_, err = tx.Exec("LOCK TABLE log_post IN SHARE UPDATE EXCLUSIVE MODE")
if err != nil {
return nil, err
}
var lowest, highest int32
q := psqlcore.New(tx)
err = q.MovePost(ctx, psqlcore.MovePostParams{ID: post.ID, Position: -1})
if err != nil {
return nil, err
}
if position > post.Position {
lowest = int32(post.Position)
highest = int32(position)
err := q.ShiftPostsBetween(ctx, psqlcore.ShiftPostsBetweenParams{
ShiftOffset: -1,
LogShortID: post.LogID,
FromPosition: lowest + 1,
ToPosition: highest,
})
if err != nil {
return nil, err
}
} else {
lowest = int32(position)
highest = int32(post.Position)
err := q.ShiftPostsBetween(ctx, psqlcore.ShiftPostsBetweenParams{
ShiftOffset: 1,
LogShortID: post.LogID,
FromPosition: lowest,
ToPosition: highest - 1,
})
if err != nil {
return nil, err
}
}
err = q.MovePost(ctx, psqlcore.MovePostParams{ID: post.ID, Position: int32(position)})
if err != nil {
return nil, err
}
posts, err := q.SelectPostsByPositionRange(ctx, psqlcore.SelectPostsByPositionRangeParams{
LogShortID: post.LogID,
FromPosition: lowest,
ToPosition: highest,
})
if err != nil {
return nil, err
}
err = tx.Commit()
if err != nil {
return nil, err
}
return r.posts(posts), nil
}
func (r *postRepository) Delete(ctx context.Context, post models.Post) error {
tx, err := r.db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() { _ = tx.Rollback() }()
q := psqlcore.New(tx)
_, err = tx.Exec("LOCK TABLE log_post IN SHARE UPDATE EXCLUSIVE MODE")
if err != nil {
return err
}
err = q.DeletePost(ctx, post.ID)
if err != nil {
return err
}
err = q.ShiftPostsAfter(ctx, psqlcore.ShiftPostsAfterParams{
ShiftOffset: -1,
LogShortID: post.LogID,
FromPosition: int32(post.Position + 1),
})
if err != nil {
return err
}
return tx.Commit()
}
func (r *postRepository) post(post psqlcore.LogPost) *models.Post {
return &models.Post{
ID: post.ID,
LogID: post.LogShortID,
Time: post.Time,
Kind: post.Kind,
Nick: post.Nick,
Text: post.Text,
Position: int(post.Position),
}
}
func (r *postRepository) posts(posts []psqlcore.LogPost) []*models.Post {
results := make([]*models.Post, 0, len(posts))
for _, post := range posts {
results = append(results, r.post(post))
}
return results
}
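
To make the shifting in Move concrete: moving a post from position 6 to position 2 within one log first parks the moved row at position -1, shifts the rows currently at positions 2 through 5 one slot later (position + 1), and then writes the moved row back at position 2; moving in the other direction shifts the in-between rows one slot earlier instead. A hedged usage sketch, assuming post was fetched earlier as a *models.Post:

	// Hypothetical sketch: reorder a post and see how many rows had positions rewritten.
	reordered, err := db.Posts().Move(ctx, *post, 2)
	if err != nil {
		log.Fatalln("move post:", err)
	}
	log.Println("positions rewritten for", len(reordered), "posts")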

database/postgres/queries/changes.sql (16)

@@ -3,19 +3,19 @@ SELECT * FROM data_change WHERE id = $1 LIMIT 1;
-- name: SelectChanges :many
SELECT * FROM data_change
WHERE (sqlc.arg(filter_keys)::bool = false OR keys && (sqlc.arg(keys)::text[]))
AND (sqlc.arg(filter_earliest_date)::bool = false OR date >= sqlc.arg(earliest_date)::timestamp)
AND (sqlc.arg(filter_latest_date)::bool = false OR date <= sqlc.arg(latest_date)::timestamp)
AND (sqlc.arg(filter_author)::bool = false OR author = sqlc.arg(author)::text)
WHERE (@filter_keys::bool = false OR keys && (@keys::text[]))
AND (@filter_earliest_date::bool = false OR date >= @earliest_date::timestamp)
AND (@filter_latest_date::bool = false OR date <= @latest_date::timestamp)
AND (@filter_author::bool = false OR author = @author::text)
ORDER BY date DESC
LIMIT sqlc.arg(limit_size)::int;
LIMIT NULLIF(@limit_size::INT, 0);
-- name: InsertChange :exec
INSERT INTO data_change (id, model, op, author, listed, date, keys, objects)
VALUES (
sqlc.arg(id)::text, sqlc.arg(model)::text, sqlc.arg(op)::text, sqlc.arg(author)::text,
sqlc.arg(listed)::boolean, sqlc.arg(date)::timestamp, sqlc.arg(keys)::text[],
sqlc.arg(objects)::jsonb
@id::text, @model::text, @op::text, @author::text,
@listed::boolean, @date::timestamp, @keys::text[],
@objects::jsonb
);
-- name: DeleteChange :exec
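
Two patterns recur throughout these query changes. First, sqlc.arg(name) annotations are shortened to the equivalent @name form that sqlc also accepts. Second, hard-coded LIMIT sqlc.arg(limit_size)::int clauses become LIMIT NULLIF(@limit_size::INT, 0): NULLIF(x, 0) yields NULL when x is 0, and in PostgreSQL LIMIT NULL means no limit, so a limit_size of 0 now returns every row, which is why the repository code above drops its old fallback values (100, 1000, 10000) to 0.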

database/postgres/queries/channels.sql (26)

@@ -4,27 +4,27 @@ SELECT * FROM data_channel WHERE name = $1 LIMIT 1;
-- name: InsertChannel :exec
INSERT INTO data_channel (name, logged, hub, event_name, location_name)
VALUES (
sqlc.arg(name)::text,
sqlc.arg(logged)::boolean, sqlc.arg(hub)::boolean,
sqlc.arg(event_name)::text, sqlc.arg(location_name)::text
@name::text,
@logged::boolean, @hub::boolean,
@event_name::text, @location_name::text
);
-- name: SelectChannels :many
SELECT * FROM data_channel
WHERE (sqlc.arg(filter_name)::bool = false OR name = ANY(sqlc.arg(names)::text[]))
AND (sqlc.arg(filter_logged)::bool = false OR logged = sqlc.arg(logged))
AND (sqlc.arg(filter_event_name)::bool = false OR event_name = sqlc.arg(event_name))
AND (sqlc.arg(filter_location_name)::bool = false OR location_name = sqlc.arg(location_name))
WHERE (@filter_name::bool = false OR name = ANY(@names::text[]))
AND (@filter_logged::bool = false OR logged = @logged)
AND (@filter_event_name::bool = false OR event_name = @event_name)
AND (@filter_location_name::bool = false OR location_name = @location_name)
ORDER BY name
LIMIT sqlc.arg(limit_size)::int;
LIMIT NULLIF(@limit_size::INT, 0);
-- name: UpdateChannel :exec
UPDATE data_channel
SET logged=sqlc.arg(logged)::boolean,
hub=sqlc.arg(hub)::boolean,
event_name=sqlc.arg(event_name)::text,
location_name=sqlc.arg(location_name)::text
WHERE name=sqlc.arg(name)::text;
SET logged = @logged::boolean,
hub = @hub::boolean,
event_name = @event_name::text,
location_name = @location_name::text
WHERE name = @name::text;
-- name: DeleteChannel :exec
DELETE FROM data_channel WHERE name=$1;

database/postgres/queries/chapters.sql (5)

@@ -2,7 +2,10 @@
SELECT * FROM story_chapter WHERE id=$1::TEXT LIMIT 1;
-- name: SelectChapters :many
SELECT * FROM story_chapter WHERE (sqlx.arg(story_id)::TEXT == '' OR story_id = @story_id::TEXT) ORDER BY created_date LIMIT @limit_size;
SELECT * FROM story_chapter
WHERE (sqlx.arg(story_id)::TEXT == '' OR story_id = @story_id::TEXT)
ORDER BY created_date
LIMIT NULLIF(@limit_size::INT, 0);
-- name: InsertChapter :exec
INSERT INTO story_chapter (id, story_id, title, author, source, created_date, fictional_date, edited_date, comment_mode, comments_locked)

database/postgres/queries/characters.sql (2)

@@ -14,7 +14,7 @@ WHERE (@filter_id::bool = false OR id = ANY(@ids::text[]))
AND (@filter_nick::bool = false OR nicks && (@nicks::text[]))
AND (@filter_author::bool = false OR author = @author::text)
AND (@filter_search::bool = false OR "ts_vector" @@ to_tsquery(@search::text))
LIMIT @limit_size::int;
LIMIT NULLIF(@limit_size::INT, 0);
-- name: InsertCharacter :exec
INSERT INTO data_character (id, nicks, name, short_name, author, description, ts_vector) VALUES (

database/postgres/queries/comments.sql (4)

@@ -19,7 +19,9 @@ SET subject = @subject,
WHERE id = @id;
-- name: SelectComments :many
SELECT * FROM story_comment WHERE (@chapter_id = '' OR chapter_id = @chapter_id) LIMIT @limit_size;
SELECT * FROM story_comment
WHERE (@chapter_id = '' OR chapter_id = @chapter_id)
LIMIT NULLIF(@limit_size::INT, 0);
-- name: DeleteComment :exec
DELETE FROM story_comment WHERE id = $1;

database/postgres/queries/counter.sql (6)

@@ -1,8 +1,8 @@
-- name: EnsureCounter :exec
INSERT INTO core_counter (id, value) VALUES ($1::text, 0) ON CONFLICT DO NOTHING;
INSERT INTO core_counter (id, value) VALUES (@id::text, 0) ON CONFLICT DO NOTHING;
-- name: IncrementCounter :one
UPDATE core_counter SET value = value + 1 WHERE id = $1::text RETURNING value::int;
UPDATE core_counter SET value = value + 1 WHERE id = @id::text RETURNING value::int;
-- name: BumpCounter :exec
UPDATE core_counter SET value = value + 1 WHERE id = sqlc.arg(id)::text AND value <= sqlc.arg(value)::int;
UPDATE core_counter SET value = value + 1 WHERE id = @id::text AND value <= @value::int;

database/postgres/queries/logs.sql (32)

@@ -0,0 +1,32 @@
-- name: SelectLog :one
SELECT * FROM log WHERE id=$1 OR short_id=$1 LIMIT 1;
-- name: SelectLogs :many
SELECT * FROM log
WHERE (@filter_short_id::BOOL = false OR short_id = ANY(@short_ids::TEXT[]))
AND (@filter_character_id::BOOL = false OR character_ids && (@character_ids::TEXT[]))
AND (@filter_channel_name::BOOL = false OR channel_name = ANY(@channel_names::TEXT[]))
AND (@filter_event_name::BOOL = false OR event_name = ANY(@event_names::TEXT[]))
AND (@filter_open::BOOL = false OR open = @open::BOOL)
AND (@filter_earlist_date::BOOL = false OR date >= @earliest_date::TIMESTAMP)
AND (@filter_lastest_date::BOOL = false OR date <= @latest_date::TIMESTAMP)
ORDER BY date
LIMIT NULLIF(@limit_size::INT, 0);
-- name: InsertLog :exec
INSERT INTO log (id, short_id, character_ids, date, channel_name, event_name, title, description, open)
VALUES (
@id, @short_id, @character_ids, @date, @channel_name, @event_name, @title, @description, @open
);
-- name: UpdateLog :exec
UPDATE log
SET title = @title,
event_name = @event_name,
description = @description,
open = @open,
character_ids = @character_ids
WHERE id = @id;
-- name: DeleteLog :exec
DELETE FROM log WHERE id=$1;

database/postgres/queries/posts.sql (60)

@@ -0,0 +1,60 @@
-- name: SelectPost :one
SELECT * FROM log_post WHERE id=$1 LIMIT 1;
-- name: SelectLogIDsFromPostSearch :many
SELECT DISTINCT(log_short_id) FROM log_post WHERE "ts_vector" @@ to_tsquery(@search::TEXT);
-- name: SelectPostsByPositionRange :many
SELECT * FROM log_post WHERE log_short_id = @log_short_id AND position >= @from_position AND position <= @to_position;
-- name: SelectPosts :many
SELECT * FROM log_post
WHERE (@filter_ids::BOOL = false OR id = ANY(@ids::TEXT[]))
AND (@filter_kinds::BOOL = false OR kind = ANY(@kinds::TEXT[]))
AND (@filter_log_short_id::BOOL = false OR log_short_id = @log_short_id)
AND (@filter_search::BOOL = false OR "ts_vector" @@ to_tsquery(@search::TEXT))
ORDER BY log_short_id, position
LIMIT NULLIF(@limit_size::INT, 0);
-- name: InsertPost :one
INSERT INTO log_post (id, log_short_id, time, kind, nick, text, position, ts_vector)
SELECT @id, @log_short_id, @time, @kind, @nick, @text, COALESCE(MAX(position), 0)+1, to_tsvector(@nick || ' ' || @text)
FROM log_post
WHERE log_short_id=@log_short_id
RETURNING position;
-- name: InsertPosts :one
INSERT INTO log_post (id, log_short_id, time, kind, nick, text, position, ts_vector)
SELECT UNNEST(@ids::TEXT[]), @log_short_id, UNNEST(@times::TIMESTAMP[]), UNNEST(@kinds::TEXT[]),
UNNEST(@nicks::TEXT[]), UNNEST(@texts::TEXT[]), COALESCE(MAX(position), 1) + UNNEST(@offsets::INT[]),
to_tsvector(UNNEST(@nicks::TEXT[]) || ' ' || UNNEST(@texts::TEXT[]))
FROM log_post
WHERE log_short_id = @log_short_id
RETURNING position;
-- name: UpdatePost :exec
UPDATE log_post
SET time = @time,
kind = @kind,
nick = @nick,
text = @text,
ts_vector = to_tsvector(@nick || ' ' || @text)
WHERE id = @id;
-- name: MovePost :exec
UPDATE log_post SET position = @position WHERE id = @id;
-- name: ShiftPostsBetween :exec
UPDATE log_post SET position = position + @shift_offset::INT WHERE log_short_id = @log_short_id AND position >= @from_position AND position <= @to_position;
-- name: ShiftPostsAfter :exec
UPDATE log_post SET position = position + @shift_offset::INT WHERE log_short_id = @log_short_id AND position >= @from_position;
-- name: ShiftPostsBefore :exec
UPDATE log_post SET position = position + @shift_offset::INT WHERE log_short_id = @log_short_id AND position <= @to_position;
-- name: DeletePost :exec
DELETE FROM log_post WHERE id = @id;
-- name: DeletePostsByLogShortID :exec
DELETE FROM log_post WHERE log_short_id = @log_short_id;

database/postgres/queries/stories.sql (40)

@@ -3,35 +3,35 @@ SELECT * FROM story WHERE id = $1 LIMIT 1;
-- name: SelectStories :many
SELECT * FROM story
WHERE (sqlc.arg(filter_id)::bool = false OR id = ANY(sqlc.arg(ids)::text[]))
AND (sqlc.arg(filter_author)::bool = false OR name = sqlc.arg(author)::text)
AND (sqlc.arg(filter_earlist_fictional_date)::bool = false OR fictional_date >= sqlc.arg(earliest_fictional_date)::timestamp)
AND (sqlc.arg(filter_lastest_fictional_date)::bool = false OR fictional_date <= sqlc.arg(latest_fictional_date)::timestamp)
AND (sqlc.arg(filter_category)::bool = false OR category = sqlc.arg(category)::text)
AND (sqlc.arg(filter_open)::bool = false OR open = sqlc.arg(open)::bool)
AND (sqlc.arg(filter_unlisted)::bool = false OR unlisted = sqlc.arg(unlisted)::bool)
WHERE (@filter_id::bool = false OR id = ANY(@ids::text[]))
AND (@filter_author::bool = false OR name = @author::text)
AND (@filter_earlist_fictional_date::bool = false OR fictional_date >= @earliest_fictional_date::timestamp)
AND (@filter_lastest_fictional_date::bool = false OR fictional_date <= @latest_fictional_date::timestamp)
AND (@filter_category::bool = false OR category = @category::text)
AND (@filter_open::bool = false OR open = @open::bool)
AND (@filter_unlisted::bool = false OR unlisted = @unlisted::bool)
ORDER BY updated_date
LIMIT sqlc.arg(limit_size)::int;
LIMIT NULLIF(@limit_size::INT, 0);
-- name: InsertStory :exec
INSERT INTO story (id, author, name, category, open, listed, sort_by_fictional_date, created_date, fictional_date, updated_date)
VALUES (
sqlc.arg(id)::text, sqlc.arg(author)::text, sqlc.arg(name)::text, sqlc.arg(category)::text,
sqlc.arg(open)::boolean, sqlc.arg(listed)::boolean, sqlc.arg(sort_by_fictional_date)::boolean,
sqlc.arg(created_date)::timestamp, sqlc.arg(fictional_date)::timestamp, sqlc.arg(updated_date)::timestamp
@id::text, @author::text, @name::text, @category::text,
@open::boolean, @listed::boolean, @sort_by_fictional_date::boolean,
@created_date::timestamp, @fictional_date::timestamp, @updated_date::timestamp
);
-- name: UpdateStory :exec
UPDATE story
SET name=sqlc.arg(name),
category=sqlc.arg(category),
author=sqlc.arg(author),
open=sqlc.arg(open),
listed=sqlc.arg(listed),
fictional_date=sqlc.arg(fictional_date),
updated_date=sqlc.arg(updated_date),
sort_by_fictional_date=sqlc.arg(sort_by_fictional_date)
WHERE id=sqlc.arg(id);
SET name = @name,
category = @category,
author = @author,
open = @open,
listed = @listed,
fictional_date = @fictional_date,
updated_date = @updated_date,
sort_by_fictional_date = @sort_by_fictional_date
WHERE id = @id;
-- name: DeleteStory :exec
DELETE FROM story WHERE id=$1;

database/postgres/queries/tags.sql (43)

@@ -1,31 +1,44 @@
-- name: SetTag :exec
INSERT INTO common_tag (tag, target_kind, target_id)
VALUES (
sqlc.arg(tag)::TEXT, sqlc.arg(target_kind)::TEXT, sqlc.arg(target_id)::TEXT
@tag::TEXT, @target_kind::TEXT, @target_id::TEXT
)
ON CONFLICT DO NOTHING;
-- name: SetTags :exec
INSERT INTO common_tag (tag, target_kind, target_id)
SELECT unnest(sqlc.arg(tags)::TEXT[]), sqlc.arg(target_kind)::TEXT as target_kind, sqlc.arg(target_id)::TEXT as target_id
SELECT unnest(@tags::TEXT[]), @target_kind::TEXT as target_kind, @target_id::TEXT as target_id
ON CONFLICT DO NOTHING;
-- name: ClearTag :exec
DELETE FROM common_tag
WHERE tag=sqlc.arg(tag)::TEXT
AND target_kind=sqlc.arg(target_kind)::TEXT
AND target_id=sqlc.arg(target_id)::TEXT;
WHERE tag = @tag::TEXT
AND target_kind = @target_kind::TEXT
AND target_id = @target_id::TEXT;
-- name: ClearTagsByTargetLike :exec
DELETE FROM common_tag
WHERE target_kind=sqlc.arg(target_kind)::TEXT
AND target_id=sqlc.arg(target_id)::TEXT
AND tag LIKE sqlc.arg(tag_like)::TEXT;
WHERE target_kind = @target_kind::TEXT
AND target_id = @target_id::TEXT
AND tag LIKE @tag_like::TEXT;
-- name: ClearTagsByTarget :exec
DELETE FROM common_tag
WHERE target_kind=sqlc.arg(target_kind)::TEXT
AND target_id=sqlc.arg(target_id)::TEXT;
WHERE target_kind = @target_kind::TEXT
AND target_id = @target_id::TEXT;
-- name: SetCharacterTagsFromIDs :exec
INSERT INTO common_tag
SELECT 'Character:'||name AS tag, @target_kind::TEXT as target_kind, @target_id::TEXT as target_id
FROM data_character WHERE id = ANY(@character_ids::TEXT[])
ON CONFLICT DO NOTHING;
-- name: SetLocationTagFromChannelName :exec
INSERT INTO common_tag
SELECT 'Location:'||location_name AS tag, @target_kind::TEXT as target_kind, @target_id::TEXT as target_id
FROM data_channel
WHERE name = @channel_name AND location_name != ''
ON CONFLICT DO NOTHING;
-- name: SelectDistinctTags :many
SELECT DISTINCT tag FROM common_tag ORDER BY tag;
@@ -34,16 +47,16 @@ SELECT DISTINCT tag FROM common_tag ORDER BY tag;
SELECT DISTINCT tag FROM common_tag WHERE tag LIKE $1::text ORDER BY tag;
-- name: SelectTagsByTag :many
SELECT * FROM common_tag WHERE tag = sqlc.arg(tag_name)::text ORDER BY tag;
SELECT * FROM common_tag WHERE tag = @tag_name::text ORDER BY tag;
-- name: SelectTagsByTags :many
SELECT * FROM common_tag WHERE tag = ANY(sqlc.arg(tag_names)::text[]) ORDER BY tag;
SELECT * FROM common_tag WHERE tag = ANY(@tag_names::text[]) ORDER BY tag;
-- name: SelectTargetsByTags :many
SELECT DISTINCT(target_id) FROM common_tag WHERE tag = ANY(sqlc.arg(tag_names)::text[]) AND target_kind = sqlc.arg(target_kind)::text ORDER BY tag;
SELECT DISTINCT(target_id) FROM common_tag WHERE tag = ANY(@tag_names::text[]) AND target_kind = @target_kind::text ORDER BY tag;
-- name: SelectTagsByTarget :many
SELECT * FROM common_tag WHERE target_kind = sqlc.arg(target_kind) AND target_id = sqlc.arg(target_id)::text ORDER BY tag;
SELECT * FROM common_tag WHERE target_kind = @target_kind AND target_id = @target_id::text ORDER BY tag;
-- name: SelectTagsByTargets :many
SELECT * FROM common_tag WHERE target_kind = sqlc.arg(target_kind) AND target_id = ANY(sqlc.arg(target_ids)::text[]) ORDER BY tag;
SELECT * FROM common_tag WHERE target_kind = @target_kind AND target_id = ANY(@target_ids::text[]) ORDER BY tag;

database/postgres/stories.go (6)

@@ -33,7 +33,7 @@ func (r *storyRepository) Find(ctx context.Context, id string) (*models.Story, e
func (r *storyRepository) List(ctx context.Context, filter models.StoryFilter) ([]*models.Story, error) {
q := psqlcore.New(r.db)
params := psqlcore.SelectStoriesParams{LimitSize: 100}
params := psqlcore.SelectStoriesParams{LimitSize: 0}
if len(filter.Tags) > 0 {
targets, err := q.SelectTargetsByTags(ctx, psqlcore.SelectTargetsByTagsParams{
@@ -75,8 +75,8 @@ func (r *storyRepository) List(ctx context.Context, filter models.StoryFilter) (
params.FilterUnlisted = true
params.Unlisted = *filter.Unlisted
}
if filter.Limit <= 0 {
params.LimitSize = 1000
if filter.Limit > 0 {
params.LimitSize = int32(filter.Limit)
}
stories, err := q.SelectStories(ctx, params)

database/postgres/tags.go (13)

@@ -41,16 +41,3 @@ func (r *tagRepository) List(ctx context.Context, filter models.TagFilter) ([]*m
return models.DecodeTagArray(tags)
}
func modelsTagsFromDataTags(tags []psqlcore.CommonTag) ([]*models.Tag, error) {
results := make([]*models.Tag, len(tags))
for i, tag := range tags {
results[i] = &models.Tag{}
err := results[i].Decode(tag.Tag)
if err != nil {
return nil, err
}
}
return results, nil
}

models/log.go (20)

@@ -15,6 +15,24 @@ type Log struct {
CharacterIDs []string `bson:"characterIds"`
}
func (log *Log) ApplyUpdate(update LogUpdate) {
if update.Open != nil {
log.Open = *update.Open
}
if update.EventName != nil {
log.EventName = *update.EventName
}
if update.Title != nil {
log.Title = *update.Title
}
if update.Description != nil {
log.Description = *update.Description
}
if update.CharacterIDs != nil {
log.CharacterIDs = update.CharacterIDs
}
}
// A LogSuggestion is a suggestion for a log.
type LogSuggestion struct {
Log *Log
@@ -31,13 +49,13 @@ func (*Log) IsChangeObject() {
// A LogFilter is a filter that can be used to list logs.
type LogFilter struct {
Search *string
Open *bool
Characters []string
Channels []string
Events []string
MinDate *time.Time
MaxDate *time.Time
Search *string
Limit int
}
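
A hedged sketch of how the new ApplyUpdate would be called (not from the commit; l is an assumed *models.Log, and it assumes LogUpdate mirrors the pointer fields read above, e.g. Title *string and Open *bool):

	// Hypothetical sketch: only the non-nil fields of the update are applied.
	title := "Renamed log"
	open := false
	l.ApplyUpdate(models.LogUpdate{Title: &title, Open: &open})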

models/post.go (15)

@@ -13,6 +13,21 @@ type Post struct {
Position int `bson:"position"`
}
func (post *Post) ApplyUpdate(update PostUpdate) {
if update.Time != nil {
post.Time = *update.Time
}
if update.Kind != nil {
post.Kind = *update.Kind
}
if update.Nick != nil {
post.Nick = *update.Nick
}
if update.Text != nil {
post.Text = *update.Text
}
}
// IsChangeObject is an interface implementation to identify it as a valid
// ChangeObject in GQL.
func (*Post) IsChangeObject() {
