feat!: wip pronoun entry rework
This commit is contained in:
parent
68939f5e10
commit
7669595586
12 changed files with 1348 additions and 93 deletions
|
@ -7,9 +7,12 @@ import (
|
||||||
"net/url"
|
"net/url"
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
|
"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
|
||||||
"codeberg.org/u1f320/pronouns.cc/backend/log"
|
"codeberg.org/u1f320/pronouns.cc/backend/log"
|
||||||
"emperror.dev/errors"
|
"emperror.dev/errors"
|
||||||
"github.com/Masterminds/squirrel"
|
"github.com/Masterminds/squirrel"
|
||||||
|
"github.com/jackc/pgconn"
|
||||||
|
"github.com/jackc/pgx/v4"
|
||||||
"github.com/jackc/pgx/v4/pgxpool"
|
"github.com/jackc/pgx/v4/pgxpool"
|
||||||
"github.com/mediocregopher/radix/v4"
|
"github.com/mediocregopher/radix/v4"
|
||||||
"github.com/minio/minio-go/v7"
|
"github.com/minio/minio-go/v7"
|
||||||
|
@ -20,6 +23,12 @@ var sq = squirrel.StatementBuilder.PlaceholderFormat(squirrel.Dollar)
|
||||||
|
|
||||||
const ErrNothingToUpdate = errors.Sentinel("nothing to update")
|
const ErrNothingToUpdate = errors.Sentinel("nothing to update")
|
||||||
|
|
||||||
|
type querier interface {
|
||||||
|
Query(ctx context.Context, sql string, args ...interface{}) (pgx.Rows, error)
|
||||||
|
QueryRow(ctx context.Context, sql string, args ...interface{}) pgx.Row
|
||||||
|
Exec(ctx context.Context, sql string, arguments ...interface{}) (pgconn.CommandTag, error)
|
||||||
|
}
|
||||||
|
|
||||||
type DB struct {
|
type DB struct {
|
||||||
*pgxpool.Pool
|
*pgxpool.Pool
|
||||||
|
|
||||||
|
@ -28,6 +37,8 @@ type DB struct {
|
||||||
minio *minio.Client
|
minio *minio.Client
|
||||||
minioBucket string
|
minioBucket string
|
||||||
baseURL *url.URL
|
baseURL *url.URL
|
||||||
|
|
||||||
|
q queries.Querier
|
||||||
}
|
}
|
||||||
|
|
||||||
func New() (*DB, error) {
|
func New() (*DB, error) {
|
||||||
|
@ -67,6 +78,8 @@ func New() (*DB, error) {
|
||||||
minio: minioClient,
|
minio: minioClient,
|
||||||
minioBucket: os.Getenv("MINIO_BUCKET"),
|
minioBucket: os.Getenv("MINIO_BUCKET"),
|
||||||
baseURL: baseURL,
|
baseURL: baseURL,
|
||||||
|
|
||||||
|
q: queries.NewQuerier(pool),
|
||||||
}
|
}
|
||||||
|
|
||||||
return db, nil
|
return db, nil
|
||||||
|
|
68
backend/db/entries.go
Normal file
68
backend/db/entries.go
Normal file
|
@ -0,0 +1,68 @@
|
||||||
|
package db
|
||||||
|
|
||||||
|
import "codeberg.org/u1f320/pronouns.cc/backend/db/queries"
|
||||||
|
|
||||||
|
type WordStatus int
|
||||||
|
|
||||||
|
const (
|
||||||
|
StatusUnknown WordStatus = 0
|
||||||
|
StatusFavourite WordStatus = 1
|
||||||
|
StatusOkay WordStatus = 2
|
||||||
|
StatusJokingly WordStatus = 3
|
||||||
|
StatusFriendsOnly WordStatus = 4
|
||||||
|
StatusAvoid WordStatus = 5
|
||||||
|
wordStatusMax WordStatus = 6
|
||||||
|
)
|
||||||
|
|
||||||
|
type FieldEntry struct {
|
||||||
|
Value string `json:"value"`
|
||||||
|
Status WordStatus `json:"status"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PronounEntry struct {
|
||||||
|
Pronouns string `json:"pronouns"`
|
||||||
|
DisplayText *string `json:"display_text"`
|
||||||
|
Status WordStatus `json:"status"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func dbEntriesToFieldEntries(entries []queries.FieldEntry) []FieldEntry {
|
||||||
|
out := make([]FieldEntry, len(entries))
|
||||||
|
for i := range entries {
|
||||||
|
out[i] = FieldEntry{
|
||||||
|
*entries[i].Value, WordStatus(*entries[i].Status),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func dbPronounEntriesToPronounEntries(entries []queries.PronounEntry) []PronounEntry {
|
||||||
|
out := make([]PronounEntry, len(entries))
|
||||||
|
for i := range entries {
|
||||||
|
out[i] = PronounEntry{
|
||||||
|
*entries[i].Value, entries[i].DisplayValue, WordStatus(*entries[i].Status),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func entriesToDBEntries(entries []FieldEntry) []queries.FieldEntry {
|
||||||
|
out := make([]queries.FieldEntry, len(entries))
|
||||||
|
for i := range entries {
|
||||||
|
status := int32(entries[i].Status)
|
||||||
|
out[i] = queries.FieldEntry{
|
||||||
|
&entries[i].Value, &status,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func pronounEntriesToDBEntries(entries []PronounEntry) []queries.PronounEntry {
|
||||||
|
out := make([]queries.PronounEntry, len(entries))
|
||||||
|
for i := range entries {
|
||||||
|
status := int32(entries[i].Status)
|
||||||
|
out[i] = queries.PronounEntry{
|
||||||
|
&entries[i].Pronouns, entries[i].DisplayText, &status,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
|
@ -4,8 +4,8 @@ import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
|
"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
|
||||||
"emperror.dev/errors"
|
"emperror.dev/errors"
|
||||||
"github.com/georgysavva/scany/pgxscan"
|
|
||||||
"github.com/jackc/pgx/v4"
|
"github.com/jackc/pgx/v4"
|
||||||
"github.com/rs/xid"
|
"github.com/rs/xid"
|
||||||
)
|
)
|
||||||
|
@ -18,13 +18,9 @@ const (
|
||||||
)
|
)
|
||||||
|
|
||||||
type Field struct {
|
type Field struct {
|
||||||
ID int64 `json:"-"`
|
ID int64 `json:"-"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
Favourite []string `json:"favourite"`
|
Entries []FieldEntry `json:"entries"`
|
||||||
Okay []string `json:"okay"`
|
|
||||||
Jokingly []string `json:"jokingly"`
|
|
||||||
FriendsOnly []string `json:"friends_only"`
|
|
||||||
Avoid []string `json:"avoid"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate validates this field. If it is invalid, a non-empty string is returned as error message.
|
// Validate validates this field. If it is invalid, a non-empty string is returned as error message.
|
||||||
|
@ -37,37 +33,17 @@ func (f Field) Validate() string {
|
||||||
return fmt.Sprintf("name max length is %d characters, length is %d", FieldNameMaxLength, length)
|
return fmt.Sprintf("name max length is %d characters, length is %d", FieldNameMaxLength, length)
|
||||||
}
|
}
|
||||||
|
|
||||||
if length := len(f.Favourite) + len(f.Okay) + len(f.Jokingly) + len(f.FriendsOnly) + len(f.Avoid); length > FieldEntriesLimit {
|
if length := len(f.Entries); length > FieldEntriesLimit {
|
||||||
return fmt.Sprintf("max number of entries is %d, current number is %d", FieldEntriesLimit, length)
|
return fmt.Sprintf("max number of entries is %d, current number is %d", FieldEntriesLimit, length)
|
||||||
}
|
}
|
||||||
|
|
||||||
for i, entry := range f.Favourite {
|
for i, entry := range f.Entries {
|
||||||
if length := len([]rune(entry)); length > FieldEntryMaxLength {
|
if length := len([]rune(entry.Value)); length > FieldEntryMaxLength {
|
||||||
return fmt.Sprintf("favourite.%d: name max length is %d characters, length is %d", i, FieldEntryMaxLength, length)
|
return fmt.Sprintf("entries.%d: max length is %d characters, length is %d", i, FieldEntryMaxLength, length)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
for i, entry := range f.Okay {
|
if entry.Status == StatusUnknown || entry.Status >= wordStatusMax {
|
||||||
if length := len([]rune(entry)); length > FieldEntryMaxLength {
|
return fmt.Sprintf("entries.%d: status is invalid, must be between 1 and %d, is %d", i, wordStatusMax-1, entry.Status)
|
||||||
return fmt.Sprintf("okay.%d: name max length is %d characters, length is %d", i, FieldEntryMaxLength, length)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, entry := range f.Jokingly {
|
|
||||||
if length := len([]rune(entry)); length > FieldEntryMaxLength {
|
|
||||||
return fmt.Sprintf("jokingly.%d: name max length is %d characters, length is %d", i, FieldEntryMaxLength, length)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, entry := range f.FriendsOnly {
|
|
||||||
if length := len([]rune(entry)); length > FieldEntryMaxLength {
|
|
||||||
return fmt.Sprintf("friends_only.%d: name max length is %d characters, length is %d", i, FieldEntryMaxLength, length)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, entry := range f.Avoid {
|
|
||||||
if length := len([]rune(entry)); length > FieldEntryMaxLength {
|
|
||||||
return fmt.Sprintf("avoid.%d: name max length is %d characters, length is %d", i, FieldEntryMaxLength, length)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -76,17 +52,20 @@ func (f Field) Validate() string {
|
||||||
|
|
||||||
// UserFields returns the fields associated with the given user ID.
|
// UserFields returns the fields associated with the given user ID.
|
||||||
func (db *DB) UserFields(ctx context.Context, id xid.ID) (fs []Field, err error) {
|
func (db *DB) UserFields(ctx context.Context, id xid.ID) (fs []Field, err error) {
|
||||||
sql, args, err := sq.
|
qfields, err := db.q.GetUserFields(ctx, id.String())
|
||||||
Select("id", "name", "favourite", "okay", "jokingly", "friends_only", "avoid").
|
|
||||||
From("user_fields").Where("user_id = ?", id).OrderBy("id ASC").ToSql()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errors.Wrap(err, "building sql")
|
return nil, errors.Wrap(err, "querying fields")
|
||||||
}
|
}
|
||||||
|
|
||||||
err = pgxscan.Select(ctx, db, &fs, sql, args...)
|
fs = make([]Field, len(qfields))
|
||||||
if err != nil {
|
for i := range qfields {
|
||||||
return nil, errors.Cause(err)
|
fs[i] = Field{
|
||||||
|
ID: int64(*qfields[i].ID),
|
||||||
|
Name: *qfields[i].Name,
|
||||||
|
Entries: dbEntriesToFieldEntries(qfields[i].Entries),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return fs, nil
|
return fs, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -102,20 +81,14 @@ func (db *DB) SetUserFields(ctx context.Context, tx pgx.Tx, userID xid.ID, field
|
||||||
return errors.Wrap(err, "deleting existing fields")
|
return errors.Wrap(err, "deleting existing fields")
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = tx.CopyFrom(ctx,
|
querier := queries.NewQuerier(tx)
|
||||||
pgx.Identifier{"user_fields"},
|
for _, field := range fields {
|
||||||
[]string{"user_id", "name", "favourite", "okay", "jokingly", "friends_only", "avoid"},
|
querier.InsertUserField(ctx, queries.InsertUserFieldParams{
|
||||||
pgx.CopyFromSlice(len(fields), func(i int) ([]any, error) {
|
UserID: userID.String(),
|
||||||
return []any{
|
Name: field.Name,
|
||||||
userID,
|
Entries: entriesToDBEntries(field.Entries),
|
||||||
fields[i].Name,
|
})
|
||||||
fields[i].Favourite,
|
}
|
||||||
fields[i].Okay,
|
|
||||||
fields[i].Jokingly,
|
|
||||||
fields[i].FriendsOnly,
|
|
||||||
fields[i].Avoid,
|
|
||||||
}, nil
|
|
||||||
}))
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "inserting new fields")
|
return errors.Wrap(err, "inserting new fields")
|
||||||
}
|
}
|
||||||
|
@ -124,17 +97,20 @@ func (db *DB) SetUserFields(ctx context.Context, tx pgx.Tx, userID xid.ID, field
|
||||||
|
|
||||||
// MemberFields returns the fields associated with the given member ID.
|
// MemberFields returns the fields associated with the given member ID.
|
||||||
func (db *DB) MemberFields(ctx context.Context, id xid.ID) (fs []Field, err error) {
|
func (db *DB) MemberFields(ctx context.Context, id xid.ID) (fs []Field, err error) {
|
||||||
sql, args, err := sq.
|
qfields, err := db.q.GetMemberFields(ctx, id.String())
|
||||||
Select("id", "name", "favourite", "okay", "jokingly", "friends_only", "avoid").
|
|
||||||
From("member_fields").Where("member_id = ?", id).OrderBy("id ASC").ToSql()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errors.Wrap(err, "building sql")
|
return nil, errors.Wrap(err, "querying fields")
|
||||||
}
|
}
|
||||||
|
|
||||||
err = pgxscan.Select(ctx, db, &fs, sql, args...)
|
fs = make([]Field, len(qfields))
|
||||||
if err != nil {
|
for i := range qfields {
|
||||||
return nil, errors.Cause(err)
|
fs[i] = Field{
|
||||||
|
ID: int64(*qfields[i].ID),
|
||||||
|
Name: *qfields[i].Name,
|
||||||
|
Entries: dbEntriesToFieldEntries(qfields[i].Entries),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return fs, nil
|
return fs, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -150,20 +126,14 @@ func (db *DB) SetMemberFields(ctx context.Context, tx pgx.Tx, memberID xid.ID, f
|
||||||
return errors.Wrap(err, "deleting existing fields")
|
return errors.Wrap(err, "deleting existing fields")
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = tx.CopyFrom(ctx,
|
querier := queries.NewQuerier(tx)
|
||||||
pgx.Identifier{"member_fields"},
|
for _, field := range fields {
|
||||||
[]string{"member_id", "name", "favourite", "okay", "jokingly", "friends_only", "avoid"},
|
querier.InsertMemberField(ctx, queries.InsertMemberFieldParams{
|
||||||
pgx.CopyFromSlice(len(fields), func(i int) ([]any, error) {
|
MemberID: memberID.String(),
|
||||||
return []any{
|
Name: field.Name,
|
||||||
memberID,
|
Entries: entriesToDBEntries(field.Entries),
|
||||||
fields[i].Name,
|
})
|
||||||
fields[i].Favourite,
|
}
|
||||||
fields[i].Okay,
|
|
||||||
fields[i].Jokingly,
|
|
||||||
fields[i].FriendsOnly,
|
|
||||||
fields[i].Avoid,
|
|
||||||
}, nil
|
|
||||||
}))
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "inserting new fields")
|
return errors.Wrap(err, "inserting new fields")
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,18 +11,6 @@ import (
|
||||||
"github.com/rs/xid"
|
"github.com/rs/xid"
|
||||||
)
|
)
|
||||||
|
|
||||||
type WordStatus int
|
|
||||||
|
|
||||||
const (
|
|
||||||
StatusUnknown WordStatus = 0
|
|
||||||
StatusFavourite WordStatus = 1
|
|
||||||
StatusOkay WordStatus = 2
|
|
||||||
StatusJokingly WordStatus = 3
|
|
||||||
StatusFriendsOnly WordStatus = 4
|
|
||||||
StatusAvoid WordStatus = 5
|
|
||||||
wordStatusMax WordStatus = 6
|
|
||||||
)
|
|
||||||
|
|
||||||
type Name struct {
|
type Name struct {
|
||||||
ID int64 `json:"-"`
|
ID int64 `json:"-"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
|
|
3
backend/db/queries/generate.go
Normal file
3
backend/db/queries/generate.go
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
package queries
|
||||||
|
|
||||||
|
//go:generate pggen gen go --query-glob queries.user.sql --query-glob queries.member.sql --postgres-connection "postgres://pggen:pggen@localhost/pggen"
|
31
backend/db/queries/queries.member.sql
Normal file
31
backend/db/queries/queries.member.sql
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
-- name: GetMemberByID :one
|
||||||
|
SELECT * FROM members
|
||||||
|
WHERE id = pggen.arg('id');
|
||||||
|
|
||||||
|
-- name: GetMemberByName :one
|
||||||
|
SELECT * FROM members
|
||||||
|
WHERE user_id = pggen.arg('user_id') AND (
|
||||||
|
id = pggen.arg('member_ref')
|
||||||
|
OR name = pggen.arg('member_ref')
|
||||||
|
);
|
||||||
|
|
||||||
|
-- name: GetMembers :many
|
||||||
|
SELECT * FROM members
|
||||||
|
WHERE user_id = pggen.arg('user_id')
|
||||||
|
ORDER BY name, id;
|
||||||
|
|
||||||
|
-- name: UpdateMemberNamesPronouns :one
|
||||||
|
UPDATE members SET
|
||||||
|
names = pggen.arg('names'),
|
||||||
|
pronouns = pggen.arg('pronouns')
|
||||||
|
WHERE id = pggen.arg('id')
|
||||||
|
RETURNING *;
|
||||||
|
|
||||||
|
-- name: GetMemberFields :many
|
||||||
|
SELECT * FROM member_fields WHERE member_id = pggen.arg('member_id') ORDER BY id ASC;
|
||||||
|
|
||||||
|
-- name: InsertMemberField :one
|
||||||
|
INSERT INTO member_fields
|
||||||
|
(member_id, name, entries) VALUES
|
||||||
|
(pggen.arg('member_id'), pggen.arg('name'), pggen.arg('entries'))
|
||||||
|
RETURNING *;
|
803
backend/db/queries/queries.member.sql.go
Normal file
803
backend/db/queries/queries.member.sql.go
Normal file
|
@ -0,0 +1,803 @@
|
||||||
|
// Code generated by pggen. DO NOT EDIT.
|
||||||
|
|
||||||
|
package queries
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"github.com/jackc/pgconn"
|
||||||
|
"github.com/jackc/pgtype"
|
||||||
|
"github.com/jackc/pgx/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Querier is a typesafe Go interface backed by SQL queries.
|
||||||
|
//
|
||||||
|
// Methods ending with Batch enqueue a query to run later in a pgx.Batch. After
|
||||||
|
// calling SendBatch on pgx.Conn, pgxpool.Pool, or pgx.Tx, use the Scan methods
|
||||||
|
// to parse the results.
|
||||||
|
type Querier interface {
|
||||||
|
GetMemberByID(ctx context.Context, id string) (GetMemberByIDRow, error)
|
||||||
|
// GetMemberByIDBatch enqueues a GetMemberByID query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
GetMemberByIDBatch(batch genericBatch, id string)
|
||||||
|
// GetMemberByIDScan scans the result of an executed GetMemberByIDBatch query.
|
||||||
|
GetMemberByIDScan(results pgx.BatchResults) (GetMemberByIDRow, error)
|
||||||
|
|
||||||
|
GetMemberByName(ctx context.Context, userID string, memberRef string) (GetMemberByNameRow, error)
|
||||||
|
// GetMemberByNameBatch enqueues a GetMemberByName query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
GetMemberByNameBatch(batch genericBatch, userID string, memberRef string)
|
||||||
|
// GetMemberByNameScan scans the result of an executed GetMemberByNameBatch query.
|
||||||
|
GetMemberByNameScan(results pgx.BatchResults) (GetMemberByNameRow, error)
|
||||||
|
|
||||||
|
GetMembers(ctx context.Context, userID string) ([]GetMembersRow, error)
|
||||||
|
// GetMembersBatch enqueues a GetMembers query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
GetMembersBatch(batch genericBatch, userID string)
|
||||||
|
// GetMembersScan scans the result of an executed GetMembersBatch query.
|
||||||
|
GetMembersScan(results pgx.BatchResults) ([]GetMembersRow, error)
|
||||||
|
|
||||||
|
UpdateMemberNamesPronouns(ctx context.Context, params UpdateMemberNamesPronounsParams) (UpdateMemberNamesPronounsRow, error)
|
||||||
|
// UpdateMemberNamesPronounsBatch enqueues a UpdateMemberNamesPronouns query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
UpdateMemberNamesPronounsBatch(batch genericBatch, params UpdateMemberNamesPronounsParams)
|
||||||
|
// UpdateMemberNamesPronounsScan scans the result of an executed UpdateMemberNamesPronounsBatch query.
|
||||||
|
UpdateMemberNamesPronounsScan(results pgx.BatchResults) (UpdateMemberNamesPronounsRow, error)
|
||||||
|
|
||||||
|
GetMemberFields(ctx context.Context, memberID string) ([]GetMemberFieldsRow, error)
|
||||||
|
// GetMemberFieldsBatch enqueues a GetMemberFields query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
GetMemberFieldsBatch(batch genericBatch, memberID string)
|
||||||
|
// GetMemberFieldsScan scans the result of an executed GetMemberFieldsBatch query.
|
||||||
|
GetMemberFieldsScan(results pgx.BatchResults) ([]GetMemberFieldsRow, error)
|
||||||
|
|
||||||
|
InsertMemberField(ctx context.Context, params InsertMemberFieldParams) (InsertMemberFieldRow, error)
|
||||||
|
// InsertMemberFieldBatch enqueues a InsertMemberField query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
InsertMemberFieldBatch(batch genericBatch, params InsertMemberFieldParams)
|
||||||
|
// InsertMemberFieldScan scans the result of an executed InsertMemberFieldBatch query.
|
||||||
|
InsertMemberFieldScan(results pgx.BatchResults) (InsertMemberFieldRow, error)
|
||||||
|
|
||||||
|
GetUserByID(ctx context.Context, id string) (GetUserByIDRow, error)
|
||||||
|
// GetUserByIDBatch enqueues a GetUserByID query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
GetUserByIDBatch(batch genericBatch, id string)
|
||||||
|
// GetUserByIDScan scans the result of an executed GetUserByIDBatch query.
|
||||||
|
GetUserByIDScan(results pgx.BatchResults) (GetUserByIDRow, error)
|
||||||
|
|
||||||
|
GetUserByUsername(ctx context.Context, username string) (GetUserByUsernameRow, error)
|
||||||
|
// GetUserByUsernameBatch enqueues a GetUserByUsername query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
GetUserByUsernameBatch(batch genericBatch, username string)
|
||||||
|
// GetUserByUsernameScan scans the result of an executed GetUserByUsernameBatch query.
|
||||||
|
GetUserByUsernameScan(results pgx.BatchResults) (GetUserByUsernameRow, error)
|
||||||
|
|
||||||
|
UpdateUserNamesPronouns(ctx context.Context, params UpdateUserNamesPronounsParams) (UpdateUserNamesPronounsRow, error)
|
||||||
|
// UpdateUserNamesPronounsBatch enqueues a UpdateUserNamesPronouns query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
UpdateUserNamesPronounsBatch(batch genericBatch, params UpdateUserNamesPronounsParams)
|
||||||
|
// UpdateUserNamesPronounsScan scans the result of an executed UpdateUserNamesPronounsBatch query.
|
||||||
|
UpdateUserNamesPronounsScan(results pgx.BatchResults) (UpdateUserNamesPronounsRow, error)
|
||||||
|
|
||||||
|
GetUserFields(ctx context.Context, userID string) ([]GetUserFieldsRow, error)
|
||||||
|
// GetUserFieldsBatch enqueues a GetUserFields query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
GetUserFieldsBatch(batch genericBatch, userID string)
|
||||||
|
// GetUserFieldsScan scans the result of an executed GetUserFieldsBatch query.
|
||||||
|
GetUserFieldsScan(results pgx.BatchResults) ([]GetUserFieldsRow, error)
|
||||||
|
|
||||||
|
InsertUserField(ctx context.Context, params InsertUserFieldParams) (InsertUserFieldRow, error)
|
||||||
|
// InsertUserFieldBatch enqueues a InsertUserField query into batch to be executed
|
||||||
|
// later by the batch.
|
||||||
|
InsertUserFieldBatch(batch genericBatch, params InsertUserFieldParams)
|
||||||
|
// InsertUserFieldScan scans the result of an executed InsertUserFieldBatch query.
|
||||||
|
InsertUserFieldScan(results pgx.BatchResults) (InsertUserFieldRow, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
type DBQuerier struct {
|
||||||
|
conn genericConn // underlying Postgres transport to use
|
||||||
|
types *typeResolver // resolve types by name
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ Querier = &DBQuerier{}
|
||||||
|
|
||||||
|
// genericConn is a connection to a Postgres database. This is usually backed by
|
||||||
|
// *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
|
||||||
|
type genericConn interface {
|
||||||
|
// Query executes sql with args. If there is an error the returned Rows will
|
||||||
|
// be returned in an error state. So it is allowed to ignore the error
|
||||||
|
// returned from Query and handle it in Rows.
|
||||||
|
Query(ctx context.Context, sql string, args ...interface{}) (pgx.Rows, error)
|
||||||
|
|
||||||
|
// QueryRow is a convenience wrapper over Query. Any error that occurs while
|
||||||
|
// querying is deferred until calling Scan on the returned Row. That Row will
|
||||||
|
// error with pgx.ErrNoRows if no rows are returned.
|
||||||
|
QueryRow(ctx context.Context, sql string, args ...interface{}) pgx.Row
|
||||||
|
|
||||||
|
// Exec executes sql. sql can be either a prepared statement name or an SQL
|
||||||
|
// string. arguments should be referenced positionally from the sql string
|
||||||
|
// as $1, $2, etc.
|
||||||
|
Exec(ctx context.Context, sql string, arguments ...interface{}) (pgconn.CommandTag, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// genericBatch batches queries to send in a single network request to a
|
||||||
|
// Postgres server. This is usually backed by *pgx.Batch.
|
||||||
|
type genericBatch interface {
|
||||||
|
// Queue queues a query to batch b. query can be an SQL query or the name of a
|
||||||
|
// prepared statement. See Queue on *pgx.Batch.
|
||||||
|
Queue(query string, arguments ...interface{})
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewQuerier creates a DBQuerier that implements Querier. conn is typically
|
||||||
|
// *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
|
||||||
|
func NewQuerier(conn genericConn) *DBQuerier {
|
||||||
|
return NewQuerierConfig(conn, QuerierConfig{})
|
||||||
|
}
|
||||||
|
|
||||||
|
type QuerierConfig struct {
|
||||||
|
// DataTypes contains pgtype.Value to use for encoding and decoding instead
|
||||||
|
// of pggen-generated pgtype.ValueTranscoder.
|
||||||
|
//
|
||||||
|
// If OIDs are available for an input parameter type and all of its
|
||||||
|
// transitive dependencies, pggen will use the binary encoding format for
|
||||||
|
// the input parameter.
|
||||||
|
DataTypes []pgtype.DataType
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewQuerierConfig creates a DBQuerier that implements Querier with the given
|
||||||
|
// config. conn is typically *pgx.Conn, pgx.Tx, or *pgxpool.Pool.
|
||||||
|
func NewQuerierConfig(conn genericConn, cfg QuerierConfig) *DBQuerier {
|
||||||
|
return &DBQuerier{conn: conn, types: newTypeResolver(cfg.DataTypes)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithTx creates a new DBQuerier that uses the transaction to run all queries.
|
||||||
|
func (q *DBQuerier) WithTx(tx pgx.Tx) (*DBQuerier, error) {
|
||||||
|
return &DBQuerier{conn: tx}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// preparer is any Postgres connection transport that provides a way to prepare
|
||||||
|
// a statement, most commonly *pgx.Conn.
|
||||||
|
type preparer interface {
|
||||||
|
Prepare(ctx context.Context, name, sql string) (sd *pgconn.StatementDescription, err error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrepareAllQueries executes a PREPARE statement for all pggen generated SQL
|
||||||
|
// queries in querier files. Typical usage is as the AfterConnect callback
|
||||||
|
// for pgxpool.Config
|
||||||
|
//
|
||||||
|
// pgx will use the prepared statement if available. Calling PrepareAllQueries
|
||||||
|
// is an optional optimization to avoid a network round-trip the first time pgx
|
||||||
|
// runs a query if pgx statement caching is enabled.
|
||||||
|
func PrepareAllQueries(ctx context.Context, p preparer) error {
|
||||||
|
if _, err := p.Prepare(ctx, getMemberByIDSQL, getMemberByIDSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'GetMemberByID': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, getMemberByNameSQL, getMemberByNameSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'GetMemberByName': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, getMembersSQL, getMembersSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'GetMembers': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, updateMemberNamesPronounsSQL, updateMemberNamesPronounsSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'UpdateMemberNamesPronouns': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, getMemberFieldsSQL, getMemberFieldsSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'GetMemberFields': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, insertMemberFieldSQL, insertMemberFieldSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'InsertMemberField': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, getUserByIDSQL, getUserByIDSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'GetUserByID': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, getUserByUsernameSQL, getUserByUsernameSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'GetUserByUsername': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, updateUserNamesPronounsSQL, updateUserNamesPronounsSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'UpdateUserNamesPronouns': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, getUserFieldsSQL, getUserFieldsSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'GetUserFields': %w", err)
|
||||||
|
}
|
||||||
|
if _, err := p.Prepare(ctx, insertUserFieldSQL, insertUserFieldSQL); err != nil {
|
||||||
|
return fmt.Errorf("prepare query 'InsertUserField': %w", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FieldEntry represents the Postgres composite type "field_entry".
|
||||||
|
type FieldEntry struct {
|
||||||
|
Value *string `json:"value"`
|
||||||
|
Status *int32 `json:"status"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// PronounEntry represents the Postgres composite type "pronoun_entry".
|
||||||
|
type PronounEntry struct {
|
||||||
|
Value *string `json:"value"`
|
||||||
|
DisplayValue *string `json:"display_value"`
|
||||||
|
Status *int32 `json:"status"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// typeResolver looks up the pgtype.ValueTranscoder by Postgres type name.
|
||||||
|
type typeResolver struct {
|
||||||
|
connInfo *pgtype.ConnInfo // types by Postgres type name
|
||||||
|
}
|
||||||
|
|
||||||
|
func newTypeResolver(types []pgtype.DataType) *typeResolver {
|
||||||
|
ci := pgtype.NewConnInfo()
|
||||||
|
for _, typ := range types {
|
||||||
|
if txt, ok := typ.Value.(textPreferrer); ok && typ.OID != unknownOID {
|
||||||
|
typ.Value = txt.ValueTranscoder
|
||||||
|
}
|
||||||
|
ci.RegisterDataType(typ)
|
||||||
|
}
|
||||||
|
return &typeResolver{connInfo: ci}
|
||||||
|
}
|
||||||
|
|
||||||
|
// findValue find the OID, and pgtype.ValueTranscoder for a Postgres type name.
|
||||||
|
func (tr *typeResolver) findValue(name string) (uint32, pgtype.ValueTranscoder, bool) {
|
||||||
|
typ, ok := tr.connInfo.DataTypeForName(name)
|
||||||
|
if !ok {
|
||||||
|
return 0, nil, false
|
||||||
|
}
|
||||||
|
v := pgtype.NewValue(typ.Value)
|
||||||
|
return typ.OID, v.(pgtype.ValueTranscoder), true
|
||||||
|
}
|
||||||
|
|
||||||
|
// setValue sets the value of a ValueTranscoder to a value that should always
|
||||||
|
// work and panics if it fails.
|
||||||
|
func (tr *typeResolver) setValue(vt pgtype.ValueTranscoder, val interface{}) pgtype.ValueTranscoder {
|
||||||
|
if err := vt.Set(val); err != nil {
|
||||||
|
panic(fmt.Sprintf("set ValueTranscoder %T to %+v: %s", vt, val, err))
|
||||||
|
}
|
||||||
|
return vt
|
||||||
|
}
|
||||||
|
|
||||||
|
type compositeField struct {
|
||||||
|
name string // name of the field
|
||||||
|
typeName string // Postgres type name
|
||||||
|
defaultVal pgtype.ValueTranscoder // default value to use
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tr *typeResolver) newCompositeValue(name string, fields ...compositeField) pgtype.ValueTranscoder {
|
||||||
|
if _, val, ok := tr.findValue(name); ok {
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
fs := make([]pgtype.CompositeTypeField, len(fields))
|
||||||
|
vals := make([]pgtype.ValueTranscoder, len(fields))
|
||||||
|
isBinaryOk := true
|
||||||
|
for i, field := range fields {
|
||||||
|
oid, val, ok := tr.findValue(field.typeName)
|
||||||
|
if !ok {
|
||||||
|
oid = unknownOID
|
||||||
|
val = field.defaultVal
|
||||||
|
}
|
||||||
|
isBinaryOk = isBinaryOk && oid != unknownOID
|
||||||
|
fs[i] = pgtype.CompositeTypeField{Name: field.name, OID: oid}
|
||||||
|
vals[i] = val
|
||||||
|
}
|
||||||
|
// Okay to ignore error because it's only thrown when the number of field
|
||||||
|
// names does not equal the number of ValueTranscoders.
|
||||||
|
typ, _ := pgtype.NewCompositeTypeValues(name, fs, vals)
|
||||||
|
if !isBinaryOk {
|
||||||
|
return textPreferrer{ValueTranscoder: typ, typeName: name}
|
||||||
|
}
|
||||||
|
return typ
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tr *typeResolver) newArrayValue(name, elemName string, defaultVal func() pgtype.ValueTranscoder) pgtype.ValueTranscoder {
|
||||||
|
if _, val, ok := tr.findValue(name); ok {
|
||||||
|
return val
|
||||||
|
}
|
||||||
|
elemOID, elemVal, ok := tr.findValue(elemName)
|
||||||
|
elemValFunc := func() pgtype.ValueTranscoder {
|
||||||
|
return pgtype.NewValue(elemVal).(pgtype.ValueTranscoder)
|
||||||
|
}
|
||||||
|
if !ok {
|
||||||
|
elemOID = unknownOID
|
||||||
|
elemValFunc = defaultVal
|
||||||
|
}
|
||||||
|
typ := pgtype.NewArrayType(name, elemOID, elemValFunc)
|
||||||
|
if elemOID == unknownOID {
|
||||||
|
return textPreferrer{ValueTranscoder: typ, typeName: name}
|
||||||
|
}
|
||||||
|
return typ
|
||||||
|
}
|
||||||
|
|
||||||
|
// newFieldEntry creates a new pgtype.ValueTranscoder for the Postgres
|
||||||
|
// composite type 'field_entry'.
|
||||||
|
func (tr *typeResolver) newFieldEntry() pgtype.ValueTranscoder {
|
||||||
|
return tr.newCompositeValue(
|
||||||
|
"field_entry",
|
||||||
|
compositeField{name: "value", typeName: "text", defaultVal: &pgtype.Text{}},
|
||||||
|
compositeField{name: "status", typeName: "int4", defaultVal: &pgtype.Int4{}},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// newFieldEntryRaw returns all composite fields for the Postgres composite
|
||||||
|
// type 'field_entry' as a slice of interface{} to encode query parameters.
|
||||||
|
func (tr *typeResolver) newFieldEntryRaw(v FieldEntry) []interface{} {
|
||||||
|
return []interface{}{
|
||||||
|
v.Value,
|
||||||
|
v.Status,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// newPronounEntry creates a new pgtype.ValueTranscoder for the Postgres
|
||||||
|
// composite type 'pronoun_entry'.
|
||||||
|
func (tr *typeResolver) newPronounEntry() pgtype.ValueTranscoder {
|
||||||
|
return tr.newCompositeValue(
|
||||||
|
"pronoun_entry",
|
||||||
|
compositeField{name: "value", typeName: "text", defaultVal: &pgtype.Text{}},
|
||||||
|
compositeField{name: "display_value", typeName: "text", defaultVal: &pgtype.Text{}},
|
||||||
|
compositeField{name: "status", typeName: "int4", defaultVal: &pgtype.Int4{}},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// newPronounEntryRaw returns all composite fields for the Postgres composite
|
||||||
|
// type 'pronoun_entry' as a slice of interface{} to encode query parameters.
|
||||||
|
func (tr *typeResolver) newPronounEntryRaw(v PronounEntry) []interface{} {
|
||||||
|
return []interface{}{
|
||||||
|
v.Value,
|
||||||
|
v.DisplayValue,
|
||||||
|
v.Status,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// newFieldEntryArray creates a new pgtype.ValueTranscoder for the Postgres
|
||||||
|
// '_field_entry' array type.
|
||||||
|
func (tr *typeResolver) newFieldEntryArray() pgtype.ValueTranscoder {
|
||||||
|
return tr.newArrayValue("_field_entry", "field_entry", tr.newFieldEntry)
|
||||||
|
}
|
||||||
|
|
||||||
|
// newFieldEntryArrayInit creates an initialized pgtype.ValueTranscoder for the
|
||||||
|
// Postgres array type '_field_entry' to encode query parameters.
|
||||||
|
func (tr *typeResolver) newFieldEntryArrayInit(ps []FieldEntry) pgtype.ValueTranscoder {
|
||||||
|
dec := tr.newFieldEntryArray()
|
||||||
|
if err := dec.Set(tr.newFieldEntryArrayRaw(ps)); err != nil {
|
||||||
|
panic("encode []FieldEntry: " + err.Error()) // should always succeed
|
||||||
|
}
|
||||||
|
return textPreferrer{ValueTranscoder: dec, typeName: "_field_entry"}
|
||||||
|
}
|
||||||
|
|
||||||
|
// newFieldEntryArrayRaw returns all elements for the Postgres array type '_field_entry'
|
||||||
|
// as a slice of interface{} for use with the pgtype.Value Set method.
|
||||||
|
func (tr *typeResolver) newFieldEntryArrayRaw(vs []FieldEntry) []interface{} {
|
||||||
|
elems := make([]interface{}, len(vs))
|
||||||
|
for i, v := range vs {
|
||||||
|
elems[i] = tr.newFieldEntryRaw(v)
|
||||||
|
}
|
||||||
|
return elems
|
||||||
|
}
|
||||||
|
|
||||||
|
// newPronounEntryArray creates a new pgtype.ValueTranscoder for the Postgres
|
||||||
|
// '_pronoun_entry' array type.
|
||||||
|
func (tr *typeResolver) newPronounEntryArray() pgtype.ValueTranscoder {
|
||||||
|
return tr.newArrayValue("_pronoun_entry", "pronoun_entry", tr.newPronounEntry)
|
||||||
|
}
|
||||||
|
|
||||||
|
// newPronounEntryArrayInit creates an initialized pgtype.ValueTranscoder for the
|
||||||
|
// Postgres array type '_pronoun_entry' to encode query parameters.
|
||||||
|
func (tr *typeResolver) newPronounEntryArrayInit(ps []PronounEntry) pgtype.ValueTranscoder {
|
||||||
|
dec := tr.newPronounEntryArray()
|
||||||
|
if err := dec.Set(tr.newPronounEntryArrayRaw(ps)); err != nil {
|
||||||
|
panic("encode []PronounEntry: " + err.Error()) // should always succeed
|
||||||
|
}
|
||||||
|
return textPreferrer{ValueTranscoder: dec, typeName: "_pronoun_entry"}
|
||||||
|
}
|
||||||
|
|
||||||
|
// newPronounEntryArrayRaw returns all elements for the Postgres array type '_pronoun_entry'
|
||||||
|
// as a slice of interface{} for use with the pgtype.Value Set method.
|
||||||
|
func (tr *typeResolver) newPronounEntryArrayRaw(vs []PronounEntry) []interface{} {
|
||||||
|
elems := make([]interface{}, len(vs))
|
||||||
|
for i, v := range vs {
|
||||||
|
elems[i] = tr.newPronounEntryRaw(v)
|
||||||
|
}
|
||||||
|
return elems
|
||||||
|
}
|
||||||
|
|
||||||
|
// getMemberByIDSQL fetches a single members row by primary key.
const getMemberByIDSQL = `SELECT * FROM members
WHERE id = $1;`

// GetMemberByIDRow mirrors one row of the members table. Because the query is
// SELECT * and Scan binds positionally, field order must match the table's
// column order.
type GetMemberByIDRow struct {
	ID          string         `json:"id"`
	UserID      string         `json:"user_id"`
	Name        string         `json:"name"`
	Bio         *string        `json:"bio"`
	AvatarUrls  []string       `json:"avatar_urls"`
	Links       []string       `json:"links"`
	DisplayName *string        `json:"display_name"`
	Names       []FieldEntry   `json:"names"`
	Pronouns    []PronounEntry `json:"pronouns"`
}

// GetMemberByID implements Querier.GetMemberByID.
func (q *DBQuerier) GetMemberByID(ctx context.Context, id string) (GetMemberByIDRow, error) {
	// pggen tags the context with the query name under a string key
	// (presumably consumed for logging/tracing elsewhere — not visible here).
	ctx = context.WithValue(ctx, "pggen_query_name", "GetMemberByID")
	row := q.conn.QueryRow(ctx, getMemberByIDSQL, id)
	var item GetMemberByIDRow
	// Composite-array columns are scanned into transcoders first, then
	// converted to the typed slices via AssignTo below.
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("query GetMemberByID: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetMemberByID row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetMemberByID row: %w", err)
	}
	return item, nil
}

// GetMemberByIDBatch implements Querier.GetMemberByIDBatch.
// It only queues the query; results are read later via GetMemberByIDScan.
func (q *DBQuerier) GetMemberByIDBatch(batch genericBatch, id string) {
	batch.Queue(getMemberByIDSQL, id)
}

// GetMemberByIDScan implements Querier.GetMemberByIDScan.
// It reads the result of a previously queued GetMemberByIDBatch call.
func (q *DBQuerier) GetMemberByIDScan(results pgx.BatchResults) (GetMemberByIDRow, error) {
	row := results.QueryRow()
	var item GetMemberByIDRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("scan GetMemberByIDBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetMemberByID row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetMemberByID row: %w", err)
	}
	return item, nil
}
|
||||||
|
|
||||||
|
// getMemberByNameSQL resolves a member within a user's account by either
// member ID or member name ($2 is matched against both).
const getMemberByNameSQL = `SELECT * FROM members
WHERE user_id = $1 AND (
id = $2
OR name = $2
);`

// GetMemberByNameRow mirrors one row of the members table. Field order must
// match the table's column order because Scan binds positionally.
type GetMemberByNameRow struct {
	ID          string         `json:"id"`
	UserID      string         `json:"user_id"`
	Name        string         `json:"name"`
	Bio         *string        `json:"bio"`
	AvatarUrls  []string       `json:"avatar_urls"`
	Links       []string       `json:"links"`
	DisplayName *string        `json:"display_name"`
	Names       []FieldEntry   `json:"names"`
	Pronouns    []PronounEntry `json:"pronouns"`
}

// GetMemberByName implements Querier.GetMemberByName.
// memberRef may be either a member ID or a member name (see the SQL above).
func (q *DBQuerier) GetMemberByName(ctx context.Context, userID string, memberRef string) (GetMemberByNameRow, error) {
	// pggen tags the context with the query name under a string key.
	ctx = context.WithValue(ctx, "pggen_query_name", "GetMemberByName")
	row := q.conn.QueryRow(ctx, getMemberByNameSQL, userID, memberRef)
	var item GetMemberByNameRow
	// Composite-array columns are scanned into transcoders, then converted
	// to the typed slices via AssignTo below.
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("query GetMemberByName: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetMemberByName row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetMemberByName row: %w", err)
	}
	return item, nil
}

// GetMemberByNameBatch implements Querier.GetMemberByNameBatch.
// It only queues the query; results are read later via GetMemberByNameScan.
func (q *DBQuerier) GetMemberByNameBatch(batch genericBatch, userID string, memberRef string) {
	batch.Queue(getMemberByNameSQL, userID, memberRef)
}

// GetMemberByNameScan implements Querier.GetMemberByNameScan.
// It reads the result of a previously queued GetMemberByNameBatch call.
func (q *DBQuerier) GetMemberByNameScan(results pgx.BatchResults) (GetMemberByNameRow, error) {
	row := results.QueryRow()
	var item GetMemberByNameRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("scan GetMemberByNameBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetMemberByName row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetMemberByName row: %w", err)
	}
	return item, nil
}
|
||||||
|
|
||||||
|
// getMembersSQL lists every member belonging to a user, sorted by name then ID.
const getMembersSQL = `SELECT * FROM members
WHERE user_id = $1
ORDER BY name, id;`

// GetMembersRow mirrors one row of the members table. Unlike the single-row
// variants, scalar fields here are pointer-typed — presumably because pggen
// could not prove non-nullability for this query; confirm against the schema.
type GetMembersRow struct {
	ID          *string        `json:"id"`
	UserID      *string        `json:"user_id"`
	Name        *string        `json:"name"`
	Bio         *string        `json:"bio"`
	AvatarUrls  []string       `json:"avatar_urls"`
	Links       []string       `json:"links"`
	DisplayName *string        `json:"display_name"`
	Names       []FieldEntry   `json:"names"`
	Pronouns    []PronounEntry `json:"pronouns"`
}

// GetMembers implements Querier.GetMembers.
// It returns an empty (non-nil) slice when the user has no members.
func (q *DBQuerier) GetMembers(ctx context.Context, userID string) ([]GetMembersRow, error) {
	// pggen tags the context with the query name under a string key.
	ctx = context.WithValue(ctx, "pggen_query_name", "GetMembers")
	rows, err := q.conn.Query(ctx, getMembersSQL, userID)
	if err != nil {
		return nil, fmt.Errorf("query GetMembers: %w", err)
	}
	defer rows.Close()
	items := []GetMembersRow{}
	// The composite-array transcoders are created once and reused for every
	// row; AssignTo copies their contents out before the next Scan overwrites.
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	for rows.Next() {
		var item GetMembersRow
		if err := rows.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
			return nil, fmt.Errorf("scan GetMembers row: %w", err)
		}
		if err := namesArray.AssignTo(&item.Names); err != nil {
			return nil, fmt.Errorf("assign GetMembers row: %w", err)
		}
		if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
			return nil, fmt.Errorf("assign GetMembers row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetMembers rows: %w", err)
	}
	// err is necessarily nil here: the Query error returned above and rows.Err
	// was just checked.
	return items, err
}

// GetMembersBatch implements Querier.GetMembersBatch.
// It only queues the query; results are read later via GetMembersScan.
func (q *DBQuerier) GetMembersBatch(batch genericBatch, userID string) {
	batch.Queue(getMembersSQL, userID)
}

// GetMembersScan implements Querier.GetMembersScan.
// It reads the results of a previously queued GetMembersBatch call.
func (q *DBQuerier) GetMembersScan(results pgx.BatchResults) ([]GetMembersRow, error) {
	rows, err := results.Query()
	if err != nil {
		return nil, fmt.Errorf("query GetMembersBatch: %w", err)
	}
	defer rows.Close()
	items := []GetMembersRow{}
	// Transcoders reused for every row, as in GetMembers.
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	for rows.Next() {
		var item GetMembersRow
		if err := rows.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
			return nil, fmt.Errorf("scan GetMembersBatch row: %w", err)
		}
		if err := namesArray.AssignTo(&item.Names); err != nil {
			return nil, fmt.Errorf("assign GetMembers row: %w", err)
		}
		if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
			return nil, fmt.Errorf("assign GetMembers row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetMembersBatch rows: %w", err)
	}
	// err is necessarily nil here (both error paths checked above).
	return items, err
}
|
||||||
|
|
||||||
|
// updateMemberNamesPronounsSQL replaces a member's names and pronouns arrays
// in one statement and returns the full updated row.
const updateMemberNamesPronounsSQL = `UPDATE members SET
names = $1,
pronouns = $2
WHERE id = $3
RETURNING *;`

// UpdateMemberNamesPronounsParams carries the query arguments in positional
// order ($1 = Names, $2 = Pronouns, $3 = ID).
type UpdateMemberNamesPronounsParams struct {
	Names    []FieldEntry
	Pronouns []PronounEntry
	ID       string
}

// UpdateMemberNamesPronounsRow mirrors the RETURNING * row; field order must
// match the members table's column order because Scan binds positionally.
type UpdateMemberNamesPronounsRow struct {
	ID          string         `json:"id"`
	UserID      string         `json:"user_id"`
	Name        string         `json:"name"`
	Bio         *string        `json:"bio"`
	AvatarUrls  []string       `json:"avatar_urls"`
	Links       []string       `json:"links"`
	DisplayName *string        `json:"display_name"`
	Names       []FieldEntry   `json:"names"`
	Pronouns    []PronounEntry `json:"pronouns"`
}

// UpdateMemberNamesPronouns implements Querier.UpdateMemberNamesPronouns.
func (q *DBQuerier) UpdateMemberNamesPronouns(ctx context.Context, params UpdateMemberNamesPronounsParams) (UpdateMemberNamesPronounsRow, error) {
	// pggen tags the context with the query name under a string key.
	ctx = context.WithValue(ctx, "pggen_query_name", "UpdateMemberNamesPronouns")
	// The slice params are wrapped in initialized composite-array transcoders
	// so pgx can encode them as the Postgres array types.
	row := q.conn.QueryRow(ctx, updateMemberNamesPronounsSQL, q.types.newFieldEntryArrayInit(params.Names), q.types.newPronounEntryArrayInit(params.Pronouns), params.ID)
	var item UpdateMemberNamesPronounsRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("query UpdateMemberNamesPronouns: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign UpdateMemberNamesPronouns row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign UpdateMemberNamesPronouns row: %w", err)
	}
	return item, nil
}

// UpdateMemberNamesPronounsBatch implements Querier.UpdateMemberNamesPronounsBatch.
// It only queues the query; results are read via UpdateMemberNamesPronounsScan.
func (q *DBQuerier) UpdateMemberNamesPronounsBatch(batch genericBatch, params UpdateMemberNamesPronounsParams) {
	batch.Queue(updateMemberNamesPronounsSQL, q.types.newFieldEntryArrayInit(params.Names), q.types.newPronounEntryArrayInit(params.Pronouns), params.ID)
}

// UpdateMemberNamesPronounsScan implements Querier.UpdateMemberNamesPronounsScan.
// It reads the result of a previously queued UpdateMemberNamesPronounsBatch call.
func (q *DBQuerier) UpdateMemberNamesPronounsScan(results pgx.BatchResults) (UpdateMemberNamesPronounsRow, error) {
	row := results.QueryRow()
	var item UpdateMemberNamesPronounsRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.UserID, &item.Name, &item.Bio, &item.AvatarUrls, &item.Links, &item.DisplayName, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("scan UpdateMemberNamesPronounsBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign UpdateMemberNamesPronouns row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign UpdateMemberNamesPronouns row: %w", err)
	}
	return item, nil
}
|
||||||
|
|
||||||
|
// getMemberFieldsSQL lists a member's custom fields, oldest (lowest id) first.
const getMemberFieldsSQL = `SELECT * FROM member_fields WHERE member_id = $1 ORDER BY id ASC;`

// GetMemberFieldsRow mirrors one row of the member_fields table. Scalar fields
// are pointer-typed — presumably because pggen could not prove non-nullability
// for this query; confirm against the schema.
type GetMemberFieldsRow struct {
	MemberID *string      `json:"member_id"`
	ID       *int         `json:"id"`
	Name     *string      `json:"name"`
	Entries  []FieldEntry `json:"entries"`
}

// GetMemberFields implements Querier.GetMemberFields.
// It returns an empty (non-nil) slice when the member has no fields.
func (q *DBQuerier) GetMemberFields(ctx context.Context, memberID string) ([]GetMemberFieldsRow, error) {
	// pggen tags the context with the query name under a string key.
	ctx = context.WithValue(ctx, "pggen_query_name", "GetMemberFields")
	rows, err := q.conn.Query(ctx, getMemberFieldsSQL, memberID)
	if err != nil {
		return nil, fmt.Errorf("query GetMemberFields: %w", err)
	}
	defer rows.Close()
	items := []GetMemberFieldsRow{}
	// One transcoder is reused across rows; AssignTo copies the contents out
	// before the next Scan overwrites it.
	entriesArray := q.types.newFieldEntryArray()
	for rows.Next() {
		var item GetMemberFieldsRow
		if err := rows.Scan(&item.MemberID, &item.ID, &item.Name, entriesArray); err != nil {
			return nil, fmt.Errorf("scan GetMemberFields row: %w", err)
		}
		if err := entriesArray.AssignTo(&item.Entries); err != nil {
			return nil, fmt.Errorf("assign GetMemberFields row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetMemberFields rows: %w", err)
	}
	// err is necessarily nil here (both error paths checked above).
	return items, err
}

// GetMemberFieldsBatch implements Querier.GetMemberFieldsBatch.
// It only queues the query; results are read later via GetMemberFieldsScan.
func (q *DBQuerier) GetMemberFieldsBatch(batch genericBatch, memberID string) {
	batch.Queue(getMemberFieldsSQL, memberID)
}

// GetMemberFieldsScan implements Querier.GetMemberFieldsScan.
// It reads the results of a previously queued GetMemberFieldsBatch call.
func (q *DBQuerier) GetMemberFieldsScan(results pgx.BatchResults) ([]GetMemberFieldsRow, error) {
	rows, err := results.Query()
	if err != nil {
		return nil, fmt.Errorf("query GetMemberFieldsBatch: %w", err)
	}
	defer rows.Close()
	items := []GetMemberFieldsRow{}
	entriesArray := q.types.newFieldEntryArray()
	for rows.Next() {
		var item GetMemberFieldsRow
		if err := rows.Scan(&item.MemberID, &item.ID, &item.Name, entriesArray); err != nil {
			return nil, fmt.Errorf("scan GetMemberFieldsBatch row: %w", err)
		}
		if err := entriesArray.AssignTo(&item.Entries); err != nil {
			return nil, fmt.Errorf("assign GetMemberFields row: %w", err)
		}
		items = append(items, item)
	}
	if err := rows.Err(); err != nil {
		return nil, fmt.Errorf("close GetMemberFieldsBatch rows: %w", err)
	}
	// err is necessarily nil here (both error paths checked above).
	return items, err
}
|
||||||
|
|
||||||
|
// insertMemberFieldSQL inserts one member_fields row (the id column is
// generated by the database) and returns the full inserted row.
const insertMemberFieldSQL = `INSERT INTO member_fields
(member_id, name, entries) VALUES
($1, $2, $3)
RETURNING *;`

// InsertMemberFieldParams carries the query arguments in positional order
// ($1 = MemberID, $2 = Name, $3 = Entries).
type InsertMemberFieldParams struct {
	MemberID string
	Name     string
	Entries  []FieldEntry
}

// InsertMemberFieldRow mirrors the RETURNING * row; field order must match
// the member_fields table's column order because Scan binds positionally.
type InsertMemberFieldRow struct {
	MemberID string       `json:"member_id"`
	ID       int          `json:"id"`
	Name     string       `json:"name"`
	Entries  []FieldEntry `json:"entries"`
}

// InsertMemberField implements Querier.InsertMemberField.
func (q *DBQuerier) InsertMemberField(ctx context.Context, params InsertMemberFieldParams) (InsertMemberFieldRow, error) {
	// pggen tags the context with the query name under a string key.
	ctx = context.WithValue(ctx, "pggen_query_name", "InsertMemberField")
	// Entries is wrapped in an initialized composite-array transcoder so pgx
	// can encode it as the '_field_entry' array type.
	row := q.conn.QueryRow(ctx, insertMemberFieldSQL, params.MemberID, params.Name, q.types.newFieldEntryArrayInit(params.Entries))
	var item InsertMemberFieldRow
	entriesArray := q.types.newFieldEntryArray()
	if err := row.Scan(&item.MemberID, &item.ID, &item.Name, entriesArray); err != nil {
		return item, fmt.Errorf("query InsertMemberField: %w", err)
	}
	if err := entriesArray.AssignTo(&item.Entries); err != nil {
		return item, fmt.Errorf("assign InsertMemberField row: %w", err)
	}
	return item, nil
}

// InsertMemberFieldBatch implements Querier.InsertMemberFieldBatch.
// It only queues the query; results are read later via InsertMemberFieldScan.
func (q *DBQuerier) InsertMemberFieldBatch(batch genericBatch, params InsertMemberFieldParams) {
	batch.Queue(insertMemberFieldSQL, params.MemberID, params.Name, q.types.newFieldEntryArrayInit(params.Entries))
}

// InsertMemberFieldScan implements Querier.InsertMemberFieldScan.
// It reads the result of a previously queued InsertMemberFieldBatch call.
func (q *DBQuerier) InsertMemberFieldScan(results pgx.BatchResults) (InsertMemberFieldRow, error) {
	row := results.QueryRow()
	var item InsertMemberFieldRow
	entriesArray := q.types.newFieldEntryArray()
	if err := row.Scan(&item.MemberID, &item.ID, &item.Name, entriesArray); err != nil {
		return item, fmt.Errorf("scan InsertMemberFieldBatch row: %w", err)
	}
	if err := entriesArray.AssignTo(&item.Entries); err != nil {
		return item, fmt.Errorf("assign InsertMemberField row: %w", err)
	}
	return item, nil
}
|
||||||
|
|
||||||
|
// textPreferrer wraps a pgtype.ValueTranscoder and sets the preferred encoding
// format to text instead binary (the default). pggen uses the text format
// when the OID is unknownOID because the binary format requires the OID.
// Typically occurs if the results from QueryAllDataTypes aren't passed to
// NewQuerierConfig.
type textPreferrer struct {
	pgtype.ValueTranscoder
	typeName string // Postgres type name, reported via TypeName
}

// PreferredParamFormat implements pgtype.ParamFormatPreferrer.
func (t textPreferrer) PreferredParamFormat() int16 { return pgtype.TextFormatCode }

// NewTypeValue returns a fresh transcoder of the same underlying type, keeping
// the text-format preference and type name of the original.
func (t textPreferrer) NewTypeValue() pgtype.Value {
	return textPreferrer{ValueTranscoder: pgtype.NewValue(t.ValueTranscoder).(pgtype.ValueTranscoder), typeName: t.typeName}
}

// TypeName returns the wrapped Postgres type's name.
func (t textPreferrer) TypeName() string {
	return t.typeName
}

// unknownOID means we don't know the OID for a type. This is okay for decoding
// because pgx call DecodeText or DecodeBinary without requiring the OID. For
// encoding parameters, pggen uses textPreferrer if the OID is unknown.
const unknownOID = 0
|
21
backend/db/queries/queries.user.sql
Normal file
21
backend/db/queries/queries.user.sql
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
-- Fetch a single user by primary key.
-- name: GetUserByID :one
SELECT * FROM users WHERE id = pggen.arg('id');

-- Fetch a single user by username.
-- name: GetUserByUsername :one
SELECT * FROM users WHERE username = pggen.arg('username');

-- Replace a user's names and pronouns arrays in one statement and return the
-- updated row.
-- name: UpdateUserNamesPronouns :one
UPDATE users SET
names = pggen.arg('names'),
pronouns = pggen.arg('pronouns')
WHERE id = pggen.arg('id')
RETURNING *;

-- List a user's custom fields, oldest (lowest id) first.
-- name: GetUserFields :many
SELECT * FROM user_fields WHERE user_id = pggen.arg('user_id') ORDER BY id ASC;

-- Insert one user field (id is generated by the database) and return the
-- inserted row.
-- name: InsertUserField :one
INSERT INTO user_fields
(user_id, name, entries) VALUES
(pggen.arg('user_id'), pggen.arg('name'), pggen.arg('entries'))
RETURNING *;
|
310
backend/db/queries/queries.user.sql.go
Normal file
310
backend/db/queries/queries.user.sql.go
Normal file
|
@ -0,0 +1,310 @@
|
||||||
|
// Code generated by pggen. DO NOT EDIT.
|
||||||
|
|
||||||
|
package queries
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"github.com/jackc/pgx/v4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// getUserByIDSQL fetches a single users row by primary key.
const getUserByIDSQL = `SELECT * FROM users WHERE id = $1;`

// GetUserByIDRow mirrors one row of the users table. Field order must match
// the table's column order because Scan binds positionally.
type GetUserByIDRow struct {
	ID              string         `json:"id"`
	Username        string         `json:"username"`
	DisplayName     *string        `json:"display_name"`
	Bio             *string        `json:"bio"`
	AvatarUrls      []string       `json:"avatar_urls"`
	Links           []string       `json:"links"`
	Discord         *string        `json:"discord"`
	DiscordUsername *string        `json:"discord_username"`
	MaxInvites      int32          `json:"max_invites"`
	Names           []FieldEntry   `json:"names"`
	Pronouns        []PronounEntry `json:"pronouns"`
}

// GetUserByID implements Querier.GetUserByID.
func (q *DBQuerier) GetUserByID(ctx context.Context, id string) (GetUserByIDRow, error) {
	// pggen tags the context with the query name under a string key.
	ctx = context.WithValue(ctx, "pggen_query_name", "GetUserByID")
	row := q.conn.QueryRow(ctx, getUserByIDSQL, id)
	var item GetUserByIDRow
	// Composite-array columns are scanned into transcoders, then converted
	// to the typed slices via AssignTo below.
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("query GetUserByID: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetUserByID row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetUserByID row: %w", err)
	}
	return item, nil
}

// GetUserByIDBatch implements Querier.GetUserByIDBatch.
// It only queues the query; results are read later via GetUserByIDScan.
func (q *DBQuerier) GetUserByIDBatch(batch genericBatch, id string) {
	batch.Queue(getUserByIDSQL, id)
}

// GetUserByIDScan implements Querier.GetUserByIDScan.
// It reads the result of a previously queued GetUserByIDBatch call.
func (q *DBQuerier) GetUserByIDScan(results pgx.BatchResults) (GetUserByIDRow, error) {
	row := results.QueryRow()
	var item GetUserByIDRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("scan GetUserByIDBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetUserByID row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetUserByID row: %w", err)
	}
	return item, nil
}
|
||||||
|
|
||||||
|
// getUserByUsernameSQL fetches a single users row by username.
const getUserByUsernameSQL = `SELECT * FROM users WHERE username = $1;`

// GetUserByUsernameRow mirrors one row of the users table. Field order must
// match the table's column order because Scan binds positionally.
type GetUserByUsernameRow struct {
	ID              string         `json:"id"`
	Username        string         `json:"username"`
	DisplayName     *string        `json:"display_name"`
	Bio             *string        `json:"bio"`
	AvatarUrls      []string       `json:"avatar_urls"`
	Links           []string       `json:"links"`
	Discord         *string        `json:"discord"`
	DiscordUsername *string        `json:"discord_username"`
	MaxInvites      int32          `json:"max_invites"`
	Names           []FieldEntry   `json:"names"`
	Pronouns        []PronounEntry `json:"pronouns"`
}

// GetUserByUsername implements Querier.GetUserByUsername.
func (q *DBQuerier) GetUserByUsername(ctx context.Context, username string) (GetUserByUsernameRow, error) {
	// pggen tags the context with the query name under a string key.
	ctx = context.WithValue(ctx, "pggen_query_name", "GetUserByUsername")
	row := q.conn.QueryRow(ctx, getUserByUsernameSQL, username)
	var item GetUserByUsernameRow
	// Composite-array columns are scanned into transcoders, then converted
	// to the typed slices via AssignTo below.
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("query GetUserByUsername: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetUserByUsername row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetUserByUsername row: %w", err)
	}
	return item, nil
}

// GetUserByUsernameBatch implements Querier.GetUserByUsernameBatch.
// It only queues the query; results are read later via GetUserByUsernameScan.
func (q *DBQuerier) GetUserByUsernameBatch(batch genericBatch, username string) {
	batch.Queue(getUserByUsernameSQL, username)
}

// GetUserByUsernameScan implements Querier.GetUserByUsernameScan.
// It reads the result of a previously queued GetUserByUsernameBatch call.
func (q *DBQuerier) GetUserByUsernameScan(results pgx.BatchResults) (GetUserByUsernameRow, error) {
	row := results.QueryRow()
	var item GetUserByUsernameRow
	namesArray := q.types.newFieldEntryArray()
	pronounsArray := q.types.newPronounEntryArray()
	if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray); err != nil {
		return item, fmt.Errorf("scan GetUserByUsernameBatch row: %w", err)
	}
	if err := namesArray.AssignTo(&item.Names); err != nil {
		return item, fmt.Errorf("assign GetUserByUsername row: %w", err)
	}
	if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
		return item, fmt.Errorf("assign GetUserByUsername row: %w", err)
	}
	return item, nil
}
|
||||||
|
|
||||||
|
const updateUserNamesPronounsSQL = `UPDATE users SET
|
||||||
|
names = $1,
|
||||||
|
pronouns = $2
|
||||||
|
WHERE id = $3
|
||||||
|
RETURNING *;`
|
||||||
|
|
||||||
|
// UpdateUserNamesPronounsParams carries the arguments for
// Querier.UpdateUserNamesPronouns: the replacement name and pronoun
// entries plus the target user's ID.
type UpdateUserNamesPronounsParams struct {
	Names    []FieldEntry
	Pronouns []PronounEntry
	ID       string
}
|
||||||
|
|
||||||
|
// UpdateUserNamesPronounsRow is the row returned by
// Querier.UpdateUserNamesPronouns (the `RETURNING *` of the users table).
// Pointer fields correspond to nullable columns.
type UpdateUserNamesPronounsRow struct {
	ID              string         `json:"id"`
	Username        string         `json:"username"`
	DisplayName     *string        `json:"display_name"`
	Bio             *string        `json:"bio"`
	AvatarUrls      []string       `json:"avatar_urls"`
	Links           []string       `json:"links"`
	Discord         *string        `json:"discord"`
	DiscordUsername *string        `json:"discord_username"`
	MaxInvites      int32          `json:"max_invites"`
	Names           []FieldEntry   `json:"names"`
	Pronouns        []PronounEntry `json:"pronouns"`
}
|
||||||
|
|
||||||
|
// UpdateUserNamesPronouns implements Querier.UpdateUserNamesPronouns.
|
||||||
|
func (q *DBQuerier) UpdateUserNamesPronouns(ctx context.Context, params UpdateUserNamesPronounsParams) (UpdateUserNamesPronounsRow, error) {
|
||||||
|
ctx = context.WithValue(ctx, "pggen_query_name", "UpdateUserNamesPronouns")
|
||||||
|
row := q.conn.QueryRow(ctx, updateUserNamesPronounsSQL, q.types.newFieldEntryArrayInit(params.Names), q.types.newPronounEntryArrayInit(params.Pronouns), params.ID)
|
||||||
|
var item UpdateUserNamesPronounsRow
|
||||||
|
namesArray := q.types.newFieldEntryArray()
|
||||||
|
pronounsArray := q.types.newPronounEntryArray()
|
||||||
|
if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray); err != nil {
|
||||||
|
return item, fmt.Errorf("query UpdateUserNamesPronouns: %w", err)
|
||||||
|
}
|
||||||
|
if err := namesArray.AssignTo(&item.Names); err != nil {
|
||||||
|
return item, fmt.Errorf("assign UpdateUserNamesPronouns row: %w", err)
|
||||||
|
}
|
||||||
|
if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
|
||||||
|
return item, fmt.Errorf("assign UpdateUserNamesPronouns row: %w", err)
|
||||||
|
}
|
||||||
|
return item, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateUserNamesPronounsBatch implements Querier.UpdateUserNamesPronounsBatch.
// It queues updateUserNamesPronounsSQL into batch; decode the result with
// UpdateUserNamesPronounsScan, in queue order.
func (q *DBQuerier) UpdateUserNamesPronounsBatch(batch genericBatch, params UpdateUserNamesPronounsParams) {
	batch.Queue(updateUserNamesPronounsSQL, q.types.newFieldEntryArrayInit(params.Names), q.types.newPronounEntryArrayInit(params.Pronouns), params.ID)
}
|
||||||
|
|
||||||
|
// UpdateUserNamesPronounsScan implements Querier.UpdateUserNamesPronounsScan.
|
||||||
|
func (q *DBQuerier) UpdateUserNamesPronounsScan(results pgx.BatchResults) (UpdateUserNamesPronounsRow, error) {
|
||||||
|
row := results.QueryRow()
|
||||||
|
var item UpdateUserNamesPronounsRow
|
||||||
|
namesArray := q.types.newFieldEntryArray()
|
||||||
|
pronounsArray := q.types.newPronounEntryArray()
|
||||||
|
if err := row.Scan(&item.ID, &item.Username, &item.DisplayName, &item.Bio, &item.AvatarUrls, &item.Links, &item.Discord, &item.DiscordUsername, &item.MaxInvites, namesArray, pronounsArray); err != nil {
|
||||||
|
return item, fmt.Errorf("scan UpdateUserNamesPronounsBatch row: %w", err)
|
||||||
|
}
|
||||||
|
if err := namesArray.AssignTo(&item.Names); err != nil {
|
||||||
|
return item, fmt.Errorf("assign UpdateUserNamesPronouns row: %w", err)
|
||||||
|
}
|
||||||
|
if err := pronounsArray.AssignTo(&item.Pronouns); err != nil {
|
||||||
|
return item, fmt.Errorf("assign UpdateUserNamesPronouns row: %w", err)
|
||||||
|
}
|
||||||
|
return item, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// getUserFieldsSQL fetches every profile field belonging to one user,
// oldest (lowest id) first.
const getUserFieldsSQL = `SELECT * FROM user_fields WHERE user_id = $1 ORDER BY id ASC;`
|
||||||
|
|
||||||
|
// GetUserFieldsRow is one row returned by Querier.GetUserFields.
// All scalar fields are pointers because the generator treats the
// `SELECT *` columns as nullable.
type GetUserFieldsRow struct {
	UserID  *string      `json:"user_id"`
	ID      *int         `json:"id"`
	Name    *string      `json:"name"`
	Entries []FieldEntry `json:"entries"`
}
|
||||||
|
|
||||||
|
// GetUserFields implements Querier.GetUserFields.
|
||||||
|
func (q *DBQuerier) GetUserFields(ctx context.Context, userID string) ([]GetUserFieldsRow, error) {
|
||||||
|
ctx = context.WithValue(ctx, "pggen_query_name", "GetUserFields")
|
||||||
|
rows, err := q.conn.Query(ctx, getUserFieldsSQL, userID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("query GetUserFields: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
items := []GetUserFieldsRow{}
|
||||||
|
entriesArray := q.types.newFieldEntryArray()
|
||||||
|
for rows.Next() {
|
||||||
|
var item GetUserFieldsRow
|
||||||
|
if err := rows.Scan(&item.UserID, &item.ID, &item.Name, entriesArray); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan GetUserFields row: %w", err)
|
||||||
|
}
|
||||||
|
if err := entriesArray.AssignTo(&item.Entries); err != nil {
|
||||||
|
return nil, fmt.Errorf("assign GetUserFields row: %w", err)
|
||||||
|
}
|
||||||
|
items = append(items, item)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("close GetUserFields rows: %w", err)
|
||||||
|
}
|
||||||
|
return items, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetUserFieldsBatch implements Querier.GetUserFieldsBatch.
// It queues getUserFieldsSQL into batch; the matching result must be
// decoded later with GetUserFieldsScan, in queue order.
func (q *DBQuerier) GetUserFieldsBatch(batch genericBatch, userID string) {
	batch.Queue(getUserFieldsSQL, userID)
}
|
||||||
|
|
||||||
|
// GetUserFieldsScan implements Querier.GetUserFieldsScan.
|
||||||
|
func (q *DBQuerier) GetUserFieldsScan(results pgx.BatchResults) ([]GetUserFieldsRow, error) {
|
||||||
|
rows, err := results.Query()
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("query GetUserFieldsBatch: %w", err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
items := []GetUserFieldsRow{}
|
||||||
|
entriesArray := q.types.newFieldEntryArray()
|
||||||
|
for rows.Next() {
|
||||||
|
var item GetUserFieldsRow
|
||||||
|
if err := rows.Scan(&item.UserID, &item.ID, &item.Name, entriesArray); err != nil {
|
||||||
|
return nil, fmt.Errorf("scan GetUserFieldsBatch row: %w", err)
|
||||||
|
}
|
||||||
|
if err := entriesArray.AssignTo(&item.Entries); err != nil {
|
||||||
|
return nil, fmt.Errorf("assign GetUserFields row: %w", err)
|
||||||
|
}
|
||||||
|
items = append(items, item)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, fmt.Errorf("close GetUserFieldsBatch rows: %w", err)
|
||||||
|
}
|
||||||
|
return items, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// insertUserFieldSQL inserts one profile field (the id column is
// generated by the database) and returns the complete new row.
const insertUserFieldSQL = `INSERT INTO user_fields
(user_id, name, entries) VALUES
($1, $2, $3)
RETURNING *;`
|
||||||
|
|
||||||
|
// InsertUserFieldParams carries the arguments for
// Querier.InsertUserField: the owning user, the field name, and its
// entries.
type InsertUserFieldParams struct {
	UserID  string
	Name    string
	Entries []FieldEntry
}
|
||||||
|
|
||||||
|
// InsertUserFieldRow is the row returned by Querier.InsertUserField
// (the `RETURNING *` of the user_fields table, including the generated id).
type InsertUserFieldRow struct {
	UserID  string       `json:"user_id"`
	ID      int          `json:"id"`
	Name    string       `json:"name"`
	Entries []FieldEntry `json:"entries"`
}
|
||||||
|
|
||||||
|
// InsertUserField implements Querier.InsertUserField.
|
||||||
|
func (q *DBQuerier) InsertUserField(ctx context.Context, params InsertUserFieldParams) (InsertUserFieldRow, error) {
|
||||||
|
ctx = context.WithValue(ctx, "pggen_query_name", "InsertUserField")
|
||||||
|
row := q.conn.QueryRow(ctx, insertUserFieldSQL, params.UserID, params.Name, q.types.newFieldEntryArrayInit(params.Entries))
|
||||||
|
var item InsertUserFieldRow
|
||||||
|
entriesArray := q.types.newFieldEntryArray()
|
||||||
|
if err := row.Scan(&item.UserID, &item.ID, &item.Name, entriesArray); err != nil {
|
||||||
|
return item, fmt.Errorf("query InsertUserField: %w", err)
|
||||||
|
}
|
||||||
|
if err := entriesArray.AssignTo(&item.Entries); err != nil {
|
||||||
|
return item, fmt.Errorf("assign InsertUserField row: %w", err)
|
||||||
|
}
|
||||||
|
return item, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// InsertUserFieldBatch implements Querier.InsertUserFieldBatch.
// It queues insertUserFieldSQL into batch; decode the result with
// InsertUserFieldScan, in queue order.
func (q *DBQuerier) InsertUserFieldBatch(batch genericBatch, params InsertUserFieldParams) {
	batch.Queue(insertUserFieldSQL, params.UserID, params.Name, q.types.newFieldEntryArrayInit(params.Entries))
}
|
||||||
|
|
||||||
|
// InsertUserFieldScan implements Querier.InsertUserFieldScan.
|
||||||
|
func (q *DBQuerier) InsertUserFieldScan(results pgx.BatchResults) (InsertUserFieldRow, error) {
|
||||||
|
row := results.QueryRow()
|
||||||
|
var item InsertUserFieldRow
|
||||||
|
entriesArray := q.types.newFieldEntryArray()
|
||||||
|
if err := row.Scan(&item.UserID, &item.ID, &item.Name, entriesArray); err != nil {
|
||||||
|
return item, fmt.Errorf("scan InsertUserFieldBatch row: %w", err)
|
||||||
|
}
|
||||||
|
if err := entriesArray.AssignTo(&item.Entries); err != nil {
|
||||||
|
return item, fmt.Errorf("assign InsertUserField row: %w", err)
|
||||||
|
}
|
||||||
|
return item, nil
|
||||||
|
}
|
|
@ -4,6 +4,7 @@ import (
|
||||||
"context"
|
"context"
|
||||||
"regexp"
|
"regexp"
|
||||||
|
|
||||||
|
"codeberg.org/u1f320/pronouns.cc/backend/db/queries"
|
||||||
"emperror.dev/errors"
|
"emperror.dev/errors"
|
||||||
"github.com/bwmarrin/discordgo"
|
"github.com/bwmarrin/discordgo"
|
||||||
"github.com/georgysavva/scany/pgxscan"
|
"github.com/georgysavva/scany/pgxscan"
|
||||||
|
@ -98,7 +99,7 @@ func (db *DB) DiscordUser(ctx context.Context, discordID string) (u User, err er
|
||||||
return u, nil
|
return u, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (u *User) UpdateFromDiscord(ctx context.Context, db pgxscan.Querier, du *discordgo.User) error {
|
func (u *User) UpdateFromDiscord(ctx context.Context, db querier, du *discordgo.User) error {
|
||||||
builder := sq.Update("users").
|
builder := sq.Update("users").
|
||||||
Set("discord", du.ID).
|
Set("discord", du.ID).
|
||||||
Set("discord_username", du.String()).
|
Set("discord_username", du.String()).
|
||||||
|
@ -113,14 +114,26 @@ func (u *User) UpdateFromDiscord(ctx context.Context, db pgxscan.Querier, du *di
|
||||||
return pgxscan.Get(ctx, db, u, sql, args...)
|
return pgxscan.Get(ctx, db, u, sql, args...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (db *DB) getUser(ctx context.Context, q pgxscan.Querier, id xid.ID) (u User, err error) {
|
func (db *DB) getUser(ctx context.Context, q querier, id xid.ID) (u User, err error) {
|
||||||
err = pgxscan.Get(ctx, q, &u, "select * from users where id = $1", id)
|
qu, err := queries.NewQuerier(q).GetUserByID(ctx, id.String())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if errors.Cause(err) == pgx.ErrNoRows {
|
if errors.Cause(err) == pgx.ErrNoRows {
|
||||||
return u, ErrUserNotFound
|
return u, ErrUserNotFound
|
||||||
}
|
}
|
||||||
|
|
||||||
return u, errors.Cause(err)
|
return u, errors.Wrap(err, "getting user from database")
|
||||||
|
}
|
||||||
|
|
||||||
|
u = User{
|
||||||
|
ID: id,
|
||||||
|
Username: qu.Username,
|
||||||
|
DisplayName: qu.DisplayName,
|
||||||
|
Bio: qu.Bio,
|
||||||
|
AvatarURLs: qu.AvatarUrls,
|
||||||
|
Links: qu.Links,
|
||||||
|
Discord: qu.Discord,
|
||||||
|
DiscordUsername: qu.DiscordUsername,
|
||||||
|
MaxInvites: int(qu.MaxInvites),
|
||||||
}
|
}
|
||||||
|
|
||||||
return u, nil
|
return u, nil
|
||||||
|
|
|
@ -14,7 +14,7 @@ create table users (
|
||||||
discord text unique, -- for Discord oauth
|
discord text unique, -- for Discord oauth
|
||||||
discord_username text,
|
discord_username text,
|
||||||
|
|
||||||
max_invites int default 10
|
max_invites int not null default 10
|
||||||
);
|
);
|
||||||
|
|
||||||
create table user_names (
|
create table user_names (
|
||||||
|
|
35
scripts/migrate/004_field_arrays.sql
Normal file
35
scripts/migrate/004_field_arrays.sql
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
-- +migrate Up
|
||||||
|
|
||||||
|
-- 2023-01-03: change names, pronouns, and fields to be columns instead of separate tables
|
||||||
|
|
||||||
|
create type field_entry as (
|
||||||
|
value text,
|
||||||
|
status int
|
||||||
|
);
|
||||||
|
|
||||||
|
create type pronoun_entry as (
|
||||||
|
value text,
|
||||||
|
display_value text,
|
||||||
|
status int
|
||||||
|
);
|
||||||
|
|
||||||
|
alter table users add column names field_entry[];
|
||||||
|
alter table users add column pronouns pronoun_entry[];
|
||||||
|
|
||||||
|
alter table members add column names field_entry[];
|
||||||
|
alter table members add column pronouns pronoun_entry[];
|
||||||
|
|
||||||
|
alter table user_fields add column entries field_entry[];
|
||||||
|
alter table member_fields add column entries field_entry[];
|
||||||
|
|
||||||
|
alter table user_fields drop column favourite;
|
||||||
|
alter table user_fields drop column okay;
|
||||||
|
alter table user_fields drop column jokingly;
|
||||||
|
alter table user_fields drop column friends_only;
|
||||||
|
alter table user_fields drop column avoid;
|
||||||
|
|
||||||
|
alter table member_fields drop column favourite;
|
||||||
|
alter table member_fields drop column okay;
|
||||||
|
alter table member_fields drop column jokingly;
|
||||||
|
alter table member_fields drop column friends_only;
|
||||||
|
alter table member_fields drop column avoid;
|
Loading…
Reference in a new issue