feat: migrations

trev 2025-04-10 19:15:21 -04:00
parent 1667b78a0a
commit f9772bce47
35 changed files with 1144 additions and 370 deletions

View File

@ -13,4 +13,3 @@
# Server # Server
/server/client/ /server/client/
/server/tmp/ /server/tmp/
/server/internal/handlers/client/client/

View File

@ -5,7 +5,7 @@
<Avatar.Root class="flex h-full w-full items-center justify-center"> <Avatar.Root class="flex h-full w-full items-center justify-center">
<Avatar.Image <Avatar.Image
src={userState.user?.profilePictureId ? "/file/" + userState.user.profilePictureId : null} src={userState.user?.profilePictureId ? '/file/' + userState.user.profilePictureId : null}
alt={`${userState.user?.username}'s avatar`} alt={`${userState.user?.username}'s avatar`}
class="rounded-full" class="rounded-full"
/> />

View File

@ -3,7 +3,7 @@
import { cn } from '$lib/utils'; import { cn } from '$lib/utils';
import type { Snippet } from 'svelte'; import type { Snippet } from 'svelte';
type me = MouseEvent & { currentTarget: EventTarget & HTMLButtonElement; } type me = MouseEvent & { currentTarget: EventTarget & HTMLButtonElement };
let { let {
className, className,

View File

@ -21,7 +21,7 @@
let page: number = $state(1); let page: number = $state(1);
onMount(async() => { onMount(async () => {
await tick(); await tick();
replaceState('', `${page}`); replaceState('', `${page}`);
}); });

View File

@ -2,119 +2,118 @@ import { decode } from 'cbor2';
import { page } from '$app/state'; import { page } from '$app/state';
interface CreateCredential extends Credential { interface CreateCredential extends Credential {
response: AuthenticatorAttestationResponse response: AuthenticatorAttestationResponse;
} }
interface AttestationObject { interface AttestationObject {
authData: Uint8Array, authData: Uint8Array;
fmt: string, fmt: string;
attStmt: any
} }
interface DecodedPublicKeyObject { interface DecodedPublicKeyObject {
[key: number]: number | Uint8Array [key: number]: number | Uint8Array;
} }
export async function createPasskey(username: string, userid: number, challenge: string) { export async function createPasskey(username: string, userid: number, challenge: string) {
const challengeBuffer = Uint8Array.from(challenge, c => c.charCodeAt(0)); const challengeBuffer = Uint8Array.from(challenge, (c) => c.charCodeAt(0));
const idBuffer = Uint8Array.from(userid.toString(), c => c.charCodeAt(0)); const idBuffer = Uint8Array.from(userid.toString(), (c) => c.charCodeAt(0));
const credential = await navigator.credentials.create({ const credential = (await navigator.credentials.create({
publicKey: { publicKey: {
challenge: challengeBuffer, challenge: challengeBuffer,
rp: { id: page.url.hostname, name: "TrevStack" }, rp: { id: page.url.hostname, name: 'TrevStack' },
user: { user: {
id: idBuffer, id: idBuffer,
name: username, name: username,
displayName: username displayName: username
}, },
pubKeyCredParams: [ pubKeyCredParams: [
{ {
type: 'public-key', type: 'public-key',
alg: -7 alg: -7
}, },
{ {
type: 'public-key', type: 'public-key',
alg: -257 alg: -257
} }
], ],
timeout: 60000, timeout: 60000,
attestation: 'none' attestation: 'none'
} }
}) as CreateCredential | null; })) as CreateCredential | null;
if (!credential) { if (!credential) {
throw new Error('Could not create passkey'); throw new Error('Could not create passkey');
} }
console.log(credential.id) console.log(credential.id);
//console.log(credential.type); //console.log(credential.type);
const utf8Decoder = new TextDecoder('utf-8'); const utf8Decoder = new TextDecoder('utf-8');
const decodedClientData = utf8Decoder.decode(credential.response.clientDataJSON) const decodedClientData = utf8Decoder.decode(credential.response.clientDataJSON);
const clientDataObj = JSON.parse(decodedClientData); const clientDataObj = JSON.parse(decodedClientData);
console.log(clientDataObj); console.log(clientDataObj);
const attestationObject = new Uint8Array(credential.response.attestationObject) const attestationObject = new Uint8Array(credential.response.attestationObject);
const decodedAttestationObject = decode(attestationObject) as AttestationObject; const decodedAttestationObject = decode(attestationObject) as AttestationObject;
const { authData } = decodedAttestationObject; const { authData } = decodedAttestationObject;
// get the length of the credential ID // get the length of the credential ID
const dataView = new DataView(new ArrayBuffer(2)); const dataView = new DataView(new ArrayBuffer(2));
const idLenBytes = authData.slice(53, 55); const idLenBytes = authData.slice(53, 55);
idLenBytes.forEach((value, index) => dataView.setUint8(index, value)); idLenBytes.forEach((value, index) => dataView.setUint8(index, value));
const credentialIdLength = dataView.getUint16(0); const credentialIdLength = dataView.getUint16(0);
// get the credential ID // get the credential ID
const credentialId = authData.slice(55, 55 + credentialIdLength); // const credentialId = authData.slice(55, 55 + credentialIdLength);
// get the public key object // get the public key object
const publicKeyBytes = authData.slice(55 + credentialIdLength); const publicKeyBytes = authData.slice(55 + credentialIdLength);
console.log(publicKeyBytes); console.log(publicKeyBytes);
// the publicKeyBytes are encoded again as CBOR // the publicKeyBytes are encoded again as CBOR
const publicKeyObject = new Uint8Array(publicKeyBytes.buffer) const publicKeyObject = new Uint8Array(publicKeyBytes.buffer);
const decodedPublicKeyObject = decode(publicKeyObject) as DecodedPublicKeyObject; const decodedPublicKeyObject = decode(publicKeyObject) as DecodedPublicKeyObject;
console.log(decodedPublicKeyObject); console.log(decodedPublicKeyObject);
return { return {
id: credential.id, id: credential.id,
publicKey: publicKeyBytes, publicKey: publicKeyBytes,
algorithm: decodedPublicKeyObject[3] algorithm: decodedPublicKeyObject[3]
} };
} }
interface GetCredential extends Credential { interface GetCredential extends Credential {
response: AuthenticatorAssertionResponse response: AuthenticatorAssertionResponse;
} }
export async function getPasskey(passkeyids: string[], challenge: string) { export async function getPasskey(passkeyids: string[], challenge: string) {
const challengeBuffer = Uint8Array.from(challenge, c => c.charCodeAt(0)); const challengeBuffer = Uint8Array.from(challenge, (c) => c.charCodeAt(0));
const credential = await navigator.credentials.get({ const credential = (await navigator.credentials.get({
publicKey: { publicKey: {
challenge: challengeBuffer, challenge: challengeBuffer,
allowCredentials: passkeyids.map((passkeyid) => { allowCredentials: passkeyids.map((passkeyid) => {
return { return {
id: Uint8Array.from(passkeyid, c => c.charCodeAt(0)), id: Uint8Array.from(passkeyid, (c) => c.charCodeAt(0)),
type: 'public-key', type: 'public-key'
} };
}), }),
timeout: 60000, timeout: 60000
} }
}) as GetCredential | null; })) as GetCredential | null;
if (!credential) { if (!credential) {
throw new Error('Could not get passkey'); throw new Error('Could not get passkey');
} }
const signature = credential.response.signature; const signature = credential.response.signature;
return { return {
signature signature
} };
} }

View File

@ -7,8 +7,6 @@
import { Separator } from 'bits-ui'; import { Separator } from 'bits-ui';
import { toast } from 'svelte-sonner'; import { toast } from 'svelte-sonner';
import { userState } from '$lib/sharedState.svelte'; import { userState } from '$lib/sharedState.svelte';
import { createPasskey } from '$lib/webauthn';
import { page } from '$app/state';
import Avatar from '$lib/ui/Avatar.svelte'; import Avatar from '$lib/ui/Avatar.svelte';
let openChangeProfilePicture = $state(false); let openChangeProfilePicture = $state(false);
@ -19,7 +17,7 @@
<div class="m-auto flex w-96 flex-col gap-4 p-4"> <div class="m-auto flex w-96 flex-col gap-4 p-4">
<div class="flex items-center justify-center gap-4"> <div class="flex items-center justify-center gap-4">
<div <div
class="outline-surface-2 bg-text text-crust h-9 w-9 select-none rounded-full text-sm outline outline-offset-2" class="outline-surface-2 bg-text text-crust h-9 w-9 rounded-full text-sm outline outline-offset-2 select-none"
> >
<Avatar /> <Avatar />
</div> </div>

View File

@ -16,5 +16,5 @@ export default defineConfig({
} }
}, },
host: '0.0.0.0' host: '0.0.0.0'
}, }
}); });

12
flake.lock generated
View File

@ -79,11 +79,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1741513245, "lastModified": 1744098102,
"narHash": "sha256-7rTAMNTY1xoBwz0h7ZMtEcd8LELk9R5TzBPoHuhNSCk=", "narHash": "sha256-tzCdyIJj9AjysC3OuKA+tMD/kDEDAF9mICPDU7ix0JA=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "e3e32b642a31e6714ec1b712de8c91a3352ce7e1", "rev": "c8cd81426f45942bb2906d5ed2fe21d2f19d95b7",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -154,11 +154,11 @@
"nixpkgs": "nixpkgs_2" "nixpkgs": "nixpkgs_2"
}, },
"locked": { "locked": {
"lastModified": 1744260822, "lastModified": 1744262571,
"narHash": "sha256-PMrIBuIM12tlG9hkoMHtFSDPfaXEXKziom/XZyFWXEo=", "narHash": "sha256-zYYx5DCQuyGsEKGStakQW1eSXPofRA3LeufIEVhE/4Q=",
"owner": "spotdemo4", "owner": "spotdemo4",
"repo": "treli", "repo": "treli",
"rev": "41ecacdfc1e720ac96d1d02200b8541314dd09ea", "rev": "00b55f3cdc82e61a6c4f46c6cb745c71203ccde3",
"type": "github" "type": "github"
}, },
"original": { "original": {

View File

@ -1,52 +0,0 @@
root = "."
testdata_dir = "testdata"
tmp_dir = "tmp"
[build]
args_bin = []
bin = "./tmp/main"
cmd = "go build -tags dev -o ./tmp/main ."
delay = 1000
exclude_dir = ["assets", "tmp", "vendor", "testdata"]
exclude_file = []
exclude_regex = ["_test.go"]
exclude_unchanged = false
follow_symlink = false
full_bin = ""
include_dir = []
include_ext = ["go", "tpl", "tmpl", "html"]
include_file = []
kill_delay = "0s"
log = "build-errors.log"
poll = false
poll_interval = 0
post_cmd = []
pre_cmd = []
rerun = false
rerun_delay = 500
send_interrupt = false
stop_on_error = false
[color]
app = ""
build = "yellow"
main = "magenta"
runner = "green"
watcher = "cyan"
[log]
main_only = false
silent = false
time = false
[misc]
clean_on_exit = false
[proxy]
app_port = 0
enabled = false
proxy_port = 0
[screen]
clear_on_rebuild = false
keep_scroll = true

1
server/.gitignore vendored
View File

@ -1,3 +1,2 @@
/client/ /client/
/tmp/ /tmp/
/internal/handlers/client/client/

View File

@ -10,11 +10,7 @@ replacements:
nullable: true nullable: true
replace: "int64" replace: "int64"
sqlite: sql:
output: internal/models dialect: sqlite
dir: db
psql:
output: internal/models
mysql:
output: internal/models output: internal/models

View File

@ -0,0 +1,35 @@
-- migrate:up
CREATE TABLE user(
id INTEGER PRIMARY KEY NOT NULL,
username TEXT NOT NULL,
password TEXT NOT NULL,
profile_picture_id INTEGER,
FOREIGN KEY(profile_picture_id) REFERENCES file(id)
);
CREATE TABLE file(
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
data BLOB NOT NULL,
user_id INTEGER NOT NULL,
FOREIGN KEY(user_id) REFERENCES user(id)
);
CREATE TABLE item(
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
added DATETIME NOT NULL,
description TEXT NOT NULL,
price REAL NOT NULL,
quantity INTEGER NOT NULL,
user_id INTEGER NOT NULL,
FOREIGN KEY(user_id) REFERENCES user(id)
);
-- migrate:down
drop table user;
drop table file;
drop table item;

31
server/db/schema.sql Normal file
View File

@ -0,0 +1,31 @@
CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key);
CREATE TABLE user(
id INTEGER PRIMARY KEY NOT NULL,
username TEXT NOT NULL,
password TEXT NOT NULL,
profile_picture_id INTEGER,
FOREIGN KEY(profile_picture_id) REFERENCES file(id)
);
CREATE TABLE file(
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
data BLOB NOT NULL,
user_id INTEGER NOT NULL,
FOREIGN KEY(user_id) REFERENCES user(id)
);
CREATE TABLE item(
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
added DATETIME NOT NULL,
description TEXT NOT NULL,
price REAL NOT NULL,
quantity INTEGER NOT NULL,
user_id INTEGER NOT NULL,
FOREIGN KEY(user_id) REFERENCES user(id)
);
-- Dbmate schema migrations
INSERT INTO "schema_migrations" (version) VALUES
('20250410195416');

View File

@ -0,0 +1,37 @@
package database
import (
"embed"
"log"
"net/url"
"github.com/amacneil/dbmate/v2/pkg/dbmate"
_ "github.com/spotdemo4/dbmate-sqlite-modernc/pkg/driver/sqlite" // Modernc sqlite
)
func Migrate(url *url.URL, dbFS *embed.FS) error {
if dbFS == nil {
return nil
}
db := dbmate.New(url)
db.Driver()
db.FS = dbFS
log.Println("Migrations:")
migrations, err := db.FindMigrations()
if err != nil {
return err
}
for _, m := range migrations {
log.Println(m.Version, m.FilePath)
}
log.Println("\nApplying...")
err = db.CreateAndMigrate()
if err != nil {
return err
}
return nil
}
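For context, a minimal sketch (not part of this commit) of how the Migrate helper above could be driven with an embedded migrations directory. The embed pattern matches the one added in prod.go; the import path and the sqlite URL value are assumptions for illustration, and the real wiring happens in main.go via the package-level dbFS variable.

// Sketch only: example DATABASE_URL and import path are assumed.
package main

import (
	"embed"
	"log"
	"net/url"

	"github.com/spotdemo4/trevstack/server/internal/database"
)

//go:embed db/migrations/*.sql
var migrations embed.FS

func main() {
	u, err := url.Parse("sqlite:./data/trevstack.db") // example value, not from the repo
	if err != nil {
		log.Fatal(err)
	}
	// Applies any pending migrations; passing a nil FS (as in dev builds) is a no-op.
	if err := database.Migrate(u, &migrations); err != nil {
		log.Fatal(err)
	}
}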

View File

@ -2,14 +2,16 @@ package database
import ( import (
"database/sql" "database/sql"
"fmt"
"net/url"
"runtime"
_ "github.com/lib/pq" // Postgres _ "github.com/lib/pq" // Postgres
"github.com/stephenafamo/bob" "github.com/stephenafamo/bob"
) )
func NewPostgresConnection(user, pass, host, port, name string) (*bob.DB, error) { func NewPostgresConnection(url *url.URL) (*bob.DB, error) {
dsn := "host=" + host + " user=" + user + " password=" + pass + " dbname=" + name + " port=" + port + " sslmode=disable TimeZone=UTC" db, err := sql.Open("postgres", postgresConnectionString(url))
db, err := sql.Open("postgres", dsn)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -18,3 +20,55 @@ func NewPostgresConnection(user, pass, host, port, name string) (*bob.DB, error)
return &bobdb, nil return &bobdb, nil
} }
func postgresConnectionString(u *url.URL) string {
hostname := u.Hostname()
port := u.Port()
query := u.Query()
// support socket parameter for consistency with mysql
if query.Get("socket") != "" {
query.Set("host", query.Get("socket"))
query.Del("socket")
}
// default hostname
if hostname == "" && query.Get("host") == "" {
switch runtime.GOOS {
case "linux":
query.Set("host", "/var/run/postgresql")
case "darwin", "freebsd", "dragonfly", "openbsd", "netbsd":
query.Set("host", "/tmp")
default:
hostname = "localhost"
}
}
// host param overrides url hostname
if query.Get("host") != "" {
hostname = ""
}
// always specify a port
if query.Get("port") != "" {
port = query.Get("port")
query.Del("port")
}
if port == "" {
switch u.Scheme {
case "redshift":
port = "5439"
default:
port = "5432"
}
}
// generate output URL
out, _ := url.Parse(u.String())
// force scheme back to postgres if there was another postgres-compatible scheme
out.Scheme = "postgres"
out.Host = fmt.Sprintf("%s:%s", hostname, port)
out.RawQuery = query.Encode()
return out.String()
}
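As a sanity check on the logic above, a hypothetical test (not in the commit) that would sit next to postgresConnectionString in the database package; the input and expected URLs are assumptions based on the default-port branch.

package database

import (
	"net/url"
	"testing"
)

// A URL without an explicit port should come back with the default 5432
// filled in, with user info, path and query left untouched.
func TestPostgresConnectionString(t *testing.T) {
	in, err := url.Parse("postgres://user:pass@localhost/trevstack?sslmode=disable")
	if err != nil {
		t.Fatal(err)
	}
	got := postgresConnectionString(in)
	want := "postgres://user:pass@localhost:5432/trevstack?sslmode=disable"
	if got != want {
		t.Fatalf("got %q, want %q", got, want)
	}
}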

View File

@ -2,30 +2,15 @@ package database
import ( import (
"database/sql" "database/sql"
"os" "net/url"
"path/filepath" "regexp"
"github.com/stephenafamo/bob" "github.com/stephenafamo/bob"
_ "modernc.org/sqlite" // Sqlite _ "modernc.org/sqlite" // Sqlite
) )
func NewSQLiteConnection(name string) (*bob.DB, error) { func NewSQLiteConnection(url *url.URL) (*bob.DB, error) {
// Find config diretory db, err := sql.Open("sqlite", sqliteConnectionString(url))
configDir, err := os.UserConfigDir()
if err != nil {
return nil, err
}
// Create database directory if not exists
settingsPath := filepath.Join(configDir, "trevstack")
err = os.MkdirAll(settingsPath, 0766)
if err != nil {
return nil, err
}
// Open database
dbPath := filepath.Join(settingsPath, name)
db, err := sql.Open("sqlite", dbPath)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -35,3 +20,47 @@ func NewSQLiteConnection(name string) (*bob.DB, error) {
return &bobdb, nil return &bobdb, nil
} }
// ConnectionString converts a URL into a valid connection string
func sqliteConnectionString(u *url.URL) string {
// duplicate URL and remove scheme
newURL := *u
newURL.Scheme = ""
if newURL.Opaque == "" && newURL.Path != "" {
// When the DSN is in the form "scheme:/absolute/path" or
// "scheme://absolute/path" or "scheme:///absolute/path", url.Parse
// will consider the file path as :
// - "absolute" as the hostname
// - "path" (and the rest until "?") as the URL path.
// Instead, when the DSN is in the form "scheme:", the (relative) file
// path is stored in the "Opaque" field.
// See: https://pkg.go.dev/net/url#URL
//
// While Opaque is not escaped, the URL Path is. So, if .Path contains
// the file path, we need to un-escape it, and rebuild the full path.
newURL.Opaque = "//" + newURL.Host + mustUnescapePath(newURL.Path)
newURL.Path = ""
}
// trim duplicate leading slashes
str := regexp.MustCompile("^//+").ReplaceAllString(newURL.String(), "/")
return str
}
// mustUnescapePath unescapes a URL path, and panics if it fails.
// It is used in cases where we are parsing a generated path.
func mustUnescapePath(s string) string {
if s == "" {
panic("missing path")
}
path, err := url.PathUnescape(s)
if err != nil {
panic(err)
}
return path
}
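Similarly, a hypothetical example (not in the commit) of the URL-to-connection-string conversion this gives for SQLite; the concrete URLs are assumptions, one relative and one absolute.

package database

import (
	"net/url"
	"testing"
)

// Relative paths ride in the Opaque part of the URL and pass through as-is;
// absolute paths have their duplicate leading slashes collapsed to one.
func TestSqliteConnectionString(t *testing.T) {
	cases := map[string]string{
		"sqlite:./trevstack.db":         "./trevstack.db",
		"sqlite:///var/db/trevstack.db": "/var/db/trevstack.db",
	}
	for in, want := range cases {
		u, err := url.Parse(in)
		if err != nil {
			t.Fatal(err)
		}
		if got := sqliteConnectionString(u); got != want {
			t.Fatalf("%s: got %q, want %q", in, got, want)
		}
	}
}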

View File

@ -1,17 +1,23 @@
package client package client
import ( import (
"embed"
"io/fs"
"net/http" "net/http"
"github.com/spotdemo4/trevstack/server/internal/interceptors" "github.com/spotdemo4/trevstack/server/internal/interceptors"
) )
var embedfs *http.FileSystem func NewClientHandler(key string, clientFS *embed.FS) http.Handler {
if clientFS == nil {
func NewClientHandler(key string) http.Handler { return http.NotFoundHandler()
if embedfs != nil {
return interceptors.WithAuthRedirect(http.FileServer(*embedfs), key)
} }
return http.NotFoundHandler() client, err := fs.Sub(clientFS, "client")
if err != nil {
return http.NotFoundHandler()
}
fs := http.FS(client)
return interceptors.WithAuthRedirect(http.FileServer(fs), key)
} }

View File

@ -1,24 +0,0 @@
//go:build !dev
package client
import (
"embed"
"io/fs"
"log"
"net/http"
)
//go:embed all:client
var eclient embed.FS
func init() {
log.Println("Initializing client for production")
client, err := fs.Sub(eclient, "client")
if err != nil {
log.Fatalf("failed to get client: %v", err)
}
fs := http.FS(client)
embedfs = &fs
}

View File

@ -9,7 +9,6 @@ import (
"connectrpc.com/connect" "connectrpc.com/connect"
"github.com/aarondl/opt/omit" "github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/spotdemo4/trevstack/server/internal/interceptors" "github.com/spotdemo4/trevstack/server/internal/interceptors"
"github.com/spotdemo4/trevstack/server/internal/models" "github.com/spotdemo4/trevstack/server/internal/models"
itemv1 "github.com/spotdemo4/trevstack/server/internal/services/item/v1" itemv1 "github.com/spotdemo4/trevstack/server/internal/services/item/v1"
@ -26,9 +25,9 @@ func itemToConnect(item *models.Item) *itemv1.Item {
return &itemv1.Item{ return &itemv1.Item{
Id: &item.ID, Id: &item.ID,
Name: item.Name, Name: item.Name,
Description: item.Description.GetOrZero(), Description: item.Description,
Price: item.Price.GetOrZero(), Price: item.Price,
Quantity: int32(item.Quantity.GetOrZero()), Quantity: int32(item.Quantity),
Added: timestamp, Added: timestamp,
} }
} }
@ -133,9 +132,9 @@ func (h *Handler) CreateItem(ctx context.Context, req *connect.Request[itemv1.Cr
item, err := models.Items.Insert(&models.ItemSetter{ item, err := models.Items.Insert(&models.ItemSetter{
Name: omit.From(req.Msg.Item.Name), Name: omit.From(req.Msg.Item.Name),
Description: omitnull.From(req.Msg.Item.Description), Description: omit.From(req.Msg.Item.Description),
Price: omitnull.From(req.Msg.Item.Price), Price: omit.From(req.Msg.Item.Price),
Quantity: omitnull.From(int64(req.Msg.Item.Quantity)), Quantity: omit.From(int64(req.Msg.Item.Quantity)),
Added: omit.From(time.Now()), Added: omit.From(time.Now()),
UserID: omit.From(userid), UserID: omit.From(userid),
}).One(ctx, h.db) }).One(ctx, h.db)
@ -165,9 +164,9 @@ func (h *Handler) UpdateItem(ctx context.Context, req *connect.Request[itemv1.Up
// Set col // Set col
models.ItemSetter{ models.ItemSetter{
Name: omit.From(req.Msg.Item.Name), Name: omit.From(req.Msg.Item.Name),
Description: omitnull.From(req.Msg.Item.Description), Description: omit.From(req.Msg.Item.Description),
Price: omitnull.From(req.Msg.Item.Price), Price: omit.From(req.Msg.Item.Price),
Quantity: omitnull.From(int64(req.Msg.Item.Quantity)), Quantity: omit.From(int64(req.Msg.Item.Quantity)),
}.UpdateMod(), }.UpdateMod(),
// Where // Where

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package models package models
@ -15,19 +15,22 @@ import (
) )
var TableNames = struct { var TableNames = struct {
Files string Files string
Items string Items string
Users string SchemaMigrations string
Users string
}{ }{
Files: "file", Files: "file",
Items: "item", Items: "item",
Users: "user", SchemaMigrations: "schema_migrations",
Users: "user",
} }
var ColumnNames = struct { var ColumnNames = struct {
Files fileColumnNames Files fileColumnNames
Items itemColumnNames Items itemColumnNames
Users userColumnNames SchemaMigrations schemaMigrationColumnNames
Users userColumnNames
}{ }{
Files: fileColumnNames{ Files: fileColumnNames{
ID: "id", ID: "id",
@ -44,6 +47,9 @@ var ColumnNames = struct {
Quantity: "quantity", Quantity: "quantity",
UserID: "user_id", UserID: "user_id",
}, },
SchemaMigrations: schemaMigrationColumnNames{
Version: "version",
},
Users: userColumnNames{ Users: userColumnNames{
ID: "id", ID: "id",
Username: "username", Username: "username",
@ -60,18 +66,21 @@ var (
) )
func Where[Q sqlite.Filterable]() struct { func Where[Q sqlite.Filterable]() struct {
Files fileWhere[Q] Files fileWhere[Q]
Items itemWhere[Q] Items itemWhere[Q]
Users userWhere[Q] SchemaMigrations schemaMigrationWhere[Q]
Users userWhere[Q]
} { } {
return struct { return struct {
Files fileWhere[Q] Files fileWhere[Q]
Items itemWhere[Q] Items itemWhere[Q]
Users userWhere[Q] SchemaMigrations schemaMigrationWhere[Q]
Users userWhere[Q]
}{ }{
Files: buildFileWhere[Q](FileColumns), Files: buildFileWhere[Q](FileColumns),
Items: buildItemWhere[Q](ItemColumns), Items: buildItemWhere[Q](ItemColumns),
Users: buildUserWhere[Q](UserColumns), SchemaMigrations: buildSchemaMigrationWhere[Q](SchemaMigrationColumns),
Users: buildUserWhere[Q](UserColumns),
} }
} }

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package models package models
@ -11,5 +11,8 @@ var _ bob.HookableType = &File{}
// Make sure the type Item runs hooks after queries // Make sure the type Item runs hooks after queries
var _ bob.HookableType = &Item{} var _ bob.HookableType = &Item{}
// Make sure the type SchemaMigration runs hooks after queries
var _ bob.HookableType = &SchemaMigration{}
// Make sure the type User runs hooks after queries // Make sure the type User runs hooks after queries
var _ bob.HookableType = &User{} var _ bob.HookableType = &User{}

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package factory package factory
@ -12,9 +12,10 @@ import (
type contextKey string type contextKey string
var ( var (
fileCtx = newContextual[*models.File]("file") fileCtx = newContextual[*models.File]("file")
itemCtx = newContextual[*models.Item]("item") itemCtx = newContextual[*models.Item]("item")
userCtx = newContextual[*models.User]("user") schemaMigrationCtx = newContextual[*models.SchemaMigration]("schemaMigration")
userCtx = newContextual[*models.User]("user")
) )
// Contextual is a convienience wrapper around context.WithValue and context.Value // Contextual is a convienience wrapper around context.WithValue and context.Value

View File

@ -1,12 +1,13 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package factory package factory
type Factory struct { type Factory struct {
baseFileMods FileModSlice baseFileMods FileModSlice
baseItemMods ItemModSlice baseItemMods ItemModSlice
baseUserMods UserModSlice baseSchemaMigrationMods SchemaMigrationModSlice
baseUserMods UserModSlice
} }
func New() *Factory { func New() *Factory {
@ -37,6 +38,18 @@ func (f *Factory) NewItem(mods ...ItemMod) *ItemTemplate {
return o return o
} }
func (f *Factory) NewSchemaMigration(mods ...SchemaMigrationMod) *SchemaMigrationTemplate {
o := &SchemaMigrationTemplate{f: f}
if f != nil {
f.baseSchemaMigrationMods.Apply(o)
}
SchemaMigrationModSlice(mods).Apply(o)
return o
}
func (f *Factory) NewUser(mods ...UserMod) *UserTemplate { func (f *Factory) NewUser(mods ...UserMod) *UserTemplate {
o := &UserTemplate{f: f} o := &UserTemplate{f: f}
@ -65,6 +78,14 @@ func (f *Factory) AddBaseItemMod(mods ...ItemMod) {
f.baseItemMods = append(f.baseItemMods, mods...) f.baseItemMods = append(f.baseItemMods, mods...)
} }
func (f *Factory) ClearBaseSchemaMigrationMods() {
f.baseSchemaMigrationMods = nil
}
func (f *Factory) AddBaseSchemaMigrationMod(mods ...SchemaMigrationMod) {
f.baseSchemaMigrationMods = append(f.baseSchemaMigrationMods, mods...)
}
func (f *Factory) ClearBaseUserMods() { func (f *Factory) ClearBaseUserMods() {
f.baseUserMods = nil f.baseUserMods = nil
} }

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package factory package factory

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package factory package factory

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package factory package factory

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package factory package factory
@ -8,9 +8,7 @@ import (
"testing" "testing"
"time" "time"
"github.com/aarondl/opt/null"
"github.com/aarondl/opt/omit" "github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/jaswdr/faker/v2" "github.com/jaswdr/faker/v2"
models "github.com/spotdemo4/trevstack/server/internal/models" models "github.com/spotdemo4/trevstack/server/internal/models"
"github.com/stephenafamo/bob" "github.com/stephenafamo/bob"
@ -40,9 +38,9 @@ type ItemTemplate struct {
ID func() int64 ID func() int64
Name func() string Name func() string
Added func() time.Time Added func() time.Time
Description func() null.Val[string] Description func() string
Price func() null.Val[float32] Price func() float32
Quantity func() null.Val[int64] Quantity func() int64
UserID func() int64 UserID func() int64
r itemR r itemR
@ -132,13 +130,13 @@ func (o ItemTemplate) BuildSetter() *models.ItemSetter {
m.Added = omit.From(o.Added()) m.Added = omit.From(o.Added())
} }
if o.Description != nil { if o.Description != nil {
m.Description = omitnull.FromNull(o.Description()) m.Description = omit.From(o.Description())
} }
if o.Price != nil { if o.Price != nil {
m.Price = omitnull.FromNull(o.Price()) m.Price = omit.From(o.Price())
} }
if o.Quantity != nil { if o.Quantity != nil {
m.Quantity = omitnull.FromNull(o.Quantity()) m.Quantity = omit.From(o.Quantity())
} }
if o.UserID != nil { if o.UserID != nil {
m.UserID = omit.From(o.UserID()) m.UserID = omit.From(o.UserID())
@ -189,6 +187,15 @@ func ensureCreatableItem(m *models.ItemSetter) {
if m.Added.IsUnset() { if m.Added.IsUnset() {
m.Added = omit.From(random_time_Time(nil)) m.Added = omit.From(random_time_Time(nil))
} }
if m.Description.IsUnset() {
m.Description = omit.From(random_string(nil))
}
if m.Price.IsUnset() {
m.Price = omit.From(random_float32(nil))
}
if m.Quantity.IsUnset() {
m.Quantity = omit.From(random_int64(nil))
}
if m.UserID.IsUnset() { if m.UserID.IsUnset() {
m.UserID = omit.From(random_int64(nil)) m.UserID = omit.From(random_int64(nil))
} }
@ -429,14 +436,14 @@ func (m itemMods) RandomAdded(f *faker.Faker) ItemMod {
} }
// Set the model columns to this value // Set the model columns to this value
func (m itemMods) Description(val null.Val[string]) ItemMod { func (m itemMods) Description(val string) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Description = func() null.Val[string] { return val } o.Description = func() string { return val }
}) })
} }
// Set the Column from the function // Set the Column from the function
func (m itemMods) DescriptionFunc(f func() null.Val[string]) ItemMod { func (m itemMods) DescriptionFunc(f func() string) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Description = f o.Description = f
}) })
@ -453,29 +460,21 @@ func (m itemMods) UnsetDescription() ItemMod {
// if faker is nil, a default faker is used // if faker is nil, a default faker is used
func (m itemMods) RandomDescription(f *faker.Faker) ItemMod { func (m itemMods) RandomDescription(f *faker.Faker) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Description = func() null.Val[string] { o.Description = func() string {
if f == nil { return random_string(f)
f = &defaultFaker
}
if f.Bool() {
return null.FromPtr[string](nil)
}
return null.From(random_string(f))
} }
}) })
} }
// Set the model columns to this value // Set the model columns to this value
func (m itemMods) Price(val null.Val[float32]) ItemMod { func (m itemMods) Price(val float32) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Price = func() null.Val[float32] { return val } o.Price = func() float32 { return val }
}) })
} }
// Set the Column from the function // Set the Column from the function
func (m itemMods) PriceFunc(f func() null.Val[float32]) ItemMod { func (m itemMods) PriceFunc(f func() float32) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Price = f o.Price = f
}) })
@ -492,29 +491,21 @@ func (m itemMods) UnsetPrice() ItemMod {
// if faker is nil, a default faker is used // if faker is nil, a default faker is used
func (m itemMods) RandomPrice(f *faker.Faker) ItemMod { func (m itemMods) RandomPrice(f *faker.Faker) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Price = func() null.Val[float32] { o.Price = func() float32 {
if f == nil { return random_float32(f)
f = &defaultFaker
}
if f.Bool() {
return null.FromPtr[float32](nil)
}
return null.From(random_float32(f))
} }
}) })
} }
// Set the model columns to this value // Set the model columns to this value
func (m itemMods) Quantity(val null.Val[int64]) ItemMod { func (m itemMods) Quantity(val int64) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Quantity = func() null.Val[int64] { return val } o.Quantity = func() int64 { return val }
}) })
} }
// Set the Column from the function // Set the Column from the function
func (m itemMods) QuantityFunc(f func() null.Val[int64]) ItemMod { func (m itemMods) QuantityFunc(f func() int64) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Quantity = f o.Quantity = f
}) })
@ -531,16 +522,8 @@ func (m itemMods) UnsetQuantity() ItemMod {
// if faker is nil, a default faker is used // if faker is nil, a default faker is used
func (m itemMods) RandomQuantity(f *faker.Faker) ItemMod { func (m itemMods) RandomQuantity(f *faker.Faker) ItemMod {
return ItemModFunc(func(o *ItemTemplate) { return ItemModFunc(func(o *ItemTemplate) {
o.Quantity = func() null.Val[int64] { o.Quantity = func() int64 {
if f == nil { return random_int64(f)
f = &defaultFaker
}
if f.Bool() {
return null.FromPtr[int64](nil)
}
return null.From(random_int64(f))
} }
}) })
} }

View File

@ -0,0 +1,276 @@
// Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package factory
import (
"context"
"testing"
"github.com/aarondl/opt/omit"
"github.com/jaswdr/faker/v2"
models "github.com/spotdemo4/trevstack/server/internal/models"
"github.com/stephenafamo/bob"
)
type SchemaMigrationMod interface {
Apply(*SchemaMigrationTemplate)
}
type SchemaMigrationModFunc func(*SchemaMigrationTemplate)
func (f SchemaMigrationModFunc) Apply(n *SchemaMigrationTemplate) {
f(n)
}
type SchemaMigrationModSlice []SchemaMigrationMod
func (mods SchemaMigrationModSlice) Apply(n *SchemaMigrationTemplate) {
for _, f := range mods {
f.Apply(n)
}
}
// SchemaMigrationTemplate is an object representing the database table.
// all columns are optional and should be set by mods
type SchemaMigrationTemplate struct {
Version func() string
f *Factory
}
// Apply mods to the SchemaMigrationTemplate
func (o *SchemaMigrationTemplate) Apply(mods ...SchemaMigrationMod) {
for _, mod := range mods {
mod.Apply(o)
}
}
// toModel returns an *models.SchemaMigration
// this does nothing with the relationship templates
func (o SchemaMigrationTemplate) toModel() *models.SchemaMigration {
m := &models.SchemaMigration{}
if o.Version != nil {
m.Version = o.Version()
}
return m
}
// toModels returns an models.SchemaMigrationSlice
// this does nothing with the relationship templates
func (o SchemaMigrationTemplate) toModels(number int) models.SchemaMigrationSlice {
m := make(models.SchemaMigrationSlice, number)
for i := range m {
m[i] = o.toModel()
}
return m
}
// setModelRels creates and sets the relationships on *models.SchemaMigration
// according to the relationships in the template. Nothing is inserted into the db
func (t SchemaMigrationTemplate) setModelRels(o *models.SchemaMigration) {}
// BuildSetter returns an *models.SchemaMigrationSetter
// this does nothing with the relationship templates
func (o SchemaMigrationTemplate) BuildSetter() *models.SchemaMigrationSetter {
m := &models.SchemaMigrationSetter{}
if o.Version != nil {
m.Version = omit.From(o.Version())
}
return m
}
// BuildManySetter returns an []*models.SchemaMigrationSetter
// this does nothing with the relationship templates
func (o SchemaMigrationTemplate) BuildManySetter(number int) []*models.SchemaMigrationSetter {
m := make([]*models.SchemaMigrationSetter, number)
for i := range m {
m[i] = o.BuildSetter()
}
return m
}
// Build returns an *models.SchemaMigration
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use SchemaMigrationTemplate.Create
func (o SchemaMigrationTemplate) Build() *models.SchemaMigration {
m := o.toModel()
o.setModelRels(m)
return m
}
// BuildMany returns an models.SchemaMigrationSlice
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use SchemaMigrationTemplate.CreateMany
func (o SchemaMigrationTemplate) BuildMany(number int) models.SchemaMigrationSlice {
m := make(models.SchemaMigrationSlice, number)
for i := range m {
m[i] = o.Build()
}
return m
}
func ensureCreatableSchemaMigration(m *models.SchemaMigrationSetter) {
if m.Version.IsUnset() {
m.Version = omit.From(random_string(nil))
}
}
// insertOptRels creates and inserts any optional the relationships on *models.SchemaMigration
// according to the relationships in the template.
// any required relationship should have already exist on the model
func (o *SchemaMigrationTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.SchemaMigration) (context.Context, error) {
var err error
return ctx, err
}
// Create builds a schemaMigration and inserts it into the database
// Relations objects are also inserted and placed in the .R field
func (o *SchemaMigrationTemplate) Create(ctx context.Context, exec bob.Executor) (*models.SchemaMigration, error) {
_, m, err := o.create(ctx, exec)
return m, err
}
// MustCreate builds a schemaMigration and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o *SchemaMigrationTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.SchemaMigration {
_, m, err := o.create(ctx, exec)
if err != nil {
panic(err)
}
return m
}
// CreateOrFail builds a schemaMigration and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o *SchemaMigrationTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.SchemaMigration {
tb.Helper()
_, m, err := o.create(ctx, exec)
if err != nil {
tb.Fatal(err)
return nil
}
return m
}
// create builds a schemaMigration and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// this returns a context that includes the newly inserted model
func (o *SchemaMigrationTemplate) create(ctx context.Context, exec bob.Executor) (context.Context, *models.SchemaMigration, error) {
var err error
opt := o.BuildSetter()
ensureCreatableSchemaMigration(opt)
m, err := models.SchemaMigrations.Insert(opt).One(ctx, exec)
if err != nil {
return ctx, nil, err
}
ctx = schemaMigrationCtx.WithValue(ctx, m)
ctx, err = o.insertOptRels(ctx, exec, m)
return ctx, m, err
}
// CreateMany builds multiple schemaMigrations and inserts them into the database
// Relations objects are also inserted and placed in the .R field
func (o SchemaMigrationTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.SchemaMigrationSlice, error) {
_, m, err := o.createMany(ctx, exec, number)
return m, err
}
// MustCreateMany builds multiple schemaMigrations and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o SchemaMigrationTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.SchemaMigrationSlice {
_, m, err := o.createMany(ctx, exec, number)
if err != nil {
panic(err)
}
return m
}
// CreateManyOrFail builds multiple schemaMigrations and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o SchemaMigrationTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.SchemaMigrationSlice {
tb.Helper()
_, m, err := o.createMany(ctx, exec, number)
if err != nil {
tb.Fatal(err)
return nil
}
return m
}
// createMany builds multiple schemaMigrations and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// this returns a context that includes the newly inserted models
func (o SchemaMigrationTemplate) createMany(ctx context.Context, exec bob.Executor, number int) (context.Context, models.SchemaMigrationSlice, error) {
var err error
m := make(models.SchemaMigrationSlice, number)
for i := range m {
ctx, m[i], err = o.create(ctx, exec)
if err != nil {
return ctx, nil, err
}
}
return ctx, m, nil
}
// SchemaMigration has methods that act as mods for the SchemaMigrationTemplate
var SchemaMigrationMods schemaMigrationMods
type schemaMigrationMods struct{}
func (m schemaMigrationMods) RandomizeAllColumns(f *faker.Faker) SchemaMigrationMod {
return SchemaMigrationModSlice{
SchemaMigrationMods.RandomVersion(f),
}
}
// Set the model columns to this value
func (m schemaMigrationMods) Version(val string) SchemaMigrationMod {
return SchemaMigrationModFunc(func(o *SchemaMigrationTemplate) {
o.Version = func() string { return val }
})
}
// Set the Column from the function
func (m schemaMigrationMods) VersionFunc(f func() string) SchemaMigrationMod {
return SchemaMigrationModFunc(func(o *SchemaMigrationTemplate) {
o.Version = f
})
}
// Clear any values for the column
func (m schemaMigrationMods) UnsetVersion() SchemaMigrationMod {
return SchemaMigrationModFunc(func(o *SchemaMigrationTemplate) {
o.Version = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m schemaMigrationMods) RandomVersion(f *faker.Faker) SchemaMigrationMod {
return SchemaMigrationModFunc(func(o *SchemaMigrationTemplate) {
o.Version = func() string {
return random_string(f)
}
})
}

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package factory package factory

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package models package models

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package models package models
@ -11,9 +11,7 @@ import (
"io" "io"
"time" "time"
"github.com/aarondl/opt/null"
"github.com/aarondl/opt/omit" "github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/stephenafamo/bob" "github.com/stephenafamo/bob"
"github.com/stephenafamo/bob/dialect/sqlite" "github.com/stephenafamo/bob/dialect/sqlite"
"github.com/stephenafamo/bob/dialect/sqlite/dialect" "github.com/stephenafamo/bob/dialect/sqlite/dialect"
@ -27,13 +25,13 @@ import (
// Item is an object representing the database table. // Item is an object representing the database table.
type Item struct { type Item struct {
ID int64 `db:"id,pk" ` ID int64 `db:"id,pk" `
Name string `db:"name" ` Name string `db:"name" `
Added time.Time `db:"added" ` Added time.Time `db:"added" `
Description null.Val[string] `db:"description" ` Description string `db:"description" `
Price null.Val[float32] `db:"price" ` Price float32 `db:"price" `
Quantity null.Val[int64] `db:"quantity" ` Quantity int64 `db:"quantity" `
UserID int64 `db:"user_id" ` UserID int64 `db:"user_id" `
R itemR `db:"-" ` R itemR `db:"-" `
} }
@ -101,9 +99,9 @@ type itemWhere[Q sqlite.Filterable] struct {
ID sqlite.WhereMod[Q, int64] ID sqlite.WhereMod[Q, int64]
Name sqlite.WhereMod[Q, string] Name sqlite.WhereMod[Q, string]
Added sqlite.WhereMod[Q, time.Time] Added sqlite.WhereMod[Q, time.Time]
Description sqlite.WhereNullMod[Q, string] Description sqlite.WhereMod[Q, string]
Price sqlite.WhereNullMod[Q, float32] Price sqlite.WhereMod[Q, float32]
Quantity sqlite.WhereNullMod[Q, int64] Quantity sqlite.WhereMod[Q, int64]
UserID sqlite.WhereMod[Q, int64] UserID sqlite.WhereMod[Q, int64]
} }
@ -116,9 +114,9 @@ func buildItemWhere[Q sqlite.Filterable](cols itemColumns) itemWhere[Q] {
ID: sqlite.Where[Q, int64](cols.ID), ID: sqlite.Where[Q, int64](cols.ID),
Name: sqlite.Where[Q, string](cols.Name), Name: sqlite.Where[Q, string](cols.Name),
Added: sqlite.Where[Q, time.Time](cols.Added), Added: sqlite.Where[Q, time.Time](cols.Added),
Description: sqlite.WhereNull[Q, string](cols.Description), Description: sqlite.Where[Q, string](cols.Description),
Price: sqlite.WhereNull[Q, float32](cols.Price), Price: sqlite.Where[Q, float32](cols.Price),
Quantity: sqlite.WhereNull[Q, int64](cols.Quantity), Quantity: sqlite.Where[Q, int64](cols.Quantity),
UserID: sqlite.Where[Q, int64](cols.UserID), UserID: sqlite.Where[Q, int64](cols.UserID),
} }
} }
@ -135,13 +133,13 @@ type itemErrors struct {
// All values are optional, and do not have to be set // All values are optional, and do not have to be set
// Generated columns are not included // Generated columns are not included
type ItemSetter struct { type ItemSetter struct {
ID omit.Val[int64] `db:"id,pk" ` ID omit.Val[int64] `db:"id,pk" `
Name omit.Val[string] `db:"name" ` Name omit.Val[string] `db:"name" `
Added omit.Val[time.Time] `db:"added" ` Added omit.Val[time.Time] `db:"added" `
Description omitnull.Val[string] `db:"description" ` Description omit.Val[string] `db:"description" `
Price omitnull.Val[float32] `db:"price" ` Price omit.Val[float32] `db:"price" `
Quantity omitnull.Val[int64] `db:"quantity" ` Quantity omit.Val[int64] `db:"quantity" `
UserID omit.Val[int64] `db:"user_id" ` UserID omit.Val[int64] `db:"user_id" `
} }
func (s ItemSetter) SetColumns() []string { func (s ItemSetter) SetColumns() []string {
@ -188,13 +186,13 @@ func (s ItemSetter) Overwrite(t *Item) {
t.Added, _ = s.Added.Get() t.Added, _ = s.Added.Get()
} }
if !s.Description.IsUnset() { if !s.Description.IsUnset() {
t.Description, _ = s.Description.GetNull() t.Description, _ = s.Description.Get()
} }
if !s.Price.IsUnset() { if !s.Price.IsUnset() {
t.Price, _ = s.Price.GetNull() t.Price, _ = s.Price.Get()
} }
if !s.Quantity.IsUnset() { if !s.Quantity.IsUnset() {
t.Quantity, _ = s.Quantity.GetNull() t.Quantity, _ = s.Quantity.Get()
} }
if !s.UserID.IsUnset() { if !s.UserID.IsUnset() {
t.UserID, _ = s.UserID.Get() t.UserID, _ = s.UserID.Get()

View File

@ -0,0 +1,361 @@
// Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models
import (
"context"
"io"
"github.com/aarondl/opt/omit"
"github.com/stephenafamo/bob"
"github.com/stephenafamo/bob/dialect/sqlite"
"github.com/stephenafamo/bob/dialect/sqlite/dialect"
"github.com/stephenafamo/bob/dialect/sqlite/dm"
"github.com/stephenafamo/bob/dialect/sqlite/sm"
"github.com/stephenafamo/bob/dialect/sqlite/um"
"github.com/stephenafamo/bob/expr"
)
// SchemaMigration is an object representing the database table.
type SchemaMigration struct {
Version string `db:"version,pk" `
}
// SchemaMigrationSlice is an alias for a slice of pointers to SchemaMigration.
// This should almost always be used instead of []*SchemaMigration.
type SchemaMigrationSlice []*SchemaMigration
// SchemaMigrations contains methods to work with the schema_migrations table
var SchemaMigrations = sqlite.NewTablex[*SchemaMigration, SchemaMigrationSlice, *SchemaMigrationSetter]("", "schema_migrations")
// SchemaMigrationsQuery is a query on the schema_migrations table
type SchemaMigrationsQuery = *sqlite.ViewQuery[*SchemaMigration, SchemaMigrationSlice]
type schemaMigrationColumnNames struct {
Version string
}
var SchemaMigrationColumns = buildSchemaMigrationColumns("schema_migrations")
type schemaMigrationColumns struct {
tableAlias string
Version sqlite.Expression
}
func (c schemaMigrationColumns) Alias() string {
return c.tableAlias
}
func (schemaMigrationColumns) AliasedAs(alias string) schemaMigrationColumns {
return buildSchemaMigrationColumns(alias)
}
func buildSchemaMigrationColumns(alias string) schemaMigrationColumns {
return schemaMigrationColumns{
tableAlias: alias,
Version: sqlite.Quote(alias, "version"),
}
}
type schemaMigrationWhere[Q sqlite.Filterable] struct {
Version sqlite.WhereMod[Q, string]
}
func (schemaMigrationWhere[Q]) AliasedAs(alias string) schemaMigrationWhere[Q] {
return buildSchemaMigrationWhere[Q](buildSchemaMigrationColumns(alias))
}
func buildSchemaMigrationWhere[Q sqlite.Filterable](cols schemaMigrationColumns) schemaMigrationWhere[Q] {
return schemaMigrationWhere[Q]{
Version: sqlite.Where[Q, string](cols.Version),
}
}
var SchemaMigrationErrors = &schemaMigrationErrors{
ErrUniqueSqliteAutoindexSchemaMigrations1: &UniqueConstraintError{s: "sqlite_autoindex_schema_migrations_1"},
}
type schemaMigrationErrors struct {
ErrUniqueSqliteAutoindexSchemaMigrations1 *UniqueConstraintError
}
// SchemaMigrationSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type SchemaMigrationSetter struct {
Version omit.Val[string] `db:"version,pk" `
}
func (s SchemaMigrationSetter) SetColumns() []string {
vals := make([]string, 0, 1)
if !s.Version.IsUnset() {
vals = append(vals, "version")
}
return vals
}
func (s SchemaMigrationSetter) Overwrite(t *SchemaMigration) {
if !s.Version.IsUnset() {
t.Version, _ = s.Version.Get()
}
}
func (s *SchemaMigrationSetter) Apply(q *dialect.InsertQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return SchemaMigrations.BeforeInsertHooks.RunHooks(ctx, exec, s)
})
if len(q.Table.Columns) == 0 {
q.Table.Columns = s.SetColumns()
}
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.Writer, d bob.Dialect, start int) ([]any, error) {
vals := make([]bob.Expression, 0, 1)
if !s.Version.IsUnset() {
vals = append(vals, sqlite.Arg(s.Version))
}
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
}))
}
func (s SchemaMigrationSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return um.Set(s.Expressions()...)
}
func (s SchemaMigrationSetter) Expressions(prefix ...string) []bob.Expression {
exprs := make([]bob.Expression, 0, 1)
if !s.Version.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
sqlite.Quote(append(prefix, "version")...),
sqlite.Arg(s.Version),
}})
}
return exprs
}
// FindSchemaMigration retrieves a single record by primary key
// If cols is empty Find will return all columns.
func FindSchemaMigration(ctx context.Context, exec bob.Executor, VersionPK string, cols ...string) (*SchemaMigration, error) {
if len(cols) == 0 {
return SchemaMigrations.Query(
SelectWhere.SchemaMigrations.Version.EQ(VersionPK),
).One(ctx, exec)
}
return SchemaMigrations.Query(
SelectWhere.SchemaMigrations.Version.EQ(VersionPK),
sm.Columns(SchemaMigrations.Columns().Only(cols...)),
).One(ctx, exec)
}
// SchemaMigrationExists checks the presence of a single record by primary key
func SchemaMigrationExists(ctx context.Context, exec bob.Executor, VersionPK string) (bool, error) {
return SchemaMigrations.Query(
SelectWhere.SchemaMigrations.Version.EQ(VersionPK),
).Exists(ctx, exec)
}
// AfterQueryHook is called after SchemaMigration is retrieved from the database
func (o *SchemaMigration) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
var err error
switch queryType {
case bob.QueryTypeSelect:
ctx, err = SchemaMigrations.AfterSelectHooks.RunHooks(ctx, exec, SchemaMigrationSlice{o})
case bob.QueryTypeInsert:
ctx, err = SchemaMigrations.AfterInsertHooks.RunHooks(ctx, exec, SchemaMigrationSlice{o})
case bob.QueryTypeUpdate:
ctx, err = SchemaMigrations.AfterUpdateHooks.RunHooks(ctx, exec, SchemaMigrationSlice{o})
case bob.QueryTypeDelete:
ctx, err = SchemaMigrations.AfterDeleteHooks.RunHooks(ctx, exec, SchemaMigrationSlice{o})
}
return err
}
// PrimaryKeyVals returns the primary key values of the SchemaMigration
func (o *SchemaMigration) PrimaryKeyVals() bob.Expression {
return sqlite.Arg(o.Version)
}
func (o *SchemaMigration) pkEQ() dialect.Expression {
return sqlite.Quote("schema_migrations", "version").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.Writer, d bob.Dialect, start int) ([]any, error) {
return o.PrimaryKeyVals().WriteSQL(ctx, w, d, start)
}))
}
// Update uses an executor to update the SchemaMigration
func (o *SchemaMigration) Update(ctx context.Context, exec bob.Executor, s *SchemaMigrationSetter) error {
v, err := SchemaMigrations.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
if err != nil {
return err
}
*o = *v
return nil
}
// Delete deletes a single SchemaMigration record with an executor
func (o *SchemaMigration) Delete(ctx context.Context, exec bob.Executor) error {
_, err := SchemaMigrations.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
return err
}
// Reload refreshes the SchemaMigration using the executor
func (o *SchemaMigration) Reload(ctx context.Context, exec bob.Executor) error {
o2, err := SchemaMigrations.Query(
SelectWhere.SchemaMigrations.Version.EQ(o.Version),
).One(ctx, exec)
if err != nil {
return err
}
*o = *o2
return nil
}
// AfterQueryHook is called after SchemaMigrationSlice is retrieved from the database
func (o SchemaMigrationSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
var err error
switch queryType {
case bob.QueryTypeSelect:
ctx, err = SchemaMigrations.AfterSelectHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeInsert:
ctx, err = SchemaMigrations.AfterInsertHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeUpdate:
ctx, err = SchemaMigrations.AfterUpdateHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeDelete:
ctx, err = SchemaMigrations.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}
func (o SchemaMigrationSlice) pkIN() dialect.Expression {
if len(o) == 0 {
return sqlite.Raw("NULL")
}
return sqlite.Quote("schema_migrations", "version").In(bob.ExpressionFunc(func(ctx context.Context, w io.Writer, d bob.Dialect, start int) ([]any, error) {
pkPairs := make([]bob.Expression, len(o))
for i, row := range o {
pkPairs[i] = row.PrimaryKeyVals()
}
return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
}))
}
// copyMatchingRows finds models in the given slice that have the same primary key
// then it first copies the existing relationships from the old model to the new model
// and then replaces the old model in the slice with the new model
func (o SchemaMigrationSlice) copyMatchingRows(from ...*SchemaMigration) {
for i, old := range o {
for _, new := range from {
if new.Version != old.Version {
continue
}
o[i] = new
break
}
}
}
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
func (o SchemaMigrationSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return SchemaMigrations.BeforeUpdateHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *SchemaMigration:
o.copyMatchingRows(retrieved)
case []*SchemaMigration:
o.copyMatchingRows(retrieved...)
case SchemaMigrationSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a SchemaMigration or a slice of SchemaMigration
// then run the AfterUpdateHooks on the slice
_, err = SchemaMigrations.AfterUpdateHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
func (o SchemaMigrationSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return SchemaMigrations.BeforeDeleteHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *SchemaMigration:
o.copyMatchingRows(retrieved)
case []*SchemaMigration:
o.copyMatchingRows(retrieved...)
case SchemaMigrationSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a SchemaMigration or a slice of SchemaMigration
// then run the AfterDeleteHooks on the slice
_, err = SchemaMigrations.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
func (o SchemaMigrationSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals SchemaMigrationSetter) error {
if len(o) == 0 {
return nil
}
_, err := SchemaMigrations.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
return err
}
func (o SchemaMigrationSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
if len(o) == 0 {
return nil
}
_, err := SchemaMigrations.Delete(o.DeleteMod()).Exec(ctx, exec)
return err
}
func (o SchemaMigrationSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
if len(o) == 0 {
return nil
}
o2, err := SchemaMigrations.Query(sm.Where(o.pkIN())).All(ctx, exec)
if err != nil {
return err
}
o.copyMatchingRows(o2...)
return nil
}

View File

@ -1,4 +1,4 @@
// Code generated by BobGen sqlite (devel). DO NOT EDIT. // Code generated by BobGen sql (devel). DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time. // This file is meant to be re-generated in place and/or deleted at any time.
package models package models

View File

@ -3,13 +3,16 @@ package main
import ( import (
"context" "context"
"embed"
"errors" "errors"
"fmt" "fmt"
"log" "log"
"log/slog" "log/slog"
"net/http" "net/http"
"net/url"
"os" "os"
"os/signal" "os/signal"
"strings"
"syscall" "syscall"
"time" "time"
@ -26,6 +29,9 @@ import (
"github.com/spotdemo4/trevstack/server/internal/interceptors" "github.com/spotdemo4/trevstack/server/internal/interceptors"
) )
var clientFS *embed.FS
var dbFS *embed.FS
func main() { func main() {
logger := slog.New(slog.NewTextHandler(os.Stdout, nil)) logger := slog.New(slog.NewTextHandler(os.Stdout, nil))
slog.SetDefault(logger) slog.SetDefault(logger)
@ -36,41 +42,27 @@ func main() {
log.Fatal(err.Error()) log.Fatal(err.Error())
} }
// Migrate database
err = database.Migrate(env.DatabaseUrl, dbFS)
if err != nil {
log.Fatal(err.Error())
}
// Get database // Get database
db := &bob.DB{} db := &bob.DB{}
switch env.DBType { switch env.DatabaseType {
case "postgres": case "postgres":
log.Println("Using Postgres") log.Println("Using Postgres")
if env.DBUser == "" { db, err = database.NewPostgresConnection(env.DatabaseUrl)
log.Fatal("DB_USER is required")
}
if env.DBPass == "" {
log.Fatal("DB_PASS is required")
}
if env.DBHost == "" {
log.Fatal("DB_HOST is required")
}
if env.DBPort == "" {
log.Fatal("DB_PORT is required")
}
if env.DBName == "" {
log.Fatal("DB_NAME is required")
}
db, err = database.NewPostgresConnection(env.DBUser, env.DBPass, env.DBHost, env.DBPort, env.DBName)
if err != nil { if err != nil {
log.Fatalf("failed to connect to postgres: %v", err) log.Fatalf("failed to connect to postgres: %v", err)
} }
case "sqlite": case "sqlite", "sqlite3":
log.Println("Using SQLite") log.Println("Using SQLite")
if env.DBName == "" { db, err = database.NewSQLiteConnection(env.DatabaseUrl)
log.Fatal("DB_NAME is required")
}
db, err = database.NewSQLiteConnection(env.DBName)
if err != nil { if err != nil {
log.Fatalf("failed to connect to sqlite: %v", err) log.Fatalf("failed to connect to sqlite: %v", err)
} }
@ -87,7 +79,7 @@ func main() {
// Serve web interface // Serve web interface
mux := http.NewServeMux() mux := http.NewServeMux()
mux.Handle("/", client.NewClientHandler(env.Key)) mux.Handle("/", client.NewClientHandler(env.Key, clientFS))
mux.Handle("/file/", file.NewFileHandler(db, env.Key)) mux.Handle("/file/", file.NewFileHandler(db, env.Key))
mux.Handle("/grpc/", http.StripPrefix("/grpc", api)) mux.Handle("/grpc/", http.StripPrefix("/grpc", api))
@ -122,14 +114,10 @@ func main() {
} }
type env struct { type env struct {
DBType string Port string
DBUser string Key string
DBPass string DatabaseType string
DBHost string DatabaseUrl *url.URL
DBPort string
DBName string
Port string
Key string
} }
func getEnv() (*env, error) { func getEnv() (*env, error) {
@ -140,14 +128,8 @@ func getEnv() (*env, error) {
// Create // Create
env := env{ env := env{
DBType: os.Getenv("DB_TYPE"), Port: os.Getenv("PORT"),
DBUser: os.Getenv("DB_USER"), Key: os.Getenv("KEY"),
DBPass: os.Getenv("DB_PASS"),
DBHost: os.Getenv("DB_HOST"),
DBPort: os.Getenv("DB_PORT"),
DBName: os.Getenv("DB_NAME"),
Port: os.Getenv("PORT"),
Key: os.Getenv("KEY"),
} }
// Validate // Validate
@ -158,5 +140,19 @@ func getEnv() (*env, error) {
return nil, errors.New("env 'key' not found") return nil, errors.New("env 'key' not found")
} }
// Validate DATABASE_URL
dbstr := os.Getenv("DATABASE_URL")
if dbstr == "" {
return nil, errors.New("env 'DATABASE_URL' not found")
}
dbsp := strings.Split(dbstr, ":")
dburl, err := url.Parse(dbstr)
if err != nil || len(dbsp) < 2 {
return nil, errors.New("env 'DATABASE_URL' formatted incorrectly")
}
env.DatabaseType = dbsp[0]
env.DatabaseUrl = dburl
return &env, nil return &env, nil
} }
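To make the new single-variable configuration concrete, a small sketch of how getEnv's DATABASE_URL is interpreted in place of the old DB_* variables; the URL values are illustrative examples, not taken from the repository.

package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	// Example values only; the scheme before the first ':' becomes
	// env.DatabaseType ("sqlite"/"sqlite3" or "postgres") and the full URL
	// is handed to the matching New*Connection constructor.
	for _, dsn := range []string{
		"sqlite:./data/trevstack.db",
		"postgres://user:pass@localhost:5432/trevstack?sslmode=disable",
	} {
		u, err := url.Parse(dsn)
		if err != nil {
			panic(err)
		}
		dbType := strings.Split(dsn, ":")[0]
		fmt.Println(dbType, "->", u.String())
	}
}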

20
server/prod.go Normal file
View File

@ -0,0 +1,20 @@
//go:build !dev
package main
import (
"embed"
"log"
)
//go:embed all:client
var cFS embed.FS
//go:embed db/migrations/*.sql
var dFS embed.FS
func init() {
log.Println("initializing for production")
clientFS = &cFS
dbFS = &dFS
}