Compare commits

..

43 commits

Author | SHA1 | Message | Date
Mark | 6b4e052bed | Merge pull request #10 from lbryio/eagerloading_depth: Fix multi-depth eager loading of relationships. If a relationship is … | 2020-09-02 15:59:52 -04:00
Mark Beamer Jr | 3db4f30f56 | Fix multi-depth eager loading of relationships. If a relationship is nil, do not add it to the collection for checking the next depth level. | 2020-08-19 23:43:39 -04:00
Niko Storni | 256a6d4225 | update lbry.go library | 2020-03-05 16:29:03 -05:00
Mark | 3f035a9fe2 | Merge pull request #7 from lbryio/guard_null_mod: Allow null query mods for dynamic queries | 2019-07-01 23:56:28 -04:00
Mark Beamer Jr | c01b182839 | Allow null query mods for dynamic queries | 2019-07-01 23:54:54 -04:00
Mark | e3fe976c3c | Merge pull request #6 from lbryio/force_index: Force index | 2019-06-30 23:17:36 -04:00
Mark Beamer Jr | 4e1b83ab39 | Add force index query mod for select query. Fix eager loading casting for nested levels. | 2019-06-30 23:01:24 -04:00
Mark Beamer Jr | f892107dad | missing import | 2019-02-07 20:24:07 -05:00
Mark | fadcbfa8b6 | Merge pull request #3 from lbryio/merge_changes: added support for 1 to 1 relations and added support for n unique key… | 2019-02-07 19:51:35 -05:00
Mark Beamer Jr | 29172e976b | changed from Fatal to Error - copy paste mistake. | 2019-02-07 18:20:23 -05:00
Mark Beamer Jr | eea3d349a7 | added support for 1 to 1 relations and added support for n unique keys for conflict resolution during merging; split out into two functions. | 2019-02-07 18:20:22 -05:00
Mark | d180a095ca | Merge pull request #4 from lbryio/lbrygo_update: Updated to use the latest lbry.go changes | 2019-01-10 20:13:22 -05:00
Mark Beamer Jr | 3baa9e72ca | Updated to use the latest lbry.go changes | 2019-01-10 20:11:42 -05:00
Mark | bc08aa6160 | Merge pull request #2 from lbryio/test_authchanges: make it true | 2018-06-06 19:05:29 -04:00
Mark Beamer Jr | 466f2d5b2c | make it true | 2018-06-06 19:03:50 -04:00
Mark | 45ee5c902f | Merge pull request #1 from lbryio/test_authchanges: reverted default to native password authentication. | 2018-06-06 18:57:40 -04:00
Mark Beamer Jr | 8d4055e3eb | reverted default to native password authentication. | 2018-06-06 18:26:33 -04:00
Alex Grintsvayg | 396f42bc91 | never update created_at, updated_at columns | 2018-04-16 12:41:55 -04:00
Alex Grintsvayg | 77fc991e7b | new location for null.go | 2018-02-22 13:41:05 -05:00
Alex Grintsvayg | e4a52e21b6 | switch to our errors package | 2018-02-07 09:35:46 -05:00
Alex Grintsvayg | 0b0a1f21c2 | allow generic interface or real sql tx | 2017-09-02 11:52:31 -04:00
Alex Grintsvayg | cd445bf2f4 | revert interpolateParams, since we're doing our own logging now | 2017-09-02 11:31:15 -04:00
Alex Grintsvayg | 55f42bc038 | just use interfaces | 2017-08-24 11:58:13 -04:00
Alex Grintsvayg | 544ff7afdd | typo | 2017-08-23 17:03:24 -04:00
Alex Grintsvayg | faec346481 | use interface to enable custom Tx types | 2017-08-23 16:59:29 -04:00
Alex Grintsvayg | 3abac13aeb | export InterpolateParams | 2017-08-23 16:37:33 -04:00
Alex Grintsvayg | 8837a986ee | consistent order to unique key columns | 2017-08-23 13:36:13 -04:00
Aaron L | ce9d13abf0 | Fix hook documentation to include error return (Fix #171) | 2017-08-01 14:40:58 -04:00
Aaron L | ca9f47de8b | Correct nullability for tests in to_one: use the nullability of the fkey column in question to determine nullability for the entire struct to make things easy, otherwise we'd have to pluck out one at a time; this makes the tests pass instead of fail sporadically (Fix #160) | 2017-08-01 14:40:38 -04:00
Aaron L | e339812027 | Stop using aliases in the relationship select: this caused issues with mysql, which doesn't understand the syntax "delete from x as y where y.id = ?" | 2017-08-01 14:39:10 -04:00
Guy Tish | 23f245776d | Added table columns and table names as anonymous struct | 2017-08-01 14:27:50 -04:00
Alex Grintsvayg | 99a3a1d091 | make merge compatible with an existing transaction | 2017-08-01 13:00:14 -04:00
Alex Grintsvayg | 9c8262b702 | fix exists() finisher | 2017-07-20 10:50:55 -04:00
Alex Grintsvayg | 05c7f7d06a | switch to more complete null package, implement nicer sql logging | 2017-07-06 15:15:00 -04:00
Alex Grintsvayg | ed423a3606 | detect autoincrement column, fix lastID in upsert when update doesn't change anything | 2017-06-12 14:02:04 -04:00
Alex Grintsvayg | 09c585cdb1 | don't run delete query when there's nothing to delete | 2017-06-01 09:58:50 -04:00
Alex Grintsvayg | 912a689701 | added unique key detection (mysql only for now), improved merging | 2017-05-12 06:56:48 -04:00
Alex Grintsvayg | 31fe8b6e1d | model merging | 2017-05-10 15:35:05 -04:00
Alex Grintsvayg | b160e5c1f4 | don't error if no rows found | 2017-05-10 11:21:29 -04:00
Alex Grintsvayg | ed43c9078f | made query structs public, added IS NULL to filter | 2017-05-09 15:30:04 -04:00
Alex Grintsvayg | f863ecb48e | add model Filters, FindOrCreate methods, IsNew() | 2017-05-08 16:39:16 -04:00
Alex Grintsvayg | 451723ccb9 | fix imports to point at lbryio repo | 2017-05-08 13:25:15 -04:00
Alex Grintsvayg | 687b0506db | Merge branch 'mysql-uint-fix': detect unsigned int columns in mysql | 2017-05-08 13:12:48 -04:00
66 changed files with 1018 additions and 500 deletions


@ -13,14 +13,14 @@ jobs:
environment:
MYSQL_ROOT_PASSWORD: mysqlpassword
# - image: microsoft/mssql-server-linux:ctp2-0
# environment:
# ACCEPT_EULA: 'Y'
# SA_PASSWORD: 'R@@tr@@t1234'
- image: microsoft/mssql-server-linux:ctp1-4
environment:
ACCEPT_EULA: 'Y'
SA_PASSWORD: 'R@@tr@@t1234'
environment:
GOPATH: /go
ROOTPATH: /go/src/github.com/volatiletech/sqlboiler
ROOTPATH: /go/src/github.com/vattle/sqlboiler
steps:
- run:
@ -72,31 +72,31 @@ jobs:
sleep 1
done
# - run:
# name: Wait for MSSQL
# command: >
# for i in `seq 30`; do
# echo "Waiting for mssql"
# set +o errexit
# sqlcmd -H localhost -U sa -P R@@tr@@t1234 -Q "select * from information_schema.tables;" > /dev/null
# status=$?
# set -o errexit
# if [ $status -eq 0 ]; then
# break
# fi
# if [ $i -eq 30 ]; then
# echo "Failed to wait for mssql"
# exit 1
# fi
# sleep 1
# done
- run:
name: Wait for MSSQL
command: >
for i in `seq 30`; do
echo "Waiting for mssql"
set +o errexit
sqlcmd -H localhost -U sa -P R@@tr@@t1234 -Q "select * from information_schema.tables;" > /dev/null
status=$?
set -o errexit
if [ $status -eq 0 ]; then
break
fi
if [ $i -eq 30 ]; then
echo "Failed to wait for mssql"
exit 1
fi
sleep 1
done
- run:
name: Make GOPATH
command: mkdir -p /go/src/github.com/volatiletech/sqlboiler
command: mkdir -p /go/src/github.com/vattle/sqlboiler
- checkout:
path: /go/src/github.com/volatiletech/sqlboiler
path: /go/src/github.com/vattle/sqlboiler
- run:
name: Create PSQL DB
@ -108,17 +108,17 @@ jobs:
command: |
mysql --host localhost --execute 'create database sqlboiler;'
mysql --host localhost --database sqlboiler < $ROOTPATH/testdata/mysql_test_schema.sql
# - run:
# name: Create MSSQL DB
# command: |
# sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
# sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i $ROOTPATH/testdata/mssql_test_schema.sql
- run:
name: Create MSSQL DB
command: |
sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i $ROOTPATH/testdata/mssql_test_schema.sql
- run:
name: Build SQLBoiler
command: |
cd $ROOTPATH; go get -v -t
cd $ROOTPATH; go build -v github.com/volatiletech/sqlboiler
cd $ROOTPATH; go build -v github.com/vattle/sqlboiler
- run:
name: 'Configure SQLBoiler: PSQL'
@ -126,9 +126,9 @@ jobs:
- run:
name: 'Configure SQLBoiler: MySQL'
command: echo -e '[mysql]\nhost="localhost"\nport=3306\nuser="root"\npass="mysqlpassword"\ndbname="sqlboiler"\nsslmode="false"\n' >> $ROOTPATH/sqlboiler.toml
# - run:
# name: 'Configure SQLBoiler: MSSQL'
# command: echo -e '[mssql]\nhost="localhost"\nport=1433\nuser="sa"\npass="R@@tr@@t1234"\ndbname="sqlboiler"\nsslmode="disable"\n' >> $ROOTPATH/sqlboiler.toml
- run:
name: 'Configure SQLBoiler: MSSQL'
command: echo -e '[mssql]\nhost="localhost"\nport=1433\nuser="sa"\npass="R@@tr@@t1234"\ndbname="sqlboiler"\nsslmode="disable"\n' >> $ROOTPATH/sqlboiler.toml
- run:
name: 'Generate: PSQL'
@ -136,9 +136,9 @@ jobs:
- run:
name: 'Generate: MySQL'
command: cd $ROOTPATH; ./sqlboiler -o mysql mysql
# - run:
# name: 'Generate: MSSQL'
# command: cd $ROOTPATH; ./sqlboiler -o mssql mssql
- run:
name: 'Generate: MSSQL'
command: cd $ROOTPATH; ./sqlboiler -o mssql mssql
- run:
name: Download generated and test deps
@ -150,7 +150,7 @@ jobs:
name: Run Tests
command: |
cd $ROOTPATH
#cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
go test -v -race ./... | tee test_out.txt
- run:
@ -161,3 +161,48 @@ jobs:
- store_test_results:
path: test_results
#test:
# pre:
# - echo -e "[postgres]\nhost=\"localhost\"\nport=5432\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\n" > sqlboiler.toml
# - createdb -U ubuntu sqlboiler
# - psql -U ubuntu sqlboiler < ./testdata/postgres_test_schema.sql
#
# - echo -e "[mysql]\nhost=\"localhost\"\nport=3306\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\nsslmode=\"false\"\n" >> sqlboiler.toml
# - echo "create database sqlboiler;" | mysql -u ubuntu
# - mysql -u ubuntu sqlboiler < ./testdata/mysql_test_schema.sql
#
# - echo -e "[mssql]\nhost=\"localhost\"\nport=1433\nuser=\"sa\"\ndbname=\"sqlboiler\"\nsslmode=\"disable\"\n" >> sqlboiler.toml
# - docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=R@@tr@@t1234' -p 1433:1433 -d --name mssql microsoft/mssql-server-linux
# - sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
# - sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i ./testdata/mssql_test_schema.sql
#
# - ./sqlboiler -o postgres postgres
# - ./sqlboiler -o mysql mysql
# - ./sqlboiler -o mssql mssql
# - cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
# override:
# - go test -v -race ./... > $CIRCLE_ARTIFACTS/gotest.txt
# post:
# - cat $CIRCLE_ARTIFACTS/gotest.txt | go-junit-report > $CIRCLE_TEST_REPORTS/junit.xml
#
#machine:
# environment:
# GODIST: go1.7.linux-amd64.tar.gz
# PATH: /home/ubuntu/.go_workspace/bin:/usr/local/go/bin:/home/ubuntu/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/opt/mssql-tools/bin
# post:
# - mkdir -p download
# - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST
# - sudo rm -rf /usr/local/go
# - sudo tar -C /usr/local -xzf download/$GODIST
#
#dependencies:
# pre:
# - mkdir -p /home/ubuntu/.go_workspace/src/github.com/jstemmer
# - go get -u github.com/jstemmer/go-junit-report
#
# - curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
# - curl https://packages.microsoft.com/config/ubuntu/14.04/prod.list | sudo tee /etc/apt/sources.list.d/msprod.list
# - sudo apt-get update; sudo apt-get install mssql-tools unixodbc-dev
# - docker pull microsoft/mssql-server-linux
# cache_directories:
# - ~/download

.gitignore (vendored): 1 line changed

@ -4,3 +4,4 @@ sqlboiler.toml
models/
testschema.sql
.cover
/.idea

LICENSE: 20 lines changed

@ -1,18 +1,18 @@
Copyright (c) 2017 Volatile Technologies Inc. All rights reserved.
Copyright (c) 2016 The SQLBoiler Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Vattle or Volatile Technologies Inc. nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Vattle nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT


@ -1,12 +1,12 @@
![sqlboiler logo](http://i.imgur.com/ilkv0r9.png)
![sqlboiler logo](http://i.imgur.com/NJtCT7y.png)
[![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/volatiletech/sqlboiler/blob/master/LICENSE)
[![GoDoc](https://godoc.org/github.com/volatiletech/sqlboiler?status.svg)](https://godoc.org/github.com/volatiletech/sqlboiler)
[![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/vattle/sqlboiler/blob/master/LICENSE)
[![GoDoc](https://godoc.org/github.com/vattle/sqlboiler?status.svg)](https://godoc.org/github.com/vattle/sqlboiler)
[![Mail](https://img.shields.io/badge/mail%20list-sqlboiler-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler)
[![Mail-Annc](https://img.shields.io/badge/mail%20list-sqlboiler--announce-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler-announce)
[![Slack](https://img.shields.io/badge/slack-%23general-lightgrey.svg)](https://sqlboiler.from-the.cloud)
[![CircleCI](https://circleci.com/gh/volatiletech/sqlboiler.svg?style=shield)](https://circleci.com/gh/volatiletech/sqlboiler)
[![Go Report Card](https://goreportcard.com/badge/volatiletech/sqlboiler)](http://goreportcard.com/report/volatiletech/sqlboiler)
[![CircleCI](https://circleci.com/gh/vattle/sqlboiler.svg?style=shield)](https://circleci.com/gh/vattle/sqlboiler)
[![Go Report Card](https://goreportcard.com/badge/vattle/sqlboiler)](http://goreportcard.com/report/vattle/sqlboiler)
SQLBoiler is a tool to generate a Go ORM tailored to your database schema.
@ -76,7 +76,6 @@ Table of Contents
* [Reload](#reload)
* [Exists](#exists)
* [Enums](#enums)
* [Constants](#constants)
* [FAQ](#faq)
* [Won't compiling models for a huge database be very slow?](#wont-compiling-models-for-a-huge-database-be-very-slow)
* [Missing imports for generated package](#missing-imports-for-generated-package)
@ -123,7 +122,7 @@ For a comprehensive list of available operations and examples please see [Featur
```go
import (
// Import this so we don't have to use qm.Limit etc.
. "github.com/volatiletech/sqlboiler/queries/qm"
. "github.com/vattle/sqlboiler/queries/qm"
)
// Open handle to database like normal
@ -215,12 +214,12 @@ fmt.Println(len(users.R.FavoriteMovies))
#### Download
```shell
go get -u -t github.com/volatiletech/sqlboiler
go get -u -t github.com/vattle/sqlboiler
```
#### Configuration
Create a configuration file. Because the project uses [viper](https://github.com/spf13/viper), TOML, JSON and YAML
Create a configuration file. Because the project uses [viper](github.com/spf13/viper), TOML, JSON and YAML
are all supported. Environment variables are also able to be used.
We will assume TOML for the rest of the documentation.
@ -296,7 +295,7 @@ generate models for, we can invoke the sqlboiler command line utility.
```text
SQL Boiler generates a Go ORM from template files, tailored to your database schema.
Complete documentation is available at http://github.com/volatiletech/sqlboiler
Complete documentation is available at http://github.com/vattle/sqlboiler
Usage:
sqlboiler [flags] <driver>
@ -626,7 +625,7 @@ when performing query building. Here is a list of all of your generated query mo
```go
// Dot import so we can access query mods directly instead of prefixing with "qm."
import . "github.com/volatiletech/sqlboiler/queries/qm"
import . "github.com/vattle/sqlboiler/queries/qm"
// Use a raw query against a generated struct (Pilot in this example)
// If this query mod exists in your call, it will override the others.
@ -737,7 +736,7 @@ in combination with your own custom, non-generated model.
### Binding
For a comprehensive ruleset for `Bind()` you can refer to our [godoc](https://godoc.org/github.com/volatiletech/sqlboiler/queries#Bind).
For a comprehensive ruleset for `Bind()` you can refer to our [godoc](https://godoc.org/github.com/vattle/sqlboiler/queries#Bind).
The `Bind()` [Finisher](#finisher) allows the results of a query built with
the [Raw SQL](#raw-query) method or the [Query Builder](#query-building) methods to be bound
@ -991,7 +990,7 @@ tx.Rollback()
```
It's also worth noting that there's a way to take advantage of `boil.SetDB()`
by using the [boil.Begin()](https://godoc.org/github.com/volatiletech/sqlboiler/boil#Begin) function.
by using the [boil.Begin()](https://godoc.org/github.com/vattle/sqlboiler/boil#Begin) function.
This opens a transaction using the globally stored database.
### Debug Logging
@ -1027,10 +1026,10 @@ Find is used to find a single row by primary key:
```go
// Retrieve pilot with all columns filled
pilot, err := models.FindPilot(db, 1)
pilot, err := models.PilotFind(db, 1)
// Retrieve a subset of column values
jet, err := models.FindJet(db, 1, "name", "color")
jet, err := models.JetFind(db, 1, "name", "color")
```
### Insert
@ -1193,7 +1192,7 @@ exists, err := models.Pilots(db, Where("id=?", 5)).Exists()
If your MySQL or Postgres tables use enums we will generate constants that hold their values
that you can use in your queries. For example:
```sql
```
CREATE TYPE workday AS ENUM('monday', 'tuesday', 'wednesday', 'thursday', 'friday');
CREATE TABLE event_one (
@ -1225,41 +1224,6 @@ still be able to use your generated library, and it will still work as expected,
to get the tests to pass in this event is to either use a parsable enum value or use a regular column
instead of an enum.
### Constants
The models package will also contain some structs that contain all of the table and column
names harvested from the database at generation time.
For table names they're generated under `models.TableNames`:
```go
// Generated code from models package
var TableNames = struct {
Messages string
Purchases string
}{
Messages: "messages",
Purchases: "purchases",
}
// Usage example:
fmt.Println(models.TableNames.Messages)
```
```go
// Generated code from models package
var MessageColumns = struct {
ID string
PurchaseID string
}{
ID: "id",
PurchaseID: "purchase_id",
}
// Usage example:
fmt.Println(models.MessageColumns.ID)
```
## FAQ
#### Won't compiling models for a huge database be very slow?
@ -1298,12 +1262,12 @@ You *must* use a DSN flag in MySQL connections, see: [Requirements](#requirement
#### Where is the homepage?
The homepage for the [SQLBoiler](https://github.com/volatiletech/sqlboiler) [Golang ORM](https://github.com/volatiletech/sqlboiler)
generator is located at: https://github.com/volatiletech/sqlboiler
The homepage for the [SQLBoiler](https://github.com/vattle/sqlboiler) [Golang ORM](https://github.com/vattle/sqlboiler)
generator is located at: https://github.com/vattle/sqlboiler
## Benchmarks
If you'd like to run the benchmarks yourself check out our [boilbench](https://github.com/volatiletech/boilbench) repo.
If you'd like to run the benchmarks yourself check out our [boilbench](https://github.com/vattle/boilbench) repo.
```bash
go test -bench . -benchmem


@ -3,7 +3,7 @@ package bdb
import (
"strings"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/strmangle"
)
// Column holds information about a database column.


@ -1,8 +1,8 @@
package drivers
import (
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
)
// MockDriver is a mock implementation of the bdb driver Interface
@ -58,6 +58,14 @@ func (m *MockDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
}[tableName], nil
}
func (m *MockDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, nil
}
func (m *MockDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", nil
}
// ForeignKeyInfo returns a list of mock foreignkeys
func (m *MockDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
return map[string][]bdb.ForeignKey{


@ -7,8 +7,8 @@ import (
"strings"
_ "github.com/denisenkom/go-mssqldb"
"github.com/lbryio/sqlboiler/bdb"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
)
// MSSQLDriver holds the database connection string and a handle
@ -241,6 +241,14 @@ func (m *MSSQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
return pkey, nil
}
func (m *MSSQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, errors.New("not implemented")
}
func (m *MSSQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", errors.New("not implemented")
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (m *MSSQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey


@ -3,12 +3,13 @@ package drivers
import (
"database/sql"
"fmt"
"sort"
"strconv"
"strings"
"github.com/go-sql-driver/mysql"
"github.com/lbryio/sqlboiler/bdb"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
)
// TinyintAsBool is a global that is set from main.go if a user specifies
@ -52,6 +53,7 @@ func MySQLBuildQueryString(user, pass, dbname, host string, port int, sslmode st
}
config.Addr += ":" + strconv.Itoa(port)
config.TLSConfig = sslmode
config.AllowNativePasswords = true
// MySQL is a bad, and by default reads date/datetime into a []byte
// instead of a time.Time. Tell it to stop being a bad.
@ -232,6 +234,79 @@ func (m *MySQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
return pkey, nil
}
// UniqueKeyInfo retrieves the unique keys for a given table name.
func (m *MySQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
var ukeys []bdb.UniqueKey
query := `
select tc.table_name, tc.constraint_name, GROUP_CONCAT(kcu.column_name)
from information_schema.table_constraints tc
left join information_schema.key_column_usage kcu on tc.constraint_name = kcu.constraint_name and tc.table_name = kcu.table_name and tc.table_schema = kcu.table_schema
where tc.table_schema = ? and tc.table_name = ? and tc.constraint_type = "UNIQUE"
group by tc.table_name, tc.constraint_name
`
var rows *sql.Rows
var err error
if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
return nil, err
}
for rows.Next() {
var ukey bdb.UniqueKey
var columns string
//ukey.Table = tableName
err = rows.Scan(&ukey.Table, &ukey.Name, &columns)
if err != nil {
return nil, err
}
ukey.Columns = strings.Split(columns, ",")
sort.Strings(ukey.Columns)
ukeys = append(ukeys, ukey)
}
if err = rows.Err(); err != nil {
return nil, err
}
return ukeys, nil
}
// AutoincrementInfo retrieves the autoincrement column for a given table name, if one exists.
func (m *MySQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
query := `
select column_name
from information_schema.columns
where table_schema = ? and table_name = ? and extra like "%auto_increment%"
`
var rows *sql.Rows
var err error
if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
return "", err
}
for rows.Next() {
var column string
err = rows.Scan(&column)
if err != nil {
return "", err
}
return column, nil
}
if err = rows.Err(); err != nil {
return "", err
}
return "", nil
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (m *MySQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey

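A minimal sketch of what the two new driver methods are expected to report, assuming a hypothetical `users` table with an auto-increment `id` column and a composite unique key (the schema, table, and key names below are illustrative, not from this repository):

```go
package example

import (
	"fmt"

	"github.com/lbryio/sqlboiler/bdb/drivers"
)

// describeUsersTable is illustrative only: it shows the shape of the data the
// new UniqueKeyInfo and AutoincrementInfo calls return for a table defined with
// `id BIGINT AUTO_INCREMENT PRIMARY KEY` and `UNIQUE KEY email_tenant (email, tenant_id)`.
func describeUsersTable(m *drivers.MySQLDriver) error {
	ukeys, err := m.UniqueKeyInfo("sqlboiler", "users")
	if err != nil {
		return err
	}
	// Expected: []bdb.UniqueKey{{Table: "users", Name: "email_tenant",
	// Columns: []string{"email", "tenant_id"}}} -- column names come back sorted.
	fmt.Println(ukeys)

	col, err := m.AutoincrementInfo("sqlboiler", "users")
	if err != nil {
		return err
	}
	fmt.Println(col) // expected: "id"
	return nil
}
```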

@ -8,10 +8,10 @@ import (
// Side-effect import sql driver
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
_ "github.com/lib/pq"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/strmangle"
)
// PostgresDriver holds the database connection string and a handle
@ -266,6 +266,14 @@ func (p *PostgresDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryK
return pkey, nil
}
func (p *PostgresDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, errors.New("not implemented")
}
func (p *PostgresDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", errors.New("not implemented")
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (p *PostgresDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey


@ -1,11 +1,7 @@
// Package bdb supplies the sql(b)oiler (d)ata(b)ase abstractions.
package bdb
import (
"sort"
"github.com/pkg/errors"
)
import "github.com/pkg/errors"
// Interface for a database driver. Functionality required to support a specific
// database type (eg, MySQL, Postgres etc.)
@ -13,6 +9,8 @@ type Interface interface {
TableNames(schema string, whitelist, blacklist []string) ([]string, error)
Columns(schema, tableName string) ([]Column, error)
PrimaryKeyInfo(schema, tableName string) (*PrimaryKey, error)
UniqueKeyInfo(schema, tableName string) ([]UniqueKey, error)
AutoincrementInfo(schema, tableName string) (string, error)
ForeignKeyInfo(schema, tableName string) ([]ForeignKey, error)
// TranslateColumnType takes a Database column type and returns a go column type.
@ -49,8 +47,6 @@ func Tables(db Interface, schema string, whitelist, blacklist []string) ([]Table
return nil, errors.Wrap(err, "unable to get table names")
}
sort.Strings(names)
var tables []Table
for _, name := range names {
t := Table{
@ -69,10 +65,18 @@ func Tables(db Interface, schema string, whitelist, blacklist []string) ([]Table
return nil, errors.Wrapf(err, "unable to fetch table pkey info (%s)", name)
}
if t.UKeys, err = db.UniqueKeyInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table ukey info (%s)", name)
}
if t.FKeys, err = db.ForeignKeyInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table fkey info (%s)", name)
}
if t.AutoIncrementColumn, err = db.AutoincrementInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table autoincrement info (%s)", name)
}
setIsJoinTable(&t)
tables = append(tables, t)


@ -3,7 +3,7 @@ package bdb
import (
"testing"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/strmangle"
)
type testMockDriver struct{}
@ -124,14 +124,6 @@ func TestTables(t *testing.T) {
t.Errorf("Expected len 7, got: %d\n", len(tables))
}
prev := ""
for i := range tables {
if prev >= tables[i].Name {
t.Error("tables are not sorted")
}
prev = tables[i].Name
}
pilots := GetTable(tables, "pilots")
if len(pilots.Columns) != 2 {
t.Error()


@ -8,6 +8,13 @@ type PrimaryKey struct {
Columns []string
}
// UniqueKey represents a unique key constraint in a database
type UniqueKey struct {
Table string
Name string
Columns []string
}
// ForeignKey represents a foreign key constraint in a database
type ForeignKey struct {
Table string


@ -8,9 +8,12 @@ type Table struct {
// For dbs with real schemas, like Postgres.
// Example value: "schema_name"."table_name"
SchemaName string
Columns []Column
AutoIncrementColumn string
PKey *PrimaryKey
UKeys []UniqueKey
FKeys []ForeignKey
IsJoinTable bool


@ -22,7 +22,6 @@ type Beginner interface {
Begin() (Transactor, error)
}
// SQLBeginner begins transactions (non-interface return type)
type SQLBeginner interface {
Begin() (*sql.Tx, error)
}


@ -1,23 +0,0 @@
package boil
type boilErr struct {
error
}
// WrapErr wraps err in a boilErr
func WrapErr(err error) error {
return boilErr{
error: err,
}
}
// Error returns the underlying error string
func (e boilErr) Error() string {
return e.error.Error()
}
// IsBoilErr checks if err is a boilErr
func IsBoilErr(err error) bool {
_, ok := err.(boilErr)
return ok
}


@ -1,24 +0,0 @@
package boil
import (
"errors"
"testing"
)
func TestErrors(t *testing.T) {
t.Parallel()
err := errors.New("test error")
if IsBoilErr(err) == true {
t.Errorf("Expected false")
}
err = WrapErr(errors.New("test error"))
if err.Error() != "test error" {
t.Errorf(`Expected "test error", got %v`, err.Error())
}
if IsBoilErr(err) != true {
t.Errorf("Expected true")
}
}


@ -13,10 +13,10 @@ import (
"text/template"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/bdb/drivers"
"github.com/volatiletech/sqlboiler/queries"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb/drivers"
"github.com/lbryio/sqlboiler/queries"
"github.com/lbryio/sqlboiler/strmangle"
)
const (
@ -267,7 +267,7 @@ func (s *State) processReplacements() error {
return nil
}
var basePackage = "github.com/volatiletech/sqlboiler"
var basePackage = "github.com/lbryio/sqlboiler"
func getBasePath(baseDirConfig string) (string, error) {
if len(baseDirConfig) > 0 {


@ -6,7 +6,7 @@ import (
"sort"
"strings"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb"
)
// imports defines the optional standard imports and
@ -170,26 +170,33 @@ func newImporter() importer {
`"time"`,
},
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/volatiletech/sqlboiler/boil"`,
`"github.com/volatiletech/sqlboiler/queries"`,
`"github.com/volatiletech/sqlboiler/queries/qm"`,
`"github.com/volatiletech/sqlboiler/strmangle"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/queries"`,
`"github.com/lbryio/sqlboiler/queries/qm"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
},
}
imp.Singleton = mapImports{
"boil_queries": {
"boil_queries": imports{
standard: importList{
`"fmt"`,
`"strings"`,
},
thirdParty: importList{
`"github.com/volatiletech/sqlboiler/boil"`,
`"github.com/volatiletech/sqlboiler/queries"`,
`"github.com/volatiletech/sqlboiler/queries/qm"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/queries"`,
`"github.com/lbryio/sqlboiler/queries/qm"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
},
},
"boil_types": {
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/volatiletech/sqlboiler/strmangle"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
},
},
}
@ -201,9 +208,9 @@ func newImporter() importer {
`"testing"`,
},
thirdParty: importList{
`"github.com/volatiletech/sqlboiler/boil"`,
`"github.com/volatiletech/sqlboiler/randomize"`,
`"github.com/volatiletech/sqlboiler/strmangle"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
},
}
@ -221,9 +228,9 @@ func newImporter() importer {
},
thirdParty: importList{
`"github.com/kat-co/vala"`,
`"github.com/pkg/errors"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/spf13/viper"`,
`"github.com/volatiletech/sqlboiler/boil"`,
},
},
"boil_queries_test": {
@ -236,7 +243,7 @@ func newImporter() importer {
`"regexp"`,
},
thirdParty: importList{
`"github.com/volatiletech/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/boil"`,
},
},
"boil_suites_test": {
@ -259,11 +266,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/volatiletech/sqlboiler/bdb/drivers"`,
`"github.com/volatiletech/sqlboiler/randomize"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`_ "github.com/lib/pq"`,
`"github.com/spf13/viper"`,
},
},
"mysql": {
@ -278,11 +285,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/volatiletech/sqlboiler/bdb/drivers"`,
`"github.com/volatiletech/sqlboiler/randomize"`,
`_ "github.com/go-sql-driver/mysql"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/spf13/viper"`,
},
},
"mssql": {
@ -295,11 +302,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/volatiletech/sqlboiler/bdb/drivers"`,
`"github.com/volatiletech/sqlboiler/randomize"`,
`_ "github.com/denisenkom/go-mssqldb"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/spf13/viper"`,
},
},
}
@ -309,79 +316,79 @@ func newImporter() importer {
// TranslateColumnType to see the type assignments.
imp.BasedOnType = mapImports{
"null.Float32": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Float64": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int8": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int16": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int32": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int64": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint8": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint16": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint32": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint64": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.String": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Bool": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Time": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.JSON": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Bytes": {
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"time.Time": {
standard: importList{`"time"`},
},
"types.JSON": {
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.BytesArray": {
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.Int64Array": {
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.Float64Array": {
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.BoolArray": {
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.StringArray": {
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.Hstore": {
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
}


@ -6,7 +6,7 @@ import (
"testing"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb"
)
func TestImportsSort(t *testing.T) {
@ -234,7 +234,7 @@ func TestCombineTypeImports(t *testing.T) {
`"fmt"`,
},
thirdParty: importList{
`"github.com/volatiletech/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/boil"`,
},
}
@ -245,8 +245,8 @@ func TestCombineTypeImports(t *testing.T) {
`"time"`,
},
thirdParty: importList{
`"github.com/volatiletech/sqlboiler/boil"`,
`"gopkg.in/volatiletech/null.v6"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
},
}
@ -280,8 +280,8 @@ func TestCombineTypeImports(t *testing.T) {
`"time"`,
},
thirdParty: importList{
`"github.com/volatiletech/sqlboiler/boil"`,
`"gopkg.in/volatiletech/null.v6"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
},
}
@ -297,11 +297,11 @@ func TestCombineImports(t *testing.T) {
a := imports{
standard: importList{"fmt"},
thirdParty: importList{"github.com/volatiletech/sqlboiler", "gopkg.in/volatiletech/null.v6"},
thirdParty: importList{"github.com/lbryio/sqlboiler", "github.com/lbryio/lbry.go/v2/extras/null"},
}
b := imports{
standard: importList{"os"},
thirdParty: importList{"github.com/volatiletech/sqlboiler"},
thirdParty: importList{"github.com/lbryio/sqlboiler"},
}
c := combineImports(a, b)
@ -309,8 +309,8 @@ func TestCombineImports(t *testing.T) {
if c.standard[0] != "fmt" && c.standard[1] != "os" {
t.Errorf("Wanted: fmt, os got: %#v", c.standard)
}
if c.thirdParty[0] != "github.com/volatiletech/sqlboiler" && c.thirdParty[1] != "gopkg.in/volatiletech/null.v6" {
t.Errorf("Wanted: github.com/volatiletech/sqlboiler, gopkg.in/volatiletech/null.v6 got: %#v", c.thirdParty)
if c.thirdParty[0] != "github.com/lbryio/sqlboiler" && c.thirdParty[1] != "github.com/lbryio/lbry.go/v2/extras/null" {
t.Errorf("Wanted: github.com/lbryio/sqlboiler, github.com/lbryio/lbry.go/v2/extras/null got: %#v", c.thirdParty)
}
}


@ -14,7 +14,7 @@ import (
"github.com/pkg/errors"
)
var noEditDisclaimer = []byte(`// This file is generated by SQLBoiler (https://github.com/volatiletech/sqlboiler)
var noEditDisclaimer = []byte(`// This file is generated by SQLBoiler (https://github.com/lbryio/sqlboiler)
// and is meant to be re-generated in place and/or deleted at any time.
// DO NOT EDIT


@ -8,10 +8,10 @@ import (
"strings"
"text/template"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/queries"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/queries"
"github.com/volatiletech/sqlboiler/strmangle"
)
// templateData for sqlboiler templates


@ -4,8 +4,8 @@ import (
"fmt"
"strings"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
)
// TxtToOne contains text that will be used by templates for a one-to-many or


@ -5,8 +5,8 @@ import (
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/bdb/drivers"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb/drivers"
)
func TestTxtsFromOne(t *testing.T) {


@ -8,13 +8,13 @@ import (
"strings"
"github.com/kat-co/vala"
"github.com/lbryio/sqlboiler/bdb/drivers"
"github.com/lbryio/sqlboiler/boilingcore"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/volatiletech/sqlboiler/bdb/drivers"
"github.com/volatiletech/sqlboiler/boilingcore"
)
const sqlBoilerVersion = "2.5.1"
const sqlBoilerVersion = "2.4.0+lbry"
var (
cmdState *boilingcore.State
@ -62,7 +62,7 @@ func main() {
Use: "sqlboiler [flags] <driver>",
Short: "SQL Boiler generates an ORM tailored to your database schema.",
Long: "SQL Boiler generates a Go ORM from template files, tailored to your database schema.\n" +
`Complete documentation is available at http://github.com/volatiletech/sqlboiler`,
`Complete documentation is available at http://github.com/lbryio/sqlboiler`,
Example: `sqlboiler postgres`,
PreRunE: preRun,
RunE: run,


@ -5,9 +5,9 @@ import (
"reflect"
"strings"
"github.com/lbryio/sqlboiler/boil"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/boil"
"github.com/volatiletech/sqlboiler/strmangle"
)
type loadRelationshipState struct {
@ -259,10 +259,14 @@ func collectLoaded(key string, loadingFrom reflect.Value) (reflect.Value, bindKi
for {
switch bkind {
case kindStruct:
if !loadedObject.IsNil() {
collection = reflect.Append(collection, loadedObject)
}
case kindPtrSliceStruct:
if !loadedObject.IsNil() {
collection = reflect.AppendSlice(collection, loadedObject)
}
}
i++
if i >= lnFrom {

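A hedged usage sketch of the behavior this nil check protects; the generated models package and the Purchase/Refund/Transaction relationships are assumptions for illustration, not tables in this repository:

```go
package example

import (
	"fmt"

	"github.com/lbryio/sqlboiler/boil"
	"github.com/lbryio/sqlboiler/queries/qm"

	// Stand-in for a package generated by this fork; the relationships are hypothetical.
	models "example.com/yourapp/models"
)

// loadPurchaseRefunds eager-loads two levels deep. With the nil check in
// collectLoaded, a purchase whose optional Refund relation is NULL is simply
// skipped instead of being carried (as a nil value) into the Transaction depth.
func loadPurchaseRefunds(db boil.Executor) error {
	purchases, err := models.Purchases(db, qm.Load("Refund.Transaction")).All()
	if err != nil {
		return err
	}
	for _, p := range purchases {
		if p.R.Refund != nil {
			fmt.Println(p.R.Refund.R.Transaction.ID)
		}
	}
	return nil
}
```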

@ -4,7 +4,7 @@ import (
"fmt"
"testing"
"github.com/volatiletech/sqlboiler/boil"
"github.com/lbryio/sqlboiler/boil"
)
var testEagerCounters struct {


@ -4,7 +4,7 @@ import (
"fmt"
"reflect"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/strmangle"
)
// NonZeroDefaultSet returns the fields included in the


@ -5,7 +5,7 @@ import (
"testing"
"time"
null "gopkg.in/volatiletech/null.v6"
null "github.com/lbryio/lbry.go/v2/extras/null"
)
type testObj struct {


@ -1,6 +1,6 @@
package qm
import "github.com/volatiletech/sqlboiler/queries"
import "github.com/lbryio/sqlboiler/queries"
// QueryMod to modify the query object
type QueryMod func(q *queries.Query)
@ -8,8 +8,10 @@ type QueryMod func(q *queries.Query)
// Apply the query mods to the Query object
func Apply(q *queries.Query, mods ...QueryMod) {
for _, mod := range mods {
if mod != nil {
mod(q)
}
}
}
// SQL allows you to execute a plain SQL statement
@ -123,6 +125,12 @@ func From(from string) QueryMod {
}
}
func ForceIndex(index string) QueryMod {
return func(q *queries.Query) {
queries.SetForceIndex(q, index)
}
}
// Limit the number of returned rows
func Limit(limit int) QueryMod {
return func(q *queries.Query) {

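A usage sketch for the two query-mod changes above; the generated models package, the Pilot model, and the index name are assumptions for illustration:

```go
package example

import (
	"github.com/lbryio/sqlboiler/boil"
	"github.com/lbryio/sqlboiler/queries/qm"

	// Stand-in for a package generated by this fork.
	models "example.com/yourapp/models"
)

// findPilots builds a dynamic query: qm.Apply now skips nil mods, so an
// optional filter needs no special-casing, and ForceIndex pins the MySQL index
// used for the SELECT.
func findPilots(db boil.Executor, name string) (models.PilotSlice, error) {
	var nameFilter qm.QueryMod // stays nil when no name was supplied
	if name != "" {
		nameFilter = qm.Where("name = ?", name)
	}
	return models.Pilots(db,
		qm.ForceIndex("idx_pilots_name"), // assumed index name
		nameFilter,
		qm.Limit(10),
	).All()
}
```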

@ -4,7 +4,8 @@ import (
"database/sql"
"fmt"
"github.com/volatiletech/sqlboiler/boil"
"github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/lbryio/sqlboiler/boil"
)
// joinKind is the type of join
@ -29,6 +30,7 @@ type Query struct {
selectCols []string
count bool
from []string
forceindex string
joins []join
where []where
in []in
@ -136,7 +138,7 @@ func (q *Query) Query() (*sql.Rows, error) {
func (q *Query) ExecP() sql.Result {
res, err := q.Exec()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return res
@ -147,7 +149,7 @@ func (q *Query) ExecP() sql.Result {
func (q *Query) QueryP() *sql.Rows {
rows, err := q.Query()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return rows
@ -188,11 +190,6 @@ func SetSelect(q *Query, sel []string) {
q.selectCols = sel
}
// GetSelect from the query
func GetSelect(q *Query) []string {
return q.selectCols
}
// SetCount on the query.
func SetCount(q *Query) {
q.count = true
@ -267,6 +264,11 @@ func SetLastWhereAsOr(q *Query) {
q.where[len(q.where)-1].orSeparator = true
}
// SetForceIndex sets the index to be used by the query
func SetForceIndex(q *Query, index string){
q.forceindex = index
}
// SetLastInAsOr sets the or separator for the tail "IN" in the slice
func SetLastInAsOr(q *Query) {
if len(q.in) == 0 {


@ -7,7 +7,7 @@ import (
"sort"
"strings"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/strmangle"
)
var (
@ -76,8 +76,14 @@ func buildSelectQuery(q *Query) (*bytes.Buffer, []interface{}) {
buf.WriteByte(')')
}
if len(q.forceindex) > 0 {
fmt.Fprintf(buf, " FROM %s FORCE INDEX (%s)", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "),q.forceindex)
}else{
fmt.Fprintf(buf, " FROM %s", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "))
}
if len(q.joins) > 0 {
argsLen := len(args)
joinBuf := strmangle.GetBuffer()
@ -190,7 +196,7 @@ func buildUpdateQuery(q *Query) (*bytes.Buffer, []interface{}) {
}
// BuildUpsertQueryMySQL builds a SQL statement string using the upsertData provided.
func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string) string {
func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string, autoIncrementCol string) string {
whitelist = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, whitelist)
buf := strmangle.GetBuffer()
@ -220,6 +226,11 @@ func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []st
strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
)
// https://stackoverflow.com/questions/778534/mysql-on-duplicate-key-last-insert-id
if autoIncrementCol != "" {
buf.WriteString(autoIncrementCol + " = LAST_INSERT_ID(" + autoIncrementCol + "), ")
}
for i, v := range update {
if i != 0 {
buf.WriteByte(',')

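To make the two builder changes concrete, here is a rough sketch of the SQL they are expected to emit; the table, column, and index names are illustrative, and the exact quoting and placeholder layout may differ from the real output:

```go
package example

// Shape of a SELECT built with a force index set (MySQL dialect assumed):
const selectWithIndex = "SELECT * FROM `pilots` FORCE INDEX (idx_pilots_name) WHERE `name`=?"

// Shape of BuildUpsertQueryMySQL output when autoIncrementCol is "id": the
// id = LAST_INSERT_ID(id) assignment makes MySQL report the existing row's id
// even when the INSERT falls through to the UPDATE branch (the Stack Overflow
// link in the code explains the trick), so the caller can still re-fetch the row.
const upsertWithAutoInc = "INSERT INTO `pilots` (`name`,`color`) VALUES (?,?) " +
	"ON DUPLICATE KEY UPDATE id = LAST_INSERT_ID(id), `name` = VALUES(`name`),`color` = VALUES(`color`)"
```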

@ -7,9 +7,9 @@ import (
"strings"
"sync"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/boil"
"github.com/volatiletech/sqlboiler/strmangle"
)
var (
@ -41,7 +41,7 @@ const (
// It panics on error. See boil.Bind() documentation.
func (q *Query) BindP(obj interface{}) {
if err := q.Bind(obj); err != nil {
panic(boil.WrapErr(err))
panic(errors.WithStack(err))
}
}


@ -14,12 +14,12 @@ import (
"sync/atomic"
"time"
null "gopkg.in/volatiletech/null.v6"
null "github.com/lbryio/lbry.go/v2/extras/null"
"github.com/pkg/errors"
"github.com/satori/go.uuid"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/types"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/types"
)
var (
@ -159,17 +159,18 @@ func randDate(s *Seed) time.Time {
// If canBeNull is true:
// The value has the possibility of being null or non-zero at random.
func randomizeField(s *Seed, field reflect.Value, fieldType string, canBeNull bool) error {
kind := field.Kind()
typ := field.Type()
if strings.HasPrefix(fieldType, "enum") {
enum, err := randEnumValue(s, fieldType)
enum, err := randEnumValue(fieldType)
if err != nil {
return err
}
if kind == reflect.Struct {
val := null.NewString(enum, s.nextInt()%2 == 0)
val := null.NewString(enum, rand.Intn(1) == 0)
field.Set(reflect.ValueOf(val))
} else {
field.Set(reflect.ValueOf(enum))
@ -622,11 +623,11 @@ func getVariableRandValue(s *Seed, kind reflect.Kind, typ reflect.Type) interfac
return nil
}
func randEnumValue(s *Seed, enum string) (string, error) {
func randEnumValue(enum string) (string, error) {
vals := strmangle.ParseEnumVals(enum)
if vals == nil || len(vals) == 0 {
return "", fmt.Errorf("unable to parse enum string: %s", enum)
}
return vals[s.nextInt()%len(vals)], nil
return vals[rand.Intn(len(vals)-1)], nil
}


@ -5,7 +5,7 @@ import (
"testing"
"time"
null "gopkg.in/volatiletech/null.v6"
null "github.com/lbryio/lbry.go/v2/extras/null"
)
func TestRandomizeStruct(t *testing.T) {
@ -148,36 +148,24 @@ func TestRandomizeField(t *testing.T) {
func TestRandEnumValue(t *testing.T) {
t.Parallel()
s := NewSeed()
enum1 := "enum.workday('monday','tuesday')"
enum2 := "enum('monday','tuesday')"
enum3 := "enum('monday')"
r1, err := randEnumValue(s, enum1)
r1, err := randEnumValue(enum1)
if err != nil {
t.Error(err)
}
if r1 != "monday" && r1 != "tuesday" {
t.Errorf("Expected monday or tuesday, got: %q", r1)
t.Errorf("Expected monday or tueday, got: %q", r1)
}
r2, err := randEnumValue(s, enum2)
r2, err := randEnumValue(enum2)
if err != nil {
t.Error(err)
}
if r2 != "monday" && r2 != "tuesday" {
t.Errorf("Expected monday or tuesday, got: %q", r2)
}
r3, err := randEnumValue(s, enum3)
if err != nil {
t.Error(err)
}
if r3 != "monday" {
t.Errorf("Expected monday got: %q", r3)
t.Errorf("Expected monday or tueday, got: %q", r2)
}
}


@ -1,6 +1,6 @@
package strmangle
import "github.com/volatiletech/inflect"
import "github.com/nullbio/inflect"
var boilRuleset *inflect.Ruleset
@ -193,7 +193,6 @@ func newBoilRuleset() *inflect.Ruleset {
rs.AddSingularExact("is", "is", true)
rs.AddSingularExact("us", "us", true)
rs.AddSingularExact("as", "as", true)
rs.AddSingularExact("areas", "area", true)
rs.AddPluralExact("a", "a", true)
rs.AddPluralExact("i", "i", true)
rs.AddPluralExact("is", "is", true)


@ -93,7 +93,7 @@ func SchemaTable(lq, rq string, driver string, schema string, table string) stri
return fmt.Sprintf(`%s%s%s`, lq, table, rq)
}
// IdentQuote attempts to quote simple identifiers in SQL statements
// IdentQuote attempts to quote simple identifiers in SQL tatements
func IdentQuote(lq byte, rq byte, s string) string {
if strings.ToLower(s) == "null" || s == "?" {
return s


@ -139,7 +139,6 @@ func TestSingular(t *testing.T) {
{"hello_people", "hello_person"},
{"hello_person", "hello_person"},
{"friends", "friend"},
{"areas", "area"},
{"hello_there_people", "hello_there_person"},
}
@ -160,7 +159,6 @@ func TestPlural(t *testing.T) {
{"hello_person", "hello_people"},
{"friend", "friends"},
{"friends", "friends"},
{"area", "areas"},
{"hello_there_person", "hello_there_people"},
}


@ -27,6 +27,13 @@ var {{$modelName}}Columns = struct {
{{end -}}
}
// {{$modelName}}Filter allows you to filter on any columns by making them all pointers.
type {{$modelName}}Filter struct {
{{range $column := .Table.Columns -}}
{{titleCase $column.Name}} *{{$column.Type}} `{{generateTags $dot.Tags $column.Name}}boil:"{{$column.Name}}" json:"{{$column.Name}},omitempty" toml:"{{$column.Name}}" yaml:"{{$column.Name}},omitempty"`
{{end -}}
}
{{- if .Table.IsJoinTable -}}
{{- else}}
// {{$modelNameCamel}}R is where relationships are stored.

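For reference, a sketch of what the new Filter template generates for a hypothetical messages table (column set and Go types are examples only); every field is a pointer so that nil means "not filtered on this column":

```go
package example

// Hypothetical output of the {{$modelName}}Filter template for a "messages"
// table with id and purchase_id columns.
type MessageFilter struct {
	ID         *uint64 `boil:"id" json:"id,omitempty" toml:"id" yaml:"id,omitempty"`
	PurchaseID *uint64 `boil:"purchase_id" json:"purchase_id,omitempty" toml:"purchase_id" yaml:"purchase_id,omitempty"`
}
```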

@ -10,6 +10,7 @@ var (
{{$varNameSingular}}ColumnsWithoutDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault false | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
{{$varNameSingular}}ColumnsWithDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault true | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
{{$varNameSingular}}PrimaryKeyColumns = []string{{"{"}}{{.Table.PKey.Columns | stringMap .StringFuncs.quoteWrap | join ", "}}{{"}"}}
{{$varNameSingular}}AutoIncrementColumn = "{{.Table.AutoIncrementColumn }}"
)
type (
@ -21,7 +22,7 @@ type (
{{$tableNameSingular}}Hook func(boil.Executor, *{{$tableNameSingular}}) error
{{- end}}
{{$varNameSingular}}Query struct {
{{$tableNameSingular}}Query struct {
*queries.Query
}
)


@ -16,7 +16,7 @@ var {{$varNameSingular}}AfterUpsertHooks []{{$tableNameSingular}}Hook
func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeInsertHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -27,7 +27,7 @@ func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeUpdateHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeDeleteHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -49,7 +49,7 @@ func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeUpsertHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -60,7 +60,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterInsertHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -71,7 +71,7 @@ func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterSelectHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -82,7 +82,7 @@ func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterUpdateHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -93,7 +93,7 @@ func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterDeleteHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -104,7 +104,7 @@ func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterUpsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterUpsertHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}


@ -1,27 +1,27 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- $varNameSingular := .Table.Name | singular | camelCase -}}
// OneP returns a single {{$varNameSingular}} record from the query, and panics on error.
func (q {{$varNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
// OneP returns a single {{$tableNameSingular}} record from the query, and panics on error.
func (q {{$tableNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
o, err := q.One()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return o
}
// One returns a single {{$varNameSingular}} record from the query.
func (q {{$varNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
// One returns a single {{$tableNameSingular}} record from the query.
func (q {{$tableNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
o := &{{$tableNameSingular}}{}
queries.SetLimit(q.Query, 1)
err := q.Bind(o)
if err != nil {
if errors.Cause(err) == sql.ErrNoRows {
return nil, sql.ErrNoRows
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Wrap(err, "{{.PkgName}}: failed to execute a one query for {{.Table.Name}}")
return nil, errors.Prefix("{{.PkgName}}: failed to execute a one query for {{.Table.Name}}", err)
}
{{if not .NoHooks -}}
@ -34,22 +34,22 @@ func (q {{$varNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
}
// AllP returns all {{$tableNameSingular}} records from the query, and panics on error.
func (q {{$varNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
func (q {{$tableNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
o, err := q.All()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return o
}
// All returns all {{$tableNameSingular}} records from the query.
func (q {{$varNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
func (q {{$tableNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
var o []*{{$tableNameSingular}}
err := q.Bind(&o)
if err != nil {
return nil, errors.Wrap(err, "{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice")
return nil, errors.Prefix("{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice", err)
}
{{if not .NoHooks -}}
@ -66,17 +66,17 @@ func (q {{$varNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
}
// CountP returns the count of all {{$tableNameSingular}} records in the query, and panics on error.
func (q {{$varNameSingular}}Query) CountP() int64 {
func (q {{$tableNameSingular}}Query) CountP() int64 {
c, err := q.Count()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return c
}
// Count returns the count of all {{$tableNameSingular}} records in the query.
func (q {{$varNameSingular}}Query) Count() (int64, error) {
func (q {{$tableNameSingular}}Query) Count() (int64, error) {
var count int64
queries.SetSelect(q.Query, nil)
@ -84,32 +84,33 @@ func (q {{$varNameSingular}}Query) Count() (int64, error) {
err := q.Query.QueryRow().Scan(&count)
if err != nil {
return 0, errors.Wrap(err, "{{.PkgName}}: failed to count {{.Table.Name}} rows")
return 0, errors.Prefix("{{.PkgName}}: failed to count {{.Table.Name}} rows", err)
}
return count, nil
}
// Exists checks if the row exists in the table, and panics on error.
func (q {{$varNameSingular}}Query) ExistsP() bool {
func (q {{$tableNameSingular}}Query) ExistsP() bool {
e, err := q.Exists()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return e
}
// Exists checks if the row exists in the table.
func (q {{$varNameSingular}}Query) Exists() (bool, error) {
func (q {{$tableNameSingular}}Query) Exists() (bool, error) {
var count int64
queries.SetCount(q.Query)
queries.SetSelect(q.Query, []string{})
queries.SetLimit(q.Query, 1)
err := q.Query.QueryRow().Scan(&count)
if err != nil {
return false, errors.Wrap(err, "{{.PkgName}}: failed to check if {{.Table.Name}} exists")
return false, errors.Prefix("{{.PkgName}}: failed to check if {{.Table.Name}} exists", err)
}
return count > 0, nil
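
Note: One (and the Find helpers further down) now return nil, nil instead of sql.ErrNoRows when no record matches, so callers test the returned pointer rather than comparing errors. A minimal usage sketch, assuming a hypothetical user table for which these templates generated a models package with a User struct and a Users query constructor:

package main

import (
    "database/sql"
    "fmt"
    "log"

    _ "github.com/go-sql-driver/mysql" // driver choice is illustrative

    "example.com/app/models" // hypothetical package generated by these templates
)

func main() {
    db, err := sql.Open("mysql", "user:pass@tcp(localhost:3306)/exampledb")
    if err != nil {
        log.Fatal(err)
    }

    // One() now returns (nil, nil) when no row matches, so check the pointer
    // instead of comparing the error against sql.ErrNoRows.
    u, err := models.Users(db).One()
    if err != nil {
        log.Fatal(err) // a real query or scan failure
    }
    if u == nil {
        fmt.Println("no users found")
        return
    }
    fmt.Printf("first user: %+v\n", u)
}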

View file

@ -3,14 +3,14 @@
{{- $dot := . -}}
{{- range .Table.FKeys -}}
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
{{- $varNameSingular := .ForeignTable | singular | camelCase}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
// {{$txt.Function.Name}}G pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
queryMods := []qm.QueryMod{
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
}

View file

@ -3,14 +3,14 @@
{{- $dot := . -}}
{{- range .Table.ToOneRelationships -}}
{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
{{- $varNameSingular := .ForeignTable | singular | camelCase}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
// {{$txt.Function.Name}}G pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
queryMods := []qm.QueryMod{
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
}

View file

@ -3,19 +3,22 @@
{{- $dot := . -}}
{{- $table := .Table -}}
{{- range .Table.ToManyRelationships -}}
{{- $varNameSingular := .ForeignTable | singular | camelCase -}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase -}}
{{- $txt := txtsFromToMany $dot.Tables $table . -}}
{{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
// {{$txt.Function.Name}}G retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}}
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}} with an executor
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
var queryMods []qm.QueryMod
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
queryMods := []qm.QueryMod{
qm.Select("{{$schemaForeignTable}}.*"),
}
if len(mods) != 0 {
queryMods = append(queryMods, mods...)
}
@ -34,11 +37,6 @@ func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor,
query := {{$txt.ForeignTable.NamePluralGo}}(exec, queryMods...)
queries.SetFrom(query.Query, "{{$schemaForeignTable}}")
if len(queries.GetSelect(query.Query)) == 0 {
queries.SetSelect(query.Query, []string{"{{$schemaForeignTable}}.*"})
}
return query
}

View file

@ -4,7 +4,7 @@
{{- range .Table.FKeys -}}
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo}}
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
// loaded structs of the objects.
func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
@ -45,20 +45,20 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
}
defer results.Close()
var resultSlice []*{{$txt.ForeignTable.NameGo}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
}
{{if not $dot.NoHooks -}}
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return err
return errors.Err(err)
}
}
}

View file

@ -45,20 +45,20 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
}
defer results.Close()
var resultSlice []*{{$txt.ForeignTable.NameGo}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
}
{{if not $dot.NoHooks -}}
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return err
return errors.Err(err)
}
}
}

View file

@ -54,7 +54,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Wrap(err, "failed to eager load {{.ForeignTable}}")
return errors.Prefix("failed to eager load {{.ForeignTable}}", err)
}
defer results.Close()
@ -70,7 +70,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
err = results.Scan({{$foreignTable.Columns | columnNames | stringMap $dot.StringFuncs.titleCase | prefixStringSlice "&one." | join ", "}}, &localJoinCol)
if err = results.Err(); err != nil {
return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
}
resultSlice = append(resultSlice, one)
@ -78,11 +78,11 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
}
if err = results.Err(); err != nil {
return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
}
{{else -}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Wrap(err, "failed to bind eager loaded slice {{.ForeignTable}}")
return errors.Prefix("failed to bind eager loaded slice {{.ForeignTable}}", err)
}
{{end}}
@ -90,7 +90,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
if len({{.ForeignTable | singular | camelCase}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return err
return errors.Err(err)
}
}
}

View file

@ -20,7 +20,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -30,7 +30,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -41,7 +41,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
var err error
if insert {
if err = related.Insert(exec); err != nil {
return errors.Wrap(err, "failed to insert into foreign table")
return errors.Prefix("failed to insert into foreign table", err)
}
}
@ -58,7 +58,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Wrap(err, "failed to update local table")
return errors.Prefix("failed to update local table", err)
}
o.{{$txt.Function.LocalAssignment}} = related.{{$txt.Function.ForeignAssignment}}
@ -110,7 +110,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$t
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -120,7 +120,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -133,7 +133,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
o.{{$txt.LocalTable.ColumnNameGo}}.Valid = false
if err = o.Update(exec, "{{.Column}}"); err != nil {
o.{{$txt.LocalTable.ColumnNameGo}}.Valid = true
return errors.Wrap(err, "failed to update local table")
return errors.Prefix("failed to update local table", err)
}
o.R.{{$txt.Function.Name}} = nil

View file

@ -21,7 +21,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -31,7 +31,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -48,7 +48,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
{{- end}}
if err = related.Insert(exec); err != nil {
return errors.Wrap(err, "failed to insert into foreign table")
return errors.Prefix("failed to insert into foreign table", err)
}
} else {
updateQuery := fmt.Sprintf(
@ -64,7 +64,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Wrap(err, "failed to update foreign table")
return errors.Prefix("failed to update foreign table", err)
}
related.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
@ -107,7 +107,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$t
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -117,7 +117,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -130,7 +130,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = false
if err = related.Update(exec, "{{.ForeignColumn}}"); err != nil {
related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
return errors.Wrap(err, "failed to update local table")
return errors.Prefix("failed to update local table", err)
}
o.R.{{$txt.Function.Name}} = nil

View file

@ -24,7 +24,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Add{{$txt.Function.Name}}(exec, insert, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -35,7 +35,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Add{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -55,7 +55,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
{{end -}}
if err = rel.Insert(exec); err != nil {
return errors.Wrap(err, "failed to insert into foreign table")
return errors.Prefix("failed to insert into foreign table", err)
}
}{{if not .ToJoinTable}} else {
updateQuery := fmt.Sprintf(
@ -71,7 +71,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Wrap(err, "failed to update foreign table")
return errors.Prefix("failed to update foreign table", err)
}
rel.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
@ -93,7 +93,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
_, err = exec.Exec(query, values...)
if err != nil {
return errors.Wrap(err, "failed to insert into join table")
return errors.Prefix("failed to insert into join table", err)
}
}
{{end -}}
@ -152,7 +152,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -165,7 +165,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -190,7 +190,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
_, err := exec.Exec(query, values...)
if err != nil {
return errors.Wrap(err, "failed to remove relationships before set")
return errors.Prefix("failed to remove relationships before set", err)
}
{{if .ToJoinTable -}}
@ -230,7 +230,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related ...*{
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -240,7 +240,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -266,7 +266,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
_, err = exec.Exec(query, values...)
if err != nil {
return errors.Wrap(err, "failed to remove relationships before set")
return errors.Prefix("failed to remove relationships before set", err)
}
{{else -}}
for _, rel := range related {
@ -277,7 +277,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
}
{{end -}}
if err = rel.Update(exec, "{{.ForeignColumn}}"); err != nil {
return err
return errors.Err(err)
}
}
{{end -}}

View file

@ -1,12 +1,12 @@
{{- $tableNamePlural := .Table.Name | plural | titleCase -}}
{{- $varNameSingular := .Table.Name | singular | camelCase}}
{{- $tableNameSingular := .Table.Name | singular | titleCase}}
// {{$tableNamePlural}}G retrieves all records.
func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
return {{$tableNamePlural}}(boil.GetDB(), mods...)
}
// {{$tableNamePlural}} retrieves all the records using an executor.
func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
mods = append(mods, qm.From("{{.Table.Name | .SchemaTable}}"))
return {{$varNameSingular}}Query{NewQuery(exec, mods...)}
return {{$tableNameSingular}}Query{NewQuery(exec, mods...)}
}

View file

@ -12,7 +12,7 @@ func Find{{$tableNameSingular}}G({{$pkArgs}}, selectCols ...string) (*{{$tableNa
func Find{{$tableNameSingular}}GP({{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
retobj, err := Find{{$tableNameSingular}}(boil.GetDB(), {{$pkNames | join ", "}}, selectCols...)
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return retobj
@ -35,10 +35,10 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
err := q.Bind({{$varNameSingular}}Obj)
if err != nil {
if errors.Cause(err) == sql.ErrNoRows {
return nil, sql.ErrNoRows
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Wrap(err, "{{.PkgName}}: unable to select from {{.Table.Name}}")
return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
}
return {{$varNameSingular}}Obj, nil
@ -48,8 +48,79 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
func Find{{$tableNameSingular}}P(exec boil.Executor, {{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
retobj, err := Find{{$tableNameSingular}}(exec, {{$pkNames | join ", "}}, selectCols...)
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return retobj
}
// FindOne{{$tableNameSingular}} retrieves a single record using filters.
func FindOne{{$tableNameSingular}}(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
obj := &{{$tableNameSingular}}{}
err := {{$tableNameSingular}}NewQuery(exec).
Where(filters).
Limit(1).
Bind(obj)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
}
return obj, nil
}
// FindOne{{$tableNameSingular}}G retrieves a single record using filters.
func FindOne{{$tableNameSingular}}G(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}(boil.GetDB(), filters)
}
// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes a new record if one is not found.
func FindOne{{$tableNameSingular}}OrInit(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}(exec, filters)
if err != nil {
return nil, err
}
if {{$varNameSingular}}Obj == nil {
{{$varNameSingular}}Obj = &{{$tableNameSingular}}{}
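// Copy the non-nil filter values onto the freshly initialized record.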
objR := reflect.ValueOf({{$varNameSingular}}Obj).Elem()
r := reflect.ValueOf(filters)
for i := 0; i < r.NumField(); i++ {
f := r.Field(i)
if f.Elem().IsValid() {
objR.FieldByName(r.Type().Field(i).Name).Set(f.Elem())
}
}
}
return {{$varNameSingular}}Obj, nil
}
// FindOne{{$tableNameSingular}}OrInitG retrieves a single record using filters, or initializes a new record if one is not found. It uses the global database handle.
func FindOne{{$tableNameSingular}}OrInitG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}OrInit(boil.GetDB(), filters)
}
// FindOne{{$tableNameSingular}}OrCreate retrieves a single record using filters, or initializes and inserts a new record if one is not found.
func FindOne{{$tableNameSingular}}OrCreate(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}OrInit(exec, filters)
if err != nil {
return nil, err
}
if {{$varNameSingular}}Obj.IsNew() {
err := {{$varNameSingular}}Obj.Insert(exec)
if err != nil {
return nil, err
}
}
return {{$varNameSingular}}Obj, nil
}
// FindOne{{$tableNameSingular}}OrCreateG retrieves a single record using filters, or initializes and inserts a new record if one is not found. It uses the global database handle.
func FindOne{{$tableNameSingular}}OrCreateG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}OrCreate(boil.GetDB(), filters)
}
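
A hedged sketch of the OrCreate variant in use. The models package, the user table, the UserFilter struct and its Email field are all illustrative; the generated filter struct is not shown in this diff, and per the Where implementation below its fields are assumed to be pointers to the column values:

// Sketch only: nil filter fields mean "not filtered on".
func findOrCreateUser(db *sql.DB, email string) (*models.User, error) {
    // Non-nil filter fields are used both to match an existing row and, when no
    // row matches, to populate the record that gets inserted.
    return models.FindOneUserOrCreate(db, models.UserFilter{Email: &email})
}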

View file

@ -10,7 +10,7 @@ func (o *{{$tableNameSingular}}) InsertG(whitelist ... string) error {
// behavior description.
func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
if err := o.Insert(boil.GetDB(), whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -18,7 +18,7 @@ func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
// for whitelist behavior description.
func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... string) {
if err := o.Insert(exec, whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -29,7 +29,7 @@ func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... strin
// - All columns with a default, but non-zero are included (i.e. health = 75)
func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
}
var err error
@ -37,7 +37,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
{{if not .NoHooks -}}
if err := o.doBeforeInsertHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{- end}}
@ -59,11 +59,11 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, wl)
if err != nil {
return err
return errors.Err(err)
}
cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, returnColumns)
if err != nil {
return err
return errors.Err(err)
}
if len(wl) != 0 {
cache.query = fmt.Sprintf("INSERT INTO {{$schemaTable}} ({{.LQ}}%s{{.RQ}}) %%sVALUES (%s)%%s", strings.Join(wl, "{{.RQ}},{{.LQ}}"), strmangle.Placeholders(dialect.IndexPlaceholders, len(wl), 1, 1))
@ -110,7 +110,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
_, err = exec.Exec(cache.query, vals...)
{{- end}}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
}
{{if $canLastInsertID -}}
@ -125,7 +125,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
{{if $canLastInsertID -}}
lastID, err = result.LastInsertId()
if err != nil {
return ErrSyncFail
return errors.Err(ErrSyncFail)
}
{{$colName := index .Table.PKey.Columns 0 -}}
@ -150,7 +150,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
}
{{else}}
if len(cache.retMapping) != 0 {
@ -160,7 +160,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
}
{{end}}

View file

@ -12,7 +12,7 @@ func (o *{{$tableNameSingular}}) UpdateG(whitelist ...string) error {
// Panics on error. See Update for whitelist behavior description.
func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
if err := o.Update(boil.GetDB(), whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -21,7 +21,7 @@ func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
func (o *{{$tableNameSingular}}) UpdateP(exec boil.Executor, whitelist ... string) {
err := o.Update(exec, whitelist...)
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
var err error
{{if not .NoHooks -}}
if err = o.doBeforeUpdateHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{end -}}
@ -56,13 +56,11 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
{{if eq .DriverName "mssql"}}
wl = strmangle.SetComplement(wl, {{$varNameSingular}}ColumnsWithAuto)
{{end}}
{{if not .NoAutoTimestamps}}
if len(whitelist) == 0 {
wl = strmangle.SetComplement(wl, []string{"created_at"})
wl = strmangle.SetComplement(wl, []string{"created_at","updated_at"})
}
{{end -}}
if len(wl) == 0 {
return errors.New("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
return errors.Err("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
}
cache.query = fmt.Sprintf("UPDATE {{$schemaTable}} SET %s WHERE %s",
@ -71,7 +69,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
)
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, append(wl, {{$varNameSingular}}PrimaryKeyColumns...))
if err != nil {
return err
return errors.Err(err)
}
}
@ -84,7 +82,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
_, err = exec.Exec(cache.query, values...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to update {{.Table.Name}} row")
return errors.Prefix("{{.PkgName}}: unable to update {{.Table.Name}} row", err)
}
if !cached {
@ -101,19 +99,19 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
}
// UpdateAllP updates all rows with matching column names, and panics on error.
func (q {{$varNameSingular}}Query) UpdateAllP(cols M) {
func (q {{$tableNameSingular}}Query) UpdateAllP(cols M) {
if err := q.UpdateAll(cols); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// UpdateAll updates all rows with the specified column values.
func (q {{$varNameSingular}}Query) UpdateAll(cols M) error {
func (q {{$tableNameSingular}}Query) UpdateAll(cols M) error {
queries.SetUpdate(q.Query, cols)
_, err := q.Query.Exec()
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to update all for {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to update all for {{.Table.Name}}", err)
}
return nil
@ -127,14 +125,14 @@ func (o {{$tableNameSingular}}Slice) UpdateAllG(cols M) error {
// UpdateAllGP updates all rows with the specified column values, and panics on error.
func (o {{$tableNameSingular}}Slice) UpdateAllGP(cols M) {
if err := o.UpdateAll(boil.GetDB(), cols); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// UpdateAllP updates all rows with the specified column values, and panics on error.
func (o {{$tableNameSingular}}Slice) UpdateAllP(exec boil.Executor, cols M) {
if err := o.UpdateAll(exec, cols); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -146,7 +144,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
}
if len(cols) == 0 {
return errors.New("{{.PkgName}}: update all requires at least one column argument")
return errors.Err("{{.PkgName}}: update all requires at least one column argument")
}
colNames := make([]string, len(cols))
@ -176,7 +174,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to update all in {{$varNameSingular}} slice")
return errors.Prefix("{{.PkgName}}: unable to update all in {{$varNameSingular}} slice", err)
}
return nil

View file

@ -9,7 +9,7 @@ func (o *{{$tableNameSingular}}) UpsertG({{if eq .DriverName "postgres"}}updateO
// UpsertGP attempts an insert, and does an update or ignore on conflict. Panics on error.
func (o *{{$tableNameSingular}}) UpsertGP({{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
if err := o.Upsert(boil.GetDB(), {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -17,21 +17,21 @@ func (o *{{$tableNameSingular}}) UpsertGP({{if eq .DriverName "postgres"}}update
// UpsertP panics on error.
func (o *{{$tableNameSingular}}) UpsertP(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
if err := o.Upsert(exec, {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
}
{{- template "timestamp_upsert_helper" . }}
{{if not .NoHooks -}}
if err := o.doBeforeUpsertHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{- end}}
@ -87,7 +87,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
}
}
if len(insert) == 0 {
return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build insert column list")
return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build insert column list")
}
ret = strmangle.SetMerge(ret, {{$varNameSingular}}ColumnsWithAuto)
@ -104,7 +104,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
{{end -}}
if len(update) == 0 {
return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
}
{{if eq .DriverName "postgres"}}
@ -115,7 +115,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
}
cache.query = queries.BuildUpsertQueryPostgres(dialect, "{{$schemaTable}}", updateOnConflict, ret, update, conflict, insert)
{{else if eq .DriverName "mysql"}}
cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, insert)
cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, insert, {{$varNameSingular}}AutoIncrementColumn)
cache.retQuery = fmt.Sprintf(
"SELECT %s FROM {{.LQ}}{{.Table.Name}}{{.RQ}} WHERE {{whereClause .LQ .RQ 0 .Table.PKey.Columns}}",
strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), ","),
@ -129,12 +129,12 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, {{if eq .DriverName "mssql"}}whitelist{{else}}insert{{end}})
if err != nil {
return err
return errors.Err(err)
}
if len(ret) != 0 {
cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, ret)
if err != nil {
return err
return errors.Err(err)
}
}
}
@ -159,7 +159,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
_, err = exec.Exec(cache.query, vals...)
{{- end}}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to upsert for {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to upsert for {{.Table.Name}}", err)
}
{{if $canLastInsertID -}}
@ -174,7 +174,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
{{if $canLastInsertID -}}
lastID, err = result.LastInsertId()
if err != nil {
return ErrSyncFail
return errors.Err(ErrSyncFail)
}
{{$colName := index .Table.PKey.Columns 0 -}}
@ -199,7 +199,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
}
{{- else}}
if len(cache.retMapping) != 0 {
@ -211,7 +211,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
_, err = exec.Exec(cache.query, vals...)
}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to upsert {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to upsert {{.Table.Name}}", err)
}
{{- end}}

View file

@ -6,7 +6,7 @@
// Panics on error.
func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
if err := o.Delete(exec); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -14,7 +14,7 @@ func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
// DeleteG will match against the primary key column to find the record to delete.
func (o *{{$tableNameSingular}}) DeleteG() error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
}
return o.Delete(boil.GetDB())
@ -25,7 +25,7 @@ func (o *{{$tableNameSingular}}) DeleteG() error {
// Panics on error.
func (o *{{$tableNameSingular}}) DeleteGP() {
if err := o.DeleteG(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -33,12 +33,12 @@ func (o *{{$tableNameSingular}}) DeleteGP() {
// Delete will match against the primary key column to find the record to delete.
func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
}
{{if not .NoHooks -}}
if err := o.doBeforeDeleteHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{- end}}
@ -52,12 +52,12 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to delete from {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to delete from {{.Table.Name}}", err)
}
{{if not .NoHooks -}}
if err := o.doAfterDeleteHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{- end}}
@ -65,23 +65,23 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
}
// DeleteAllP deletes all rows, and panics on error.
func (q {{$varNameSingular}}Query) DeleteAllP() {
func (q {{$tableNameSingular}}Query) DeleteAllP() {
if err := q.DeleteAll(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// DeleteAll deletes all matching rows.
func (q {{$varNameSingular}}Query) DeleteAll() error {
func (q {{$tableNameSingular}}Query) DeleteAll() error {
if q.Query == nil {
return errors.New("{{.PkgName}}: no {{$varNameSingular}}Query provided for delete all")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}}Query provided for delete all")
}
queries.SetDelete(q.Query)
_, err := q.Query.Exec()
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to delete all from {{.Table.Name}}", err)
}
return nil
@ -90,14 +90,14 @@ func (q {{$varNameSingular}}Query) DeleteAll() error {
// DeleteAllGP deletes all rows in the slice, and panics on error.
func (o {{$tableNameSingular}}Slice) DeleteAllGP() {
if err := o.DeleteAllG(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// DeleteAllG deletes all rows in the slice.
func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
}
return o.DeleteAll(boil.GetDB())
}
@ -105,14 +105,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
// DeleteAllP deletes all rows in the slice, using an executor, and panics on error.
func (o {{$tableNameSingular}}Slice) DeleteAllP(exec boil.Executor) {
if err := o.DeleteAll(exec); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// DeleteAll deletes all rows in the slice, using an executor.
func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
}
if len(o) == 0 {
@ -123,7 +123,7 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
if len({{$varNameSingular}}BeforeDeleteHooks) != 0 {
for _, obj := range o {
if err := obj.doBeforeDeleteHooks(exec); err != nil {
return err
return errors.Err(err)
}
}
}
@ -145,14 +145,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice")
return errors.Prefix("{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice", err)
}
{{if not .NoHooks -}}
if len({{$varNameSingular}}AfterDeleteHooks) != 0 {
for _, obj := range o {
if err := obj.doAfterDeleteHooks(exec); err != nil {
return err
return errors.Err(err)
}
}
}

View file

@ -5,21 +5,21 @@
// ReloadGP refetches the object from the database and panics on error.
func (o *{{$tableNameSingular}}) ReloadGP() {
if err := o.ReloadG(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// ReloadP refetches the object from the database with an executor. Panics on error.
func (o *{{$tableNameSingular}}) ReloadP(exec boil.Executor) {
if err := o.Reload(exec); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// ReloadG refetches the object from the database using the primary keys.
func (o *{{$tableNameSingular}}) ReloadG() error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
}
return o.Reload(boil.GetDB())
@ -30,7 +30,7 @@ func (o *{{$tableNameSingular}}) ReloadG() error {
func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
ret, err := Find{{$tableNameSingular}}(exec, {{.Table.PKey.Columns | stringMap .StringFuncs.titleCase | prefixStringSlice "o." | join ", "}})
if err != nil {
return err
return errors.Err(err)
}
*o = *ret
@ -42,7 +42,7 @@ func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
// Panics on error.
func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
if err := o.ReloadAllG(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -51,7 +51,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
// Panics on error.
func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
if err := o.ReloadAll(exec); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -59,7 +59,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
// and overwrites the original object slice with the newly updated slice.
func (o *{{$tableNameSingular}}Slice) ReloadAllG() error {
if o == nil {
return errors.New("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
return errors.Err("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
}
return o.ReloadAll(boil.GetDB())
@ -86,7 +86,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAll(exec boil.Executor) error {
err := q.Bind(&{{$varNamePlural}})
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice")
return errors.Prefix("{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice", err)
}
*o = {{$varNamePlural}}

View file

@ -1,4 +1,5 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- $varNameSingular := .Table.Name | singular | camelCase -}}
{{- $colDefs := sqlColDefinitions .Table.Columns .Table.PKey.Columns -}}
{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase | stringMap .StringFuncs.replaceReserved -}}
{{- $pkArgs := joinSlices " " $pkNames $colDefs.Types | join ", " -}}
@ -21,7 +22,7 @@ func {{$tableNameSingular}}Exists(exec boil.Executor, {{$pkArgs}}) (bool, error)
err := row.Scan(&exists)
if err != nil {
return false, errors.Wrap(err, "{{.PkgName}}: unable to check if {{.Table.Name}} exists")
return false, errors.Prefix("{{.PkgName}}: unable to check if {{.Table.Name}} exists", err)
}
return exists, nil
@ -36,7 +37,7 @@ func {{$tableNameSingular}}ExistsG({{$pkArgs}}) (bool, error) {
func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
e, err := {{$tableNameSingular}}Exists(boil.GetDB(), {{$pkNames | join ", "}})
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return e
@ -46,8 +47,43 @@ func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
func {{$tableNameSingular}}ExistsP(exec boil.Executor, {{$pkArgs}}) bool {
e, err := {{$tableNameSingular}}Exists(exec, {{$pkNames | join ", "}})
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return e
}
// IsNew() returns true if the record has not been stored in the db yet, i.e. its primary key is not set.
func (o *{{$tableNameSingular}}) IsNew() bool {
r := reflect.ValueOf(o).Elem()
for i := 0; i < r.NumField(); i++ {
column := r.Type().Field(i).Tag.Get("boil")
for _, pkColumn := range {{$varNameSingular}}PrimaryKeyColumns {
if column == pkColumn {
field := r.Field(i)
if field.Interface() != reflect.Zero(field.Type()).Interface() {
return false
}
}
}
}
return true
}
// Save() inserts the record if it does not exist, or updates it if it does.
func (o *{{$tableNameSingular}}) Save(exec boil.Executor, whitelist ...string) error {
if o.IsNew() {
return o.Insert(exec, whitelist...)
} else {
return o.Update(exec, whitelist...)
}
}
// SaveG() inserts the record if it does not exist, or updates it if it does.
func (o *{{$tableNameSingular}}) SaveG(whitelist ...string) error {
if o.IsNew() {
return o.InsertG(whitelist...)
} else {
return o.UpdateG(whitelist...)
}
}
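
IsNew and Save give an active-record style flow: Save inserts when the primary key is still its zero value and updates once the key is set. A rough sketch against the same hypothetical models.User (field names and types are illustrative):

func renameUser(db *sql.DB) error {
    u := &models.User{Name: "alice"} // primary key still zero, so IsNew() reports true

    if err := u.Save(db); err != nil { // inserts, which populates the primary key
        return err
    }

    u.Name = "alice-renamed"
    return u.Save(db) // key is now set, so this updates instead
}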

33
templates/22_query.tpl Normal file
View file

@ -0,0 +1,33 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
// {{$tableNameSingular}}NewQuery returns a new query for this table's records
func {{$tableNameSingular}}NewQuery(exec boil.Executor) *{{$tableNameSingular}}Query {
return &{{$tableNameSingular}}Query{NewQuery(exec, qm.Select("*"), qm.From("{{.Table.Name | .SchemaTable}}"))}
}
// {{$tableNameSingular}}NewQueryG returns a new query for this table's records, using the global database handle
func {{$tableNameSingular}}NewQueryG() *{{$tableNameSingular}}Query {
return {{$tableNameSingular}}NewQuery(boil.GetDB())
}
// Where filters query results
func (q *{{$tableNameSingular}}Query) Where(filters {{$tableNameSingular}}Filter) *{{$tableNameSingular}}Query {
r := reflect.ValueOf(filters)
for i := 0; i < r.NumField(); i++ {
f := r.Field(i)
if f.Elem().IsValid() {
if nullable, ok := f.Elem().Interface().(null.Nullable); ok && nullable.IsNull() {
queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" IS NULL")
} else {
queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" = ?", f.Elem().Interface())
}
}
}
return q
}
// Limit limits query results
func (q *{{$tableNameSingular}}Query) Limit(limit int) *{{$tableNameSingular}}Query {
queries.SetLimit(q.Query, limit)
return q
}
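
The new query constructor composes with the generated filter struct: nil filter fields are skipped, fields holding a null value become IS NULL clauses, and everything else becomes a column = ? clause, exactly as in the FindOne helpers above. A rough sketch (same hypothetical models package; UserNewQuery, UserFilter and the Name field are illustrative names):

func usersNamed(db *sql.DB, name string) ([]*models.User, error) {
    var users []*models.User
    err := models.UserNewQuery(db).
        Where(models.UserFilter{Name: &name}). // becomes "name = ?"
        Limit(10).
        Bind(&users)
    return users, err
}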

107
templates/23_merge.tpl Normal file
View file

@ -0,0 +1,107 @@
{{- $tableNamePlural := .Table.Name | plural | titleCase -}}
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- if .Table.IsJoinTable -}}
{{- else -}}
{{- $dot := . }}
// Merge combines two {{$tableNamePlural}} into one. The primary record will be kept, and the secondary will be deleted.
func Merge{{$tableNamePlural}}(exec boil.Executor, primaryID uint64, secondaryID uint64) (err error) {
tx, ok := exec.(boil.Transactor)
if !ok {
txdb, ok := exec.(boil.Beginner)
if !ok {
return errors.Err("database does not support transactions")
}
tx, err = txdb.Begin()
if err != nil {
return errors.Err(err)
}
defer func() {
if p := recover(); p != nil {
tx.Rollback()
panic(p) // Rollback, then propagate panic
} else if err != nil {
tx.Rollback()
} else {
err = tx.Commit()
}
}()
}
primary, err := Find{{$tableNameSingular}}(tx, primaryID)
if err != nil {
return errors.Err(err)
} else if primary == nil {
return errors.Err("primary {{$tableNameSingular}} not found")
}
secondary, err := Find{{$tableNameSingular}}(tx, secondaryID)
if err != nil {
return errors.Err(err)
} else if secondary == nil {
return errors.Err("secondary {{$tableNameSingular}} not found")
}
foreignKeys := []foreignKey{
{{- range .Tables -}}
{{- range .FKeys -}}
{{- if eq $dot.Table.Name .ForeignTable }}
{foreignTable: "{{.Table}}", foreignColumn: "{{.Column}}"},
{{- end -}}
{{- end -}}
{{- end }}
}
conflictingKeys := []conflictingUniqueKey{
{{- range .Tables -}}
{{- $table := . -}}
{{- range .FKeys -}}
{{- $fk := . -}}
{{- if eq $dot.Table.Name .ForeignTable -}}
{{- range $table.UKeys -}}
{{- if setInclude $fk.Column .Columns }}
{table: "{{$fk.Table}}", objectIdColumn: "{{$fk.Column}}", columns: []string{`{{ .Columns | join "`,`" }}`}},
{{- end -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{- end }}
}
err = mergeModels(tx, primaryID, secondaryID, foreignKeys, conflictingKeys)
if err != nil {
return err
}
pr := reflect.ValueOf(primary)
sr := reflect.ValueOf(secondary)
// for any column that's null on the primary and not null on the secondary, copy from secondary to primary
for i := 0; i < sr.Elem().NumField(); i++ {
pf := pr.Elem().Field(i)
sf := sr.Elem().Field(i)
if sf.IsValid() {
if nullable, ok := sf.Interface().(null.Nullable); ok && !nullable.IsNull() && pf.Interface().(null.Nullable).IsNull() {
pf.Set(sf)
}
}
}
err = primary.Update(tx)
if err != nil {
return err
}
err = secondary.Delete(tx)
if err != nil {
return err
}
return nil
}
// Merge{{$tableNamePlural}}G combines two {{$tableNamePlural}} into one using the global database handle. The primary record will be kept, and the secondary will be deleted.
func Merge{{$tableNamePlural}}G(primaryID uint64, secondaryID uint64) error {
return Merge{{$tableNamePlural}}(boil.GetDB(), primaryID, secondaryID)
}
{{- end -}}{{/* join table */}}
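
Merge repoints every foreign key that references the secondary record, deletes rows that would violate a unique key once repointed, copies any columns that are null on the primary but set on the secondary, and finally deletes the secondary row. Usage is a single call; a sketch for a hypothetical users table (the executor must support transactions, since the merge runs inside one):

// Sketch only: MergeUsers is the illustrative generated name for a "users" table.
// Keeps user 1, folds user 2 into it, then deletes user 2.
if err := models.MergeUsers(db, 1, 2); err != nil {
    log.Fatal(err)
}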

View file

@ -19,3 +19,168 @@ func NewQuery(exec boil.Executor, mods ...qm.QueryMod) *queries.Query {
return q
}
func mergeModels(tx boil.Executor, primaryID uint64, secondaryID uint64, foreignKeys []foreignKey, conflictingKeys []conflictingUniqueKey) error {
if len(foreignKeys) < 1 {
return nil
}
var err error
for _, conflict := range conflictingKeys {
if len(conflict.columns) == 1 && conflict.columns[0] == conflict.objectIdColumn {
err = deleteOneToOneConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
} else {
err = deleteOneToManyConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
}
if err != nil {
return err
}
}
for _, fk := range foreignKeys {
// TODO: use NewQuery here, not plain sql
query := fmt.Sprintf(
"UPDATE %s SET %s = %s WHERE %s = %s",
fk.foreignTable, fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 2, 1),
)
_, err = tx.Exec(query, primaryID, secondaryID)
if err != nil {
return errors.Err(err)
}
}
return checkMerge(tx, foreignKeys)
}
func deleteOneToOneConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
query := fmt.Sprintf(
"SELECT COUNT(*) FROM %s WHERE %s IN (%s)",
conflict.table, conflict.objectIdColumn,
strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
)
var count int
err := tx.QueryRow(query, primaryID, secondaryID).Scan(&count)
if err != nil {
return errors.Err(err)
}
if count > 2 {
return errors.Err("it should not be possible to have more than two rows here")
} else if count != 2 {
return nil // no conflicting rows
}
query = fmt.Sprintf(
"DELETE FROM %s WHERE %s = %s",
conflict.table, conflict.objectIdColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
)
_, err = tx.Exec(query, secondaryID)
return errors.Err(err)
}
func deleteOneToManyConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
conflictingColumns := strmangle.SetComplement(conflict.columns, []string{conflict.objectIdColumn})
query := fmt.Sprintf(
"SELECT %s FROM %s WHERE %s IN (%s) GROUP BY %s HAVING count(distinct %s) > 1",
strings.Join(conflictingColumns, ","), conflict.table, conflict.objectIdColumn,
strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
strings.Join(conflictingColumns, ","), conflict.objectIdColumn,
)
// The query parameters are the primary and secondary object IDs whose rows may conflict.
rows, err := tx.Query(query, primaryID, secondaryID)
if err != nil {
return errors.Err(err)
}
// Since we don't know in advance how many columns the query returns, we have to assign them
// dynamically so they can be used in the delete query.
colNames, err := rows.Columns()
if err != nil {
return errors.Err(err)
}
// Each row returned by the query is a conflicting row that must be removed. Store each row's keys in a slice.
var rowsToRemove [][]interface{}
for rows.Next() {
//Set pointers for dynamic scan
iColPtrs := make([]interface{}, len(colNames))
for i := 0; i < len(colNames); i++ {
s := ""
iColPtrs[i] = &s
}
//Dynamically scan n columns
err = rows.Scan(iColPtrs...)
if err != nil {
return errors.Err(err)
}
//Grab scanned values for query arguments
iCol := make([]interface{}, len(colNames))
for i, col := range iColPtrs {
x := col.(*string)
iCol[i] = *x
}
rowsToRemove = append(rowsToRemove, iCol)
}
defer rows.Close()
//This query will adjust dynamically depending on the number of conflicting keys, adding AND expressions for each
// key to ensure the right conflicting rows are deleted.
query = fmt.Sprintf(
"DELETE FROM %s WHERE %s = ?",
conflict.table,
// conflicting columns first, then the object id column, matching the value order in rowsToRemove
strings.Join(append(conflictingColumns, conflict.objectIdColumn), " = ? AND "),
)
// There may be multiple conflicting rows between the two object IDs. The SELECT above collected each row's
// key columns; delete those rows here one at a time, with the secondary ID appended as the final argument.
for _, rowToDelete := range rowsToRemove {
rowToDelete = append(rowToDelete, secondaryID)
_, err = tx.Exec(query, rowToDelete...)
if err != nil {
return errors.Err(err)
}
}
return nil
}
func checkMerge(tx boil.Executor, foreignKeys []foreignKey) error {
uniqueColumns := []interface{}{}
uniqueColumnNames := map[string]bool{}
handledTablesColumns := map[string]bool{}
for _, fk := range foreignKeys {
handledTablesColumns[fk.foreignTable+"."+fk.foreignColumn] = true
if _, ok := uniqueColumnNames[fk.foreignColumn]; !ok {
uniqueColumns = append(uniqueColumns, fk.foreignColumn)
uniqueColumnNames[fk.foreignColumn] = true
}
}
q := fmt.Sprintf(
`SELECT table_name, column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA=DATABASE() AND column_name IN (%s)`,
strmangle.Placeholders(dialect.IndexPlaceholders, len(uniqueColumns), 1, 1),
)
rows, err := tx.Query(q, uniqueColumns...)
if err != nil {
return errors.Err(err)
}
defer rows.Close()
for rows.Next() {
var tableName string
var columnName string
err = rows.Scan(&tableName, &columnName)
if err != nil {
return errors.Err(err)
}
if _, exists := handledTablesColumns[tableName+"."+columnName]; !exists {
return errors.Err("missing merge for " + tableName + "." + columnName)
}
}
return nil
}

View file

@ -1,10 +1,26 @@
// M type is for providing columns and column values to UpdateAll.
type M map[string]interface{}
// foreignKey connects two tables. When merging records, foreign keys from the secondary record must
// be reassigned to the primary record.
type foreignKey struct {
foreignTable string
foreignColumn string
}
// conflictingUniqueKey records a merge conflict. If two rows exist with the same value in the
// conflicting column for two records being merged, one row must be deleted.
type conflictingUniqueKey struct {
table string
objectIdColumn string
columns []string
}
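
For concreteness, these are the values the merge template above would emit for a hypothetical schema in which post.user_id references user.id and post has a unique key on (user_id, slug); all table and column names here are illustrative:

foreignKeys := []foreignKey{
    // every post row pointing at the secondary user gets repointed to the primary
    {foreignTable: "post", foreignColumn: "user_id"},
}
conflictingKeys := []conflictingUniqueKey{
    // unique(user_id, slug): if both users own a post with the same slug, repointing
    // user_id would violate the key, so the secondary's conflicting row is deleted first
    {table: "post", objectIdColumn: "user_id", columns: []string{"user_id", "slug"}},
}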
// ErrSyncFail occurs during insert when the record could not be retrieved in
// order to populate default value information. This usually happens when LastInsertId
// fails or there was a primary key configuration that was not resolvable.
var ErrSyncFail = errors.New("{{.PkgName}}: failed to synchronize data after insert")
var ErrSyncFail = errors.Base("{{.PkgName}}: failed to synchronize data after insert")
type insertCache struct {
query string

View file

@ -25,17 +25,17 @@ func (m *mssqlTester) setup() error {
m.testDBName = randomize.StableDBName(m.dbName)
if err = m.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
if err = m.createTestDB(); err != nil {
return err
return errors.Err(err)
}
createCmd := exec.Command("sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass, "-d", m.testDBName)
f, err := os.Open("tables_schema.sql")
if err != nil {
return errors.Wrap(err, "failed to open tables_schema.sql file")
return errors.Prefix("failed to open tables_schema.sql file", err)
}
defer f.Close()
@ -43,12 +43,12 @@ func (m *mssqlTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxMSSQLkey, f)
if err = createCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start sqlcmd command")
return errors.Prefix("failed to start sqlcmd command", err)
}
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for sqlcmd command")
return errors.Prefix("failed to wait for sqlcmd command", err)
}
return nil
@ -92,7 +92,7 @@ func (m *mssqlTester) teardown() error {
}
if err := m.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
return nil
@@ -110,7 +110,7 @@ func (m *mssqlTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return err
return errors.Err(err)
}
return nil
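The change running through this tester and the ones below swaps the github.com/pkg/errors helpers for the lbry.go errors package. A minimal sketch of the substitution pattern, assuming the lbry.go v2 extras import path (the exact path depends on the lbry.go version pinned by the project):

package main

import (
	"fmt"
	"os"

	// Assumed import path; older lbry.go revisions expose the same package at a different path.
	"github.com/lbryio/lbry.go/v2/extras/errors"
)

// Substitution pattern applied throughout the testers:
//   errors.Wrap(err, "msg")  ->  errors.Prefix("msg", err)
//   errors.New("msg")        ->  errors.Err("msg") (or errors.Base for package-level sentinels)
//   return err               ->  return errors.Err(err)
func openSchema(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return errors.Prefix("failed to open schema file", err)
	}
	defer f.Close()
	return nil
}

func main() {
	if err := openSchema("tables_schema.sql"); err != nil {
		fmt.Println(err)
	}
}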

View file

@@ -30,14 +30,14 @@ func (m *mysqlTester) setup() error {
m.testDBName = randomize.StableDBName(m.dbName)
if err = m.makeOptionFile(); err != nil {
return errors.Wrap(err, "couldn't make option file")
return errors.Prefix("couldn't make option file", err)
}
if err = m.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
if err = m.createTestDB(); err != nil {
return err
return errors.Err(err)
}
dumpCmd := exec.Command("mysqldump", m.defaultsFile(), "--no-data", m.dbName)
@@ -48,22 +48,22 @@ func (m *mysqlTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxMySQLkey, r)
if err = dumpCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start mysqldump command")
return errors.Prefix("failed to start mysqldump command", err)
}
if err = createCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start mysql command")
return errors.Prefix("failed to start mysql command", err)
}
if err = dumpCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for mysqldump command")
return errors.Prefix("failed to wait for mysqldump command", err)
}
w.Close() // After dumpCmd is done, close the write end of the pipe
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for mysql command")
return errors.Prefix("failed to wait for mysql command", err)
}
return nil
@@ -87,7 +87,7 @@ func (m *mysqlTester) defaultsFile() string {
func (m *mysqlTester) makeOptionFile() error {
tmp, err := ioutil.TempFile("", "optionfile")
if err != nil {
return errors.Wrap(err, "failed to create option file")
return errors.Prefix("failed to create option file", err)
}
isTCP := false
@@ -95,7 +95,7 @@ func (m *mysqlTester) makeOptionFile() error {
if os.IsNotExist(err) {
isTCP = true
} else if err != nil {
return errors.Wrap(err, "could not stat m.host")
return errors.Prefix("could not stat m.host", err)
}
fmt.Fprintln(tmp, "[client]")
@@ -139,7 +139,7 @@ func (m *mysqlTester) teardown() error {
}
if err := m.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
return os.Remove(m.optionFile)
@@ -159,7 +159,7 @@ func (m *mysqlTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return err
return errors.Err(err)
}
return nil
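The setup above streams the schema from mysqldump straight into mysql. Below is a self-contained sketch of that plumbing using io.Pipe and fmt.Errorf for brevity; it omits the defaults file and the newFKeyDestroyer wrapper that the real tester inserts between the two commands:

package main

import (
	"fmt"
	"io"
	"os/exec"
)

// copySchema pipes a schema-only dump of srcDB into destDB.
// Requires the mysqldump and mysql binaries and a reachable server.
func copySchema(srcDB, destDB string) error {
	dumpCmd := exec.Command("mysqldump", "--no-data", srcDB)
	createCmd := exec.Command("mysql", destDB)

	r, w := io.Pipe()
	dumpCmd.Stdout = w
	createCmd.Stdin = r

	if err := dumpCmd.Start(); err != nil {
		return fmt.Errorf("failed to start mysqldump: %w", err)
	}
	if err := createCmd.Start(); err != nil {
		return fmt.Errorf("failed to start mysql: %w", err)
	}
	if err := dumpCmd.Wait(); err != nil {
		return fmt.Errorf("failed to wait for mysqldump: %w", err)
	}
	w.Close() // after the dump finishes, close the write end so mysql sees EOF
	if err := createCmd.Wait(); err != nil {
		return fmt.Errorf("failed to wait for mysql: %w", err)
	}
	return nil
}

func main() {
	_ = copySchema // illustration only; call with real database names to use it
}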

View file

@@ -33,14 +33,14 @@ func (p *pgTester) setup() error {
p.testDBName = randomize.StableDBName(p.dbName)
if err = p.makePGPassFile(); err != nil {
return err
return errors.Err(err)
}
if err = p.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
if err = p.createTestDB(); err != nil {
return err
return errors.Err(err)
}
dumpCmd := exec.Command("pg_dump", "--schema-only", p.dbName)
@@ -53,22 +53,22 @@ func (p *pgTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxPGFkey, r)
if err = dumpCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start pg_dump command")
return errors.Prefix("failed to start pg_dump command", err)
}
if err = createCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start psql command")
return errors.Prefix("failed to start psql command", err)
}
if err = dumpCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for pg_dump command")
return errors.Prefix("failed to wait for pg_dump command", err)
}
w.Close() // After dumpCmd is done, close the write end of the pipe
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for psql command")
return errors.Prefix("failed to wait for psql command", err)
}
return nil
@@ -90,7 +90,7 @@ func (p *pgTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return err
return errors.Err(err)
}
return nil
@@ -108,7 +108,7 @@ func (p *pgTester) pgEnv() []string {
func (p *pgTester) makePGPassFile() error {
tmp, err := ioutil.TempFile("", "pgpass")
if err != nil {
return errors.Wrap(err, "failed to create option file")
return errors.Prefix("failed to create option file", err)
}
fmt.Fprintf(tmp, "%s:%d:postgres:%s", p.host, p.port, p.user)
@@ -145,12 +145,12 @@ func (p *pgTester) dropTestDB() error {
func (p *pgTester) teardown() error {
var err error
if err = p.dbConn.Close(); err != nil {
return err
return errors.Err(err)
}
p.dbConn = nil
if err = p.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
return os.Remove(p.pgPassFile)

View file

@@ -143,5 +143,5 @@ func validateConfig(driverName string) error {
).Check()
}
return errors.New("not a valid driver name")
return errors.Err("not a valid driver name")
}