Compare commits

...

61 commits

Author SHA1 Message Date
Mark
6b4e052bed
Merge pull request #10 from lbryio/eagerloading_depth
Fix multi-depth eager loading of relationships. If a relationship is …
2020-09-02 15:59:52 -04:00
Mark Beamer Jr
3db4f30f56
Fix multi-depth eager loading of relationships. If a relationship is nil, do not add it to the collection for checking the next depth level. 2020-08-19 23:43:39 -04:00
Niko Storni
256a6d4225 update lbry.go library 2020-03-05 16:29:03 -05:00
Mark
3f035a9fe2
Merge pull request #7 from lbryio/guard_null_mod
Allow null query mods for dynamic queries
2019-07-01 23:56:28 -04:00
Mark Beamer Jr
c01b182839
Allow null query mods for dynamic queries 2019-07-01 23:54:54 -04:00
Mark
e3fe976c3c
Merge pull request #6 from lbryio/force_index
Force index
2019-06-30 23:17:36 -04:00
Mark Beamer Jr
4e1b83ab39
Add force index query mod for select query.
Fix eager loading casting for nested levels.
2019-06-30 23:01:24 -04:00
Mark Beamer Jr
f892107dad
missing import 2019-02-07 20:24:07 -05:00
Mark
fadcbfa8b6
Merge pull request #3 from lbryio/merge_changes
added support for 1 to 1 relations and added support for n unique key…
2019-02-07 19:51:35 -05:00
Mark Beamer Jr
29172e976b
changed from Fatal to Error - copy paste mistake. 2019-02-07 18:20:23 -05:00
Mark Beamer Jr
eea3d349a7
added support for 1 to 1 relations and added support for n unique keys for conflict resolution during merging.
split out into two functions.
2019-02-07 18:20:22 -05:00
Mark
d180a095ca
Merge pull request #4 from lbryio/lbrygo_update
Updated to use the latest lbry.go changes
2019-01-10 20:13:22 -05:00
Mark Beamer Jr
3baa9e72ca
Updated to use the latest lbry.go changes 2019-01-10 20:11:42 -05:00
Mark
bc08aa6160
Merge pull request #2 from lbryio/test_authchanges
make it true
2018-06-06 19:05:29 -04:00
Mark Beamer Jr
466f2d5b2c make it true 2018-06-06 19:03:50 -04:00
Mark
45ee5c902f
Merge pull request #1 from lbryio/test_authchanges
reverted default to native password authentication.
2018-06-06 18:57:40 -04:00
Mark Beamer Jr
8d4055e3eb reverted default to native password authentication. 2018-06-06 18:26:33 -04:00
Alex Grintsvayg
396f42bc91 never update created_at, updated_at columns 2018-04-16 12:41:55 -04:00
Alex Grintsvayg
77fc991e7b new location for null.go 2018-02-22 13:41:05 -05:00
Alex Grintsvayg
e4a52e21b6 switch to our errors package 2018-02-07 09:35:46 -05:00
Alex Grintsvayg
0b0a1f21c2 allow generic interface or real sql tx 2017-09-02 11:52:31 -04:00
Alex Grintsvayg
cd445bf2f4 revert interpolateParams, since we're doing our own logging now 2017-09-02 11:31:15 -04:00
Alex Grintsvayg
55f42bc038 just use interfaces 2017-08-24 11:58:13 -04:00
Alex Grintsvayg
544ff7afdd typo 2017-08-23 17:03:24 -04:00
Alex Grintsvayg
faec346481 use interface to enable custom Tx types 2017-08-23 16:59:29 -04:00
Alex Grintsvayg
3abac13aeb export InterpolateParams 2017-08-23 16:37:33 -04:00
Alex Grintsvayg
8837a986ee consistent order to unique key columns 2017-08-23 13:36:13 -04:00
Aaron L
ce9d13abf0 Fix hook documentation to include error return
Fix #171
2017-08-01 14:40:58 -04:00
Aaron L
ca9f47de8b Correct nullability for tests in to_one
- Use the nullability of the fkey column in question to determine
  nullability for the entire struct to make things easy, otherwise
  we'd have to pluck out one at a time. This makes the tests pass
  instead of fail sporadically.
- Fix #160
2017-08-01 14:40:38 -04:00
Aaron L
e339812027 Stop using aliases in the relationship select
- This caused issues with mysql who doesn't understand the syntax:
  "delete from x as y where y.id = ?"
2017-08-01 14:39:10 -04:00
Guy Tish
23f245776d Added table columns and table names as anonymous struct 2017-08-01 14:27:50 -04:00
Alex Grintsvayg
99a3a1d091 make merge compatible with an existing transaction 2017-08-01 13:00:14 -04:00
Alex Grintsvayg
9c8262b702 fix exists() finisher 2017-07-20 10:50:55 -04:00
Alex Grintsvayg
05c7f7d06a switch to more complete null package, implement nicer sql logging 2017-07-06 15:15:00 -04:00
Alex Grintsvayg
ed423a3606 detect autoincrement column, fix lastID in upsert when update doesnt change anything 2017-06-12 14:02:04 -04:00
Alex Grintsvayg
09c585cdb1 dont run delete query when there's nothing to delete 2017-06-01 09:58:50 -04:00
Alex Grintsvayg
912a689701 added unique key detection (mysql only for now), improved merging 2017-05-12 06:56:48 -04:00
Alex Grintsvayg
31fe8b6e1d model merging 2017-05-10 15:35:05 -04:00
Alex Grintsvayg
b160e5c1f4 dont error if no rows found 2017-05-10 11:21:29 -04:00
Alex Grintsvayg
ed43c9078f made query structs public, added IS NULL to filter 2017-05-09 15:30:04 -04:00
Alex Grintsvayg
f863ecb48e add model Filters, FindOrCreate methods, IsNew() 2017-05-08 16:39:16 -04:00
Alex Grintsvayg
451723ccb9 fix imports to point at lbryio repo 2017-05-08 13:25:15 -04:00
Alex Grintsvayg
687b0506db Merge branch 'mysql-uint-fix'
* mysql-uint-fix:
  detect unsigned int columns in mysql
2017-05-08 13:12:48 -04:00
Alex Grintsvayg
1467b88f04 detect unsigned int columns in mysql 2017-05-08 13:10:21 -04:00
Aaron L
070df18197 Add mailing lists 2017-05-01 19:44:52 -07:00
Aaron L
3b5ab423b3 Bump version 2017-05-01 18:41:49 -07:00
Aaron L
632b89fae0 Merge branch 'dev' 2017-05-01 18:41:02 -07:00
Aaron L
112a836af2 Make UDT's that aren't enums fall through
- This allows typse that are not enumerations to properly escape the
  enumeration code in the query.
- Fix #131
2017-04-28 21:07:39 -07:00
Aaron L
7a8d78cceb Merge branch 'mssql_tests' into dev 2017-04-28 20:08:34 -07:00
Aaron L
a9264e6447 Merge branch 'maks/fix_noauto_template' into dev 2017-04-24 10:15:00 -07:00
Maksim
bdd28d9d5b fix removing whitespace 2017-04-24 12:46:56 -04:00
Aaron L
6a0817d37d Update benchmark section 2017-04-23 14:28:21 -07:00
Aaron
5a33894412 Add contribution guidelines 2017-04-16 11:59:13 -07:00
Aaron
d9b991e487 Merge remote-tracking branch 'michsior/dev' into dev 2017-04-16 11:35:12 -07:00
Michał Mrozek
91a798d9af Update global.go 2017-04-16 20:00:43 +02:00
Aaron L
0818af0e26 Don't use XSlice where unneeded.
- In the bowels of the eager loading we weave in and out of reflection,
  but we should not care about using XSlice unless it's going back to
  the user. This change makes it so the XSlice is only used where it
  matters, everywhere else is *[]*X to avoid type assertion errors from
  being able to have either or come into the Load() functions.
- Fix #124
2017-04-04 19:44:36 -07:00
Aaron L
10cfe74989 Fix a bug that could occur on no-field inserts 2017-04-04 19:42:49 -07:00
Aaron L
d13410617f Correct whitespace errors 2017-04-04 19:40:12 -07:00
Aaron L
91950e711e Add slack badge to readme 2017-04-04 17:46:51 -07:00
Patrick O
aca431b3ce Merge pull request #123 from concreted/patch-1
(fix) typo in README
2017-04-01 12:49:13 +10:00
Aric Huang
f94fa547e7 (fix) typo in README 2017-03-31 15:02:32 -07:00
67 changed files with 1145 additions and 430 deletions

1
.gitignore vendored
View file

@ -4,3 +4,4 @@ sqlboiler.toml
models/
testschema.sql
.cover
/.idea

60
CONTRIBUTING.md Normal file
View file

@ -0,0 +1,60 @@
# Contributing
Thanks for your interest in contributing to SQLBoiler!
We have a very lightweight process and aim to keep it that way.
Read the sections for the piece you're interested in and go from
there.
If you need quick communication we're usually on [Slack](https://sqlboiler.from-the.cloud).
# New Code / Features
## Small Change
#### TLDR
1. Open PR against **dev** branch with explanation
1. Participate in Github Code Review
#### Long version
For code that requires little to no discussion, please just open a pull request with some
explanation against the **dev** branch. All code goes through dev before going out in a release.
## Bigger Change
#### TLDR
1. Start proposal of idea in Github issue
1. After design consensus, open a PR with the work against the **dev** branch
1. Participate in Github Code Review
#### Long version
If, however, you're working on something bigger, it's usually better to check with us on the idea
before starting on a pull request, just so there's no time wasted in redoing/refactoring or being
outright rejected because the PR is at odds with the design. The best way to accomplish this is to
open an issue to discuss it. It can always start as a Slack conversation but should eventually end
up as an issue to avoid penalizing the rest of the users for not being on Slack. Once we agree on
the way to do something, then open the PR against the **dev** branch and we'll commence code review
with the Github code review tools. Then it will be merged into dev, and later go out in a release.
# Bugs
Issues should be filed on Github; simply use the template provided and fill in the details. If there's
more information you feel you should give, use your best judgement and add it in; the more the better.
See the section below for information on providing database schemas.
Bugs that have responses from contributors but no further action from whoever opened them will, after a
time, be closed with the comment: "Stale".
## Schemas
A database schema can help us fix generation issues very quickly. However, not everyone is willing to part
with their database schema, for various reasons, and that's fine. Instead of the full schema, please
provide a subset of your database (you can munge the names so they're unrecognizable) that can
help us reproduce the problem.
*Note:* Your schema information is included in the output from `--debug`, so be careful giving this
information out publicly on a Github issue if you're sensitive about this.

129
README.md
View file

@ -2,6 +2,9 @@
[![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/vattle/sqlboiler/blob/master/LICENSE)
[![GoDoc](https://godoc.org/github.com/vattle/sqlboiler?status.svg)](https://godoc.org/github.com/vattle/sqlboiler)
[![Mail](https://img.shields.io/badge/mail%20list-sqlboiler-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler)
[![Mail-Annc](https://img.shields.io/badge/mail%20list-sqlboiler--announce-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler-announce)
[![Slack](https://img.shields.io/badge/slack-%23general-lightgrey.svg)](https://sqlboiler.from-the.cloud)
[![CircleCI](https://circleci.com/gh/vattle/sqlboiler.svg?style=shield)](https://circleci.com/gh/vattle/sqlboiler)
[![Go Report Card](https://goreportcard.com/badge/vattle/sqlboiler)](http://goreportcard.com/report/vattle/sqlboiler)
@ -421,7 +424,7 @@ much benefit over it.
**Method 3: Embedding**
This pattern is not for the feint of heart, what it provides in benefits it
This pattern is not for the faint of heart, what it provides in benefits it
more than makes up for in downsides. It's possible to embed the SQLBoiler
structs inside your own to enhance them. However it's subject to easy breakages
and a dependency on these additional objects. It can also introduce
@ -953,15 +956,16 @@ it with the `AddModelHook` method. Here is an example of a before insert hook:
```go
// Define my hook function
func myHook(exec boil.Executor, p *Pilot) {
func myHook(exec boil.Executor, p *Pilot) error {
// Do stuff
return nil
}
// Register my before insert hook for pilots
models.AddPilotHook(boil.BeforeInsertHook, myHook)
```
Your `ModelHook` will always be defined as `func(boil.Executor, *Model)`
Your `ModelHook` will always be defined as `func(boil.Executor, *Model) error`
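
As an aside, a minimal sketch of a hook under the new signature; `Pilot` and `AddPilotHook` follow the README's own example above, while the name check itself is hypothetical. Returning a non-nil error aborts the operation and is passed back to the caller:

```go
// requirePilotName is a hypothetical before-insert hook. Returning a non-nil
// error stops the insert and the error is returned to the caller.
func requirePilotName(exec boil.Executor, p *Pilot) error {
	if p.Name == "" {
		return errors.New("pilot name must not be empty")
	}
	return nil
}

// Register it exactly like the example above.
models.AddPilotHook(boil.BeforeInsertHook, requirePilotName)
```
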
### Transactions
@ -1265,51 +1269,78 @@ generator is located at: https://github.com/vattle/sqlboiler
If you'd like to run the benchmarks yourself check out our [boilbench](https://github.com/vattle/boilbench) repo.
Here are the results (lower is better):
`go test -bench . -benchmem`
```
BenchmarkGORMDelete/gorm-8 100000 15364 ns/op 5395 B/op 113 allocs/op
BenchmarkGORPDelete/gorp-8 1000000 1703 ns/op 304 B/op 12 allocs/op
BenchmarkXORMDelete/xorm-8 100000 14733 ns/op 3634 B/op 107 allocs/op
BenchmarkBoilDelete/boil-8 2000000 986 ns/op 120 B/op 7 allocs/op
BenchmarkGORMInsert/gorm-8 100000 19197 ns/op 8054 B/op 161 allocs/op
BenchmarkGORPInsert/gorp-8 500000 3413 ns/op 1008 B/op 32 allocs/op
BenchmarkXORMInsert/xorm-8 100000 15428 ns/op 5836 B/op 131 allocs/op
BenchmarkBoilInsert/boil-8 500000 3041 ns/op 568 B/op 21 allocs/op
BenchmarkGORMSelectAll/gorm-8 20000 85422 ns/op 29912 B/op 511 allocs/op
BenchmarkGORPSelectAll/gorp-8 50000 35824 ns/op 8837 B/op 312 allocs/op
BenchmarkXORMSelectAll/xorm-8 30000 58843 ns/op 13805 B/op 298 allocs/op
BenchmarkBoilSelectAll/boil-8 100000 13844 ns/op 2840 B/op 61 allocs/op
BenchmarkGORMSelectSubset/gorm-8 10000 100714 ns/op 30875 B/op 517 allocs/op
BenchmarkGORPSelectSubset/gorp-8 30000 43547 ns/op 8837 B/op 312 allocs/op
BenchmarkXORMSelectSubset/xorm-8 30000 48128 ns/op 12989 B/op 282 allocs/op
BenchmarkBoilSelectSubset/boil-8 100000 12316 ns/op 2977 B/op 65 allocs/op
BenchmarkGORMSelectComplex/gorm-8 10000 133598 ns/op 49398 B/op 772 allocs/op
BenchmarkGORPSelectComplex/gorp-8 50000 40588 ns/op 9037 B/op 321 allocs/op
BenchmarkXORMSelectComplex/xorm-8 30000 56367 ns/op 14174 B/op 313 allocs/op
BenchmarkBoilSelectComplex/boil-8 100000 16941 ns/op 3821 B/op 95 allocs/op
BenchmarkGORMUpdate/gorm-8 50000 25406 ns/op 9710 B/op 195 allocs/op
BenchmarkGORPUpdate/gorp-8 300000 3614 ns/op 1152 B/op 34 allocs/op
BenchmarkXORMUpdate/xorm-8 100000 17510 ns/op 4458 B/op 132 allocs/op
BenchmarkBoilUpdate/boil-8 500000 2958 ns/op 520 B/op 16 allocs/op
BenchmarkGORMRawBind/gorm-8 10000 112577 ns/op 38270 B/op 595 allocs/op
BenchmarkGORPRawBind/gorp-8 30000 40967 ns/op 8837 B/op 312 allocs/op
BenchmarkXORMRawBind/xorm-8 30000 54739 ns/op 12692 B/op 273 allocs/op
BenchmarkSQLXRawBind/sqlx-8 200000 13537 ns/op 4268 B/op 49 allocs/op
BenchmarkBoilRawBind/boil-8 200000 11144 ns/op 4334 B/op 49 allocs/op
```bash
go test -bench . -benchmem
```
<img style="margin-right:6px;" src="http://i.imgur.com/TglZGoI.png"/>
<img style="margin-right:6px;" src="http://i.imgur.com/Ktm2ta4.png"/>
<img style="margin-right:6px;" src="http://i.imgur.com/yv8kFPA.png"/>
<img style="margin-right:6px;" src="http://i.imgur.com/890Zswe.png"/>
<img style="margin-right:6px;" src="http://i.imgur.com/qMgoAFJ.png"/>
<img style="margin-right:6px;" src="http://i.imgur.com/sDoNiCN.png"/>
<img style="margin-right:6px;" src="http://i.imgur.com/EvUa4UT.png"/>
### Results (lower is better)
Test machine:
```text
OS: Ubuntu 16.04
CPU: Intel(R) Core(TM) i7-4771 CPU @ 3.50GHz
Mem: 16GB
Go: go version go1.8.1 linux/amd64
```
The graphs below have many runs like this as input to calculate errors. Here
is a sample run:
```text
BenchmarkGORMSelectAll/gorm-8 20000 66500 ns/op 28998 B/op 455 allocs/op
BenchmarkGORPSelectAll/gorp-8 50000 31305 ns/op 9141 B/op 318 allocs/op
BenchmarkXORMSelectAll/xorm-8 20000 66074 ns/op 16317 B/op 417 allocs/op
BenchmarkKallaxSelectAll/kallax-8 100000 18278 ns/op 7428 B/op 145 allocs/op
BenchmarkBoilSelectAll/boil-8 100000 12759 ns/op 3145 B/op 67 allocs/op
BenchmarkGORMSelectSubset/gorm-8 20000 69469 ns/op 30008 B/op 462 allocs/op
BenchmarkGORPSelectSubset/gorp-8 50000 31102 ns/op 9141 B/op 318 allocs/op
BenchmarkXORMSelectSubset/xorm-8 20000 64151 ns/op 15933 B/op 414 allocs/op
BenchmarkKallaxSelectSubset/kallax-8 100000 16996 ns/op 6499 B/op 132 allocs/op
BenchmarkBoilSelectSubset/boil-8 100000 13579 ns/op 3281 B/op 71 allocs/op
BenchmarkGORMSelectComplex/gorm-8 20000 76284 ns/op 34566 B/op 521 allocs/op
BenchmarkGORPSelectComplex/gorp-8 50000 31886 ns/op 9501 B/op 328 allocs/op
BenchmarkXORMSelectComplex/xorm-8 20000 68430 ns/op 17694 B/op 464 allocs/op
BenchmarkKallaxSelectComplex/kallax-8 50000 26095 ns/op 10293 B/op 212 allocs/op
BenchmarkBoilSelectComplex/boil-8 100000 16403 ns/op 4205 B/op 102 allocs/op
BenchmarkGORMDelete/gorm-8 200000 10356 ns/op 5059 B/op 98 allocs/op
BenchmarkGORPDelete/gorp-8 1000000 1335 ns/op 352 B/op 13 allocs/op
BenchmarkXORMDelete/xorm-8 200000 10796 ns/op 4146 B/op 122 allocs/op
BenchmarkKallaxDelete/kallax-8 300000 5141 ns/op 2241 B/op 48 allocs/op
BenchmarkBoilDelete/boil-8 2000000 796 ns/op 168 B/op 8 allocs/op
BenchmarkGORMInsert/gorm-8 100000 15238 ns/op 8278 B/op 150 allocs/op
BenchmarkGORPInsert/gorp-8 300000 4648 ns/op 1616 B/op 38 allocs/op
BenchmarkXORMInsert/xorm-8 100000 12600 ns/op 6092 B/op 138 allocs/op
BenchmarkKallaxInsert/kallax-8 100000 15115 ns/op 6003 B/op 126 allocs/op
BenchmarkBoilInsert/boil-8 1000000 2249 ns/op 984 B/op 23 allocs/op
BenchmarkGORMUpdate/gorm-8 100000 18609 ns/op 9389 B/op 174 allocs/op
BenchmarkGORPUpdate/gorp-8 500000 3180 ns/op 1536 B/op 35 allocs/op
BenchmarkXORMUpdate/xorm-8 100000 13149 ns/op 5098 B/op 149 allocs/op
BenchmarkKallaxUpdate/kallax-8 100000 22880 ns/op 11366 B/op 219 allocs/op
BenchmarkBoilUpdate/boil-8 1000000 1810 ns/op 936 B/op 18 allocs/op
BenchmarkGORMRawBind/gorm-8 20000 65821 ns/op 30502 B/op 444 allocs/op
BenchmarkGORPRawBind/gorp-8 50000 31300 ns/op 9141 B/op 318 allocs/op
BenchmarkXORMRawBind/xorm-8 20000 62024 ns/op 15588 B/op 403 allocs/op
BenchmarkKallaxRawBind/kallax-8 200000 7843 ns/op 4380 B/op 46 allocs/op
BenchmarkSQLXRawBind/sqlx-8 100000 13056 ns/op 4572 B/op 55 allocs/op
BenchmarkBoilRawBind/boil-8 200000 11519 ns/op 4638 B/op 55 allocs/op
```
<img src="http://i.imgur.com/SltE8UQ.png"/><img src="http://i.imgur.com/lzvM5jJ.png"/><img src="http://i.imgur.com/SS0zNd2.png"/>
<img src="http://i.imgur.com/Kk0IM0J.png"/><img src="http://i.imgur.com/1IFtpdP.png"/><img src="http://i.imgur.com/t6Usecx.png"/>
<img src="http://i.imgur.com/98DOzcr.png"/><img src="http://i.imgur.com/NSp5r4Q.png"/><img src="http://i.imgur.com/dEGlOgI.png"/>
<img src="http://i.imgur.com/W0zhuGb.png"/><img src="http://i.imgur.com/YIvDuFv.png"/><img src="http://i.imgur.com/sKwuMaU.png"/>
<img src="http://i.imgur.com/ZUMYVmw.png"/><img src="http://i.imgur.com/T61rH3K.png"/><img src="http://i.imgur.com/lDr0xhY.png"/>
<img src="http://i.imgur.com/LWo10M9.png"/><img src="http://i.imgur.com/Td15owT.png"/><img src="http://i.imgur.com/45XXw4K.png"/>
<img src="http://i.imgur.com/lpP8qds.png"/><img src="http://i.imgur.com/hLyH3jQ.png"/><img src="http://i.imgur.com/C2v10t3.png"/>

View file

@ -3,7 +3,7 @@ package bdb
import (
"strings"
"github.com/vattle/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/strmangle"
)
// Column holds information about a database column.

View file

@ -1,8 +1,8 @@
package drivers
import (
"github.com/vattle/sqlboiler/bdb"
"github.com/vattle/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
)
// MockDriver is a mock implementation of the bdb driver Interface
@ -58,6 +58,14 @@ func (m *MockDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
}[tableName], nil
}
func (m *MockDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, nil
}
func (m *MockDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", nil
}
// ForeignKeyInfo returns a list of mock foreignkeys
func (m *MockDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
return map[string][]bdb.ForeignKey{

View file

@ -7,8 +7,8 @@ import (
"strings"
_ "github.com/denisenkom/go-mssqldb"
"github.com/lbryio/sqlboiler/bdb"
"github.com/pkg/errors"
"github.com/vattle/sqlboiler/bdb"
)
// MSSQLDriver holds the database connection string and a handle
@ -241,6 +241,14 @@ func (m *MSSQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
return pkey, nil
}
func (m *MSSQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, errors.New("not implemented")
}
func (m *MSSQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", errors.New("not implemented")
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (m *MSSQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey

View file

@ -3,12 +3,13 @@ package drivers
import (
"database/sql"
"fmt"
"sort"
"strconv"
"strings"
"github.com/go-sql-driver/mysql"
"github.com/lbryio/sqlboiler/bdb"
"github.com/pkg/errors"
"github.com/vattle/sqlboiler/bdb"
)
// TinyintAsBool is a global that is set from main.go if a user specifies
@ -52,6 +53,7 @@ func MySQLBuildQueryString(user, pass, dbname, host string, port int, sslmode st
}
config.Addr += ":" + strconv.Itoa(port)
config.TLSConfig = sslmode
config.AllowNativePasswords = true
// MySQL is a bad, and by default reads date/datetime into a []byte
// instead of a time.Time. Tell it to stop being a bad.
@ -232,6 +234,79 @@ func (m *MySQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
return pkey, nil
}
// UniqueKeyInfo retrieves the unique keys for a given table name.
func (m *MySQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
var ukeys []bdb.UniqueKey
query := `
select tc.table_name, tc.constraint_name, GROUP_CONCAT(kcu.column_name)
from information_schema.table_constraints tc
left join information_schema.key_column_usage kcu on tc.constraint_name = kcu.constraint_name and tc.table_name = kcu.table_name and tc.table_schema = kcu.table_schema
where tc.table_schema = ? and tc.table_name = ? and tc.constraint_type = "UNIQUE"
group by tc.table_name, tc.constraint_name
`
var rows *sql.Rows
var err error
if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
return nil, err
}
for rows.Next() {
var ukey bdb.UniqueKey
var columns string
//ukey.Table = tableName
err = rows.Scan(&ukey.Table, &ukey.Name, &columns)
if err != nil {
return nil, err
}
ukey.Columns = strings.Split(columns, ",")
sort.Strings(ukey.Columns)
ukeys = append(ukeys, ukey)
}
if err = rows.Err(); err != nil {
return nil, err
}
return ukeys, nil
}
// AutoincrementInfo retrieves the autoincrement column for a given table name, if one exists.
func (m *MySQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
query := `
select column_name
from information_schema.columns
where table_schema = ? and table_name = ? and extra like "%auto_increment%"
`
var rows *sql.Rows
var err error
if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
return "", err
}
for rows.Next() {
var column string
err = rows.Scan(&column)
if err != nil {
return "", err
}
return column, nil
}
if err = rows.Err(); err != nil {
return "", err
}
return "", nil
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (m *MySQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey
@ -272,23 +347,42 @@ func (m *MySQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey
// "varchar" to "string" and "bigint" to "int64". It returns this parsed data
// as a Column object.
func (m *MySQLDriver) TranslateColumnType(c bdb.Column) bdb.Column {
unsigned := strings.Contains(c.FullDBType, "unsigned")
if c.Nullable {
switch c.DBType {
case "tinyint":
// map tinyint(1) to bool if TinyintAsBool is true
if TinyintAsBool && c.FullDBType == "tinyint(1)" {
c.Type = "null.Bool"
} else if unsigned {
c.Type = "null.Uint8"
} else {
c.Type = "null.Int8"
}
case "smallint":
c.Type = "null.Int16"
if unsigned {
c.Type = "null.Uint16"
} else {
c.Type = "null.Int16"
}
case "mediumint":
c.Type = "null.Int32"
if unsigned {
c.Type = "null.Uint32"
} else {
c.Type = "null.Int32"
}
case "int", "integer":
c.Type = "null.Int"
if unsigned {
c.Type = "null.Uint"
} else {
c.Type = "null.Int"
}
case "bigint":
c.Type = "null.Int64"
if unsigned {
c.Type = "null.Uint64"
} else {
c.Type = "null.Int64"
}
case "float":
c.Type = "null.Float32"
case "double", "double precision", "real":
@ -310,17 +404,35 @@ func (m *MySQLDriver) TranslateColumnType(c bdb.Column) bdb.Column {
// map tinyint(1) to bool if TinyintAsBool is true
if TinyintAsBool && c.FullDBType == "tinyint(1)" {
c.Type = "bool"
} else if unsigned {
c.Type = "uint8"
} else {
c.Type = "int8"
}
case "smallint":
c.Type = "int16"
if unsigned {
c.Type = "uint16"
} else {
c.Type = "int16"
}
case "mediumint":
c.Type = "int32"
if unsigned {
c.Type = "uint32"
} else {
c.Type = "int32"
}
case "int", "integer":
c.Type = "int"
if unsigned {
c.Type = "uint"
} else {
c.Type = "int"
}
case "bigint":
c.Type = "int64"
if unsigned {
c.Type = "uint64"
} else {
c.Type = "int64"
}
case "float":
c.Type = "float32"
case "double", "double precision", "real":

View file

@ -3,14 +3,15 @@ package drivers
import (
"database/sql"
"fmt"
"os"
"strings"
// Side-effect import sql driver
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
_ "github.com/lib/pq"
"github.com/pkg/errors"
"github.com/vattle/sqlboiler/bdb"
"github.com/vattle/sqlboiler/strmangle"
)
// PostgresDriver holds the database connection string and a handle
@ -132,7 +133,7 @@ func (p *PostgresDriver) Columns(schema, tableName string) ([]bdb.Column, error)
select
c.column_name,
(
case when c.data_type = 'USER-DEFINED' and c.udt_name <> 'hstore'
case when pgt.typtype = 'e'
then
(
select 'enum.' || c.udt_name || '(''' || string_agg(labels.label, ''',''') || ''')'
@ -176,6 +177,8 @@ func (p *PostgresDriver) Columns(schema, tableName string) ([]bdb.Column, error)
)) as is_unique
from information_schema.columns as c
inner join pg_namespace as pgn on pgn.nspname = c.udt_schema
left join pg_type pgt on c.data_type = 'USER-DEFINED' and pgn.oid = pgt.typnamespace and c.udt_name = pgt.typname
left join information_schema.element_types e
on ((c.table_catalog, c.table_schema, c.table_name, 'TABLE', c.dtd_identifier)
= (e.object_catalog, e.object_schema, e.object_name, e.object_type, e.collection_type_identifier))
@ -263,6 +266,14 @@ func (p *PostgresDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryK
return pkey, nil
}
func (p *PostgresDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, errors.New("not implemented")
}
func (p *PostgresDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", errors.New("not implemented")
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (p *PostgresDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey
@ -349,7 +360,7 @@ func (p *PostgresDriver) TranslateColumnType(c bdb.Column) bdb.Column {
c.DBType = "hstore"
} else {
c.Type = "string"
fmt.Printf("Warning: Incompatible data type detected: %s\n", c.UDTName)
fmt.Fprintln(os.Stderr, "Warning: Incompatible data type detected: %s\n", c.UDTName)
}
default:
c.Type = "null.String"

View file

@ -9,6 +9,8 @@ type Interface interface {
TableNames(schema string, whitelist, blacklist []string) ([]string, error)
Columns(schema, tableName string) ([]Column, error)
PrimaryKeyInfo(schema, tableName string) (*PrimaryKey, error)
UniqueKeyInfo(schema, tableName string) ([]UniqueKey, error)
AutoincrementInfo(schema, tableName string) (string, error)
ForeignKeyInfo(schema, tableName string) ([]ForeignKey, error)
// TranslateColumnType takes a Database column type and returns a go column type.
@ -63,10 +65,18 @@ func Tables(db Interface, schema string, whitelist, blacklist []string) ([]Table
return nil, errors.Wrapf(err, "unable to fetch table pkey info (%s)", name)
}
if t.UKeys, err = db.UniqueKeyInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table ukey info (%s)", name)
}
if t.FKeys, err = db.ForeignKeyInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table fkey info (%s)", name)
}
if t.AutoIncrementColumn, err = db.AutoincrementInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table autoincrement info (%s)", name)
}
setIsJoinTable(&t)
tables = append(tables, t)

View file

@ -3,7 +3,7 @@ package bdb
import (
"testing"
"github.com/vattle/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/strmangle"
)
type testMockDriver struct{}

View file

@ -8,6 +8,13 @@ type PrimaryKey struct {
Columns []string
}
// UniqueKey represents a unique key constraint in a database
type UniqueKey struct {
Table string
Name string
Columns []string
}
// ForeignKey represents a foreign key constraint in a database
type ForeignKey struct {
Table string

View file

@ -8,9 +8,12 @@ type Table struct {
// For dbs with real schemas, like Postgres.
// Example value: "schema_name"."table_name"
SchemaName string
Columns []Column
Columns []Column
AutoIncrementColumn string
PKey *PrimaryKey
UKeys []UniqueKey
FKeys []ForeignKey
IsJoinTable bool

View file

@ -19,6 +19,10 @@ type Transactor interface {
// Beginner begins transactions.
type Beginner interface {
Begin() (Transactor, error)
}
type SQLBeginner interface {
Begin() (*sql.Tx, error)
}
@ -26,7 +30,11 @@ type Beginner interface {
func Begin() (Transactor, error) {
creator, ok := currentDB.(Beginner)
if !ok {
panic("database does not support transactions")
creator2, ok2 := currentDB.(SQLBeginner)
if !ok2 {
panic("database does not support transactions")
}
return creator2.Begin()
}
return creator.Begin()
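
A hedged sketch of what the fallback enables: a plain *sql.DB only implements `Begin() (*sql.Tx, error)`, so it satisfies the new SQLBeginner interface and boil.Begin no longer panics for it (driver and DSN below are placeholders):

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/go-sql-driver/mysql"
	"github.com/lbryio/sqlboiler/boil"
)

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(127.0.0.1:3306)/example?parseTime=true")
	if err != nil {
		log.Fatal(err)
	}
	boil.SetDB(db)

	// *sql.DB is an SQLBeginner, so this now returns the *sql.Tx instead of panicking.
	tx, err := boil.Begin()
	if err != nil {
		log.Fatal(err)
	}
	defer tx.Rollback()

	// ... use tx as the executor for generated model calls ...

	if err := tx.Commit(); err != nil {
		log.Fatal(err)
	}
}
```
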

View file

@ -1,23 +0,0 @@
package boil
type boilErr struct {
error
}
// WrapErr wraps err in a boilErr
func WrapErr(err error) error {
return boilErr{
error: err,
}
}
// Error returns the underlying error string
func (e boilErr) Error() string {
return e.error.Error()
}
// IsBoilErr checks if err is a boilErr
func IsBoilErr(err error) bool {
_, ok := err.(boilErr)
return ok
}

View file

@ -1,24 +0,0 @@
package boil
import (
"errors"
"testing"
)
func TestErrors(t *testing.T) {
t.Parallel()
err := errors.New("test error")
if IsBoilErr(err) == true {
t.Errorf("Expected false")
}
err = WrapErr(errors.New("test error"))
if err.Error() != "test error" {
t.Errorf(`Expected "test error", got %v`, err.Error())
}
if IsBoilErr(err) != true {
t.Errorf("Expected true")
}
}

View file

@ -1,6 +1,7 @@
package boil
import (
"io"
"os"
"time"
)
@ -20,7 +21,7 @@ var (
var DebugMode = false
// DebugWriter is where the debug output will be sent if DebugMode is true
var DebugWriter = os.Stdout
var DebugWriter io.Writer = os.Stdout
// SetDB initializes the database handle for all template db interactions
func SetDB(db Executor) {
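
A small sketch of what the io.Writer type makes possible: capturing the generated SQL log in a buffer (or any other writer) instead of printing to stdout:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/lbryio/sqlboiler/boil"
)

func main() {
	var buf bytes.Buffer

	boil.DebugMode = true   // turn on query logging
	boil.DebugWriter = &buf // now accepted, since DebugWriter is an io.Writer

	// ... run generated queries here; their SQL is written to buf ...

	fmt.Print(buf.String())
}
```
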

View file

@ -13,10 +13,10 @@ import (
"text/template"
"github.com/pkg/errors"
"github.com/vattle/sqlboiler/bdb"
"github.com/vattle/sqlboiler/bdb/drivers"
"github.com/vattle/sqlboiler/queries"
"github.com/vattle/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb/drivers"
"github.com/lbryio/sqlboiler/queries"
"github.com/lbryio/sqlboiler/strmangle"
)
const (
@ -267,7 +267,7 @@ func (s *State) processReplacements() error {
return nil
}
var basePackage = "github.com/vattle/sqlboiler"
var basePackage = "github.com/lbryio/sqlboiler"
func getBasePath(baseDirConfig string) (string, error) {
if len(baseDirConfig) > 0 {

View file

@ -6,7 +6,7 @@ import (
"sort"
"strings"
"github.com/vattle/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb"
)
// imports defines the optional standard imports and
@ -170,26 +170,33 @@ func newImporter() importer {
`"time"`,
},
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/vattle/sqlboiler/boil"`,
`"github.com/vattle/sqlboiler/queries"`,
`"github.com/vattle/sqlboiler/queries/qm"`,
`"github.com/vattle/sqlboiler/strmangle"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/queries"`,
`"github.com/lbryio/sqlboiler/queries/qm"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
},
}
imp.Singleton = mapImports{
"boil_queries": {
"boil_queries": imports{
standard: importList{
`"fmt"`,
`"strings"`,
},
thirdParty: importList{
`"github.com/vattle/sqlboiler/boil"`,
`"github.com/vattle/sqlboiler/queries"`,
`"github.com/vattle/sqlboiler/queries/qm"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/queries"`,
`"github.com/lbryio/sqlboiler/queries/qm"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
},
},
"boil_types": {
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/vattle/sqlboiler/strmangle"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
},
},
}
@ -201,9 +208,9 @@ func newImporter() importer {
`"testing"`,
},
thirdParty: importList{
`"github.com/vattle/sqlboiler/boil"`,
`"github.com/vattle/sqlboiler/randomize"`,
`"github.com/vattle/sqlboiler/strmangle"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
},
}
@ -221,9 +228,9 @@ func newImporter() importer {
},
thirdParty: importList{
`"github.com/kat-co/vala"`,
`"github.com/pkg/errors"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/spf13/viper"`,
`"github.com/vattle/sqlboiler/boil"`,
},
},
"boil_queries_test": {
@ -236,7 +243,7 @@ func newImporter() importer {
`"regexp"`,
},
thirdParty: importList{
`"github.com/vattle/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/boil"`,
},
},
"boil_suites_test": {
@ -259,11 +266,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/vattle/sqlboiler/bdb/drivers"`,
`"github.com/vattle/sqlboiler/randomize"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`_ "github.com/lib/pq"`,
`"github.com/spf13/viper"`,
},
},
"mysql": {
@ -278,11 +285,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/vattle/sqlboiler/bdb/drivers"`,
`"github.com/vattle/sqlboiler/randomize"`,
`_ "github.com/go-sql-driver/mysql"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/spf13/viper"`,
},
},
"mssql": {
@ -295,11 +302,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/vattle/sqlboiler/bdb/drivers"`,
`"github.com/vattle/sqlboiler/randomize"`,
`_ "github.com/denisenkom/go-mssqldb"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/spf13/viper"`,
},
},
}
@ -309,79 +316,79 @@ func newImporter() importer {
// TranslateColumnType to see the type assignments.
imp.BasedOnType = mapImports{
"null.Float32": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Float64": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int8": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int16": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int32": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Int64": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint8": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint16": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint32": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Uint64": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.String": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Bool": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Time": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.JSON": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"null.Bytes": {
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
},
"time.Time": {
standard: importList{`"time"`},
},
"types.JSON": {
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.BytesArray": {
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.Int64Array": {
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.Float64Array": {
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.BoolArray": {
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.StringArray": {
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
"types.Hstore": {
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
},
}

View file

@ -6,7 +6,7 @@ import (
"testing"
"github.com/pkg/errors"
"github.com/vattle/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb"
)
func TestImportsSort(t *testing.T) {
@ -234,7 +234,7 @@ func TestCombineTypeImports(t *testing.T) {
`"fmt"`,
},
thirdParty: importList{
`"github.com/vattle/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/boil"`,
},
}
@ -245,8 +245,8 @@ func TestCombineTypeImports(t *testing.T) {
`"time"`,
},
thirdParty: importList{
`"github.com/vattle/sqlboiler/boil"`,
`"gopkg.in/nullbio/null.v6"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
},
}
@ -280,8 +280,8 @@ func TestCombineTypeImports(t *testing.T) {
`"time"`,
},
thirdParty: importList{
`"github.com/vattle/sqlboiler/boil"`,
`"gopkg.in/nullbio/null.v6"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
},
}
@ -297,11 +297,11 @@ func TestCombineImports(t *testing.T) {
a := imports{
standard: importList{"fmt"},
thirdParty: importList{"github.com/vattle/sqlboiler", "gopkg.in/nullbio/null.v6"},
thirdParty: importList{"github.com/lbryio/sqlboiler", "github.com/lbryio/lbry.go/v2/extras/null"},
}
b := imports{
standard: importList{"os"},
thirdParty: importList{"github.com/vattle/sqlboiler"},
thirdParty: importList{"github.com/lbryio/sqlboiler"},
}
c := combineImports(a, b)
@ -309,8 +309,8 @@ func TestCombineImports(t *testing.T) {
if c.standard[0] != "fmt" && c.standard[1] != "os" {
t.Errorf("Wanted: fmt, os got: %#v", c.standard)
}
if c.thirdParty[0] != "github.com/vattle/sqlboiler" && c.thirdParty[1] != "gopkg.in/nullbio/null.v6" {
t.Errorf("Wanted: github.com/vattle/sqlboiler, gopkg.in/nullbio/null.v6 got: %#v", c.thirdParty)
if c.thirdParty[0] != "github.com/lbryio/sqlboiler" && c.thirdParty[1] != "github.com/lbryio/lbry.go/v2/extras/null" {
t.Errorf("Wanted: github.com/lbryio/sqlboiler, github.com/lbryio/lbry.go/v2/extras/null got: %#v", c.thirdParty)
}
}

View file

@ -14,7 +14,7 @@ import (
"github.com/pkg/errors"
)
var noEditDisclaimer = []byte(`// This file is generated by SQLBoiler (https://github.com/vattle/sqlboiler)
var noEditDisclaimer = []byte(`// This file is generated by SQLBoiler (https://github.com/lbryio/sqlboiler)
// and is meant to be re-generated in place and/or deleted at any time.
// DO NOT EDIT

View file

@ -8,10 +8,10 @@ import (
"strings"
"text/template"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/queries"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/vattle/sqlboiler/bdb"
"github.com/vattle/sqlboiler/queries"
"github.com/vattle/sqlboiler/strmangle"
)
// templateData for sqlboiler templates

View file

@ -4,8 +4,8 @@ import (
"fmt"
"strings"
"github.com/vattle/sqlboiler/bdb"
"github.com/vattle/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
)
// TxtToOne contains text that will be used by templates for a one-to-many or

View file

@ -5,8 +5,8 @@ import (
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/vattle/sqlboiler/bdb"
"github.com/vattle/sqlboiler/bdb/drivers"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb/drivers"
)
func TestTxtsFromOne(t *testing.T) {

View file

@ -8,13 +8,13 @@ import (
"strings"
"github.com/kat-co/vala"
"github.com/lbryio/sqlboiler/bdb/drivers"
"github.com/lbryio/sqlboiler/boilingcore"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/vattle/sqlboiler/bdb/drivers"
"github.com/vattle/sqlboiler/boilingcore"
)
const sqlBoilerVersion = "2.2.0"
const sqlBoilerVersion = "2.4.0+lbry"
var (
cmdState *boilingcore.State
@ -62,7 +62,7 @@ func main() {
Use: "sqlboiler [flags] <driver>",
Short: "SQL Boiler generates an ORM tailored to your database schema.",
Long: "SQL Boiler generates a Go ORM from template files, tailored to your database schema.\n" +
`Complete documentation is available at http://github.com/vattle/sqlboiler`,
`Complete documentation is available at http://github.com/lbryio/sqlboiler`,
Example: `sqlboiler postgres`,
PreRunE: preRun,
RunE: run,

View file

@ -5,9 +5,9 @@ import (
"reflect"
"strings"
"github.com/lbryio/sqlboiler/boil"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/vattle/sqlboiler/boil"
"github.com/vattle/sqlboiler/strmangle"
)
type loadRelationshipState struct {
@ -206,9 +206,16 @@ func (l loadRelationshipState) loadRelationshipsRecurse(depth int, obj reflect.V
}
bkind := kindStruct
if reflect.Indirect(loadedObject).Kind() != reflect.Struct {
if derefed := reflect.Indirect(loadedObject); derefed.Kind() != reflect.Struct {
bkind = kindPtrSliceStruct
loadedObject = loadedObject.Addr()
// Convert away any helper slice types
// elemType is *elem (from []*elem or helperSliceType)
// sliceType is *[]*elem
elemType := derefed.Type().Elem()
sliceType := reflect.PtrTo(reflect.SliceOf(elemType))
loadedObject = loadedObject.Addr().Convert(sliceType)
}
return l.loadRelationships(depth+1, loadedObject.Interface(), bkind)
}
@ -241,6 +248,9 @@ func collectLoaded(key string, loadingFrom reflect.Value) (reflect.Value, bindKi
if loadedType.Elem().Kind() == reflect.Struct {
bkind = kindStruct
loadedType = reflect.SliceOf(loadedType)
} else {
// Ensure that we get rid of all the helper "XSlice" types
loadedType = reflect.SliceOf(loadedType.Elem())
}
collection := reflect.MakeSlice(loadedType, 0, 0)
@ -249,9 +259,13 @@ func collectLoaded(key string, loadingFrom reflect.Value) (reflect.Value, bindKi
for {
switch bkind {
case kindStruct:
collection = reflect.Append(collection, loadedObject)
if !loadedObject.IsNil() {
collection = reflect.Append(collection, loadedObject)
}
case kindPtrSliceStruct:
collection = reflect.AppendSlice(collection, loadedObject)
if !loadedObject.IsNil() {
collection = reflect.AppendSlice(collection, loadedObject)
}
}
i++
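
The practical effect shows up in multi-level Load calls. A hedged usage sketch with hypothetical generated models (`Pilot` with many `Jets`, each jet with one `Engine`): parents whose intermediate relationship came back nil are now skipped when the next depth level is collected, instead of corrupting the batch:

```go
// listEngines is a sketch against hypothetical generated models;
// some pilots have no jets at all.
func listEngines(db *sql.DB) error {
	pilots, err := models.Pilots(db, qm.Load("Jets.Engine")).All()
	if err != nil {
		return err
	}
	for _, p := range pilots {
		if p.R == nil || len(p.R.Jets) == 0 {
			continue // nothing was eager loaded for this pilot
		}
		for _, j := range p.R.Jets {
			fmt.Println(j.R.Engine.Name)
		}
	}
	return nil
}
```
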

View file

@ -4,7 +4,7 @@ import (
"fmt"
"testing"
"github.com/vattle/sqlboiler/boil"
"github.com/lbryio/sqlboiler/boil"
)
var testEagerCounters struct {

View file

@ -4,7 +4,7 @@ import (
"fmt"
"reflect"
"github.com/vattle/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/strmangle"
)
// NonZeroDefaultSet returns the fields included in the

View file

@ -5,7 +5,7 @@ import (
"testing"
"time"
null "gopkg.in/nullbio/null.v6"
null "github.com/lbryio/lbry.go/v2/extras/null"
)
type testObj struct {

View file

@ -1,6 +1,6 @@
package qm
import "github.com/vattle/sqlboiler/queries"
import "github.com/lbryio/sqlboiler/queries"
// QueryMod to modify the query object
type QueryMod func(q *queries.Query)
@ -8,7 +8,9 @@ type QueryMod func(q *queries.Query)
// Apply the query mods to the Query object
func Apply(q *queries.Query, mods ...QueryMod) {
for _, mod := range mods {
mod(q)
if mod != nil {
mod(q)
}
}
}
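
The nil guard is what makes dynamically assembled mod lists safe; a sketch against a hypothetical generated `Users` query, where mods that don't apply are simply left nil:

```go
// findUsers is a sketch: unset mods stay nil and are skipped by qm.Apply
// instead of causing a nil-function-call panic.
func findUsers(db *sql.DB, status string, limit int) (models.UserSlice, error) {
	var statusMod, limitMod qm.QueryMod
	if status != "" {
		statusMod = qm.Where("status = ?", status)
	}
	if limit > 0 {
		limitMod = qm.Limit(limit)
	}
	return models.Users(db, statusMod, limitMod).All()
}
```
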
@ -123,6 +125,12 @@ func From(from string) QueryMod {
}
}
func ForceIndex(index string) QueryMod {
return func(q *queries.Query) {
queries.SetForceIndex(q, index)
}
}
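
A usage sketch for the new ForceIndex mod, with a hypothetical generated `Claims` model and a purely illustrative index name; the index ends up in the FROM clause of the built SELECT:

```go
// Roughly: SELECT ... FROM `claim` FORCE INDEX (idx_claim_height) WHERE height > ?
claims, err := models.Claims(db,
	qm.ForceIndex("idx_claim_height"),
	qm.Where("height > ?", 500000),
).All()
```
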
// Limit the number of returned rows
func Limit(limit int) QueryMod {
return func(q *queries.Query) {

View file

@ -4,7 +4,8 @@ import (
"database/sql"
"fmt"
"github.com/vattle/sqlboiler/boil"
"github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/lbryio/sqlboiler/boil"
)
// joinKind is the type of join
@ -29,6 +30,7 @@ type Query struct {
selectCols []string
count bool
from []string
forceindex string
joins []join
where []where
in []in
@ -136,7 +138,7 @@ func (q *Query) Query() (*sql.Rows, error) {
func (q *Query) ExecP() sql.Result {
res, err := q.Exec()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return res
@ -147,7 +149,7 @@ func (q *Query) ExecP() sql.Result {
func (q *Query) QueryP() *sql.Rows {
rows, err := q.Query()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return rows
@ -262,6 +264,11 @@ func SetLastWhereAsOr(q *Query) {
q.where[len(q.where)-1].orSeparator = true
}
// SetForceIndex sets the index to be used by the query
func SetForceIndex(q *Query, index string){
q.forceindex = index
}
// SetLastInAsOr sets the or separator for the tail "IN" in the slice
func SetLastInAsOr(q *Query) {
if len(q.in) == 0 {

View file

@ -7,7 +7,7 @@ import (
"sort"
"strings"
"github.com/vattle/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/strmangle"
)
var (
@ -76,7 +76,13 @@ func buildSelectQuery(q *Query) (*bytes.Buffer, []interface{}) {
buf.WriteByte(')')
}
fmt.Fprintf(buf, " FROM %s", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "))
if len(q.forceindex) > 0 {
fmt.Fprintf(buf, " FROM %s FORCE INDEX (%s)", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "),q.forceindex)
}else{
fmt.Fprintf(buf, " FROM %s", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "))
}
if len(q.joins) > 0 {
argsLen := len(args)
@ -190,7 +196,7 @@ func buildUpdateQuery(q *Query) (*bytes.Buffer, []interface{}) {
}
// BuildUpsertQueryMySQL builds a SQL statement string using the upsertData provided.
func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string) string {
func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string, autoIncrementCol string) string {
whitelist = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, whitelist)
buf := strmangle.GetBuffer()
@ -220,6 +226,11 @@ func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []st
strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
)
// https://stackoverflow.com/questions/778534/mysql-on-duplicate-key-last-insert-id
if autoIncrementCol != "" {
buf.WriteString(autoIncrementCol + " = LAST_INSERT_ID(" + autoIncrementCol + "), ")
}
for i, v := range update {
if i != 0 {
buf.WriteByte(',')
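
Roughly what the extra clause buys: when an auto-increment column is passed in, the ON DUPLICATE KEY UPDATE branch re-selects the existing id via LAST_INSERT_ID, so the caller's LastInsertId stays meaningful on updates as well as inserts. A hedged sketch of the builder output (table and column names are illustrative):

```go
q := queries.BuildUpsertQueryMySQL(
	queries.Dialect{LQ: '`', RQ: '`'},
	"users",
	[]string{"email"},       // columns to update on duplicate key
	[]string{"id", "email"}, // insert whitelist
	"id",                    // auto-increment column
)
// Produces roughly:
//   INSERT INTO `users` (`id`,`email`) VALUES (?,?)
//   ON DUPLICATE KEY UPDATE id = LAST_INSERT_ID(id), `email` = VALUES(`email`)
fmt.Println(q)
```
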

View file

@ -7,9 +7,9 @@ import (
"strings"
"sync"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/vattle/sqlboiler/boil"
"github.com/vattle/sqlboiler/strmangle"
)
var (
@ -41,7 +41,7 @@ const (
// It panics on error. See boil.Bind() documentation.
func (q *Query) BindP(obj interface{}) {
if err := q.Bind(obj); err != nil {
panic(boil.WrapErr(err))
panic(errors.WithStack(err))
}
}

View file

@ -14,12 +14,12 @@ import (
"sync/atomic"
"time"
null "gopkg.in/nullbio/null.v6"
null "github.com/lbryio/lbry.go/v2/extras/null"
"github.com/pkg/errors"
"github.com/satori/go.uuid"
"github.com/vattle/sqlboiler/strmangle"
"github.com/vattle/sqlboiler/types"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/types"
)
var (

View file

@ -5,7 +5,7 @@ import (
"testing"
"time"
null "gopkg.in/nullbio/null.v6"
null "github.com/lbryio/lbry.go/v2/extras/null"
)
func TestRandomizeStruct(t *testing.T) {

View file

@ -17,6 +17,23 @@ type {{$modelName}} struct {
{{end -}}
}
var {{$modelName}}Columns = struct {
{{range $column := .Table.Columns -}}
{{titleCase $column.Name}} string
{{end -}}
}{
{{range $column := .Table.Columns -}}
{{titleCase $column.Name}}: "{{$column.Name}}",
{{end -}}
}
// {{$modelName}}Filter allows you to filter on any columns by making them all pointers.
type {{$modelName}}Filter struct {
{{range $column := .Table.Columns -}}
{{titleCase $column.Name}} *{{$column.Type}} `{{generateTags $dot.Tags $column.Name}}boil:"{{$column.Name}}" json:"{{$column.Name}},omitempty" toml:"{{$column.Name}}" yaml:"{{$column.Name}},omitempty"`
{{end -}}
}
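
What the generated filter looks like in use, sketched for a hypothetical `user` table; every field is a pointer, so only columns you set participate in the lookup (the Find helper named here is hypothetical, standing in for the generated Filters/FindOrCreate methods mentioned in the commit log):

```go
// Hypothetical generated output for a `user` table:
//
//	type UserFilter struct {
//		ID    *uint64 `boil:"id" json:"id,omitempty" toml:"id" yaml:"id,omitempty"`
//		Email *string `boil:"email" json:"email,omitempty" toml:"email" yaml:"email,omitempty"`
//	}
//
// Only the non-nil fields take part in the lookup.
email := "someone@example.com"
filter := models.UserFilter{Email: &email}
user, err := models.FindOrCreateUser(db, filter) // hypothetical generated helper
```
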
{{- if .Table.IsJoinTable -}}
{{- else}}
// {{$modelNameCamel}}R is where relationships are stored.

View file

@ -10,6 +10,7 @@ var (
{{$varNameSingular}}ColumnsWithoutDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault false | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
{{$varNameSingular}}ColumnsWithDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault true | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
{{$varNameSingular}}PrimaryKeyColumns = []string{{"{"}}{{.Table.PKey.Columns | stringMap .StringFuncs.quoteWrap | join ", "}}{{"}"}}
{{$varNameSingular}}AutoIncrementColumn = "{{.Table.AutoIncrementColumn }}"
)
type (
@ -21,7 +22,7 @@ type (
{{$tableNameSingular}}Hook func(boil.Executor, *{{$tableNameSingular}}) error
{{- end}}
{{$varNameSingular}}Query struct {
{{$tableNameSingular}}Query struct {
*queries.Query
}
)

View file

@ -16,7 +16,7 @@ var {{$varNameSingular}}AfterUpsertHooks []{{$tableNameSingular}}Hook
func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeInsertHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -27,7 +27,7 @@ func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeUpdateHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeDeleteHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -49,7 +49,7 @@ func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeUpsertHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -60,7 +60,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterInsertHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -71,7 +71,7 @@ func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterSelectHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -82,7 +82,7 @@ func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterUpdateHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -93,7 +93,7 @@ func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterDeleteHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}
@ -104,7 +104,7 @@ func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterUpsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterUpsertHooks {
if err := hook(exec, o); err != nil {
return err
return errors.Err(err)
}
}

View file

@ -1,27 +1,27 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- $varNameSingular := .Table.Name | singular | camelCase -}}
// OneP returns a single {{$varNameSingular}} record from the query, and panics on error.
func (q {{$varNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
// OneP returns a single {{$tableNameSingular}} record from the query, and panics on error.
func (q {{$tableNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
o, err := q.One()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return o
}
// One returns a single {{$varNameSingular}} record from the query.
func (q {{$varNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
// One returns a single {{$tableNameSingular}} record from the query.
func (q {{$tableNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
o := &{{$tableNameSingular}}{}
queries.SetLimit(q.Query, 1)
err := q.Bind(o)
if err != nil {
if errors.Cause(err) == sql.ErrNoRows {
return nil, sql.ErrNoRows
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Wrap(err, "{{.PkgName}}: failed to execute a one query for {{.Table.Name}}")
return nil, errors.Prefix("{{.PkgName}}: failed to execute a one query for {{.Table.Name}}", err)
}
{{if not .NoHooks -}}
@ -34,22 +34,22 @@ func (q {{$varNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
}
// AllP returns all {{$tableNameSingular}} records from the query, and panics on error.
func (q {{$varNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
func (q {{$tableNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
o, err := q.All()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return o
}
// All returns all {{$tableNameSingular}} records from the query.
func (q {{$varNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
var o {{$tableNameSingular}}Slice
func (q {{$tableNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
var o []*{{$tableNameSingular}}
err := q.Bind(&o)
if err != nil {
return nil, errors.Wrap(err, "{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice")
return nil, errors.Prefix("{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice", err)
}
{{if not .NoHooks -}}
@ -66,17 +66,17 @@ func (q {{$varNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
}
// CountP returns the count of all {{$tableNameSingular}} records in the query, and panics on error.
func (q {{$varNameSingular}}Query) CountP() int64 {
func (q {{$tableNameSingular}}Query) CountP() int64 {
c, err := q.Count()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return c
}
// Count returns the count of all {{$tableNameSingular}} records in the query.
func (q {{$varNameSingular}}Query) Count() (int64, error) {
func (q {{$tableNameSingular}}Query) Count() (int64, error) {
var count int64
queries.SetSelect(q.Query, nil)
@ -84,32 +84,33 @@ func (q {{$varNameSingular}}Query) Count() (int64, error) {
err := q.Query.QueryRow().Scan(&count)
if err != nil {
return 0, errors.Wrap(err, "{{.PkgName}}: failed to count {{.Table.Name}} rows")
return 0, errors.Prefix("{{.PkgName}}: failed to count {{.Table.Name}} rows", err)
}
return count, nil
}
// Exists checks if the row exists in the table, and panics on error.
func (q {{$varNameSingular}}Query) ExistsP() bool {
func (q {{$tableNameSingular}}Query) ExistsP() bool {
e, err := q.Exists()
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return e
}
// Exists checks if the row exists in the table.
func (q {{$varNameSingular}}Query) Exists() (bool, error) {
func (q {{$tableNameSingular}}Query) Exists() (bool, error) {
var count int64
queries.SetCount(q.Query)
queries.SetSelect(q.Query, []string{})
queries.SetLimit(q.Query, 1)
err := q.Query.QueryRow().Scan(&count)
if err != nil {
return false, errors.Wrap(err, "{{.PkgName}}: failed to check if {{.Table.Name}} exists")
return false, errors.Prefix("{{.PkgName}}: failed to check if {{.Table.Name}} exists", err)
}
return count > 0, nil
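
As a usage sketch only: assuming a hypothetical `user` table that generates User, UserQuery and a Users() constructor inside the generated package (names, columns, and the fmt/qm/boil imports are assumptions, not taken from a real schema), the nil-on-no-rows behaviour introduced above changes call sites roughly like this:

func printUserStats(db boil.Executor) error {
	u, err := Users(db, qm.Where("email = ?", "nobody@example.com")).One()
	if err != nil {
		return err // sql.ErrNoRows is no longer surfaced from One()
	}
	if u == nil {
		// A missing row now comes back as (nil, nil), so callers must
		// nil-check instead of comparing the error to sql.ErrNoRows.
		fmt.Println("no such user")
	}

	n, err := Users(db).Count()
	if err != nil {
		return err
	}
	fmt.Println("total users:", n)
	return nil
}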

View file

@ -3,14 +3,14 @@
{{- $dot := . -}}
{{- range .Table.FKeys -}}
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
{{- $varNameSingular := .ForeignTable | singular | camelCase}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
// {{$txt.Function.Name}}G pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
queryMods := []qm.QueryMod{
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
}

View file

@ -3,14 +3,14 @@
{{- $dot := . -}}
{{- range .Table.ToOneRelationships -}}
{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
{{- $varNameSingular := .ForeignTable | singular | camelCase}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
// {{$txt.Function.Name}}G pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
queryMods := []qm.QueryMod{
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
}

View file

@ -3,20 +3,20 @@
{{- $dot := . -}}
{{- $table := .Table -}}
{{- range .Table.ToManyRelationships -}}
{{- $varNameSingular := .ForeignTable | singular | camelCase -}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase -}}
{{- $txt := txtsFromToMany $dot.Tables $table . -}}
{{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
// {{$txt.Function.Name}}G retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}}
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}} with an executor
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
queryMods := []qm.QueryMod{
qm.Select("{{id 0 | $dot.Quotes}}.*"),
qm.Select("{{$schemaForeignTable}}.*"),
}
if len(mods) != 0 {
@ -25,17 +25,18 @@ func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor,
{{if .ToJoinTable -}}
queryMods = append(queryMods,
qm.InnerJoin("{{.JoinTable | $dot.SchemaTable}} as {{id 1 | $dot.Quotes}} on {{id 0 | $dot.Quotes}}.{{.ForeignColumn | $dot.Quotes}} = {{id 1 | $dot.Quotes}}.{{.JoinForeignColumn | $dot.Quotes}}"),
qm.Where("{{id 1 | $dot.Quotes}}.{{.JoinLocalColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
{{$schemaJoinTable := .JoinTable | $.SchemaTable -}}
qm.InnerJoin("{{$schemaJoinTable}} on {{$schemaForeignTable}}.{{.ForeignColumn | $dot.Quotes}} = {{$schemaJoinTable}}.{{.JoinForeignColumn | $dot.Quotes}}"),
qm.Where("{{$schemaJoinTable}}.{{.JoinLocalColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
)
{{else -}}
queryMods = append(queryMods,
qm.Where("{{id 0 | $dot.Quotes}}.{{.ForeignColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
qm.Where("{{$schemaForeignTable}}.{{.ForeignColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
)
{{end}}
query := {{$txt.ForeignTable.NamePluralGo}}(exec, queryMods...)
queries.SetFrom(query.Query, "{{$schemaForeignTable}} as {{id 0 | $dot.Quotes}}")
queries.SetFrom(query.Query, "{{$schemaForeignTable}}")
return query
}
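
For orientation, a rough instantiation of this template for hypothetical author/posts tables (author has many posts via posts.author_id; MySQL backtick quoting assumed), showing the switch from the old alias-based selects to fully-qualified table names:

// Posts retrieves all the author's posts with an executor.
func (o *Author) Posts(exec boil.Executor, mods ...qm.QueryMod) PostQuery {
	queryMods := []qm.QueryMod{
		qm.Select("`posts`.*"), // previously selected via the "a" alias
	}

	if len(mods) != 0 {
		queryMods = append(queryMods, mods...)
	}

	queryMods = append(queryMods,
		qm.Where("`posts`.`author_id`=?", o.ID), // previously filtered on the alias
	)

	query := Posts(exec, queryMods...)
	queries.SetFrom(query.Query, "`posts`") // previously: "`posts` as `a`"
	return query
}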

View file

@ -4,8 +4,7 @@
{{- range .Table.FKeys -}}
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo}}
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo}}
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
// loaded structs of the objects.
func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
@ -16,7 +15,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
if singular {
object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
} else {
slice = *{{$arg}}.(*{{$slice}})
slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
count = len(slice)
}
@ -46,20 +45,20 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
}
defer results.Close()
var resultSlice []*{{$txt.ForeignTable.NameGo}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
}
{{if not $dot.NoHooks -}}
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return err
return errors.Err(err)
}
}
}
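
The practical effect of the changed type assertion, sketched for a hypothetical Author model (illustrative only; this is not generated output):

func loadArgShape(maybeAuthor interface{}, singular bool) {
	var object *Author
	var slice []*Author

	if singular {
		object = maybeAuthor.(*Author)
	} else {
		// The loader now asserts on a plain *[]*Author. Passing an
		// *AuthorSlice (a distinct named type, even though it is defined
		// as []*Author) would make this assertion panic, so deeper
		// eager-load levels must hand in *[]*Author.
		slice = *maybeAuthor.(*[]*Author)
	}

	_, _ = object, slice
}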

View file

@ -5,7 +5,6 @@
{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo}}
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
// loaded structs of the objects.
func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
@ -16,7 +15,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
if singular {
object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
} else {
slice = *{{$arg}}.(*{{$slice}})
slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
count = len(slice)
}
@ -46,20 +45,20 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
}
defer results.Close()
var resultSlice []*{{$txt.ForeignTable.NameGo}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
}
{{if not $dot.NoHooks -}}
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return err
return errors.Err(err)
}
}
}

View file

@ -5,7 +5,6 @@
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
{{- $txt := txtsFromToMany $dot.Tables $dot.Table . -}}
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo -}}
{{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
// loaded structs of the objects.
@ -17,7 +16,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
if singular {
object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
} else {
slice = *{{$arg}}.(*{{$slice}})
slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
count = len(slice)
}
@ -55,7 +54,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Wrap(err, "failed to eager load {{.ForeignTable}}")
return errors.Prefix("failed to eager load {{.ForeignTable}}", err)
}
defer results.Close()
@ -71,7 +70,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
err = results.Scan({{$foreignTable.Columns | columnNames | stringMap $dot.StringFuncs.titleCase | prefixStringSlice "&one." | join ", "}}, &localJoinCol)
if err = results.Err(); err != nil {
return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
}
resultSlice = append(resultSlice, one)
@ -79,11 +78,11 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
}
if err = results.Err(); err != nil {
return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
}
{{else -}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Wrap(err, "failed to bind eager loaded slice {{.ForeignTable}}")
return errors.Prefix("failed to bind eager loaded slice {{.ForeignTable}}", err)
}
{{end}}
@ -91,7 +90,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
if len({{.ForeignTable | singular | camelCase}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return err
return errors.Err(err)
}
}
}

View file

@ -20,7 +20,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -30,7 +30,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -41,7 +41,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
var err error
if insert {
if err = related.Insert(exec); err != nil {
return errors.Wrap(err, "failed to insert into foreign table")
return errors.Prefix("failed to insert into foreign table", err)
}
}
@ -58,7 +58,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Wrap(err, "failed to update local table")
return errors.Prefix("failed to update local table", err)
}
o.{{$txt.Function.LocalAssignment}} = related.{{$txt.Function.ForeignAssignment}}
@ -110,7 +110,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$t
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -120,7 +120,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -133,7 +133,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
o.{{$txt.LocalTable.ColumnNameGo}}.Valid = false
if err = o.Update(exec, "{{.Column}}"); err != nil {
o.{{$txt.LocalTable.ColumnNameGo}}.Valid = true
return errors.Wrap(err, "failed to update local table")
return errors.Prefix("failed to update local table", err)
}
o.R.{{$txt.Function.Name}} = nil

View file

@ -21,7 +21,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -31,7 +31,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -48,7 +48,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
{{- end}}
if err = related.Insert(exec); err != nil {
return errors.Wrap(err, "failed to insert into foreign table")
return errors.Prefix("failed to insert into foreign table", err)
}
} else {
updateQuery := fmt.Sprintf(
@ -64,7 +64,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Wrap(err, "failed to update foreign table")
return errors.Prefix("failed to update foreign table", err)
}
related.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
@ -107,7 +107,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$t
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -117,7 +117,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -130,7 +130,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = false
if err = related.Update(exec, "{{.ForeignColumn}}"); err != nil {
related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
return errors.Wrap(err, "failed to update local table")
return errors.Prefix("failed to update local table", err)
}
o.R.{{$txt.Function.Name}} = nil

View file

@ -24,7 +24,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Add{{$txt.Function.Name}}(exec, insert, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -35,7 +35,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Add{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -55,7 +55,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
{{end -}}
if err = rel.Insert(exec); err != nil {
return errors.Wrap(err, "failed to insert into foreign table")
return errors.Prefix("failed to insert into foreign table", err)
}
}{{if not .ToJoinTable}} else {
updateQuery := fmt.Sprintf(
@ -71,7 +71,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Wrap(err, "failed to update foreign table")
return errors.Prefix("failed to update foreign table", err)
}
rel.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
@ -93,7 +93,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
_, err = exec.Exec(query, values...)
if err != nil {
return errors.Wrap(err, "failed to insert into join table")
return errors.Prefix("failed to insert into join table", err)
}
}
{{end -}}
@ -152,7 +152,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -165,7 +165,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -190,7 +190,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
_, err := exec.Exec(query, values...)
if err != nil {
return errors.Wrap(err, "failed to remove relationships before set")
return errors.Prefix("failed to remove relationships before set", err)
}
{{if .ToJoinTable -}}
@ -230,7 +230,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related ...*{
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -240,7 +240,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -266,7 +266,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
_, err = exec.Exec(query, values...)
if err != nil {
return errors.Wrap(err, "failed to remove relationships before set")
return errors.Prefix("failed to remove relationships before set", err)
}
{{else -}}
for _, rel := range related {
@ -277,7 +277,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
}
{{end -}}
if err = rel.Update(exec, "{{.ForeignColumn}}"); err != nil {
return err
return errors.Err(err)
}
}
{{end -}}

View file

@ -1,12 +1,12 @@
{{- $tableNamePlural := .Table.Name | plural | titleCase -}}
{{- $varNameSingular := .Table.Name | singular | camelCase}}
{{- $tableNameSingular := .Table.Name | singular | titleCase}}
// {{$tableNamePlural}}G retrieves all records.
func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
return {{$tableNamePlural}}(boil.GetDB(), mods...)
}
// {{$tableNamePlural}} retrieves all the records using an executor.
func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
mods = append(mods, qm.From("{{.Table.Name | .SchemaTable}}"))
return {{$varNameSingular}}Query{NewQuery(exec, mods...)}
return {{$tableNameSingular}}Query{NewQuery(exec, mods...)}
}
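
Instantiated for a hypothetical `user` table (names assumed), the template above would emit roughly the following; the visible change is only the name of the returned query type:

// UsersG retrieves all records.
func UsersG(mods ...qm.QueryMod) UserQuery {
	return Users(boil.GetDB(), mods...)
}

// Users retrieves all the records using an executor.
func Users(exec boil.Executor, mods ...qm.QueryMod) UserQuery {
	mods = append(mods, qm.From("`users`"))
	return UserQuery{NewQuery(exec, mods...)}
}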

View file

@ -12,7 +12,7 @@ func Find{{$tableNameSingular}}G({{$pkArgs}}, selectCols ...string) (*{{$tableNa
func Find{{$tableNameSingular}}GP({{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
retobj, err := Find{{$tableNameSingular}}(boil.GetDB(), {{$pkNames | join ", "}}, selectCols...)
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return retobj
@ -35,10 +35,10 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
err := q.Bind({{$varNameSingular}}Obj)
if err != nil {
if errors.Cause(err) == sql.ErrNoRows {
return nil, sql.ErrNoRows
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Wrap(err, "{{.PkgName}}: unable to select from {{.Table.Name}}")
return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
}
return {{$varNameSingular}}Obj, nil
@ -48,8 +48,79 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
func Find{{$tableNameSingular}}P(exec boil.Executor, {{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
retobj, err := Find{{$tableNameSingular}}(exec, {{$pkNames | join ", "}}, selectCols...)
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return retobj
}
// FindOne{{$tableNameSingular}} retrieves a single record using filters.
func FindOne{{$tableNameSingular}}(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
obj := &{{$tableNameSingular}}{}
err := {{$tableNameSingular}}NewQuery(exec).
Where(filters).
Limit(1).
Bind(obj)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
}
return obj, nil
}
// FindOne{{$tableNameSingular}}G retrieves a single record using filters.
func FindOne{{$tableNameSingular}}G(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}(boil.GetDB(), filters)
}
// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes a new record if one is not found.
func FindOne{{$tableNameSingular}}OrInit(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}(exec, filters)
if err != nil {
return nil, err
}
if {{$varNameSingular}}Obj == nil {
{{$varNameSingular}}Obj = &{{$tableNameSingular}}{}
objR := reflect.ValueOf({{$varNameSingular}}Obj).Elem()
r := reflect.ValueOf(filters)
for i := 0; i < r.NumField(); i++ {
f := r.Field(i)
if f.Elem().IsValid() {
objR.FieldByName(r.Type().Field(i).Name).Set(f.Elem())
}
}
}
return {{$varNameSingular}}Obj, nil
}
// FindOne{{$tableNameSingular}}OrInitG retrieves a single record using filters, or initializes a new record if one is not found. Uses the global database handle.
func FindOne{{$tableNameSingular}}OrInitG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}OrInit(boil.GetDB(), filters)
}
// FindOne{{$tableNameSingular}}OrCreate retrieves a single record using filters, or initializes and inserts a new record if one is not found.
func FindOne{{$tableNameSingular}}OrCreate(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}OrInit(exec, filters)
if err != nil {
return nil, err
}
if {{$varNameSingular}}Obj.IsNew() {
err := {{$varNameSingular}}Obj.Insert(exec)
if err != nil {
return nil, err
}
}
return {{$varNameSingular}}Obj, nil
}
// FindOne{{$tableNameSingular}}OrCreateG retrieves a single record using filters, or initializes and inserts a new record if one is not found. Uses the global database handle.
func FindOne{{$tableNameSingular}}OrCreateG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}OrCreate(boil.GetDB(), filters)
}
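
A usage sketch for the new finder variants, assuming a generated User model and a UserFilter struct whose fields are pointers (as the reflection loop above implies); the Email field and the models package context are assumptions:

func getOrCreateUser(db boil.Executor, email string) (*User, error) {
	// Looks the record up by the filter, initializes it from the filter
	// values if missing, and inserts it when it is new.
	u, err := FindOneUserOrCreate(db, UserFilter{Email: &email})
	if err != nil {
		return nil, err
	}
	// u is non-nil here: either the existing row or a freshly inserted one.
	return u, nil
}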

View file

@ -10,7 +10,7 @@ func (o *{{$tableNameSingular}}) InsertG(whitelist ... string) error {
// behavior description.
func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
if err := o.Insert(boil.GetDB(), whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -18,7 +18,7 @@ func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
// for whitelist behavior description.
func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... string) {
if err := o.Insert(exec, whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -29,7 +29,7 @@ func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... strin
// - All columns with a default, but non-zero are included (i.e. health = 75)
func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
}
var err error
@ -37,7 +37,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
{{if not .NoHooks -}}
if err := o.doBeforeInsertHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{- end}}
@ -59,11 +59,11 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, wl)
if err != nil {
return err
return errors.Err(err)
}
cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, returnColumns)
if err != nil {
return err
return errors.Err(err)
}
if len(wl) != 0 {
cache.query = fmt.Sprintf("INSERT INTO {{$schemaTable}} ({{.LQ}}%s{{.RQ}}) %%sVALUES (%s)%%s", strings.Join(wl, "{{.RQ}},{{.LQ}}"), strmangle.Placeholders(dialect.IndexPlaceholders, len(wl), 1, 1))
@ -86,9 +86,12 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
{{else -}}
queryOutput = fmt.Sprintf("OUTPUT INSERTED.{{.LQ}}%s{{.RQ}} ", strings.Join(returnColumns, "{{.RQ}},INSERTED.{{.LQ}}"))
{{end -}}
{{- end}}
{{end -}}
}
if len(wl) != 0 {
cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
}
cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
}
value := reflect.Indirect(reflect.ValueOf(o))
@ -107,7 +110,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
_, err = exec.Exec(cache.query, vals...)
{{- end}}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
}
{{if $canLastInsertID -}}
@ -122,7 +125,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
{{if $canLastInsertID -}}
lastID, err = result.LastInsertId()
if err != nil {
return ErrSyncFail
return errors.Err(ErrSyncFail)
}
{{$colName := index .Table.PKey.Columns 0 -}}
@ -147,7 +150,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
}
{{else}}
if len(cache.retMapping) != 0 {
@ -157,7 +160,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
}
{{end}}

View file

@ -12,7 +12,7 @@ func (o *{{$tableNameSingular}}) UpdateG(whitelist ...string) error {
// Panics on error. See Update for whitelist behavior description.
func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
if err := o.Update(boil.GetDB(), whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -21,7 +21,7 @@ func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
func (o *{{$tableNameSingular}}) UpdateP(exec boil.Executor, whitelist ... string) {
err := o.Update(exec, whitelist...)
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
var err error
{{if not .NoHooks -}}
if err = o.doBeforeUpdateHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{end -}}
@ -56,13 +56,11 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
{{if eq .DriverName "mssql"}}
wl = strmangle.SetComplement(wl, {{$varNameSingular}}ColumnsWithAuto)
{{end}}
{{- if not .NoAutoTimestamps}}
if len(whitelist) == 0 {
wl = strmangle.SetComplement(wl, []string{"created_at"})
wl = strmangle.SetComplement(wl, []string{"created_at","updated_at"})
}
{{end -}}
if len(wl) == 0 {
return errors.New("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
return errors.Err("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
}
cache.query = fmt.Sprintf("UPDATE {{$schemaTable}} SET %s WHERE %s",
@ -71,7 +69,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
)
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, append(wl, {{$varNameSingular}}PrimaryKeyColumns...))
if err != nil {
return err
return errors.Err(err)
}
}
@ -84,7 +82,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
_, err = exec.Exec(cache.query, values...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to update {{.Table.Name}} row")
return errors.Prefix("{{.PkgName}}: unable to update {{.Table.Name}} row", err)
}
if !cached {
@ -101,19 +99,19 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
}
// UpdateAllP updates all rows with matching column names, and panics on error.
func (q {{$varNameSingular}}Query) UpdateAllP(cols M) {
func (q {{$tableNameSingular}}Query) UpdateAllP(cols M) {
if err := q.UpdateAll(cols); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// UpdateAll updates all rows with the specified column values.
func (q {{$varNameSingular}}Query) UpdateAll(cols M) error {
func (q {{$tableNameSingular}}Query) UpdateAll(cols M) error {
queries.SetUpdate(q.Query, cols)
_, err := q.Query.Exec()
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to update all for {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to update all for {{.Table.Name}}", err)
}
return nil
@ -127,14 +125,14 @@ func (o {{$tableNameSingular}}Slice) UpdateAllG(cols M) error {
// UpdateAllGP updates all rows with the specified column values, and panics on error.
func (o {{$tableNameSingular}}Slice) UpdateAllGP(cols M) {
if err := o.UpdateAll(boil.GetDB(), cols); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// UpdateAllP updates all rows with the specified column values, and panics on error.
func (o {{$tableNameSingular}}Slice) UpdateAllP(exec boil.Executor, cols M) {
if err := o.UpdateAll(exec, cols); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -146,7 +144,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
}
if len(cols) == 0 {
return errors.New("{{.PkgName}}: update all requires at least one column argument")
return errors.Err("{{.PkgName}}: update all requires at least one column argument")
}
colNames := make([]string, len(cols))
@ -176,7 +174,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to update all in {{$varNameSingular}} slice")
return errors.Prefix("{{.PkgName}}: unable to update all in {{$varNameSingular}} slice", err)
}
return nil
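
A usage sketch for UpdateAll on the renamed query type (the users table, its status/created_at columns, and the generated-package context are assumptions):

func archiveStaleUsers(db boil.Executor, cutoff time.Time) error {
	// Issues a single UPDATE ... SET status = 'archived' WHERE created_at < ?
	return Users(db, qm.Where("created_at < ?", cutoff)).
		UpdateAll(M{"status": "archived"})
}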

View file

@ -9,7 +9,7 @@ func (o *{{$tableNameSingular}}) UpsertG({{if eq .DriverName "postgres"}}updateO
// UpsertGP attempts an insert, and does an update or ignore on conflict. Panics on error.
func (o *{{$tableNameSingular}}) UpsertGP({{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
if err := o.Upsert(boil.GetDB(), {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -17,21 +17,21 @@ func (o *{{$tableNameSingular}}) UpsertGP({{if eq .DriverName "postgres"}}update
// UpsertP panics on error.
func (o *{{$tableNameSingular}}) UpsertP(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
if err := o.Upsert(exec, {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
}
{{- template "timestamp_upsert_helper" . }}
{{if not .NoHooks -}}
if err := o.doBeforeUpsertHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{- end}}
@ -87,12 +87,12 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
}
}
if len(insert) == 0 {
return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build insert column list")
return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build insert column list")
}
ret = strmangle.SetMerge(ret, {{$varNameSingular}}ColumnsWithAuto)
ret = strmangle.SetMerge(ret, {{$varNameSingular}}ColumnsWithDefault)
{{end}}
update := strmangle.UpdateColumnSet(
{{$varNameSingular}}Columns,
@ -102,9 +102,9 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
{{if eq .DriverName "mssql" -}}
update = strmangle.SetComplement(update, {{$varNameSingular}}ColumnsWithAuto)
{{end -}}
if len(update) == 0 {
return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
}
{{if eq .DriverName "postgres"}}
@ -115,7 +115,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
}
cache.query = queries.BuildUpsertQueryPostgres(dialect, "{{$schemaTable}}", updateOnConflict, ret, update, conflict, insert)
{{else if eq .DriverName "mysql"}}
cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, insert)
cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, insert, {{$varNameSingular}}AutoIncrementColumn)
cache.retQuery = fmt.Sprintf(
"SELECT %s FROM {{.LQ}}{{.Table.Name}}{{.RQ}} WHERE {{whereClause .LQ .RQ 0 .Table.PKey.Columns}}",
strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), ","),
@ -129,12 +129,12 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, {{if eq .DriverName "mssql"}}whitelist{{else}}insert{{end}})
if err != nil {
return err
return errors.Err(err)
}
if len(ret) != 0 {
cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, ret)
if err != nil {
return err
return errors.Err(err)
}
}
}
@ -159,7 +159,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
_, err = exec.Exec(cache.query, vals...)
{{- end}}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to upsert for {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to upsert for {{.Table.Name}}", err)
}
{{if $canLastInsertID -}}
@ -174,7 +174,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
{{if $canLastInsertID -}}
lastID, err = result.LastInsertId()
if err != nil {
return ErrSyncFail
return errors.Err(ErrSyncFail)
}
{{$colName := index .Table.PKey.Columns 0 -}}
@ -199,7 +199,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
}
{{- else}}
if len(cache.retMapping) != 0 {
@ -211,7 +211,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
_, err = exec.Exec(cache.query, vals...)
}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to upsert {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to upsert {{.Table.Name}}", err)
}
{{- end}}

View file

@ -6,7 +6,7 @@
// Panics on error.
func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
if err := o.Delete(exec); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -14,7 +14,7 @@ func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
// DeleteG will match against the primary key column to find the record to delete.
func (o *{{$tableNameSingular}}) DeleteG() error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
}
return o.Delete(boil.GetDB())
@ -25,7 +25,7 @@ func (o *{{$tableNameSingular}}) DeleteG() error {
// Panics on error.
func (o *{{$tableNameSingular}}) DeleteGP() {
if err := o.DeleteG(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -33,12 +33,12 @@ func (o *{{$tableNameSingular}}) DeleteGP() {
// Delete will match against the primary key column to find the record to delete.
func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
}
{{if not .NoHooks -}}
if err := o.doBeforeDeleteHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{- end}}
@ -52,12 +52,12 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to delete from {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to delete from {{.Table.Name}}", err)
}
{{if not .NoHooks -}}
if err := o.doAfterDeleteHooks(exec); err != nil {
return err
return errors.Err(err)
}
{{- end}}
@ -65,23 +65,23 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
}
// DeleteAllP deletes all rows, and panics on error.
func (q {{$varNameSingular}}Query) DeleteAllP() {
func (q {{$tableNameSingular}}Query) DeleteAllP() {
if err := q.DeleteAll(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// DeleteAll deletes all matching rows.
func (q {{$varNameSingular}}Query) DeleteAll() error {
func (q {{$tableNameSingular}}Query) DeleteAll() error {
if q.Query == nil {
return errors.New("{{.PkgName}}: no {{$varNameSingular}}Query provided for delete all")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}}Query provided for delete all")
}
queries.SetDelete(q.Query)
_, err := q.Query.Exec()
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{.Table.Name}}")
return errors.Prefix("{{.PkgName}}: unable to delete all from {{.Table.Name}}", err)
}
return nil
@ -90,14 +90,14 @@ func (q {{$varNameSingular}}Query) DeleteAll() error {
// DeleteAllGP deletes all rows in the slice, and panics on error.
func (o {{$tableNameSingular}}Slice) DeleteAllGP() {
if err := o.DeleteAllG(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// DeleteAllG deletes all rows in the slice.
func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
}
return o.DeleteAll(boil.GetDB())
}
@ -105,14 +105,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
// DeleteAllP deletes all rows in the slice, using an executor, and panics on error.
func (o {{$tableNameSingular}}Slice) DeleteAllP(exec boil.Executor) {
if err := o.DeleteAll(exec); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// DeleteAll deletes all rows in the slice, using an executor.
func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
}
if len(o) == 0 {
@ -123,7 +123,7 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
if len({{$varNameSingular}}BeforeDeleteHooks) != 0 {
for _, obj := range o {
if err := obj.doBeforeDeleteHooks(exec); err != nil {
return err
return errors.Err(err)
}
}
}
@ -145,14 +145,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice")
return errors.Prefix("{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice", err)
}
{{if not .NoHooks -}}
if len({{$varNameSingular}}AfterDeleteHooks) != 0 {
for _, obj := range o {
if err := obj.doAfterDeleteHooks(exec); err != nil {
return err
return errors.Err(err)
}
}
}

View file

@ -5,21 +5,21 @@
// ReloadGP refetches the object from the database and panics on error.
func (o *{{$tableNameSingular}}) ReloadGP() {
if err := o.ReloadG(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// ReloadP refetches the object from the database with an executor. Panics on error.
func (o *{{$tableNameSingular}}) ReloadP(exec boil.Executor) {
if err := o.Reload(exec); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
// ReloadG refetches the object from the database using the primary keys.
func (o *{{$tableNameSingular}}) ReloadG() error {
if o == nil {
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
}
return o.Reload(boil.GetDB())
@ -30,7 +30,7 @@ func (o *{{$tableNameSingular}}) ReloadG() error {
func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
ret, err := Find{{$tableNameSingular}}(exec, {{.Table.PKey.Columns | stringMap .StringFuncs.titleCase | prefixStringSlice "o." | join ", "}})
if err != nil {
return err
return errors.Err(err)
}
*o = *ret
@ -42,7 +42,7 @@ func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
// Panics on error.
func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
if err := o.ReloadAllG(); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -51,7 +51,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
// Panics on error.
func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
if err := o.ReloadAll(exec); err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
}
@ -59,7 +59,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
// and overwrites the original object slice with the newly updated slice.
func (o *{{$tableNameSingular}}Slice) ReloadAllG() error {
if o == nil {
return errors.New("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
return errors.Err("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
}
return o.ReloadAll(boil.GetDB())
@ -86,7 +86,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAll(exec boil.Executor) error {
err := q.Bind(&{{$varNamePlural}})
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice")
return errors.Prefix("{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice", err)
}
*o = {{$varNamePlural}}

View file

@ -1,4 +1,5 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- $varNameSingular := .Table.Name | singular | camelCase -}}
{{- $colDefs := sqlColDefinitions .Table.Columns .Table.PKey.Columns -}}
{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase | stringMap .StringFuncs.replaceReserved -}}
{{- $pkArgs := joinSlices " " $pkNames $colDefs.Types | join ", " -}}
@ -21,7 +22,7 @@ func {{$tableNameSingular}}Exists(exec boil.Executor, {{$pkArgs}}) (bool, error)
err := row.Scan(&exists)
if err != nil {
return false, errors.Wrap(err, "{{.PkgName}}: unable to check if {{.Table.Name}} exists")
return false, errors.Prefix("{{.PkgName}}: unable to check if {{.Table.Name}} exists", err)
}
return exists, nil
@ -36,7 +37,7 @@ func {{$tableNameSingular}}ExistsG({{$pkArgs}}) (bool, error) {
func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
e, err := {{$tableNameSingular}}Exists(boil.GetDB(), {{$pkNames | join ", "}})
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return e
@ -46,8 +47,43 @@ func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
func {{$tableNameSingular}}ExistsP(exec boil.Executor, {{$pkArgs}}) bool {
e, err := {{$tableNameSingular}}Exists(exec, {{$pkNames | join ", "}})
if err != nil {
panic(boil.WrapErr(err))
panic(errors.Err(err))
}
return e
}
// IsNew reports whether the record has not yet been stored in the db (i.e. all of its primary key fields still hold their zero values).
func (o *{{$tableNameSingular}}) IsNew() bool {
r := reflect.ValueOf(o).Elem()
for i := 0; i < r.NumField(); i++ {
column := r.Type().Field(i).Tag.Get("boil")
for _, pkColumn := range {{$varNameSingular}}PrimaryKeyColumns {
if column == pkColumn {
field := r.Field(i)
if field.Interface() != reflect.Zero(field.Type()).Interface() {
return false
}
}
}
}
return true
}
// Save() inserts the record if it does not exist, or updates it if it does.
func (o *{{$tableNameSingular}}) Save(exec boil.Executor, whitelist ...string) error {
if o.IsNew() {
return o.Insert(exec, whitelist...)
} else {
return o.Update(exec, whitelist...)
}
}
// SaveG() inserts the record if it does not exist, or updates it if it does.
func (o *{{$tableNameSingular}}) SaveG(whitelist ...string) error {
if o.IsNew() {
return o.InsertG(whitelist...)
} else {
return o.UpdateG(whitelist...)
}
}
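
A usage sketch of the new helpers, assuming a generated User model with an auto-increment ID primary key (an assumption about the schema):

func persistUser(db boil.Executor, u *User) error {
	// IsNew is true only while every primary-key field still holds its
	// zero value, so a User with ID == 0 is inserted and one loaded from
	// the database (ID != 0) is updated.
	return u.Save(db)
}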

33
templates/22_query.tpl Normal file
View file

@ -0,0 +1,33 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
// {{$tableNameSingular}}NewQuery creates a new query for {{.Table.Name}} records
func {{$tableNameSingular}}NewQuery(exec boil.Executor) *{{$tableNameSingular}}Query {
return &{{$tableNameSingular}}Query{NewQuery(exec, qm.Select("*"), qm.From("{{.Table.Name | .SchemaTable}}"))}
}
// {{$tableNameSingular}}NewQueryG creates a new query using the global database handle
func {{$tableNameSingular}}NewQueryG() *{{$tableNameSingular}}Query {
return {{$tableNameSingular}}NewQuery(boil.GetDB())
}
// Where filters query results
func (q *{{$tableNameSingular}}Query) Where(filters {{$tableNameSingular}}Filter) *{{$tableNameSingular}}Query {
r := reflect.ValueOf(filters)
for i := 0; i < r.NumField(); i++ {
f := r.Field(i)
if f.Elem().IsValid() {
if nullable, ok := f.Elem().Interface().(null.Nullable); ok && nullable.IsNull() {
queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" IS NULL")
} else {
queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" = ?", f.Elem().Interface())
}
}
}
return q
}
// Limit limits query results
func (q *{{$tableNameSingular}}Query) Limit(limit int) *{{$tableNameSingular}}Query {
queries.SetLimit(q.Query, limit)
return q
}
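
A sketch of chaining the new query helpers directly, mirroring what FindOne does internally; User, UserFilter and its Role field are assumptions about the generated code:

func findOneAdmin(db boil.Executor) (*User, error) {
	role := "admin"
	u := &User{}
	err := UserNewQuery(db).
		Where(UserFilter{Role: &role}). // nil filter fields are skipped; null values become "IS NULL"
		Limit(1).
		Bind(u)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return nil, nil
		}
		return nil, err
	}
	return u, nil
}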

107
templates/23_merge.tpl Normal file
View file

@ -0,0 +1,107 @@
{{- $tableNamePlural := .Table.Name | plural | titleCase -}}
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- if .Table.IsJoinTable -}}
{{- else -}}
{{- $dot := . }}
// Merge combines two {{$tableNamePlural}} into one. The primary record will be kept, and the secondary will be deleted.
func Merge{{$tableNamePlural}}(exec boil.Executor, primaryID uint64, secondaryID uint64) (err error) {
tx, ok := exec.(boil.Transactor)
if !ok {
txdb, ok := exec.(boil.Beginner)
if !ok {
return errors.Err("database does not support transactions")
}
tx, err = txdb.Begin()
if err != nil {
return errors.Err(err)
}
defer func() {
if p := recover(); p != nil {
tx.Rollback()
panic(p) // Rollback, then propagate panic
} else if err != nil {
tx.Rollback()
} else {
err = tx.Commit()
}
}()
}
primary, err := Find{{$tableNameSingular}}(tx, primaryID)
if err != nil {
return errors.Err(err)
} else if primary == nil {
return errors.Err("primary {{$tableNameSingular}} not found")
}
secondary, err := Find{{$tableNameSingular}}(tx, secondaryID)
if err != nil {
return errors.Err(err)
} else if secondary == nil {
return errors.Err("secondary {{$tableNameSingular}} not found")
}
foreignKeys := []foreignKey{
{{- range .Tables -}}
{{- range .FKeys -}}
{{- if eq $dot.Table.Name .ForeignTable }}
{foreignTable: "{{.Table}}", foreignColumn: "{{.Column}}"},
{{- end -}}
{{- end -}}
{{- end }}
}
conflictingKeys := []conflictingUniqueKey{
{{- range .Tables -}}
{{- $table := . -}}
{{- range .FKeys -}}
{{- $fk := . -}}
{{- if eq $dot.Table.Name .ForeignTable -}}
{{- range $table.UKeys -}}
{{- if setInclude $fk.Column .Columns }}
{table: "{{$fk.Table}}", objectIdColumn: "{{$fk.Column}}", columns: []string{`{{ .Columns | join "`,`" }}`}},
{{- end -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{- end }}
}
err = mergeModels(tx, primaryID, secondaryID, foreignKeys, conflictingKeys)
if err != nil {
return err
}
pr := reflect.ValueOf(primary)
sr := reflect.ValueOf(secondary)
// for any column that is null on the primary and not null on the secondary, copy it from the secondary to the primary
for i := 0; i < sr.Elem().NumField(); i++ {
pf := pr.Elem().Field(i)
sf := sr.Elem().Field(i)
if sf.IsValid() {
if nullable, ok := sf.Interface().(null.Nullable); ok && !nullable.IsNull() && pf.Interface().(null.Nullable).IsNull() {
pf.Set(sf)
}
}
}
err = primary.Update(tx)
if err != nil {
return err
}
err = secondary.Delete(tx)
if err != nil {
return err
}
return nil
}
// Merge{{$tableNamePlural}}G combines two {{$tableNamePlural}} into one using the global database handle. The primary record will be kept, and the secondary will be deleted.
func Merge{{$tableNamePlural}}G(primaryID uint64, secondaryID uint64) error {
return Merge{{$tableNamePlural}}(boil.GetDB(), primaryID, secondaryID)
}
{{- end -}}{{/* join table */}}
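
A usage sketch for the generated merge entry point, assuming a users table whose primary keys are uint64 (as the signature requires) and whose related tables reference it by user_id; all names are illustrative:

func dedupeUsers(db boil.Executor, keepID, dropID uint64) error {
	// Re-points every foreign key from dropID to keepID, resolves
	// unique-key conflicts, fills null columns on the kept row from the
	// dropped one, and finally deletes the dropped row.
	return MergeUsers(db, keepID, dropID)
}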

View file

@ -19,3 +19,168 @@ func NewQuery(exec boil.Executor, mods ...qm.QueryMod) *queries.Query {
return q
}
func mergeModels(tx boil.Executor, primaryID uint64, secondaryID uint64, foreignKeys []foreignKey, conflictingKeys []conflictingUniqueKey) error {
if len(foreignKeys) < 1 {
return nil
}
var err error
for _, conflict := range conflictingKeys {
if len(conflict.columns) == 1 && conflict.columns[0] == conflict.objectIdColumn {
err = deleteOneToOneConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
} else {
err = deleteOneToManyConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
}
if err != nil {
return err
}
}
for _, fk := range foreignKeys {
// TODO: use NewQuery here, not plain sql
query := fmt.Sprintf(
"UPDATE %s SET %s = %s WHERE %s = %s",
fk.foreignTable, fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 2, 1),
)
_, err = tx.Exec(query, primaryID, secondaryID)
if err != nil {
return errors.Err(err)
}
}
return checkMerge(tx, foreignKeys)
}
func deleteOneToOneConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
query := fmt.Sprintf(
"SELECT COUNT(*) FROM %s WHERE %s IN (%s)",
conflict.table, conflict.objectIdColumn,
strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
)
var count int
err := tx.QueryRow(query, primaryID, secondaryID).Scan(&count)
if err != nil {
return errors.Err(err)
}
if count > 2 {
return errors.Err("it should not be possible to have more than two rows here")
} else if count != 2 {
return nil // no conflicting rows
}
query = fmt.Sprintf(
"DELETE FROM %s WHERE %s = %s",
conflict.table, conflict.objectIdColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
)
_, err = tx.Exec(query, secondaryID)
return errors.Err(err)
}
func deleteOneToManyConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
conflictingColumns := strmangle.SetComplement(conflict.columns, []string{conflict.objectIdColumn})
query := fmt.Sprintf(
"SELECT %s FROM %s WHERE %s IN (%s) GROUP BY %s HAVING count(distinct %s) > 1",
strings.Join(conflictingColumns, ","), conflict.table, conflict.objectIdColumn,
strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
strings.Join(conflictingColumns, ","), conflict.objectIdColumn,
)
// The query parameters are the two object IDs being merged; the conflicting key values shared between them are selected here.
rows, err := tx.Query(query, primaryID, secondaryID)
if err != nil {
return errors.Err(err)
}
// Since we don't know in advance how many columns the query returns, we have to assign them dynamically
// so they can be used in the delete query.
colNames, err := rows.Columns()
if err != nil {
return errors.Err(err)
}
// Each row returned by the query is a conflicting row that must be removed. Store each row's keys in a slice.
var rowsToRemove = [][]interface{}(nil)
for rows.Next() {
//Set pointers for dynamic scan
iColPtrs := make([]interface{}, len(colNames))
for i := 0; i < len(colNames); i++ {
s := string("")
iColPtrs[i] = &s
}
//Dynamically scan n columns
err = rows.Scan(iColPtrs...)
if err != nil {
return errors.Err(err)
}
//Grab scanned values for query arguments
iCol := make([]interface{}, len(colNames))
for i, col := range iColPtrs {
x := col.(*string)
iCol[i] = *x
}
rowsToRemove = append(rowsToRemove, iCol)
}
defer rows.Close()
//This query will adjust dynamically depending on the number of conflicting keys, adding AND expressions for each
// key to ensure the right conflicting rows are deleted.
query = fmt.Sprintf(
"DELETE FROM %s %s",
conflict.table,
"WHERE "+strings.Join(conflict.columns, " = ? AND ")+" = ?",
)
//There could be multiple conflicting rows between ObjectIDs. In the SELECT query we grab each row and their column
// keys to be deleted here in a loop.
for _, rowToDelete := range rowsToRemove {
rowToDelete = append(rowToDelete, secondaryID)
_, err = tx.Exec(query, rowToDelete...)
if err != nil {
return errors.Err(err)
}
}
return nil
}
func checkMerge(tx boil.Executor, foreignKeys []foreignKey) error {
uniqueColumns := []interface{}{}
uniqueColumnNames := map[string]bool{}
handledTablesColumns := map[string]bool{}
for _, fk := range foreignKeys {
handledTablesColumns[fk.foreignTable+"."+fk.foreignColumn] = true
if _, ok := uniqueColumnNames[fk.foreignColumn]; !ok {
uniqueColumns = append(uniqueColumns, fk.foreignColumn)
uniqueColumnNames[fk.foreignColumn] = true
}
}
q := fmt.Sprintf(
`SELECT table_name, column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA=DATABASE() AND column_name IN (%s)`,
strmangle.Placeholders(dialect.IndexPlaceholders, len(uniqueColumns), 1, 1),
)
rows, err := tx.Query(q, uniqueColumns...)
if err != nil {
return errors.Err(err)
}
defer rows.Close()
for rows.Next() {
var tableName string
var columnName string
err = rows.Scan(&tableName, &columnName)
if err != nil {
return errors.Err(err)
}
if _, exists := handledTablesColumns[tableName+"."+columnName]; !exists {
return errors.Err("missing merge for " + tableName + "." + columnName)
}
}
return nil
}
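For orientation only, a hedged sketch of how a generated merge routine might wire these helpers together inside the models package; the table and column names are hypothetical, and the closing UPDATE is the simplest possible reassignment rather than the exact generated code:

func mergeClaimExample(tx boil.Executor, primaryID, secondaryID uint64) error {
	// Hypothetical foreign keys pointing at the claim table.
	fks := []foreignKey{{foreignTable: "claim_tag", foreignColumn: "claim_id"}}
	// Hypothetical unique key that could conflict once claim_id is rewritten.
	conflict := conflictingUniqueKey{
		table:          "claim_tag",
		objectIdColumn: "claim_id",
		columns:        []string{"tag", "claim_id"},
	}
	// Fail fast if some table referencing claims has no merge step defined.
	if err := checkMerge(tx, fks); err != nil {
		return err
	}
	// Drop rows that would violate the unique key after reassignment.
	if err := deleteOneToManyConflictsBeforeMerge(tx, conflict, primaryID, secondaryID); err != nil {
		return err
	}
	// Re-point the remaining secondary rows at the primary record.
	query := fmt.Sprintf("UPDATE %s SET %s = ? WHERE %s = ?",
		conflict.table, conflict.objectIdColumn, conflict.objectIdColumn)
	_, err := tx.Exec(query, primaryID, secondaryID)
	return errors.Err(err)
}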

View file

@ -0,0 +1,9 @@
var TableNames = struct {
{{range $table := .Tables -}}
{{titleCase $table.Name}} string
{{end -}}
}{
{{range $table := .Tables -}}
{{titleCase $table.Name}}: "{{$table.Name}}",
{{end -}}
}
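For a hypothetical schema containing tables claim and claim_tag, the template above would generate roughly:

var TableNames = struct {
	Claim    string
	ClaimTag string
}{
	Claim:    "claim",
	ClaimTag: "claim_tag",
}

so calling code can reference tables as models.TableNames.ClaimTag instead of repeating string literals.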

View file

@ -1,10 +1,26 @@
// M type is for providing columns and column values to UpdateAll.
type M map[string]interface{}
// foreignKey connects two tables. When merging records, foreign keys from the secondary record must
// be reassigned to the primary record.
type foreignKey struct {
foreignTable string
foreignColumn string
}
// conflictingUniqueKey describes a unique key that can conflict during a merge. If the two records
// being merged each have a row with the same values in the conflicting columns, one of those rows must be deleted.
type conflictingUniqueKey struct {
table string
objectIdColumn string
columns []string
}
// ErrSyncFail occurs during insert when the record could not be retrieved in
// order to populate default value information. This usually happens when LastInsertId
// fails or there was a primary key configuration that was not resolvable.
var ErrSyncFail = errors.New("{{.PkgName}}: failed to synchronize data after insert")
var ErrSyncFail = errors.Base("{{.PkgName}}: failed to synchronize data after insert")
type insertCache struct {
query string

View file

@ -25,17 +25,17 @@ func (m *mssqlTester) setup() error {
m.testDBName = randomize.StableDBName(m.dbName)
if err = m.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
if err = m.createTestDB(); err != nil {
return err
return errors.Err(err)
}
createCmd := exec.Command("sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass, "-d", m.testDBName)
f, err := os.Open("tables_schema.sql")
if err != nil {
return errors.Wrap(err, "failed to open tables_schema.sql file")
return errors.Prefix("failed to open tables_schema.sql file", err)
}
defer f.Close()
@ -43,12 +43,12 @@ func (m *mssqlTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxMSSQLkey, f)
if err = createCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start sqlcmd command")
return errors.Prefix("failed to start sqlcmd command", err)
}
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for sqlcmd command")
return errors.Prefix("failed to wait for sqlcmd command", err)
}
return nil
@ -92,7 +92,7 @@ func (m *mssqlTester) teardown() error {
}
if err := m.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
return nil
@ -110,7 +110,7 @@ func (m *mssqlTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return err
return errors.Err(err)
}
return nil
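The substitutions in this and the following tester diffs follow a single pattern: calls to github.com/pkg/errors are replaced with the lbry.go errors package, errors.Wrap(err, msg) becomes errors.Prefix(msg, err) (note the reversed argument order), and bare "return err" becomes "return errors.Err(err)", presumably so every returned error is wrapped by that package. A minimal sketch of the pattern, assuming the lbry.go errors import and the os import already present in these files:

func openSchema(path string) error {
	f, err := os.Open(path)
	if err != nil {
		// was: errors.Wrap(err, "failed to open tables_schema.sql file")
		return errors.Prefix("failed to open tables_schema.sql file", err)
	}
	defer f.Close()
	return nil
}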

View file

@ -30,14 +30,14 @@ func (m *mysqlTester) setup() error {
m.testDBName = randomize.StableDBName(m.dbName)
if err = m.makeOptionFile(); err != nil {
return errors.Wrap(err, "couldn't make option file")
return errors.Prefix("couldn't make option file", err)
}
if err = m.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
if err = m.createTestDB(); err != nil {
return err
return errors.Err(err)
}
dumpCmd := exec.Command("mysqldump", m.defaultsFile(), "--no-data", m.dbName)
@ -48,22 +48,22 @@ func (m *mysqlTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxMySQLkey, r)
if err = dumpCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start mysqldump command")
return errors.Prefix("failed to start mysqldump command", err)
}
if err = createCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start mysql command")
return errors.Prefix("failed to start mysql command", err)
}
if err = dumpCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for mysqldump command")
return errors.Prefix("failed to wait for mysqldump command", err)
}
w.Close() // After dumpCmd is done, close the write end of the pipe
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for mysql command")
return errors.Prefix("failed to wait for mysql command", err)
}
return nil
@ -87,7 +87,7 @@ func (m *mysqlTester) defaultsFile() string {
func (m *mysqlTester) makeOptionFile() error {
tmp, err := ioutil.TempFile("", "optionfile")
if err != nil {
return errors.Wrap(err, "failed to create option file")
return errors.Prefix("failed to create option file", err)
}
isTCP := false
@ -95,7 +95,7 @@ func (m *mysqlTester) makeOptionFile() error {
if os.IsNotExist(err) {
isTCP = true
} else if err != nil {
return errors.Wrap(err, "could not stat m.host")
return errors.Prefix("could not stat m.host", err)
}
fmt.Fprintln(tmp, "[client]")
@ -139,7 +139,7 @@ func (m *mysqlTester) teardown() error {
}
if err := m.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
return os.Remove(m.optionFile)
@ -159,7 +159,7 @@ func (m *mysqlTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return err
return errors.Err(err)
}
return nil

View file

@ -33,14 +33,14 @@ func (p *pgTester) setup() error {
p.testDBName = randomize.StableDBName(p.dbName)
if err = p.makePGPassFile(); err != nil {
return err
return errors.Err(err)
}
if err = p.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
if err = p.createTestDB(); err != nil {
return err
return errors.Err(err)
}
dumpCmd := exec.Command("pg_dump", "--schema-only", p.dbName)
@ -53,22 +53,22 @@ func (p *pgTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxPGFkey, r)
if err = dumpCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start pg_dump command")
return errors.Prefix("failed to start pg_dump command", err)
}
if err = createCmd.Start(); err != nil {
return errors.Wrap(err, "failed to start psql command")
return errors.Prefix("failed to start psql command", err)
}
if err = dumpCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for pg_dump command")
return errors.Prefix("failed to wait for pg_dump command", err)
}
w.Close() // After dumpCmd is done, close the write end of the pipe
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Wrap(err, "failed to wait for psql command")
return errors.Prefix("failed to wait for psql command", err)
}
return nil
@ -90,7 +90,7 @@ func (p *pgTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return err
return errors.Err(err)
}
return nil
@ -108,7 +108,7 @@ func (p *pgTester) pgEnv() []string {
func (p *pgTester) makePGPassFile() error {
tmp, err := ioutil.TempFile("", "pgpass")
if err != nil {
return errors.Wrap(err, "failed to create option file")
return errors.Prefix("failed to create option file", err)
}
fmt.Fprintf(tmp, "%s:%d:postgres:%s", p.host, p.port, p.user)
@ -145,12 +145,12 @@ func (p *pgTester) dropTestDB() error {
func (p *pgTester) teardown() error {
var err error
if err = p.dbConn.Close(); err != nil {
return err
return errors.Err(err)
}
p.dbConn = nil
if err = p.dropTestDB(); err != nil {
return err
return errors.Err(err)
}
return os.Remove(p.pgPassFile)

View file

@ -50,7 +50,7 @@ func test{{$txt.LocalTable.NameGo}}OneToOne{{$txt.ForeignTable.NameGo}}Using{{$t
}
slice := {{$txt.LocalTable.NameGo}}Slice{&local}
if err = local.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
if err = local.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
t.Fatal(err)
}
if local.R.{{$txt.Function.Name}} == nil {

View file

@ -87,7 +87,7 @@ func test{{$txt.LocalTable.NameGo}}ToMany{{$txt.Function.Name}}(t *testing.T) {
}
slice := {{$txt.LocalTable.NameGo}}Slice{&a}
if err = a.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
if err = a.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
t.Fatal(err)
}
if got := len(a.R.{{$txt.Function.Name}}); got != 2 {

View file

@ -13,10 +13,10 @@ func test{{$txt.LocalTable.NameGo}}ToOne{{$txt.ForeignTable.NameGo}}Using{{$txt.
var foreign {{$txt.ForeignTable.NameGo}}
seed := randomize.NewSeed()
if err := randomize.Struct(seed, &local, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
if err := randomize.Struct(seed, &local, {{$varNameSingular}}DBTypes, {{if .Nullable}}true{{else}}false{{end}}, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
t.Errorf("Unable to randomize {{$txt.LocalTable.NameGo}} struct: %s", err)
}
if err := randomize.Struct(seed, &foreign, {{$foreignVarNameSingular}}DBTypes, true, {{$foreignVarNameSingular}}ColumnsWithDefault...); err != nil {
if err := randomize.Struct(seed, &foreign, {{$foreignVarNameSingular}}DBTypes, {{if .ForeignColumnNullable}}true{{else}}false{{end}}, {{$foreignVarNameSingular}}ColumnsWithDefault...); err != nil {
t.Errorf("Unable to randomize {{$txt.ForeignTable.NameGo}} struct: %s", err)
}
@ -50,7 +50,7 @@ func test{{$txt.LocalTable.NameGo}}ToOne{{$txt.ForeignTable.NameGo}}Using{{$txt.
}
slice := {{$txt.LocalTable.NameGo}}Slice{&local}
if err = local.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
if err = local.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
t.Fatal(err)
}
if local.R.{{$txt.Function.Name}} == nil {

View file

@ -143,5 +143,5 @@ func validateConfig(driverName string) error {
).Check()
}
return errors.New("not a valid driver name")
return errors.Err("not a valid driver name")
}