Compare commits


38 commits

Author SHA1 Message Date
Alex Grintsvayg
e3c6bfd745 use interface to enable custom Tx types 2017-09-02 12:03:40 -04:00
Aaron L
86c580f537 Bump version to v2.5.1 2017-08-07 15:31:23 -07:00
Aaron L
040f9e41be Fix randomization of enum values
- Fix #179
2017-08-07 15:29:21 -07:00
Aaron L
50b854ef83 Merge branch 'ceshihao/fix-signed-bigint' 2017-08-07 15:22:20 -07:00
ceshihao
f0f386e97b fix null.Int64 when it is signed 2017-08-03 18:19:35 +08:00
Aaron L
9dd03ac6a8 Amend license 2017-07-30 20:35:06 -07:00
Aaron L
82bffe9144 Adjust import paths 2017-07-30 20:34:54 -07:00
Aaron L
c43e856136 Merge branch 'tchssk/readme-find' 2017-07-13 22:29:10 -07:00
Taichi Sasaki
ca4dc4433b Fix examples of Find in README 2017-07-06 07:05:32 +09:00
Aaron L
0b027fa01c Fix hook documentation to include error return
Fix #171
2017-07-02 11:16:21 -07:00
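A brief note on the hook shape this documentation fix describes: generated hooks take the executor and the model and return an error, so a failing hook can abort the operation (see the `Hook func(boil.Executor, *Model) error` template type later in this diff). A minimal sketch, assuming a hypothetical generated `Pilot` model (the real type comes from your generated models package):

```go
package main

import (
	"fmt"

	"github.com/volatiletech/sqlboiler/boil"
)

// Pilot stands in for a generated model in this sketch.
type Pilot struct{ Name string }

// beforeInsert matches the documented hook signature: it receives the
// executor and the model and returns an error to abort the insert.
func beforeInsert(exec boil.Executor, p *Pilot) error {
	if p.Name == "" {
		return fmt.Errorf("pilot name must not be empty")
	}
	return nil
}

func main() {
	fmt.Println(beforeInsert(nil, &Pilot{Name: "Sam"})) // prints <nil>
}
```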
Patrick L. O'Brien
e96316501c Update logo 2017-06-29 01:44:44 +10:00
Patrick L. O'Brien
b27796cb28 Update logo 2017-06-29 01:42:07 +10:00
Aaron L
cc47da44fb Amend readme for constants 2017-06-27 21:10:47 -07:00
Aaron L
82e14d2e1a Bump version 2.5.0 2017-06-27 20:59:25 -07:00
Aaron L
229c6ebc35 Merge branch 'jfernstad/fix-area-plural-issue' into dev 2017-06-27 20:58:07 -07:00
Joakim Fernstad
6022d471e3 Fix area plural issue, add testcase
- Fix #168
2017-06-27 12:56:13 +02:00
Aaron L
e58ab28787 Merge branch 'master' into dev 2017-06-26 21:19:59 -07:00
Guy Tish
f5e53ac52b Added table columns and table names as anonymous struct 2017-06-26 14:14:16 +03:00
Aaron L
c00ebe1911 Correct nullability for tests in to_one
- Use the nullability of the fkey column in question to determine
  nullability for the entire struct to make things easy, otherwise
  we'd have to pluck them out one at a time. This makes the tests pass
  instead of failing sporadically.
- Fix #160
2017-06-14 21:16:31 -07:00
Aaron L
35563d1bdf Fix selecting in one-to-many relationships
- This fix checks the query that's being prepared for any select
  statements; if there are none, it adds one so the query does what's
  expected.
- Fix #159
2017-06-14 20:53:39 -07:00
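A minimal sketch of the approach described in the commit above, using the exported `GetSelect`/`SetSelect` helpers from the `queries` package (the helper name and fallback columns are illustrative, not the exact generated code):

```go
package main

import (
	"fmt"

	"github.com/volatiletech/sqlboiler/queries"
)

// ensureSelect illustrates the idea: if the query being prepared has no
// select columns yet, add a fallback list so the relationship query selects
// what the caller expects.
func ensureSelect(q *queries.Query, fallback []string) {
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, fallback)
	}
}

func main() {
	fmt.Println("sketch only; in the real fix this check happens during query building")
}
```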
Aaron L
1e9753091b Merge branch 'randimize-enums-with-single-value' into dev 2017-06-14 20:23:40 -07:00
Genadi Samokovarov
64206cfe07 Fix randomization for enums with single value
Hey there,

I hit a bug that panicked the tests with:

    panic: invalid argument to Intn

The problem happened because I have a Postgres `ENUM` with a single
value, which caused a `rand.Intn(0)` call, and `rand.Intn` panics on zero.

See: https://golang.org/src/math/rand/rand.go?s=4112:4142#L129
2017-06-14 16:53:55 +03:00
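A minimal reproduction of the panic described above and the guard that avoids it (illustrative code, not the exact change; the real fix indexes with a seeded counter modulo the number of values):

```go
package main

import (
	"fmt"
	"math/rand"
)

// pickEnumValue returns a random value from a parsed enum. rand.Intn panics
// when its argument is 0, and rand.Intn(len(vals)-1) can never return the
// last index, so guard the empty case and index over the full length.
func pickEnumValue(vals []string) (string, error) {
	if len(vals) == 0 {
		return "", fmt.Errorf("no enum values to choose from")
	}
	return vals[rand.Intn(len(vals))], nil
}

func main() {
	v, err := pickEnumValue([]string{"monday"}) // single-value enum no longer panics
	fmt.Println(v, err)
}
```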
Aaron L
553cc9f680 Odd circleci fix for syntax? 2017-06-08 20:58:31 -07:00
Aaron L
c69639e6d9 One more fix to circle file 2017-06-08 20:56:11 -07:00
Aaron L
226517279f Disable MSSQL tests until they run again
- The mssql image seems to have gotten an update that causes our tests
  to freeze instead of run; it seems to be related to the create database
  calls, but I haven't had time to look into it.
- Remove commented old code from circle file
2017-06-08 20:53:53 -07:00
Aaron L
9107c9dea1 Another instance of helper slices in eager code
- Another instance of helper slice types seeping in. Convert the slice
  when it's found as part of a singular's R struct.
- Fix #158
2017-06-08 20:50:35 -07:00
Aaron L
2168a70c4f Stop using aliases in the relationship select
- This caused issues with MySQL, which doesn't understand the syntax:
  "delete from x as y where y.id = ?"
2017-06-04 12:29:04 -07:00
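A minimal illustration of the syntax difference behind the change above (table and column names are made up for the example):

```go
package main

import "fmt"

func main() {
	// Aliased form that MySQL rejects for DELETE statements:
	aliased := `DELETE FROM pilots AS p WHERE p.id = ?`
	// Alias-free form that works across the supported databases:
	plain := `DELETE FROM pilots WHERE pilots.id = ?`
	fmt.Println(aliased, "->", plain)
}
```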
Aaron L
466ea1f55f Fix another occurrence of XSlice cast failures
- When eager loading, we pull the values back out of the R struct; at
  this point it's possible to get the "poisoned" XSlice type that
  nothing can deal with again.
2017-06-04 12:03:57 -07:00
Aaron L
833fd04c6b Fix random typo 2017-06-04 11:19:03 -07:00
Aaron L
7e5374eb22 Bump SQL Server CI image version 2017-05-15 22:25:15 -07:00
Aaron L
696d572164 Merge branch 'viper' into dev 2017-05-15 22:01:44 -07:00
Aaron L
4896aae393 Merge branch 'sql-syntax-highlighting' into dev 2017-05-15 22:01:13 -07:00
Alex Macleod
9700787bf9 Highlight Enum SQL 2017-05-12 13:57:03 +01:00
Alex Macleod
7d38cba663 Fix viper link 2017-05-12 13:46:55 +01:00
Aaron L
17f2ec5108 Bump to 2.4.0 2017-05-08 19:19:12 -07:00
Aaron L
1d29e337e3 Merge branch 'guns/sort-tables' into dev 2017-05-08 19:14:25 -07:00
Aaron L
8e8100f5f0 Merge branch 'lbryio/mysql-uint-fix' into dev
- Fix #146
2017-05-08 19:14:02 -07:00
guns
e22d6cf77b Sort Table slice to ensure stable template output 2017-05-03 04:04:54 -05:00
66 changed files with 500 additions and 1018 deletions

View file

@ -13,14 +13,14 @@ jobs:
environment:
MYSQL_ROOT_PASSWORD: mysqlpassword
- image: microsoft/mssql-server-linux:ctp1-4
environment:
ACCEPT_EULA: 'Y'
SA_PASSWORD: 'R@@tr@@t1234'
# - image: microsoft/mssql-server-linux:ctp2-0
# environment:
# ACCEPT_EULA: 'Y'
# SA_PASSWORD: 'R@@tr@@t1234'
environment:
GOPATH: /go
ROOTPATH: /go/src/github.com/vattle/sqlboiler
ROOTPATH: /go/src/github.com/volatiletech/sqlboiler
steps:
- run:
@ -72,31 +72,31 @@ jobs:
sleep 1
done
- run:
name: Wait for MSSQL
command: >
for i in `seq 30`; do
echo "Waiting for mssql"
set +o errexit
sqlcmd -H localhost -U sa -P R@@tr@@t1234 -Q "select * from information_schema.tables;" > /dev/null
status=$?
set -o errexit
if [ $status -eq 0 ]; then
break
fi
if [ $i -eq 30 ]; then
echo "Failed to wait for mssql"
exit 1
fi
sleep 1
done
# - run:
# name: Wait for MSSQL
# command: >
# for i in `seq 30`; do
# echo "Waiting for mssql"
# set +o errexit
# sqlcmd -H localhost -U sa -P R@@tr@@t1234 -Q "select * from information_schema.tables;" > /dev/null
# status=$?
# set -o errexit
# if [ $status -eq 0 ]; then
# break
# fi
# if [ $i -eq 30 ]; then
# echo "Failed to wait for mssql"
# exit 1
# fi
# sleep 1
# done
- run:
name: Make GOPATH
command: mkdir -p /go/src/github.com/vattle/sqlboiler
command: mkdir -p /go/src/github.com/volatiletech/sqlboiler
- checkout:
path: /go/src/github.com/vattle/sqlboiler
path: /go/src/github.com/volatiletech/sqlboiler
- run:
name: Create PSQL DB
@ -108,17 +108,17 @@ jobs:
command: |
mysql --host localhost --execute 'create database sqlboiler;'
mysql --host localhost --database sqlboiler < $ROOTPATH/testdata/mysql_test_schema.sql
- run:
name: Create MSSQL DB
command: |
sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i $ROOTPATH/testdata/mssql_test_schema.sql
# - run:
# name: Create MSSQL DB
# command: |
# sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
# sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i $ROOTPATH/testdata/mssql_test_schema.sql
- run:
name: Build SQLBoiler
command: |
cd $ROOTPATH; go get -v -t
cd $ROOTPATH; go build -v github.com/vattle/sqlboiler
cd $ROOTPATH; go build -v github.com/volatiletech/sqlboiler
- run:
name: 'Configure SQLBoiler: PSQL'
@ -126,9 +126,9 @@ jobs:
- run:
name: 'Configure SQLBoiler: MySQL'
command: echo -e '[mysql]\nhost="localhost"\nport=3306\nuser="root"\npass="mysqlpassword"\ndbname="sqlboiler"\nsslmode="false"\n' >> $ROOTPATH/sqlboiler.toml
- run:
name: 'Configure SQLBoiler: MSSQL'
command: echo -e '[mssql]\nhost="localhost"\nport=1433\nuser="sa"\npass="R@@tr@@t1234"\ndbname="sqlboiler"\nsslmode="disable"\n' >> $ROOTPATH/sqlboiler.toml
# - run:
# name: 'Configure SQLBoiler: MSSQL'
# command: echo -e '[mssql]\nhost="localhost"\nport=1433\nuser="sa"\npass="R@@tr@@t1234"\ndbname="sqlboiler"\nsslmode="disable"\n' >> $ROOTPATH/sqlboiler.toml
- run:
name: 'Generate: PSQL'
@ -136,9 +136,9 @@ jobs:
- run:
name: 'Generate: MySQL'
command: cd $ROOTPATH; ./sqlboiler -o mysql mysql
- run:
name: 'Generate: MSSQL'
command: cd $ROOTPATH; ./sqlboiler -o mssql mssql
# - run:
# name: 'Generate: MSSQL'
# command: cd $ROOTPATH; ./sqlboiler -o mssql mssql
- run:
name: Download generated and test deps
@ -150,7 +150,7 @@ jobs:
name: Run Tests
command: |
cd $ROOTPATH
cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
#cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
go test -v -race ./... | tee test_out.txt
- run:
@ -161,48 +161,3 @@ jobs:
- store_test_results:
path: test_results
#test:
# pre:
# - echo -e "[postgres]\nhost=\"localhost\"\nport=5432\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\n" > sqlboiler.toml
# - createdb -U ubuntu sqlboiler
# - psql -U ubuntu sqlboiler < ./testdata/postgres_test_schema.sql
#
# - echo -e "[mysql]\nhost=\"localhost\"\nport=3306\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\nsslmode=\"false\"\n" >> sqlboiler.toml
# - echo "create database sqlboiler;" | mysql -u ubuntu
# - mysql -u ubuntu sqlboiler < ./testdata/mysql_test_schema.sql
#
# - echo -e "[mssql]\nhost=\"localhost\"\nport=1433\nuser=\"sa\"\ndbname=\"sqlboiler\"\nsslmode=\"disable\"\n" >> sqlboiler.toml
# - docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=R@@tr@@t1234' -p 1433:1433 -d --name mssql microsoft/mssql-server-linux
# - sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
# - sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i ./testdata/mssql_test_schema.sql
#
# - ./sqlboiler -o postgres postgres
# - ./sqlboiler -o mysql mysql
# - ./sqlboiler -o mssql mssql
# - cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
# override:
# - go test -v -race ./... > $CIRCLE_ARTIFACTS/gotest.txt
# post:
# - cat $CIRCLE_ARTIFACTS/gotest.txt | go-junit-report > $CIRCLE_TEST_REPORTS/junit.xml
#
#machine:
# environment:
# GODIST: go1.7.linux-amd64.tar.gz
# PATH: /home/ubuntu/.go_workspace/bin:/usr/local/go/bin:/home/ubuntu/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/opt/mssql-tools/bin
# post:
# - mkdir -p download
# - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST
# - sudo rm -rf /usr/local/go
# - sudo tar -C /usr/local -xzf download/$GODIST
#
#dependencies:
# pre:
# - mkdir -p /home/ubuntu/.go_workspace/src/github.com/jstemmer
# - go get -u github.com/jstemmer/go-junit-report
#
# - curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
# - curl https://packages.microsoft.com/config/ubuntu/14.04/prod.list | sudo tee /etc/apt/sources.list.d/msprod.list
# - sudo apt-get update; sudo apt-get install mssql-tools unixodbc-dev
# - docker pull microsoft/mssql-server-linux
# cache_directories:
# - ~/download

1
.gitignore vendored
View file

@ -4,4 +4,3 @@ sqlboiler.toml
models/
testschema.sql
.cover
/.idea

20
LICENSE
View file

@ -1,18 +1,18 @@
Copyright (c) 2016 The SQLBoiler Authors. All rights reserved.
Copyright (c) 2017 Volatile Technologies Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Vattle nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Vattle or Volatile Technologies Inc. nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT

View file

@ -1,12 +1,12 @@
![sqlboiler logo](http://i.imgur.com/NJtCT7y.png)
![sqlboiler logo](http://i.imgur.com/ilkv0r9.png)
[![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/vattle/sqlboiler/blob/master/LICENSE)
[![GoDoc](https://godoc.org/github.com/vattle/sqlboiler?status.svg)](https://godoc.org/github.com/vattle/sqlboiler)
[![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/volatiletech/sqlboiler/blob/master/LICENSE)
[![GoDoc](https://godoc.org/github.com/volatiletech/sqlboiler?status.svg)](https://godoc.org/github.com/volatiletech/sqlboiler)
[![Mail](https://img.shields.io/badge/mail%20list-sqlboiler-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler)
[![Mail-Annc](https://img.shields.io/badge/mail%20list-sqlboiler--announce-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler-announce)
[![Slack](https://img.shields.io/badge/slack-%23general-lightgrey.svg)](https://sqlboiler.from-the.cloud)
[![CircleCI](https://circleci.com/gh/vattle/sqlboiler.svg?style=shield)](https://circleci.com/gh/vattle/sqlboiler)
[![Go Report Card](https://goreportcard.com/badge/vattle/sqlboiler)](http://goreportcard.com/report/vattle/sqlboiler)
[![CircleCI](https://circleci.com/gh/volatiletech/sqlboiler.svg?style=shield)](https://circleci.com/gh/volatiletech/sqlboiler)
[![Go Report Card](https://goreportcard.com/badge/volatiletech/sqlboiler)](http://goreportcard.com/report/volatiletech/sqlboiler)
SQLBoiler is a tool to generate a Go ORM tailored to your database schema.
@ -76,6 +76,7 @@ Table of Contents
* [Reload](#reload)
* [Exists](#exists)
* [Enums](#enums)
* [Constants](#constants)
* [FAQ](#faq)
* [Won't compiling models for a huge database be very slow?](#wont-compiling-models-for-a-huge-database-be-very-slow)
* [Missing imports for generated package](#missing-imports-for-generated-package)
@ -122,7 +123,7 @@ For a comprehensive list of available operations and examples please see [Featur
```go
import (
// Import this so we don't have to use qm.Limit etc.
. "github.com/vattle/sqlboiler/queries/qm"
. "github.com/volatiletech/sqlboiler/queries/qm"
)
// Open handle to database like normal
@ -214,12 +215,12 @@ fmt.Println(len(users.R.FavoriteMovies))
#### Download
```shell
go get -u -t github.com/vattle/sqlboiler
go get -u -t github.com/volatiletech/sqlboiler
```
#### Configuration
Create a configuration file. Because the project uses [viper](github.com/spf13/viper), TOML, JSON and YAML
Create a configuration file. Because the project uses [viper](https://github.com/spf13/viper), TOML, JSON and YAML
are all supported. Environment variables are also able to be used.
We will assume TOML for the rest of the documentation.
@ -295,7 +296,7 @@ generate models for, we can invoke the sqlboiler command line utility.
```text
SQL Boiler generates a Go ORM from template files, tailored to your database schema.
Complete documentation is available at http://github.com/vattle/sqlboiler
Complete documentation is available at http://github.com/volatiletech/sqlboiler
Usage:
sqlboiler [flags] <driver>
@ -625,7 +626,7 @@ when performing query building. Here is a list of all of your generated query mo
```go
// Dot import so we can access query mods directly instead of prefixing with "qm."
import . "github.com/vattle/sqlboiler/queries/qm"
import . "github.com/volatiletech/sqlboiler/queries/qm"
// Use a raw query against a generated struct (Pilot in this example)
// If this query mod exists in your call, it will override the others.
@ -736,7 +737,7 @@ in combination with your own custom, non-generated model.
### Binding
For a comprehensive ruleset for `Bind()` you can refer to our [godoc](https://godoc.org/github.com/vattle/sqlboiler/queries#Bind).
For a comprehensive ruleset for `Bind()` you can refer to our [godoc](https://godoc.org/github.com/volatiletech/sqlboiler/queries#Bind).
The `Bind()` [Finisher](#finisher) allows the results of a query built with
the [Raw SQL](#raw-query) method or the [Query Builder](#query-building) methods to be bound
@ -990,7 +991,7 @@ tx.Rollback()
```
It's also worth noting that there's a way to take advantage of `boil.SetDB()`
by using the [boil.Begin()](https://godoc.org/github.com/vattle/sqlboiler/boil#Begin) function.
by using the [boil.Begin()](https://godoc.org/github.com/volatiletech/sqlboiler/boil#Begin) function.
This opens a transaction using the globally stored database.
### Debug Logging
@ -1026,10 +1027,10 @@ Find is used to find a single row by primary key:
```go
// Retrieve pilot with all columns filled
pilot, err := models.PilotFind(db, 1)
pilot, err := models.FindPilot(db, 1)
// Retrieve a subset of column values
jet, err := models.JetFind(db, 1, "name", "color")
jet, err := models.FindJet(db, 1, "name", "color")
```
### Insert
@ -1192,7 +1193,7 @@ exists, err := models.Pilots(db, Where("id=?", 5)).Exists()
If your MySQL or Postgres tables use enums we will generate constants that hold their values
that you can use in your queries. For example:
```
```sql
CREATE TYPE workday AS ENUM('monday', 'tuesday', 'wednesday', 'thursday', 'friday');
CREATE TABLE event_one (
@ -1224,6 +1225,41 @@ still be able to use your generated library, and it will still work as expected,
to get the tests to pass in this event is to either use a parsable enum value or use a regular column
instead of an enum.
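A hedged sketch of the kind of constants generated for the `workday` enum above; the identifier names and the query usage below are assumptions, not verbatim generator output:
```go
// Hypothetical shape of the generated enum constants (names assumed):
const (
	WorkdayMonday    = "monday"
	WorkdayTuesday   = "tuesday"
	WorkdayWednesday = "wednesday"
	WorkdayThursday  = "thursday"
	WorkdayFriday    = "friday"
)

// Assumed usage in a query (column name "day" is illustrative):
// events, err := models.EventOnes(db, qm.Where("day = ?", models.WorkdayMonday)).All()
```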
### Constants
The models package will also contain some structs that contain all of the table and column
names harvested from the database at generation time.
For table names they're generated under `models.TableNames`:
```go
// Generated code from models package
var TableNames = struct {
Messages string
Purchases string
}{
Messages: "messages",
Purchases: "purchases",
}
// Usage example:
fmt.Println(models.TableNames.Messages)
```
```go
// Generated code from models package
var MessageColumns = struct {
ID string
PurchaseID string
}{
ID: "id",
PurchaseID: "purchase_id",
}
// Usage example:
fmt.Println(models.MessageColumns.ID)
```
## FAQ
#### Won't compiling models for a huge database be very slow?
@ -1262,12 +1298,12 @@ You *must* use a DSN flag in MySQL connections, see: [Requirements](#requirement
#### Where is the homepage?
The homepage for the [SQLBoiler](https://github.com/vattle/sqlboiler) [Golang ORM](https://github.com/vattle/sqlboiler)
generator is located at: https://github.com/vattle/sqlboiler
The homepage for the [SQLBoiler](https://github.com/volatiletech/sqlboiler) [Golang ORM](https://github.com/volatiletech/sqlboiler)
generator is located at: https://github.com/volatiletech/sqlboiler
## Benchmarks
If you'd like to run the benchmarks yourself check out our [boilbench](https://github.com/vattle/boilbench) repo.
If you'd like to run the benchmarks yourself check out our [boilbench](https://github.com/volatiletech/boilbench) repo.
```bash
go test -bench . -benchmem

View file

@ -3,7 +3,7 @@ package bdb
import (
"strings"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/strmangle"
)
// Column holds information about a database column.

View file

@ -1,8 +1,8 @@
package drivers
import (
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/strmangle"
)
// MockDriver is a mock implementation of the bdb driver Interface
@ -58,14 +58,6 @@ func (m *MockDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
}[tableName], nil
}
func (m *MockDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, nil
}
func (m *MockDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", nil
}
// ForeignKeyInfo returns a list of mock foreignkeys
func (m *MockDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
return map[string][]bdb.ForeignKey{

View file

@ -7,8 +7,8 @@ import (
"strings"
_ "github.com/denisenkom/go-mssqldb"
"github.com/lbryio/sqlboiler/bdb"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
)
// MSSQLDriver holds the database connection string and a handle
@ -241,14 +241,6 @@ func (m *MSSQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
return pkey, nil
}
func (m *MSSQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, errors.New("not implemented")
}
func (m *MSSQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", errors.New("not implemented")
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (m *MSSQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey

View file

@ -3,13 +3,12 @@ package drivers
import (
"database/sql"
"fmt"
"sort"
"strconv"
"strings"
"github.com/go-sql-driver/mysql"
"github.com/lbryio/sqlboiler/bdb"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
)
// TinyintAsBool is a global that is set from main.go if a user specifies
@ -53,7 +52,6 @@ func MySQLBuildQueryString(user, pass, dbname, host string, port int, sslmode st
}
config.Addr += ":" + strconv.Itoa(port)
config.TLSConfig = sslmode
config.AllowNativePasswords = true
// MySQL is a bad, and by default reads date/datetime into a []byte
// instead of a time.Time. Tell it to stop being a bad.
@ -234,79 +232,6 @@ func (m *MySQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
return pkey, nil
}
// UniqueKeyInfo retrieves the unique keys for a given table name.
func (m *MySQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
var ukeys []bdb.UniqueKey
query := `
select tc.table_name, tc.constraint_name, GROUP_CONCAT(kcu.column_name)
from information_schema.table_constraints tc
left join information_schema.key_column_usage kcu on tc.constraint_name = kcu.constraint_name and tc.table_name = kcu.table_name and tc.table_schema = kcu.table_schema
where tc.table_schema = ? and tc.table_name = ? and tc.constraint_type = "UNIQUE"
group by tc.table_name, tc.constraint_name
`
var rows *sql.Rows
var err error
if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
return nil, err
}
for rows.Next() {
var ukey bdb.UniqueKey
var columns string
//ukey.Table = tableName
err = rows.Scan(&ukey.Table, &ukey.Name, &columns)
if err != nil {
return nil, err
}
ukey.Columns = strings.Split(columns, ",")
sort.Strings(ukey.Columns)
ukeys = append(ukeys, ukey)
}
if err = rows.Err(); err != nil {
return nil, err
}
return ukeys, nil
}
// AutoincrementInfo retrieves the autoincrement column for a given table name, if one exists.
func (m *MySQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
query := `
select column_name
from information_schema.columns
where table_schema = ? and table_name = ? and extra like "%auto_increment%"
`
var rows *sql.Rows
var err error
if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
return "", err
}
for rows.Next() {
var column string
err = rows.Scan(&column)
if err != nil {
return "", err
}
return column, nil
}
if err = rows.Err(); err != nil {
return "", err
}
return "", nil
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (m *MySQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey

View file

@ -8,10 +8,10 @@ import (
// Side-effect import sql driver
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
_ "github.com/lib/pq"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/strmangle"
)
// PostgresDriver holds the database connection string and a handle
@ -266,14 +266,6 @@ func (p *PostgresDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryK
return pkey, nil
}
func (p *PostgresDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
return []bdb.UniqueKey{}, errors.New("not implemented")
}
func (p *PostgresDriver) AutoincrementInfo(schema, tableName string) (string, error) {
return "", errors.New("not implemented")
}
// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (p *PostgresDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
var fkeys []bdb.ForeignKey

View file

@ -1,7 +1,11 @@
// Package bdb supplies the sql(b)oiler (d)ata(b)ase abstractions.
package bdb
import "github.com/pkg/errors"
import (
"sort"
"github.com/pkg/errors"
)
// Interface for a database driver. Functionality required to support a specific
// database type (eg, MySQL, Postgres etc.)
@ -9,8 +13,6 @@ type Interface interface {
TableNames(schema string, whitelist, blacklist []string) ([]string, error)
Columns(schema, tableName string) ([]Column, error)
PrimaryKeyInfo(schema, tableName string) (*PrimaryKey, error)
UniqueKeyInfo(schema, tableName string) ([]UniqueKey, error)
AutoincrementInfo(schema, tableName string) (string, error)
ForeignKeyInfo(schema, tableName string) ([]ForeignKey, error)
// TranslateColumnType takes a Database column type and returns a go column type.
@ -47,6 +49,8 @@ func Tables(db Interface, schema string, whitelist, blacklist []string) ([]Table
return nil, errors.Wrap(err, "unable to get table names")
}
sort.Strings(names)
var tables []Table
for _, name := range names {
t := Table{
@ -65,18 +69,10 @@ func Tables(db Interface, schema string, whitelist, blacklist []string) ([]Table
return nil, errors.Wrapf(err, "unable to fetch table pkey info (%s)", name)
}
if t.UKeys, err = db.UniqueKeyInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table ukey info (%s)", name)
}
if t.FKeys, err = db.ForeignKeyInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table fkey info (%s)", name)
}
if t.AutoIncrementColumn, err = db.AutoincrementInfo(schema, name); err != nil {
return nil, errors.Wrapf(err, "unable to fetch table autoincrement info (%s)", name)
}
setIsJoinTable(&t)
tables = append(tables, t)

View file

@ -3,7 +3,7 @@ package bdb
import (
"testing"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/strmangle"
)
type testMockDriver struct{}
@ -124,6 +124,14 @@ func TestTables(t *testing.T) {
t.Errorf("Expected len 7, got: %d\n", len(tables))
}
prev := ""
for i := range tables {
if prev >= tables[i].Name {
t.Error("tables are not sorted")
}
prev = tables[i].Name
}
pilots := GetTable(tables, "pilots")
if len(pilots.Columns) != 2 {
t.Error()

View file

@ -8,13 +8,6 @@ type PrimaryKey struct {
Columns []string
}
// UniqueKey represents a unique key constraint in a database
type UniqueKey struct {
Table string
Name string
Columns []string
}
// ForeignKey represents a foreign key constraint in a database
type ForeignKey struct {
Table string

View file

@ -8,12 +8,9 @@ type Table struct {
// For dbs with real schemas, like Postgres.
// Example value: "schema_name"."table_name"
SchemaName string
Columns []Column
AutoIncrementColumn string
PKey *PrimaryKey
UKeys []UniqueKey
FKeys []ForeignKey
IsJoinTable bool

View file

@ -22,6 +22,7 @@ type Beginner interface {
Begin() (Transactor, error)
}
// SQLBeginner begins transactions (non-interface return type)
type SQLBeginner interface {
Begin() (*sql.Tx, error)
}

23
boil/errors.go Normal file
View file

@ -0,0 +1,23 @@
package boil
type boilErr struct {
error
}
// WrapErr wraps err in a boilErr
func WrapErr(err error) error {
return boilErr{
error: err,
}
}
// Error returns the underlying error string
func (e boilErr) Error() string {
return e.error.Error()
}
// IsBoilErr checks if err is a boilErr
func IsBoilErr(err error) bool {
_, ok := err.(boilErr)
return ok
}

24
boil/errors_test.go Normal file
View file

@ -0,0 +1,24 @@
package boil
import (
"errors"
"testing"
)
func TestErrors(t *testing.T) {
t.Parallel()
err := errors.New("test error")
if IsBoilErr(err) == true {
t.Errorf("Expected false")
}
err = WrapErr(errors.New("test error"))
if err.Error() != "test error" {
t.Errorf(`Expected "test error", got %v`, err.Error())
}
if IsBoilErr(err) != true {
t.Errorf("Expected true")
}
}

View file

@ -13,10 +13,10 @@ import (
"text/template"
"github.com/pkg/errors"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb/drivers"
"github.com/lbryio/sqlboiler/queries"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/bdb/drivers"
"github.com/volatiletech/sqlboiler/queries"
"github.com/volatiletech/sqlboiler/strmangle"
)
const (
@ -267,7 +267,7 @@ func (s *State) processReplacements() error {
return nil
}
var basePackage = "github.com/lbryio/sqlboiler"
var basePackage = "github.com/volatiletech/sqlboiler"
func getBasePath(baseDirConfig string) (string, error) {
if len(baseDirConfig) > 0 {

View file

@ -6,7 +6,7 @@ import (
"sort"
"strings"
"github.com/lbryio/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/bdb"
)
// imports defines the optional standard imports and
@ -170,33 +170,26 @@ func newImporter() importer {
`"time"`,
},
thirdParty: importList{
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/queries"`,
`"github.com/lbryio/sqlboiler/queries/qm"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
`"github.com/pkg/errors"`,
`"github.com/volatiletech/sqlboiler/boil"`,
`"github.com/volatiletech/sqlboiler/queries"`,
`"github.com/volatiletech/sqlboiler/queries/qm"`,
`"github.com/volatiletech/sqlboiler/strmangle"`,
},
}
imp.Singleton = mapImports{
"boil_queries": imports{
standard: importList{
`"fmt"`,
`"strings"`,
},
"boil_queries": {
thirdParty: importList{
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/queries"`,
`"github.com/lbryio/sqlboiler/queries/qm"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
`"github.com/volatiletech/sqlboiler/boil"`,
`"github.com/volatiletech/sqlboiler/queries"`,
`"github.com/volatiletech/sqlboiler/queries/qm"`,
},
},
"boil_types": {
thirdParty: importList{
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
`"github.com/pkg/errors"`,
`"github.com/volatiletech/sqlboiler/strmangle"`,
},
},
}
@ -208,9 +201,9 @@ func newImporter() importer {
`"testing"`,
},
thirdParty: importList{
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/lbryio/sqlboiler/strmangle"`,
`"github.com/volatiletech/sqlboiler/boil"`,
`"github.com/volatiletech/sqlboiler/randomize"`,
`"github.com/volatiletech/sqlboiler/strmangle"`,
},
}
@ -228,9 +221,9 @@ func newImporter() importer {
},
thirdParty: importList{
`"github.com/kat-co/vala"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/volatiletech/sqlboiler/boil"`,
},
},
"boil_queries_test": {
@ -243,7 +236,7 @@ func newImporter() importer {
`"regexp"`,
},
thirdParty: importList{
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/volatiletech/sqlboiler/boil"`,
},
},
"boil_suites_test": {
@ -266,11 +259,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`_ "github.com/lib/pq"`,
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/volatiletech/sqlboiler/bdb/drivers"`,
`"github.com/volatiletech/sqlboiler/randomize"`,
`_ "github.com/lib/pq"`,
},
},
"mysql": {
@ -285,11 +278,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`_ "github.com/go-sql-driver/mysql"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/volatiletech/sqlboiler/bdb/drivers"`,
`"github.com/volatiletech/sqlboiler/randomize"`,
`_ "github.com/go-sql-driver/mysql"`,
},
},
"mssql": {
@ -302,11 +295,11 @@ func newImporter() importer {
`"strings"`,
},
thirdParty: importList{
`_ "github.com/denisenkom/go-mssqldb"`,
`"github.com/lbryio/lbry.go/v2/extras/errors"`,
`"github.com/lbryio/sqlboiler/bdb/drivers"`,
`"github.com/lbryio/sqlboiler/randomize"`,
`"github.com/pkg/errors"`,
`"github.com/spf13/viper"`,
`"github.com/volatiletech/sqlboiler/bdb/drivers"`,
`"github.com/volatiletech/sqlboiler/randomize"`,
`_ "github.com/denisenkom/go-mssqldb"`,
},
},
}
@ -316,79 +309,79 @@ func newImporter() importer {
// TranslateColumnType to see the type assignments.
imp.BasedOnType = mapImports{
"null.Float32": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Float64": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Int": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Int8": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Int16": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Int32": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Int64": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Uint": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Uint8": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Uint16": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Uint32": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Uint64": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.String": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Bool": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Time": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.JSON": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"null.Bytes": {
thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
thirdParty: importList{`"gopkg.in/volatiletech/null.v6"`},
},
"time.Time": {
standard: importList{`"time"`},
},
"types.JSON": {
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
},
"types.BytesArray": {
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
},
"types.Int64Array": {
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
},
"types.Float64Array": {
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
},
"types.BoolArray": {
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
},
"types.StringArray": {
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
},
"types.Hstore": {
thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
thirdParty: importList{`"github.com/volatiletech/sqlboiler/types"`},
},
}

View file

@ -6,7 +6,7 @@ import (
"testing"
"github.com/pkg/errors"
"github.com/lbryio/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/bdb"
)
func TestImportsSort(t *testing.T) {
@ -234,7 +234,7 @@ func TestCombineTypeImports(t *testing.T) {
`"fmt"`,
},
thirdParty: importList{
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/volatiletech/sqlboiler/boil"`,
},
}
@ -245,8 +245,8 @@ func TestCombineTypeImports(t *testing.T) {
`"time"`,
},
thirdParty: importList{
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
`"github.com/volatiletech/sqlboiler/boil"`,
`"gopkg.in/volatiletech/null.v6"`,
},
}
@ -280,8 +280,8 @@ func TestCombineTypeImports(t *testing.T) {
`"time"`,
},
thirdParty: importList{
`"github.com/lbryio/sqlboiler/boil"`,
`"github.com/lbryio/lbry.go/v2/extras/null"`,
`"github.com/volatiletech/sqlboiler/boil"`,
`"gopkg.in/volatiletech/null.v6"`,
},
}
@ -297,11 +297,11 @@ func TestCombineImports(t *testing.T) {
a := imports{
standard: importList{"fmt"},
thirdParty: importList{"github.com/lbryio/sqlboiler", "github.com/lbryio/lbry.go/v2/extras/null"},
thirdParty: importList{"github.com/volatiletech/sqlboiler", "gopkg.in/volatiletech/null.v6"},
}
b := imports{
standard: importList{"os"},
thirdParty: importList{"github.com/lbryio/sqlboiler"},
thirdParty: importList{"github.com/volatiletech/sqlboiler"},
}
c := combineImports(a, b)
@ -309,8 +309,8 @@ func TestCombineImports(t *testing.T) {
if c.standard[0] != "fmt" && c.standard[1] != "os" {
t.Errorf("Wanted: fmt, os got: %#v", c.standard)
}
if c.thirdParty[0] != "github.com/lbryio/sqlboiler" && c.thirdParty[1] != "github.com/lbryio/lbry.go/v2/extras/null" {
t.Errorf("Wanted: github.com/lbryio/sqlboiler, github.com/lbryio/lbry.go/v2/extras/null got: %#v", c.thirdParty)
if c.thirdParty[0] != "github.com/volatiletech/sqlboiler" && c.thirdParty[1] != "gopkg.in/volatiletech/null.v6" {
t.Errorf("Wanted: github.com/volatiletech/sqlboiler, gopkg.in/volatiletech/null.v6 got: %#v", c.thirdParty)
}
}

View file

@ -14,7 +14,7 @@ import (
"github.com/pkg/errors"
)
var noEditDisclaimer = []byte(`// This file is generated by SQLBoiler (https://github.com/lbryio/sqlboiler)
var noEditDisclaimer = []byte(`// This file is generated by SQLBoiler (https://github.com/volatiletech/sqlboiler)
// and is meant to be re-generated in place and/or deleted at any time.
// DO NOT EDIT

View file

@ -8,10 +8,10 @@ import (
"strings"
"text/template"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/queries"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/queries"
"github.com/volatiletech/sqlboiler/strmangle"
)
// templateData for sqlboiler templates

View file

@ -4,8 +4,8 @@ import (
"fmt"
"strings"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/strmangle"
)
// TxtToOne contains text that will be used by templates for a one-to-many or

View file

@ -5,8 +5,8 @@ import (
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/lbryio/sqlboiler/bdb"
"github.com/lbryio/sqlboiler/bdb/drivers"
"github.com/volatiletech/sqlboiler/bdb"
"github.com/volatiletech/sqlboiler/bdb/drivers"
)
func TestTxtsFromOne(t *testing.T) {

View file

@ -8,13 +8,13 @@ import (
"strings"
"github.com/kat-co/vala"
"github.com/lbryio/sqlboiler/bdb/drivers"
"github.com/lbryio/sqlboiler/boilingcore"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/volatiletech/sqlboiler/bdb/drivers"
"github.com/volatiletech/sqlboiler/boilingcore"
)
const sqlBoilerVersion = "2.4.0+lbry"
const sqlBoilerVersion = "2.5.1"
var (
cmdState *boilingcore.State
@ -62,7 +62,7 @@ func main() {
Use: "sqlboiler [flags] <driver>",
Short: "SQL Boiler generates an ORM tailored to your database schema.",
Long: "SQL Boiler generates a Go ORM from template files, tailored to your database schema.\n" +
`Complete documentation is available at http://github.com/lbryio/sqlboiler`,
`Complete documentation is available at http://github.com/volatiletech/sqlboiler`,
Example: `sqlboiler postgres`,
PreRunE: preRun,
RunE: run,

View file

@ -5,9 +5,9 @@ import (
"reflect"
"strings"
"github.com/lbryio/sqlboiler/boil"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/boil"
"github.com/volatiletech/sqlboiler/strmangle"
)
type loadRelationshipState struct {
@ -259,14 +259,10 @@ func collectLoaded(key string, loadingFrom reflect.Value) (reflect.Value, bindKi
for {
switch bkind {
case kindStruct:
if !loadedObject.IsNil() {
collection = reflect.Append(collection, loadedObject)
}
case kindPtrSliceStruct:
if !loadedObject.IsNil() {
collection = reflect.AppendSlice(collection, loadedObject)
}
}
i++
if i >= lnFrom {

View file

@ -4,7 +4,7 @@ import (
"fmt"
"testing"
"github.com/lbryio/sqlboiler/boil"
"github.com/volatiletech/sqlboiler/boil"
)
var testEagerCounters struct {

View file

@ -4,7 +4,7 @@ import (
"fmt"
"reflect"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/strmangle"
)
// NonZeroDefaultSet returns the fields included in the

View file

@ -5,7 +5,7 @@ import (
"testing"
"time"
null "github.com/lbryio/lbry.go/v2/extras/null"
null "gopkg.in/volatiletech/null.v6"
)
type testObj struct {

View file

@ -1,6 +1,6 @@
package qm
import "github.com/lbryio/sqlboiler/queries"
import "github.com/volatiletech/sqlboiler/queries"
// QueryMod to modify the query object
type QueryMod func(q *queries.Query)
@ -8,10 +8,8 @@ type QueryMod func(q *queries.Query)
// Apply the query mods to the Query object
func Apply(q *queries.Query, mods ...QueryMod) {
for _, mod := range mods {
if mod != nil {
mod(q)
}
}
}
// SQL allows you to execute a plain SQL statement
@ -125,12 +123,6 @@ func From(from string) QueryMod {
}
}
func ForceIndex(index string) QueryMod {
return func(q *queries.Query) {
queries.SetForceIndex(q, index)
}
}
// Limit the number of returned rows
func Limit(limit int) QueryMod {
return func(q *queries.Query) {

View file

@ -4,8 +4,7 @@ import (
"database/sql"
"fmt"
"github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/lbryio/sqlboiler/boil"
"github.com/volatiletech/sqlboiler/boil"
)
// joinKind is the type of join
@ -30,7 +29,6 @@ type Query struct {
selectCols []string
count bool
from []string
forceindex string
joins []join
where []where
in []in
@ -138,7 +136,7 @@ func (q *Query) Query() (*sql.Rows, error) {
func (q *Query) ExecP() sql.Result {
res, err := q.Exec()
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return res
@ -149,7 +147,7 @@ func (q *Query) ExecP() sql.Result {
func (q *Query) QueryP() *sql.Rows {
rows, err := q.Query()
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return rows
@ -190,6 +188,11 @@ func SetSelect(q *Query, sel []string) {
q.selectCols = sel
}
// GetSelect from the query
func GetSelect(q *Query) []string {
return q.selectCols
}
// SetCount on the query.
func SetCount(q *Query) {
q.count = true
@ -264,11 +267,6 @@ func SetLastWhereAsOr(q *Query) {
q.where[len(q.where)-1].orSeparator = true
}
// SetForceIndex sets the index to be used by the query
func SetForceIndex(q *Query, index string){
q.forceindex = index
}
// SetLastInAsOr sets the or separator for the tail "IN" in the slice
func SetLastInAsOr(q *Query) {
if len(q.in) == 0 {

View file

@ -7,7 +7,7 @@ import (
"sort"
"strings"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/strmangle"
)
var (
@ -76,14 +76,8 @@ func buildSelectQuery(q *Query) (*bytes.Buffer, []interface{}) {
buf.WriteByte(')')
}
if len(q.forceindex) > 0 {
fmt.Fprintf(buf, " FROM %s FORCE INDEX (%s)", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "),q.forceindex)
}else{
fmt.Fprintf(buf, " FROM %s", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "))
}
if len(q.joins) > 0 {
argsLen := len(args)
joinBuf := strmangle.GetBuffer()
@ -196,7 +190,7 @@ func buildUpdateQuery(q *Query) (*bytes.Buffer, []interface{}) {
}
// BuildUpsertQueryMySQL builds a SQL statement string using the upsertData provided.
func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string, autoIncrementCol string) string {
func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string) string {
whitelist = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, whitelist)
buf := strmangle.GetBuffer()
@ -226,11 +220,6 @@ func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []st
strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
)
// https://stackoverflow.com/questions/778534/mysql-on-duplicate-key-last-insert-id
if autoIncrementCol != "" {
buf.WriteString(autoIncrementCol + " = LAST_INSERT_ID(" + autoIncrementCol + "), ")
}
for i, v := range update {
if i != 0 {
buf.WriteByte(',')

View file

@ -7,9 +7,9 @@ import (
"strings"
"sync"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/pkg/errors"
"github.com/volatiletech/sqlboiler/boil"
"github.com/volatiletech/sqlboiler/strmangle"
)
var (
@ -41,7 +41,7 @@ const (
// It panics on error. See boil.Bind() documentation.
func (q *Query) BindP(obj interface{}) {
if err := q.Bind(obj); err != nil {
panic(errors.WithStack(err))
panic(boil.WrapErr(err))
}
}

View file

@ -14,12 +14,12 @@ import (
"sync/atomic"
"time"
null "github.com/lbryio/lbry.go/v2/extras/null"
null "gopkg.in/volatiletech/null.v6"
"github.com/pkg/errors"
"github.com/satori/go.uuid"
"github.com/lbryio/sqlboiler/strmangle"
"github.com/lbryio/sqlboiler/types"
"github.com/volatiletech/sqlboiler/strmangle"
"github.com/volatiletech/sqlboiler/types"
)
var (
@ -159,18 +159,17 @@ func randDate(s *Seed) time.Time {
// If canBeNull is true:
// The value has the possibility of being null or non-zero at random.
func randomizeField(s *Seed, field reflect.Value, fieldType string, canBeNull bool) error {
kind := field.Kind()
typ := field.Type()
if strings.HasPrefix(fieldType, "enum") {
enum, err := randEnumValue(fieldType)
enum, err := randEnumValue(s, fieldType)
if err != nil {
return err
}
if kind == reflect.Struct {
val := null.NewString(enum, rand.Intn(1) == 0)
val := null.NewString(enum, s.nextInt()%2 == 0)
field.Set(reflect.ValueOf(val))
} else {
field.Set(reflect.ValueOf(enum))
@ -623,11 +622,11 @@ func getVariableRandValue(s *Seed, kind reflect.Kind, typ reflect.Type) interfac
return nil
}
func randEnumValue(enum string) (string, error) {
func randEnumValue(s *Seed, enum string) (string, error) {
vals := strmangle.ParseEnumVals(enum)
if vals == nil || len(vals) == 0 {
return "", fmt.Errorf("unable to parse enum string: %s", enum)
}
return vals[rand.Intn(len(vals)-1)], nil
return vals[s.nextInt()%len(vals)], nil
}

View file

@ -5,7 +5,7 @@ import (
"testing"
"time"
null "github.com/lbryio/lbry.go/v2/extras/null"
null "gopkg.in/volatiletech/null.v6"
)
func TestRandomizeStruct(t *testing.T) {
@ -148,24 +148,36 @@ func TestRandomizeField(t *testing.T) {
func TestRandEnumValue(t *testing.T) {
t.Parallel()
s := NewSeed()
enum1 := "enum.workday('monday','tuesday')"
enum2 := "enum('monday','tuesday')"
enum3 := "enum('monday')"
r1, err := randEnumValue(enum1)
r1, err := randEnumValue(s, enum1)
if err != nil {
t.Error(err)
}
if r1 != "monday" && r1 != "tuesday" {
t.Errorf("Expected monday or tueday, got: %q", r1)
t.Errorf("Expected monday or tuesday, got: %q", r1)
}
r2, err := randEnumValue(enum2)
r2, err := randEnumValue(s, enum2)
if err != nil {
t.Error(err)
}
if r2 != "monday" && r2 != "tuesday" {
t.Errorf("Expected monday or tueday, got: %q", r2)
t.Errorf("Expected monday or tuesday, got: %q", r2)
}
r3, err := randEnumValue(s, enum3)
if err != nil {
t.Error(err)
}
if r3 != "monday" {
t.Errorf("Expected monday got: %q", r3)
}
}

View file

@ -1,6 +1,6 @@
package strmangle
import "github.com/nullbio/inflect"
import "github.com/volatiletech/inflect"
var boilRuleset *inflect.Ruleset
@ -193,6 +193,7 @@ func newBoilRuleset() *inflect.Ruleset {
rs.AddSingularExact("is", "is", true)
rs.AddSingularExact("us", "us", true)
rs.AddSingularExact("as", "as", true)
rs.AddSingularExact("areas", "area", true)
rs.AddPluralExact("a", "a", true)
rs.AddPluralExact("i", "i", true)
rs.AddPluralExact("is", "is", true)

View file

@ -93,7 +93,7 @@ func SchemaTable(lq, rq string, driver string, schema string, table string) stri
return fmt.Sprintf(`%s%s%s`, lq, table, rq)
}
// IdentQuote attempts to quote simple identifiers in SQL tatements
// IdentQuote attempts to quote simple identifiers in SQL statements
func IdentQuote(lq byte, rq byte, s string) string {
if strings.ToLower(s) == "null" || s == "?" {
return s

View file

@ -139,6 +139,7 @@ func TestSingular(t *testing.T) {
{"hello_people", "hello_person"},
{"hello_person", "hello_person"},
{"friends", "friend"},
{"areas", "area"},
{"hello_there_people", "hello_there_person"},
}
@ -159,6 +160,7 @@ func TestPlural(t *testing.T) {
{"hello_person", "hello_people"},
{"friend", "friends"},
{"friends", "friends"},
{"area", "areas"},
{"hello_there_person", "hello_there_people"},
}

View file

@ -27,13 +27,6 @@ var {{$modelName}}Columns = struct {
{{end -}}
}
// {{$modelName}}Filter allows you to filter on any columns by making them all pointers.
type {{$modelName}}Filter struct {
{{range $column := .Table.Columns -}}
{{titleCase $column.Name}} *{{$column.Type}} `{{generateTags $dot.Tags $column.Name}}boil:"{{$column.Name}}" json:"{{$column.Name}},omitempty" toml:"{{$column.Name}}" yaml:"{{$column.Name}},omitempty"`
{{end -}}
}
{{- if .Table.IsJoinTable -}}
{{- else}}
// {{$modelNameCamel}}R is where relationships are stored.

View file

@ -10,7 +10,6 @@ var (
{{$varNameSingular}}ColumnsWithoutDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault false | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
{{$varNameSingular}}ColumnsWithDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault true | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
{{$varNameSingular}}PrimaryKeyColumns = []string{{"{"}}{{.Table.PKey.Columns | stringMap .StringFuncs.quoteWrap | join ", "}}{{"}"}}
{{$varNameSingular}}AutoIncrementColumn = "{{.Table.AutoIncrementColumn }}"
)
type (
@ -22,7 +21,7 @@ type (
{{$tableNameSingular}}Hook func(boil.Executor, *{{$tableNameSingular}}) error
{{- end}}
{{$tableNameSingular}}Query struct {
{{$varNameSingular}}Query struct {
*queries.Query
}
)

View file

@ -16,7 +16,7 @@ var {{$varNameSingular}}AfterUpsertHooks []{{$tableNameSingular}}Hook
func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeInsertHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}
@ -27,7 +27,7 @@ func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeUpdateHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}
@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeDeleteHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}
@ -49,7 +49,7 @@ func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}BeforeUpsertHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}
@ -60,7 +60,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err er
func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterInsertHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}
@ -71,7 +71,7 @@ func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterSelectHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}
@ -82,7 +82,7 @@ func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterUpdateHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}
@ -93,7 +93,7 @@ func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterDeleteHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}
@ -104,7 +104,7 @@ func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err err
func (o *{{$tableNameSingular}}) doAfterUpsertHooks(exec boil.Executor) (err error) {
for _, hook := range {{$varNameSingular}}AfterUpsertHooks {
if err := hook(exec, o); err != nil {
return errors.Err(err)
return err
}
}

View file

@ -1,27 +1,27 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- $varNameSingular := .Table.Name | singular | camelCase -}}
// OneP returns a single {{$tableNameSingular}} record from the query, and panics on error.
func (q {{$tableNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
// OneP returns a single {{$varNameSingular}} record from the query, and panics on error.
func (q {{$varNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
o, err := q.One()
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return o
}
// One returns a single {{$tableNameSingular}} record from the query.
func (q {{$tableNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
// One returns a single {{$varNameSingular}} record from the query.
func (q {{$varNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
o := &{{$tableNameSingular}}{}
queries.SetLimit(q.Query, 1)
err := q.Bind(o)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
if errors.Cause(err) == sql.ErrNoRows {
return nil, sql.ErrNoRows
}
return nil, errors.Prefix("{{.PkgName}}: failed to execute a one query for {{.Table.Name}}", err)
return nil, errors.Wrap(err, "{{.PkgName}}: failed to execute a one query for {{.Table.Name}}")
}
{{if not .NoHooks -}}
@ -34,22 +34,22 @@ func (q {{$tableNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
}
// AllP returns all {{$tableNameSingular}} records from the query, and panics on error.
func (q {{$tableNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
func (q {{$varNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
o, err := q.All()
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return o
}
// All returns all {{$tableNameSingular}} records from the query.
func (q {{$tableNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
func (q {{$varNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
var o []*{{$tableNameSingular}}
err := q.Bind(&o)
if err != nil {
return nil, errors.Prefix("{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice", err)
return nil, errors.Wrap(err, "{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice")
}
{{if not .NoHooks -}}
@ -66,17 +66,17 @@ func (q {{$tableNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error)
}
// CountP returns the count of all {{$tableNameSingular}} records in the query, and panics on error.
func (q {{$tableNameSingular}}Query) CountP() int64 {
func (q {{$varNameSingular}}Query) CountP() int64 {
c, err := q.Count()
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return c
}
// Count returns the count of all {{$tableNameSingular}} records in the query.
func (q {{$tableNameSingular}}Query) Count() (int64, error) {
func (q {{$varNameSingular}}Query) Count() (int64, error) {
var count int64
queries.SetSelect(q.Query, nil)
@ -84,33 +84,32 @@ func (q {{$tableNameSingular}}Query) Count() (int64, error) {
err := q.Query.QueryRow().Scan(&count)
if err != nil {
return 0, errors.Prefix("{{.PkgName}}: failed to count {{.Table.Name}} rows", err)
return 0, errors.Wrap(err, "{{.PkgName}}: failed to count {{.Table.Name}} rows")
}
return count, nil
}
// ExistsP checks if the row exists in the table, and panics on error.
func (q {{$tableNameSingular}}Query) ExistsP() bool {
func (q {{$varNameSingular}}Query) ExistsP() bool {
e, err := q.Exists()
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return e
}
// Exists checks if the row exists in the table.
func (q {{$tableNameSingular}}Query) Exists() (bool, error) {
func (q {{$varNameSingular}}Query) Exists() (bool, error) {
var count int64
queries.SetCount(q.Query)
queries.SetSelect(q.Query, []string{})
queries.SetLimit(q.Query, 1)
err := q.Query.QueryRow().Scan(&count)
if err != nil {
return false, errors.Prefix("{{.PkgName}}: failed to check if {{.Table.Name}} exists", err)
return false, errors.Wrap(err, "{{.PkgName}}: failed to check if {{.Table.Name}} exists")
}
return count > 0, nil

View file
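The two One bodies interleaved above differ in their not-found contract: one maps sql.ErrNoRows to (nil, nil), the other returns the sentinel to the caller. A minimal caller-side sketch of the difference, assuming a hypothetical generated Users model (qm is the query-mod package, errors is github.com/pkg/errors):

// Convention A: One returns (nil, nil) when no row matches.
user, err := Users(db, qm.Where("id = ?", 1)).One()
if err != nil {
	return err // a real failure
}
if user == nil {
	// not found
}

// Convention B: One returns sql.ErrNoRows when no row matches.
user, err = Users(db, qm.Where("id = ?", 1)).One()
if errors.Cause(err) == sql.ErrNoRows {
	// not found
} else if err != nil {
	return err // a real failure
}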

@ -3,14 +3,14 @@
{{- $dot := . -}}
{{- range .Table.FKeys -}}
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
{{- $varNameSingular := .ForeignTable | singular | camelCase}}
// {{$txt.Function.Name}}G pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
queryMods := []qm.QueryMod{
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
}

View file

@ -3,14 +3,14 @@
{{- $dot := . -}}
{{- range .Table.ToOneRelationships -}}
{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
{{- $varNameSingular := .ForeignTable | singular | camelCase}}
// {{$txt.Function.Name}}G pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} pointed to by the foreign key.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
queryMods := []qm.QueryMod{
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
}

View file

@ -3,22 +3,19 @@
{{- $dot := . -}}
{{- $table := .Table -}}
{{- range .Table.ToManyRelationships -}}
{{- $tableNameSingular := .ForeignTable | singular | titleCase -}}
{{- $varNameSingular := .ForeignTable | singular | camelCase -}}
{{- $txt := txtsFromToMany $dot.Tables $table . -}}
{{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
// {{$txt.Function.Name}}G retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}}
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
}
// {{$txt.Function.Name}} retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}} with an executor
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
queryMods := []qm.QueryMod{
qm.Select("{{$schemaForeignTable}}.*"),
}
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
var queryMods []qm.QueryMod
if len(mods) != 0 {
queryMods = append(queryMods, mods...)
}
@ -37,6 +34,11 @@ func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor,
query := {{$txt.ForeignTable.NamePluralGo}}(exec, queryMods...)
queries.SetFrom(query.Query, "{{$schemaForeignTable}}")
if len(queries.GetSelect(query.Query)) == 0 {
queries.SetSelect(query.Query, []string{"{{$schemaForeignTable}}.*"})
}
return query
}

View file
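The rewritten to-many accessor above only injects the default projection when the caller has not supplied one. A short usage sketch under assumed names (a User model with a Videos relationship; neither appears in this diff):

// No select mod given: the accessor adds SELECT "videos".* itself.
videos, err := user.Videos(db).All()

// A caller-supplied projection is kept as-is instead of being overridden.
partial, err := user.Videos(db, qm.Select("title")).All()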

@ -4,7 +4,7 @@
{{- range .Table.FKeys -}}
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo}}
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
// loaded structs of the objects.
func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
@ -45,20 +45,20 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
}
defer results.Close()
var resultSlice []*{{$txt.ForeignTable.NameGo}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
}
{{if not $dot.NoHooks -}}
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return errors.Err(err)
return err
}
}
}

View file
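The error-handling churn in these hunks follows one mapping throughout: errors.Wrap from github.com/pkg/errors takes the error first and the message second, while the fork's errors.Prefix takes the message first, and errors.Err wraps a bare error without a message. A side-by-side sketch with illustrative message text and hypothetical helper names; the two conventions come from different errors packages and would not coexist in one file:

// pkg/errors convention: error first, then message.
func wrapUpstream(err error) error {
	return errors.Wrap(err, "failed to eager load related rows")
}

// Fork convention: message first, then error.
func wrapFork(err error) error {
	return errors.Prefix("failed to eager load related rows", err)
}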

@ -45,20 +45,20 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
}
defer results.Close()
var resultSlice []*{{$txt.ForeignTable.NameGo}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
}
{{if not $dot.NoHooks -}}
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return errors.Err(err)
return err
}
}
}

View file

@ -54,7 +54,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
results, err := e.Query(query, args...)
if err != nil {
return errors.Prefix("failed to eager load {{.ForeignTable}}", err)
return errors.Wrap(err, "failed to eager load {{.ForeignTable}}")
}
defer results.Close()
@ -70,7 +70,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
err = results.Scan({{$foreignTable.Columns | columnNames | stringMap $dot.StringFuncs.titleCase | prefixStringSlice "&one." | join ", "}}, &localJoinCol)
if err = results.Err(); err != nil {
return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
}
resultSlice = append(resultSlice, one)
@ -78,11 +78,11 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
}
if err = results.Err(); err != nil {
return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
}
{{else -}}
if err = queries.Bind(results, &resultSlice); err != nil {
return errors.Prefix("failed to bind eager loaded slice {{.ForeignTable}}", err)
return errors.Wrap(err, "failed to bind eager loaded slice {{.ForeignTable}}")
}
{{end}}
@ -90,7 +90,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
if len({{.ForeignTable | singular | camelCase}}AfterSelectHooks) != 0 {
for _, obj := range resultSlice {
if err := obj.doAfterSelectHooks(e); err != nil {
return errors.Err(err)
return err
}
}
}

View file

@ -20,7 +20,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -30,7 +30,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -41,7 +41,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
var err error
if insert {
if err = related.Insert(exec); err != nil {
return errors.Prefix("failed to insert into foreign table", err)
return errors.Wrap(err, "failed to insert into foreign table")
}
}
@ -58,7 +58,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Prefix("failed to update local table", err)
return errors.Wrap(err, "failed to update local table")
}
o.{{$txt.Function.LocalAssignment}} = related.{{$txt.Function.ForeignAssignment}}
@ -110,7 +110,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$t
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -120,7 +120,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -133,7 +133,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
o.{{$txt.LocalTable.ColumnNameGo}}.Valid = false
if err = o.Update(exec, "{{.Column}}"); err != nil {
o.{{$txt.LocalTable.ColumnNameGo}}.Valid = true
return errors.Prefix("failed to update local table", err)
return errors.Wrap(err, "failed to update local table")
}
o.R.{{$txt.Function.Name}} = nil

View file

@ -21,7 +21,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -31,7 +31,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -48,7 +48,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
{{- end}}
if err = related.Insert(exec); err != nil {
return errors.Prefix("failed to insert into foreign table", err)
return errors.Wrap(err, "failed to insert into foreign table")
}
} else {
updateQuery := fmt.Sprintf(
@ -64,7 +64,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Prefix("failed to update foreign table", err)
return errors.Wrap(err, "failed to update foreign table")
}
related.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
@ -107,7 +107,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$t
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -117,7 +117,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -130,7 +130,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = false
if err = related.Update(exec, "{{.ForeignColumn}}"); err != nil {
related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
return errors.Prefix("failed to update local table", err)
return errors.Wrap(err, "failed to update local table")
}
o.R.{{$txt.Function.Name}} = nil

View file

@ -24,7 +24,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Add{{$txt.Function.Name}}(exec, insert, related...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -35,7 +35,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Add{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -55,7 +55,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
{{end -}}
if err = rel.Insert(exec); err != nil {
return errors.Prefix("failed to insert into foreign table", err)
return errors.Wrap(err, "failed to insert into foreign table")
}
}{{if not .ToJoinTable}} else {
updateQuery := fmt.Sprintf(
@ -71,7 +71,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
}
if _, err = exec.Exec(updateQuery, values...); err != nil {
return errors.Prefix("failed to update foreign table", err)
return errors.Wrap(err, "failed to update foreign table")
}
rel.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
@ -93,7 +93,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
_, err = exec.Exec(query, values...)
if err != nil {
return errors.Prefix("failed to insert into join table", err)
return errors.Wrap(err, "failed to insert into join table")
}
}
{{end -}}
@ -152,7 +152,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, rel
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(exec, insert, related...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -165,7 +165,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Execut
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -190,7 +190,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
_, err := exec.Exec(query, values...)
if err != nil {
return errors.Prefix("failed to remove relationships before set", err)
return errors.Wrap(err, "failed to remove relationships before set")
}
{{if .ToJoinTable -}}
@ -230,7 +230,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related ...*{
// Panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(exec, related...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -240,7 +240,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Exe
// Uses the global database handle and panics on error.
func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related ...*{{$txt.ForeignTable.NameGo}}) {
if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -266,7 +266,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
_, err = exec.Exec(query, values...)
if err != nil {
return errors.Prefix("failed to remove relationships before set", err)
return errors.Wrap(err, "failed to remove relationships before set")
}
{{else -}}
for _, rel := range related {
@ -277,7 +277,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
}
{{end -}}
if err = rel.Update(exec, "{{.ForeignColumn}}"); err != nil {
return errors.Err(err)
return err
}
}
{{end -}}

View file

@ -1,12 +1,12 @@
{{- $tableNamePlural := .Table.Name | plural | titleCase -}}
{{- $tableNameSingular := .Table.Name | singular | titleCase}}
{{- $varNameSingular := .Table.Name | singular | camelCase}}
// {{$tableNamePlural}}G retrieves all records.
func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
return {{$tableNamePlural}}(boil.GetDB(), mods...)
}
// {{$tableNamePlural}} retrieves all the records using an executor.
func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
mods = append(mods, qm.From("{{.Table.Name | .SchemaTable}}"))
return {{$tableNameSingular}}Query{NewQuery(exec, mods...)}
return {{$varNameSingular}}Query{NewQuery(exec, mods...)}
}

View file
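These constructors are the entry point for ad-hoc queries built from query mods. A brief usage sketch with hypothetical generated names (Users/UsersG for a users table):

// Executor-backed query.
adults, err := Users(db, qm.Where("age > ?", 21)).All()

// Global-handle variant; requires boil.SetDB(db) to have been called at startup.
total, err := UsersG().Count()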

@ -12,7 +12,7 @@ func Find{{$tableNameSingular}}G({{$pkArgs}}, selectCols ...string) (*{{$tableNa
func Find{{$tableNameSingular}}GP({{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
retobj, err := Find{{$tableNameSingular}}(boil.GetDB(), {{$pkNames | join ", "}}, selectCols...)
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return retobj
@ -35,10 +35,10 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
err := q.Bind({{$varNameSingular}}Obj)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
if errors.Cause(err) == sql.ErrNoRows {
return nil, sql.ErrNoRows
}
return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
return nil, errors.Wrap(err, "{{.PkgName}}: unable to select from {{.Table.Name}}")
}
return {{$varNameSingular}}Obj, nil
@ -48,79 +48,8 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
func Find{{$tableNameSingular}}P(exec boil.Executor, {{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
retobj, err := Find{{$tableNameSingular}}(exec, {{$pkNames | join ", "}}, selectCols...)
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return retobj
}
// FindOne{{$tableNameSingular}} retrieves a single record using filters.
func FindOne{{$tableNameSingular}}(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
obj := &{{$tableNameSingular}}{}
err := {{$tableNameSingular}}NewQuery(exec).
Where(filters).
Limit(1).
Bind(obj)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
}
return obj, nil
}
// FindOne{{$tableNameSingular}}G retrieves a single record using filters.
func FindOne{{$tableNameSingular}}G(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}(boil.GetDB(), filters)
}
// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes a new record if one is not found.
func FindOne{{$tableNameSingular}}OrInit(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}(exec, filters)
if err != nil {
return nil, err
}
if {{$varNameSingular}}Obj == nil {
{{$varNameSingular}}Obj = &{{$tableNameSingular}}{}
objR := reflect.ValueOf({{$varNameSingular}}Obj).Elem()
r := reflect.ValueOf(filters)
for i := 0; i < r.NumField(); i++ {
f := r.Field(i)
if f.Elem().IsValid() {
objR.FieldByName(r.Type().Field(i).Name).Set(f.Elem())
}
}
}
return {{$varNameSingular}}Obj, nil
}
// FindOne{{$tableNameSingular}}OrInitG retrieves a single record using filters, or initializes a new record if one is not found. Uses the global database handle.
func FindOne{{$tableNameSingular}}OrInitG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}OrInit(boil.GetDB(), filters)
}
// FindOne{{$tableNameSingular}}OrCreate retrieves a single record using filters, or initializes and inserts a new record if one is not found.
func FindOne{{$tableNameSingular}}OrCreate(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}OrInit(exec, filters)
if err != nil {
return nil, err
}
if {{$varNameSingular}}Obj.IsNew() {
err := {{$varNameSingular}}Obj.Insert(exec)
if err != nil {
return nil, err
}
}
return {{$varNameSingular}}Obj, nil
}
// FindOne{{$tableNameSingular}}OrCreateG retrieves a single record using filters, or initializes and inserts a new record if one is not found. Uses the global database handle.
func FindOne{{$tableNameSingular}}OrCreateG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
return FindOne{{$tableNameSingular}}OrCreate(boil.GetDB(), filters)
}

View file
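The FindOne/OrInit/OrCreate helpers shown above implement a filter-struct based find-or-create flow via reflection. A sketch of the intended call site, assuming a generated User model whose UserFilter fields are pointers to null types (all names hypothetical):

email := null.StringFrom("alice@example.com")
user, err := FindOneUserOrCreate(db, UserFilter{Email: &email})
if err != nil {
	return err
}
// user was either loaded, or initialized from the filter fields and inserted.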

@ -10,7 +10,7 @@ func (o *{{$tableNameSingular}}) InsertG(whitelist ... string) error {
// behavior description.
func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
if err := o.Insert(boil.GetDB(), whitelist...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -18,7 +18,7 @@ func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
// for whitelist behavior description.
func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... string) {
if err := o.Insert(exec, whitelist...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -29,7 +29,7 @@ func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... strin
// - All columns with a default, but non-zero are included (i.e. health = 75)
func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string) error {
if o == nil {
return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
}
var err error
@ -37,7 +37,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
{{if not .NoHooks -}}
if err := o.doBeforeInsertHooks(exec); err != nil {
return errors.Err(err)
return err
}
{{- end}}
@ -59,11 +59,11 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, wl)
if err != nil {
return errors.Err(err)
return err
}
cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, returnColumns)
if err != nil {
return errors.Err(err)
return err
}
if len(wl) != 0 {
cache.query = fmt.Sprintf("INSERT INTO {{$schemaTable}} ({{.LQ}}%s{{.RQ}}) %%sVALUES (%s)%%s", strings.Join(wl, "{{.RQ}},{{.LQ}}"), strmangle.Placeholders(dialect.IndexPlaceholders, len(wl), 1, 1))
@ -110,7 +110,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
_, err = exec.Exec(cache.query, vals...)
{{- end}}
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
}
{{if $canLastInsertID -}}
@ -125,7 +125,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
{{if $canLastInsertID -}}
lastID, err = result.LastInsertId()
if err != nil {
return errors.Err(ErrSyncFail)
return ErrSyncFail
}
{{$colName := index .Table.PKey.Columns 0 -}}
@ -150,7 +150,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
}
{{else}}
if len(cache.retMapping) != 0 {
@ -160,7 +160,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
}
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
}
{{end}}

View file

@ -12,7 +12,7 @@ func (o *{{$tableNameSingular}}) UpdateG(whitelist ...string) error {
// Panics on error. See Update for whitelist behavior description.
func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
if err := o.Update(boil.GetDB(), whitelist...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -21,7 +21,7 @@ func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
func (o *{{$tableNameSingular}}) UpdateP(exec boil.Executor, whitelist ... string) {
err := o.Update(exec, whitelist...)
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
var err error
{{if not .NoHooks -}}
if err = o.doBeforeUpdateHooks(exec); err != nil {
return errors.Err(err)
return err
}
{{end -}}
@ -56,11 +56,13 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
{{if eq .DriverName "mssql"}}
wl = strmangle.SetComplement(wl, {{$varNameSingular}}ColumnsWithAuto)
{{end}}
{{if not .NoAutoTimestamps}}
if len(whitelist) == 0 {
wl = strmangle.SetComplement(wl, []string{"created_at","updated_at"})
wl = strmangle.SetComplement(wl, []string{"created_at"})
}
{{end -}}
if len(wl) == 0 {
return errors.Err("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
return errors.New("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
}
cache.query = fmt.Sprintf("UPDATE {{$schemaTable}} SET %s WHERE %s",
@ -69,7 +71,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
)
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, append(wl, {{$varNameSingular}}PrimaryKeyColumns...))
if err != nil {
return errors.Err(err)
return err
}
}
@ -82,7 +84,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
_, err = exec.Exec(cache.query, values...)
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to update {{.Table.Name}} row", err)
return errors.Wrap(err, "{{.PkgName}}: unable to update {{.Table.Name}} row")
}
if !cached {
@ -99,19 +101,19 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
}
// UpdateAllP updates all rows with matching column names, and panics on error.
func (q {{$tableNameSingular}}Query) UpdateAllP(cols M) {
func (q {{$varNameSingular}}Query) UpdateAllP(cols M) {
if err := q.UpdateAll(cols); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
// UpdateAll updates all rows with the specified column values.
func (q {{$tableNameSingular}}Query) UpdateAll(cols M) error {
func (q {{$varNameSingular}}Query) UpdateAll(cols M) error {
queries.SetUpdate(q.Query, cols)
_, err := q.Query.Exec()
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to update all for {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to update all for {{.Table.Name}}")
}
return nil
@ -125,14 +127,14 @@ func (o {{$tableNameSingular}}Slice) UpdateAllG(cols M) error {
// UpdateAllGP updates all rows with the specified column values, and panics on error.
func (o {{$tableNameSingular}}Slice) UpdateAllGP(cols M) {
if err := o.UpdateAll(boil.GetDB(), cols); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
// UpdateAllP updates all rows with the specified column values, and panics on error.
func (o {{$tableNameSingular}}Slice) UpdateAllP(exec boil.Executor, cols M) {
if err := o.UpdateAll(exec, cols); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -144,7 +146,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
}
if len(cols) == 0 {
return errors.Err("{{.PkgName}}: update all requires at least one column argument")
return errors.New("{{.PkgName}}: update all requires at least one column argument")
}
colNames := make([]string, len(cols))
@ -174,7 +176,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to update all in {{$varNameSingular}} slice", err)
return errors.Wrap(err, "{{.PkgName}}: unable to update all in {{$varNameSingular}} slice")
}
return nil

View file
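The whitelist hunk above differs only in whether updated_at is stripped from an implicit whitelist; strmangle.SetComplement returns the elements of its first argument that are absent from its second. A standalone sketch with illustrative column names:

cols := []string{"id", "name", "created_at", "updated_at"}

// Excluding only created_at keeps updated_at writable so it can be bumped on update.
wl := strmangle.SetComplement(cols, []string{"created_at"})
// wl == []string{"id", "name", "updated_at"}

// Excluding both drops timestamp maintenance from the implicit whitelist entirely.
wl = strmangle.SetComplement(cols, []string{"created_at", "updated_at"})
// wl == []string{"id", "name"}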

@ -9,7 +9,7 @@ func (o *{{$tableNameSingular}}) UpsertG({{if eq .DriverName "postgres"}}updateO
// UpsertGP attempts an insert, and does an update or ignore on conflict. Panics on error.
func (o *{{$tableNameSingular}}) UpsertGP({{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
if err := o.Upsert(boil.GetDB(), {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -17,21 +17,21 @@ func (o *{{$tableNameSingular}}) UpsertGP({{if eq .DriverName "postgres"}}update
// UpsertP panics on error.
func (o *{{$tableNameSingular}}) UpsertP(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
if err := o.Upsert(exec, {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
// Upsert attempts an insert using an executor, and does an update or ignore on conflict.
func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
if o == nil {
return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
}
{{- template "timestamp_upsert_helper" . }}
{{if not .NoHooks -}}
if err := o.doBeforeUpsertHooks(exec); err != nil {
return errors.Err(err)
return err
}
{{- end}}
@ -87,7 +87,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
}
}
if len(insert) == 0 {
return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build insert column list")
return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build insert column list")
}
ret = strmangle.SetMerge(ret, {{$varNameSingular}}ColumnsWithAuto)
@ -104,7 +104,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
{{end -}}
if len(update) == 0 {
return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
}
{{if eq .DriverName "postgres"}}
@ -115,7 +115,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
}
cache.query = queries.BuildUpsertQueryPostgres(dialect, "{{$schemaTable}}", updateOnConflict, ret, update, conflict, insert)
{{else if eq .DriverName "mysql"}}
cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, insert, {{$varNameSingular}}AutoIncrementColumn)
cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, insert)
cache.retQuery = fmt.Sprintf(
"SELECT %s FROM {{.LQ}}{{.Table.Name}}{{.RQ}} WHERE {{whereClause .LQ .RQ 0 .Table.PKey.Columns}}",
strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), ","),
@ -129,12 +129,12 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, {{if eq .DriverName "mssql"}}whitelist{{else}}insert{{end}})
if err != nil {
return errors.Err(err)
return err
}
if len(ret) != 0 {
cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, ret)
if err != nil {
return errors.Err(err)
return err
}
}
}
@ -159,7 +159,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
_, err = exec.Exec(cache.query, vals...)
{{- end}}
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to upsert for {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to upsert for {{.Table.Name}}")
}
{{if $canLastInsertID -}}
@ -174,7 +174,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
{{if $canLastInsertID -}}
lastID, err = result.LastInsertId()
if err != nil {
return errors.Err(ErrSyncFail)
return ErrSyncFail
}
{{$colName := index .Table.PKey.Columns 0 -}}
@ -199,7 +199,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
}
{{- else}}
if len(cache.retMapping) != 0 {
@ -211,7 +211,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
_, err = exec.Exec(cache.query, vals...)
}
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to upsert {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to upsert {{.Table.Name}}")
}
{{- end}}

View file

@ -6,7 +6,7 @@
// Panics on error.
func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
if err := o.Delete(exec); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -14,7 +14,7 @@ func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
// DeleteG will match against the primary key column to find the record to delete.
func (o *{{$tableNameSingular}}) DeleteG() error {
if o == nil {
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
}
return o.Delete(boil.GetDB())
@ -25,7 +25,7 @@ func (o *{{$tableNameSingular}}) DeleteG() error {
// Panics on error.
func (o *{{$tableNameSingular}}) DeleteGP() {
if err := o.DeleteG(); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -33,12 +33,12 @@ func (o *{{$tableNameSingular}}) DeleteGP() {
// Delete will match against the primary key column to find the record to delete.
func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
if o == nil {
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
}
{{if not .NoHooks -}}
if err := o.doBeforeDeleteHooks(exec); err != nil {
return errors.Err(err)
return err
}
{{- end}}
@ -52,12 +52,12 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to delete from {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to delete from {{.Table.Name}}")
}
{{if not .NoHooks -}}
if err := o.doAfterDeleteHooks(exec); err != nil {
return errors.Err(err)
return err
}
{{- end}}
@ -65,23 +65,23 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
}
// DeleteAllP deletes all rows, and panics on error.
func (q {{$tableNameSingular}}Query) DeleteAllP() {
func (q {{$varNameSingular}}Query) DeleteAllP() {
if err := q.DeleteAll(); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
// DeleteAll deletes all matching rows.
func (q {{$tableNameSingular}}Query) DeleteAll() error {
func (q {{$varNameSingular}}Query) DeleteAll() error {
if q.Query == nil {
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}}Query provided for delete all")
return errors.New("{{.PkgName}}: no {{$varNameSingular}}Query provided for delete all")
}
queries.SetDelete(q.Query)
_, err := q.Query.Exec()
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to delete all from {{.Table.Name}}", err)
return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{.Table.Name}}")
}
return nil
@ -90,14 +90,14 @@ func (q {{$tableNameSingular}}Query) DeleteAll() error {
// DeleteAllGP deletes all rows in the slice, and panics on error.
func (o {{$tableNameSingular}}Slice) DeleteAllGP() {
if err := o.DeleteAllG(); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
// DeleteAllG deletes all rows in the slice.
func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
if o == nil {
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
}
return o.DeleteAll(boil.GetDB())
}
@ -105,14 +105,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
// DeleteAllP deletes all rows in the slice, using an executor, and panics on error.
func (o {{$tableNameSingular}}Slice) DeleteAllP(exec boil.Executor) {
if err := o.DeleteAll(exec); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
// DeleteAll deletes all rows in the slice, using an executor.
func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
if o == nil {
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
}
if len(o) == 0 {
@ -123,7 +123,7 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
if len({{$varNameSingular}}BeforeDeleteHooks) != 0 {
for _, obj := range o {
if err := obj.doBeforeDeleteHooks(exec); err != nil {
return errors.Err(err)
return err
}
}
}
@ -145,14 +145,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
_, err := exec.Exec(sql, args...)
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice", err)
return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice")
}
{{if not .NoHooks -}}
if len({{$varNameSingular}}AfterDeleteHooks) != 0 {
for _, obj := range o {
if err := obj.doAfterDeleteHooks(exec); err != nil {
return errors.Err(err)
return err
}
}
}

View file

@ -5,21 +5,21 @@
// ReloadGP refetches the object from the database and panics on error.
func (o *{{$tableNameSingular}}) ReloadGP() {
if err := o.ReloadG(); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
// ReloadP refetches the object from the database with an executor. Panics on error.
func (o *{{$tableNameSingular}}) ReloadP(exec boil.Executor) {
if err := o.Reload(exec); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
// ReloadG refetches the object from the database using the primary keys.
func (o *{{$tableNameSingular}}) ReloadG() error {
if o == nil {
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
}
return o.Reload(boil.GetDB())
@ -30,7 +30,7 @@ func (o *{{$tableNameSingular}}) ReloadG() error {
func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
ret, err := Find{{$tableNameSingular}}(exec, {{.Table.PKey.Columns | stringMap .StringFuncs.titleCase | prefixStringSlice "o." | join ", "}})
if err != nil {
return errors.Err(err)
return err
}
*o = *ret
@ -42,7 +42,7 @@ func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
// Panics on error.
func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
if err := o.ReloadAllG(); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -51,7 +51,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
// Panics on error.
func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
if err := o.ReloadAll(exec); err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
}
@ -59,7 +59,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
// and overwrites the original object slice with the newly updated slice.
func (o *{{$tableNameSingular}}Slice) ReloadAllG() error {
if o == nil {
return errors.Err("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
return errors.New("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
}
return o.ReloadAll(boil.GetDB())
@ -86,7 +86,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAll(exec boil.Executor) error {
err := q.Bind(&{{$varNamePlural}})
if err != nil {
return errors.Prefix("{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice", err)
return errors.Wrap(err, "{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice")
}
*o = {{$varNamePlural}}

View file

@ -1,5 +1,4 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- $varNameSingular := .Table.Name | singular | camelCase -}}
{{- $colDefs := sqlColDefinitions .Table.Columns .Table.PKey.Columns -}}
{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase | stringMap .StringFuncs.replaceReserved -}}
{{- $pkArgs := joinSlices " " $pkNames $colDefs.Types | join ", " -}}
@ -22,7 +21,7 @@ func {{$tableNameSingular}}Exists(exec boil.Executor, {{$pkArgs}}) (bool, error)
err := row.Scan(&exists)
if err != nil {
return false, errors.Prefix("{{.PkgName}}: unable to check if {{.Table.Name}} exists", err)
return false, errors.Wrap(err, "{{.PkgName}}: unable to check if {{.Table.Name}} exists")
}
return exists, nil
@ -37,7 +36,7 @@ func {{$tableNameSingular}}ExistsG({{$pkArgs}}) (bool, error) {
func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
e, err := {{$tableNameSingular}}Exists(boil.GetDB(), {{$pkNames | join ", "}})
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return e
@ -47,43 +46,8 @@ func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
func {{$tableNameSingular}}ExistsP(exec boil.Executor, {{$pkArgs}}) bool {
e, err := {{$tableNameSingular}}Exists(exec, {{$pkNames | join ", "}})
if err != nil {
panic(errors.Err(err))
panic(boil.WrapErr(err))
}
return e
}
// IsNew reports whether the record has not yet been stored in the db (i.e. its primary key is still unset).
func (o *{{$tableNameSingular}}) IsNew() bool {
r := reflect.ValueOf(o).Elem()
for i := 0; i < r.NumField(); i++ {
column := r.Type().Field(i).Tag.Get("boil")
for _, pkColumn := range {{$varNameSingular}}PrimaryKeyColumns {
if column == pkColumn {
field := r.Field(i)
if field.Interface() != reflect.Zero(field.Type()).Interface() {
return false
}
}
}
}
return true
}
// Save inserts the record if it does not exist, or updates it if it does.
func (o *{{$tableNameSingular}}) Save(exec boil.Executor, whitelist ...string) error {
if o.IsNew() {
return o.Insert(exec, whitelist...)
} else {
return o.Update(exec, whitelist...)
}
}
// SaveG inserts the record if it does not exist, or updates it if it does.
func (o *{{$tableNameSingular}}) SaveG(whitelist ...string) error {
if o.IsNew() {
return o.InsertG(whitelist...)
} else {
return o.UpdateG(whitelist...)
}
}

View file

@ -1,33 +0,0 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
// {{$tableNameSingular}}NewQuery creates a new query over {{.Table.Name}} records
func {{$tableNameSingular}}NewQuery(exec boil.Executor) *{{$tableNameSingular}}Query {
return &{{$tableNameSingular}}Query{NewQuery(exec, qm.Select("*"), qm.From("{{.Table.Name | .SchemaTable}}"))}
}
// {{$tableNameSingular}}NewQueryG creates a new query over {{.Table.Name}} records using the global database handle
func {{$tableNameSingular}}NewQueryG() *{{$tableNameSingular}}Query {
return {{$tableNameSingular}}NewQuery(boil.GetDB())
}
// Where filters query results
func (q *{{$tableNameSingular}}Query) Where(filters {{$tableNameSingular}}Filter) *{{$tableNameSingular}}Query {
r := reflect.ValueOf(filters)
for i := 0; i < r.NumField(); i++ {
f := r.Field(i)
if f.Elem().IsValid() {
if nullable, ok := f.Elem().Interface().(null.Nullable); ok && nullable.IsNull() {
queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" IS NULL")
} else {
queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" = ?", f.Elem().Interface())
}
}
}
return q
}
// Limit limits query results
func (q *{{$tableNameSingular}}Query) Limit(limit int) *{{$tableNameSingular}}Query {
queries.SetLimit(q.Query, limit)
return q
}

View file

@ -1,107 +0,0 @@
{{- $tableNamePlural := .Table.Name | plural | titleCase -}}
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- if .Table.IsJoinTable -}}
{{- else -}}
{{- $dot := . }}
// Merge{{$tableNamePlural}} combines two {{$tableNamePlural}} into one. The primary record will be kept, and the secondary will be deleted.
func Merge{{$tableNamePlural}}(exec boil.Executor, primaryID uint64, secondaryID uint64) (err error) {
tx, ok := exec.(boil.Transactor)
if !ok {
txdb, ok := exec.(boil.Beginner)
if !ok {
return errors.Err("database does not support transactions")
}
tx, err = txdb.Begin()
if err != nil {
return errors.Err(err)
}
defer func() {
if p := recover(); p != nil {
tx.Rollback()
panic(p) // Rollback, then propagate panic
} else if err != nil {
tx.Rollback()
} else {
err = tx.Commit()
}
}()
}
primary, err := Find{{$tableNameSingular}}(tx, primaryID)
if err != nil {
return errors.Err(err)
} else if primary == nil {
return errors.Err("primary {{$tableNameSingular}} not found")
}
secondary, err := Find{{$tableNameSingular}}(tx, secondaryID)
if err != nil {
return errors.Err(err)
} else if secondary == nil {
return errors.Err("secondary {{$tableNameSingular}} not found")
}
foreignKeys := []foreignKey{
{{- range .Tables -}}
{{- range .FKeys -}}
{{- if eq $dot.Table.Name .ForeignTable }}
{foreignTable: "{{.Table}}", foreignColumn: "{{.Column}}"},
{{- end -}}
{{- end -}}
{{- end }}
}
conflictingKeys := []conflictingUniqueKey{
{{- range .Tables -}}
{{- $table := . -}}
{{- range .FKeys -}}
{{- $fk := . -}}
{{- if eq $dot.Table.Name .ForeignTable -}}
{{- range $table.UKeys -}}
{{- if setInclude $fk.Column .Columns }}
{table: "{{$fk.Table}}", objectIdColumn: "{{$fk.Column}}", columns: []string{`{{ .Columns | join "`,`" }}`}},
{{- end -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{- end }}
}
err = mergeModels(tx, primaryID, secondaryID, foreignKeys, conflictingKeys)
if err != nil {
return err
}
pr := reflect.ValueOf(primary)
sr := reflect.ValueOf(secondary)
// for any column that's null on the primary and not null on the secondary, copy from secondary to primary
for i := 0; i < sr.Elem().NumField(); i++ {
pf := pr.Elem().Field(i)
sf := sr.Elem().Field(i)
if sf.IsValid() {
if nullable, ok := sf.Interface().(null.Nullable); ok && !nullable.IsNull() && pf.Interface().(null.Nullable).IsNull() {
pf.Set(sf)
}
}
}
err = primary.Update(tx)
if err != nil {
return err
}
err = secondary.Delete(tx)
if err != nil {
return err
}
return nil
}
// Merge{{$tableNamePlural}}G combines two {{$tableNamePlural}} into one using the global database handle. The primary record will be kept, and the secondary will be deleted.
func Merge{{$tableNamePlural}}G(primaryID uint64, secondaryID uint64) error {
return Merge{{$tableNamePlural}}(boil.GetDB(), primaryID, secondaryID)
}
{{- end -}}{{/* join table */}}

View file
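The removed Merge helper opens its own transaction only when the executor passed in is not already one, and commits or rolls back in a deferred closure. A condensed, standalone sketch of that pattern using database/sql directly (the withTx name is not part of the generated code):

func withTx(db *sql.DB, fn func(tx *sql.Tx) error) (err error) {
	tx, err := db.Begin()
	if err != nil {
		return err
	}
	defer func() {
		if p := recover(); p != nil {
			tx.Rollback()
			panic(p) // roll back, then re-raise the panic
		} else if err != nil {
			tx.Rollback()
		} else {
			err = tx.Commit()
		}
	}()
	return fn(tx)
}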

@ -19,168 +19,3 @@ func NewQuery(exec boil.Executor, mods ...qm.QueryMod) *queries.Query {
return q
}
func mergeModels(tx boil.Executor, primaryID uint64, secondaryID uint64, foreignKeys []foreignKey, conflictingKeys []conflictingUniqueKey) error {
if len(foreignKeys) < 1 {
return nil
}
var err error
for _, conflict := range conflictingKeys {
if len(conflict.columns) == 1 && conflict.columns[0] == conflict.objectIdColumn {
err = deleteOneToOneConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
} else {
err = deleteOneToManyConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
}
if err != nil {
return err
}
}
for _, fk := range foreignKeys {
// TODO: use NewQuery here, not plain sql
query := fmt.Sprintf(
"UPDATE %s SET %s = %s WHERE %s = %s",
fk.foreignTable, fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 2, 1),
)
_, err = tx.Exec(query, primaryID, secondaryID)
if err != nil {
return errors.Err(err)
}
}
return checkMerge(tx, foreignKeys)
}
func deleteOneToOneConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
query := fmt.Sprintf(
"SELECT COUNT(*) FROM %s WHERE %s IN (%s)",
conflict.table, conflict.objectIdColumn,
strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
)
var count int
err := tx.QueryRow(query, primaryID, secondaryID).Scan(&count)
if err != nil {
return errors.Err(err)
}
if count > 2 {
return errors.Err("it should not be possible to have more than two rows here")
} else if count != 2 {
return nil // no conflicting rows
}
query = fmt.Sprintf(
"DELETE FROM %s WHERE %s = %s",
conflict.table, conflict.objectIdColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
)
_, err = tx.Exec(query, secondaryID)
return errors.Err(err)
}
func deleteOneToManyConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
conflictingColumns := strmangle.SetComplement(conflict.columns, []string{conflict.objectIdColumn})
query := fmt.Sprintf(
"SELECT %s FROM %s WHERE %s IN (%s) GROUP BY %s HAVING count(distinct %s) > 1",
strings.Join(conflictingColumns, ","), conflict.table, conflict.objectIdColumn,
strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
strings.Join(conflictingColumns, ","), conflict.objectIdColumn,
)
// The query parameters are the two object IDs being checked for conflicting rows.
rows, err := tx.Query(query, primaryID, secondaryID)
if err != nil {
return errors.Err(err)
}
// Since we don't know in advance how many columns the query returns, we have to dynamically assign them to be
// used in the delete query.
colNames, err := rows.Columns()
if err != nil {
return errors.Err(err)
}
// Each row returned by the query is a conflicting row that must be removed. Store each row's keys for the delete below.
var rowsToRemove = [][]interface{}(nil)
for rows.Next() {
//Set pointers for dynamic scan
iColPtrs := make([]interface{}, len(colNames))
for i := 0; i < len(colNames); i++ {
s := string("")
iColPtrs[i] = &s
}
//Dynamically scan n columns
err = rows.Scan(iColPtrs...)
if err != nil {
return errors.Err(err)
}
//Grab scanned values for query arguments
iCol := make([]interface{}, len(colNames))
for i, col := range iColPtrs {
x := col.(*string)
iCol[i] = *x
}
rowsToRemove = append(rowsToRemove, iCol)
}
defer rows.Close()
//This query will adjust dynamically depending on the number of conflicting keys, adding AND expressions for each
// key to ensure the right conflicting rows are deleted.
query = fmt.Sprintf(
"DELETE FROM %s %s",
conflict.table,
"WHERE "+strings.Join(conflict.columns, " = ? AND ")+" = ?",
)
// There can be multiple conflicting rows between the two object IDs. The SELECT query above collected each row's
// column keys; delete those rows here in a loop.
for _, rowToDelete := range rowsToRemove {
rowToDelete = append(rowToDelete, secondaryID)
_, err = tx.Exec(query, rowToDelete...)
if err != nil {
return errors.Err(err)
}
}
return nil
}
func checkMerge(tx boil.Executor, foreignKeys []foreignKey) error {
uniqueColumns := []interface{}{}
uniqueColumnNames := map[string]bool{}
handledTablesColumns := map[string]bool{}
for _, fk := range foreignKeys {
handledTablesColumns[fk.foreignTable+"."+fk.foreignColumn] = true
if _, ok := uniqueColumnNames[fk.foreignColumn]; !ok {
uniqueColumns = append(uniqueColumns, fk.foreignColumn)
uniqueColumnNames[fk.foreignColumn] = true
}
}
q := fmt.Sprintf(
`SELECT table_name, column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA=DATABASE() AND column_name IN (%s)`,
strmangle.Placeholders(dialect.IndexPlaceholders, len(uniqueColumns), 1, 1),
)
rows, err := tx.Query(q, uniqueColumns...)
defer rows.Close()
if err != nil {
return errors.Err(err)
}
for rows.Next() {
var tableName string
var columnName string
err = rows.Scan(&tableName, &columnName)
if err != nil {
return errors.Err(err)
}
if _, exists := handledTablesColumns[tableName+"."+columnName]; !exists {
return errors.Err("missing merge for " + tableName + "." + columnName)
}
}
return nil
}

View file
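deleteOneToManyConflictsBeforeMerge above scans rows whose column count is only known at runtime by pointing every column at a string. A standalone sketch of that dynamic-scan idiom (scanAllAsStrings is an illustrative name, not part of the codebase):

func scanAllAsStrings(rows *sql.Rows) ([][]string, error) {
	cols, err := rows.Columns()
	if err != nil {
		return nil, err
	}
	var out [][]string
	for rows.Next() {
		vals := make([]string, len(cols))
		ptrs := make([]interface{}, len(cols))
		for i := range vals {
			ptrs[i] = &vals[i] // one destination pointer per column
		}
		if err := rows.Scan(ptrs...); err != nil {
			return nil, err
		}
		out = append(out, vals)
	}
	return out, rows.Err()
}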

@ -1,26 +1,10 @@
// M type is for providing columns and column values to UpdateAll.
type M map[string]interface{}
// foreignKey connects two tables. When merging records, foreign keys from the secondary record must
// be reassigned to the primary record.
type foreignKey struct {
foreignTable string
foreignColumn string
}
// conflictingUniqueKey records a merge conflict. If two rows exist with the same value in the
// conflicting column for two records being merged, one row must be deleted.
type conflictingUniqueKey struct {
table string
objectIdColumn string
columns []string
}
// ErrSyncFail occurs during insert when the record could not be retrieved in
// order to populate default value information. This usually happens when LastInsertId
// fails or there was a primary key configuration that was not resolvable.
var ErrSyncFail = errors.Base("{{.PkgName}}: failed to synchronize data after insert")
var ErrSyncFail = errors.New("{{.PkgName}}: failed to synchronize data after insert")
type insertCache struct {
query string

View file
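ErrSyncFail is a sentinel value, so callers detect it by identity rather than by message. A short sketch, assuming the generated package is imported as models and errors is github.com/pkg/errors (in the Wrap-based variant):

if err := user.Insert(db); err != nil {
	if errors.Cause(err) == models.ErrSyncFail {
		// The row was inserted, but its defaults could not be read back; reload explicitly.
		return user.Reload(db)
	}
	return err
}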

@ -25,17 +25,17 @@ func (m *mssqlTester) setup() error {
m.testDBName = randomize.StableDBName(m.dbName)
if err = m.dropTestDB(); err != nil {
return errors.Err(err)
return err
}
if err = m.createTestDB(); err != nil {
return errors.Err(err)
return err
}
createCmd := exec.Command("sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass, "-d", m.testDBName)
f, err := os.Open("tables_schema.sql")
if err != nil {
return errors.Prefix("failed to open tables_schema.sql file", err)
return errors.Wrap(err, "failed to open tables_schema.sql file")
}
defer f.Close()
@ -43,12 +43,12 @@ func (m *mssqlTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxMSSQLkey, f)
if err = createCmd.Start(); err != nil {
return errors.Prefix("failed to start sqlcmd command", err)
return errors.Wrap(err, "failed to start sqlcmd command")
}
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Prefix("failed to wait for sqlcmd command", err)
return errors.Wrap(err, "failed to wait for sqlcmd command")
}
return nil
@ -92,7 +92,7 @@ func (m *mssqlTester) teardown() error {
}
if err := m.dropTestDB(); err != nil {
return errors.Err(err)
return err
}
return nil
@ -110,7 +110,7 @@ func (m *mssqlTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return errors.Err(err)
return err
}
return nil

View file

@ -30,14 +30,14 @@ func (m *mysqlTester) setup() error {
m.testDBName = randomize.StableDBName(m.dbName)
if err = m.makeOptionFile(); err != nil {
return errors.Prefix("couldn't make option file", err)
return errors.Wrap(err, "couldn't make option file")
}
if err = m.dropTestDB(); err != nil {
return errors.Err(err)
return err
}
if err = m.createTestDB(); err != nil {
return errors.Err(err)
return err
}
dumpCmd := exec.Command("mysqldump", m.defaultsFile(), "--no-data", m.dbName)
@ -48,22 +48,22 @@ func (m *mysqlTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxMySQLkey, r)
if err = dumpCmd.Start(); err != nil {
return errors.Prefix("failed to start mysqldump command", err)
return errors.Wrap(err, "failed to start mysqldump command")
}
if err = createCmd.Start(); err != nil {
return errors.Prefix("failed to start mysql command", err)
return errors.Wrap(err, "failed to start mysql command")
}
if err = dumpCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Prefix("failed to wait for mysqldump command", err)
return errors.Wrap(err, "failed to wait for mysqldump command")
}
w.Close() // After dumpCmd is done, close the write end of the pipe
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Prefix("failed to wait for mysql command", err)
return errors.Wrap(err, "failed to wait for mysql command")
}
return nil
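
The dump/restore dance in this hunk is a standard two-command pipeline: mysqldump writes into one end of a pipe, mysql reads from the other, and the write end is closed once the dump finishes so the reader sees EOF. A stripped-down sketch of the same pattern, without the foreign-key filtering or the option file, and with hypothetical database names:

package example

import (
	"io"
	"os/exec"
)

// copySchema pipes `mysqldump --no-data` for one database into `mysql` for another.
func copySchema() error {
	dump := exec.Command("mysqldump", "--no-data", "mydb") // hypothetical source db
	load := exec.Command("mysql", "mydb_test")             // hypothetical target db

	r, w := io.Pipe()
	dump.Stdout = w
	load.Stdin = r

	if err := dump.Start(); err != nil {
		return err
	}
	if err := load.Start(); err != nil {
		return err
	}
	if err := dump.Wait(); err != nil {
		return err
	}
	w.Close() // after the dump finishes, close the write end so mysql sees EOF
	return load.Wait()
}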
@ -87,7 +87,7 @@ func (m *mysqlTester) defaultsFile() string {
func (m *mysqlTester) makeOptionFile() error {
tmp, err := ioutil.TempFile("", "optionfile")
if err != nil {
return errors.Prefix("failed to create option file", err)
return errors.Wrap(err, "failed to create option file")
}
isTCP := false
@ -95,7 +95,7 @@ func (m *mysqlTester) makeOptionFile() error {
if os.IsNotExist(err) {
isTCP = true
} else if err != nil {
return errors.Prefix("could not stat m.host", err)
return errors.Wrap(err, "could not stat m.host")
}
fmt.Fprintln(tmp, "[client]")
@ -139,7 +139,7 @@ func (m *mysqlTester) teardown() error {
}
if err := m.dropTestDB(); err != nil {
return errors.Err(err)
return err
}
return os.Remove(m.optionFile)
@ -159,7 +159,7 @@ func (m *mysqlTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return errors.Err(err)
return err
}
return nil

View file

@ -33,14 +33,14 @@ func (p *pgTester) setup() error {
p.testDBName = randomize.StableDBName(p.dbName)
if err = p.makePGPassFile(); err != nil {
return errors.Err(err)
return err
}
if err = p.dropTestDB(); err != nil {
return errors.Err(err)
return err
}
if err = p.createTestDB(); err != nil {
return errors.Err(err)
return err
}
dumpCmd := exec.Command("pg_dump", "--schema-only", p.dbName)
@ -53,22 +53,22 @@ func (p *pgTester) setup() error {
createCmd.Stdin = newFKeyDestroyer(rgxPGFkey, r)
if err = dumpCmd.Start(); err != nil {
return errors.Prefix("failed to start pg_dump command", err)
return errors.Wrap(err, "failed to start pg_dump command")
}
if err = createCmd.Start(); err != nil {
return errors.Prefix("failed to start psql command", err)
return errors.Wrap(err, "failed to start psql command")
}
if err = dumpCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Prefix("failed to wait for pg_dump command", err)
return errors.Wrap(err, "failed to wait for pg_dump command")
}
w.Close() // After dumpCmd is done, close the write end of the pipe
if err = createCmd.Wait(); err != nil {
fmt.Println(err)
return errors.Prefix("failed to wait for psql command", err)
return errors.Wrap(err, "failed to wait for psql command")
}
return nil
@ -90,7 +90,7 @@ func (p *pgTester) runCmd(stdin, command string, args ...string) error {
fmt.Println("failed running:", command, args)
fmt.Println(stdout.String())
fmt.Println(stderr.String())
return errors.Err(err)
return err
}
return nil
@ -108,7 +108,7 @@ func (p *pgTester) pgEnv() []string {
func (p *pgTester) makePGPassFile() error {
tmp, err := ioutil.TempFile("", "pgpass")
if err != nil {
return errors.Prefix("failed to create option file", err)
return errors.Wrap(err, "failed to create option file")
}
fmt.Fprintf(tmp, "%s:%d:postgres:%s", p.host, p.port, p.user)
@ -145,12 +145,12 @@ func (p *pgTester) dropTestDB() error {
func (p *pgTester) teardown() error {
var err error
if err = p.dbConn.Close(); err != nil {
return errors.Err(err)
return err
}
p.dbConn = nil
if err = p.dropTestDB(); err != nil {
return errors.Err(err)
return err
}
return os.Remove(p.pgPassFile)

View file

@ -143,5 +143,5 @@ func validateConfig(driverName string) error {
).Check()
}
return errors.Err("not a valid driver name")
return errors.New("not a valid driver name")
}